diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 36edd03e..7030d53d 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -22,22 +22,24 @@ jobs: ['ASTVersion'] ['Modules']['a.b/c']['Dependencies']['a.b/c'] ['Modules']['a.b/c/cmdx']['Dependencies']['a.b/c/cmdx'] + ['NameToFile'] + ['NameToLocations'] steps: - name: Checkout pull request code uses: actions/checkout@v4 with: - path: 'pr_repo' + path: "pr_repo" - name: Checkout main branch code uses: actions/checkout@v4 with: - ref: 'main' - path: 'main_repo' + ref: "main" + path: "main_repo" - name: Setup Go environment uses: actions/setup-go@v5 with: - go-version: '1.22' + go-version: "1.22" cache-dependency-path: | main_repo/go.sum pr_repo/go.sum @@ -51,18 +53,18 @@ jobs: - name: Setup Python environment uses: actions/setup-python@v5 with: - python-version: '3.11' + python-version: "3.11" - name: Setup JDK 21 uses: actions/setup-java@v4 with: - java-version: '21' - distribution: 'temurin' + java-version: "21" + distribution: "temurin" - name: Setup Node.js uses: actions/setup-node@v4 with: - node-version: '22' + node-version: "22" - name: Compile both binaries run: | @@ -82,8 +84,7 @@ jobs: echo "JDTLS_ROOT_PATH=$(realpath ./main_repo/lang/java/lsp/jdtls/jdt-language-server-*)" >> $GITHUB_ENV - name: Run OLD abcoder - run: - OUTDIR=out_old ABCEXE=./abcoder_old ./main_repo/script/run_testdata.sh all + run: OUTDIR=out_old ABCEXE=./abcoder_old ./main_repo/script/run_testdata.sh all - name: Reset dependencies run: | @@ -95,8 +96,7 @@ jobs: OUTDIR=out_new ABCEXE=./abcoder_new ./pr_repo/script/run_testdata.sh first - name: Run NEW abcoder - run: - OUTDIR=out_new ABCEXE=./abcoder_new ./pr_repo/script/run_testdata.sh all + run: OUTDIR=out_new ABCEXE=./abcoder_new ./pr_repo/script/run_testdata.sh all - name: Upload output directories uses: actions/upload-artifact@v4 diff --git a/.gitignore b/.gitignore index 61573efe..5963f29c 100644 --- a/.gitignore +++ 
b/.gitignore @@ -79,4 +79,5 @@ tools !testdata/asts/*.json -.claude/ \ No newline at end of file +.claude/ +!internal/cmd/assets/.claude/ diff --git a/README.md b/README.md index eab158ee..e09d8d1b 100644 --- a/README.md +++ b/README.md @@ -118,6 +118,45 @@ Start coding(sub-agent) ─────────→ Execute Implementation > Watch the demo video [here](https://github.com/cloudwego/abcoder/pull/141) +## Use ABCoder as a Skill + +The **Skill** interface provides native Claude Code integration without MCP protocol overhead. It uses Claude Code's built-in skill system for a more streamlined workflow. + +### Setup + +The Skill is automatically configured when you run `abcoder init-spec`. The skill definitions are located in `internal/cmd/assets/.claude/skills/`. + +### Available Tools + +| Tool | Description | +|------|-------------| +| `list_repos` | List all available repositories | +| `tree_repo` | Get repository file structure | +| `get_file_structure` | Get all symbols in a file | +| `get_file_symbol` | Get symbol details with dependencies and references | +| `search_symbol` | Search symbols by name pattern | + +### Usage Example + +```bash +# List all repositories +abcoder cli list_repos + +# Get repository file tree +abcoder cli tree_repo 'repo_name' + +# Get file structure +abcoder cli get_file_structure 'repo_name' 'path/to/file.go' + +# Get symbol details +abcoder cli get_file_symbol 'repo_name' 'path/to/file.go' 'SymbolName' + +# Search symbols +abcoder cli search_symbol 'repo_name' 'Pattern*' +``` + +For Claude Code integration, the skill tools are invoked directly via slash commands like `/abcoder:schedule`. + ## Use ABCoder as a MCP server 1. Install ABCoder: @@ -177,6 +216,44 @@ Start coding(sub-agent) ─────────→ Execute Implementation - Try to use [the recommended prompt](llm/prompt/analyzer.md) and combine planning/memory tools like [sequential-thinking](https://github.com/modelcontextprotocol/servers/tree/main/src/sequentialthinking) in your AI agent. 
+### Skill vs MCP + +ABCoder provides two integration methods with Claude Code: + +| Feature | MCP (mcp__abcoder) | Skill (skill__abcoder) | +|---------|-------------------|----------------------| +| **Invocation** | `mcp__abcoder__tool_name` | `skill__abcoder__tool_name` | +| **Definition** | MCP protocol | .claude/skills/ | +| **Use Case** | General AI agents | Claude Code workflow | +| **Auto Detection** | - | Auto-detect `current_repo` from cwd | +| **Memory Efficient** | - | Sonic lazy-load, on-demand parsing | +| **Pipeline Support** | - | `rg` filter, `jq` extract | +| **Symbol Search** | - | Regex pattern support | +| **Example** | `mcp__abcoder__get_file_symbol` | `skill__abcoder__get_file_symbol` | + +The **Skill** interface is the recommended approach for Claude Code users, providing a more streamlined workflow: + +- **Auto-detect current repo**: `list_repos` automatically detects repos that match current working directory +- **Memory efficient**: Uses Sonic for lazy JSON parsing, only loads needed data +- **Pipeline friendly**: Output can be piped to `rg` for filtering or `jq` for extraction +- **Regex search**: `search_symbol` supports regex patterns to precisely locate symbols + +**Pipeline Examples:** +```bash +# Filter current repo(s) only +abcoder cli list_repos | jq '.current_repo' + +# Search with regex +abcoder cli search_symbol myrepo "^Get.*User$" + +# Filter related file +abcoder cli tree_repo myrepo | rg 'related-file' + +# Filter dependencies only +abcoder cli get_file_symbol myrepo src/main.go MyFunc | jq '.node.dependencies' +``` + +For detailed usage, see [Skill Definitions](internal/cmd/assets/.claude/skills/). 
## Use ABCoder as an Agent (WIP) diff --git a/go.mod b/go.mod index 632a72fd..a20df35d 100644 --- a/go.mod +++ b/go.mod @@ -22,6 +22,7 @@ require ( github.com/spf13/cobra v1.8.1 github.com/stretchr/testify v1.10.0 github.com/vifraa/gopom v1.0.0 + golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa golang.org/x/mod v0.24.0 golang.org/x/tools v0.32.0 ) @@ -89,7 +90,6 @@ require ( github.com/yargevad/filepathx v1.0.0 // indirect github.com/yosida95/uritemplate/v3 v3.0.2 // indirect golang.org/x/arch v0.14.0 // indirect - golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa // indirect golang.org/x/net v0.39.0 // indirect golang.org/x/sync v0.13.0 // indirect golang.org/x/sys v0.33.0 // indirect diff --git a/idl/get_file_structure.proto b/idl/get_file_structure.proto new file mode 100644 index 00000000..aa9b785f --- /dev/null +++ b/idl/get_file_structure.proto @@ -0,0 +1,52 @@ +// get_file_structure IDL + +syntax = "proto3"; + +package abcoder; + +// Request +message GetFileStructReq { + string repo_name = 1; // the name of the repository (output of list_repos tool) + string file_path = 2; // relative file path (output of get_repo_structure tool, e.g., 'src/main.go') +} + +// Response +message GetFileStructResp { + FileStruct file_struct = 1; + string error = 2; // optional +} + +message FileStruct { + string file_path = 1; // the path of the file + string mod_path = 2; // optional, the module path + string pkg_path = 3; // optional, the package path + repeated Import imports = 4; // optional, the imports of the file + repeated NodeStruct nodes = 5; // optional, the node structs of the file +} + +message Import { + string path = 1; // import path + string alias = 2; // optional, import alias +} + +message NodeStruct { + string mod_path = 1; // optional, the module path + string pkg_path = 2; // optional, the package path + string name = 3; // the name of the node + string type = 4; // optional, the type of the node + string signature = 5; // optional, the func signature 
of the node (omitted when nodes > 500) + string file = 6; // optional, the file path of the node + int32 line = 7; // optional, the line of the node + string codes = 8; // optional, the codes of the node + repeated NodeID dependencies = 9; // optional, the dependencies of the node + repeated NodeID references = 10; // optional, the references of the node + repeated NodeID implements = 11; // optional, the implements of the node + repeated NodeID groups = 12; // optional, the groups of the node + repeated NodeID inherits = 13; // optional, the inherits of the node +} + +message NodeID { + string mod_path = 1; // module path of the node (from get_repo_structure) + string pkg_path = 2; // package path of the node (from get_repo_structure) + string name = 3; // name of the node (from get_package_structure or get_file_structure) +} diff --git a/idl/get_file_symbol.proto b/idl/get_file_symbol.proto new file mode 100644 index 00000000..ae5b4593 --- /dev/null +++ b/idl/get_file_symbol.proto @@ -0,0 +1,39 @@ +// get_file_symbol IDL + +syntax = "proto3"; + +package abcoder; + +// Request +message GetFileSymbolReq { + string repo_name = 1; // the name of the repository (output of list_repos tool) + string file_path = 2; // the file path (output of get_repo_structure tool) + string name = 3; // the name of the symbol (function, type, or variable) to query +} + +// Response +message GetFileSymbolResp { + FileNodeStruct node = 1; // the ast node + string error = 2; // optional, the error message +} + +// FileNodeStruct 文件节点结构(使用 FileNodeID) +message FileNodeStruct { + string name = 1; // the name of the node + string type = 2; // optional, the type of the node + string signature = 3; // optional, the func signature of the node + string file = 4; // optional, the file path of the node + int32 line = 5; // optional, the line of the node + string codes = 6; // optional, the codes of the node + repeated FileNodeID dependencies = 7; // optional, the dependencies of the node + 
repeated FileNodeID references = 8; // optional, the references of the node + repeated FileNodeID implements = 9; // optional, the implements of the node + repeated FileNodeID groups = 10; // optional, the groups of the node + repeated FileNodeID inherits = 11; // optional, the inherits of the node +} + +// FileNodeID 文件节点标识(用于 get_file_symbol 输出) +message FileNodeID { + string file_path = 1; // file path relative to repo root + string name = 2; // symbol name in the file +} diff --git a/idl/uniast.proto b/idl/uniast.proto new file mode 100644 index 00000000..55a870b2 --- /dev/null +++ b/idl/uniast.proto @@ -0,0 +1,270 @@ +// uniast.proto - UniAST Universal AST Schema +// This file defines the unified AST format for multiple languages +// Currently used by: Go Parser, Rust Parser (via rust-analyzer) + +syntax = "proto3"; + +package uniast; + +option go_package = "github.com/cloudwego/abcoder/lang/uniast"; + +// ============ Core Types ============ +// Language is a simple string: "go", "rust", "cxx", "python", "typescript", "java", "kotlin" + +// ============ Identity System ============ + +// Identity uniquely identifies a node in the AST +// Format: {ModPath}?{PkgPath}#{Name} +// Example: "github.com/user/repo?path/to/pkg#FunctionName" +message Identity { + string mod_path = 1; // module id, unique within a repo + string pkg_path = 2; // path id, unique within a module + string name = 3; // symbol id, unique within a package +} + +// FileLine represents a filename and line number +message FileLine { + string file = 1; // file path relative to repo root + int32 line = 2; // start line number (1-based) + int32 end_line = 5; // end line number (1-based) [新增] + int32 start_offset = 3; // start byte offset in file + int32 end_offset = 4; // end byte offset in file +} + +// ============ Relation System ============ + +// RelationKind represents the type of relationship between nodes +// Using string for JSON compatibility: "Dependency", "Implement", "Inherit", "Group" 
+message RelationKind { + string value = 1; +} + +// Relation represents a relationship between two nodes +message Relation { + string kind = 1; // kind of relation as string: "Dependency", "Implement", "Inherit", "Group" + // Flat identity fields (for JSON compatibility with Go implementation) + // NOTE: Use uppercase first char so serde(camelCase) produces PascalCase + string ModPath = 6; + string PkgPath = 7; + string Name = 8; + int32 line = 3; // start line-offset relative to current node's codes + string desc = 4; // information about this relation + string codes = 5; // related codes representing this relation +} + +// ============ Repository ============ + +// Repository represents a parsed codebase +message Repository { + string id = 1; // repository name + string ast_version = 2; // uniast version + string tool_version = 3; // abcoder version + string path = 4; // repo absolute path + RepoVersion repo_version = 5; // repository version info + map modules = 6; // module name => module + map graph = 7; // node id => node + + // [新增] name → files 反向索引 + // 加速 search_symbol API,无需独立 .idx 文件 + // 从 File.function_names/type_names/var_names 推导,不冗余 + map name_to_locations = 8; +} + +// NameLocations represents all locations of a symbol name +// [新增] 用于反向索引 name → files +message NameLocations { + repeated string files = 1; // 包含该 name 的文件列表(去重) +} + +// Repository version info +// NOTE: Proto3 JSON conversion uses camelCase by default +// So commit_hash -> CommitHash, parse_time -> ParseTime +message RepoVersion { + string commit_hash = 1; // Git commit hash (JSON: CommitHash) + string parse_time = 2; // Parse timestamp RFC3339 (JSON: ParseTime) +} + +// ============ Module ============ + +// Module represents a single module/package (e.g., Go module, Rust crate) +message Module { + string language = 1; // language: "go", "rust", "cxx", "python", "typescript", "java", "kotlin" + string version = 2; // module version (e.g., "1.0.0") + string name = 3; // module 
name + string dir = 4; // relative path to repo root + map packages = 5; // package import path => package + map dependencies = 6; // module name => module_path@version + map files = 7; // relative path => file info + repeated string load_errors = 8; // loading errors + string compress_data = 9; // module compress info +} + +// Node represents a symbol node in the graph +// NOTE: For JSON compatibility with Go implementation, use flat format: +// - mod_path, pkg_path, name (扁平 Identity) +// - type (string: "FUNC", "TYPE", "VAR") +// - dependencies, references, implements, inherits, groups (relations) +message Node { + // Flat identity fields (matching Go's embedded Identity) + string mod_path = 1; + string pkg_path = 2; + string name = 3; + + // Node type as string: "FUNC", "TYPE", "VAR" + string type = 4; + + // Unified relation fields + repeated Relation dependencies = 10; + repeated Relation references = 11; + repeated Relation implements = 12; + repeated Relation inherits = 13; + repeated Relation groups = 14; + +} + +// ============ Package ============ + +message Package { + bool is_main = 1; + bool is_test = 2; + string pkg_path = 3; // package import path + map functions = 4; // function name => function + map types = 5; // type name => type + map vars = 6; // var name => var + string compress_data = 7; // package compress info +} + +// ============ File ============ + +// File represents a source file +// Lightweight index: only stores names, full data in Package/Graph +message File { + string path = 1; // relative path to repo root + repeated Import imports = 2; // import statements + + // Identity fields (matching Module/Package hierarchy) + string mod_path = 3; // module path + string pkg_path = 4; // package path + + // Lightweight symbol name index (reference only, full data in Package) + // [新增] Accelerates get_file_structure API + repeated string function_names = 5; + repeated string type_names = 6; + repeated string var_names = 7; +} + +// Import 
represents an import/use statement +message Import { + string alias = 1; // optional alias + string path = 2; // raw import path +} + +// ============ Function ============ + +// Function represents a function or method +// NOTE: Using flat fields for JSON compatibility with Go implementation +message Function { + bool exported = 1; // if the function is exported + bool is_method = 2; // if it's a method (has receiver) + bool is_interface_method = 3; // if it's an interface method stub + + // Flat identity fields (matching Go's embedded Identity) + string mod_path = 4; + string pkg_path = 5; + string name = 6; + + // Flat FileLine fields + string file = 7; + int32 line = 8; + + string content = 9; // full function source code + string signature = 10; // function signature + + Receiver receiver = 11; // method receiver (if is_method) + repeated Relation params = 12; // function parameters + repeated Relation results = 13; // function return values +} + +// Receiver represents a method receiver +message Receiver { + bool is_pointer = 1; + Identity type = 2; // receiver type identity +} + +// ============ Type ============ + +// Type represents a struct, enum, trait, or interface +message Type { + bool exported = 1; // if the type is exported + TypeKind type_kind = 2; // struct, interface, enum, typedef + Identity identity = 3; // unique identity + FileLine file_line = 4; // location in source + string content = 5; // type declaration source + + // For struct: field types + repeated Relation sub_struct = 6; // field types + repeated Relation inline_struct = 7; // inherited/anonymous fields + + // Methods defined on this type + map methods = 8; + + // Legacy: use implements relation instead + // repeated Identity implements = 9; + + // LLM compress result + string compress_data = 10; +} + +// TypeKind represents the kind of type +enum TypeKind { + STRUCT = 0; + INTERFACE = 1; + TYPEDEF = 2; + ENUM = 3; +} + +// ============ Variable ============ + +// Var represents a 
variable or constant +message Var { + bool exported = 1; // if the variable is exported + bool is_const = 2; // if it's a constant + bool is_pointer = 3; // if the type is a pointer + Identity identity = 4; // unique identity + FileLine file_line = 5; // location in source + Identity type = 6; // type of the variable + string content = 7; // variable declaration source + + // Legacy: use relations instead + // repeated Dependency dependencies = 8; + + // For enum variants or grouped constants + repeated Identity groups = 9; + + // LLM compress result + string compress_data = 10; +} + +// ============ Rust-Specific Extensions ============ + +// Rust only: Trait represents a Rust trait +message Trait { + Type base = 1; // embed Type + repeated Identity super_traits = 2; // supertraits + map associated_items = 3; // associated items +} + +// Rust only: Impl represents an impl block +message Impl { + bool is_trait_impl = 1; // if it's a trait impl + Identity trait = 2; // trait being implemented (if any) + Identity self_type = 3; // self type + repeated Identity items = 4; // items in the impl block +} + +// Rust only: Macro represents a macro definition +message Macro { + Identity identity = 1; + FileLine file_line = 2; + string body = 3; // macro body +} diff --git a/internal/cmd/assets/.claude/CLAUDE.md b/internal/cmd/assets/.claude/CLAUDE.md index 96679ce3..b383d799 100644 --- a/internal/cmd/assets/.claude/CLAUDE.md +++ b/internal/cmd/assets/.claude/CLAUDE.md @@ -1,76 +1,54 @@ # AST-Driven Coding -你是 AST-Driven Coder,通过整合 `mcp__abcoder` 和 `mcp__sequential_thinking`,为用户提供无幻觉上下文、模糊需求质询、诚实推理和精确执行。 +你是 AST-Driven Coder,通过整合 `skill__abcoder` 和 `mcp__sequential_thinking`,为用户提供无幻觉上下文、模糊需求质询、诚实推理和精确执行。 -## MCP 工具使用体系 +## Tone Style +- 保持诚实:不为"友善"而含糊技术缺陷判断。 +- 面向用户,隐藏实现细节,仅透出必要API出入参数 +- 保持简洁;保持风格一致 -### 工具优先级决策 -**代码分析优先级**: `mcp__abcoder` > Read/Search +## Never break userspace +- 任何导致现有程序崩溃的改动都是bug,无论多么"理论正确" +- 内核的职责是服务用户,而不是教育用户 +- 向后兼容性是不可侵犯的 + +## 工具优先级决策 
+**代码分析优先级**: `skill__abcoder` > Read/Search | 工具 | 适用场景 | 核心价值 | |------|----------|----------| -| `mcp__abcoder` | 本地代码深度分析 | UniAST + LSP无幻觉理解代码结构、类型信息、调用链。优于Read/Search | +| `skill__abcoder` | 本地代码深度分析 | UniAST + LSP无幻觉理解代码结构、类型信息、调用链。优于Read/Search | | `mcp__sequential_thinking` | 复杂问题分解 | 多步骤问题的系统化思考 | + ## ABCoder SOP 1. 问题分析: - 基于用户问题分析相关关键词 - - MUST 使用 `list_repos` 确认`repo_name` + - MUST 使用 `list_repos` 确认 `repo_name` -2. 代码定位 (repo→package→node→ast node relationship): - - 2.1 定位package: 基于 `get_repo_structure` 返回的package list选择目标package - - 2.2 定位node: 通过 `get_package_structure` 返回的file信息,确认目标node;无法确认时,调用 `get_files_structure` - - 2.3 确认ast node relationship: 递归调用 `get_ast_node` 获取node详细(dependencies, references, inheritance, implementation, grouping) +2. 代码定位 (repo→file→node→ast symbol relationship): + - 2.1 定位file: 基于 `tree_repo` 返回的file list选择目标file + - 2.2 定位symbol: 通过 `get_file_structure` 返回的file信息,确认目标symbol name + - 2.3 确认ast symbol relationship: 调用 `get_file_symbol` 获取symbol详细(dependencies, references);根据depends/refers的 递归调用`get_file_symbol` ### 开发中的 abcoder 使用 -- 编写前:使用 `get_package_structure` 分析相似代码模式,`get_ast_node` 学习项目最佳实践 +- 编写前:使用 `search_symbol`, `get_file_symbol` 分析相似代码模式、学习项目最佳实践; IMPORTANT: MUST 输出 数据流转API, 对齐所有 Input/Output IDL和类型 ## 分阶段开发理念 - -IMPORTANT: 开发前,MUST 与用户对齐CODE_TASK需求;对于CODE_TASK中不明确的任务(例如任务需要的SDK Method定义、返回值的JSON/IDL),质询用户 -IMPORTANT: 开始开发前,阐述此次CODE_TASK的调用链路、相关SDK Method定义、cURL JSON定义 -### 开发阶段 -1. MVP阶段:核心功能可工作,基本类型安全 -2. 完善阶段:错误处理、边界情况、测试覆盖 -3. 
优化阶段:性能优化、代码重构、文档完善 - -## 代码质量标准 - -### 实现要求(按优先级) -MUST: -- Never 使用简化/Mock实现,使用真实SDK/cURL -- 类型安全:核心逻辑必须有明确类型定义 -- 基本错误处理:处理可预见的异常情况 - -SHOULD: -- 完整的边界条件处理 -- 性能敏感场景的优化 -- 复杂逻辑的注释说明 - -COULD: -- 100%遵循SOLID编码规范 -- 极致的性能优化 - -### 验证标准 -- 关键路径和边界条件 MUST 有测试 -- 通过 linter 和类型检查 -- 手动验证主要用户场景 +IMPORTANT: 开发前,MUST 与用户对齐CODE_TASK需求;对于CODE_TASK中不明确的任务(例如任务需要的相关API SDK Method cURL的IDL和类型),质询用户 +IMPORTANT: 开始开发前,阐述此次CODE_TASK的数据流转、调用链路、相关API SDK Method cURL的IDL和类型 ## 用户协作模式 | 用户行为 | 响应策略 | |----------|----------| -| 模糊需求 | 使用 `mcp__sequential_thinking` 澄清,提供具体选项 | -| BUG修复 | 使用 `mcp__abcoder__get_ast_node` 详细分析,根本解决 | -| 重构替换 | 使用 `semgrep` 和 `comby` 结构化搜索替换 | -| 代码分析请求 | MUST 使用 `mcp__abcoder` SOP | +| 模糊需求 | 使用 `AskUserQuestion` 澄清,提供具体选项 | +| BUG修复 | 使用 `skill__abcoder__get_file_symbol` 详细分析,根本解决 | +| 代码分析请求 | MUST 使用 `skill__abcoder` SOP | ## 执行要求 -1. 绝不假设 - 任何不确定代码,MUST 通过`mcp__abcoder__get_ast_node`工具验证 -2. 工具链整合 - 充分利用ABCoder等工具提升效率 -3. 质量内建 - 代码质量要求融入每个环节 -4. 渐进交付 - 复杂任务分解为可验证的小步骤 - -- **使用SubAgent时提醒使用ABCoder** - 当需要使用subAgent(如@agent-Explore、@agent-coding-executor)进行代码分析时,应该提醒SubAgent使用`mcp__abcoder__get_ast_node`以获得更准确的分析结果 -- **Never 说英语**:MUST 使用中文 +1. 绝不假设 - 任何不确定代码,MUST 通过`skill__abcoder__get_file_symbol`工具验证 +2. Demo-First - 任何新引入的外部库API,优先编写Demo代码, 验证数据流转,调试通过后根据项目上下文编写TDD;最后更新项目代码 +3. 工具链整合 - 充分利用ABCoder等工具提升效率 +4. 数据流转 - 使用一切方法(ABCoder, `go doc`, ...)明确数据流转API的Input/Output IDL和类型; 明确后, 才能更新项目代码 diff --git a/internal/cmd/assets/.claude/commands/abcoder/schedule.md b/internal/cmd/assets/.claude/commands/abcoder/schedule.md index be81cd09..30ae3ce9 100644 --- a/internal/cmd/assets/.claude/commands/abcoder/schedule.md +++ b/internal/cmd/assets/.claude/commands/abcoder/schedule.md @@ -1,34 +1,71 @@ --- name: ABCoder: Schedule -description: Design implementation plan using mcp__abcoder analysis and code exploration. +description: Design implementation plan using skill__abcoder analysis and code exploration. 
category: ABCoder tags: [abcoder, schedule, planning] --- -使用mcp__abcoder分析相关仓库(下钻到mcp__abcoder__get_ast_node查看细节),帮助用户设计实现方案。 -**Guardrails** -- 最大化复用项目已有功能,避免重复造轮子。 +使用`skill__abcoder`分析相关仓库(下钻到`skill__abcoder__get_file_symbol`查看细节),帮助用户设计实现方案。 + +## Guardrails +IMPORTANT: 默认保持向后兼容;但是如果新需求和旧代码存在冲突,请你清晰具体告知,质询用户 +IMPORTANT: 制定系统、根本性的解决方案 +- 最大化复用项目已有功能、优先使用外部成熟库;不重复造轮子。 +- 学习项目/外部库已有最佳实践;保持风格一致。 - 优先采用直接、最小改动的实现方式,只有在用户明确要求时才增加复杂度。 -- 严格限制修改影响面在所请求的结果范围内。 -- 找出任何模糊或含糊不清的细节,并在修改文件前提出必要的后续问题。 -- 在 Schedule 阶段禁止编写代码,禁止使用 agent。 +- 严格限制修改影响面在必要但**全面**范围。 +- 找出任何模糊或含糊不清的细节,并在`abcoder:spec`前提出必要的后续问题。 +- 在Schedule阶段禁止编写代码,禁止使用agent。 +IMPORTANT: MUST 从`skill__abcoder__list_repos`开始, 下钻到`skill__abcoder__get_file_symbol` + +在开始任何分析前,先问自己: +1. "这是个真问题还是臆想出来的?" - 拒绝过度设计 +2. "有更简单的方法吗?" - 永远寻找最简方案 +3. "会破坏什么吗?" - 向后兼容是铁律 + +结构化问题分解思考 + 第一层:数据结构分析. "Bad programmers worry about the code. Good programmers worry about data structures." + - 核心数据IDL是什么?它们的关系如何?类型是否兼容 + - 数据流向哪里?谁拥有它?谁修改它? + - 有没有不必要的数据复制或转换? + 第二层:特殊情况识别. "好代码没有特殊情况" + - 找出所有 if/else 分支 + - 哪些是真正的业务逻辑?哪些是糟糕设计的补丁? + - 能否重新设计数据结构来消除这些分支? + 第三层:复杂度审查. "如果实现需要超过3层缩进,重新设计它" + - 这个功能的本质是什么?(一句话说清) + - 当前方案用了多少概念来解决? + - 能否减少到一半?再少一半? + 第四层:破坏性分析. "Never break userspace" - 向后兼容是铁律 + - 列出所有可能受影响的现有功能 + - 哪些依赖会被破坏? + - 如何在不破坏任何东西的前提下改进? + 第五层:实用性验证. "Theory and practice sometimes clash. Theory loses. Every single time." + - 这个问题在生产环境真实存在吗? + - 有多少用户真正遇到这个问题? + - 解决方案的复杂度是否与问题的严重性匹配? + +## Style +- 面向E2E用户,隐藏实现细节(一句话总结) +- 仅暴露必要的SDK/API/Method出入参数 +- 保持简洁, 回复保持在500字以内;除非用户明确要求,不要包含代码函数体,仅透出signature、IDL数据流向 **Steps** Track these steps as TODOs and complete them one by one. -1. 从 `mcp__abcoder__get_repo_structure` 开始,获取目标仓库结构。 -2. 根据任务描述,定位相关的 package。 -3. 使用 `mcp__abcoder__get_package_structure` 获取 package 内的文件和节点列表。 -4. 使用 `mcp__abcoder__get_ast_node` 深入分析相关代码节点,理解现有实现模式。 -5. 分析依赖关系、调用链、类型信息等。 +1. 从 `skill__abcoder__tree_repo` 开始,获取目标仓库结构。 +2. 根据任务描述,生成pattern, `skill__abcoder__search_symbol` 定位相关的 symbol。 +3. 
使用 `skill__abcoder__get_file_structure` 获取 file 所有 symbol。 +4. 使用 `skill__abcoder__get_file_symbol` 获取 symbol 源代码/dependence/reference, 使用depend/refer的file-path和name, 继续调用 `get_file_symbol`, 追溯数据流向。 +5. 分析依赖关系、数据流向调用链、类型信息等。 6. 设计实现方案,确保最大化复用已有功能、最小化改动。 7. 找出任何模糊或缺失的技术细节,并向用户提出后续问题。 8. 输出清晰的技术方案,包括修改范围、涉及的文件、关键实现步骤。 **Reference** -- `mcp__abcoder__list_repos` - 列出所有可用仓库 -- `mcp__abcoder__get_repo_structure` - 获取仓库结构(必须作为第一步) -- `mcp__abcoder__get_package_structure` - 获取 package 结构 -- `mcp__abcoder__get_file_structure` - 获取文件结构 -- `mcp__abcoder__get_ast_node` - 获取 AST 节点详情(下钻分析) +- `skill__abcoder__list_repos` - 列出所有可用仓库 +- `skill__abcoder__tree_repo` - 获取仓库结构(必须作为第一步) +- `skill__abcoder__search_symbol` - 搜索仓库的相关symbol(支持regex pattern) +- `skill__abcoder__get_file_structure` - 获取 file 的所有 symbol +- `skill__abcoder__get_file_symbol` - 获取 symbol 的 源代码/dependence/reference diff --git a/internal/cmd/assets/.claude/skills/abcoder__get_file_structure/SKILL.md b/internal/cmd/assets/.claude/skills/abcoder__get_file_structure/SKILL.md new file mode 100644 index 00000000..9d6653e7 --- /dev/null +++ b/internal/cmd/assets/.claude/skills/abcoder__get_file_structure/SKILL.md @@ -0,0 +1,46 @@ +--- +name: skill__abcoder__get_file_structure +description: skill__abcoder__get_file_structure `abcoder cli get_file_structure 'repo_name' 'file_path'` [STRUCTURE] Step 3/4: Get available symbol names of a file. Input: repo_name, file_path from `tree_repo` output. Output: symbol names with signatures. You MUST call `get_file_symbol` later. 
+--- + +Execute the get_file_structure command to examine file-level nodes: + +```bash +abcoder cli get_file_structure 'repo_name' 'file_path' +``` + +**Expected Output:** +- Complete node list with type, signature, line +- Imports for the file +- Node IDs for detailed analysis + +**Parameters:** +- `repo_name` (required): Repository name from `list_repos` +- `file_path` (required): Relative file path from `get_repo_structure` +``` +{ + "description": "[STRUCTURE] level3/4: Get file structure with node list. Input: repo_name, file_path from get_repo_structure output. Output: nodes with signatures.", + "inputSchema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "repo_name": { + "type": "string", + "description": "the name of the repository (output of list_repos tool)" + }, + "file_path": { + "type": "string", + "description": "relative file path (output of get_repo_structure tool" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "repo_name", + "file_path" + ] + }, + "name": "get_file_structure" +} +``` + +This tool is **Level 3** in the ABCoder discovery hierarchy. Next: Use [`skill__abcoder__get_file_symbol`](~/.claude/skills/skill__abcoder__get_file_symbol/SKILL.md) to get detailed code information. diff --git a/internal/cmd/assets/.claude/skills/abcoder__get_file_symbol/SKILL.md b/internal/cmd/assets/.claude/skills/abcoder__get_file_symbol/SKILL.md new file mode 100644 index 00000000..5a2cfa58 --- /dev/null +++ b/internal/cmd/assets/.claude/skills/abcoder__get_file_symbol/SKILL.md @@ -0,0 +1,74 @@ +--- +name: skill__abcoder__get_file_symbol +description: skill__abcoder_get_file_symbol `abcoder cli get_file_symbol 'repo_name' 'relative_file_path' 'symbol_name'` [ANALYSIS] Step 4/4: Get symbol's code, dependencies and references; use refer/depend's file_path and name as next `get_file_symbol` input. Input: repo_name, file_path, name. Output: codes, dependencies, references. 
You MUST call `get_file_symbol` with refers/depends file_path and name to check its code, call-chain or data-flow detail. +--- + +Execute the get_file_symbol command to get detailed symbol information: + +```bash +abcoder cli get_file_symbol 'repo_name' 'relative_file_path' 'symbol_name' +``` + +**Expected Output:** +``` +{ + "nodes": [ + { + "file_path": "string", + "name": "string", + "type": "string", + "line": "number", + "codes": "string", + "dependencies": [ + { + "file_path": "string", + "names": ["string"] + } + ], + "references": [ + { + "file_path": "string", + "names": ["string"] + } + ] + } + ] +} +``` + +**Parameters:** +- `repo_name` (required): Repository name from `list_repos` +- `file_path` (required): File path from `get_repo_structure` +- `symbol_name` (required): Name of the symbol to query +``` +{ + "description": "[ANALYSIS] level4/4: Get detailed AST node info by file path and symbol name. Output: codes, dependencies, references, implementations (all grouped by file_path).", + "inputSchema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "repo_name": { + "type": "string", + "description": "the name of the repository (output of list_repos tool)" + }, + "file_path": { + "type": "string", + "description": "the file path (output of get_repo_structure tool)" + }, + "symbol_name": { + "type": "string", + "description": "the name of the symbol (function, type, or variable) to query" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "repo_name", + "file_path", + "symbol_name" + ] + }, + "name": "get_file_symbol" +} +``` + +**Recursive Analysis:** Use this tool recursively to trace code calling chains. From the `dependencies` and `references` arrays, extract the `file_path` and `symbol_name` for related nodes, then call `get_file_symbol` again to dive deeper into the calling chain. 
diff --git a/internal/cmd/assets/.claude/skills/abcoder__list_repos/SKILL.md b/internal/cmd/assets/.claude/skills/abcoder__list_repos/SKILL.md new file mode 100644 index 00000000..4608f7d3 --- /dev/null +++ b/internal/cmd/assets/.claude/skills/abcoder__list_repos/SKILL.md @@ -0,0 +1,33 @@ +--- +name: skill__abcoder__list_repos +description: skill__abcoder__list_repos `abcoder cli list_repos` [DISCOVERY] Step 1/4: List available repositories. Always the first step in ABCoder workflow. You MUST call `tree_repo` later. 获取当前仓库名: `abcoder cli list_repos | jq .current_repo` +--- + +Execute the list_repos command to discover all available repositories: + +```bash +abcoder cli list_repos +``` + +获取当前仓库名(根据 pwd 匹配): +```bash +abcoder cli list_repos | jq .current_repo +``` + +## Workflow Context + +This tool is **Level 1** in the 4-level ABCoder discovery hierarchy: + +1. **Level 1 (This Tool)**: `list_repos` - List all repositories +2. **Level 2**: `tree_repo` - Get repository structure +3. **Level 3**: `get_file_structure` - Get file nodes details +4. **Level 4**: `get_file_symbol` - Get detailed AST node information + +## Usage Pattern +Output +``` +{ + "repo_names": ["string"], + "current_repo": "string" +} +``` diff --git a/internal/cmd/assets/.claude/skills/abcoder__search_symbol/SKILL.md b/internal/cmd/assets/.claude/skills/abcoder__search_symbol/SKILL.md new file mode 100644 index 00000000..6d31e079 --- /dev/null +++ b/internal/cmd/assets/.claude/skills/abcoder__search_symbol/SKILL.md @@ -0,0 +1,62 @@ +--- +name: skill__abcoder__search_symbol +description: skill__abcoder__search_symbol `abcoder cli search_symbol [--path ]` Search symbols in a repository by name pattern. Supports substring match, prefix match (pattern*), suffix match (*pattern), wildcard (*pattern*), and path prefix filtering (--path). You MUST call `get_file_symbol` later. 
+--- + +Execute the search_symbol command to search symbols by name: + +```bash +abcoder cli search_symbol [--path ] +``` + +## Arguments + +| Argument | Description | +|----------|-------------| +| `repo_name` | Repository name | +| `pattern` | Symbol name pattern (supports *, regex) | +| `--path` | (optional) Filter by file path prefix (e.g., `src/main/java/com/uniast/parser`) | + +## Examples + +```bash +# Substring match - search for "Get" in all symbol names +abcoder cli search_symbol myrepo Get + +# Prefix match - search for symbols starting with "Get" +abcoder cli search_symbol myrepo "Get*" + +# Suffix match - search for symbols ending with "User" +abcoder cli search_symbol myrepo "*User" + +# Wildcard match - search for symbols containing "GetUser" +abcoder cli search_symbol myrepo "*GetUser*" + +# Path filter - search symbols in specific directory +abcoder cli search_symbol myrepo "Graph" --path "src/main/java/com/uniast/parser" +``` + +## Output Format + +```json +{ + "repo_name": "string", + "pattern": "string", + "results": { + "file_path": { + "FUNC": ["function_name1", "function_name2"], + "TYPE": ["type_name"], + "VAR": ["var_name"] + } + } +} +``` + +## Notes + +A powerful search tool based on ABCoder + + Usage: - ALWAYS use `abcoder__search_symbol` for search tasks. NEVER invoke `grep` or `rg` as a Bash command. The `abcoder__search_symbol` tool has been optimized for correct permissions and access. 
+ - Supports full regex syntax (e.g., "Get*", "Domain*Controller") + - Dynamic patterns for open-ended searches requiring multiple rounds + - Pattern syntax: Uses ripgrep (not grep) - literal braces need escaping (use `interface\{\}` to find `interface{}` in Go code) diff --git a/internal/cmd/assets/.claude/skills/abcoder__tree_repo/SKILL.md b/internal/cmd/assets/.claude/skills/abcoder__tree_repo/SKILL.md new file mode 100644 index 00000000..e1c014c3 --- /dev/null +++ b/internal/cmd/assets/.claude/skills/abcoder__tree_repo/SKILL.md @@ -0,0 +1,16 @@ +--- +name: skill__abcoder__tree_repo +description: skill__abcoder__tree_repo `abcoder cli tree_repo 'repo_name' [DISCOVERY] Level 2/4: [STRUCTURE] Step 2/4: Get available file_paths of a repo. Input: repo_name from `list_repos` output. Output: available file_paths. You MUST call `get_file_structure` later. +--- + +Execute the tree_repo command to examine repository-level structure: + +```bash +abcoder cli tree_repo 'repo_name' +``` + +**Expected Output:** +- Complete repository file paths + +**Parameters:** +- `repo_name` (required): Repository name from `list_repos` output diff --git a/internal/cmd/cli/cli.go b/internal/cmd/cli/cli.go new file mode 100644 index 00000000..8aa8908e --- /dev/null +++ b/internal/cmd/cli/cli.go @@ -0,0 +1,52 @@ +// Copyright 2025 CloudWeGo Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package cli + +import ( + "github.com/spf13/cobra" +) + +var verbose bool + +// NewCliCmd returns the parent command for CLI operations. +func NewCliCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "cli", + Short: "CLI commands for AST analysis", + Long: `CLI commands for directly querying AST data without MCP protocol. + +These commands provide direct access to repository, file, and symbol information.`, + Example: `abcoder cli list-repos`, + PersistentPreRun: func(cmd *cobra.Command, args []string) { + // 解析 -v flag + v, err := cmd.Flags().GetBool("verbose") + if err == nil { + verbose = v + } + }, + } + + cmd.PersistentFlags().BoolVarP(&verbose, "verbose", "v", false, "verbose output for debugging") + + // Add subcommands + cmd.AddCommand(newListReposCmd()) + cmd.AddCommand(newTreeRepoCmd()) + cmd.AddCommand(newGetFileStructureCmd()) + cmd.AddCommand(newGetFileSymbolCmd()) + cmd.AddCommand(newExtractSymbolCmd()) + cmd.AddCommand(newSearchSymbolCmd()) + + return cmd +} diff --git a/internal/cmd/cli/extract_symbol.go b/internal/cmd/cli/extract_symbol.go new file mode 100644 index 00000000..d2c33067 --- /dev/null +++ b/internal/cmd/cli/extract_symbol.go @@ -0,0 +1,284 @@ +// Copyright 2025 CloudWeGo Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package cli + +import ( + "encoding/json" + "fmt" + "os" + "strings" + + "github.com/bytedance/sonic" + "github.com/cloudwego/abcoder/lang/utils" + "github.com/cloudwego/abcoder/lang/uniast" + "github.com/spf13/cobra" +) + +// buildNameToLocations 从 JSON 数据构建 NameToLocations +// 如果 pathFilter 不为空,则只收集匹配前缀的 file +// 返回: name -> type -> fileSet (去重) +func buildNameToLocations(data []byte, pathFilter string) (map[string]map[string]map[string]bool, error) { + // 一次性反序列化整个 Modules + var result struct { + Modules map[string]*uniast.Module `json:"modules"` + } + if err := sonic.Unmarshal(data, &result); err != nil { + return nil, err + } + + // name -> type -> files (去重) + nameToTypeFiles := make(map[string]map[string]map[string]bool) + + // 遍历所有模块 + for _, mod := range result.Modules { + // 跳过外部模块 + if mod.IsExternal() { + continue + } + + // 遍历所有包 + for _, pkg := range mod.Packages { + // 提取 Functions + for name, fn := range pkg.Functions { + if pathFilter != "" && !strings.HasPrefix(fn.File, pathFilter) { + continue + } + if nameToTypeFiles[name] == nil { + nameToTypeFiles[name] = make(map[string]map[string]bool) + } + if nameToTypeFiles[name]["FUNC"] == nil { + nameToTypeFiles[name]["FUNC"] = make(map[string]bool) + } + nameToTypeFiles[name]["FUNC"][fn.File] = true + } + + // 提取 Types + for name, typ := range pkg.Types { + if pathFilter != "" && !strings.HasPrefix(typ.FileLine.File, pathFilter) { + continue + } + if nameToTypeFiles[name] == nil { + nameToTypeFiles[name] = make(map[string]map[string]bool) + } + if nameToTypeFiles[name]["TYPE"] == nil { + nameToTypeFiles[name]["TYPE"] = make(map[string]bool) + } + nameToTypeFiles[name]["TYPE"][typ.FileLine.File] = true + } + + // 提取 Vars + for name, v := range pkg.Vars { + if pathFilter != "" && !strings.HasPrefix(v.FileLine.File, pathFilter) { + continue + } + if nameToTypeFiles[name] == nil { + nameToTypeFiles[name] = make(map[string]map[string]bool) + } + if nameToTypeFiles[name]["VAR"] == nil { + 
nameToTypeFiles[name]["VAR"] = make(map[string]bool) + } + nameToTypeFiles[name]["VAR"][v.FileLine.File] = true + } + } + } + + return nameToTypeFiles, nil +} + +// saveNameToLocations 写回 NameToLocations 到 JSON 文件 +func saveNameToLocations(repoFile string, nameToLocs map[string][]string) error { + data, err := os.ReadFile(repoFile) + if err != nil { + return err + } + + // 使用标准库 JSON 反序列化 + var result map[string]interface{} + if err := json.Unmarshal(data, &result); err != nil { + return err + } + + // 添加 NameToLocations + result["NameToLocations"] = nameToLocs + + // 重新Marshal(保持缩进格式) + prettyJSON, err := json.MarshalIndent(result, "", " ") + if err != nil { + return err + } + + // 写入 .tmp 再 rename + tmpPath := repoFile + ".tmp" + if err := utils.MustWriteFile(tmpPath, prettyJSON); err != nil { + return err + } + return os.Rename(tmpPath, repoFile) +} + +type Symbol struct { + Name string `json:"name"` + File string `json:"file"` + Type string `json:"type"` // FUNC, TYPE, VAR +} + +type ExtractResult struct { + RepoName string `json:"repo_name"` + Files map[string]map[string][]string `json:"files"` // file -> type -> names +} + +func newExtractSymbolCmd() *cobra.Command { + return &cobra.Command{ + Use: "extract_symbol ", + Short: "Extract all symbols from repo JSON", + Long: `Extract all symbol names and file paths from a repository's uniast JSON. 
+Only extracts filepath + name (no content), for use with search_node.`, + Example: `abcoder cli extract_symbol myrepo`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + astsDir, err := getASTsDir(cmd) + if err != nil { + return err + } + + repoName := args[0] + + repoFile := findRepoFile(astsDir, repoName) + if repoFile == "" { + return fmt.Errorf("repo not found: %s", repoName) + } + + data, err := os.ReadFile(repoFile) + if err != nil { + return fmt.Errorf("failed to read repo file: %w", err) + } + + // 方式1: 优先用 sonic 读取 NameToLocations + nameToLocsVal, err := sonic.Get(data, "NameToLocations") + if err == nil && nameToLocsVal.Exists() { + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] using existing NameToLocations\n") + } + + // 获取所有 name keys + nameToLocsMap, _ := nameToLocsVal.Map() + + // 转换为输出格式: file -> type -> names + files := make(map[string]map[string][]string) + for name := range nameToLocsMap { + filesVal, _ := sonic.Get(data, "NameToLocations", name, "Files") + if filesVal.Exists() { + fileList, err := filesVal.Array() + if err == nil { + for _, f := range fileList { + fileStr, _ := f.(string) + if files[fileStr] == nil { + files[fileStr] = map[string][]string{ + "FUNC": {}, + "TYPE": {}, + "VAR": {}, + } + } + // NameToLocations 不区分类型,都归为 FUNC + files[fileStr]["FUNC"] = append(files[fileStr]["FUNC"], name) + } + } + } + } + + result := ExtractResult{ + RepoName: repoName, + Files: files, + } + b, _ := json.MarshalIndent(result, "", " ") + fmt.Fprintf(os.Stdout, "%s\n", b) + return nil + } + + // 方式2: 没有 NameToLocations,遍历提取并写回 JSON + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] building NameToLocations\n") + } + + // 使用公共函数构建 + nameToTypeFiles, err := buildNameToLocations(data, "") + if err != nil { + return err + } + + // 转换为 NameToLocations 格式: name -> []file + // 拍平 type,只保留 files + nameToLocsMap := make(map[string][]string) + for name, typeFiles := range nameToTypeFiles { + fileSet := 
make(map[string]bool) + for _, files := range typeFiles { + for file := range files { + fileSet[file] = true + } + } + var fileList []string + for file := range fileSet { + fileList = append(fileList, file) + } + nameToLocsMap[name] = fileList + } + + // 写回 JSON + if err := saveNameToLocations(repoFile, nameToLocsMap); err != nil { + fmt.Fprintf(os.Stderr, "Warning: failed to save NameToLocations: %v\n", err) + } else if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] saved NameToLocations to %s\n", repoFile) + } + + // 转换为输出格式: file -> type -> names + files := make(map[string]map[string][]string) + for name, typeFiles := range nameToTypeFiles { + for typ, fileSet := range typeFiles { + for file := range fileSet { + if files[file] == nil { + files[file] = map[string][]string{ + "FUNC": {}, + "TYPE": {}, + "VAR": {}, + } + } + files[file][typ] = append(files[file][typ], name) + } + } + } + + // 过滤掉空的 TYPE 和 VAR + for file, types := range files { + if len(types["TYPE"]) == 0 { + delete(types, "TYPE") + } + if len(types["VAR"]) == 0 { + delete(types, "VAR") + } + if len(types["FUNC"]) == 0 { + delete(files, file) + } + } + + result := ExtractResult{ + RepoName: repoName, + Files: files, + } + + b, _ := json.MarshalIndent(result, "", " ") + fmt.Fprintf(os.Stdout, "%s\n", b) + return nil + }, + } +} diff --git a/internal/cmd/cli/get_file_structure.go b/internal/cmd/cli/get_file_structure.go new file mode 100644 index 00000000..ca48fc37 --- /dev/null +++ b/internal/cmd/cli/get_file_structure.go @@ -0,0 +1,101 @@ +// Copyright 2025 CloudWeGo Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cli + +import ( + "encoding/json" + "fmt" + "os" + + "github.com/spf13/cobra" +) + +func newGetFileStructureCmd() *cobra.Command { + return &cobra.Command{ + Use: "get_file_structure ", + Short: "Get symbol names of a file", + Long: `Get the symbol names and signatures of a file in the repository. + +Returns a list of functions, types, and variables defined in the file.`, + Example: `abcoder cli get_file_structure myrepo src/main.go`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + astsDir, err := getASTsDir(cmd) + if err != nil { + return err + } + + repoName := args[0] + filePath := args[1] + + repoFile := findRepoFile(astsDir, repoName) + if repoFile == "" { + return fmt.Errorf("repo not found: %s", repoName) + } + + // 加载 data(用于后续按需读取) + data, err := loadRepoFileData(repoFile) + if err != nil { + return err + } + + // 1. 定位 pkgPath(极致按需:只读取 File 字段验证) + modPath, pkgPath, err := findPkgPathByFile(data, filePath) + if err != nil { + return fmt.Errorf("file '%s' not found in repo", filePath) + } + + // 2. 
读取该文件所有 symbols + syms, err := getFileSymbolsByFile(data, modPath, pkgPath, filePath) + if err != nil || len(syms) == 0 { + return fmt.Errorf("no symbols found in file '%s'", filePath) + } + + type Node struct { + Name string `json:"name"` + Line int `json:"line"` + Signature string `json:"signature,omitempty"` + TypeKind string `json:"typeKind,omitempty"` // class, typedef, struct, enum, interface + } + + var nodes []Node + for _, sym := range syms { + n := Node{ + Name: sym["Name"].(string), + Line: int(sym["Line"].(float64)), + } + if sig, ok := sym["Signature"].(string); ok { + n.Signature = sig + } + // 添加 TypeKind (class, typedef, struct, enum, interface) + if tk, ok := sym["TypeKind"].(string); ok && tk != "" { + n.TypeKind = tk + } + nodes = append(nodes, n) + } + + resp := map[string]interface{}{ + "file_path": filePath, + "mod_path": modPath, + "pkg_path": pkgPath, + "nodes": nodes, + } + + b, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "%s\n", b) + return nil + }, + } +} diff --git a/internal/cmd/cli/get_file_symbol.go b/internal/cmd/cli/get_file_symbol.go new file mode 100644 index 00000000..0a1cac6a --- /dev/null +++ b/internal/cmd/cli/get_file_symbol.go @@ -0,0 +1,139 @@ +// Copyright 2025 CloudWeGo Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package cli + +import ( + "encoding/json" + "fmt" + "os" + + "github.com/spf13/cobra" +) + +func newGetFileSymbolCmd() *cobra.Command { + return &cobra.Command{ + Use: "get_file_symbol ", + Short: "Get detailed symbol information", + Long: `Get detailed information about a symbol including code, dependencies, and references. + +Returns the symbol's code, type, line number, and relationship with other symbols.`, + Example: `abcoder cli get_file_symbol myrepo src/main.go MyFunction`, + Args: cobra.ExactArgs(3), + RunE: func(cmd *cobra.Command, args []string) error { + astsDir, err := getASTsDir(cmd) + if err != nil { + return err + } + + repoName := args[0] + filePath := args[1] + symbolName := args[2] + + repoFile := findRepoFile(astsDir, repoName) + if repoFile == "" { + return fmt.Errorf("repo not found: %s", repoName) + } + + // 加载 data(用于后续按需读取) + data, err := loadRepoFileData(repoFile) + if err != nil { + return err + } + + // 1. 定位 pkgPath(极致按需:只读取 File 字段验证) + modPath, pkgPath, err := findPkgPathByFile(data, filePath) + if err != nil { + return fmt.Errorf("symbol '%s' not found in file '%s'", symbolName, filePath) + } + + // 2. 读取 symbol 完整内容 + sym, err := getSymbolByFileFull(data, modPath, pkgPath, filePath, symbolName) + if err != nil { + return fmt.Errorf("symbol '%s' not found in file '%s'", symbolName, filePath) + } + + // 找到 symbol,构建返回结构 + nodeType := "FUNC" + if t, ok := sym["node_type"].(string); ok { + nodeType = t + } + + signature := "" + if s, ok := sym["Signature"].(string); ok { + signature = s + } + content := "" + if c, ok := sym["Content"].(string); ok { + content = c + } + + // 3. 
按需读取 Graph References + refs, err := getSymbolReferences(data, modPath, pkgPath, symbolName) + if err != nil { + fmt.Fprintf(os.Stderr, "DEBUG: getSymbolReferences error: %v\n", err) + return err + } + + // 按 Kind 分类,并按 file_path 分组聚合 + depMap := make(map[string][]string) + refMap := make(map[string][]string) + for _, r := range refs { + // 通过 ModPath + PkgPath + Name 反向查找 FilePath + filePath := findSymbolFile(data, r["mod_path"], r["pkg_path"], r["name"]) + + // Dependency, Inherit, Implement 都视为依赖关系 + if r["kind"] == "Dependency" || r["kind"] == "Inherit" || r["kind"] == "Implement" { + depMap[filePath] = append(depMap[filePath], r["name"]) + } else { + refMap[filePath] = append(refMap[filePath], r["name"]) + } + } + + // 转换为 FileNodeID 格式(按 file_path 分组,names 为数组) + var deps, refsOnly []map[string]interface{} + for fp, names := range depMap { + deps = append(deps, map[string]interface{}{ + "file_path": fp, + "names": names, + }) + } + for fp, names := range refMap { + refsOnly = append(refsOnly, map[string]interface{}{ + "file_path": fp, + "names": names, + }) + } + + node := map[string]interface{}{ + "name": symbolName, + "type": nodeType, + "file": filePath, + "line": int(sym["Line"].(float64)), + "codes": content, + "signature": signature, + "dependencies": deps, + "references": refsOnly, + } + + resp := map[string]interface{}{ + "node": node, + } + + b, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "%s\n", b) + return nil + }, + } +} diff --git a/internal/cmd/cli/list_repos.go b/internal/cmd/cli/list_repos.go new file mode 100644 index 00000000..82368131 --- /dev/null +++ b/internal/cmd/cli/list_repos.go @@ -0,0 +1,138 @@ +// Copyright 2025 CloudWeGo Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cli + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "sort" + "strings" + + "github.com/bytedance/sonic" + "github.com/spf13/cobra" + "golang.org/x/exp/maps" +) + +func newListReposCmd() *cobra.Command { + return &cobra.Command{ + Use: "list_repos", + Short: "List available repositories", + Long: `List all available repositories in the AST directory. + +The repositories are loaded from *.json files in the --asts-dir directory.`, + Example: `abcoder cli list-repos`, + RunE: func(cmd *cobra.Command, args []string) error { + astsDir, err := getASTsDir(cmd) + if err != nil { + return err + } + + // 获取当前工作目录 + cwd, err := os.Getwd() + if err != nil { + return err + } + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] cwd: %s\n", cwd) + } + + // 扫描所有 JSON 文件,读取 id 和 path + repoNamesMap := make(map[string]struct{}) + var currentRepos []string + type pathItem struct { + id string + path string + } + var pathItems []pathItem + + files, err := filepath.Glob(filepath.Join(astsDir, "*.json")) + if err != nil { + return err + } + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] found %d json files\n", len(files)) + } + for _, f := range files { + // 跳过 _repo_index.json + if strings.HasSuffix(f, "_repo_index.json") || strings.HasSuffix(f, ".repo_index.json") { + continue + } + // 使用 sonic 快速读取 id 和 path 字段 + if data, err := os.ReadFile(f); err == nil { + // 读取 id + idVal, err := sonic.Get(data, "id") + if err != nil { + continue + } + id, err := idVal.String() + if err != nil || id == "" { + continue + } + repoNamesMap[id] = 
struct{}{} + + // 读取 path + pathVal, err := sonic.Get(data, "Path") + if err == nil { + path, err := pathVal.String() + if err == nil && path != "" { + pathItems = append(pathItems, pathItem{id: id, path: path}) + } + } + } + } + + // 按 path 排序,用于前缀匹配时提前退出 + sort.Slice(pathItems, func(i, j int) bool { + return pathItems[i].path < pathItems[j].path + }) + + // 查找 cwd 前缀匹配的 repo + for _, item := range pathItems { + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] checking: id=%s, path=%s\n", item.id, item.path) + } + // 如果 path 比 cwd 短,不可能匹配,提前退出 + if len(item.path) < len(cwd) { + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] early exit: path shorter than cwd\n") + } + continue + } + if strings.HasPrefix(item.path, cwd) { + currentRepos = append(currentRepos, item.id) + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] MATCH: id=%s, path=%s\n", item.id, item.path) + } + } + } + + repoNames := maps.Keys(repoNamesMap) + + type ListReposOutput struct { + RepoNames []string `json:"repo_names"` + CurrentRepos []string `json:"current_repo,omitempty"` + } + resp := ListReposOutput{ + RepoNames: repoNames, + CurrentRepos: currentRepos, + } + b, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "%s\n", b) + return nil + }, + } +} diff --git a/internal/cmd/cli/search_symbol.go b/internal/cmd/cli/search_symbol.go new file mode 100644 index 00000000..f174c8ab --- /dev/null +++ b/internal/cmd/cli/search_symbol.go @@ -0,0 +1,277 @@ +// Copyright 2025 CloudWeGo Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package cli + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "regexp" + "strings" + + "github.com/bytedance/sonic" + "github.com/spf13/cobra" +) + +// SearchResult 搜索结果 +type SearchResult struct { + RepoName string `json:"repo_name"` + Query string `json:"query"` + Results map[string]map[string][]string `json:"results"` // file -> type -> names +} + +const indexDir = ".index" + +type SymbolIndex struct { + Mtime int64 `json:"mtime"` + Data map[string][]NameMatch `json:"data"` // name -> []NameMatch +} + +type NameMatch struct { + File string `json:"file"` + Type string `json:"type"` +} + +// loadSymbolIndex 加载符号索引 +func loadSymbolIndex(astsDir, repoName, repoFile string) (*SymbolIndex, error) { + idxPath := filepath.Join(astsDir, indexDir, repoName+".idx") + + // 检查索引文件是否存在 + if _, err := os.Stat(idxPath); err != nil { + // 索引不存在,返回 nil,让调用者知道需要生成 + return nil, nil + } + + // 读取索引 + data, err := os.ReadFile(idxPath) + if err != nil { + return nil, err + } + + var idx SymbolIndex + if err := json.Unmarshal(data, &idx); err != nil { + return nil, err + } + + // 检查 mtime 是否一致 + repoInfo, err := os.Stat(repoFile) + if err != nil { + return nil, err + } + if idx.Mtime != repoInfo.ModTime().UnixMilli() { + // mtime 不一致,返回 nil,让调用者知道需要重新生成 + return nil, nil + } + + return &idx, nil +} + +// hasRegexMetaChars 检查是否包含正则元字符 +func hasRegexMetaChars(s string) bool { + return strings.ContainsAny(s, ".+?{}[]|\\^$") +} + +// matchName 检查 name 是否匹配 query +// 支持: ripgrep 正则语法 +// - 普通字符串: 包含匹配 (*query*) +// - 含 * : 通配符匹配 (转为 .*) +// - 含其他正则元字符: 正则包含匹配 (.*query.*) +func matchName(name, query string) bool { + // 如果包含 * (通配符) + if strings.Contains(query, "*") { + // 转为 .* 并做包含匹配 + pattern := strings.ReplaceAll(query, "*", ".*") + pattern = ".*" + pattern + ".*" + matched, _ := regexp.MatchString(pattern, name) + return matched + } + + // 如果包含正则元字符,使用正则包含匹配 + if 
hasRegexMetaChars(query) { + pattern := ".*" + query + ".*" + matched, err := regexp.MatchString(pattern, name) + if err != nil { + // 回退到子串包含匹配 + return strings.Contains(name, query) + } + return matched + } + + // 普通字符串:包含匹配 (*query*) + pattern := ".*" + query + ".*" + matched, _ := regexp.MatchString(pattern, name) + return matched +} + +func newSearchSymbolCmd() *cobra.Command { + var pathFilter string + + cmd := &cobra.Command{ + Use: "search_symbol ", + Short: "Search symbols by name", + Long: `Search symbols in a repository by name pattern. +Supports substring match, prefix match (query*), suffix match (*query), and wildcard (*query*).`, + Example: `abcoder cli search_symbol myrepo GetUser +abcoder cli search_symbol myrepo "*User" +abcoder cli search_symbol myrepo "Get*" +abcoder cli search_symbol myrepo "Graph" --path "src/main/java/com/uniast/parser"`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + astsDir, err := getASTsDir(cmd) + if err != nil { + return err + } + + repoName := args[0] + query := args[1] + + repoFile := findRepoFile(astsDir, repoName) + if repoFile == "" { + return fmt.Errorf("repo not found: %s", repoName) + } + + // 读取 JSON 文件 + data, err := os.ReadFile(repoFile) + if err != nil { + return fmt.Errorf("failed to read repo file: %w", err) + } + + var results = make(map[string]map[string][]string) + + // 方式1: 使用 NameToLocations(新增字段,O(1)) + nameToLocsVal, err := sonic.Get(data, "NameToLocations") + if err == nil && nameToLocsVal.Exists() { + nameToLocs, err := nameToLocsVal.Map() + if err == nil { + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] using NameToLocations (new)\n") + } + for name := range nameToLocs { + if matchName(name, query) { + // 支持两种格式: + // 1. 数组格式: {"GetUser": ["src/user.rs"]} + // 2. 
对象格式: {"GetUser": {"Files": ["src/user.rs"]}} + locVal := nameToLocsVal.Get(name) + var files []interface{} + filesVal := locVal.Get("Files") + if filesVal.Exists() { + files, _ = filesVal.Array() + } else { + // 尝试数组格式 + files, _ = locVal.Array() + } + if len(files) > 0 { + for _, f := range files { + fileStr, _ := f.(string) + // path 前缀过滤 + if pathFilter != "" && !strings.HasPrefix(fileStr, pathFilter) { + continue + } + if results[fileStr] == nil { + results[fileStr] = map[string][]string{ + "FUNC": {}, + "TYPE": {}, + "VAR": {}, + } + } + results[fileStr]["FUNC"] = append(results[fileStr]["FUNC"], name) + } + } + } + } + + // 无论是否有结果都直接返回 + output := SearchResult{ + RepoName: repoName, + Query: query, + Results: results, + } + b, _ := json.MarshalIndent(output, "", " ") + fmt.Fprintf(os.Stdout, "%s\n", b) + return nil + } + } + + // 方式2: 没有 NameToLocations,构建并写回 JSON + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] building NameToLocations\n") + } + + // 使用公共函数构建 + nameToTypeFiles, err := buildNameToLocations(data, pathFilter) + if err != nil { + return err + } + + // 写回 JSON(使用完整 path 构建,否则后续搜索会丢失数据) + fullNameToTypeFiles, err := buildNameToLocations(data, "") + if err == nil { + // 转换为 name -> []file 格式 + fullNameToLocsMap := make(map[string][]string) + for name, typeFiles := range fullNameToTypeFiles { + fileSet := make(map[string]bool) + for _, files := range typeFiles { + for file := range files { + fileSet[file] = true + } + } + var fileList []string + for file := range fileSet { + fileList = append(fileList, file) + } + fullNameToLocsMap[name] = fileList + } + if err := saveNameToLocations(repoFile, fullNameToLocsMap); err != nil { + if verbose { + fmt.Fprintf(os.Stderr, "Warning: failed to save NameToLocations: %v\n", err) + } + } else if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] saved NameToLocations to %s\n", repoFile) + } + } + + // 转换为输出格式(全部归为 FUNC,因为 JSON 里没存 type) + for name, typeFiles := range nameToTypeFiles { + for _, fileSet := range 
typeFiles { + for file := range fileSet { + if results[file] == nil { + results[file] = map[string][]string{ + "FUNC": {}, + "TYPE": {}, + "VAR": {}, + } + } + results[file]["FUNC"] = append(results[file]["FUNC"], name) + } + } + } + + output := SearchResult{ + RepoName: repoName, + Query: query, + Results: results, + } + + b, _ := json.MarshalIndent(output, "", " ") + fmt.Fprintf(os.Stdout, "%s\n", b) + return nil + }, + } + + cmd.Flags().StringVar(&pathFilter, "path", "", "filter by file path prefix (e.g., src/main/java/com/uniast/parser)") + + return cmd +} diff --git a/internal/cmd/cli/tree_repo.go b/internal/cmd/cli/tree_repo.go new file mode 100644 index 00000000..6b1be04d --- /dev/null +++ b/internal/cmd/cli/tree_repo.go @@ -0,0 +1,111 @@ +// Copyright 2025 CloudWeGo Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cli + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "sort" + "strings" + + "github.com/bytedance/sonic" + "github.com/spf13/cobra" +) + +func newTreeRepoCmd() *cobra.Command { + return &cobra.Command{ + Use: "tree_repo ", + Short: "Get file tree of a repository", + Long: `Get the file tree structure of a repository. 
+ +Returns a map of directories to file lists.`, + Example: `abcoder cli tree_repo myrepo`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + astsDir, err := getASTsDir(cmd) + if err != nil { + return err + } + + repoName := args[0] + repoFile := findRepoFile(astsDir, repoName) + if repoFile == "" { + return fmt.Errorf("repo not found: %s", repoName) + } + + data, err := loadRepoFileData(repoFile) + if err != nil { + return err + } + + // 获取所有 mod keys + modKeys, err := getModuleKeys(data) + if err != nil { + return err + } + + // 收集所有文件,按目录聚合 + files := make(map[string][]string) + for _, modPath := range modKeys { + // 跳过外部模块(通过 IsExternal 字段判断) + isExtVal, _ := sonic.Get(data, "Modules", modPath, "IsExternal") + if isExt, _ := isExtVal.Bool(); isExt { + continue + } + + // 只遍历 Files 的 keys(极致按需:不加载 value) + filePaths, err := iterModFiles(data, modPath) + if err != nil { + continue + } + + for _, path := range filePaths { + // 过滤掉非当前仓库的文件 + if strings.HasPrefix(path, "..") { + continue + } + + // 获取目录路径 + dir := filepath.Dir(path) + if dir == "." { + dir = "./" + } + // 添加 '/' 后缀 + if dir != "" && dir != "./" && !strings.HasSuffix(dir, "/") { + dir = dir + "/" + } + + // 获取文件名 + name := filepath.Base(path) + files[dir] = append(files[dir], name) + } + } + + // 排序 + for dir := range files { + sort.Strings(files[dir]) + } + + resp := map[string]interface{}{ + "files": files, + } + b, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "%s\n", b) + return nil + }, + } +} diff --git a/internal/cmd/cli/utils.go b/internal/cmd/cli/utils.go new file mode 100644 index 00000000..f39ee0b0 --- /dev/null +++ b/internal/cmd/cli/utils.go @@ -0,0 +1,652 @@ +// Copyright 2025 CloudWeGo Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cli + +import ( + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/bytedance/sonic" + "github.com/bytedance/sonic/ast" + "github.com/spf13/cobra" +) + +// getASTsDir returns the ASTs directory path from command flags or default location. +func getASTsDir(cmd *cobra.Command) (string, error) { + astsDir, err := cmd.Flags().GetString("asts-dir") + if err != nil { + return "", err + } + if astsDir == "" { + astsDir = filepath.Join(os.Getenv("HOME"), ".asts") + } + if _, err := os.Stat(astsDir); os.IsNotExist(err) { + return "", fmt.Errorf("ASTs directory does not exist: %s", astsDir) + } + return astsDir, nil +} + +// findRepoFile 查找 repo 对应的 JSON 文件 +func findRepoFile(astsDir, repoName string) string { + // 先尝试直接匹配文件名 + patterns := []string{ + repoName + ".json", + repoName, + } + + // 处理特殊字符 + encoded := strings.ReplaceAll(repoName, "/", "-") + encoded = strings.ReplaceAll(encoded, ":", "-") + patterns = append(patterns, encoded+".json") + + // glob 模式 + patterns = append(patterns, "*-"+encoded+".json") + + for _, pattern := range patterns { + if match, err := filepath.Glob(filepath.Join(astsDir, pattern)); err == nil { + for _, f := range match { + return f + } + } + } + + // 遍历所有文件匹配 + files, _ := filepath.Glob(filepath.Join(astsDir, "*.json")) + for _, f := range files { + if strings.HasSuffix(f, "_repo_index.json") || strings.HasSuffix(f, ".repo_index.json") { + continue + } + // 读取 id 字段匹配 + if data, err := os.ReadFile(f); err == nil { + if val, err := sonic.Get(data, "id"); err == nil { + if id, err := val.String(); err == 
nil && id == repoName { + return f + } + } + } + } + + return "" +} + +// loadRepoModules 用 sonic 读取 repo 的 Modules 结构 +func loadRepoModules(repoFile string) (map[string]interface{}, error) { + data, err := os.ReadFile(repoFile) + if err != nil { + return nil, fmt.Errorf("failed to read repo file: %w", err) + } + + modsVal, err := sonic.Get(data, "Modules") + if err != nil { + return nil, fmt.Errorf("failed to get modules: %w", err) + } + mods, err := modsVal.Map() + if err != nil { + return nil, fmt.Errorf("failed to parse modules: %w", err) + } + return mods, nil +} + +// pathMatchesCwd 检查 mappings 中的文件名对应的 json 文件的 Path 字段是否匹配 cwd +// 支持前缀匹配:cwd 以 path 开头(支持 monorepo 场景) +func pathMatchesCwd(astsDir, filename, cwd string) bool { + repoFile := filepath.Join(astsDir, filename) + data, err := os.ReadFile(repoFile) + if err != nil { + return false + } + val, err := sonic.Get(data, "Path") + if err != nil { + return false + } + path, err := val.String() + if err != nil { + return false + } + // 前缀匹配:cwd 以 path 开头 + return strings.HasPrefix(cwd, path) +} + +// loadRepoFileData 读取整个 repo JSON 文件,返回 raw data 供后续 sonic.Get 按需读取 +func loadRepoFileData(repoFile string) ([]byte, error) { + return os.ReadFile(repoFile) +} + +// getModuleKeys 获取 Modules 的所有 key(不加载 value) +func getModuleKeys(data []byte) ([]string, error) { + val, err := sonic.Get(data, "Modules") + if err != nil { + return nil, err + } + var keys []string + iter, err := val.Properties() + if err != nil { + return nil, err + } + var p ast.Pair + for iter.Next(&p) { + keys = append(keys, p.Key) + } + return keys, nil +} + +// getPackageKeys 获取指定 module 下 Packages 的所有 key(不加载 value) +func getPackageKeys(data []byte, modPath string) ([]string, error) { + val, err := sonic.Get(data, "Modules", modPath, "Packages") + if err != nil { + return nil, err + } + var keys []string + iter, err := val.Properties() + if err != nil { + return nil, err + } + var p ast.Pair + for iter.Next(&p) { + keys = append(keys, p.Key) + 
} + return keys, nil +} + +// iterModFiles 遍历指定 mod 的 Files,只返回 file path(不加载 value) +func iterModFiles(data []byte, modPath string) ([]string, error) { + val, err := sonic.Get(data, "Modules", modPath, "Files") + if err != nil { + return nil, err + } + iter, err := val.Properties() + if err != nil { + return nil, err + } + var keys []string + var p ast.Pair + for iter.Next(&p) { + keys = append(keys, p.Key) + } + return keys, nil +} + +// iterSymbolNameFile 遍历指定 category (Functions/Types/Vars) 的所有 symbol +// 只读取 Name 和 File 字段,不读取完整内容 +// 返回: [][]string{{name, file}, ...} +func iterSymbolNameFile(data []byte, modPath, pkgPath, category string) ([][]string, error) { + val, err := sonic.Get(data, "Modules", modPath, "Packages", pkgPath, category) + if err != nil { + return nil, err + } + if !val.Exists() { + return nil, nil + } + iter, err := val.Properties() + if err != nil { + return nil, err + } + var results [][]string + var p ast.Pair + for iter.Next(&p) { + symName := p.Key + // 只读取 File 字段 + fileVal, err := sonic.Get(data, "Modules", modPath, "Packages", pkgPath, category, symName, "File") + if err != nil || !fileVal.Exists() { + continue + } + filePath, err := fileVal.String() + if err != nil { + continue + } + results = append(results, []string{symName, filePath}) + } + return results, nil +} + +// findPkgPathByFile 通过 filePath 查找 pkgPath +// 返回: modPath, pkgPath +// 使用 File.ModPath + File.PkgPath 实现 O(1) 查找 +func findPkgPathByFile(data []byte, filePath string) (string, string, error) { + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] findPkgPathByFile: filePath=%s\n", filePath) + } + + // 1. 
遍历 Modules,尝试直接通过 Files[filePath] 找到 File + modsVal, err := sonic.Get(data, "Modules") + if err != nil { + return "", "", err + } + modsIter, err := modsVal.Properties() + if err != nil { + return "", "", err + } + var modPair ast.Pair + + for modsIter.Next(&modPair) { + modPath := modPair.Key + + // 直接查找 Module.Files[filePath] + fileVal, err := sonic.Get(data, "Modules", modPath, "Files", filePath) + if err != nil || !fileVal.Exists() { + continue + } + + // 读取 File.ModPath 和 File.PkgPath(JSON 字段名是大写) + modPathVal, _ := fileVal.Get("ModPath").String() + pkgPathVal, _ := fileVal.Get("PkgPath").String() + + if modPathVal != "" && pkgPathVal != "" { + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] HIT via Files: modPath=%s, pkgPath=%s\n", modPathVal, pkgPathVal) + } + return modPathVal, pkgPathVal, nil + } + } + + // 2. 回退:使用旧的推导方式(兼容旧数据) + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] fallback to derived path\n") + } + return findPkgPathByFileDerived(data, filePath) +} + +// findPkgPathByFileDerived 通过推导查找 pkgPath(旧逻辑,兼容) +func findPkgPathByFileDerived(data []byte, filePath string) (string, string, error) { + derivedPkg := filepath.Dir(filePath) + if derivedPkg == "." 
{ + derivedPkg = "" + } + + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] findPkgPathByFileDerived: filePath=%s, derivedPkg=%s\n", filePath, derivedPkg) + } + + modsVal, err := sonic.Get(data, "Modules") + if err != nil { + return "", "", err + } + modsIter, err := modsVal.Properties() + if err != nil { + return "", "", err + } + var modPair ast.Pair + + for modsIter.Next(&modPair) { + modPath := modPair.Key + + var fullPkgPath string + if derivedPkg == "" { + fullPkgPath = modPath + } else { + fullPkgPath = modPath + "/" + derivedPkg + } + + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] trying direct: modPath=%s, fullPkgPath=%s\n", modPath, fullPkgPath) + } + + if matched, _ := pkgHasFile(data, modPath, fullPkgPath, filePath, "", "Functions"); matched { + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] HIT via direct: modPath=%s, fullPkgPath=%s\n", modPath, fullPkgPath) + } + return modPath, fullPkgPath, nil + } + if matched, _ := pkgHasFile(data, modPath, fullPkgPath, filePath, "", "Types"); matched { + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] HIT via direct: modPath=%s, fullPkgPath=%s\n", modPath, fullPkgPath) + } + return modPath, fullPkgPath, nil + } + if matched, _ := pkgHasFile(data, modPath, fullPkgPath, filePath, "", "Vars"); matched { + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] HIT via direct: modPath=%s, fullPkgPath=%s\n", modPath, fullPkgPath) + } + return modPath, fullPkgPath, nil + } + } + + if verbose { + fmt.Fprintf(os.Stderr, "[VERBOSE] fallback to findPkgPathByFileFullLoad\n") + } + return findPkgPathByFileFullLoad(data, filePath) +} + +// findPkgPathByFileFullLoad 全量加载方案:一次性加载 Modules.Packages,建立 file→{modPath,pkgPath} 索引 +func findPkgPathByFileFullLoad(data []byte, filePath string) (string, string, error) { + // 一次性反序列化 Modules.Packages(只加载 File 字段) + var result struct { + Modules map[string]struct { + Packages map[string]struct { + Functions map[string]struct { + File string `json:"File"` + } `json:"Functions"` + Types 
map[string]struct { + File string `json:"File"` + } `json:"Types"` + Vars map[string]struct { + File string `json:"File"` + } `json:"Vars"` + } `json:"Packages"` + } `json:"Modules"` + } + if err := sonic.Unmarshal(data, &result); err != nil { + return "", "", fmt.Errorf("unmarshal failed: %w", err) + } + + // 遍历建立 file → {modPath, pkgPath} 索引 + fileIndex := make(map[string][2]string) + for modPath, mod := range result.Modules { + for pkgPath, pkg := range mod.Packages { + for _, fn := range pkg.Functions { + if fn.File != "" { + fileIndex[fn.File] = [2]string{modPath, pkgPath} + } + } + for _, t := range pkg.Types { + if t.File != "" { + fileIndex[t.File] = [2]string{modPath, pkgPath} + } + } + for _, v := range pkg.Vars { + if v.File != "" { + fileIndex[v.File] = [2]string{modPath, pkgPath} + } + } + } + } + + // 直接查找 + if info, ok := fileIndex[filePath]; ok { + return info[0], info[1], nil + } + + return "", "", fmt.Errorf("file not found: %s", filePath) +} + +// pkgHasFile 检查指定 category (Functions/Types/Vars) 中是否有匹配的 file +// 如果 symbolName 不为空,则同时匹配 symbolName +func pkgHasFile(data []byte, modPath, pkgPath, filePath, symbolName, category string) (bool, error) { + categoryVal, err := sonic.Get(data, "Modules", modPath, "Packages", pkgPath, category) + if err != nil { + return false, err + } + if !categoryVal.Exists() { + return false, nil + } + + iter, err := categoryVal.Properties() + if err != nil { + return false, err + } + var pair ast.Pair + + for iter.Next(&pair) { + symName := pair.Key + + // 如果指定了 symbolName,则只检查该 symbol + if symbolName != "" && symName != symbolName { + continue + } + + // 只读取 File 字段进行比对 + fileVal, err := sonic.Get(data, "Modules", modPath, "Packages", pkgPath, category, symName, "File") + if err != nil { + continue + } + if !fileVal.Exists() { + continue + } + fnFile, err := fileVal.String() + if err != nil { + continue + } + + if fnFile == filePath { + return true, nil + } + } + + return false, nil +} + +// getSymbolByFileFull 完整读取 
package 内容后匹配 symbol +// 在找到目标 pkg 后调用此函数读取完整内容 +func getSymbolByFileFull(data []byte, modPath, pkgPath, filePath, symbolName string) (map[string]interface{}, error) { + // 读取目标 package 的内容 + pkgVal, err := sonic.Get(data, "Modules", modPath, "Packages", pkgPath) + if err != nil { + return nil, fmt.Errorf("sonic.Get(Packages) failed for %s/%s: %w", modPath, pkgPath, err) + } + if !pkgVal.Exists() { + return nil, fmt.Errorf("Packages does not exist for %s/%s", modPath, pkgPath) + } + pkg, err := pkgVal.Map() + if err != nil { + return nil, fmt.Errorf("pkgVal.Map() failed: %w", err) + } + + // 检查 Functions + if fns, ok := pkg["Functions"].(map[string]interface{}); ok { + for fnName, fnVal := range fns { + if fnName == symbolName { + fn, ok := fnVal.(map[string]interface{}) + if !ok { + continue + } + if fn["File"] == filePath { + fn["node_type"] = "FUNC" + return fn, nil + } + } + } + } + // 检查 Types + if types, ok := pkg["Types"].(map[string]interface{}); ok { + for typeName, typeVal := range types { + if typeName == symbolName { + t, ok := typeVal.(map[string]interface{}) + if !ok { + continue + } + if t["File"] == filePath { + t["node_type"] = "TYPE" + return t, nil + } + } + } + } + // 检查 Vars + if vars, ok := pkg["Vars"].(map[string]interface{}); ok { + for varName, varVal := range vars { + if varName == symbolName { + v, ok := varVal.(map[string]interface{}) + if !ok { + continue + } + if v["File"] == filePath { + v["node_type"] = "VAR" + return v, nil + } + } + } + } + return nil, fmt.Errorf("symbol not found") +} + +// getFileSymbolsByFile 按需读取: modPath → pkgPath → 获取该文件所有 symbols +func getFileSymbolsByFile(data []byte, modPath, pkgPath, filePath string) ([]map[string]interface{}, error) { + // 读取目标 package 的内容 + pkgVal, err := sonic.Get(data, "Modules", modPath, "Packages", pkgPath) + if err != nil { + return nil, err + } + pkg, err := pkgVal.Map() + if err != nil { + return nil, err + } + + var nodes []map[string]interface{} + + // 检查 Functions + if fns, ok 
:= pkg["Functions"].(map[string]interface{}); ok { + for _, fnVal := range fns { + fn, ok := fnVal.(map[string]interface{}) + if !ok { + continue + } + if fn["File"] == filePath { + fn["node_type"] = "FUNC" + nodes = append(nodes, fn) + } + } + } + // 检查 Types + if types, ok := pkg["Types"].(map[string]interface{}); ok { + for _, typeVal := range types { + t, ok := typeVal.(map[string]interface{}) + if !ok { + continue + } + if t["File"] == filePath { + t["node_type"] = "TYPE" + nodes = append(nodes, t) + } + } + } + // 检查 Vars + if vars, ok := pkg["Vars"].(map[string]interface{}); ok { + for _, varVal := range vars { + v, ok := varVal.(map[string]interface{}) + if !ok { + continue + } + if v["File"] == filePath { + v["node_type"] = "VAR" + nodes = append(nodes, v) + } + } + } + return nodes, nil +} + +// getSymbolReferences 用 sonic 按需读取 Graph 节点的 Dependencies 和 References +// Identity 格式: {ModPath}?{PkgPath}#{Name} +func getSymbolReferences(data []byte, modPath, pkgPath, symbolName string) ([]map[string]string, error) { + // Graph key 格式: {ModPath}?{PkgPath}#{Name} + // Python 根目录文件的 PkgPath 是 ".",需要映射为 "." + graphKey := modPath + "?" + pkgPath + "#" + symbolName + + // 使用嵌套 Get 避免特殊字符 (?#) 处理问题 + graphVal, err := sonic.Get(data, "Graph") + if err != nil { + return nil, err + } + nodeVal := graphVal.Get(graphKey) + if !nodeVal.Exists() { + return nil, nil // 没有 Graph 节点,返回空 + } + + var refs []map[string]string + + // 读取 Dependencies(当前节点依赖的) + depsVal := nodeVal.Get("Dependencies") + if depsVal.Exists() { + deps, err := parseRelationItems(*depsVal, "Dependency") + if err != nil { + return nil, err + } + refs = append(refs, deps...) + } + + // 读取 References(引用当前节点的) + refsVal := nodeVal.Get("References") + if refsVal.Exists() { + refsItems, err := parseRelationItems(*refsVal, "Reference") + if err != nil { + return nil, err + } + refs = append(refs, refsItems...) 
+ } + + return refs, nil +} + +// parseRelationItems 解析关系数组,添加 kind 来源标记 +func parseRelationItems(val ast.Node, kind string) ([]map[string]string, error) { + arr, err := val.Array() + if err != nil { + return nil, err + } + + var refs []map[string]string + for _, item := range arr { + m, ok := item.(map[string]interface{}) + if !ok { + continue + } + ref := make(map[string]string) + // 使用固定的 kind,不依赖 JSON 中的 Kind 字段 + ref["kind"] = kind + if v, ok := m["Name"].(string); ok { + ref["name"] = v + } + if v, ok := m["ModPath"].(string); ok { + ref["mod_path"] = v + } + if v, ok := m["PkgPath"].(string); ok { + ref["pkg_path"] = v + } + if f, ok := m["File"].(string); ok { + ref["file"] = f + } + if n, ok := m["Line"].(float64); ok { + ref["line"] = fmt.Sprintf("%d", int(n)) + } + refs = append(refs, ref) + } + return refs, nil +} + +// findSymbolFile 通过 ModPath + PkgPath + Name 反向查找 FilePath +// 路径格式: .Modules[ModPath].Packages[PkgPath].Functions[Name].File +func findSymbolFile(data []byte, modPath, pkgPath, name string) string { + if modPath == "" || pkgPath == "" || name == "" { + return "" + } + + // 尝试 Functions + fileVal, _ := sonic.Get(data, "Modules", modPath, "Packages", pkgPath, "Functions", name, "File") + if fileVal.Exists() { + if f, err := fileVal.String(); err == nil { + return f + } + } + + // 尝试 Types + fileVal, _ = sonic.Get(data, "Modules", modPath, "Packages", pkgPath, "Types", name, "File") + if fileVal.Exists() { + if f, err := fileVal.String(); err == nil { + return f + } + } + + // 尝试 Vars + fileVal, _ = sonic.Get(data, "Modules", modPath, "Packages", pkgPath, "Vars", name, "File") + if fileVal.Exists() { + if f, err := fileVal.String(); err == nil { + return f + } + } + + return "" +} diff --git a/internal/cmd/init_spec.go b/internal/cmd/init_spec.go index 8649c75e..98685a3d 100644 --- a/internal/cmd/init_spec.go +++ b/internal/cmd/init_spec.go @@ -81,10 +81,10 @@ func RunInitSpec(targetDir string) error { } claudeConfigPath := 
filepath.Join(homeDir, ".claude.json") - if err := configureMCPServers(claudeConfigPath, astsDir); err != nil { - return fmt.Errorf("failed to configure MCP servers: %w", err) - } - log.Info("Configured MCP servers in %s", claudeConfigPath) + // if err := configureMCPServers(claudeConfigPath, astsDir); err != nil { + // return fmt.Errorf("failed to configure MCP servers: %w", err) + // } + // log.Info("Configured MCP servers in %s", claudeConfigPath) // 4. Print success message printSuccessMessage(targetDirAbs, claudeConfigPath, astsDir) diff --git a/lang/golang/parser/parser.go b/lang/golang/parser/parser.go index 7b7dc2e4..13270498 100644 --- a/lang/golang/parser/parser.go +++ b/lang/golang/parser/parser.go @@ -234,7 +234,7 @@ func getDeps(dir string, workDirs map[string]bool) (a map[string]string, cgoPkgs cmd.Env = append(os.Environ(), "GOSUMDB=off") output, err = cmd.CombinedOutput() if err != nil { - return nil, cgoPkgs, fmt.Errorf("failed to execute 'go list -json all', err: %v, output: %s, cmd string: %s, dir: %s", err, string(output), cmd.String(), dir) + return nil, cgoPkgs, fmt.Errorf("failed to execute 'go list -m -json all', err: %v, output: %s, cmd string: %s, dir: %s", err, string(output), cmd.String(), dir) } // ignore content until first open index := strings.Index(string(output), "{") @@ -295,6 +295,7 @@ func (p *GoParser) ParseRepo() (Repository, error) { } p.associateStructWithMethods() p.associateImplements() + p.buildNameToLocations() fmt.Fprintf(os.Stderr, "total call packages.Load %d times\n", loadCount) return p.getRepo(), nil } @@ -362,6 +363,74 @@ func (p *GoParser) getRepo() Repository { return p.repo } +// buildNameToLocations 构建 name → files 反向索引 +// 解析完成后调用,一次遍历 Package 填充 +func (p *GoParser) buildNameToLocations() { + if p.repo.NameToLocations == nil { + p.repo.NameToLocations = make(map[string]NameLocations) + } + + for _, mod := range p.repo.Modules { + for _, pkg := range mod.Packages { + // Functions + for name, fn := range 
pkg.Functions { + if fn.File == "" { + continue + } + loc := p.repo.NameToLocations[name] + // 去重 + exists := false + for _, f := range loc.Files { + if f == fn.File { + exists = true + break + } + } + if !exists { + loc.Files = append(loc.Files, fn.File) + } + p.repo.NameToLocations[name] = loc + } + // Types + for name, t := range pkg.Types { + if t.FileLine.File == "" { + continue + } + loc := p.repo.NameToLocations[name] + exists := false + for _, f := range loc.Files { + if f == t.FileLine.File { + exists = true + break + } + } + if !exists { + loc.Files = append(loc.Files, t.FileLine.File) + } + p.repo.NameToLocations[name] = loc + } + // Vars + for name, v := range pkg.Vars { + if v.FileLine.File == "" { + continue + } + loc := p.repo.NameToLocations[name] + exists := false + for _, f := range loc.Files { + if f == v.FileLine.File { + exists = true + break + } + } + if !exists { + loc.Files = append(loc.Files, v.FileLine.File) + } + p.repo.NameToLocations[name] = loc + } + } + } +} + // ToABS converts a local package path to absolute path // If the path is not a local package, return empty string // func (p *goParser) pkgPathToABS(path PkgPath) string { diff --git a/lang/java/ipc/uniast-java-parser-1.0.0-SNAPSHOT-jar-with-dependencies.jar b/lang/java/ipc/uniast-java-parser-1.0.0-SNAPSHOT-jar-with-dependencies.jar new file mode 100644 index 00000000..42ccdbef Binary files /dev/null and b/lang/java/ipc/uniast-java-parser-1.0.0-SNAPSHOT-jar-with-dependencies.jar differ diff --git a/lang/uniast/ast.go b/lang/uniast/ast.go index d62f5d1f..a3e00ef7 100644 --- a/lang/uniast/ast.go +++ b/lang/uniast/ast.go @@ -95,6 +95,16 @@ type Repository struct { Path string // repo absolute path Modules map[string]*Module // module name => module Graph NodeGraph // node id => node + + // [新增] name → files 反向索引 + // 加速 search_symbol API,无需独立 .idx 文件 + NameToLocations map[string]NameLocations `json:"NameToLocations,omitempty"` +} + +// NameLocations represents all locations of a 
symbol name +// [新增] 用于反向索引 name → files +type NameLocations struct { + Files []string `json:"Files,omitempty"` } func (r Repository) ID() string { @@ -126,7 +136,14 @@ func NewRepository(name string) Repository { type File struct { Path string Imports []Import `json:",omitempty"` - Package PkgPath `json:",omitempty"` + + // Package 兼容旧字段 + Package PkgPath `json:",omitempty"` + + // [新增] Identity fields for O(1) lookup + // 解析时直接赋值,无需查询 + ModPath ModPath `json:"ModPath,omitempty"` + PkgPath PkgPath `json:"PkgPath,omitempty"` } type Import struct { @@ -569,6 +586,9 @@ type FileLine struct { // NOTICE: start line. line number start from 1 Line int + // [新增] end line number (1-based) + EndLine int `json:"EndLine,omitempty"` + // start offset in file StartOffset int diff --git a/llm/mcp/handler.go b/llm/mcp/handler.go index 3b17ed6d..ffe4da47 100644 --- a/llm/mcp/handler.go +++ b/llm/mcp/handler.go @@ -59,10 +59,12 @@ func getASTTools(opts tool.ASTReadToolsOptions) []Tool { ast := tool.NewASTReadTools(opts) return []Tool{ NewTool(tool.ToolListRepos, tool.DescListRepos, tool.SchemaListRepos, ast.ListRepos), - NewTool(tool.ToolGetRepoStructure, tool.DescGetRepoStructure, tool.SchemaGetRepoStructure, ast.GetRepoStructure), - NewTool(tool.ToolGetPackageStructure, tool.DescGetPackageStructure, tool.SchemaGetPackageStructure, ast.GetPackageStructure), + // NewTool(tool.ToolGetRepoStructure, tool.DescGetRepoStructure, tool.SchemaGetRepoStructure, ast.GetRepoStructure), + // NewTool(tool.ToolGetPackageStructure, tool.DescGetPackageStructure, tool.SchemaGetPackageStructure, ast.GetPackageStructure), + NewTool(tool.ToolTreeRepo, tool.DescTreeRepo, tool.SchemaTreeRepo, ast.TreeRepo), NewTool(tool.ToolGetFileStructure, tool.DescGetFileStructure, tool.SchemaGetFileStructure, ast.GetFileStructure), - NewTool(tool.ToolGetASTNode, tool.DescGetASTNode, tool.SchemaGetASTNode, ast.GetASTNode), + // NewTool(tool.ToolGetASTNode, tool.DescGetASTNode, tool.SchemaGetASTNode, ast.GetASTNode), 
+ NewTool(tool.ToolGetFileSymbol, tool.DescGetFileSymbol, tool.SchemaGetFileSymbol, ast.GetFileSymbol), } } diff --git a/llm/tool/ast_read.go b/llm/tool/ast_read.go index 520eaca8..e08036e5 100644 --- a/llm/tool/ast_read.go +++ b/llm/tool/ast_read.go @@ -18,8 +18,10 @@ package tool import ( "context" + "encoding/json" "fmt" "path/filepath" + "sort" "strings" "sync" @@ -33,13 +35,18 @@ import ( const ( ToolListRepos = "list_repos" - DescListRepos = "[DISCOVERY] level1/4: List all repositories. No parameters required. Always the first step in any analysis workflow." + DescListRepos = "[DISCOVERY] Step 1/4: List available repositories. Always the first step in ABCoder workflow. You MUST call `tree_repo` later." + ToolTreeRepo = "tree_repo" + DescTreeRepo = "[STRUCTURE] Step 2/4: Get available file_paths of a repo. Input: repo_name from `list_repos` output. Output: available file_paths. You MUST call `get_file_structure` later." + ToolGetFileStructure = "get_file_structure" + DescGetFileStructure = "[STRUCTURE] Step 3/4: Get available symbol names of a file. Input: repo_name, file_path from `tree_repo` output. Output: symbol names with signatures. You MUST call `get_file_symbol` later." + ToolGetFileSymbol = "get_file_symbol" + DescGetFileSymbol = "[ANALYSIS] Step 4/4: Get symbol's code, dependencies and references; use refer/depend's file_path and name as next `get_file_symbol` input. Input: repo_name, file_path, name. Output: codes, dependencies, references. You MUST call `get_file_symbol` with refers/depends file_path and name to check its code, call-chain or data-flow detail." + ToolGetRepoStructure = "get_repo_structure" DescGetRepoStructure = "[STRUCTURE] level2/4: Get repository structure. Input: repo_name from list_repos output. Output: modules with packages and files." ToolGetPackageStructure = "get_package_structure" DescGetPackageStructure = "[STRUCTURE] level3/4: Get package structure with node_ids. 
Input: repo_name, mod_path, pkg_path from get_repo_structure output. Output: files with node_ids." - ToolGetFileStructure = "get_file_structure" - DescGetFileStructure = "[STRUCTURE] level3/4: Get file structure with node list. Input: repo_name, file_path from get_repo_structure output. Output: nodes with signatures." ToolGetASTNode = "get_ast_node" DescGetASTNode = "[ANALYSIS] level4/4: Get detailed AST node info. Input: repo_name, node_ids from previous calls. Output: codes, dependencies, references, implementations." // ToolWriteASTNode = "write_ast_node" @@ -51,11 +58,14 @@ var ( SchemaGetPackageStructure = GetJSONSchema(GetPackageStructReq{}) SchemaGetFileStructure = GetJSONSchema(GetFileStructReq{}) SchemaGetASTNode = GetJSONSchema(GetASTNodeReq{}) + SchemaGetFileSymbol = GetJSONSchema(GetFileSymbolReq{}) + SchemaTreeRepo = GetJSONSchema(TreeRepoReq{}) ) type ASTReadToolsOptions struct { // PatchOptions patch.Options - RepoASTsDir string + RepoASTsDir string + DisableWatch bool } type ASTReadTools struct { @@ -85,21 +95,23 @@ func NewASTReadTools(opts ASTReadToolsOptions) *ASTReadTools { } } - // add a file watch on the RepoASTsDir - abutil.WatchDir(opts.RepoASTsDir, func(op fsnotify.Op, file string) { - if !strings.HasSuffix(file, ".json") { - return - } - if op&fsnotify.Write != 0 || op&fsnotify.Create != 0 { - if repo, err := uniast.LoadRepo(file); err != nil { - log.Error("Load Uniast JSON file failed: %v", err) - } else { - ret.repos.Store(repo.Name, repo) + // add a file watch on the RepoASTsDir (unless disabled) + if !opts.DisableWatch { + abutil.WatchDir(opts.RepoASTsDir, func(op fsnotify.Op, file string) { + if !strings.HasSuffix(file, ".json") { + return } - } else if op&fsnotify.Remove != 0 { - ret.repos.Delete(filepath.Base(file)) - } - }) + if op&fsnotify.Write != 0 || op&fsnotify.Create != 0 { + if repo, err := uniast.LoadRepo(file); err != nil { + log.Error("Load Uniast JSON file failed: %v", err) + } else { + ret.repos.Store(repo.Name, repo) + 
} + } else if op&fsnotify.Remove != 0 { + ret.repos.Delete(filepath.Base(file)) + } + }) + } tt, err := utils.InferTool(string(ToolListRepos), DescListRepos, @@ -150,6 +162,26 @@ func NewASTReadTools(opts ASTReadToolsOptions) *ASTReadTools { panic(err) } ret.tools[ToolGetASTNode] = tt + + tt, err = utils.InferTool(ToolGetFileSymbol, + string(DescGetFileSymbol), + ret.GetFileSymbol, utils.WithMarshalOutput(func(ctx context.Context, output interface{}) (string, error) { + return abutil.MarshalJSONIndent(output) + })) + if err != nil { + panic(err) + } + ret.tools[ToolGetFileSymbol] = tt + + tt, err = utils.InferTool(ToolTreeRepo, + DescTreeRepo, + ret.TreeRepo, utils.WithMarshalOutput(func(ctx context.Context, output interface{}) (string, error) { + return abutil.MarshalJSONIndent(output) + })) + if err != nil { + panic(err) + } + ret.tools[ToolTreeRepo] = tt return ret } @@ -201,8 +233,10 @@ type PackageStruct struct { type FileStruct struct { FilePath string `json:"file_path" jsonschema:"description=the path of the file"` - Imports []uniast.Import `json:"imports,omitempty" jsonschema:"description=the imports of the file"` - Nodes []NodeStruct `json:"nodes,omitempty" jsonschema:"description=the node structs of the file"` + ModPath uniast.ModPath `json:"mod_path,omitempty" jsonschema:"description=the module path"` + PkgPath uniast.PkgPath `json:"pkg_path,omitempty" jsonschema:"description=the package path"` + Imports []uniast.Import `json:"imports,omitempty" jsonschema:"description=the imports of the file"` + Nodes []NodeStruct `json:"nodes,omitempty" jsonschema:"description=the node structs of the file"` } type NodeStruct struct { @@ -243,6 +277,78 @@ func (n NodeID) Identity() uniast.Identity { } } +// FileNodeID 文件节点标识(用于 get_file_symbol 输出) +type FileNodeID struct { + FilePath string `json:"file_path" jsonschema:"description=file path relative to repo root"` + Name string `json:"name" jsonschema:"description=symbol name in the file"` +} + +// 
fileNodeIDGroupByPath 控制是否按 file_path 分组输出(默认 true) +var fileNodeIDGroupByPath = true + +// SetFileNodeIDGroupByPath 设置是否按 file_path 分组输出 +func SetFileNodeIDGroupByPath(group bool) { + fileNodeIDGroupByPath = group +} + +// fileNodeGroup 用于聚合相同 file_path 的 name(私有) +type fileNodeGroup struct { + FilePath string `json:"file_path"` + Names []string `json:"names"` +} + +// groupFileNodeIDs 将 []FileNodeID 转换为 []fileNodeGroup +func groupFileNodeIDs(nodeIDs []FileNodeID) []fileNodeGroup { + groupMap := make(map[string]*fileNodeGroup) + + for _, nid := range nodeIDs { + key := nid.FilePath + + if group, exists := groupMap[key]; exists { + group.Names = append(group.Names, nid.Name) + } else { + groupMap[key] = &fileNodeGroup{ + FilePath: nid.FilePath, + Names: []string{nid.Name}, + } + } + } + + result := make([]fileNodeGroup, 0, len(groupMap)) + for _, group := range groupMap { + result = append(result, *group) + } + + return result +} + +type _FileNodeID FileNodeID + +func (f FileNodeID) MarshalJSON() ([]byte, error) { + if fileNodeIDGroupByPath { + return json.Marshal(fileNodeGroup{ + FilePath: f.FilePath, + Names: []string{f.Name}, + }) + } + return json.Marshal(_FileNodeID(f)) +} + +// convertNodeIDs 将 uniast.Relation 转换为 FileNodeID +func convertNodeIDs(repo *uniast.Repository, relations []uniast.Relation) []FileNodeID { + result := make([]FileNodeID, 0, len(relations)) + for _, rel := range relations { + if n := repo.GetNode(rel.Identity); n != nil { + fl := n.FileLine() + result = append(result, FileNodeID{ + FilePath: fl.File, + Name: rel.Identity.Name, + }) + } + } + return result +} + func (t *ASTReadTools) getRepoAST(repoName string) (*uniast.Repository, error) { repo, ok := t.repos.Load(repoName) if !ok { @@ -420,20 +526,23 @@ func (t *ASTReadTools) getFileStructure(_ context.Context, req GetFileStructReq, nodes := repo.GetFileNodes(req.FilePath) ff := FileStruct{ FilePath: req.FilePath, + ModPath: mod.Name, + PkgPath: file.Package, } if needNodeDetail { 
ff.Imports = file.Imports } + // If nodes count > 500, only show name + line + simplifiedOutput := len(nodes) > 500 for _, n := range nodes { nn := NodeStruct{ - ModPath: mod.Name, - PkgPath: file.Package, - Name: n.Identity.Name, + Name: n.Identity.Name, } if needNodeDetail { - nn.Type = n.Type.String() - nn.Signature = n.Signature() nn.Line = n.FileLine().Line + if !simplifiedOutput && n.Type != uniast.VAR { + nn.Signature = n.Signature() + } } ff.Nodes = append(ff.Nodes, nn, @@ -453,6 +562,57 @@ type GetASTNodeResp struct { Error string `json:"error,omitempty" jsonschema:"description=the error message"` } +type GetFileSymbolReq struct { + RepoName string `json:"repo_name" jsonschema:"description=the name of the repository (output of list_repos tool)"` + FilePath string `json:"file_path" jsonschema:"description=the file path (output of get_repo_structure tool)"` + Name string `json:"name" jsonschema:"description=the name of the symbol (function, type, or variable) to query"` +} + +type GetFileSymbolResp struct { + Node FileNodeStruct `json:"node" jsonschema:"description=the ast node"` + Error string `json:"error,omitempty" jsonschema:"description=the error message"` +} + +// FileNodeStruct 文件节点结构(使用 FileNodeID) +type FileNodeStruct struct { + Name string `json:"name" jsonschema:"description=the name of the node"` + Type string `json:"type,omitempty" jsonschema:"description=the type of the node"` + Signature string `json:"signature,omitempty" jsonschema:"description=the func signature of the node"` + File string `json:"file,omitempty" jsonschema:"description=the file path of the node"` + Line int `json:"line,omitempty" jsonschema:"description=the line of the node"` + Codes string `json:"codes,omitempty" jsonschema:"description=the codes of the node"` + Dependencies []FileNodeID `json:"dependencies,omitempty" jsonschema:"description=the dependencies of the node"` + References []FileNodeID `json:"references,omitempty" jsonschema:"description=the references of the 
node"` + Implements []FileNodeID `json:"implements,omitempty" jsonschema:"description=the implements of the node"` + Groups []FileNodeID `json:"groups,omitempty" jsonschema:"description=the groups of the node"` + Inherits []FileNodeID `json:"inherits,omitempty" jsonschema:"description=the inherits of the node"` +} + +// MarshalJSON 自定义JSON序列化,实现所有关系字段的分组输出 +type _FileNodeStruct FileNodeStruct + +func (n FileNodeStruct) MarshalJSON() ([]byte, error) { + if fileNodeIDGroupByPath { + aux := &struct { + Dependencies []fileNodeGroup `json:"dependencies,omitempty"` + References []fileNodeGroup `json:"references,omitempty"` + Implements []fileNodeGroup `json:"implements,omitempty"` + Groups []fileNodeGroup `json:"groups,omitempty"` + Inherits []fileNodeGroup `json:"inherits,omitempty"` + *_FileNodeStruct + }{ + Dependencies: groupFileNodeIDs(n.Dependencies), + References: groupFileNodeIDs(n.References), + Implements: groupFileNodeIDs(n.Implements), + Groups: groupFileNodeIDs(n.Groups), + Inherits: groupFileNodeIDs(n.Inherits), + _FileNodeStruct: (*_FileNodeStruct)(&n), + } + return json.Marshal(aux) + } + return json.Marshal(_FileNodeStruct(n)) +} + func (t *ASTReadTools) GetASTNode(_ context.Context, params GetASTNodeReq) (*GetASTNodeResp, error) { log.Debug("get ast node, req: %v", abutil.MarshalJSONIndentNoError(params)) @@ -514,3 +674,122 @@ func (t *ASTReadTools) GetASTNode(_ context.Context, params GetASTNodeReq) (*Get log.Debug("get repo structure, resp: %v", abutil.MarshalJSONIndentNoError(resp)) return resp, nil } + +// GetFileSymbol get detailed AST node info by file path and symbol name +func (t *ASTReadTools) GetFileSymbol(_ context.Context, req GetFileSymbolReq) (*GetFileSymbolResp, error) { + log.Debug("get file symbol, req: %v", abutil.MarshalJSONIndentNoError(req)) + + // 加载仓库 + repo, err := t.getRepoAST(req.RepoName) + if err != nil { + return &GetFileSymbolResp{ + Error: err.Error(), + }, nil + } + + // 查找文件 + file, _ := repo.GetFile(req.FilePath) + if 
file == nil { + return &GetFileSymbolResp{ + Error: fmt.Errorf("file '%s' not found. Use 'get_repo_structure' to get valid file paths", req.FilePath).Error(), + }, nil + } + + // 在文件中查找符号 + nodes := repo.GetFileNodes(req.FilePath) + var targetNode *uniast.Node + var found bool + + for _, node := range nodes { + if node.Identity.Name == req.Name { + targetNode = node + found = true + break + } + } + + if !found { + return &GetFileSymbolResp{ + Error: fmt.Sprintf("symbol '%s' not found in file '%s'. Use 'get_file_structure' to list all symbols in the file", req.Name, req.FilePath), + }, nil + } + + // 构建 FileNodeStruct + fl := targetNode.FileLine() + nodeStruct := FileNodeStruct{ + Name: targetNode.Identity.Name, + Type: targetNode.Type.String(), + File: fl.File, + Line: fl.Line, + Codes: targetNode.Content(), + Signature: targetNode.Signature(), + // 使用抽象函数转换所有关系字段 + Dependencies: convertNodeIDs(repo, targetNode.Dependencies), + References: convertNodeIDs(repo, targetNode.References), + Implements: convertNodeIDs(repo, targetNode.Implements), + Inherits: convertNodeIDs(repo, targetNode.Inherits), + Groups: convertNodeIDs(repo, targetNode.Groups), + } + + log.Debug("get file symbol, resp: %v", abutil.MarshalJSONIndentNoError(&GetFileSymbolResp{Node: nodeStruct})) + return &GetFileSymbolResp{ + Node: nodeStruct, + }, nil +} + +type TreeRepoReq struct { + RepoName string `json:"repo_name" jsonschema:"description=the name of the repository (output of list_repos tool)"` +} + +type TreeRepoResp struct { + Files map[string][]string `json:"files" jsonschema:"description=map of directory path to file list (directories end with '/')"` + Error string `json:"error,omitempty" jsonschema:"description=the error message"` +} + +// TreeRepo returns a map of package paths to file lists, with directories ending in '/' +func (t *ASTReadTools) TreeRepo(_ context.Context, req TreeRepoReq) (*TreeRepoResp, error) { + log.Debug("tree repo, req: %v", abutil.MarshalJSONIndentNoError(req)) + 
repo, err := t.getRepoAST(req.RepoName) + if err != nil { + return &TreeRepoResp{ + Error: err.Error(), + }, nil + } + + // 收集所有文件,按目录聚合 + files := make(map[string][]string) + for _, mod := range repo.Modules { + if mod.IsExternal() { + continue + } + for _, file := range mod.Files { + if file.Package == "" { + continue + } + // 过滤掉非当前仓库的文件(以 .. 开头或包含 ..) + if strings.HasPrefix(file.Path, "..") { + continue + } + // 获取文件的目录路径 + dir := filepath.Dir(file.Path) + if dir == "." { + dir = "./" + } + // 添加 '/' 后缀 + if dir != "" && dir != "./" && !strings.HasSuffix(dir, "/") { + dir = dir + "/" + } + // 获取文件名 + name := filepath.Base(file.Path) + files[dir] = append(files[dir], name) + } + } + + // 对每个目录下的文件列表进行排序 + for dir := range files { + sort.Strings(files[dir]) + } + + log.Debug("tree repo, resp: %v", abutil.MarshalJSONIndentNoError(&TreeRepoResp{Files: files})) + return &TreeRepoResp{Files: files}, nil +} diff --git a/main.go b/main.go index 13acf1be..0b578de9 100644 --- a/main.go +++ b/main.go @@ -39,6 +39,7 @@ import ( "strings" internalCmd "github.com/cloudwego/abcoder/internal/cmd" + "github.com/cloudwego/abcoder/internal/cmd/cli" "github.com/cloudwego/abcoder/lang" "github.com/cloudwego/abcoder/lang/log" "github.com/cloudwego/abcoder/lang/uniast" @@ -71,12 +72,14 @@ writing, and analyzing code structures.`, // Global flags cmd.PersistentFlags().BoolP("verbose", "v", false, "Verbose mode.") + cmd.PersistentFlags().String("asts-dir", "", "Base directory for AST files (default: ~/.asts).") // Add subcommands cmd.AddCommand(newVersionCmd()) cmd.AddCommand(newParseCmd()) cmd.AddCommand(newWriteCmd()) cmd.AddCommand(newMcpCmd()) + cmd.AddCommand(cli.NewCliCmd()) cmd.AddCommand(newInitSpecCmd()) cmd.AddCommand(newAgentCmd()) @@ -98,10 +101,11 @@ Use this command to verify installation or when reporting issues.`, func newParseCmd() *cobra.Command { var ( - flagOutput string - flagLsp string - javaHome string - opts lang.ParseOptions + flagOutput string + flagStdout 
bool + flagLsp string + javaHome string + opts lang.ParseOptions ) cmd := &cobra.Command{ @@ -109,7 +113,7 @@ func newParseCmd() *cobra.Command { Short: "Parse repository and export to UniAST JSON format", Long: `Parse the specified repository and generate its Universal AST representation. -By default, outputs to stdout. Use --output to write to a file. +By default, saves to ~/.asts/.json. Use --stdout to output to stdout, use --output to write to a file. Language Support: go - Go projects @@ -140,11 +144,34 @@ Language Support: language := uniast.NewLanguage(args[0]) uri := args[1] + // Convert to absolute path for consistent naming + absPath, err := filepath.Abs(uri) + if err != nil { + log.Error("Failed to get absolute path: %v\n", err) + return err + } + + // Determine output path + outputPath := flagOutput + if outputPath == "" && !flagStdout { + // Default: save to ~/.asts/ + astsDir := filepath.Join(os.Getenv("HOME"), ".asts") + if err := os.MkdirAll(astsDir, 0755); err != nil { + log.Error("Failed to create asts directory: %v\n", err) + return err + } + filename := sanitizeFilename(absPath) + outputPath = filepath.Join(astsDir, filename) + } + if language == uniast.TypeScript { - if err := parseTSProject(context.Background(), uri, opts, flagOutput); err != nil { + if err := parseTSProject(context.Background(), uri, opts, outputPath); err != nil { log.Error("Failed to parse: %v\n", err) return err } + if outputPath != "" && !flagStdout { + fmt.Fprintf(os.Stdout, "AST saved to %s\n", outputPath) + } return nil } @@ -165,12 +192,23 @@ Language Support: return err } - if flagOutput != "" { - if err := utils.MustWriteFile(flagOutput, out); err != nil { + if flagStdout { + // Explicitly output to stdout + fmt.Fprintf(os.Stdout, "%s\n", out) + } else if outputPath != "" { + // Write to .tmp first, then rename to .json + tmpPath := outputPath + ".tmp" + if err := utils.MustWriteFile(tmpPath, out); err != nil { log.Error("Failed to write output: %v\n", err) return 
err } + if err := os.Rename(tmpPath, outputPath); err != nil { + log.Error("Failed to rename temp file: %v\n", err) + return err + } + fmt.Fprintf(os.Stdout, "AST saved to %s\n", outputPath) } else { + // Fallback to stdout (should not happen with new logic) fmt.Fprintf(os.Stdout, "%s\n", out) } @@ -179,7 +217,8 @@ Language Support: } // Flags - cmd.Flags().StringVarP(&flagOutput, "output", "o", "", "Output path for UniAST JSON (default: stdout).") + cmd.Flags().StringVarP(&flagOutput, "output", "o", "", "Output path for UniAST JSON (default: ~/.asts/.json).") + cmd.Flags().BoolVar(&flagStdout, "stdout", false, "Output to stdout instead of saving to file.") cmd.Flags().StringVar(&flagLsp, "lsp", "", "Path to Language Server Protocol executable. Required for languages with LSP support (e.g., Java).") cmd.Flags().StringVar(&javaHome, "java-home", "", "Java installation directory (JAVA_HOME). Required when using LSP for Java.") cmd.Flags().BoolVar(&opts.LoadExternalSymbol, "load-external-symbol", false, "Load external symbol references into AST results (slower but more complete).") @@ -195,6 +234,13 @@ Language Support: return cmd } +// sanitizeFilename converts absolute path to safe filename +// e.g., /Users/bytedance/astRepo/golang/abcoder -> -Users-bytedance-astRepo-golang-abcoder.json +func sanitizeFilename(path string) string { + name := strings.ReplaceAll(path, "/", "-") + return name + ".json" +} + func newWriteCmd() *cobra.Command { var ( flagOutput string diff --git a/python-parser/.gitignore b/python-parser/.gitignore new file mode 100644 index 00000000..20f4dfe3 --- /dev/null +++ b/python-parser/.gitignore @@ -0,0 +1,40 @@ +# Pyright 内部生成文件 & 缓存 +packages/python-parser/dist/ +packages/python-parser/typeshed-fallback/ +typeshed-fallback/ +dist/ +*.pyi + +# 通用 Python 缓存 +__pycache__/ +*.py[cod] + +# TypeScript 编译输出 +*.js +*.js.map +*.d.ts +*.tsbuildinfo + +# 构建输出目录 +dist/ +out/ +build/ + +# 内部生成的声明与映射 +packages/**/*.js +packages/**/*.js.map 
+packages/**/*.d.ts + +# 日志、IDE、缓存 +node_modules/ +.DS_Store +.idea/ +.vscode/ +*.log + +*.md +.spec/ +.repotalk/ +.claude/ +.abcoder/ +.mcp.json diff --git a/python-parser/e2e/mock-python/__init__.py b/python-parser/e2e/mock-python/__init__.py new file mode 100644 index 00000000..53131dd6 --- /dev/null +++ b/python-parser/e2e/mock-python/__init__.py @@ -0,0 +1,13 @@ +from pkg_a import get_symbol_a, SymbolA +from pkg_b import get_symbol_b, SymbolB + + +def main() -> None: + a: SymbolA = get_symbol_a() + b: SymbolB = get_symbol_b(a) + print(a.get_value()) + print(b.get_value()) + + +if __name__ == "__main__": + main() diff --git a/python-parser/e2e/mock-python/pkg_a/__init__.py b/python-parser/e2e/mock-python/pkg_a/__init__.py new file mode 100644 index 00000000..16d3af73 --- /dev/null +++ b/python-parser/e2e/mock-python/pkg_a/__init__.py @@ -0,0 +1,5 @@ +from .module_a import symbol_a, SymbolA + + +def get_symbol_a() -> SymbolA: + return symbol_a() diff --git a/python-parser/e2e/mock-python/pkg_a/module_a.py b/python-parser/e2e/mock-python/pkg_a/module_a.py new file mode 100644 index 00000000..107903c1 --- /dev/null +++ b/python-parser/e2e/mock-python/pkg_a/module_a.py @@ -0,0 +1,14 @@ +from dataclasses import dataclass + + +@dataclass(frozen=True) +class SymbolA: + """强类型的 Symbol A""" + value: str + + def get_value(self) -> str: + return self.value + + +def symbol_a() -> SymbolA: + return SymbolA(value="Symbol A from pkg_a") diff --git a/python-parser/e2e/mock-python/pkg_b/module_b.py b/python-parser/e2e/mock-python/pkg_b/module_b.py new file mode 100644 index 00000000..32432017 --- /dev/null +++ b/python-parser/e2e/mock-python/pkg_b/module_b.py @@ -0,0 +1,16 @@ +from dataclasses import dataclass +from pkg_a.module_a import SymbolA + + +@dataclass(frozen=True) +class SymbolB: + """强类型的 Symbol B""" + value: str + + def get_value(self) -> str: + return self.value + + +def symbol_b(a: SymbolA) -> SymbolB: + result: str = a.get_value() + return SymbolB(value=f"Symbol 
B uses: {result}") diff --git a/python-parser/e2e/mock-python/pyproject.toml b/python-parser/e2e/mock-python/pyproject.toml new file mode 100644 index 00000000..4b34cce6 --- /dev/null +++ b/python-parser/e2e/mock-python/pyproject.toml @@ -0,0 +1,16 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "mock-python" +version = "0.1.0" +description = "Mock Python Project" +readme = "README.md" +requires-python = ">=3.10" + +[tool.hatch.build.targets.wheel] +packages = ["src/pkg_a", "src/pkg_b", "src"] + +[tool.hatch.structure] +root = "src" diff --git a/python-parser/e2e/mock-python/test.py b/python-parser/e2e/mock-python/test.py new file mode 100644 index 00000000..b415a399 --- /dev/null +++ b/python-parser/e2e/mock-python/test.py @@ -0,0 +1,227 @@ +""" +Mock Python 测试文件 - 完整版 +用于验证 parser 的符号提取准确性 +""" + +from typing import TypeAlias, List, Optional +import os +from pathlib import Path + +# ============================================ +# 1. 顶层变量 (Top-level VAR) +# ============================================ + +TOP_STRING = "hello" +TOP_NUMBER = 42 +TOP_LIST = [1, 2, 3] + +# 带类型注解的变量 +TYPED_VAR: int = 100 + +# ============================================ +# 2. 顶层类型别名 (Top-level TYPE) +# ============================================ + +TopType1: TypeAlias = str +TopType2: TypeAlias = int +TopListType: TypeAlias = List[int] +TopOptionalType: TypeAlias = Optional[str] + +# ============================================ +# 3. 
顶层函数 (Top-level FUNC) +# ============================================ + +def top_func_no_params(): + """顶层函数:无参数""" + pass + +def top_func_with_params(a: int, b: str) -> bool: + """顶层函数:有参数和返回值""" + return True + +def top_func_calling(): + """顶层函数:调用其他函数和类""" + result = helper_func() + obj = SymbolA() + return result + +def helper_func(): + """辅助函数""" + return TOP_NUMBER + +def func_with_type_annotations(x: int, y: str = "default") -> List[int]: + """带完整类型注解的函数""" + return [x] + +# ============================================ +# 4. 顶层类 (Top-level CLASS) +# ============================================ + +class SymbolA: + """顶层类""" + value: str = "default" + + def get_value(self) -> str: + return self.value + + +# 继承示例 +class ChildClass(SymbolA): + """子类:继承 SymbolA""" + + child_attr: int = 10 + + def get_value(self) -> str: + """方法覆盖""" + return f"Child: {self.value}" + + +# 多继承示例 +class Mixin: + def mixin_method(self): + return "mixin" + + +class MultiInherit(SymbolA, Mixin): + """多继承类""" + pass + + +# ============================================ +# 5. 导入语句(测试导入解析) +# ============================================ + +# 标准导入 +import sys +import os as operating_system + +# from 导入 +from typing import Dict, Tuple + +# 带别名的导入 +from pathlib import Path as FilePath + + +# ============================================ +# 6. 装饰器(测试装饰器解析) +# ============================================ + +class ClassWithDecorators: + """带装饰器的类""" + + @property + def prop(self) -> str: + return "property" + + @staticmethod + def static_method(): + return "static" + + @classmethod + def class_method(cls): + return "class" + + +# ============================================ +# 7. 
局部符号(在函数内) +# ============================================ + +def func_with_locals(): + """ + 包含局部符号的函数 + 这些不应该出现在 file_structure 中 + """ + + # 局部变量 + local_var_1 = 1 + local_var_2: str = "local" + + # 局部类型别名 + LocalType: TypeAlias = int + + # 局部函数 + def local_func(): + return local_var_1 + + # 局部类 + class LocalClass: + pass + + return local_func() + + +def func_with_nested(): + """包含嵌套局部符号的函数""" + + def nested_func(): + nested_var = 100 + return nested_var + + return nested_func() + + +# ============================================ +# 8. 控制流(测试复杂表达式) +# ============================================ + +def func_with_control_flow(x: int) -> int: + """包含控制流的函数""" + if x > 0: + return x + else: + return -x + + +def func_with_loop(items: List[int]) -> int: + """包含循环的函数""" + total = 0 + for item in items: + total += item + return total + + +def func_with_exception() -> str: + """包含异常处理的函数""" + try: + return "success" + except Exception as e: + return str(e) + + +# ============================================ +# 9. 类成员访问 +# ============================================ + +def func_using_class_members(): + """使用类成员""" + obj = SymbolA() + value = obj.get_value() # 方法调用 + attr = obj.value # 属性访问 + return value + + +# ============================================ +# 10. 
异步函数 +# ============================================ + +async def async_func(): + """异步函数""" + await helper_func() + return "async" + + +# ============================================ +# 导出 +# ============================================ + +__all__ = [ + 'TOP_STRING', + 'TOP_NUMBER', + 'TopType1', + 'TopType2', + 'top_func_no_params', + 'top_func_calling', + 'helper_func', + 'SymbolA', + 'ChildClass', +] diff --git a/python-parser/package-lock.json b/python-parser/package-lock.json new file mode 100644 index 00000000..b14c9e86 --- /dev/null +++ b/python-parser/package-lock.json @@ -0,0 +1,13534 @@ +{ + "name": "python-parser", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "python-parser", + "version": "1.0.0", + "workspaces": [ + "packages/*" + ], + "devDependencies": { + "lerna": "^8.2.4" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.29.0", + "resolved": "https://registry.npmmirror.com/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.28.5", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.29.0", + "resolved": "https://registry.npmmirror.com/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.29.0", + "resolved": "https://registry.npmmirror.com/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmmirror.com/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/generator": { + "version": "7.29.1", + "resolved": "https://registry.npmmirror.com/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.28.6", + "resolved": "https://registry.npmmirror.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.28.6", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + 
"semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmmirror.com/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmmirror.com/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmmirror.com/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmmirror.com/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.28.6", + "resolved": "https://registry.npmmirror.com/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/@babel/helper-module-transforms": { + "version": "7.28.6", + "resolved": "https://registry.npmmirror.com/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.28.6", + "resolved": "https://registry.npmmirror.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmmirror.com/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmmirror.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmmirror.com/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": 
"sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.28.6", + "resolved": "https://registry.npmmirror.com/@babel/helpers/-/helpers-7.28.6.tgz", + "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.29.0", + "resolved": "https://registry.npmmirror.com/@babel/parser/-/parser-7.29.0.tgz", + "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.29.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.28.6", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.28.6.tgz", + "integrity": "sha512-jiLC0ma9XkQT3TKJ9uYvlakm66Pamywo+qwL+oL8HJOvc6TWdZXVfhqJr8CCzbSGUAbDOzlGHJC1U+vRfLQDvw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + 
"@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.28.6", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.28.6.tgz", + "integrity": "sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": 
{ + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": 
"^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.28.6", + "resolved": "https://registry.npmmirror.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.28.6.tgz", + "integrity": "sha512-+nDNmQye7nlnuuHDboPbGm00Vqg3oO8niRRL27/4LYHUsHYh0zJ1xWOz0uRwNFmM1Avzk8wZbc6rdiYhomzv/A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.28.6", + "resolved": "https://registry.npmmirror.com/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + 
"@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.29.0", + "resolved": "https://registry.npmmirror.com/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.29.0", + "resolved": "https://registry.npmmirror.com/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmmirror.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@discoveryjs/json-ext": { + "version": "0.5.7", + "resolved": "https://registry.npmmirror.com/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz", + "integrity": "sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@emnapi/core": { + "version": "1.8.1", + "resolved": "https://registry.npmmirror.com/@emnapi/core/-/core-1.8.1.tgz", + "integrity": 
"sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@emnapi/wasi-threads": "1.1.0", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.8.1", + "resolved": "https://registry.npmmirror.com/@emnapi/runtime/-/runtime-1.8.1.tgz", + "integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/@emnapi/wasi-threads/-/wasi-threads-1.1.0.tgz", + "integrity": "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz", + "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/android-arm/-/android-arm-0.27.3.tgz", + "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz", + "integrity": 
"sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/android-x64/-/android-x64-0.27.3.tgz", + "integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz", + "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz", + "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz", + "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.3", + 
"resolved": "https://registry.npmmirror.com/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz", + "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz", + "integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz", + "integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz", + "integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz", + "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { 
+ "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz", + "integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz", + "integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz", + "integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz", + "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz", + "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==", + "cpu": [ 
+ "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz", + "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz", + "integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz", + "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz", + "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz", + 
"integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz", + "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz", + "integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz", + "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz", + "integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@hutson/parse-repository-url": { + "version": "3.0.2", + 
"resolved": "https://registry.npmmirror.com/@hutson/parse-repository-url/-/parse-repository-url-3.0.2.tgz", + "integrity": "sha512-H9XAx3hc0BQHY6l+IFSWHDySypcXsvsuLhgYLUGywmJ5pswRVQJUHpOsobnLYp2ZUaUlKiKDrgWWhosOwAEM8Q==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@inquirer/external-editor": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/@inquirer/external-editor/-/external-editor-1.0.3.tgz", + "integrity": "sha512-RWbSrDiYmO4LbejWY7ttpxczuwQyZLBUyygsA9Nsv95hpzUWwnNTVQmAq3xuh7vNwCp07UTmE5i11XAEExx4RA==", + "dev": true, + "license": "MIT", + "dependencies": { + "chardet": "^2.1.1", + "iconv-lite": "^0.7.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmmirror.com/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": "https://registry.npmmirror.com/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": 
"https://registry.npmmirror.com/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmmirror.com/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmmirror.com/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.2.0", + "resolved": "https://registry.npmmirror.com/strip-ansi/-/strip-ansi-7.2.0.tgz", + "integrity": "sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.2.2" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmmirror.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "license": "MIT", + "dependencies": { 
+ "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/string-locale-compare": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/@isaacs/string-locale-compare/-/string-locale-compare-1.1.0.tgz", + "integrity": "sha512-SQ7Kzhh9+D+ZW9MA0zkYv3VXhIDNx+LzM6EJ+/65I3QY+enU6Itte7E5XX7EWrqLW2FN4n06GWzBnPoC3th2aQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmmirror.com/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { + "version": "3.14.2", + "resolved": "https://registry.npmmirror.com/js-yaml/-/js-yaml-3.14.2.tgz", + "integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": 
"https://registry.npmmirror.com/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/@jest/console/-/console-30.2.0.tgz", + "integrity": "sha512-+O1ifRjkvYIkBqASKWgLxrpEhQAAE7hY77ALLUufSk5717KfOShg6IbqLmdsLMPdUiFvA2kTs0R7YZy+l0IzZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "jest-message-util": "30.2.0", + "jest-util": "30.2.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/console/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/core": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/@jest/core/-/core-30.2.0.tgz", + "integrity": "sha512-03W6IhuhjqTlpzh/ojut/pDB2LPRygyWX8ExpgHtQA8H/3K7+1vKmcINx5UzeOX1se6YEsBsOHQ1CRzf3fOwTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "30.2.0", + "@jest/pattern": "30.0.1", + "@jest/reporters": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "ansi-escapes": "^4.3.2", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "exit-x": "^0.2.2", + "graceful-fs": "^4.2.11", + "jest-changed-files": "30.2.0", + "jest-config": "30.2.0", + "jest-haste-map": "30.2.0", + "jest-message-util": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.2.0", + "jest-resolve-dependencies": "30.2.0", + 
"jest-runner": "30.2.0", + "jest-runtime": "30.2.0", + "jest-snapshot": "30.2.0", + "jest-util": "30.2.0", + "jest-validate": "30.2.0", + "jest-watcher": "30.2.0", + "micromatch": "^4.0.8", + "pretty-format": "30.2.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/core/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmmirror.com/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/core/node_modules/@sinclair/typebox": { + "version": "0.34.48", + "resolved": "https://registry.npmmirror.com/@sinclair/typebox/-/typebox-0.34.48.tgz", + "integrity": "sha512-kKJTNuK3AQOrgjjotVxMrCn1sUJwM76wMszfq1kdU4uYVJjvEWuFQ6HgvLt4Xz3fSmZlTOxJ/Ie13KnIcWQXFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jest/core/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@jest/core/node_modules/ci-info": { + "version": "4.4.0", + "resolved": "https://registry.npmmirror.com/ci-info/-/ci-info-4.4.0.tgz", + "integrity": "sha512-77PSwercCZU2Fc4sX94eF8k8Pxte6JAwL4/ICZLFjJLqegs7kCuAsqqj/70NQF6TvDpgFjkubQB2FW2ZZddvQg==", + "dev": true, + "funding": [ + { + "type": 
"github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/core/node_modules/pretty-format": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/pretty-format/-/pretty-format-30.2.0.tgz", + "integrity": "sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/core/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/diff-sequences": { + "version": "30.0.1", + "resolved": "https://registry.npmmirror.com/@jest/diff-sequences/-/diff-sequences-30.0.1.tgz", + "integrity": "sha512-n5H8QLDJ47QqbCNn5SuFjCRDrOLEZ0h8vAHCK5RL9Ls7Xa8AQLa/YxAc9UjFqoEDM48muwtBGjtMY5cr0PLDCw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/environment": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/@jest/environment/-/environment-30.2.0.tgz", + "integrity": "sha512-/QPTL7OBJQ5ac09UDRa3EQes4gt1FTEG/8jZ/4v5IVzx+Cv7dLxlVIvfvSVRiiX2drWyXeBjkMSR8hvOWSog5g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/fake-timers": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "jest-mock": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/expect": { + "version": "30.2.0", + "resolved": 
"https://registry.npmmirror.com/@jest/expect/-/expect-30.2.0.tgz", + "integrity": "sha512-V9yxQK5erfzx99Sf+7LbhBwNWEZ9eZay8qQ9+JSC0TrMR1pMDHLMY+BnVPacWU6Jamrh252/IKo4F1Xn/zfiqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "30.2.0", + "jest-snapshot": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/expect-utils": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/@jest/expect-utils/-/expect-utils-30.2.0.tgz", + "integrity": "sha512-1JnRfhqpD8HGpOmQp180Fo9Zt69zNtC+9lR+kT7NVL05tNXIi+QC8Csz7lfidMoVLPD3FnOtcmp0CEFnxExGEA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/fake-timers": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/@jest/fake-timers/-/fake-timers-30.2.0.tgz", + "integrity": "sha512-HI3tRLjRxAbBy0VO8dqqm7Hb2mIa8d5bg/NJkyQcOk7V118ObQML8RC5luTF/Zsg4474a+gDvhce7eTnP4GhYw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@sinonjs/fake-timers": "^13.0.0", + "@types/node": "*", + "jest-message-util": "30.2.0", + "jest-mock": "30.2.0", + "jest-util": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/get-type": { + "version": "30.1.0", + "resolved": "https://registry.npmmirror.com/@jest/get-type/-/get-type-30.1.0.tgz", + "integrity": "sha512-eMbZE2hUnx1WV0pmURZY9XoXPkUYjpc55mb0CrhtdWLtzMQPFvu/rZkTLZFTsdaVQa+Tr4eWAteqcUzoawq/uA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/globals": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/@jest/globals/-/globals-30.2.0.tgz", + "integrity": 
"sha512-b63wmnKPaK+6ZZfpYhz9K61oybvbI1aMcIs80++JI1O1rR1vaxHUCNqo3ITu6NU0d4V34yZFoHMn/uoKr/Rwfw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "30.2.0", + "@jest/expect": "30.2.0", + "@jest/types": "30.2.0", + "jest-mock": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/pattern": { + "version": "30.0.1", + "resolved": "https://registry.npmmirror.com/@jest/pattern/-/pattern-30.0.1.tgz", + "integrity": "sha512-gWp7NfQW27LaBQz3TITS8L7ZCQ0TLvtmI//4OwlQRx4rnWxcPNIYjxZpDcN4+UlGxgm3jS5QPz8IPTCkb59wZA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "jest-regex-util": "30.0.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/reporters": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/@jest/reporters/-/reporters-30.2.0.tgz", + "integrity": "sha512-DRyW6baWPqKMa9CzeiBjHwjd8XeAyco2Vt8XbcLFjiwCOEKOvy82GJ8QQnJE9ofsxCMPjH4MfH8fCWIHHDKpAQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "@jridgewell/trace-mapping": "^0.3.25", + "@types/node": "*", + "chalk": "^4.1.2", + "collect-v8-coverage": "^1.0.2", + "exit-x": "^0.2.2", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^6.0.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^5.0.0", + "istanbul-reports": "^3.1.3", + "jest-message-util": "30.2.0", + "jest-util": "30.2.0", + "jest-worker": "30.2.0", + "slash": "^3.0.0", + "string-length": "^4.0.2", + "v8-to-istanbul": "^9.0.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + 
"node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/reporters/node_modules/jest-worker": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-worker/-/jest-worker-30.2.0.tgz", + "integrity": "sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@ungap/structured-clone": "^1.3.0", + "jest-util": "30.2.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/reporters/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/reporters/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmmirror.com/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmmirror.com/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/snapshot-utils": { + "version": "30.2.0", + "resolved": 
"https://registry.npmmirror.com/@jest/snapshot-utils/-/snapshot-utils-30.2.0.tgz", + "integrity": "sha512-0aVxM3RH6DaiLcjj/b0KrIBZhSX1373Xci4l3cW5xiUWPctZ59zQ7jj4rqcJQ/Z8JuN/4wX3FpJSa3RssVvCug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "natural-compare": "^1.4.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/source-map": { + "version": "30.0.1", + "resolved": "https://registry.npmmirror.com/@jest/source-map/-/source-map-30.0.1.tgz", + "integrity": "sha512-MIRWMUUR3sdbP36oyNyhbThLHyJ2eEDClPCiHVbrYAe5g3CHRArIVpBw7cdSB5fr+ofSfIb2Tnsw8iEHL0PYQg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "callsites": "^3.1.0", + "graceful-fs": "^4.2.11" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/test-result": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/@jest/test-result/-/test-result-30.2.0.tgz", + "integrity": "sha512-RF+Z+0CCHkARz5HT9mcQCBulb1wgCP3FBvl9VFokMX27acKphwyQsNuWH3c+ojd1LeWBLoTYoxF0zm6S/66mjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "30.2.0", + "@jest/types": "30.2.0", + "@types/istanbul-lib-coverage": "^2.0.6", + "collect-v8-coverage": "^1.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/test-sequencer": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/@jest/test-sequencer/-/test-sequencer-30.2.0.tgz", + "integrity": "sha512-wXKgU/lk8fKXMu/l5Hog1R61bL4q5GCdT6OJvdAFz1P+QrpoFuLU68eoKuVc4RbrTtNnTL5FByhWdLgOPSph+Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "30.2.0", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + 
}, + "node_modules/@jest/test-sequencer/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/transform": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/@jest/transform/-/transform-30.2.0.tgz", + "integrity": "sha512-XsauDV82o5qXbhalKxD7p4TZYYdwcaEXC77PPD2HixEFF+6YGppjrAAQurTl2ECWcEomHBMMNS9AH3kcCFx8jA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/types": "30.2.0", + "@jridgewell/trace-mapping": "^0.3.25", + "babel-plugin-istanbul": "^7.0.1", + "chalk": "^4.1.2", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "micromatch": "^4.0.8", + "pirates": "^4.0.7", + "slash": "^3.0.0", + "write-file-atomic": "^5.0.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/transform/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + 
"@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/types/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmmirror.com/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/types/node_modules/@sinclair/typebox": { + "version": "0.34.48", + "resolved": "https://registry.npmmirror.com/@sinclair/typebox/-/typebox-0.34.48.tgz", + "integrity": "sha512-kKJTNuK3AQOrgjjotVxMrCn1sUJwM76wMszfq1kdU4uYVJjvEWuFQ6HgvLt4Xz3fSmZlTOxJ/Ie13KnIcWQXFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmmirror.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmmirror.com/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmmirror.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": 
"sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/source-map": { + "version": "0.3.11", + "resolved": "https://registry.npmmirror.com/@jridgewell/source-map/-/source-map-0.3.11.tgz", + "integrity": "sha512-ZMp1V8ZFcPG5dIWnQLr3NSI1MiCU7UETdS/A0G8V/XWHvJv3ZsFqutJn1Y5RPmAPX6F3BiE397OqveU/9NCuIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmmirror.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmmirror.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@lerna/create": { + "version": "8.2.4", + "resolved": "https://registry.npmmirror.com/@lerna/create/-/create-8.2.4.tgz", + "integrity": "sha512-A8AlzetnS2WIuhijdAzKUyFpR5YbLLfV3luQ4lzBgIBgRfuoBDZeF+RSZPhra+7A6/zTUlrbhKZIOi/MNhqgvQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@npmcli/arborist": "7.5.4", + "@npmcli/package-json": "5.2.0", + "@npmcli/run-script": "8.1.0", + "@nx/devkit": ">=17.1.2 < 21", + "@octokit/plugin-enterprise-rest": "6.0.1", + "@octokit/rest": "20.1.2", + "aproba": "2.0.0", + "byte-size": "8.1.1", + "chalk": "4.1.0", + "clone-deep": "4.0.1", + "cmd-shim": "6.0.3", + "color-support": 
"1.1.3", + "columnify": "1.6.0", + "console-control-strings": "^1.1.0", + "conventional-changelog-core": "5.0.1", + "conventional-recommended-bump": "7.0.1", + "cosmiconfig": "9.0.0", + "dedent": "1.5.3", + "execa": "5.0.0", + "fs-extra": "^11.2.0", + "get-stream": "6.0.0", + "git-url-parse": "14.0.0", + "glob-parent": "6.0.2", + "graceful-fs": "4.2.11", + "has-unicode": "2.0.1", + "ini": "^1.3.8", + "init-package-json": "6.0.3", + "inquirer": "^8.2.4", + "is-ci": "3.0.1", + "is-stream": "2.0.0", + "js-yaml": "4.1.0", + "libnpmpublish": "9.0.9", + "load-json-file": "6.2.0", + "make-dir": "4.0.0", + "minimatch": "3.0.5", + "multimatch": "5.0.0", + "node-fetch": "2.6.7", + "npm-package-arg": "11.0.2", + "npm-packlist": "8.0.2", + "npm-registry-fetch": "^17.1.0", + "nx": ">=17.1.2 < 21", + "p-map": "4.0.0", + "p-map-series": "2.1.0", + "p-queue": "6.6.2", + "p-reduce": "^2.1.0", + "pacote": "^18.0.6", + "pify": "5.0.0", + "read-cmd-shim": "4.0.0", + "resolve-from": "5.0.0", + "rimraf": "^4.4.1", + "semver": "^7.3.4", + "set-blocking": "^2.0.0", + "signal-exit": "3.0.7", + "slash": "^3.0.0", + "ssri": "^10.0.6", + "string-width": "^4.2.3", + "tar": "6.2.1", + "temp-dir": "1.0.0", + "through": "2.3.8", + "tinyglobby": "0.2.12", + "upath": "2.0.1", + "uuid": "^10.0.0", + "validate-npm-package-license": "^3.0.4", + "validate-npm-package-name": "5.0.1", + "wide-align": "1.1.5", + "write-file-atomic": "5.0.1", + "write-pkg": "4.0.0", + "yargs": "17.7.2", + "yargs-parser": "21.1.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@lerna/create/node_modules/chalk": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/chalk/-/chalk-4.1.0.tgz", + "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@lerna/create/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.4", + "resolved": "https://registry.npmmirror.com/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.4.tgz", + "integrity": "sha512-9zESzOO5aDByvhIAsOy9TbpZ0Ur2AJbUI7UT73kcUTS2mxAMHOBaa1st/jAymNoCtvrit99kkzT1FZuXVcgfIQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@emnapi/core": "^1.1.0", + "@emnapi/runtime": "^1.1.0", + "@tybys/wasm-util": "^0.9.0" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmmirror.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmmirror.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmmirror.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + 
"node_modules/@npmcli/agent": { + "version": "2.2.2", + "resolved": "https://registry.npmmirror.com/@npmcli/agent/-/agent-2.2.2.tgz", + "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==", + "dev": true, + "license": "ISC", + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/arborist": { + "version": "7.5.4", + "resolved": "https://registry.npmmirror.com/@npmcli/arborist/-/arborist-7.5.4.tgz", + "integrity": "sha512-nWtIc6QwwoUORCRNzKx4ypHqCk3drI+5aeYdMTQQiRCcn4lOOgfQh7WyZobGYTxXPSq1VwV53lkpN/BRlRk08g==", + "dev": true, + "license": "ISC", + "dependencies": { + "@isaacs/string-locale-compare": "^1.1.0", + "@npmcli/fs": "^3.1.1", + "@npmcli/installed-package-contents": "^2.1.0", + "@npmcli/map-workspaces": "^3.0.2", + "@npmcli/metavuln-calculator": "^7.1.1", + "@npmcli/name-from-folder": "^2.0.0", + "@npmcli/node-gyp": "^3.0.0", + "@npmcli/package-json": "^5.1.0", + "@npmcli/query": "^3.1.0", + "@npmcli/redact": "^2.0.0", + "@npmcli/run-script": "^8.1.0", + "bin-links": "^4.0.4", + "cacache": "^18.0.3", + "common-ancestor-path": "^1.0.1", + "hosted-git-info": "^7.0.2", + "json-parse-even-better-errors": "^3.0.2", + "json-stringify-nice": "^1.1.4", + "lru-cache": "^10.2.2", + "minimatch": "^9.0.4", + "nopt": "^7.2.1", + "npm-install-checks": "^6.2.0", + "npm-package-arg": "^11.0.2", + "npm-pick-manifest": "^9.0.1", + "npm-registry-fetch": "^17.0.1", + "pacote": "^18.0.6", + "parse-conflict-json": "^3.0.0", + "proc-log": "^4.2.0", + "proggy": "^2.0.0", + "promise-all-reject-late": "^1.0.0", + "promise-call-limit": "^3.0.1", + "read-package-json-fast": "^3.0.2", + "semver": "^7.3.7", + "ssri": "^10.0.6", + "treeverse": "^3.0.0", + "walk-up-path": "^3.0.1" + }, + "bin": { + "arborist": "bin/index.js" + }, + 
"engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/arborist/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@npmcli/arborist/node_modules/json-parse-even-better-errors": { + "version": "3.0.2", + "resolved": "https://registry.npmmirror.com/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz", + "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/arborist/node_modules/minimatch": { + "version": "9.0.9", + "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.2" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@npmcli/fs": { + "version": "3.1.1", + "resolved": "https://registry.npmmirror.com/@npmcli/fs/-/fs-3.1.1.tgz", + "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==", + "dev": true, + "license": "ISC", + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/git": { + "version": "5.0.8", + "resolved": "https://registry.npmmirror.com/@npmcli/git/-/git-5.0.8.tgz", + "integrity": 
"sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/promise-spawn": "^7.0.0", + "ini": "^4.1.3", + "lru-cache": "^10.0.1", + "npm-pick-manifest": "^9.0.0", + "proc-log": "^4.0.0", + "promise-inflight": "^1.0.1", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/git/node_modules/ini": { + "version": "4.1.3", + "resolved": "https://registry.npmmirror.com/ini/-/ini-4.1.3.tgz", + "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/git/node_modules/isexe": { + "version": "3.1.5", + "resolved": "https://registry.npmmirror.com/isexe/-/isexe-3.1.5.tgz", + "integrity": "sha512-6B3tLtFqtQS4ekarvLVMZ+X+VlvQekbe4taUkf/rhVO3d/h0M2rfARm/pXLcPEsjjMsFgrFgSrhQIxcSVrBz8w==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/@npmcli/git/node_modules/which": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/which/-/which-4.0.0.tgz", + "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/installed-package-contents": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/@npmcli/installed-package-contents/-/installed-package-contents-2.1.0.tgz", + "integrity": "sha512-c8UuGLeZpm69BryRykLuKRyKFZYJsZSCT4aVY5ds4omyZqJ172ApzgfKJ5eV/r3HgLdUYgFVe54KSFVjKoe27w==", + "dev": true, + "license": "ISC", + "dependencies": { + "npm-bundled": "^3.0.0", + 
"npm-normalize-package-bin": "^3.0.0" + }, + "bin": { + "installed-package-contents": "bin/index.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/map-workspaces": { + "version": "3.0.6", + "resolved": "https://registry.npmmirror.com/@npmcli/map-workspaces/-/map-workspaces-3.0.6.tgz", + "integrity": "sha512-tkYs0OYnzQm6iIRdfy+LcLBjcKuQCeE5YLb8KnrIlutJfheNaPvPpgoFEyEFgbjzl5PLZ3IA/BWAwRU0eHuQDA==", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/name-from-folder": "^2.0.0", + "glob": "^10.2.2", + "minimatch": "^9.0.0", + "read-package-json-fast": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/map-workspaces/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@npmcli/map-workspaces/node_modules/minimatch": { + "version": "9.0.9", + "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.2" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@npmcli/metavuln-calculator": { + "version": "7.1.1", + "resolved": "https://registry.npmmirror.com/@npmcli/metavuln-calculator/-/metavuln-calculator-7.1.1.tgz", + "integrity": "sha512-Nkxf96V0lAx3HCpVda7Vw4P23RILgdi/5K1fmj2tZkWIYLpXAN8k2UVVOsW16TsS5F8Ws2I7Cm+PU1/rsVF47g==", + "dev": true, + "license": "ISC", + "dependencies": { + "cacache": "^18.0.0", + "json-parse-even-better-errors": 
"^3.0.0", + "pacote": "^18.0.0", + "proc-log": "^4.1.0", + "semver": "^7.3.5" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/json-parse-even-better-errors": { + "version": "3.0.2", + "resolved": "https://registry.npmmirror.com/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz", + "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/name-from-folder": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/@npmcli/name-from-folder/-/name-from-folder-2.0.0.tgz", + "integrity": "sha512-pwK+BfEBZJbKdNYpHHRTNBwBoqrN/iIMO0AiGvYsp3Hoaq0WbgGSWQR6SCldZovoDpY3yje5lkFUe6gsDgJ2vg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/node-gyp": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/@npmcli/node-gyp/-/node-gyp-3.0.0.tgz", + "integrity": "sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/package-json": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/@npmcli/package-json/-/package-json-5.2.0.tgz", + "integrity": "sha512-qe/kiqqkW0AGtvBjL8TJKZk/eBBSpnJkUWvHdQ9jM2lKHXRYYJuyNpJPlJw3c8QjC2ow6NZYiLExhUaeJelbxQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/git": "^5.0.0", + "glob": "^10.2.2", + "hosted-git-info": "^7.0.0", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^6.0.0", + "proc-log": "^4.0.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + 
"node_modules/@npmcli/package-json/node_modules/json-parse-even-better-errors": { + "version": "3.0.2", + "resolved": "https://registry.npmmirror.com/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz", + "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/promise-spawn": { + "version": "7.0.2", + "resolved": "https://registry.npmmirror.com/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz", + "integrity": "sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "which": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/promise-spawn/node_modules/isexe": { + "version": "3.1.5", + "resolved": "https://registry.npmmirror.com/isexe/-/isexe-3.1.5.tgz", + "integrity": "sha512-6B3tLtFqtQS4ekarvLVMZ+X+VlvQekbe4taUkf/rhVO3d/h0M2rfARm/pXLcPEsjjMsFgrFgSrhQIxcSVrBz8w==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/@npmcli/promise-spawn/node_modules/which": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/which/-/which-4.0.0.tgz", + "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/query": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/@npmcli/query/-/query-3.1.0.tgz", + "integrity": "sha512-C/iR0tk7KSKGldibYIB9x8GtO/0Bd0I2mhOaDb8ucQL/bQVTmGoeREaFj64Z5+iCBRf3dQfed0CjJL7I8iTkiQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "postcss-selector-parser": 
"^6.0.10" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/redact": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/@npmcli/redact/-/redact-2.0.1.tgz", + "integrity": "sha512-YgsR5jCQZhVmTJvjduTOIHph0L73pK8xwMVaDY0PatySqVM9AZj93jpoXYSJqfHFxFkN9dmqTw6OiqExsS3LPw==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/run-script": { + "version": "8.1.0", + "resolved": "https://registry.npmmirror.com/@npmcli/run-script/-/run-script-8.1.0.tgz", + "integrity": "sha512-y7efHHwghQfk28G2z3tlZ67pLG0XdfYbcVG26r7YIXALRsrVQcTq4/tdenSmdOrEsNahIYA/eh8aEVROWGFUDg==", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/node-gyp": "^3.0.0", + "@npmcli/package-json": "^5.0.0", + "@npmcli/promise-spawn": "^7.0.0", + "node-gyp": "^10.0.0", + "proc-log": "^4.0.0", + "which": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/run-script/node_modules/isexe": { + "version": "3.1.5", + "resolved": "https://registry.npmmirror.com/isexe/-/isexe-3.1.5.tgz", + "integrity": "sha512-6B3tLtFqtQS4ekarvLVMZ+X+VlvQekbe4taUkf/rhVO3d/h0M2rfARm/pXLcPEsjjMsFgrFgSrhQIxcSVrBz8w==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/@npmcli/run-script/node_modules/which": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/which/-/which-4.0.0.tgz", + "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/@nx/devkit": { + "version": "20.8.4", + "resolved": "https://registry.npmmirror.com/@nx/devkit/-/devkit-20.8.4.tgz", + "integrity": 
"sha512-3r+6QmIXXAWL6K7m8vAbW31aniAZmZAZXeMhOhWcJoOAU7ggpCQaM8JP8/kO5ov/Bmhyf0i/SSVXI6kwiR5WNQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ejs": "^3.1.7", + "enquirer": "~2.3.6", + "ignore": "^5.0.4", + "minimatch": "9.0.3", + "semver": "^7.5.3", + "tmp": "~0.2.1", + "tslib": "^2.3.0", + "yargs-parser": "21.1.1" + }, + "peerDependencies": { + "nx": ">= 19 <= 21" + } + }, + "node_modules/@nx/devkit/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@nx/devkit/node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmmirror.com/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@nx/devkit/node_modules/minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@nx/nx-darwin-arm64": { + "version": "20.8.4", + "resolved": "https://registry.npmmirror.com/@nx/nx-darwin-arm64/-/nx-darwin-arm64-20.8.4.tgz", + "integrity": "sha512-8Y7+4wj1qoZsuDRpnuiHzSIsMt3VqtJ0su8dgd/MyGccvvi4pndan2R5yTiVw/wmbMxtBmZ6PO6Z8dgSIrMVog==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": 
">= 10" + } + }, + "node_modules/@nx/nx-darwin-x64": { + "version": "20.8.4", + "resolved": "https://registry.npmmirror.com/@nx/nx-darwin-x64/-/nx-darwin-x64-20.8.4.tgz", + "integrity": "sha512-2lfuxRc56QWnAysMhcD03tpCPiRzV1+foUq0MhV2sSBIybXmgV4wHLkPZNhlBCl4FNXrWiZiN1OJ2X9AGiOdug==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nx/nx-freebsd-x64": { + "version": "20.8.4", + "resolved": "https://registry.npmmirror.com/@nx/nx-freebsd-x64/-/nx-freebsd-x64-20.8.4.tgz", + "integrity": "sha512-99vnUXZy+OUBHU+8Yhabre2qafepKg9GKkQkhmXvJGqOmuIsepK7wirUFo2PiVM8YhS6UV2rv6hKAZcQ7skYyg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nx/nx-linux-arm-gnueabihf": { + "version": "20.8.4", + "resolved": "https://registry.npmmirror.com/@nx/nx-linux-arm-gnueabihf/-/nx-linux-arm-gnueabihf-20.8.4.tgz", + "integrity": "sha512-dht73zpnpzEUEzMHFQs4mfiwZH3WcJgQNWkD5p7WkeJewHq2Yyd0eG5Jg3kB7wnFtwPUV1eNJRM5rephgylkLA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nx/nx-linux-arm64-gnu": { + "version": "20.8.4", + "resolved": "https://registry.npmmirror.com/@nx/nx-linux-arm64-gnu/-/nx-linux-arm64-gnu-20.8.4.tgz", + "integrity": "sha512-syXxbJZ0yPaqzVmB28QJgUtaarSiW/PQmv/5Z2Ps8rCi7kYylISPVNjP1NNiIOcGDRWbHqoBfM0bEGPfSp0rBQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nx/nx-linux-arm64-musl": { + "version": "20.8.4", + "resolved": "https://registry.npmmirror.com/@nx/nx-linux-arm64-musl/-/nx-linux-arm64-musl-20.8.4.tgz", + "integrity": 
"sha512-AlZZFolS/S0FahRKG7rJ0Z9CgmIkyzHgGaoy3qNEMDEjFhR3jt2ZZSLp90W7zjgrxojOo90ajNMrg2UmtcQRDA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nx/nx-linux-x64-gnu": { + "version": "20.8.4", + "resolved": "https://registry.npmmirror.com/@nx/nx-linux-x64-gnu/-/nx-linux-x64-gnu-20.8.4.tgz", + "integrity": "sha512-MSu+xVNdR95tuuO+eL/a/ZeMlhfrZ627On5xaCZXnJ+lFxNg/S4nlKZQk0Eq5hYALCd/GKgFGasRdlRdOtvGPg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nx/nx-linux-x64-musl": { + "version": "20.8.4", + "resolved": "https://registry.npmmirror.com/@nx/nx-linux-x64-musl/-/nx-linux-x64-musl-20.8.4.tgz", + "integrity": "sha512-KxpQpyLCgIIHWZ4iRSUN9ohCwn1ZSDASbuFCdG3mohryzCy8WrPkuPcb+68J3wuQhmA5w//Xpp/dL0hHoit9zQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nx/nx-win32-arm64-msvc": { + "version": "20.8.4", + "resolved": "https://registry.npmmirror.com/@nx/nx-win32-arm64-msvc/-/nx-win32-arm64-msvc-20.8.4.tgz", + "integrity": "sha512-ffLBrxM9ibk+eWSY995kiFFRTSRb9HkD5T1s/uZyxV6jfxYPaZDBAWAETDneyBXps7WtaOMu+kVZlXQ3X+TfIA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nx/nx-win32-x64-msvc": { + "version": "20.8.4", + "resolved": "https://registry.npmmirror.com/@nx/nx-win32-x64-msvc/-/nx-win32-x64-msvc-20.8.4.tgz", + "integrity": "sha512-JxuuZc4h8EBqoYAiRHwskimpTJx70yn4lhIRFBoW5ICkxXW1Rw0yip/1UVsWRHXg/x9BxmH7VVazdfaQWmGu6A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@octokit/auth-token": { + 
"version": "4.0.0", + "resolved": "https://registry.npmmirror.com/@octokit/auth-token/-/auth-token-4.0.0.tgz", + "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/core": { + "version": "5.2.2", + "resolved": "https://registry.npmmirror.com/@octokit/core/-/core-5.2.2.tgz", + "integrity": "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/auth-token": "^4.0.0", + "@octokit/graphql": "^7.1.0", + "@octokit/request": "^8.4.1", + "@octokit/request-error": "^5.1.1", + "@octokit/types": "^13.0.0", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/endpoint": { + "version": "9.0.6", + "resolved": "https://registry.npmmirror.com/@octokit/endpoint/-/endpoint-9.0.6.tgz", + "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/graphql": { + "version": "7.1.1", + "resolved": "https://registry.npmmirror.com/@octokit/graphql/-/graphql-7.1.1.tgz", + "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/request": "^8.4.1", + "@octokit/types": "^13.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "24.2.0", + "resolved": "https://registry.npmmirror.com/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": 
"sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@octokit/plugin-enterprise-rest": { + "version": "6.0.1", + "resolved": "https://registry.npmmirror.com/@octokit/plugin-enterprise-rest/-/plugin-enterprise-rest-6.0.1.tgz", + "integrity": "sha512-93uGjlhUD+iNg1iWhUENAtJata6w5nE+V4urXOAlIXdco6xNZtUSfYY8dzp3Udy74aqO/B5UZL80x/YMa5PKRw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@octokit/plugin-paginate-rest": { + "version": "11.4.4-cjs.2", + "resolved": "https://registry.npmmirror.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.4.4-cjs.2.tgz", + "integrity": "sha512-2dK6z8fhs8lla5PaOTgqfCGBxgAv/le+EhPs27KklPhm1bKObpu6lXzwfUEQ16ajXzqNrKMujsFyo9K2eaoISw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.7.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + "node_modules/@octokit/plugin-request-log": { + "version": "4.0.1", + "resolved": "https://registry.npmmirror.com/@octokit/plugin-request-log/-/plugin-request-log-4.0.1.tgz", + "integrity": "sha512-GihNqNpGHorUrO7Qa9JbAl0dbLnqJVrV8OXe2Zm5/Y4wFkZQDfTreBzVmiRfJVfE4mClXdihHnbpyyO9FSX4HA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + "node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "13.3.2-cjs.1", + "resolved": "https://registry.npmmirror.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.3.2-cjs.1.tgz", + "integrity": "sha512-VUjIjOOvF2oELQmiFpWA1aOPdawpyaCUqcEBc/UOUnj3Xp6DJGrJ1+bjUIIDzdHjnFNO6q57ODMfdEZnoBkCwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.8.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "^5" + } + }, + "node_modules/@octokit/request": { + "version": "8.4.1", + "resolved": 
"https://registry.npmmirror.com/@octokit/request/-/request-8.4.1.tgz", + "integrity": "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/endpoint": "^9.0.6", + "@octokit/request-error": "^5.1.1", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/request-error": { + "version": "5.1.1", + "resolved": "https://registry.npmmirror.com/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/rest": { + "version": "20.1.2", + "resolved": "https://registry.npmmirror.com/@octokit/rest/-/rest-20.1.2.tgz", + "integrity": "sha512-GmYiltypkHHtihFwPRxlaorG5R9VAHuk/vbszVoRTGXnAsY60wYLkh/E2XiFmdZmqrisw+9FaazS1i5SbdWYgA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/core": "^5.0.2", + "@octokit/plugin-paginate-rest": "11.4.4-cjs.2", + "@octokit/plugin-request-log": "^4.0.0", + "@octokit/plugin-rest-endpoint-methods": "13.3.2-cjs.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/types": { + "version": "13.10.0", + "resolved": "https://registry.npmmirror.com/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^24.2.0" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmmirror.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": 
"sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@pkgr/core": { + "version": "0.2.9", + "resolved": "https://registry.npmmirror.com/@pkgr/core/-/core-0.2.9.tgz", + "integrity": "sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/pkgr" + } + }, + "node_modules/@sigstore/bundle": { + "version": "2.3.2", + "resolved": "https://registry.npmmirror.com/@sigstore/bundle/-/bundle-2.3.2.tgz", + "integrity": "sha512-wueKWDk70QixNLB363yHc2D2ItTgYiMTdPwK8D9dKQMR3ZQ0c35IxP5xnwQ8cNLoCgCRcHf14kE+CLIvNX1zmA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/core": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/@sigstore/core/-/core-1.1.0.tgz", + "integrity": "sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/protobuf-specs": { + "version": "0.3.3", + "resolved": "https://registry.npmmirror.com/@sigstore/protobuf-specs/-/protobuf-specs-0.3.3.tgz", + "integrity": "sha512-RpacQhBlwpBWd7KEJsRKcBQalbV28fvkxwTOJIqhIuDysMMaJW47V4OqW30iJB9uRpqOSxxEAQFdr8tTattReQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/@sigstore/sign": { + "version": "2.3.2", + "resolved": "https://registry.npmmirror.com/@sigstore/sign/-/sign-2.3.2.tgz", + "integrity": 
"sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "make-fetch-happen": "^13.0.1", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/tuf": { + "version": "2.3.4", + "resolved": "https://registry.npmmirror.com/@sigstore/tuf/-/tuf-2.3.4.tgz", + "integrity": "sha512-44vtsveTPUpqhm9NCrbU8CWLe3Vck2HO1PNLw7RIajbB7xhtn5RBPm1VNSCMwqGYHhDsBJG8gDF0q4lgydsJvw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2", + "tuf-js": "^2.2.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/verify": { + "version": "1.2.1", + "resolved": "https://registry.npmmirror.com/@sigstore/verify/-/verify-1.2.1.tgz", + "integrity": "sha512-8iKx79/F73DKbGfRf7+t4dqrc0bRr0thdPrxAtCKWRm/F0tG71i6O1rvlnScncJLLBZHn3h8M3c1BSUAb9yu8g==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.1.0", + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.10", + "resolved": "https://registry.npmmirror.com/@sinclair/typebox/-/typebox-0.27.10.tgz", + "integrity": "sha512-MTBk/3jGLNB2tVxv6uLlFh1iu64iYOQ2PbdOSK3NW8JZsmlaOh2q6sdtKowBhfw8QFLmYNzTW4/oK4uATIi6ZA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sindresorhus/merge-streams": { + "version": "2.3.0", + "resolved": "https://registry.npmmirror.com/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", + "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": 
{ + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@sinonjs/commons": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/@sinonjs/commons/-/commons-3.0.1.tgz", + "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "13.0.5", + "resolved": "https://registry.npmmirror.com/@sinonjs/fake-timers/-/fake-timers-13.0.5.tgz", + "integrity": "sha512-36/hTbH2uaWuGVERyC6da9YwGWnzUZXuPro/F2LfsdOsLnCojz/iSH8MxUt/FD2S5XBSVPhmArFUXcpCQ2Hkiw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@sinonjs/commons": "^3.0.1" + } + }, + "node_modules/@tufjs/canonical-json": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/@tufjs/canonical-json/-/canonical-json-2.0.0.tgz", + "integrity": "sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@tufjs/models": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/@tufjs/models/-/models-2.0.1.tgz", + "integrity": "sha512-92F7/SFyufn4DXsha9+QfKnN03JGqtMFMXgSHbZOo8JG59WkTni7UzAouNQDf7AuP9OAMxVOPQcqG3sB7w+kkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.4" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@tufjs/models/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + 
"node_modules/@tufjs/models/node_modules/minimatch": { + "version": "9.0.9", + "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.2" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@tybys/wasm-util": { + "version": "0.9.0", + "resolved": "https://registry.npmmirror.com/@tybys/wasm-util/-/wasm-util-0.9.0.tgz", + "integrity": "sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmmirror.com/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmmirror.com/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmmirror.com/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmmirror.com/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/command-line-args": { + "version": "5.2.3", + "resolved": "https://registry.npmmirror.com/@types/command-line-args/-/command-line-args-5.2.3.tgz", + "integrity": "sha512-uv0aG6R0Y8WHZLTamZwtfsDLVRnOa+n+n5rEvFWL5Na5gZ8V2Teab/duDPFzIIIhs9qizDpcavCusCLJZu62Kw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/emscripten": { + "version": "1.41.5", + "resolved": "https://registry.npmmirror.com/@types/emscripten/-/emscripten-1.41.5.tgz", + "integrity": "sha512-cMQm7pxu6BxtHyqJ7mQZ2kXWV5SLmugybFdHCBbJ5eHzOo6VhBckEgAT3//rP5FwPHNPeEiq4SmQ5ucBwsOo4Q==", + "license": "MIT" + }, + "node_modules/@types/eslint": { + "version": "9.6.1", + "resolved": "https://registry.npmmirror.com/@types/eslint/-/eslint-9.6.1.tgz", + "integrity": "sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "*", + "@types/json-schema": "*" + } + }, + "node_modules/@types/eslint-scope": { + "version": "3.7.7", + "resolved": "https://registry.npmmirror.com/@types/eslint-scope/-/eslint-scope-3.7.7.tgz", + "integrity": "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/eslint": "*", + "@types/estree": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmmirror.com/@types/estree/-/estree-1.0.8.tgz", + "integrity": 
"sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/fs-extra": { + "version": "11.0.4", + "resolved": "https://registry.npmmirror.com/@types/fs-extra/-/fs-extra-11.0.4.tgz", + "integrity": "sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/jsonfile": "*", + "@types/node": "*" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmmirror.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.3", + "resolved": "https://registry.npmmirror.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", + "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.4", + "resolved": "https://registry.npmmirror.com/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", + "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "30.0.0", + "resolved": "https://registry.npmmirror.com/@types/jest/-/jest-30.0.0.tgz", + "integrity": "sha512-XTYugzhuwqWjws0CVz8QpM36+T+Dz5mTEBKhNs/esGLnCIlGdRy+Dq78NRjd7ls7r8BC8ZRMOrKlkO1hU0JOwA==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^30.0.0", + "pretty-format": "^30.0.0" + } + }, + 
"node_modules/@types/jest/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmmirror.com/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@types/jest/node_modules/@sinclair/typebox": { + "version": "0.34.48", + "resolved": "https://registry.npmmirror.com/@sinclair/typebox/-/typebox-0.34.48.tgz", + "integrity": "sha512-kKJTNuK3AQOrgjjotVxMrCn1sUJwM76wMszfq1kdU4uYVJjvEWuFQ6HgvLt4Xz3fSmZlTOxJ/Ie13KnIcWQXFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/jest/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@types/jest/node_modules/pretty-format": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/pretty-format/-/pretty-format-30.2.0.tgz", + "integrity": "sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmmirror.com/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true, + 
"license": "MIT" + }, + "node_modules/@types/jsonfile": { + "version": "6.1.4", + "resolved": "https://registry.npmmirror.com/@types/jsonfile/-/jsonfile-6.1.4.tgz", + "integrity": "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/lodash": { + "version": "4.17.24", + "resolved": "https://registry.npmmirror.com/@types/lodash/-/lodash-4.17.24.tgz", + "integrity": "sha512-gIW7lQLZbue7lRSWEFql49QJJWThrTFFeIMJdp3eH4tKoxm1OvEPg02rm4wCCSHS0cL3/Fizimb35b7k8atwsQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/minimatch": { + "version": "3.0.5", + "resolved": "https://registry.npmmirror.com/@types/minimatch/-/minimatch-3.0.5.tgz", + "integrity": "sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmmirror.com/@types/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "20.19.35", + "resolved": "https://registry.npmmirror.com/@types/node/-/node-20.19.35.tgz", + "integrity": "sha512-Uarfe6J91b9HAUXxjvSOdiO2UPOKLm07Q1oh0JHxoZ1y8HoqxDAu3gVrsrOHeiio0kSsoVBt4wFrKOm0dKxVPQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@types/normalize-package-data": { + "version": "2.4.4", + "resolved": "https://registry.npmmirror.com/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", + "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/stack-utils": { + "version": "2.0.3", + 
"resolved": "https://registry.npmmirror.com/@types/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/tmp": { + "version": "0.2.6", + "resolved": "https://registry.npmmirror.com/@types/tmp/-/tmp-0.2.6.tgz", + "integrity": "sha512-chhaNf2oKHlRkDGt+tiKE2Z5aJ6qalm7Z9rlLdBwmOiAAf09YQvvoLXjWK4HWPF1xU/fqvMgfNfpVoBscA/tKA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/yargs": { + "version": "17.0.35", + "resolved": "https://registry.npmmirror.com/@types/yargs/-/yargs-17.0.35.tgz", + "integrity": "sha512-qUHkeCyQFxMXg79wQfTtfndEC+N9ZZg76HJftDJp+qH2tV7Gj4OJi7l+PiWwJ+pWtW8GwSmqsDj/oymhrTWXjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmmirror.com/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "dev": true, + "license": "ISC" + }, + "node_modules/@unrs/resolver-binding-android-arm-eabi": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-android-arm-eabi/-/resolver-binding-android-arm-eabi-1.11.1.tgz", + "integrity": "sha512-ppLRUgHVaGRWUx0R0Ut06Mjo9gBaBkg3v/8AxusGLhsIotbBLuRk51rAzqLC8gq6NyyAojEXglNjzf6R948DNw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@unrs/resolver-binding-android-arm64": { + 
"version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-android-arm64/-/resolver-binding-android-arm64-1.11.1.tgz", + "integrity": "sha512-lCxkVtb4wp1v+EoN+HjIG9cIIzPkX5OtM03pQYkG+U5O/wL53LC4QbIeazgiKqluGeVEeBlZahHalCaBvU1a2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@unrs/resolver-binding-darwin-arm64": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-darwin-arm64/-/resolver-binding-darwin-arm64-1.11.1.tgz", + "integrity": "sha512-gPVA1UjRu1Y/IsB/dQEsp2V1pm44Of6+LWvbLc9SDk1c2KhhDRDBUkQCYVWe6f26uJb3fOK8saWMgtX8IrMk3g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-darwin-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-darwin-x64/-/resolver-binding-darwin-x64-1.11.1.tgz", + "integrity": "sha512-cFzP7rWKd3lZaCsDze07QX1SC24lO8mPty9vdP+YVa3MGdVgPmFc59317b2ioXtgCMKGiCLxJ4HQs62oz6GfRQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-freebsd-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-freebsd-x64/-/resolver-binding-freebsd-x64-1.11.1.tgz", + "integrity": "sha512-fqtGgak3zX4DCB6PFpsH5+Kmt/8CIi4Bry4rb1ho6Av2QHTREM+47y282Uqiu3ZRF5IQioJQ5qWRV6jduA+iGw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm-gnueabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-linux-arm-gnueabihf/-/resolver-binding-linux-arm-gnueabihf-1.11.1.tgz", + "integrity": "sha512-u92mvlcYtp9MRKmP+ZvMmtPN34+/3lMHlyMj7wXJDeXxuM0Vgzz0+PPJNsro1m3IZPYChIkn944wW8TYgGKFHw==", + 
"cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm-musleabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-linux-arm-musleabihf/-/resolver-binding-linux-arm-musleabihf-1.11.1.tgz", + "integrity": "sha512-cINaoY2z7LVCrfHkIcmvj7osTOtm6VVT16b5oQdS4beibX2SYBwgYLmqhBjA1t51CarSaBuX5YNsWLjsqfW5Cw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-linux-arm64-gnu/-/resolver-binding-linux-arm64-gnu-1.11.1.tgz", + "integrity": "sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-linux-arm64-musl/-/resolver-binding-linux-arm64-musl-1.11.1.tgz", + "integrity": "sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-ppc64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-linux-ppc64-gnu/-/resolver-binding-linux-ppc64-gnu-1.11.1.tgz", + "integrity": "sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-riscv64-gnu": { + "version": "1.11.1", + "resolved": 
"https://registry.npmmirror.com/@unrs/resolver-binding-linux-riscv64-gnu/-/resolver-binding-linux-riscv64-gnu-1.11.1.tgz", + "integrity": "sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-riscv64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-linux-riscv64-musl/-/resolver-binding-linux-riscv64-musl-1.11.1.tgz", + "integrity": "sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-s390x-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-linux-s390x-gnu/-/resolver-binding-linux-s390x-gnu-1.11.1.tgz", + "integrity": "sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-x64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-linux-x64-gnu/-/resolver-binding-linux-x64-gnu-1.11.1.tgz", + "integrity": "sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-x64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-linux-x64-musl/-/resolver-binding-linux-x64-musl-1.11.1.tgz", + "integrity": "sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==", + 
"cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-wasm32-wasi": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-wasm32-wasi/-/resolver-binding-wasm32-wasi-1.11.1.tgz", + "integrity": "sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^0.2.11" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@unrs/resolver-binding-wasm32-wasi/node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.12", + "resolved": "https://registry.npmmirror.com/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.12.tgz", + "integrity": "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.4.3", + "@emnapi/runtime": "^1.4.3", + "@tybys/wasm-util": "^0.10.0" + } + }, + "node_modules/@unrs/resolver-binding-wasm32-wasi/node_modules/@tybys/wasm-util": { + "version": "0.10.1", + "resolved": "https://registry.npmmirror.com/@tybys/wasm-util/-/wasm-util-0.10.1.tgz", + "integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@unrs/resolver-binding-win32-arm64-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-win32-arm64-msvc/-/resolver-binding-win32-arm64-msvc-1.11.1.tgz", + "integrity": "sha512-nRcz5Il4ln0kMhfL8S3hLkxI85BXs3o8EYoattsJNdsX4YUU89iOkVn7g0VHSRxFuVMdM4Q1jEpIId1Ihim/Uw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + 
"node_modules/@unrs/resolver-binding-win32-ia32-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-win32-ia32-msvc/-/resolver-binding-win32-ia32-msvc-1.11.1.tgz", + "integrity": "sha512-DCEI6t5i1NmAZp6pFonpD5m7i6aFrpofcp4LA2i8IIq60Jyo28hamKBxNrZcyOwVOZkgsRp9O2sXWBWP8MnvIQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/resolver-binding-win32-x64-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/@unrs/resolver-binding-win32-x64-msvc/-/resolver-binding-win32-x64-msvc-1.11.1.tgz", + "integrity": "sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@webassemblyjs/ast": { + "version": "1.14.1", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/ast/-/ast-1.14.1.tgz", + "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/helper-numbers": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2" + } + }, + "node_modules/@webassemblyjs/floating-point-hex-parser": { + "version": "1.13.2", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.13.2.tgz", + "integrity": "sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-api-error": { + "version": "1.13.2", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.13.2.tgz", + "integrity": "sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/@webassemblyjs/helper-buffer": { + "version": "1.14.1", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.14.1.tgz", + "integrity": "sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-numbers": { + "version": "1.13.2", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.13.2.tgz", + "integrity": "sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/floating-point-hex-parser": "1.13.2", + "@webassemblyjs/helper-api-error": "1.13.2", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/helper-wasm-bytecode": { + "version": "1.13.2", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.13.2.tgz", + "integrity": "sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-wasm-section": { + "version": "1.14.1", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.14.1.tgz", + "integrity": "sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/wasm-gen": "1.14.1" + } + }, + "node_modules/@webassemblyjs/ieee754": { + "version": "1.13.2", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/ieee754/-/ieee754-1.13.2.tgz", + "integrity": "sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "@xtuc/ieee754": "^1.2.0" + } + }, + "node_modules/@webassemblyjs/leb128": { + "version": "1.13.2", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/leb128/-/leb128-1.13.2.tgz", + "integrity": "sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/utf8": { + "version": "1.13.2", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/utf8/-/utf8-1.13.2.tgz", + "integrity": "sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/wasm-edit": { + "version": "1.14.1", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.14.1.tgz", + "integrity": "sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/helper-wasm-section": "1.14.1", + "@webassemblyjs/wasm-gen": "1.14.1", + "@webassemblyjs/wasm-opt": "1.14.1", + "@webassemblyjs/wasm-parser": "1.14.1", + "@webassemblyjs/wast-printer": "1.14.1" + } + }, + "node_modules/@webassemblyjs/wasm-gen": { + "version": "1.14.1", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.14.1.tgz", + "integrity": "sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/ieee754": "1.13.2", + "@webassemblyjs/leb128": "1.13.2", + "@webassemblyjs/utf8": "1.13.2" + } + }, + "node_modules/@webassemblyjs/wasm-opt": { + 
"version": "1.14.1", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.14.1.tgz", + "integrity": "sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/wasm-gen": "1.14.1", + "@webassemblyjs/wasm-parser": "1.14.1" + } + }, + "node_modules/@webassemblyjs/wasm-parser": { + "version": "1.14.1", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.14.1.tgz", + "integrity": "sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-api-error": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/ieee754": "1.13.2", + "@webassemblyjs/leb128": "1.13.2", + "@webassemblyjs/utf8": "1.13.2" + } + }, + "node_modules/@webassemblyjs/wast-printer": { + "version": "1.14.1", + "resolved": "https://registry.npmmirror.com/@webassemblyjs/wast-printer/-/wast-printer-1.14.1.tgz", + "integrity": "sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webpack-cli/configtest": { + "version": "2.1.1", + "resolved": "https://registry.npmmirror.com/@webpack-cli/configtest/-/configtest-2.1.1.tgz", + "integrity": "sha512-wy0mglZpDSiSS0XHrVR+BAdId2+yxPSoJW8fsna3ZpYSlufjvxnP4YbKTCBZnNIcGN4r6ZPXV55X4mYExOfLmw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.15.0" + }, + "peerDependencies": { + "webpack": "5.x.x", + "webpack-cli": "5.x.x" + } + }, + "node_modules/@webpack-cli/info": { + "version": "2.0.2", + "resolved": 
"https://registry.npmmirror.com/@webpack-cli/info/-/info-2.0.2.tgz", + "integrity": "sha512-zLHQdI/Qs1UyT5UBdWNqsARasIA+AaF8t+4u2aS2nEpBQh2mWIVb8qAklq0eUENnC5mOItrIB4LiS9xMtph18A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.15.0" + }, + "peerDependencies": { + "webpack": "5.x.x", + "webpack-cli": "5.x.x" + } + }, + "node_modules/@webpack-cli/serve": { + "version": "2.0.5", + "resolved": "https://registry.npmmirror.com/@webpack-cli/serve/-/serve-2.0.5.tgz", + "integrity": "sha512-lqaoKnRYBdo1UgDX8uF24AfGMifWK19TxPmM5FHc2vAGxrJ/qtyUyFBWoY1tISZdelsQ5fBcOusifo5o5wSJxQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.15.0" + }, + "peerDependencies": { + "webpack": "5.x.x", + "webpack-cli": "5.x.x" + }, + "peerDependenciesMeta": { + "webpack-dev-server": { + "optional": true + } + } + }, + "node_modules/@xtuc/ieee754": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz", + "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@xtuc/long": { + "version": "4.2.2", + "resolved": "https://registry.npmmirror.com/@xtuc/long/-/long-4.2.2.tgz", + "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/@yarnpkg/fslib": { + "version": "2.10.4", + "resolved": "https://registry.npmmirror.com/@yarnpkg/fslib/-/fslib-2.10.4.tgz", + "integrity": "sha512-WhaLwvXEMjCjGxOraQx+Qtmst13iAPOlSElSZfQFdLohva5owlqACRapJ78zZFEW6M9ArqdQlZaHKVN5/mM+SA==", + "license": "BSD-2-Clause", + "dependencies": { + "@yarnpkg/libzip": "^2.3.0", + "tslib": "^1.13.0" + }, + "engines": { + "node": ">=12 <14 || 14.2 - 14.9 || >14.10.0" + } + }, + "node_modules/@yarnpkg/fslib/node_modules/tslib": { + "version": "1.14.1", + "resolved": 
"https://registry.npmmirror.com/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "license": "0BSD" + }, + "node_modules/@yarnpkg/libzip": { + "version": "2.3.0", + "resolved": "https://registry.npmmirror.com/@yarnpkg/libzip/-/libzip-2.3.0.tgz", + "integrity": "sha512-6xm38yGVIa6mKm/DUCF2zFFJhERh/QWp1ufm4cNUvxsONBmfPg8uZ9pZBdOmF6qFGr/HlT6ABBkCSx/dlEtvWg==", + "license": "BSD-2-Clause", + "dependencies": { + "@types/emscripten": "^1.39.6", + "tslib": "^1.13.0" + }, + "engines": { + "node": ">=12 <14 || 14.2 - 14.9 || >14.10.0" + } + }, + "node_modules/@yarnpkg/libzip/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmmirror.com/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "license": "0BSD" + }, + "node_modules/@yarnpkg/lockfile": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz", + "integrity": "sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/@yarnpkg/parsers": { + "version": "3.0.2", + "resolved": "https://registry.npmmirror.com/@yarnpkg/parsers/-/parsers-3.0.2.tgz", + "integrity": "sha512-/HcYgtUSiJiot/XWGLOlGxPYUG65+/31V8oqk17vZLW1xlCoR4PampyePljOxY2n8/3jz9+tIFzICsyGujJZoA==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "js-yaml": "^3.10.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=18.12.0" + } + }, + "node_modules/@yarnpkg/parsers/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmmirror.com/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": 
"~1.0.2" + } + }, + "node_modules/@yarnpkg/parsers/node_modules/js-yaml": { + "version": "3.14.2", + "resolved": "https://registry.npmmirror.com/js-yaml/-/js-yaml-3.14.2.tgz", + "integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@zkochan/js-yaml": { + "version": "0.0.7", + "resolved": "https://registry.npmmirror.com/@zkochan/js-yaml/-/js-yaml-0.0.7.tgz", + "integrity": "sha512-nrUSn7hzt7J6JWgWGz78ZYI8wj+gdIJdk0Ynjpp8l+trkn58Uqsf6RYrYkEK+3X18EX+TNdtJI0WxAtc+L84SQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/abbrev": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/abbrev/-/abbrev-2.0.0.tgz", + "integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/acorn": { + "version": "8.16.0", + "resolved": "https://registry.npmmirror.com/acorn/-/acorn-8.16.0.tgz", + "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-import-phases": { + "version": "1.0.4", + "resolved": "https://registry.npmmirror.com/acorn-import-phases/-/acorn-import-phases-1.0.4.tgz", + "integrity": "sha512-wKmbr/DDiIXzEOiWrTTUcDm24kQ2vGfZQvM2fwg2vXqR5uW6aapr7ObPtj1th32b9u90/Pf4AItvdTh42fBmVQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.13.0" + }, + "peerDependencies": { + "acorn": "^8.14.0" + } + }, + "node_modules/add-stream": { + "version": "1.0.0", + 
"resolved": "https://registry.npmmirror.com/add-stream/-/add-stream-1.0.0.tgz", + "integrity": "sha512-qQLMr+8o0WC4FZGQTcJiKBVC59JylcPSrTtk6usvmIDFUOCKegapy1VHQwRbFMOFyb/inzUVqHs+eMYKDM1YeQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/agent-base": { + "version": "7.1.4", + "resolved": "https://registry.npmmirror.com/agent-base/-/agent-base-7.1.4.tgz", + "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ajv": { + "version": "8.18.0", + "resolved": "https://registry.npmmirror.com/ajv/-/ajv-8.18.0.tgz", + "integrity": "sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-formats": { + "version": "2.1.1", + "resolved": "https://registry.npmmirror.com/ajv-formats/-/ajv-formats-2.1.1.tgz", + "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/ajv-keywords": { + "version": "5.1.0", + 
"resolved": "https://registry.npmmirror.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "node_modules/ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmmirror.com/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmmirror.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmmirror.com/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": 
"https://registry.npmmirror.com/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/aproba": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/aproba/-/aproba-2.0.0.tgz", + "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/array-back": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/array-back/-/array-back-3.1.0.tgz", + "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/array-differ": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/array-differ/-/array-differ-3.0.0.tgz", + "integrity": "sha512-THtfYS6KtME/yIAhKjZ2ul7XI96lQGHRputJQHO80LAWQnuGP4iCIN8vdMRboGbIEYBwU33q8Tch1os2+X0kMg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/array-ify": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/array-ify/-/array-ify-1.0.0.tgz", + "integrity": "sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng==", + "dev": true, + "license": "MIT" + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/array-union/-/array-union-2.1.0.tgz", + "integrity": 
"sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/async": { + "version": "3.2.6", + "resolved": "https://registry.npmmirror.com/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", + "dev": true, + "license": "MIT" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmmirror.com/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/axios": { + "version": "1.13.6", + "resolved": "https://registry.npmmirror.com/axios/-/axios-1.13.6.tgz", + "integrity": "sha512-ChTCHMouEe2kn713WHbQGcuYrr6fXTBiu460OTwWrWob16g1bXn4vtz07Ope7ewMozJAnEquLk5lWQWtBig9DQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.11", + "form-data": "^4.0.5", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/babel-jest": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/babel-jest/-/babel-jest-30.2.0.tgz", + "integrity": "sha512-0YiBEOxWqKkSQWL9nNGGEgndoeL0ZpWrbLMNL5u/Kaxrli3Eaxlt3ZtIDktEvXt4L/R9r3ODr2zKwGM/2BjxVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/transform": "30.2.0", + "@types/babel__core": "^7.20.5", + "babel-plugin-istanbul": "^7.0.1", + "babel-preset-jest": "30.2.0", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "slash": "^3.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || 
^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.11.0 || ^8.0.0-0" + } + }, + "node_modules/babel-jest/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "7.0.1", + "resolved": "https://registry.npmmirror.com/babel-plugin-istanbul/-/babel-plugin-istanbul-7.0.1.tgz", + "integrity": "sha512-D8Z6Qm8jCvVXtIRkBnqNHX0zJ37rQcFJ9u8WOS6tkYOsRdHBzypCstaxWiu5ZIlqQtviRYbgnRLSoCEvjqcqbA==", + "dev": true, + "license": "BSD-3-Clause", + "workspaces": [ + "test/babel-8" + ], + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-instrument": "^6.0.2", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-30.2.0.tgz", + "integrity": "sha512-ftzhzSGMUnOzcCXd6WHdBGMyuwy15Wnn0iyyWGKgBDLxf9/s5ABuraCSpBX2uG0jUg4rqJnxsLc5+oYBqoxVaA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/babel__core": "^7.20.5" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.2.0.tgz", + "integrity": "sha512-E/VlAEzRrsLEb2+dv8yp3bo4scof3l9nR4lrld+Iy5NyVqgVYUJnDAmunkhPMisRI32Qc4iRiz425d8vM++2fg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + 
"@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5" + }, + "peerDependencies": { + "@babel/core": "^7.0.0 || ^8.0.0-0" + } + }, + "node_modules/babel-preset-jest": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/babel-preset-jest/-/babel-preset-jest-30.2.0.tgz", + "integrity": "sha512-US4Z3NOieAQumwFnYdUWKvUKh8+YSnS/gB3t6YBiz0bskpu7Pine8pPCheNxlPEW4wnUkma2a94YuW2q3guvCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-plugin-jest-hoist": "30.2.0", + "babel-preset-current-node-syntax": "^1.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.11.0 || ^8.0.0-beta.1" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmmirror.com/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": 
"https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/baseline-browser-mapping": { + "version": "2.10.0", + "resolved": "https://registry.npmmirror.com/baseline-browser-mapping/-/baseline-browser-mapping-2.10.0.tgz", + "integrity": "sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.cjs" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/before-after-hook": { + "version": "2.2.3", + "resolved": "https://registry.npmmirror.com/before-after-hook/-/before-after-hook-2.2.3.tgz", + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/big.js": { + "version": "5.2.2", + "resolved": "https://registry.npmmirror.com/big.js/-/big.js-5.2.2.tgz", + "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/bin-links": { + "version": "4.0.4", + "resolved": "https://registry.npmmirror.com/bin-links/-/bin-links-4.0.4.tgz", + "integrity": "sha512-cMtq4W5ZsEwcutJrVId+a/tjt8GSbS+h0oNkdl6+6rBuEv8Ot33Bevj5KPm40t309zuhVic8NjpuL42QCiJWWA==", + "dev": true, + "license": "ISC", + "dependencies": { + "cmd-shim": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0", + "read-cmd-shim": "^4.0.0", + "write-file-atomic": "^5.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmmirror.com/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": 
"sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmmirror.com/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmmirror.com/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + 
"electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bs-logger": { + "version": "0.2.6", + "resolved": "https://registry.npmmirror.com/bs-logger/-/bs-logger-0.2.6.tgz", + "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-json-stable-stringify": "2.x" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmmirror.com/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmmirror.com/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmmirror.com/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "license": "MIT" + }, + "node_modules/byte-size": { + "version": "8.1.1", + "resolved": "https://registry.npmmirror.com/byte-size/-/byte-size-8.1.1.tgz", + "integrity": 
"sha512-tUkzZWK0M/qdoLEqikxBWe4kumyuwjl3HO6zHTr4yEI23EojPtLYXdG1+AQY7MN0cGyNDvEaJ8wiYQm6P2bPxg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.17" + } + }, + "node_modules/cacache": { + "version": "18.0.4", + "resolved": "https://registry.npmmirror.com/cacache/-/cacache-18.0.4.tgz", + "integrity": "sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmmirror.com/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase-keys": { + "version": "6.2.2", + 
"resolved": "https://registry.npmmirror.com/camelcase-keys/-/camelcase-keys-6.2.2.tgz", + "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "camelcase": "^5.3.1", + "map-obj": "^4.0.0", + "quick-lru": "^4.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001775", + "resolved": "https://registry.npmmirror.com/caniuse-lite/-/caniuse-lite-1.0.30001775.tgz", + "integrity": "sha512-s3Qv7Lht9zbVKE9XoTyRG6wVDCKdtOFIjBGg3+Yhn6JaytuNKPIjBMTMIY1AnOH3seL5mvF+x33oGAyK3hVt3A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmmirror.com/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/chardet": { + "version": "2.1.1", + "resolved": "https://registry.npmmirror.com/chardet/-/chardet-2.1.1.tgz", + "integrity": 
"sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmmirror.com/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmmirror.com/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/chrome-trace-event": { + "version": "1.0.4", + "resolved": "https://registry.npmmirror.com/chrome-trace-event/-/chrome-trace-event-1.0.4.tgz", + "integrity": "sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmmirror.com/ci-info/-/ci-info-3.9.0.tgz", + "integrity": 
"sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cjs-module-lexer": { + "version": "2.2.0", + "resolved": "https://registry.npmmirror.com/cjs-module-lexer/-/cjs-module-lexer-2.2.0.tgz", + "integrity": "sha512-4bHTS2YuzUvtoLjdy+98ykbNB5jS0+07EvFNXerqZQJ89F7DI6ET7OQo/HJuW6K0aVsKA9hj9/RVb2kQVOrPDQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmmirror.com/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "license": "MIT", + "dependencies": { + "restore-cursor": "^3.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-spinners": { + "version": "2.6.1", + "resolved": "https://registry.npmmirror.com/cli-spinners/-/cli-spinners-2.6.1.tgz", + "integrity": "sha512-x/5fWmGMnbKQAaNwN+UZlV79qBLM9JFnJuJ03gIi5whrob0xV0ofNVHy9DhwGdsMJQc2OKv0oGmLzvaqvAVv+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-width": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/cli-width/-/cli-width-3.0.0.tgz", + "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">= 10" + 
} + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmmirror.com/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmmirror.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/clone": { + "version": "1.0.4", + "resolved": "https://registry.npmmirror.com/clone/-/clone-1.0.4.tgz", + "integrity": "sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/clone-deep": { + "version": "4.0.1", + "resolved": "https://registry.npmmirror.com/clone-deep/-/clone-deep-4.0.1.tgz", + "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.2", + "shallow-clone": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/cmd-shim": { + "version": "6.0.3", + "resolved": "https://registry.npmmirror.com/cmd-shim/-/cmd-shim-6.0.3.tgz", + "integrity": "sha512-FMabTRlc5t5zjdenF6mS0MBeFZm0XqHqeOkcskKFb/LYCcRQ5fVgLOHVc4Lq9CqABd9zhjwPjMBCJvMCziSVtA==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || 
^16.13.0 || >=18.0.0" + } + }, + "node_modules/co": { + "version": "4.6.0", + "resolved": "https://registry.npmmirror.com/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/collect-v8-coverage/-/collect-v8-coverage-1.0.3.tgz", + "integrity": "sha512-1L5aqIkwPfiodaMgQunkF1zRhNqifHBmtbbbxcr6yVxxBnliw4TDOW6NxpO8DJLgJ16OT+Y4ztZqP6p/FtXnAw==", + "dev": true, + "license": "MIT" + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmmirror.com/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "license": "MIT" + }, + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmmirror.com/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "dev": true, + "license": "ISC", + "bin": { + "color-support": "bin.js" + } + }, + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmmirror.com/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/columnify": { + "version": "1.6.0", 
+ "resolved": "https://registry.npmmirror.com/columnify/-/columnify-1.6.0.tgz", + "integrity": "sha512-lomjuFZKfM6MSAnV9aCZC9sc0qGbmZdfygNv+nCpqVkSKdCxCklLtd16O0EILGkImHw9ZpHkAnHaB+8Zxq5W6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "strip-ansi": "^6.0.1", + "wcwidth": "^1.0.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmmirror.com/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/command-line-args": { + "version": "5.2.1", + "resolved": "https://registry.npmmirror.com/command-line-args/-/command-line-args-5.2.1.tgz", + "integrity": "sha512-H4UfQhZyakIjC74I9d34fGYDwk3XpSr17QhEd0Q3I9Xq1CETHo4Hcuo87WyWHpAF1aSLjLRf5lD9ZGX2qStUvg==", + "license": "MIT", + "dependencies": { + "array-back": "^3.1.0", + "find-replace": "^3.0.0", + "lodash.camelcase": "^4.3.0", + "typical": "^4.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmmirror.com/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/common-ancestor-path": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/common-ancestor-path/-/common-ancestor-path-1.0.1.tgz", + "integrity": "sha512-L3sHRo1pXXEqX8VU28kfgUY+YGsk09hPqZiZmLacNib6XNTCM8ubYeT7ryXQw8asB1sKgcU5lkB7ONug08aB8w==", + "dev": true, + "license": "ISC" + }, + "node_modules/compare-func": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/compare-func/-/compare-func-2.0.0.tgz", + "integrity": 
"sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-ify": "^1.0.0", + "dot-prop": "^5.1.0" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmmirror.com/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/concat-stream/-/concat-stream-2.0.0.tgz", + "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", + "dev": true, + "engines": [ + "node >= 6.0" + ], + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.0.2", + "typedarray": "^0.0.6" + } + }, + "node_modules/console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/conventional-changelog-angular": { + "version": "7.0.0", + "resolved": "https://registry.npmmirror.com/conventional-changelog-angular/-/conventional-changelog-angular-7.0.0.tgz", + "integrity": "sha512-ROjNchA9LgfNMTTFSIWPzebCwOGFdgkEq45EnvvrmSLvCtAw0HSmrCs7/ty+wAeYUZyNay0YMUNYFTRL72PkBQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "compare-func": "^2.0.0" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/conventional-changelog-core": { + "version": "5.0.1", + "resolved": "https://registry.npmmirror.com/conventional-changelog-core/-/conventional-changelog-core-5.0.1.tgz", + "integrity": 
"sha512-Rvi5pH+LvgsqGwZPZ3Cq/tz4ty7mjijhr3qR4m9IBXNbxGGYgTVVO+duXzz9aArmHxFtwZ+LRkrNIMDQzgoY4A==", + "dev": true, + "license": "MIT", + "dependencies": { + "add-stream": "^1.0.0", + "conventional-changelog-writer": "^6.0.0", + "conventional-commits-parser": "^4.0.0", + "dateformat": "^3.0.3", + "get-pkg-repo": "^4.2.1", + "git-raw-commits": "^3.0.0", + "git-remote-origin-url": "^2.0.0", + "git-semver-tags": "^5.0.0", + "normalize-package-data": "^3.0.3", + "read-pkg": "^3.0.0", + "read-pkg-up": "^3.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/conventional-changelog-core/node_modules/hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "dev": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/conventional-changelog-core/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmmirror.com/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/conventional-changelog-core/node_modules/normalize-package-data": { + "version": "3.0.3", + "resolved": "https://registry.npmmirror.com/normalize-package-data/-/normalize-package-data-3.0.3.tgz", + "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "hosted-git-info": "^4.0.1", + "is-core-module": "^2.5.0", + "semver": "^7.3.4", + "validate-npm-package-license": "^3.0.1" + }, + "engines": { + "node": ">=10" + } + }, + 
"node_modules/conventional-changelog-preset-loader": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/conventional-changelog-preset-loader/-/conventional-changelog-preset-loader-3.0.0.tgz", + "integrity": "sha512-qy9XbdSLmVnwnvzEisjxdDiLA4OmV3o8db+Zdg4WiFw14fP3B6XNz98X0swPPpkTd/pc1K7+adKgEDM1JCUMiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/conventional-changelog-writer": { + "version": "6.0.1", + "resolved": "https://registry.npmmirror.com/conventional-changelog-writer/-/conventional-changelog-writer-6.0.1.tgz", + "integrity": "sha512-359t9aHorPw+U+nHzUXHS5ZnPBOizRxfQsWT5ZDHBfvfxQOAik+yfuhKXG66CN5LEWPpMNnIMHUTCKeYNprvHQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "conventional-commits-filter": "^3.0.0", + "dateformat": "^3.0.3", + "handlebars": "^4.7.7", + "json-stringify-safe": "^5.0.1", + "meow": "^8.1.2", + "semver": "^7.0.0", + "split": "^1.0.1" + }, + "bin": { + "conventional-changelog-writer": "cli.js" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/conventional-commits-filter": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/conventional-commits-filter/-/conventional-commits-filter-3.0.0.tgz", + "integrity": "sha512-1ymej8b5LouPx9Ox0Dw/qAO2dVdfpRFq28e5Y0jJEU8ZrLdy0vOSkkIInwmxErFGhg6SALro60ZrwYFVTUDo4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "lodash.ismatch": "^4.4.0", + "modify-values": "^1.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/conventional-commits-parser": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/conventional-commits-parser/-/conventional-commits-parser-4.0.0.tgz", + "integrity": "sha512-WRv5j1FsVM5FISJkoYMR6tPk07fkKT0UodruX4je86V4owk451yjXAKzKAPOs9l7y59E2viHUS9eQ+dfUA9NSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-text-path": "^1.0.1", + "JSONStream": "^1.3.5", + "meow": "^8.1.2", + "split2": "^3.2.2" + }, + "bin": { + 
"conventional-commits-parser": "cli.js" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/conventional-recommended-bump": { + "version": "7.0.1", + "resolved": "https://registry.npmmirror.com/conventional-recommended-bump/-/conventional-recommended-bump-7.0.1.tgz", + "integrity": "sha512-Ft79FF4SlOFvX4PkwFDRnaNiIVX7YbmqGU0RwccUaiGvgp3S0a8ipR2/Qxk31vclDNM+GSdJOVs2KrsUCjblVA==", + "dev": true, + "license": "MIT", + "dependencies": { + "concat-stream": "^2.0.0", + "conventional-changelog-preset-loader": "^3.0.0", + "conventional-commits-filter": "^3.0.0", + "conventional-commits-parser": "^4.0.0", + "git-raw-commits": "^3.0.0", + "git-semver-tags": "^5.0.0", + "meow": "^8.1.2" + }, + "bin": { + "conventional-recommended-bump": "cli.js" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/copy-webpack-plugin": { + "version": "12.0.2", + "resolved": "https://registry.npmmirror.com/copy-webpack-plugin/-/copy-webpack-plugin-12.0.2.tgz", + "integrity": "sha512-SNwdBeHyII+rWvee/bTnAYyO8vfVdcSTud4EIb6jcZ8inLeWucJE0DnxXQBjlQ5zlteuuvooGQy3LIyGxhvlOA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-glob": "^3.3.2", + "glob-parent": "^6.0.1", + "globby": "^14.0.0", + "normalize-path": "^3.0.0", + "schema-utils": "^4.2.0", + "serialize-javascript": "^6.0.2" + }, + "engines": { + "node": ">= 18.12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.1.0" + } + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": 
"sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/cosmiconfig": { + "version": "9.0.0", + "resolved": "https://registry.npmmirror.com/cosmiconfig/-/cosmiconfig-9.0.0.tgz", + "integrity": "sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==", + "dev": true, + "license": "MIT", + "dependencies": { + "env-paths": "^2.2.1", + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmmirror.com/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/dargs": { + "version": "7.0.0", + "resolved": "https://registry.npmmirror.com/dargs/-/dargs-7.0.0.tgz", + "integrity": "sha512-2iy1EkLdlBzQGvbweYRFxmFath8+K7+AKB0TlhHWkNuH+TmovaMH/Wp7V7R4u7f4SnX3OgLsU9t1NI9ioDnUpg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/dateformat": { + "version": "3.0.3", + "resolved": 
"https://registry.npmmirror.com/dateformat/-/dateformat-3.0.3.tgz", + "integrity": "sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmmirror.com/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decamelize-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/decamelize-keys/-/decamelize-keys-1.1.1.tgz", + "integrity": "sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==", + "dev": true, + "license": "MIT", + "dependencies": { + "decamelize": "^1.1.0", + "map-obj": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/decamelize-keys/node_modules/map-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/map-obj/-/map-obj-1.0.1.tgz", + "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/dedent": { + "version": "1.5.3", + "resolved": "https://registry.npmmirror.com/dedent/-/dedent-1.5.3.tgz", + "integrity": 
"sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "babel-plugin-macros": "^3.1.0" + }, + "peerDependenciesMeta": { + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmmirror.com/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/defaults": { + "version": "1.0.4", + "resolved": "https://registry.npmmirror.com/defaults/-/defaults-1.0.4.tgz", + "integrity": "sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "clone": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/define-lazy-prop": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmmirror.com/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==", + "dev": true, + "license": "ISC" + }, + 
"node_modules/detect-indent": { + "version": "5.0.0", + "resolved": "https://registry.npmmirror.com/detect-indent/-/detect-indent-5.0.0.tgz", + "integrity": "sha512-rlpvsxUtM0PQvy9iZe640/IWwWYyBsTApREbA1pHOpmOUIl9MkP/U4z7vTtg4Oaojvqhxt7sdufnT0EzGaR31g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmmirror.com/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/dot-prop": { + "version": "5.3.0", + "resolved": "https://registry.npmmirror.com/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-obj": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/dotenv": { + "version": "16.4.7", + "resolved": "https://registry.npmmirror.com/dotenv/-/dotenv-16.4.7.tgz", + "integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/dotenv-expand": { + "version": "11.0.7", + "resolved": "https://registry.npmmirror.com/dotenv-expand/-/dotenv-expand-11.0.7.tgz", + "integrity": 
"sha512-zIHwmZPRshsCdpMDyVsqGmgyP0yT8GAgXUnkdAoJisxvf33k7yO6OuoKmcTGuXPWSsm8Oh88nZicRLA9Y0rUeA==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "dotenv": "^16.4.5" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmmirror.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true, + "license": "MIT" + }, + "node_modules/ejs": { + "version": "3.1.10", + "resolved": "https://registry.npmmirror.com/ejs/-/ejs-3.1.10.tgz", + "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "jake": "^10.8.5" + }, + "bin": { + "ejs": "bin/cli.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.302", + "resolved": "https://registry.npmmirror.com/electron-to-chromium/-/electron-to-chromium-1.5.302.tgz", + "integrity": "sha512-sM6HAN2LyK82IyPBpznDRqlTQAtuSaO+ShzFiWTvoMJLHyZ+Y39r8VMfHzwbU8MVBzQ4Wdn85+wlZl2TLGIlwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/emittery": { + "version": "0.13.1", + "resolved": "https://registry.npmmirror.com/emittery/-/emittery-0.13.1.tgz", + "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", + "dev": 
true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmmirror.com/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/emojis-list": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/emojis-list/-/emojis-list-3.0.0.tgz", + "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/encoding": { + "version": "0.1.13", + "resolved": "https://registry.npmmirror.com/encoding/-/encoding-0.1.13.tgz", + "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "iconv-lite": "^0.6.2" + } + }, + "node_modules/encoding/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmmirror.com/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmmirror.com/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/enhanced-resolve": { + "version": "5.20.0", + "resolved": 
"https://registry.npmmirror.com/enhanced-resolve/-/enhanced-resolve-5.20.0.tgz", + "integrity": "sha512-/ce7+jQ1PQ6rVXwe+jKEg5hW5ciicHwIQUagZkp6IufBoY3YDgdTTY1azVs0qoRgVmvsNB+rbjLJxDAeHHtwsQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.3.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmmirror.com/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-colors": "^4.1.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmmirror.com/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/envinfo": { + "version": "7.21.0", + "resolved": "https://registry.npmmirror.com/envinfo/-/envinfo-7.21.0.tgz", + "integrity": "sha512-Lw7I8Zp5YKHFCXL7+Dz95g4CcbMEpgvqZNNq3AmlT5XAV6CgAAk6gyAMqn2zjw08K9BHfcNuKrMiCPLByGafow==", + "dev": true, + "license": "MIT", + "bin": { + "envinfo": "dist/cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/err-code": { + "version": "2.0.3", + "resolved": "https://registry.npmmirror.com/err-code/-/err-code-2.0.3.tgz", + "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/error-ex": { + "version": "1.3.4", + "resolved": "https://registry.npmmirror.com/error-ex/-/error-ex-1.3.4.tgz", + "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-arrayish": 
"^0.2.1" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-module-lexer": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/es-module-lexer/-/es-module-lexer-2.0.0.tgz", + "integrity": "sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==", + "dev": true, + "license": "MIT" + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/esbuild": { + "version": "0.27.3", + "resolved": "https://registry.npmmirror.com/esbuild/-/esbuild-0.27.3.tgz", + 
"integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.3", + "@esbuild/android-arm": "0.27.3", + "@esbuild/android-arm64": "0.27.3", + "@esbuild/android-x64": "0.27.3", + "@esbuild/darwin-arm64": "0.27.3", + "@esbuild/darwin-x64": "0.27.3", + "@esbuild/freebsd-arm64": "0.27.3", + "@esbuild/freebsd-x64": "0.27.3", + "@esbuild/linux-arm": "0.27.3", + "@esbuild/linux-arm64": "0.27.3", + "@esbuild/linux-ia32": "0.27.3", + "@esbuild/linux-loong64": "0.27.3", + "@esbuild/linux-mips64el": "0.27.3", + "@esbuild/linux-ppc64": "0.27.3", + "@esbuild/linux-riscv64": "0.27.3", + "@esbuild/linux-s390x": "0.27.3", + "@esbuild/linux-x64": "0.27.3", + "@esbuild/netbsd-arm64": "0.27.3", + "@esbuild/netbsd-x64": "0.27.3", + "@esbuild/openbsd-arm64": "0.27.3", + "@esbuild/openbsd-x64": "0.27.3", + "@esbuild/openharmony-arm64": "0.27.3", + "@esbuild/sunos-x64": "0.27.3", + "@esbuild/win32-arm64": "0.27.3", + "@esbuild/win32-ia32": "0.27.3", + "@esbuild/win32-x64": "0.27.3" + } + }, + "node_modules/esbuild-loader": { + "version": "4.4.2", + "resolved": "https://registry.npmmirror.com/esbuild-loader/-/esbuild-loader-4.4.2.tgz", + "integrity": "sha512-8LdoT9sC7fzfvhxhsIAiWhzLJr9yT3ggmckXxsgvM07wgrRxhuT98XhLn3E7VczU5W5AFsPKv9DdWcZIubbWkQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.27.1", + "get-tsconfig": "^4.10.1", + "loader-utils": "^2.0.4", + "webpack-sources": "^1.4.3" + }, + "funding": { + "url": "https://github.com/privatenumber/esbuild-loader?sponsor=1" + }, + "peerDependencies": { + "webpack": "^4.40.0 || ^5.0.0" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmmirror.com/escalade/-/escalade-3.2.0.tgz", + "integrity": 
"sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmmirror.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmmirror.com/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmmirror.com/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmmirror.com/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmmirror.com/estraverse/-/estraverse-5.3.0.tgz", + "integrity": 
"sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmmirror.com/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/eventemitter3": { + "version": "4.0.7", + "resolved": "https://registry.npmmirror.com/eventemitter3/-/eventemitter3-4.0.7.tgz", + "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", + "dev": true, + "license": "MIT" + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmmirror.com/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/execa": { + "version": "5.0.0", + "resolved": "https://registry.npmmirror.com/execa/-/execa-5.0.0.tgz", + "integrity": "sha512-ov6w/2LCiuyO4RLYGdpFGjkcs0wMTgGE8PrkTHikeUy5iJekXyPIKUjifk5CsE0pt7sMCrMZ3YNqoCj6idQOnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/exit-x": { + "version": "0.2.2", + "resolved": "https://registry.npmmirror.com/exit-x/-/exit-x-0.2.2.tgz", + "integrity": 
"sha512-+I6B/IkJc1o/2tiURyz/ivu/O0nKNEArIUB5O7zBrlDVJr22SCLH3xTeEry428LvFhRzIA1g8izguxJ/gbNcVQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/expect": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/expect/-/expect-30.2.0.tgz", + "integrity": "sha512-u/feCi0GPsI+988gU2FLcsHyAHTU0MX1Wg68NhAnN7z/+C5wqG+CY8J53N9ioe8RXgaoz0nBR/TYMf3AycUuPw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/expect-utils": "30.2.0", + "@jest/get-type": "30.1.0", + "jest-matcher-utils": "30.2.0", + "jest-message-util": "30.2.0", + "jest-mock": "30.2.0", + "jest-util": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/exponential-backoff": { + "version": "3.1.3", + "resolved": "https://registry.npmmirror.com/exponential-backoff/-/exponential-backoff-3.1.3.tgz", + "integrity": "sha512-ZgEeZXj30q+I0EN+CbSSpIyPaJ5HVQD18Z1m+u1FXbAeT94mr1zw50q4q6jiiC447Nl/YTcIYSAftiGqetwXCA==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmmirror.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmmirror.com/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": 
"https://registry.npmmirror.com/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/fast-uri/-/fast-uri-3.1.0.tgz", + "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/fastest-levenshtein": { + "version": "1.0.16", + "resolved": "https://registry.npmmirror.com/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz", + "integrity": "sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4.9.1" + } + }, + "node_modules/fastq": { + "version": "1.20.1", + "resolved": "https://registry.npmmirror.com/fastq/-/fastq-1.20.1.tgz", + "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": 
"sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/figures": { + "version": "3.2.0", + "resolved": "https://registry.npmmirror.com/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dev": true, + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/filelist": { + "version": "1.0.6", + "resolved": "https://registry.npmmirror.com/filelist/-/filelist-1.0.6.tgz", + "integrity": "sha512-5giy2PkLYY1cP39p17Ech+2xlpTRL9HLspOfEgm0L6CwBXBTgsK5ou0JtzYuepxkaQ/tvhCFIJ5uXo0OrM2DxA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "minimatch": "^5.0.1" + } + }, + "node_modules/filelist/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/filelist/node_modules/minimatch": { + "version": "5.1.9", + "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-5.1.9.tgz", + "integrity": "sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmmirror.com/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", 
+ "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-replace": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/find-replace/-/find-replace-3.0.0.tgz", + "integrity": "sha512-6Tb2myMioCAgv5kfvP5/PkZZ/ntTpVK39fHY7WkWBgvbeE+VHd/tZuZ4mrC+bxh4cfOZeYKVPaJIZtZXV7GNCQ==", + "license": "MIT", + "dependencies": { + "array-back": "^3.0.1" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmmirror.com/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "license": "BSD-3-Clause", + "bin": { + "flat": "cli.js" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.11", + "resolved": "https://registry.npmmirror.com/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/foreground-child": { + "version": "3.3.1", + "resolved": "https://registry.npmmirror.com/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dev": true, + 
"license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/foreground-child/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/form-data": { + "version": "4.0.5", + "resolved": "https://registry.npmmirror.com/form-data/-/form-data-4.0.5.tgz", + "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==", + "dev": true, + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/front-matter": { + "version": "4.0.2", + "resolved": "https://registry.npmmirror.com/front-matter/-/front-matter-4.0.2.tgz", + "integrity": "sha512-I8ZuJ/qG92NWX8i5x1Y8qyj3vizhXS31OxjKDu3LKP+7/qBgfIKValiZIEwoVoJKUHlhWtYrktkxV1XsX+pPlg==", + "dev": true, + "license": "MIT", + "dependencies": { + "js-yaml": "^3.13.1" + } + }, + "node_modules/front-matter/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmmirror.com/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/front-matter/node_modules/js-yaml": { + "version": "3.14.2", + "resolved": "https://registry.npmmirror.com/js-yaml/-/js-yaml-3.14.2.tgz", + "integrity": 
"sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "dev": true, + "license": "MIT" + }, + "node_modules/fs-extra": { + "version": "11.3.3", + "resolved": "https://registry.npmmirror.com/fs-extra/-/fs-extra-11.3.3.tgz", + "integrity": "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/fs-minipass": { + "version": "3.0.3", + "resolved": "https://registry.npmmirror.com/fs-minipass/-/fs-minipass-3.0.3.tgz", + "integrity": "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmmirror.com/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + 
"darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmmirror.com/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmmirror.com/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmmirror.com/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmmirror.com/get-package-type/-/get-package-type-0.1.0.tgz", + 
"integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-pkg-repo": { + "version": "4.2.1", + "resolved": "https://registry.npmmirror.com/get-pkg-repo/-/get-pkg-repo-4.2.1.tgz", + "integrity": "sha512-2+QbHjFRfGB74v/pYWjd5OhU3TDIC2Gv/YKUTk/tCvAz0pkn/Mz6P3uByuBimLOcPvN2jYdScl3xGFSrx0jEcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@hutson/parse-repository-url": "^3.0.0", + "hosted-git-info": "^4.0.0", + "through2": "^2.0.0", + "yargs": "^16.2.0" + }, + "bin": { + "get-pkg-repo": "src/cli.js" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-pkg-repo/node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmmirror.com/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/get-pkg-repo/node_modules/hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "dev": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/get-pkg-repo/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmmirror.com/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/get-pkg-repo/node_modules/wrap-ansi": { + "version": 
"7.0.0", + "resolved": "https://registry.npmmirror.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/get-pkg-repo/node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmmirror.com/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/get-pkg-repo/node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmmirror.com/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/get-port": { + "version": "5.1.1", + "resolved": "https://registry.npmmirror.com/get-port/-/get-port-5.1.1.tgz", + "integrity": "sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/get-proto/-/get-proto-1.0.1.tgz", + "integrity": 
"sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-stream": { + "version": "6.0.0", + "resolved": "https://registry.npmmirror.com/get-stream/-/get-stream-6.0.0.tgz", + "integrity": "sha512-A1B3Bh1UmL0bidM/YX2NsCOTnGJePL9rO/M+Mw3m9f2gUpfokS0hi5Eah0WSUEWZdZhIZtMjkIYS7mDfOqNHbg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-tsconfig": { + "version": "4.13.6", + "resolved": "https://registry.npmmirror.com/get-tsconfig/-/get-tsconfig-4.13.6.tgz", + "integrity": "sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/git-raw-commits": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/git-raw-commits/-/git-raw-commits-3.0.0.tgz", + "integrity": "sha512-b5OHmZ3vAgGrDn/X0kS+9qCfNKWe4K/jFnhwzVWWg0/k5eLa3060tZShrRg8Dja5kPc+YjS0Gc6y7cRr44Lpjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "dargs": "^7.0.0", + "meow": "^8.1.2", + "split2": "^3.2.2" + }, + "bin": { + "git-raw-commits": "cli.js" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/git-remote-origin-url": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/git-remote-origin-url/-/git-remote-origin-url-2.0.0.tgz", + "integrity": "sha512-eU+GGrZgccNJcsDH5LkXR3PB9M958hxc7sbA8DFJjrv9j4L2P/eZfKhM+QD6wyzpiv+b1BpK0XrYCxkovtjSLw==", + "dev": true, + "license": "MIT", + "dependencies": { + "gitconfiglocal": "^1.0.0", + "pify": "^2.3.0" + }, + "engines": { + "node": ">=4" + } + }, 
+ "node_modules/git-remote-origin-url/node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmmirror.com/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/git-semver-tags": { + "version": "5.0.1", + "resolved": "https://registry.npmmirror.com/git-semver-tags/-/git-semver-tags-5.0.1.tgz", + "integrity": "sha512-hIvOeZwRbQ+7YEUmCkHqo8FOLQZCEn18yevLHADlFPZY02KJGsu5FZt9YW/lybfK2uhWFI7Qg/07LekJiTv7iA==", + "dev": true, + "license": "MIT", + "dependencies": { + "meow": "^8.1.2", + "semver": "^7.0.0" + }, + "bin": { + "git-semver-tags": "cli.js" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/git-up": { + "version": "7.0.0", + "resolved": "https://registry.npmmirror.com/git-up/-/git-up-7.0.0.tgz", + "integrity": "sha512-ONdIrbBCFusq1Oy0sC71F5azx8bVkvtZtMJAsv+a6lz5YAmbNnLD6HAB4gptHZVLPR8S2/kVN6Gab7lryq5+lQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-ssh": "^1.4.0", + "parse-url": "^8.1.0" + } + }, + "node_modules/git-url-parse": { + "version": "14.0.0", + "resolved": "https://registry.npmmirror.com/git-url-parse/-/git-url-parse-14.0.0.tgz", + "integrity": "sha512-NnLweV+2A4nCvn4U/m2AoYu0pPKlsmhK9cknG7IMwsjFY1S2jxM+mAhsDxyxfCIGfGaD+dozsyX4b6vkYc83yQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "git-up": "^7.0.0" + } + }, + "node_modules/gitconfiglocal": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/gitconfiglocal/-/gitconfiglocal-1.0.0.tgz", + "integrity": "sha512-spLUXeTAVHxDtKsJc8FkFVgFtMdEN9qPGpL23VfSHx4fP4+Ds097IXLvymbnDH8FnmxX5Nr9bPw3A+AQ6mWEaQ==", + "dev": true, + "license": "BSD", + "dependencies": { + "ini": "^1.3.2" + } + }, + "node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmmirror.com/glob/-/glob-10.4.5.tgz", + "integrity": 
"sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmmirror.com/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob-to-regexp": { + "version": "0.4.1", + "resolved": "https://registry.npmmirror.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", + "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "9.0.9", + "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.2" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + 
"node_modules/globby": { + "version": "14.1.0", + "resolved": "https://registry.npmmirror.com/globby/-/globby-14.1.0.tgz", + "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sindresorhus/merge-streams": "^2.1.0", + "fast-glob": "^3.3.3", + "ignore": "^7.0.3", + "path-type": "^6.0.0", + "slash": "^5.1.0", + "unicorn-magic": "^0.3.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmmirror.com/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/handlebars": { + "version": "4.7.8", + "resolved": "https://registry.npmmirror.com/handlebars/-/handlebars-4.7.8.tgz", + "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.5", + "neo-async": "^2.6.2", + "source-map": "^0.6.1", + "wordwrap": "^1.0.0" + }, + "bin": { + "handlebars": "bin/handlebars" + }, + "engines": { + "node": ">=0.4.7" + }, + "optionalDependencies": { + "uglify-js": "^3.1.4" + } + }, + "node_modules/handlebars/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmmirror.com/source-map/-/source-map-0.6.1.tgz", + "integrity": 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/hard-rejection": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/hard-rejection/-/hard-rejection-2.1.0.tgz", + "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/hasown": { 
+ "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hosted-git-info": { + "version": "7.0.2", + "resolved": "https://registry.npmmirror.com/hosted-git-info/-/hosted-git-info-7.0.2.tgz", + "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==", + "dev": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^10.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true, + "license": "MIT" + }, + "node_modules/http-cache-semantics": { + "version": "4.2.0", + "resolved": "https://registry.npmmirror.com/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz", + "integrity": "sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmmirror.com/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmmirror.com/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": 
"sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.7.2", + "resolved": "https://registry.npmmirror.com/iconv-lite/-/iconv-lite-0.7.2.tgz", + "integrity": "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmmirror.com/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmmirror.com/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/ignore-walk": { + "version": "6.0.5", + "resolved": 
"https://registry.npmmirror.com/ignore-walk/-/ignore-walk-6.0.5.tgz", + "integrity": "sha512-VuuG0wCnjhnylG1ABXT3dAuIpTNDs/G8jlpmwXY03fXoXy/8ZK8/T+hMzt8L4WnrLCJgdybqgPagnF/f97cg3A==", + "dev": true, + "license": "ISC", + "dependencies": { + "minimatch": "^9.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/ignore-walk/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/ignore-walk/node_modules/minimatch": { + "version": "9.0.9", + "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.2" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmmirror.com/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-fresh/node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", 
+ "engines": { + "node": ">=4" + } + }, + "node_modules/import-local": { + "version": "3.2.0", + "resolved": "https://registry.npmmirror.com/import-local/-/import-local-3.2.0.tgz", + "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmmirror.com/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmmirror.com/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmmirror.com/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmmirror.com/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "dev": true, + "license": "ISC" + }, + "node_modules/init-package-json": { + "version": "6.0.3", + "resolved": "https://registry.npmmirror.com/init-package-json/-/init-package-json-6.0.3.tgz", + "integrity": "sha512-Zfeb5ol+H+eqJWHTaGca9BovufyGeIfr4zaaBorPmJBMrJ+KBnN+kQx2ZtXdsotUTgldHmHQV44xvUWOUA7E2w==", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/package-json": "^5.0.0", + "npm-package-arg": "^11.0.0", + "promzard": "^1.0.0", + "read": "^3.0.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4", + "validate-npm-package-name": "^5.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/inquirer": { + "version": "8.2.7", + "resolved": "https://registry.npmmirror.com/inquirer/-/inquirer-8.2.7.tgz", + "integrity": "sha512-UjOaSel/iddGZJ5xP/Eixh6dY1XghiBw4XK13rCCIJcJfyhhoul/7KhLLUGtebEj6GDYM6Vnx/mVsjx2L/mFIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/external-editor": "^1.0.0", + "ansi-escapes": "^4.2.1", + "chalk": "^4.1.1", + "cli-cursor": "^3.1.0", + "cli-width": "^3.0.0", + "figures": "^3.0.0", + "lodash": "^4.17.21", + "mute-stream": "0.0.8", + "ora": "^5.4.1", + "run-async": "^2.4.0", + "rxjs": "^7.5.5", + "string-width": 
"^4.1.0", + "strip-ansi": "^6.0.0", + "through": "^2.3.6", + "wrap-ansi": "^6.0.1" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/interpret": { + "version": "3.1.1", + "resolved": "https://registry.npmmirror.com/interpret/-/interpret-3.1.1.tgz", + "integrity": "sha512-6xwYfHbajpoF0xLW+iwLkhwgvLoZDfjYfoFNu8ftMoXINzwuymNLd9u/KmwtdT2GbR+/Cz66otEGEVVUHX9QLQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/ip-address": { + "version": "10.1.0", + "resolved": "https://registry.npmmirror.com/ip-address/-/ip-address-10.1.0.tgz", + "integrity": "sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmmirror.com/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-ci": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/is-ci/-/is-ci-3.0.1.tgz", + "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ci-info": "^3.2.0" + }, + "bin": { + "is-ci": "bin.js" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmmirror.com/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": 
"sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmmirror.com/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "dev": true, + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmmirror.com/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmmirror.com/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "license": "MIT", + 
"dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-interactive": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/is-interactive/-/is-interactive-1.0.0.tgz", + "integrity": "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-lambda": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/is-lambda/-/is-lambda-1.0.1.tgz", + "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmmirror.com/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmmirror.com/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-ssh": { + "version": "1.4.1", + "resolved": "https://registry.npmmirror.com/is-ssh/-/is-ssh-1.4.1.tgz", + "integrity": "sha512-JNeu1wQsHjyHgn9NcWTaXq6zWSR6hqE0++zhfZlkFBbScNkyvxCdeV8sRkSBaeLKxmbpR21brail63ACNxJ0Tg==", + "dev": true, + "license": "MIT", + "dependencies": { + "protocols": "^2.0.1" + } + }, + "node_modules/is-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/is-stream/-/is-stream-2.0.0.tgz", + "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-text-path": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/is-text-path/-/is-text-path-1.0.1.tgz", + "integrity": "sha512-xFuJpne9oFz5qDaodwmmG08e3CawH/2ZV8Qqza1Ko7Sk8POWbkRdwIoAWVhqvq0XeUzANEhKo2n0IXUGBm7A/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "text-extensions": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmmirror.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmmirror.com/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": 
"https://registry.npmmirror.com/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmmirror.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "6.0.3", + "resolved": "https://registry.npmmirror.com/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", + "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.23.9", + "@babel/parser": "^7.23.9", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": 
"sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "5.0.6", + "resolved": "https://registry.npmmirror.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.6.tgz", + "integrity": "sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.23", + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.2.0", + "resolved": "https://registry.npmmirror.com/istanbul-reports/-/istanbul-reports-3.2.0.tgz", + "integrity": "sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jackspeak": { + "version": "3.4.3", + "resolved": "https://registry.npmmirror.com/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/jake": { + "version": "10.9.4", + "resolved": "https://registry.npmmirror.com/jake/-/jake-10.9.4.tgz", + "integrity": "sha512-wpHYzhxiVQL+IV05BLE2Xn34zW1S223hvjtqk0+gsPrwd/8JNLXJgZZM/iPFsYc1xyphF+6M6EvdE5E9MBGkDA==", + "dev": true, + "license": "Apache-2.0", + 
"dependencies": { + "async": "^3.2.6", + "filelist": "^1.0.4", + "picocolors": "^1.1.1" + }, + "bin": { + "jake": "bin/cli.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jest": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest/-/jest-30.2.0.tgz", + "integrity": "sha512-F26gjC0yWN8uAA5m5Ss8ZQf5nDHWGlN/xWZIh8S5SRbsEKBovwZhxGd6LJlbZYxBgCYOtreSUyb8hpXyGC5O4A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "30.2.0", + "@jest/types": "30.2.0", + "import-local": "^3.2.0", + "jest-cli": "30.2.0" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-changed-files": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-changed-files/-/jest-changed-files-30.2.0.tgz", + "integrity": "sha512-L8lR1ChrRnSdfeOvTrwZMlnWV8G/LLjQ0nG9MBclwWZidA2N5FviRki0Bvh20WRMOX31/JYvzdqTJrk5oBdydQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "execa": "^5.1.1", + "jest-util": "30.2.0", + "p-limit": "^3.1.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-changed-files/node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmmirror.com/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/jest-changed-files/node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-circus": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-circus/-/jest-circus-30.2.0.tgz", + "integrity": "sha512-Fh0096NC3ZkFx05EP2OXCxJAREVxj1BcW/i6EWqqymcgYKWjyyDpral3fMxVcHXg6oZM7iULer9wGRFvfpl+Tg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "30.2.0", + "@jest/expect": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "co": "^4.6.0", + "dedent": "^1.6.0", + "is-generator-fn": "^2.1.0", + "jest-each": "30.2.0", + "jest-matcher-utils": "30.2.0", + "jest-message-util": "30.2.0", + "jest-runtime": "30.2.0", + "jest-snapshot": "30.2.0", + "jest-util": "30.2.0", + "p-limit": "^3.1.0", + "pretty-format": "30.2.0", + "pure-rand": "^7.0.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.6" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-circus/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmmirror.com/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-circus/node_modules/@sinclair/typebox": { + "version": "0.34.48", + 
"resolved": "https://registry.npmmirror.com/@sinclair/typebox/-/typebox-0.34.48.tgz", + "integrity": "sha512-kKJTNuK3AQOrgjjotVxMrCn1sUJwM76wMszfq1kdU4uYVJjvEWuFQ6HgvLt4Xz3fSmZlTOxJ/Ie13KnIcWQXFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-circus/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-circus/node_modules/dedent": { + "version": "1.7.2", + "resolved": "https://registry.npmmirror.com/dedent/-/dedent-1.7.2.tgz", + "integrity": "sha512-WzMx3mW98SN+zn3hgemf4OzdmyNhhhKz5Ay0pUfQiMQ3e1g+xmTJWp/pKdwKVXhdSkAEGIIzqeuWrL3mV/AXbA==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "babel-plugin-macros": "^3.1.0" + }, + "peerDependenciesMeta": { + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/jest-circus/node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-circus/node_modules/pretty-format": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/pretty-format/-/pretty-format-30.2.0.tgz", + "integrity": "sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": 
"^18.3.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-circus/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-cli": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-cli/-/jest-cli-30.2.0.tgz", + "integrity": "sha512-Os9ukIvADX/A9sLt6Zse3+nmHtHaE6hqOsjQtNiugFTbKRHYIYtZXNGNK9NChseXy7djFPjndX1tL0sCTlfpAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/types": "30.2.0", + "chalk": "^4.1.2", + "exit-x": "^0.2.2", + "import-local": "^3.2.0", + "jest-config": "30.2.0", + "jest-util": "30.2.0", + "jest-validate": "30.2.0", + "yargs": "^17.7.2" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-config": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-config/-/jest-config-30.2.0.tgz", + "integrity": "sha512-g4WkyzFQVWHtu6uqGmQR4CQxz/CH3yDSlhzXMWzNjDx843gYjReZnMRanjRCq5XZFuQrGDxgUaiYWE8BRfVckA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/get-type": "30.1.0", + "@jest/pattern": "30.0.1", + "@jest/test-sequencer": "30.2.0", + "@jest/types": "30.2.0", + "babel-jest": "30.2.0", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "deepmerge": "^4.3.1", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", + "jest-circus": "30.2.0", + "jest-docblock": "30.2.0", + "jest-environment-node": "30.2.0", + "jest-regex-util": "30.0.1", + 
"jest-resolve": "30.2.0", + "jest-runner": "30.2.0", + "jest-util": "30.2.0", + "jest-validate": "30.2.0", + "micromatch": "^4.0.8", + "parse-json": "^5.2.0", + "pretty-format": "30.2.0", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "esbuild-register": ">=3.4.0", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "esbuild-register": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/jest-config/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmmirror.com/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-config/node_modules/@sinclair/typebox": { + "version": "0.34.48", + "resolved": "https://registry.npmmirror.com/@sinclair/typebox/-/typebox-0.34.48.tgz", + "integrity": "sha512-kKJTNuK3AQOrgjjotVxMrCn1sUJwM76wMszfq1kdU4uYVJjvEWuFQ6HgvLt4Xz3fSmZlTOxJ/Ie13KnIcWQXFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-config/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-config/node_modules/ci-info": { + "version": "4.4.0", + "resolved": "https://registry.npmmirror.com/ci-info/-/ci-info-4.4.0.tgz", + "integrity": 
"sha512-77PSwercCZU2Fc4sX94eF8k8Pxte6JAwL4/ICZLFjJLqegs7kCuAsqqj/70NQF6TvDpgFjkubQB2FW2ZZddvQg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-config/node_modules/pretty-format": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/pretty-format/-/pretty-format-30.2.0.tgz", + "integrity": "sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-config/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-diff": { + "version": "29.7.0", + "resolved": "https://registry.npmmirror.com/jest-diff/-/jest-diff-29.7.0.tgz", + "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^29.6.3", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-docblock": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-docblock/-/jest-docblock-30.2.0.tgz", + "integrity": "sha512-tR/FFgZKS1CXluOQzZvNH3+0z9jXr3ldGSD8bhyuxvlVUwbeLOGynkunvlTMxchC5urrKndYiwCFC0DLVjpOCA==", + "dev": true, + "license": "MIT", + "dependencies": { + "detect-newline": "^3.1.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || 
^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-each": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-each/-/jest-each-30.2.0.tgz", + "integrity": "sha512-lpWlJlM7bCUf1mfmuqTA8+j2lNURW9eNafOy99knBM01i5CQeY5UH1vZjgT9071nDJac1M4XsbyI44oNOdhlDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0", + "@jest/types": "30.2.0", + "chalk": "^4.1.2", + "jest-util": "30.2.0", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-each/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmmirror.com/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-each/node_modules/@sinclair/typebox": { + "version": "0.34.48", + "resolved": "https://registry.npmmirror.com/@sinclair/typebox/-/typebox-0.34.48.tgz", + "integrity": "sha512-kKJTNuK3AQOrgjjotVxMrCn1sUJwM76wMszfq1kdU4uYVJjvEWuFQ6HgvLt4Xz3fSmZlTOxJ/Ie13KnIcWQXFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-each/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-each/node_modules/pretty-format": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/pretty-format/-/pretty-format-30.2.0.tgz", + "integrity": 
"sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-environment-node": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-environment-node/-/jest-environment-node-30.2.0.tgz", + "integrity": "sha512-ElU8v92QJ9UrYsKrxDIKCxu6PfNj4Hdcktcn0JX12zqNdqWHB0N+hwOnnBBXvjLd2vApZtuLUGs1QSY+MsXoNA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "30.2.0", + "@jest/fake-timers": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "jest-mock": "30.2.0", + "jest-util": "30.2.0", + "jest-validate": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "29.6.3", + "resolved": "https://registry.npmmirror.com/jest-get-type/-/jest-get-type-29.6.3.tgz", + "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-haste-map": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-haste-map/-/jest-haste-map-30.2.0.tgz", + "integrity": "sha512-sQA/jCb9kNt+neM0anSj6eZhLZUIhQgwDt7cPGjumgLM4rXsfb9kpnlacmvZz3Q5tb80nS+oG/if+NBKrHC+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "anymatch": "^3.1.3", + "fb-watchman": "^2.0.2", + "graceful-fs": "^4.2.11", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "jest-worker": "30.2.0", + "micromatch": "^4.0.8", + "walker": "^1.0.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "optionalDependencies": { + "fsevents": 
"^2.3.3" + } + }, + "node_modules/jest-haste-map/node_modules/jest-worker": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-worker/-/jest-worker-30.2.0.tgz", + "integrity": "sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@ungap/structured-clone": "^1.3.0", + "jest-util": "30.2.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-haste-map/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmmirror.com/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/jest-junit": { + "version": "16.0.0", + "resolved": "https://registry.npmmirror.com/jest-junit/-/jest-junit-16.0.0.tgz", + "integrity": "sha512-A94mmw6NfJab4Fg/BlvVOUXzXgF0XIH6EmTgJ5NDPp4xoKq0Kr7sErb+4Xs9nZvu58pJojz5RFGpqnZYJTrRfQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "mkdirp": "^1.0.4", + "strip-ansi": "^6.0.1", + "uuid": "^8.3.2", + "xml": "^1.0.1" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/jest-junit/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmmirror.com/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/jest-leak-detector": { + "version": "30.2.0", + "resolved": 
"https://registry.npmmirror.com/jest-leak-detector/-/jest-leak-detector-30.2.0.tgz", + "integrity": "sha512-M6jKAjyzjHG0SrQgwhgZGy9hFazcudwCNovY/9HPIicmNSBuockPSedAP9vlPK6ONFJ1zfyH/M2/YYJxOz5cdQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-leak-detector/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmmirror.com/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-leak-detector/node_modules/@sinclair/typebox": { + "version": "0.34.48", + "resolved": "https://registry.npmmirror.com/@sinclair/typebox/-/typebox-0.34.48.tgz", + "integrity": "sha512-kKJTNuK3AQOrgjjotVxMrCn1sUJwM76wMszfq1kdU4uYVJjvEWuFQ6HgvLt4Xz3fSmZlTOxJ/Ie13KnIcWQXFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-leak-detector/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-leak-detector/node_modules/pretty-format": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/pretty-format/-/pretty-format-30.2.0.tgz", + "integrity": "sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": 
"30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-matcher-utils/-/jest-matcher-utils-30.2.0.tgz", + "integrity": "sha512-dQ94Nq4dbzmUWkQ0ANAWS9tBRfqCrn0bV9AMYdOi/MHW726xn7eQmMeRTpX2ViC00bpNaWXq+7o4lIQ3AX13Hg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0", + "chalk": "^4.1.2", + "jest-diff": "30.2.0", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-matcher-utils/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmmirror.com/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-matcher-utils/node_modules/@sinclair/typebox": { + "version": "0.34.48", + "resolved": "https://registry.npmmirror.com/@sinclair/typebox/-/typebox-0.34.48.tgz", + "integrity": "sha512-kKJTNuK3AQOrgjjotVxMrCn1sUJwM76wMszfq1kdU4uYVJjvEWuFQ6HgvLt4Xz3fSmZlTOxJ/Ie13KnIcWQXFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-matcher-utils/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-matcher-utils/node_modules/jest-diff": { + "version": "30.2.0", + "resolved": 
"https://registry.npmmirror.com/jest-diff/-/jest-diff-30.2.0.tgz", + "integrity": "sha512-dQHFo3Pt4/NLlG5z4PxZ/3yZTZ1C7s9hveiOj+GCN+uT109NC2QgsoVZsVOAvbJ3RgKkvyLGXZV9+piDpWbm6A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/diff-sequences": "30.0.1", + "@jest/get-type": "30.1.0", + "chalk": "^4.1.2", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-matcher-utils/node_modules/pretty-format": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/pretty-format/-/pretty-format-30.2.0.tgz", + "integrity": "sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-message-util/-/jest-message-util-30.2.0.tgz", + "integrity": "sha512-y4DKFLZ2y6DxTWD4cDe07RglV88ZiNEdlRfGtqahfbIjfsw1nMCPx49Uev4IA/hWn3sDKyAnSPwoYSsAEdcimw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@jest/types": "30.2.0", + "@types/stack-utils": "^2.0.3", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "micromatch": "^4.0.8", + "pretty-format": "30.2.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.6" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-message-util/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmmirror.com/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + 
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-message-util/node_modules/@sinclair/typebox": { + "version": "0.34.48", + "resolved": "https://registry.npmmirror.com/@sinclair/typebox/-/typebox-0.34.48.tgz", + "integrity": "sha512-kKJTNuK3AQOrgjjotVxMrCn1sUJwM76wMszfq1kdU4uYVJjvEWuFQ6HgvLt4Xz3fSmZlTOxJ/Ie13KnIcWQXFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-message-util/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-message-util/node_modules/pretty-format": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/pretty-format/-/pretty-format-30.2.0.tgz", + "integrity": "sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-message-util/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-mock": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-mock/-/jest-mock-30.2.0.tgz", + "integrity": "sha512-JNNNl2rj4b5ICpmAcq+WbLH83XswjPbjH4T7yvGzfAGCPh1rw+xVNbtk+FnRslvt9lkCcdn9i1oAoKUuFsOxRw==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@jest/types": "30.2.0", + "@types/node": "*", + "jest-util": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-pnp-resolver": { + "version": "1.2.3", + "resolved": "https://registry.npmmirror.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", + "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "30.0.1", + "resolved": "https://registry.npmmirror.com/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-resolve/-/jest-resolve-30.2.0.tgz", + "integrity": "sha512-TCrHSxPlx3tBY3hWNtRQKbtgLhsXa1WmbJEqBlTBrGafd5fiQFByy2GNCEoGR+Tns8d15GaL9cxEzKOO3GEb2A==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-pnp-resolver": "^1.2.3", + "jest-util": "30.2.0", + "jest-validate": "30.2.0", + "slash": "^3.0.0", + "unrs-resolver": "^1.7.11" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-resolve-dependencies": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-resolve-dependencies/-/jest-resolve-dependencies-30.2.0.tgz", + "integrity": "sha512-xTOIGug/0RmIe3mmCqCT95yO0vj6JURrn1TKWlNbhiAefJRWINNPgwVkrVgt/YaerPzY3iItufd80v3lOrFJ2w==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-regex-util": 
"30.0.1", + "jest-snapshot": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-resolve/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-runner": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-runner/-/jest-runner-30.2.0.tgz", + "integrity": "sha512-PqvZ2B2XEyPEbclp+gV6KO/F1FIFSbIwewRgmROCMBo/aZ6J1w8Qypoj2pEOcg3G2HzLlaP6VUtvwCI8dM3oqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "30.2.0", + "@jest/environment": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "emittery": "^0.13.1", + "exit-x": "^0.2.2", + "graceful-fs": "^4.2.11", + "jest-docblock": "30.2.0", + "jest-environment-node": "30.2.0", + "jest-haste-map": "30.2.0", + "jest-leak-detector": "30.2.0", + "jest-message-util": "30.2.0", + "jest-resolve": "30.2.0", + "jest-runtime": "30.2.0", + "jest-util": "30.2.0", + "jest-watcher": "30.2.0", + "jest-worker": "30.2.0", + "p-limit": "^3.1.0", + "source-map-support": "0.5.13" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runner/node_modules/jest-worker": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-worker/-/jest-worker-30.2.0.tgz", + "integrity": "sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@ungap/structured-clone": "^1.3.0", + "jest-util": "30.2.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.1.1" + }, + "engines": { + "node": "^18.14.0 || 
^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runner/node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-runner/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmmirror.com/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-runner/node_modules/source-map-support": { + "version": "0.5.13", + "resolved": "https://registry.npmmirror.com/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/jest-runner/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmmirror.com/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/jest-runtime": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-runtime/-/jest-runtime-30.2.0.tgz", + "integrity": 
"sha512-p1+GVX/PJqTucvsmERPMgCPvQJpFt4hFbM+VN3n8TMo47decMUcJbt+rgzwrEme0MQUA/R+1de2axftTHkKckg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "30.2.0", + "@jest/fake-timers": "30.2.0", + "@jest/globals": "30.2.0", + "@jest/source-map": "30.0.1", + "@jest/test-result": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "cjs-module-lexer": "^2.1.0", + "collect-v8-coverage": "^1.0.2", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-message-util": "30.2.0", + "jest-mock": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.2.0", + "jest-snapshot": "30.2.0", + "jest-util": "30.2.0", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runtime/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-snapshot": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-snapshot/-/jest-snapshot-30.2.0.tgz", + "integrity": "sha512-5WEtTy2jXPFypadKNpbNkZ72puZCa6UjSr/7djeecHWOu7iYhSXSnHScT8wBz3Rn8Ena5d5RYRcsyKIeqG1IyA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@babel/generator": "^7.27.5", + "@babel/plugin-syntax-jsx": "^7.27.1", + "@babel/plugin-syntax-typescript": "^7.27.1", + "@babel/types": "^7.27.3", + "@jest/expect-utils": "30.2.0", + "@jest/get-type": "30.1.0", + "@jest/snapshot-utils": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "babel-preset-current-node-syntax": "^1.2.0", + "chalk": "^4.1.2", + "expect": "30.2.0", + "graceful-fs": "^4.2.11", + "jest-diff": "30.2.0", + 
"jest-matcher-utils": "30.2.0", + "jest-message-util": "30.2.0", + "jest-util": "30.2.0", + "pretty-format": "30.2.0", + "semver": "^7.7.2", + "synckit": "^0.11.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmmirror.com/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/@sinclair/typebox": { + "version": "0.34.48", + "resolved": "https://registry.npmmirror.com/@sinclair/typebox/-/typebox-0.34.48.tgz", + "integrity": "sha512-kKJTNuK3AQOrgjjotVxMrCn1sUJwM76wMszfq1kdU4uYVJjvEWuFQ6HgvLt4Xz3fSmZlTOxJ/Ie13KnIcWQXFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-snapshot/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-snapshot/node_modules/jest-diff": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-diff/-/jest-diff-30.2.0.tgz", + "integrity": "sha512-dQHFo3Pt4/NLlG5z4PxZ/3yZTZ1C7s9hveiOj+GCN+uT109NC2QgsoVZsVOAvbJ3RgKkvyLGXZV9+piDpWbm6A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/diff-sequences": "30.0.1", + "@jest/get-type": "30.1.0", + "chalk": "^4.1.2", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + 
"node_modules/jest-snapshot/node_modules/pretty-format": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/pretty-format/-/pretty-format-30.2.0.tgz", + "integrity": "sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-util/node_modules/ci-info": { + "version": "4.4.0", + "resolved": "https://registry.npmmirror.com/ci-info/-/ci-info-4.4.0.tgz", + "integrity": "sha512-77PSwercCZU2Fc4sX94eF8k8Pxte6JAwL4/ICZLFjJLqegs7kCuAsqqj/70NQF6TvDpgFjkubQB2FW2ZZddvQg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-util/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmmirror.com/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/jest-validate": { + "version": "30.2.0", + "resolved": 
"https://registry.npmmirror.com/jest-validate/-/jest-validate-30.2.0.tgz", + "integrity": "sha512-FBGWi7dP2hpdi8nBoWxSsLvBFewKAg0+uSQwBaof4Y4DPgBabXgpSYC5/lR7VmnIlSpASmCi/ntRWPbv7089Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0", + "@jest/types": "30.2.0", + "camelcase": "^6.3.0", + "chalk": "^4.1.2", + "leven": "^3.1.0", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-validate/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmmirror.com/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-validate/node_modules/@sinclair/typebox": { + "version": "0.34.48", + "resolved": "https://registry.npmmirror.com/@sinclair/typebox/-/typebox-0.34.48.tgz", + "integrity": "sha512-kKJTNuK3AQOrgjjotVxMrCn1sUJwM76wMszfq1kdU4uYVJjvEWuFQ6HgvLt4Xz3fSmZlTOxJ/Ie13KnIcWQXFA==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-validate/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmmirror.com/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "license": 
"MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-validate/node_modules/pretty-format": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/pretty-format/-/pretty-format-30.2.0.tgz", + "integrity": "sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-watcher": { + "version": "30.2.0", + "resolved": "https://registry.npmmirror.com/jest-watcher/-/jest-watcher-30.2.0.tgz", + "integrity": "sha512-PYxa28dxJ9g777pGm/7PrbnMeA0Jr7osHP9bS7eJy9DuAjMgdGtxgf0uKMyoIsTWAkIbUW5hSDdJ3urmgXBqxg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "ansi-escapes": "^4.3.2", + "chalk": "^4.1.2", + "emittery": "^0.13.1", + "jest-util": "30.2.0", + "string-length": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-worker": { + "version": "27.5.1", + "resolved": "https://registry.npmmirror.com/jest-worker/-/jest-worker-27.5.1.tgz", + "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmmirror.com/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmmirror.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": 
"https://registry.npmmirror.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stringify-nice": { + "version": "1.1.4", + "resolved": "https://registry.npmmirror.com/json-stringify-nice/-/json-stringify-nice-1.1.4.tgz", + "integrity": "sha512-5Z5RFW63yxReJ7vANgW6eZFGWaQvnPE3WNmZoOJrSkGju2etKA2L5rrOa1sm877TVTFt57A80BH1bArcmlLfPw==", + "dev": true, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmmirror.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "dev": true, + "license": "ISC" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmmirror.com/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonc-parser": { + "version": "3.2.0", + "resolved": "https://registry.npmmirror.com/jsonc-parser/-/jsonc-parser-3.2.0.tgz", + "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==", + "license": "MIT" + }, + "node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmmirror.com/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/jsonparse": { + 
"version": "1.3.1", + "resolved": "https://registry.npmmirror.com/jsonparse/-/jsonparse-1.3.1.tgz", + "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==", + "dev": true, + "engines": [ + "node >= 0.2.0" + ], + "license": "MIT" + }, + "node_modules/JSONStream": { + "version": "1.3.5", + "resolved": "https://registry.npmmirror.com/JSONStream/-/JSONStream-1.3.5.tgz", + "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==", + "dev": true, + "license": "(MIT OR Apache-2.0)", + "dependencies": { + "jsonparse": "^1.2.0", + "through": ">=2.2.7 <3" + }, + "bin": { + "JSONStream": "bin.js" + }, + "engines": { + "node": "*" + } + }, + "node_modules/just-diff": { + "version": "6.0.2", + "resolved": "https://registry.npmmirror.com/just-diff/-/just-diff-6.0.2.tgz", + "integrity": "sha512-S59eriX5u3/QhMNq3v/gm8Kd0w8OS6Tz2FS1NG4blv+z0MuQcBRJyFWjdovM0Rad4/P4aUPFtnkNjMjyMlMSYA==", + "dev": true, + "license": "MIT" + }, + "node_modules/just-diff-apply": { + "version": "5.5.0", + "resolved": "https://registry.npmmirror.com/just-diff-apply/-/just-diff-apply-5.5.0.tgz", + "integrity": "sha512-OYTthRfSh55WOItVqwpefPtNt2VdKsq5AnAK6apdtR6yCH8pr0CmSr710J0Mf+WdQy7K/OzMy7K2MgAfdQURDw==", + "dev": true, + "license": "MIT" + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmmirror.com/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/lerna": { + "version": "8.2.4", + "resolved": "https://registry.npmmirror.com/lerna/-/lerna-8.2.4.tgz", + "integrity": "sha512-0gaVWDIVT7fLfprfwpYcQajb7dBJv3EGavjG7zvJ+TmGx3/wovl5GklnSwM2/WeE0Z2wrIz7ndWhBcDUHVjOcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@lerna/create": "8.2.4", + "@npmcli/arborist": 
"7.5.4", + "@npmcli/package-json": "5.2.0", + "@npmcli/run-script": "8.1.0", + "@nx/devkit": ">=17.1.2 < 21", + "@octokit/plugin-enterprise-rest": "6.0.1", + "@octokit/rest": "20.1.2", + "aproba": "2.0.0", + "byte-size": "8.1.1", + "chalk": "4.1.0", + "clone-deep": "4.0.1", + "cmd-shim": "6.0.3", + "color-support": "1.1.3", + "columnify": "1.6.0", + "console-control-strings": "^1.1.0", + "conventional-changelog-angular": "7.0.0", + "conventional-changelog-core": "5.0.1", + "conventional-recommended-bump": "7.0.1", + "cosmiconfig": "9.0.0", + "dedent": "1.5.3", + "envinfo": "7.13.0", + "execa": "5.0.0", + "fs-extra": "^11.2.0", + "get-port": "5.1.1", + "get-stream": "6.0.0", + "git-url-parse": "14.0.0", + "glob-parent": "6.0.2", + "graceful-fs": "4.2.11", + "has-unicode": "2.0.1", + "import-local": "3.1.0", + "ini": "^1.3.8", + "init-package-json": "6.0.3", + "inquirer": "^8.2.4", + "is-ci": "3.0.1", + "is-stream": "2.0.0", + "jest-diff": ">=29.4.3 < 30", + "js-yaml": "4.1.0", + "libnpmaccess": "8.0.6", + "libnpmpublish": "9.0.9", + "load-json-file": "6.2.0", + "make-dir": "4.0.0", + "minimatch": "3.0.5", + "multimatch": "5.0.0", + "node-fetch": "2.6.7", + "npm-package-arg": "11.0.2", + "npm-packlist": "8.0.2", + "npm-registry-fetch": "^17.1.0", + "nx": ">=17.1.2 < 21", + "p-map": "4.0.0", + "p-map-series": "2.1.0", + "p-pipe": "3.1.0", + "p-queue": "6.6.2", + "p-reduce": "2.1.0", + "p-waterfall": "2.1.1", + "pacote": "^18.0.6", + "pify": "5.0.0", + "read-cmd-shim": "4.0.0", + "resolve-from": "5.0.0", + "rimraf": "^4.4.1", + "semver": "^7.3.8", + "set-blocking": "^2.0.0", + "signal-exit": "3.0.7", + "slash": "3.0.0", + "ssri": "^10.0.6", + "string-width": "^4.2.3", + "tar": "6.2.1", + "temp-dir": "1.0.0", + "through": "2.3.8", + "tinyglobby": "0.2.12", + "typescript": ">=3 < 6", + "upath": "2.0.1", + "uuid": "^10.0.0", + "validate-npm-package-license": "3.0.4", + "validate-npm-package-name": "5.0.1", + "wide-align": "1.1.5", + "write-file-atomic": "5.0.1", + 
"write-pkg": "4.0.0", + "yargs": "17.7.2", + "yargs-parser": "21.1.1" + }, + "bin": { + "lerna": "dist/cli.js" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/lerna/node_modules/chalk": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/chalk/-/chalk-4.1.0.tgz", + "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/lerna/node_modules/envinfo": { + "version": "7.13.0", + "resolved": "https://registry.npmmirror.com/envinfo/-/envinfo-7.13.0.tgz", + "integrity": "sha512-cvcaMr7KqXVh4nyzGTVqTum+gAiL265x5jUWQIDLq//zOGbW+gSW/C+OWLleY/rs9Qole6AZLMXPbtIFQbqu+Q==", + "dev": true, + "license": "MIT", + "bin": { + "envinfo": "dist/cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/lerna/node_modules/import-local": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/import-local/-/import-local-3.1.0.tgz", + "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", + "dev": true, + "license": "MIT", + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lerna/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": 
"https://registry.npmmirror.com/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/libnpmaccess": { + "version": "8.0.6", + "resolved": "https://registry.npmmirror.com/libnpmaccess/-/libnpmaccess-8.0.6.tgz", + "integrity": "sha512-uM8DHDEfYG6G5gVivVl+yQd4pH3uRclHC59lzIbSvy7b5FEwR+mU49Zq1jEyRtRFv7+M99mUW9S0wL/4laT4lw==", + "dev": true, + "license": "ISC", + "dependencies": { + "npm-package-arg": "^11.0.2", + "npm-registry-fetch": "^17.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/libnpmpublish": { + "version": "9.0.9", + "resolved": "https://registry.npmmirror.com/libnpmpublish/-/libnpmpublish-9.0.9.tgz", + "integrity": "sha512-26zzwoBNAvX9AWOPiqqF6FG4HrSCPsHFkQm7nT+xU1ggAujL/eae81RnCv4CJ2In9q9fh10B88sYSzKCUh/Ghg==", + "dev": true, + "license": "ISC", + "dependencies": { + "ci-info": "^4.0.0", + "normalize-package-data": "^6.0.1", + "npm-package-arg": "^11.0.2", + "npm-registry-fetch": "^17.0.1", + "proc-log": "^4.2.0", + "semver": "^7.3.7", + "sigstore": "^2.2.0", + "ssri": "^10.0.6" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/libnpmpublish/node_modules/ci-info": { + "version": "4.4.0", + "resolved": "https://registry.npmmirror.com/ci-info/-/ci-info-4.4.0.tgz", + "integrity": "sha512-77PSwercCZU2Fc4sX94eF8k8Pxte6JAwL4/ICZLFjJLqegs7kCuAsqqj/70NQF6TvDpgFjkubQB2FW2ZZddvQg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/lines-and-columns": { + "version": "2.0.3", + "resolved": "https://registry.npmmirror.com/lines-and-columns/-/lines-and-columns-2.0.3.tgz", + "integrity": "sha512-cNOjgCnLB+FnvWWtyRTzmB3POJ+cXxTA81LoW7u8JdmhfXzriropYwpjShnz1QLLWsQwY7nIxoDmcPTwphDK9w==", + "dev": 
true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/load-json-file": { + "version": "6.2.0", + "resolved": "https://registry.npmmirror.com/load-json-file/-/load-json-file-6.2.0.tgz", + "integrity": "sha512-gUD/epcRms75Cw8RT1pUdHugZYM5ce64ucs2GEISABwkRsOQr0q2wm/MV2TKThycIe5e0ytRweW2RZxclogCdQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.1.15", + "parse-json": "^5.0.0", + "strip-bom": "^4.0.0", + "type-fest": "^0.6.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/load-json-file/node_modules/type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmmirror.com/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=8" + } + }, + "node_modules/loader-runner": { + "version": "4.3.1", + "resolved": "https://registry.npmmirror.com/loader-runner/-/loader-runner-4.3.1.tgz", + "integrity": "sha512-IWqP2SCPhyVFTBtRcgMHdzlf9ul25NwaFx4wCEH/KjAXuuHY4yNjvPXsBokp8jCB936PyWRaPKUNh8NvylLp2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.11.5" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/loader-utils": { + "version": "2.0.4", + "resolved": "https://registry.npmmirror.com/loader-utils/-/loader-utils-2.0.4.tgz", + "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^2.1.2" + }, + "engines": { + "node": ">=8.9.0" + } + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmmirror.com/locate-path/-/locate-path-5.0.0.tgz", + "integrity": 
"sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash": { + "version": "4.17.23", + "resolved": "https://registry.npmmirror.com/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmmirror.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", + "license": "MIT" + }, + "node_modules/lodash.ismatch": { + "version": "4.4.0", + "resolved": "https://registry.npmmirror.com/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz", + "integrity": "sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmmirror.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", + "dev": true, + "license": "MIT" + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lru-cache": { + "version": "10.4.3", + "resolved": 
"https://registry.npmmirror.com/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmmirror.com/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, + "node_modules/make-fetch-happen": { + "version": "13.0.1", + "resolved": "https://registry.npmmirror.com/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz", + "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmmirror.com/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "tmpl": "1.0.5" + } + 
}, + "node_modules/map-obj": { + "version": "4.3.0", + "resolved": "https://registry.npmmirror.com/map-obj/-/map-obj-4.3.0.tgz", + "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/meow": { + "version": "8.1.2", + "resolved": "https://registry.npmmirror.com/meow/-/meow-8.1.2.tgz", + "integrity": "sha512-r85E3NdZ+mpYk1C6RjPFEMSE+s1iZMuHtsHAqY0DT3jZczl0diWUZ8g6oU7h0M9cD2EL+PzaYghhCLzR0ZNn5Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/minimist": "^1.2.0", + "camelcase-keys": "^6.2.2", + "decamelize-keys": "^1.1.0", + "hard-rejection": "^2.1.0", + "minimist-options": "4.1.0", + "normalize-package-data": "^3.0.0", + "read-pkg-up": "^7.0.1", + "redent": "^3.0.0", + "trim-newlines": "^3.0.0", + "type-fest": "^0.18.0", + "yargs-parser": "^20.2.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/meow/node_modules/hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "dev": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/meow/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": 
"https://registry.npmmirror.com/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/meow/node_modules/normalize-package-data": { + "version": "3.0.3", + "resolved": "https://registry.npmmirror.com/normalize-package-data/-/normalize-package-data-3.0.3.tgz", + "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "hosted-git-info": "^4.0.1", + "is-core-module": "^2.5.0", + "semver": "^7.3.4", + "validate-npm-package-license": "^3.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/meow/node_modules/read-pkg": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/read-pkg/-/read-pkg-5.2.0.tgz", + "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/normalize-package-data": "^2.4.0", + "normalize-package-data": "^2.5.0", + "parse-json": "^5.0.0", + "type-fest": "^0.6.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/meow/node_modules/read-pkg-up": { + "version": "7.0.1", + "resolved": "https://registry.npmmirror.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz", + "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^4.1.0", + "read-pkg": "^5.2.0", + "type-fest": "^0.8.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/meow/node_modules/read-pkg-up/node_modules/type-fest": { + "version": "0.8.1", + "resolved": 
"https://registry.npmmirror.com/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=8" + } + }, + "node_modules/meow/node_modules/read-pkg/node_modules/hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmmirror.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true, + "license": "ISC" + }, + "node_modules/meow/node_modules/read-pkg/node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmmirror.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/meow/node_modules/read-pkg/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmmirror.com/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/meow/node_modules/read-pkg/node_modules/type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmmirror.com/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=8" + } + }, + "node_modules/meow/node_modules/type-fest": { + "version": "0.18.1", + "resolved": 
"https://registry.npmmirror.com/type-fest/-/type-fest-0.18.1.tgz", + "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/meow/node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmmirror.com/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmmirror.com/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmmirror.com/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmmirror.com/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": 
">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmmirror.com/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "3.0.5", + "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-3.0.5.tgz", + "integrity": "sha512-tUpxzX0VAzJHjLu0xUfFv1gwVp9ba3IOuRAVH2EGuRW8a5emA2FlACLqiT/lDVtS1W+TGNwqz3sWaNyLgDJWuw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmmirror.com/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minimist-options": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/minimist-options/-/minimist-options-4.1.0.tgz", + "integrity": 
"sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", + "dev": true, + "license": "MIT", + "dependencies": { + "arrify": "^1.0.1", + "is-plain-obj": "^1.1.0", + "kind-of": "^6.0.3" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/minipass": { + "version": "7.1.3", + "resolved": "https://registry.npmmirror.com/minipass/-/minipass-7.1.3.tgz", + "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/minipass-collect": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/minipass-collect/-/minipass-collect-2.0.1.tgz", + "integrity": "sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/minipass-fetch": { + "version": "3.0.5", + "resolved": "https://registry.npmmirror.com/minipass-fetch/-/minipass-fetch-3.0.5.tgz", + "integrity": "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==", + "dev": true, + "license": "MIT", + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, + "node_modules/minipass-flush": { + "version": "1.0.5", + "resolved": "https://registry.npmmirror.com/minipass-flush/-/minipass-flush-1.0.5.tgz", + "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minipass-flush/node_modules/minipass": { + "version": 
"3.3.6", + "resolved": "https://registry.npmmirror.com/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-pipeline": { + "version": "1.2.4", + "resolved": "https://registry.npmmirror.com/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", + "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-pipeline/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmmirror.com/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-sized": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/minipass-sized/-/minipass-sized-1.0.3.tgz", + "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-sized/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmmirror.com/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minizlib": { + "version": "2.1.2", + "resolved": 
"https://registry.npmmirror.com/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dev": true, + "license": "MIT", + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minizlib/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmmirror.com/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmmirror.com/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, + "license": "MIT", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/modify-values": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/modify-values/-/modify-values-1.0.1.tgz", + "integrity": "sha512-xV2bxeN6F7oYjZWTe/YPAy6MN2M+sL4u/Rlm2AHCIVGfo2p1yGmBHQ6vHehl4bRTZBdHu3TSkWdYgkwpYzAGSw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmmirror.com/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/multimatch": { + "version": "5.0.0", + "resolved": "https://registry.npmmirror.com/multimatch/-/multimatch-5.0.0.tgz", + "integrity": "sha512-ypMKuglUrZUD99Tk2bUQ+xNQj43lPEfAeX2o9cTteAmShXy2VHDJpuwu1o0xqoKCt9jLVAvwyFKdLTPXKAfJyA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/minimatch": 
"^3.0.3", + "array-differ": "^3.0.0", + "array-union": "^2.1.0", + "arrify": "^2.0.1", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/multimatch/node_modules/arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmmirror.com/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "dev": true, + "license": "ISC" + }, + "node_modules/napi-postinstall": { + "version": "0.3.4", + "resolved": "https://registry.npmmirror.com/napi-postinstall/-/napi-postinstall-0.3.4.tgz", + "integrity": "sha512-PHI5f1O0EP5xJ9gQmFGMS6IZcrVvTjpXjz7Na41gTE7eE2hK11lg04CECCYEEjdc17EV4DO+fkGEtt7TpTaTiQ==", + "dev": true, + "license": "MIT", + "bin": { + "napi-postinstall": "lib/cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/napi-postinstall" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmmirror.com/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/negotiator": { + "version": "0.6.4", + "resolved": "https://registry.npmmirror.com/negotiator/-/negotiator-0.6.4.tgz", + "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + 
"node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmmirror.com/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true, + "license": "MIT" + }, + "node_modules/nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmmirror.com/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmmirror.com/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/node-gyp": { + "version": "10.3.1", + "resolved": "https://registry.npmmirror.com/node-gyp/-/node-gyp-10.3.1.tgz", + "integrity": "sha512-Pp3nFHBThHzVtNY7U6JfPjvT/DTE8+o/4xKsLQtBoU+j2HLsGlhcfzflAoUreaJbNmYnX+LlLi0qjV8kpyO6xQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "env-paths": "^2.2.0", + "exponential-backoff": "^3.1.1", + "glob": "^10.3.10", + "graceful-fs": "^4.2.6", + "make-fetch-happen": "^13.0.0", + "nopt": "^7.0.0", + "proc-log": "^4.1.0", + "semver": "^7.3.5", + "tar": "^6.2.1", + "which": "^4.0.0" + }, + "bin": { + "node-gyp": "bin/node-gyp.js" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/node-gyp/node_modules/isexe": { + "version": "3.1.5", + "resolved": "https://registry.npmmirror.com/isexe/-/isexe-3.1.5.tgz", + "integrity": "sha512-6B3tLtFqtQS4ekarvLVMZ+X+VlvQekbe4taUkf/rhVO3d/h0M2rfARm/pXLcPEsjjMsFgrFgSrhQIxcSVrBz8w==", + "dev": 
true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/node-gyp/node_modules/which": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/which/-/which-4.0.0.tgz", + "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmmirror.com/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-machine-id": { + "version": "1.1.12", + "resolved": "https://registry.npmmirror.com/node-machine-id/-/node-machine-id-1.1.12.tgz", + "integrity": "sha512-QNABxbrPa3qEIfrE6GOJ7BYIuignnJw7iQ2YPbc3Nla1HzRJjXzZOiikfF8m7eAMfichLt3M4VgLOetqgDmgGQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-releases": { + "version": "2.0.27", + "resolved": "https://registry.npmmirror.com/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nopt": { + "version": "7.2.1", + "resolved": "https://registry.npmmirror.com/nopt/-/nopt-7.2.1.tgz", + "integrity": "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==", + "dev": true, + "license": "ISC", + "dependencies": { + "abbrev": "^2.0.0" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/normalize-package-data": { + "version": "6.0.2", + "resolved": 
"https://registry.npmmirror.com/normalize-package-data/-/normalize-package-data-6.0.2.tgz", + "integrity": "sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "hosted-git-info": "^7.0.0", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-bundled": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/npm-bundled/-/npm-bundled-3.0.1.tgz", + "integrity": "sha512-+AvaheE/ww1JEwRHOrn4WHNzOxGtVp+adrg2AeZS/7KuxGUYFuBta98wYpfHBbJp6Tg6j1NKSEVHNcfZzJHQwQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "npm-normalize-package-bin": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-install-checks": { + "version": "6.3.0", + "resolved": "https://registry.npmmirror.com/npm-install-checks/-/npm-install-checks-6.3.0.tgz", + "integrity": "sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "semver": "^7.1.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-normalize-package-bin": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz", + "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } 
+ }, + "node_modules/npm-package-arg": { + "version": "11.0.2", + "resolved": "https://registry.npmmirror.com/npm-package-arg/-/npm-package-arg-11.0.2.tgz", + "integrity": "sha512-IGN0IAwmhDJwy13Wc8k+4PEbTPhpJnMtfR53ZbOyjkvmEcLS4nCwp6mvMWjS5sUjeiW3mpx6cHmuhKEu9XmcQw==", + "dev": true, + "license": "ISC", + "dependencies": { + "hosted-git-info": "^7.0.0", + "proc-log": "^4.0.0", + "semver": "^7.3.5", + "validate-npm-package-name": "^5.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm-packlist": { + "version": "8.0.2", + "resolved": "https://registry.npmmirror.com/npm-packlist/-/npm-packlist-8.0.2.tgz", + "integrity": "sha512-shYrPFIS/JLP4oQmAwDyk5HcyysKW8/JLTEA32S0Z5TzvpaeeX2yMFfoK1fjEBnCBvVyIB/Jj/GBFdm0wsgzbA==", + "dev": true, + "license": "ISC", + "dependencies": { + "ignore-walk": "^6.0.4" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-pick-manifest": { + "version": "9.1.0", + "resolved": "https://registry.npmmirror.com/npm-pick-manifest/-/npm-pick-manifest-9.1.0.tgz", + "integrity": "sha512-nkc+3pIIhqHVQr085X9d2JzPzLyjzQS96zbruppqC9aZRm/x8xx6xhI98gHtsfELP2bE+loHq8ZaHFHhe+NauA==", + "dev": true, + "license": "ISC", + "dependencies": { + "npm-install-checks": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0", + "npm-package-arg": "^11.0.0", + "semver": "^7.3.5" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm-registry-fetch": { + "version": "17.1.0", + "resolved": "https://registry.npmmirror.com/npm-registry-fetch/-/npm-registry-fetch-17.1.0.tgz", + "integrity": "sha512-5+bKQRH0J1xG1uZ1zMNvxW0VEyoNWgJpY9UDuluPFLKDfJ9u2JmmjmTJV1srBGQOROfdBMiVvnH2Zvpbm+xkVA==", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/redact": "^2.0.0", + "jsonparse": "^1.3.1", + "make-fetch-happen": "^13.0.0", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minizlib": "^2.1.2", + "npm-package-arg": "^11.0.0", + "proc-log": "^4.0.0" + }, + "engines": 
{ + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmmirror.com/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nx": { + "version": "20.8.4", + "resolved": "https://registry.npmmirror.com/nx/-/nx-20.8.4.tgz", + "integrity": "sha512-/++x0OM3/UTmDR+wmPeV13tSxeTr+QGzj3flgtH9DiOPmQnn2CjHWAMZiOhcSh/hHoE/V3ySL4757InQUsVtjQ==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "@napi-rs/wasm-runtime": "0.2.4", + "@yarnpkg/lockfile": "^1.1.0", + "@yarnpkg/parsers": "3.0.2", + "@zkochan/js-yaml": "0.0.7", + "axios": "^1.8.3", + "chalk": "^4.1.0", + "cli-cursor": "3.1.0", + "cli-spinners": "2.6.1", + "cliui": "^8.0.1", + "dotenv": "~16.4.5", + "dotenv-expand": "~11.0.6", + "enquirer": "~2.3.6", + "figures": "3.2.0", + "flat": "^5.0.2", + "front-matter": "^4.0.2", + "ignore": "^5.0.4", + "jest-diff": "^29.4.1", + "jsonc-parser": "3.2.0", + "lines-and-columns": "2.0.3", + "minimatch": "9.0.3", + "node-machine-id": "1.1.12", + "npm-run-path": "^4.0.1", + "open": "^8.4.0", + "ora": "5.3.0", + "resolve.exports": "2.0.3", + "semver": "^7.5.3", + "string-width": "^4.2.3", + "tar-stream": "~2.2.0", + "tmp": "~0.2.1", + "tsconfig-paths": "^4.1.2", + "tslib": "^2.3.0", + "yaml": "^2.6.0", + "yargs": "^17.6.2", + "yargs-parser": "21.1.1" + }, + "bin": { + "nx": "bin/nx.js", + "nx-cloud": "bin/nx-cloud.js" + }, + "optionalDependencies": { + "@nx/nx-darwin-arm64": "20.8.4", + "@nx/nx-darwin-x64": "20.8.4", + "@nx/nx-freebsd-x64": "20.8.4", + "@nx/nx-linux-arm-gnueabihf": "20.8.4", + "@nx/nx-linux-arm64-gnu": "20.8.4", + "@nx/nx-linux-arm64-musl": "20.8.4", + "@nx/nx-linux-x64-gnu": "20.8.4", + "@nx/nx-linux-x64-musl": "20.8.4", + 
"@nx/nx-win32-arm64-msvc": "20.8.4", + "@nx/nx-win32-x64-msvc": "20.8.4" + }, + "peerDependencies": { + "@swc-node/register": "^1.8.0", + "@swc/core": "^1.3.85" + }, + "peerDependenciesMeta": { + "@swc-node/register": { + "optional": true + }, + "@swc/core": { + "optional": true + } + } + }, + "node_modules/nx/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/nx/node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmmirror.com/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/nx/node_modules/minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/nx/node_modules/ora": { + "version": "5.3.0", + "resolved": "https://registry.npmmirror.com/ora/-/ora-5.3.0.tgz", + "integrity": "sha512-zAKMgGXUim0Jyd6CXK9lraBnD3H5yPGBPPOkC23a2BG6hsm4Zu6OQSjQuEtV0BHDf4aKHcUFvJiGRrFuW3MG8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "bl": "^4.0.3", + "chalk": "^4.1.0", + "cli-cursor": "^3.1.0", + "cli-spinners": "^2.5.0", + "is-interactive": "^1.0.0", + "log-symbols": "^4.0.0", + "strip-ansi": "^6.0.0", + "wcwidth": "^1.0.1" + }, + "engines": { + "node": 
">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmmirror.com/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmmirror.com/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/open": { + "version": "8.4.2", + "resolved": "https://registry.npmmirror.com/open/-/open-8.4.2.tgz", + "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora": { + "version": "5.4.1", + "resolved": "https://registry.npmmirror.com/ora/-/ora-5.4.1.tgz", + "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "bl": "^4.1.0", + "chalk": "^4.1.0", + "cli-cursor": "^3.1.0", + "cli-spinners": "^2.5.0", + "is-interactive": "^1.0.0", + "is-unicode-supported": "^0.1.0", + "log-symbols": "^4.1.0", + "strip-ansi": "^6.0.0", + "wcwidth": "^1.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-finally": { + "version": 
"1.0.0", + "resolved": "https://registry.npmmirror.com/p-finally/-/p-finally-1.0.0.tgz", + "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmmirror.com/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-map": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/p-map/-/p-map-4.0.0.tgz", + "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-map-series": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/p-map-series/-/p-map-series-2.1.0.tgz", + "integrity": "sha512-RpYIIK1zXSNEOdwxcfe7FdvGcs7+y5n8rifMhMNWvaxRNMPINJHF5GDeuVxWqnfrcHPSCnp7Oo5yNXHId9Av2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/p-pipe": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/p-pipe/-/p-pipe-3.1.0.tgz", + "integrity": 
"sha512-08pj8ATpzMR0Y80x50yJHn37NF6vjrqHutASaX5LiH5npS9XPvrUmscd9MF5R4fuYRHOxQR1FfMIlF7AzwoPqw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-queue": { + "version": "6.6.2", + "resolved": "https://registry.npmmirror.com/p-queue/-/p-queue-6.6.2.tgz", + "integrity": "sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "eventemitter3": "^4.0.4", + "p-timeout": "^3.2.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-reduce": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/p-reduce/-/p-reduce-2.1.0.tgz", + "integrity": "sha512-2USApvnsutq8uoxZBGbbWM0JIYLiEMJ9RlaN7fAzVNb9OZN0SHjjTTfIcb667XynS5Y1VhwDJVDa72TnPzAYWw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/p-timeout": { + "version": "3.2.0", + "resolved": "https://registry.npmmirror.com/p-timeout/-/p-timeout-3.2.0.tgz", + "integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-finally": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmmirror.com/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/p-waterfall": { + "version": "2.1.1", + "resolved": "https://registry.npmmirror.com/p-waterfall/-/p-waterfall-2.1.1.tgz", + "integrity": "sha512-RRTnDb2TBG/epPRI2yYXsimO0v3BXC8Yd3ogr1545IaqKK17VGhbWVeGGN+XfCm/08OK8635nH31c8bATkHuSw==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "p-reduce": "^2.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true, + "license": "BlueOak-1.0.0" + }, + "node_modules/pacote": { + "version": "18.0.6", + "resolved": "https://registry.npmmirror.com/pacote/-/pacote-18.0.6.tgz", + "integrity": "sha512-+eK3G27SMwsB8kLIuj4h1FUhHtwiEUo21Tw8wNjmvdlpOEr613edv+8FUsTj/4F/VN5ywGE19X18N7CC2EJk6A==", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/git": "^5.0.0", + "@npmcli/installed-package-contents": "^2.0.1", + "@npmcli/package-json": "^5.1.0", + "@npmcli/promise-spawn": "^7.0.0", + "@npmcli/run-script": "^8.0.0", + "cacache": "^18.0.0", + "fs-minipass": "^3.0.0", + "minipass": "^7.0.2", + "npm-package-arg": "^11.0.0", + "npm-packlist": "^8.0.0", + "npm-pick-manifest": "^9.0.0", + "npm-registry-fetch": "^17.0.0", + "proc-log": "^4.0.0", + "promise-retry": "^2.0.1", + "sigstore": "^2.2.0", + "ssri": "^10.0.0", + "tar": "^6.1.11" + }, + "bin": { + "pacote": "bin/index.js" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-conflict-json": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/parse-conflict-json/-/parse-conflict-json-3.0.1.tgz", + "integrity": 
"sha512-01TvEktc68vwbJOtWZluyWeVGWjP+bZwXtPDMQVbBKzbJ/vZBif0L69KH1+cHv1SZ6e0FKLvjyHe8mqsIqYOmw==", + "dev": true, + "license": "ISC", + "dependencies": { + "json-parse-even-better-errors": "^3.0.0", + "just-diff": "^6.0.0", + "just-diff-apply": "^5.2.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/parse-conflict-json/node_modules/json-parse-even-better-errors": { + "version": "3.0.2", + "resolved": "https://registry.npmmirror.com/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz", + "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parse-json/node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmmirror.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/parse-path": { + "version": "7.1.0", + "resolved": "https://registry.npmmirror.com/parse-path/-/parse-path-7.1.0.tgz", + "integrity": "sha512-EuCycjZtfPcjWk7KTksnJ5xPMvWGA/6i4zrLYhRG0hGvC3GPU/jGUj3Cy+ZR0v30duV3e23R95T1lE2+lsndSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "protocols": "^2.0.0" + } + }, + 
"node_modules/parse-url": { + "version": "8.1.0", + "resolved": "https://registry.npmmirror.com/parse-url/-/parse-url-8.1.0.tgz", + "integrity": "sha512-xDvOoLU5XRrcOZvnI6b8zA6n9O9ejNk/GExuz1yBuWUGn9KA97GI6HTs6u02wKara1CeVmZhH+0TZFdWScR89w==", + "dev": true, + "license": "MIT", + "dependencies": { + "parse-path": "^7.0.0" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmmirror.com/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmmirror.com/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmmirror.com/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || 
^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-type": { + "version": "6.0.0", + "resolved": "https://registry.npmmirror.com/path-type/-/path-type-6.0.0.tgz", + "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmmirror.com/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "5.0.0", + "resolved": "https://registry.npmmirror.com/pify/-/pify-5.0.0.tgz", + "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmmirror.com/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": 
"https://registry.npmmirror.com/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.1.2", + "resolved": "https://registry.npmmirror.com/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", + "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmmirror.com/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/proc-log": { + "version": "4.2.0", + "resolved": "https://registry.npmmirror.com/proc-log/-/proc-log-4.2.0.tgz", + "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + 
"node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true, + "license": "MIT" + }, + "node_modules/proggy": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/proggy/-/proggy-2.0.0.tgz", + "integrity": "sha512-69agxLtnI8xBs9gUGqEnK26UfiexpHy+KUpBQWabiytQjnn5wFY8rklAi7GRfABIuPNnQ/ik48+LGLkYYJcy4A==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/promise-all-reject-late": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/promise-all-reject-late/-/promise-all-reject-late-1.0.1.tgz", + "integrity": "sha512-vuf0Lf0lOxyQREH7GDIOUMLS7kz+gs8i6B+Yi8dC68a2sychGrHTJYghMBD6k7eUcH0H5P73EckCA48xijWqXw==", + "dev": true, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/promise-call-limit": { + "version": "3.0.2", + "resolved": "https://registry.npmmirror.com/promise-call-limit/-/promise-call-limit-3.0.2.tgz", + "integrity": "sha512-mRPQO2T1QQVw11E7+UdCJu7S61eJVWknzml9sC1heAdj1jxl0fWMBypIt9ZOcLFf8FkG995ZD7RnVk7HH72fZw==", + "dev": true, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/promise-inflight": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/promise-inflight/-/promise-inflight-1.0.1.tgz", + "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/promise-retry": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/promise-retry/-/promise-retry-2.0.1.tgz", + "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", + 
"dev": true, + "license": "MIT", + "dependencies": { + "err-code": "^2.0.2", + "retry": "^0.12.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/promzard": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/promzard/-/promzard-1.0.2.tgz", + "integrity": "sha512-2FPputGL+mP3jJ3UZg/Dl9YOkovB7DX0oOr+ck5QbZ5MtORtds8k/BZdn+02peDLI8/YWbmzx34k5fA+fHvCVQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "read": "^3.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/protocols": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/protocols/-/protocols-2.0.2.tgz", + "integrity": "sha512-hHVTzba3wboROl0/aWRRG9dMytgH6ow//STBZh43l/wQgmMhYhOFi0EHWAPtoCz9IAUymsyP0TSBHkhgMEGNnQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true, + "license": "MIT" + }, + "node_modules/pump": { + "version": "3.0.4", + "resolved": "https://registry.npmmirror.com/pump/-/pump-3.0.4.tgz", + "integrity": "sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/pure-rand": { + "version": "7.0.1", + "resolved": "https://registry.npmmirror.com/pure-rand/-/pure-rand-7.0.1.tgz", + "integrity": "sha512-oTUZM/NAZS8p7ANR3SHh30kXB+zK2r2BPcEn/awJIbOvq82WoMN4p62AWWp3Hhw50G0xMsw1mhIBLqHw64EcNQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT" + }, + "node_modules/pyright": { + "version": "1.1.408", + "resolved": 
"https://registry.npmmirror.com/pyright/-/pyright-1.1.408.tgz", + "integrity": "sha512-N61pxaLLCsPcUuPPHMNIrGoZgGBgrbjBX5UqkaT5UV8NVZdL7ExsO6N3ectv1DzAUsLOzdlyqoYtX76u8eF4YA==", + "license": "MIT", + "bin": { + "pyright": "index.js", + "pyright-langserver": "langserver.index.js" + }, + "engines": { + "node": ">=14.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/pyright-internal": { + "resolved": "packages/pyright-internal", + "link": true + }, + "node_modules/python-parser": { + "resolved": "packages/python-parser", + "link": true + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmmirror.com/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/quick-lru": { + "version": "4.0.1", + "resolved": "https://registry.npmmirror.com/quick-lru/-/quick-lru-4.0.1.tgz", + "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmmirror.com/react-is/-/react-is-18.3.1.tgz", + "integrity": 
"sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "dev": true, + "license": "MIT" + }, + "node_modules/read": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/read/-/read-3.0.1.tgz", + "integrity": "sha512-SLBrDU/Srs/9EoWhU5GdbAoxG1GzpQHo/6qiGItaoLJ1thmYpcNIM1qISEUvyHBzfGlWIyd6p2DNi1oV1VmAuw==", + "dev": true, + "license": "ISC", + "dependencies": { + "mute-stream": "^1.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/read-cmd-shim": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/read-cmd-shim/-/read-cmd-shim-4.0.0.tgz", + "integrity": "sha512-yILWifhaSEEytfXI76kB9xEEiG1AiozaCJZ83A87ytjRiN+jVibXjedjCRNjoZviinhG+4UkalO3mWTd8u5O0Q==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/read-package-json-fast": { + "version": "3.0.2", + "resolved": "https://registry.npmmirror.com/read-package-json-fast/-/read-package-json-fast-3.0.2.tgz", + "integrity": "sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==", + "dev": true, + "license": "ISC", + "dependencies": { + "json-parse-even-better-errors": "^3.0.0", + "npm-normalize-package-bin": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/read-package-json-fast/node_modules/json-parse-even-better-errors": { + "version": "3.0.2", + "resolved": "https://registry.npmmirror.com/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz", + "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/read-pkg": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": 
"sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "load-json-file": "^4.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/read-pkg-up": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/read-pkg-up/-/read-pkg-up-3.0.0.tgz", + "integrity": "sha512-YFzFrVvpC6frF1sz8psoHDBGF7fLPc+llq/8NB43oagqWkx8ar5zYtsTORtOjw9W2RHLpWP+zTWwBvf1bCmcSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^2.0.0", + "read-pkg": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/read-pkg-up/node_modules/find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/read-pkg-up/node_modules/locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/read-pkg-up/node_modules/p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/read-pkg-up/node_modules/p-locate": { + "version": "2.0.0", + "resolved": 
"https://registry.npmmirror.com/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/read-pkg-up/node_modules/p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/read-pkg-up/node_modules/path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/read-pkg/node_modules/hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmmirror.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true, + "license": "ISC" + }, + "node_modules/read-pkg/node_modules/load-json-file": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.1.2", + "parse-json": "^4.0.0", + "pify": "^3.0.0", + "strip-bom": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/read-pkg/node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmmirror.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": 
"sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/read-pkg/node_modules/parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==", + "dev": true, + "license": "MIT", + "dependencies": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/read-pkg/node_modules/path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "dev": true, + "license": "MIT", + "dependencies": { + "pify": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/read-pkg/node_modules/pify": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/read-pkg/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmmirror.com/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/read-pkg/node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": 
"sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/read/node_modules/mute-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/mute-stream/-/mute-stream-1.0.0.tgz", + "integrity": "sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmmirror.com/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmmirror.com/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/rechoir": { + "version": "0.8.0", + "resolved": "https://registry.npmmirror.com/rechoir/-/rechoir-0.8.0.tgz", + "integrity": "sha512-/vxpCXddiX8NGfGO/mTafwjq4aFa/71pvamip0++IQk3zG8cbCj0fifNPrjjF1XMXUne91jL9OoxmdykoEtifQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve": "^1.20.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmmirror.com/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmmirror.com/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmmirror.com/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": 
"sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/resolve.exports": { + "version": "2.0.3", + "resolved": "https://registry.npmmirror.com/resolve.exports/-/resolve.exports-2.0.3.tgz", + "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/retry": { + "version": "0.12.0", + "resolved": "https://registry.npmmirror.com/retry/-/retry-0.12.0.tgz", + "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": 
{ + "version": "4.4.1", + "resolved": "https://registry.npmmirror.com/rimraf/-/rimraf-4.4.1.tgz", + "integrity": "sha512-Gk8NlF062+T9CqNGn6h4tls3k6T1+/nXdOcSZVikNVtlRdYpA7wRJJMoXmuvOnLW844rPjdQ7JgXCYM6PPC/og==", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^9.2.0" + }, + "bin": { + "rimraf": "dist/cjs/src/bin.js" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/rimraf/node_modules/glob": { + "version": "9.3.4", + "resolved": "https://registry.npmmirror.com/glob/-/glob-9.3.4.tgz", + "integrity": "sha512-qaSc49hojMOv1EPM4EuyITjDSgSKI0rthoHnvE81tcOi1SCVndHko7auqxdQ14eiQG2NDBJBE86+2xIrbIvrbA==", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "minimatch": "^8.0.2", + "minipass": "^4.2.4", + "path-scurry": "^1.6.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/minimatch": { + "version": "8.0.7", + "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-8.0.7.tgz", + "integrity": "sha512-V+1uQNdzybxa14e/p00HZnQNNcTjnRJjDxg2V8wtkjFctq4M7hXFws4oekyTP0Jebeq7QYtpFyOeBAjc88zvYg==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/minipass": { + "version": "4.2.8", + "resolved": "https://registry.npmmirror.com/minipass/-/minipass-4.2.8.tgz", + "integrity": 
"sha512-fNzuVyifolSLFL4NzpF+wEF4qrgqaaKX0haXPQEdQ7NKAN+WecoKMHV09YcuL/DHxrUsYQOK3MiuDf7Ip2OXfQ==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=8" + } + }, + "node_modules/run-async": { + "version": "2.4.1", + "resolved": "https://registry.npmmirror.com/run-async/-/run-async-2.4.1.tgz", + "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rxjs": { + "version": "7.8.2", + "resolved": "https://registry.npmmirror.com/rxjs/-/rxjs-7.8.2.tgz", + "integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmmirror.com/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + 
"resolved": "https://registry.npmmirror.com/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, + "license": "MIT" + }, + "node_modules/schema-utils": { + "version": "4.3.3", + "resolved": "https://registry.npmmirror.com/schema-utils/-/schema-utils-4.3.3.tgz", + "integrity": "sha512-eflK8wEtyOE6+hsaRVPxvUKYCpRgzLqDTb8krvAsRIwOGlHoSgYLgBXoubGgLd2fT41/OUYdb48v4k4WWHQurA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmmirror.com/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/serialize-javascript": { + "version": "6.0.2", + "resolved": "https://registry.npmmirror.com/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "dev": true, + "license": "ISC" + }, + "node_modules/shallow-clone": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/shallow-clone/-/shallow-clone-3.0.1.tgz", + "integrity": 
"sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^6.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shelljs": { + "version": "0.9.2", + "resolved": "https://registry.npmmirror.com/shelljs/-/shelljs-0.9.2.tgz", + "integrity": "sha512-S3I64fEiKgTZzKCC46zT/Ib9meqofLrQVbpSswtjFfAVDW+AZ54WTnAM/3/yENoxz/V1Cy6u3kiiEbQ4DNphvw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "execa": "^1.0.0", + "fast-glob": "^3.3.2", + "interpret": "^1.0.0", + "rechoir": "^0.6.2" + }, + "bin": { + "shjs": "bin/shjs" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/shelljs/node_modules/cross-spawn": { + "version": "6.0.6", + "resolved": "https://registry.npmmirror.com/cross-spawn/-/cross-spawn-6.0.6.tgz", + "integrity": "sha512-VqCUuhcd1iB+dsv8gxPttb5iZh/D0iubSP21g36KXdEuf6I5JiioesUVjpCdHV9MZRUfVFlvwtIUyPfxo5trtw==", + "dev": true, + "license": "MIT", + "dependencies": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "engines": { + "node": ">=4.8" + } + }, + "node_modules/shelljs/node_modules/execa": { + "version": "1.0.0", + 
"resolved": "https://registry.npmmirror.com/execa/-/execa-1.0.0.tgz", + "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^6.0.0", + "get-stream": "^4.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/shelljs/node_modules/get-stream": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", + "dev": true, + "license": "MIT", + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/shelljs/node_modules/interpret": { + "version": "1.4.0", + "resolved": "https://registry.npmmirror.com/interpret/-/interpret-1.4.0.tgz", + "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/shelljs/node_modules/is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shelljs/node_modules/npm-run-path": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/npm-run-path/-/npm-run-path-2.0.2.tgz", + "integrity": "sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/shelljs/node_modules/path-key": { + 
"version": "2.0.1", + "resolved": "https://registry.npmmirror.com/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/shelljs/node_modules/rechoir": { + "version": "0.6.2", + "resolved": "https://registry.npmmirror.com/rechoir/-/rechoir-0.6.2.tgz", + "integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==", + "dev": true, + "dependencies": { + "resolve": "^1.1.6" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/shelljs/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmmirror.com/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/shelljs/node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shelljs/node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shelljs/node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmmirror.com/which/-/which-1.3.1.tgz", + "integrity": 
"sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/shx": { + "version": "0.4.0", + "resolved": "https://registry.npmmirror.com/shx/-/shx-0.4.0.tgz", + "integrity": "sha512-Z0KixSIlGPpijKgcH6oCMCbltPImvaKy0sGH8AkLRXw1KyzpKtaCTizP2xen+hNDqVF4xxgvA0KXSb9o4Q6hnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.8", + "shelljs": "^0.9.2" + }, + "bin": { + "shx": "lib/cli.js" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmmirror.com/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/sigstore": { + "version": "2.3.1", + "resolved": "https://registry.npmmirror.com/sigstore/-/sigstore-2.3.1.tgz", + "integrity": "sha512-8G+/XDU8wNsJOQS5ysDVO0Etg9/2uA5gR9l4ZwijjlwxBcrU6RPfwi2+jJmbP+Ap1Hlp/nVAaEO4Fj22/SL2gQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "@sigstore/sign": "^2.3.2", + "@sigstore/tuf": "^2.3.4", + "@sigstore/verify": "^1.2.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/slash": { + "version": "5.1.0", + "resolved": "https://registry.npmmirror.com/slash/-/slash-5.1.0.tgz", + "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/smart-buffer": { + "version": "4.2.0", + "resolved": 
"https://registry.npmmirror.com/smart-buffer/-/smart-buffer-4.2.0.tgz", + "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/smol-toml": { + "version": "1.6.0", + "resolved": "https://registry.npmmirror.com/smol-toml/-/smol-toml-1.6.0.tgz", + "integrity": "sha512-4zemZi0HvTnYwLfrpk/CF9LOd9Lt87kAt50GnqhMpyF9U3poDAP2+iukq2bZsO/ufegbYehBkqINbsWxj4l4cw==", + "license": "BSD-3-Clause", + "engines": { + "node": ">= 18" + }, + "funding": { + "url": "https://github.com/sponsors/cyyynthia" + } + }, + "node_modules/socks": { + "version": "2.8.7", + "resolved": "https://registry.npmmirror.com/socks/-/socks-2.8.7.tgz", + "integrity": "sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ip-address": "^10.0.1", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks-proxy-agent": { + "version": "8.0.5", + "resolved": "https://registry.npmmirror.com/socks-proxy-agent/-/socks-proxy-agent-8.0.5.tgz", + "integrity": "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "socks": "^2.8.3" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/sort-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/sort-keys/-/sort-keys-2.0.0.tgz", + "integrity": "sha512-/dPCrG1s3ePpWm6yBbxZq5Be1dXGLyLn9Z791chDC3NFrpkVbWGzkBwPN1knaciexFXgRJ7hzdnwZ4stHSDmjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-plain-obj": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/source-list-map": { + "version": "2.0.1", + "resolved": 
"https://registry.npmmirror.com/source-list-map/-/source-list-map-2.0.1.tgz", + "integrity": "sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==", + "dev": true, + "license": "MIT" + }, + "node_modules/source-map": { + "version": "0.7.6", + "resolved": "https://registry.npmmirror.com/source-map/-/source-map-0.7.6.tgz", + "integrity": "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">= 12" + } + }, + "node_modules/source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmmirror.com/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/source-map-support/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmmirror.com/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/spdx-correct": { + "version": "3.2.0", + "resolved": "https://registry.npmmirror.com/spdx-correct/-/spdx-correct-3.2.0.tgz", + "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.5.0", + "resolved": "https://registry.npmmirror.com/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", + "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", + "dev": true, + "license": "CC-BY-3.0" + }, 
+ "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.23", + "resolved": "https://registry.npmmirror.com/spdx-license-ids/-/spdx-license-ids-3.0.23.tgz", + "integrity": "sha512-CWLcCCH7VLu13TgOH+r8p1O/Znwhqv/dbb6lqWy67G+pT1kHmeD/+V36AVb/vq8QMIQwVShJ6Ssl5FPh0fuSdw==", + "dev": true, + "license": "CC0-1.0" + }, + "node_modules/split": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/split/-/split-1.0.1.tgz", + "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "through": "2" + }, + "engines": { + "node": "*" + } + }, + "node_modules/split2": { + "version": "3.2.2", + "resolved": "https://registry.npmmirror.com/split2/-/split2-3.2.2.tgz", + "integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", + "dev": true, + "license": "ISC", + "dependencies": { + "readable-stream": "^3.0.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/ssri": { + "version": "10.0.6", + "resolved": "https://registry.npmmirror.com/ssri/-/ssri-10.0.6.tgz", + "integrity": "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": 
"^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmmirror.com/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/stack-utils/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-length": { + "version": "4.0.2", + "resolved": "https://registry.npmmirror.com/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmmirror.com/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + 
"strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmmirror.com/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmmirror.com/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmmirror.com/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-eof": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/strip-eof/-/strip-eof-1.0.0.tgz", + "integrity": "sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmmirror.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmmirror.com/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/synckit": { + "version": "0.11.12", + "resolved": "https://registry.npmmirror.com/synckit/-/synckit-0.11.12.tgz", + "integrity": "sha512-Bh7QjT8/SuKUIfObSXNHNSK6WHo6J1tHCqJsuaFDP7gP0fkzSfTxI8y85JrppZ0h8l0maIgc2tfuZQ6/t3GtnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@pkgr/core": "^0.2.9" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/synckit" + } + }, + "node_modules/tapable": { + "version": "2.3.0", + "resolved": "https://registry.npmmirror.com/tapable/-/tapable-2.3.0.tgz", + "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmmirror.com/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "dev": true, + "license": "ISC", + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tar-stream": { + "version": "2.2.0", + "resolved": "https://registry.npmmirror.com/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "bl": "^4.0.3", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tar/node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": 
"sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/tar/node_modules/fs-minipass/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmmirror.com/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tar/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmmirror.com/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=8" + } + }, + "node_modules/temp-dir": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/temp-dir/-/temp-dir-1.0.0.tgz", + "integrity": "sha512-xZFXEGbG7SNC3itwBzI3RYjq/cEhBkx2hJuKGIUOcEULmkQExXiHat2z/qkISYsuR+IKumhEfKKbV5qXmhICFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/terser": { + "version": "5.46.0", + "resolved": "https://registry.npmmirror.com/terser/-/terser-5.46.0.tgz", + "integrity": "sha512-jTwoImyr/QbOWFFso3YoU3ik0jBBDJ6JTOQiy/J2YxVJdZCc+5u7skhNwiOR3FQIygFqVUPHl7qbbxtjW2K3Qg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "@jridgewell/source-map": "^0.3.3", + "acorn": "^8.15.0", + "commander": "^2.20.0", + "source-map-support": "~0.5.20" + }, + "bin": { + "terser": "bin/terser" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/terser-webpack-plugin": { + "version": "5.3.16", + "resolved": "https://registry.npmmirror.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.16.tgz", + "integrity": 
"sha512-h9oBFCWrq78NyWWVcSwZarJkZ01c2AyGrzs1crmHZO3QUg9D61Wu4NPjBy69n7JqylFF5y+CsUZYmYEIZ3mR+Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "jest-worker": "^27.4.5", + "schema-utils": "^4.3.0", + "serialize-javascript": "^6.0.2", + "terser": "^5.31.1" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.1.0" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "uglify-js": { + "optional": true + } + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmmirror.com/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/test-exclude/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmmirror.com/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/test-exclude/node_modules/minimatch": { + "version": "3.1.5", + "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-3.1.5.tgz", + "integrity": 
"sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/text-extensions": { + "version": "1.9.0", + "resolved": "https://registry.npmmirror.com/text-extensions/-/text-extensions-1.9.0.tgz", + "integrity": "sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/through": { + "version": "2.3.8", + "resolved": "https://registry.npmmirror.com/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", + "dev": true, + "license": "MIT" + }, + "node_modules/through2": { + "version": "2.0.5", + "resolved": "https://registry.npmmirror.com/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + }, + "node_modules/through2/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmmirror.com/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/through2/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmmirror.com/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": 
"sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true, + "license": "MIT" + }, + "node_modules/through2/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.12", + "resolved": "https://registry.npmmirror.com/tinyglobby/-/tinyglobby-0.2.12.tgz", + "integrity": "sha512-qkf4trmKSIiMTs/E63cxH+ojC2unam7rJ0WrauAzpT3ECNTxGRMlaXxVbfxMUC/w0LaYk6jQ4y/nGR9uBO3tww==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.4.3", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmmirror.com/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmmirror.com/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/tmp": { + "version": "0.2.5", + "resolved": "https://registry.npmmirror.com/tmp/-/tmp-0.2.5.tgz", + "integrity": 
"sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", + "license": "MIT", + "engines": { + "node": ">=14.14" + } + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmmirror.com/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmmirror.com/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmmirror.com/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true, + "license": "MIT" + }, + "node_modules/treeverse": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/treeverse/-/treeverse-3.0.0.tgz", + "integrity": "sha512-gcANaAnd2QDZFmHFEOF4k7uc1J/6a6z3DJMd/QwEyxLoKGiptJRwid582r7QIsFlFMIZ3SnxfS52S4hm2DHkuQ==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/trim-newlines": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/trim-newlines/-/trim-newlines-3.0.1.tgz", + "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ts-jest": { + "version": "29.4.6", + "resolved": "https://registry.npmmirror.com/ts-jest/-/ts-jest-29.4.6.tgz", + "integrity": 
"sha512-fSpWtOO/1AjSNQguk43hb/JCo16oJDnMJf3CdEGNkqsEX3t0KX96xvyX1D7PfLCpVoKu4MfVrqUkFyblYoY4lA==", + "dev": true, + "license": "MIT", + "dependencies": { + "bs-logger": "^0.2.6", + "fast-json-stable-stringify": "^2.1.0", + "handlebars": "^4.7.8", + "json5": "^2.2.3", + "lodash.memoize": "^4.1.2", + "make-error": "^1.3.6", + "semver": "^7.7.3", + "type-fest": "^4.41.0", + "yargs-parser": "^21.1.1" + }, + "bin": { + "ts-jest": "cli.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0" + }, + "peerDependencies": { + "@babel/core": ">=7.0.0-beta.0 <8", + "@jest/transform": "^29.0.0 || ^30.0.0", + "@jest/types": "^29.0.0 || ^30.0.0", + "babel-jest": "^29.0.0 || ^30.0.0", + "jest": "^29.0.0 || ^30.0.0", + "jest-util": "^29.0.0 || ^30.0.0", + "typescript": ">=4.3 <6" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "@jest/transform": { + "optional": true + }, + "@jest/types": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "jest-util": { + "optional": true + } + } + }, + "node_modules/ts-jest/node_modules/type-fest": { + "version": "4.41.0", + "resolved": "https://registry.npmmirror.com/type-fest/-/type-fest-4.41.0.tgz", + "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ts-loader": { + "version": "9.5.4", + "resolved": "https://registry.npmmirror.com/ts-loader/-/ts-loader-9.5.4.tgz", + "integrity": "sha512-nCz0rEwunlTZiy6rXFByQU1kVVpCIgUpc/psFiKVrUwrizdnIbRFu8w7bxhUF0X613DYwT4XzrZHpVyMe758hQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.0", + "enhanced-resolve": "^5.0.0", + "micromatch": "^4.0.0", + "semver": "^7.3.4", + "source-map": "^0.7.4" + }, + "engines": { + "node": ">=12.0.0" + }, + 
"peerDependencies": { + "typescript": "*", + "webpack": "^5.0.0" + } + }, + "node_modules/tsconfig-paths": { + "version": "4.2.0", + "resolved": "https://registry.npmmirror.com/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz", + "integrity": "sha512-NoZ4roiN7LnbKn9QqE1amc9DJfzvZXxF4xDavcOWt1BPkdx+m+0gJuPM+S0vCe7zTJMYUP0R8pO2XMr+Y8oLIg==", + "dev": true, + "license": "MIT", + "dependencies": { + "json5": "^2.2.2", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tsconfig-paths/node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD" + }, + "node_modules/tuf-js": { + "version": "2.2.1", + "resolved": "https://registry.npmmirror.com/tuf-js/-/tuf-js-2.2.1.tgz", + "integrity": "sha512-GwIJau9XaA8nLVbUXsN3IlFi7WmQ48gBUrl3FTkkL/XLu/POhBzfmX9hd33FNMX1qAsfl6ozO1iMmW9NC8YniA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tufjs/models": "2.0.1", + "debug": "^4.3.4", + "make-fetch-happen": "^13.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmmirror.com/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.21.3", + "resolved": 
"https://registry.npmmirror.com/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmmirror.com/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmmirror.com/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/typical": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/typical/-/typical-4.0.0.tgz", + "integrity": "sha512-VAH4IvQ7BDFYglMd7BPRDfLgxZZX4O4TFcRDA6EN5X7erNJJq+McIEp8np9aVtxrCJ6qx4GTYVfOWNjcqwZgRw==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/uglify-js": { + "version": "3.19.3", + "resolved": "https://registry.npmmirror.com/uglify-js/-/uglify-js-3.19.3.tgz", + "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", + "dev": true, + "license": "BSD-2-Clause", + "optional": true, + "bin": { + "uglifyjs": "bin/uglifyjs" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmmirror.com/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": 
true, + "license": "MIT" + }, + "node_modules/unicorn-magic": { + "version": "0.3.0", + "resolved": "https://registry.npmmirror.com/unicorn-magic/-/unicorn-magic-0.3.0.tgz", + "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/unique-filename": { + "version": "3.0.0", + "resolved": "https://registry.npmmirror.com/unique-filename/-/unique-filename-3.0.0.tgz", + "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==", + "dev": true, + "license": "ISC", + "dependencies": { + "unique-slug": "^4.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/unique-slug": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/unique-slug/-/unique-slug-4.0.0.tgz", + "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/universal-user-agent": { + "version": "6.0.1", + "resolved": "https://registry.npmmirror.com/universal-user-agent/-/universal-user-agent-6.0.1.tgz", + "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/unrs-resolver": { + "version": "1.11.1", + "resolved": 
"https://registry.npmmirror.com/unrs-resolver/-/unrs-resolver-1.11.1.tgz", + "integrity": "sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "napi-postinstall": "^0.3.0" + }, + "funding": { + "url": "https://opencollective.com/unrs-resolver" + }, + "optionalDependencies": { + "@unrs/resolver-binding-android-arm-eabi": "1.11.1", + "@unrs/resolver-binding-android-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-x64": "1.11.1", + "@unrs/resolver-binding-freebsd-x64": "1.11.1", + "@unrs/resolver-binding-linux-arm-gnueabihf": "1.11.1", + "@unrs/resolver-binding-linux-arm-musleabihf": "1.11.1", + "@unrs/resolver-binding-linux-arm64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-arm64-musl": "1.11.1", + "@unrs/resolver-binding-linux-ppc64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-musl": "1.11.1", + "@unrs/resolver-binding-linux-s390x-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-musl": "1.11.1", + "@unrs/resolver-binding-wasm32-wasi": "1.11.1", + "@unrs/resolver-binding-win32-arm64-msvc": "1.11.1", + "@unrs/resolver-binding-win32-ia32-msvc": "1.11.1", + "@unrs/resolver-binding-win32-x64-msvc": "1.11.1" + } + }, + "node_modules/upath": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/upath/-/upath-2.0.1.tgz", + "integrity": "sha512-1uEe95xksV1O0CYKXo8vQvN1JEbtJp7lb7C5U9HMsIp6IVwntkH/oNUzyVNQSd4S1sYk2FpSSW44FqMc8qee5w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4", + "yarn": "*" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmmirror.com/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": 
"sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmmirror.com/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/v8-to-istanbul": { + "version": "9.3.0", + "resolved": "https://registry.npmmirror.com/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", + "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", + "dev": true, + "license": "ISC", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmmirror.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": 
"sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/validate-npm-package-name": { + "version": "5.0.1", + "resolved": "https://registry.npmmirror.com/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz", + "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/vscode-jsonrpc": { + "version": "9.0.0-next.11", + "resolved": "https://registry.npmmirror.com/vscode-jsonrpc/-/vscode-jsonrpc-9.0.0-next.11.tgz", + "integrity": "sha512-u6LElQNbSiE9OugEEmrUKwH6+8BpPz2S5MDHvQUqHL//I4Q8GPikKLOUf856UnbLkZdhxaPrExac1lA3XwpIPA==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/vscode-languageserver": { + "version": "10.0.0-next.16", + "resolved": "https://registry.npmmirror.com/vscode-languageserver/-/vscode-languageserver-10.0.0-next.16.tgz", + "integrity": "sha512-RbsYDOhddv1NtBCAR7+oVxxCmOpQUHhrtgUE0xz6J+BJGSCkfOqBCyLUIwSjKk2rK9llxUj/pR5aL8QCsXrxow==", + "license": "MIT", + "dependencies": { + "vscode-languageserver-protocol": "3.17.6-next.16" + }, + "bin": { + "installServerIntoExtension": "bin/installServerIntoExtension" + } + }, + "node_modules/vscode-languageserver-protocol": { + "version": "3.17.6-next.16", + "resolved": "https://registry.npmmirror.com/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.6-next.16.tgz", + "integrity": "sha512-kQTjXEuyxMbdmmZ3U+Lib3oUl12xEKNc73RtWxPSDS3TFtjVwt98Q1CUzfDA9EUpsA24M46Bl6q3sLe9AUOKyw==", + "license": "MIT", + "dependencies": { + "vscode-jsonrpc": "9.0.0-next.11", + "vscode-languageserver-types": "3.17.6-next.6" + } + }, + "node_modules/vscode-languageserver-textdocument": { + "version": "1.0.12", + 
"resolved": "https://registry.npmmirror.com/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz", + "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==", + "license": "MIT" + }, + "node_modules/vscode-languageserver-types": { + "version": "3.17.6-next.6", + "resolved": "https://registry.npmmirror.com/vscode-languageserver-types/-/vscode-languageserver-types-3.17.6-next.6.tgz", + "integrity": "sha512-aiJY5/yW+xzw7KPNlwi3gQtddq/3EIn5z8X8nCgJfaiAij2R1APKePngv+MUdLdYJBVTLu+Qa0ODsT+pHgYguQ==", + "license": "MIT" + }, + "node_modules/vscode-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmmirror.com/vscode-uri/-/vscode-uri-3.1.0.tgz", + "integrity": "sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==", + "license": "MIT" + }, + "node_modules/walk-up-path": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/walk-up-path/-/walk-up-path-3.0.1.tgz", + "integrity": "sha512-9YlCL/ynK3CTlrSRrDxZvUauLzAswPCrsaCgilqFevUYpeEW0/3ScEjaa3kbW/T0ghhkEr7mv+fpjqn1Y1YuTA==", + "dev": true, + "license": "ISC" + }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmmirror.com/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "makeerror": "1.0.12" + } + }, + "node_modules/watchpack": { + "version": "2.5.1", + "resolved": "https://registry.npmmirror.com/watchpack/-/watchpack-2.5.1.tgz", + "integrity": "sha512-Zn5uXdcFNIA1+1Ei5McRd+iRzfhENPCe7LeABkJtNulSxjma+l7ltNx55BWZkRlwRnpOgHqxnjyaDgJnNXnqzg==", + "dev": true, + "license": "MIT", + "dependencies": { + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.1.2" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/wcwidth": { + "version": "1.0.1", + "resolved": 
"https://registry.npmmirror.com/wcwidth/-/wcwidth-1.0.1.tgz", + "integrity": "sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==", + "dev": true, + "license": "MIT", + "dependencies": { + "defaults": "^1.0.3" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/webpack": { + "version": "5.105.3", + "resolved": "https://registry.npmmirror.com/webpack/-/webpack-5.105.3.tgz", + "integrity": "sha512-LLBBA4oLmT7sZdHiYE/PeVuifOxYyE2uL/V+9VQP7YSYdJU7bSf7H8bZRRxW8kEPMkmVjnrXmoR3oejIdX0xbg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/eslint-scope": "^3.7.7", + "@types/estree": "^1.0.8", + "@types/json-schema": "^7.0.15", + "@webassemblyjs/ast": "^1.14.1", + "@webassemblyjs/wasm-edit": "^1.14.1", + "@webassemblyjs/wasm-parser": "^1.14.1", + "acorn": "^8.16.0", + "acorn-import-phases": "^1.0.3", + "browserslist": "^4.28.1", + "chrome-trace-event": "^1.0.2", + "enhanced-resolve": "^5.19.0", + "es-module-lexer": "^2.0.0", + "eslint-scope": "5.1.1", + "events": "^3.2.0", + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.2.11", + "json-parse-even-better-errors": "^2.3.1", + "loader-runner": "^4.3.1", + "mime-types": "^2.1.27", + "neo-async": "^2.6.2", + "schema-utils": "^4.3.3", + "tapable": "^2.3.0", + "terser-webpack-plugin": "^5.3.16", + "watchpack": "^2.5.1", + "webpack-sources": "^3.3.4" + }, + "bin": { + "webpack": "bin/webpack.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependenciesMeta": { + "webpack-cli": { + "optional": true + } + } + }, + "node_modules/webpack-cli": { + "version": "5.1.4", + "resolved": 
"https://registry.npmmirror.com/webpack-cli/-/webpack-cli-5.1.4.tgz", + "integrity": "sha512-pIDJHIEI9LR0yxHXQ+Qh95k2EvXpWzZ5l+d+jIo+RdSm9MiHfzazIxwwni/p7+x4eJZuvG1AJwgC4TNQ7NRgsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@discoveryjs/json-ext": "^0.5.0", + "@webpack-cli/configtest": "^2.1.1", + "@webpack-cli/info": "^2.0.2", + "@webpack-cli/serve": "^2.0.5", + "colorette": "^2.0.14", + "commander": "^10.0.1", + "cross-spawn": "^7.0.3", + "envinfo": "^7.7.3", + "fastest-levenshtein": "^1.0.12", + "import-local": "^3.0.2", + "interpret": "^3.1.1", + "rechoir": "^0.8.0", + "webpack-merge": "^5.7.3" + }, + "bin": { + "webpack-cli": "bin/cli.js" + }, + "engines": { + "node": ">=14.15.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "5.x.x" + }, + "peerDependenciesMeta": { + "@webpack-cli/generators": { + "optional": true + }, + "webpack-bundle-analyzer": { + "optional": true + }, + "webpack-dev-server": { + "optional": true + } + } + }, + "node_modules/webpack-cli/node_modules/commander": { + "version": "10.0.1", + "resolved": "https://registry.npmmirror.com/commander/-/commander-10.0.1.tgz", + "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/webpack-merge": { + "version": "5.10.0", + "resolved": "https://registry.npmmirror.com/webpack-merge/-/webpack-merge-5.10.0.tgz", + "integrity": "sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "clone-deep": "^4.0.1", + "flat": "^5.0.2", + "wildcard": "^2.0.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/webpack-sources": { + "version": "1.4.3", + "resolved": "https://registry.npmmirror.com/webpack-sources/-/webpack-sources-1.4.3.tgz", + 
"integrity": "sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "source-list-map": "^2.0.0", + "source-map": "~0.6.1" + } + }, + "node_modules/webpack-sources/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmmirror.com/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack/node_modules/webpack-sources": { + "version": "3.3.4", + "resolved": "https://registry.npmmirror.com/webpack-sources/-/webpack-sources-3.3.4.tgz", + "integrity": "sha512-7tP1PdV4vF+lYPnkMR0jMY5/la2ub5Fc/8VQrrU+lXkiM6C4TjVfGw7iKfyhnTQOsD+6Q/iKw0eFciziRgD58Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmmirror.com/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmmirror.com/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "dev": true, 
+ "license": "ISC", + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "node_modules/wildcard": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/wildcard/-/wildcard-2.0.1.tgz", + "integrity": "sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmmirror.com/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmmirror.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmmirror.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": 
"https://registry.npmmirror.com/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/write-file-atomic": { + "version": "5.0.1", + "resolved": "https://registry.npmmirror.com/write-file-atomic/-/write-file-atomic-5.0.1.tgz", + "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/write-file-atomic/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmmirror.com/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/write-json-file": { + "version": "3.2.0", + "resolved": "https://registry.npmmirror.com/write-json-file/-/write-json-file-3.2.0.tgz", + "integrity": "sha512-3xZqT7Byc2uORAatYiP3DHUUAVEkNOswEWNs9H5KXiicRTvzYzYqKjYc4G7p+8pltvAw641lVByKVtMpf+4sYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "detect-indent": "^5.0.0", + "graceful-fs": "^4.1.15", + "make-dir": "^2.1.0", + "pify": "^4.0.1", + "sort-keys": "^2.0.0", + "write-file-atomic": "^2.4.2" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/write-json-file/node_modules/make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pify": "^4.0.1", + "semver": "^5.6.0" + }, + "engines": { 
+ "node": ">=6" + } + }, + "node_modules/write-json-file/node_modules/pify": { + "version": "4.0.1", + "resolved": "https://registry.npmmirror.com/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/write-json-file/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmmirror.com/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/write-json-file/node_modules/write-file-atomic": { + "version": "2.4.3", + "resolved": "https://registry.npmmirror.com/write-file-atomic/-/write-file-atomic-2.4.3.tgz", + "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "graceful-fs": "^4.1.11", + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.2" + } + }, + "node_modules/write-pkg": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/write-pkg/-/write-pkg-4.0.0.tgz", + "integrity": "sha512-v2UQ+50TNf2rNHJ8NyWttfm/EJUBWMJcx6ZTYZr6Qp52uuegWw/lBkCtCbnYZEmPRNL61m+u67dAmGxo+HTULA==", + "dev": true, + "license": "MIT", + "dependencies": { + "sort-keys": "^2.0.0", + "type-fest": "^0.4.1", + "write-json-file": "^3.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/write-pkg/node_modules/type-fest": { + "version": "0.4.1", + "resolved": "https://registry.npmmirror.com/type-fest/-/type-fest-0.4.1.tgz", + "integrity": "sha512-IwzA/LSfD2vC1/YDYMv/zHP4rDF1usCwllsDpbolT3D4fUepIO7f9K70jjmUewU/LmGUKJcwcVtDCpnKk4BPMw==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=6" + } + }, + "node_modules/xml": { + "version": "1.0.1", + "resolved": 
"https://registry.npmmirror.com/xml/-/xml-1.0.1.tgz", + "integrity": "sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw==", + "dev": true, + "license": "MIT" + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmmirror.com/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmmirror.com/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true, + "license": "ISC" + }, + "node_modules/yaml": { + "version": "2.8.2", + "resolved": "https://registry.npmmirror.com/yaml/-/yaml-2.8.2.tgz", + "integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==", + "dev": true, + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + }, + "funding": { + "url": "https://github.com/sponsors/eemeli" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmmirror.com/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + 
"engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmmirror.com/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmmirror.com/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "packages/pyright-internal": { + "version": "1.1.408", + "license": "MIT", + "dependencies": { + "@yarnpkg/fslib": "2.10.4", + "@yarnpkg/libzip": "2.3.0", + "chalk": "^4.1.2", + "chokidar": "^3.6.0", + "command-line-args": "^5.2.1", + "jsonc-parser": "^3.2.0", + "smol-toml": "^1.4.2", + "source-map-support": "^0.5.21", + "tmp": "^0.2.5", + "vscode-jsonrpc": "^9.0.0-next.8", + "vscode-languageserver": "^10.0.0-next.13", + "vscode-languageserver-protocol": "^3.17.6-next.13", + "vscode-languageserver-textdocument": "^1.0.11", + "vscode-languageserver-types": "^3.17.6-next.6", + "vscode-uri": "^3.1.0" + }, + "devDependencies": { + "@types/command-line-args": "^5.2.3", + "@types/fs-extra": "^11.0.4", + "@types/jest": "^30.0.0", + "@types/lodash": "^4.17.20", + "@types/node": "^22.18.12", + "@types/tmp": "^0.2.6", + "copy-webpack-plugin": "^12.0.2", + "esbuild-loader": "^4.4.0", + "jest": "^30.2.0", + "jest-junit": "^16.0.0", + "shx": "^0.4.0", + "ts-jest": "^29.4.5", + "ts-loader": "^9.5.4", + "typescript": "~5.5.4", + "webpack": "^5.102.1", + "webpack-cli": "^5.1.4", + "word-wrap": "1.2.5" + } + }, + "packages/pyright-internal/node_modules/@types/node": { + "version": "22.19.13", + "resolved": 
"https://registry.npmmirror.com/@types/node/-/node-22.19.13.tgz", + "integrity": "sha512-akNQMv0wW5uyRpD2v2IEyRSZiR+BeGuoB6L310EgGObO44HSMNT8z1xzio28V8qOrgYaopIDNA18YgdXd+qTiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "packages/pyright-internal/node_modules/typescript": { + "version": "5.5.4", + "resolved": "https://registry.npmmirror.com/typescript/-/typescript-5.5.4.tgz", + "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "packages/python-parser": { + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "pyright": "^1.1.408" + }, + "bin": { + "python-parser": "dist/index.js" + }, + "devDependencies": { + "@types/node": "^20.19.35", + "typescript": "^5.0.0" + } + } + } +} diff --git a/python-parser/package.json b/python-parser/package.json new file mode 100644 index 00000000..27687f3b --- /dev/null +++ b/python-parser/package.json @@ -0,0 +1,17 @@ +{ + "name": "python-parser", + "version": "1.0.0", + "private": true, + "workspaces": ["packages/*"], + "description": "Python repository parser using pyright-internal", + "bin": { + "python-parser": "packages/pyright-internal/out/src/pythonParser/index.js" + }, + "scripts": { + "build": "npm run build -w packages/pyright-internal", + "clean": "npm run clean -w packages/pyright-internal" + }, + "devDependencies": { + "lerna": "^8.2.4" + } +} diff --git a/python-parser/packages/pyright-internal/package-lock.json b/python-parser/packages/pyright-internal/package-lock.json new file mode 100644 index 00000000..80d2c8d3 --- /dev/null +++ b/python-parser/packages/pyright-internal/package-lock.json @@ -0,0 +1,10177 @@ +{ + "name": "pyright-internal", + "version": "1.1.408", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": 
"pyright-internal", + "version": "1.1.408", + "license": "MIT", + "dependencies": { + "@yarnpkg/fslib": "2.10.4", + "@yarnpkg/libzip": "2.3.0", + "chalk": "^4.1.2", + "chokidar": "^3.6.0", + "command-line-args": "^5.2.1", + "jsonc-parser": "^3.2.0", + "smol-toml": "^1.4.2", + "source-map-support": "^0.5.21", + "tmp": "^0.2.5", + "vscode-jsonrpc": "^9.0.0-next.8", + "vscode-languageserver": "^10.0.0-next.13", + "vscode-languageserver-protocol": "^3.17.6-next.13", + "vscode-languageserver-textdocument": "^1.0.11", + "vscode-languageserver-types": "^3.17.6-next.6", + "vscode-uri": "^3.1.0" + }, + "devDependencies": { + "@types/command-line-args": "^5.2.3", + "@types/fs-extra": "^11.0.4", + "@types/jest": "^30.0.0", + "@types/lodash": "^4.17.20", + "@types/node": "^22.18.12", + "@types/tmp": "^0.2.6", + "copy-webpack-plugin": "^12.0.2", + "esbuild-loader": "^4.4.0", + "jest": "^30.2.0", + "jest-junit": "^16.0.0", + "shx": "^0.4.0", + "ts-jest": "^29.4.5", + "ts-loader": "^9.5.4", + "typescript": "~5.5.4", + "webpack": "^5.102.1", + "webpack-cli": "^5.1.4", + "word-wrap": "1.2.5" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + 
"node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.0.tgz", + "integrity": "sha512-60X7qkglvrap8mn1lh2ebxXdZYtUcpd7gsmy9kLaBJ4i/WdY8PqTSdxyA8qraikqKQK5C1KRBKXqznrVapyNaw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.0.tgz", + "integrity": "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.0", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-module-transforms": "^7.27.3", + "@babel/helpers": "^7.27.6", + "@babel/parser": "^7.28.0", + "@babel/template": "^7.27.2", + "@babel/traverse": "^7.28.0", + "@babel/types": "^7.28.0", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.0.tgz", + "integrity": "sha512-lJjzvrbEeWrhB4P3QBsH7tey117PjLZnDbLiQEKjQ/fNJTjuq4HSqgFA+UNSwZT8D7dxxbnuSBMsa1lrWzKlQg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.0", + "@babel/types": "^7.28.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.27.2", + "resolved": 
"https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", + "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.27.2", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", + "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.27.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.27.3.tgz", + "integrity": "sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1", + "@babel/traverse": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.27.1", + "resolved": 
"https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", + "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.27.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.6.tgz", + "integrity": "sha512-muE8Tt8M22638HU31A3CgfSUciwz1fhATfoVai05aPXGor//CdWDCbnlY1yvBPo07njuVOCNGCSp/GTt12lIug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.27.2", + "@babel/types": "^7.27.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.0.tgz", + "integrity": 
"sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": 
"sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.27.1.tgz", + "integrity": "sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz", + "integrity": 
"sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": 
"7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", + "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.0.tgz", + "integrity": "sha512-mGe7UK5wWyh0bKRfupsUchrQGqvDbZDbKJw+kcRGSmdHVYrv+ltd0pnpDTVpiTqnaBru9iEvA8pz8W46v0Amwg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.0", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/@babel/types": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.0.tgz", + "integrity": "sha512-jYnje+JyZG5YThjHiF28oT4SIZLnYOcSBb6+SDaFIyzDVSkXQmQQYclJ2R+YxcdmK0AX6x1E5OQNtuh3jHDrUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@discoveryjs/json-ext": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz", + "integrity": "sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@emnapi/core": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.5.0.tgz", + "integrity": "sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.1.0", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/core/node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD", + "optional": true + }, + "node_modules/@emnapi/runtime": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.5.0.tgz", + "integrity": 
"sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime/node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD", + "optional": true + }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.1.0.tgz", + "integrity": "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/wasi-threads/node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD", + "optional": true + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.10.tgz", + "integrity": "sha512-0NFWnA+7l41irNuaSVlLfgNT12caWJVLzp5eAVhZ0z1qpxbockccEt3s+149rE64VUI3Ml2zt8Nv5JVc4QXTsw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.10.tgz", + "integrity": "sha512-dQAxF1dW1C3zpeCDc5KqIYuZ1tgAdRXNoZP7vkBIRtKZPYe2xVr/d3SkirklCHudW1B45tGiUlz2pUWDfbDD4w==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + 
], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.10.tgz", + "integrity": "sha512-LSQa7eDahypv/VO6WKohZGPSJDq5OVOo3UoFR1E4t4Gj1W7zEQMUhI+lo81H+DtB+kP+tDgBp+M4oNCwp6kffg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.10.tgz", + "integrity": "sha512-MiC9CWdPrfhibcXwr39p9ha1x0lZJ9KaVfvzA0Wxwz9ETX4v5CHfF09bx935nHlhi+MxhA63dKRRQLiVgSUtEg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.10.tgz", + "integrity": "sha512-JC74bdXcQEpW9KkV326WpZZjLguSZ3DfS8wrrvPMHgQOIEIG/sPXEN/V8IssoJhbefLRcRqw6RQH2NnpdprtMA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.10.tgz", + "integrity": "sha512-tguWg1olF6DGqzws97pKZ8G2L7Ig1vjDmGTwcTuYHbuU6TTjJe5FXbgs5C1BBzHbJ2bo1m3WkQDbWO2PvamRcg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.10.tgz", + "integrity": "sha512-3ZioSQSg1HT2N05YxeJWYR+Libe3bREVSdWhEEgExWaDtyFbbXWb49QgPvFH8u03vUPX10JhJPcz7s9t9+boWg==", + 
"cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.10.tgz", + "integrity": "sha512-LLgJfHJk014Aa4anGDbh8bmI5Lk+QidDmGzuC2D+vP7mv/GeSN+H39zOf7pN5N8p059FcOfs2bVlrRr4SK9WxA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.10.tgz", + "integrity": "sha512-oR31GtBTFYCqEBALI9r6WxoU/ZofZl962pouZRTEYECvNF/dtXKku8YXcJkhgK/beU+zedXfIzHijSRapJY3vg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.10.tgz", + "integrity": "sha512-5luJWN6YKBsawd5f9i4+c+geYiVEw20FVW5x0v1kEMWNq8UctFjDiMATBxLvmmHA4bf7F6hTRaJgtghFr9iziQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.10.tgz", + "integrity": "sha512-NrSCx2Kim3EnnWgS4Txn0QGt0Xipoumb6z6sUtl5bOEZIVKhzfyp/Lyw4C1DIYvzeW/5mWYPBFJU3a/8Yr75DQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.10.tgz", + "integrity": 
"sha512-xoSphrd4AZda8+rUDDfD9J6FUMjrkTz8itpTITM4/xgerAZZcFW7Dv+sun7333IfKxGG8gAq+3NbfEMJfiY+Eg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.10.tgz", + "integrity": "sha512-ab6eiuCwoMmYDyTnyptoKkVS3k8fy/1Uvq7Dj5czXI6DF2GqD2ToInBI0SHOp5/X1BdZ26RKc5+qjQNGRBelRA==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.10.tgz", + "integrity": "sha512-NLinzzOgZQsGpsTkEbdJTCanwA5/wozN9dSgEl12haXJBzMTpssebuXR42bthOF3z7zXFWH1AmvWunUCkBE4EA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.10.tgz", + "integrity": "sha512-FE557XdZDrtX8NMIeA8LBJX3dC2M8VGXwfrQWU7LB5SLOajfJIxmSdyL/gU1m64Zs9CBKvm4UAuBp5aJ8OgnrA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.10.tgz", + "integrity": "sha512-3BBSbgzuB9ajLoVZk0mGu+EHlBwkusRmeNYdqmznmMc9zGASFjSsxgkNsqmXugpPk00gJ0JNKh/97nxmjctdew==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.10", 
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.10.tgz", + "integrity": "sha512-QSX81KhFoZGwenVyPoberggdW1nrQZSvfVDAIUXr3WqLRZGZqWk/P4T8p2SP+de2Sr5HPcvjhcJzEiulKgnxtA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.10.tgz", + "integrity": "sha512-AKQM3gfYfSW8XRk8DdMCzaLUFB15dTrZfnX8WXQoOUpUBQ+NaAFCP1kPS/ykbbGYz7rxn0WS48/81l9hFl3u4A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.10.tgz", + "integrity": "sha512-7RTytDPGU6fek/hWuN9qQpeGPBZFfB4zZgcz2VK2Z5VpdUxEI8JKYsg3JfO0n/Z1E/6l05n0unDCNc4HnhQGig==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.10.tgz", + "integrity": "sha512-5Se0VM9Wtq797YFn+dLimf2Zx6McttsH2olUBsDml+lm0GOCRVebRWUvDtkY4BWYv/3NgzS8b/UM3jQNh5hYyw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.10.tgz", + "integrity": "sha512-XkA4frq1TLj4bEMB+2HnI0+4RnjbuGZfet2gs/LNs5Hc7D89ZQBHQ0gL2ND6Lzu1+QVkjp3x1gIcPKzRNP8bXw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + 
"node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.10.tgz", + "integrity": "sha512-AVTSBhTX8Y/Fz6OmIVBip9tJzZEUcY8WLh7I59+upa5/GPhh2/aM6bvOMQySspnCCHvFi79kMtdJS1w0DXAeag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.10.tgz", + "integrity": "sha512-fswk3XT0Uf2pGJmOpDB7yknqhVkJQkAQOcW/ccVOtfx05LkbWOaRAtn5SaqXypeKQra1QaEa841PgrSL9ubSPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.10.tgz", + "integrity": "sha512-ah+9b59KDTSfpaCg6VdJoOQvKjI33nTaQr4UluQwW7aEwZQsbMCfTmfEO4VyewOxx4RaDT/xCy9ra2GPWmO7Kw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.10.tgz", + "integrity": "sha512-QHPDbKkrGO8/cz9LKVnJU22HOi4pxZnZhhA2HYHez5Pz4JeffhDjf85E57Oyco163GnzNCVkZK0b/n4Y0UHcSw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.10.tgz", + "integrity": "sha512-9KpxSVFCu0iK1owoez6aC/s/EdUQLDN3adTxGCqxMVhrPDj6bt5dbrHDXUuq+Bs2vATFBBrQS5vdQ/Ed2P+nbw==", + "cpu": [ + "x64" + ], + 
"dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + 
"version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-30.2.0.tgz", + "integrity": "sha512-+O1ifRjkvYIkBqASKWgLxrpEhQAAE7hY77ALLUufSk5717KfOShg6IbqLmdsLMPdUiFvA2kTs0R7YZy+l0IzZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "jest-message-util": "30.2.0", + "jest-util": "30.2.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/console/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/console/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/console/node_modules/@sinclair/typebox": { + "version": "0.34.41", + 
"resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jest/console/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/console/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/@jest/console/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + 
"node_modules/@jest/core": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-30.2.0.tgz", + "integrity": "sha512-03W6IhuhjqTlpzh/ojut/pDB2LPRygyWX8ExpgHtQA8H/3K7+1vKmcINx5UzeOX1se6YEsBsOHQ1CRzf3fOwTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "30.2.0", + "@jest/pattern": "30.0.1", + "@jest/reporters": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "ansi-escapes": "^4.3.2", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "exit-x": "^0.2.2", + "graceful-fs": "^4.2.11", + "jest-changed-files": "30.2.0", + "jest-config": "30.2.0", + "jest-haste-map": "30.2.0", + "jest-message-util": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.2.0", + "jest-resolve-dependencies": "30.2.0", + "jest-runner": "30.2.0", + "jest-runtime": "30.2.0", + "jest-snapshot": "30.2.0", + "jest-util": "30.2.0", + "jest-validate": "30.2.0", + "jest-watcher": "30.2.0", + "micromatch": "^4.0.8", + "pretty-format": "30.2.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/core/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/core/node_modules/@jest/transform": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-30.2.0.tgz", + "integrity": 
"sha512-XsauDV82o5qXbhalKxD7p4TZYYdwcaEXC77PPD2HixEFF+6YGppjrAAQurTl2ECWcEomHBMMNS9AH3kcCFx8jA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/types": "30.2.0", + "@jridgewell/trace-mapping": "^0.3.25", + "babel-plugin-istanbul": "^7.0.1", + "chalk": "^4.1.2", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "micromatch": "^4.0.8", + "pirates": "^4.0.7", + "slash": "^3.0.0", + "write-file-atomic": "^5.0.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/core/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/core/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jest/core/node_modules/babel-plugin-istanbul": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-7.0.1.tgz", + "integrity": "sha512-D8Z6Qm8jCvVXtIRkBnqNHX0zJ37rQcFJ9u8WOS6tkYOsRdHBzypCstaxWiu5ZIlqQtviRYbgnRLSoCEvjqcqbA==", + "dev": true, + "license": "BSD-3-Clause", + "workspaces": [ + 
"test/babel-8" + ], + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-instrument": "^6.0.2", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@jest/core/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/core/node_modules/jest-haste-map": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-30.2.0.tgz", + "integrity": "sha512-sQA/jCb9kNt+neM0anSj6eZhLZUIhQgwDt7cPGjumgLM4rXsfb9kpnlacmvZz3Q5tb80nS+oG/if+NBKrHC+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "anymatch": "^3.1.3", + "fb-watchman": "^2.0.2", + "graceful-fs": "^4.2.11", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "jest-worker": "30.2.0", + "micromatch": "^4.0.8", + "walker": "^1.0.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.3" + } + }, + "node_modules/@jest/core/node_modules/jest-regex-util": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/core/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + 
"integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/core/node_modules/jest-worker": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-30.2.0.tgz", + "integrity": "sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@ungap/structured-clone": "^1.3.0", + "jest-util": "30.2.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/core/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/@jest/core/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@jest/core/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": 
"sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/core/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/@jest/core/node_modules/write-file-atomic": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", + "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@jest/diff-sequences": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/@jest/diff-sequences/-/diff-sequences-30.0.1.tgz", + "integrity": "sha512-n5H8QLDJ47QqbCNn5SuFjCRDrOLEZ0h8vAHCK5RL9Ls7Xa8AQLa/YxAc9UjFqoEDM48muwtBGjtMY5cr0PLDCw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/environment": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-30.2.0.tgz", + "integrity": "sha512-/QPTL7OBJQ5ac09UDRa3EQes4gt1FTEG/8jZ/4v5IVzx+Cv7dLxlVIvfvSVRiiX2drWyXeBjkMSR8hvOWSog5g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/fake-timers": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "jest-mock": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || 
^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/environment/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/environment/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/environment/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jest/expect": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-30.2.0.tgz", + "integrity": "sha512-V9yxQK5erfzx99Sf+7LbhBwNWEZ9eZay8qQ9+JSC0TrMR1pMDHLMY+BnVPacWU6Jamrh252/IKo4F1Xn/zfiqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "30.2.0", + "jest-snapshot": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/expect-utils": { + "version": "30.2.0", + "resolved": 
"https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-30.2.0.tgz", + "integrity": "sha512-1JnRfhqpD8HGpOmQp180Fo9Zt69zNtC+9lR+kT7NVL05tNXIi+QC8Csz7lfidMoVLPD3FnOtcmp0CEFnxExGEA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/fake-timers": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-30.2.0.tgz", + "integrity": "sha512-HI3tRLjRxAbBy0VO8dqqm7Hb2mIa8d5bg/NJkyQcOk7V118ObQML8RC5luTF/Zsg4474a+gDvhce7eTnP4GhYw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@sinonjs/fake-timers": "^13.0.0", + "@types/node": "*", + "jest-message-util": "30.2.0", + "jest-mock": "30.2.0", + "jest-util": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/fake-timers/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/fake-timers/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + 
} + }, + "node_modules/@jest/fake-timers/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jest/fake-timers/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/fake-timers/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/fake-timers/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/@jest/get-type": { + "version": "30.1.0", + "resolved": "https://registry.npmjs.org/@jest/get-type/-/get-type-30.1.0.tgz", + "integrity": 
"sha512-eMbZE2hUnx1WV0pmURZY9XoXPkUYjpc55mb0CrhtdWLtzMQPFvu/rZkTLZFTsdaVQa+Tr4eWAteqcUzoawq/uA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/globals": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-30.2.0.tgz", + "integrity": "sha512-b63wmnKPaK+6ZZfpYhz9K61oybvbI1aMcIs80++JI1O1rR1vaxHUCNqo3ITu6NU0d4V34yZFoHMn/uoKr/Rwfw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "30.2.0", + "@jest/expect": "30.2.0", + "@jest/types": "30.2.0", + "jest-mock": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/globals/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/globals/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/globals/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": 
"sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jest/pattern": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/@jest/pattern/-/pattern-30.0.1.tgz", + "integrity": "sha512-gWp7NfQW27LaBQz3TITS8L7ZCQ0TLvtmI//4OwlQRx4rnWxcPNIYjxZpDcN4+UlGxgm3jS5QPz8IPTCkb59wZA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "jest-regex-util": "30.0.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/pattern/node_modules/jest-regex-util": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/reporters": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-30.2.0.tgz", + "integrity": "sha512-DRyW6baWPqKMa9CzeiBjHwjd8XeAyco2Vt8XbcLFjiwCOEKOvy82GJ8QQnJE9ofsxCMPjH4MfH8fCWIHHDKpAQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "@jridgewell/trace-mapping": "^0.3.25", + "@types/node": "*", + "chalk": "^4.1.2", + "collect-v8-coverage": "^1.0.2", + "exit-x": "^0.2.2", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^6.0.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^5.0.0", + "istanbul-reports": "^3.1.3", + "jest-message-util": "30.2.0", + "jest-util": "30.2.0", + "jest-worker": "30.2.0", + "slash": "^3.0.0", + "string-length": "^4.0.2", + "v8-to-istanbul": "^9.0.1" + }, + 
"engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/reporters/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/reporters/node_modules/@jest/transform": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-30.2.0.tgz", + "integrity": "sha512-XsauDV82o5qXbhalKxD7p4TZYYdwcaEXC77PPD2HixEFF+6YGppjrAAQurTl2ECWcEomHBMMNS9AH3kcCFx8jA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/types": "30.2.0", + "@jridgewell/trace-mapping": "^0.3.25", + "babel-plugin-istanbul": "^7.0.1", + "chalk": "^4.1.2", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "micromatch": "^4.0.8", + "pirates": "^4.0.7", + "slash": "^3.0.0", + "write-file-atomic": "^5.0.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/reporters/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + 
"@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/reporters/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jest/reporters/node_modules/babel-plugin-istanbul": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-7.0.1.tgz", + "integrity": "sha512-D8Z6Qm8jCvVXtIRkBnqNHX0zJ37rQcFJ9u8WOS6tkYOsRdHBzypCstaxWiu5ZIlqQtviRYbgnRLSoCEvjqcqbA==", + "dev": true, + "license": "BSD-3-Clause", + "workspaces": [ + "test/babel-8" + ], + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-instrument": "^6.0.2", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@jest/reporters/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@jest/reporters/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + 
"node_modules/@jest/reporters/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@jest/reporters/node_modules/jest-haste-map": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-30.2.0.tgz", + "integrity": "sha512-sQA/jCb9kNt+neM0anSj6eZhLZUIhQgwDt7cPGjumgLM4rXsfb9kpnlacmvZz3Q5tb80nS+oG/if+NBKrHC+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "anymatch": "^3.1.3", + "fb-watchman": "^2.0.2", + "graceful-fs": "^4.2.11", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "jest-worker": "30.2.0", + "micromatch": "^4.0.8", + "walker": "^1.0.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.3" + } + }, + "node_modules/@jest/reporters/node_modules/jest-regex-util": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/reporters/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": 
"sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/reporters/node_modules/jest-worker": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-30.2.0.tgz", + "integrity": "sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@ungap/structured-clone": "^1.3.0", + "jest-util": "30.2.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/reporters/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@jest/reporters/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/@jest/reporters/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": 
"https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@jest/reporters/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/reporters/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/@jest/reporters/node_modules/write-file-atomic": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", + "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "dependencies": { + "@sinclair/typebox": "^0.27.8" + 
}, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/snapshot-utils": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/snapshot-utils/-/snapshot-utils-30.2.0.tgz", + "integrity": "sha512-0aVxM3RH6DaiLcjj/b0KrIBZhSX1373Xci4l3cW5xiUWPctZ59zQ7jj4rqcJQ/Z8JuN/4wX3FpJSa3RssVvCug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "natural-compare": "^1.4.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/snapshot-utils/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/snapshot-utils/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/snapshot-utils/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": 
"MIT" + }, + "node_modules/@jest/source-map": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-30.0.1.tgz", + "integrity": "sha512-MIRWMUUR3sdbP36oyNyhbThLHyJ2eEDClPCiHVbrYAe5g3CHRArIVpBw7cdSB5fr+ofSfIb2Tnsw8iEHL0PYQg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "callsites": "^3.1.0", + "graceful-fs": "^4.2.11" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/test-result": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-30.2.0.tgz", + "integrity": "sha512-RF+Z+0CCHkARz5HT9mcQCBulb1wgCP3FBvl9VFokMX27acKphwyQsNuWH3c+ojd1LeWBLoTYoxF0zm6S/66mjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "30.2.0", + "@jest/types": "30.2.0", + "@types/istanbul-lib-coverage": "^2.0.6", + "collect-v8-coverage": "^1.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/test-result/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/test-result/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + 
"@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/test-result/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jest/test-sequencer": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-30.2.0.tgz", + "integrity": "sha512-wXKgU/lk8fKXMu/l5Hog1R61bL4q5GCdT6OJvdAFz1P+QrpoFuLU68eoKuVc4RbrTtNnTL5FByhWdLgOPSph+Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "30.2.0", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/test-sequencer/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/test-sequencer/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + 
"engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/test-sequencer/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jest/test-sequencer/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/test-sequencer/node_modules/jest-haste-map": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-30.2.0.tgz", + "integrity": "sha512-sQA/jCb9kNt+neM0anSj6eZhLZUIhQgwDt7cPGjumgLM4rXsfb9kpnlacmvZz3Q5tb80nS+oG/if+NBKrHC+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "anymatch": "^3.1.3", + "fb-watchman": "^2.0.2", + "graceful-fs": "^4.2.11", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "jest-worker": "30.2.0", + "micromatch": "^4.0.8", + "walker": "^1.0.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.3" + } + }, + "node_modules/@jest/test-sequencer/node_modules/jest-regex-util": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" 
+ } + }, + "node_modules/@jest/test-sequencer/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/test-sequencer/node_modules/jest-worker": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-30.2.0.tgz", + "integrity": "sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@ungap/structured-clone": "^1.3.0", + "jest-util": "30.2.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/@jest/test-sequencer/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/@jest/test-sequencer/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/test-sequencer/node_modules/supports-color": { + "version": "8.1.1", + 
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/@jest/transform": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", + "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "write-file-atomic": "^4.0.2" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/transform/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/types": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", + "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "dependencies": { + "@jest/schemas": 
"^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.12", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.12.tgz", + "integrity": "sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/source-map": { + "version": "0.3.10", + "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.10.tgz", + "integrity": "sha512-0pPkgz9dY+bijgistcTTJ5mR+ocqRXLuhXHYdzoMmmoJ2C9S46RCm2GMUbatPEUK9Yjy26IrAy8D/M00lLkv+Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz", + "integrity": "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.29", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.29.tgz", + "integrity": 
"sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.12", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.12.tgz", + "integrity": "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.4.3", + "@emnapi/runtime": "^1.4.3", + "@tybys/wasm-util": "^0.10.0" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + 
"integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@pkgr/core": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.9.tgz", + "integrity": "sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/pkgr" + } + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true + }, + "node_modules/@sindresorhus/merge-streams": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", + "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@sinonjs/commons": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", + "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "13.0.5", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-13.0.5.tgz", + "integrity": "sha512-36/hTbH2uaWuGVERyC6da9YwGWnzUZXuPro/F2LfsdOsLnCojz/iSH8MxUt/FD2S5XBSVPhmArFUXcpCQ2Hkiw==", + 
"dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@sinonjs/commons": "^3.0.1" + } + }, + "node_modules/@tybys/wasm-util": { + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz", + "integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@tybys/wasm-util/node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD", + "optional": true + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + 
"@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.20.7", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.7.tgz", + "integrity": "sha512-dkO5fhS7+/oos4ciWxyEyjWe48zmG6wbCheo/G2ZnHx4fs3EU6YC6UM8rk56gAjNJ9P3MTH2jo5jb92/K6wbng==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.20.7" + } + }, + "node_modules/@types/command-line-args": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/@types/command-line-args/-/command-line-args-5.2.3.tgz", + "integrity": "sha512-uv0aG6R0Y8WHZLTamZwtfsDLVRnOa+n+n5rEvFWL5Na5gZ8V2Teab/duDPFzIIIhs9qizDpcavCusCLJZu62Kw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/emscripten": { + "version": "1.40.1", + "resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-1.40.1.tgz", + "integrity": "sha512-sr53lnYkQNhjHNN0oJDdUm5564biioI5DuOpycufDVK7D3y+GR3oUswe2rlwY1nPNyusHbrJ9WoTyIHl4/Bpwg==", + "license": "MIT" + }, + "node_modules/@types/eslint": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-9.6.1.tgz", + "integrity": "sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "*", + "@types/json-schema": "*" + } + }, + "node_modules/@types/eslint-scope": { + "version": "3.7.7", + "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz", + "integrity": "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/eslint": "*", + "@types/estree": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + 
"dev": true, + "license": "MIT" + }, + "node_modules/@types/fs-extra": { + "version": "11.0.4", + "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-11.0.4.tgz", + "integrity": "sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/jsonfile": "*", + "@types/node": "*" + } + }, + "node_modules/@types/graceful-fs": { + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", + "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", + "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", + "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "30.0.0", + "resolved": 
"https://registry.npmjs.org/@types/jest/-/jest-30.0.0.tgz", + "integrity": "sha512-XTYugzhuwqWjws0CVz8QpM36+T+Dz5mTEBKhNs/esGLnCIlGdRy+Dq78NRjd7ls7r8BC8ZRMOrKlkO1hU0JOwA==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^30.0.0", + "pretty-format": "^30.0.0" + } + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/jsonfile": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/@types/jsonfile/-/jsonfile-6.1.4.tgz", + "integrity": "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/lodash": { + "version": "4.17.20", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.20.tgz", + "integrity": "sha512-H3MHACvFUEiujabxhaI/ImO6gUrd8oOurg7LQtS7mbwIXA/cUqWrvBsaeJ23aZEPk1TAYkurjfMbSELfoCXlGA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "22.18.12", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.12.tgz", + "integrity": "sha512-BICHQ67iqxQGFSzfCFTT7MRQ5XcBjG5aeKh5Ok38UBbPe5fxTyE+aHFxwVrGyr8GNlqFMLKD1D3P2K/1ks8tog==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@types/stack-utils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/tmp": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.2.6.tgz", + "integrity": 
"sha512-chhaNf2oKHlRkDGt+tiKE2Z5aJ6qalm7Z9rlLdBwmOiAAf09YQvvoLXjWK4HWPF1xU/fqvMgfNfpVoBscA/tKA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/yargs": { + "version": "17.0.33", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", + "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "dev": true, + "license": "ISC" + }, + "node_modules/@unrs/resolver-binding-android-arm-eabi": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm-eabi/-/resolver-binding-android-arm-eabi-1.11.1.tgz", + "integrity": "sha512-ppLRUgHVaGRWUx0R0Ut06Mjo9gBaBkg3v/8AxusGLhsIotbBLuRk51rAzqLC8gq6NyyAojEXglNjzf6R948DNw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@unrs/resolver-binding-android-arm64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-android-arm64/-/resolver-binding-android-arm64-1.11.1.tgz", + "integrity": "sha512-lCxkVtb4wp1v+EoN+HjIG9cIIzPkX5OtM03pQYkG+U5O/wL53LC4QbIeazgiKqluGeVEeBlZahHalCaBvU1a2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + 
"node_modules/@unrs/resolver-binding-darwin-arm64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-darwin-arm64/-/resolver-binding-darwin-arm64-1.11.1.tgz", + "integrity": "sha512-gPVA1UjRu1Y/IsB/dQEsp2V1pm44Of6+LWvbLc9SDk1c2KhhDRDBUkQCYVWe6f26uJb3fOK8saWMgtX8IrMk3g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-darwin-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-darwin-x64/-/resolver-binding-darwin-x64-1.11.1.tgz", + "integrity": "sha512-cFzP7rWKd3lZaCsDze07QX1SC24lO8mPty9vdP+YVa3MGdVgPmFc59317b2ioXtgCMKGiCLxJ4HQs62oz6GfRQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@unrs/resolver-binding-freebsd-x64": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-freebsd-x64/-/resolver-binding-freebsd-x64-1.11.1.tgz", + "integrity": "sha512-fqtGgak3zX4DCB6PFpsH5+Kmt/8CIi4Bry4rb1ho6Av2QHTREM+47y282Uqiu3ZRF5IQioJQ5qWRV6jduA+iGw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm-gnueabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm-gnueabihf/-/resolver-binding-linux-arm-gnueabihf-1.11.1.tgz", + "integrity": "sha512-u92mvlcYtp9MRKmP+ZvMmtPN34+/3lMHlyMj7wXJDeXxuM0Vgzz0+PPJNsro1m3IZPYChIkn944wW8TYgGKFHw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm-musleabihf": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm-musleabihf/-/resolver-binding-linux-arm-musleabihf-1.11.1.tgz", + "integrity": 
"sha512-cINaoY2z7LVCrfHkIcmvj7osTOtm6VVT16b5oQdS4beibX2SYBwgYLmqhBjA1t51CarSaBuX5YNsWLjsqfW5Cw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm64-gnu/-/resolver-binding-linux-arm64-gnu-1.11.1.tgz", + "integrity": "sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-arm64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-arm64-musl/-/resolver-binding-linux-arm64-musl-1.11.1.tgz", + "integrity": "sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-ppc64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-ppc64-gnu/-/resolver-binding-linux-ppc64-gnu-1.11.1.tgz", + "integrity": "sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-riscv64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-riscv64-gnu/-/resolver-binding-linux-riscv64-gnu-1.11.1.tgz", + "integrity": "sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@unrs/resolver-binding-linux-riscv64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-riscv64-musl/-/resolver-binding-linux-riscv64-musl-1.11.1.tgz", + "integrity": "sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-s390x-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-s390x-gnu/-/resolver-binding-linux-s390x-gnu-1.11.1.tgz", + "integrity": "sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-x64-gnu": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-x64-gnu/-/resolver-binding-linux-x64-gnu-1.11.1.tgz", + "integrity": "sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-linux-x64-musl": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-linux-x64-musl/-/resolver-binding-linux-x64-musl-1.11.1.tgz", + "integrity": "sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@unrs/resolver-binding-wasm32-wasi": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-wasm32-wasi/-/resolver-binding-wasm32-wasi-1.11.1.tgz", + "integrity": 
"sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^0.2.11" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@unrs/resolver-binding-win32-arm64-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-arm64-msvc/-/resolver-binding-win32-arm64-msvc-1.11.1.tgz", + "integrity": "sha512-nRcz5Il4ln0kMhfL8S3hLkxI85BXs3o8EYoattsJNdsX4YUU89iOkVn7g0VHSRxFuVMdM4Q1jEpIId1Ihim/Uw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/resolver-binding-win32-ia32-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-ia32-msvc/-/resolver-binding-win32-ia32-msvc-1.11.1.tgz", + "integrity": "sha512-DCEI6t5i1NmAZp6pFonpD5m7i6aFrpofcp4LA2i8IIq60Jyo28hamKBxNrZcyOwVOZkgsRp9O2sXWBWP8MnvIQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@unrs/resolver-binding-win32-x64-msvc": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@unrs/resolver-binding-win32-x64-msvc/-/resolver-binding-win32-x64-msvc-1.11.1.tgz", + "integrity": "sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@webassemblyjs/ast": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", + "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/helper-numbers": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2" + } 
+ }, + "node_modules/@webassemblyjs/floating-point-hex-parser": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.13.2.tgz", + "integrity": "sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-api-error": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.13.2.tgz", + "integrity": "sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-buffer": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.14.1.tgz", + "integrity": "sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-numbers": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.13.2.tgz", + "integrity": "sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/floating-point-hex-parser": "1.13.2", + "@webassemblyjs/helper-api-error": "1.13.2", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/helper-wasm-bytecode": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.13.2.tgz", + "integrity": "sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-wasm-section": { + "version": "1.14.1", + "resolved": 
"https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.14.1.tgz", + "integrity": "sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/wasm-gen": "1.14.1" + } + }, + "node_modules/@webassemblyjs/ieee754": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.13.2.tgz", + "integrity": "sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@xtuc/ieee754": "^1.2.0" + } + }, + "node_modules/@webassemblyjs/leb128": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.13.2.tgz", + "integrity": "sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/utf8": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.13.2.tgz", + "integrity": "sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/wasm-edit": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.14.1.tgz", + "integrity": "sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/helper-wasm-section": "1.14.1", + "@webassemblyjs/wasm-gen": 
"1.14.1", + "@webassemblyjs/wasm-opt": "1.14.1", + "@webassemblyjs/wasm-parser": "1.14.1", + "@webassemblyjs/wast-printer": "1.14.1" + } + }, + "node_modules/@webassemblyjs/wasm-gen": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.14.1.tgz", + "integrity": "sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/ieee754": "1.13.2", + "@webassemblyjs/leb128": "1.13.2", + "@webassemblyjs/utf8": "1.13.2" + } + }, + "node_modules/@webassemblyjs/wasm-opt": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.14.1.tgz", + "integrity": "sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/wasm-gen": "1.14.1", + "@webassemblyjs/wasm-parser": "1.14.1" + } + }, + "node_modules/@webassemblyjs/wasm-parser": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.14.1.tgz", + "integrity": "sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-api-error": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/ieee754": "1.13.2", + "@webassemblyjs/leb128": "1.13.2", + "@webassemblyjs/utf8": "1.13.2" + } + }, + "node_modules/@webassemblyjs/wast-printer": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.14.1.tgz", + "integrity": 
"sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webpack-cli/configtest": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-2.1.1.tgz", + "integrity": "sha512-wy0mglZpDSiSS0XHrVR+BAdId2+yxPSoJW8fsna3ZpYSlufjvxnP4YbKTCBZnNIcGN4r6ZPXV55X4mYExOfLmw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.15.0" + }, + "peerDependencies": { + "webpack": "5.x.x", + "webpack-cli": "5.x.x" + } + }, + "node_modules/@webpack-cli/info": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-2.0.2.tgz", + "integrity": "sha512-zLHQdI/Qs1UyT5UBdWNqsARasIA+AaF8t+4u2aS2nEpBQh2mWIVb8qAklq0eUENnC5mOItrIB4LiS9xMtph18A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.15.0" + }, + "peerDependencies": { + "webpack": "5.x.x", + "webpack-cli": "5.x.x" + } + }, + "node_modules/@webpack-cli/serve": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-2.0.5.tgz", + "integrity": "sha512-lqaoKnRYBdo1UgDX8uF24AfGMifWK19TxPmM5FHc2vAGxrJ/qtyUyFBWoY1tISZdelsQ5fBcOusifo5o5wSJxQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.15.0" + }, + "peerDependencies": { + "webpack": "5.x.x", + "webpack-cli": "5.x.x" + }, + "peerDependenciesMeta": { + "webpack-dev-server": { + "optional": true + } + } + }, + "node_modules/@xtuc/ieee754": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", + "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@xtuc/long": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", + "integrity": 
"sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/@yarnpkg/fslib": { + "version": "2.10.4", + "resolved": "https://registry.npmjs.org/@yarnpkg/fslib/-/fslib-2.10.4.tgz", + "integrity": "sha512-WhaLwvXEMjCjGxOraQx+Qtmst13iAPOlSElSZfQFdLohva5owlqACRapJ78zZFEW6M9ArqdQlZaHKVN5/mM+SA==", + "license": "BSD-2-Clause", + "dependencies": { + "@yarnpkg/libzip": "^2.3.0", + "tslib": "^1.13.0" + }, + "engines": { + "node": ">=12 <14 || 14.2 - 14.9 || >14.10.0" + } + }, + "node_modules/@yarnpkg/libzip": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@yarnpkg/libzip/-/libzip-2.3.0.tgz", + "integrity": "sha512-6xm38yGVIa6mKm/DUCF2zFFJhERh/QWp1ufm4cNUvxsONBmfPg8uZ9pZBdOmF6qFGr/HlT6ABBkCSx/dlEtvWg==", + "license": "BSD-2-Clause", + "dependencies": { + "@types/emscripten": "^1.39.6", + "tslib": "^1.13.0" + }, + "engines": { + "node": ">=12 <14 || 14.2 - 14.9 || >14.10.0" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-import-phases": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/acorn-import-phases/-/acorn-import-phases-1.0.4.tgz", + "integrity": "sha512-wKmbr/DDiIXzEOiWrTTUcDm24kQ2vGfZQvM2fwg2vXqR5uW6aapr7ObPtj1th32b9u90/Pf4AItvdTh42fBmVQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.13.0" + }, + "peerDependencies": { + "acorn": "^8.14.0" + } + }, + "node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, 
+ "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-formats": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", + "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/array-back": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz", + "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/babel-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", + "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "dependencies": { + "@jest/transform": "^29.7.0", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^29.6.3", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, 
+ "peerDependencies": { + "@babel/core": "^7.8.0" + } + }, + "node_modules/babel-jest/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dev": true, + "license": "BSD-3-Clause", + "optional": true, + "peer": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", + "dev": true, + "license": "BSD-3-Clause", + "optional": true, + "peer": true, + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", + "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": 
true, + "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.1.14", + "@types/babel__traverse": "^7.0.6" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.2.0.tgz", + "integrity": "sha512-E/VlAEzRrsLEb2+dv8yp3bo4scof3l9nR4lrld+Iy5NyVqgVYUJnDAmunkhPMisRI32Qc4iRiz425d8vM++2fg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5" + }, + "peerDependencies": { + "@babel/core": "^7.0.0 || ^8.0.0-0" + } + }, + "node_modules/babel-preset-jest": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", + "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "dependencies": { + "babel-plugin-jest-hoist": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 
|| >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/baseline-browser-mapping": { + "version": "2.8.20", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.20.tgz", + "integrity": "sha512-JMWsdF+O8Orq3EMukbUN1QfbLK9mX2CkUmQBcW2T0s8OmdAUL5LLM/6wFwSrqXzlXB13yhyK9gTKS1rIizOduQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, + "node_modules/big.js": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", + "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": 
"sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.27.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.27.0.tgz", + "integrity": "sha512-AXVQwdhot1eqLihwasPElhX2tAZiBjWdJ9i/Zcj2S6QYIjkx62OKSfnobkriB81C3l4w0rVy3Nt4jaTBltYEpw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.8.19", + "caniuse-lite": "^1.0.30001751", + "electron-to-chromium": "^1.5.238", + "node-releases": "^2.0.26", + "update-browserslist-db": "^1.1.4" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bs-logger": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", + "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-json-stable-stringify": "2.x" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": 
"sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "license": "MIT" + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001751", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001751.tgz", + "integrity": "sha512-A0QJhug0Ly64Ii3eIqHu5X51ebln3k4yTUkY1j8drqpWHVreg/VLijN48cZ1bYPiqOQuqpkIKnzr/Ul8V+p6Cw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": 
"sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chrome-trace-event": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.4.tgz", + "integrity": "sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "optional": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cjs-module-lexer": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-2.1.0.tgz", + "integrity": "sha512-UX0OwmYRYQQetfrLEZeewIFFI+wSTofC+pMBLNuH3RUuu/xzG1oz84UCEDOSoQlN3fZ4+AzmV50ZYvGqkMh9yA==", + "dev": true, + "license": "MIT" + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": 
"https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/clone-deep": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", + "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.2", + "shallow-clone": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.3.tgz", + "integrity": "sha512-1L5aqIkwPfiodaMgQunkF1zRhNqifHBmtbbbxcr6yVxxBnliw4TDOW6NxpO8DJLgJ16OT+Y4ztZqP6p/FtXnAw==", + "dev": true, + "license": "MIT" + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "license": "MIT" + }, + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/command-line-args": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-5.2.1.tgz", + "integrity": "sha512-H4UfQhZyakIjC74I9d34fGYDwk3XpSr17QhEd0Q3I9Xq1CETHo4Hcuo87WyWHpAF1aSLjLRf5lD9ZGX2qStUvg==", + "license": "MIT", + "dependencies": { + "array-back": "^3.1.0", + "find-replace": "^3.0.0", + "lodash.camelcase": "^4.3.0", + "typical": "^4.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/copy-webpack-plugin": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-12.0.2.tgz", + "integrity": 
"sha512-SNwdBeHyII+rWvee/bTnAYyO8vfVdcSTud4EIb6jcZ8inLeWucJE0DnxXQBjlQ5zlteuuvooGQy3LIyGxhvlOA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-glob": "^3.3.2", + "glob-parent": "^6.0.1", + "globby": "^14.0.0", + "normalize-path": "^3.0.0", + "schema-utils": "^4.2.0", + "serialize-javascript": "^6.0.2" + }, + "engines": { + "node": ">= 18.12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.1.0" + } + }, + "node_modules/copy-webpack-plugin/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/dedent": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.7.0.tgz", + "integrity": 
"sha512-HGFtf8yhuhGhqO07SV79tRp+br4MnbdjeVxotpn1QBl30pcLLCQjX5b2295ll0fv8RKDKsmWYrl05usHM9CewQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "babel-plugin-macros": "^3.1.0" + }, + "peerDependenciesMeta": { + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true, + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.5.240", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.240.tgz", + "integrity": "sha512-OBwbZjWgrCOH+g6uJsA2/7Twpas2OlepS9uvByJjR2datRDuKGYeD+nP8lBBks2qnB7bGJNHDUx7c/YLaT3QMQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/emittery": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz", + "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/emoji-regex": { + "version": 
"8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/emojis-list": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", + "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/enhanced-resolve": { + "version": "5.18.2", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.2.tgz", + "integrity": "sha512-6Jw4sE1maoRJo3q8MsSIn2onJFbLTOjY9hlx4DZXmOKvLRd1Ok2kXmAGXaafL2+ijsJZ1ClYbl/pmqr9+k4iUQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/envinfo": { + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.14.0.tgz", + "integrity": "sha512-CO40UI41xDQzhLB1hWyqUKgFhs250pNcGbyGKe1l/e4FSaI/+YE4IMG76GDt0In67WLPACIITC+sOi08x4wIvg==", + "dev": true, + "license": "MIT", + "bin": { + "envinfo": "dist/cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/error-ex": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", + "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-arrayish": "^0.2.1" 
+ } + }, + "node_modules/es-module-lexer": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "dev": true, + "license": "MIT" + }, + "node_modules/esbuild": { + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.10.tgz", + "integrity": "sha512-9RiGKvCwaqxO2owP61uQ4BgNborAQskMR6QusfWzQqv7AZOg5oGehdY2pRJMTKuwxd1IDBP4rSbI5lHzU7SMsQ==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.10", + "@esbuild/android-arm": "0.25.10", + "@esbuild/android-arm64": "0.25.10", + "@esbuild/android-x64": "0.25.10", + "@esbuild/darwin-arm64": "0.25.10", + "@esbuild/darwin-x64": "0.25.10", + "@esbuild/freebsd-arm64": "0.25.10", + "@esbuild/freebsd-x64": "0.25.10", + "@esbuild/linux-arm": "0.25.10", + "@esbuild/linux-arm64": "0.25.10", + "@esbuild/linux-ia32": "0.25.10", + "@esbuild/linux-loong64": "0.25.10", + "@esbuild/linux-mips64el": "0.25.10", + "@esbuild/linux-ppc64": "0.25.10", + "@esbuild/linux-riscv64": "0.25.10", + "@esbuild/linux-s390x": "0.25.10", + "@esbuild/linux-x64": "0.25.10", + "@esbuild/netbsd-arm64": "0.25.10", + "@esbuild/netbsd-x64": "0.25.10", + "@esbuild/openbsd-arm64": "0.25.10", + "@esbuild/openbsd-x64": "0.25.10", + "@esbuild/openharmony-arm64": "0.25.10", + "@esbuild/sunos-x64": "0.25.10", + "@esbuild/win32-arm64": "0.25.10", + "@esbuild/win32-ia32": "0.25.10", + "@esbuild/win32-x64": "0.25.10" + } + }, + "node_modules/esbuild-loader": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/esbuild-loader/-/esbuild-loader-4.4.0.tgz", + "integrity": "sha512-4J+hXTpTtEdzUNLoY8ReqDNJx2NoldfiljRCiKbeYUuZmVaiJeDqFgyAzz8uOopaekwRoCcqBFyEroGQLFVZ1g==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "esbuild": "^0.25.0", + "get-tsconfig": "^4.10.1", + "loader-utils": "^2.0.4", + "webpack-sources": "^1.4.3" + }, + "funding": { + "url": "https://github.com/privatenumber/esbuild-loader?sponsor=1" + }, + "peerDependencies": { + "webpack": "^4.40.0 || ^5.0.0" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": 
"sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/exit-x": { + "version": "0.2.2", + "resolved": 
"https://registry.npmjs.org/exit-x/-/exit-x-0.2.2.tgz", + "integrity": "sha512-+I6B/IkJc1o/2tiURyz/ivu/O0nKNEArIUB5O7zBrlDVJr22SCLH3xTeEry428LvFhRzIA1g8izguxJ/gbNcVQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/expect": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-30.2.0.tgz", + "integrity": "sha512-u/feCi0GPsI+988gU2FLcsHyAHTU0MX1Wg68NhAnN7z/+C5wqG+CY8J53N9ioe8RXgaoz0nBR/TYMf3AycUuPw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/expect-utils": "30.2.0", + "@jest/get-type": "30.1.0", + "jest-matcher-utils": "30.2.0", + "jest-message-util": "30.2.0", + "jest-mock": "30.2.0", + "jest-util": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/expect/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/expect/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/expect/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": 
"https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/expect/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/expect/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/expect/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": 
"https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-uri": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz", + "integrity": "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/fastest-levenshtein": { + "version": "1.0.16", + "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz", + "integrity": "sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4.9.1" + } + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-replace": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-replace/-/find-replace-3.0.0.tgz", + "integrity": "sha512-6Tb2myMioCAgv5kfvP5/PkZZ/ntTpVK39fHY7WkWBgvbeE+VHd/tZuZ4mrC+bxh4cfOZeYKVPaJIZtZXV7GNCQ==", + "license": "MIT", + "dependencies": { + "array-back": "^3.0.1" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "license": "BSD-3-Clause", + "bin": { + "flat": "cli.js" + } + }, + "node_modules/foreground-child": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dev": true, + "license": "ISC", + 
"dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/foreground-child/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-tsconfig": { + "version": "4.10.1", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.10.1.tgz", + "integrity": "sha512-auHyJ4AgMz7vgS8Hp3N6HXSmlMdUyhSUrfBF16w153rxtLIEOE+HGqaBppczZvnHLqQJfiHotCYpNhl0lUROFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + 
"path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/glob-to-regexp": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", + "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/globby": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-14.1.0.tgz", + "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sindresorhus/merge-streams": "^2.1.0", + "fast-glob": "^3.3.3", + "ignore": "^7.0.3", + "path-type": "^6.0.0", + "slash": "^5.1.0", + "unicorn-magic": "^0.3.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/handlebars": { + "version": "4.7.8", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", + "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.5", + "neo-async": 
"^2.6.2", + "source-map": "^0.6.1", + "wordwrap": "^1.0.0" + }, + "bin": { + "handlebars": "bin/handlebars" + }, + "engines": { + "node": ">=0.4.7" + }, + "optionalDependencies": { + "uglify-js": "^3.1.4" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true, + "license": "MIT" + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-local": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", + "integrity": 
"sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/interpret": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", + "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": 
"2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dev": true, + "license": "MIT", + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + 
} + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", + "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.23.9", + "@babel/parser": "^7.23.9", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-instrument/node_modules/semver": { + "version": "7.7.3", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.6.tgz", + "integrity": "sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.23", + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz", + "integrity": "sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jackspeak": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { 
+ "@isaacs/cliui": "^8.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/jest": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-30.2.0.tgz", + "integrity": "sha512-F26gjC0yWN8uAA5m5Ss8ZQf5nDHWGlN/xWZIh8S5SRbsEKBovwZhxGd6LJlbZYxBgCYOtreSUyb8hpXyGC5O4A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "30.2.0", + "@jest/types": "30.2.0", + "import-local": "^3.2.0", + "jest-cli": "30.2.0" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-changed-files": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-30.2.0.tgz", + "integrity": "sha512-L8lR1ChrRnSdfeOvTrwZMlnWV8G/LLjQ0nG9MBclwWZidA2N5FviRki0Bvh20WRMOX31/JYvzdqTJrk5oBdydQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "execa": "^5.1.1", + "jest-util": "30.2.0", + "p-limit": "^3.1.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-changed-files/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-changed-files/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": 
"sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-changed-files/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-changed-files/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-changed-files/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-changed-files/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": 
"sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/jest-circus": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-30.2.0.tgz", + "integrity": "sha512-Fh0096NC3ZkFx05EP2OXCxJAREVxj1BcW/i6EWqqymcgYKWjyyDpral3fMxVcHXg6oZM7iULer9wGRFvfpl+Tg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "30.2.0", + "@jest/expect": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "co": "^4.6.0", + "dedent": "^1.6.0", + "is-generator-fn": "^2.1.0", + "jest-each": "30.2.0", + "jest-matcher-utils": "30.2.0", + "jest-message-util": "30.2.0", + "jest-runtime": "30.2.0", + "jest-snapshot": "30.2.0", + "jest-util": "30.2.0", + "p-limit": "^3.1.0", + "pretty-format": "30.2.0", + "pure-rand": "^7.0.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.6" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-circus/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-circus/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", 
+ "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-circus/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-circus/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-circus/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-circus/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + 
"node_modules/jest-circus/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-cli": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-30.2.0.tgz", + "integrity": "sha512-Os9ukIvADX/A9sLt6Zse3+nmHtHaE6hqOsjQtNiugFTbKRHYIYtZXNGNK9NChseXy7djFPjndX1tL0sCTlfpAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/types": "30.2.0", + "chalk": "^4.1.2", + "exit-x": "^0.2.2", + "import-local": "^3.2.0", + "jest-config": "30.2.0", + "jest-util": "30.2.0", + "jest-validate": "30.2.0", + "yargs": "^17.7.2" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-cli/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-cli/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + 
"@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-cli/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-cli/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-cli/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-cli/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/jest-config": { + "version": 
"30.2.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-30.2.0.tgz", + "integrity": "sha512-g4WkyzFQVWHtu6uqGmQR4CQxz/CH3yDSlhzXMWzNjDx843gYjReZnMRanjRCq5XZFuQrGDxgUaiYWE8BRfVckA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/get-type": "30.1.0", + "@jest/pattern": "30.0.1", + "@jest/test-sequencer": "30.2.0", + "@jest/types": "30.2.0", + "babel-jest": "30.2.0", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "deepmerge": "^4.3.1", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", + "jest-circus": "30.2.0", + "jest-docblock": "30.2.0", + "jest-environment-node": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-resolve": "30.2.0", + "jest-runner": "30.2.0", + "jest-util": "30.2.0", + "jest-validate": "30.2.0", + "micromatch": "^4.0.8", + "parse-json": "^5.2.0", + "pretty-format": "30.2.0", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "esbuild-register": ">=3.4.0", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "esbuild-register": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/jest-config/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-config/node_modules/@jest/transform": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-30.2.0.tgz", + "integrity": 
"sha512-XsauDV82o5qXbhalKxD7p4TZYYdwcaEXC77PPD2HixEFF+6YGppjrAAQurTl2ECWcEomHBMMNS9AH3kcCFx8jA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/types": "30.2.0", + "@jridgewell/trace-mapping": "^0.3.25", + "babel-plugin-istanbul": "^7.0.1", + "chalk": "^4.1.2", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "micromatch": "^4.0.8", + "pirates": "^4.0.7", + "slash": "^3.0.0", + "write-file-atomic": "^5.0.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-config/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-config/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-config/node_modules/babel-jest": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-30.2.0.tgz", + "integrity": "sha512-0YiBEOxWqKkSQWL9nNGGEgndoeL0ZpWrbLMNL5u/Kaxrli3Eaxlt3ZtIDktEvXt4L/R9r3ODr2zKwGM/2BjxVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/transform": "30.2.0", + 
"@types/babel__core": "^7.20.5", + "babel-plugin-istanbul": "^7.0.1", + "babel-preset-jest": "30.2.0", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "slash": "^3.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.11.0 || ^8.0.0-0" + } + }, + "node_modules/jest-config/node_modules/babel-plugin-istanbul": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-7.0.1.tgz", + "integrity": "sha512-D8Z6Qm8jCvVXtIRkBnqNHX0zJ37rQcFJ9u8WOS6tkYOsRdHBzypCstaxWiu5ZIlqQtviRYbgnRLSoCEvjqcqbA==", + "dev": true, + "license": "BSD-3-Clause", + "workspaces": [ + "test/babel-8" + ], + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-instrument": "^6.0.2", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/jest-config/node_modules/babel-plugin-jest-hoist": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-30.2.0.tgz", + "integrity": "sha512-ftzhzSGMUnOzcCXd6WHdBGMyuwy15Wnn0iyyWGKgBDLxf9/s5ABuraCSpBX2uG0jUg4rqJnxsLc5+oYBqoxVaA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/babel__core": "^7.20.5" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-config/node_modules/babel-preset-jest": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-30.2.0.tgz", + "integrity": "sha512-US4Z3NOieAQumwFnYdUWKvUKh8+YSnS/gB3t6YBiz0bskpu7Pine8pPCheNxlPEW4wnUkma2a94YuW2q3guvCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-plugin-jest-hoist": "30.2.0", + "babel-preset-current-node-syntax": "^1.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "peerDependencies": { + 
"@babel/core": "^7.11.0 || ^8.0.0-beta.1" + } + }, + "node_modules/jest-config/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/jest-config/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-config/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/jest-config/node_modules/jest-haste-map": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-30.2.0.tgz", + "integrity": "sha512-sQA/jCb9kNt+neM0anSj6eZhLZUIhQgwDt7cPGjumgLM4rXsfb9kpnlacmvZz3Q5tb80nS+oG/if+NBKrHC+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "anymatch": "^3.1.3", + "fb-watchman": "^2.0.2", + "graceful-fs": "^4.2.11", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + 
"jest-worker": "30.2.0", + "micromatch": "^4.0.8", + "walker": "^1.0.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.3" + } + }, + "node_modules/jest-config/node_modules/jest-regex-util": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-config/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-config/node_modules/jest-worker": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-30.2.0.tgz", + "integrity": "sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@ungap/structured-clone": "^1.3.0", + "jest-util": "30.2.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-config/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": 
true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/jest-config/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/jest-config/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/jest-config/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-config/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/jest-config/node_modules/write-file-atomic": { + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", + "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/jest-diff": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-30.2.0.tgz", + "integrity": "sha512-dQHFo3Pt4/NLlG5z4PxZ/3yZTZ1C7s9hveiOj+GCN+uT109NC2QgsoVZsVOAvbJ3RgKkvyLGXZV9+piDpWbm6A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/diff-sequences": "30.0.1", + "@jest/get-type": "30.1.0", + "chalk": "^4.1.2", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-docblock": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-30.2.0.tgz", + "integrity": "sha512-tR/FFgZKS1CXluOQzZvNH3+0z9jXr3ldGSD8bhyuxvlVUwbeLOGynkunvlTMxchC5urrKndYiwCFC0DLVjpOCA==", + "dev": true, + "license": "MIT", + "dependencies": { + "detect-newline": "^3.1.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-each": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-30.2.0.tgz", + "integrity": "sha512-lpWlJlM7bCUf1mfmuqTA8+j2lNURW9eNafOy99knBM01i5CQeY5UH1vZjgT9071nDJac1M4XsbyI44oNOdhlDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0", + "@jest/types": "30.2.0", + "chalk": "^4.1.2", + "jest-util": "30.2.0", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-each/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": 
"sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-each/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-each/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-each/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-each/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + 
"@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-each/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/jest-environment-node": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-30.2.0.tgz", + "integrity": "sha512-ElU8v92QJ9UrYsKrxDIKCxu6PfNj4Hdcktcn0JX12zqNdqWHB0N+hwOnnBBXvjLd2vApZtuLUGs1QSY+MsXoNA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "30.2.0", + "@jest/fake-timers": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "jest-mock": "30.2.0", + "jest-util": "30.2.0", + "jest-validate": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-environment-node/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-environment-node/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-environment-node/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-environment-node/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-environment-node/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-environment-node/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" 
+ }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/jest-haste-map": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", + "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "dependencies": { + "@jest/types": "^29.6.3", + "@types/graceful-fs": "^4.1.3", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "micromatch": "^4.0.4", + "walker": "^1.0.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" + } + }, + "node_modules/jest-junit": { + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/jest-junit/-/jest-junit-16.0.0.tgz", + "integrity": "sha512-A94mmw6NfJab4Fg/BlvVOUXzXgF0XIH6EmTgJ5NDPp4xoKq0Kr7sErb+4Xs9nZvu58pJojz5RFGpqnZYJTrRfQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "mkdirp": "^1.0.4", + "strip-ansi": "^6.0.1", + "uuid": "^8.3.2", + "xml": "^1.0.1" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/jest-leak-detector": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-30.2.0.tgz", + "integrity": "sha512-M6jKAjyzjHG0SrQgwhgZGy9hFazcudwCNovY/9HPIicmNSBuockPSedAP9vlPK6ONFJ1zfyH/M2/YYJxOz5cdQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-30.2.0.tgz", + "integrity": 
"sha512-dQ94Nq4dbzmUWkQ0ANAWS9tBRfqCrn0bV9AMYdOi/MHW726xn7eQmMeRTpX2ViC00bpNaWXq+7o4lIQ3AX13Hg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0", + "chalk": "^4.1.2", + "jest-diff": "30.2.0", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-30.2.0.tgz", + "integrity": "sha512-y4DKFLZ2y6DxTWD4cDe07RglV88ZiNEdlRfGtqahfbIjfsw1nMCPx49Uev4IA/hWn3sDKyAnSPwoYSsAEdcimw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@jest/types": "30.2.0", + "@types/stack-utils": "^2.0.3", + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "micromatch": "^4.0.8", + "pretty-format": "30.2.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.6" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-message-util/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-message-util/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + 
"engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-message-util/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-message-util/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-mock": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-30.2.0.tgz", + "integrity": "sha512-JNNNl2rj4b5ICpmAcq+WbLH83XswjPbjH4T7yvGzfAGCPh1rw+xVNbtk+FnRslvt9lkCcdn9i1oAoKUuFsOxRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "jest-util": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-mock/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-mock/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + 
"@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-mock/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-mock/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-mock/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-mock/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + 
"node_modules/jest-pnp-resolver": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", + "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", + "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-30.2.0.tgz", + "integrity": "sha512-TCrHSxPlx3tBY3hWNtRQKbtgLhsXa1WmbJEqBlTBrGafd5fiQFByy2GNCEoGR+Tns8d15GaL9cxEzKOO3GEb2A==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.2", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-pnp-resolver": "^1.2.3", + "jest-util": "30.2.0", + "jest-validate": "30.2.0", + "slash": "^3.0.0", + "unrs-resolver": "^1.7.11" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-resolve-dependencies": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-30.2.0.tgz", + "integrity": "sha512-xTOIGug/0RmIe3mmCqCT95yO0vj6JURrn1TKWlNbhiAefJRWINNPgwVkrVgt/YaerPzY3iItufd80v3lOrFJ2w==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-regex-util": "30.0.1", + "jest-snapshot": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + 
"node_modules/jest-resolve-dependencies/node_modules/jest-regex-util": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-resolve/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-resolve/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-resolve/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-resolve/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": 
"sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-resolve/node_modules/jest-haste-map": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-30.2.0.tgz", + "integrity": "sha512-sQA/jCb9kNt+neM0anSj6eZhLZUIhQgwDt7cPGjumgLM4rXsfb9kpnlacmvZz3Q5tb80nS+oG/if+NBKrHC+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "anymatch": "^3.1.3", + "fb-watchman": "^2.0.2", + "graceful-fs": "^4.2.11", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "jest-worker": "30.2.0", + "micromatch": "^4.0.8", + "walker": "^1.0.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.3" + } + }, + "node_modules/jest-resolve/node_modules/jest-regex-util": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-resolve/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + 
"node_modules/jest-resolve/node_modules/jest-worker": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-30.2.0.tgz", + "integrity": "sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@ungap/structured-clone": "^1.3.0", + "jest-util": "30.2.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-resolve/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/jest-resolve/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-resolve/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/jest-runner": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-30.2.0.tgz", + "integrity": 
"sha512-PqvZ2B2XEyPEbclp+gV6KO/F1FIFSbIwewRgmROCMBo/aZ6J1w8Qypoj2pEOcg3G2HzLlaP6VUtvwCI8dM3oqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "30.2.0", + "@jest/environment": "30.2.0", + "@jest/test-result": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "emittery": "^0.13.1", + "exit-x": "^0.2.2", + "graceful-fs": "^4.2.11", + "jest-docblock": "30.2.0", + "jest-environment-node": "30.2.0", + "jest-haste-map": "30.2.0", + "jest-leak-detector": "30.2.0", + "jest-message-util": "30.2.0", + "jest-resolve": "30.2.0", + "jest-runtime": "30.2.0", + "jest-util": "30.2.0", + "jest-watcher": "30.2.0", + "jest-worker": "30.2.0", + "p-limit": "^3.1.0", + "source-map-support": "0.5.13" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runner/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runner/node_modules/@jest/transform": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-30.2.0.tgz", + "integrity": "sha512-XsauDV82o5qXbhalKxD7p4TZYYdwcaEXC77PPD2HixEFF+6YGppjrAAQurTl2ECWcEomHBMMNS9AH3kcCFx8jA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/types": "30.2.0", + "@jridgewell/trace-mapping": "^0.3.25", + "babel-plugin-istanbul": "^7.0.1", + "chalk": "^4.1.2", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + 
"micromatch": "^4.0.8", + "pirates": "^4.0.7", + "slash": "^3.0.0", + "write-file-atomic": "^5.0.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runner/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runner/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-runner/node_modules/babel-plugin-istanbul": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-7.0.1.tgz", + "integrity": "sha512-D8Z6Qm8jCvVXtIRkBnqNHX0zJ37rQcFJ9u8WOS6tkYOsRdHBzypCstaxWiu5ZIlqQtviRYbgnRLSoCEvjqcqbA==", + "dev": true, + "license": "BSD-3-Clause", + "workspaces": [ + "test/babel-8" + ], + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-instrument": "^6.0.2", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/jest-runner/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": 
"sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-runner/node_modules/jest-haste-map": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-30.2.0.tgz", + "integrity": "sha512-sQA/jCb9kNt+neM0anSj6eZhLZUIhQgwDt7cPGjumgLM4rXsfb9kpnlacmvZz3Q5tb80nS+oG/if+NBKrHC+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "anymatch": "^3.1.3", + "fb-watchman": "^2.0.2", + "graceful-fs": "^4.2.11", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "jest-worker": "30.2.0", + "micromatch": "^4.0.8", + "walker": "^1.0.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.3" + } + }, + "node_modules/jest-runner/node_modules/jest-regex-util": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runner/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + 
"node_modules/jest-runner/node_modules/jest-worker": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-30.2.0.tgz", + "integrity": "sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@ungap/structured-clone": "^1.3.0", + "jest-util": "30.2.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runner/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/jest-runner/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/jest-runner/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-runner/node_modules/source-map-support": { + "version": "0.5.13", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": 
"sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/jest-runner/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/jest-runner/node_modules/write-file-atomic": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", + "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/jest-runtime": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-30.2.0.tgz", + "integrity": "sha512-p1+GVX/PJqTucvsmERPMgCPvQJpFt4hFbM+VN3n8TMo47decMUcJbt+rgzwrEme0MQUA/R+1de2axftTHkKckg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "30.2.0", + "@jest/fake-timers": "30.2.0", + "@jest/globals": "30.2.0", + "@jest/source-map": "30.0.1", + "@jest/test-result": "30.2.0", + "@jest/transform": "30.2.0", + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "cjs-module-lexer": "^2.1.0", + "collect-v8-coverage": "^1.0.2", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-message-util": "30.2.0", + "jest-mock": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-resolve": 
"30.2.0", + "jest-snapshot": "30.2.0", + "jest-util": "30.2.0", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runtime/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runtime/node_modules/@jest/transform": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-30.2.0.tgz", + "integrity": "sha512-XsauDV82o5qXbhalKxD7p4TZYYdwcaEXC77PPD2HixEFF+6YGppjrAAQurTl2ECWcEomHBMMNS9AH3kcCFx8jA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/types": "30.2.0", + "@jridgewell/trace-mapping": "^0.3.25", + "babel-plugin-istanbul": "^7.0.1", + "chalk": "^4.1.2", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "micromatch": "^4.0.8", + "pirates": "^4.0.7", + "slash": "^3.0.0", + "write-file-atomic": "^5.0.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runtime/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + 
"@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runtime/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-runtime/node_modules/babel-plugin-istanbul": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-7.0.1.tgz", + "integrity": "sha512-D8Z6Qm8jCvVXtIRkBnqNHX0zJ37rQcFJ9u8WOS6tkYOsRdHBzypCstaxWiu5ZIlqQtviRYbgnRLSoCEvjqcqbA==", + "dev": true, + "license": "BSD-3-Clause", + "workspaces": [ + "test/babel-8" + ], + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-instrument": "^6.0.2", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/jest-runtime/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/jest-runtime/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-runtime/node_modules/glob": { + "version": "10.4.5", + "resolved": 
"https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/jest-runtime/node_modules/jest-haste-map": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-30.2.0.tgz", + "integrity": "sha512-sQA/jCb9kNt+neM0anSj6eZhLZUIhQgwDt7cPGjumgLM4rXsfb9kpnlacmvZz3Q5tb80nS+oG/if+NBKrHC+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "anymatch": "^3.1.3", + "fb-watchman": "^2.0.2", + "graceful-fs": "^4.2.11", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "jest-worker": "30.2.0", + "micromatch": "^4.0.8", + "walker": "^1.0.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.3" + } + }, + "node_modules/jest-runtime/node_modules/jest-regex-util": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runtime/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": 
"30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runtime/node_modules/jest-worker": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-30.2.0.tgz", + "integrity": "sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@ungap/structured-clone": "^1.3.0", + "jest-util": "30.2.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-runtime/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/jest-runtime/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/jest-runtime/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": 
">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/jest-runtime/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-runtime/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/jest-runtime/node_modules/write-file-atomic": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", + "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-30.2.0.tgz", + "integrity": "sha512-5WEtTy2jXPFypadKNpbNkZ72puZCa6UjSr/7djeecHWOu7iYhSXSnHScT8wBz3Rn8Ena5d5RYRcsyKIeqG1IyA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@babel/generator": "^7.27.5", + "@babel/plugin-syntax-jsx": "^7.27.1", + "@babel/plugin-syntax-typescript": "^7.27.1", + "@babel/types": "^7.27.3", + "@jest/expect-utils": "30.2.0", + "@jest/get-type": "30.1.0", + "@jest/snapshot-utils": "30.2.0", + "@jest/transform": "30.2.0", + 
"@jest/types": "30.2.0", + "babel-preset-current-node-syntax": "^1.2.0", + "chalk": "^4.1.2", + "expect": "30.2.0", + "graceful-fs": "^4.2.11", + "jest-diff": "30.2.0", + "jest-matcher-utils": "30.2.0", + "jest-message-util": "30.2.0", + "jest-util": "30.2.0", + "pretty-format": "30.2.0", + "semver": "^7.7.2", + "synckit": "^0.11.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/@jest/transform": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-30.2.0.tgz", + "integrity": "sha512-XsauDV82o5qXbhalKxD7p4TZYYdwcaEXC77PPD2HixEFF+6YGppjrAAQurTl2ECWcEomHBMMNS9AH3kcCFx8jA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.27.4", + "@jest/types": "30.2.0", + "@jridgewell/trace-mapping": "^0.3.25", + "babel-plugin-istanbul": "^7.0.1", + "chalk": "^4.1.2", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.11", + "jest-haste-map": "30.2.0", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "micromatch": "^4.0.8", + "pirates": "^4.0.7", + "slash": "^3.0.0", + "write-file-atomic": "^5.0.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": 
"sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-snapshot/node_modules/babel-plugin-istanbul": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-7.0.1.tgz", + "integrity": "sha512-D8Z6Qm8jCvVXtIRkBnqNHX0zJ37rQcFJ9u8WOS6tkYOsRdHBzypCstaxWiu5ZIlqQtviRYbgnRLSoCEvjqcqbA==", + "dev": true, + "license": "BSD-3-Clause", + "workspaces": [ + "test/babel-8" + ], + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-instrument": "^6.0.2", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/jest-snapshot/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-snapshot/node_modules/jest-haste-map": { + "version": "30.2.0", + "resolved": 
"https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-30.2.0.tgz", + "integrity": "sha512-sQA/jCb9kNt+neM0anSj6eZhLZUIhQgwDt7cPGjumgLM4rXsfb9kpnlacmvZz3Q5tb80nS+oG/if+NBKrHC+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "anymatch": "^3.1.3", + "fb-watchman": "^2.0.2", + "graceful-fs": "^4.2.11", + "jest-regex-util": "30.0.1", + "jest-util": "30.2.0", + "jest-worker": "30.2.0", + "micromatch": "^4.0.8", + "walker": "^1.0.8" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.3" + } + }, + "node_modules/jest-snapshot/node_modules/jest-regex-util": { + "version": "30.0.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.0.1.tgz", + "integrity": "sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/jest-worker": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-30.2.0.tgz", + "integrity": "sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@ungap/structured-clone": "^1.3.0", + 
"jest-util": "30.2.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.1.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/jest-snapshot/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jest-snapshot/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/jest-snapshot/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-snapshot/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + 
"dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/jest-snapshot/node_modules/write-file-atomic": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", + "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/jest-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", + "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-30.2.0.tgz", + "integrity": "sha512-FBGWi7dP2hpdi8nBoWxSsLvBFewKAg0+uSQwBaof4Y4DPgBabXgpSYC5/lR7VmnIlSpASmCi/ntRWPbv7089Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/get-type": "30.1.0", + "@jest/types": "30.2.0", + "camelcase": "^6.3.0", + "chalk": "^4.1.2", + "leven": "^3.1.0", + "pretty-format": "30.2.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-validate/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": 
"sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-validate/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-validate/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-watcher": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-30.2.0.tgz", + "integrity": "sha512-PYxa28dxJ9g777pGm/7PrbnMeA0Jr7osHP9bS7eJy9DuAjMgdGtxgf0uKMyoIsTWAkIbUW5hSDdJ3urmgXBqxg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "30.2.0", + "@jest/types": 
"30.2.0", + "@types/node": "*", + "ansi-escapes": "^4.3.2", + "chalk": "^4.1.2", + "emittery": "^0.13.1", + "jest-util": "30.2.0", + "string-length": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-watcher/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-watcher/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-watcher/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/jest-watcher/node_modules/ci-info": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.1.tgz", + "integrity": "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": 
"https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-watcher/node_modules/jest-util": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.2.0.tgz", + "integrity": "sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "30.2.0", + "@types/node": "*", + "chalk": "^4.1.2", + "ci-info": "^4.2.0", + "graceful-fs": "^4.2.11", + "picomatch": "^4.0.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest-watcher/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/jest-worker": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", + "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "dependencies": { + "@types/node": "*", + "jest-util": "^29.7.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "optional": true, + "peer": true, + "dependencies": { + "has-flag": "^4.0.0" + 
}, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/jest/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest/node_modules/@jest/types": { + "version": "30.2.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-30.2.0.tgz", + "integrity": "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/pattern": "30.0.1", + "@jest/schemas": "30.0.5", + "@types/istanbul-lib-coverage": "^2.0.6", + "@types/istanbul-reports": "^3.0.4", + "@types/node": "*", + "@types/yargs": "^17.0.33", + "chalk": "^4.1.2" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/jest/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": 
"sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonc-parser": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz", + "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==", + "license": "MIT" + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + 
"integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/loader-runner": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz", + "integrity": "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.11.5" + } + }, + "node_modules/loader-utils": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz", + "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^2.1.2" + }, + "engines": { + "node": ">=8.9.0" + } + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash.camelcase": { + "version": "4.3.0", + 
"resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", + "license": "MIT" + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", + "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, + "node_modules/makeerror": { + "version": "1.0.12", + 
"resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "tmpl": "1.0.5" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + 
"version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, + "license": "MIT", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/napi-postinstall": { + "version": "0.3.4", + 
"resolved": "https://registry.npmjs.org/napi-postinstall/-/napi-postinstall-0.3.4.tgz", + "integrity": "sha512-PHI5f1O0EP5xJ9gQmFGMS6IZcrVvTjpXjz7Na41gTE7eE2hK11lg04CECCYEEjdc17EV4DO+fkGEtt7TpTaTiQ==", + "dev": true, + "license": "MIT", + "bin": { + "napi-postinstall": "lib/cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/napi-postinstall" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true, + "license": "MIT" + }, + "node_modules/nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-releases": { + "version": "2.0.26", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.26.tgz", + "integrity": "sha512-S2M9YimhSjBSvYnlr5/+umAnPHE++ODwt5e2Ij6FoX45HA/s4vHdkDx1eax2pAPeAOqu4s9b7ppahsyEFdVqQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-finally": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", + "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + 
}, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-locate/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true, + "license": "BlueOak-1.0.0" + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + 
}, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/path-type": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-6.0.0.tgz", + "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pretty-format": { + "version": "30.2.0", + 
"resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-30.2.0.tgz", + "integrity": "sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "30.0.5", + "ansi-styles": "^5.2.0", + "react-is": "^18.3.1" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/pretty-format/node_modules/@jest/schemas": { + "version": "30.0.5", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.0.5.tgz", + "integrity": "sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.34.0" + }, + "engines": { + "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" + } + }, + "node_modules/pretty-format/node_modules/@sinclair/typebox": { + "version": "0.34.41", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.41.tgz", + "integrity": "sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/pump": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", + "dev": true, + "license": "MIT", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/pure-rand": { + 
"version": "7.0.1", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-7.0.1.tgz", + "integrity": "sha512-oTUZM/NAZS8p7ANR3SHh30kXB+zK2r2BPcEn/awJIbOvq82WoMN4p62AWWp3Hhw50G0xMsw1mhIBLqHw64EcNQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT" + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "dev": true, + "license": "MIT" + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/rechoir": { + "version": "0.6.2", + 
"resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", + "integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==", + "dev": true, + "dependencies": { + "resolve": "^1.1.6" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + 
"type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/schema-utils": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.3.tgz", + "integrity": "sha512-eflK8wEtyOE6+hsaRVPxvUKYCpRgzLqDTb8krvAsRIwOGlHoSgYLgBXoubGgLd2fT41/OUYdb48v4k4WWHQurA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/serialize-javascript": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/shallow-clone": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", + "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^6.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + 
"license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shelljs": { + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.9.2.tgz", + "integrity": "sha512-S3I64fEiKgTZzKCC46zT/Ib9meqofLrQVbpSswtjFfAVDW+AZ54WTnAM/3/yENoxz/V1Cy6u3kiiEbQ4DNphvw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "execa": "^1.0.0", + "fast-glob": "^3.3.2", + "interpret": "^1.0.0", + "rechoir": "^0.6.2" + }, + "bin": { + "shjs": "bin/shjs" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/shelljs/node_modules/cross-spawn": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.6.tgz", + "integrity": "sha512-VqCUuhcd1iB+dsv8gxPttb5iZh/D0iubSP21g36KXdEuf6I5JiioesUVjpCdHV9MZRUfVFlvwtIUyPfxo5trtw==", + "dev": true, + "license": "MIT", + "dependencies": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "engines": { + "node": ">=4.8" + } + }, + "node_modules/shelljs/node_modules/execa": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", + "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^6.0.0", + "get-stream": "^4.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/shelljs/node_modules/get-stream": { + 
"version": "4.1.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", + "dev": true, + "license": "MIT", + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/shelljs/node_modules/is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shelljs/node_modules/npm-run-path": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", + "integrity": "sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/shelljs/node_modules/path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/shelljs/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/shelljs/node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": 
"sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shelljs/node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shelljs/node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/shx": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/shx/-/shx-0.4.0.tgz", + "integrity": "sha512-Z0KixSIlGPpijKgcH6oCMCbltPImvaKy0sGH8AkLRXw1KyzpKtaCTizP2xen+hNDqVF4xxgvA0KXSb9o4Q6hnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.8", + "shelljs": "^0.9.2" + }, + "bin": { + "shx": "lib/cli.js" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/slash": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", + "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/smol-toml": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/smol-toml/-/smol-toml-1.4.2.tgz", + "integrity": "sha512-rInDH6lCNiEyn3+hH8KVGFdbjc099j47+OSgbMrfDYX1CmXLfdKd7qi6IfcWj2wFxvSVkuI46M+wPGYfEOEj6g==", + "license": "BSD-3-Clause", + "engines": { + "node": ">= 18" + }, + "funding": { + "url": "https://github.com/sponsors/cyyynthia" + } + }, + "node_modules/source-list-map": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/source-list-map/-/source-list-map-2.0.1.tgz", + "integrity": "sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==", + "dev": true, + "license": "MIT" + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "license": "MIT", 
+ "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": 
"sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-eof": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "integrity": "sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/synckit": { + "version": "0.11.11", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.11.tgz", + "integrity": "sha512-MeQTA1r0litLUf0Rp/iisCaL8761lKAZHaimlbGK4j0HysC4PLfqygQj9srcs0m2RdtDYnF8UuYyKpbjHYp7Jw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@pkgr/core": "^0.2.9" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/synckit" + } + }, + "node_modules/tapable": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", + "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/terser": { + "version": "5.43.1", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.43.1.tgz", + "integrity": "sha512-+6erLbBm0+LROX2sPXlUYx/ux5PyE9K/a92Wrt6oA+WDAoFTdpHE5tCYCI5PNzq2y8df4rA+QgHLJuR4jNymsg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "@jridgewell/source-map": "^0.3.3", + "acorn": "^8.14.0", + "commander": "^2.20.0", + "source-map-support": "~0.5.20" + }, + "bin": { + "terser": "bin/terser" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/terser-webpack-plugin": { + "version": "5.3.14", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.14.tgz", + "integrity": 
"sha512-vkZjpUjb6OMS7dhV+tILUW6BhpDR7P2L/aQSAv+Uwk+m8KATX9EccViHTJR2qDtACKPIYndLGCyl3FMo+r2LMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "jest-worker": "^27.4.5", + "schema-utils": "^4.3.0", + "serialize-javascript": "^6.0.2", + "terser": "^5.31.1" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.1.0" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "uglify-js": { + "optional": true + } + } + }, + "node_modules/terser-webpack-plugin/node_modules/jest-worker": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", + "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/terser-webpack-plugin/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + 
"minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tmp": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", + "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", + "license": "MIT", + "engines": { + "node": ">=14.14" + } + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ts-jest": { + "version": "29.4.5", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.4.5.tgz", + "integrity": "sha512-HO3GyiWn2qvTQA4kTgjDcXiMwYQt68a1Y8+JuLRVpdIzm+UOLSHgl/XqR4c6nzJkq5rOkjc02O2I7P7l/Yof0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "bs-logger": "^0.2.6", + "fast-json-stable-stringify": "^2.1.0", + "handlebars": "^4.7.8", + "json5": "^2.2.3", + "lodash.memoize": "^4.1.2", + "make-error": "^1.3.6", + "semver": "^7.7.3", + "type-fest": "^4.41.0", + "yargs-parser": "^21.1.1" + }, + "bin": { + "ts-jest": "cli.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0" + }, + "peerDependencies": { + "@babel/core": ">=7.0.0-beta.0 <8", + "@jest/transform": "^29.0.0 || ^30.0.0", + "@jest/types": "^29.0.0 || ^30.0.0", + "babel-jest": "^29.0.0 || ^30.0.0", + "jest": "^29.0.0 || ^30.0.0", + "jest-util": "^29.0.0 || ^30.0.0", + "typescript": ">=4.3 <6" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": 
true + }, + "@jest/transform": { + "optional": true + }, + "@jest/types": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "jest-util": { + "optional": true + } + } + }, + "node_modules/ts-jest/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ts-jest/node_modules/type-fest": { + "version": "4.41.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", + "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ts-loader": { + "version": "9.5.4", + "resolved": "https://registry.npmjs.org/ts-loader/-/ts-loader-9.5.4.tgz", + "integrity": "sha512-nCz0rEwunlTZiy6rXFByQU1kVVpCIgUpc/psFiKVrUwrizdnIbRFu8w7bxhUF0X613DYwT4XzrZHpVyMe758hQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.0", + "enhanced-resolve": "^5.0.0", + "micromatch": "^4.0.0", + "semver": "^7.3.4", + "source-map": "^0.7.4" + }, + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "typescript": "*", + "webpack": "^5.0.0" + } + }, + "node_modules/ts-loader/node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + 
"node_modules/ts-loader/node_modules/source-map": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">= 8" + } + }, + "node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "license": "0BSD" + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.5.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", + "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/typical": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/typical/-/typical-4.0.0.tgz", + "integrity": "sha512-VAH4IvQ7BDFYglMd7BPRDfLgxZZX4O4TFcRDA6EN5X7erNJJq+McIEp8np9aVtxrCJ6qx4GTYVfOWNjcqwZgRw==", + "license": "MIT", + 
"engines": { + "node": ">=8" + } + }, + "node_modules/uglify-js": { + "version": "3.19.3", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", + "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", + "dev": true, + "license": "BSD-2-Clause", + "optional": true, + "bin": { + "uglifyjs": "bin/uglifyjs" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/unicorn-magic": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", + "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/unrs-resolver": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/unrs-resolver/-/unrs-resolver-1.11.1.tgz", + "integrity": "sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "napi-postinstall": "^0.3.0" + }, + "funding": { + "url": "https://opencollective.com/unrs-resolver" + }, + "optionalDependencies": { + "@unrs/resolver-binding-android-arm-eabi": "1.11.1", + "@unrs/resolver-binding-android-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-arm64": "1.11.1", + "@unrs/resolver-binding-darwin-x64": "1.11.1", + "@unrs/resolver-binding-freebsd-x64": "1.11.1", + "@unrs/resolver-binding-linux-arm-gnueabihf": "1.11.1", + "@unrs/resolver-binding-linux-arm-musleabihf": "1.11.1", + 
"@unrs/resolver-binding-linux-arm64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-arm64-musl": "1.11.1", + "@unrs/resolver-binding-linux-ppc64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-riscv64-musl": "1.11.1", + "@unrs/resolver-binding-linux-s390x-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-gnu": "1.11.1", + "@unrs/resolver-binding-linux-x64-musl": "1.11.1", + "@unrs/resolver-binding-wasm32-wasi": "1.11.1", + "@unrs/resolver-binding-win32-arm64-msvc": "1.11.1", + "@unrs/resolver-binding-win32-ia32-msvc": "1.11.1", + "@unrs/resolver-binding-win32-x64-msvc": "1.11.1" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.4.tgz", + "integrity": "sha512-q0SPT4xyU84saUX+tomz1WLkxUbuaJnR1xWt17M7fJtEJigJeWUNGUqrauFXsHnqev9y9JTRGwk13tFBuKby4A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/v8-to-istanbul": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", + "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", + 
"dev": true, + "license": "ISC", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/vscode-jsonrpc": { + "version": "9.0.0-next.8", + "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-9.0.0-next.8.tgz", + "integrity": "sha512-pN6L5eiNBvUpNFBJvudaZ83klir0T/wLFCDpYhpOEsKXyhsWyYsNMzoG7BK6zJoZLHGSSsaTJDjCcPwnLgUyPQ==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/vscode-languageserver": { + "version": "10.0.0-next.13", + "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-10.0.0-next.13.tgz", + "integrity": "sha512-4tSufM2XrNrrzBUGPcYh62qBYhm41yFwFZBgJ63I1dPHRh1aZPK65+TcVa3nG0/K62Q9phhk87TWdQFp+UnYFA==", + "license": "MIT", + "dependencies": { + "vscode-languageserver-protocol": "3.17.6-next.13" + }, + "bin": { + "installServerIntoExtension": "bin/installServerIntoExtension" + } + }, + "node_modules/vscode-languageserver-protocol": { + "version": "3.17.6-next.13", + "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.6-next.13.tgz", + "integrity": "sha512-IE+/j+OOqJ392KMhcexIGt9MVqcTZ4n7DVyaSp5txuC1kNUnfzxlkPzzDwo0p7hdINLCfWjbcjuW5tGYLof4Vw==", + "license": "MIT", + "dependencies": { + "vscode-jsonrpc": "9.0.0-next.8", + "vscode-languageserver-types": "3.17.6-next.6" + } + }, + "node_modules/vscode-languageserver-textdocument": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.11.tgz", + "integrity": "sha512-X+8T3GoiwTVlJbicx/sIAF+yuJAqz8VvwJyoMVhwEMoEKE/fkDmrqUgDMyBECcM2A2frVZIUj5HI/ErRXCfOeA==", + "license": "MIT" + }, + "node_modules/vscode-languageserver-types": { + "version": "3.17.6-next.6", + "resolved": 
"https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.6-next.6.tgz", + "integrity": "sha512-aiJY5/yW+xzw7KPNlwi3gQtddq/3EIn5z8X8nCgJfaiAij2R1APKePngv+MUdLdYJBVTLu+Qa0ODsT+pHgYguQ==", + "license": "MIT" + }, + "node_modules/vscode-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.1.0.tgz", + "integrity": "sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==", + "license": "MIT" + }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "makeerror": "1.0.12" + } + }, + "node_modules/watchpack": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.4.tgz", + "integrity": "sha512-c5EGNOiyxxV5qmTtAB7rbiXxi1ooX1pQKMLX/MIabJjRA0SJBQOjKF+KSVfHkr9U1cADPon0mRiVe/riyaiDUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.1.2" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/webpack": { + "version": "5.102.1", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.102.1.tgz", + "integrity": "sha512-7h/weGm9d/ywQ6qzJ+Xy+r9n/3qgp/thalBbpOi5i223dPXKi04IBtqPN9nTd+jBc7QKfvDbaBnFipYp4sJAUQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/eslint-scope": "^3.7.7", + "@types/estree": "^1.0.8", + "@types/json-schema": "^7.0.15", + "@webassemblyjs/ast": "^1.14.1", + "@webassemblyjs/wasm-edit": "^1.14.1", + "@webassemblyjs/wasm-parser": "^1.14.1", + "acorn": "^8.15.0", + "acorn-import-phases": "^1.0.3", + "browserslist": "^4.26.3", + "chrome-trace-event": "^1.0.2", + "enhanced-resolve": "^5.17.3", + "es-module-lexer": "^1.2.1", + "eslint-scope": "5.1.1", + "events": "^3.2.0", + 
"glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.2.11", + "json-parse-even-better-errors": "^2.3.1", + "loader-runner": "^4.2.0", + "mime-types": "^2.1.27", + "neo-async": "^2.6.2", + "schema-utils": "^4.3.3", + "tapable": "^2.3.0", + "terser-webpack-plugin": "^5.3.11", + "watchpack": "^2.4.4", + "webpack-sources": "^3.3.3" + }, + "bin": { + "webpack": "bin/webpack.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependenciesMeta": { + "webpack-cli": { + "optional": true + } + } + }, + "node_modules/webpack-cli": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-5.1.4.tgz", + "integrity": "sha512-pIDJHIEI9LR0yxHXQ+Qh95k2EvXpWzZ5l+d+jIo+RdSm9MiHfzazIxwwni/p7+x4eJZuvG1AJwgC4TNQ7NRgsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@discoveryjs/json-ext": "^0.5.0", + "@webpack-cli/configtest": "^2.1.1", + "@webpack-cli/info": "^2.0.2", + "@webpack-cli/serve": "^2.0.5", + "colorette": "^2.0.14", + "commander": "^10.0.1", + "cross-spawn": "^7.0.3", + "envinfo": "^7.7.3", + "fastest-levenshtein": "^1.0.12", + "import-local": "^3.0.2", + "interpret": "^3.1.1", + "rechoir": "^0.8.0", + "webpack-merge": "^5.7.3" + }, + "bin": { + "webpack-cli": "bin/cli.js" + }, + "engines": { + "node": ">=14.15.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "5.x.x" + }, + "peerDependenciesMeta": { + "@webpack-cli/generators": { + "optional": true + }, + "webpack-bundle-analyzer": { + "optional": true + }, + "webpack-dev-server": { + "optional": true + } + } + }, + "node_modules/webpack-cli/node_modules/commander": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz", + "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", + "dev": true, + 
"license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/webpack-cli/node_modules/interpret": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-3.1.1.tgz", + "integrity": "sha512-6xwYfHbajpoF0xLW+iwLkhwgvLoZDfjYfoFNu8ftMoXINzwuymNLd9u/KmwtdT2GbR+/Cz66otEGEVVUHX9QLQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/webpack-cli/node_modules/rechoir": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.8.0.tgz", + "integrity": "sha512-/vxpCXddiX8NGfGO/mTafwjq4aFa/71pvamip0++IQk3zG8cbCj0fifNPrjjF1XMXUne91jL9OoxmdykoEtifQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve": "^1.20.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/webpack-merge": { + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.10.0.tgz", + "integrity": "sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "clone-deep": "^4.0.1", + "flat": "^5.0.2", + "wildcard": "^2.0.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/webpack-sources": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz", + "integrity": "sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "source-list-map": "^2.0.0", + "source-map": "~0.6.1" + } + }, + "node_modules/webpack/node_modules/webpack-sources": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.3.3.tgz", + "integrity": "sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/which": { + 
"version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wildcard": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.1.tgz", + "integrity": "sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": 
"sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/write-file-atomic": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", + "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dev": true, + "license": "ISC", + "optional": true, + "peer": true, + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/xml": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/xml/-/xml-1.0.1.tgz", + "integrity": "sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw==", + "dev": true, + "license": "MIT" + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/yargs": { + "version": 
"17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/python-parser/packages/pyright-internal/package.json b/python-parser/packages/pyright-internal/package.json new file mode 100644 index 00000000..7146ffe0 --- /dev/null +++ b/python-parser/packages/pyright-internal/package.json @@ -0,0 +1,59 @@ +{ + "name": "python-parser", + "displayName": "python-parser", + "description": "Python AST parser based on pyright-internal", + "version": "1.0.0", + "license": "MIT", + "main": "out/src/pythonParser/index.js", + "types": "out/src/pythonParser/index.d.ts", + "files": [ + "out" + ], + "scripts": { + "build": "tsc", + "clean": "shx rm -rf ./dist ./out", + "webpack:testserver": "webpack --config ./src/tests/lsp/webpack.testserver.config.js --mode=development", + "webpack:testserver:watch": "npm run clean && webpack --config 
./src/tests/lsp/webpack.testserver.config.js --mode development --watch --progress", + "test": "npm run webpack:testserver && node --max-old-space-size=8192 --expose-gc ./node_modules/jest/bin/jest --forceExit", + "test:norebuild": "node --max-old-space-size=8192 --expose-gc ./node_modules/jest/bin/jest --forceExit", + "test:coverage": "node --max-old-space-size=8192 --expose-gc ./node_modules/jest/bin/jest --forceExit --reporters=jest-junit --reporters=default --coverage --coverageReporters=cobertura --coverageReporters=html --coverageReporters=json", + "test:imports": "node --max-old-space-size=8192 --expose-gc ./node_modules/jest/bin/jest importResolver.test --forceExit --runInBand" + }, + "dependencies": { + "@yarnpkg/fslib": "2.10.4", + "@yarnpkg/libzip": "2.3.0", + "chalk": "^4.1.2", + "chokidar": "^3.6.0", + "command-line-args": "^5.2.1", + "jsonc-parser": "^3.2.0", + "smol-toml": "^1.4.2", + "source-map-support": "^0.5.21", + "tmp": "^0.2.5", + "vscode-jsonrpc": "^9.0.0-next.8", + "vscode-languageserver": "^10.0.0-next.13", + "vscode-languageserver-protocol": "^3.17.6-next.13", + "vscode-languageserver-textdocument": "^1.0.11", + "vscode-languageserver-types": "^3.17.6-next.6", + "vscode-uri": "^3.1.0" + }, + "devDependencies": { + "@types/command-line-args": "^5.2.3", + "@types/fs-extra": "^11.0.4", + "@types/jest": "^30.0.0", + "@types/lodash": "^4.17.20", + "@types/node": "^22.18.12", + "@types/tmp": "^0.2.6", + "copy-webpack-plugin": "^12.0.2", + "esbuild": "^0.20.0", + "esbuild-loader": "^4.4.0", + "jest": "^30.2.0", + "jest-junit": "^16.0.0", + "shx": "^0.4.0", + "ts-jest": "^29.4.5", + "ts-loader": "^9.5.4", + "typescript": "~5.5.4", + "webpack": "^5.102.1", + "webpack-cli": "^5.1.4", + "word-wrap": "1.2.5" + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/analysis.ts b/python-parser/packages/pyright-internal/src/analyzer/analysis.ts new file mode 100644 index 00000000..7a677420 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/analyzer/analysis.ts @@ -0,0 +1,105 @@ +/* + * analysis.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Various analysis helper types and functions + */ + +import { CancellationToken } from 'vscode-languageserver'; + +import { OperationCanceledException, throwIfCancellationRequested } from '../common/cancellationUtils'; +import { ConfigOptions } from '../common/configOptions'; +import { ConsoleInterface } from '../common/console'; +import * as debug from '../common/debug'; +import { FileDiagnostics } from '../common/diagnosticSink'; +import { Duration } from '../common/timing'; +import { MaxAnalysisTime, Program } from './program'; + +export const nullCallback: AnalysisCompleteCallback = () => { + /* empty */ +}; + +export interface AnalysisResults { + diagnostics: FileDiagnostics[]; + filesInProgram: number; + checkingOnlyOpenFiles: boolean; + requiringAnalysisCount: RequiringAnalysisCount; + fatalErrorOccurred: boolean; + configParseErrorOccurred: boolean; + elapsedTime: number; + error?: Error | undefined; + reason: 'analysis' | 'tracking'; +} + +export interface RequiringAnalysisCount { + files: number; + cells: number; +} + +export type AnalysisCompleteCallback = (results: AnalysisResults) => void; + +export function analyzeProgram( + program: Program, + maxTime: MaxAnalysisTime | undefined, + configOptions: ConfigOptions, + callback: AnalysisCompleteCallback | undefined, + console: ConsoleInterface, + token: CancellationToken +): boolean { + let moreToAnalyze = false; + + callback = callback ?? nullCallback; + + try { + throwIfCancellationRequested(token); + + const duration = new Duration(); + moreToAnalyze = program.analyze(maxTime, token); + + const requiringAnalysisCount = program.getFilesToAnalyzeCount(); + + // If we're using command-line mode, the maxTime will be undefined, and we'll + // want to report all diagnostics rather than just the ones that have changed. 
+ const reportDiagnosticDeltasOnly = maxTime !== undefined; + + const diagnostics = program.getDiagnostics(configOptions, reportDiagnosticDeltasOnly); + const diagnosticFileCount = diagnostics.length; + const elapsedTime = duration.getDurationInSeconds(); + + // Report any diagnostics or completion. + if (diagnosticFileCount > 0 || !moreToAnalyze) { + callback({ + diagnostics, + filesInProgram: program.getFileCount(), + requiringAnalysisCount: requiringAnalysisCount, + checkingOnlyOpenFiles: program.isCheckingOnlyOpenFiles(), + fatalErrorOccurred: false, + configParseErrorOccurred: false, + elapsedTime, + reason: 'analysis', + }); + } + } catch (e: any) { + if (OperationCanceledException.is(e)) { + return false; + } + + const message = debug.getErrorString(e); + console.error('Error performing analysis: ' + message); + + callback({ + diagnostics: [], + filesInProgram: 0, + requiringAnalysisCount: { files: 0, cells: 0 }, + checkingOnlyOpenFiles: true, + fatalErrorOccurred: true, + configParseErrorOccurred: false, + elapsedTime: 0, + error: debug.getSerializableError(e), + reason: 'analysis', + }); + } + + return moreToAnalyze; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/analyzerFileInfo.ts b/python-parser/packages/pyright-internal/src/analyzer/analyzerFileInfo.ts new file mode 100644 index 00000000..2ff6be55 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/analyzerFileInfo.ts @@ -0,0 +1,87 @@ +/* + * analyzerFileInfo.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Information associated with a source file that is used + * by the binder and checker. 
+ */ + +import { DiagnosticRuleSet, ExecutionEnvironment } from '../common/configOptions'; +import { TextRangeDiagnosticSink } from '../common/diagnosticSink'; +import { PythonVersion, pythonVersion3_14 } from '../common/pythonVersion'; +import { TextRange } from '../common/textRange'; +import { TextRangeCollection } from '../common/textRangeCollection'; +import { Uri } from '../common/uri/uri'; +import { Scope } from './scope'; +import { IPythonMode } from './sourceFile'; +import { SymbolTable } from './symbol'; + +// Maps import paths to the symbol table for the imported module. +export interface AbsoluteModuleDescriptor { + importingFileUri: Uri; + nameParts: string[]; +} + +export interface LookupImportOptions { + skipFileNeededCheck: boolean; + skipParsing?: boolean; +} + +export type ImportLookup = ( + fileUriOrModule: Uri | AbsoluteModuleDescriptor, + options?: LookupImportOptions +) => ImportLookupResult | undefined; + +export interface ImportLookupResult { + symbolTable: SymbolTable; + dunderAllNames: string[] | undefined; + usesUnsupportedDunderAllForm: boolean; + docString: string | undefined; + isInPyTypedPackage: boolean; +} + +export interface AnalyzerFileInfo { + importLookup: ImportLookup; + futureImports: Set; + builtinsScope?: Scope | undefined; + diagnosticSink: TextRangeDiagnosticSink; + executionEnvironment: ExecutionEnvironment; + diagnosticRuleSet: DiagnosticRuleSet; + lines: TextRangeCollection; + typingSymbolAliases: Map; + definedConstants: Map; + fileId: string; + fileUri: Uri; + moduleName: string; + isStubFile: boolean; + isTypingStubFile: boolean; + isTypingExtensionsStubFile: boolean; + isTypeshedStubFile: boolean; + isBuiltInStubFile: boolean; + isInPyTypedPackage: boolean; + ipythonMode: IPythonMode; + accessedSymbolSet: Set; +} + +export function isAnnotationEvaluationPostponed(fileInfo: AnalyzerFileInfo) { + if (fileInfo.isStubFile) { + return true; + } + + if (fileInfo.futureImports.has('annotations')) { + return true; + } + + // 
As of May 2023, the Python steering council has approved PEP 649 for Python 3.13. + // It was tentatively approved for 3.12, but they decided to defer until the next + // release to reduce the risk. As of May 8, 2024, the change did not make it into + // Python 3.13beta1, so it has been deferred to Python 3.14. + // https://discuss.python.org/t/pep-649-deferred-evaluation-of-annotations-tentatively-accepted/21331 + if (PythonVersion.isGreaterOrEqualTo(fileInfo.executionEnvironment.pythonVersion, pythonVersion3_14)) { + return true; + } + + return false; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/analyzerNodeInfo.ts b/python-parser/packages/pyright-internal/src/analyzer/analyzerNodeInfo.ts new file mode 100644 index 00000000..16e34f79 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/analyzerNodeInfo.ts @@ -0,0 +1,233 @@ +/* + * analyzerNodeInfo.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Defines objects that hang off the parse nodes in the parse tree. + * It contains information collected during the binder phase that + * can be used for later analysis steps or for language services + * (e.g. hover information). 
+ */ + +import { + ClassNode, + ComprehensionNode, + ExecutionScopeNode, + FunctionNode, + LambdaNode, + ModuleNode, + ParseNode, + ParseNodeType, + StringNode, +} from '../parser/parseNodes'; +import { AnalyzerFileInfo } from './analyzerFileInfo'; +import { FlowFlags, FlowNode } from './codeFlowTypes'; +import { Declaration } from './declaration'; +import { ImportResult } from './importResult'; +import { Scope } from './scope'; + +export interface DunderAllInfo { + names: string[]; + stringNodes: StringNode[]; + usesUnsupportedDunderAllForm: boolean; +} + +interface AnalyzerNodeInfo { + //--------------------------------------------------------------- + // Set as part of import resolution + + // Information about an import; used for import nodes only. + importInfo?: ImportResult; + + //--------------------------------------------------------------- + // Set by Binder + + // Scope for nodes that introduce scopes: modules, functions, + // classes, lambdas, and list comprehensions. A scope is used + // to store symbol names and their associated types and declarations. + scope?: Scope; + + // Declaration (for functions and classes only). + declaration?: Declaration; + + // Control flow information for this node. + flowNode?: FlowNode; + + // Control flow information at the end of this node. + afterFlowNode?: FlowNode; + + // Info about the source file, used only on module nodes. + fileInfo?: AnalyzerFileInfo; + + // Set of expressions used within an execution scope (module, + // function or lambda) that requires code flow analysis. + codeFlowExpressions?: Set; + + // Number that represents the complexity of a function's code + // flow graph. + codeFlowComplexity?: number; + + // List of __all__ symbols in the module. + dunderAllInfo?: DunderAllInfo | undefined; +} + +export type ScopedNode = ModuleNode | ClassNode | FunctionNode | LambdaNode | ComprehensionNode; + +// Cleans out all fields that are added by the analyzer phases +// (after the post-parse walker). 
+export function cleanNodeAnalysisInfo(node: ParseNode) { + const info = getAnalyzerInfo(node); + if (info?.scope) { + info.scope = undefined; + } + + if (info?.declaration) { + info.declaration = undefined; + } + + if (info?.flowNode) { + info.flowNode = undefined; + } + + if (info?.afterFlowNode) { + info.afterFlowNode = undefined; + } + + if (info?.fileInfo) { + info.fileInfo = undefined; + } + + if (info?.codeFlowExpressions) { + info.codeFlowExpressions = undefined; + } + + if (info?.codeFlowComplexity) { + info.codeFlowComplexity = undefined; + } + + if (info?.dunderAllInfo) { + info.dunderAllInfo = undefined; + } +} + +export function getImportInfo(node: ParseNode): ImportResult | undefined { + const info = getAnalyzerInfo(node); + return info?.importInfo; +} + +export function setImportInfo(node: ParseNode, importInfo: ImportResult) { + const info = getAnalyzerInfoForWrite(node); + info.importInfo = importInfo; +} + +export function getScope(node: ParseNode): Scope | undefined { + const info = getAnalyzerInfo(node); + return info?.scope; +} + +export function setScope(node: ParseNode, scope: Scope) { + const info = getAnalyzerInfoForWrite(node); + info.scope = scope; +} + +export function getDeclaration(node: ParseNode): Declaration | undefined { + const info = getAnalyzerInfo(node); + return info?.declaration; +} + +export function setDeclaration(node: ParseNode, decl: Declaration) { + const info = getAnalyzerInfoForWrite(node); + info.declaration = decl; +} + +export function getFlowNode(node: ParseNode): FlowNode | undefined { + const info = getAnalyzerInfo(node); + return info?.flowNode; +} + +export function setFlowNode(node: ParseNode, flowNode: FlowNode) { + const info = getAnalyzerInfoForWrite(node); + info.flowNode = flowNode; +} + +export function getAfterFlowNode(node: ParseNode): FlowNode | undefined { + const info = getAnalyzerInfo(node); + return info?.afterFlowNode; +} + +export function setAfterFlowNode(node: ParseNode, flowNode: FlowNode) { 
+ const info = getAnalyzerInfoForWrite(node); + info.afterFlowNode = flowNode; +} + +export function getFileInfo(node: ParseNode): AnalyzerFileInfo { + while (node.nodeType !== ParseNodeType.Module) { + node = node.parent!; + } + const info = getAnalyzerInfo(node); + return info!.fileInfo!; +} + +export function setFileInfo(node: ModuleNode, fileInfo: AnalyzerFileInfo) { + const info = getAnalyzerInfoForWrite(node); + info.fileInfo = fileInfo; +} + +export function getCodeFlowExpressions(node: ExecutionScopeNode): Set | undefined { + const info = getAnalyzerInfo(node); + return info?.codeFlowExpressions; +} + +export function setCodeFlowExpressions(node: ExecutionScopeNode, expressions: Set) { + const info = getAnalyzerInfoForWrite(node); + info.codeFlowExpressions = expressions; +} + +export function getCodeFlowComplexity(node: ExecutionScopeNode) { + const info = getAnalyzerInfo(node); + return info?.codeFlowComplexity ?? 0; +} + +export function setCodeFlowComplexity(node: ExecutionScopeNode, complexity: number) { + const info = getAnalyzerInfoForWrite(node); + info.codeFlowComplexity = complexity; +} + +export function getDunderAllInfo(node: ModuleNode): DunderAllInfo | undefined { + const info = getAnalyzerInfo(node); + return info?.dunderAllInfo; +} + +export function setDunderAllInfo(node: ModuleNode, names: DunderAllInfo | undefined) { + const info = getAnalyzerInfoForWrite(node); + info.dunderAllInfo = names; +} + +export function isCodeUnreachable(node: ParseNode): boolean { + let curNode: ParseNode | undefined = node; + + // Walk up the parse tree until we find a node with + // an associated flow node. 
+ while (curNode) { + const flowNode = getFlowNode(curNode); + if (flowNode) { + return (flowNode.flags & (FlowFlags.UnreachableStaticCondition | FlowFlags.UnreachableStructural)) !== 0; + } + curNode = curNode.parent; + } + + return false; +} + +function getAnalyzerInfo(node: ParseNode): AnalyzerNodeInfo | undefined { + return node.a as AnalyzerNodeInfo | undefined; +} + +function getAnalyzerInfoForWrite(node: ParseNode): AnalyzerNodeInfo { + let info = node.a as AnalyzerNodeInfo | undefined; + if (!info) { + node.a = info = {}; + } + return info; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/backgroundAnalysisProgram.ts b/python-parser/packages/pyright-internal/src/analyzer/backgroundAnalysisProgram.ts new file mode 100644 index 00000000..47d7a7a4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/backgroundAnalysisProgram.ts @@ -0,0 +1,312 @@ +/* + * BackgroundAnalysisProgram.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Applies operations to both the foreground program and a background + * analysis running in a worker process. 
+ */ + +import { CancellationToken } from 'vscode-languageserver'; + +import { IBackgroundAnalysis, RefreshOptions } from '../backgroundAnalysisBase'; +import { ConfigOptions, ExecutionEnvironment } from '../common/configOptions'; +import { Diagnostic } from '../common/diagnostic'; +import { FileDiagnostics } from '../common/diagnosticSink'; +import { ServiceProvider } from '../common/serviceProvider'; +import '../common/serviceProviderExtensions'; +import { Range } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { AnalysisCompleteCallback, analyzeProgram } from './analysis'; +import { ImportResolver } from './importResolver'; +import { MaxAnalysisTime, OpenFileOptions, Program } from './program'; + +export enum InvalidatedReason { + Reanalyzed, + SourceWatcherChanged, + LibraryWatcherChanged, + LibraryWatcherContentOnlyChanged, +} + +export class BackgroundAnalysisProgram { + private _program: Program; + private _disposed = false; + private _onAnalysisCompletion: AnalysisCompleteCallback | undefined; + private _preEditAnalysis: IBackgroundAnalysis | undefined; + + constructor( + protected readonly serviceId: string, + private readonly _serviceProvider: ServiceProvider, + private _configOptions: ConfigOptions, + private _importResolver: ImportResolver, + private _backgroundAnalysis?: IBackgroundAnalysis, + private readonly _maxAnalysisTime?: MaxAnalysisTime, + private readonly _disableChecker?: boolean, + program?: Program + ) { + this._program = + program ?? 
+ new Program( + this.importResolver, + this.configOptions, + this._serviceProvider, + undefined, + this._disableChecker, + serviceId + ); + this._backgroundAnalysis?.setProgramView(this._program); + } + + get serviceProvider() { + return this._serviceProvider; + } + + get configOptions() { + return this._configOptions; + } + + get importResolver() { + return this._importResolver; + } + + get program() { + return this._program; + } + + get host() { + return this._importResolver.host; + } + + get backgroundAnalysis() { + return this._backgroundAnalysis; + } + + hasSourceFile(fileUri: Uri): boolean { + return !!this._program.getSourceFile(fileUri); + } + + setConfigOptions(configOptions: ConfigOptions) { + this._configOptions = configOptions; + this._backgroundAnalysis?.setConfigOptions(configOptions); + this._program.setConfigOptions(configOptions); + } + + setImportResolver(importResolver: ImportResolver) { + this._importResolver = importResolver; + this._backgroundAnalysis?.setImportResolver(importResolver); + + this._program.setImportResolver(importResolver); + this.configOptions.getExecutionEnvironments().forEach((e) => this._ensurePartialStubPackages(e)); + } + + setTrackedFiles(fileUris: Uri[]) { + this._backgroundAnalysis?.setTrackedFiles(fileUris); + const diagnostics = this._program.setTrackedFiles(fileUris); + this._reportDiagnosticsForRemovedFiles(diagnostics); + } + + setAllowedThirdPartyImports(importNames: string[]) { + this._backgroundAnalysis?.setAllowedThirdPartyImports(importNames); + this._program.setAllowedThirdPartyImports(importNames); + } + + setFileOpened(fileUri: Uri, version: number | null, contents: string, options: OpenFileOptions) { + this._backgroundAnalysis?.setFileOpened(fileUri, version, contents, options); + this._program.setFileOpened(fileUri, version, contents, options); + } + + getChainedUri(fileUri: Uri): Uri | undefined { + return this._program.getChainedUri(fileUri); + } + + updateChainedUri(fileUri: Uri, chainedUri: Uri | 
undefined) { + this._backgroundAnalysis?.updateChainedUri(fileUri, chainedUri); + this._program.updateChainedUri(fileUri, chainedUri); + } + + updateOpenFileContents(uri: Uri, version: number | null, contents: string, options: OpenFileOptions) { + this._backgroundAnalysis?.setFileOpened(uri, version, contents, options); + this._program.setFileOpened(uri, version, contents, options); + this.markFilesDirty([uri], /* evenIfContentsAreSame */ true); + } + + setFileClosed(fileUri: Uri, isTracked?: boolean) { + this._backgroundAnalysis?.setFileClosed(fileUri, isTracked); + const diagnostics = this._program.setFileClosed(fileUri, isTracked); + this._reportDiagnosticsForRemovedFiles(diagnostics); + } + + addInterimFile(fileUri: Uri) { + this._backgroundAnalysis?.addInterimFile(fileUri); + this._program.addInterimFile(fileUri); + } + + markAllFilesDirty(evenIfContentsAreSame: boolean) { + this._backgroundAnalysis?.markAllFilesDirty(evenIfContentsAreSame); + this._program.markAllFilesDirty(evenIfContentsAreSame); + } + + markFilesDirty(fileUris: Uri[], evenIfContentsAreSame: boolean) { + this._backgroundAnalysis?.markFilesDirty(fileUris, evenIfContentsAreSame); + this._program.markFilesDirty(fileUris, evenIfContentsAreSame); + } + + setCompletionCallback(callback?: AnalysisCompleteCallback) { + this._onAnalysisCompletion = callback; + this._backgroundAnalysis?.setCompletionCallback(callback); + } + + startAnalysis(token: CancellationToken): boolean { + if (this._backgroundAnalysis) { + this._backgroundAnalysis.startAnalysis(token); + return false; + } + + return analyzeProgram( + this._program, + this._maxAnalysisTime, + this._configOptions, + this._onAnalysisCompletion, + this._serviceProvider.console(), + token + ); + } + + async analyzeFile(fileUri: Uri, token: CancellationToken): Promise { + if (this._backgroundAnalysis) { + return this._backgroundAnalysis.analyzeFile(fileUri, token); + } + + return this._program.analyzeFile(fileUri, token); + } + + async 
analyzeFileAndGetDiagnostics(fileUri: Uri, token: CancellationToken): Promise { + if (this._backgroundAnalysis) { + return this._backgroundAnalysis.analyzeFileAndGetDiagnostics(fileUri, token); + } + + return this._program.analyzeFileAndGetDiagnostics(fileUri, token); + } + + libraryUpdated(): boolean { + return false; + } + + async getDiagnosticsForRange(fileUri: Uri, range: Range, token: CancellationToken): Promise { + if (this._backgroundAnalysis) { + return this._backgroundAnalysis.getDiagnosticsForRange(fileUri, range, token); + } + + return this._program.getDiagnosticsForRange(fileUri, range); + } + + async writeTypeStub( + targetImportUri: Uri, + targetIsSingleFile: boolean, + stubUri: Uri, + token: CancellationToken + ): Promise { + if (this._backgroundAnalysis) { + return this._backgroundAnalysis.writeTypeStub(targetImportUri, targetIsSingleFile, stubUri, token); + } + + analyzeProgram( + this._program, + /* maxTime */ undefined, + this._configOptions, + this._onAnalysisCompletion, + this._serviceProvider.console(), + token + ); + return this._program.writeTypeStub(targetImportUri, targetIsSingleFile, stubUri, token); + } + + invalidateAndForceReanalysis(reason: InvalidatedReason, refreshOptions?: RefreshOptions) { + this._backgroundAnalysis?.invalidateAndForceReanalysis(reason); + + // Make sure the import resolver doesn't have invalid + // cached entries. + this._importResolver.invalidateCache(); + + // If we have specific changed file URIs and it's a content-only change, mark only those files dirty. + // Otherwise, mark all files with one or more errors dirty. 
+ if ( + refreshOptions?.changedFileUris && + refreshOptions.changedFileUris.size > 0 && + reason === InvalidatedReason.LibraryWatcherContentOnlyChanged + ) { + // Convert UriMap keys to array for markFilesDirty + const changedFileUris = Array.from(refreshOptions.changedFileUris.keys()); + this.markFilesDirty(changedFileUris, /* evenIfContentsAreSame */ true); + } else { + // Mark all files with one or more errors dirty. + this._program.markAllFilesDirty(/* evenIfContentsAreSame */ true); + } + } + + restart() { + this._backgroundAnalysis?.restart(); + } + + dispose() { + if (this._disposed) { + return; + } + + this._disposed = true; + this._program.dispose(); + this._backgroundAnalysis?.shutdown(); + this._backgroundAnalysis?.dispose(); + } + + enterEditMode() { + // Turn off analysis while in edit mode. + this._preEditAnalysis = this._backgroundAnalysis; + this._backgroundAnalysis = undefined; + + // Forward this request to the program. + this._program.enterEditMode(); + } + + exitEditMode() { + this._backgroundAnalysis = this._preEditAnalysis; + this._preEditAnalysis = undefined; + return this._program.exitEditMode(); + } + + private _ensurePartialStubPackages(execEnv: ExecutionEnvironment) { + this._backgroundAnalysis?.ensurePartialStubPackages(execEnv.root?.toString()); + return this._importResolver.ensurePartialStubPackages(execEnv); + } + + private _reportDiagnosticsForRemovedFiles(fileDiags: FileDiagnostics[]) { + if (fileDiags.length === 0) { + return; + } + + // If analysis is running in the foreground process, report any + // diagnostics that resulted from the close operation (used to + // clear diagnostics that are no longer of interest). 
+ if (!this._backgroundAnalysis && this._onAnalysisCompletion) { + this._onAnalysisCompletion({ + diagnostics: fileDiags, + filesInProgram: this._program.getFileCount(), + requiringAnalysisCount: this._program.getFilesToAnalyzeCount(), + checkingOnlyOpenFiles: this._program.isCheckingOnlyOpenFiles(), + fatalErrorOccurred: false, + configParseErrorOccurred: false, + elapsedTime: 0, + reason: 'tracking', + }); + } + } +} + +export type BackgroundAnalysisProgramFactory = ( + serviceId: string, + serviceProvider: ServiceProvider, + configOptions: ConfigOptions, + importResolver: ImportResolver, + backgroundAnalysis?: IBackgroundAnalysis, + maxAnalysisTime?: MaxAnalysisTime +) => BackgroundAnalysisProgram; diff --git a/python-parser/packages/pyright-internal/src/analyzer/binder.ts b/python-parser/packages/pyright-internal/src/analyzer/binder.ts new file mode 100644 index 00000000..165ffd6f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/binder.ts @@ -0,0 +1,4437 @@ +/* + * binder.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * A parse tree walker that performs basic name binding (creation of + * scopes and associated symbol tables). + * The binder walks the parse tree by scopes starting at the module + * level. When a new scope is detected, it is pushed onto a list and + * walked separately at a later time. (The exception is a class scope, + * which is immediately walked.) Walking the tree in this manner + * simulates the order in which execution normally occurs in a Python + * file. The binder attempts to statically detect runtime errors that + * would be reported by the python interpreter when executing the code. + * This binder doesn't perform any static type checking. 
+ */ + +import { Commands } from '../commands/commands'; +import { DiagnosticLevel } from '../common/configOptions'; +import { assert, assertNever, fail } from '../common/debug'; +import { CreateTypeStubFileAction, Diagnostic } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { stripFileExtension } from '../common/pathUtils'; +import { convertTextRangeToRange } from '../common/positionUtils'; +import { TextRange, getEmptyRange } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { LocMessage } from '../localization/localize'; +import { + ArgCategory, + AssertNode, + AssignmentExpressionNode, + AssignmentNode, + AugmentedAssignmentNode, + AwaitNode, + BinaryOperationNode, + BreakNode, + CallNode, + CaseNode, + ClassNode, + ComprehensionNode, + ContinueNode, + DelNode, + ExceptNode, + ExpressionNode, + ForNode, + FunctionNode, + GlobalNode, + IfNode, + ImportAsNode, + ImportFromNode, + IndexNode, + LambdaNode, + MatchNode, + MemberAccessNode, + ModuleNameNode, + ModuleNode, + NameNode, + NonlocalNode, + ParseNode, + ParseNodeType, + PatternAsNode, + PatternCaptureNode, + PatternMappingExpandEntryNode, + RaiseNode, + ReturnNode, + StatementNode, + StringListNode, + StringNode, + SuiteNode, + TernaryNode, + TryNode, + TypeAliasNode, + TypeAnnotationNode, + TypeParameterListNode, + UnaryOperationNode, + WhileNode, + WithNode, + YieldFromNode, + YieldNode, +} from '../parser/parseNodes'; +import { KeywordType, OperatorType } from '../parser/tokenizerTypes'; +import { AnalyzerFileInfo } from './analyzerFileInfo'; +import * as AnalyzerNodeInfo from './analyzerNodeInfo'; +import { + CodeFlowReferenceExpressionNode, + FlowAssignment, + FlowBranchLabel, + FlowCall, + FlowCondition, + FlowExhaustedMatch, + FlowFlags, + FlowLabel, + FlowNarrowForPattern, + FlowNode, + FlowPostContextManagerLabel, + FlowPostFinally, + FlowPreFinallyGate, + FlowVariableAnnotation, + FlowWildcardImport, + 
createKeyForReference,
+    getUniqueFlowNodeId,
+    isCodeFlowSupportedForReference,
+    wildcardImportReferenceKey,
+} from './codeFlowTypes';
+import {
+    AliasDeclaration,
+    ClassDeclaration,
+    DeclarationType,
+    FunctionDeclaration,
+    IntrinsicType,
+    ModuleLoaderActions,
+    ParamDeclaration,
+    SpecialBuiltInClassDeclaration,
+    TypeAliasDeclaration,
+    TypeParamDeclaration,
+    UnresolvedModuleMarker,
+    VariableDeclaration,
+} from './declaration';
+import { ImplicitImport, ImportResult, ImportType } from './importResult';
+import { getWildcardImportNames } from './importStatementUtils';
+import * as ParseTreeUtils from './parseTreeUtils';
+import { ParseTreeWalker } from './parseTreeWalker';
+import { NameBindingType, Scope, ScopeType } from './scope';
+import * as StaticExpressions from './staticExpressions';
+import { Symbol, SymbolFlags, indeterminateSymbolId } from './symbol';
+import { isConstantName, isPrivateName, isPrivateOrProtectedName } from './symbolNameUtils';
+
+interface MemberAccessInfo {
+    classNode: ClassNode;
+    methodNode: FunctionNode;
+    classScope: Scope;
+    isInstanceMember: boolean;
+}
+
+interface DeferredBindingTask {
+    scope: Scope;
+    codeFlowExpressions: Set<string>;
+    callback: () => void;
+}
+
+interface FinalInfo {
+    isFinal: boolean;
+    finalTypeNode: ExpressionNode | undefined;
+}
+
+interface ClassVarInfo {
+    isClassVar: boolean;
+    classVarTypeNode: ExpressionNode | undefined;
+}
+
+interface NarrowExprOptions {
+    filterForNeverNarrowing?: boolean;
+    isComplexExpression?: boolean;
+    allowDiscriminatedNarrowing?: boolean;
+}
+
+// For each flow node within an execution context, we'll add a small
+// amount to the complexity factor. Without this, the complexity
+// calculation fails to take into account large numbers of non-cyclical
+// flow nodes. This number is somewhat arbitrary and is tuned empirically.
+const flowNodeComplexityContribution = 0.025;
+
+export class Binder extends ParseTreeWalker {
+    private readonly _fileInfo: AnalyzerFileInfo;
+
+    // A queue of deferred analysis operations.
+    private _deferredBindingTasks: DeferredBindingTask[] = [];
+
+    // The current scope in effect.
+    private _currentScope!: Scope;
+
+    // Current control-flow node.
+    private _currentFlowNode: FlowNode | undefined;
+
+    // Current target function declaration, if currently binding
+    // a function. This allows return and yield statements to be
+    // added to the function declaration.
+    private _targetFunctionDeclaration: FunctionDeclaration | undefined;
+
+    // Flow node label that is the target of a "break" statement.
+    private _currentBreakTarget: FlowLabel | undefined;
+
+    // Flow node label that is the target of a "continue" statement.
+    private _currentContinueTarget: FlowLabel | undefined;
+
+    // Flow nodes used for if/else and while/else statements.
+    private _currentTrueTarget: FlowLabel | undefined;
+    private _currentFalseTarget: FlowLabel | undefined;
+
+    // Flow nodes used within try blocks.
+    private _currentExceptTargets: FlowLabel[] = [];
+
+    // Flow nodes used within try/finally flows.
+    private _finallyTargets: FlowLabel[] = [];
+
+    // Flow nodes used for return statements.
+    private _currentReturnTarget: FlowLabel | undefined;
+
+    // Set of expressions within the current execution scope
+    // and require code flow analysis to resolve.
+    private _currentScopeCodeFlowExpressions: Set<string> | undefined;
+
+    // If we're actively binding a match statement, this is the current
+    // match expression.
+    private _currentMatchSubjExpr: ExpressionNode | undefined;
+
+    // Aliases of "typing" and "typing_extensions".
+    private _typingImportAliases: string[] = [];
+
+    // Aliases of "sys".
+    private _sysImportAliases: string[] = [];
+
+    // Aliases of "dataclasses".
+    private _dataclassesImportAliases: string[] = [];
+
+    // Map of imports of specific symbols imported from "typing" and "typing_extensions"
+    // and the names they alias to.
+    private _typingSymbolAliases: Map<string, string> = new Map<string, string>();
+
+    // Map of imports of specific symbols imported from "dataclasses"
+    // and the names they alias to.
+    private _dataclassesSymbolAliases: Map<string, string> = new Map<string, string>();
+
+    // List of names statically assigned to __all__ symbol.
+    private _dunderAllNames: string[] | undefined;
+
+    // List of string nodes associated with the "__all__" symbol.
+    private _dunderAllStringNodes: StringNode[] = [];
+
+    // One or more statements are manipulating __all__ in a manner that a
+    // static analyzer doesn't understand.
+    private _usesUnsupportedDunderAllForm = false;
+
+    // Are we currently binding code located within an except block?
+    private _isInExceptSuite = false;
+
+    // Are we currently walking the type arguments to an Annotated type annotation?
+    private _isInAnnotatedAnnotation = false;
+
+    // A list of names assigned to __slots__ within a class.
+    private _dunderSlotsEntries: StringListNode[] | undefined;
+
+    // Flow node that is used for unreachable code.
+    private static _unreachableStaticConditionFlowNode: FlowNode = {
+        flags: FlowFlags.UnreachableStaticCondition,
+        id: getUniqueFlowNodeId(),
+    };
+
+    private static _unreachableStructuralFlowNode: FlowNode = {
+        flags: FlowFlags.UnreachableStructural,
+        id: getUniqueFlowNodeId(),
+    };
+
+    // Map of symbols at the module level that may be externally
+    // hidden depending on whether they are listed in the __all__ list.
+    private _potentialHiddenSymbols = new Map<string, Symbol>();
+
+    // Map of symbols imported via wildcard import in a py.typed (non-stub)
+    // module that should be treated as private if this module defines __all__
+    // and the symbol is not listed there.
+    private _potentialWildcardReexportSymbols = new Map<string, Symbol>();
+
+    // Map of symbols at the module level that may be private depending
+    // on whether they are listed in the __all__ list.
+    private _potentialPrivateSymbols = new Map<string, Symbol>();
+
+    // Estimates the overall complexity of the code flow graph for
+    // the current function.
+    private _codeFlowComplexity = 0;
+
+    constructor(fileInfo: AnalyzerFileInfo, private _moduleSymbolOnly = false) {
+        super();
+
+        this._fileInfo = fileInfo;
+    }
+
+    bindModule(node: ModuleNode): void {
+        // We'll assume that if there is no builtins scope provided, we must be
+        // binding the builtins module itself.
+        const isBuiltInModule = this._fileInfo.builtinsScope === undefined;
+
+        this._addTypingImportAliasesFromBuiltinsScope();
+
+        this._createNewScope(
+            isBuiltInModule ? ScopeType.Builtin : ScopeType.Module,
+            this._fileInfo.builtinsScope,
+            /* proxyScope */ undefined,
+            () => {
+                AnalyzerNodeInfo.setScope(node, this._currentScope);
+                AnalyzerNodeInfo.setFlowNode(node, this._currentFlowNode!);
+
+                // Bind implicit names.
+                // List taken from https://docs.python.org/3/reference/import.html#__name__
+                this._addImplicitSymbolToCurrentScope('__name__', node, 'str');
+                this._addImplicitSymbolToCurrentScope('__loader__', node, 'Any');
+                this._addImplicitSymbolToCurrentScope('__package__', node, 'str | None');
+                this._addImplicitSymbolToCurrentScope('__spec__', node, 'Any');
+                this._addImplicitSymbolToCurrentScope('__path__', node, 'MutableSequence[str]');
+                this._addImplicitSymbolToCurrentScope('__file__', node, 'str');
+                this._addImplicitSymbolToCurrentScope('__cached__', node, 'str');
+                this._addImplicitSymbolToCurrentScope('__annotations__', node, 'dict[str, Any]');
+                this._addImplicitSymbolToCurrentScope('__dict__', node, 'dict[str, Any]');
+                this._addImplicitSymbolToCurrentScope('__builtins__', node, 'Any');
+                this._addImplicitSymbolToCurrentScope('__doc__', node, 'str | None');
+
+                // Create a start node for the module.
+ this._currentFlowNode = this._createStartFlowNode(); + + this._walkStatementsAndReportUnreachable(node.d.statements); + + // Associate the code flow node at the end of the module with the module. + AnalyzerNodeInfo.setAfterFlowNode(node, this._currentFlowNode); + + AnalyzerNodeInfo.setCodeFlowExpressions(node, this._currentScopeCodeFlowExpressions!); + AnalyzerNodeInfo.setCodeFlowComplexity(node, this._codeFlowComplexity); + } + ); + + // Perform all analysis that was deferred during the first pass. + this._bindDeferred(); + + // Use the __all__ list to determine whether any potential private + // symbols should be made externally hidden or private. + this._potentialHiddenSymbols.forEach((symbol, name) => { + if (!this._dunderAllNames?.some((sym) => sym === name)) { + if (this._fileInfo.isStubFile) { + symbol.setIsExternallyHidden(); + } else { + symbol.setPrivatePyTypedImport(); + } + } + }); + + // Wildcard imports are considered a re-export form, but if this module defines + // __all__, that list determines the public interface and should restrict which + // wildcard-imported symbols are exposed. + this._potentialWildcardReexportSymbols.forEach((symbol, name) => { + if (this._dunderAllNames && !this._dunderAllNames.some((sym) => sym === name)) { + symbol.setPrivatePyTypedImport(); + } + }); + + this._potentialPrivateSymbols.forEach((symbol, name) => { + if (!this._dunderAllNames?.some((sym) => sym === name)) { + symbol.setIsPrivateMember(); + } + }); + + if (this._dunderAllNames) { + AnalyzerNodeInfo.setDunderAllInfo(node, { + names: this._dunderAllNames, + stringNodes: this._dunderAllStringNodes, + usesUnsupportedDunderAllForm: this._usesUnsupportedDunderAllForm, + }); + } else { + AnalyzerNodeInfo.setDunderAllInfo(node, /* names */ undefined); + } + + // Set __all__ flags on the module symbols. 
+ const scope = AnalyzerNodeInfo.getScope(node); + if (scope && this._dunderAllNames) { + for (const name of this._dunderAllNames) { + scope.symbolTable.get(name)?.setIsInDunderAll(); + } + } + } + + override visitModule(node: ModuleNode): boolean { + // Tree walking should start with the children of + // the node, so we should never get here. + fail('We should never get here'); + return false; + } + + override visitSuite(node: SuiteNode): boolean { + this._walkStatementsAndReportUnreachable(node.d.statements); + return false; + } + + override visitModuleName(node: ModuleNameNode): boolean { + const importResult = AnalyzerNodeInfo.getImportInfo(node); + assert(importResult !== undefined); + + if (importResult.isNativeLib) { + return true; + } + + if (!importResult.isImportFound && importResult.importName) { + this._addDiagnostic( + DiagnosticRule.reportMissingImports, + LocMessage.importResolveFailure().format({ + importName: importResult.importName, + venv: this._fileInfo.executionEnvironment.name, + }), + node + ); + return true; + } + + // See if a source file was found but it's not part of a py.typed + // library and no type stub is found. + let reportStubMissing = false; + if ( + !importResult.isStubFile && + importResult.importType === ImportType.ThirdParty && + !importResult.pyTypedInfo + ) { + reportStubMissing = true; + + // If the import is a namespace package, it's possible that all of + // the targeted import symbols are py.typed submodules. In this case, + // suppress the missing stub diagnostic. 
+ if (importResult.isNamespacePackage && node.parent?.nodeType === ParseNodeType.ImportFrom) { + if ( + node.parent.d.imports.every((importAs) => { + const implicitImport = importResult.filteredImplicitImports?.get(importAs.d.name.d.value); + return !!implicitImport?.pyTypedInfo; + }) + ) { + reportStubMissing = false; + } + } + } + + if (reportStubMissing) { + const diagnostic = this._addDiagnostic( + DiagnosticRule.reportMissingTypeStubs, + LocMessage.stubFileMissing().format({ importName: importResult.importName }), + node + ); + if (diagnostic) { + // Add a diagnostic action for resolving this diagnostic. + const createTypeStubAction: CreateTypeStubFileAction = { + action: Commands.createTypeStub, + moduleName: importResult.importName, + }; + diagnostic.addAction(createTypeStubAction); + } + } + + return true; + } + + override visitClass(node: ClassNode): boolean { + this.walkMultiple(node.d.decorators); + + const classDeclaration: ClassDeclaration = { + type: DeclarationType.Class, + node, + uri: this._fileInfo.fileUri, + range: convertTextRangeToRange(node.d.name, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + }; + + const symbol = this._bindNameToScope(this._currentScope, node.d.name); + if (symbol) { + symbol.addDeclaration(classDeclaration); + } + + // Stash the declaration in the parse node for later access. + AnalyzerNodeInfo.setDeclaration(node, classDeclaration); + + let typeParamScope: Scope | undefined; + if (node.d.typeParams) { + this.walk(node.d.typeParams); + typeParamScope = AnalyzerNodeInfo.getScope(node.d.typeParams); + } + + this.walkMultiple(node.d.arguments); + + this._createNewScope( + ScopeType.Class, + typeParamScope ?? 
this._getNonClassParentScope(), + /* proxyScope */ undefined, + () => { + AnalyzerNodeInfo.setScope(node, this._currentScope); + + this._addImplicitSymbolToCurrentScope('__doc__', node, 'str | None'); + this._addImplicitSymbolToCurrentScope('__module__', node, 'str'); + this._addImplicitSymbolToCurrentScope('__qualname__', node, 'str'); + + this._dunderSlotsEntries = undefined; + if (!this._moduleSymbolOnly) { + // Analyze the suite. + this.walk(node.d.suite); + } + + if (this._dunderSlotsEntries) { + this._addSlotsToCurrentScope(this._dunderSlotsEntries); + } + this._dunderSlotsEntries = undefined; + } + ); + + this._createAssignmentTargetFlowNodes(node.d.name, /* walkTargets */ false, /* unbound */ false); + + return false; + } + + override visitFunction(node: FunctionNode): boolean { + this._createVariableAnnotationFlowNode(); + AnalyzerNodeInfo.setFlowNode(node, this._currentFlowNode!); + + const symbol = this._bindNameToScope(this._currentScope, node.d.name); + const containingClassNode = ParseTreeUtils.getEnclosingClass(node, /* stopAtFunction */ true); + const functionDeclaration: FunctionDeclaration = { + type: DeclarationType.Function, + node, + isMethod: !!containingClassNode, + isGenerator: false, + uri: this._fileInfo.fileUri, + range: convertTextRangeToRange(node.d.name, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + }; + + if (symbol) { + symbol.addDeclaration(functionDeclaration); + } + + // Stash the declaration in the parse node for later access. + AnalyzerNodeInfo.setDeclaration(node, functionDeclaration); + + // Walk the default values prior to the type parameters. 
+ node.d.params.forEach((param) => { + if (param.d.defaultValue) { + this.walk(param.d.defaultValue); + } + }); + + let typeParamScope: Scope | undefined; + if (node.d.typeParams) { + this.walk(node.d.typeParams); + typeParamScope = AnalyzerNodeInfo.getScope(node.d.typeParams); + } + + this.walkMultiple(node.d.decorators); + + node.d.params.forEach((param) => { + if (param.d.annotation) { + this.walk(param.d.annotation); + } + + if (param.d.annotationComment) { + this.walk(param.d.annotationComment); + } + }); + + if (node.d.returnAnnotation) { + this.walk(node.d.returnAnnotation); + } + + if (node.d.funcAnnotationComment) { + this.walk(node.d.funcAnnotationComment); + } + + // Don't walk the body of the function until we're done analyzing + // the current scope. + this._createNewScope( + ScopeType.Function, + typeParamScope ?? this._getNonClassParentScope(), + /* proxyScope */ undefined, + () => { + AnalyzerNodeInfo.setScope(node, this._currentScope); + + const enclosingClass = ParseTreeUtils.getEnclosingClass(node); + if (enclosingClass) { + // Add the implicit "__class__" symbol described in PEP 3135. + this._addImplicitSymbolToCurrentScope('__class__', node, '__class__'); + } + + this._deferBinding(() => { + // Create a start node for the function. 
+ this._currentFlowNode = this._createStartFlowNode(); + this._codeFlowComplexity = 0; + + node.d.params.forEach((paramNode) => { + if (paramNode.d.name) { + const symbol = this._bindNameToScope(this._currentScope, paramNode.d.name); + + if (symbol) { + const paramDeclaration: ParamDeclaration = { + type: DeclarationType.Param, + node: paramNode, + uri: this._fileInfo.fileUri, + range: convertTextRangeToRange(paramNode, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + }; + + symbol.addDeclaration(paramDeclaration); + AnalyzerNodeInfo.setDeclaration(paramNode.d.name, paramDeclaration); + } + + this._createFlowAssignment(paramNode.d.name); + } + }); + + this._targetFunctionDeclaration = functionDeclaration; + this._currentReturnTarget = this._createBranchLabel(); + + // Walk the statements that make up the function. + this.walk(node.d.suite); + + this._targetFunctionDeclaration = undefined; + + // Associate the code flow node at the end of the suite with + // the suite. + AnalyzerNodeInfo.setAfterFlowNode(node.d.suite, this._currentFlowNode); + + // Compute the final return flow node and associate it with + // the function's parse node. If this node is unreachable, then + // the function never returns. + this._addAntecedent(this._currentReturnTarget, this._currentFlowNode); + const returnFlowNode = this._finishFlowLabel(this._currentReturnTarget); + + AnalyzerNodeInfo.setAfterFlowNode(node, returnFlowNode); + + AnalyzerNodeInfo.setCodeFlowExpressions(node, this._currentScopeCodeFlowExpressions!); + AnalyzerNodeInfo.setCodeFlowComplexity(node, this._codeFlowComplexity); + }); + } + ); + + this._createAssignmentTargetFlowNodes(node.d.name, /* walkTargets */ false, /* unbound */ false); + + // We'll walk the child nodes in a deferred manner, so don't walk them now. 
+ return false; + } + + override visitLambda(node: LambdaNode): boolean { + this._createVariableAnnotationFlowNode(); + AnalyzerNodeInfo.setFlowNode(node, this._currentFlowNode!); + + // Analyze the parameter defaults in the context of the parent's scope + // before we add any names from the function's scope. + node.d.params.forEach((param) => { + if (param.d.defaultValue) { + this.walk(param.d.defaultValue); + } + }); + + this._createNewScope(ScopeType.Function, this._getNonClassParentScope(), /* proxyScope */ undefined, () => { + AnalyzerNodeInfo.setScope(node, this._currentScope); + + this._deferBinding(() => { + // Create a start node for the lambda. + this._currentFlowNode = this._createStartFlowNode(); + + node.d.params.forEach((paramNode) => { + if (paramNode.d.name) { + const symbol = this._bindNameToScope(this._currentScope, paramNode.d.name); + if (symbol) { + const paramDeclaration: ParamDeclaration = { + type: DeclarationType.Param, + node: paramNode, + uri: this._fileInfo.fileUri, + range: convertTextRangeToRange(paramNode, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + }; + + symbol.addDeclaration(paramDeclaration); + AnalyzerNodeInfo.setDeclaration(paramNode.d.name, paramDeclaration); + } + + this._createFlowAssignment(paramNode.d.name); + this.walk(paramNode.d.name); + AnalyzerNodeInfo.setFlowNode(paramNode, this._currentFlowNode!); + } + }); + + // Walk the expression that make up the lambda body. + this.walk(node.d.expr); + + AnalyzerNodeInfo.setCodeFlowExpressions(node, this._currentScopeCodeFlowExpressions!); + }); + }); + + // We'll walk the child nodes in a deferred manner. 
+ return false; + } + + override visitCall(node: CallNode): boolean { + this._disableTrueFalseTargets(() => { + this.walk(node.d.leftExpr); + + const sortedArgs = ParseTreeUtils.getArgsByRuntimeOrder(node); + + sortedArgs.forEach((argNode) => { + if (this._currentFlowNode) { + AnalyzerNodeInfo.setFlowNode(argNode, this._currentFlowNode); + } + this.walk(argNode); + }); + }); + + // Create a call flow node. We'll skip this if the call is part of + // a decorator. We assume that decorators are not NoReturn functions. + // There are libraries that make extensive use of unannotated decorators, + // and this can lead to a performance issue when walking the control + // flow graph if we need to evaluate every decorator. + if (!ParseTreeUtils.isNodeContainedWithinNodeType(node, ParseNodeType.Decorator)) { + // Skip if we're in an 'Annotated' annotation because this creates + // problems for "No Return" return type analysis when annotation + // evaluation is deferred. + if (!this._isInAnnotatedAnnotation) { + this._createCallFlowNode(node); + } + } + + // Is this an manipulation of dunder all? + if ( + this._currentScope.type === ScopeType.Module && + node.d.leftExpr.nodeType === ParseNodeType.MemberAccess && + node.d.leftExpr.d.leftExpr.nodeType === ParseNodeType.Name && + node.d.leftExpr.d.leftExpr.d.value === '__all__' + ) { + let emitDunderAllWarning = true; + + // Is this a call to "__all__.extend()"? + if (node.d.leftExpr.d.member.d.value === 'extend' && node.d.args.length === 1) { + const argExpr = node.d.args[0].d.valueExpr; + + // Is this a call to "__all__.extend([])"? 
+ if (argExpr.nodeType === ParseNodeType.List) { + if ( + argExpr.d.items.every((listEntryNode) => { + if ( + listEntryNode.nodeType === ParseNodeType.StringList && + listEntryNode.d.strings.length === 1 && + listEntryNode.d.strings[0].nodeType === ParseNodeType.String + ) { + this._dunderAllNames?.push(listEntryNode.d.strings[0].d.value); + this._dunderAllStringNodes?.push(listEntryNode.d.strings[0]); + return true; + } + + return false; + }) + ) { + emitDunderAllWarning = false; + } + } else if ( + argExpr.nodeType === ParseNodeType.MemberAccess && + argExpr.d.leftExpr.nodeType === ParseNodeType.Name && + argExpr.d.member.d.value === '__all__' + ) { + // Is this a call to "__all__.extend(.__all__)"? + const namesToAdd = this._getDunderAllNamesFromImport(argExpr.d.leftExpr.d.value); + if (namesToAdd && namesToAdd.length > 0) { + namesToAdd.forEach((name) => { + this._dunderAllNames?.push(name); + }); + } + emitDunderAllWarning = false; + } + } else if (node.d.leftExpr.d.member.d.value === 'remove' && node.d.args.length === 1) { + // Is this a call to "__all__.remove()"? + const argExpr = node.d.args[0].d.valueExpr; + if ( + argExpr.nodeType === ParseNodeType.StringList && + argExpr.d.strings.length === 1 && + argExpr.d.strings[0].nodeType === ParseNodeType.String && + this._dunderAllNames + ) { + this._dunderAllNames = this._dunderAllNames.filter((name) => name !== argExpr.d.strings[0].d.value); + this._dunderAllStringNodes = this._dunderAllStringNodes.filter( + (node) => node.d.value !== argExpr.d.strings[0].d.value + ); + emitDunderAllWarning = false; + } + } else if (node.d.leftExpr.d.member.d.value === 'append' && node.d.args.length === 1) { + // Is this a call to "__all__.append()"? 
+                const argExpr = node.d.args[0].d.valueExpr;
+                if (
+                    argExpr.nodeType === ParseNodeType.StringList &&
+                    argExpr.d.strings.length === 1 &&
+                    argExpr.d.strings[0].nodeType === ParseNodeType.String
+                ) {
+                    this._dunderAllNames?.push(argExpr.d.strings[0].d.value);
+                    this._dunderAllStringNodes?.push(argExpr.d.strings[0]);
+                    emitDunderAllWarning = false;
+                }
+            }
+
+            if (emitDunderAllWarning) {
+                this._usesUnsupportedDunderAllForm = true;
+
+                this._addDiagnostic(
+                    DiagnosticRule.reportUnsupportedDunderAll,
+                    LocMessage.unsupportedDunderAllOperation(),
+                    node
+                );
+            }
+        }
+
+        return false;
+    }
+
+    override visitTypeParameterList(node: TypeParameterListNode): boolean {
+        const typeParamScope = new Scope(ScopeType.TypeParameter, this._getNonClassParentScope(), this._currentScope);
+
+        node.d.params.forEach((param) => {
+            if (param.d.boundExpr) {
+                this.walk(param.d.boundExpr);
+            }
+        });
+
+        const typeParamsSeen = new Set<string>();
+
+        node.d.params.forEach((param) => {
+            const name = param.d.name;
+            const symbol = typeParamScope.addSymbol(name.d.value, SymbolFlags.None);
+            const paramDeclaration: TypeParamDeclaration = {
+                type: DeclarationType.TypeParam,
+                node: param,
+                uri: this._fileInfo.fileUri,
+                range: convertTextRangeToRange(node, this._fileInfo.lines),
+                moduleName: this._fileInfo.moduleName,
+                isInExceptSuite: this._isInExceptSuite,
+            };
+
+            symbol.addDeclaration(paramDeclaration);
+            AnalyzerNodeInfo.setDeclaration(name, paramDeclaration);
+
+            if (typeParamsSeen.has(name.d.value)) {
+                this._addSyntaxError(
+                    LocMessage.typeParameterExistingTypeParameter().format({ name: name.d.value }),
+                    name
+                );
+            } else {
+                typeParamsSeen.add(name.d.value);
+            }
+        });
+
+        node.d.params.forEach((param) => {
+            if (param.d.defaultExpr) {
+                this.walk(param.d.defaultExpr);
+            }
+        });
+
+        AnalyzerNodeInfo.setScope(node, typeParamScope);
+
+        return false;
+    }
+
+    override visitTypeAlias(node: TypeAliasNode): boolean {
+        this._bindNameToScope(this._currentScope, node.d.name);
+
+
this.walk(node.d.name); + + let typeParamScope: Scope | undefined; + if (node.d.typeParams) { + this.walk(node.d.typeParams); + typeParamScope = AnalyzerNodeInfo.getScope(node.d.typeParams); + } + + const typeAliasDeclaration: TypeAliasDeclaration = { + type: DeclarationType.TypeAlias, + node, + uri: this._fileInfo.fileUri, + range: convertTextRangeToRange(node.d.name, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + docString: this._getVariableDocString(node.d.expr), + }; + + const symbol = this._bindNameToScope(this._currentScope, node.d.name); + if (symbol) { + symbol.addDeclaration(typeAliasDeclaration); + } + + // Stash the declaration in the parse node for later access. + AnalyzerNodeInfo.setDeclaration(node, typeAliasDeclaration); + + this._createAssignmentTargetFlowNodes(node.d.name, /* walkTargets */ true, /* unbound */ false); + + const prevScope = this._currentScope; + this._currentScope = typeParamScope ?? this._currentScope; + this.walk(node.d.expr); + this._currentScope = prevScope; + + return false; + } + + override visitAssignment(node: AssignmentNode): boolean { + if (this._handleTypingStubAssignmentOrAnnotation(node)) { + return false; + } + + this._bindPossibleTupleNamedTarget(node.d.leftExpr); + + if (node.d.annotationComment) { + this.walk(node.d.annotationComment); + this._addTypeDeclarationForVariable(node.d.leftExpr, node.d.annotationComment); + } + + if (node.d.chainedAnnotationComment) { + this._addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.annotationNotSupported(), + node.d.chainedAnnotationComment + ); + } + + // If the assignment target base expression is potentially a + // TypedDict, add the base expression to the flow expressions set + // to accommodate TypedDict type narrowing. 
+ if (node.d.leftExpr.nodeType === ParseNodeType.Index) { + const target = node.d.leftExpr; + + if ( + target.d.items.length === 1 && + !target.d.trailingComma && + target.d.items[0].d.valueExpr.nodeType === ParseNodeType.StringList + ) { + if (isCodeFlowSupportedForReference(target.d.leftExpr)) { + const baseExprReferenceKey = createKeyForReference(target.d.leftExpr); + this._currentScopeCodeFlowExpressions!.add(baseExprReferenceKey); + } + } + } + + this.walk(node.d.rightExpr); + + let isPossibleTypeAlias = true; + if (ParseTreeUtils.getEnclosingFunction(node)) { + // We will assume that type aliases are defined only at the module level + // or as class variables, not as local variables within a function. + isPossibleTypeAlias = false; + } else if (node.d.rightExpr.nodeType === ParseNodeType.Call && this._fileInfo.isTypingStubFile) { + // Some special built-in types defined in typing.pyi use + // assignments of the form List = _Alias(). We don't want to + // treat these as type aliases. + isPossibleTypeAlias = false; + } else if (ParseTreeUtils.isWithinLoop(node)) { + // Assume that it's not a type alias if it's within a loop. + isPossibleTypeAlias = false; + } + + this._addInferredTypeAssignmentForVariable(node.d.leftExpr, node.d.rightExpr, isPossibleTypeAlias); + + // If we didn't create assignment target flow nodes above, do so now. + this._createAssignmentTargetFlowNodes(node.d.leftExpr, /* walkTargets */ true, /* unbound */ false); + + // Is this an assignment to dunder all? 
+ if (this._currentScope.type === ScopeType.Module) { + if ( + (node.d.leftExpr.nodeType === ParseNodeType.Name && node.d.leftExpr.d.value === '__all__') || + (node.d.leftExpr.nodeType === ParseNodeType.TypeAnnotation && + node.d.leftExpr.d.valueExpr.nodeType === ParseNodeType.Name && + node.d.leftExpr.d.valueExpr.d.value === '__all__') + ) { + const expr = node.d.rightExpr; + this._dunderAllNames = []; + let emitDunderAllWarning = false; + + if (expr.nodeType === ParseNodeType.List) { + expr.d.items.forEach((listEntryNode) => { + if ( + listEntryNode.nodeType === ParseNodeType.StringList && + listEntryNode.d.strings.length === 1 && + listEntryNode.d.strings[0].nodeType === ParseNodeType.String + ) { + this._dunderAllNames!.push(listEntryNode.d.strings[0].d.value); + this._dunderAllStringNodes.push(listEntryNode.d.strings[0]); + } else { + emitDunderAllWarning = true; + } + }); + } else if (expr.nodeType === ParseNodeType.Tuple) { + expr.d.items.forEach((tupleEntryNode) => { + if ( + tupleEntryNode.nodeType === ParseNodeType.StringList && + tupleEntryNode.d.strings.length === 1 && + tupleEntryNode.d.strings[0].nodeType === ParseNodeType.String + ) { + this._dunderAllNames!.push(tupleEntryNode.d.strings[0].d.value); + this._dunderAllStringNodes.push(tupleEntryNode.d.strings[0]); + } else { + emitDunderAllWarning = true; + } + }); + } else { + emitDunderAllWarning = true; + } + + if (emitDunderAllWarning) { + this._usesUnsupportedDunderAllForm = true; + + this._addDiagnostic( + DiagnosticRule.reportUnsupportedDunderAll, + LocMessage.unsupportedDunderAllOperation(), + node + ); + } + } + } + + // Is this an assignment to dunder slots? 
+ if (this._currentScope.type === ScopeType.Class) { + if ( + (node.d.leftExpr.nodeType === ParseNodeType.Name && node.d.leftExpr.d.value === '__slots__') || + (node.d.leftExpr.nodeType === ParseNodeType.TypeAnnotation && + node.d.leftExpr.d.valueExpr.nodeType === ParseNodeType.Name && + node.d.leftExpr.d.valueExpr.d.value === '__slots__') + ) { + const expr = node.d.rightExpr; + this._dunderSlotsEntries = []; + let isExpressionUnderstood = true; + + if (expr.nodeType === ParseNodeType.StringList) { + this._dunderSlotsEntries.push(expr); + } else if (expr.nodeType === ParseNodeType.List) { + expr.d.items.forEach((listEntryNode) => { + if ( + listEntryNode.nodeType === ParseNodeType.StringList && + listEntryNode.d.strings.length === 1 && + listEntryNode.d.strings[0].nodeType === ParseNodeType.String + ) { + this._dunderSlotsEntries!.push(listEntryNode); + } else { + isExpressionUnderstood = false; + } + }); + } else if (expr.nodeType === ParseNodeType.Tuple) { + expr.d.items.forEach((tupleEntryNode) => { + if ( + tupleEntryNode.nodeType === ParseNodeType.StringList && + tupleEntryNode.d.strings.length === 1 && + tupleEntryNode.d.strings[0].nodeType === ParseNodeType.String + ) { + this._dunderSlotsEntries!.push(tupleEntryNode); + } else { + isExpressionUnderstood = false; + } + }); + } else { + isExpressionUnderstood = false; + } + + if (!isExpressionUnderstood) { + this._dunderSlotsEntries = undefined; + } + } + } + + return false; + } + + override visitAssignmentExpression(node: AssignmentExpressionNode) { + // Temporarily disable true/false targets in case this assignment + // expression is located within an if/else conditional. + this._disableTrueFalseTargets(() => { + // Evaluate the operand expression. 
+ this.walk(node.d.rightExpr); + }); + + const evaluationNode = ParseTreeUtils.getEvaluationNodeForAssignmentExpression(node); + if (!evaluationNode) { + this._addSyntaxError(LocMessage.assignmentExprContext(), node); + this.walk(node.d.name); + } else { + // Bind the name to the containing scope. This special logic is required + // because of the behavior defined in PEP 572. Targets of assignment + // expressions don't bind to a list comprehension's scope but instead + // bind to its containing scope. + const containerScope = AnalyzerNodeInfo.getScope(evaluationNode)!; + + // If we're in a list comprehension (possibly nested), make sure that + // local for targets don't collide with the target of the assignment + // expression. + let curScope: Scope | undefined = this._currentScope; + while (curScope && curScope !== containerScope) { + const localSymbol = curScope.lookUpSymbol(node.d.name.d.value); + if (localSymbol) { + this._addSyntaxError( + LocMessage.assignmentExprComprehension().format({ name: node.d.name.d.value }), + node.d.name + ); + break; + } + + curScope = curScope.parent; + } + + this._bindNameToScope(containerScope, node.d.name); + this._addInferredTypeAssignmentForVariable(node.d.name, node.d.rightExpr); + this._createAssignmentTargetFlowNodes(node.d.name, /* walkTargets */ true, /* unbound */ false); + } + + return false; + } + + override visitAugmentedAssignment(node: AugmentedAssignmentNode) { + this.walk(node.d.leftExpr); + this.walk(node.d.rightExpr); + + this._bindPossibleTupleNamedTarget(node.d.destExpr); + this._createAssignmentTargetFlowNodes(node.d.destExpr, /* walkTargets */ false, /* unbound */ false); + + this._addInferredTypeAssignmentForVariable(node.d.destExpr, node.d.rightExpr); + + // Is this an assignment to dunder all of the form + // __all__ += ? 
+ if ( + node.d.operator === OperatorType.AddEqual && + this._currentScope.type === ScopeType.Module && + node.d.leftExpr.nodeType === ParseNodeType.Name && + node.d.leftExpr.d.value === '__all__' + ) { + const expr = node.d.rightExpr; + let emitDunderAllWarning = true; + + if (expr.nodeType === ParseNodeType.List) { + // Is this the form __all__ += ["a", "b"]? + expr.d.items.forEach((listEntryNode) => { + if ( + listEntryNode.nodeType === ParseNodeType.StringList && + listEntryNode.d.strings.length === 1 && + listEntryNode.d.strings[0].nodeType === ParseNodeType.String + ) { + this._dunderAllNames?.push(listEntryNode.d.strings[0].d.value); + this._dunderAllStringNodes.push(listEntryNode.d.strings[0]); + } + }); + emitDunderAllWarning = false; + } else if ( + expr.nodeType === ParseNodeType.MemberAccess && + expr.d.leftExpr.nodeType === ParseNodeType.Name && + expr.d.member.d.value === '__all__' + ) { + // Is this using the form "__all__ += .__all__"? + const namesToAdd = this._getDunderAllNamesFromImport(expr.d.leftExpr.d.value); + if (namesToAdd) { + namesToAdd.forEach((name) => { + this._dunderAllNames?.push(name); + }); + + emitDunderAllWarning = false; + } + } + + if (emitDunderAllWarning) { + this._usesUnsupportedDunderAllForm = true; + + this._addDiagnostic( + DiagnosticRule.reportUnsupportedDunderAll, + LocMessage.unsupportedDunderAllOperation(), + node + ); + } + } + + return false; + } + + override visitDel(node: DelNode) { + node.d.targets.forEach((expr) => { + this._bindPossibleTupleNamedTarget(expr); + this.walk(expr); + this._createAssignmentTargetFlowNodes(expr, /* walkTargets */ false, /* unbound */ true); + }); + + return false; + } + + override visitTypeAnnotation(node: TypeAnnotationNode): boolean { + if (this._handleTypingStubAssignmentOrAnnotation(node)) { + return false; + } + + // If this is an annotated variable assignment within a class body, + // we need to evaluate the type annotation first. 
+ const bindVariableBeforeAnnotationEvaluation = + node.parent?.nodeType === ParseNodeType.Assignment && + ParseTreeUtils.getEnclosingClass(node, /* stopAtFunction */ true) !== undefined; + + if (!bindVariableBeforeAnnotationEvaluation) { + this.walk(node.d.annotation); + } + + this._createVariableAnnotationFlowNode(); + + this._bindPossibleTupleNamedTarget(node.d.valueExpr); + this._addTypeDeclarationForVariable(node.d.valueExpr, node.d.annotation); + + if (bindVariableBeforeAnnotationEvaluation) { + this.walk(node.d.annotation); + } + + // For type annotations that are not part of assignments (e.g. simple variable + // annotations), we need to populate the reference map. Otherwise the type + // analyzer's code flow engine won't run and detect cases where the variable + // is unbound. + const expressionList: CodeFlowReferenceExpressionNode[] = []; + if (this._isNarrowingExpression(node.d.valueExpr, expressionList)) { + expressionList.forEach((expr) => { + const referenceKey = createKeyForReference(expr); + this._currentScopeCodeFlowExpressions!.add(referenceKey); + }); + } + + this.walk(node.d.valueExpr); + + return false; + } + + override visitFor(node: ForNode) { + this._bindPossibleTupleNamedTarget(node.d.targetExpr); + this._addInferredTypeAssignmentForVariable(node.d.targetExpr, node); + + this.walk(node.d.iterableExpr); + + const preForLabel = this._createLoopLabel(); + const preElseLabel = this._createBranchLabel(); + const postForLabel = this._createBranchLabel(); + + this._addAntecedent(preForLabel, this._currentFlowNode!); + this._currentFlowNode = preForLabel; + this._addAntecedent(preElseLabel, this._currentFlowNode); + const targetExpressions = this._trackCodeFlowExpressions(() => { + this._createAssignmentTargetFlowNodes(node.d.targetExpr, /* walkTargets */ true, /* unbound */ false); + }); + + this._bindLoopStatement(preForLabel, postForLabel, () => { + this.walk(node.d.forSuite); + this._addAntecedent(preForLabel, this._currentFlowNode!); + + // 
Add any target expressions since they are modified in the loop. + targetExpressions.forEach((value) => { + this._currentScopeCodeFlowExpressions?.add(value); + }); + }); + + this._currentFlowNode = this._finishFlowLabel(preElseLabel); + if (node.d.elseSuite) { + this.walk(node.d.elseSuite); + } + this._addAntecedent(postForLabel, this._currentFlowNode); + + this._currentFlowNode = this._finishFlowLabel(postForLabel); + + // Async for is not allowed outside of an async function + // unless we're in ipython mode. + if (node.d.asyncToken && !this._fileInfo.ipythonMode) { + const enclosingFunction = ParseTreeUtils.getEnclosingFunction(node); + if (!enclosingFunction || !enclosingFunction.d.isAsync) { + this._addSyntaxError(LocMessage.asyncNotInAsyncFunction(), node.d.asyncToken); + } + } + + return false; + } + + override visitContinue(node: ContinueNode): boolean { + if (this._currentContinueTarget) { + this._addAntecedent(this._currentContinueTarget, this._currentFlowNode!); + } + this._currentFlowNode = Binder._unreachableStructuralFlowNode; + + // Continue nodes don't have any children. + return false; + } + + override visitBreak(node: BreakNode): boolean { + if (this._currentBreakTarget) { + this._addAntecedent(this._currentBreakTarget, this._currentFlowNode!); + } + this._currentFlowNode = Binder._unreachableStructuralFlowNode; + + // Break nodes don't have any children. 
+ return false; + } + + override visitReturn(node: ReturnNode): boolean { + if (this._targetFunctionDeclaration) { + if (!this._targetFunctionDeclaration.returnStatements) { + this._targetFunctionDeclaration.returnStatements = []; + } + this._targetFunctionDeclaration.returnStatements.push(node); + } + + if (node.d.expr) { + AnalyzerNodeInfo.setFlowNode(node.d.expr, this._currentFlowNode!); + this.walk(node.d.expr); + } + + AnalyzerNodeInfo.setFlowNode(node, this._currentFlowNode!); + if (this._currentReturnTarget) { + this._addAntecedent(this._currentReturnTarget, this._currentFlowNode!); + } + this._finallyTargets.forEach((target) => { + this._addAntecedent(target, this._currentFlowNode!); + }); + this._currentFlowNode = Binder._unreachableStructuralFlowNode; + return false; + } + + override visitYield(node: YieldNode): boolean { + if (this._isInComprehension(node, /* ignoreOutermostIterable */ true)) { + this._addSyntaxError(LocMessage.yieldWithinComprehension(), node); + } + + this._bindYield(node); + return false; + } + + override visitYieldFrom(node: YieldFromNode): boolean { + if (this._isInComprehension(node, /* ignoreOutermostIterable */ true)) { + this._addSyntaxError(LocMessage.yieldWithinComprehension(), node); + } + + this._bindYield(node); + return false; + } + + override visitMemberAccess(node: MemberAccessNode): boolean { + this.walk(node.d.leftExpr); + AnalyzerNodeInfo.setFlowNode(node, this._currentFlowNode!); + return false; + } + + override visitName(node: NameNode): boolean { + AnalyzerNodeInfo.setFlowNode(node, this._currentFlowNode!); + return false; + } + + override visitIndex(node: IndexNode): boolean { + AnalyzerNodeInfo.setFlowNode(node, this._currentFlowNode!); + + this.walk(node.d.leftExpr); + + // If we're within an 'Annotated' type annotation, set the flag. 
+ const wasInAnnotatedAnnotation = this._isInAnnotatedAnnotation; + if (this._isTypingAnnotation(node.d.leftExpr, 'Annotated')) { + this._isInAnnotatedAnnotation = true; + } + + node.d.items.forEach((argNode) => { + this.walk(argNode); + }); + + this._isInAnnotatedAnnotation = wasInAnnotatedAnnotation; + + return false; + } + + override visitIf(node: IfNode): boolean { + const preIfFlowNode = this._currentFlowNode!; + const thenLabel = this._createBranchLabel(); + const elseLabel = this._createBranchLabel(); + const postIfLabel = this._createBranchLabel(preIfFlowNode); + + postIfLabel.affectedExpressions = this._trackCodeFlowExpressions(() => { + // Determine if the test condition is always true or always false. If so, + // we can treat either the then or the else clause as unconditional. + const constExprValue = StaticExpressions.evaluateStaticBoolLikeExpression( + node.d.testExpr, + this._fileInfo.executionEnvironment, + this._fileInfo.definedConstants, + this._typingImportAliases, + this._sysImportAliases + ); + + this._bindConditional(node.d.testExpr, thenLabel, elseLabel); + + // Handle the if clause. + this._currentFlowNode = + constExprValue === false + ? Binder._unreachableStaticConditionFlowNode + : this._finishFlowLabel(thenLabel); + this.walk(node.d.ifSuite); + this._addAntecedent(postIfLabel, this._currentFlowNode); + + // Now handle the else clause if it's present. If there + // are chained "else if" statements, they'll be handled + // recursively here. + this._currentFlowNode = + constExprValue === true ? 
Binder._unreachableStaticConditionFlowNode : this._finishFlowLabel(elseLabel); + if (node.d.elseSuite) { + this.walk(node.d.elseSuite); + } else { + this._bindNeverCondition(node.d.testExpr, postIfLabel, /* isPositiveTest */ false); + } + this._addAntecedent(postIfLabel, this._currentFlowNode); + this._currentFlowNode = this._finishFlowLabel(postIfLabel); + }); + + return false; + } + + override visitWhile(node: WhileNode): boolean { + const thenLabel = this._createBranchLabel(); + const elseLabel = this._createBranchLabel(); + const postWhileLabel = this._createBranchLabel(); + + // Determine if the test condition is always true or always false. If so, + // we can treat either the while or the else clause as unconditional. + const constExprValue = StaticExpressions.evaluateStaticBoolLikeExpression( + node.d.testExpr, + this._fileInfo.executionEnvironment, + this._fileInfo.definedConstants, + this._typingImportAliases, + this._sysImportAliases + ); + + const preLoopLabel = this._createLoopLabel(); + this._addAntecedent(preLoopLabel, this._currentFlowNode!); + this._currentFlowNode = preLoopLabel; + + this._bindConditional(node.d.testExpr, thenLabel, elseLabel); + + // Handle the while clause. + this._currentFlowNode = + constExprValue === false ? Binder._unreachableStaticConditionFlowNode : this._finishFlowLabel(thenLabel); + this._bindLoopStatement(preLoopLabel, postWhileLabel, () => { + this.walk(node.d.whileSuite); + }); + this._addAntecedent(preLoopLabel, this._currentFlowNode); + + this._currentFlowNode = + constExprValue === true ? 
Binder._unreachableStaticConditionFlowNode : this._finishFlowLabel(elseLabel); + if (node.d.elseSuite) { + this.walk(node.d.elseSuite); + } + this._addAntecedent(postWhileLabel, this._currentFlowNode); + this._currentFlowNode = this._finishFlowLabel(postWhileLabel); + return false; + } + + override visitAssert(node: AssertNode): boolean { + const assertTrueLabel = this._createBranchLabel(); + const assertFalseLabel = this._createBranchLabel(); + + this._bindConditional(node.d.testExpr, assertTrueLabel, assertFalseLabel); + + if (node.d.exceptionExpr) { + this._currentFlowNode = this._finishFlowLabel(assertFalseLabel); + this.walk(node.d.exceptionExpr); + } + + this._currentFlowNode = this._finishFlowLabel(assertTrueLabel); + return false; + } + + override visitExcept(node: ExceptNode): boolean { + if (node.d.typeExpr) { + this.walk(node.d.typeExpr); + } + + if (node.d.name) { + this.walk(node.d.name); + const symbol = this._bindNameToScope(this._currentScope, node.d.name); + this._createAssignmentTargetFlowNodes(node.d.name, /* walkTargets */ true, /* unbound */ false); + + if (symbol) { + const declaration: VariableDeclaration = { + type: DeclarationType.Variable, + node: node.d.name, + isConstant: isConstantName(node.d.name.d.value), + inferredTypeSource: node, + uri: this._fileInfo.fileUri, + range: convertTextRangeToRange(node.d.name, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + isExplicitBinding: this._currentScope.getBindingType(node.d.name.d.value) !== undefined, + }; + symbol.addDeclaration(declaration); + } + } + + const wasInExceptSuite = this._isInExceptSuite; + this._isInExceptSuite = true; + this.walk(node.d.exceptSuite); + this._isInExceptSuite = wasInExceptSuite; + + if (node.d.name) { + // The exception name is implicitly unbound at the end of + // the except block. 
+ this._createFlowAssignment(node.d.name, /* unbound */ true); + } + + return false; + } + + override visitRaise(node: RaiseNode): boolean { + if (this._currentFlowNode) { + this._addExceptTargets(this._currentFlowNode); + } + + if (this._targetFunctionDeclaration) { + if (!this._targetFunctionDeclaration.raiseStatements) { + this._targetFunctionDeclaration.raiseStatements = []; + } + this._targetFunctionDeclaration.raiseStatements.push(node); + } + + if (node.d.expr) { + this.walk(node.d.expr); + } + if (node.d.fromExpr) { + this.walk(node.d.fromExpr); + } + + this._finallyTargets.forEach((target) => { + this._addAntecedent(target, this._currentFlowNode!); + }); + + this._currentFlowNode = Binder._unreachableStructuralFlowNode; + return false; + } + + override visitTry(node: TryNode): boolean { + // The try/except/else/finally statement is tricky to model using static code + // flow rules because the finally clause is executed regardless of whether an + // exception is raised or a return statement is executed. Code within the finally + // clause needs to be reachable always, and we conservatively assume that any + // statement within the try block can generate an exception, so we assume that its + // antecedent is the pre-try flow. We implement this with a "gate" node in the + // control flow graph. If analysis starts within the finally clause, the gate is + // opened, and all raise/return statements within try/except/else blocks are + // considered antecedents. If analysis starts outside (after) the finally clause, + // the gate is closed, and only paths that don't hit a raise/return statement + // in try/except/else blocks are considered. + // + // + // 1. PostElse + // ^ + // | + // 3. TryExceptElseReturnOrExcept | + // ^ | + // | | 2. PostExcept (for each except) + // | | ^ + // 4. ReturnOrRaiseLabel | | + // ^ | | + // | | |--------- + // 5. PreFinallyGate | | + // ^ | | + // |------------------ | | + // | | | + // 6. 
PreFinallyLabel + // ^ + // (finally block) + // ^ + // 7. PostFinally + // ^ (only if isAfterElseAndExceptsReachable) + // (after finally) + + // Create one flow label for every except clause. + const preTryFlowNode = this._currentFlowNode!; + const curExceptTargets = node.d.exceptClauses.map(() => this._createBranchLabel()); + const preFinallyLabel = this._createBranchLabel(preTryFlowNode); + let isAfterElseAndExceptsReachable = false; + + // Create a label for all of the return or raise labels that are + // encountered within the try/except/else blocks. This conditionally + // connects the return/raise statement to the finally clause. + const preFinallyReturnOrRaiseLabel = this._createBranchLabel(preTryFlowNode); + + const preFinallyGate: FlowPreFinallyGate = { + flags: FlowFlags.PreFinallyGate, + id: this._getUniqueFlowNodeId(), + antecedent: preFinallyReturnOrRaiseLabel, + }; + + preFinallyLabel.affectedExpressions = this._trackCodeFlowExpressions(() => { + if (node.d.finallySuite) { + this._addAntecedent(preFinallyLabel, preFinallyGate); + } + + // Add the finally target as an exception target unless there is + // a "bare" except clause that accepts all exception types. + const hasBareExceptClause = node.d.exceptClauses.some((except) => !except.d.typeExpr); + if (!hasBareExceptClause) { + curExceptTargets.push(preFinallyReturnOrRaiseLabel); + } + + // An exception may be generated before the first flow node + // added by the try block, so all of the exception targets + // must have the pre-try flow node as an antecedent. + curExceptTargets.forEach((exceptLabel) => { + this._addAntecedent(exceptLabel, this._currentFlowNode!); + }); + + // We don't perfectly handle nested finally clauses, which are not + // possible to model fully within a static analyzer, but we do handle + // a single level of finally statements, and we handle most cases + // involving nesting. Returns or raises within the try/except/raise + // block will execute the finally target(s). 
+ if (node.d.finallySuite) { + this._finallyTargets.push(preFinallyReturnOrRaiseLabel); + } + + // Handle the try block. + this._useExceptTargets(curExceptTargets, () => { + this.walk(node.d.trySuite); + }); + + // Handle the else block, which is executed only if + // execution falls through the try block. + if (node.d.elseSuite) { + this.walk(node.d.elseSuite); + } + this._addAntecedent(preFinallyLabel, this._currentFlowNode!); + if (!this._isCodeUnreachable()) { + isAfterElseAndExceptsReachable = true; + } + + // Handle the except blocks. + node.d.exceptClauses.forEach((exceptNode, index) => { + this._currentFlowNode = this._finishFlowLabel(curExceptTargets[index]); + this.walk(exceptNode); + this._addAntecedent(preFinallyLabel, this._currentFlowNode); + if (!this._isCodeUnreachable()) { + isAfterElseAndExceptsReachable = true; + } + }); + + if (node.d.finallySuite) { + this._finallyTargets.pop(); + } + + // Handle the finally block. + this._currentFlowNode = this._finishFlowLabel(preFinallyLabel); + }); + + if (node.d.finallySuite) { + this.walk(node.d.finallySuite); + + // Add a post-finally node at the end. If we traverse this node, + // we'll set the "ignore" flag in the pre-finally node. + const postFinallyNode: FlowPostFinally = { + flags: FlowFlags.PostFinally, + id: this._getUniqueFlowNodeId(), + finallyNode: node.d.finallySuite, + antecedent: this._currentFlowNode!, + preFinallyGate, + }; + this._currentFlowNode = isAfterElseAndExceptsReachable + ? postFinallyNode + : Binder._unreachableStructuralFlowNode; + } + + return false; + } + + override visitAwait(node: AwaitNode) { + // Make sure this is within an async lambda or function. + const execScopeNode = ParseTreeUtils.getExecutionScopeNode(node); + if (execScopeNode?.nodeType !== ParseNodeType.Function || !execScopeNode.d.isAsync) { + if (this._fileInfo.ipythonMode && execScopeNode?.nodeType === ParseNodeType.Module) { + // Top level await is allowed in ipython mode. 
+ return true; + } + + const isInGenerator = + node.parent?.nodeType === ParseNodeType.Comprehension && + node.parent?.parent?.nodeType !== ParseNodeType.List && + node.parent?.parent?.nodeType !== ParseNodeType.Set && + node.parent?.parent?.nodeType !== ParseNodeType.Dictionary; + + // Allow if it's within a generator expression. Execution of + // generator expressions is deferred and therefore can be + // run within the context of an async function later. + if (!isInGenerator) { + this._addSyntaxError(LocMessage.awaitNotInAsync(), node.d.awaitToken); + } + } + + return true; + } + + override visitGlobal(node: GlobalNode): boolean { + const globalScope = this._currentScope.getGlobalScope().scope; + + node.d.targets.forEach((name) => { + const nameValue = name.d.value; + + // Is the binding inconsistent? + if (this._currentScope.getBindingType(nameValue) === NameBindingType.Nonlocal) { + this._addSyntaxError(LocMessage.nonLocalRedefinition().format({ name: nameValue }), name); + } + + const valueWithScope = this._currentScope.lookUpSymbolRecursive(nameValue); + + // Was the name already assigned within this scope before it was declared global? + if (valueWithScope && valueWithScope.scope === this._currentScope) { + this._addSyntaxError(LocMessage.globalReassignment().format({ name: nameValue }), name); + } + + // Add it to the global scope if it's not already added. + this._bindNameToScope(globalScope, name); + + if (this._currentScope !== globalScope) { + this._currentScope.setBindingType(nameValue, NameBindingType.Global); + } + }); + + return true; + } + + override visitNonlocal(node: NonlocalNode): boolean { + const globalScope = this._currentScope.getGlobalScope().scope; + + if (this._currentScope === globalScope) { + this._addSyntaxError(LocMessage.nonLocalInModule(), node); + } else { + node.d.targets.forEach((name) => { + const nameValue = name.d.value; + + // Is the binding inconsistent? 
+ if (this._currentScope.getBindingType(nameValue) === NameBindingType.Global) { + this._addSyntaxError(LocMessage.globalRedefinition().format({ name: nameValue }), name); + } + + const valueWithScope = this._currentScope.lookUpSymbolRecursive(nameValue); + + // Was the name already assigned within this scope before it was declared nonlocal? + if (valueWithScope && valueWithScope.scope === this._currentScope) { + this._addSyntaxError(LocMessage.nonLocalReassignment().format({ name: nameValue }), name); + } else if (!valueWithScope || valueWithScope.scope === globalScope) { + this._addSyntaxError(LocMessage.nonLocalNoBinding().format({ name: nameValue }), name); + } + + if (valueWithScope) { + this._currentScope.setBindingType(nameValue, NameBindingType.Nonlocal); + } + }); + } + + return true; + } + + override visitImportAs(node: ImportAsNode): boolean { + if (node.d.module.d.nameParts.length > 0) { + const firstNamePartValue = node.d.module.d.nameParts[0].d.value; + + let symbolName: string | undefined; + let symbolNameNode: NameNode; + if (node.d.alias) { + // The symbol name is defined by the alias. + symbolName = node.d.alias.d.value; + symbolNameNode = node.d.alias; + } else { + // There was no alias, so we need to use the first element of + // the name parts as the symbol. 
+ symbolName = firstNamePartValue; + symbolNameNode = node.d.module.d.nameParts[0]; + } + + const symbol = this._bindNameToScope(this._currentScope, symbolNameNode); + if ( + symbol && + (this._currentScope.type === ScopeType.Module || this._currentScope.type === ScopeType.Builtin) && + (!node.d.alias || + node.d.module.d.nameParts.length !== 1 || + node.d.module.d.nameParts[0].d.value !== node.d.alias.d.value) + ) { + if (this._fileInfo.isStubFile || this._fileInfo.isInPyTypedPackage) { + // PEP 484 indicates that imported symbols should not be + // considered "reexported" from a type stub file unless + // they are imported using the "as" form and the aliased + // name is entirely redundant. + this._potentialHiddenSymbols.set(symbolName, symbol); + } + } + + const importInfo = AnalyzerNodeInfo.getImportInfo(node.d.module); + assert(importInfo !== undefined); + + if (symbol) { + this._createAliasDeclarationForMultipartImportName(node, node.d.alias, importInfo, symbol); + } + + this._createFlowAssignment(node.d.alias ? node.d.alias : node.d.module.d.nameParts[0]); + + if (node.d.module.d.nameParts.length === 1) { + if (firstNamePartValue === 'typing' || firstNamePartValue === 'typing_extensions') { + this._typingImportAliases.push(node.d.alias?.d.value ?? firstNamePartValue); + } else if (firstNamePartValue === 'sys') { + this._sysImportAliases.push(node.d.alias?.d.value ?? firstNamePartValue); + } else if (firstNamePartValue === 'dataclasses') { + this._dataclassesImportAliases.push(node.d.alias?.d.value ?? 
firstNamePartValue); + } + } + } + + return true; + } + + override visitImportFrom(node: ImportFromNode): boolean { + const typingSymbolsOfInterest = ['Final', 'ClassVar', 'Annotated']; + const dataclassesSymbolsOfInterest = ['InitVar']; + const importInfo = AnalyzerNodeInfo.getImportInfo(node.d.module); + + AnalyzerNodeInfo.setFlowNode(node, this._currentFlowNode!); + + let resolvedPath = Uri.empty(); + if (importInfo && importInfo.isImportFound && !importInfo.isNativeLib) { + resolvedPath = importInfo.resolvedUris[importInfo.resolvedUris.length - 1]; + } + + // If this file is a module __init__.py(i), relative imports of submodules + // using the syntax "from .x import y" introduce a symbol x into the + // module namespace. We do this first (before adding the individual imported + // symbols below) in case one of the imported symbols is the same name as the + // submodule. In that case, we want to the symbol to appear later in the + // declaration list because it should "win" when resolving the alias. + const fileName = stripFileExtension(this._fileInfo.fileUri.fileName); + const isModuleInitFile = + fileName === '__init__' && node.d.module.d.leadingDots === 1 && node.d.module.d.nameParts.length === 1; + + let isTypingImport = false; + let isDataclassesImport = false; + + if (node.d.module.d.nameParts.length === 1) { + const firstNamePartValue = node.d.module.d.nameParts[0].d.value; + if (firstNamePartValue === 'typing' || firstNamePartValue === 'typing_extensions') { + isTypingImport = true; + } + + if (firstNamePartValue === 'dataclasses') { + isDataclassesImport = true; + } + } + + if (node.d.isWildcardImport) { + if (ParseTreeUtils.getEnclosingClass(node) || ParseTreeUtils.getEnclosingFunction(node)) { + this._addSyntaxError(LocMessage.wildcardInFunction(), node); + } + + if (importInfo) { + const names: string[] = []; + + // Note that this scope uses a wildcard import, so we cannot shortcut + // any code flow checks. All expressions are potentially in play. 
+ this._currentScopeCodeFlowExpressions?.add(wildcardImportReferenceKey); + + const lookupInfo = this._fileInfo.importLookup(resolvedPath); + if (lookupInfo) { + const wildcardNames = getWildcardImportNames(lookupInfo); + + if (isModuleInitFile) { + // If the symbol is going to be immediately replaced with a same-named + // imported symbol, skip this. + const isImmediatelyReplaced = wildcardNames.some((name) => { + return name === node.d.module.d.nameParts[0].d.value; + }); + + if (!isImmediatelyReplaced) { + this._addImplicitFromImport(node, importInfo); + } + } + + wildcardNames.forEach((name) => { + const localSymbol = this._bindNameValueToScope(this._currentScope, name); + + if (localSymbol) { + const importedSymbol = lookupInfo.symbolTable.get(name)!; + + if ( + (this._currentScope.type === ScopeType.Module || + this._currentScope.type === ScopeType.Builtin) && + this._fileInfo.isInPyTypedPackage && + !this._fileInfo.isStubFile + ) { + // Wildcard imports are considered a re-export form. If this module + // defines __all__, it determines the public interface, so we may + // need to treat wildcard-imported names as private unless listed. + this._potentialWildcardReexportSymbols.set(name, localSymbol); + } + + // Is the symbol in the target module's symbol table? If so, + // alias it. + if (importedSymbol) { + const aliasDecl: AliasDeclaration = { + type: DeclarationType.Alias, + node, + uri: resolvedPath, + loadSymbolsFromPath: true, + range: getEmptyRange(), // Range is unknown for wildcard name import. + usesLocalName: false, + symbolName: name, + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + }; + localSymbol.addDeclaration(aliasDecl); + names.push(name); + } else { + // The symbol wasn't in the target module's symbol table. It's probably + // an implicitly-imported submodule referenced by __all__. 
+ if (importInfo && importInfo.filteredImplicitImports) { + const implicitImport = importInfo.filteredImplicitImports.get(name); + + if (implicitImport) { + const submoduleFallback: AliasDeclaration = { + type: DeclarationType.Alias, + node, + uri: implicitImport.uri, + loadSymbolsFromPath: true, + range: getEmptyRange(), + usesLocalName: false, + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + }; + + const aliasDecl: AliasDeclaration = { + type: DeclarationType.Alias, + node, + uri: resolvedPath, + loadSymbolsFromPath: true, + usesLocalName: false, + symbolName: name, + submoduleFallback, + range: getEmptyRange(), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + }; + + localSymbol.addDeclaration(aliasDecl); + names.push(name); + } + } + } + + if (isTypingImport) { + localSymbol.setTypingSymbolAlias(name); + } + } + }); + } + + this._createFlowWildcardImport(node, names); + + if (isTypingImport) { + typingSymbolsOfInterest.forEach((s) => { + this._typingSymbolAliases.set(s, s); + }); + } + + if (isDataclassesImport) { + dataclassesSymbolsOfInterest.forEach((s) => { + this._dataclassesSymbolAliases.set(s, s); + }); + } + } + } else { + if (isModuleInitFile) { + this._addImplicitFromImport(node, importInfo); + } + + node.d.imports.forEach((importSymbolNode) => { + const importedName = importSymbolNode.d.name.d.value; + const nameNode = importSymbolNode.d.alias || importSymbolNode.d.name; + + AnalyzerNodeInfo.setFlowNode(importSymbolNode, this._currentFlowNode!); + + const symbol = this._bindNameToScope(this._currentScope, nameNode); + + if (symbol) { + // All import statements of the form `from . import x` treat x + // as an externally-visible (not hidden) symbol. 
+ if (node.d.module.d.nameParts.length > 0) { + if ( + this._currentScope.type === ScopeType.Module || + this._currentScope.type === ScopeType.Builtin + ) { + if ( + !importSymbolNode.d.alias || + importSymbolNode.d.alias.d.value !== importSymbolNode.d.name.d.value + ) { + if (this._fileInfo.isStubFile || this._fileInfo.isInPyTypedPackage) { + // PEP 484 indicates that imported symbols should not be + // considered "reexported" from a type stub file unless + // they are imported using the "as" form using a redundant form. + // Py.typed packages follow the same rule as PEP 484. + this._potentialHiddenSymbols.set(nameNode.d.value, symbol); + } + } + } + } + + // Is the import referring to an implicitly-imported module? + let implicitImport: ImplicitImport | undefined; + if (importInfo && importInfo.filteredImplicitImports) { + implicitImport = importInfo.filteredImplicitImports.get(importedName); + } + + let submoduleFallback: AliasDeclaration | undefined; + let loadSymbolsFromPath = true; + if (implicitImport) { + submoduleFallback = { + type: DeclarationType.Alias, + node: importSymbolNode, + uri: implicitImport.uri, + loadSymbolsFromPath: true, + range: getEmptyRange(), + usesLocalName: false, + moduleName: this._formatModuleName(node.d.module), + isInExceptSuite: this._isInExceptSuite, + }; + + // Handle the case where this is an __init__.py file and the imported + // module name refers to itself. The most common situation where this occurs + // is with a "from . import X" form, but it can also occur with + // an absolute import (e.g. "from A.B.C import X"). In this case, we want to + // always resolve to the submodule rather than the resolved path. 
+ if (fileName === '__init__') { + if (node.d.module.d.leadingDots === 1 && node.d.module.d.nameParts.length === 0) { + loadSymbolsFromPath = false; + } else if (resolvedPath.equals(this._fileInfo.fileUri)) { + loadSymbolsFromPath = false; + } + } + } + + const aliasDecl: AliasDeclaration = { + type: DeclarationType.Alias, + node: importSymbolNode, + uri: resolvedPath, + loadSymbolsFromPath, + usesLocalName: !!importSymbolNode.d.alias, + symbolName: importedName, + submoduleFallback, + range: convertTextRangeToRange(nameNode, this._fileInfo.lines), + moduleName: this._formatModuleName(node.d.module), + isInExceptSuite: this._isInExceptSuite, + isNativeLib: importInfo?.isNativeLib, + }; + + symbol.addDeclaration(aliasDecl); + this._createFlowAssignment(importSymbolNode.d.alias || importSymbolNode.d.name); + + if (isTypingImport) { + if (typingSymbolsOfInterest.some((s) => s === importSymbolNode.d.name.d.value)) { + this._typingSymbolAliases.set(nameNode.d.value, importSymbolNode.d.name.d.value); + + if (isTypingImport) { + symbol.setTypingSymbolAlias(nameNode.d.value); + } + } + } + + if (isDataclassesImport) { + if (dataclassesSymbolsOfInterest.some((s) => s === importSymbolNode.d.name.d.value)) { + this._dataclassesSymbolAliases.set(nameNode.d.value, importSymbolNode.d.name.d.value); + } + } + } + }); + } + + return true; + } + + override visitWith(node: WithNode): boolean { + node.d.withItems.forEach((item) => { + this.walk(item.d.expr); + if (item.d.target) { + this._bindPossibleTupleNamedTarget(item.d.target); + this._addInferredTypeAssignmentForVariable(item.d.target, item); + this._createAssignmentTargetFlowNodes(item.d.target, /* walkTargets */ true, /* unbound */ false); + } + }); + + // We need to treat the "with" body as though it is wrapped in a try/except + // block because some context managers catch and suppress exceptions. 
+ // We'll make use of a special "context manager label" which acts like + // a regular branch label in most respects except that it is disabled + // if none of the context managers support exception suppression. We won't + // be able to determine whether any context managers support exception + // processing until the type evaluation phase. + // + // (pre with suite) + // ^ + // |<--------------------| + // (with suite)<--------------| + // ^ | + // | ContextManagerSwallowExceptionTarget + // | ^ + // | PostContextManagerLabel + // | ^ + // |---------------------| + // | + // (after with) + // + // In addition to the ContextManagerSwallowExceptionTarget, we'll create + // a second target called ContextManagerForwardExceptionTarget that forwards + // exceptions to existing exception targets if they exist. + + const contextManagerSwallowExceptionTarget = this._createContextManagerLabel( + node.d.withItems.map((item) => item.d.expr), + !!node.d.isAsync, + /* blockIfSwallowsExceptions */ false + ); + this._addAntecedent(contextManagerSwallowExceptionTarget, this._currentFlowNode!); + + const contextManagerForwardExceptionTarget = this._createContextManagerLabel( + node.d.withItems.map((item) => item.d.expr), + !!node.d.isAsync, + /* blockIfSwallowsExceptions */ true + ); + this._currentExceptTargets.forEach((exceptionTarget) => { + this._addAntecedent(exceptionTarget, contextManagerForwardExceptionTarget); + }); + + const preWithSuiteNode = this._currentFlowNode!; + const postContextManagerLabel = this._createBranchLabel(preWithSuiteNode); + this._addAntecedent(postContextManagerLabel, contextManagerSwallowExceptionTarget!); + + postContextManagerLabel.affectedExpressions = this._trackCodeFlowExpressions(() => { + this._useExceptTargets([contextManagerSwallowExceptionTarget, contextManagerForwardExceptionTarget], () => { + this.walk(node.d.suite); + }); + + this._addAntecedent(postContextManagerLabel, this._currentFlowNode!); + this._currentFlowNode = 
postContextManagerLabel; + + // Model the call to `__exit__` as a potential exception generator. + if (!this._isCodeUnreachable()) { + this._addExceptTargets(this._currentFlowNode!); + } + + if (node.d.asyncToken && !this._fileInfo.ipythonMode) { + // Top level async with is allowed in ipython mode. + const enclosingFunction = ParseTreeUtils.getEnclosingFunction(node); + if (!enclosingFunction || !enclosingFunction.d.isAsync) { + this._addSyntaxError(LocMessage.asyncNotInAsyncFunction(), node.d.asyncToken); + } + } + }); + + return false; + } + + override visitTernary(node: TernaryNode): boolean { + const preTernaryFlowNode = this._currentFlowNode!; + const trueLabel = this._createBranchLabel(); + const falseLabel = this._createBranchLabel(); + const postExpressionLabel = this._createBranchLabel(preTernaryFlowNode); + + postExpressionLabel.affectedExpressions = this._trackCodeFlowExpressions(() => { + // Handle the test expression. + this._bindConditional(node.d.testExpr, trueLabel, falseLabel); + + // Handle the "true" portion (the "if" expression). + this._currentFlowNode = this._finishFlowLabel(trueLabel); + this.walk(node.d.ifExpr); + this._addAntecedent(postExpressionLabel, this._currentFlowNode); + + // Handle the "false" portion (the "else" expression). + this._currentFlowNode = this._finishFlowLabel(falseLabel); + this.walk(node.d.elseExpr); + this._addAntecedent(postExpressionLabel, this._currentFlowNode); + + this._currentFlowNode = this._finishFlowLabel(postExpressionLabel); + }); + + return false; + } + + override visitUnaryOperation(node: UnaryOperationNode): boolean { + if (node.d.operator === OperatorType.Not && this._currentFalseTarget && this._currentTrueTarget) { + // Swap the existing true/false targets. 
+ this._bindConditional(node.d.expr, this._currentFalseTarget, this._currentTrueTarget); + } else { + // Temporarily set the true/false targets to undefined because + // this unary operation is not part of a chain of logical expressions + // (AND/OR/NOT subexpressions). + this._disableTrueFalseTargets(() => { + // Evaluate the operand expression. + this.walk(node.d.expr); + }); + } + + return false; + } + + override visitBinaryOperation(node: BinaryOperationNode): boolean { + if (node.d.operator === OperatorType.And || node.d.operator === OperatorType.Or) { + let trueTarget = this._currentTrueTarget; + let falseTarget = this._currentFalseTarget; + let postRightLabel: FlowLabel | undefined; + + if (!trueTarget || !falseTarget) { + postRightLabel = this._createBranchLabel(); + trueTarget = falseTarget = postRightLabel; + } + + const preRightLabel = this._createBranchLabel(); + if (node.d.operator === OperatorType.And) { + this._bindConditional(node.d.leftExpr, preRightLabel, falseTarget); + } else { + this._bindConditional(node.d.leftExpr, trueTarget, preRightLabel); + } + this._currentFlowNode = this._finishFlowLabel(preRightLabel); + this._bindConditional(node.d.rightExpr, trueTarget, falseTarget); + if (postRightLabel) { + this._currentFlowNode = this._finishFlowLabel(postRightLabel); + } + } else { + // Temporarily set the true/false targets to undefined because + // this binary operation is not part of a chain of logical expressions + // (AND/OR/NOT subexpressions). + this._disableTrueFalseTargets(() => { + this.walk(node.d.leftExpr); + this.walk(node.d.rightExpr); + }); + } + + return false; + } + + override visitComprehension(node: ComprehensionNode): boolean { + const enclosingFunction = ParseTreeUtils.getEnclosingFunction(node); + + // The first iterable is executed outside of the comprehension scope. 
+ if (node.d.forIfNodes.length > 0 && node.d.forIfNodes[0].nodeType === ParseNodeType.ComprehensionFor) { + this.walk(node.d.forIfNodes[0].d.iterableExpr); + } + + this._createNewScope( + ScopeType.Comprehension, + this._getNonClassParentScope(), + /* proxyScope */ undefined, + () => { + AnalyzerNodeInfo.setScope(node, this._currentScope); + + const falseLabel = this._createBranchLabel(); + + // We'll walk the forIfNodes list twice. The first time we'll + // bind targets of for statements. The second time we'll walk + // expressions and create the control flow graph. + for (let i = 0; i < node.d.forIfNodes.length; i++) { + const compr = node.d.forIfNodes[i]; + const addedSymbols = new Map(); + if (compr.nodeType === ParseNodeType.ComprehensionFor) { + this._bindPossibleTupleNamedTarget(compr.d.targetExpr, addedSymbols); + this._addInferredTypeAssignmentForVariable(compr.d.targetExpr, compr); + + // Async for is not allowed outside of an async function + // unless we're in ipython mode. + if (compr.d.asyncToken && !this._fileInfo.ipythonMode) { + if (!enclosingFunction || !enclosingFunction.d.isAsync) { + // Allow if it's within a generator expression. Execution of + // generator expressions is deferred and therefore can be + // run within the context of an async function later. + if ( + node.parent?.nodeType === ParseNodeType.List || + node.parent?.nodeType === ParseNodeType.Set || + node.parent?.nodeType === ParseNodeType.Dictionary + ) { + this._addSyntaxError(LocMessage.asyncNotInAsyncFunction(), compr.d.asyncToken); + } + } + } + } + } + + for (let i = 0; i < node.d.forIfNodes.length; i++) { + const compr = node.d.forIfNodes[i]; + if (compr.nodeType === ParseNodeType.ComprehensionFor) { + // We already walked the first iterable expression above, + // so skip it here. 
+ if (i !== 0) { + this.walk(compr.d.iterableExpr); + } + + this._createAssignmentTargetFlowNodes( + compr.d.targetExpr, + /* walkTargets */ true, + /* unbound */ false + ); + } else { + const trueLabel = this._createBranchLabel(); + this._bindConditional(compr.d.testExpr, trueLabel, falseLabel); + this._currentFlowNode = this._finishFlowLabel(trueLabel); + } + } + + this.walk(node.d.expr); + this._addAntecedent(falseLabel, this._currentFlowNode!); + this._currentFlowNode = this._finishFlowLabel(falseLabel); + } + ); + + return false; + } + + override visitMatch(node: MatchNode) { + // Evaluate the subject expression. + this.walk(node.d.expr); + + const expressionList: CodeFlowReferenceExpressionNode[] = []; + let isSubjectNarrowable = this._isNarrowingExpression(node.d.expr, expressionList); + + // We also support narrowing of individual tuple entries found within a + // match subject expression, so add those here as well. + if (node.d.expr.nodeType === ParseNodeType.Tuple) { + node.d.expr.d.items.forEach((itemExpr) => { + if (this._isNarrowingExpression(itemExpr, expressionList)) { + isSubjectNarrowable = true; + } + }); + } + + if (isSubjectNarrowable) { + expressionList.forEach((expr) => { + const referenceKey = createKeyForReference(expr); + this._currentScopeCodeFlowExpressions!.add(referenceKey); + }); + } + + const postMatchLabel = this._createBranchLabel(); + let foundIrrefutableCase = false; + + // Model the match statement as a series of if/elif clauses + // each of which tests for the specified pattern (and optionally + // for the guard condition). + node.d.cases.forEach((caseStatement) => { + const postCaseLabel = this._createBranchLabel(); + const preGuardLabel = this._createBranchLabel(); + const preSuiteLabel = this._createBranchLabel(); + + // Evaluate the pattern. 
+ this._addAntecedent(preGuardLabel, this._currentFlowNode!); + + if (!caseStatement.d.isIrrefutable) { + this._addAntecedent(postCaseLabel, this._currentFlowNode!); + } else if (!caseStatement.d.guardExpr) { + foundIrrefutableCase = true; + } + + this._currentFlowNode = this._finishFlowLabel(preGuardLabel); + + // Note the active match subject expression prior to binding + // the pattern. If the pattern involves any targets that overwrite + // the subject expression, this will be set to undefined. + this._currentMatchSubjExpr = node.d.expr; + + // Bind the pattern. + this.walk(caseStatement.d.pattern); + + // If the pattern involves targets that overwrite the subject + // expression, skip creating a flow node for narrowing the subject. + if (this._currentMatchSubjExpr) { + this._createFlowNarrowForPattern(node.d.expr, caseStatement); + this._currentMatchSubjExpr = undefined; + } + + // Apply the guard expression. + if (caseStatement.d.guardExpr) { + this._bindConditional(caseStatement.d.guardExpr, preSuiteLabel, postCaseLabel); + } else { + this._addAntecedent(preSuiteLabel, this._currentFlowNode); + } + + this._currentFlowNode = this._finishFlowLabel(preSuiteLabel); + + // Bind the body of the case statement. + this.walk(caseStatement.d.suite); + this._addAntecedent(postMatchLabel, this._currentFlowNode); + + this._currentFlowNode = this._finishFlowLabel(postCaseLabel); + }); + + // Add a final narrowing step for the subject expression for the entire + // match statement. This will compute the narrowed type if no case + // statements are matched. + if (isSubjectNarrowable) { + this._createFlowNarrowForPattern(node.d.expr, node); + } + + // Create an "implied else" to conditionally gate code flow based on + // whether the narrowed type of the subject expression is Never at this point. 
+ if (!foundIrrefutableCase) { + this._createFlowExhaustedMatch(node); + } + + this._addAntecedent(postMatchLabel, this._currentFlowNode!); + this._currentFlowNode = this._finishFlowLabel(postMatchLabel); + + return false; + } + + override visitPatternAs(node: PatternAsNode) { + const postOrLabel = this._createBranchLabel(); + + node.d.orPatterns.forEach((orPattern) => { + this.walk(orPattern); + this._addAntecedent(postOrLabel, this._currentFlowNode!); + }); + + this._currentFlowNode = this._finishFlowLabel(postOrLabel); + + if (node.d.target) { + this.walk(node.d.target); + const symbol = this._bindNameToScope(this._currentScope, node.d.target); + this._createAssignmentTargetFlowNodes(node.d.target, /* walkTargets */ false, /* unbound */ false); + + if (symbol) { + const declaration: VariableDeclaration = { + type: DeclarationType.Variable, + node: node.d.target, + isConstant: isConstantName(node.d.target.d.value), + inferredTypeSource: node, + uri: this._fileInfo.fileUri, + range: convertTextRangeToRange(node.d.target, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + isExplicitBinding: this._currentScope.getBindingType(node.d.target.d.value) !== undefined, + }; + symbol.addDeclaration(declaration); + } + } + + return false; + } + + override visitPatternCapture(node: PatternCaptureNode) { + if (!node.d.isWildcard) { + this._addPatternCaptureTarget(node.d.target); + } + + return true; + } + + override visitPatternMappingExpandEntry(node: PatternMappingExpandEntryNode) { + if (node.d.target.d.value !== '_') { + this._addPatternCaptureTarget(node.d.target); + } + + return true; + } + + private _addTypingImportAliasesFromBuiltinsScope() { + if (!this._fileInfo.builtinsScope) { + return; + } + + const symbolTable = this._fileInfo.builtinsScope.symbolTable; + symbolTable.forEach((symbol, name) => { + const typingImportAlias = symbol.getTypingSymbolAlias(); + if (typingImportAlias && 
!symbol.isExternallyHidden()) { + this._typingSymbolAliases.set(name, typingImportAlias); + } + }); + } + + private _formatModuleName(node: ModuleNameNode): string { + return '.'.repeat(node.d.leadingDots) + node.d.nameParts.map((part) => part.d.value).join('.'); + } + + private _getNonClassParentScope() { + // We may not be able to use the current scope if it's a class scope. + // Walk up until we find a non-class scope instead. + let parentScope = this._currentScope; + while (parentScope.type === ScopeType.Class) { + parentScope = parentScope.parent!; + } + + return parentScope; + } + + private _addSlotsToCurrentScope(slotNameNodes: StringListNode[]) { + assert(this._currentScope.type === ScopeType.Class); + + let slotsContainsDict = false; + + for (const slotNameNode of slotNameNodes) { + const slotName = slotNameNode.d.strings[0].d.value; + + if (slotName === '__dict__') { + slotsContainsDict = true; + continue; + } + + let symbol = this._currentScope.lookUpSymbol(slotName); + if (symbol) { + symbol.setIsSlotsMember(); + } else { + symbol = this._currentScope.addSymbol( + slotName, + SymbolFlags.InitiallyUnbound | + SymbolFlags.ClassMember | + SymbolFlags.InstanceMember | + SymbolFlags.SlotsMember + ); + const honorPrivateNaming = this._fileInfo.diagnosticRuleSet.reportPrivateUsage !== 'none'; + if (isPrivateOrProtectedName(slotName) && honorPrivateNaming) { + symbol.setIsPrivateMember(); + } + } + + const declaration: VariableDeclaration = { + type: DeclarationType.Variable, + node: slotNameNode, + isConstant: isConstantName(slotName), + isDefinedBySlots: true, + uri: this._fileInfo.fileUri, + range: convertTextRangeToRange(slotNameNode, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + isExplicitBinding: this._currentScope.getBindingType(slotName) !== undefined, + }; + symbol.addDeclaration(declaration); + } + + if (!slotsContainsDict) { + this._currentScope.setSlotsNames(slotNameNodes.map((node) => 
node.d.strings[0].d.value)); + } + } + + private _isInComprehension(node: ParseNode, ignoreOutermostIterable = false) { + let curNode: ParseNode | undefined = node; + let prevNode: ParseNode | undefined; + let prevPrevNode: ParseNode | undefined; + + while (curNode) { + if (curNode.nodeType === ParseNodeType.Comprehension) { + if (ignoreOutermostIterable && curNode.d.forIfNodes.length > 0) { + const outermostCompr = curNode.d.forIfNodes[0]; + if (prevNode === outermostCompr && outermostCompr.nodeType === ParseNodeType.ComprehensionFor) { + if (prevPrevNode === outermostCompr.d.iterableExpr) { + return false; + } + } + } + + return true; + } + + prevPrevNode = prevNode; + prevNode = curNode; + curNode = curNode.parent; + } + return false; + } + + private _addPatternCaptureTarget(target: NameNode) { + const symbol = this._bindNameToScope(this._currentScope, target); + this._createAssignmentTargetFlowNodes(target, /* walkTargets */ false, /* unbound */ false); + + // See if the target overwrites all or a portion of the subject expression. 
+ if (this._currentMatchSubjExpr) { + if ( + ParseTreeUtils.isMatchingExpression(target, this._currentMatchSubjExpr) || + ParseTreeUtils.isPartialMatchingExpression(target, this._currentMatchSubjExpr) + ) { + this._currentMatchSubjExpr = undefined; + } + } + + if (symbol) { + const declaration: VariableDeclaration = { + type: DeclarationType.Variable, + node: target, + isConstant: isConstantName(target.d.value), + inferredTypeSource: target.parent, + uri: this._fileInfo.fileUri, + range: convertTextRangeToRange(target, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + isExplicitBinding: this._currentScope.getBindingType(target.d.value) !== undefined, + }; + symbol.addDeclaration(declaration); + } + } + + private _useExceptTargets(targets: FlowLabel[], callback: () => void) { + const prevExceptTargets = this._currentExceptTargets; + this._currentExceptTargets = targets; + callback(); + this._currentExceptTargets = prevExceptTargets; + } + + // Attempts to resolve the module name, import it, and return + // its __all__ symbols. + private _getDunderAllNamesFromImport(varName: string): string[] | undefined { + const varSymbol = this._currentScope.lookUpSymbol(varName); + if (!varSymbol) { + return undefined; + } + + // There should be only one declaration for the variable. + const aliasDecl = varSymbol.getDeclarations().find((decl) => decl.type === DeclarationType.Alias) as + | AliasDeclaration + | undefined; + const resolvedUri = + aliasDecl?.uri && !aliasDecl.uri.isEmpty() && aliasDecl.loadSymbolsFromPath + ? aliasDecl.uri + : aliasDecl?.submoduleFallback?.uri && + !aliasDecl.submoduleFallback.uri.isEmpty() && + aliasDecl.submoduleFallback.loadSymbolsFromPath + ? 
aliasDecl.submoduleFallback.uri + : undefined; + if (!resolvedUri) { + return undefined; + } + + let lookupInfo = this._fileInfo.importLookup(resolvedUri); + if (lookupInfo?.dunderAllNames) { + return lookupInfo.dunderAllNames; + } + + if (aliasDecl?.submoduleFallback?.uri && !aliasDecl.submoduleFallback.uri.isEmpty()) { + lookupInfo = this._fileInfo.importLookup(aliasDecl.submoduleFallback.uri); + return lookupInfo?.dunderAllNames; + } + + return undefined; + } + + private _addImplicitFromImport(node: ImportFromNode, importInfo?: ImportResult) { + const symbolName = node.d.module.d.nameParts[0].d.value; + const symbol = this._bindNameValueToScope(this._currentScope, symbolName); + if (symbol) { + this._createAliasDeclarationForMultipartImportName(node, /* importAlias */ undefined, importInfo, symbol); + } + + this._createFlowAssignment(node.d.module.d.nameParts[0]); + } + + private _createAliasDeclarationForMultipartImportName( + node: ImportAsNode | ImportFromNode, + importAlias: NameNode | undefined, + importInfo: ImportResult | undefined, + symbol: Symbol + ) { + const firstNamePartValue = node.d.module.d.nameParts[0].d.value; + + AnalyzerNodeInfo.setFlowNode(node, this._currentFlowNode!); + + let uriOfFirstSubmodule: Uri | undefined; + if (importInfo && importInfo.isImportFound && !importInfo.isNativeLib && importInfo.resolvedUris.length > 0) { + uriOfFirstSubmodule = importInfo.resolvedUris[0]; + } + + // See if there's already a matching alias declaration for this import. + // if so, we'll update it rather than creating a new one. This is required + // to handle cases where multiple import statements target the same + // starting symbol such as "import a.b.c" and "import a.d". In this case, + // we'll build a single declaration that describes the combined actions + // of both import statements, thus reflecting the behavior of the + // python module loader. 
+ const existingDecl = symbol + .getDeclarations() + .find( + (decl) => + decl.type === DeclarationType.Alias && + decl.firstNamePart === firstNamePartValue && + (!uriOfFirstSubmodule || uriOfFirstSubmodule.equals(decl.uri)) + ); + let newDecl: AliasDeclaration; + let uriOfLastSubmodule: Uri; + if (importInfo && importInfo.isImportFound && !importInfo.isNativeLib && importInfo.resolvedUris.length > 0) { + uriOfLastSubmodule = importInfo.resolvedUris[importInfo.resolvedUris.length - 1]; + } else { + uriOfLastSubmodule = UnresolvedModuleMarker; + } + + const isResolved = + importInfo && importInfo.isImportFound && !importInfo.isNativeLib && importInfo.resolvedUris.length > 0; + + if (existingDecl) { + newDecl = existingDecl as AliasDeclaration; + } else if (isResolved) { + newDecl = { + type: DeclarationType.Alias, + node, + uri: uriOfLastSubmodule, + loadSymbolsFromPath: false, + range: getEmptyRange(), + usesLocalName: !!importAlias, + moduleName: importAlias + ? this._formatModuleName(node.d.module) + : '.'.repeat(node.d.module.d.leadingDots) + firstNamePartValue, + firstNamePart: firstNamePartValue, + isInExceptSuite: this._isInExceptSuite, + }; + } else { + // If we couldn't resolve the import, create a dummy declaration with a + // bogus path so it gets an unknown type (rather than an unbound type) at + // analysis time. + newDecl = { + type: DeclarationType.Alias, + node, + uri: uriOfLastSubmodule, + loadSymbolsFromPath: true, + range: getEmptyRange(), + usesLocalName: !!importAlias, + moduleName: importInfo?.importName ?? '', + firstNamePart: importAlias + ? this._formatModuleName(node.d.module) + : '.'.repeat(node.d.module.d.leadingDots) + firstNamePartValue, + isUnresolved: true, + isInExceptSuite: this._isInExceptSuite, + }; + } + + // See if there is import info for this part of the path. This allows us + // to implicitly import all of the modules in a multi-part module name. 
+ const implicitImportInfo = AnalyzerNodeInfo.getImportInfo(node.d.module.d.nameParts[0]); + if (implicitImportInfo && implicitImportInfo.resolvedUris.length) { + newDecl.uri = implicitImportInfo.resolvedUris[0]; + newDecl.loadSymbolsFromPath = true; + this._addImplicitImportsToLoaderActions(implicitImportInfo, newDecl); + } + + // Add the implicit imports for this module if it's the last + // name part we're resolving. + if (importAlias || node.d.module.d.nameParts.length === 1) { + newDecl.uri = uriOfLastSubmodule; + newDecl.loadSymbolsFromPath = true; + newDecl.isUnresolved = false; + + if (importInfo) { + this._addImplicitImportsToLoaderActions(importInfo, newDecl); + } + } else { + // Fill in the remaining name parts. + let curLoaderActions: ModuleLoaderActions = newDecl; + + for (let i = 1; i < node.d.module.d.nameParts.length; i++) { + const namePartValue = node.d.module.d.nameParts[i].d.value; + + // Is there an existing loader action for this name? + let loaderActions = curLoaderActions.implicitImports + ? curLoaderActions.implicitImports.get(namePartValue) + : undefined; + if (!loaderActions) { + const loaderActionPath = + importInfo && i < importInfo.resolvedUris.length + ? importInfo.resolvedUris[i] + : UnresolvedModuleMarker; + + // Allocate a new loader action. + loaderActions = { + uri: loaderActionPath, + loadSymbolsFromPath: false, + implicitImports: new Map(), + isUnresolved: !isResolved, + }; + if (!curLoaderActions.implicitImports) { + curLoaderActions.implicitImports = new Map(); + } + curLoaderActions.implicitImports.set(namePartValue, loaderActions); + } + + if (i === node.d.module.d.nameParts.length - 1) { + // If this is the last name part we're resolving, add in the + // implicit imports as well. 
+ if (importInfo && i < importInfo.resolvedUris.length) { + loaderActions.uri = importInfo.resolvedUris[i]; + loaderActions.loadSymbolsFromPath = true; + this._addImplicitImportsToLoaderActions(importInfo, loaderActions); + } + } else { + // If this isn't the last name part we're resolving, see if there + // is import info for this part of the path. This allows us to implicitly + // import all of the modules in a multi-part module name (e.g. "import a.b.c" + // imports "a" and "a.b" and "a.b.c"). + const implicitImportInfo = AnalyzerNodeInfo.getImportInfo(node.d.module.d.nameParts[i]); + if (implicitImportInfo && implicitImportInfo.resolvedUris.length) { + loaderActions.uri = implicitImportInfo.resolvedUris[i]; + loaderActions.loadSymbolsFromPath = true; + this._addImplicitImportsToLoaderActions(implicitImportInfo, loaderActions); + } + } + + curLoaderActions = loaderActions; + } + } + + if (!existingDecl) { + symbol.addDeclaration(newDecl); + } + } + + private _walkStatementsAndReportUnreachable(statements: StatementNode[]) { + let foundUnreachableStatement = false; + + for (const statement of statements) { + AnalyzerNodeInfo.setFlowNode(statement, this._currentFlowNode!); + + if (!foundUnreachableStatement) { + foundUnreachableStatement = this._isCodeUnreachable(); + } + + if (!foundUnreachableStatement) { + this.walk(statement); + } else { + // If we're within a function, we need to look for unreachable yield + // statements because they affect the behavior of the function (making + // it a generator) even if they're never executed. + if (this._targetFunctionDeclaration && !this._targetFunctionDeclaration.isGenerator) { + const yieldFinder = new YieldFinder(); + if (yieldFinder.checkContainsYield(statement)) { + this._targetFunctionDeclaration.isGenerator = true; + } + } + + // In case there are any class or function statements within this + // subtree, we need to create dummy scopes for them. The type analyzer + // depends on scopes being present. 
+ if (!this._moduleSymbolOnly) { + const dummyScopeGenerator = new DummyScopeGenerator(this._currentScope); + dummyScopeGenerator.walk(statement); + } + } + } + + return false; + } + + private _createStartFlowNode() { + const flowNode: FlowNode = { + flags: FlowFlags.Start, + id: this._getUniqueFlowNodeId(), + }; + return flowNode; + } + + private _createBranchLabel(preBranchAntecedent?: FlowNode) { + const flowNode: FlowBranchLabel = { + flags: FlowFlags.BranchLabel, + id: this._getUniqueFlowNodeId(), + antecedents: [], + preBranchAntecedent, + affectedExpressions: undefined, + }; + return flowNode; + } + + // Create a flow node that narrows the type of the subject expression for + // a specified case statement or the entire match statement (if the flow + // falls through the bottom of all cases). + private _createFlowNarrowForPattern(subjectExpression: ExpressionNode, statement: CaseNode | MatchNode) { + const flowNode: FlowNarrowForPattern = { + flags: FlowFlags.NarrowForPattern, + id: this._getUniqueFlowNodeId(), + subjectExpression, + statement, + antecedent: this._currentFlowNode!, + }; + + this._currentFlowNode! = flowNode; + } + + private _createContextManagerLabel( + expressions: ExpressionNode[], + isAsync: boolean, + blockIfSwallowsExceptions: boolean + ) { + const flowNode: FlowPostContextManagerLabel = { + flags: FlowFlags.PostContextManager | FlowFlags.BranchLabel, + id: this._getUniqueFlowNodeId(), + antecedents: [], + expressions, + affectedExpressions: undefined, + isAsync, + blockIfSwallowsExceptions, + }; + return flowNode; + } + + private _createLoopLabel() { + const flowNode: FlowLabel = { + flags: FlowFlags.LoopLabel, + id: this._getUniqueFlowNodeId(), + antecedents: [], + affectedExpressions: undefined, + }; + + return flowNode; + } + + private _finishFlowLabel(node: FlowLabel) { + // If there were no antecedents, this is unreachable. 
+ if (node.antecedents.length === 0) { + return Binder._unreachableStructuralFlowNode; + } + + // If there was only one antecedent and this is a simple + // branch label, there's no need for a label to exist. + if (node.antecedents.length === 1 && node.flags === FlowFlags.BranchLabel) { + return node.antecedents[0]; + } + + // The cyclomatic complexity is the number of edges minus the + // number of nodes in the graph. Add n-1 where n is the number + // of antecedents (edges) and 1 represents the label node. + this._codeFlowComplexity += node.antecedents.length - 1; + + return node; + } + + // Creates a node that creates a "gate" that is closed (doesn't allow for code + // flow) if the specified expression is never once it is narrowed (in either the + // positive or negative case). + private _bindNeverCondition(node: ExpressionNode, target: FlowLabel, isPositiveTest: boolean) { + const expressionList: CodeFlowReferenceExpressionNode[] = []; + + if (node.nodeType === ParseNodeType.UnaryOperation && node.d.operator === OperatorType.Not) { + this._bindNeverCondition(node.d.expr, target, !isPositiveTest); + } else if ( + node.nodeType === ParseNodeType.BinaryOperation && + (node.d.operator === OperatorType.And || node.d.operator === OperatorType.Or) + ) { + let isAnd = node.d.operator === OperatorType.And; + if (isPositiveTest) { + isAnd = !isAnd; + } + + if (isAnd) { + // In the And case, we need to gate the synthesized else clause if both + // of the operands evaluate to never once they are narrowed. + const savedCurrentFlowNode = this._currentFlowNode; + this._bindNeverCondition(node.d.leftExpr, target, isPositiveTest); + this._currentFlowNode = savedCurrentFlowNode; + this._bindNeverCondition(node.d.rightExpr, target, isPositiveTest); + } else { + const initialCurrentFlowNode = this._currentFlowNode; + + // In the Or case, we need to gate the synthesized else clause if either + // of the operands evaluate to never. 
+ const afterLabel = this._createBranchLabel(); + this._bindNeverCondition(node.d.leftExpr, afterLabel, isPositiveTest); + + // If the condition didn't result in any new flow nodes, we can skip + // checking the other condition. + if (initialCurrentFlowNode !== this._currentFlowNode) { + this._currentFlowNode = this._finishFlowLabel(afterLabel); + + const prevCurrentNode = this._currentFlowNode; + this._bindNeverCondition(node.d.rightExpr, target, isPositiveTest); + + // If the second condition resulted in no new control flow node, we can + // eliminate this entire subgraph. + if (prevCurrentNode === this._currentFlowNode) { + this._currentFlowNode = initialCurrentFlowNode; + } + } + } + } else { + // Limit only to expressions that contain a narrowable subexpression + // that is a name. This avoids complexities with composite expressions like + // member access or index expressions. + if (this._isNarrowingExpression(node, expressionList, { filterForNeverNarrowing: true })) { + const filteredExprList = expressionList.filter((expr) => expr.nodeType === ParseNodeType.Name); + if (filteredExprList.length > 0) { + this._currentFlowNode = this._createFlowConditional( + isPositiveTest ? 
FlowFlags.TrueNeverCondition : FlowFlags.FalseNeverCondition, + this._currentFlowNode!, + node + ); + } + } + + this._addAntecedent(target, this._currentFlowNode!); + } + } + + private _bindConditional(node: ExpressionNode, trueTarget: FlowLabel, falseTarget: FlowLabel) { + this._setTrueFalseTargets(trueTarget, falseTarget, () => { + this.walk(node); + }); + + if (!this._isLogicalExpression(node)) { + this._addAntecedent( + trueTarget, + this._createFlowConditional(FlowFlags.TrueCondition, this._currentFlowNode!, node) + ); + this._addAntecedent( + falseTarget, + this._createFlowConditional(FlowFlags.FalseCondition, this._currentFlowNode!, node) + ); + } + } + + private _disableTrueFalseTargets(callback: () => void): void { + this._setTrueFalseTargets(/* trueTarget */ undefined, /* falseTarget */ undefined, callback); + } + + private _setTrueFalseTargets( + trueTarget: FlowLabel | undefined, + falseTarget: FlowLabel | undefined, + callback: () => void + ) { + const savedTrueTarget = this._currentTrueTarget; + const savedFalseTarget = this._currentFalseTarget; + this._currentTrueTarget = trueTarget; + this._currentFalseTarget = falseTarget; + + callback(); + + this._currentTrueTarget = savedTrueTarget; + this._currentFalseTarget = savedFalseTarget; + } + + private _createFlowConditional(flags: FlowFlags, antecedent: FlowNode, expression: ExpressionNode): FlowNode { + if (antecedent.flags & (FlowFlags.UnreachableStructural | FlowFlags.UnreachableStaticCondition)) { + return antecedent; + } + const staticValue = StaticExpressions.evaluateStaticBoolLikeExpression( + expression, + this._fileInfo.executionEnvironment, + this._fileInfo.definedConstants, + this._typingImportAliases, + this._sysImportAliases + ); + if ( + (staticValue === true && flags & FlowFlags.FalseCondition) || + (staticValue === false && flags & FlowFlags.TrueCondition) + ) { + return Binder._unreachableStaticConditionFlowNode; + } + + const expressionList: CodeFlowReferenceExpressionNode[] = []; + if 
( + !this._isNarrowingExpression(expression, expressionList, { + filterForNeverNarrowing: (flags & (FlowFlags.TrueNeverCondition | FlowFlags.FalseNeverCondition)) !== 0, + }) + ) { + return antecedent; + } + + expressionList.forEach((expr) => { + const referenceKey = createKeyForReference(expr); + this._currentScopeCodeFlowExpressions!.add(referenceKey); + }); + + // Select the first name expression. + const filteredExprList = expressionList.filter((expr) => expr.nodeType === ParseNodeType.Name); + + const conditionalFlowNode: FlowCondition = { + flags, + id: this._getUniqueFlowNodeId(), + reference: filteredExprList.length > 0 ? (filteredExprList[0] as NameNode) : undefined, + expression, + antecedent, + }; + + this._addExceptTargets(conditionalFlowNode); + + return conditionalFlowNode; + } + + // Indicates whether the expression is a NOT, AND or OR expression. + private _isLogicalExpression(expression: ExpressionNode): boolean { + switch (expression.nodeType) { + case ParseNodeType.UnaryOperation: { + return expression.d.operator === OperatorType.Not; + } + + case ParseNodeType.BinaryOperation: { + return expression.d.operator === OperatorType.And || expression.d.operator === OperatorType.Or; + } + } + + return false; + } + + // Determines whether the specified expression can be used for conditional + // type narrowing. The expression atoms (names, member accesses and index) + // are provided as an output in the expressionList. + // If filterForNeverNarrowing is true, we limit some types of narrowing + // expressions for performance reasons. + // The isComplexExpression parameter is used internally to determine whether + // the call is an atom (name, member access, index - plus a "not" form of + // these) or something more complex (binary operator, call, etc.). 
+ private _isNarrowingExpression( + expression: ExpressionNode, + expressionList: CodeFlowReferenceExpressionNode[], + options: NarrowExprOptions = {} + ): boolean { + switch (expression.nodeType) { + case ParseNodeType.Name: + case ParseNodeType.MemberAccess: + case ParseNodeType.Index: { + if (options.filterForNeverNarrowing) { + // Never narrowing doesn't support member access or index + // expressions. + if (expression.nodeType !== ParseNodeType.Name) { + return false; + } + + // Never narrowing doesn't support simple names (falsy + // or truthy narrowing) because it's too expensive and + // provides relatively little utility. + if (!options.isComplexExpression) { + return false; + } + } + + if (isCodeFlowSupportedForReference(expression)) { + expressionList.push(expression); + + if (!options.filterForNeverNarrowing) { + // If the expression is a member access expression, add its + // leftExpression to the expression list because that expression + // can be narrowed based on the attribute type. + if (expression.nodeType === ParseNodeType.MemberAccess && options.allowDiscriminatedNarrowing) { + if (isCodeFlowSupportedForReference(expression.d.leftExpr)) { + expressionList.push(expression.d.leftExpr); + } + } + + // If the expression is an index expression with a supported + // subscript, add its baseExpression to the expression list because + // that expression can be narrowed. 
+ if ( + expression.nodeType === ParseNodeType.Index && + expression.d.items.length === 1 && + !expression.d.trailingComma && + expression.d.items[0].d.argCategory === ArgCategory.Simple + ) { + if (isCodeFlowSupportedForReference(expression.d.leftExpr)) { + expressionList.push(expression.d.leftExpr); + } + } + } + return true; + } + + return false; + } + + case ParseNodeType.AssignmentExpression: { + expressionList.push(expression.d.name); + this._isNarrowingExpression(expression.d.rightExpr, expressionList, { + ...options, + isComplexExpression: true, + }); + return true; + } + + case ParseNodeType.BinaryOperation: { + const isOrIsNotOperator = + expression.d.operator === OperatorType.Is || expression.d.operator === OperatorType.IsNot; + const equalsOrNotEqualsOperator = + expression.d.operator === OperatorType.Equals || expression.d.operator === OperatorType.NotEquals; + + if (isOrIsNotOperator || equalsOrNotEqualsOperator) { + // Look for "X is None", "X is not None", "X == None", "X != None". + // These are commonly-used patterns used in control flow. + if ( + expression.d.rightExpr.nodeType === ParseNodeType.Constant && + expression.d.rightExpr.d.constType === KeywordType.None + ) { + return this._isNarrowingExpression(expression.d.leftExpr, expressionList, { + ...options, + isComplexExpression: true, + allowDiscriminatedNarrowing: true, + }); + } + + // Look for "type(X) is Y" or "type(X) is not Y". 
+ if ( + isOrIsNotOperator && + expression.d.leftExpr.nodeType === ParseNodeType.Call && + expression.d.leftExpr.d.leftExpr.nodeType === ParseNodeType.Name && + expression.d.leftExpr.d.leftExpr.d.value === 'type' && + expression.d.leftExpr.d.args.length === 1 && + expression.d.leftExpr.d.args[0].d.argCategory === ArgCategory.Simple + ) { + return this._isNarrowingExpression( + expression.d.leftExpr.d.args[0].d.valueExpr, + expressionList, + { ...options, isComplexExpression: true } + ); + } + + const isLeftNarrowing = this._isNarrowingExpression(expression.d.leftExpr, expressionList, { + ...options, + isComplexExpression: true, + allowDiscriminatedNarrowing: true, + }); + + // Look for "X is Y" or "X is not Y". + // Look for X == or X != + // Look for len(X) == or len(X) != + return isLeftNarrowing; + } + + // Look for len(X) < , len(X) <= , len(X) > , len(X) >= . + if (expression.d.rightExpr.nodeType === ParseNodeType.Number && expression.d.rightExpr.d.isInteger) { + if ( + expression.d.operator === OperatorType.LessThan || + expression.d.operator === OperatorType.LessThanOrEqual || + expression.d.operator === OperatorType.GreaterThan || + expression.d.operator === OperatorType.GreaterThanOrEqual + ) { + const isLeftNarrowing = this._isNarrowingExpression(expression.d.leftExpr, expressionList, { + ...options, + isComplexExpression: true, + }); + + return isLeftNarrowing; + } + } + + // Look for " in Y" or " not in Y". + if (expression.d.operator === OperatorType.In || expression.d.operator === OperatorType.NotIn) { + if ( + expression.d.leftExpr.nodeType === ParseNodeType.StringList && + this._isNarrowingExpression(expression.d.rightExpr, expressionList, { + ...options, + isComplexExpression: true, + }) + ) { + return true; + } + } + + // Look for "X in Y" or "X not in Y". 
+ if (expression.d.operator === OperatorType.In || expression.d.operator === OperatorType.NotIn) { + const isLeftNarrowable = this._isNarrowingExpression(expression.d.leftExpr, expressionList, { + ...options, + isComplexExpression: true, + }); + + const isRightNarrowable = this._isNarrowingExpression(expression.d.rightExpr, expressionList, { + ...options, + isComplexExpression: true, + }); + + return isLeftNarrowable || isRightNarrowable; + } + + return false; + } + + case ParseNodeType.UnaryOperation: { + return ( + expression.d.operator === OperatorType.Not && + this._isNarrowingExpression(expression.d.expr, expressionList, { + ...options, + isComplexExpression: false, + }) + ); + } + + case ParseNodeType.AugmentedAssignment: { + return this._isNarrowingExpression(expression.d.rightExpr, expressionList, { + ...options, + isComplexExpression: true, + }); + } + + case ParseNodeType.Call: { + if ( + expression.d.leftExpr.nodeType === ParseNodeType.Name && + (expression.d.leftExpr.d.value === 'isinstance' || + expression.d.leftExpr.d.value === 'issubclass') && + expression.d.args.length === 2 + ) { + return this._isNarrowingExpression(expression.d.args[0].d.valueExpr, expressionList, { + ...options, + isComplexExpression: true, + }); + } + + if ( + expression.d.leftExpr.nodeType === ParseNodeType.Name && + expression.d.leftExpr.d.value === 'callable' && + expression.d.args.length === 1 + ) { + return this._isNarrowingExpression(expression.d.args[0].d.valueExpr, expressionList, { + ...options, + isComplexExpression: true, + }); + } + + // Is this potentially a call to a user-defined type guard function? + if (expression.d.args.length >= 1) { + // Never narrowing doesn't support type guards because they do not + // offer negative narrowing. 
+ if (options.filterForNeverNarrowing) { + return false; + } + + return this._isNarrowingExpression(expression.d.args[0].d.valueExpr, expressionList, { + ...options, + isComplexExpression: true, + }); + } + } + } + + return false; + } + + private _createAssignmentTargetFlowNodes(target: ExpressionNode, walkTargets: boolean, unbound: boolean) { + switch (target.nodeType) { + case ParseNodeType.Name: + case ParseNodeType.MemberAccess: { + this._createFlowAssignment(target, unbound); + if (walkTargets) { + this.walk(target); + } + break; + } + + case ParseNodeType.Index: { + this._createFlowAssignment(target, unbound); + if (walkTargets) { + this.walk(target); + } + break; + } + + case ParseNodeType.Tuple: { + target.d.items.forEach((expr) => { + this._createAssignmentTargetFlowNodes(expr, walkTargets, unbound); + }); + break; + } + + case ParseNodeType.TypeAnnotation: { + this._createAssignmentTargetFlowNodes(target.d.valueExpr, /* walkTargets */ false, unbound); + if (walkTargets) { + this.walk(target); + } + break; + } + + case ParseNodeType.Unpack: { + this._createAssignmentTargetFlowNodes(target.d.expr, /* walkTargets */ false, unbound); + if (walkTargets) { + this.walk(target); + } + break; + } + + case ParseNodeType.List: { + target.d.items.forEach((entry) => { + this._createAssignmentTargetFlowNodes(entry, walkTargets, unbound); + }); + break; + } + + default: { + if (walkTargets) { + this.walk(target); + } + } + } + } + + private _createCallFlowNode(node: CallNode) { + if (!this._isCodeUnreachable()) { + this._addExceptTargets(this._currentFlowNode!); + + const flowNode: FlowCall = { + flags: FlowFlags.Call, + id: this._getUniqueFlowNodeId(), + node, + antecedent: this._currentFlowNode!, + }; + + this._currentFlowNode = flowNode; + } + } + + private _createVariableAnnotationFlowNode() { + if (!this._isCodeUnreachable()) { + const flowNode: FlowVariableAnnotation = { + flags: FlowFlags.VariableAnnotation, + id: this._getUniqueFlowNodeId(), + antecedent: 
this._currentFlowNode!, + }; + + this._currentFlowNode = flowNode; + } + } + + private _createFlowAssignment(node: CodeFlowReferenceExpressionNode, unbound = false) { + let targetSymbolId = indeterminateSymbolId; + if (node.nodeType === ParseNodeType.Name) { + const symbolWithScope = this._currentScope.lookUpSymbolRecursive(node.d.value); + assert(symbolWithScope !== undefined); + targetSymbolId = symbolWithScope!.symbol.id; + } + + const prevFlowNode = this._currentFlowNode!; + if (!this._isCodeUnreachable() && isCodeFlowSupportedForReference(node)) { + const flowNode: FlowAssignment = { + flags: FlowFlags.Assignment, + id: this._getUniqueFlowNodeId(), + node, + antecedent: this._currentFlowNode!, + targetSymbolId, + }; + + const referenceKey = createKeyForReference(node); + this._currentScopeCodeFlowExpressions!.add(referenceKey); + + if (unbound) { + flowNode.flags |= FlowFlags.Unbind; + } + + // Assume that an assignment to a member access expression + // can potentially generate an exception. + if (node.nodeType === ParseNodeType.MemberAccess) { + this._addExceptTargets(flowNode); + } + this._currentFlowNode = flowNode; + } + + // If we're marking the node as unbound and there is already a flow node + // associated with the node, don't replace it. This case applies for symbols + // introduced in except clauses. If there is no use the previous flow node + // associated, use the previous flow node (applies in the del case). + // Otherwise, the node will be evaluated as unbound at this point in the flow. + if (!unbound || AnalyzerNodeInfo.getFlowNode(node) === undefined) { + AnalyzerNodeInfo.setFlowNode(node, unbound ? 
prevFlowNode : this._currentFlowNode!); + } + } + + private _createFlowWildcardImport(node: ImportFromNode, names: string[]) { + if (!this._isCodeUnreachable()) { + const flowNode: FlowWildcardImport = { + flags: FlowFlags.WildcardImport, + id: this._getUniqueFlowNodeId(), + node, + names, + antecedent: this._currentFlowNode!, + }; + + this._addExceptTargets(flowNode); + this._currentFlowNode = flowNode; + } + + AnalyzerNodeInfo.setFlowNode(node, this._currentFlowNode!); + } + + private _createFlowExhaustedMatch(node: MatchNode) { + if (!this._isCodeUnreachable()) { + const flowNode: FlowExhaustedMatch = { + flags: FlowFlags.ExhaustedMatch, + id: this._getUniqueFlowNodeId(), + node, + antecedent: this._currentFlowNode!, + subjectExpression: node.d.expr, + }; + + this._currentFlowNode = flowNode; + } + + AnalyzerNodeInfo.setAfterFlowNode(node, this._currentFlowNode!); + } + + private _isCodeUnreachable() { + return !!( + this._currentFlowNode!.flags & + (FlowFlags.UnreachableStaticCondition | FlowFlags.UnreachableStructural) + ); + } + + private _addExceptTargets(flowNode: FlowNode) { + // If there are any except targets, then we're in a try block, and we + // have to assume that an exception can be raised after every assignment. 
+ if (this._currentExceptTargets) { + this._currentExceptTargets.forEach((label) => { + this._addAntecedent(label, flowNode); + }); + } + } + + private _trackCodeFlowExpressions(callback: () => void): Set { + const savedExpressions = this._currentScopeCodeFlowExpressions; + this._currentScopeCodeFlowExpressions = new Set(); + callback(); + + const scopedExpressions = this._currentScopeCodeFlowExpressions; + + if (savedExpressions) { + this._currentScopeCodeFlowExpressions.forEach((value) => { + savedExpressions.add(value); + }); + } + + this._currentScopeCodeFlowExpressions = savedExpressions; + + return scopedExpressions; + } + + private _bindLoopStatement(preLoopLabel: FlowLabel, postLoopLabel: FlowLabel, callback: () => void) { + const savedContinueTarget = this._currentContinueTarget; + const savedBreakTarget = this._currentBreakTarget; + + this._currentContinueTarget = preLoopLabel; + this._currentBreakTarget = postLoopLabel; + + preLoopLabel.affectedExpressions = this._trackCodeFlowExpressions(callback); + + this._currentContinueTarget = savedContinueTarget; + this._currentBreakTarget = savedBreakTarget; + } + + private _addAntecedent(label: FlowLabel, antecedent: FlowNode) { + if ( + !(this._currentFlowNode!.flags & (FlowFlags.UnreachableStructural | FlowFlags.UnreachableStaticCondition)) + ) { + // Don't add the same antecedent twice. + if (!label.antecedents.some((existing) => existing.id === antecedent.id)) { + label.antecedents.push(antecedent); + } + } + } + + private _bindNameToScope(scope: Scope, node: NameNode, addedSymbols?: Map) { + return this._bindNameValueToScope(scope, node.d.value, addedSymbols); + } + + private _bindNameValueToScope(scope: Scope, name: string, addedSymbols?: Map) { + // Is this name already bound to a scope other than the local one? + const bindingType = this._currentScope.getBindingType(name); + + if (bindingType !== undefined) { + const scopeToUse = + bindingType === NameBindingType.Nonlocal + ? this._currentScope.parent! 
+ : this._currentScope.getGlobalScope().scope; + const symbolWithScope = scopeToUse.lookUpSymbolRecursive(name); + if (symbolWithScope) { + return symbolWithScope.symbol; + } + } else { + // Don't overwrite an existing symbol. + let symbol = scope.lookUpSymbol(name); + if (!symbol) { + symbol = scope.addSymbol(name, SymbolFlags.InitiallyUnbound | SymbolFlags.ClassMember); + + if (this._currentScope.type === ScopeType.Module || this._currentScope.type === ScopeType.Builtin) { + if (isPrivateOrProtectedName(name)) { + if (isPrivateName(name)) { + // Private names within classes are mangled, so they are always externally hidden. + if (scope.type === ScopeType.Class) { + symbol.setIsExternallyHidden(); + } else { + this._potentialPrivateSymbols.set(name, symbol); + } + } else if (this._fileInfo.isStubFile || this._fileInfo.isInPyTypedPackage) { + if (this._currentScope.type === ScopeType.Builtin) { + // Don't include private-named symbols in the builtin scope. + symbol.setIsExternallyHidden(); + } else { + this._potentialPrivateSymbols.set(name, symbol); + } + } else { + symbol.setIsPrivateMember(); + } + } + } + + if (addedSymbols) { + addedSymbols.set(name, symbol); + } + } + + return symbol; + } + + return undefined; + } + + private _bindPossibleTupleNamedTarget(target: ExpressionNode, addedSymbols?: Map) { + switch (target.nodeType) { + case ParseNodeType.Name: { + this._bindNameToScope(this._currentScope, target, addedSymbols); + break; + } + + case ParseNodeType.Tuple: { + target.d.items.forEach((expr) => { + this._bindPossibleTupleNamedTarget(expr, addedSymbols); + }); + break; + } + + case ParseNodeType.List: { + target.d.items.forEach((expr) => { + this._bindPossibleTupleNamedTarget(expr, addedSymbols); + }); + break; + } + + case ParseNodeType.TypeAnnotation: { + this._bindPossibleTupleNamedTarget(target.d.valueExpr, addedSymbols); + break; + } + + case ParseNodeType.Unpack: { + this._bindPossibleTupleNamedTarget(target.d.expr, addedSymbols); + break; + } + } 
+ } + + private _addImplicitSymbolToCurrentScope( + nameValue: string, + node: ModuleNode | ClassNode | FunctionNode, + type: IntrinsicType + ) { + const symbol = this._addSymbolToCurrentScope(nameValue, /* isInitiallyUnbound */ false); + if (symbol) { + symbol.addDeclaration({ + type: DeclarationType.Intrinsic, + node, + name: nameValue, + intrinsicType: type, + uri: this._fileInfo.fileUri, + range: getEmptyRange(), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + }); + symbol.setIsIgnoredForProtocolMatch(); + } + } + + // Adds a new symbol with the specified name if it doesn't already exist. + private _addSymbolToCurrentScope(nameValue: string, isInitiallyUnbound: boolean) { + let symbol = this._currentScope.lookUpSymbol(nameValue); + + if (!symbol) { + let symbolFlags = SymbolFlags.None; + + if (isInitiallyUnbound) { + symbolFlags |= SymbolFlags.InitiallyUnbound; + } + + if (this._currentScope.type === ScopeType.Class) { + symbolFlags |= SymbolFlags.ClassMember; + } + + if (this._fileInfo.isStubFile && isPrivateOrProtectedName(nameValue)) { + symbolFlags |= SymbolFlags.ExternallyHidden; + } + + // Add the symbol. Assume that symbols with a default type source ID + // are "implicit" symbols added to the scope. These are not initially unbound. + symbol = this._currentScope.addSymbol(nameValue, symbolFlags); + } + + return symbol; + } + + private _createNewScope( + scopeType: ScopeType, + parentScope: Scope | undefined, + proxyScope: Scope | undefined, + callback: () => void + ) { + const prevScope = this._currentScope; + const newScope = new Scope(scopeType, parentScope, proxyScope); + this._currentScope = newScope; + + // If this scope is an execution scope, allocate a new reference map. 
+ const isExecutionScope = + scopeType === ScopeType.Builtin || scopeType === ScopeType.Module || scopeType === ScopeType.Function; + const prevExpressions = this._currentScopeCodeFlowExpressions; + + if (isExecutionScope) { + this._currentScopeCodeFlowExpressions = new Set(); + } + + callback(); + + this._currentScopeCodeFlowExpressions = prevExpressions; + this._currentScope = prevScope; + + return newScope; + } + + private _addInferredTypeAssignmentForVariable( + target: ExpressionNode, + source: ParseNode, + isPossibleTypeAlias = false + ) { + switch (target.nodeType) { + case ParseNodeType.Name: { + const name = target; + const symbolWithScope = this._currentScope.lookUpSymbolRecursive(name.d.value); + if (symbolWithScope && symbolWithScope.symbol) { + const declaration: VariableDeclaration = { + type: DeclarationType.Variable, + node: target, + isConstant: isConstantName(target.d.value), + inferredTypeSource: source, + isInferenceAllowedInPyTyped: this._isInferenceAllowedInPyTyped(name.d.value), + typeAliasName: isPossibleTypeAlias ? 
target : undefined, + uri: this._fileInfo.fileUri, + range: convertTextRangeToRange(name, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + docString: this._getVariableDocString(target), + isExplicitBinding: this._currentScope.getBindingType(name.d.value) !== undefined, + }; + symbolWithScope.symbol.addDeclaration(declaration); + } + break; + } + + case ParseNodeType.MemberAccess: { + const memberAccessInfo = this._getMemberAccessInfo(target); + if (memberAccessInfo) { + const name = target.d.member; + + let symbol = memberAccessInfo.classScope.lookUpSymbol(name.d.value); + if (!symbol) { + symbol = memberAccessInfo.classScope.addSymbol(name.d.value, SymbolFlags.InitiallyUnbound); + const honorPrivateNaming = this._fileInfo.diagnosticRuleSet.reportPrivateUsage !== 'none'; + if (isPrivateOrProtectedName(name.d.value) && honorPrivateNaming) { + symbol.setIsPrivateMember(); + } + } + + if (memberAccessInfo.isInstanceMember) { + // If a method (which has a declared type) is being overwritten + // by an expression with no declared type, don't mark it as + // an instance member because the type evaluator will think + // that it doesn't need to perform object binding. 
+ if ( + !symbol.isClassMember() || + !symbol + .getDeclarations() + .some((decl) => decl.type === DeclarationType.Function && decl.isMethod) + ) { + symbol.setIsInstanceMember(); + } + } else { + symbol.setIsClassMember(); + } + + const declaration: VariableDeclaration = { + type: DeclarationType.Variable, + node: target.d.member, + isConstant: isConstantName(name.d.value), + inferredTypeSource: source, + isDefinedByMemberAccess: true, + uri: this._fileInfo.fileUri, + range: convertTextRangeToRange(target.d.member, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + docString: this._getVariableDocString(target), + }; + symbol.addDeclaration(declaration); + } + break; + } + + case ParseNodeType.Tuple: { + target.d.items.forEach((expr) => { + this._addInferredTypeAssignmentForVariable(expr, source); + }); + break; + } + + case ParseNodeType.TypeAnnotation: { + this._addInferredTypeAssignmentForVariable(target.d.valueExpr, source); + break; + } + + case ParseNodeType.Unpack: { + this._addInferredTypeAssignmentForVariable(target.d.expr, source); + break; + } + + case ParseNodeType.List: { + target.d.items.forEach((entry) => { + this._addInferredTypeAssignmentForVariable(entry, source); + }); + break; + } + } + } + + private _isInferenceAllowedInPyTyped(symbolName: string): boolean { + const exemptSymbols = ['__match_args__', '__slots__', '__all__']; + return exemptSymbols.some((name) => name === symbolName); + } + + private _addTypeDeclarationForVariable(target: ExpressionNode, typeAnnotation: ExpressionNode) { + let declarationHandled = false; + + switch (target.nodeType) { + case ParseNodeType.Name: { + const name = target; + const symbolWithScope = this._currentScope.lookUpSymbolRecursive(name.d.value); + if (symbolWithScope && symbolWithScope.symbol) { + const finalInfo = this._isAnnotationFinal(typeAnnotation); + + let typeAnnotationNode: ExpressionNode | undefined = typeAnnotation; + if (finalInfo.isFinal) 
{ + if (!finalInfo.finalTypeNode) { + typeAnnotationNode = undefined; + } + } + + // Is this annotation indicating that the variable is a "ClassVar"? + const classVarInfo = this._isAnnotationClassVar(typeAnnotation); + + if (classVarInfo.isClassVar) { + if (!classVarInfo.classVarTypeNode) { + typeAnnotationNode = undefined; + } + } + + // PEP 591 indicates that a Final variable initialized within a class + // body should also be considered a ClassVar unless it's in a dataclass. + // We can't tell at this stage whether it's a dataclass, so we'll simply + // record whether it's a Final assigned in a class body. + let isFinalAssignedInClassBody = false; + if (finalInfo.isFinal) { + const containingClass = ParseTreeUtils.getEnclosingClassOrFunction(target); + if (containingClass && containingClass.nodeType === ParseNodeType.Class) { + // Make sure it's part of an assignment. + if ( + target.parent?.nodeType === ParseNodeType.Assignment || + target.parent?.parent?.nodeType === ParseNodeType.Assignment + ) { + isFinalAssignedInClassBody = true; + } + } + } + + const declaration: VariableDeclaration = { + type: DeclarationType.Variable, + node: target, + isConstant: isConstantName(name.d.value), + isFinal: finalInfo.isFinal, + typeAliasName: target, + uri: this._fileInfo.fileUri, + typeAnnotationNode, + range: convertTextRangeToRange(name, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + docString: this._getVariableDocString(target), + isExplicitBinding: this._currentScope.getBindingType(name.d.value) !== undefined, + }; + symbolWithScope.symbol.addDeclaration(declaration); + + if (isFinalAssignedInClassBody) { + symbolWithScope.symbol.setIsFinalVarInClassBody(); + } + + if (classVarInfo.isClassVar) { + symbolWithScope.symbol.setIsClassVar(); + } else if (!isFinalAssignedInClassBody) { + symbolWithScope.symbol.setIsInstanceMember(); + } + + // Look for an 'InitVar' either by itself or wrapped in an 'Annotated'. 
+ if (typeAnnotation.nodeType === ParseNodeType.Index) { + if (this._isDataclassesAnnotation(typeAnnotation.d.leftExpr, 'InitVar')) { + symbolWithScope.symbol.setIsInitVar(); + } else if ( + this._isTypingAnnotation(typeAnnotation.d.leftExpr, 'Annotated') && + typeAnnotation.d.items.length > 0 + ) { + const item0Expr = typeAnnotation.d.items[0].d.valueExpr; + if ( + item0Expr.nodeType === ParseNodeType.Index && + this._isDataclassesAnnotation(item0Expr.d.leftExpr, 'InitVar') + ) { + symbolWithScope.symbol.setIsInitVar(); + } + } + } + } + + declarationHandled = true; + break; + } + + case ParseNodeType.MemberAccess: { + // We need to determine whether this expression is declaring a class or + // instance variable. This is difficult because python doesn't provide + // a keyword for accessing "this". Instead, it uses naming conventions + // of "cls" and "self", but we don't want to rely on these naming + // conventions here. Instead, we'll apply some heuristics to determine + // whether the symbol on the LHS is a reference to the current class + // or an instance of the current class. 
+ + const memberAccessInfo = this._getMemberAccessInfo(target); + if (memberAccessInfo) { + const name = target.d.member; + + let symbol = memberAccessInfo.classScope.lookUpSymbol(name.d.value); + if (!symbol) { + symbol = memberAccessInfo.classScope.addSymbol(name.d.value, SymbolFlags.InitiallyUnbound); + const honorPrivateNaming = this._fileInfo.diagnosticRuleSet.reportPrivateUsage !== 'none'; + if (isPrivateOrProtectedName(name.d.value) && honorPrivateNaming) { + symbol.setIsPrivateMember(); + } + } + + if (memberAccessInfo.isInstanceMember) { + symbol.setIsInstanceMember(); + } else { + symbol.setIsClassMember(); + } + + const finalInfo = this._isAnnotationFinal(typeAnnotation); + const declaration: VariableDeclaration = { + type: DeclarationType.Variable, + node: target.d.member, + isConstant: isConstantName(name.d.value), + isDefinedByMemberAccess: true, + isFinal: finalInfo.isFinal, + uri: this._fileInfo.fileUri, + typeAnnotationNode: finalInfo.isFinal && !finalInfo.finalTypeNode ? undefined : typeAnnotation, + range: convertTextRangeToRange(target.d.member, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + docString: this._getVariableDocString(target), + }; + symbol.addDeclaration(declaration); + + declarationHandled = true; + } + break; + } + } + + if (!declarationHandled) { + this._addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.annotationNotSupported(), + typeAnnotation + ); + } + } + + // Determines whether the expression refers to a type exported by the typing + // or typing_extensions modules. We can directly evaluate the types at binding + // time. We assume here that the code isn't making use of some custom type alias + // to refer to the typing types. 
+ private _isTypingAnnotation(typeAnnotation: ExpressionNode, name: string): boolean { + return this._isKnownAnnotation(typeAnnotation, name, this._typingImportAliases, this._typingSymbolAliases); + } + + private _isDataclassesAnnotation(typeAnnotation: ExpressionNode, name: string): boolean { + return this._isKnownAnnotation( + typeAnnotation, + name, + this._dataclassesImportAliases, + this._dataclassesSymbolAliases + ); + } + + private _isKnownAnnotation( + typeAnnotation: ExpressionNode, + name: string, + importAliases: string[], + symbolAliases: Map + ) { + let annotationNode = typeAnnotation; + + // Is this a quoted annotation? + if (annotationNode.nodeType === ParseNodeType.StringList && annotationNode.d.annotation) { + annotationNode = annotationNode.d.annotation; + } + + if (annotationNode.nodeType === ParseNodeType.Name) { + const alias = symbolAliases.get(annotationNode.d.value); + if (alias === name) { + return true; + } + } else if (annotationNode.nodeType === ParseNodeType.MemberAccess) { + if (annotationNode.d.leftExpr.nodeType === ParseNodeType.Name && annotationNode.d.member.d.value === name) { + const baseName = annotationNode.d.leftExpr.d.value; + return importAliases.some((alias) => alias === baseName); + } + } + + return false; + } + + private _getVariableDocString(node: ExpressionNode): string | undefined { + const docNode = ParseTreeUtils.getVariableDocStringNode(node); + if (!docNode) { + return undefined; + } + + // A docstring can consist of multiple joined strings in a single expression. + const strings = docNode.d.strings; + if (strings.length === 1) { + // Common case. + return strings[0].d.value; + } + + return strings.map((s) => s.d.value).join(''); + } + + // Determines if the specified type annotation expression is a "Final". + // It returns a value indicating whether the expression is a "Final" + // expression and whether it's a "raw" Final with no type arguments. 
+ private _isAnnotationFinal(typeAnnotation: ExpressionNode | undefined): FinalInfo { + let isFinal = false; + let finalTypeNode: ExpressionNode | undefined; + + if (typeAnnotation) { + // Allow Final to be enclosed in ClassVar. Normally, Final implies + // ClassVar, but this combination is required in the case of dataclasses. + const classVarInfo = this._isAnnotationClassVar(typeAnnotation); + if (classVarInfo?.classVarTypeNode) { + typeAnnotation = classVarInfo.classVarTypeNode; + } + + if (this._isTypingAnnotation(typeAnnotation, 'Final')) { + isFinal = true; + } else if ( + typeAnnotation.nodeType === ParseNodeType.Index && + typeAnnotation.d.items.length > 0 && + this._isTypingAnnotation(typeAnnotation.d.leftExpr, 'Annotated') + ) { + return this._isAnnotationFinal(typeAnnotation.d.items[0].d.valueExpr); + } else if (typeAnnotation.nodeType === ParseNodeType.Index && typeAnnotation.d.items.length === 1) { + // Recursively call to see if the base expression is "Final". + const finalInfo = this._isAnnotationFinal(typeAnnotation.d.leftExpr); + if ( + finalInfo.isFinal && + typeAnnotation.d.items[0].d.argCategory === ArgCategory.Simple && + !typeAnnotation.d.items[0].d.name && + !typeAnnotation.d.trailingComma + ) { + isFinal = true; + finalTypeNode = typeAnnotation.d.items[0].d.valueExpr; + } + } + } + + return { isFinal, finalTypeNode }; + } + + // Determines if the specified type annotation expression is a "ClassVar". + // It returns a value indicating whether the expression is a "ClassVar" + // expression and whether it's a "raw" ClassVar with no type arguments. + private _isAnnotationClassVar(typeAnnotation: ExpressionNode | undefined): ClassVarInfo { + let isClassVar = false; + let classVarTypeNode: ExpressionNode | undefined; + + while (typeAnnotation) { + // Is this a quoted annotation? 
+ if (typeAnnotation.nodeType === ParseNodeType.StringList && typeAnnotation.d.annotation) { + typeAnnotation = typeAnnotation.d.annotation; + } + + if ( + typeAnnotation.nodeType === ParseNodeType.Index && + typeAnnotation.d.items.length > 0 && + this._isTypingAnnotation(typeAnnotation.d.leftExpr, 'Annotated') + ) { + typeAnnotation = typeAnnotation.d.items[0].d.valueExpr; + } else if (this._isTypingAnnotation(typeAnnotation, 'ClassVar')) { + isClassVar = true; + break; + } else if (typeAnnotation.nodeType === ParseNodeType.Index && typeAnnotation.d.items.length === 1) { + // Recursively call to see if the base expression is "ClassVar". + const finalInfo = this._isAnnotationClassVar(typeAnnotation.d.leftExpr); + if ( + finalInfo.isClassVar && + typeAnnotation.d.items[0].d.argCategory === ArgCategory.Simple && + !typeAnnotation.d.items[0].d.name && + !typeAnnotation.d.trailingComma + ) { + isClassVar = true; + classVarTypeNode = typeAnnotation.d.items[0].d.valueExpr; + } + break; + } else { + break; + } + } + + return { isClassVar, classVarTypeNode }; + } + + // Determines whether a member access expression is referring to a + // member of a class (either a class or instance member). This will + // typically take the form "self.x" or "cls.x". + private _getMemberAccessInfo(node: MemberAccessNode): MemberAccessInfo | undefined { + // We handle only simple names on the left-hand side of the expression, + // not calls, nested member accesses, index expressions, etc. + if (node.d.leftExpr.nodeType !== ParseNodeType.Name) { + return undefined; + } + + const leftSymbolName = node.d.leftExpr.d.value; + + // Make sure the expression is within a function (i.e. a method) that's + // within a class definition. 
+ const methodNode = ParseTreeUtils.getEnclosingFunction(node); + if (!methodNode) { + return undefined; + } + + const classNode = ParseTreeUtils.getEnclosingClass(methodNode, /* stopAtFunction */ true); + if (!classNode) { + return undefined; + } + + // Determine whether the left-hand side indicates a class or + // instance member. + let isInstanceMember = false; + + if (methodNode.d.params.length < 1 || !methodNode.d.params[0].d.name) { + return undefined; + } + + const className = classNode.d.name.d.value; + const firstParamName = methodNode.d.params[0].d.name.d.value; + + if (leftSymbolName === className) { + isInstanceMember = false; + } else { + if (leftSymbolName !== firstParamName) { + return undefined; + } + + // To determine whether the first parameter of the method + // refers to the class or the instance, we need to apply + // some heuristics. + const implicitClassMethods = ['__new__', '__init_subclass__', '__class_getitem__']; + if (implicitClassMethods.includes(methodNode.d.name.d.value)) { + // Several methods are special. They act as class methods even + // though they don't have a @classmethod decorator. + isInstanceMember = false; + } else { + // Assume that it's an instance member unless we find + // a decorator that tells us otherwise. + isInstanceMember = true; + for (const decorator of methodNode.d.decorators) { + let decoratorName: string | undefined; + + if (decorator.d.expr.nodeType === ParseNodeType.Name) { + decoratorName = decorator.d.expr.d.value; + } else if ( + decorator.d.expr.nodeType === ParseNodeType.MemberAccess && + decorator.d.expr.d.leftExpr.nodeType === ParseNodeType.Name && + decorator.d.expr.d.leftExpr.d.value === 'builtins' + ) { + decoratorName = decorator.d.expr.d.member.d.value; + } + + if (decoratorName === 'staticmethod') { + // A static method doesn't have a "self" or "cls" parameter. + return undefined; + } else if (decoratorName === 'classmethod') { + // A classmethod implies that the first parameter is "cls". 
+ isInstanceMember = false; + break; + } + } + } + } + + const classScope = AnalyzerNodeInfo.getScope(classNode)!; + assert(classScope !== undefined); + + return { + classNode, + methodNode, + classScope, + isInstanceMember, + }; + } + + private _addImplicitImportsToLoaderActions(importResult: ImportResult, loaderActions: ModuleLoaderActions) { + importResult.filteredImplicitImports?.forEach((implicitImport) => { + const existingLoaderAction = loaderActions.implicitImports + ? loaderActions.implicitImports.get(implicitImport.name) + : undefined; + if (existingLoaderAction) { + existingLoaderAction.uri = implicitImport.uri; + existingLoaderAction.loadSymbolsFromPath = true; + } else { + if (!loaderActions.implicitImports) { + loaderActions.implicitImports = new Map(); + } + loaderActions.implicitImports.set(implicitImport.name, { + uri: implicitImport.uri, + loadSymbolsFromPath: true, + implicitImports: new Map(), + }); + } + }); + } + + // Handles some special-case assignment statements that are found + // within the typings.pyi file. 
+ private _handleTypingStubAssignmentOrAnnotation(node: AssignmentNode | TypeAnnotationNode) { + if (!this._fileInfo.isTypingStubFile) { + return false; + } + + let annotationNode: TypeAnnotationNode; + + if (node.nodeType === ParseNodeType.TypeAnnotation) { + annotationNode = node; + } else { + if (node.d.leftExpr.nodeType !== ParseNodeType.TypeAnnotation) { + return false; + } + + annotationNode = node.d.leftExpr; + } + + if (annotationNode.d.valueExpr.nodeType !== ParseNodeType.Name) { + return false; + } + + const assignedNameNode = annotationNode.d.valueExpr; + const specialTypes: Set = new Set([ + 'Tuple', + 'Generic', + 'Protocol', + 'Callable', + 'Type', + 'ClassVar', + 'Final', + 'Literal', + 'TypedDict', + 'Union', + 'Optional', + 'Annotated', + 'TypeAlias', + 'Concatenate', + 'TypeGuard', + 'Unpack', + 'Self', + 'NoReturn', + 'Never', + 'LiteralString', + 'OrderedDict', + 'TypeIs', + ]); + + const assignedName = assignedNameNode.d.value; + + if (!specialTypes.has(assignedName)) { + return false; + } + + const specialBuiltInClassDeclaration: SpecialBuiltInClassDeclaration = { + type: DeclarationType.SpecialBuiltInClass, + node: annotationNode, + uri: this._fileInfo.fileUri, + range: convertTextRangeToRange(annotationNode, this._fileInfo.lines), + moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + }; + + const symbol = this._bindNameToScope(this._currentScope, annotationNode.d.valueExpr); + if (symbol) { + symbol.addDeclaration(specialBuiltInClassDeclaration); + } + + AnalyzerNodeInfo.setDeclaration(node, specialBuiltInClassDeclaration); + return true; + } + + private _deferBinding(callback: () => void) { + if (this._moduleSymbolOnly) { + return; + } + + this._deferredBindingTasks.push({ + scope: this._currentScope, + codeFlowExpressions: this._currentScopeCodeFlowExpressions!, + callback, + }); + } + + private _bindDeferred() { + while (this._deferredBindingTasks.length > 0) { + const nextItem = 
this._deferredBindingTasks.shift()!; + + // Reset the state + this._currentScope = nextItem.scope; + this._currentScopeCodeFlowExpressions = nextItem.codeFlowExpressions; + + nextItem.callback(); + } + } + + private _bindYield(node: YieldNode | YieldFromNode) { + const functionNode = ParseTreeUtils.getEnclosingFunction(node); + + if (!functionNode) { + if (!ParseTreeUtils.getEnclosingLambda(node)) { + this._addSyntaxError(LocMessage.yieldOutsideFunction(), node); + } + } else if (functionNode.d.isAsync && node.nodeType === ParseNodeType.YieldFrom) { + // PEP 525 indicates that 'yield from' is not allowed in an + // async function. + this._addSyntaxError(LocMessage.yieldFromOutsideAsync(), node); + } + + if (this._targetFunctionDeclaration) { + if (!this._targetFunctionDeclaration.yieldStatements) { + this._targetFunctionDeclaration.yieldStatements = []; + } + this._targetFunctionDeclaration.yieldStatements.push(node); + this._targetFunctionDeclaration.isGenerator = true; + } + + if (node.d.expr) { + this.walk(node.d.expr); + } + + AnalyzerNodeInfo.setFlowNode(node, this._currentFlowNode!); + } + + private _getUniqueFlowNodeId() { + this._codeFlowComplexity += flowNodeComplexityContribution; + return getUniqueFlowNodeId(); + } + + private _addDiagnostic(rule: DiagnosticRule, message: string, textRange: TextRange) { + const diagLevel = this._fileInfo.diagnosticRuleSet[rule] as DiagnosticLevel; + + let diagnostic: Diagnostic | undefined; + switch (diagLevel) { + case 'error': + case 'warning': + case 'information': + diagnostic = this._fileInfo.diagnosticSink.addDiagnosticWithTextRange(diagLevel, message, textRange); + break; + + case 'none': + break; + + default: + return assertNever(diagLevel, `${diagLevel} is not expected`); + } + + if (diagnostic) { + diagnostic.setRule(rule); + } + + return diagnostic; + } + + private _addSyntaxError(message: string, textRange: TextRange) { + return this._fileInfo.diagnosticSink.addDiagnosticWithTextRange('error', message, 
textRange); + } +} + +export class YieldFinder extends ParseTreeWalker { + private _containsYield = false; + + checkContainsYield(node: ParseNode) { + this.walk(node); + return this._containsYield; + } + + override visitYield(node: YieldNode): boolean { + this._containsYield = true; + return false; + } + + override visitYieldFrom(node: YieldFromNode): boolean { + this._containsYield = true; + return false; + } +} + +export class ReturnFinder extends ParseTreeWalker { + private _containsReturn = false; + + checkContainsReturn(node: ParseNode) { + this.walk(node); + return this._containsReturn; + } + + override visitReturn(node: ReturnNode): boolean { + this._containsReturn = true; + return false; + } +} + +// Creates dummy scopes for classes or functions within a parse tree. +// This is needed in cases where the parse tree has been determined +// to be unreachable. There are code paths where the type evaluator +// will still evaluate these types, and it depends on the presence +// of a scope. 
+export class DummyScopeGenerator extends ParseTreeWalker { + private _currentScope: Scope | undefined; + + constructor(currentScope: Scope | undefined) { + super(); + this._currentScope = currentScope; + } + + override visitClass(node: ClassNode): boolean { + const newScope = this._createNewScope(ScopeType.Class, () => { + this.walk(node.d.suite); + }); + + if (!AnalyzerNodeInfo.getScope(node)) { + AnalyzerNodeInfo.setScope(node, newScope); + } + + return false; + } + + override visitFunction(node: FunctionNode): boolean { + const newScope = this._createNewScope(ScopeType.Function, () => { + this.walk(node.d.suite); + }); + + if (!AnalyzerNodeInfo.getScope(node)) { + AnalyzerNodeInfo.setScope(node, newScope); + } + + return false; + } + + private _createNewScope(scopeType: ScopeType, callback: () => void) { + const prevScope = this._currentScope; + const newScope = new Scope(scopeType, this._currentScope); + this._currentScope = newScope; + + callback(); + + this._currentScope = prevScope; + return newScope; + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/cacheManager.ts b/python-parser/packages/pyright-internal/src/analyzer/cacheManager.ts new file mode 100644 index 00000000..ec6efc27 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/cacheManager.ts @@ -0,0 +1,199 @@ +/* + * cacheManager.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * A singleton that tracks the size of caches and empties them + * if memory usage approaches the max heap space. + */ + +import type { HeapInfo } from 'v8'; +import { Worker } from 'worker_threads'; +import { ConsoleInterface } from '../common/console'; +import { fail } from '../common/debug'; +import { getHeapStatistics, getSystemMemoryInfo } from '../common/memUtils'; + +export interface CacheOwner { + // Returns a number between 0 and 1 that indicates how full + // the cache is. 
+ getCacheUsage(): number; + + // Empties the cache, typically in response to a low-memory condition. + emptyCache(): void; +} + +export class CacheManager { + private _pausedCount = 0; + private readonly _cacheOwners: CacheOwner[] = []; + private _sharedUsageBuffer: SharedArrayBuffer | undefined; + private _sharedUsagePosition = 0; + private _lastHeapStats = Date.now(); + + constructor(private readonly _maxWorkers: number = 0) { + // Empty + } + + registerCacheOwner(provider: CacheOwner) { + this._cacheOwners.push(provider); + } + + addWorker(index: number, worker: Worker) { + // Send the sharedArrayBuffer to the worker so it can be used + // to keep track of heap usage on all threads. + const buffer = this._getSharedUsageBuffer(); + if (buffer) { + // The SharedArrayBuffer needs to be separate from data in order for it + // to be marshalled correctly. + worker.postMessage({ requestType: 'cacheUsageBuffer', sharedUsageBuffer: buffer, data: index.toString() }); + worker.on('exit', () => { + const view = new Float64Array(buffer); + view[index] = 0; + }); + } + } + + handleCachedUsageBufferMessage(msg: { + requestType: string; + data: string | null; + sharedUsageBuffer?: SharedArrayBuffer; + }) { + if (msg.requestType === 'cacheUsageBuffer') { + const index = parseInt(msg.data || '0'); + const buffer = msg.sharedUsageBuffer; + // Index of zero is reserved for the main thread so if + // the index isn't passed, don't save the shared buffer. 
+ if (buffer && index) { + this._sharedUsageBuffer = buffer; + this._sharedUsagePosition = index; + } + } + } + + unregisterCacheOwner(provider: CacheOwner) { + const index = this._cacheOwners.findIndex((p) => p === provider); + if (index < 0) { + fail('Specified cache provider not found'); + } else { + this._cacheOwners.splice(index, 1); + } + } + + pauseTracking(): { dispose(): void } { + const local = this; + local._pausedCount++; + return { + dispose() { + local._pausedCount--; + }, + }; + } + + getCacheUsage() { + if (this._pausedCount > 0) { + return -1; + } + + let totalUsage = 0; + + this._cacheOwners.forEach((p) => { + totalUsage += p.getCacheUsage(); + }); + + return totalUsage; + } + + emptyCache(console?: ConsoleInterface) { + if (console) { + const heapStats = getHeapStatistics(); + + console.info( + `Emptying type cache to avoid heap overflow. Used ${this._convertToMB( + heapStats.used_heap_size + )} out of ${this._convertToMB(heapStats.heap_size_limit)}.` + ); + } + + this._cacheOwners.forEach((p) => { + p.emptyCache(); + }); + } + + // Returns a ratio of used bytes to total bytes. + getUsedHeapRatio(console?: ConsoleInterface) { + if (this._pausedCount > 0) { + return -1; + } + + const heapStats = getHeapStatistics(); + let usage = this._getTotalHeapUsage(heapStats); + + if (console && Date.now() - this._lastHeapStats > 1000) { + // This can fill up the user's console, so we only do it once per second. 
+ this._lastHeapStats = Date.now(); + const systemMemInfo = getSystemMemoryInfo(); + + console.info( + `Heap stats: ` + + `total_memory_size=${this._convertToMB(systemMemInfo.total)}, ` + + `total_free_size=${this._convertToMB(systemMemInfo.free)}, ` + + `total_heap_size=${this._convertToMB(heapStats.total_heap_size)}, ` + + `used_heap_size=${this._convertToMB(heapStats.used_heap_size)}, ` + + `cross_worker_used_heap_size=${this._convertToMB(usage)}, ` + + `total_physical_size=${this._convertToMB(heapStats.total_physical_size)}, ` + + `total_available_size=${this._convertToMB(heapStats.total_available_size)}, ` + + `heap_size_limit=${this._convertToMB(heapStats.heap_size_limit)}` + ); + } + + // Total usage seems to be off by about 5%, so we'll add that back in + // to make the ratio more accurate. (200MB at 4GB) + usage += usage * 0.05; + + return usage / heapStats.heap_size_limit; + } + + private _convertToMB(bytes: number) { + return `${Math.round(bytes / (1024 * 1024))}MB`; + } + + private _getSharedUsageBuffer() { + try { + if (!this._sharedUsageBuffer && this._maxWorkers > 0) { + // Allocate enough space for the workers and the main thread. + this._sharedUsageBuffer = new SharedArrayBuffer(8 * (this._maxWorkers + 1)); + } + + return this._sharedUsageBuffer; + } catch { + // SharedArrayBuffer is not supported. 
+ return undefined; + } + } + + private _getTotalHeapUsage(heapStats: HeapInfo): number { + // If the SharedArrayBuffer is supported, we'll use it to to get usage + // from other threads and add that to our own + const buffer = this._getSharedUsageBuffer(); + if (buffer) { + const view = new Float64Array(buffer); + view[this._sharedUsagePosition] = heapStats.used_heap_size; + return view.reduce((a, b) => a + b, 0); + } + + return heapStats.used_heap_size; + } +} + +export namespace CacheManager { + export function is(obj: any): obj is CacheManager { + return ( + obj.registerCacheOwner !== undefined && + obj.unregisterCacheOwner !== undefined && + obj.pauseTracking !== undefined && + obj.getCacheUsage !== undefined && + obj.emptyCache !== undefined && + obj.getUsedHeapRatio !== undefined + ); + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/checker.ts b/python-parser/packages/pyright-internal/src/analyzer/checker.ts new file mode 100644 index 00000000..28d9c433 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/checker.ts @@ -0,0 +1,7634 @@ +/* + * checker.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * A parse tree walker that performs static type checking for + * a source file. Most of its work is performed by the type + * evaluator, but this module touches every node in the file + * to ensure that all statements and expressions are evaluated + * and checked. It also performs some additional checks that + * cannot (or should not be) performed lazily. 
+ */ + +import { Commands } from '../commands/commands'; +import { appendArray } from '../common/collectionUtils'; +import { DiagnosticLevel } from '../common/configOptions'; +import { assert, assertNever } from '../common/debug'; +import { Diagnostic, DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { PythonVersion, pythonVersion3_12, pythonVersion3_5, pythonVersion3_6 } from '../common/pythonVersion'; +import { TextRange } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { LocAddendum, LocMessage } from '../localization/localize'; +import { + ArgCategory, + AssertNode, + AssignmentExpressionNode, + AssignmentNode, + AugmentedAssignmentNode, + AwaitNode, + BinaryOperationNode, + CallNode, + CaseNode, + ClassNode, + ComprehensionIfNode, + ComprehensionNode, + DelNode, + DictionaryNode, + ErrorNode, + ExceptNode, + ExpressionNode, + ForNode, + FormatStringNode, + FunctionNode, + GlobalNode, + IfNode, + ImportAsNode, + ImportFromAsNode, + ImportFromNode, + IndexNode, + LambdaNode, + ListNode, + MatchNode, + MemberAccessNode, + ModuleNameNode, + ModuleNode, + NameNode, + NonlocalNode, + ParamCategory, + ParameterNode, + ParseNode, + ParseNodeType, + PatternClassNode, + RaiseNode, + ReturnNode, + SetNode, + SliceNode, + StatementListNode, + StatementNode, + StringListNode, + StringNode, + SuiteNode, + TernaryNode, + TryNode, + TupleNode, + TypeAliasNode, + TypeAnnotationNode, + TypeParameterListNode, + TypeParameterNode, + UnaryOperationNode, + UnpackNode, + WhileNode, + WithNode, + YieldFromNode, + YieldNode, + isExpressionNode, +} from '../parser/parseNodes'; +import { ParserOutput } from '../parser/parser'; +import { UnescapeError, UnescapeErrorType, getUnescapedString } from '../parser/stringTokenUtils'; +import { OperatorType, StringTokenFlags, TokenType } from '../parser/tokenizerTypes'; +import { AnalyzerFileInfo } from './analyzerFileInfo'; +import * as 
AnalyzerNodeInfo from './analyzerNodeInfo'; +import { ConstraintTracker } from './constraintTracker'; +import { getBoundCallMethod, getBoundInitMethod, getBoundNewMethod } from './constructors'; +import { addInheritedDataClassEntries } from './dataClasses'; +import { Declaration, DeclarationType, isAliasDeclaration, isVariableDeclaration } from './declaration'; +import { getNameNodeForDeclaration } from './declarationUtils'; +import { deprecatedAliases, deprecatedSpecialForms } from './deprecatedSymbols'; +import { getEnumDeclaredValueType, isEnumClassWithMembers, transformTypeForEnumMember } from './enums'; +import { ImportResolver, createImportedModuleDescriptor } from './importResolver'; +import { ImportResult, ImportType } from './importResult'; +import { getRelativeModuleName, getTopLevelImports } from './importStatementUtils'; +import { getParamListDetails } from './parameterUtils'; +import * as ParseTreeUtils from './parseTreeUtils'; +import { ParseTreeWalker } from './parseTreeWalker'; +import { validateClassPattern } from './patternMatching'; +import { isMethodOnlyProtocol, isProtocolUnsafeOverlap } from './protocols'; +import { Scope, ScopeType } from './scope'; +import { getScopeForNode } from './scopeUtils'; +import { IPythonMode } from './sourceFile'; +import { isStubFile } from './sourceMapper'; +import { evaluateStaticBoolExpression } from './staticExpressions'; +import { Symbol } from './symbol'; +import * as SymbolNameUtils from './symbolNameUtils'; +import { getLastTypedDeclarationForSymbol } from './symbolUtils'; +import { getEffectiveExtraItemsEntryType, getTypedDictMembersForClass } from './typedDicts'; +import { maxCodeComplexity } from './typeEvaluator'; +import { + Arg, + AssignTypeFlags, + FunctionTypeResult, + MemberAccessDeprecationInfo, + Reachability, + TypeEvaluator, + TypeResult, +} from './typeEvaluatorTypes'; +import { + enumerateLiteralsForType, + getElementTypeForContainerNarrowing, + getIsInstanceClassTypes, + 
narrowTypeForContainerElementType, + narrowTypeForInstanceOrSubclass, +} from './typeGuards'; +import { + AnyType, + ClassType, + ClassTypeFlags, + DataClassEntry, + EnumLiteral, + FunctionParam, + FunctionType, + ModuleType, + OverloadedType, + Type, + TypeBase, + TypeCategory, + TypeVarScopeType, + TypeVarType, + TypedDictEntry, + UnknownType, + Variance, + combineTypes, + isAnyOrUnknown, + isClass, + isClassInstance, + isFunction, + isFunctionOrOverloaded, + isInstantiableClass, + isModule, + isNever, + isOverloaded, + isParamSpec, + isPossiblyUnbound, + isTypeSame, + isTypeVar, + isTypeVarTuple, + isUnbound, + isUnion, + isUnknown, +} from './types'; +import { + ClassMember, + MemberAccessFlags, + applySolvedTypeVars, + buildSolutionFromSpecializedClass, + convertToInstance, + derivesFromClassRecursive, + doForEachSubtype, + getClassFieldsRecursive, + getDeclaredGeneratorReturnType, + getGeneratorTypeArgs, + getProtocolSymbolsRecursive, + getSpecializedTupleType, + getTypeVarArgsRecursive, + getTypeVarScopeIds, + isInstantiableMetaclass, + isLiteralType, + isLiteralTypeOrUnion, + isNoneInstance, + isPartlyUnknown, + isProperty, + isTupleClass, + isUnboundedTupleClass, + lookUpClassMember, + makeTypeVarsBound, + mapSubtypes, + partiallySpecializeType, + selfSpecializeClass, + transformPossibleRecursiveTypeAlias, +} from './typeUtils'; + +interface TypeVarUsageInfo { + typeVar: TypeVarType; + isExempt: boolean; + returnTypeUsageCount: number; + paramTypeUsageCount: number; + paramTypeWithEllipsisUsageCount: number; + paramWithEllipsis: string | undefined; + nodes: NameNode[]; +} + +// When enabled, this debug flag causes the code complexity of +// functions to be emitted. 
+const isPrintCodeComplexityEnabled = false; + +export class Checker extends ParseTreeWalker { + private readonly _moduleNode: ModuleNode; + private readonly _fileInfo: AnalyzerFileInfo; + private _isUnboundCheckSuppressed = false; + + // A list of all nodes that are defined within the module that + // have their own scopes. + private _scopedNodes: AnalyzerNodeInfo.ScopedNode[] = []; + + // A list of all visited type parameter lists. + private _typeParamLists: TypeParameterListNode[] = []; + + // A list of all visited multipart import statements. + private _multipartImports: ImportAsNode[] = []; + + constructor( + private _importResolver: ImportResolver, + private _evaluator: TypeEvaluator, + parseResults: ParserOutput, + private _dependentFiles?: ParserOutput[] + ) { + super(); + + this._moduleNode = parseResults.parseTree; + this._fileInfo = AnalyzerNodeInfo.getFileInfo(this._moduleNode)!; + } + + check() { + this._scopedNodes.push(this._moduleNode); + + // Report code complexity issues for the module. + const codeComplexity = AnalyzerNodeInfo.getCodeFlowComplexity(this._moduleNode); + + if (isPrintCodeComplexityEnabled) { + console.log( + `Code complexity of module ${this._fileInfo.fileUri.toUserVisibleString()} is ${codeComplexity.toString()}` + ); + } + + if (codeComplexity > maxCodeComplexity) { + this._evaluator.addDiagnosticForTextRange( + this._fileInfo, + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.codeTooComplexToAnalyze(), + { start: 0, length: 0 } + ); + } + + this._walkStatementsAndReportUnreachable(this._moduleNode.d.statements); + + // Mark symbols accessed by __all__ as accessed. 
+ const dunderAllInfo = AnalyzerNodeInfo.getDunderAllInfo(this._moduleNode); + if (dunderAllInfo) { + this._evaluator.markNamesAccessed(this._moduleNode, dunderAllInfo.names); + + this._reportUnusedDunderAllSymbols(dunderAllInfo.stringNodes); + } + + // Perform a one-time validation of symbols in all scopes + // defined in this module for things like unaccessed variables. + this._validateSymbolTables(); + + this._reportUnusedMultipartImports(); + + this._reportDuplicateImports(); + } + + override walk(node: ParseNode) { + if (!AnalyzerNodeInfo.isCodeUnreachable(node)) { + super.walk(node); + } else { + this._evaluator.suppressDiagnostics(node, () => { + super.walk(node); + }); + } + } + + override visitSuite(node: SuiteNode): boolean { + this._walkStatementsAndReportUnreachable(node.d.statements); + return false; + } + + override visitStatementList(node: StatementListNode) { + node.d.statements.forEach((statement) => { + if (isExpressionNode(statement)) { + // Evaluate the expression in case it wasn't otherwise evaluated + // through lazy analysis. This will mark referenced symbols as + // accessed and report any errors associated with it. + this._evaluator.getType(statement); + + this._reportUnusedExpression(statement); + } + }); + + return true; + } + + override visitClass(node: ClassNode): boolean { + const classTypeResult = this._evaluator.getTypeOfClass(node); + + if (node.d.typeParams) { + this.walk(node.d.typeParams); + } + this.walk(node.d.suite); + this.walkMultiple(node.d.decorators); + this.walkMultiple(node.d.arguments); + + if (classTypeResult) { + // Protocol classes cannot derive from non-protocol classes. 
+ if (ClassType.isProtocolClass(classTypeResult.classType)) { + node.d.arguments.forEach((arg) => { + if (!arg.d.name) { + const baseClassType = this._evaluator.getType(arg.d.valueExpr); + if ( + baseClassType && + isInstantiableClass(baseClassType) && + !ClassType.isBuiltIn(baseClassType, 'Protocol') && + !ClassType.isBuiltIn(baseClassType, 'Generic') + ) { + if (!ClassType.isProtocolClass(baseClassType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.protocolBaseClass().format({ + classType: classTypeResult.classType.shared.name, + baseType: baseClassType.shared.name, + }), + arg.d.valueExpr + ); + } + } + } + }); + + // If this is a generic protocol class, verify that its type variables + // have the proper variance. + this._validateProtocolTypeParamVariance(node, classTypeResult.classType); + } + + // Skip the slots check because class variables declared in a stub + // file are interpreted as instance variables. + if (!this._fileInfo.isStubFile) { + this._validateSlotsClassVarConflict(classTypeResult.classType); + } + + this._validateBaseClassOverrides(classTypeResult.classType); + + this._validateTypedDictOverrides(classTypeResult.classType); + + this._validateOverloadDecoratorConsistency(classTypeResult.classType); + + this._validateMultipleInheritanceBaseClasses(classTypeResult.classType, node.d.name); + + this._validateMultipleInheritanceCompatibility(classTypeResult.classType, node.d.name); + + this._validateConstructorConsistency(classTypeResult.classType, node.d.name); + + this._validateFinalMemberOverrides(classTypeResult.classType); + + this._validateInstanceVariableInitialization(node, classTypeResult.classType); + + this._validateFinalClassNotAbstract(classTypeResult.classType, node); + + this._validateDataClassPostInit(classTypeResult.classType); + + this._validateEnumMembers(classTypeResult.classType, node); + + if (ClassType.isTypedDictClass(classTypeResult.classType)) { + 
this._validateTypedDictClassSuite(node.d.suite); + } + + if (ClassType.isEnumClass(classTypeResult.classType)) { + this._validateEnumClassOverride(node, classTypeResult.classType); + } + + this._evaluator.validateInitSubclassArgs(node, classTypeResult.classType); + } + + this._scopedNodes.push(node); + + return false; + } + + override visitFunction(node: FunctionNode): boolean { + if (node.d.typeParams) { + this.walk(node.d.typeParams); + } + + if (!this._fileInfo.diagnosticRuleSet.analyzeUnannotatedFunctions && !this._fileInfo.isStubFile) { + if (ParseTreeUtils.isUnannotatedFunction(node)) { + this._evaluator.addInformation( + LocMessage.unannotatedFunctionSkipped().format({ name: node.d.name.d.value }), + node.d.name + ); + } + } + + const functionTypeResult = this._evaluator.getTypeOfFunction(node); + const containingClassNode = ParseTreeUtils.getEnclosingClass(node, /* stopAtFunction */ true); + + if (functionTypeResult) { + // Track whether we have seen a *args: P.args parameter. Named + // parameters after this need to be flagged as an error. + let sawParamSpecArgs = false; + + const keywordNames = new Set(); + const paramDetails = getParamListDetails(functionTypeResult.functionType); + + // Report any unknown or missing parameter types. + node.d.params.forEach((param, index) => { + if (param.d.name) { + if (param.d.category === ParamCategory.Simple && index >= paramDetails.positionOnlyParamCount) { + keywordNames.add(param.d.name.d.value); + } + + // Determine whether this is a P.args parameter. + if (param.d.category === ParamCategory.ArgsList) { + const annotationExpr = param.d.annotation ?? 
param.d.annotationComment; + if ( + annotationExpr && + annotationExpr.nodeType === ParseNodeType.MemberAccess && + annotationExpr.d.member.d.value === 'args' + ) { + const baseType = this._evaluator.getType(annotationExpr.d.leftExpr); + if (baseType && isParamSpec(baseType)) { + sawParamSpecArgs = true; + } + } + } else if (param.d.category === ParamCategory.KwargsDict) { + sawParamSpecArgs = false; + } + } + + if (param.d.name && param.d.category === ParamCategory.Simple && sawParamSpecArgs) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.namedParamAfterParamSpecArgs().format({ name: param.d.name.d.value }), + param.d.name + ); + } + + // Allow unknown and missing param types if the param is named '_'. + if (param.d.name && param.d.name.d.value !== '_') { + const paramIndex = functionTypeResult.functionType.shared.parameters.findIndex( + (p) => p.name === param.d.name?.d.value + ); + + if (paramIndex >= 0) { + const functionTypeParam = functionTypeResult.functionType.shared.parameters[paramIndex]; + const paramType = FunctionType.getParamType(functionTypeResult.functionType, paramIndex); + + if (this._fileInfo.diagnosticRuleSet.reportUnknownParameterType !== 'none') { + if ( + isUnknown(paramType) || + (isTypeVar(paramType) && + paramType.shared.isSynthesized && + !TypeVarType.isSelf(paramType)) + ) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnknownParameterType, + LocMessage.paramTypeUnknown().format({ paramName: param.d.name.d.value }), + param.d.name + ); + } else if (isPartlyUnknown(paramType)) { + const diagAddendum = new DiagnosticAddendum(); + diagAddendum.addMessage( + LocAddendum.paramType().format({ + paramType: this._evaluator.printType(paramType, { expandTypeAlias: true }), + }) + ); + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnknownParameterType, + LocMessage.paramTypePartiallyUnknown().format({ + paramName: param.d.name.d.value, + }) + diagAddendum.getString(), + param.d.name + ); 
+ } + } + + let hasAnnotation = false; + + if (FunctionParam.isTypeDeclared(functionTypeParam)) { + hasAnnotation = true; + } else { + // See if this is a "self" and "cls" parameter. They are exempt from this rule. + if (isTypeVar(paramType) && TypeVarType.isSelf(paramType)) { + hasAnnotation = true; + } + } + + if (!hasAnnotation && this._fileInfo.diagnosticRuleSet.reportMissingParameterType !== 'none') { + this._evaluator.addDiagnostic( + DiagnosticRule.reportMissingParameterType, + LocMessage.paramAnnotationMissing().format({ name: param.d.name.d.value }), + param.d.name + ); + } + } + } + }); + + // Verify that an unpacked TypedDict doesn't overlap any keyword parameters. + if (paramDetails.hasUnpackedTypedDict) { + const kwargsIndex = functionTypeResult.functionType.shared.parameters.length - 1; + const kwargsType = FunctionType.getParamType(functionTypeResult.functionType, kwargsIndex); + + if (isClass(kwargsType) && kwargsType.shared.typedDictEntries) { + const overlappingEntries = new Set(); + kwargsType.shared.typedDictEntries.knownItems.forEach((_, name) => { + if (keywordNames.has(name)) { + overlappingEntries.add(name); + } + }); + + if (overlappingEntries.size > 0) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.overlappingKeywordArgs().format({ + names: [...overlappingEntries.values()].join(', '), + }), + node.d.params[kwargsIndex].d.annotation ?? node.d.params[kwargsIndex] + ); + } + } + } + + // Check for invalid use of ParamSpec P.args and P.kwargs. 
+ const paramSpecParams = functionTypeResult.functionType.shared.parameters.filter((param, index) => { + const paramType = FunctionType.getParamType(functionTypeResult.functionType, index); + if (FunctionParam.isTypeDeclared(param) && isTypeVar(paramType) && isParamSpec(paramType)) { + if (param.category !== ParamCategory.Simple && param.name && paramType.priv.paramSpecAccess) { + return true; + } + } + + return false; + }); + + if (paramSpecParams.length === 1 && paramSpecParams[0].name) { + const paramNode = node.d.params.find((param) => param.d.name?.d.value === paramSpecParams[0].name); + const annotationNode = paramNode?.d.annotation ?? paramNode?.d.annotationComment; + + if (annotationNode) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.paramSpecArgsKwargsUsage(), + annotationNode + ); + } + } + + // If this is a stub, ensure that the return type is specified. + if (this._fileInfo.isStubFile) { + const returnAnnotation = node.d.returnAnnotation || node.d.funcAnnotationComment?.d.returnAnnotation; + if (!returnAnnotation) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnknownParameterType, + LocMessage.returnTypeUnknown(), + node.d.name + ); + } + } + + if (containingClassNode) { + this._validateMethod(node, functionTypeResult.functionType, containingClassNode); + } + } + + node.d.params.forEach((param, index) => { + if (param.d.defaultValue) { + this.walk(param.d.defaultValue); + } + + if (param.d.annotation) { + this.walk(param.d.annotation); + } + + if (param.d.annotationComment) { + this.walk(param.d.annotationComment); + } + + // Look for method parameters that are typed with TypeVars that have the wrong variance. 
+ if (functionTypeResult) { + const annotationNode = param.d.annotation || param.d.annotationComment; + if (annotationNode && index < functionTypeResult.functionType.shared.parameters.length) { + const paramType = FunctionType.getParamType(functionTypeResult.functionType, index); + const exemptMethods = ['__init__', '__new__']; + + if ( + containingClassNode && + isTypeVar(paramType) && + paramType.priv.scopeType === TypeVarScopeType.Class && + paramType.shared.declaredVariance === Variance.Covariant && + !paramType.shared.isSynthesized && + !exemptMethods.some((name) => name === functionTypeResult.functionType.shared.name) + ) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.paramTypeCovariant(), + annotationNode + ); + } + } + } + }); + + if (node.d.returnAnnotation) { + this.walk(node.d.returnAnnotation); + } + + if (node.d.funcAnnotationComment) { + this.walk(node.d.funcAnnotationComment); + + if ( + this._fileInfo.diagnosticRuleSet.reportTypeCommentUsage !== 'none' && + PythonVersion.isGreaterOrEqualTo(this._fileInfo.executionEnvironment.pythonVersion, pythonVersion3_5) + ) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportTypeCommentUsage, + LocMessage.typeCommentDeprecated(), + node.d.funcAnnotationComment + ); + } + } + + this.walkMultiple(node.d.decorators); + + node.d.params.forEach((param) => { + if (param.d.name) { + this.walk(param.d.name); + } + }); + + const codeComplexity = AnalyzerNodeInfo.getCodeFlowComplexity(node); + const isTooComplexToAnalyze = codeComplexity > maxCodeComplexity; + + if (isPrintCodeComplexityEnabled) { + console.log(`Code complexity of function ${node.d.name.d.value} is ${codeComplexity.toString()}`); + } + + if (isTooComplexToAnalyze) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.codeTooComplexToAnalyze(), + node.d.name + ); + } else { + this.walk(node.d.suite); + } + + if (functionTypeResult) { + // Validate that the function returns 
the declared type. + if (!isTooComplexToAnalyze) { + this._validateFunctionReturn(node, functionTypeResult.functionType); + } + + // Verify common dunder signatures. + this._validateDunderSignatures(node, functionTypeResult.functionType, containingClassNode !== undefined); + + // Verify TypeGuard and TypeIs functions. + this._validateTypeGuardFunction(node, functionTypeResult.functionType, containingClassNode !== undefined); + + this._validateFunctionTypeVarUsage(node, functionTypeResult); + + this._validateGeneratorReturnType(node, functionTypeResult.functionType); + + this._reportDeprecatedClassProperty(node, functionTypeResult); + + // If this is not a method, @final is disallowed. + if (!containingClassNode && FunctionType.isFinal(functionTypeResult.functionType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.finalNonMethod().format({ name: node.d.name.d.value }), + node.d.name + ); + } + } + + // If we're at the module level within a stub file, report a diagnostic + // if there is a '__getattr__' function defined when in strict mode. + // This signifies an incomplete stub file that obscures type errors. + if (this._fileInfo.isStubFile && node.d.name.d.value === '__getattr__') { + const scope = getScopeForNode(node); + if (scope?.type === ScopeType.Module) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompleteStub, + LocMessage.stubUsesGetAttr(), + node.d.name + ); + } + } + + this._scopedNodes.push(node); + + if ( + functionTypeResult && + isOverloaded(functionTypeResult.decoratedType) && + functionTypeResult.functionType.priv.overloaded + ) { + // If this is the implementation for the overloaded function, skip + // overload consistency checks. 
+ if ( + OverloadedType.getImplementation(functionTypeResult.decoratedType) !== functionTypeResult.functionType + ) { + const overloads = OverloadedType.getOverloads(functionTypeResult.decoratedType); + if (overloads.length > 1) { + const maxOverloadConsistencyCheckLength = 100; + + // The check is n^2 in time, so if the number of overloads + // is very large (which can happen for some generated code), + // skip this check to avoid quadratic analysis time. + if (overloads.length < maxOverloadConsistencyCheckLength) { + this._validateOverloadConsistency( + node, + overloads[overloads.length - 1], + overloads.slice(0, overloads.length - 1) + ); + } + } + } + + this._validateOverloadAttributeConsistency(node, functionTypeResult.decoratedType); + } + + return false; + } + + override visitLambda(node: LambdaNode): boolean { + this._evaluator.getType(node); + + // Walk the children. + this.walkMultiple([...node.d.params, node.d.expr]); + + node.d.params.forEach((param) => { + if (param.d.name) { + const paramType = this._evaluator.getType(param.d.name); + if (paramType) { + if (isUnknown(paramType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnknownLambdaType, + LocMessage.paramTypeUnknown().format({ paramName: param.d.name.d.value }), + param.d.name + ); + } else if (isPartlyUnknown(paramType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnknownLambdaType, + LocMessage.paramTypePartiallyUnknown().format({ paramName: param.d.name.d.value }), + param.d.name + ); + } + } + } + }); + + const returnType = this._evaluator.getType(node.d.expr); + if (returnType) { + if (isUnknown(returnType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnknownLambdaType, + LocMessage.lambdaReturnTypeUnknown(), + node.d.expr + ); + } else if (isPartlyUnknown(returnType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnknownLambdaType, + LocMessage.lambdaReturnTypePartiallyUnknown().format({ + returnType: 
this._evaluator.printType(returnType, { expandTypeAlias: true }), + }), + node.d.expr + ); + } + } + + this._scopedNodes.push(node); + + return false; + } + + override visitCall(node: CallNode): boolean { + this._validateIsInstanceCall(node); + + this._validateIllegalDefaultParamInitializer(node); + + this._validateStandardCollectionInstantiation(node); + + if ( + this._fileInfo.diagnosticRuleSet.reportUnusedCallResult !== 'none' || + this._fileInfo.diagnosticRuleSet.reportUnusedCoroutine !== 'none' + ) { + if (node.parent?.nodeType === ParseNodeType.StatementList) { + const isRevealTypeCall = + node.d.leftExpr.nodeType === ParseNodeType.Name && node.d.leftExpr.d.value === 'reveal_type'; + const returnType = this._evaluator.getType(node); + + if (!isRevealTypeCall && returnType && this._isTypeValidForUnusedValueTest(returnType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnusedCallResult, + LocMessage.unusedCallResult().format({ + type: this._evaluator.printType(returnType), + }), + node + ); + + if ( + isClassInstance(returnType) && + ClassType.isBuiltIn(returnType, ['Coroutine', 'CoroutineType']) + ) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnusedCoroutine, + LocMessage.unusedCoroutine(), + node + ); + } + } + } + } + + return true; + } + + override visitAwait(node: AwaitNode) { + if (this._fileInfo.diagnosticRuleSet.reportUnusedCallResult !== 'none') { + if (node.parent?.nodeType === ParseNodeType.StatementList && node.d.expr.nodeType === ParseNodeType.Call) { + const returnType = this._evaluator.getType(node); + + if (returnType && this._isTypeValidForUnusedValueTest(returnType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnusedCallResult, + LocMessage.unusedCallResult().format({ + type: this._evaluator.printType(returnType), + }), + node + ); + } + } + } + + return true; + } + + override visitFor(node: ForNode): boolean { + this._evaluator.evaluateTypesForStatement(node); + + if (node.d.typeComment) { + 
this._evaluator.addDiagnosticForTextRange( + this._fileInfo, + DiagnosticRule.reportInvalidTypeForm, + LocMessage.annotationNotSupported(), + node.d.typeComment + ); + } + return true; + } + + override visitList(node: ListNode): boolean { + this._validateIllegalDefaultParamInitializer(node); + return true; + } + + override visitSet(node: SetNode): boolean { + this._validateIllegalDefaultParamInitializer(node); + return true; + } + + override visitDictionary(node: DictionaryNode): boolean { + this._validateIllegalDefaultParamInitializer(node); + return true; + } + + override visitComprehension(node: ComprehensionNode): boolean { + this._scopedNodes.push(node); + return true; + } + + override visitComprehensionIf(node: ComprehensionIfNode): boolean { + this._validateConditionalIsBool(node.d.testExpr); + this._reportUnnecessaryConditionExpression(node.d.testExpr); + return true; + } + + override visitIf(node: IfNode): boolean { + this._validateConditionalIsBool(node.d.testExpr); + this._reportUnnecessaryConditionExpression(node.d.testExpr); + return true; + } + + override visitWhile(node: WhileNode): boolean { + this._validateConditionalIsBool(node.d.testExpr); + this._reportUnnecessaryConditionExpression(node.d.testExpr); + return true; + } + + override visitWith(node: WithNode): boolean { + node.d.withItems.forEach((item) => { + this._evaluator.evaluateTypesForStatement(item); + }); + + if (node.d.typeComment) { + this._evaluator.addDiagnosticForTextRange( + this._fileInfo, + DiagnosticRule.reportInvalidTypeForm, + LocMessage.annotationNotSupported(), + node.d.typeComment + ); + } + + return true; + } + + override visitReturn(node: ReturnNode): boolean { + let returnTypeResult: TypeResult; + let returnType: Type | undefined; + + const enclosingFunctionNode = ParseTreeUtils.getEnclosingFunction(node); + let declaredReturnType = enclosingFunctionNode + ? 
this._evaluator.getDeclaredReturnType(enclosingFunctionNode) + : undefined; + + if (node.d.expr) { + returnTypeResult = this._evaluator.getTypeResult(node.d.expr) ?? { type: UnknownType.create() }; + } else { + // There is no return expression, so "None" is assumed. + returnTypeResult = { type: this._evaluator.getNoneType() }; + } + + returnType = returnTypeResult.type; + + // If this type is a special form, use the special form instead. + if (returnType.props?.specialForm) { + returnType = returnType.props.specialForm; + } + + // If the enclosing function is async and a generator, the return + // statement is not allowed to have an argument. A syntax error occurs + // at runtime in this case. + if (enclosingFunctionNode?.d.isAsync && node.d.expr) { + const functionDecl = AnalyzerNodeInfo.getDeclaration(enclosingFunctionNode); + if (functionDecl?.type === DeclarationType.Function && functionDecl.isGenerator) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.returnInAsyncGenerator(), + node.d.expr + ); + } + } + + if (this._evaluator.isNodeReachable(node, /* sourceNode */ undefined) && enclosingFunctionNode) { + if (declaredReturnType) { + if (isNever(declaredReturnType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.noReturnContainsReturn(), + node + ); + } else { + const liveScopes = ParseTreeUtils.getTypeVarScopesForNode(node); + declaredReturnType = this._evaluator.stripTypeGuard(declaredReturnType); + let adjReturnType = makeTypeVarsBound(declaredReturnType, liveScopes); + + let diagAddendum = new DiagnosticAddendum(); + let returnTypeMatches = false; + + if (this._evaluator.assignType(adjReturnType, returnType, diagAddendum)) { + returnTypeMatches = true; + } else { + // See if the declared return type includes one or more constrained TypeVars. If so, + // try to narrow these TypeVars to a single type. 
+ const uniqueTypeVars = getTypeVarArgsRecursive(declaredReturnType); + + if (uniqueTypeVars && uniqueTypeVars.some((typeVar) => TypeVarType.hasConstraints(typeVar))) { + const constraints = new ConstraintTracker(); + + for (const typeVar of uniqueTypeVars) { + if (TypeVarType.hasConstraints(typeVar)) { + const narrowedType = this._evaluator.narrowConstrainedTypeVar( + node, + TypeVarType.cloneAsBound(typeVar) + ); + if (narrowedType) { + constraints.setBounds(typeVar, narrowedType); + } + } + } + + if (!constraints.isEmpty()) { + adjReturnType = this._evaluator.solveAndApplyConstraints( + declaredReturnType, + constraints + ); + adjReturnType = makeTypeVarsBound(adjReturnType, liveScopes); + + if (this._evaluator.assignType(adjReturnType, returnType, diagAddendum)) { + returnTypeMatches = true; + } + } + } + } + + if (!returnTypeMatches) { + // If we have more detailed diagnostic information from + // bidirectional type inference, use that. + if (returnTypeResult.expectedTypeDiagAddendum) { + diagAddendum = returnTypeResult.expectedTypeDiagAddendum; + } + + this._evaluator.addDiagnostic( + DiagnosticRule.reportReturnType, + LocMessage.returnTypeMismatch().format({ + exprType: this._evaluator.printType(returnType), + returnType: this._evaluator.printType(declaredReturnType), + }) + diagAddendum.getString(), + node.d.expr ?? node, + returnTypeResult.expectedTypeDiagAddendum?.getEffectiveTextRange() + ); + } + } + } + + if (isUnknown(returnType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnknownVariableType, + LocMessage.returnTypeUnknown(), + node.d.expr ?? node + ); + } else if (isPartlyUnknown(returnType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnknownVariableType, + LocMessage.returnTypePartiallyUnknown().format({ + returnType: this._evaluator.printType(returnType, { expandTypeAlias: true }), + }), + node.d.expr ?? 
node + ); + } + } + + return true; + } + + override visitYield(node: YieldNode) { + const yieldTypeResult = node.d.expr + ? this._evaluator.getTypeResult(node.d.expr) + : { type: this._evaluator.getNoneType() }; + this._validateYieldType( + node, + yieldTypeResult?.type ?? UnknownType.create(), + yieldTypeResult?.expectedTypeDiagAddendum + ); + return true; + } + + override visitYieldFrom(node: YieldFromNode) { + const yieldFromType = this._evaluator.getType(node.d.expr) || UnknownType.create(); + let yieldType: Type | undefined; + let sendType: Type | undefined; + + if (isClassInstance(yieldFromType) && ClassType.isBuiltIn(yieldFromType, ['Coroutine', 'CoroutineType'])) { + // Handle the case of old-style (pre-await) coroutines. + yieldType = UnknownType.create(); + } else { + yieldType = + this._evaluator.getTypeOfIterable({ type: yieldFromType }, /* isAsync */ false, node)?.type ?? + UnknownType.create(); + + // Does the iterator return a Generator? If so, get the yield type from it. + // If the iterator doesn't return a Generator, use the iterator return type + // directly. + const generatorTypeArgs = getGeneratorTypeArgs(yieldType); + if (generatorTypeArgs) { + yieldType = generatorTypeArgs.length >= 1 ? generatorTypeArgs[0] : UnknownType.create(); + sendType = generatorTypeArgs.length >= 2 ? generatorTypeArgs[1] : undefined; + } else { + yieldType = + this._evaluator.getTypeOfIterator({ type: yieldFromType }, /* isAsync */ false, node)?.type ?? 
+ UnknownType.create(); + } + } + + this._validateYieldType(node, yieldType, /* expectedDiagAddendum */ undefined, sendType); + + return true; + } + + override visitRaise(node: RaiseNode): boolean { + if (node.d.expr) { + this._evaluator.verifyRaiseExceptionType(node.d.expr, /* allowNone */ false); + } + + if (node.d.fromExpr) { + this._evaluator.verifyRaiseExceptionType(node.d.fromExpr, /* allowNone */ true); + } + + return true; + } + + override visitExcept(node: ExceptNode): boolean { + if (node.d.typeExpr) { + this._evaluator.evaluateTypesForStatement(node); + + const exceptionType = this._evaluator.getType(node.d.typeExpr); + if (exceptionType) { + this._validateExceptionType(exceptionType, node.d.typeExpr, node.d.isExceptGroup); + } + } + + return true; + } + + override visitAssert(node: AssertNode) { + if (node.d.exceptionExpr) { + this._evaluator.getType(node.d.exceptionExpr); + } + + this._validateConditionalIsBool(node.d.testExpr); + + // Specifically look for a common programming error where the two arguments + // to an assert are enclosed in parens and interpreted as a two-element tuple. 
+ // assert (x > 3, "bad value x") + const type = this._evaluator.getType(node.d.testExpr); + if (type && isClassInstance(type)) { + if (isTupleClass(type) && type.priv.tupleTypeArgs) { + if (type.priv.tupleTypeArgs.length > 0) { + if (!isUnboundedTupleClass(type)) { + this._evaluator.addDiagnosticForTextRange( + this._fileInfo, + DiagnosticRule.reportAssertAlwaysTrue, + LocMessage.assertAlwaysTrue(), + node.d.testExpr + ); + } + } + } + } + + return true; + } + + override visitAssignment(node: AssignmentNode): boolean { + this._evaluator.evaluateTypesForStatement(node); + + if (node.d.annotationComment) { + this._evaluator.getType(node.d.annotationComment); + + if ( + this._fileInfo.diagnosticRuleSet.reportTypeCommentUsage !== 'none' && + PythonVersion.isGreaterOrEqualTo(this._fileInfo.executionEnvironment.pythonVersion, pythonVersion3_6) + ) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportTypeCommentUsage, + LocMessage.typeCommentDeprecated(), + node.d.annotationComment + ); + } + } + + // If this isn't a class or global scope, explicit type aliases are not allowed. 
+ if (node.d.leftExpr.nodeType === ParseNodeType.TypeAnnotation) { + const annotationType = this._evaluator.getTypeOfAnnotation(node.d.leftExpr.d.annotation); + + if (isClassInstance(annotationType) && ClassType.isBuiltIn(annotationType, 'TypeAlias')) { + const scope = getScopeForNode(node); + if (scope) { + if ( + scope.type !== ScopeType.Class && + scope.type !== ScopeType.Module && + scope.type !== ScopeType.Builtin + ) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeAliasNotInModuleOrClass(), + node.d.leftExpr.d.annotation + ); + } + } + } + } + + return true; + } + + override visitAssignmentExpression(node: AssignmentExpressionNode): boolean { + this._evaluator.getType(node); + return true; + } + + override visitAugmentedAssignment(node: AugmentedAssignmentNode): boolean { + const typeResult = this._evaluator.getTypeResult(node); + this._reportDeprecatedUseForOperation(node.d.destExpr, typeResult); + + return true; + } + + override visitIndex(node: IndexNode): boolean { + this._evaluator.getType(node); + + // If the index is a literal integer, see if this is a tuple with + // a known length and the integer value exceeds the length. 
+ const baseType = this._evaluator.getType(node.d.leftExpr); + if (baseType) { + doForEachSubtype(baseType, (subtype) => { + const tupleType = getSpecializedTupleType(subtype); + + if (!isClassInstance(subtype) || !tupleType?.priv.tupleTypeArgs || isUnboundedTupleClass(tupleType)) { + return; + } + + const tupleLength = tupleType.priv.tupleTypeArgs.length; + + if ( + node.d.items.length !== 1 || + node.d.trailingComma || + node.d.items[0].d.argCategory !== ArgCategory.Simple || + node.d.items[0].d.name + ) { + return; + } + + const subscriptType = this._evaluator.getType(node.d.items[0].d.valueExpr); + if ( + !subscriptType || + !isClassInstance(subscriptType) || + !ClassType.isBuiltIn(subscriptType, 'int') || + !isLiteralType(subscriptType) || + typeof subscriptType.priv.literalValue !== 'number' + ) { + return; + } + + if ( + (subscriptType.priv.literalValue < 0 || subscriptType.priv.literalValue < tupleLength) && + (subscriptType.priv.literalValue >= 0 || subscriptType.priv.literalValue + tupleLength >= 0) + ) { + return; + } + + // This can be an expensive check, so we save it for the end once we + // are about to emit a diagnostic. + if (this._evaluator.isTypeSubsumedByOtherType(tupleType, baseType, /* allowAnyToSubsume */ false)) { + return; + } + + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.tupleIndexOutOfRange().format({ + index: subscriptType.priv.literalValue, + type: this._evaluator.printType(subtype), + }), + node + ); + }); + } + + return true; + } + + override visitBinaryOperation(node: BinaryOperationNode): boolean { + if (node.d.operator === OperatorType.Equals || node.d.operator === OperatorType.NotEquals) { + // Don't apply this rule if it's within an assert. + if (!ParseTreeUtils.isWithinAssertExpression(node)) { + this._validateComparisonTypes(node); + } + } else if (node.d.operator === OperatorType.Is || node.d.operator === OperatorType.IsNot) { + // Don't apply this rule if it's within an assert. 
+ if (!ParseTreeUtils.isWithinAssertExpression(node)) { + this._validateComparisonTypes(node); + } + } else if (node.d.operator === OperatorType.In || node.d.operator === OperatorType.NotIn) { + // Don't apply this rule if it's within an assert. + if (!ParseTreeUtils.isWithinAssertExpression(node)) { + this._validateContainmentTypes(node); + } + } + + const typeResult = this._evaluator.getTypeResult(node); + this._reportDeprecatedUseForOperation(node.d.leftExpr, typeResult); + + return true; + } + + override visitSlice(node: SliceNode): boolean { + this._evaluator.getType(node); + return true; + } + + override visitUnpack(node: UnpackNode): boolean { + this._evaluator.getType(node); + return true; + } + + override visitTuple(node: TupleNode): boolean { + this._evaluator.getType(node); + return true; + } + + override visitUnaryOperation(node: UnaryOperationNode): boolean { + if (node.d.operator === OperatorType.Not) { + this._validateConditionalIsBool(node.d.expr); + } + + const typeResult = this._evaluator.getTypeResult(node); + this._reportDeprecatedUseForOperation(node.d.expr, typeResult); + + return true; + } + + override visitTernary(node: TernaryNode): boolean { + this._evaluator.getType(node); + this._validateConditionalIsBool(node.d.testExpr); + this._reportUnnecessaryConditionExpression(node.d.testExpr); + return true; + } + + override visitStringList(node: StringListNode): boolean { + // If this is Python 3.11 or older, there are several restrictions + // associated with f-strings that we need to validate. Determine whether + // we're within an f-string (or multiple f-strings if nesting is used). 
+ const fStringContainers: FormatStringNode[] = []; + if (PythonVersion.isLessThan(this._fileInfo.executionEnvironment.pythonVersion, pythonVersion3_12)) { + let curNode: ParseNode | undefined = node; + while (curNode) { + if (curNode.nodeType === ParseNodeType.FormatString) { + fStringContainers.push(curNode); + } + curNode = curNode.parent; + } + } + + for (const stringNode of node.d.strings) { + const stringTokens = + stringNode.nodeType === ParseNodeType.String ? [stringNode.d.token] : stringNode.d.middleTokens; + + stringTokens.forEach((token) => { + const unescapedResult = getUnescapedString(token); + let start = token.start; + if (token.type === TokenType.String) { + start += token.prefixLength + token.quoteMarkLength; + } + + unescapedResult.unescapeErrors.forEach((error: UnescapeError) => { + if (error.errorType === UnescapeErrorType.InvalidEscapeSequence) { + this._evaluator.addDiagnosticForTextRange( + this._fileInfo, + DiagnosticRule.reportInvalidStringEscapeSequence, + node.d.strings.some((string) => (string.d.token.flags & StringTokenFlags.Bytes) !== 0) + ? LocMessage.bytesUnsupportedEscape() + : LocMessage.stringUnsupportedEscape(), + { start: start + error.offset, length: error.length } + ); + } + }); + + // Prior to Python 3.12, it was not allowed to include a slash in an f-string. + if (fStringContainers.length > 0) { + const escapeOffset = token.escapedValue.indexOf('\\'); + if (escapeOffset >= 0) { + this._evaluator.addDiagnosticForTextRange( + this._fileInfo, + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.formatStringEscape(), + { start, length: 1 } + ); + } + } + }); + + // Prior to Python 3.12, it was not allowed to nest strings that + // used the same quote scheme within an f-string. 
+ if (fStringContainers.length > 0) { + const quoteTypeMask = + StringTokenFlags.SingleQuote | StringTokenFlags.DoubleQuote | StringTokenFlags.Triplicate; + if ( + fStringContainers.some( + (fStringContainer) => + (fStringContainer.d.token.flags & quoteTypeMask) === + (stringNode.d.token.flags & quoteTypeMask) + ) + ) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.formatStringNestedQuote(), + stringNode + ); + } + } + } + + if (node.d.annotation) { + this._evaluator.getType(node); + } + + if (node.d.strings.length > 1 && !node.d.hasParens) { + this._evaluator.addDiagnosticForTextRange( + this._fileInfo, + DiagnosticRule.reportImplicitStringConcatenation, + LocMessage.implicitStringConcat(), + node + ); + } + + return true; + } + + override visitFormatString(node: FormatStringNode): boolean { + node.d.fieldExprs.forEach((expr) => { + this._evaluator.getType(expr); + }); + + node.d.formatExprs.forEach((expr) => { + this._evaluator.getType(expr); + }); + + return true; + } + + override visitGlobal(node: GlobalNode): boolean { + this._suppressUnboundCheck(() => { + node.d.targets.forEach((name) => { + this._evaluator.getType(name); + + this.walk(name); + }); + }); + + return false; + } + + override visitNonlocal(node: NonlocalNode): boolean { + this._suppressUnboundCheck(() => { + node.d.targets.forEach((name) => { + this._evaluator.getType(name); + + this.walk(name); + + this._validateNonlocalTypeParam(name); + }); + }); + + return false; + } + + override visitName(node: NameNode) { + // Determine if we should log information about private usage. + this._conditionallyReportPrivateUsage(node); + + // Determine if the name is possibly unbound. + if (!this._isUnboundCheckSuppressed) { + this._reportUnboundName(node); + } + + // Report the use of a deprecated symbol. 
+ const type = this._evaluator.getType(node); + this._reportDeprecatedUseForType(node, type); + + return true; + } + + override visitDel(node: DelNode) { + node.d.targets.forEach((expr) => { + this._evaluator.verifyDeleteExpression(expr); + + this.walk(expr); + }); + + return false; + } + + override visitMemberAccess(node: MemberAccessNode) { + const typeResult = this._evaluator.getTypeResult(node.d.member); + const type = typeResult?.type ?? UnknownType.create(); + + const leftExprType = this._evaluator.getType(node.d.leftExpr); + const moduleName = leftExprType && isModule(leftExprType) ? leftExprType.priv.moduleName : undefined; + const isImportedFromTyping = moduleName === 'typing' || moduleName === 'typing_extensions'; + this._reportDeprecatedUseForType(node.d.member, type, isImportedFromTyping); + + if (typeResult?.memberAccessDeprecationInfo) { + this._reportDeprecatedUseForMemberAccess(node.d.member, typeResult.memberAccessDeprecationInfo); + } + + this._conditionallyReportPrivateUsage(node.d.member); + + // Walk the leftExpression but not the memberName. + this.walk(node.d.leftExpr); + + return false; + } + + override visitImportAs(node: ImportAsNode): boolean { + this._evaluator.evaluateTypesForStatement(node); + + const nameParts = node.d.module.d.nameParts; + if (nameParts.length > 1 && !node.d.alias) { + this._multipartImports.push(node); + } + + return true; + } + + override visitImportFrom(node: ImportFromNode): boolean { + // Verify that any "__future__" import occurs at the top of the file. 
+ if ( + node.d.module.d.leadingDots === 0 && + node.d.module.d.nameParts.length === 1 && + node.d.module.d.nameParts[0].d.value === '__future__' + ) { + if (!ParseTreeUtils.isValidLocationForFutureImport(node)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.futureImportLocationNotAllowed(), + node + ); + } + } + + if (!node.d.isWildcardImport) { + node.d.imports.forEach((importAs) => { + this._evaluator.evaluateTypesForStatement(importAs); + }); + } else { + this._evaluator.evaluateTypesForStatement(node); + + const importInfo = AnalyzerNodeInfo.getImportInfo(node.d.module); + if ( + importInfo && + importInfo.isImportFound && + importInfo.importType !== ImportType.Local && + !this._fileInfo.isStubFile + ) { + this._evaluator.addDiagnosticForTextRange( + this._fileInfo, + DiagnosticRule.reportWildcardImportFromLibrary, + LocMessage.wildcardLibraryImport(), + node.d.wildcardToken || node + ); + } + } + + return true; + } + + override visitImportFromAs(node: ImportFromAsNode): boolean { + if (this._fileInfo.isStubFile) { + return false; + } + + const decls = this._evaluator.getDeclInfoForNameNode(node.d.name)?.decls; + if (!decls) { + return false; + } + + for (const decl of decls) { + if (!isAliasDeclaration(decl) || !decl.submoduleFallback || decl.node !== node) { + // If it is not implicitly imported module, move to next. 
+ continue; + } + + const resolvedAlias = this._evaluator.resolveAliasDeclaration(decl, /* resolveLocalNames */ true); + const resolvedAliasUri = resolvedAlias?.uri; + if (!resolvedAliasUri || !isStubFile(resolvedAliasUri)) { + continue; + } + + const importResult = this._getImportResult(node, resolvedAliasUri); + if (!importResult) { + continue; + } + + this._addMissingModuleSourceDiagnosticIfNeeded(importResult, node.d.name); + break; + } + + let isImportFromTyping = false; + if (node.parent?.nodeType === ParseNodeType.ImportFrom) { + if (node.parent.d.module.d.leadingDots === 0 && node.parent.d.module.d.nameParts.length === 1) { + const namePart = node.parent.d.module.d.nameParts[0].d.value; + if (namePart === 'typing' || namePart === 'typing_extensions') { + isImportFromTyping = true; + } + } + } + + const type = this._evaluator.getType(node.d.alias ?? node.d.name); + this._reportDeprecatedUseForType(node.d.name, type, isImportFromTyping); + + return false; + } + + override visitModuleName(node: ModuleNameNode): boolean { + if (this._fileInfo.isStubFile) { + return false; + } + + const importResult = AnalyzerNodeInfo.getImportInfo(node); + assert(importResult !== undefined); + + this._addMissingModuleSourceDiagnosticIfNeeded(importResult, node); + return false; + } + + override visitTypeParameterList(node: TypeParameterListNode): boolean { + this._typeParamLists.push(node); + return true; + } + + override visitTypeParameter(node: TypeParameterNode): boolean { + // Verify that there are no live type variables with the same + // name in outer scopes. 
+ let curNode: ParseNode | undefined = node.parent?.parent?.parent; + let foundDuplicate = false; + + while (curNode) { + const typeVarScopeNode = ParseTreeUtils.getTypeVarScopeNode(curNode); + if (!typeVarScopeNode) { + break; + } + + if (typeVarScopeNode.nodeType === ParseNodeType.Class) { + const classType = this._evaluator.getTypeOfClass(typeVarScopeNode)?.classType; + + if (classType?.shared.typeParams.some((param) => param.shared.name === node.d.name.d.value)) { + foundDuplicate = true; + break; + } + } else if (typeVarScopeNode.nodeType === ParseNodeType.Function) { + const functionType = this._evaluator.getTypeOfFunction(typeVarScopeNode)?.functionType; + + if (functionType?.shared.typeParams.some((param) => param.shared.name === node.d.name.d.value)) { + foundDuplicate = true; + break; + } + } + + curNode = typeVarScopeNode.parent; + } + + if (foundDuplicate) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarUsedByOuterScope().format({ name: node.d.name.d.value }), + node.d.name + ); + } + + return false; + } + + override visitTypeAlias(node: TypeAliasNode): boolean { + const scope = getScopeForNode(node); + if (scope) { + if (scope.type !== ScopeType.Class && scope.type !== ScopeType.Module && scope.type !== ScopeType.Builtin) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeAliasStatementBadScope(), + node.d.name + ); + } + } + + return true; + } + + override visitTypeAnnotation(node: TypeAnnotationNode): boolean { + this._evaluator.getType(node.d.annotation); + return true; + } + + override visitMatch(node: MatchNode): boolean { + this._evaluator.getType(node.d.expr); + this._validateExhaustiveMatch(node); + return true; + } + + override visitCase(node: CaseNode): boolean { + if (node.d.guardExpr) { + this._validateConditionalIsBool(node.d.guardExpr); + } + + this._evaluator.evaluateTypesForStatement(node.d.pattern); + return true; + } + + override 
visitPatternClass(node: PatternClassNode): boolean { + validateClassPattern(this._evaluator, node); + return true; + } + + override visitTry(node: TryNode): boolean { + this._reportUnusedExceptStatements(node); + return true; + } + + override visitError(node: ErrorNode) { + // Get the type of the child so it's available to + // the completion provider. + if (node.d.child) { + this._evaluator.getType(node.d.child); + } + + // Don't explore further. + return false; + } + + private _reportUnusedMultipartImports() { + this._multipartImports.forEach((node) => { + const nameParts = node.d.module.d.nameParts; + + if (this._isMultipartImportUnused(node)) { + const multipartName = nameParts.map((np) => np.d.value).join('.'); + let textRange: TextRange = { start: nameParts[0].start, length: nameParts[0].length }; + textRange = TextRange.extend(textRange, nameParts[nameParts.length - 1]); + + this._fileInfo.diagnosticSink.addUnusedCodeWithTextRange( + LocMessage.unaccessedSymbol().format({ name: multipartName }), + textRange, + { action: Commands.unusedImport } + ); + + this._evaluator.addDiagnosticForTextRange( + this._fileInfo, + DiagnosticRule.reportUnusedImport, + LocMessage.unaccessedImport().format({ name: multipartName }), + textRange + ); + } + }); + } + + private _isMultipartImportUnused(node: ImportAsNode): boolean { + const nameParts = node.d.module.d.nameParts; + assert(nameParts.length > 1); + + // Get the top-level module type associated with this import. + let moduleType = this._evaluator.evaluateTypeForSubnode(node, () => { + this._evaluator.evaluateTypesForStatement(node); + })?.type; + + if (!moduleType || !isModule(moduleType)) { + return false; + } + + // Walk the module hierarchy to get the submodules in the + // multi-name import path until we get to the second-to-the-last + // part. 
+ for (let i = 1; i < nameParts.length - 1; i++) { + const symbol = ModuleType.getField(moduleType, nameParts[i].d.value); + if (!symbol) { + return false; + } + + const submoduleType = symbol.getSynthesizedType(); + if (!submoduleType || !isModule(submoduleType.type)) { + return false; + } + + moduleType = submoduleType.type; + } + + // Look up the last part of the import to get its symbol ID. + const lastPartName = nameParts[nameParts.length - 1].d.value; + const symbol = ModuleType.getField(moduleType, lastPartName); + + if (!symbol) { + return false; + } + + return !this._fileInfo.accessedSymbolSet.has(symbol.id); + } + + private _getImportResult(node: ImportFromAsNode, uri: Uri) { + const execEnv = this._importResolver.getConfigOptions().findExecEnvironment(uri); + const moduleNameNode = (node.parent as ImportFromNode).d.module; + + // Handle both absolute and relative imports. + const moduleName = + moduleNameNode.d.leadingDots === 0 + ? this._importResolver.getModuleNameForImport(uri, execEnv).moduleName + : getRelativeModuleName( + this._importResolver.fileSystem, + this._fileInfo.fileUri, + uri, + this._importResolver.getConfigOptions() + ); + + if (!moduleName) { + return undefined; + } + + return this._importResolver.resolveImport( + this._fileInfo.fileUri, + execEnv, + createImportedModuleDescriptor(moduleName) + ); + } + + private _addMissingModuleSourceDiagnosticIfNeeded(importResult: ImportResult, node: ParseNode) { + if ( + importResult.isNativeLib || + !importResult.isStubFile || + importResult.importType === ImportType.BuiltIn || + !importResult.nonStubImportResult || + importResult.nonStubImportResult.isImportFound + ) { + return; + } + + // Type stub found, but source is missing. 
+ this._evaluator.addDiagnostic( + DiagnosticRule.reportMissingModuleSource, + LocMessage.importSourceResolveFailure().format({ + importName: importResult.importName, + venv: this._fileInfo.executionEnvironment.name, + }), + node + ); + } + + private _validateConditionalIsBool(node: ExpressionNode) { + const operandType = this._evaluator.getType(node); + if (!operandType) { + return; + } + + let isTypeBool = true; + const diag = new DiagnosticAddendum(); + this._evaluator.mapSubtypesExpandTypeVars(operandType, /* options */ undefined, (expandedSubtype) => { + if (isAnyOrUnknown(expandedSubtype)) { + return undefined; + } + + // If it's a bool (the common case), we're good. + if (isClassInstance(expandedSubtype) && ClassType.isBuiltIn(expandedSubtype, 'bool')) { + return undefined; + } + + // Invoke the __bool__ method on the type. + const boolReturnType = this._evaluator.getTypeOfMagicMethodCall( + expandedSubtype, + '__bool__', + [], + node, + /* inferenceContext */ undefined + )?.type; + + if (!boolReturnType || isAnyOrUnknown(boolReturnType)) { + return undefined; + } + + if (isClassInstance(boolReturnType) && ClassType.isBuiltIn(boolReturnType, 'bool')) { + return undefined; + } + + // All other types are problematic. 
+ isTypeBool = false; + + diag.addMessage( + LocAddendum.conditionalRequiresBool().format({ + operandType: this._evaluator.printType(expandedSubtype), + boolReturnType: this._evaluator.printType(boolReturnType), + }) + ); + + return undefined; + }); + + if (!isTypeBool) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.conditionalOperandInvalid().format({ + type: this._evaluator.printType(operandType), + }) + diag.getString(), + node + ); + } + } + + private _reportUnnecessaryConditionExpression(expression: ExpressionNode) { + if (expression.nodeType === ParseNodeType.BinaryOperation) { + if (expression.d.operator === OperatorType.And || expression.d.operator === OperatorType.Or) { + this._reportUnnecessaryConditionExpression(expression.d.leftExpr); + this._reportUnnecessaryConditionExpression(expression.d.rightExpr); + } + + return; + } else if (expression.nodeType === ParseNodeType.UnaryOperation) { + if (expression.d.operator === OperatorType.Not) { + this._reportUnnecessaryConditionExpression(expression.d.expr); + } + + return; + } + + const exprTypeResult = this._evaluator.getTypeOfExpression(expression); + let isExprFunction = true; + let isCoroutine = true; + + doForEachSubtype(exprTypeResult.type, (subtype) => { + subtype = this._evaluator.makeTopLevelTypeVarsConcrete(subtype); + + if (!isFunctionOrOverloaded(subtype)) { + isExprFunction = false; + } + + if (!isClassInstance(subtype) || !ClassType.isBuiltIn(subtype, ['Coroutine', 'CoroutineType'])) { + isCoroutine = false; + } + }); + + if (isExprFunction) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnnecessaryComparison, + LocMessage.functionInConditionalExpression(), + expression + ); + } + + if (isCoroutine) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnnecessaryComparison, + LocMessage.coroutineInConditionalExpression(), + expression + ); + } + } + + private _reportUnusedExpression(node: ParseNode) { + if 
(this._fileInfo.diagnosticRuleSet.reportUnusedExpression === 'none') { + return; + } + + const simpleExpressionTypes = [ + ParseNodeType.UnaryOperation, + ParseNodeType.BinaryOperation, + ParseNodeType.Number, + ParseNodeType.Constant, + ParseNodeType.Name, + ParseNodeType.Tuple, + ]; + + let reportAsUnused = false; + + if (simpleExpressionTypes.some((nodeType) => nodeType === node.nodeType)) { + reportAsUnused = true; + } else if ( + node.nodeType === ParseNodeType.List || + node.nodeType === ParseNodeType.Set || + node.nodeType === ParseNodeType.Dictionary + ) { + // Exclude comprehensions. + if (!node.d.items.some((entry) => entry.nodeType === ParseNodeType.Comprehension)) { + reportAsUnused = true; + } + } + + if ( + reportAsUnused && + this._fileInfo.ipythonMode === IPythonMode.CellDocs && + node.parent?.nodeType === ParseNodeType.StatementList && + node.parent.d.statements[node.parent.d.statements.length - 1] === node && + node.parent.parent?.nodeType === ParseNodeType.Module && + node.parent.parent.d.statements[node.parent.parent.d.statements.length - 1] === node.parent + ) { + // Exclude an expression at the end of a notebook cell, as that is treated as + // the cell's value. + reportAsUnused = false; + } + + if (reportAsUnused) { + this._evaluator.addDiagnostic(DiagnosticRule.reportUnusedExpression, LocMessage.unusedExpression(), node); + } + } + + // Verifies that the target of a nonlocal statement is not a PEP 695-style + // TypeParameter. This situation results in a runtime exception. + private _validateNonlocalTypeParam(node: NameNode) { + // Look up the symbol to see if it's a type parameter. 
+ const symbolWithScope = this._evaluator.lookUpSymbolRecursive(node, node.d.value, /* honorCodeFlow */ false); + if (!symbolWithScope || symbolWithScope.scope.type !== ScopeType.TypeParameter) { + return; + } + + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.nonlocalTypeParam().format({ name: node.d.value }), + node + ); + } + + private _validateExhaustiveMatch(node: MatchNode) { + // This check can be expensive, so skip it if it's disabled. + if (this._fileInfo.diagnosticRuleSet.reportMatchNotExhaustive === 'none') { + return; + } + + const narrowedTypeResult = this._evaluator.evaluateTypeForSubnode(node, () => { + this._evaluator.evaluateTypesForMatchStatement(node); + }); + + if (narrowedTypeResult && !isNever(narrowedTypeResult.type)) { + const diagAddendum = new DiagnosticAddendum(); + diagAddendum.addMessage( + LocAddendum.matchIsNotExhaustiveType().format({ + type: this._evaluator.printType(narrowedTypeResult.type), + }) + ); + diagAddendum.addMessage(LocAddendum.matchIsNotExhaustiveHint()); + + this._evaluator.addDiagnostic( + DiagnosticRule.reportMatchNotExhaustive, + LocMessage.matchIsNotExhaustive() + diagAddendum.getString(), + node.d.expr + ); + } + } + + private _suppressUnboundCheck(callback: () => void) { + const wasSuppressed = this._isUnboundCheckSuppressed; + this._isUnboundCheckSuppressed = true; + + try { + callback(); + } finally { + this._isUnboundCheckSuppressed = wasSuppressed; + } + } + + private _validateIllegalDefaultParamInitializer(node: ParseNode) { + if (this._fileInfo.diagnosticRuleSet.reportCallInDefaultInitializer !== 'none') { + if (ParseTreeUtils.isWithinDefaultParamInitializer(node) && !this._fileInfo.isStubFile) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportCallInDefaultInitializer, + LocMessage.defaultValueContainsCall(), + node + ); + } + } + } + + private _validateStandardCollectionInstantiation(node: CallNode) { + const leftType = 
this._evaluator.getType(node.d.leftExpr); + + if ( + leftType && + isInstantiableClass(leftType) && + ClassType.isBuiltIn(leftType) && + !leftType.priv.includeSubclasses && + leftType.priv.aliasName + ) { + const nonInstantiable = ['List', 'Set', 'Dict', 'Tuple']; + + if (nonInstantiable.some((name) => name === leftType.priv.aliasName)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.collectionAliasInstantiation().format({ + type: leftType.priv.aliasName, + alias: leftType.shared.name, + }), + node.d.leftExpr + ); + } + } + } + + private _validateContainmentTypes(node: BinaryOperationNode) { + const leftType = this._evaluator.getType(node.d.leftExpr); + const containerType = this._evaluator.getType(node.d.rightExpr); + + if (!leftType || !containerType) { + return; + } + + if (isNever(leftType) || isNever(containerType)) { + return; + } + + // Use the common narrowing logic for containment. + const elementType = getElementTypeForContainerNarrowing(containerType); + if (!elementType) { + return; + } + const narrowedType = narrowTypeForContainerElementType( + this._evaluator, + leftType, + this._evaluator.makeTopLevelTypeVarsConcrete(elementType) + ); + + if (isNever(narrowedType)) { + const getMessage = () => { + return node.d.operator === OperatorType.In + ? LocMessage.containmentAlwaysFalse() + : LocMessage.containmentAlwaysTrue(); + }; + + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnnecessaryContains, + getMessage().format({ + leftType: this._evaluator.printType(leftType, { expandTypeAlias: true }), + rightType: this._evaluator.printType(elementType, { expandTypeAlias: true }), + }), + node + ); + } + } + + // Determines whether the types of the two operands for an == or != operation + // have overlapping types. 
+ private _validateComparisonTypes(node: BinaryOperationNode) { + let rightExpression = node.d.rightExpr; + const assumeIsOperator = node.d.operator === OperatorType.Is || node.d.operator === OperatorType.IsNot; + + // Check for chained comparisons. + if ( + rightExpression.nodeType === ParseNodeType.BinaryOperation && + !rightExpression.d.hasParens && + ParseTreeUtils.operatorSupportsChaining(rightExpression.d.operator) + ) { + // Use the left side of the right expression for comparison purposes. + rightExpression = rightExpression.d.leftExpr; + } + + let leftType = this._evaluator.getType(node.d.leftExpr); + let rightType = this._evaluator.getType(rightExpression); + + if (!leftType || !rightType) { + return; + } + + if (isNever(leftType) || isNever(rightType)) { + return; + } + + if (isModule(leftType) || isModule(rightType)) { + return; + } + + const getMessage = () => { + return node.d.operator === OperatorType.Equals || node.d.operator === OperatorType.Is + ? LocMessage.comparisonAlwaysFalse() + : LocMessage.comparisonAlwaysTrue(); + }; + + const replaceEnumTypeWithLiteralValue = (type: Type) => { + return mapSubtypes(type, (subtype) => { + if ( + !isClassInstance(subtype) || + !ClassType.isEnumClass(subtype) || + !subtype.shared.mro.some( + (base) => isClass(base) && ClassType.isBuiltIn(base, ['int', 'str', 'bytes']) + ) + ) { + return subtype; + } + + // If this is an enum literal, replace it with its literal value. + if (subtype.priv.literalValue instanceof EnumLiteral) { + return subtype.priv.literalValue.itemType; + } + + // If this is an enum class, replace it with the type of its members. 
+ const literalValues = enumerateLiteralsForType(this._evaluator, subtype); + if (literalValues && literalValues.length > 0) { + return combineTypes( + literalValues.map((literalClass) => { + const literalValue = literalClass.priv.literalValue; + assert(literalValue instanceof EnumLiteral); + return literalValue.itemType; + }) + ); + } + + return subtype; + }); + }; + + // Handle enum literals that are assignable to another (non-Enum) literal. + // This can happen for IntEnum and StrEnum members. + leftType = replaceEnumTypeWithLiteralValue(leftType); + rightType = replaceEnumTypeWithLiteralValue(rightType); + + // Check for the special case where the LHS and RHS are both literals. + if (isLiteralTypeOrUnion(rightType) && isLiteralTypeOrUnion(leftType)) { + if ( + evaluateStaticBoolExpression( + node, + this._fileInfo.executionEnvironment, + this._fileInfo.definedConstants + ) === undefined + ) { + let isPossiblyTrue = false; + + doForEachSubtype(leftType, (leftSubtype) => { + if (this._evaluator.assignType(rightType!, leftSubtype)) { + isPossiblyTrue = true; + } + }); + + doForEachSubtype(rightType, (rightSubtype) => { + if (this._evaluator.assignType(leftType!, rightSubtype)) { + isPossiblyTrue = true; + } + }); + + if (!isPossiblyTrue) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnnecessaryComparison, + getMessage().format({ + leftType: this._evaluator.printType(leftType, { expandTypeAlias: true }), + rightType: this._evaluator.printType(rightType, { expandTypeAlias: true }), + }), + node + ); + } + } + } else { + let isComparable = false; + + this._evaluator.mapSubtypesExpandTypeVars(leftType, {}, (leftSubtype) => { + if (isComparable) { + return; + } + + this._evaluator.mapSubtypesExpandTypeVars(rightType!, {}, (rightSubtype) => { + if (isComparable) { + return; + } + + if (this._evaluator.isTypeComparable(leftSubtype, rightSubtype, assumeIsOperator)) { + isComparable = true; + } + + return rightSubtype; + }); + + return leftSubtype; + }); + + if 
(!isComparable) { + const leftTypeText = this._evaluator.printType(leftType, { expandTypeAlias: true }); + const rightTypeText = this._evaluator.printType(rightType, { expandTypeAlias: true }); + + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnnecessaryComparison, + getMessage().format({ + leftType: leftTypeText, + rightType: rightTypeText, + }), + node + ); + } + } + } + + // If the function is a generator, validates that its annotated return type + // is appropriate for a generator. + private _validateGeneratorReturnType(node: FunctionNode, functionType: FunctionType) { + if (!FunctionType.isGenerator(functionType)) { + return; + } + + const declaredReturnType = functionType.shared.declaredReturnType; + if (!declaredReturnType) { + return; + } + + if (isNever(declaredReturnType)) { + return; + } + + const functionDecl = functionType.shared.declaration; + if (!functionDecl || !functionDecl.yieldStatements || functionDecl.yieldStatements.length === 0) { + return; + } + + let generatorType: Type | undefined; + if ( + !node.d.isAsync && + isClassInstance(declaredReturnType) && + ClassType.isBuiltIn(declaredReturnType, 'AwaitableGenerator') + ) { + // Handle the old-style (pre-await) generator case + // if the return type explicitly uses AwaitableGenerator. + generatorType = + this._evaluator.getTypeCheckerInternalsType(node, 'AwaitableGenerator') ?? + this._evaluator.getTypingType(node, 'AwaitableGenerator'); + } else { + generatorType = this._evaluator.getTypingType(node, node.d.isAsync ? 'AsyncGenerator' : 'Generator'); + } + + if (!generatorType || !isInstantiableClass(generatorType)) { + return; + } + + const specializedGenerator = ClassType.cloneAsInstance( + ClassType.specialize(generatorType, [AnyType.create(), AnyType.create(), AnyType.create()]) + ); + + const diagAddendum = new DiagnosticAddendum(); + if (!this._evaluator.assignType(declaredReturnType, specializedGenerator, diagAddendum)) { + const errorMessage = node.d.isAsync + ? 
LocMessage.generatorAsyncReturnType() + : LocMessage.generatorSyncReturnType(); + + this._evaluator.addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + errorMessage.format({ yieldType: this._evaluator.printType(AnyType.create()) }) + + diagAddendum.getString(), + node.d.returnAnnotation ?? node.d.name + ); + } + } + + // Determines whether the specified type is one that should trigger + // an "unused" value diagnostic. + private _isTypeValidForUnusedValueTest(type: Type) { + return !isNoneInstance(type) && !isNever(type) && !isAnyOrUnknown(type); + } + + // Verifies that each local type variable is used more than once. + private _validateFunctionTypeVarUsage(node: FunctionNode, functionTypeResult: FunctionTypeResult) { + // Skip this check entirely if it's disabled. + if (this._fileInfo.diagnosticRuleSet.reportInvalidTypeVarUse === 'none') { + return; + } + + const type = functionTypeResult.functionType; + const localTypeVarUsage = new Map(); + const classTypeVarUsage = new Map(); + let exemptBoundTypeVar = true; + let curParamNode: ParameterNode | undefined; + + // Is this a constructor (an __init__ method) for a generic class? + let constructorClass: ClassType | undefined; + if (FunctionType.isInstanceMethod(type) && node.d.name.d.value === '__init__') { + const containingClassNode = ParseTreeUtils.getEnclosingClassOrFunction(node); + if (containingClassNode && containingClassNode.nodeType === ParseNodeType.Class) { + const classType = this._evaluator.getTypeOfClass(containingClassNode); + if (classType && isClass(classType.classType)) { + constructorClass = classType.classType; + } + } + } + + const nameWalker = new ParseTreeUtils.NameNodeWalker((nameNode, subscriptIndex, baseExpression) => { + const nameType = this._evaluator.getType(nameNode); + ``; + if (nameType && isTypeVar(nameType) && !TypeVarType.isSelf(nameType)) { + // Does this name refer to a TypeVar that is scoped to this function? 
+ if (nameType.priv.scopeId === ParseTreeUtils.getScopeIdForNode(node)) { + // We exempt constrained TypeVars, TypeVars that are type arguments of + // other types, and ParamSpecs. There are legitimate uses for singleton + // instances in these particular cases. + let isExempt = + TypeVarType.hasConstraints(nameType) || + nameType.shared.isDefaultExplicit || + (exemptBoundTypeVar && subscriptIndex !== undefined) || + isParamSpec(nameType); + + if (!isExempt && baseExpression && subscriptIndex !== undefined) { + // Is this a type argument for a generic type alias? If so, + // exempt it from the check because the type alias may repeat + // the TypeVar multiple times. + const baseType = this._evaluator.getType(baseExpression); + const aliasInfo = baseType?.props?.typeAliasInfo; + if (aliasInfo?.shared.typeParams && subscriptIndex < aliasInfo.shared.typeParams.length) { + isExempt = true; + } + } + + const existingEntry = localTypeVarUsage.get(nameType.shared.name); + const isParamTypeWithEllipsisUsage = + curParamNode?.d.defaultValue?.nodeType === ParseNodeType.Ellipsis; + + if (!existingEntry) { + localTypeVarUsage.set(nameType.shared.name, { + nodes: [nameNode], + typeVar: nameType, + paramTypeUsageCount: curParamNode !== undefined ? 1 : 0, + paramTypeWithEllipsisUsageCount: isParamTypeWithEllipsisUsage ? 1 : 0, + returnTypeUsageCount: curParamNode === undefined ? 1 : 0, + paramWithEllipsis: isParamTypeWithEllipsisUsage ? 
curParamNode?.d.name?.d.value : undefined, + isExempt, + }); + } else { + existingEntry.nodes.push(nameNode); + if (curParamNode !== undefined) { + existingEntry.paramTypeUsageCount += 1; + if (isParamTypeWithEllipsisUsage) { + existingEntry.paramTypeWithEllipsisUsageCount += 1; + if (!existingEntry.paramWithEllipsis) { + existingEntry.paramWithEllipsis = curParamNode?.d.name?.d.value; + } + } + } else { + existingEntry.returnTypeUsageCount += 1; + } + } + } + + // Does this name refer to a TypeVar that is scoped to the class associated with + // this constructor method? + if (constructorClass && nameType.priv.scopeId === constructorClass.shared.typeVarScopeId) { + const existingEntry = classTypeVarUsage.get(nameType.shared.name); + const isParamTypeWithEllipsisUsage = + curParamNode?.d.defaultValue?.nodeType === ParseNodeType.Ellipsis; + const isExempt = !!nameType.shared.isDefaultExplicit; + + if (!existingEntry) { + classTypeVarUsage.set(nameType.shared.name, { + typeVar: nameType, + nodes: [nameNode], + paramTypeUsageCount: curParamNode !== undefined ? 1 : 0, + paramTypeWithEllipsisUsageCount: isParamTypeWithEllipsisUsage ? 1 : 0, + returnTypeUsageCount: 0, + paramWithEllipsis: isParamTypeWithEllipsisUsage ? curParamNode?.d.name?.d.value : undefined, + isExempt, + }); + } else { + existingEntry.nodes.push(nameNode); + if (curParamNode !== undefined) { + existingEntry.paramTypeUsageCount += 1; + if (isParamTypeWithEllipsisUsage) { + existingEntry.paramTypeWithEllipsisUsageCount += 1; + if (!existingEntry.paramWithEllipsis) { + existingEntry.paramWithEllipsis = curParamNode?.d.name?.d.value; + } + } + } + } + } + } + }); + + // Find all of the local type variables in signature. 
+ node.d.params.forEach((param) => { + const annotation = param.d.annotation || param.d.annotationComment; + if (annotation) { + curParamNode = param; + nameWalker.walk(annotation); + } + }); + curParamNode = undefined; + + if (node.d.returnAnnotation) { + // Don't exempt the use of a bound TypeVar when used as a type argument + // within a return type. This exemption applies only to input parameter + // annotations. + exemptBoundTypeVar = false; + nameWalker.walk(node.d.returnAnnotation); + } + + if (node.d.funcAnnotationComment) { + node.d.funcAnnotationComment.d.paramAnnotations.forEach((expr) => { + nameWalker.walk(expr); + }); + + if (node.d.funcAnnotationComment.d.returnAnnotation) { + exemptBoundTypeVar = false; + nameWalker.walk(node.d.funcAnnotationComment.d.returnAnnotation); + } + } + + localTypeVarUsage.forEach((usage) => { + // Report error for local type variable that appears only once. + if (usage.nodes.length === 1 && !usage.isExempt) { + let altTypeText: string; + + if (isTypeVarTuple(usage.typeVar)) { + altTypeText = '"tuple[object, ...]"'; + } else if (usage.typeVar.shared.boundType) { + altTypeText = `"${this._evaluator.printType(convertToInstance(usage.typeVar.shared.boundType))}"`; + } else { + altTypeText = '"object"'; + } + + const diag = new DiagnosticAddendum(); + diag.addMessage( + LocAddendum.typeVarUnnecessarySuggestion().format({ + type: altTypeText, + }) + ); + + this._evaluator.addDiagnostic( + DiagnosticRule.reportInvalidTypeVarUse, + LocMessage.typeVarUsedOnlyOnce().format({ + name: usage.nodes[0].d.value, + }) + diag.getString(), + usage.nodes[0] + ); + } + + // Report error for local type variable that appears in return type + // (but not as a top-level TypeVar within a union) and appears only + // within parameters that have default values. These may go unsolved. 
+ let isUsedInReturnType = usage.returnTypeUsageCount > 0; + if (usage.returnTypeUsageCount === 1 && type.shared.declaredReturnType) { + // If the TypeVar appears only once in the return type and it's a top-level + // TypeVar within a union, exempt it from this check. Although these + // TypeVars may go unsolved, they can be safely eliminated from the union + // without generating an Unknown type. + const returnType = type.shared.declaredReturnType; + if ( + isUnion(returnType) && + returnType.priv.subtypes.some( + (subtype) => isTypeVar(subtype) && subtype.shared.name === usage.nodes[0].d.value + ) + ) { + isUsedInReturnType = false; + } + } + + // Skip this check if the function is overloaded because the TypeVar + // will be solved in terms of the overload signatures. + const skipUnsolvableTypeVarCheck = + isOverloaded(functionTypeResult.decoratedType) && + !FunctionType.isOverloaded(functionTypeResult.functionType); + + if ( + isUsedInReturnType && + usage.paramTypeWithEllipsisUsageCount > 0 && + usage.paramTypeUsageCount === usage.paramTypeWithEllipsisUsageCount && + !skipUnsolvableTypeVarCheck + ) { + const diag = new DiagnosticAddendum(); + diag.addMessage(LocAddendum.typeVarUnsolvableRemedy()); + + this._evaluator.addDiagnostic( + DiagnosticRule.reportInvalidTypeVarUse, + LocMessage.typeVarPossiblyUnsolvable().format({ + name: usage.nodes[0].d.value, + param: usage.paramWithEllipsis ?? '', + }) + diag.getString(), + usage.nodes[0] + ); + } + }); + + // Report error for a class type variable that appears only within + // constructor parameters that have default values. These may go unsolved. 
+ classTypeVarUsage.forEach((usage) => { + if ( + usage.paramTypeWithEllipsisUsageCount > 0 && + usage.paramTypeUsageCount === usage.paramTypeWithEllipsisUsageCount && + !usage.isExempt + ) { + const diag = new DiagnosticAddendum(); + diag.addMessage(LocAddendum.typeVarUnsolvableRemedy()); + + this._evaluator.addDiagnostic( + DiagnosticRule.reportInvalidTypeVarUse, + LocMessage.typeVarPossiblyUnsolvable().format({ + name: usage.nodes[0].d.value, + param: usage.paramWithEllipsis ?? '', + }) + diag.getString(), + usage.nodes[0] + ); + } + }); + } + + // Validates that overloads use @staticmethod and @classmethod consistently. + private _validateOverloadAttributeConsistency(node: FunctionNode, functionType: OverloadedType) { + // Don't bother with the check if it's suppressed. + if (this._fileInfo.diagnosticRuleSet.reportInconsistentOverload === 'none') { + return; + } + + let staticMethodCount = 0; + let classMethodCount = 0; + + const overloads = OverloadedType.getOverloads(functionType); + if (overloads.length === 0) { + return; + } + let totalMethods = overloads.length; + + overloads.forEach((overload) => { + if (FunctionType.isStaticMethod(overload)) { + staticMethodCount++; + } + + if (FunctionType.isClassMethod(overload)) { + classMethodCount++; + } + }); + + const impl = OverloadedType.getImplementation(functionType); + if (impl && isFunction(impl)) { + totalMethods += 1; + if (FunctionType.isStaticMethod(impl)) { + staticMethodCount++; + } + + if (FunctionType.isClassMethod(impl)) { + classMethodCount++; + } + } + + if (staticMethodCount > 0 && staticMethodCount < totalMethods) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportInconsistentOverload, + LocMessage.overloadStaticMethodInconsistent().format({ + name: node.d.name.d.value, + }), + overloads[0]?.shared.declaration?.node.d.name ?? 
node.d.name + ); + } + + if (classMethodCount > 0 && classMethodCount < totalMethods) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportInconsistentOverload, + LocMessage.overloadClassMethodInconsistent().format({ + name: node.d.name.d.value, + }), + overloads[0]?.shared.declaration?.node.d.name ?? node.d.name + ); + } + } + + // Validates that overloads do not overlap with inconsistent return results. + private _validateOverloadConsistency( + node: FunctionNode, + functionType: FunctionType, + prevOverloads: FunctionType[] + ) { + // Skip the check entirely if it's disabled. + if (this._fileInfo.diagnosticRuleSet.reportOverlappingOverload === 'none') { + return; + } + + for (let i = 0; i < prevOverloads.length; i++) { + const prevOverload = prevOverloads[i]; + if (this._isOverlappingOverload(functionType, prevOverload, /* partialOverlap */ false)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportOverlappingOverload, + LocMessage.overlappingOverload().format({ + name: node.d.name.d.value, + obscured: prevOverloads.length + 1, + obscuredBy: i + 1, + }), + node.d.name + ); + break; + } + } + + for (let i = 0; i < prevOverloads.length; i++) { + const prevOverload = prevOverloads[i]; + if (this._isOverlappingOverload(prevOverload, functionType, /* partialOverlap */ true)) { + const prevReturnType = FunctionType.getEffectiveReturnType(prevOverload); + const returnType = FunctionType.getEffectiveReturnType(functionType); + + if ( + prevReturnType && + returnType && + !this._evaluator.assignType( + returnType, + prevReturnType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default + ) + ) { + const altNode = this._findNodeForOverload(node, prevOverload); + this._evaluator.addDiagnostic( + DiagnosticRule.reportOverlappingOverload, + LocMessage.overloadReturnTypeMismatch().format({ + name: node.d.name.d.value, + newIndex: prevOverloads.length + 1, + prevIndex: i + 1, + }), + (altNode || node).d.name + ); + break; + } + } + } + } + 
+ // Mypy reports overlapping overload errors on the line that contains the + // earlier overload. Typeshed stubs contain type: ignore comments on these + // lines, so it is important for us to report them in the same manner. + private _findNodeForOverload(functionNode: FunctionNode, overloadType: FunctionType): FunctionNode | undefined { + const decls = this._evaluator.getDeclInfoForNameNode(functionNode.d.name)?.decls; + if (!decls) { + return undefined; + } + + for (const decl of decls) { + if (decl.type === DeclarationType.Function) { + const functionType = this._evaluator.getTypeOfFunction(decl.node); + if (functionType?.functionType === overloadType) { + return decl.node; + } + } + } + + return undefined; + } + + private _isOverlappingOverload(functionType: FunctionType, prevOverload: FunctionType, partialOverlap: boolean) { + // According to precedent, the __get__ method is special-cased and is + // exempt from overlapping overload checks. It's not clear why this is + // the case, but for consistency with other type checkers, we'll honor + // this rule. See https://github.com/python/typing/issues/253#issuecomment-389262904 + // for details. + if (FunctionType.isInstanceMethod(functionType) && functionType.shared.name === '__get__') { + return false; + } + + let flags = + AssignTypeFlags.SkipReturnTypeCheck | + AssignTypeFlags.OverloadOverlap | + AssignTypeFlags.DisallowExtraKwargsForTd; + if (partialOverlap) { + flags |= AssignTypeFlags.PartialOverloadOverlap; + } + + const functionNode = functionType.shared.declaration?.node; + if (functionNode) { + const liveTypeVars = ParseTreeUtils.getTypeVarScopesForNode(functionNode); + functionType = makeTypeVarsBound(functionType, liveTypeVars); + } + + // Use the parent node of the declaration in this case so we don't transform + // function-local type variables into bound type variables. 
+ const prevOverloadNode = prevOverload.shared.declaration?.node?.parent; + if (prevOverloadNode) { + const liveTypeVars = ParseTreeUtils.getTypeVarScopesForNode(prevOverloadNode); + prevOverload = makeTypeVarsBound(prevOverload, liveTypeVars); + } + + return this._evaluator.assignType( + functionType, + prevOverload, + /* diag */ undefined, + /* constraints */ undefined, + flags + ); + } + + // Determines whether the implementation of an overload is compatible with an + // overload signature. To be compatible, the implementation must accept all + // of the same arguments as the overload and return a type that is consistent + // with the overload's return type. + private _validateOverloadImplementation( + overload: FunctionType, + implementation: FunctionType, + diag: DiagnosticAddendum | undefined + ): boolean { + const constraints = new ConstraintTracker(); + + let implBound = implementation; + let overloadBound = overload; + + const implNode = implementation.shared.declaration?.node?.parent; + if (implNode) { + const liveScopeIds = ParseTreeUtils.getTypeVarScopesForNode(implNode); + implBound = makeTypeVarsBound(implementation, liveScopeIds); + } + + const overloadNode = overload.shared.declaration?.node; + if (overloadNode) { + const liveScopeIds = ParseTreeUtils.getTypeVarScopesForNode(overloadNode); + overloadBound = makeTypeVarsBound(overload, liveScopeIds); + } + + // First check the parameters to see if they are assignable. + let isConsistent = this._evaluator.assignType( + overloadBound, + implBound, + diag, + constraints, + AssignTypeFlags.SkipReturnTypeCheck | + AssignTypeFlags.Contravariant | + AssignTypeFlags.SkipSelfClsTypeCheck | + AssignTypeFlags.DisallowExtraKwargsForTd + ); + + // Now check the return types. + const overloadReturnType = this._evaluator.solveAndApplyConstraints( + FunctionType.getEffectiveReturnType(overloadBound) ?? 
this._evaluator.getInferredReturnType(overloadBound), + constraints + ); + const implReturnType = this._evaluator.solveAndApplyConstraints( + FunctionType.getEffectiveReturnType(implBound) ?? this._evaluator.getInferredReturnType(implBound), + constraints + ); + + const returnDiag = new DiagnosticAddendum(); + if ( + !isNever(overloadReturnType) && + !this._evaluator.assignType( + implReturnType, + overloadReturnType, + returnDiag.createAddendum(), + constraints, + AssignTypeFlags.Default + ) + ) { + returnDiag.addMessage( + LocAddendum.functionReturnTypeMismatch().format({ + sourceType: this._evaluator.printType(overloadReturnType), + destType: this._evaluator.printType(implReturnType), + }) + ); + diag?.addAddendum(returnDiag); + isConsistent = false; + } + + return isConsistent; + } + + private _walkStatementsAndReportUnreachable(statements: StatementNode[]) { + let reportedUnreachable = false; + let prevStatement: StatementNode | undefined; + + for (const statement of statements) { + // No need to report unreachable more than once since the first time + // covers all remaining statements in the statement list. + if (!reportedUnreachable) { + const reachability = this._evaluator.getNodeReachability(statement, prevStatement); + if (reachability !== Reachability.Reachable) { + // Create a text range that covers the next statement through + // the end of the statement list. + const start = statement.start; + const lastStatement = statements[statements.length - 1]; + const end = TextRange.getEnd(lastStatement); + const textRange: TextRange = { start, length: end - start }; + + if ( + reachability === Reachability.UnreachableByAnalysis || + reachability === Reachability.UnreachableStructural + ) { + this._evaluator.addDiagnosticForTextRange( + this._fileInfo, + DiagnosticRule.reportUnreachable, + reachability === Reachability.UnreachableStructural + ? 
LocMessage.unreachableCodeStructure() + : LocMessage.unreachableCodeType(), + statement.nodeType === ParseNodeType.Error ? statement : statement.d.firstToken + ); + } + + this._evaluator.addUnreachableCode(statement, reachability, textRange); + + reportedUnreachable = true; + } + } + + if (!reportedUnreachable && this._fileInfo.isStubFile) { + this._validateStubStatement(statement); + } + + this.walk(statement); + + prevStatement = statement; + } + } + + private _validateStubStatement(statement: StatementNode) { + switch (statement.nodeType) { + case ParseNodeType.If: + case ParseNodeType.Function: + case ParseNodeType.Class: + case ParseNodeType.Error: { + // These are allowed in a stub file. + break; + } + + case ParseNodeType.While: + case ParseNodeType.For: + case ParseNodeType.Try: + case ParseNodeType.With: { + // These are not allowed. + this._evaluator.addDiagnostic( + DiagnosticRule.reportInvalidStubStatement, + LocMessage.invalidStubStatement(), + statement + ); + break; + } + + case ParseNodeType.StatementList: { + for (const substatement of statement.d.statements) { + let isValid = true; + + switch (substatement.nodeType) { + case ParseNodeType.Assert: + case ParseNodeType.AssignmentExpression: + case ParseNodeType.Await: + case ParseNodeType.BinaryOperation: + case ParseNodeType.Constant: + case ParseNodeType.Del: + case ParseNodeType.Dictionary: + case ParseNodeType.Index: + case ParseNodeType.For: + case ParseNodeType.FormatString: + case ParseNodeType.Global: + case ParseNodeType.Lambda: + case ParseNodeType.List: + case ParseNodeType.MemberAccess: + case ParseNodeType.Name: + case ParseNodeType.Nonlocal: + case ParseNodeType.Number: + case ParseNodeType.Raise: + case ParseNodeType.Return: + case ParseNodeType.Set: + case ParseNodeType.Slice: + case ParseNodeType.Ternary: + case ParseNodeType.Tuple: + case ParseNodeType.Try: + case ParseNodeType.UnaryOperation: + case ParseNodeType.Unpack: + case ParseNodeType.While: + case ParseNodeType.With: + 
case ParseNodeType.WithItem: + case ParseNodeType.Yield: + case ParseNodeType.YieldFrom: { + isValid = false; + break; + } + + case ParseNodeType.AugmentedAssignment: { + // Exempt __all__ manipulations. + isValid = + substatement.d.operator === OperatorType.AddEqual && + substatement.d.leftExpr.nodeType === ParseNodeType.Name && + substatement.d.leftExpr.d.value === '__all__'; + break; + } + + case ParseNodeType.Call: { + // Exempt __all__ manipulations. + isValid = + substatement.d.leftExpr.nodeType === ParseNodeType.MemberAccess && + substatement.d.leftExpr.d.leftExpr.nodeType === ParseNodeType.Name && + substatement.d.leftExpr.d.leftExpr.d.value === '__all__'; + break; + } + } + + if (!isValid) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportInvalidStubStatement, + LocMessage.invalidStubStatement(), + substatement + ); + } + } + } + } + } + + private _validateExceptionTypeRecursive( + exceptionType: Type, + diag: DiagnosticAddendum, + baseExceptionType: Type | undefined, + baseExceptionGroupType: Type | undefined, + allowTuple: boolean, + isExceptGroup: boolean + ) { + const derivesFromBaseException = (classType: ClassType) => { + if (!baseExceptionType || !isInstantiableClass(baseExceptionType)) { + return true; + } + + return derivesFromClassRecursive(classType, baseExceptionType, /* ignoreUnknown */ false); + }; + + const derivesFromBaseExceptionGroup = (classType: ClassType) => { + if (!baseExceptionGroupType || !isInstantiableClass(baseExceptionGroupType)) { + return true; + } + + return derivesFromClassRecursive(classType, baseExceptionGroupType, /* ignoreUnknown */ false); + }; + + doForEachSubtype(exceptionType, (exceptionSubtype) => { + if (isAnyOrUnknown(exceptionSubtype)) { + return; + } + + if (isClass(exceptionSubtype)) { + if (TypeBase.isInstantiable(exceptionSubtype)) { + if (!derivesFromBaseException(exceptionSubtype)) { + diag.addMessage( + LocMessage.exceptionTypeIncorrect().format({ + type: 
this._evaluator.printType(exceptionSubtype), + }) + ); + } + + if (isExceptGroup && derivesFromBaseExceptionGroup(exceptionSubtype)) { + diag.addMessage(LocMessage.exceptionGroupTypeIncorrect()); + } + return; + } + + if (allowTuple && exceptionSubtype.priv.tupleTypeArgs) { + exceptionSubtype.priv.tupleTypeArgs.forEach((typeArg) => { + this._validateExceptionTypeRecursive( + typeArg.type, + diag, + baseExceptionType, + baseExceptionGroupType, + /* allowTuple */ false, + isExceptGroup + ); + }); + return; + } + + diag.addMessage( + LocMessage.exceptionTypeIncorrect().format({ + type: this._evaluator.printType(exceptionSubtype), + }) + ); + } + }); + } + + private _validateExceptionType(exceptionType: Type, errorNode: ExpressionNode, isExceptGroup: boolean): void { + const baseExceptionType = this._evaluator.getBuiltInType(errorNode, 'BaseException'); + const baseExceptionGroupType = this._evaluator.getBuiltInType(errorNode, 'BaseExceptionGroup'); + const diagAddendum = new DiagnosticAddendum(); + + this._validateExceptionTypeRecursive( + exceptionType, + diagAddendum, + baseExceptionType, + baseExceptionGroupType, + /* allowTuple */ true, + isExceptGroup + ); + + if (!diagAddendum.isEmpty()) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.exceptionTypeNotClass().format({ + type: this._evaluator.printType(exceptionType), + }), + errorNode + ); + } + } + + private _reportUnusedDunderAllSymbols(nodes: StringNode[]) { + // If this rule is disabled, don't bother doing the work. 
+ if (this._fileInfo.diagnosticRuleSet.reportUnsupportedDunderAll === 'none') { + return; + } + + const moduleScope = AnalyzerNodeInfo.getScope(this._moduleNode); + if (!moduleScope) { + return; + } + + nodes.forEach((node) => { + if (!moduleScope.symbolTable.has(node.d.value)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnsupportedDunderAll, + LocMessage.dunderAllSymbolNotPresent().format({ name: node.d.value }), + node + ); + } + }); + } + + private _validateSymbolTables() { + const dependentFileInfo = this._dependentFiles?.map((p) => AnalyzerNodeInfo.getFileInfo(p.parseTree)); + for (const scopedNode of this._scopedNodes) { + const scope = AnalyzerNodeInfo.getScope(scopedNode); + + if (scope) { + scope.symbolTable.forEach((symbol, name) => { + this._conditionallyReportUnusedSymbol(name, symbol, scope.type, dependentFileInfo); + + this._reportIncompatibleDeclarations(name, symbol); + + this._reportOverwriteOfImportedFinal(name, symbol); + this._reportOverwriteOfBuiltinsFinal(name, symbol, scope); + this._reportMultipleFinalDeclarations(name, symbol, scope.type); + + this._reportFinalInLoop(symbol); + + this._reportMultipleTypeAliasDeclarations(name, symbol); + + this._reportInvalidOverload(name, symbol); + }); + } + } + + // Report unaccessed type parameters. + const accessedSymbolSet = this._fileInfo.accessedSymbolSet; + for (const paramList of this._typeParamLists) { + const typeParamScope = AnalyzerNodeInfo.getScope(paramList); + + for (const param of paramList.d.params) { + const symbol = typeParamScope?.symbolTable.get(param.d.name.d.value); + if (!symbol) { + // This can happen if the code is unreachable. 
+ return; + } + + if (!accessedSymbolSet.has(symbol.id)) { + const decls = symbol.getDeclarations(); + decls.forEach((decl) => { + this._conditionallyReportUnusedDeclaration(decl, /* isPrivate */ false); + }); + } + } + } + } + + private _reportInvalidOverload(name: string, symbol: Symbol) { + const typedDecls = symbol.getTypedDeclarations(); + if (typedDecls.length === 0) { + return; + } + + const primaryDecl = typedDecls[0]; + + if (primaryDecl.type !== DeclarationType.Function) { + return; + } + + const type = this._evaluator.getEffectiveTypeOfSymbol(symbol); + const overloads = isOverloaded(type) + ? OverloadedType.getOverloads(type) + : isFunction(type) && FunctionType.isOverloaded(type) + ? [type] + : []; + + // If the implementation has no name, it was synthesized probably by a + // decorator that used a callable with a ParamSpec that captured the + // overloaded signature. We'll exempt it from this check. + if (isOverloaded(type)) { + const overloads = OverloadedType.getOverloads(type); + if (overloads.length > 0 && overloads[0].shared.name === '') { + return; + } + } else if (isFunction(type)) { + if (type.shared.name === '') { + return; + } + } + + if (overloads.length === 1) { + // There should never be a single overload. + this._evaluator.addDiagnostic( + DiagnosticRule.reportInconsistentOverload, + LocMessage.singleOverload().format({ name }), + primaryDecl.node.d.name + ); + } + + // If the file is not a stub and this is the first overload, + // verify that there is an implementation. + if (this._fileInfo.isStubFile || overloads.length === 0) { + return; + } + + let implementation: Type | undefined; + + if (isOverloaded(type)) { + implementation = OverloadedType.getImplementation(type); + } else if (isFunction(type) && !FunctionType.isOverloaded(type)) { + implementation = type; + } + + if (!implementation) { + // If this is a method within a protocol class, don't require that + // there is an implementation. 
+ const containingClassNode = ParseTreeUtils.getEnclosingClassOrFunction(primaryDecl.node); + if (containingClassNode && containingClassNode.nodeType === ParseNodeType.Class) { + const classType = this._evaluator.getTypeOfClass(containingClassNode); + if (classType) { + if (ClassType.isProtocolClass(classType.classType)) { + return; + } + + if (ClassType.supportsAbstractMethods(classType.classType)) { + if ( + isOverloaded(type) && + OverloadedType.getOverloads(type).every((overload) => + FunctionType.isAbstractMethod(overload) + ) + ) { + return; + } + } + } + } + + // If the declaration isn't associated with any of the overloads in the + // type, the overloads came from a decorator that captured the overload + // from somewhere else. + if (!overloads.find((overload) => overload.shared.declaration === primaryDecl)) { + return; + } + + this._evaluator.addDiagnostic( + DiagnosticRule.reportNoOverloadImplementation, + LocMessage.overloadWithoutImplementation().format({ + name: primaryDecl.node.d.name.d.value, + }), + primaryDecl.node.d.name + ); + + return; + } + + if (!isOverloaded(type)) { + return; + } + + if (this._fileInfo.diagnosticRuleSet.reportInconsistentOverload === 'none') { + return; + } + + // Verify that all overload signatures are assignable to implementation signature. 
+ OverloadedType.getOverloads(type).forEach((overload, index) => { + const diag = new DiagnosticAddendum(); + if ( + implementation && + isFunction(implementation) && + !this._validateOverloadImplementation(overload, implementation, diag) + ) { + if (implementation!.shared.declaration) { + const diagnostic = this._evaluator.addDiagnostic( + DiagnosticRule.reportInconsistentOverload, + LocMessage.overloadImplementationMismatch().format({ + name, + index: index + 1, + }) + diag.getString(), + implementation!.shared.declaration.node.d.name + ); + + if (diagnostic && overload.shared.declaration) { + diagnostic.addRelatedInfo( + LocAddendum.overloadSignature(), + overload.shared.declaration?.uri ?? primaryDecl.uri, + overload.shared.declaration?.range ?? primaryDecl.range + ); + } + } + } + }); + } + + private _reportFinalInLoop(symbol: Symbol) { + if (!this._evaluator.isFinalVariable(symbol)) { + return; + } + + const decls = symbol.getDeclarations(); + if (decls.length === 0) { + return; + } + + if (ParseTreeUtils.isWithinLoop(decls[0].node)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.finalInLoop(), + decls[0].node + ); + } + } + + // If a variable that is marked Final in one module is imported by another + // module, an attempt to overwrite the imported symbol should generate an + // error. 
+ private _reportOverwriteOfImportedFinal(name: string, symbol: Symbol) { + if (this._evaluator.isFinalVariable(symbol)) { + return; + } + + const decls = symbol.getDeclarations(); + + const finalImportDecl = decls.find((decl) => { + if (decl.type === DeclarationType.Alias) { + const resolvedDecl = this._evaluator.resolveAliasDeclaration(decl, /* resolveLocalNames */ true); + if (resolvedDecl && isVariableDeclaration(resolvedDecl) && resolvedDecl.isFinal) { + return true; + } + } + + return false; + }); + + if (!finalImportDecl) { + return; + } + + decls.forEach((decl) => { + if (decl !== finalImportDecl) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.finalReassigned().format({ name }), + getNameNodeForDeclaration(decl) ?? decl.node + ); + } + }); + } + + // If the builtins module (or any implicitly chained module) defines a + // Final variable, an attempt to overwrite it should generate an error. + private _reportOverwriteOfBuiltinsFinal(name: string, symbol: Symbol, scope: Scope) { + if (scope.type !== ScopeType.Module || !scope.parent) { + return; + } + + const shadowedSymbolInfo = scope.parent.lookUpSymbolRecursive(name); + if (!shadowedSymbolInfo) { + return; + } + + if (!this._evaluator.isFinalVariable(shadowedSymbolInfo.symbol)) { + return; + } + + const decls = symbol.getDeclarations(); + decls.forEach((decl) => { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.finalReassigned().format({ name }), + getNameNodeForDeclaration(decl) ?? decl.node + ); + }); + } + + // If a variable is marked Final, it should receive only one assigned value. 
+ private _reportMultipleFinalDeclarations(name: string, symbol: Symbol, scopeType: ScopeType) { + if (!this._evaluator.isFinalVariable(symbol)) { + return; + } + + const decls = symbol.getDeclarations(); + let sawFinal = false; + let sawAssignment = false; + + decls.forEach((decl) => { + if (this._evaluator.isFinalVariableDeclaration(decl)) { + if (sawFinal) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.finalRedeclaration().format({ name }), + decl.node + ); + } + sawFinal = true; + } + + let reportRedeclaration = false; + + if (decl.type === DeclarationType.Variable) { + if (decl.inferredTypeSource) { + if (sawAssignment) { + let exemptAssignment = false; + + if (scopeType === ScopeType.Class) { + // We check for assignment of Final instance and class variables + // in the type evaluator because we need to take into account whether + // the assignment is within an `__init__` method, so ignore class + // scopes here. + const classOrFunc = ParseTreeUtils.getEnclosingClassOrFunction(decl.node); + if (classOrFunc?.nodeType === ParseNodeType.Function) { + exemptAssignment = true; + } + } + + if (!exemptAssignment) { + reportRedeclaration = true; + } + } + sawAssignment = true; + } + } else { + reportRedeclaration = true; + } + + if (reportRedeclaration) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.finalReassigned().format({ name }), + getNameNodeForDeclaration(decl) ?? decl.node + ); + } + }); + + // If it's not a stub file, an assignment must be provided. + if (!sawAssignment && !this._fileInfo.isStubFile) { + const firstDecl = decls.find((decl) => decl.type === DeclarationType.Variable && decl.isFinal); + if (firstDecl) { + // Is this an instance variable declared within a dataclass? If so, it + // is implicitly initialized by the synthesized `__init__` method and + // therefore has an implied assignment. 
+ let isImplicitlyAssigned = false; + + // Is this a class variable within a protocol class? If so, it can + // be marked final without providing a value. + let isProtocolClass = false; + + if (symbol.isClassMember() && !symbol.isClassVar()) { + const containingClass = ParseTreeUtils.getEnclosingClass(firstDecl.node, /* stopAtFunction */ true); + + if (containingClass) { + const classType = this._evaluator.getTypeOfClass(containingClass); + if (classType && isClass(classType.decoratedType)) { + if (ClassType.isDataClass(classType.decoratedType)) { + isImplicitlyAssigned = true; + } + + if (ClassType.isProtocolClass(classType.decoratedType)) { + isProtocolClass = true; + } + } + } + } + + if (!isImplicitlyAssigned && !isProtocolClass) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.finalUnassigned().format({ name }), + firstDecl.node + ); + } + } + } + } + + private _reportMultipleTypeAliasDeclarations(name: string, symbol: Symbol) { + const decls = symbol.getDeclarations(); + const typeAliasDecl = decls.find((decl) => this._evaluator.isExplicitTypeAliasDeclaration(decl)); + + // If this is a type alias, there should be only one declaration. + if (typeAliasDecl && decls.length > 1) { + decls.forEach((decl) => { + if (decl !== typeAliasDecl) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportRedeclaration, + LocMessage.typeAliasRedeclared().format({ name }), + decl.node + ); + } + }); + } + } + + private _reportIncompatibleDeclarations(name: string, symbol: Symbol) { + // If there's one or more declaration with a declared type, + // all other declarations should match. The only exception is + // for functions that have an overload. + const primaryDecl = getLastTypedDeclarationForSymbol(symbol); + + // If there's no declaration with a declared type, we're done. 
+ if (!primaryDecl) { + return; + } + + // Special case the '_' symbol, which is used in single dispatch + // code and other cases where the name does not matter. + if (name === '_') { + return; + } + + let otherDecls = symbol.getDeclarations().filter((decl) => decl !== primaryDecl); + + // If it's a function, we can skip any other declarations + // that are overloads or property setters/deleters. + if (primaryDecl.type === DeclarationType.Function) { + const primaryDeclTypeInfo = this._evaluator.getTypeOfFunction(primaryDecl.node); + + otherDecls = otherDecls.filter((decl) => { + if (decl.type !== DeclarationType.Function) { + return true; + } + + const funcTypeInfo = this._evaluator.getTypeOfFunction(decl.node); + if (!funcTypeInfo) { + return true; + } + + const decoratedType = primaryDeclTypeInfo + ? this._evaluator.makeTopLevelTypeVarsConcrete(primaryDeclTypeInfo.decoratedType) + : undefined; + + // We need to handle properties in a careful manner because of + // the way that setters and deleters are often defined using multiple + // methods with the same name. + if ( + decoratedType && + isClassInstance(decoratedType) && + ClassType.isPropertyClass(decoratedType) && + isClassInstance(funcTypeInfo.decoratedType) && + ClassType.isPropertyClass(funcTypeInfo.decoratedType) + ) { + return funcTypeInfo.decoratedType.shared.typeSourceId !== decoratedType.shared.typeSourceId; + } + + return !FunctionType.isOverloaded(funcTypeInfo.functionType); + }); + } + + // If there are no other declarations to consider, we're done. 
+ if (otherDecls.length === 0) { + return; + } + + let primaryDeclInfo: string; + if (primaryDecl.type === DeclarationType.Function) { + if (primaryDecl.isMethod) { + primaryDeclInfo = LocAddendum.seeMethodDeclaration(); + } else { + primaryDeclInfo = LocAddendum.seeFunctionDeclaration(); + } + } else if (primaryDecl.type === DeclarationType.Class) { + primaryDeclInfo = LocAddendum.seeClassDeclaration(); + } else if (primaryDecl.type === DeclarationType.Param) { + primaryDeclInfo = LocAddendum.seeParameterDeclaration(); + } else if (primaryDecl.type === DeclarationType.Variable) { + primaryDeclInfo = LocAddendum.seeVariableDeclaration(); + } else if (primaryDecl.type === DeclarationType.TypeAlias) { + primaryDeclInfo = LocAddendum.seeTypeAliasDeclaration(); + } else { + primaryDeclInfo = LocAddendum.seeDeclaration(); + } + + const addPrimaryDeclInfo = (diag?: Diagnostic) => { + if (diag) { + let primaryDeclNode: ParseNode | undefined; + if (primaryDecl.type === DeclarationType.Function || primaryDecl.type === DeclarationType.Class) { + primaryDeclNode = primaryDecl.node.d.name; + } else if (primaryDecl.type === DeclarationType.Variable) { + if (primaryDecl.node.nodeType === ParseNodeType.Name) { + primaryDeclNode = primaryDecl.node; + } + } else if ( + primaryDecl.type === DeclarationType.Param || + primaryDecl.type === DeclarationType.TypeParam + ) { + if (primaryDecl.node.d.name) { + primaryDeclNode = primaryDecl.node.d.name; + } + } + + if (primaryDeclNode) { + diag.addRelatedInfo(primaryDeclInfo, primaryDecl.uri, primaryDecl.range); + } + } + }; + + for (const otherDecl of otherDecls) { + if (otherDecl.type === DeclarationType.Class) { + let duplicateIsOk = false; + + if (primaryDecl.type === DeclarationType.TypeParam) { + // The error will be reported elsewhere if a type parameter is + // involved, so don't report it here. 
+ duplicateIsOk = true; + } + + if (!duplicateIsOk) { + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportRedeclaration, + LocMessage.obscuredClassDeclaration().format({ name }), + otherDecl.node.d.name + ); + addPrimaryDeclInfo(diag); + } + } else if (otherDecl.type === DeclarationType.Function) { + const primaryType = this._evaluator.getTypeForDeclaration(primaryDecl)?.type; + let duplicateIsOk = false; + + // If the return type has not yet been inferred, do so now. + if (primaryType && isFunction(primaryType)) { + this._evaluator.getInferredReturnType(primaryType); + } + + const otherType = this._evaluator.getTypeForDeclaration(otherDecl)?.type; + + const suite1 = ParseTreeUtils.getEnclosingSuite(primaryDecl.node); + const suite2 = ParseTreeUtils.getEnclosingSuite(otherDecl.node); + + // Allow same-signature overrides in cases where the declarations + // are not within the same statement suite (e.g. one in the "if" + // and another in the "else"). + const isInSameStatementList = suite1 === suite2; + + // If the return type has not yet been inferred, do so now. + if (otherType && isFunction(otherType)) { + this._evaluator.getInferredReturnType(otherType); + } + + // If both declarations are functions, it's OK if they + // both have the same signatures. + if (!isInSameStatementList && primaryType && otherType && isTypeSame(primaryType, otherType)) { + duplicateIsOk = true; + } + + if (primaryDecl.type === DeclarationType.TypeParam) { + // The error will be reported elsewhere if a type parameter is + // involved, so don't report it here. + duplicateIsOk = true; + } + + if (!duplicateIsOk) { + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportRedeclaration, + otherDecl.isMethod + ? 
LocMessage.obscuredMethodDeclaration().format({ name }) + : LocMessage.obscuredFunctionDeclaration().format({ name }), + otherDecl.node.d.name + ); + addPrimaryDeclInfo(diag); + } + } else if (otherDecl.type === DeclarationType.Param) { + if (otherDecl.node.d.name) { + let duplicateIsOk = false; + + if (primaryDecl.type === DeclarationType.TypeParam) { + // The error will be reported elsewhere if a type parameter is + // involved, so don't report it here. + duplicateIsOk = true; + } + + if (!duplicateIsOk) { + const message = LocMessage.obscuredParameterDeclaration(); + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportRedeclaration, + message.format({ name }), + otherDecl.node.d.name + ); + addPrimaryDeclInfo(diag); + } + } + } else if (otherDecl.type === DeclarationType.Variable) { + const primaryType = this._evaluator.getTypeForDeclaration(primaryDecl)?.type; + + if (otherDecl.typeAnnotationNode) { + if (otherDecl.node.nodeType === ParseNodeType.Name) { + let duplicateIsOk = false; + + // It's OK if they both have the same declared type. + const otherType = this._evaluator.getTypeForDeclaration(otherDecl)?.type; + if (primaryType && otherType && isTypeSame(primaryType, otherType)) { + duplicateIsOk = true; + } + + if (primaryDecl.type === DeclarationType.TypeParam) { + // The error will be reported elsewhere if a type parameter is + // involved, so don't report it here. 
+ duplicateIsOk = true; + } + + if (!duplicateIsOk) { + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportRedeclaration, + LocMessage.obscuredVariableDeclaration().format({ name }), + otherDecl.node + ); + addPrimaryDeclInfo(diag); + } + } + } + } else if (otherDecl.type === DeclarationType.TypeAlias) { + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportRedeclaration, + LocMessage.obscuredTypeAliasDeclaration().format({ name }), + otherDecl.node.d.name + ); + addPrimaryDeclInfo(diag); + } + } + } + + private _conditionallyReportUnusedSymbol( + name: string, + symbol: Symbol, + scopeType: ScopeType, + dependentFileInfo?: AnalyzerFileInfo[] + ) { + const accessedSymbolSet = this._fileInfo.accessedSymbolSet; + if (symbol.isIgnoredForProtocolMatch() || accessedSymbolSet.has(symbol.id)) { + return; + } + + // If this file is implicitly imported by other files, we need to make sure the symbol defined in + // the current file is not accessed from those other files. + if (dependentFileInfo && dependentFileInfo.some((i) => i.accessedSymbolSet.has(symbol.id))) { + return; + } + + // A name of "_" means "I know this symbol isn't used", so + // don't report it as unused. 
+ if (name === '_') { + return; + } + + if (SymbolNameUtils.isDunderName(name)) { + return; + } + + const decls = symbol.getDeclarations(); + decls.forEach((decl) => { + this._conditionallyReportUnusedDeclaration(decl, this._isSymbolPrivate(name, scopeType)); + }); + } + + private _conditionallyReportUnusedDeclaration(decl: Declaration, isPrivate: boolean) { + let diagnosticLevel: DiagnosticLevel; + let nameNode: NameNode | undefined; + let message: string | undefined; + let rule: DiagnosticRule | undefined; + + switch (decl.type) { + case DeclarationType.Alias: + diagnosticLevel = this._fileInfo.diagnosticRuleSet.reportUnusedImport; + rule = DiagnosticRule.reportUnusedImport; + if (decl.node.nodeType === ParseNodeType.ImportAs) { + if (decl.node.d.alias) { + // For statements of the form "import x as x", don't mark "x" as unaccessed + // because it's assumed to be re-exported. + // See https://typing.readthedocs.io/en/latest/source/stubs.html#imports. + if (decl.node.d.alias.d.value !== decl.moduleName) { + nameNode = decl.node.d.alias; + } + } else { + const nameParts = decl.node.d.module.d.nameParts; + // Multi-part imports are handled separately, so ignore those here. + if (nameParts.length === 1) { + nameNode = nameParts[0]; + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnusedImport, + LocMessage.unaccessedImport().format({ name: nameNode.d.value }), + nameNode + ); + message = LocMessage.unaccessedImport().format({ name: nameNode.d.value }); + } + } + } else if (decl.node.nodeType === ParseNodeType.ImportFromAs) { + const importFrom = decl.node.parent as ImportFromNode; + + // For statements of the form "from y import x as x", don't mark "x" as + // unaccessed because it's assumed to be re-exported. + const isReexport = decl.node.d.alias?.d.value === decl.node.d.name.d.value; + + // If this is a __future__ import, it's OK for the import symbol to be unaccessed. 
+ const isFuture = + importFrom.d.module.d.nameParts.length === 1 && + importFrom.d.module.d.nameParts[0].d.value === '__future__'; + + if (!isReexport && !isFuture) { + nameNode = decl.node.d.alias || decl.node.d.name; + } + } + + if (nameNode) { + message = LocMessage.unaccessedImport().format({ name: nameNode.d.value }); + } + break; + + case DeclarationType.TypeAlias: + case DeclarationType.Variable: + case DeclarationType.Param: + if (!isPrivate) { + return; + } + + if (this._fileInfo.isStubFile) { + // Don't mark variables or parameters as unaccessed in + // stub files. It's typical for them to be unaccessed here. + return; + } + + diagnosticLevel = this._fileInfo.diagnosticRuleSet.reportUnusedVariable; + + if (decl.node.nodeType === ParseNodeType.Name) { + nameNode = decl.node; + + // Don't emit a diagnostic if the name starts with an underscore. + // This indicates that the variable is unused. + if (nameNode.d.value.startsWith('_')) { + diagnosticLevel = 'none'; + } + } else if (decl.node.nodeType === ParseNodeType.Parameter) { + nameNode = decl.node.d.name; + + // Don't emit a diagnostic for unused parameters or type parameters. + diagnosticLevel = 'none'; + } + + if (nameNode) { + rule = DiagnosticRule.reportUnusedVariable; + message = LocMessage.unaccessedVariable().format({ name: nameNode.d.value }); + } + break; + + case DeclarationType.Class: + if (!isPrivate) { + return; + } + + // If a stub is exporting a private type, we'll assume that the author + // knows what he or she is doing. + if (this._fileInfo.isStubFile) { + return; + } + + diagnosticLevel = this._fileInfo.diagnosticRuleSet.reportUnusedClass; + nameNode = decl.node.d.name; + rule = DiagnosticRule.reportUnusedClass; + message = LocMessage.unaccessedClass().format({ name: nameNode.d.value }); + break; + + case DeclarationType.Function: + if (!isPrivate) { + return; + } + + // If a stub is exporting a private type, we'll assume that the author + // knows what he or she is doing. 
+ if (this._fileInfo.isStubFile) { + return; + } + + diagnosticLevel = this._fileInfo.diagnosticRuleSet.reportUnusedFunction; + nameNode = decl.node.d.name; + rule = DiagnosticRule.reportUnusedFunction; + message = LocMessage.unaccessedFunction().format({ name: nameNode.d.value }); + break; + + case DeclarationType.TypeParam: + // Never report a diagnostic for an unused TypeParam. + diagnosticLevel = 'none'; + nameNode = decl.node.d.name; + break; + + case DeclarationType.Intrinsic: + case DeclarationType.SpecialBuiltInClass: + return; + + default: + assertNever(decl); + } + + const action = rule === DiagnosticRule.reportUnusedImport ? { action: Commands.unusedImport } : undefined; + if (nameNode) { + this._fileInfo.diagnosticSink.addUnusedCodeWithTextRange( + LocMessage.unaccessedSymbol().format({ name: nameNode.d.value }), + nameNode, + action + ); + + if (rule !== undefined && message && diagnosticLevel !== 'none') { + this._evaluator.addDiagnostic(rule, message, nameNode); + } + } + } + + // Validates that a call to isinstance or issubclass are necessary. This is a + // common source of programming errors. Also validates that arguments passed + // to isinstance or issubclass won't generate exceptions. 
+ private _validateIsInstanceCall(node: CallNode) { + if ( + node.d.leftExpr.nodeType !== ParseNodeType.Name || + (node.d.leftExpr.d.value !== 'isinstance' && node.d.leftExpr.d.value !== 'issubclass') || + node.d.args.length !== 2 + ) { + return; + } + + const callName = node.d.leftExpr.d.value; + const isInstanceCheck = callName === 'isinstance'; + + let arg0Type = this._evaluator.getType(node.d.args[0].d.valueExpr); + if (!arg0Type) { + return; + } + arg0Type = mapSubtypes(arg0Type, (subtype) => { + return transformPossibleRecursiveTypeAlias(subtype); + }); + + arg0Type = this._evaluator.expandPromotionTypes(node, arg0Type); + + const arg1Type = this._evaluator.getType(node.d.args[1].d.valueExpr); + if (!arg1Type) { + return; + } + + let isValidType = true; + const diag = new DiagnosticAddendum(); + doForEachSubtype(arg1Type, (arg1Subtype) => { + if (isClassInstance(arg1Subtype) && ClassType.isTupleClass(arg1Subtype) && arg1Subtype.priv.tupleTypeArgs) { + if ( + arg1Subtype.priv.tupleTypeArgs.some( + (typeArg) => !this._isTypeSupportedTypeForIsInstance(typeArg.type, isInstanceCheck, diag) + ) + ) { + isValidType = false; + } + } else { + if (!this._isTypeSupportedTypeForIsInstance(arg1Subtype, isInstanceCheck, diag)) { + isValidType = false; + } + } + }); + + if (!isValidType) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportArgumentType, + isInstanceCheck + ? LocMessage.isInstanceInvalidType().format({ + type: this._evaluator.printType(arg1Type), + }) + diag.getString() + : LocMessage.isSubclassInvalidType().format({ + type: this._evaluator.printType(arg1Type), + }) + diag.getString(), + node.d.args[1] + ); + } + + // If this call is an issubclass, check for the use of a "data protocol", + // which PEP 544 says cannot be used in issubclass. 
+ if (!isInstanceCheck) { + const diag = new DiagnosticAddendum(); + + doForEachSubtype(arg1Type, (arg1Subtype) => { + if ( + isClassInstance(arg1Subtype) && + ClassType.isTupleClass(arg1Subtype) && + arg1Subtype.priv.tupleTypeArgs + ) { + arg1Subtype.priv.tupleTypeArgs.forEach((typeArg) => { + this._validateNotDataProtocol(typeArg.type, diag); + }); + } else { + this._validateNotDataProtocol(arg1Subtype, diag); + } + }); + + if (!diag.isEmpty()) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataProtocolInSubclassCheck(), + node.d.args[1] + ); + } + } + + // If this call is within an assert statement, we won't check whether + // it's unnecessary. + if (ParseTreeUtils.isWithinAssertExpression(node)) { + return; + } + + const classTypeList = getIsInstanceClassTypes(this._evaluator, arg1Type); + if (!classTypeList) { + return; + } + + // Check for unsafe protocol overlaps. + classTypeList.forEach((filterType) => { + if (isInstantiableClass(filterType)) { + this._validateUnsafeProtocolOverlap( + node.d.args[0].d.valueExpr, + ClassType.cloneAsInstance(filterType), + isInstanceCheck ? arg0Type : convertToInstance(arg0Type) + ); + } + }); + + // Check for unnecessary isinstance or issubclass calls. 
+ if (this._fileInfo.diagnosticRuleSet.reportUnnecessaryIsInstance !== 'none') { + const narrowedTypeNegative = narrowTypeForInstanceOrSubclass( + this._evaluator, + arg0Type, + classTypeList, + isInstanceCheck, + /* isTypeIsCheck */ false, + /* isPositiveTest */ false, + node + ); + + const narrowedTypePositive = narrowTypeForInstanceOrSubclass( + this._evaluator, + arg0Type, + classTypeList, + isInstanceCheck, + /* isTypeIsCheck */ false, + /* isPositiveTest */ true, + node + ); + + const isAlwaysTrue = isNever(narrowedTypeNegative); + const isNeverTrue = isNever(narrowedTypePositive); + + if (isAlwaysTrue || isNeverTrue) { + const classType = combineTypes(classTypeList.map((t) => convertToInstance(t))); + const messageTemplate = isAlwaysTrue + ? isInstanceCheck + ? LocMessage.unnecessaryIsInstanceAlways() + : LocMessage.unnecessaryIsSubclassAlways() + : isInstanceCheck + ? LocMessage.unnecessaryIsInstanceNever() + : LocMessage.unnecessaryIsSubclassNever(); + + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnnecessaryIsInstance, + messageTemplate.format({ + testType: this._evaluator.printType(arg0Type), + classType: this._evaluator.printType(classType), + }), + node + ); + } + } + } + + private _validateUnsafeProtocolOverlap(errorNode: ExpressionNode, protocol: ClassType, testType: Type) { + // If this is a protocol class, check for an "unsafe overlap" + // with the arg0 type. 
+ if (ClassType.isProtocolClass(protocol)) { + let isUnsafeOverlap = false; + const diag = new DiagnosticAddendum(); + + doForEachSubtype(testType, (testSubtype) => { + if (isClassInstance(testSubtype)) { + if (isProtocolUnsafeOverlap(this._evaluator, protocol, testSubtype)) { + isUnsafeOverlap = true; + diag.addMessage( + LocAddendum.protocolUnsafeOverlap().format({ + name: testSubtype.shared.name, + }) + ); + } + } + }); + + if (isUnsafeOverlap) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.protocolUnsafeOverlap().format({ + name: protocol.shared.name, + }) + diag.getString(), + errorNode + ); + } + } + } + + // Determines whether the specified type is allowed as the second argument + // to an isinstance or issubclass check. + private _isTypeSupportedTypeForIsInstance(type: Type, isInstanceCheck: boolean, diag: DiagnosticAddendum) { + let isSupported = true; + + doForEachSubtype(type, (subtype) => { + subtype = this._evaluator.makeTopLevelTypeVarsConcrete(subtype); + subtype = transformPossibleRecursiveTypeAlias(subtype); + + if (subtype.props?.specialForm && ClassType.isBuiltIn(subtype.props.specialForm, 'TypeAliasType')) { + diag.addMessage(LocAddendum.typeAliasInstanceCheck()); + isSupported = false; + return; + } + + switch (subtype.category) { + case TypeCategory.Any: + case TypeCategory.Unknown: + case TypeCategory.Unbound: + break; + + case TypeCategory.Class: + if (ClassType.isBuiltIn(subtype, 'TypedDict')) { + diag.addMessage(LocAddendum.typedDictNotAllowed()); + isSupported = false; + } else if (ClassType.isBuiltIn(subtype, 'NamedTuple')) { + diag.addMessage(LocAddendum.namedTupleNotAllowed()); + isSupported = false; + } else if (isNoneInstance(subtype)) { + diag.addMessage(LocAddendum.noneNotAllowed()); + isSupported = false; + } else if (ClassType.isTypedDictClass(subtype)) { + diag.addMessage(LocAddendum.typedDictClassNotAllowed()); + isSupported = false; + } else if (subtype.priv.isTypeArgExplicit && 
!subtype.priv.includeSubclasses) { + // If it's a class, make sure that it has not been given explicit + // type arguments. This will result in a TypeError exception. + diag.addMessage(LocAddendum.genericClassNotAllowed()); + isSupported = false; + } else if (ClassType.isIllegalIsinstanceClass(subtype)) { + diag.addMessage( + LocAddendum.isinstanceClassNotSupported().format({ type: subtype.shared.name }) + ); + isSupported = false; + } else if ( + ClassType.isProtocolClass(subtype) && + !ClassType.isRuntimeCheckable(subtype) && + !subtype.priv.includeSubclasses + ) { + // According to PEP 544, protocol classes cannot be used as the right-hand + // argument to isinstance or issubclass unless they are annotated as + // "runtime checkable". + diag.addMessage(LocAddendum.protocolRequiresRuntimeCheckable()); + isSupported = false; + } else if (ClassType.isNewTypeClass(subtype)) { + diag.addMessage(LocAddendum.newTypeClassNotAllowed()); + isSupported = false; + } else if ( + subtype.props?.specialForm && + isClassInstance(subtype.props.specialForm) && + ClassType.isBuiltIn(subtype.props.specialForm, 'Annotated') + ) { + diag.addMessage(LocAddendum.annotatedNotAllowed()); + isSupported = false; + } else if ( + subtype.props?.specialForm && + isInstantiableClass(subtype.props.specialForm) && + ClassType.isBuiltIn(subtype.props.specialForm, 'Literal') + ) { + diag.addMessage(LocAddendum.literalNotAllowed()); + isSupported = false; + } + break; + + case TypeCategory.Function: + if (!TypeBase.isInstantiable(subtype) || subtype.priv.isCallableWithTypeArgs) { + diag.addMessage(LocAddendum.genericClassNotAllowed()); + isSupported = false; + } + break; + + case TypeCategory.TypeVar: + diag.addMessage(LocAddendum.typeVarNotAllowed()); + isSupported = false; + break; + } + }); + + return isSupported; + } + + private _validateNotDataProtocol(type: Type, diag: DiagnosticAddendum) { + if (isInstantiableClass(type) && ClassType.isProtocolClass(type) && !isMethodOnlyProtocol(type)) { + 
diag.addMessage( + LocAddendum.dataProtocolUnsupported().format({ + name: type.shared.name, + }) + ); + } + } + + private _isSymbolPrivate(nameValue: string, scopeType: ScopeType) { + // All variables within the scope of a function or a list + // comprehension are considered private. + if (scopeType === ScopeType.Function || scopeType === ScopeType.Comprehension) { + return true; + } + + // See if the symbol is private. + if (SymbolNameUtils.isPrivateName(nameValue)) { + return true; + } + + if (SymbolNameUtils.isProtectedName(nameValue)) { + // Protected names outside of a class scope are considered private. + const isClassScope = scopeType === ScopeType.Class; + return !isClassScope; + } + + return false; + } + + private _reportDeprecatedClassProperty(node: FunctionNode, functionTypeResult: FunctionTypeResult) { + if ( + !isClassInstance(functionTypeResult.decoratedType) || + !ClassType.isClassProperty(functionTypeResult.decoratedType) + ) { + return; + } + + this._reportDeprecatedDiagnostic(node.d.name, LocMessage.classPropertyDeprecated()); + } + + private _reportDeprecatedUseForMemberAccess(node: NameNode, info: MemberAccessDeprecationInfo) { + let errorMessage: string | undefined; + + if (info.accessType === 'property') { + if (info.accessMethod === 'get') { + errorMessage = LocMessage.deprecatedPropertyGetter().format({ name: node.d.value }); + } else if (info.accessMethod === 'set') { + errorMessage = LocMessage.deprecatedPropertySetter().format({ name: node.d.value }); + } else { + errorMessage = LocMessage.deprecatedPropertyDeleter().format({ name: node.d.value }); + } + } else if (info.accessType === 'descriptor') { + if (info.accessMethod === 'get') { + errorMessage = LocMessage.deprecatedDescriptorGetter().format({ name: node.d.value }); + } else if (info.accessMethod === 'set') { + errorMessage = LocMessage.deprecatedDescriptorSetter().format({ name: node.d.value }); + } else { + errorMessage = LocMessage.deprecatedDescriptorDeleter().format({ name: 
node.d.value }); + } + } + + if (errorMessage) { + this._reportDeprecatedDiagnostic(node, errorMessage, info.deprecatedMessage); + } + } + + private _reportDeprecatedUseForOperation(node: ExpressionNode, typeResult: TypeResult | undefined) { + const deprecationInfo = typeResult?.magicMethodDeprecationInfo; + if (!deprecationInfo) { + return; + } + + this._reportDeprecatedDiagnostic( + node, + LocMessage.deprecatedMethod().format({ + className: deprecationInfo.className, + name: deprecationInfo.methodName, + }), + deprecationInfo.deprecatedMessage + ); + } + + private _reportDeprecatedUseForType(node: NameNode, type: Type | undefined, isImportFromTyping = false) { + if (!type) { + return; + } + + let errorMessage: string | undefined; + let deprecatedMessage: string | undefined; + + function getDeprecatedMessageForFunction(functionType: FunctionType): string { + if ( + functionType.shared.declaration && + functionType.shared.declaration.node.nodeType === ParseNodeType.Function + ) { + const containingClass = ParseTreeUtils.getEnclosingClass( + functionType.shared.declaration.node, + /* stopAtFunction */ true + ); + + if (containingClass) { + return LocMessage.deprecatedMethod().format({ + name: functionType.shared.name || '', + className: containingClass.d.name.d.value, + }); + } + } + + return LocMessage.deprecatedFunction().format({ + name: functionType.shared.name, + }); + } + + function getDeprecatedMessageForOverloadedCall(evaluator: TypeEvaluator, type: Type) { + // Determine if the node is part of a call expression. If so, + // we can determine which overload(s) were used to satisfy + // the call expression and determine whether any of them + // are deprecated. 
+ let callTypeResult: TypeResult | undefined; + + const callNode = ParseTreeUtils.getCallForName(node); + if (callNode) { + callTypeResult = evaluator.getTypeResult(callNode); + } else { + const decoratorNode = ParseTreeUtils.getDecoratorForName(node); + if (decoratorNode) { + callTypeResult = evaluator.getTypeResultForDecorator(decoratorNode); + } + } + + if ( + callTypeResult && + callTypeResult.overloadsUsedForCall && + callTypeResult.overloadsUsedForCall.length > 0 + ) { + callTypeResult.overloadsUsedForCall.forEach((overload) => { + if (overload.shared.deprecatedMessage !== undefined) { + if (node.d.value === overload.shared.name) { + deprecatedMessage = overload.shared.deprecatedMessage; + errorMessage = getDeprecatedMessageForFunction(overload); + } else if ( + isInstantiableClass(type) && + ['__init__', '__new__'].includes(overload.shared.name) + ) { + deprecatedMessage = overload.shared.deprecatedMessage; + errorMessage = LocMessage.deprecatedConstructor().format({ + name: type.shared.name, + }); + } else if (isClassInstance(type) && overload.shared.name === '__call__') { + deprecatedMessage = overload.shared.deprecatedMessage; + errorMessage = LocMessage.deprecatedFunction().format({ + name: node.d.value, + }); + } + } + }); + } + } + + doForEachSubtype(type, (subtype) => { + if (isClass(subtype)) { + if ( + !subtype.priv.includeSubclasses && + subtype.shared.deprecatedMessage !== undefined && + node.d.value === subtype.shared.name + ) { + deprecatedMessage = subtype.shared.deprecatedMessage; + errorMessage = LocMessage.deprecatedClass().format({ name: subtype.shared.name }); + return; + } + + getDeprecatedMessageForOverloadedCall(this._evaluator, subtype); + return; + } + + if (isFunction(subtype)) { + if (subtype.shared.deprecatedMessage !== undefined) { + if ( + !subtype.shared.name || + subtype.shared.name === '__call__' || + node.d.value === subtype.shared.name + ) { + deprecatedMessage = subtype.shared.deprecatedMessage; + errorMessage = 
getDeprecatedMessageForFunction(subtype); + } + } + } else if (isOverloaded(subtype)) { + // Determine if the node is part of a call expression. If so, + // we can determine which overload(s) were used to satisfy + // the call expression and determine whether any of them + // are deprecated. + getDeprecatedMessageForOverloadedCall(this._evaluator, subtype); + + // If there the implementation itself is deprecated, assume it + // is deprecated even if it's outside of a call expression. + const impl = OverloadedType.getImplementation(subtype); + if (impl && isFunction(impl) && impl.shared.deprecatedMessage !== undefined) { + if (!impl.shared.name || node.d.value === impl.shared.name) { + deprecatedMessage = impl.shared.deprecatedMessage; + errorMessage = getDeprecatedMessageForFunction(impl); + } + } + } + }); + + if (errorMessage) { + this._reportDeprecatedDiagnostic(node, errorMessage, deprecatedMessage); + } + + if (this._fileInfo.diagnosticRuleSet.deprecateTypingAliases) { + const deprecatedForm = deprecatedAliases.get(node.d.value) ?? 
deprecatedSpecialForms.get(node.d.value); + + if (deprecatedForm) { + if ( + (isInstantiableClass(type) && type.shared.fullName === deprecatedForm.fullName) || + type.props?.typeAliasInfo?.shared.fullName === deprecatedForm.fullName + ) { + if ( + PythonVersion.isGreaterOrEqualTo( + this._fileInfo.executionEnvironment.pythonVersion, + deprecatedForm.version + ) + ) { + if (!deprecatedForm.typingImportOnly || isImportFromTyping) { + this._reportDeprecatedDiagnostic( + node, + LocMessage.deprecatedType().format({ + version: PythonVersion.toString(deprecatedForm.version), + replacement: deprecatedForm.replacementText, + }) + ); + } + } + } + } + } + } + + private _reportDeprecatedDiagnostic(node: ParseNode, diagnosticMessage: string, deprecatedMessage?: string) { + const diag = new DiagnosticAddendum(); + if (deprecatedMessage) { + diag.addMessage(deprecatedMessage); + } + + if (this._fileInfo.diagnosticRuleSet.reportDeprecated === 'none') { + this._evaluator.addDeprecated(diagnosticMessage + diag.getString(), node); + } else { + this._evaluator.addDiagnostic(DiagnosticRule.reportDeprecated, diagnosticMessage + diag.getString(), node); + } + } + + private _reportUnboundName(node: NameNode) { + if (this._fileInfo.diagnosticRuleSet.reportUnboundVariable === 'none') { + return; + } + + // Skip this for keyword argument names. 
+ if (node.parent?.nodeType === ParseNodeType.Argument && node.parent.d.name === node) { + return; + } + + if (!AnalyzerNodeInfo.isCodeUnreachable(node)) { + const type = this._evaluator.getType(node); + + if (type) { + if (isUnbound(type)) { + if (this._evaluator.isNodeReachable(node)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnboundVariable, + LocMessage.symbolIsUnbound().format({ name: node.d.value }), + node + ); + } + } else if (isPossiblyUnbound(type)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportPossiblyUnboundVariable, + LocMessage.symbolIsPossiblyUnbound().format({ name: node.d.value }), + node + ); + } + } + } + } + + private _conditionallyReportPrivateUsage(node: NameNode) { + if (this._fileInfo.diagnosticRuleSet.reportPrivateUsage === 'none') { + return; + } + + // Ignore privates in type stubs. + if (this._fileInfo.isStubFile) { + return; + } + + // Ignore privates in named arguments. + if (node.parent?.nodeType === ParseNodeType.Argument && node.parent.d.name === node) { + return; + } + + const nameValue = node.d.value; + const isPrivateName = SymbolNameUtils.isPrivateName(nameValue); + const isProtectedName = SymbolNameUtils.isProtectedName(nameValue); + + // If it's not a protected or private name, don't bother with + // any further checks. + if (!isPrivateName && !isProtectedName) { + return; + } + + // Get the declarations for this name node, but filter out + // any variable declarations that are bound using nonlocal + // or global explicit bindings. + const declarations = this._evaluator + .getDeclInfoForNameNode(node) + ?.decls?.filter((decl) => decl.type !== DeclarationType.Variable || !decl.isExplicitBinding); + + let primaryDeclaration = + declarations && declarations.length > 0 ? declarations[declarations.length - 1] : undefined; + if (!primaryDeclaration || primaryDeclaration.node === node) { + return; + } + + if (primaryDeclaration.type === DeclarationType.Alias) { + // If this symbol is an import alias (i.e. 
it's a local name rather than the + // original imported name), skip the private check. + if (primaryDeclaration.usesLocalName) { + return; + } + + const resolvedAliasInfo = this._evaluator.resolveAliasDeclarationWithInfo( + primaryDeclaration, + /* resolveLocalNames */ true + ); + + if (!resolvedAliasInfo) { + return; + } + + primaryDeclaration = resolvedAliasInfo.declaration; + + // If the alias resolved to a stub file or py.typed source file + // and the declaration is marked "externally visible", it is + // exempt from private usage checks. + if (!resolvedAliasInfo.isPrivate) { + return; + } + } + + if (!primaryDeclaration || primaryDeclaration.node === node) { + return; + } + + let classNode: ClassNode | undefined; + if (primaryDeclaration.node) { + classNode = ParseTreeUtils.getEnclosingClass(primaryDeclaration.node); + } + + // If this is the name of a class, find the class that contains it rather + // than constraining the use of the class name within the class itself. + if (primaryDeclaration.node && primaryDeclaration.node.parent && primaryDeclaration.node.parent === classNode) { + classNode = ParseTreeUtils.getEnclosingClass(classNode); + } + + // If it's a class member, check whether it's a legal protected access. + let isProtectedAccess = false; + if (classNode) { + if (isProtectedName) { + const declClassTypeInfo = this._evaluator.getTypeOfClass(classNode); + if (declClassTypeInfo && isInstantiableClass(declClassTypeInfo.decoratedType)) { + // If it's a member defined in a stub file, we'll assume that it's part + // of the public contract even if it's named as though it's private. + if (ClassType.isDefinedInStub(declClassTypeInfo.decoratedType)) { + return; + } + + // Note that the access is to a protected class member. 
+ isProtectedAccess = true; + + const enclosingClassNode = ParseTreeUtils.getEnclosingClass(node); + if (enclosingClassNode) { + const enclosingClassTypeInfo = this._evaluator.getTypeOfClass(enclosingClassNode); + + // If the referencing class is a subclass of the declaring class, it's + // allowed to access a protected name. + if (enclosingClassTypeInfo && isInstantiableClass(enclosingClassTypeInfo.decoratedType)) { + if ( + derivesFromClassRecursive( + enclosingClassTypeInfo.decoratedType, + declClassTypeInfo.decoratedType, + /* ignoreUnknown */ true + ) + ) { + return; + } + } + } + } + } + } + + if (classNode && !ParseTreeUtils.isNodeContainedWithin(node, classNode)) { + if (isProtectedAccess) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportPrivateUsage, + LocMessage.protectedUsedOutsideOfClass().format({ name: nameValue }), + node + ); + } else { + this._evaluator.addDiagnostic( + DiagnosticRule.reportPrivateUsage, + LocMessage.privateUsedOutsideOfClass().format({ name: nameValue }), + node + ); + } + } + } + + // Validates that an enum class does not attempt to override another + // enum class that has already defined values. + private _validateEnumClassOverride(node: ClassNode, classType: ClassType) { + classType.shared.baseClasses.forEach((baseClass, index) => { + if (isClass(baseClass) && isEnumClassWithMembers(this._evaluator, baseClass)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.enumClassOverride().format({ name: baseClass.shared.name }), + node.d.arguments[index] + ); + } + }); + } + + // Verifies the rules specified in PEP 589 about TypedDict classes. + // They cannot have statements other than type annotations, doc + // strings, and "pass" statements or ellipses. 
+ private _validateTypedDictClassSuite(suiteNode: SuiteNode) { + const emitBadStatementError = (node: ParseNode) => { + this._evaluator.addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.typedDictBadVar(), node); + }; + + suiteNode.d.statements.forEach((statement) => { + if (!AnalyzerNodeInfo.isCodeUnreachable(statement)) { + if (statement.nodeType === ParseNodeType.StatementList) { + for (const substatement of statement.d.statements) { + if ( + substatement.nodeType !== ParseNodeType.TypeAnnotation && + substatement.nodeType !== ParseNodeType.Ellipsis && + substatement.nodeType !== ParseNodeType.StringList && + substatement.nodeType !== ParseNodeType.Pass + ) { + emitBadStatementError(substatement); + } + } + } else { + emitBadStatementError(statement); + } + } + }); + } + + private _validateTypeGuardFunction(node: FunctionNode, functionType: FunctionType, isMethod: boolean) { + const returnType = functionType.shared.declaredReturnType; + if (!returnType) { + return; + } + + if (!isClassInstance(returnType) || !returnType.priv.typeArgs || returnType.priv.typeArgs.length < 1) { + return; + } + + const isTypeGuard = ClassType.isBuiltIn(returnType, 'TypeGuard'); + const isTypeIs = ClassType.isBuiltIn(returnType, 'TypeIs'); + + if (!isTypeGuard && !isTypeIs) { + return; + } + + // Make sure there's at least one input parameter provided. 
+ let paramCount = functionType.shared.parameters.length; + if (isMethod) { + if ( + FunctionType.isInstanceMethod(functionType) || + FunctionType.isConstructorMethod(functionType) || + FunctionType.isClassMethod(functionType) + ) { + paramCount--; + } + } + + if (paramCount < 1) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeGuardParamCount(), + node.d.name + ); + } + + if (isTypeIs) { + const scopeIds = getTypeVarScopeIds(functionType); + const narrowedType = returnType.priv.typeArgs[0]; + let typeGuardType = makeTypeVarsBound(narrowedType, scopeIds); + typeGuardType = TypeBase.cloneWithTypeForm(typeGuardType, typeGuardType); + + // Determine the type of the first parameter. + const paramIndex = isMethod && !FunctionType.isStaticMethod(functionType) ? 1 : 0; + if (paramIndex >= functionType.shared.parameters.length) { + return; + } + + const paramType = makeTypeVarsBound(FunctionType.getParamType(functionType, paramIndex), scopeIds); + + // Verify that the typeGuardType is a narrower type than the paramType. + if (!this._evaluator.assignType(paramType, typeGuardType)) { + const returnAnnotation = node.d.returnAnnotation || node.d.funcAnnotationComment?.d.returnAnnotation; + if (returnAnnotation) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeIsReturnType().format({ + type: this._evaluator.printType(paramType), + returnType: this._evaluator.printType(narrowedType), + }), + returnAnnotation + ); + } + } + } + } + + private _validateDunderSignatures(node: FunctionNode, functionType: FunctionType, isMethod: boolean) { + const functionName = functionType.shared.name; + + // Is this an '__init__' method? Verify that it returns None. 
+ if (isMethod && functionName === '__init__') { + const returnAnnotation = node.d.returnAnnotation || node.d.funcAnnotationComment?.d.returnAnnotation; + const declaredReturnType = functionType.shared.declaredReturnType; + + if (returnAnnotation && declaredReturnType) { + if (!isNoneInstance(declaredReturnType) && !isNever(declaredReturnType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.initMustReturnNone(), + returnAnnotation + ); + } + } else { + const inferredReturnType = this._evaluator.getInferredReturnType(functionType); + if ( + !isNever(inferredReturnType) && + !isNoneInstance(inferredReturnType) && + !isAnyOrUnknown(inferredReturnType) + ) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.initMustReturnNone(), + node.d.name + ); + } + } + } + } + + private _validateFunctionReturn(node: FunctionNode, functionType: FunctionType) { + // Stub files are allowed not to return an actual value, + // so skip this if it's a stub file. + if (this._fileInfo.isStubFile) { + return; + } + + const returnAnnotation = node.d.returnAnnotation || node.d.funcAnnotationComment?.d.returnAnnotation; + if (returnAnnotation) { + const functionNeverReturns = !this._evaluator.isAfterNodeReachable(node); + const implicitlyReturnsNone = this._evaluator.isAfterNodeReachable(node.d.suite); + + let declaredReturnType = functionType.shared.declaredReturnType; + + if (declaredReturnType) { + this._reportUnknownReturnResult(node, declaredReturnType); + this._validateReturnTypeIsNotContravariant(declaredReturnType, returnAnnotation); + + const liveScopes = ParseTreeUtils.getTypeVarScopesForNode(node); + declaredReturnType = makeTypeVarsBound(declaredReturnType, liveScopes); + } + + // Wrap the declared type in a generator type if the function is a generator. 
+ if (FunctionType.isGenerator(functionType)) { + declaredReturnType = getDeclaredGeneratorReturnType(functionType); + } + + // The types of all return statement expressions were already checked + // against the declared type, but we need to verify the implicit None + // at the end of the function. + if (declaredReturnType && !functionNeverReturns && implicitlyReturnsNone) { + if (isNever(declaredReturnType)) { + // If the function consists entirely of "...", assume that it's + // an abstract method or a protocol method and don't require that + // the return type matches. This check can also be skipped for an overload. + if (!ParseTreeUtils.isSuiteEmpty(node.d.suite) && !FunctionType.isOverloaded(functionType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportReturnType, + LocMessage.noReturnReturnsNone(), + returnAnnotation + ); + } + } else if (!FunctionType.isAbstractMethod(functionType)) { + // If the function consists entirely of "...", assume that it's + // an abstract method or a protocol method and don't require that + // the return type matches. This check can also be skipped for an overload. + const isEmptySuite = + ParseTreeUtils.isSuiteEmpty(node.d.suite) || FunctionType.isOverloaded(functionType); + + // Make sure that the function doesn't implicitly return None if the declared + // type doesn't allow it. Skip this check for abstract methods. + const diagAddendum = isEmptySuite ? undefined : new DiagnosticAddendum(); + + // If the declared type isn't compatible with 'None', flag an error. 
+ if (!this._evaluator.assignType(declaredReturnType, this._evaluator.getNoneType(), diagAddendum)) { + if (!isEmptySuite) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportReturnType, + LocMessage.returnMissing().format({ + returnType: this._evaluator.printType(declaredReturnType), + }) + diagAddendum?.getString(), + returnAnnotation + ); + } + } + } + } + } else { + const inferredReturnType = this._evaluator.getInferredReturnType(functionType); + this._reportUnknownReturnResult(node, inferredReturnType); + this._validateReturnTypeIsNotContravariant(inferredReturnType, node.d.name); + } + } + + private _validateReturnTypeIsNotContravariant(returnType: Type, errorNode: ExpressionNode) { + let isContraTypeVar = false; + + doForEachSubtype(returnType, (subtype) => { + if ( + isTypeVar(subtype) && + subtype.shared.declaredVariance === Variance.Contravariant && + subtype.priv.scopeType === TypeVarScopeType.Class + ) { + isContraTypeVar = true; + } + }); + + if (isContraTypeVar) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.returnTypeContravariant(), + errorNode + ); + } + } + + private _reportUnknownReturnResult(node: FunctionNode, returnType: Type) { + if (isUnknown(returnType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnknownParameterType, + LocMessage.returnTypeUnknown(), + node.d.name + ); + } else if (isPartlyUnknown(returnType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnknownParameterType, + LocMessage.returnTypePartiallyUnknown().format({ + returnType: this._evaluator.printType(returnType, { expandTypeAlias: true }), + }), + node.d.name + ); + } + } + + // Validates that any overridden member variables are not marked + // as Final in parent classes. 
+ private _validateFinalMemberOverrides(classType: ClassType) { + ClassType.getSymbolTable(classType).forEach((localSymbol, name) => { + const parentSymbol = lookUpClassMember(classType, name, MemberAccessFlags.SkipOriginalClass); + + if (parentSymbol && isInstantiableClass(parentSymbol.classType) && !SymbolNameUtils.isPrivateName(name)) { + // Did the parent class explicitly declare the variable as final? + if (this._evaluator.isFinalVariable(parentSymbol.symbol)) { + const decl = localSymbol.getDeclarations()[0]; + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.finalRedeclarationBySubclass().format({ + name, + className: parentSymbol.classType.shared.name, + }), + decl.node + ); + } else if ( + ClassType.hasNamedTupleEntry(parentSymbol.classType, name) && + !SymbolNameUtils.isDunderName(name) + ) { + // If the parent class is a named tuple, all instance variables + // (other than dundered ones) are implicitly final. + const decl = localSymbol.getDeclarations()[0]; + + if (decl.type === DeclarationType.Variable) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleVariableOverride, + LocMessage.namedTupleEntryRedeclared().format({ + name, + className: parentSymbol.classType.shared.name, + }), + decl.node + ); + } + } + } + }); + } + + // Validates that the values associated with enum members are type compatible. + // Also looks for duplicate values. + private _validateEnumMembers(classType: ClassType, node: ClassNode) { + if (!ClassType.isEnumClass(classType) || ClassType.isBuiltIn(classType)) { + return; + } + + // Does the "_value_" field have a declared type? If so, we'll enforce it. + const declaredValueType = getEnumDeclaredValueType(this._evaluator, classType, /* declaredTypesOnly */ true); + + // Is there a custom "__new__" and/or "__init__" method? If so, we'll + // verify that the signature of these calls is compatible with the values. 
+ let newMemberTypeResult = getBoundNewMethod( + this._evaluator, + node.d.name, + classType, + /* diag */ undefined, + MemberAccessFlags.SkipObjectBaseClass + ); + + // If this __new__ comes from a built-in class like Enum, we'll ignore it. + if (newMemberTypeResult?.classType) { + if (isClass(newMemberTypeResult.classType) && ClassType.isBuiltIn(newMemberTypeResult.classType)) { + newMemberTypeResult = undefined; + } + } + + let initMemberTypeResult = getBoundInitMethod( + this._evaluator, + node.d.name, + ClassType.cloneAsInstance(classType), + /* diag */ undefined, + MemberAccessFlags.SkipObjectBaseClass + ); + + // If this __init__ comes from a built-in class like Enum, we'll ignore it. + if (initMemberTypeResult?.classType) { + if (isClass(initMemberTypeResult.classType) && ClassType.isBuiltIn(initMemberTypeResult.classType)) { + initMemberTypeResult = undefined; + } + } + + ClassType.getSymbolTable(classType).forEach((symbol, name) => { + // Determine whether this is an enum member. We ignore the presence + // of an annotation in this case because the runtime does. From a + // type checking perspective, if the runtime treats the assignment + // as an enum member but there is a type annotation present, it is + // considered a type checking error. + const symbolType = transformTypeForEnumMember( + this._evaluator, + classType, + name, + /* ignoreAnnotation */ true + ); + + // Is this symbol a literal instance of the enum class? + if ( + !symbolType || + !isClassInstance(symbolType) || + !ClassType.isSameGenericClass(symbolType, ClassType.cloneAsInstance(classType)) || + !(symbolType.priv.literalValue instanceof EnumLiteral) + ) { + return; + } + + // Enum members should not have type annotations. 
+ const typedDecls = symbol.getTypedDeclarations(); + if (typedDecls.length > 0) { + if (typedDecls[0].type === DeclarationType.Variable && typedDecls[0].inferredTypeSource) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.enumMemberTypeAnnotation(), + typedDecls[0].node + ); + } + return; + } + + // Look for a duplicate assignment. + const decls = symbol.getDeclarations(); + if (decls.length >= 2 && decls[0].type === DeclarationType.Variable) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.duplicateEnumMember().format({ name }), + decls[1].node + ); + + return; + } + + if (decls[0].type !== DeclarationType.Variable) { + return; + } + + // Look for an enum attribute annotated with "Final". + if (decls[0].isFinal) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.enumMemberTypeAnnotation(), + decls[0].node + ); + } + + const declNode = decls[0].node; + const assignedValueType = symbolType.priv.literalValue.itemType; + const assignmentNode = ParseTreeUtils.getParentNodeOfType( + declNode, + ParseNodeType.Assignment + ); + const errorNode = assignmentNode?.d.rightExpr ?? declNode; + + // Validate the __new__ and __init__ methods if present. + if (newMemberTypeResult || initMemberTypeResult) { + if (!isAnyOrUnknown(assignedValueType)) { + // Construct an argument list. If the assigned type is a tuple, we'll + // unpack it. Otherwise, only one argument is passed. + const argList: Arg[] = [ + { + argCategory: + isClassInstance(assignedValueType) && isTupleClass(assignedValueType) + ? 
ArgCategory.UnpackedList + : ArgCategory.Simple, + typeResult: { type: assignedValueType }, + }, + ]; + + if (newMemberTypeResult) { + this._evaluator.validateCallArgs( + errorNode, + argList, + newMemberTypeResult, + /* constraints */ undefined, + /* skipUnknownArgCheck */ undefined, + /* inferenceContext */ undefined + ); + } + + if (initMemberTypeResult) { + this._evaluator.validateCallArgs( + errorNode, + argList, + initMemberTypeResult, + /* constraints */ undefined, + /* skipUnknownArgCheck */ undefined, + /* inferenceContext */ undefined + ); + } + } + } else if (declaredValueType) { + const diag = new DiagnosticAddendum(); + + // If the assigned value is already an instance of this enum class, skip this check. + if ( + !isClassInstance(assignedValueType) || + !ClassType.isSameGenericClass(assignedValueType, classType) + ) { + if (!this._evaluator.assignType(declaredValueType, assignedValueType, diag)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportAssignmentType, + LocMessage.typeAssignmentMismatch().format( + this._evaluator.printSrcDestTypes(assignedValueType, declaredValueType) + ) + diag.getString(), + errorNode + ); + } + } + } + }); + } + + // If a class is a dataclass with a `__post_init__` method, verify that its + // signature is correct. + private _validateDataClassPostInit(classType: ClassType) { + if (!ClassType.isDataClass(classType)) { + return; + } + + const postInitMember = lookUpClassMember( + classType, + '__post_init__', + MemberAccessFlags.SkipBaseClasses | MemberAccessFlags.DeclaredTypesOnly + ); + + // If there's no __post_init__ method, there's nothing to check. + if (!postInitMember) { + return; + } + + // If the class derives from Any, we can't reliably apply the check. + if (ClassType.derivesFromAnyOrUnknown(classType)) { + return; + } + + // Collect the list of init-only variables in the order they were declared. 
+ const initOnlySymbolMap = new Map(); + ClassType.getReverseMro(classType).forEach((mroClass) => { + if (isClass(mroClass) && ClassType.isDataClass(mroClass)) { + ClassType.getSymbolTable(mroClass).forEach((symbol, name) => { + if (symbol.isInitVar()) { + initOnlySymbolMap.set(name, symbol); + } + }); + } + }); + + const postInitType = this._evaluator.getTypeOfMember(postInitMember); + if ( + !isFunction(postInitType) || + !FunctionType.isInstanceMethod(postInitType) || + !postInitType.shared.declaration + ) { + return; + } + + const paramListDetails = getParamListDetails(postInitType); + // If there is an *args or **kwargs parameter or a keyword-only separator, + // don't bother checking. + if ( + paramListDetails.argsIndex !== undefined || + paramListDetails.kwargsIndex !== undefined || + paramListDetails.firstKeywordOnlyIndex !== undefined + ) { + return; + } + + // Verify that the parameter count matches. + const nonDefaultParams = paramListDetails.params.filter( + (paramInfo, index) => FunctionType.getParamDefaultType(postInitType, index) === undefined + ); + + // We expect to see one param for "self" plus one for each of the InitVars. + const expectedParamCount = initOnlySymbolMap.size + 1; + const postInitNode = postInitType.shared.declaration.node; + + if (expectedParamCount < nonDefaultParams.length || expectedParamCount > paramListDetails.params.length) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassPostInitParamCount().format({ expected: initOnlySymbolMap.size }), + postInitNode.d.name + ); + } + + // Verify that the parameter types match. + let paramIndex = 1; + + initOnlySymbolMap.forEach((symbol, fieldName) => { + if (paramIndex >= paramListDetails.params.length) { + return; + } + + const param = paramListDetails.params[paramIndex].param; + const paramNode = postInitNode.d.params.find((node) => node.d.name?.d.value === param.name); + const annotationNode = paramNode?.d.annotation ?? 
paramNode?.d.annotationComment; + + if (FunctionParam.isTypeDeclared(param) && annotationNode) { + const fieldType = this._evaluator.getDeclaredTypeOfSymbol(symbol)?.type; + const paramType = FunctionType.getParamType(postInitType, paramListDetails.params[paramIndex].index); + const assignTypeDiag = new DiagnosticAddendum(); + + if (fieldType && !this._evaluator.assignType(paramType, fieldType, assignTypeDiag)) { + const diagnostic = this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassPostInitType().format({ fieldName }) + assignTypeDiag.getString(), + annotationNode + ); + + if (diagnostic) { + const fieldDecls = symbol.getTypedDeclarations(); + if (fieldDecls.length > 0) { + diagnostic.addRelatedInfo( + LocAddendum.dataClassFieldLocation(), + fieldDecls[0].uri, + fieldDecls[0].range + ); + } + } + } + } + + paramIndex++; + }); + } + + // If a class is marked final, it must implement all abstract methods, + // otherwise it is of no use. + private _validateFinalClassNotAbstract(classType: ClassType, errorNode: ClassNode) { + if (!ClassType.isFinal(classType)) { + return; + } + + if (!ClassType.supportsAbstractMethods(classType)) { + return; + } + + const abstractSymbols = this._evaluator.getAbstractSymbols(classType); + if (abstractSymbols.length === 0) { + return; + } + + const diagAddendum = new DiagnosticAddendum(); + const errorsToDisplay = 2; + + abstractSymbols.forEach((abstractMethod, index) => { + if (index === errorsToDisplay) { + diagAddendum.addMessage( + LocAddendum.memberIsAbstractMore().format({ + count: abstractSymbols.length - errorsToDisplay, + }) + ); + } else if (index < errorsToDisplay) { + if (isInstantiableClass(abstractMethod.classType)) { + const className = abstractMethod.classType.shared.name; + diagAddendum.addMessage( + LocAddendum.memberIsAbstract().format({ + type: className, + name: abstractMethod.symbolName, + }) + ); + } + } + }); + + this._evaluator.addDiagnostic( + 
DiagnosticRule.reportGeneralTypeIssues, + LocMessage.finalClassIsAbstract().format({ + type: classType.shared.name, + }) + diagAddendum.getString(), + errorNode.d.name + ); + } + + // Reports the case where an instance variable is not declared or initialized + // within the class body or constructor method. + private _validateInstanceVariableInitialization(node: ClassNode, classType: ClassType) { + // This check doesn't apply to stub files. + if (this._fileInfo.isStubFile) { + return; + } + + // This check can be expensive, so don't perform it if the corresponding + // rule is disabled. + if (this._fileInfo.diagnosticRuleSet.reportUninitializedInstanceVariable === 'none') { + return; + } + + // Protocol classes and ABCs are exempted from this check unless they are + // marked @final. + if ( + ClassType.isProtocolClass(classType) || + (ClassType.supportsAbstractMethods(classType) && !ClassType.isFinal(classType)) + ) { + return; + } + + // If the class is final, see if it has any abstract base classes that define + // variables. We need to make sure these are initialized. + const abstractSymbols = new Map(); + if (ClassType.isFinal(classType)) { + getProtocolSymbolsRecursive(classType, abstractSymbols, ClassTypeFlags.SupportsAbstractMethods); + } + + // If this is a dataclass, get all of the entries so we can tell which + // ones are initialized by the synthesized __init__ method. + const dataClassEntries: DataClassEntry[] = []; + if (ClassType.isDataClass(classType)) { + addInheritedDataClassEntries(classType, dataClassEntries); + } + + ClassType.getSymbolTable(classType).forEach((localSymbol, name) => { + abstractSymbols.delete(name); + + // This applies only to instance members. + if (!localSymbol.isInstanceMember()) { + return; + } + + const decls = localSymbol.getDeclarations(); + + // If the symbol is assigned (or at least declared) within the + // class body or within the __init__ method, it can be ignored. 
+ if ( + decls.find((decl) => { + const containingClass = ParseTreeUtils.getEnclosingClassOrFunction(decl.node); + if (!containingClass) { + return true; + } + + if (containingClass.nodeType === ParseNodeType.Class) { + // If this is part of an assignment statement, assume it has been + // initialized as a class variable. + if (decl.node.parent?.nodeType === ParseNodeType.Assignment) { + return true; + } + + if ( + decl.node.parent?.nodeType === ParseNodeType.TypeAnnotation && + decl.node.parent.parent?.nodeType === ParseNodeType.Assignment + ) { + return true; + } + + // If this is part of a dataclass, a class handled by a dataclass_transform, + // or a NamedTuple, exempt it because the class variable will be transformed + // into an instance variable in this case. + if (ClassType.isDataClass(classType) || ClassType.hasNamedTupleEntry(classType, name)) { + return true; + } + + // If this is part of a TypedDict, exempt it because the class variables + // are not actually class variables in a TypedDict. + if (ClassType.isTypedDictClass(classType)) { + return true; + } + } + + if (containingClass.d.name.d.value === '__init__') { + return true; + } + + return false; + }) + ) { + return; + } + + // If the symbol is declared by its parent, we can assume it + // is initialized there. + const parentSymbol = lookUpClassMember(classType, name, MemberAccessFlags.SkipOriginalClass); + if (parentSymbol) { + return; + } + + // Report the variable as uninitialized only on the first decl. + this._evaluator.addDiagnostic( + DiagnosticRule.reportUninitializedInstanceVariable, + LocMessage.uninitializedInstanceVariable().format({ name: name }), + decls[0].node + ); + }); + + // See if there are any variables from abstract base classes + // that are not initialized. 
+ const diagAddendum = new DiagnosticAddendum(); + abstractSymbols.forEach((member, name) => { + const decls = member.symbol.getDeclarations(); + + if (decls.length === 0 || !isClass(member.classType)) { + return; + } + + if (decls[0].type !== DeclarationType.Variable) { + return; + } + + // Dataclass fields are typically exempted from this check because + // they have synthesized __init__ methods that initialize these variables. + const dcEntry = dataClassEntries?.find((entry) => entry.name === name); + if (dcEntry) { + if (dcEntry.includeInInit) { + return; + } + } else { + // Do one or more declarations involve assignments? + if (decls.some((decl) => decl.type === DeclarationType.Variable && !!decl.inferredTypeSource)) { + return; + } + } + + diagAddendum.addMessage( + LocAddendum.uninitializedAbstractVariable().format({ + name, + classType: member.classType.shared.name, + }) + ); + }); + + if (!diagAddendum.isEmpty()) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUninitializedInstanceVariable, + LocMessage.uninitializedAbstractVariables().format({ classType: classType.shared.name }) + + diagAddendum.getString(), + node.d.name + ); + } + } + + // Validates that the type variables used in a generic protocol class have + // the proper variance (invariant, covariant, contravariant). See PEP 544 + // for an explanation for why this is important to enforce. + private _validateProtocolTypeParamVariance(errorNode: ClassNode, classType: ClassType) { + // If this protocol has no TypeVars with specified variance, there's nothing to do here. 
+ if (classType.shared.typeParams.length === 0) { + return; + } + + const objectType = this._evaluator.getBuiltInType(errorNode, 'object'); + if (!isInstantiableClass(objectType)) { + return; + } + + const objectObject = ClassType.cloneAsInstance(objectType); + const dummyTypeObject = ClassType.createInstantiable( + '__varianceDummy', + '', + '', + Uri.empty(), + 0, + 0, + undefined, + undefined + ); + + classType.shared.typeParams.forEach((param, paramIndex) => { + // Skip TypeVarTuples and ParamSpecs. + if (isTypeVarTuple(param) || isParamSpec(param)) { + return; + } + + // Skip type variables that have been internally synthesized + // for a variety of reasons. + if (param.shared.isSynthesized) { + return; + } + + // Skip type variables with auto-variance. + if (param.shared.declaredVariance === Variance.Auto) { + return; + } + + // Replace all type arguments with a dummy type except for the + // TypeVar of interest, which is replaced with an object instance. + const srcTypeArgs = classType.shared.typeParams.map((p, i) => { + if (isTypeVarTuple(p)) { + return p; + } + return i === paramIndex ? objectObject : dummyTypeObject; + }); + + // Replace all type arguments with a dummy type except for the + // TypeVar of interest, which is replaced with itself. + const destTypeArgs = classType.shared.typeParams.map((p, i) => { + return i === paramIndex || isTypeVarTuple(p) ? 
p : dummyTypeObject; + }); + + const srcType = ClassType.specialize(classType, srcTypeArgs); + const destType = ClassType.specialize(classType, destTypeArgs); + + const isDestSubtypeOfSrc = this._evaluator.assignClassToSelf(srcType, destType, Variance.Covariant); + + let expectedVariance: Variance; + if (isDestSubtypeOfSrc) { + expectedVariance = Variance.Covariant; + } else { + const isSrcSubtypeOfDest = this._evaluator.assignClassToSelf(destType, srcType, Variance.Contravariant); + if (isSrcSubtypeOfDest) { + expectedVariance = Variance.Contravariant; + } else { + expectedVariance = Variance.Invariant; + } + } + + if (expectedVariance !== classType.shared.typeParams[paramIndex].shared.declaredVariance) { + let message: string; + if (expectedVariance === Variance.Covariant) { + message = LocMessage.protocolVarianceCovariant().format({ + variable: param.shared.name, + class: classType.shared.name, + }); + } else if (expectedVariance === Variance.Contravariant) { + message = LocMessage.protocolVarianceContravariant().format({ + variable: param.shared.name, + class: classType.shared.name, + }); + } else { + message = LocMessage.protocolVarianceInvariant().format({ + variable: param.shared.name, + class: classType.shared.name, + }); + } + + this._evaluator.addDiagnostic(DiagnosticRule.reportInvalidTypeVarUse, message, errorNode.d.name); + } + }); + } + + // Validates that a class variable doesn't conflict with a __slots__ + // name. This will generate a runtime exception. + private _validateSlotsClassVarConflict(classType: ClassType) { + if (!classType.shared.localSlotsNames) { + // Nothing to check, since this class doesn't use __slots__. + return; + } + + // Don't apply this for dataclasses because their class variables + // are transformed into instance variables. 
+ if (ClassType.isDataClass(classType)) { + return; + } + + ClassType.getSymbolTable(classType).forEach((symbol, name) => { + const decls = symbol.getDeclarations(); + const isDefinedBySlots = decls.some( + (decl) => decl.type === DeclarationType.Variable && decl.isDefinedBySlots + ); + + if (isDefinedBySlots) { + decls.forEach((decl) => { + if ( + decl.type === DeclarationType.Variable && + !decl.isDefinedBySlots && + !decl.isDefinedByMemberAccess + ) { + if (decl.node.nodeType === ParseNodeType.Name && ParseTreeUtils.isWriteAccess(decl.node)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.slotsClassVarConflict().format({ name }), + decl.node + ); + } + } + }); + } + }); + } + + // Validates that the __init__ and __new__ method signatures are consistent. + private _validateConstructorConsistency(classType: ClassType, errorNode: ExpressionNode) { + // If the class has a custom metaclass with a __call__ method, skip this check. + const callMethodResult = getBoundCallMethod(this._evaluator, errorNode, classType); + if (callMethodResult) { + return; + } + + const newMethodResult = getBoundNewMethod(this._evaluator, errorNode, classType); + if ( + !newMethodResult || + newMethodResult.typeErrors || + !newMethodResult.classType || + !isClass(newMethodResult.classType) + ) { + return; + } + + const initMethodResult = getBoundInitMethod(this._evaluator, errorNode, ClassType.cloneAsInstance(classType)); + if ( + !initMethodResult || + initMethodResult.typeErrors || + !initMethodResult.classType || + !isClass(initMethodResult.classType) + ) { + return; + } + + // If both the __new__ and __init__ come from subclasses, don't bother + // checking for this class. 
+ if ( + !ClassType.isSameGenericClass(initMethodResult.classType, classType) && + !ClassType.isSameGenericClass(newMethodResult.classType, classType) + ) { + return; + } + + let newMemberType: Type | undefined = newMethodResult.type; + if (!isFunctionOrOverloaded(newMemberType)) { + return; + } + + if (isOverloaded(newMemberType)) { + // Find the implementation, not the overloaded signatures. + newMemberType = OverloadedType.getImplementation(newMemberType); + + if (!newMemberType || !isFunction(newMemberType)) { + return; + } + } + + let initMemberType: Type | undefined = initMethodResult.type; + if (!isFunctionOrOverloaded(initMemberType)) { + return; + } + + if (isOverloaded(initMemberType)) { + // Find the implementation, not the overloaded signatures. + initMemberType = OverloadedType.getImplementation(initMemberType); + + if (!initMemberType || !isFunction(initMemberType)) { + return; + } + } + + // If either of the functions has a default parameter signature + // (* args: Any, ** kwargs: Any), don't proceed with the check. + if (FunctionType.hasDefaultParams(initMemberType) || FunctionType.hasDefaultParams(newMemberType)) { + return; + } + + if ( + !this._evaluator.assignType( + newMemberType, + initMemberType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.SkipReturnTypeCheck + ) || + !this._evaluator.assignType( + initMemberType, + newMemberType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.SkipReturnTypeCheck + ) + ) { + const displayOnInit = ClassType.isSameGenericClass(initMethodResult.classType, classType); + const initDecl = initMemberType.shared.declaration; + const newDecl = newMemberType.shared.declaration; + + if (initDecl && newDecl) { + const mainDecl = displayOnInit ? initDecl : newDecl; + const mainDeclNode = + mainDecl.node.nodeType === ParseNodeType.Function ? 
mainDecl.node.d.name : mainDecl.node; + + const diagAddendum = new DiagnosticAddendum(); + const initSignature = this._evaluator.printType(initMemberType); + const newSignature = this._evaluator.printType(newMemberType); + + diagAddendum.addMessage( + LocAddendum.initMethodSignature().format({ + type: initSignature, + }) + ); + diagAddendum.addMessage( + LocAddendum.newMethodSignature().format({ + type: newSignature, + }) + ); + + const diagnostic = this._evaluator.addDiagnostic( + DiagnosticRule.reportInconsistentConstructor, + LocMessage.constructorParametersMismatch().format({ + classType: this._evaluator.printType( + ClassType.cloneAsInstance( + displayOnInit ? initMethodResult.classType : newMethodResult.classType + ) + ), + }) + diagAddendum.getString(), + mainDeclNode + ); + + if (diagnostic) { + const secondaryDecl = displayOnInit ? newDecl : initDecl; + + diagnostic.addRelatedInfo( + (displayOnInit ? LocAddendum.newMethodLocation() : LocAddendum.initMethodLocation()).format({ + type: this._evaluator.printType( + ClassType.cloneAsInstance( + displayOnInit ? newMethodResult.classType : initMethodResult.classType + ) + ), + }), + secondaryDecl.uri, + secondaryDecl.range + ); + } + } + } + } + + // Verifies that classes that have more than one base class do not have + // have conflicting type arguments. + private _validateMultipleInheritanceBaseClasses(classType: ClassType, errorNode: ParseNode) { + // Skip this check if the class has only one base class or one or more + // of the base classes are Any. 
+ const filteredBaseClasses: ClassType[] = []; + for (const baseClass of classType.shared.baseClasses) { + if (!isClass(baseClass)) { + return; + } + + if (!ClassType.isBuiltIn(baseClass, ['Generic', 'Protocol', 'object'])) { + filteredBaseClasses.push(baseClass); + } + } + + if (filteredBaseClasses.length < 2) { + return; + } + + const diagAddendum = new DiagnosticAddendum(); + + for (const baseClass of filteredBaseClasses) { + const solution = buildSolutionFromSpecializedClass(baseClass); + + for (const baseClassMroClass of baseClass.shared.mro) { + // There's no need to check for conflicts if this class isn't generic. + if (isClass(baseClassMroClass) && baseClassMroClass.shared.typeParams.length > 0) { + const specializedBaseClassMroClass = applySolvedTypeVars(baseClassMroClass, solution) as ClassType; + + // Find the corresponding class in the derived class's MRO list. + const matchingMroClass = classType.shared.mro.find( + (mroClass) => + isClass(mroClass) && ClassType.isSameGenericClass(mroClass, specializedBaseClassMroClass) + ); + + if (matchingMroClass && isInstantiableClass(matchingMroClass)) { + const scopeIds = getTypeVarScopeIds(classType); + const matchingMroObject = makeTypeVarsBound( + ClassType.cloneAsInstance(matchingMroClass), + scopeIds + ); + const baseClassMroObject = makeTypeVarsBound( + ClassType.cloneAsInstance(specializedBaseClassMroClass), + scopeIds + ); + + if (!this._evaluator.assignType(matchingMroObject, baseClassMroObject)) { + const diag = new DiagnosticAddendum(); + const baseClassObject = convertToInstance(baseClass); + + if (isTypeSame(baseClassObject, baseClassMroObject)) { + diag.addMessage( + LocAddendum.baseClassIncompatible().format({ + baseClass: this._evaluator.printType(baseClassObject), + type: this._evaluator.printType(matchingMroObject), + }) + ); + } else { + diag.addMessage( + LocAddendum.baseClassIncompatibleSubclass().format({ + baseClass: this._evaluator.printType(baseClassObject), + subclass: 
this._evaluator.printType(baseClassMroObject), + type: this._evaluator.printType(matchingMroObject), + }) + ); + } + + diagAddendum.addAddendum(diag); + + // Break out of the inner loop so we don't report any redundant errors for this base class. + break; + } + } + } + } + } + + if (!diagAddendum.isEmpty()) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.baseClassIncompatible().format({ type: classType.shared.name }) + diagAddendum.getString(), + errorNode + ); + } + } + + // Validates that any methods and variables in multiple base classes are + // compatible with each other. + private _validateMultipleInheritanceCompatibility(classType: ClassType, errorNode: ParseNode) { + // Skip this check if reportIncompatibleMethodOverride and reportIncompatibleVariableOverride + // are disabled because it's a relatively expensive check. + if ( + this._fileInfo.diagnosticRuleSet.reportIncompatibleMethodOverride === 'none' && + this._fileInfo.diagnosticRuleSet.reportIncompatibleVariableOverride === 'none' + ) { + return; + } + + const baseClasses: ClassType[] = []; + + // Filter any unknown base classes. Also remove Generic and Protocol + // base classes. + classType.shared.baseClasses.forEach((baseClass) => { + if ( + isClass(baseClass) && + !ClassType.isBuiltIn(baseClass, 'Generic') && + !ClassType.isBuiltIn(baseClass, 'Protocol') + ) { + baseClasses.push(baseClass); + } + }); + + // If there is only one base class, there's nothing to do. + if (baseClasses.length < 2) { + return; + } + + // Build maps of symbols for each of the base classes. + const baseClassSymbolMaps = baseClasses.map((baseClass) => { + const specializedBaseClass = classType.shared.mro.find( + (c) => isClass(c) && ClassType.isSameGenericClass(c, baseClass) + ); + if (!specializedBaseClass || !isClass(specializedBaseClass)) { + return new Map(); + } + + // Retrieve all of the specialized symbols from the base class and its ancestors. 
+ return getClassFieldsRecursive(specializedBaseClass); + }); + + const childClassSymbolMap = getClassFieldsRecursive(classType); + + for (let symbolMapBaseIndex = 1; symbolMapBaseIndex < baseClassSymbolMaps.length; symbolMapBaseIndex++) { + const baseSymbolMap = baseClassSymbolMaps[symbolMapBaseIndex]; + + for (const [name, overriddenClassAndSymbol] of baseSymbolMap) { + // Special-case dundered methods, which can differ in signature. Also + // exempt private symbols. + if (SymbolNameUtils.isDunderName(name) || SymbolNameUtils.isPrivateName(name)) { + continue; + } + + const overriddenClassType = overriddenClassAndSymbol.classType; + if (!isClass(overriddenClassType)) { + continue; + } + + const overrideClassAndSymbol = childClassSymbolMap.get(name); + + if (overrideClassAndSymbol) { + const overrideClassType = overrideClassAndSymbol.classType; + + // If the override is the same as the overridden, then there's nothing + // to check. If the override is the child class, then we can also skip + // the check because the normal override checks will report the error. + if ( + !isClass(overrideClassType) || + ClassType.isSameGenericClass(overrideClassType, overriddenClassType) || + ClassType.isSameGenericClass(overrideClassType, classType) + ) { + continue; + } + + this._validateMultipleInheritanceOverride( + overriddenClassAndSymbol, + overrideClassAndSymbol, + classType, + name, + errorNode + ); + } + } + } + } + + private _validateMultipleInheritanceOverride( + overriddenClassAndSymbol: ClassMember, + overrideClassAndSymbol: ClassMember, + childClassType: ClassType, + memberName: string, + errorNode: ParseNode + ) { + if (!isClass(overriddenClassAndSymbol.classType) || !isClass(overrideClassAndSymbol.classType)) { + return; + } + + // Special case the '_' symbol, which is used in single dispatch + // code and other cases where the name does not matter. 
+ if (memberName === '_') { + return; + } + + let overriddenType = this._evaluator.getEffectiveTypeOfSymbol(overriddenClassAndSymbol.symbol); + overriddenType = partiallySpecializeType( + overriddenType, + overriddenClassAndSymbol.classType, + this._evaluator.getTypeClassType() + ); + + const overrideSymbol = overrideClassAndSymbol.symbol; + let overrideType = this._evaluator.getEffectiveTypeOfSymbol(overrideSymbol); + overrideType = partiallySpecializeType( + overrideType, + overrideClassAndSymbol.classType, + this._evaluator.getTypeClassType() + ); + + const childOverrideSymbol = ClassType.getSymbolTable(childClassType).get(memberName); + const childOverrideType = childOverrideSymbol + ? this._evaluator.getEffectiveTypeOfSymbol(childOverrideSymbol) + : undefined; + + let diag: Diagnostic | undefined; + const overrideDecl = getLastTypedDeclarationForSymbol(overrideClassAndSymbol.symbol); + const overriddenDecl = getLastTypedDeclarationForSymbol(overriddenClassAndSymbol.symbol); + + if (isFunctionOrOverloaded(overriddenType)) { + const diagAddendum = new DiagnosticAddendum(); + + if (isFunctionOrOverloaded(overrideType)) { + if ( + !this._evaluator.validateOverrideMethod( + overriddenType, + overrideType, + /* baseClass */ undefined, + diagAddendum, + /* enforceParamNameMatch */ true + ) + ) { + if (overrideDecl && overrideDecl.type === DeclarationType.Function) { + diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleMethodOverride, + LocMessage.baseClassMethodTypeIncompatible().format({ + classType: childClassType.shared.name, + name: memberName, + }) + diagAddendum.getString(), + errorNode + ); + } + } + } + } else if (isProperty(overriddenType)) { + // Handle properties specially. 
+ if (!isProperty(overrideType) && !isAnyOrUnknown(overrideType)) { + const decls = overrideSymbol.getDeclarations(); + if (decls.length > 0) { + diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleVariableOverride, + LocMessage.baseClassVariableTypeIncompatible().format({ + classType: childClassType.shared.name, + name: memberName, + }), + errorNode + ); + } + } else { + this._validateMultipleInheritancePropertyOverride( + overriddenClassAndSymbol.classType, + childClassType, + overriddenType, + overrideType, + overrideSymbol, + memberName, + errorNode + ); + } + } else { + // This check can be expensive, so don't perform it if the corresponding + // rule is disabled. + if (this._fileInfo.diagnosticRuleSet.reportIncompatibleVariableOverride !== 'none') { + const primaryDecl = getLastTypedDeclarationForSymbol(overriddenClassAndSymbol.symbol); + let isInvariant = primaryDecl?.type === DeclarationType.Variable && !primaryDecl.isFinal; + + // If the entry is a member of a frozen dataclass, it is immutable, + // so it does not need to be invariant. + if ( + ClassType.isDataClassFrozen(overriddenClassAndSymbol.classType) && + overriddenClassAndSymbol.classType.shared.dataClassEntries + ) { + const dataclassEntry = overriddenClassAndSymbol.classType.shared.dataClassEntries.find( + (entry) => entry.name === memberName + ); + if (dataclassEntry) { + isInvariant = false; + } + } + + let overriddenTDEntry: TypedDictEntry | undefined; + if (overriddenClassAndSymbol.classType.shared.typedDictEntries) { + overriddenTDEntry = + overriddenClassAndSymbol.classType.shared.typedDictEntries.knownItems.get(memberName) ?? + overriddenClassAndSymbol.classType.shared.typedDictEntries.extraItems ?? 
+ getEffectiveExtraItemsEntryType(this._evaluator, overriddenClassAndSymbol.classType); + + if (overriddenTDEntry?.isReadOnly) { + isInvariant = false; + } + } + + let overrideTDEntry: TypedDictEntry | undefined; + if (overrideClassAndSymbol.classType.shared.typedDictEntries) { + overrideTDEntry = + overrideClassAndSymbol.classType.shared.typedDictEntries.knownItems.get(memberName) ?? + overrideClassAndSymbol.classType.shared.typedDictEntries.extraItems ?? + getEffectiveExtraItemsEntryType(this._evaluator, overrideClassAndSymbol.classType); + } + + if ( + !this._evaluator.assignType( + overriddenType, + childOverrideType ?? overrideType, + /* diag */ undefined, + /* constraints */ undefined, + isInvariant ? AssignTypeFlags.Invariant : AssignTypeFlags.Default + ) + ) { + diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleVariableOverride, + LocMessage.baseClassVariableTypeIncompatible().format({ + classType: childClassType.shared.name, + name: memberName, + }), + errorNode + ); + } else if (overriddenTDEntry && overrideTDEntry) { + let isRequiredCompatible: boolean; + let isReadOnlyCompatible = true; + + // If both classes are TypedDicts and they both define this field, + // make sure the attributes are compatible. + if (overriddenTDEntry.isReadOnly) { + isRequiredCompatible = overrideTDEntry.isRequired || !overriddenTDEntry.isRequired; + } else { + isReadOnlyCompatible = !overrideTDEntry.isReadOnly; + isRequiredCompatible = overrideTDEntry.isRequired === overriddenTDEntry.isRequired; + } + + if (!isRequiredCompatible) { + const message = overrideTDEntry.isRequired + ? 
LocMessage.typedDictFieldRequiredRedefinition + : LocMessage.typedDictFieldNotRequiredRedefinition; + diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleVariableOverride, + message().format({ name: memberName }), + errorNode + ); + } else if (!isReadOnlyCompatible) { + diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleVariableOverride, + LocMessage.typedDictFieldReadOnlyRedefinition().format({ + name: memberName, + }), + errorNode + ); + } + } + } + } + + if (diag && overrideDecl && overriddenDecl) { + this._addMultipleInheritanceRelatedInfo( + diag, + overriddenClassAndSymbol.classType, + overriddenType, + overriddenDecl, + overrideClassAndSymbol.classType, + overrideType, + overrideDecl + ); + } + } + + private _addMultipleInheritanceRelatedInfo( + diag: Diagnostic, + overriddenClass: ClassType, + overriddenType: Type, + overriddenDecl: Declaration, + overrideClass: ClassType, + overrideType: Type, + overrideDecl: Declaration + ) { + diag.addRelatedInfo( + LocAddendum.baseClassOverriddenType().format({ + baseClass: this._evaluator.printType(convertToInstance(overriddenClass)), + type: this._evaluator.printType(overriddenType), + }), + overriddenDecl.uri, + overriddenDecl.range + ); + + diag.addRelatedInfo( + LocAddendum.baseClassOverridesType().format({ + baseClass: this._evaluator.printType(convertToInstance(overrideClass)), + type: this._evaluator.printType(overrideType), + }), + overrideDecl.uri, + overrideDecl.range + ); + } + + private _validateMultipleInheritancePropertyOverride( + overriddenClassType: ClassType, + overrideClassType: ClassType, + overriddenSymbolType: Type, + overrideSymbolType: Type, + overrideSymbol: Symbol, + memberName: string, + errorNode: ParseNode + ) { + const propMethodInfo: [string, (c: ClassType) => FunctionType | undefined][] = [ + ['fget', (c) => c.priv.fgetInfo?.methodType], + ['fset', (c) => c.priv.fsetInfo?.methodType], + ['fdel', (c) => c.priv.fdelInfo?.methodType], + ]; + + 
propMethodInfo.forEach((info) => { + const diagAddendum = new DiagnosticAddendum(); + const [methodName, methodAccessor] = info; + const baseClassPropMethod = methodAccessor(overriddenSymbolType as ClassType); + const subclassPropMethod = methodAccessor(overrideSymbolType as ClassType); + + // Is the method present on the base class but missing in the subclass? + if (baseClassPropMethod) { + const baseClassMethodType = partiallySpecializeType( + baseClassPropMethod, + overriddenClassType, + this._evaluator.getTypeClassType() + ); + + if (isFunction(baseClassMethodType)) { + if (!subclassPropMethod) { + // The method is missing. + diagAddendum.addMessage( + LocAddendum.propertyMethodMissing().format({ + name: methodName, + }) + ); + + const decls = overrideSymbol.getDeclarations(); + + if (decls.length > 0) { + const lastDecl = decls[decls.length - 1]; + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleMethodOverride, + LocMessage.propertyOverridden().format({ + name: memberName, + className: overriddenClassType.shared.name, + }) + diagAddendum.getString(), + errorNode + ); + + const origDecl = baseClassMethodType.shared.declaration; + if (diag && origDecl) { + this._addMultipleInheritanceRelatedInfo( + diag, + overriddenClassType, + overriddenSymbolType, + origDecl, + overrideClassType, + overrideSymbolType, + lastDecl + ); + } + } + } else { + const subclassMethodType = partiallySpecializeType( + subclassPropMethod, + overrideClassType, + this._evaluator.getTypeClassType() + ); + + if (isFunction(subclassMethodType)) { + if ( + !this._evaluator.validateOverrideMethod( + baseClassMethodType, + subclassMethodType, + overrideClassType, + diagAddendum.createAddendum() + ) + ) { + diagAddendum.addMessage( + LocAddendum.propertyMethodIncompatible().format({ + name: methodName, + }) + ); + const decl = subclassMethodType.shared.declaration; + + if (decl && decl.type === DeclarationType.Function) { + const diag = 
this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleMethodOverride, + LocMessage.propertyOverridden().format({ + name: memberName, + className: overriddenClassType.shared.name, + }) + diagAddendum.getString(), + errorNode + ); + + const origDecl = baseClassMethodType.shared.declaration; + if (diag && origDecl) { + this._addMultipleInheritanceRelatedInfo( + diag, + overriddenClassType, + overriddenSymbolType, + origDecl, + overrideClassType, + overrideSymbolType, + decl + ); + } + } + } + } + } + } + } + }); + } + + // Validates that any overloaded methods are consistent in how they + // are decorated. For example, if the first overload is not marked @final + // but subsequent ones are, an error should be reported. + private _validateOverloadDecoratorConsistency(classType: ClassType) { + ClassType.getSymbolTable(classType).forEach((symbol, name) => { + const primaryDecl = getLastTypedDeclarationForSymbol(symbol); + + if (!primaryDecl || primaryDecl.type !== DeclarationType.Function) { + return; + } + + const typeOfSymbol = this._evaluator.getEffectiveTypeOfSymbol(symbol); + + if (!isOverloaded(typeOfSymbol)) { + return; + } + + const overloads = OverloadedType.getOverloads(typeOfSymbol); + const implementation = OverloadedType.getImplementation(typeOfSymbol); + + this._validateOverloadFinalOverride(overloads, implementation); + + this._validateOverloadAbstractConsistency(overloads, implementation); + }); + } + + private _validateOverloadAbstractConsistency(overloads: FunctionType[], implementation: Type | undefined) { + // If there's an implementation, it will determine whether the + // function is abstract. 
+ if (implementation && isFunction(implementation)) { + const isImplAbstract = FunctionType.isAbstractMethod(implementation); + if (isImplAbstract) { + return; + } + + overloads.forEach((overload) => { + const decl = overload.shared.declaration; + + if (FunctionType.isAbstractMethod(overload) && decl) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportInconsistentOverload, + LocMessage.overloadAbstractImplMismatch().format({ + name: overload.shared.name, + }), + getNameNodeForDeclaration(decl) ?? decl.node + ); + } + }); + return; + } + + if (overloads.length < 2) { + return; + } + + // If there was no implementation, make sure all overloads are either + // abstract or not abstract. + const isFirstOverloadAbstract = FunctionType.isAbstractMethod(overloads[0]); + + overloads.slice(1).forEach((overload, index) => { + if (FunctionType.isAbstractMethod(overload) !== isFirstOverloadAbstract && overload.shared.declaration) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportInconsistentOverload, + LocMessage.overloadAbstractMismatch().format({ + name: overload.shared.name, + }), + getNameNodeForDeclaration(overload.shared.declaration) ?? overload.shared.declaration.node + ); + } + }); + } + + private _validateOverloadFinalOverride(overloads: FunctionType[], implementation: Type | undefined) { + // If there's an implementation, the overloads are not allowed to be marked final or override. + if (implementation) { + overloads.forEach((overload) => { + if (FunctionType.isFinal(overload) && overload.shared.declaration?.node) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportInconsistentOverload, + LocMessage.overloadFinalImpl(), + getNameNodeForDeclaration(overload.shared.declaration) ?? 
overload.shared.declaration.node + ); + } + + if (FunctionType.isOverridden(overload) && overload.shared.declaration?.node) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportInconsistentOverload, + LocMessage.overloadOverrideImpl(), + getNameNodeForDeclaration(overload.shared.declaration) ?? overload.shared.declaration.node + ); + } + }); + + return; + } + + // If there's not an implementation, only the first overload can be marked final. + if (overloads.length === 0) { + return; + } + + overloads.slice(1).forEach((overload, index) => { + if (FunctionType.isFinal(overload) && overload.shared.declaration?.node) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportInconsistentOverload, + LocMessage.overloadFinalNoImpl(), + getNameNodeForDeclaration(overload.shared.declaration) ?? overload.shared.declaration.node + ); + } + + if (FunctionType.isOverridden(overload) && overload.shared.declaration?.node) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportInconsistentOverload, + LocMessage.overloadOverrideNoImpl(), + getNameNodeForDeclaration(overload.shared.declaration) ?? overload.shared.declaration.node + ); + } + }); + } + + // For a TypedDict class that derives from another TypedDict class + // that is closed, verify that any new keys are compatible with the + // base class. 
+ private _validateTypedDictOverrides(classType: ClassType) { + if (!ClassType.isTypedDictClass(classType)) { + return; + } + + const typedDictEntries = getTypedDictMembersForClass(this._evaluator, classType, /* allowNarrowed */ false); + + for (const baseClass of classType.shared.baseClasses) { + const diag = new DiagnosticAddendum(); + + if ( + !isClass(baseClass) || + !ClassType.isTypedDictClass(baseClass) || + !ClassType.isTypedDictEffectivelyClosed(baseClass) + ) { + continue; + } + + const baseTypedDictEntries = getTypedDictMembersForClass( + this._evaluator, + baseClass, + /* allowNarrowed */ false + ); + + const solution = buildSolutionFromSpecializedClass(baseClass); + + const baseExtraItemsType = baseTypedDictEntries.extraItems + ? applySolvedTypeVars(baseTypedDictEntries.extraItems.valueType, solution) + : UnknownType.create(); + + for (const [name, entry] of typedDictEntries.knownItems) { + const baseEntry = baseTypedDictEntries.knownItems.get(name); + + if (!baseEntry) { + if (!baseTypedDictEntries.extraItems || isNever(baseTypedDictEntries.extraItems.valueType)) { + diag.addMessage( + LocAddendum.typedDictClosedExtraNotAllowed().format({ + name, + }) + ); + } else if ( + !this._evaluator.assignType( + baseExtraItemsType, + entry.valueType, + /* diag */ undefined, + /* constraints */ undefined, + !baseTypedDictEntries.extraItems.isReadOnly + ? 
AssignTypeFlags.Invariant + : AssignTypeFlags.Default + ) + ) { + diag.addMessage( + LocAddendum.typedDictClosedExtraTypeMismatch().format({ + name, + type: this._evaluator.printType(entry.valueType), + }) + ); + } else if (!baseTypedDictEntries.extraItems.isReadOnly && entry.isReadOnly) { + diag.addMessage( + LocAddendum.typedDictClosedFieldNotReadOnly().format({ + name, + }) + ); + } else if (!baseTypedDictEntries.extraItems.isReadOnly && entry.isRequired) { + diag.addMessage( + LocAddendum.typedDictClosedFieldNotRequired().format({ + name, + }) + ); + } + } + } + + if (typedDictEntries.extraItems && baseTypedDictEntries.extraItems) { + if ( + !this._evaluator.assignType( + baseExtraItemsType, + typedDictEntries.extraItems.valueType, + /* diag */ undefined, + /* constraints */ undefined, + !baseTypedDictEntries.extraItems.isReadOnly + ? AssignTypeFlags.Invariant + : AssignTypeFlags.Default + ) + ) { + diag.addMessage( + LocAddendum.typedDictClosedExtraTypeMismatch().format({ + name: 'extra_items', + type: this._evaluator.printType(typedDictEntries.extraItems.valueType), + }) + ); + } + } + + if (!diag.isEmpty() && classType.shared.declaration) { + const declNode = getNameNodeForDeclaration(classType.shared.declaration); + + if (declNode) { + if (baseTypedDictEntries.extraItems) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleVariableOverride, + LocMessage.typedDictClosedExtras().format({ + name: baseClass.shared.name, + type: this._evaluator.printType(baseExtraItemsType), + }) + diag.getString(), + declNode + ); + } else { + this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleVariableOverride, + LocMessage.typedDictClosedNoExtras().format({ + name: baseClass.shared.name, + }) + diag.getString(), + declNode + ); + } + } + } + } + } + + // Validates that any overridden methods or variables contain the same + // types as the original method. Also marks the class as abstract if one + // or more abstract methods are not overridden. 
+ private _validateBaseClassOverrides(classType: ClassType) { + ClassType.getSymbolTable(classType).forEach((symbol, name) => { + // Private symbols do not need to match in type since their + // names are mangled, and subclasses can't access the value in + // the parent class. + if (SymbolNameUtils.isPrivateName(name)) { + return; + } + + // If the symbol has no declaration, and the type is inferred, + // skip the type validation but still check for other issues like + // Final overrides and class/instance variable mismatches. + let validateType = true; + if (!symbol.hasTypedDeclarations()) { + validateType = false; + } + + // Get the symbol type defined in this class. + const typeOfSymbol = this._evaluator.getEffectiveTypeOfSymbol(symbol); + + // If the type of the override symbol isn't known, stop here. + if (isAnyOrUnknown(typeOfSymbol)) { + return; + } + + let firstOverride: ClassMember | undefined; + + for (const baseClass of classType.shared.baseClasses) { + if (!isClass(baseClass)) { + continue; + } + + // Look up the base class in the MRO list. It's the same generic class + // but has already been specialized using the type variables of the classType. + const mroBaseClass = classType.shared.mro.find( + (mroClass) => isClass(mroClass) && ClassType.isSameGenericClass(mroClass, baseClass) + ); + if (!mroBaseClass) { + continue; + } + + assert(isClass(mroBaseClass)); + const baseClassAndSymbol = lookUpClassMember(mroBaseClass, name, MemberAccessFlags.Default); + if (!baseClassAndSymbol) { + continue; + } + + firstOverride = firstOverride ?? baseClassAndSymbol; + + this._validateBaseClassOverride( + baseClassAndSymbol, + symbol, + validateType ? typeOfSymbol : AnyType.create(), + classType, + name + ); + } + + if (!firstOverride) { + // If this is a method decorated with @override, validate that there + // is a base class method of the same name. 
+ this._validateOverrideDecoratorNotPresent(symbol, typeOfSymbol); + } else { + this._validateOverrideDecoratorPresent(symbol, typeOfSymbol, firstOverride); + } + }); + } + + private _validateOverrideDecoratorPresent(symbol: Symbol, overrideType: Type, baseMember: ClassMember) { + // Skip this check if disabled. + if (this._fileInfo.diagnosticRuleSet.reportImplicitOverride === 'none') { + return; + } + + let overrideFunction: FunctionType | undefined; + + if (isFunction(overrideType)) { + overrideFunction = overrideType; + } else if (isOverloaded(overrideType)) { + const impl = OverloadedType.getImplementation(overrideType); + if (impl && isFunction(impl)) { + overrideFunction = impl; + } + } else if (isClassInstance(overrideType) && ClassType.isPropertyClass(overrideType)) { + if (overrideType.priv.fgetInfo) { + overrideFunction = overrideType.priv.fgetInfo.methodType; + } + } + + if (!overrideFunction?.shared.declaration || FunctionType.isOverridden(overrideFunction)) { + return; + } + + // Constructors are exempt. + if (this._isMethodExemptFromLsp(overrideFunction.shared.name)) { + return; + } + + // If the declaration for the override function is not the same as the + // declaration for the symbol, the function was probably replaced by a decorator. + if (!symbol.getDeclarations().some((decl) => decl === overrideFunction!.shared.declaration)) { + return; + } + + // If the base class is unknown, don't report a missing decorator. + if (isAnyOrUnknown(baseMember.classType)) { + return; + } + + const funcNode = overrideFunction.shared.declaration.node; + this._evaluator.addDiagnostic( + DiagnosticRule.reportImplicitOverride, + LocMessage.overrideDecoratorMissing().format({ + name: funcNode.d.name.d.value, + className: this._evaluator.printType(convertToInstance(baseMember.classType)), + }), + funcNode.d.name + ); + } + + // Determines whether the name is exempt from Liskov Substitution Principle rules. 
+ private _isMethodExemptFromLsp(name: string): boolean { + const exemptMethods = ['__init__', '__new__', '__init_subclass__', '__post_init__']; + return exemptMethods.some((n) => n === name); + } + + // Determines whether the type is a function or overloaded function with an @override + // decorator. In this case, an error is reported because no base class has declared + // a method of the same name. + private _validateOverrideDecoratorNotPresent(symbol: Symbol, overrideType: Type) { + let overrideFunction: FunctionType | undefined; + + if (isFunction(overrideType)) { + overrideFunction = overrideType; + } else if (isOverloaded(overrideType)) { + const impl = OverloadedType.getImplementation(overrideType); + if (impl && isFunction(impl)) { + overrideFunction = impl; + } + + // If there is no implementation present, use the first overload. + if (!impl) { + const overloads = OverloadedType.getOverloads(overrideType); + if (overloads.length > 0) { + overrideFunction = overloads[0]; + } + } + } else if (isClassInstance(overrideType) && ClassType.isPropertyClass(overrideType)) { + if (overrideType.priv.fgetInfo) { + overrideFunction = overrideType.priv.fgetInfo.methodType; + } + } + + if (!overrideFunction?.shared.declaration || !FunctionType.isOverridden(overrideFunction)) { + return; + } + + // If the declaration for the override function is not the same as the + // declaration for the symbol, the function was probably replaced by a decorator. 
+ if (!symbol.getDeclarations().some((decl) => decl === overrideFunction!.shared.declaration)) { + return; + } + + const funcNode = overrideFunction.shared.declaration.node; + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.overriddenMethodNotFound().format({ name: funcNode.d.name.d.value }), + funcNode.d.name + ); + } + + private _validateBaseClassOverride( + baseClassAndSymbol: ClassMember, + overrideSymbol: Symbol, + overrideType: Type, + childClassType: ClassType, + memberName: string + ) { + if (!isInstantiableClass(baseClassAndSymbol.classType)) { + return; + } + + if (baseClassAndSymbol.symbol.isIgnoredForOverrideChecks() || overrideSymbol.isIgnoredForOverrideChecks()) { + return; + } + + // If the base class doesn't provide a type declaration, we won't bother + // proceeding with additional checks. Type inference is too inaccurate + // in this case, plus it would be very slow. + if (!baseClassAndSymbol.symbol.hasTypedDeclarations()) { + return; + } + + // Special case the '_' symbol, which is used in single dispatch + // code and other cases where the name does not matter. + if (memberName === '_') { + return; + } + + const baseClass = baseClassAndSymbol.classType; + const childClassSelf = ClassType.cloneAsInstance( + selfSpecializeClass(childClassType, { useBoundTypeVars: true }) + ); + + // The "Self" value for the base class depends on whether it's a + // protocol or not. It's not clear from the typing spec whether + // this is the correct behavior. + const baseClassSelf = ClassType.isProtocolClass(baseClass) + ? 
childClassSelf + : ClassType.cloneAsInstance(selfSpecializeClass(baseClass, { useBoundTypeVars: true })); + + let baseType = partiallySpecializeType( + this._evaluator.getEffectiveTypeOfSymbol(baseClassAndSymbol.symbol), + baseClass, + this._evaluator.getTypeClassType(), + baseClassSelf + ); + + overrideType = partiallySpecializeType( + overrideType, + childClassType, + this._evaluator.getTypeClassType(), + childClassSelf + ); + + if (childClassType.shared.typeVarScopeId) { + overrideType = makeTypeVarsBound(overrideType, [childClassType.shared.typeVarScopeId]); + baseType = makeTypeVarsBound(baseType, [childClassType.shared.typeVarScopeId]); + } + + // Determine whether this is an attempt to override a method marked @final. + if (this._isFinalFunction(memberName, baseClassAndSymbol.symbol, baseType)) { + const decl = getLastTypedDeclarationForSymbol(overrideSymbol); + if (decl && decl.type === DeclarationType.Function) { + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleMethodOverride, + LocMessage.finalMethodOverride().format({ + name: memberName, + className: baseClass.shared.name, + }), + decl.node.d.name + ); + + const origDecl = getLastTypedDeclarationForSymbol(baseClassAndSymbol.symbol); + if (diag && origDecl) { + diag.addRelatedInfo(LocAddendum.finalMethod(), origDecl.uri, origDecl.range); + } + } + } + + if (isFunctionOrOverloaded(baseType)) { + const diagAddendum = new DiagnosticAddendum(); + + // Don't check certain magic functions or private symbols. + // Also, skip this check if the class is a TypedDict. The methods for a TypedDict + // are synthesized, and they can result in many overloads. We assume they + // are correct and will not produce any errors. + if ( + this._isMethodExemptFromLsp(memberName) || + SymbolNameUtils.isPrivateName(memberName) || + ClassType.isTypedDictClass(childClassType) + ) { + return; + } + + if (isFunctionOrOverloaded(overrideType)) { + // Don't enforce parameter names for dundered methods. 
Many of them + // are misnamed in typeshed stubs, so this would result in many + // false positives. + const enforceParamNameMatch = !SymbolNameUtils.isDunderName(memberName); + + if ( + this._evaluator.validateOverrideMethod( + baseType, + overrideType, + childClassType, + diagAddendum, + enforceParamNameMatch + ) + ) { + return; + } + + const decl = getLastTypedDeclarationForSymbol(overrideSymbol); + if (!decl) { + return; + } + + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleMethodOverride, + LocMessage.incompatibleMethodOverride().format({ + name: memberName, + className: baseClass.shared.name, + }) + diagAddendum.getString(), + getNameNodeForDeclaration(decl) ?? decl.node + ); + + const origDecl = getLastTypedDeclarationForSymbol(baseClassAndSymbol.symbol); + if (diag && origDecl) { + diag.addRelatedInfo(LocAddendum.overriddenMethod(), origDecl.uri, origDecl.range); + } + return; + } + + if (!isAnyOrUnknown(overrideType)) { + // Special-case overrides of methods in '_TypedDict', since + // TypedDict attributes aren't manifest as attributes but rather + // as named keys. + if (ClassType.isBuiltIn(baseClass, ['_TypedDict', 'TypedDictFallback'])) { + return; + } + + const decls = overrideSymbol.getDeclarations(); + if (decls.length === 0) { + return; + } + + const lastDecl = decls[decls.length - 1]; + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleMethodOverride, + LocMessage.methodOverridden().format({ + name: memberName, + className: baseClass.shared.name, + type: this._evaluator.printType(overrideType), + }), + getNameNodeForDeclaration(lastDecl) ?? lastDecl.node + ); + + const origDecl = getLastTypedDeclarationForSymbol(baseClassAndSymbol.symbol); + if (diag && origDecl) { + diag.addRelatedInfo(LocAddendum.overriddenMethod(), origDecl.uri, origDecl.range); + } + } + return; + } + + if (isProperty(baseType)) { + // Handle properties specially. 
+ if (!isProperty(overrideType)) { + const decls = overrideSymbol.getDeclarations(); + if (decls.length > 0 && overrideSymbol.isClassMember()) { + const lastDecl = decls[decls.length - 1]; + this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleMethodOverride, + LocMessage.propertyOverridden().format({ + name: memberName, + className: baseClass.shared.name, + }), + getNameNodeForDeclaration(lastDecl) ?? lastDecl.node + ); + } + } else { + this._validatePropertyOverride( + baseClass, + childClassType, + baseType, + overrideType, + overrideSymbol, + memberName + ); + } + return; + } + + // This check can be expensive, so don't perform it if the corresponding + // rule is disabled. + if (this._fileInfo.diagnosticRuleSet.reportIncompatibleVariableOverride !== 'none') { + const decls = overrideSymbol.getDeclarations(); + + if (decls.length === 0) { + return; + } + + const lastDecl = decls[decls.length - 1]; + const primaryDecl = decls[0]; + + // Verify that the override type is assignable to (same or narrower than) + // the declared type of the base symbol. + let isInvariant = primaryDecl?.type === DeclarationType.Variable && !primaryDecl.isFinal; + + // If the entry is a member of a frozen dataclass, it is immutable, + // so it does not need to be invariant. + if (ClassType.isDataClassFrozen(baseClass) && baseClass.shared.dataClassEntries) { + const dataclassEntry = baseClass.shared.dataClassEntries.find((entry) => entry.name === memberName); + if (dataclassEntry) { + isInvariant = false; + } + } + + let overriddenTDEntry: TypedDictEntry | undefined; + let overrideTDEntry: TypedDictEntry | undefined; + + if (!overrideSymbol.isIgnoredForProtocolMatch()) { + if (baseClass.shared.typedDictEntries) { + overriddenTDEntry = + baseClass.shared.typedDictEntries.knownItems.get(memberName) ?? + baseClass.shared.typedDictEntries.extraItems ?? 
+ getEffectiveExtraItemsEntryType(this._evaluator, baseClass); + + if (overriddenTDEntry?.isReadOnly) { + isInvariant = false; + } + } + + if (childClassType.shared.typedDictEntries) { + overrideTDEntry = + childClassType.shared.typedDictEntries.knownItems.get(memberName) ?? + childClassType.shared.typedDictEntries.extraItems ?? + getEffectiveExtraItemsEntryType(this._evaluator, childClassType); + } + } + + let diagAddendum = new DiagnosticAddendum(); + if ( + !this._evaluator.assignType( + baseType, + overrideType, + diagAddendum, + /* constraints */ undefined, + isInvariant ? AssignTypeFlags.Invariant : AssignTypeFlags.Default + ) + ) { + if (isInvariant) { + diagAddendum = new DiagnosticAddendum(); + diagAddendum.addMessage(LocAddendum.overrideIsInvariant()); + diagAddendum.createAddendum().addMessage( + LocAddendum.overrideInvariantMismatch().format({ + overrideType: this._evaluator.printType(overrideType), + baseType: this._evaluator.printType(baseType), + }) + ); + } + + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleVariableOverride, + LocMessage.symbolOverridden().format({ + name: memberName, + className: baseClass.shared.name, + }) + diagAddendum.getString(), + getNameNodeForDeclaration(lastDecl) ?? lastDecl.node + ); + + const origDecl = getLastTypedDeclarationForSymbol(baseClassAndSymbol.symbol); + if (diag && origDecl) { + diag.addRelatedInfo(LocAddendum.overriddenSymbol(), origDecl.uri, origDecl.range); + } + } else if (overriddenTDEntry && overrideTDEntry) { + // Make sure the required/not-required attribute is compatible. + let isRequiredCompatible = true; + if (overriddenTDEntry.isReadOnly) { + // If the read-only flag is set, a not-required field can be overridden + // by a required field, but not vice versa. 
+ isRequiredCompatible = overrideTDEntry.isRequired || !overriddenTDEntry.isRequired; + } else { + isRequiredCompatible = overrideTDEntry.isRequired === overriddenTDEntry.isRequired; + } + + if (!isRequiredCompatible) { + const message = overrideTDEntry.isRequired + ? LocMessage.typedDictFieldRequiredRedefinition + : LocMessage.typedDictFieldNotRequiredRedefinition; + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + message().format({ name: memberName }), + getNameNodeForDeclaration(lastDecl) ?? lastDecl.node + ); + } + + // Make sure that the derived class isn't marking a previously writable + // entry as read-only. + if (!overriddenTDEntry.isReadOnly && overrideTDEntry.isReadOnly) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictFieldReadOnlyRedefinition().format({ + name: memberName, + }), + getNameNodeForDeclaration(lastDecl) ?? lastDecl.node + ); + } + } + + // Verify that there is not a Final mismatch. + const isBaseVarFinal = this._evaluator.isFinalVariable(baseClassAndSymbol.symbol); + const overrideFinalVarDecl = decls.find((d) => this._evaluator.isFinalVariableDeclaration(d)); + + if (!isBaseVarFinal && overrideFinalVarDecl) { + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleVariableOverride, + LocMessage.variableFinalOverride().format({ + name: memberName, + className: baseClass.shared.name, + }), + getNameNodeForDeclaration(lastDecl) ?? lastDecl.node + ); + + if (diag) { + diag.addRelatedInfo( + LocAddendum.overriddenSymbol(), + overrideFinalVarDecl.uri, + overrideFinalVarDecl.range + ); + } + } + + // Verify that a class variable isn't overriding an instance + // variable or vice versa. 
+ const isBaseClassVar = baseClassAndSymbol.symbol.isClassVar(); + let isClassVar = overrideSymbol.isClassVar(); + + if (isBaseClassVar && !isClassVar) { + // If the subclass doesn't redeclare the type but simply assigns + // it without declaring its type, we won't consider it an instance + // variable. + if (!overrideSymbol.hasTypedDeclarations()) { + isClassVar = true; + } + + // If the subclass is declaring an inner class, we'll consider that + // to be a ClassVar. + if (overrideSymbol.getTypedDeclarations().every((decl) => decl.type === DeclarationType.Class)) { + isClassVar = true; + } + } + + // Allow TypedDict members to have the same name as class variables in the + // base class because TypedDict members are not really instance members. + const ignoreTypedDictOverride = ClassType.isTypedDictClass(childClassType) && !isClassVar; + + if (isBaseClassVar !== isClassVar && !ignoreTypedDictOverride) { + const unformattedMessage = overrideSymbol.isClassVar() + ? LocMessage.classVarOverridesInstanceVar() + : LocMessage.instanceVarOverridesClassVar(); + + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleVariableOverride, + unformattedMessage.format({ + name: memberName, + className: baseClass.shared.name, + }), + getNameNodeForDeclaration(lastDecl) ?? lastDecl.node + ); + + const origDecl = getLastTypedDeclarationForSymbol(baseClassAndSymbol.symbol); + if (diag && origDecl) { + diag.addRelatedInfo(LocAddendum.overriddenSymbol(), origDecl.uri, origDecl.range); + } + } + } + } + + private _isFinalFunction(name: string, symbol: Symbol, type: Type) { + if (SymbolNameUtils.isPrivateName(name)) { + return false; + } + + // Was this declared with a "def" statement? + const defDecls: FunctionNode[] = []; + symbol.getDeclarations().forEach((decl) => { + if (decl.type === DeclarationType.Function && decl.node.nodeType === ParseNodeType.Function) { + defDecls.push(decl.node); + } + }); + + // Locate all final function declarations. 
+ const finalDefDecls = defDecls.filter((decl) => { + const undecoratedFuncType = this._evaluator.getTypeOfFunction(decl)?.functionType; + if (!undecoratedFuncType) { + return false; + } + + return FunctionType.isFinal(undecoratedFuncType); + }); + + return finalDefDecls.length > 0; + } + + private _validatePropertyOverride( + baseClassType: ClassType, + childClassType: ClassType, + baseType: Type, + childType: Type, + overrideSymbol: Symbol, + memberName: string + ) { + const propMethodInfo: [string, (c: ClassType) => FunctionType | undefined][] = [ + ['fget', (c) => c.priv.fgetInfo?.methodType], + ['fset', (c) => c.priv.fsetInfo?.methodType], + ['fdel', (c) => c.priv.fdelInfo?.methodType], + ]; + + propMethodInfo.forEach((info) => { + const diagAddendum = new DiagnosticAddendum(); + const [methodName, methodAccessor] = info; + const baseClassPropMethod = methodAccessor(baseType as ClassType); + const subclassPropMethod = methodAccessor(childType as ClassType); + + // Is the method present on the base class but missing in the subclass? + if (baseClassPropMethod) { + const baseClassMethodType = partiallySpecializeType( + baseClassPropMethod, + baseClassType, + this._evaluator.getTypeClassType() + ); + + if (!isFunction(baseClassMethodType)) { + return; + } + + if (!subclassPropMethod) { + // The method is missing. + diagAddendum.addMessage( + LocAddendum.propertyMethodMissing().format({ + name: methodName, + }) + ); + + const decls = overrideSymbol.getDeclarations(); + + if (decls.length > 0) { + const lastDecl = decls[decls.length - 1]; + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleMethodOverride, + LocMessage.propertyOverridden().format({ + name: memberName, + className: baseClassType.shared.name, + }) + diagAddendum.getString(), + getNameNodeForDeclaration(lastDecl) ?? 
lastDecl.node + ); + + const origDecl = baseClassMethodType.shared.declaration; + if (diag && origDecl) { + diag.addRelatedInfo(LocAddendum.overriddenMethod(), origDecl.uri, origDecl.range); + } + } + + return; + } + + const subclassMethodType = partiallySpecializeType( + subclassPropMethod, + childClassType, + this._evaluator.getTypeClassType() + ); + + if (!isFunction(subclassMethodType)) { + return; + } + + if ( + this._evaluator.validateOverrideMethod( + baseClassMethodType, + subclassMethodType, + childClassType, + diagAddendum.createAddendum() + ) + ) { + return; + } + + diagAddendum.addMessage( + LocAddendum.propertyMethodIncompatible().format({ + name: methodName, + }) + ); + const decl = subclassMethodType.shared.declaration; + if (!decl || decl.type !== DeclarationType.Function) { + return; + } + + let diagLocation: ParseNode = decl.node.d.name; + + // Make sure the method decl is contained within the + // class suite. If not, it probably comes from a decorator + // in another class. We don't want to report the error + // in the wrong location. + const childClassDecl = childClassType.shared.declaration; + if ( + !childClassDecl || + childClassDecl.node.nodeType !== ParseNodeType.Class || + !ParseTreeUtils.isNodeContainedWithin(decl.node, childClassDecl.node.d.suite) + ) { + const symbolDecls = overrideSymbol.getDeclarations(); + if (symbolDecls.length === 0) { + return; + } + const lastSymbolDecl = symbolDecls[symbolDecls.length - 1]; + diagLocation = getNameNodeForDeclaration(lastSymbolDecl) ?? 
lastSymbolDecl.node; + } + + const diag = this._evaluator.addDiagnostic( + DiagnosticRule.reportIncompatibleMethodOverride, + LocMessage.propertyOverridden().format({ + name: memberName, + className: baseClassType.shared.name, + }) + diagAddendum.getString(), + diagLocation + ); + + const origDecl = baseClassMethodType.shared.declaration; + if (diag && origDecl) { + diag.addRelatedInfo(LocAddendum.overriddenMethod(), origDecl.uri, origDecl.range); + } + } + }); + } + + // Performs checks on a function that is located within a class + // and has been determined not to be a property accessor. + private _validateMethod(node: FunctionNode, functionType: FunctionType, classNode: ClassNode) { + const classTypeInfo = this._evaluator.getTypeOfClass(classNode); + if (!classTypeInfo) { + return; + } + + const classType = classTypeInfo.classType; + const methodName = node.d.name.d.value; + const isMetaclass = isInstantiableMetaclass(classType); + + const superCheckMethods = ['__init__', '__init_subclass__', '__enter__', '__exit__']; + if (superCheckMethods.includes(methodName)) { + if ( + !FunctionType.isAbstractMethod(functionType) && + !FunctionType.isOverloaded(functionType) && + !this._fileInfo.isStubFile + ) { + this._validateSuperCallForMethod(node, functionType, classType); + } + } + + const selfNames = ['self', '_self', '__self']; + const clsNames = ['cls', '_cls', '__cls']; + const clsNamesMetaclass = ['__mcls', 'mcls', 'mcs', 'metacls']; + + if (methodName === '_generate_next_value_') { + // Skip this check for _generate_next_value_. + return; + } + + if (methodName === '__new__') { + // __new__ overrides should have a "cls" parameter. 
+ if (node.d.params.length === 0 || !node.d.params[0].d.name) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportSelfClsParameterName, + LocMessage.newClsParam(), + node.d.name + ); + } else { + const paramName = node.d.params[0].d.name.d.value; + if (!clsNames.includes(paramName) && !(isMetaclass && clsNamesMetaclass.includes(paramName))) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportSelfClsParameterName, + LocMessage.newClsParam(), + node.d.params[0] + ); + } + } + + this._validateClsSelfParamType(node, functionType, classType, /* isCls */ true); + return; + } + + if (FunctionType.isStaticMethod(functionType)) { + if (node.d.params.length === 0 || !node.d.params[0].d.name) { + return; + } + + // Static methods should not have "self" or "cls" parameters. + const paramName = node.d.params[0].d.name.d.value; + if (paramName === 'self' || paramName === 'cls') { + this._evaluator.addDiagnostic( + DiagnosticRule.reportSelfClsParameterName, + LocMessage.staticClsSelfParam(), + node.d.params[0].d.name + ); + } + return; + } + + if (FunctionType.isClassMethod(functionType)) { + let paramName = ''; + if (node.d.params.length > 0 && node.d.params[0].d.name) { + paramName = node.d.params[0].d.name.d.value; + } + + // Class methods should have a "cls" parameter. + if (!clsNames.includes(paramName) && !(isMetaclass && clsNamesMetaclass.includes(paramName))) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportSelfClsParameterName, + LocMessage.classMethodClsParam(), + node.d.params.length > 0 ? node.d.params[0] : node.d.name + ); + } + + this._validateClsSelfParamType(node, functionType, classType, /* isCls */ true); + return; + } + + const decoratorIsPresent = node.d.decorators.length > 0; + const isOverloaded = FunctionType.isOverloaded(functionType); + + // The presence of a decorator can change the behavior, so we need + // to back off from this check if a decorator is present. An overload + // is a decorator, but we'll ignore that here. 
+ if (isOverloaded || !decoratorIsPresent) { + let paramName = ''; + let firstParamIsSimple = true; + + if (node.d.params.length > 0) { + if (node.d.params[0].d.name) { + paramName = node.d.params[0].d.name.d.value; + } + + if (node.d.params[0].d.category !== ParamCategory.Simple) { + firstParamIsSimple = false; + } + } + + // Instance methods should have a "self" parameter. + if (firstParamIsSimple && !selfNames.includes(paramName)) { + const isLegalMetaclassName = isMetaclass && clsNames.includes(paramName); + + // Some typeshed stubs use a name that starts with an underscore to designate + // a parameter that cannot be positional. + const isPrivateName = SymbolNameUtils.isPrivateOrProtectedName(paramName); + + if (!isLegalMetaclassName && !isPrivateName) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportSelfClsParameterName, + LocMessage.instanceMethodSelfParam(), + node.d.params.length > 0 ? node.d.params[0] : node.d.name + ); + } + } + } + + this._validateClsSelfParamType(node, functionType, classType, /* isCls */ false); + } + + // Determines whether the method properly calls through to the same method in all + // parent classes that expose a same-named method. + private _validateSuperCallForMethod(node: FunctionNode, methodType: FunctionType, classType: ClassType) { + // This is an expensive test, so if it's not enabled, don't do any work. + if (this._fileInfo.diagnosticRuleSet.reportMissingSuperCall === 'none') { + return; + } + + // If the class is marked final, we can skip the "object" base class + // because we know that the `__init__` method in `object` doesn't do + // anything. It's not safe to do this if the class isn't final because + // it could be combined with other classes in a multi-inheritance + // situation that effectively adds new superclasses that we don't know + // about statically. 
+ let effectiveFlags = MemberAccessFlags.SkipInstanceMembers | MemberAccessFlags.SkipOriginalClass; + if (ClassType.isFinal(classType)) { + effectiveFlags |= MemberAccessFlags.SkipObjectBaseClass; + } + + const methodMember = lookUpClassMember(classType, methodType.shared.name, effectiveFlags); + if (!methodMember) { + return; + } + + let foundCallOfMember = false; + + // Now scan the implementation of the method to determine whether + // super(). has been called for all of the required base classes. + const callNodeWalker = new ParseTreeUtils.CallNodeWalker((node) => { + if (node.d.leftExpr.nodeType === ParseNodeType.MemberAccess) { + // Is it accessing the method by the same name? + if (node.d.leftExpr.d.member.d.value === methodType.shared.name) { + const memberBaseExpr = node.d.leftExpr.d.leftExpr; + + // Is it a "super" call? + if ( + memberBaseExpr.nodeType === ParseNodeType.Call && + memberBaseExpr.d.leftExpr.nodeType === ParseNodeType.Name && + memberBaseExpr.d.leftExpr.d.value === 'super' + ) { + foundCallOfMember = true; + } else { + // Is it an X. direct call? + const baseType = this._evaluator.getType(memberBaseExpr); + if (baseType && isInstantiableClass(baseType)) { + foundCallOfMember = true; + } + } + } + } + }); + callNodeWalker.walk(node.d.suite); + + // If we didn't find a call to at least one base class, report the problem. + if (!foundCallOfMember) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportMissingSuperCall, + LocMessage.missingSuperCall().format({ + methodName: methodType.shared.name, + }), + node.d.name + ); + } + } + + // Validates that the annotated type of a "self" or "cls" parameter is + // compatible with the type of the class that contains it. 
+ private _validateClsSelfParamType( + node: FunctionNode, + functionType: FunctionType, + classType: ClassType, + isCls: boolean + ) { + if (node.d.params.length < 1 || functionType.shared.parameters.length < 1) { + return; + } + + // If there is no type annotation, there's nothing to check because + // the type will be inferred.d.typeAnnotation + const paramInfo = functionType.shared.parameters[0]; + const paramType = FunctionType.getParamType(functionType, 0); + const paramAnnotation = node.d.params[0].d.annotation ?? node.d.params[0].d.annotationComment; + if (!paramAnnotation || !paramInfo.name) { + return; + } + + // If this is an __init__ method, we need to specifically check for the + // use of class-scoped TypeVars, which are not allowed in this context + // according to the typing spec. + if (functionType.shared.name === '__init__' && functionType.shared.methodClass) { + const typeVars = getTypeVarArgsRecursive(paramType); + + if ( + typeVars.some( + (typeVar) => + typeVar.priv.scopeId === functionType.shared.methodClass?.shared.typeVarScopeId && + !TypeVarType.isSelf(typeVar) + ) + ) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportInvalidTypeVarUse, + LocMessage.initMethodSelfParamTypeVar(), + paramAnnotation + ); + } + } + + // If this is a protocol class, the self and cls parameters can be bound + // to something other than the class. + if (ClassType.isProtocolClass(classType)) { + return; + } + + const concreteParamType = this._evaluator.makeTopLevelTypeVarsConcrete(paramType); + const expectedType = isCls ? classType : convertToInstance(classType); + + // If the declared type is a protocol class or instance, skip + // the check. This has legitimate uses for mix-in classes. 
+ if (isInstantiableClass(concreteParamType) && ClassType.isProtocolClass(concreteParamType)) { + return; + } + if (isClassInstance(concreteParamType) && ClassType.isProtocolClass(concreteParamType)) { + return; + } + + // If the method starts with a `*args: P.args`, skip the check. + if ( + paramInfo.category === ParamCategory.ArgsList && + isParamSpec(paramType) && + paramType.priv.paramSpecAccess === 'args' + ) { + return; + } + + // Don't enforce this for an overloaded method because the "self" param + // annotation can be used as a filter for the overload. This differs from + // mypy, which enforces this check for overloads, but there are legitimate + // uses for this in an overloaded method. + if (FunctionType.isOverloaded(functionType)) { + return; + } + + // If the declared type is LiteralString and the class is str, exempt this case. + // It's used in the typeshed stubs. + if ( + isClassInstance(paramType) && + ClassType.isBuiltIn(paramType, 'LiteralString') && + ClassType.isBuiltIn(classType, 'str') + ) { + return; + } + + if (!this._evaluator.assignType(paramType, expectedType)) { + // We exempt Never from this check because it has a legitimate use in this case. + if (!isNever(paramType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.clsSelfParamTypeMismatch().format({ + name: paramInfo.name, + classType: this._evaluator.printType(expectedType), + }), + paramAnnotation + ); + } + } + } + + // Determines whether a yield or yield from node is compatible with the + // return type annotation of the containing function. 
+ private _validateYieldType( + node: YieldNode | YieldFromNode, + yieldType: Type, + expectedDiagAddendum?: DiagnosticAddendum, + sendType?: Type + ) { + const enclosingFunctionNode = ParseTreeUtils.getEnclosingFunction(node); + if (!enclosingFunctionNode || !enclosingFunctionNode.d.returnAnnotation) { + return; + } + + const functionTypeResult = this._evaluator.getTypeOfFunction(enclosingFunctionNode); + if (!functionTypeResult) { + return; + } + + let declaredReturnType = FunctionType.getEffectiveReturnType(functionTypeResult.functionType); + if (!declaredReturnType) { + return; + } + + const liveScopes = ParseTreeUtils.getTypeVarScopesForNode(node); + declaredReturnType = makeTypeVarsBound(declaredReturnType, liveScopes); + + let generatorType: Type | undefined; + if ( + !enclosingFunctionNode.d.isAsync && + isClassInstance(declaredReturnType) && + ClassType.isBuiltIn(declaredReturnType, 'AwaitableGenerator') + ) { + // Handle the old-style (pre-await) generator case + // if the return type explicitly uses AwaitableGenerator. + generatorType = + this._evaluator.getTypeCheckerInternalsType(node, 'AwaitableGenerator') ?? + this._evaluator.getTypingType(node, 'AwaitableGenerator'); + } else { + generatorType = this._evaluator.getTypingType( + node, + enclosingFunctionNode.d.isAsync ? 'AsyncGenerator' : 'Generator' + ); + } + + if (!generatorType || !isInstantiableClass(generatorType)) { + return; + } + + if (!this._evaluator.isNodeReachable(node, /* sourceNode */ undefined)) { + return; + } + + if (isNever(declaredReturnType)) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.noReturnContainsYield(), + node + ); + return; + } + + const generatorTypeArgs = [yieldType, sendType ?? 
UnknownType.create(), UnknownType.create()]; + const specializedGenerator = ClassType.cloneAsInstance(ClassType.specialize(generatorType, generatorTypeArgs)); + + const diagAddendum = new DiagnosticAddendum(); + if (!this._evaluator.assignType(declaredReturnType, specializedGenerator, diagAddendum)) { + const errorMessage = enclosingFunctionNode.d.isAsync + ? LocMessage.generatorAsyncReturnType() + : LocMessage.generatorSyncReturnType(); + + this._evaluator.addDiagnostic( + DiagnosticRule.reportReturnType, + errorMessage.format({ yieldType: this._evaluator.printType(yieldType) }) + + (expectedDiagAddendum?.getString() ?? diagAddendum.getString()), + node.d.expr ?? node, + expectedDiagAddendum?.getEffectiveTextRange() ?? node.d.expr ?? node + ); + } + } + + // Determines whether any of the except statements are unreachable because + // they are redundant. + private _reportUnusedExceptStatements(node: TryNode) { + let sawUnknownExceptionType = false; + const exceptionTypesSoFar: ClassType[] = []; + + node.d.exceptClauses.forEach((except) => { + if (sawUnknownExceptionType || except.d.isExceptGroup || !except.d.typeExpr) { + return; + } + + const exceptionType = this._evaluator.getType(except.d.typeExpr); + if (!exceptionType || isAnyOrUnknown(exceptionType)) { + sawUnknownExceptionType = true; + return; + } + + const typesOfThisExcept: ClassType[] = []; + + if (isInstantiableClass(exceptionType)) { + // If the exception type is a variable whose type could represent + // subclasses, the actual exception type is statically unknown. + if (exceptionType.priv.includeSubclasses) { + sawUnknownExceptionType = true; + } + + typesOfThisExcept.push(exceptionType); + } else if (isClassInstance(exceptionType)) { + const iterableType = + this._evaluator.getTypeOfIterator( + { type: exceptionType }, + /* isAsync */ false, + /* errorNode */ except.d.typeExpr, + /* emitNotIterableError */ false + )?.type ?? 
UnknownType.create(); + + doForEachSubtype(iterableType, (subtype) => { + if (isAnyOrUnknown(subtype)) { + sawUnknownExceptionType = true; + } + + if (isInstantiableClass(subtype)) { + // If the exception type is a variable whose type could represent + // subclasses, the actual exception type is statically unknown. + if (subtype.priv.includeSubclasses) { + sawUnknownExceptionType = true; + } + typesOfThisExcept.push(subtype); + } + }); + } else { + sawUnknownExceptionType = true; + } + + if (exceptionTypesSoFar.length > 0 && !sawUnknownExceptionType) { + const diagAddendum = new DiagnosticAddendum(); + let overriddenExceptionCount = 0; + + typesOfThisExcept.forEach((thisExceptType) => { + const subtype = exceptionTypesSoFar.find((previousExceptType) => { + return derivesFromClassRecursive(thisExceptType, previousExceptType, /* ignoreUnknown */ true); + }); + + if (subtype) { + diagAddendum.addMessage( + LocAddendum.unreachableExcept().format({ + exceptionType: this._evaluator.printType(convertToInstance(thisExceptType)), + parentType: this._evaluator.printType(convertToInstance(subtype)), + }) + ); + overriddenExceptionCount++; + } + }); + + // Were all of the exception types overridden? 
+ if (typesOfThisExcept.length > 0 && typesOfThisExcept.length === overriddenExceptionCount) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnusedExcept, + LocMessage.unreachableExcept() + diagAddendum.getString(), + except.d.typeExpr + ); + + this._evaluator.addDiagnostic( + DiagnosticRule.reportUnreachable, + LocMessage.unreachableCodeType(), + except.d.exceptSuite, + except.d.exceptToken + ); + + this._evaluator.addUnreachableCode( + except, + Reachability.UnreachableByAnalysis, + except.d.exceptSuite + ); + } + } + + appendArray(exceptionTypesSoFar, typesOfThisExcept); + }); + } + + private _reportDuplicateImports() { + const importStatements = getTopLevelImports(this._moduleNode); + + const importModuleMap = new Map(); + + importStatements.orderedImports.forEach((importStatement) => { + if (importStatement.node.nodeType === ParseNodeType.ImportFrom) { + const symbolMap = new Map(); + + importStatement.node.d.imports.forEach((importFromAs) => { + // Ignore duplicates if they're aliased. + if (!importFromAs.d.alias) { + const prevImport = symbolMap.get(importFromAs.d.name.d.value); + if (prevImport) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportDuplicateImport, + LocMessage.duplicateImport().format({ importName: importFromAs.d.name.d.value }), + importFromAs.d.name + ); + } else { + symbolMap.set(importFromAs.d.name.d.value, importFromAs); + } + } + }); + } else if (importStatement.subnode) { + // Ignore duplicates if they're aliased. 
+ if (!importStatement.subnode.d.alias) { + const prevImport = importModuleMap.get(importStatement.moduleName); + if (prevImport) { + this._evaluator.addDiagnostic( + DiagnosticRule.reportDuplicateImport, + LocMessage.duplicateImport().format({ importName: importStatement.moduleName }), + importStatement.subnode + ); + } else { + importModuleMap.set(importStatement.moduleName, importStatement.subnode); + } + } + } + }); + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/circularDependency.ts b/python-parser/packages/pyright-internal/src/analyzer/circularDependency.ts new file mode 100644 index 00000000..441e52f6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/circularDependency.ts @@ -0,0 +1,54 @@ +/* + * circularDependency.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * A list of file paths that are part of a circular dependency + * chain (i.e. a chain of imports). Since these are circular, there + * no defined "start", but this module helps normalize the start + * by picking the alphabetically-first module in the cycle. + */ + +import { Uri } from '../common/uri/uri'; + +export class CircularDependency { + private _paths: Uri[] = []; + + appendPath(path: Uri) { + this._paths.push(path); + } + + getPaths() { + return this._paths; + } + + normalizeOrder() { + // Find the path that is alphabetically first and reorder + // based on that. 
+ let firstIndex = 0; + this._paths.forEach((path, index) => { + if (path < this._paths[firstIndex]) { + firstIndex = index; + } + }); + + if (firstIndex !== 0) { + this._paths = this._paths.slice(firstIndex).concat(this._paths.slice(0, firstIndex)); + } + } + + isEqual(circDependency: CircularDependency) { + if (circDependency._paths.length !== this._paths.length) { + return false; + } + + for (let i = 0; i < this._paths.length; i++) { + if (this._paths[i] !== circDependency._paths[i]) { + return false; + } + } + + return true; + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/codeFlowEngine.ts b/python-parser/packages/pyright-internal/src/analyzer/codeFlowEngine.ts new file mode 100644 index 00000000..2a1f4477 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/codeFlowEngine.ts @@ -0,0 +1,2039 @@ +/* + * codeFlowEngine.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Code that traverses the code flow graph to determine the (narrowed) + * type of a variable or expression or the reachability of a statement. + * + * This is largely based on the code flow engine in the + * TypeScript compiler. 
+ */ + +import { ConsoleInterface } from '../common/console'; +import { assert, fail } from '../common/debug'; +import { convertOffsetToPosition } from '../common/positionUtils'; +import { ArgCategory, ExpressionNode, ParseNode, ParseNodeType } from '../parser/parseNodes'; +import { getFileInfo, getImportInfo } from './analyzerNodeInfo'; +import { + CodeFlowReferenceExpressionNode, + createKeyForReference, + createKeysForReferenceSubexpressions, + FlowAssignment, + FlowBranchLabel, + FlowCall, + FlowCondition, + FlowExhaustedMatch, + FlowFlags, + FlowLabel, + FlowNarrowForPattern, + FlowNode, + FlowPostContextManagerLabel, + FlowPostFinally, + FlowPreFinallyGate, + FlowVariableAnnotation, + FlowWildcardImport, +} from './codeFlowTypes'; +import { formatControlFlowGraph } from './codeFlowUtils'; +import { getBoundCallMethod, getBoundNewMethod } from './constructors'; +import { isMatchingExpression, isPartialMatchingExpression, printExpression } from './parseTreeUtils'; +import { getPatternSubtypeNarrowingCallback } from './patternMatching'; +import { SpeculativeTypeTracker } from './typeCacheUtils'; +import { narrowForKeyAssignment } from './typedDicts'; +import { EvalFlags, Reachability, TypeEvaluator, TypeResult } from './typeEvaluatorTypes'; +import { getTypeNarrowingCallback } from './typeGuards'; +import { + ClassType, + combineTypes, + FunctionType, + isClass, + isClassInstance, + isFunction, + isFunctionOrOverloaded, + isInstantiableClass, + isNever, + isOverloaded, + isParamSpec, + isTypeSame, + isTypeVar, + isTypeVarTuple, + maxTypeRecursionCount, + NeverType, + OverloadedType, + Type, + TypeVarType, + UnboundType, + UnknownType, +} from './types'; +import { + cleanIncompleteUnknown, + derivesFromStdlibClass, + doForEachSubtype, + isIncompleteUnknown, + isTypeAliasPlaceholder, + mapSubtypes, +} from './typeUtils'; + +export interface FlowNodeTypeResult { + type: Type | undefined; + isIncomplete: boolean; + generationCount: number | undefined; + 
incompleteSubtypes: IncompleteSubtypeInfo[] | undefined; +} + +export namespace FlowNodeTypeResult { + export function create( + type: Type | undefined, + isIncomplete: boolean, + generationCount?: number, + incompleteSubtypes?: IncompleteSubtypeInfo[] + ): FlowNodeTypeResult { + return { + type, + isIncomplete, + generationCount, + incompleteSubtypes, + }; + } +} + +export interface FlowNodeTypeOptions { + targetSymbolId?: number; + typeAtStart?: TypeResult; + skipConditionalNarrowing?: boolean; +} + +export interface CodeFlowAnalyzer { + getTypeFromCodeFlow: ( + flowNode: FlowNode, + reference: CodeFlowReferenceExpressionNode | undefined, + options?: FlowNodeTypeOptions + ) => FlowNodeTypeResult; +} + +export interface CodeFlowEngine { + createCodeFlowAnalyzer: () => CodeFlowAnalyzer; + getFlowNodeReachability: (flowNode: FlowNode, sourceFlowNode?: FlowNode, ignoreNoReturn?: boolean) => Reachability; + narrowConstrainedTypeVar: (flowNode: FlowNode, typeVar: TypeVarType) => Type | undefined; + printControlFlowGraph: ( + flowNode: FlowNode, + reference: CodeFlowReferenceExpressionNode | undefined, + callName: string, + logger: ConsoleInterface + ) => void; +} + +export interface IncompleteSubtypeInfo { + type: Type; + isIncomplete: boolean; + isPending: boolean; + evaluationCount: number; +} + +export interface IncompleteType { + isIncompleteType?: true; + + // Type computed so far + type: Type | undefined; + + // Array of incomplete subtypes that have been computed so far + // (used for loops) + incompleteSubtypes: IncompleteSubtypeInfo[]; + + // Tracks whether something has changed since this cache entry + // was written that might change the incomplete type; if this + // doesn't match the global "incomplete generation count", this + // cached value is stale + generationCount: number; + + // Indicates that the cache entry represents a sentinel + // value used to detect and prevent recursion. 
+ isRecursionSentinel?: boolean; +} + +interface ReachabilityCacheEntry { + reachability: Reachability | undefined; + reachabilityFrom: Map; +} + +// Define a user type guard function for IncompleteType. +export function isIncompleteType(cachedType: CachedType): cachedType is IncompleteType { + return !!(cachedType as IncompleteType).isIncompleteType; +} + +export type CachedType = Type | IncompleteType; + +interface CodeFlowTypeCache { + cache: Map; + pendingNodes: Set; + closedFinallyGateNodes: Set; +} + +// This debugging option prints the control flow graph when getTypeFromCodeFlow is called. +const enablePrintControlFlowGraph = false; + +// This debugging option prints the results of calls to isCallNoReturn. +const enablePrintCallNoReturn = false; + +// Should the code flow engine assume that an unannotated function does not have +// an inferred return type of `NoReturn`, or should it perform code flow analysis +// to determine whether it is `NoReturn`? Enabling this produces more consistent +// and complete results, but it can be very expensive. +const inferNoReturnForUnannotatedFunctions = false; + +// In rare circumstances, it's possible for types in a loop not to converge. This +// can happen, for example, if there are many symbols that depend on each other +// and their types depend on complex overloads that can resolve to Any under +// certain circumstances. This defines the max number of times we'll attempt to +// evaluate an antecedent in a loop before we give up and "pin" the evaluated +// type for that antecedent. The number is somewhat arbitrary. Too low and +// it will cause incorrect types to be evaluated even when types could converge. +// Too high, and it will cause long hangs before giving up. +const maxConvergenceAttemptLimit = 256; + +// Should a message be logged when the convergence limit is hit? This is useful +// for debugging but not something that is actionable for users, so disable by +// default. 
+const enablePrintConvergenceLimitHit = false; + +export function getCodeFlowEngine( + evaluator: TypeEvaluator, + speculativeTypeTracker: SpeculativeTypeTracker +): CodeFlowEngine { + const isReachableRecursionSet = new Set(); + const reachabilityCache = new Map(); + const callIsNoReturnCache = new Map(); + const isExceptionContextManagerCache = new Map(); + let flowIncompleteGeneration = 1; + let noReturnAnalysisDepth = 0; + let contextManagerAnalysisDepth = 0; + let maxConvergenceLimitHit = false; + + // Creates a new code flow analyzer that can be used to narrow the types + // of the expressions within an execution context. Each code flow analyzer + // instance maintains a cache of types it has already determined. + function createCodeFlowAnalyzer(): CodeFlowAnalyzer { + const flowNodeTypeCacheSet = new Map(); + + function getFlowNodeTypeCacheForReference(referenceKey: string) { + let flowNodeTypeCache = flowNodeTypeCacheSet.get(referenceKey); + if (!flowNodeTypeCache) { + flowNodeTypeCache = { + cache: new Map(), + pendingNodes: new Set(), + closedFinallyGateNodes: new Set(), + }; + flowNodeTypeCacheSet.set(referenceKey, flowNodeTypeCache); + } + + return flowNodeTypeCache; + } + + // Determines whether any calls to getTypeFromCodeFlow are pending + // for an expression other than referenceKeyFilter. This is important in cases + // where the type of one expression depends on the type of another + // in a loop. If there are other pending evaluations, we will mark the + // current evaluation as incomplete and return back to the pending + // evaluation. + function isGetTypeFromCodeFlowPending(referenceKeyFilter: string | undefined): boolean { + if (!referenceKeyFilter) { + return false; + } + + for (const [key, value] of flowNodeTypeCacheSet.entries()) { + if (key !== referenceKeyFilter && value.pendingNodes.size > 0) { + return true; + } + } + + return false; + } + + // This function has two primary modes. 
The first is used to determine + // the narrowed type of a reference expression based on code flow analysis. + // The second (when reference is undefined) is used to determine whether + // the specified flowNode is reachable when "never narrowing" is applied. + function getTypeFromCodeFlow( + flowNode: FlowNode, + reference: CodeFlowReferenceExpressionNode | undefined, + options?: FlowNodeTypeOptions + ): FlowNodeTypeResult { + if (enablePrintControlFlowGraph) { + printControlFlowGraph(flowNode, reference, 'getTypeFromCodeFlow'); + } + + const referenceKey = reference !== undefined ? createKeyForReference(reference) : undefined; + let subexpressionReferenceKeys: string[] | undefined; + const referenceKeyWithSymbolId = + referenceKey !== undefined && options?.targetSymbolId !== undefined + ? referenceKey + `.${options?.targetSymbolId.toString()}` + : '.'; + const flowNodeTypeCache = getFlowNodeTypeCacheForReference(referenceKeyWithSymbolId); + + // Caches the type of the flow node in our local cache, keyed by the flow node ID. + function setCacheEntry( + flowNode: FlowNode, + type: Type | undefined, + isIncomplete: boolean + ): FlowNodeTypeResult { + if (!isIncomplete) { + flowIncompleteGeneration++; + } else if (type) { + const prevEntry = flowNodeTypeCache.cache.get(flowNode.id); + if (prevEntry) { + const prevIncompleteType = prevEntry as IncompleteType; + + if ( + prevIncompleteType.isIncompleteType && + prevIncompleteType.type && + !isTypeSame(prevIncompleteType.type, type) + ) { + flowIncompleteGeneration++; + } + } + } + + // For speculative or incomplete types, we'll create a separate + // object. For non-speculative and complete types, we'll store + // the type directly. + const entry: CachedType | undefined = isIncomplete + ? 
{ + isIncompleteType: true, + type, + incompleteSubtypes: [], + generationCount: flowIncompleteGeneration, + } + : type; + + flowNodeTypeCache.cache.set(flowNode.id, entry); + speculativeTypeTracker.trackEntry(flowNodeTypeCache.cache, flowNode.id); + + return FlowNodeTypeResult.create( + type, + isIncomplete, + flowIncompleteGeneration, + isIncomplete ? [] : undefined + ); + } + + function setIncompleteSubtype( + flowNode: FlowNode, + index: number, + type: Type, + isIncomplete: boolean, + isPending: boolean, + evaluationCount: number + ) { + const cachedEntry = flowNodeTypeCache.cache.get(flowNode.id); + if (cachedEntry === undefined || !isIncompleteType(cachedEntry)) { + fail( + 'setIncompleteSubtype can be called only on a valid incomplete cache entry: ' + + `prev cache entry?: ${!cachedEntry} ` + + `index=${index} ` + + `isPending=${isPending} ` + + `evaluationCount=${evaluationCount}` + ); + } + + const incompleteEntries = cachedEntry.incompleteSubtypes; + if (index < incompleteEntries.length) { + const oldEntry = incompleteEntries[index]; + if (oldEntry.isIncomplete !== isIncomplete || !isTypeSame(oldEntry.type, type)) { + incompleteEntries[index] = { type, isIncomplete, isPending, evaluationCount }; + flowIncompleteGeneration++; + } else if (oldEntry.isPending !== isPending) { + incompleteEntries[index] = { type, isIncomplete, isPending, evaluationCount }; + } + } else { + assert(incompleteEntries.length === index); + incompleteEntries.push({ type, isIncomplete, isPending, evaluationCount }); + flowIncompleteGeneration++; + } + + let combinedType: Type | undefined; + if (cachedEntry.incompleteSubtypes.length > 0) { + // Recompute the effective type based on all of the incomplete + // types we've accumulated so far. + const typesToCombine: Type[] = []; + + cachedEntry.incompleteSubtypes.forEach((t) => { + if (t.type) { + typesToCombine.push(t.type); + } + }); + + combinedType = typesToCombine.length > 0 ? 
combineTypes(typesToCombine) : undefined; + } + + cachedEntry.type = combinedType; + cachedEntry.generationCount = flowIncompleteGeneration; + + return getCacheEntry(flowNode); + } + + // Cache either contains a type or an object that represents an incomplete type. + // Incomplete types are types that haven't gone through all flow nodes yet. + // Incomplete only happens for branch and loop nodes. + function getCacheEntry(flowNode: FlowNode): FlowNodeTypeResult | undefined { + if (!flowNodeTypeCache.cache.has(flowNode.id)) { + return undefined; + } + + const cachedEntry = flowNodeTypeCache.cache.get(flowNode.id); + if (cachedEntry === undefined) { + return FlowNodeTypeResult.create(/* type */ undefined, /* isIncomplete */ false); + } + + if (!isIncompleteType(cachedEntry)) { + return FlowNodeTypeResult.create(cachedEntry, /* isIncomplete */ false); + } + + return FlowNodeTypeResult.create( + cachedEntry.type, + /* isIncomplete */ true, + cachedEntry.generationCount, + cachedEntry.incompleteSubtypes + ); + } + + function deleteCacheEntry(flowNode: FlowNode) { + flowNodeTypeCache.cache.delete(flowNode.id); + } + + // Cleans any "incomplete unknowns" from the specified set of entries + // to compute the final type. 
+ function cleanIncompleteUnknownForCacheEntry(cacheEntry: FlowNodeTypeResult): Type | undefined { + if (!cacheEntry.type) { + return undefined; + } + + if (!cacheEntry.incompleteSubtypes || cacheEntry.incompleteSubtypes.length === 0) { + return cleanIncompleteUnknown(cacheEntry.type); + } + + const typesToCombine: Type[] = []; + + cacheEntry.incompleteSubtypes?.forEach((entry) => { + if (entry.type && !isIncompleteUnknown(entry.type)) { + typesToCombine.push(cleanIncompleteUnknown(entry.type)); + } + }); + + return combineTypes(typesToCombine); + } + + function evaluateAssignmentFlowNode(flowNode: FlowAssignment): TypeResult | undefined { + // For function and class nodes, the reference node is the name + // node, but we need to use the parent node (the FunctionNode or ClassNode) + // to access the decorated type in the type cache. + let nodeForCacheLookup: ParseNode = flowNode.node; + const parentNode = flowNode.node.parent; + if (parentNode) { + if (parentNode.nodeType === ParseNodeType.Function || parentNode.nodeType === ParseNodeType.Class) { + nodeForCacheLookup = parentNode; + } + } + + return evaluator.evaluateTypeForSubnode(nodeForCacheLookup, () => { + evaluator.evaluateTypesForStatement(flowNode.node); + }); + } + + function preventRecursion(flowNode: FlowNode, callback: () => T): T { + flowNodeTypeCache.pendingNodes.add(flowNode.id); + + try { + const result = callback(); + flowNodeTypeCache.pendingNodes.delete(flowNode.id); + return result; + } catch (e) { + // Don't use a "finally" clause here because the TypeScript + // debugger doesn't handle "step out" well with finally clauses. + flowNodeTypeCache.pendingNodes.delete(flowNode.id); + throw e; + } + } + + // If this flow has no knowledge of the target expression, it returns undefined. + // If the start flow node for this scope is reachable, the typeAtStart value is + // returned. 
+ function getTypeFromFlowNode(flowNode: FlowNode): FlowNodeTypeResult { + let curFlowNode = flowNode; + + // This is a frequently-called routine, so it's a good place to call + // the cancellation check. If the operation is canceled, an exception + // will be thrown at this point. + evaluator.checkForCancellation(); + + while (true) { + // Have we already been here? If so, use the cached value. + const cachedEntry = getCacheEntry(curFlowNode); + if (cachedEntry) { + if (!cachedEntry.isIncomplete) { + return cachedEntry; + } + + // If the cached entry is incomplete, we can use it only if nothing + // has changed that may cause the previously-reported incomplete type to change. + if (cachedEntry.generationCount === flowIncompleteGeneration) { + return FlowNodeTypeResult.create( + cleanIncompleteUnknownForCacheEntry(cachedEntry), + /* isIncomplete */ true + ); + } + } + + // Check for recursion. + if (flowNodeTypeCache.pendingNodes.has(curFlowNode.id)) { + return FlowNodeTypeResult.create( + cachedEntry?.type ?? UnknownType.create(/* isIncomplete */ true), + /* isIncomplete */ true + ); + } + + if (curFlowNode.flags & (FlowFlags.UnreachableStaticCondition | FlowFlags.UnreachableStructural)) { + // We can get here if there are nodes in a compound logical expression + // (e.g. "False and x") that are never executed but are evaluated. + return setCacheEntry(curFlowNode, NeverType.createNever(), /* isIncomplete */ false); + } + + if (curFlowNode.flags & FlowFlags.VariableAnnotation) { + const varAnnotationNode = curFlowNode as FlowVariableAnnotation; + curFlowNode = varAnnotationNode.antecedent; + continue; + } + + if (curFlowNode.flags & FlowFlags.Call) { + const callFlowNode = curFlowNode as FlowCall; + + // If this function returns a "NoReturn" type, that means + // it always raises an exception or otherwise doesn't return, + // so we can assume that the code before this is unreachable. 
+ if (isCallNoReturn(evaluator, callFlowNode)) { + return setCacheEntry(curFlowNode, /* type */ undefined, /* isIncomplete */ false); + } + + curFlowNode = callFlowNode.antecedent; + continue; + } + + if (curFlowNode.flags & FlowFlags.Assignment) { + const assignmentFlowNode = curFlowNode as FlowAssignment; + const targetNode = assignmentFlowNode.node; + + // Are we targeting the same symbol? We need to do this extra check because the same + // symbol name might refer to different symbols in different scopes (e.g. a list + // comprehension introduces a new scope). + if (reference) { + if ( + options?.targetSymbolId === assignmentFlowNode.targetSymbolId && + isMatchingExpression(reference, targetNode) + ) { + // Is this a special "unbind" assignment? If so, + // we can handle it immediately without any further evaluation. + if (curFlowNode.flags & FlowFlags.Unbind) { + // Don't treat unbound assignments to indexed expressions (i.e. "del x[0]") + // as true deletions. The most common use case for "del x[0]" is in a list, + // and the list class treats this as an element deletion, not an assignment. + if (reference.nodeType === ParseNodeType.Index) { + // No need to explore further. + return setCacheEntry(curFlowNode, undefined, /* isIncomplete */ false); + } + + // Don't treat unbound assignments to member access expressions (i.e. "del a.x") + // as true deletions either. These may go through a descriptor object __delete__ + // method or a __delattr__ method on the class. + if (reference.nodeType === ParseNodeType.MemberAccess) { + // No need to explore further. 
+ return setCacheEntry(curFlowNode, undefined, /* isIncomplete */ false); + } + + return setCacheEntry(curFlowNode, UnboundType.create(), /* isIncomplete */ false); + } + + let flowTypeResult = preventRecursion(curFlowNode, () => + evaluateAssignmentFlowNode(assignmentFlowNode) + ); + + if (flowTypeResult) { + if (isTypeAliasPlaceholder(flowTypeResult.type)) { + // Don't cache a recursive type alias placeholder. + return FlowNodeTypeResult.create(flowTypeResult.type, /* isIncomplete */ true); + } else if ( + reference.nodeType === ParseNodeType.MemberAccess && + evaluator.isAsymmetricAccessorAssignment(targetNode) + ) { + flowTypeResult = undefined; + } + } + + return setCacheEntry(curFlowNode, flowTypeResult?.type, !!flowTypeResult?.isIncomplete); + } + + // Is this a simple assignment to an index expression? If so, it could + // be assigning to a TypedDict, which requires narrowing of the expression's + // base type. + if ( + targetNode.nodeType === ParseNodeType.Index && + isMatchingExpression(reference, targetNode.d.leftExpr) + ) { + if ( + targetNode.parent?.nodeType === ParseNodeType.Assignment && + targetNode.d.items.length === 1 && + !targetNode.d.trailingComma && + !targetNode.d.items[0].d.name && + targetNode.d.items[0].d.argCategory === ArgCategory.Simple && + targetNode.d.items[0].d.valueExpr.nodeType === ParseNodeType.StringList && + targetNode.d.items[0].d.valueExpr.d.strings.length === 1 && + targetNode.d.items[0].d.valueExpr.d.strings[0].nodeType === ParseNodeType.String + ) { + const keyValue = targetNode.d.items[0].d.valueExpr.d.strings[0].d.value; + const narrowedResult = preventRecursion(assignmentFlowNode, () => { + const flowTypeResult = getTypeFromFlowNode(assignmentFlowNode.antecedent); + + if (flowTypeResult.type) { + flowTypeResult.type = mapSubtypes(flowTypeResult.type, (subtype) => { + if (isClass(subtype) && ClassType.isTypedDictClass(subtype)) { + return narrowForKeyAssignment(subtype, keyValue); + } + return subtype; + }); + } + + 
return flowTypeResult; + }); + + return setCacheEntry( + curFlowNode, + narrowedResult?.type, + !!narrowedResult?.isIncomplete + ); + } + } + + if (isPartialMatchingExpression(reference, targetNode)) { + // If the node partially matches the reference, we need to "kill" any narrowed + // types further above this point. For example, if we see the sequence + // a.b = 3 + // a = Foo() + // x = a.b + // The type of "a.b" can no longer be assumed to be Literal[3]. + return FlowNodeTypeResult.create( + options?.typeAtStart?.type, + !!options?.typeAtStart?.isIncomplete + ); + } + } + + curFlowNode = assignmentFlowNode.antecedent; + continue; + } + + if (curFlowNode.flags & FlowFlags.BranchLabel) { + const branchFlowNode = curFlowNode as FlowBranchLabel; + + if (curFlowNode.flags & FlowFlags.PostContextManager) { + // Determine whether any of the context managers support exception + // suppression. If not, none of its antecedents are reachable. + const contextMgrNode = curFlowNode as FlowPostContextManagerLabel; + const contextManagerSwallowsExceptions = contextMgrNode.expressions.some((expr) => + isExceptionContextManager(evaluator, expr, contextMgrNode.isAsync) + ); + + if (contextManagerSwallowsExceptions === contextMgrNode.blockIfSwallowsExceptions) { + // Do not explore any further along this code flow path. + return setCacheEntry(curFlowNode, /* type */ undefined, /* isIncomplete */ false); + } + } + + // Is the current symbol modified in any way within the scope of the branch? + // If not, we can skip all processing within the branch scope. 
+ if (reference && branchFlowNode.preBranchAntecedent && branchFlowNode.affectedExpressions) { + if (!subexpressionReferenceKeys) { + subexpressionReferenceKeys = createKeysForReferenceSubexpressions(reference); + } + + if ( + !subexpressionReferenceKeys.some((key) => + branchFlowNode.affectedExpressions!.has(key) + ) && + getFlowNodeReachability(curFlowNode, branchFlowNode.preBranchAntecedent) === + Reachability.Reachable + ) { + curFlowNode = branchFlowNode.preBranchAntecedent; + continue; + } + } + + return getTypeFromBranchFlowNode(curFlowNode as FlowLabel); + } + + if (curFlowNode.flags & FlowFlags.LoopLabel) { + const loopNode = curFlowNode as FlowLabel; + + // Is the current symbol modified in any way within the loop? If not, we can skip all + // processing within the loop and assume that the type comes from the first antecedent, + // which feeds the loop. + if (reference) { + if (!subexpressionReferenceKeys) { + subexpressionReferenceKeys = createKeysForReferenceSubexpressions(reference); + } + + if (!subexpressionReferenceKeys.some((key) => loopNode.affectedExpressions!.has(key))) { + curFlowNode = loopNode.antecedents[0]; + continue; + } + } + + return getTypeFromLoopFlowNode(loopNode, cachedEntry); + } + + if (curFlowNode.flags & (FlowFlags.TrueCondition | FlowFlags.FalseCondition)) { + const conditionalFlowNode = curFlowNode as FlowCondition; + + if (!options?.skipConditionalNarrowing && reference) { + const narrowedResult = preventRecursion(curFlowNode, () => { + const typeNarrowingCallback = getTypeNarrowingCallback( + evaluator, + reference, + conditionalFlowNode.expression, + !!( + conditionalFlowNode.flags & + (FlowFlags.TrueCondition | FlowFlags.TrueNeverCondition) + ) + ); + + if (typeNarrowingCallback) { + const flowTypeResult = getTypeFromFlowNode(conditionalFlowNode.antecedent); + let flowType = flowTypeResult.type; + let isIncomplete = flowTypeResult.isIncomplete; + + if (flowType) { + const flowTypeResult = typeNarrowingCallback(flowType); + 
+ if (flowTypeResult) { + flowType = flowTypeResult.type; + if (flowTypeResult.isIncomplete) { + isIncomplete = true; + } + } + } + + return setCacheEntry(curFlowNode, flowType, isIncomplete); + } + + return undefined; + }); + + if (narrowedResult) { + return narrowedResult; + } + } + + curFlowNode = conditionalFlowNode.antecedent; + continue; + } + + if (curFlowNode.flags & (FlowFlags.TrueNeverCondition | FlowFlags.FalseNeverCondition)) { + const conditionalFlowNode = curFlowNode as FlowCondition; + if (!options?.skipConditionalNarrowing && conditionalFlowNode.reference) { + // Don't allow apply if the conditional expression references the expression + // we're already narrowing. This case will be handled by the TrueCondition + // or FalseCondition node. + if (createKeyForReference(conditionalFlowNode.reference) !== referenceKey) { + // Make sure the reference type has a declared type. If not, + // don't bother trying to infer its type because that would be + // too expensive. + const symbolWithScope = evaluator.lookUpSymbolRecursive( + conditionalFlowNode.reference, + conditionalFlowNode.reference.d.value, + /* honorCodeFlow */ false + ); + + if (symbolWithScope && symbolWithScope.symbol.hasTypedDeclarations()) { + const result = preventRecursion(curFlowNode, () => { + const typeNarrowingCallback = getTypeNarrowingCallback( + evaluator, + conditionalFlowNode.reference!, + conditionalFlowNode.expression, + !!( + conditionalFlowNode.flags & + (FlowFlags.TrueCondition | FlowFlags.TrueNeverCondition) + ) + ); + + if (typeNarrowingCallback) { + const refTypeInfo = evaluator.getTypeOfExpression( + conditionalFlowNode.reference! 
+ ); + + let narrowedType = refTypeInfo.type; + let isIncomplete = !!refTypeInfo.isIncomplete; + + const narrowedTypeResult = typeNarrowingCallback(refTypeInfo.type); + if (narrowedTypeResult) { + narrowedType = narrowedTypeResult.type; + if (narrowedTypeResult.isIncomplete) { + isIncomplete = true; + } + } + + // If the narrowed type is "never", don't allow further exploration. + if (isNever(narrowedType)) { + return setCacheEntry(curFlowNode, undefined, isIncomplete); + } + } + + return undefined; + }); + + if (result) { + return result; + } + } + } + } + + curFlowNode = conditionalFlowNode.antecedent; + continue; + } + + if (curFlowNode.flags & FlowFlags.ExhaustedMatch) { + const exhaustedMatchFlowNode = curFlowNode as FlowExhaustedMatch; + const narrowedTypeResult = evaluator.evaluateTypeForSubnode(exhaustedMatchFlowNode.node, () => { + evaluator.evaluateTypesForMatchStatement(exhaustedMatchFlowNode.node); + }); + + // If the narrowed type is "never", don't allow further exploration. + if (narrowedTypeResult) { + if (isNever(narrowedTypeResult.type)) { + return setCacheEntry( + curFlowNode, + narrowedTypeResult.type, + !!narrowedTypeResult.isIncomplete + ); + } + + if (reference) { + // See if the reference is a subexpression within the subject expression. 
+ const typeNarrowingCallback = getPatternSubtypeNarrowingCallback( + evaluator, + reference, + exhaustedMatchFlowNode.subjectExpression + ); + + if (typeNarrowingCallback) { + const subexpressionTypeResult = typeNarrowingCallback(narrowedTypeResult.type); + + if (subexpressionTypeResult) { + return setCacheEntry( + curFlowNode, + subexpressionTypeResult.type, + !!narrowedTypeResult.isIncomplete || !!subexpressionTypeResult.isIncomplete + ); + } + } + } + } + + curFlowNode = exhaustedMatchFlowNode.antecedent; + continue; + } + + if (curFlowNode.flags & FlowFlags.NarrowForPattern) { + const patternFlowNode = curFlowNode as FlowNarrowForPattern; + if (!reference || isMatchingExpression(reference, patternFlowNode.subjectExpression)) { + const typeResult = evaluator.evaluateTypeForSubnode(patternFlowNode.statement, () => { + if (patternFlowNode.statement.nodeType === ParseNodeType.Case) { + evaluator.evaluateTypesForCaseStatement(patternFlowNode.statement); + } else { + evaluator.evaluateTypesForMatchStatement(patternFlowNode.statement); + } + }); + + if (typeResult) { + if (!reference) { + if (isNever(typeResult.type)) { + return setCacheEntry( + curFlowNode, + /* type */ undefined, + !!typeResult.isIncomplete + ); + } + } else { + return setCacheEntry(curFlowNode, typeResult.type, !!typeResult.isIncomplete); + } + } + } else if (patternFlowNode.statement.nodeType === ParseNodeType.Case) { + const caseStatement = patternFlowNode.statement; + + // See if the reference is a subexpression within the subject expression. 
+ const typeNarrowingCallback = getPatternSubtypeNarrowingCallback( + evaluator, + reference, + patternFlowNode.subjectExpression + ); + + if (typeNarrowingCallback) { + const typeResult = evaluator.evaluateTypeForSubnode(caseStatement, () => { + evaluator.evaluateTypesForCaseStatement(caseStatement); + }); + + if (typeResult) { + const narrowedTypeResult = typeNarrowingCallback(typeResult.type); + + if (narrowedTypeResult) { + return setCacheEntry( + curFlowNode, + narrowedTypeResult.type, + !!typeResult.isIncomplete || !!narrowedTypeResult.isIncomplete + ); + } + } + } + } + curFlowNode = patternFlowNode.antecedent; + continue; + } + + if (curFlowNode.flags & FlowFlags.PreFinallyGate) { + return getTypeFromPreFinallyGateFlowNode(curFlowNode as FlowPreFinallyGate); + } + + if (curFlowNode.flags & FlowFlags.PostFinally) { + return getTypeFromPostFinallyFlowNode(curFlowNode as FlowPostFinally); + } + + if (curFlowNode.flags & FlowFlags.Start) { + return setCacheEntry( + curFlowNode, + options?.typeAtStart?.type, + !!options?.typeAtStart?.isIncomplete + ); + } + + if (curFlowNode.flags & FlowFlags.WildcardImport) { + const wildcardImportFlowNode = curFlowNode as FlowWildcardImport; + if (reference && reference.nodeType === ParseNodeType.Name) { + const nameValue = reference.d.value; + if (wildcardImportFlowNode.names.some((name) => name === nameValue)) { + return preventRecursion(curFlowNode, () => { + const type = getTypeFromWildcardImport(wildcardImportFlowNode, nameValue); + return setCacheEntry(curFlowNode, type, /* isIncomplete */ false); + }); + } + } + + curFlowNode = wildcardImportFlowNode.antecedent; + continue; + } + + // We shouldn't get here. 
+ fail('Unexpected flow node flags'); + } + } + + function getTypeFromBranchFlowNode(branchNode: FlowLabel): FlowNodeTypeResult { + const typesToCombine: Type[] = []; + + let sawIncomplete = false; + + for (const antecedent of branchNode.antecedents) { + const flowTypeResult = getTypeFromFlowNode(antecedent); + + if (reference === undefined && flowTypeResult.type && !isNever(flowTypeResult.type)) { + // If we're solving for "reachability", and we have now proven + // reachability, there's no reason to do more work. The type we + // return here doesn't matter as long as it's not undefined. + return setCacheEntry(branchNode, UnknownType.create(), /* isIncomplete */ false); + } + + if (flowTypeResult.isIncomplete) { + sawIncomplete = true; + } + + if (flowTypeResult.type) { + typesToCombine.push(flowTypeResult.type); + } + } + + const effectiveType = typesToCombine.length > 0 ? combineTypes(typesToCombine) : undefined; + + return setCacheEntry(branchNode, effectiveType, sawIncomplete); + } + + function getTypeFromLoopFlowNode( + loopNode: FlowLabel, + cacheEntry: FlowNodeTypeResult | undefined + ): FlowNodeTypeResult { + // The type result from one antecedent may depend on the type + // result from another, so loop up to one time for each + // antecedent in the loop. + const maxAttemptCount = loopNode.antecedents.length; + + if (cacheEntry === undefined) { + // We haven't been here before, so create a new incomplete cache entry. + cacheEntry = setCacheEntry( + loopNode, + reference ? undefined : UnknownType.create(), + /* isIncomplete */ true + ); + } else if ( + cacheEntry.incompleteSubtypes && + cacheEntry.incompleteSubtypes.length === loopNode.antecedents.length && + cacheEntry.incompleteSubtypes.some((subtype) => subtype.isPending) + ) { + // If entries have been added for all antecedents and there are pending entries + // that have not been evaluated even once, treat it as incomplete. 
We clean + // any incomplete unknowns from the type here to assist with type convergence. + return FlowNodeTypeResult.create( + cleanIncompleteUnknownForCacheEntry(cacheEntry), + /* isIncomplete */ true + ); + } + + let attemptCount = 0; + + while (true) { + let sawIncomplete = false; + let sawPending = false; + let isProvenReachable = + reference === undefined && + cacheEntry.incompleteSubtypes?.some((subtype) => subtype.type !== undefined); + let firstAntecedentTypeIsIncomplete = false; + let firstAntecedentTypeIsPending = false; + + loopNode.antecedents.forEach((antecedent, index) => { + // If we've trying to determine reachability and we've already proven + // reachability, then we're done. + if (reference === undefined && isProvenReachable) { + return; + } + + if (firstAntecedentTypeIsPending && index > 0) { + return; + } + + cacheEntry = getCacheEntry(loopNode)!; + + // Is this entry marked "pending"? If so, we have recursed and there + // is another call on the stack that is actively evaluating this + // antecedent. Skip it here to avoid infinite recursion but note that + // we skipped a "pending" antecedent. + if ( + cacheEntry.incompleteSubtypes && + index < cacheEntry.incompleteSubtypes.length && + cacheEntry.incompleteSubtypes[index].isPending + ) { + // In rare circumstances, it's possible for a code flow graph with + // nested loops to hit the case where the first antecedent is marked + // as pending. In this case, we'll evaluate only the first antecedent + // again even though it's pending. We're guaranteed to make forward + // progress with the first antecedent, and that will allow us to establish + // an initial type for this expression, but we don't want to evaluate + // any other antecedents in this case because this could result in + // infinite recursion. + if (index === 0) { + firstAntecedentTypeIsPending = true; + } else { + sawIncomplete = true; + sawPending = true; + return; + } + } + + // Have we already been here (i.e. 
does the entry exist and is + // not marked "pending")? If so, we can use the type that was already + // computed if it is complete. + const subtypeEntry = + cacheEntry.incompleteSubtypes !== undefined && index < cacheEntry.incompleteSubtypes.length + ? cacheEntry.incompleteSubtypes[index] + : undefined; + if (subtypeEntry === undefined || (!subtypeEntry?.isPending && subtypeEntry?.isIncomplete)) { + const entryEvaluationCount = subtypeEntry === undefined ? 0 : subtypeEntry.evaluationCount; + + // Does it look like this will never converge? If so, stick with the + // previously-computed type for this entry. + if (entryEvaluationCount >= maxConvergenceAttemptLimit) { + // Log this only once. + if (!maxConvergenceLimitHit && enablePrintConvergenceLimitHit) { + console.log('Types failed to converge during code flow analysis'); + } + maxConvergenceLimitHit = true; + return; + } + + // Set this entry to "pending" to prevent infinite recursion. + // We'll mark it "not pending" below. + cacheEntry = setIncompleteSubtype( + loopNode, + index, + subtypeEntry?.type ?? UnknownType.create(/* isIncomplete */ true), + /* isIncomplete */ true, + /* isPending */ true, + entryEvaluationCount + ); + + try { + const flowTypeResult = getTypeFromFlowNode(antecedent); + + if (flowTypeResult.isIncomplete) { + sawIncomplete = true; + + if (index === 0) { + firstAntecedentTypeIsIncomplete = true; + } + } + + cacheEntry = setIncompleteSubtype( + loopNode, + index, + flowTypeResult.type ?? + (flowTypeResult.isIncomplete + ? 
UnknownType.create(/* isIncomplete */ true) + : NeverType.createNever()), + flowTypeResult.isIncomplete, + /* isPending */ firstAntecedentTypeIsPending, + entryEvaluationCount + 1 + ); + } catch (e) { + cacheEntry = setIncompleteSubtype( + loopNode, + index, + UnknownType.create(/* isIncomplete */ true), + /* isIncomplete */ true, + /* isPending */ firstAntecedentTypeIsPending, + entryEvaluationCount + 1 + ); + throw e; + } + } + + if (reference === undefined && cacheEntry?.type !== undefined) { + isProvenReachable = true; + } + }); + + if (isProvenReachable) { + // If we saw a pending entry, do not save over the top of the cache + // entry because we'll overwrite a pending evaluation. The type that + // we return here doesn't matter as long as it's not undefined. + return sawPending + ? FlowNodeTypeResult.create(UnknownType.create(), /* isIncomplete */ false) + : setCacheEntry(loopNode, UnknownType.create(), /* isIncomplete */ false); + } + + let effectiveType = cacheEntry.type; + if (sawIncomplete) { + // If there is an incomplete "Unknown" type within a union type, remove + // it. Otherwise we might end up resolving the cycle with a type + // that includes an undesirable unknown. + if (effectiveType) { + const cleanedType = cleanIncompleteUnknown(effectiveType); + if (cleanedType !== effectiveType) { + effectiveType = cleanedType; + } + } + } + + if (!sawIncomplete || attemptCount >= maxAttemptCount) { + // If we were able to evaluate a type along at least one antecedent + // path, mark it as complete. If we couldn't evaluate a type along + // any antecedent path, assume that some recursive call further + // up the stack will be able to produce a valid type. 
+ let reportIncomplete = sawIncomplete; + if ( + sawIncomplete && + !sawPending && + !isGetTypeFromCodeFlowPending(referenceKeyWithSymbolId) && + effectiveType && + !isIncompleteUnknown(effectiveType) && + !firstAntecedentTypeIsIncomplete + ) { + reportIncomplete = false; + } + + // If we saw a pending or incomplete entry, do not save over the top + // of the cache entry because we'll overwrite the partial result. + if (sawPending || sawIncomplete) { + if (!reportIncomplete) { + // Bump the generation count because we need to recalculate + // other incomplete types based on this now-complete type. + flowIncompleteGeneration++; + } + + return FlowNodeTypeResult.create(effectiveType, reportIncomplete); + } + + // If the first antecedent was pending, we skipped all of the other + // antecedents, so the type is incomplete. + if (firstAntecedentTypeIsPending) { + return FlowNodeTypeResult.create(effectiveType, /* isIncomplete */ true); + } + + return setCacheEntry(loopNode, effectiveType, /* isIncomplete */ false); + } + + attemptCount++; + } + } + + function getTypeFromPreFinallyGateFlowNode(preFinallyFlowNode: FlowPreFinallyGate): FlowNodeTypeResult { + // Is the finally gate closed? + if (flowNodeTypeCache.closedFinallyGateNodes.has(preFinallyFlowNode.id)) { + return FlowNodeTypeResult.create(/* type */ undefined, /* isIncomplete */ false); + } + + const flowTypeResult = getTypeFromFlowNode(preFinallyFlowNode.antecedent); + + // We want to cache the type only if we're evaluating the "gate closed" path. 
+ deleteCacheEntry(preFinallyFlowNode); + + return FlowNodeTypeResult.create(flowTypeResult.type, flowTypeResult.isIncomplete); + } + + function getTypeFromPostFinallyFlowNode(postFinallyFlowNode: FlowPostFinally): FlowNodeTypeResult { + const wasGateClosed = flowNodeTypeCache.closedFinallyGateNodes.has( + postFinallyFlowNode.preFinallyGate.id + ); + try { + flowNodeTypeCache.closedFinallyGateNodes.add(postFinallyFlowNode.preFinallyGate.id); + let flowTypeResult: FlowNodeTypeResult | undefined; + + // Use speculative mode for the remainder of the finally suite + // because the final types within this parse node block should be + // evaluated when the gate is open. + evaluator.useSpeculativeMode(postFinallyFlowNode.finallyNode, () => { + flowTypeResult = getTypeFromFlowNode(postFinallyFlowNode.antecedent); + }); + + // If the type is incomplete, don't write back to the cache. + return flowTypeResult!.isIncomplete + ? flowTypeResult! + : setCacheEntry(postFinallyFlowNode, flowTypeResult!.type, /* isIncomplete */ false); + } finally { + if (!wasGateClosed) { + flowNodeTypeCache.closedFinallyGateNodes.delete(postFinallyFlowNode.preFinallyGate.id); + } + } + } + + if (!flowNode) { + // This should happen only in cases where we're evaluating + // parse nodes that are created after the initial parse + // (namely, string literals that are used for forward + // referenced types). + return FlowNodeTypeResult.create(options?.typeAtStart?.type, !!options?.typeAtStart?.isIncomplete); + } + + return getTypeFromFlowNode(flowNode); + } + + return { + getTypeFromCodeFlow, + }; + } + + // Determines whether the specified flowNode can be reached by any + // control flow path within the execution context. If sourceFlowNode + // is specified, it returns true only if at least one control flow + // path passes through sourceFlowNode. 
+ function getFlowNodeReachability( + flowNode: FlowNode, + sourceFlowNode?: FlowNode, + ignoreNoReturn = false + ): Reachability { + const visitedFlowNodeSet = new Set(); + const closedFinallyGateSet = new Set(); + + if (enablePrintControlFlowGraph) { + printControlFlowGraph(flowNode, /* reference */ undefined, 'getFlowNodeReachability'); + } + + function cacheReachabilityResult(reachability: Reachability): Reachability { + // If there is a finally gate set, we will not cache the results + // because this can affect the reachability. + if (closedFinallyGateSet.size > 0) { + return reachability; + } + + let cacheEntry = reachabilityCache.get(flowNode.id); + if (!cacheEntry) { + cacheEntry = { reachability: undefined, reachabilityFrom: new Map() }; + reachabilityCache.set(flowNode.id, cacheEntry); + } + + if (!sourceFlowNode) { + cacheEntry.reachability = reachability; + } else { + cacheEntry.reachabilityFrom.set(sourceFlowNode.id, reachability); + } + + return reachability; + } + + function getFlowNodeReachabilityRecursive(flowNode: FlowNode, recursionCount = 0): Reachability { + // Cut off the recursion at some point to prevent a stack overflow. + const maxFlowNodeReachableRecursionCount = 64; + if (recursionCount > maxFlowNodeReachableRecursionCount) { + return Reachability.Reachable; + } + recursionCount++; + + let curFlowNode = flowNode; + + while (true) { + // See if we've already cached this result. + const cacheEntry = reachabilityCache.get(flowNode.id); + if (cacheEntry !== undefined && closedFinallyGateSet.size === 0) { + if (!sourceFlowNode) { + if (cacheEntry.reachability !== undefined) { + return cacheEntry.reachability; + } + } else { + const reachabilityFrom = cacheEntry.reachabilityFrom.get(sourceFlowNode.id); + if (reachabilityFrom !== undefined) { + return reachabilityFrom; + } + } + } + + // If we've already visited this node, we can assume + // it wasn't reachable. 
+ if (visitedFlowNodeSet.has(curFlowNode.id)) { + return cacheReachabilityResult(Reachability.UnreachableStructural); + } + + // Note that we've been here before. + visitedFlowNodeSet.add(curFlowNode.id); + + if (curFlowNode.flags & FlowFlags.UnreachableStructural) { + return cacheReachabilityResult(Reachability.UnreachableStructural); + } + + if (curFlowNode.flags & FlowFlags.UnreachableStaticCondition) { + return cacheReachabilityResult(Reachability.UnreachableStaticCondition); + } + + if (curFlowNode === sourceFlowNode) { + return cacheReachabilityResult(Reachability.Reachable); + } + + if ( + curFlowNode.flags & + (FlowFlags.VariableAnnotation | + FlowFlags.Assignment | + FlowFlags.WildcardImport | + FlowFlags.ExhaustedMatch) + ) { + const typedFlowNode = curFlowNode as + | FlowVariableAnnotation + | FlowAssignment + | FlowWildcardImport + | FlowExhaustedMatch; + curFlowNode = typedFlowNode.antecedent; + continue; + } + + if (curFlowNode.flags & FlowFlags.NarrowForPattern) { + const patternFlowNode = curFlowNode as FlowNarrowForPattern; + + const typeResult = evaluator.evaluateTypeForSubnode(patternFlowNode.statement, () => { + if (patternFlowNode.statement.nodeType === ParseNodeType.Case) { + evaluator.evaluateTypesForCaseStatement(patternFlowNode.statement); + } else { + evaluator.evaluateTypesForMatchStatement(patternFlowNode.statement); + } + }); + + if (typeResult && isNever(typeResult.type)) { + return cacheReachabilityResult(Reachability.UnreachableByAnalysis); + } + + curFlowNode = patternFlowNode.antecedent; + continue; + } + + if ( + curFlowNode.flags & + (FlowFlags.TrueCondition | + FlowFlags.FalseCondition | + FlowFlags.TrueNeverCondition | + FlowFlags.FalseNeverCondition) + ) { + const conditionalFlowNode = curFlowNode as FlowCondition; + if (conditionalFlowNode.reference) { + // Make sure the reference type has a declared type. If not, + // don't bother trying to infer its type because that would be + // too expensive. 
+ const symbolWithScope = evaluator.lookUpSymbolRecursive( + conditionalFlowNode.reference, + conditionalFlowNode.reference.d.value, + /* honorCodeFlow */ false + ); + + if (symbolWithScope && symbolWithScope.symbol.hasTypedDeclarations()) { + let isUnreachable = false; + + const typeNarrowingCallback = getTypeNarrowingCallback( + evaluator, + conditionalFlowNode.reference!, + conditionalFlowNode.expression, + !!(conditionalFlowNode.flags & (FlowFlags.TrueCondition | FlowFlags.TrueNeverCondition)) + ); + + if (typeNarrowingCallback) { + const refTypeInfo = evaluator.getTypeOfExpression(conditionalFlowNode.reference!); + + const narrowedTypeResult = typeNarrowingCallback(refTypeInfo.type); + const narrowedType = narrowedTypeResult?.type ?? refTypeInfo.type; + + if (isNever(narrowedType) && !refTypeInfo.isIncomplete) { + isUnreachable = true; + } + } + + if (isUnreachable) { + return cacheReachabilityResult(Reachability.UnreachableByAnalysis); + } + } + } + + curFlowNode = conditionalFlowNode.antecedent; + continue; + } + + if (curFlowNode.flags & FlowFlags.Call) { + const callFlowNode = curFlowNode as FlowCall; + + // If this function returns a "NoReturn" type, that means + // it always raises an exception or otherwise doesn't return, + // so we can assume that the code before this is unreachable. + if (!ignoreNoReturn && isCallNoReturn(evaluator, callFlowNode)) { + return cacheReachabilityResult(Reachability.UnreachableByAnalysis); + } + + curFlowNode = callFlowNode.antecedent; + continue; + } + + if (curFlowNode.flags & (FlowFlags.BranchLabel | FlowFlags.LoopLabel)) { + if (curFlowNode.flags & FlowFlags.PostContextManager) { + // Determine whether any of the context managers support exception + // suppression. If not, none of its antecedents are reachable. 
+ const contextMgrNode = curFlowNode as FlowPostContextManagerLabel; + if ( + !contextMgrNode.expressions.some((expr) => + isExceptionContextManager(evaluator, expr, contextMgrNode.isAsync) + ) + ) { + return cacheReachabilityResult(Reachability.UnreachableByAnalysis); + } + } + + const labelNode = curFlowNode as FlowLabel; + let unreachableByType = false; + let unreachableByStaticCondition = false; + for (const antecedent of labelNode.antecedents) { + const reachability = getFlowNodeReachabilityRecursive(antecedent, recursionCount); + if (reachability === Reachability.Reachable) { + return cacheReachabilityResult(reachability); + } else if (reachability === Reachability.UnreachableByAnalysis) { + unreachableByType = true; + } else if (reachability === Reachability.UnreachableStaticCondition) { + unreachableByStaticCondition = true; + } + } + return cacheReachabilityResult( + unreachableByType + ? Reachability.UnreachableByAnalysis + : unreachableByStaticCondition + ? Reachability.UnreachableStaticCondition + : Reachability.UnreachableStructural + ); + } + + if (curFlowNode.flags & FlowFlags.Start) { + // If we hit the start but were looking for a particular source flow + // node, return false. Otherwise, the start is what we're looking for. + return cacheReachabilityResult( + sourceFlowNode ? 
Reachability.UnreachableByAnalysis : Reachability.Reachable + ); + } + + if (curFlowNode.flags & FlowFlags.PreFinallyGate) { + const preFinallyFlowNode = curFlowNode as FlowPreFinallyGate; + if (closedFinallyGateSet.has(preFinallyFlowNode.id)) { + return cacheReachabilityResult(Reachability.UnreachableByAnalysis); + } + + curFlowNode = preFinallyFlowNode.antecedent; + continue; + } + + if (curFlowNode.flags & FlowFlags.PostFinally) { + const postFinallyFlowNode = curFlowNode as FlowPostFinally; + const wasGateClosed = closedFinallyGateSet.has(postFinallyFlowNode.preFinallyGate.id); + + try { + closedFinallyGateSet.add(postFinallyFlowNode.preFinallyGate.id); + return cacheReachabilityResult( + getFlowNodeReachabilityRecursive(postFinallyFlowNode.antecedent, recursionCount) + ); + } finally { + if (!wasGateClosed) { + closedFinallyGateSet.delete(postFinallyFlowNode.preFinallyGate.id); + } + } + } + + // We shouldn't get here. + fail('Unexpected flow node flags'); + return cacheReachabilityResult(Reachability.Reachable); + } + } + + // Protect against infinite recursion. + if (isReachableRecursionSet.has(flowNode.id)) { + return Reachability.Reachable; + } + isReachableRecursionSet.add(flowNode.id); + + try { + return getFlowNodeReachabilityRecursive(flowNode); + } finally { + isReachableRecursionSet.delete(flowNode.id); + } + } + + // Determines whether the specified typeVar, which is assumed to be constrained, + // can be narrowed to one of its constrained types based on isinstance type + // guard checks. 
+ function narrowConstrainedTypeVar(flowNode: FlowNode, typeVar: TypeVarType): ClassType | undefined { + assert(!isParamSpec(typeVar)); + assert(!isTypeVarTuple(typeVar)); + assert(!TypeVarType.hasBound(typeVar)); + assert(TypeVarType.hasConstraints(typeVar)); + + const visitedFlowNodeMap = new Set(); + const startingConstraints: ClassType[] = []; + + for (const constraint of typeVar.shared.constraints) { + if (isClassInstance(constraint)) { + startingConstraints.push(constraint); + } else { + // If one or more constraints are Unknown, Any, union types, etc., + // we can't narrow them. + return undefined; + } + } + + function narrowConstrainedTypeVarRecursive(flowNode: FlowNode, typeVar: TypeVarType): ClassType[] { + let curFlowNode = flowNode; + + while (true) { + if (visitedFlowNodeMap.has(curFlowNode.id)) { + return startingConstraints; + } + + if ( + curFlowNode.flags & + (FlowFlags.UnreachableStaticCondition | FlowFlags.UnreachableStructural | FlowFlags.Start) + ) { + return startingConstraints; + } + + if ( + curFlowNode.flags & + (FlowFlags.VariableAnnotation | + FlowFlags.Assignment | + FlowFlags.WildcardImport | + FlowFlags.TrueNeverCondition | + FlowFlags.FalseNeverCondition | + FlowFlags.ExhaustedMatch | + FlowFlags.PostFinally | + FlowFlags.PreFinallyGate | + FlowFlags.Call) + ) { + const typedFlowNode = curFlowNode as + | FlowVariableAnnotation + | FlowAssignment + | FlowWildcardImport + | FlowExhaustedMatch + | FlowPostFinally + | FlowPreFinallyGate + | FlowCall; + curFlowNode = typedFlowNode.antecedent; + continue; + } + + // Handle a case statement with a class pattern. 
+ if (curFlowNode.flags & FlowFlags.NarrowForPattern) { + const narrowForPatternFlowNode = curFlowNode as FlowNarrowForPattern; + if (narrowForPatternFlowNode.statement.nodeType === ParseNodeType.Case) { + const subjectType = evaluator.getTypeOfExpression( + narrowForPatternFlowNode.subjectExpression + ).type; + + if (isCompatibleWithConstrainedTypeVar(subjectType, typeVar)) { + const patternNode = narrowForPatternFlowNode.statement.d.pattern; + + if ( + patternNode.nodeType === ParseNodeType.PatternAs && + patternNode.d.orPatterns.length === 1 && + patternNode.d.orPatterns[0].nodeType === ParseNodeType.PatternClass + ) { + const classPatternNode = patternNode.d.orPatterns[0]; + + const classType = evaluator.getTypeOfExpression( + classPatternNode.d.className, + EvalFlags.CallBaseDefaults + ).type; + + if (isInstantiableClass(classType)) { + const priorRemainingConstraints = narrowConstrainedTypeVarRecursive( + narrowForPatternFlowNode.antecedent, + typeVar + ); + + return priorRemainingConstraints.filter((subtype) => + ClassType.isSameGenericClass(subtype, ClassType.cloneAsInstance(classType)) + ); + } + } + } + } + + curFlowNode = narrowForPatternFlowNode.antecedent; + continue; + } + + // Handle an isinstance type guard. + if (curFlowNode.flags & (FlowFlags.TrueCondition | FlowFlags.FalseCondition)) { + const conditionFlowNode = curFlowNode as FlowCondition; + const testExpression = conditionFlowNode.expression; + const isPositiveTest = (curFlowNode.flags & FlowFlags.TrueCondition) !== 0; + + if ( + testExpression.nodeType === ParseNodeType.Call && + testExpression.d.leftExpr.nodeType === ParseNodeType.Name && + testExpression.d.leftExpr.d.value === 'isinstance' && + testExpression.d.args.length === 2 + ) { + const arg0Expr = testExpression.d.args[0].d.valueExpr; + + const arg0Type = evaluator.getTypeOfExpression(arg0Expr).type; + + if (isCompatibleWithConstrainedTypeVar(arg0Type, typeVar)) { + // Prevent infinite recursion by noting that we've been here before. 
+ visitedFlowNodeMap.add(curFlowNode.id); + const priorRemainingConstraints = narrowConstrainedTypeVarRecursive( + conditionFlowNode.antecedent, + typeVar + ); + visitedFlowNodeMap.delete(curFlowNode.id); + + const arg1Expr = testExpression.d.args[1].d.valueExpr; + const arg1Type = evaluator.getTypeOfExpression( + arg1Expr, + EvalFlags.AllowMissingTypeArgs | + EvalFlags.StrLiteralAsType | + EvalFlags.NoParamSpec | + EvalFlags.NoTypeVarTuple | + EvalFlags.NoFinal | + EvalFlags.NoSpecialize + ).type; + + if (isInstantiableClass(arg1Type)) { + return priorRemainingConstraints.filter((subtype) => { + if (ClassType.isSameGenericClass(subtype, ClassType.cloneAsInstance(arg1Type))) { + return isPositiveTest; + } else { + return !isPositiveTest; + } + }); + } + } + } + + curFlowNode = conditionFlowNode.antecedent; + continue; + } + + if (curFlowNode.flags & (FlowFlags.BranchLabel | FlowFlags.LoopLabel)) { + const labelNode = curFlowNode as FlowLabel; + const newConstraints: ClassType[] = []; + + // Prevent infinite recursion by noting that we've been here before. + visitedFlowNodeMap.add(curFlowNode.id); + for (const antecedent of labelNode.antecedents) { + const constraintsToAdd = narrowConstrainedTypeVarRecursive(antecedent, typeVar); + + for (const constraint of constraintsToAdd) { + if (!newConstraints.some((t) => isTypeSame(t, constraint))) { + newConstraints.push(constraint); + } + } + } + visitedFlowNodeMap.delete(curFlowNode.id); + + return newConstraints; + } + + // We shouldn't get here. + fail('Unexpected flow node flags'); + return startingConstraints; + } + } + + const narrowedConstrainedType = narrowConstrainedTypeVarRecursive(flowNode, typeVar); + + // Have we narrowed the typeVar to a single constraint? + return narrowedConstrainedType.length === 1 ? narrowedConstrainedType[0] : undefined; + } + + // Determines whether a specified type is the same as a constrained + // TypeVar or is conditioned on that same TypeVar or is some union of + // the above. 
+ function isCompatibleWithConstrainedTypeVar(type: Type, typeVar: TypeVarType) { + let isCompatible = true; + doForEachSubtype(type, (subtype) => { + if (isTypeVar(subtype)) { + if (!isTypeSame(subtype, typeVar)) { + isCompatible = false; + } + } else if (subtype.props?.condition) { + if ( + !subtype.props.condition.some( + (condition) => + TypeVarType.hasConstraints(condition.typeVar) && + condition.typeVar.priv.nameWithScope === typeVar.priv.nameWithScope + ) + ) { + isCompatible = false; + } + } else { + isCompatible = false; + } + }); + + return isCompatible; + } + + // Determines whether a call associated with this flow node returns a NoReturn + // type, thus preventing further traversal of the code flow graph. + function isCallNoReturn(evaluator: TypeEvaluator, flowNode: FlowCall) { + const node = flowNode.node; + const fileInfo = getFileInfo(node); + + // Assume that calls within a pyi file are not "NoReturn" calls. + if (fileInfo.isStubFile) { + return false; + } + + if (enablePrintCallNoReturn) { + console.log(`isCallNoReturn@${flowNode.id} Pre depth ${noReturnAnalysisDepth}`); + } + + // See if this information is cached already. + if (callIsNoReturnCache.has(node.id)) { + const result = callIsNoReturnCache.get(node.id); + + if (enablePrintCallNoReturn) { + console.log(`isCallNoReturn@${flowNode.id} Post: ${result ? 'true' : 'false'} (cached)`); + } + + return result; + } + + // See if we've exceeded the max recursion depth. + if (noReturnAnalysisDepth > maxTypeRecursionCount) { + return false; + } + + // Don't attempt to evaluate a lambda call. We need to evaluate these in the + // context of its arguments. + if (node.d.leftExpr.nodeType === ParseNodeType.Lambda) { + return false; + } + + // Initially set to false to avoid recursion. + callIsNoReturnCache.set(node.id, false); + + noReturnAnalysisDepth++; + + try { + let noReturnTypeCount = 0; + let subtypeCount = 0; + + // Evaluate the call base type. 
+ const callTypeResult = evaluator.getTypeOfExpression(node.d.leftExpr, EvalFlags.CallBaseDefaults); + const callType = callTypeResult.type; + + doForEachSubtype(callType, (callSubtype) => { + // Track the number of subtypes we've examined. + subtypeCount++; + + if (isInstantiableClass(callSubtype)) { + // Does the class have a custom metaclass that implements a `__call__` method? + // If so, it will be called instead of `__init__` or `__new__`. We'll assume + // in this case that the __call__ method is not a NoReturn type. + const metaclassCallResult = getBoundCallMethod(evaluator, node, callSubtype); + if (metaclassCallResult) { + return; + } + + const newMethodResult = getBoundNewMethod(evaluator, node, callSubtype); + if (newMethodResult) { + if (isFunctionOrOverloaded(newMethodResult.type)) { + callSubtype = newMethodResult.type; + } + } + } else if (isClassInstance(callSubtype)) { + const callMethodType = evaluator.getBoundMagicMethod(callSubtype, '__call__'); + + if (callMethodType) { + callSubtype = callMethodType; + } + } + + const isCallAwaited = node.parent?.nodeType === ParseNodeType.Await; + if (isFunction(callSubtype)) { + if (isFunctionNoReturn(callSubtype, isCallAwaited)) { + noReturnTypeCount++; + } + } else if (isOverloaded(callSubtype)) { + let overloadCount = 0; + let noReturnOverloadCount = 0; + + OverloadedType.getOverloads(callSubtype).forEach((overload) => { + overloadCount++; + + if (isFunctionNoReturn(overload, isCallAwaited)) { + noReturnOverloadCount++; + } + }); + + // Was at least one of the overloaded return types NoReturn? + if (noReturnOverloadCount > 0) { + // Do all of the overloads return NoReturn? + if (noReturnOverloadCount === overloadCount) { + noReturnTypeCount++; + } else { + // Perform a more complete evaluation to determine whether + // the applicable overload returns a NoReturn. 
+ const callResult = evaluator.validateOverloadedArgTypes( + node, + node.d.args.map((arg) => evaluator.convertNodeToArg(arg)), + { type: callSubtype, isIncomplete: callTypeResult.isIncomplete }, + /* constraints */ undefined, + /* skipUnknownArgCheck */ false, + /* inferenceContext */ undefined + ); + + if (callResult.returnType && isNever(callResult.returnType)) { + noReturnTypeCount++; + } + } + } + } + }); + + // The call is considered NoReturn if all subtypes evaluate to NoReturn. + const callIsNoReturn = subtypeCount > 0 && noReturnTypeCount === subtypeCount; + + // Cache the value for next time. + callIsNoReturnCache.set(node.id, callIsNoReturn); + + if (enablePrintCallNoReturn) { + console.log(`isCallNoReturn@${flowNode.id} Post: ${callIsNoReturn ? 'true' : 'false'}`); + } + + return callIsNoReturn; + } finally { + noReturnAnalysisDepth--; + } + } + + function isFunctionNoReturn(functionType: FunctionType, isCallAwaited: boolean) { + const returnType = FunctionType.getEffectiveReturnType(functionType, /* includeInferred */ false); + if (returnType) { + if ( + isClassInstance(returnType) && + ClassType.isBuiltIn(returnType, ['Coroutine', 'CoroutineType']) && + returnType.priv.typeArgs && + returnType.priv.typeArgs.length >= 3 + ) { + if (isNever(returnType.priv.typeArgs[2]) && isCallAwaited) { + return true; + } + } + + return isNever(returnType); + } else if (!inferNoReturnForUnannotatedFunctions) { + return false; + } else if (functionType.shared.declaration) { + // If the function is a generator (i.e. it has yield statements) + // then it is not a "no return" call. Also, don't infer a "no + // return" type for abstract methods. 
+ if ( + !functionType.shared.declaration.isGenerator && + !FunctionType.isAbstractMethod(functionType) && + !FunctionType.isStubDefinition(functionType) && + !FunctionType.isPyTypedDefinition(functionType) + ) { + // Check specifically for a common idiom where the only statement + // (other than a possible docstring) is a "raise NotImplementedError". + const functionStatements = functionType.shared.declaration.node.d.suite.d.statements; + + let foundRaiseNotImplemented = false; + for (const statement of functionStatements) { + if (statement.nodeType !== ParseNodeType.StatementList || statement.d.statements.length !== 1) { + break; + } + + const simpleStatement = statement.d.statements[0]; + if (simpleStatement.nodeType === ParseNodeType.StringList) { + continue; + } + + if (simpleStatement.nodeType === ParseNodeType.Raise && simpleStatement.d.expr) { + // Check for a raising about 'NotImplementedError' or a subtype thereof. + const exceptionType = evaluator.getType(simpleStatement.d.expr); + + if ( + exceptionType && + isClass(exceptionType) && + derivesFromStdlibClass(exceptionType, 'NotImplementedError') + ) { + foundRaiseNotImplemented = true; + } + } + + break; + } + + if (!foundRaiseNotImplemented && !isAfterNodeReachable(evaluator, functionType)) { + return true; + } + } + } + + return false; + } + + function isAfterNodeReachable(evaluator: TypeEvaluator, functionType: FunctionType) { + if (!functionType.shared.declaration) { + return true; + } + + return evaluator.isAfterNodeReachable(functionType.shared.declaration.node); + } + + // Performs a cursory analysis to determine whether the expression + // corresponds to a context manager object that supports the swallowing + // of exceptions. By convention, these objects have an "__exit__" method + // that returns a bool response (as opposed to a None). This function is + // called during code flow, so it can't rely on full type evaluation. It + // makes some simplifying assumptions that work in most cases. 
+ function isExceptionContextManager(evaluator: TypeEvaluator, node: ExpressionNode, isAsync: boolean) { + // See if this information is cached already. + if (isExceptionContextManagerCache.has(node.id)) { + return isExceptionContextManagerCache.get(node.id); + } + + // Initially set to false to avoid infinite recursion. + isExceptionContextManagerCache.set(node.id, false); + + // See if we've exceeded the max recursion depth. + if (contextManagerAnalysisDepth > maxTypeRecursionCount) { + return false; + } + + contextManagerAnalysisDepth++; + let cmSwallowsExceptions = false; + + try { + const cmType = evaluator.getTypeOfExpression(node).type; + + if (cmType && isClassInstance(cmType)) { + const exitMethodName = isAsync ? '__aexit__' : '__exit__'; + const exitType = evaluator.getBoundMagicMethod(cmType, exitMethodName); + + if (exitType && isFunction(exitType) && exitType.shared.declaredReturnType) { + let returnType = exitType.shared.declaredReturnType; + + // If it's an __aexit__ method, its return type will typically be wrapped + // in a Coroutine, so we need to extract the return type from the third + // type argument. + if (isAsync) { + if ( + isClassInstance(returnType) && + ClassType.isBuiltIn(returnType, ['Coroutine', 'CoroutineType']) && + returnType.priv.typeArgs && + returnType.priv.typeArgs.length >= 3 + ) { + returnType = returnType.priv.typeArgs[2]; + } + } + + cmSwallowsExceptions = false; + if (isClassInstance(returnType) && ClassType.isBuiltIn(returnType, 'bool')) { + if (returnType.priv.literalValue === undefined || returnType.priv.literalValue === true) { + cmSwallowsExceptions = true; + } + } + } + } + } finally { + contextManagerAnalysisDepth--; + } + + // Cache the value for next time. 
+ isExceptionContextManagerCache.set(node.id, cmSwallowsExceptions); + + return cmSwallowsExceptions; + } + + function getTypeFromWildcardImport(flowNode: FlowWildcardImport, name: string): Type { + const importInfo = getImportInfo(flowNode.node.d.module); + assert(importInfo !== undefined && importInfo.isImportFound); + assert(flowNode.node.d.isWildcardImport); + + const symbolWithScope = evaluator.lookUpSymbolRecursive(flowNode.node, name, /* honorCodeFlow */ false); + assert(symbolWithScope !== undefined); + const decls = symbolWithScope!.symbol.getDeclarations(); + const wildcardDecl = decls.find((decl) => decl.node === flowNode.node); + + if (!wildcardDecl) { + return UnknownType.create(); + } + + return evaluator.getInferredTypeOfDeclaration(symbolWithScope!.symbol, wildcardDecl) || UnknownType.create(); + } + + function printControlFlowGraph( + flowNode: FlowNode, + reference: CodeFlowReferenceExpressionNode | undefined, + callName: string, + logger: ConsoleInterface = console + ) { + let referenceText = ''; + if (reference) { + const fileInfo = getFileInfo(reference); + const pos = convertOffsetToPosition(reference.start, fileInfo.lines); + referenceText = `${printExpression(reference)}[${pos.line + 1}:${pos.character + 1}]`; + } + + logger.log(`${callName}@${flowNode.id}: ${referenceText || '(none)'}`); + logger.log(formatControlFlowGraph(flowNode)); + } + + return { + createCodeFlowAnalyzer, + getFlowNodeReachability, + narrowConstrainedTypeVar, + printControlFlowGraph, + }; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/codeFlowTypes.ts b/python-parser/packages/pyright-internal/src/analyzer/codeFlowTypes.ts new file mode 100644 index 00000000..7b1c4266 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/codeFlowTypes.ts @@ -0,0 +1,285 @@ +/* + * codeFlowTypes.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * Author: Eric Traut + * + * Data structures that track the code flow (or more accurately, + * the inverse of code flow) starting with return statements and + * working back to the entry. This allows us to work out the + * types at each point of the code flow. + * + * This is largely based on the code flow engine in the + * TypeScript compiler. + */ + +import { assert, fail } from '../common/debug'; +import { + ArgCategory, + AssignmentExpressionNode, + CallNode, + CaseNode, + ExpressionNode, + ImportFromNode, + IndexNode, + MatchNode, + MemberAccessNode, + NameNode, + NumberNode, + ParseNodeType, + StringNode, + SuiteNode, +} from '../parser/parseNodes'; +import { OperatorType } from '../parser/tokenizerTypes'; + +export enum FlowFlags { + UnreachableStructural = 1 << 0, // Code that is structurally unreachable (e.g. following a return statement) + UnreachableStaticCondition = 1 << 1, // code that is unreachable due to a condition that the binder evaluates to False + Start = 1 << 2, // Entry point + BranchLabel = 1 << 3, // Junction for forward control flow + LoopLabel = 1 << 4, // Junction for backward control flow + Assignment = 1 << 5, // Assignment statement + Unbind = 1 << 6, // Used with assignment to indicate target should be unbound + WildcardImport = 1 << 7, // For "from X import *" statements + TrueCondition = 1 << 8, // Condition known to be true + FalseCondition = 1 << 9, // Condition known to be false + Call = 1 << 10, // Call node + PreFinallyGate = 1 << 11, // Injected edge that links pre-finally label and pre-try flow + PostFinally = 1 << 12, // Injected edge that links post-finally flow with the rest of the graph + VariableAnnotation = 1 << 14, // Separates a variable annotation from its name node + PostContextManager = 1 << 15, // Label that's used for context managers that suppress exceptions + TrueNeverCondition = 1 << 16, // Condition whose type evaluates to never when narrowed in positive test + FalseNeverCondition = 1 << 17, // Condition 
whose type evaluates to never when narrowed in negative test + NarrowForPattern = 1 << 18, // Narrow the type of the subject expression within a case statement + ExhaustedMatch = 1 << 19, // Control flow gate that is closed when match is provably exhaustive +} + +let _nextFlowNodeId = 1; + +export type CodeFlowReferenceExpressionNode = NameNode | MemberAccessNode | IndexNode | AssignmentExpressionNode; + +export function getUniqueFlowNodeId() { + return _nextFlowNodeId++; +} + +export interface FlowNode { + flags: FlowFlags; + id: number; +} + +// FlowLabel represents a junction with multiple possible +// preceding control flows. +export interface FlowLabel extends FlowNode { + antecedents: FlowNode[]; + + // Set of all expressions that require code flow analysis + // through the loop or in branch paths to determine their types. + // If an expression is not within this map, branch or loop analysis + // can be skipped and determined from the first antecedent only. + affectedExpressions: Set | undefined; +} + +export interface FlowBranchLabel extends FlowLabel { + // If specified, this label represents a flow node that precedes + // (i.e. is higher up in the control flow graph) than all of + // the antecedents of this branch label. If an expression is + // not affected by the branch label, the entire flow node can be + // skipped, and processing can proceed at this label. + preBranchAntecedent: FlowNode | undefined; +} + +// FlowAssignment represents a node that assigns a value. +export interface FlowAssignment extends FlowNode { + node: CodeFlowReferenceExpressionNode; + antecedent: FlowNode; + targetSymbolId: number; +} + +// FlowVariableAnnotation separates a variable annotation +// node from its type annotation. For example, the declaration +// "foo: bar", the "bar" needs to be associated with a flow +// node that precedes the "foo". This is important if the +// same name is used for both (e.g. 
"foo: foo") and we need +// to determine that the annotation refers to a symbol within +// an outer scope. +export interface FlowVariableAnnotation extends FlowNode { + antecedent: FlowNode; +} + +// Similar to FlowAssignment but used specifically for +// wildcard "from X import *" statements. +export interface FlowWildcardImport extends FlowNode { + node: ImportFromNode; + names: string[]; + antecedent: FlowNode; +} + +// FlowCondition represents a condition that is known to +// be true or false at the node's location in the control flow. +export interface FlowCondition extends FlowNode { + expression: ExpressionNode; + reference?: NameNode | undefined; + antecedent: FlowNode; +} + +export interface FlowNarrowForPattern extends FlowNode { + subjectExpression: ExpressionNode; + statement: CaseNode | MatchNode; + antecedent: FlowNode; +} + +// FlowExhaustedMatch represents a control flow gate that is "closed" +// if a match statement can be statically proven to exhaust all cases +// (i.e. the narrowed type of the subject expression is Never at the bottom). +export interface FlowExhaustedMatch extends FlowNode { + node: MatchNode; + subjectExpression: ExpressionNode; + antecedent: FlowNode; +} + +// Records a call, which may raise exceptions, thus affecting +// the code flow and making subsequent code unreachable. +export interface FlowCall extends FlowNode { + node: CallNode; + antecedent: FlowNode; +} + +// See comment in the visitTry method in binder.ts for a full +// explanation of the FlowPreFinally and FlowPostFinally nodes. 
+export interface FlowPreFinallyGate extends FlowNode { + antecedent: FlowNode; +} + +export interface FlowPostFinally extends FlowNode { + antecedent: FlowNode; + finallyNode: SuiteNode; + preFinallyGate: FlowPreFinallyGate; +} + +export interface FlowPostContextManagerLabel extends FlowLabel { + expressions: ExpressionNode[]; + isAsync: boolean; + + // If the context manager swallows exceptions and this value + // is true, block any code flow analysis along this path. Conversely, + // if the context manager does not swallow exceptions and this + // value is false, block any code flow analysis along this path. + blockIfSwallowsExceptions: boolean; +} + +export function isCodeFlowSupportedForReference( + reference: ExpressionNode +): reference is CodeFlowReferenceExpressionNode { + if (reference.nodeType === ParseNodeType.Name) { + return true; + } + + if (reference.nodeType === ParseNodeType.MemberAccess) { + return isCodeFlowSupportedForReference(reference.d.leftExpr); + } + + if (reference.nodeType === ParseNodeType.AssignmentExpression) { + return true; + } + + if (reference.nodeType === ParseNodeType.Index) { + // Allow index expressions that have a single subscript that is a + // literal integer or string value. 
+ if ( + reference.d.items.length !== 1 || + reference.d.trailingComma || + reference.d.items[0].d.name !== undefined || + reference.d.items[0].d.argCategory !== ArgCategory.Simple + ) { + return false; + } + + const subscriptNode = reference.d.items[0].d.valueExpr; + const isIntegerIndex = + subscriptNode.nodeType === ParseNodeType.Number && + !subscriptNode.d.isImaginary && + subscriptNode.d.isInteger; + const isNegativeIntegerIndex = + subscriptNode.nodeType === ParseNodeType.UnaryOperation && + subscriptNode.d.operator === OperatorType.Subtract && + subscriptNode.d.expr.nodeType === ParseNodeType.Number && + !subscriptNode.d.expr.d.isImaginary && + subscriptNode.d.expr.d.isInteger; + const isStringIndex = + subscriptNode.nodeType === ParseNodeType.StringList && + subscriptNode.d.strings.length === 1 && + subscriptNode.d.strings[0].nodeType === ParseNodeType.String; + + if (!isIntegerIndex && !isNegativeIntegerIndex && !isStringIndex) { + return false; + } + + return isCodeFlowSupportedForReference(reference.d.leftExpr); + } + + return false; +} + +export function createKeyForReference(reference: CodeFlowReferenceExpressionNode): string { + let key; + if (reference.nodeType === ParseNodeType.Name) { + key = reference.d.value; + } else if (reference.nodeType === ParseNodeType.AssignmentExpression) { + key = reference.d.name.d.value; + } else if (reference.nodeType === ParseNodeType.MemberAccess) { + const leftKey = createKeyForReference(reference.d.leftExpr as CodeFlowReferenceExpressionNode); + key = `${leftKey}.${reference.d.member.d.value}`; + } else if (reference.nodeType === ParseNodeType.Index) { + const leftKey = createKeyForReference(reference.d.leftExpr as CodeFlowReferenceExpressionNode); + assert(reference.d.items.length === 1); + const expr = reference.d.items[0].d.valueExpr; + if (expr.nodeType === ParseNodeType.Number) { + key = `${leftKey}[${(expr as NumberNode).d.value.toString()}]`; + } else if (expr.nodeType === ParseNodeType.StringList) { + 
const valExpr = expr; + assert(valExpr.d.strings.length === 1 && valExpr.d.strings[0].nodeType === ParseNodeType.String); + key = `${leftKey}["${(valExpr.d.strings[0] as StringNode).d.value}"]`; + } else if ( + expr.nodeType === ParseNodeType.UnaryOperation && + expr.d.operator === OperatorType.Subtract && + expr.d.expr.nodeType === ParseNodeType.Number + ) { + key = `${leftKey}[-${(expr.d.expr as NumberNode).d.value.toString()}]`; + } else { + fail('createKeyForReference received unexpected index type'); + } + } else { + fail('createKeyForReference received unexpected expression type'); + } + + return key; +} + +export function createKeysForReferenceSubexpressions(reference: CodeFlowReferenceExpressionNode): string[] { + if (reference.nodeType === ParseNodeType.Name) { + return [createKeyForReference(reference)]; + } + + if (reference.nodeType === ParseNodeType.AssignmentExpression) { + return [createKeyForReference(reference.d.name)]; + } + + if (reference.nodeType === ParseNodeType.MemberAccess) { + return [ + ...createKeysForReferenceSubexpressions(reference.d.leftExpr as CodeFlowReferenceExpressionNode), + createKeyForReference(reference), + ]; + } + + if (reference.nodeType === ParseNodeType.Index) { + return [ + ...createKeysForReferenceSubexpressions(reference.d.leftExpr as CodeFlowReferenceExpressionNode), + createKeyForReference(reference), + ]; + } + + fail('createKeyForReference received unexpected expression type'); +} + +// A reference key that corresponds to a wildcard import. +export const wildcardImportReferenceKey = '*'; diff --git a/python-parser/packages/pyright-internal/src/analyzer/codeFlowUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/codeFlowUtils.ts new file mode 100644 index 00000000..4cbf5660 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/codeFlowUtils.ts @@ -0,0 +1,445 @@ +/* + * codeFlowUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * Author: Eric Traut + * + * Utility functions that operate on code flow nodes and graphs. + */ + +import { convertOffsetToPosition } from '../common/positionUtils'; +import { ParseNode } from '../parser/parseNodes'; +import { getFileInfo } from './analyzerNodeInfo'; +import { + FlowAssignment, + FlowCall, + FlowCondition, + FlowExhaustedMatch, + FlowFlags, + FlowLabel, + FlowNarrowForPattern, + FlowNode, + FlowPostFinally, + FlowPreFinallyGate, + FlowVariableAnnotation, + FlowWildcardImport, +} from './codeFlowTypes'; + +export function formatControlFlowGraph(flowNode: FlowNode) { + const enum BoxCharacter { + lr = '─', + ud = '│', + dr = '╭', + dl = '╮', + ul = '╯', + ur = '╰', + udr = '├', + udl = '┤', + dlr = '┬', + ulr = '┴', + udlr = '╫', + } + + const enum Connection { + None = 0, + Up = 1 << 0, + Down = 1 << 1, + Left = 1 << 2, + Right = 1 << 3, + + UpDown = Up | Down, + LeftRight = Left | Right, + UpLeft = Up | Left, + UpRight = Up | Right, + DownLeft = Down | Left, + DownRight = Down | Right, + UpDownLeft = UpDown | Left, + UpDownRight = UpDown | Right, + UpLeftRight = Up | LeftRight, + DownLeftRight = Down | LeftRight, + UpDownLeftRight = UpDown | LeftRight, + + NoChildren = 1 << 4, + } + + interface FlowGraphNode { + id: number; + flowNode: FlowNode; + edges: FlowGraphEdge[]; + text: string; + lane: number; + endLane: number; + level: number; + circular: boolean; + } + + interface FlowGraphEdge { + source: FlowGraphNode; + target: FlowGraphNode; + } + + const links: Record = Object.create(/* o */ null); + const nodes: FlowGraphNode[] = []; + const edges: FlowGraphEdge[] = []; + const root = buildGraphNode(flowNode, new Set()); + + for (const node of nodes) { + node.text = renderFlowNode(node.flowNode, node.circular); + computeLevel(node); + } + + const height = computeHeight(root); + const columnWidths = computeColumnWidths(height); + computeLanes(root, 0); + return renderGraph(); + + function getAntecedents(f: FlowNode): FlowNode[] { + if (f.flags & 
(FlowFlags.LoopLabel | FlowFlags.BranchLabel)) { + return (f as FlowLabel).antecedents; + } + + if ( + f.flags & + (FlowFlags.Assignment | + FlowFlags.VariableAnnotation | + FlowFlags.WildcardImport | + FlowFlags.TrueCondition | + FlowFlags.FalseCondition | + FlowFlags.TrueNeverCondition | + FlowFlags.FalseNeverCondition | + FlowFlags.NarrowForPattern | + FlowFlags.ExhaustedMatch | + FlowFlags.Call | + FlowFlags.PreFinallyGate | + FlowFlags.PostFinally) + ) { + const typedFlowNode = f as + | FlowAssignment + | FlowVariableAnnotation + | FlowWildcardImport + | FlowCondition + | FlowExhaustedMatch + | FlowCall + | FlowPreFinallyGate + | FlowPostFinally; + return [typedFlowNode.antecedent]; + } + + return []; + } + + function getChildren(node: FlowGraphNode) { + const children: FlowGraphNode[] = []; + for (const edge of node.edges) { + if (edge.source === node) { + children.push(edge.target); + } + } + return children; + } + + function getParents(node: FlowGraphNode) { + const parents: FlowGraphNode[] = []; + for (const edge of node.edges) { + if (edge.target === node) { + parents.push(edge.source); + } + } + return parents; + } + + function buildGraphNode(flowNode: FlowNode, seen: Set): FlowGraphNode { + const id = flowNode.id; + let graphNode = links[id]; + + if (graphNode && seen.has(flowNode)) { + graphNode = { + id: -1, + flowNode, + edges: [], + text: '', + lane: -1, + endLane: -1, + level: -1, + circular: true, + }; + nodes.push(graphNode); + return graphNode; + } + seen.add(flowNode); + + if (!graphNode) { + links[id] = graphNode = { + id, + flowNode, + edges: [], + text: '', + lane: -1, + endLane: -1, + level: -1, + circular: false, + }; + + nodes.push(graphNode); + + const antecedents = getAntecedents(flowNode); + for (const antecedent of antecedents) { + buildGraphEdge(graphNode, antecedent, seen); + } + } + + seen.delete(flowNode); + return graphNode; + } + + function buildGraphEdge(source: FlowGraphNode, antecedent: FlowNode, seen: Set) { + const target = 
buildGraphNode(antecedent, seen); + const edge: FlowGraphEdge = { source, target }; + edges.push(edge); + source.edges.push(edge); + target.edges.push(edge); + } + + function computeLevel(node: FlowGraphNode): number { + if (node.level !== -1) { + return node.level; + } + let level = 0; + for (const parent of getParents(node)) { + level = Math.max(level, computeLevel(parent) + 1); + } + return (node.level = level); + } + + function computeHeight(node: FlowGraphNode): number { + let height = 0; + for (const child of getChildren(node)) { + height = Math.max(height, computeHeight(child)); + } + return height + 1; + } + + function computeColumnWidths(height: number) { + const columns: number[] = fill(Array(height), 0); + for (const node of nodes) { + columns[node.level] = Math.max(columns[node.level], node.text.length); + } + return columns; + } + + function computeLanes(node: FlowGraphNode, lane: number) { + if (node.lane === -1) { + node.lane = lane; + node.endLane = lane; + const children = getChildren(node); + for (let i = 0; i < children.length; i++) { + if (i > 0) lane++; + const child = children[i]; + computeLanes(child, lane); + if (child.endLane > node.endLane) { + lane = child.endLane; + } + } + node.endLane = lane; + } + } + + function getHeader(flags: FlowFlags) { + if (flags & FlowFlags.Start) return 'Start'; + if (flags & FlowFlags.BranchLabel) return 'Branch'; + if (flags & FlowFlags.LoopLabel) return 'Loop'; + if (flags & FlowFlags.Unbind) return 'Unbind'; + if (flags & FlowFlags.Assignment) return 'Assign'; + if (flags & FlowFlags.TrueCondition) return 'True'; + if (flags & FlowFlags.FalseCondition) return 'False'; + if (flags & FlowFlags.Call) return 'Call'; + if (flags & FlowFlags.UnreachableStaticCondition) return 'UnreachableStaticCondition'; + if (flags & FlowFlags.UnreachableStructural) return 'UnreachableStructural'; + if (flags & FlowFlags.WildcardImport) return 'Wildcard'; + if (flags & FlowFlags.PreFinallyGate) return 'PreFinal'; + if (flags 
& FlowFlags.PostFinally) return 'PostFinal'; + if (flags & FlowFlags.VariableAnnotation) return 'Annotate'; + if (flags & FlowFlags.TrueNeverCondition) return 'TrueNever'; + if (flags & FlowFlags.FalseNeverCondition) return 'FalseNever'; + if (flags & FlowFlags.NarrowForPattern) return 'Pattern'; + if (flags & FlowFlags.ExhaustedMatch) return 'Exhaust'; + throw new Error(); + } + + function getParseNode(f: FlowNode): ParseNode | undefined { + if (f.flags & FlowFlags.Assignment) { + return (f as FlowAssignment).node; + } + + if (f.flags & FlowFlags.WildcardImport) { + return (f as FlowWildcardImport).node; + } + + if (f.flags & (FlowFlags.TrueCondition | FlowFlags.FalseCondition)) { + return (f as FlowCondition).expression; + } + + if (f.flags & FlowFlags.NarrowForPattern) { + return (f as FlowNarrowForPattern).statement; + } + + if (f.flags & FlowFlags.Call) { + return (f as FlowCall).node; + } + + return undefined; + } + + function getNodeText(f: FlowNode): string | undefined { + const parseNode = getParseNode(f); + + if (!parseNode) { + return undefined; + } + + const fileInfo = getFileInfo(parseNode); + const startPos = convertOffsetToPosition(parseNode.start, fileInfo.lines); + + return `[${startPos.line + 1}:${startPos.character + 1}]`; + } + + function renderFlowNode(flowNode: FlowNode, circular: boolean) { + const text = `${getHeader(flowNode.flags)}@${flowNode.id}${getNodeText(flowNode) || ''}`; + return circular ? `Circular(${text})` : text; + } + + function renderGraph() { + const columnCount = columnWidths.length; + const laneCount = nodes.reduce((x, n) => Math.max(x, n.lane), 0) + 1; + const lanes: string[] = fill(Array(laneCount), ''); + const grid: (FlowGraphNode | undefined)[][] = columnWidths.map(() => Array(laneCount)); + const connectors: Connection[][] = columnWidths.map(() => fill(Array(laneCount), 0)); + + // Build connectors. 
+ for (const node of nodes) { + grid[node.level][node.lane] = node; + const children = getChildren(node); + for (let i = 0; i < children.length; i++) { + const child = children[i]; + let connector: Connection = Connection.Right; + if (child.lane === node.lane) connector |= Connection.Left; + if (i > 0) connector |= Connection.Up; + if (i < children.length - 1) connector |= Connection.Down; + connectors[node.level][child.lane] |= connector; + } + if (children.length === 0) { + connectors[node.level][node.lane] |= Connection.NoChildren; + } + const parents = getParents(node); + for (let i = 0; i < parents.length; i++) { + const parent = parents[i]; + let connector: Connection = Connection.Left; + if (i > 0) connector |= Connection.Up; + if (i < parents.length - 1) connector |= Connection.Down; + connectors[node.level - 1][parent.lane] |= connector; + } + } + + // Fill in missing connectors. + for (let column = 0; column < columnCount; column++) { + for (let lane = 0; lane < laneCount; lane++) { + const left = column > 0 ? connectors[column - 1][lane] : 0; + const above = lane > 0 ? connectors[column][lane - 1] : 0; + let connector = connectors[column][lane]; + if (!connector) { + connector = Connection.None; + + if (left & Connection.Right) { + connector |= Connection.LeftRight; + } + if (above & Connection.Down) { + connector |= Connection.UpDown; + } + connectors[column][lane] = connector; + } + } + } + + for (let column = 0; column < columnCount; column++) { + for (let lane = 0; lane < lanes.length; lane++) { + const connector = connectors[column][lane]; + const fill = connector & Connection.Left ? 
BoxCharacter.lr : ' '; + const node = grid[column][lane]; + if (!node) { + if (column < columnCount - 1) { + writeLane(lane, repeat(fill, columnWidths[column] + 1)); + } + } else { + writeLane(lane, node.text); + if (column < columnCount - 1) { + writeLane(lane, ' '); + writeLane(lane, repeat(fill, columnWidths[column] - node.text.length)); + } + } + writeLane(lane, getBoxCharacter(connector)); + writeLane( + lane, + connector & Connection.Right && column < columnCount - 1 && !grid[column + 1][lane] + ? BoxCharacter.lr + : ' ' + ); + } + } + + return `${lanes.join('\n')}\n`; + + function writeLane(lane: number, text: string) { + lanes[lane] += text; + } + } + + function getBoxCharacter(connector: Connection) { + switch (connector) { + case Connection.UpDown: + return BoxCharacter.ud; + case Connection.LeftRight: + return BoxCharacter.lr; + case Connection.UpLeft: + return BoxCharacter.ul; + case Connection.UpRight: + return BoxCharacter.ur; + case Connection.DownLeft: + return BoxCharacter.dl; + case Connection.DownRight: + return BoxCharacter.dr; + case Connection.UpDownLeft: + return BoxCharacter.udl; + case Connection.UpDownRight: + return BoxCharacter.udr; + case Connection.UpLeftRight: + return BoxCharacter.ulr; + case Connection.DownLeftRight: + return BoxCharacter.dlr; + case Connection.UpDownLeftRight: + return BoxCharacter.udlr; + } + return ' '; + } + + function fill(array: T[], value: T) { + if (array.fill) { + array.fill(value); + } else { + for (let i = 0; i < array.length; i++) { + array[i] = value; + } + } + return array; + } + + function repeat(ch: string, length: number) { + if (ch.repeat) { + return length > 0 ? 
ch.repeat(length) : ''; + } + let s = ''; + while (s.length < length) { + s += ch; + } + return s; + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/commentUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/commentUtils.ts new file mode 100644 index 00000000..6f1ea3c0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/commentUtils.ts @@ -0,0 +1,317 @@ +/* + * commentUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Utility functions that parse comments and extract commands + * or other directives from them. + */ + +import { + cloneDiagnosticRuleSet, + DiagnosticLevel, + DiagnosticRuleSet, + getBasicDiagnosticRuleSet, + getBooleanDiagnosticRules, + getDiagLevelDiagnosticRules, + getStandardDiagnosticRuleSet, + getStrictDiagnosticRuleSet, + getStrictModeNotOverriddenRules, +} from '../common/configOptions'; +import { assert } from '../common/debug'; +import { DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { convertOffsetToPosition } from '../common/positionUtils'; +import { TextRange } from '../common/textRange'; +import { TextRangeCollection } from '../common/textRangeCollection'; +import { LocAddendum, LocMessage } from '../localization/localize'; +import { Token } from '../parser/tokenizerTypes'; + +const strictSetting = 'strict'; +const standardSetting = 'standard'; +const basicSetting = 'basic'; + +export interface CommentDiagnostic { + message: string; + range: TextRange; +} + +export function getFileLevelDirectives( + tokens: TextRangeCollection, + lines: TextRangeCollection, + defaultRuleSet: DiagnosticRuleSet, + useStrict: boolean, + diagnostics: CommentDiagnostic[] +): DiagnosticRuleSet { + let ruleSet = cloneDiagnosticRuleSet(defaultRuleSet); + let isModified = false; + + if (useStrict) { + _applyStrictRules(ruleSet); + isModified = true; + } + + for (let i = 0; 
i < tokens.count; i++) { + const token = tokens.getItemAt(i); + if (token.comments) { + for (const comment of token.comments) { + const [value, textRange] = _trimTextWithRange(comment.value, { + start: comment.start, + length: comment.length, + }); + + const isCommentOnOwnLine = (): boolean => { + const curTokenLineOffset = convertOffsetToPosition(comment.start, lines).character; + return curTokenLineOffset <= 1; + }; + + ruleSet = _parsePyrightComment(value, textRange, isCommentOnOwnLine, ruleSet, diagnostics); + isModified = true; + } + } + } + + // If we didn't make any modifications, use the default rule set to save memory. + return isModified ? ruleSet : defaultRuleSet; +} + +function _applyStrictRules(ruleSet: DiagnosticRuleSet) { + _overrideRules(ruleSet, getStrictDiagnosticRuleSet(), getStrictModeNotOverriddenRules()); +} + +function _applyStandardRules(ruleSet: DiagnosticRuleSet) { + _overwriteRules(ruleSet, getStandardDiagnosticRuleSet()); +} + +function _applyBasicRules(ruleSet: DiagnosticRuleSet) { + _overwriteRules(ruleSet, getBasicDiagnosticRuleSet()); +} + +function _overrideRules( + ruleSet: DiagnosticRuleSet, + overrideRuleSet: DiagnosticRuleSet, + skipRuleNames: DiagnosticRule[] +) { + const boolRuleNames = getBooleanDiagnosticRules(); + const diagRuleNames = getDiagLevelDiagnosticRules(); + + // Enable the strict rules as appropriate. + for (const ruleName of boolRuleNames) { + if (skipRuleNames.find((r) => r === ruleName)) { + continue; + } + + if ((overrideRuleSet as any)[ruleName]) { + (ruleSet as any)[ruleName] = true; + } + } + + for (const ruleName of diagRuleNames) { + if (skipRuleNames.find((r) => r === ruleName)) { + continue; + } + + const overrideValue: DiagnosticLevel = (overrideRuleSet as any)[ruleName]; + const prevValue: DiagnosticLevel = (ruleSet as any)[ruleName]; + + // Override only if the new value is more strict than the existing value. 
+ if ( + overrideValue === 'error' || + (overrideValue === 'warning' && prevValue !== 'error') || + (overrideValue === 'information' && prevValue !== 'error' && prevValue !== 'warning') + ) { + (ruleSet as any)[ruleName] = overrideValue; + } + } +} + +function _overwriteRules(ruleSet: DiagnosticRuleSet, overrideRuleSet: DiagnosticRuleSet) { + const boolRuleNames = getBooleanDiagnosticRules(); + const diagRuleNames = getDiagLevelDiagnosticRules(); + + for (const ruleName of boolRuleNames) { + (ruleSet as any)[ruleName] = (overrideRuleSet as any)[ruleName]; + } + + for (const ruleName of diagRuleNames) { + (ruleSet as any)[ruleName] = (overrideRuleSet as any)[ruleName]; + } +} + +function _parsePyrightComment( + commentValue: string, + commentRange: TextRange, + isCommentOnOwnLine: () => boolean, + ruleSet: DiagnosticRuleSet, + diagnostics: CommentDiagnostic[] +) { + // Is this a pyright comment? + const commentPrefix = 'pyright:'; + if (commentValue.startsWith(commentPrefix)) { + const operands = commentValue.substring(commentPrefix.length); + + // Handle (actual ignore) "ignore" directives. + if (operands.trim().startsWith('ignore')) { + return ruleSet; + } + + if (!isCommentOnOwnLine()) { + const diagAddendum = new DiagnosticAddendum(); + diagAddendum.addMessage(LocAddendum.pyrightCommentIgnoreTip()); + const diag: CommentDiagnostic = { + message: LocMessage.pyrightCommentNotOnOwnLine() + diagAddendum.getString(), + range: commentRange, + }; + + diagnostics.push(diag); + } + + const operandList = operands.split(','); + + // If it contains a "strict" operand, replace the existing + // diagnostic rules with their strict counterparts. 
+ if (operandList.some((s) => s.trim() === strictSetting)) { + _applyStrictRules(ruleSet); + } else if (operandList.some((s) => s.trim() === standardSetting)) { + _applyStandardRules(ruleSet); + } else if (operandList.some((s) => s.trim() === basicSetting)) { + _applyBasicRules(ruleSet); + } + + let rangeOffset = 0; + for (const operand of operandList) { + const [trimmedOperand, operandRange] = _trimTextWithRange(operand, { + start: commentRange.start + commentPrefix.length + rangeOffset, + length: operand.length, + }); + + ruleSet = _parsePyrightOperand(trimmedOperand, operandRange, ruleSet, diagnostics); + rangeOffset += operand.length + 1; + } + } + + return ruleSet; +} + +function _parsePyrightOperand( + operand: string, + operandRange: TextRange, + ruleSet: DiagnosticRuleSet, + diagnostics: CommentDiagnostic[] +) { + const operandSplit = operand.split('='); + const [trimmedRule, ruleRange] = _trimTextWithRange(operandSplit[0], { + start: operandRange.start, + length: operandSplit[0].length, + }); + + // Handle basic directives "basic", "standard" and "strict". + if (operandSplit.length === 1) { + if (trimmedRule && [strictSetting, standardSetting, basicSetting].some((setting) => trimmedRule === setting)) { + return ruleSet; + } + } + + const diagLevelRules = getDiagLevelDiagnosticRules(); + const boolRules = getBooleanDiagnosticRules(); + + const ruleValue = operandSplit.length > 0 ? operandSplit.slice(1).join('=') : ''; + const [trimmedRuleValue, ruleValueRange] = _trimTextWithRange(ruleValue, { + start: operandRange.start + operandSplit[0].length + 1, + length: ruleValue.length, + }); + + if (diagLevelRules.find((r) => r === trimmedRule)) { + const diagLevelValue = _parseDiagLevel(trimmedRuleValue); + if (diagLevelValue !== undefined) { + (ruleSet as any)[trimmedRule] = diagLevelValue; + } else { + const diag: CommentDiagnostic = { + message: LocMessage.pyrightCommentInvalidDiagnosticSeverityValue(), + range: trimmedRuleValue ? 
ruleValueRange : ruleRange, + }; + diagnostics.push(diag); + } + } else if (boolRules.find((r) => r === trimmedRule)) { + const boolValue = _parseBoolSetting(trimmedRuleValue); + if (boolValue !== undefined) { + (ruleSet as any)[trimmedRule] = boolValue; + } else { + const diag: CommentDiagnostic = { + message: LocMessage.pyrightCommentInvalidDiagnosticBoolValue(), + range: trimmedRuleValue ? ruleValueRange : ruleRange, + }; + diagnostics.push(diag); + } + } else if (trimmedRule) { + const diag: CommentDiagnostic = { + message: trimmedRuleValue + ? LocMessage.pyrightCommentUnknownDiagnosticRule().format({ rule: trimmedRule }) + : LocMessage.pyrightCommentUnknownDirective().format({ directive: trimmedRule }), + range: ruleRange, + }; + diagnostics.push(diag); + } else { + const diag: CommentDiagnostic = { + message: LocMessage.pyrightCommentMissingDirective(), + range: ruleRange, + }; + diagnostics.push(diag); + } + + return ruleSet; +} + +function _parseDiagLevel(value: string): DiagnosticLevel | undefined { + switch (value) { + case 'false': + case 'none': + return 'none'; + + case 'true': + case 'error': + return 'error'; + + case 'warning': + return 'warning'; + + case 'information': + return 'information'; + + default: + return undefined; + } +} + +function _parseBoolSetting(value: string): boolean | undefined { + if (value === 'false') { + return false; + } else if (value === 'true') { + return true; + } + + return undefined; +} + +// Calls "trim" on the text and adjusts the corresponding range +// if characters are trimmed from the beginning or end. 
+function _trimTextWithRange(text: string, range: TextRange): [string, TextRange] { + assert(text.length === range.length); + const value1 = text.trimStart(); + + let updatedRange = range; + + if (value1 !== text) { + const delta = text.length - value1.length; + updatedRange = { start: updatedRange.start + delta, length: updatedRange.length - delta }; + } + + const value2 = value1.trimEnd(); + if (value2 !== value1) { + updatedRange = { start: updatedRange.start, length: updatedRange.length - value1.length + value2.length }; + } + + assert(value2.length === updatedRange.length); + return [value2, updatedRange]; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/constraintSolution.ts b/python-parser/packages/pyright-internal/src/analyzer/constraintSolution.ts new file mode 100644 index 00000000..bf3a38c7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/constraintSolution.ts @@ -0,0 +1,89 @@ +/* + * constraintSolution.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Data structure that holds one or more constraint solutions for a set + * of type variables. + */ + +import { assert } from '../common/debug'; +import { FunctionType, ParamSpecType, Type, TypeVarType } from './types'; + +// Records the types associated with a set of type variables. +export class ConstraintSolutionSet { + // Indexed by TypeVar ID. 
+ private _typeVarMap: Map; + + constructor() { + this._typeVarMap = new Map(); + } + + isEmpty() { + return this._typeVarMap.size === 0; + } + + getType(typeVar: ParamSpecType): FunctionType | undefined; + getType(typeVar: TypeVarType): Type | undefined; + getType(typeVar: TypeVarType): Type | undefined { + const key = TypeVarType.getNameWithScope(typeVar); + return this._typeVarMap.get(key); + } + + setType(typeVar: TypeVarType, type: Type | undefined) { + const key = TypeVarType.getNameWithScope(typeVar); + return this._typeVarMap.set(key, type); + } + + hasType(typeVar: TypeVarType): boolean { + const key = TypeVarType.getNameWithScope(typeVar); + return this._typeVarMap.has(key); + } + + doForEachTypeVar(callback: (type: Type, typeVarId: string) => void) { + this._typeVarMap.forEach((type, key) => { + if (type) { + callback(type, key); + } + }); + } +} + +export class ConstraintSolution { + private _solutionSets: ConstraintSolutionSet[]; + + constructor(solutionSets?: ConstraintSolutionSet[]) { + this._solutionSets = + solutionSets && solutionSets.length > 0 ? 
[...solutionSets] : [new ConstraintSolutionSet()]; + } + + isEmpty() { + return this._solutionSets.every((set) => set.isEmpty()); + } + + setType(typeVar: TypeVarType, type: Type) { + return this._solutionSets.forEach((set) => { + set.setType(typeVar, type); + }); + } + + getMainSolutionSet() { + return this.getSolutionSet(0); + } + + getSolutionSets() { + return this._solutionSets; + } + + doForEachSolutionSet(callback: (solutionSet: ConstraintSolutionSet, index: number) => void) { + this.getSolutionSets().forEach((set, index) => { + callback(set, index); + }); + } + + getSolutionSet(index: number) { + assert(index >= 0 && index < this._solutionSets.length); + return this._solutionSets[index]; + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/constraintSolver.ts b/python-parser/packages/pyright-internal/src/analyzer/constraintSolver.ts new file mode 100644 index 00000000..1b905426 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/constraintSolver.ts @@ -0,0 +1,1397 @@ +/* + * constraintSolver.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Code that solves a TypeVar, TypeVarTuple or ParamSpec based on + * all of the provided constraints. 
+ * + */ + +import { DiagnosticAddendum } from '../common/diagnostic'; +import { LocAddendum } from '../localization/localize'; +import { ConstraintSolution, ConstraintSolutionSet } from './constraintSolution'; +import { ConstraintSet, ConstraintTracker, TypeVarConstraints } from './constraintTracker'; +import { + AssignTypeFlags, + maxSubtypesForInferredType, + SolveConstraintsOptions, + TypeEvaluator, +} from './typeEvaluatorTypes'; +import { + ClassType, + combineTypes, + FunctionType, + isAny, + isAnyOrUnknown, + isClass, + isClassInstance, + isFunction, + isInstantiableClass, + isNever, + isParamSpec, + isTypeSame, + isTypeVar, + isTypeVarTuple, + isUnion, + isUnknown, + isUnpacked, + isUnpackedClass, + ParamSpecType, + TupleTypeArg, + Type, + TypeBase, + TypeVarKind, + TypeVarScopeId, + TypeVarType, + Variance, +} from './types'; +import { + addConditionToType, + applySolvedTypeVars, + buildSolutionFromSpecializedClass, + convertToInstance, + convertToInstantiable, + convertTypeToParamSpecValue, + getTypeCondition, + getTypeVarArgsRecursive, + getTypeVarScopeId, + isEffectivelyInstantiable, + isLiteralTypeOrUnion, + isPartlyUnknown, + makePacked, + makeUnpacked, + mapSubtypes, + simplifyFunctionToParamSpec, + sortTypes, + specializeTupleClass, + specializeWithDefaultTypeArgs, + stripTypeForm, + transformExpectedType, + transformPossibleRecursiveTypeAlias, +} from './typeUtils'; + +// As we widen the lower bound of a type variable, we may end up with +// many subtypes. For performance reasons, we need to cap this at some +// point. This constant determines the cap. +const maxSubtypeCountForTypeVarLowerBound = 64; + +// This debugging switch enables logging of the constraints before and +// after it is updated by the constraint solver. +const logConstraintsUpdates = false; + +// Assigns the source type to the dest type var in the type var context. 
If an existing +// type is already associated with that type var name, it attempts to either widen or +// narrow the type (depending on the value of the isContravariant parameter). The goal is +// to produce the narrowest type that meets all of the requirements. If the type var context +// has been "locked", it simply validates that the srcType is compatible (with no attempt +// to widen or narrow). +export function assignTypeVar( + evaluator: TypeEvaluator, + destType: TypeVarType, + srcType: Type, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + flags = AssignTypeFlags.Default, + recursionCount = 0 +): boolean { + let isAssignable: boolean; + + if (logConstraintsUpdates) { + const indent = ' '.repeat(recursionCount * 2); + console.log(`${indent}`); + console.log(`${indent}assignTypeVar called with`); + console.log(`${indent}destType: ${evaluator.printType(destType)}`); + console.log(`${indent}srcType: ${evaluator.printType(srcType)}`); + console.log(`${indent}flags: ${flags}`); + if (constraints) { + logConstraints(evaluator, constraints, indent); + } + } + + // If both src and dest types are packed, unpack them both. + if (isUnpacked(destType) && isUnpacked(srcType)) { + destType = TypeVarType.cloneForPacked(destType); + srcType = makePacked(srcType); + } + + // If the TypeVar doesn't have a scope ID, then it's being used + // outside of a valid TypeVar scope. This will be reported as a + // separate error. Just ignore this case to avoid redundant errors. + if (!destType.priv.scopeId) { + return true; + } + + if (TypeVarType.isBound(destType) && !TypeVarType.isUnification(destType)) { + return assignBoundTypeVar(evaluator, destType, srcType, diag, flags); + } + + // Handle type[T] as a dest and a special form as a source. 
+ if ( + TypeBase.isInstantiable(destType) && + isInstantiableClass(srcType) && + evaluator.isSpecialFormClass(srcType, flags) + ) { + return false; + } + + // An TypeVar can always be assigned to itself, but we won't record this in the constraints. + if (isTypeSame(destType, srcType)) { + return true; + } + + if (isParamSpec(destType)) { + // Handle ParamSpecs specially. + isAssignable = assignParamSpec(evaluator, destType, srcType, diag, constraints, recursionCount); + } else { + if (isTypeVarTuple(destType) && !destType.priv.isInUnion) { + if (destType.priv.isUnpacked) { + const tupleClassType = evaluator.getTupleClassType(); + + if (!isUnpacked(srcType) && tupleClassType) { + // Package up the type into a tuple. + srcType = convertToInstance( + specializeTupleClass( + tupleClassType, + [{ type: srcType, isUnbounded: false }], + /* isTypeArgExplicit */ true, + /* isUnpacked */ true + ) + ); + } + } else { + srcType = makeUnpacked(srcType); + } + } + + // If we're assigning an unpacked TypeVarTuple to a regular TypeVar, + // we need to treat it as a union of the unpacked TypeVarTuple. + if ( + isTypeVarTuple(srcType) && + srcType.priv.isUnpacked && + !srcType.priv.isInUnion && + !isTypeVarTuple(destType) + ) { + srcType = TypeVarType.cloneForUnpacked(srcType, /* isInUnion */ true); + } + + // Handle the constrained case. This case needs to be handled specially + // because type narrowing isn't used in this case. For example, if the + // source type is "Literal[1]" and the constraint list includes the type + // "float", the resulting type is float. 
+ if (TypeVarType.hasConstraints(destType)) { + isAssignable = assignConstrainedTypeVar( + evaluator, + destType, + srcType, + diag, + constraints, + flags, + recursionCount + ); + } else { + isAssignable = assignUnconstrainedTypeVar( + evaluator, + destType, + srcType, + diag, + constraints, + flags, + recursionCount + ); + } + } + + if (logConstraintsUpdates) { + const indent = ' '.repeat(recursionCount * 2); + console.log(`${indent}`); + if (constraints) { + logConstraints(evaluator, constraints, indent); + } + } + + return isAssignable; +} + +// Returns a solution for the type variables tracked by the constraint tracker. +export function solveConstraints( + evaluator: TypeEvaluator, + constraints: ConstraintTracker, + options?: SolveConstraintsOptions +): ConstraintSolution { + const solutionSets: ConstraintSolutionSet[] = []; + + constraints.doForEachConstraintSet((constraintSet) => { + const solutionSet = solveConstraintSet(evaluator, constraintSet, options); + solutionSets.push(solutionSet); + }); + + return new ConstraintSolution(solutionSets); +} + +// Applies solved TypeVars from one context to this context. +export function applySourceSolutionToConstraints(constraints: ConstraintTracker, srcSolution: ConstraintSolution) { + if (srcSolution.isEmpty()) { + return; + } + + constraints.doForEachConstraintSet((constraintSet) => { + constraintSet.getTypeVars().forEach((entry) => { + constraintSet.setBounds( + entry.typeVar, + entry.lowerBound ? applySolvedTypeVars(entry.lowerBound, srcSolution) : undefined, + entry.upperBound ? applySolvedTypeVars(entry.upperBound, srcSolution) : undefined, + entry.retainLiterals + ); + }); + }); +} + +export function solveConstraintSet( + evaluator: TypeEvaluator, + constraintSet: ConstraintSet, + options?: SolveConstraintsOptions +): ConstraintSolutionSet { + const solutionSet = new ConstraintSolutionSet(); + + // Solve the type variables. 
+ constraintSet.doForEachTypeVar((entry) => { + solveTypeVarRecursive(evaluator, constraintSet, options, solutionSet, entry); + }); + + return solutionSet; +} + +function solveTypeVarRecursive( + evaluator: TypeEvaluator, + constraintSet: ConstraintSet, + options: SolveConstraintsOptions | undefined, + solutionSet: ConstraintSolutionSet, + entry: TypeVarConstraints +): Type | undefined { + // If this TypeVar already has a solution, don't attempt to re-solve it. + if (solutionSet.hasType(entry.typeVar)) { + return solutionSet.getType(entry.typeVar); + } + + // Protect against infinite recursion by setting the initial value to undefined. + solutionSet.setType(entry.typeVar, undefined); + let value = getTypeVarType(evaluator, constraintSet, entry.typeVar, options?.useLowerBoundOnly); + + if (value) { + // Are there any unsolved TypeVars in this type? + const typeVars = getTypeVarArgsRecursive(value); + + if (typeVars.length > 0) { + const dependentSolution = new ConstraintSolution(); + + for (const typeVar of typeVars) { + // Don't attempt to replace a TypeVar with itself. + if (isTypeSame(typeVar, entry.typeVar, { ignoreTypeFlags: true })) { + continue; + } + + // Don't attempt to solve or replace bound TypeVars. + if (TypeVarType.isBound(typeVar)) { + continue; + } + + const dependentEntry = constraintSet.getTypeVar(typeVar); + if (!dependentEntry) { + continue; + } + + const dependentType = solveTypeVarRecursive( + evaluator, + constraintSet, + options, + solutionSet, + dependentEntry + ); + + if (dependentType) { + dependentSolution.setType(typeVar, dependentType); + } + } + + // Apply the dependent TypeVar values to the current TypeVar value. 
+ if (!dependentSolution.isEmpty()) { + value = applySolvedTypeVars(value, dependentSolution); + } + } + } + + solutionSet.setType(entry.typeVar, value); + return value; +} + +// In cases where the expected type is a specialized base class of the +// source type, we need to determine which type arguments in the derived +// class will make it compatible with the specialized base class. This method +// performs this reverse mapping of type arguments and populates the type var +// map for the target type. If the type is not assignable to the expected type, +// it returns false. +export function addConstraintsForExpectedType( + evaluator: TypeEvaluator, + type: ClassType, + expectedType: Type, + constraints: ConstraintTracker, + liveTypeVarScopes: TypeVarScopeId[] | undefined, + usageOffset: number | undefined = undefined +): boolean { + if (isAny(expectedType)) { + type.shared.typeParams.forEach((typeParam) => { + constraints.setBounds(typeParam, expectedType, expectedType); + }); + return true; + } + + if (isTypeVar(expectedType) && TypeVarType.isSelf(expectedType) && expectedType.shared.boundType) { + expectedType = expectedType.shared.boundType; + } + + if (!isClass(expectedType)) { + return false; + } + + // If the expected type is generic (but not specialized), we can't proceed. + const expectedTypeArgs = expectedType.priv.typeArgs; + if (!expectedTypeArgs) { + return evaluator.assignType( + type, + expectedType, + /* diag */ undefined, + constraints, + AssignTypeFlags.PopulateExpectedType + ); + } + + evaluator.inferVarianceForClass(type); + + // If the expected type is the same as the target type (commonly the case), + // we can use a faster method. 
+ if (ClassType.isSameGenericClass(expectedType, type)) { + const solution = buildSolutionFromSpecializedClass(expectedType); + const typeParams = ClassType.getTypeParams(expectedType); + typeParams.forEach((typeParam) => { + let typeArgValue = solution.getMainSolutionSet().getType(typeParam); + + if (typeArgValue && liveTypeVarScopes) { + typeArgValue = transformExpectedType(typeArgValue, liveTypeVarScopes, usageOffset); + } + + if (typeArgValue) { + const variance = TypeVarType.getVariance(typeParam); + + constraints.setBounds( + typeParam, + variance === Variance.Covariant ? undefined : typeArgValue, + variance === Variance.Contravariant ? undefined : typeArgValue + ); + } + }); + return true; + } + + // Create a generic version of the expected type. + const expectedTypeScopeId = getTypeVarScopeId(expectedType); + const synthExpectedTypeArgs = ClassType.getTypeParams(expectedType).map((typeParam, index) => { + const typeVar = TypeVarType.createInstance( + `__dest${index}`, + isParamSpec(typeParam) ? TypeVarKind.ParamSpec : TypeVarKind.TypeVar + ); + typeVar.shared.isSynthesized = true; + + // Use invariance here so we set the lower and upper bound on the TypeVar. + typeVar.shared.declaredVariance = Variance.Invariant; + typeVar.priv.scopeId = expectedTypeScopeId; + return typeVar; + }); + const genericExpectedType = ClassType.specialize(expectedType, synthExpectedTypeArgs); + + // For each type param in the target type, create a placeholder type variable. + const typeArgs = ClassType.getTypeParams(type).map((typeParam, index) => { + const typeVar = TypeVarType.createInstance( + `__source${index}`, + isParamSpec(typeParam) ? 
TypeVarKind.ParamSpec : TypeVarKind.TypeVar + ); + typeVar.shared.isSynthesized = true; + typeVar.shared.synthesizedIndex = index; + typeVar.shared.isExemptFromBoundCheck = true; + return TypeVarType.cloneAsUnificationVar(typeVar); + }); + + const specializedType = ClassType.specialize(type, typeArgs); + const syntheticConstraints = new ConstraintTracker(); + if ( + evaluator.assignType( + genericExpectedType, + specializedType, + /* diag */ undefined, + syntheticConstraints, + AssignTypeFlags.PopulateExpectedType + ) + ) { + let isResultValid = true; + + synthExpectedTypeArgs.forEach((typeVar, index) => { + let synthTypeVar = getTypeVarType(evaluator, syntheticConstraints.getMainConstraintSet(), typeVar); + const otherSubtypes: Type[] = []; + + // If the resulting type is a union, try to find a matching type var and move + // the remaining subtypes to the "otherSubtypes" array. + if (synthTypeVar) { + if (isParamSpec(typeVar) && isFunction(synthTypeVar)) { + synthTypeVar = simplifyFunctionToParamSpec(synthTypeVar); + } + + if (isUnion(synthTypeVar)) { + let foundSynthTypeVar: TypeVarType | undefined; + + sortTypes(synthTypeVar.priv.subtypes).forEach((subtype) => { + if ( + isTypeVar(subtype) && + subtype.shared.isSynthesized && + subtype.shared.synthesizedIndex !== undefined && + !foundSynthTypeVar + ) { + foundSynthTypeVar = subtype; + } else { + otherSubtypes.push(subtype); + } + }); + + if (foundSynthTypeVar) { + synthTypeVar = foundSynthTypeVar; + } + } + } + + // Is this one of the synthesized type vars we allocated above? If so, + // the type arg that corresponds to this type var maps back to the target type. 
+ if ( + synthTypeVar && + isTypeVar(synthTypeVar) && + synthTypeVar.shared.isSynthesized && + synthTypeVar.shared.synthesizedIndex !== undefined + ) { + const targetTypeVar = ClassType.getTypeParams(specializedType)[synthTypeVar.shared.synthesizedIndex]; + if (index < expectedTypeArgs.length) { + let typeArgValue: Type | undefined = transformPossibleRecursiveTypeAlias(expectedTypeArgs[index]); + + if (otherSubtypes.length > 0) { + typeArgValue = combineTypes([typeArgValue, ...otherSubtypes]); + } + + if (liveTypeVarScopes) { + typeArgValue = transformExpectedType(typeArgValue, liveTypeVarScopes, usageOffset); + } + + if ( + !typeArgValue || + !assignTypeVar( + evaluator, + targetTypeVar, + typeArgValue, + /* diag */ undefined, + constraints, + AssignTypeFlags.RetainLiteralsForTypeVar + ) + ) { + isResultValid = false; + } + } + } + }); + + return isResultValid; + } + + return false; +} + +function stripLiteralsForLowerBound(evaluator: TypeEvaluator, typeVar: TypeVarType, lowerBound: Type) { + return isTypeVarTuple(typeVar) + ? stripLiteralValueForUnpackedTuple(evaluator, lowerBound) + : stripTypeForm(evaluator.stripLiteralValue(lowerBound)); +} + +function getTypeVarType( + evaluator: TypeEvaluator, + constraintSet: ConstraintSet, + typeVar: TypeVarType, + useLowerBoundOnly?: boolean +): Type | undefined { + const entry = constraintSet.getTypeVar(typeVar); + if (!entry) { + return undefined; + } + + if (isParamSpec(typeVar)) { + if (!entry.lowerBound) { + return undefined; + } + + if (isFunction(entry.lowerBound)) { + return entry.lowerBound; + } + + if (isAnyOrUnknown(entry.lowerBound)) { + return ParamSpecType.getUnknown(); + } + } + + let result: Type | undefined; + + let lowerBound = entry.lowerBound; + if (lowerBound) { + if (!entry.retainLiterals) { + const lowerNoLiterals = stripLiteralsForLowerBound(evaluator, typeVar, lowerBound); + + // If we can widen the lower bound to a non-literal type without + // exceeding the upper bound, use the widened type. 
+ if (lowerNoLiterals !== lowerBound) { + if (!entry.upperBound || evaluator.assignType(entry.upperBound, lowerNoLiterals)) { + if (TypeVarType.hasConstraints(typeVar)) { + // Does it still match a value constraint? + if (typeVar.shared.constraints.some((constraint) => isTypeSame(lowerNoLiterals, constraint))) { + lowerBound = lowerNoLiterals; + } + } else { + lowerBound = lowerNoLiterals; + } + } + } + } + + result = lowerBound; + } else if (!useLowerBoundOnly) { + result = entry.upperBound; + } + + return result; +} + +// Handles an assignment to a TypeVar that is "bound" rather than "free". +// In general, such assignments are not allowed, but there are some special +// cases to be handled. +function assignBoundTypeVar( + evaluator: TypeEvaluator, + destType: TypeVarType, + srcType: Type, + diag: DiagnosticAddendum | undefined, + flags: AssignTypeFlags +) { + // Handle Any as a source. + if (isAnyOrUnknown(srcType) || (isClass(srcType) && ClassType.derivesFromAnyOrUnknown(srcType))) { + return true; + } + + // Is this the equivalent of an "Unknown" for a ParamSpec? + if (isParamSpec(destType) && isFunction(srcType) && FunctionType.isGradualCallableForm(srcType)) { + return true; + } + + // Never is always assignable except in an invariant context. + const isInvariant = (flags & AssignTypeFlags.Invariant) !== 0; + if (isNever(srcType) && !isInvariant) { + return true; + } + + // Handle a type[Any] as a source. + if (isClassInstance(srcType) && ClassType.isBuiltIn(srcType, 'type')) { + if (!srcType.priv.typeArgs || srcType.priv.typeArgs.length < 1 || isAnyOrUnknown(srcType.priv.typeArgs[0])) { + if (TypeBase.isInstantiable(destType)) { + return true; + } + } + } + + // Emit an error unless this is a synthesized type variable used + // for pseudo-generic classes. 
+ if (!destType.shared.isSynthesized || TypeVarType.isSelf(destType)) { + diag?.addMessage(LocAddendum.typeAssignmentMismatch().format(evaluator.printSrcDestTypes(srcType, destType))); + } + + return false; +} + +// Handles assignments to a TypeVarTuple or a TypeVar that does not have +// value constraints (but may have an upper bound). +function assignUnconstrainedTypeVar( + evaluator: TypeEvaluator, + destType: TypeVarType, + srcType: Type, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + flags: AssignTypeFlags, + recursionCount: number +) { + const isInvariant = (flags & AssignTypeFlags.Invariant) !== 0; + const isContravariant = (flags & AssignTypeFlags.Contravariant) !== 0 && !isInvariant; + + // Handle the unconstrained (but possibly bound) case. + const curEntry = constraints?.getMainConstraintSet().getTypeVar(destType); + + let curUpperBound = curEntry?.upperBound; + if (!curUpperBound && !TypeVarType.isSelf(destType)) { + curUpperBound = destType.shared.boundType; + } + let curLowerBound = curEntry?.lowerBound; + let newLowerBound = curLowerBound; + let newUpperBound = curUpperBound; + const diagAddendum = diag ? new DiagnosticAddendum() : undefined; + + let adjSrcType = srcType; + + // If the source is a class that is missing type arguments, fill + // in missing type arguments with Unknown. + if ((flags & AssignTypeFlags.AllowUnspecifiedTypeArgs) === 0) { + if (isClass(adjSrcType) && adjSrcType.priv.includeSubclasses) { + adjSrcType = specializeWithDefaultTypeArgs(adjSrcType); + } + } + + if (TypeBase.isInstantiable(destType)) { + if (isEffectivelyInstantiable(adjSrcType)) { + adjSrcType = convertToInstance(adjSrcType, /* includeSubclasses */ false); + } else { + // Handle the case of a TypeVar that has a bound of `type`. 
+ const concreteAdjSrcType = evaluator.makeTopLevelTypeVarsConcrete(adjSrcType); + + if (isEffectivelyInstantiable(concreteAdjSrcType)) { + adjSrcType = convertToInstance(concreteAdjSrcType); + } else { + diag?.addMessage( + LocAddendum.typeAssignmentMismatch().format(evaluator.printSrcDestTypes(srcType, destType)) + ); + return false; + } + } + } else if ( + isTypeVar(srcType) && + TypeBase.isInstantiable(srcType) && + isTypeSame(convertToInstance(srcType), destType) + ) { + diag?.addMessage( + LocAddendum.typeAssignmentMismatch().format(evaluator.printSrcDestTypes(adjSrcType, destType)) + ); + return false; + } + + if ((flags & AssignTypeFlags.PopulateExpectedType) !== 0) { + if ((flags & AssignTypeFlags.SkipPopulateUnknownExpectedType) !== 0 && isUnknown(adjSrcType)) { + return true; + } + + // If we're populating the expected type, constrain either the + // lower type bound, upper type bound or both. Don't overwrite + // an existing entry. + if (!curEntry) { + if (isInvariant) { + newLowerBound = adjSrcType; + newUpperBound = adjSrcType; + } else if (isContravariant) { + newLowerBound = adjSrcType; + } else { + newUpperBound = adjSrcType; + } + } + } else if (isContravariant) { + // Update the upper bound. + if (!curUpperBound || isTypeSame(destType, curUpperBound)) { + newUpperBound = adjSrcType; + } else if (!isTypeSame(curUpperBound, adjSrcType, {}, recursionCount)) { + if ( + evaluator.assignType( + curUpperBound, + evaluator.makeTopLevelTypeVarsConcrete(adjSrcType), + diagAddendum, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ) + ) { + // The srcType is narrower than the current upper bound, so replace it. 
+ newUpperBound = adjSrcType; + } else if ( + !evaluator.assignType( + adjSrcType, + curUpperBound, + diagAddendum, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ) + ) { + if (diag && diagAddendum) { + diag.addMessage( + LocAddendum.typeAssignmentMismatch().format( + evaluator.printSrcDestTypes(curUpperBound, adjSrcType) + ) + ); + diag.addAddendum(diagAddendum); + } + return false; + } + } + + // Make sure we haven't narrowed it beyond the current lower bound. + if (curLowerBound) { + if ( + !evaluator.assignType( + newUpperBound!, + curLowerBound, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ) + ) { + if (diag && diagAddendum) { + diag.addMessage( + LocAddendum.typeAssignmentMismatch().format( + evaluator.printSrcDestTypes(curLowerBound, newUpperBound!) + ) + ); + diag.addAddendum(diagAddendum); + } + return false; + } + } + } else { + if (!curLowerBound || isTypeSame(destType, curLowerBound)) { + // There was previously no lower bound. We've now established one. + newLowerBound = adjSrcType; + } else if (isTypeSame(curLowerBound, adjSrcType, {}, recursionCount)) { + // If this is an invariant context and there is currently no upper bound + // established, use the "no literals" version of the lower bound rather + // than a version that has literals. + if (!newUpperBound && isInvariant && curEntry && !curEntry.retainLiterals) { + newLowerBound = stripLiteralsForLowerBound(evaluator, destType, curLowerBound); + } + } else { + if (evaluator.assignType(curLowerBound, adjSrcType, diagAddendum, constraints, flags, recursionCount)) { + // No need to widen. Stick with the existing type unless it's unknown + // or partly unknown, in which case we'll replace it with a known type + // as long as it doesn't violate the current lower bound. 
+ if ( + isPartlyUnknown(curLowerBound) && + !isUnknown(adjSrcType) && + evaluator.assignType( + adjSrcType, + curLowerBound, + /* diag */ undefined, + constraints, + AssignTypeFlags.Default, + recursionCount + ) + ) { + newLowerBound = adjSrcType; + } else { + newLowerBound = curLowerBound; + + if (constraints) { + newLowerBound = evaluator.solveAndApplyConstraints(newLowerBound, constraints); + } + } + } else if ( + isTypeVar(curLowerBound) && + !isTypeVar(adjSrcType) && + evaluator.assignType( + evaluator.makeTopLevelTypeVarsConcrete(curLowerBound), + adjSrcType, + diagAddendum, + constraints, + flags, + recursionCount + ) + ) { + // If the existing lower bound was a TypeVar that is not + // part of the current context we can replace it with the new + // source type. + newLowerBound = adjSrcType; + } else { + if ( + evaluator.assignType( + adjSrcType, + curLowerBound, + /* diag */ undefined, + constraints, + AssignTypeFlags.Default, + recursionCount + ) + ) { + // If the source is a TypeVar that just got assigned the value + // of the current lower bound, don't replace the current lower + // bound with the TypeVar. + if (!isTypeVar(adjSrcType)) { + newLowerBound = adjSrcType; + } + } else if (isTypeVarTuple(destType)) { + const widenedType = widenTypeForTypeVarTuple(evaluator, curLowerBound, adjSrcType); + if (!widenedType) { + diag?.addMessage( + LocAddendum.typeAssignmentMismatch().format( + evaluator.printSrcDestTypes(curLowerBound, adjSrcType) + ) + ); + return false; + } + + newLowerBound = widenedType; + } else { + const objectType = evaluator.getObjectType(); + + // If this is an invariant context and there is currently no upper bound + // established, use the "no literals" version of the lower bound rather + // than a version that has literals. 
+ if (!newUpperBound && isInvariant && curEntry && !curEntry.retainLiterals) { + curLowerBound = stripLiteralsForLowerBound(evaluator, destType, curLowerBound); + } + + let curSolvedLowerBound = curLowerBound; + + if (constraints) { + curSolvedLowerBound = evaluator.solveAndApplyConstraints(curLowerBound, constraints); + } + + // In some extreme edge cases, the lower bound can become + // a union with so many subtypes that performance grinds to a + // halt. We'll detect this case and widen the resulting type + // to an 'object' instead of making the union even bigger. This + // is still a valid solution to the TypeVar. + if ( + isUnion(curSolvedLowerBound) && + curSolvedLowerBound.priv.subtypes.length > maxSubtypesForInferredType && + TypeVarType.hasBound(destType) && + isClassInstance(objectType) + ) { + newLowerBound = combineTypes([curSolvedLowerBound, objectType], { + maxSubtypeCount: maxSubtypeCountForTypeVarLowerBound, + }); + } else { + newLowerBound = combineTypes([curSolvedLowerBound, adjSrcType], { + maxSubtypeCount: maxSubtypeCountForTypeVarLowerBound, + }); + } + } + } + } + + // If this is an invariant context, make sure the lower bound + // isn't too wide. + if (isInvariant && newLowerBound) { + if ( + !evaluator.assignType( + adjSrcType, + newLowerBound, + diag?.createAddendum(), + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ) + ) { + if (diag && diagAddendum) { + diag.addMessage( + LocAddendum.typeAssignmentMismatch().format( + evaluator.printSrcDestTypes(newLowerBound, adjSrcType) + ) + ); + } + return false; + } + } + + // Make sure we don't exceed the upper bound. 
+ if (curUpperBound && newLowerBound) { + if (!isTypeSame(curUpperBound, newLowerBound, {}, recursionCount)) { + if ( + !evaluator.assignType( + curUpperBound, + newLowerBound, + diag?.createAddendum(), + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ) + ) { + if (diag && diagAddendum) { + diag.addMessage( + LocAddendum.typeAssignmentMismatch().format( + evaluator.printSrcDestTypes(newLowerBound, curUpperBound) + ) + ); + } + return false; + } + } + } + } + + if (!newUpperBound && isInvariant) { + newUpperBound = newLowerBound; + } + + // If there's a bound type, make sure the source is assignable to it. + if (destType.shared.boundType) { + const updatedType = (newLowerBound || newUpperBound)!; + + // If the dest is a Type[T] but the source is not a valid Type, + // skip the assignType check and the diagnostic addendum, which will + // be confusing and inaccurate. + if (TypeBase.isInstantiable(destType) && !isEffectivelyInstantiable(srcType, { honorTypeVarBounds: true })) { + return false; + } + + // In general, bound types cannot be generic, but the "Self" type is an + // exception. In this case, we need to use the original constraints + // to solve for the generic type variable(s) in the bound type. + const effectiveConstraints = TypeVarType.isSelf(destType) ? constraints : undefined; + + if ( + !evaluator.assignType( + destType.shared.boundType, + evaluator.makeTopLevelTypeVarsConcrete(updatedType), + diag?.createAddendum(), + effectiveConstraints, + AssignTypeFlags.Default, + recursionCount + ) + ) { + // Avoid adding a message that will confuse users if the TypeVar was + // synthesized for internal purposes. 
+ if (!destType.shared.isSynthesized) { + diag?.addMessage( + LocAddendum.typeBound().format({ + sourceType: evaluator.printType(updatedType), + destType: evaluator.printType(destType.shared.boundType), + name: TypeVarType.getReadableName(destType), + }) + ); + } + return false; + } + } + + constraints?.setBounds( + destType, + newLowerBound, + newUpperBound, + (flags & (AssignTypeFlags.PopulateExpectedType | AssignTypeFlags.RetainLiteralsForTypeVar)) !== 0 + ); + + return true; +} + +// Handles assignments to a TypeVar with value constraints. +function assignConstrainedTypeVar( + evaluator: TypeEvaluator, + destType: TypeVarType, + srcType: Type, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + flags: AssignTypeFlags, + recursionCount: number +) { + let constrainedType: Type | undefined; + const concreteSrcType = evaluator.makeTopLevelTypeVarsConcrete(srcType); + const curEntry = constraints?.getMainConstraintSet().getTypeVar(destType); + + const curUpperBound = curEntry?.upperBound; + const curLowerBound = curEntry?.lowerBound; + let retainLiterals = false; + + if (isTypeVar(srcType)) { + if ( + evaluator.assignType( + destType, + concreteSrcType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ) + ) { + constrainedType = srcType; + + // If the source and dest are both instantiables (type[T]), then + // we need to convert to an instance (T). + if (TypeBase.isInstantiable(srcType)) { + constrainedType = convertToInstance(srcType, /* includeSubclasses */ false); + } + } + } else { + let isCompatible = true; + + // Subtypes that are not conditionally dependent on the dest type var + // must all map to the same constraint. For example, Union[str, bytes] + // cannot be assigned to AnyStr. + let unconditionalConstraintIndex: number | undefined; + + // Find the narrowest constrained type that is compatible. 
+ constrainedType = mapSubtypes(concreteSrcType, (srcSubtype) => { + let constrainedSubtype: Type | undefined; + + if (isAnyOrUnknown(srcSubtype)) { + return srcSubtype; + } + + let constraintIndexUsed: number | undefined; + destType.shared.constraints.forEach((constraint, i) => { + const adjustedConstraint = TypeBase.isInstantiable(destType) + ? convertToInstantiable(constraint) + : constraint; + if ( + evaluator.assignType( + adjustedConstraint, + srcSubtype, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ) + ) { + if ( + !constrainedSubtype || + evaluator.assignType( + TypeBase.isInstantiable(destType) + ? convertToInstantiable(constrainedSubtype) + : constrainedSubtype, + adjustedConstraint, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ) + ) { + constrainedSubtype = addConditionToType(constraint, getTypeCondition(srcSubtype)); + constraintIndexUsed = i; + } + } + }); + + if (!constrainedSubtype) { + // We found a source subtype that is not compatible with the dest. + // This is OK if we're handling the contravariant case because only + // one subtype needs to be assignable in that case. + if ((flags & AssignTypeFlags.Contravariant) === 0) { + isCompatible = false; + } + } + + // If this subtype isn't conditional, make sure it maps to the same + // constraint index as previous unconditional subtypes. + if (constraintIndexUsed !== undefined && !getTypeCondition(srcSubtype)) { + if ( + unconditionalConstraintIndex !== undefined && + unconditionalConstraintIndex !== constraintIndexUsed + ) { + isCompatible = false; + } + + unconditionalConstraintIndex = constraintIndexUsed; + } + + return constrainedSubtype; + }); + + if (isNever(constrainedType) || !isCompatible) { + constrainedType = undefined; + } + + // If the type is a union, see if the entire union is assignable to one + // of the constraints. 
+ if (!constrainedType && isUnion(concreteSrcType)) { + constrainedType = destType.shared.constraints.find((constraint) => { + const adjustedConstraint = TypeBase.isInstantiable(destType) + ? convertToInstantiable(constraint) + : constraint; + return evaluator.assignType( + adjustedConstraint, + concreteSrcType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ); + }); + } + } + + // If there was no constrained type that was assignable + // or there were multiple types that were assignable and they + // are not conditional, it's an error. + if (!constrainedType) { + diag?.addMessage( + LocAddendum.typeConstrainedTypeVar().format({ + type: evaluator.printType(srcType), + name: destType.shared.name, + }) + ); + return false; + } else if (isLiteralTypeOrUnion(constrainedType)) { + retainLiterals = true; + } + + if (curLowerBound && !isAnyOrUnknown(curLowerBound)) { + if ( + !evaluator.assignType( + curLowerBound, + constrainedType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ) + ) { + // Handle the case where one of the constrained types is a wider + // version of another constrained type that was previously assigned + // to the type variable. + if ( + evaluator.assignType( + constrainedType, + curLowerBound, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ) + ) { + constraints?.setBounds(destType, constrainedType, curUpperBound); + } else { + diag?.addMessage( + LocAddendum.typeConstrainedTypeVar().format({ + type: evaluator.printType(constrainedType), + name: evaluator.printType(curLowerBound), + }) + ); + return false; + } + } + } else { + // Assign the type to the type var. + constraints?.setBounds(destType, constrainedType, curUpperBound, retainLiterals); + } + + return true; +} + +// Handles assignments to a ParamSpec. 
+function assignParamSpec( + evaluator: TypeEvaluator, + destType: ParamSpecType, + srcType: Type, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + recursionCount = 0 +) { + // If there is no constraint tracker, there's nothing to do because + // param specs have no upper bounds or constraints. + if (!constraints) { + return true; + } + + let isAssignable = true; + let adjSrcType = isParamSpec(srcType) ? srcType : convertTypeToParamSpecValue(srcType); + if (isFunction(adjSrcType)) { + adjSrcType = simplifyFunctionToParamSpec(adjSrcType); + } + + constraints.doForEachConstraintSet((constraintSet) => { + if (isParamSpec(adjSrcType)) { + const existingType = constraintSet.getTypeVar(destType)?.lowerBound; + if (existingType) { + const paramSpecValue = convertTypeToParamSpecValue(existingType); + const existingTypeParamSpec = FunctionType.getParamSpecFromArgsKwargs(paramSpecValue); + const existingTypeWithoutArgsKwargs = FunctionType.cloneRemoveParamSpecArgsKwargs(paramSpecValue); + + if (existingTypeWithoutArgsKwargs.shared.parameters.length === 0 && existingTypeParamSpec) { + // If there's an existing entry that matches, that's fine. + if (isTypeSame(existingTypeParamSpec, adjSrcType, {}, recursionCount)) { + return; + } + } + } else { + constraintSet.setBounds(destType, adjSrcType); + return; + } + } else if (isFunction(adjSrcType)) { + const newFunction = adjSrcType; + let updateContextWithNewFunction = false; + + const existingType = constraintSet.getTypeVar(destType)?.lowerBound; + if (existingType) { + // Convert the remaining portion of the signature to a function + // for comparison purposes. 
+ const existingFunction = simplifyFunctionToParamSpec(convertTypeToParamSpecValue(existingType)); + + const isNewNarrower = evaluator.assignType( + existingFunction, + newFunction, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.SkipReturnTypeCheck, + recursionCount + ); + + const isNewWider = evaluator.assignType( + newFunction, + existingFunction, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.SkipReturnTypeCheck, + recursionCount + ); + + // Should we widen the type? + if (isNewNarrower && isNewWider) { + // The new type is both a supertype and a subtype of the existing type. + // That means the two types are the same or one (or both) have the type + // "..." (which is the ParamSpec equivalent of "Any"). If only one has + // the type "...", we'll prefer the other one. This is analogous to + // what we do with regular TypeVars, where we prefer non-Any values. + if (!FunctionType.isGradualCallableForm(newFunction)) { + updateContextWithNewFunction = true; + } else { + return; + } + } else if (isNewWider) { + updateContextWithNewFunction = true; + } else if (isNewNarrower) { + // The existing function is already narrower than the new function, so + // no need to narrow it further. + return; + } + } else { + updateContextWithNewFunction = true; + } + + if (updateContextWithNewFunction) { + constraintSet.setBounds(destType, newFunction); + return; + } + } else if (isAnyOrUnknown(adjSrcType)) { + return; + } + + diag?.addMessage( + LocAddendum.typeParamSpec().format({ + type: evaluator.printType(adjSrcType), + name: destType.shared.name, + }) + ); + + isAssignable = false; + }); + + return isAssignable; +} + +// For normal TypeVars, the constraint solver can widen a type by combining +// two otherwise incompatible types into a union. For TypeVarTuples, we need +// to do the equivalent operation for unpacked tuples. 
+function widenTypeForTypeVarTuple(evaluator: TypeEvaluator, type1: Type, type2: Type): Type | undefined { + // The typing spec indicates that the type should always be "exactly + // the same type" if a TypeVarTuple is used in multiple locations. + // This is problematic for a number of reasons, but in the interest + // of sticking to the spec, we'll enforce that here. + + // If the two types are not unpacked tuples, we can't combine them. + if (!isUnpackedClass(type1) || !isUnpackedClass(type2)) { + return undefined; + } + + // If the two unpacked tuples are not the same length, we can't combine them. + if ( + !type1.priv.tupleTypeArgs || + !type2.priv.tupleTypeArgs || + type1.priv.tupleTypeArgs.length !== type2.priv.tupleTypeArgs.length + ) { + return undefined; + } + + const strippedType1 = stripLiteralValueForUnpackedTuple(evaluator, type1); + const strippedType2 = stripLiteralValueForUnpackedTuple(evaluator, type2); + + if (isTypeSame(strippedType1, strippedType2)) { + return strippedType1; + } + + return undefined; +} + +// If the provided type is an unpacked tuple, this function strips the +// literals from types of the corresponding elements. +function stripLiteralValueForUnpackedTuple(evaluator: TypeEvaluator, type: Type): Type { + if (!isUnpackedClass(type) || !type.priv.tupleTypeArgs) { + return type; + } + + let strippedLiteral = false; + const tupleTypeArgs: TupleTypeArg[] = type.priv.tupleTypeArgs.map((arg) => { + const strippedType = stripTypeForm(evaluator.stripLiteralValue(arg.type)); + + if (strippedType !== arg.type) { + strippedLiteral = true; + } + + return { + isUnbounded: arg.isUnbounded, + isOptional: arg.isOptional, + type: strippedType, + }; + }); + + if (!strippedLiteral) { + return type; + } + + return specializeTupleClass(type, tupleTypeArgs, /* isTypeArgExplicit */ true, /* isUnpacked */ true); +} + +// This function is used for debugging only. It dumps the current contents of +// the constraints to the console. 
+function logConstraints(evaluator: TypeEvaluator, constraints: ConstraintTracker, indent: string) { + const constraintSetCount = constraints.getConstraintSets().length; + if (constraintSetCount === 0) { + console.log(`${indent} no signatures`); + } else if (constraintSetCount === 1) { + logTypeVarConstraintSet(evaluator, constraints.getConstraintSets()[0], `${indent} `); + } else { + constraints.doForEachConstraintSet((set, index) => { + console.log(`${indent} signature ${index}`); + logTypeVarConstraintSet(evaluator, set, `${indent} `); + }); + } +} + +function logTypeVarConstraintSet(evaluator: TypeEvaluator, context: ConstraintSet, indent: string) { + let loggedConstraint = false; + + context.getTypeVars().forEach((entry) => { + const typeVarName = `${indent}${entry.typeVar.shared.name}`; + const lowerBound = entry.lowerBound; + const upperBound = entry.upperBound; + + // Log the lower and upper bounds. + if (lowerBound && upperBound && isTypeSame(lowerBound, upperBound)) { + console.log(`${typeVarName} = ${evaluator.printType(lowerBound)}`); + loggedConstraint = true; + } else { + if (lowerBound) { + console.log(`${typeVarName} ≤ ${evaluator.printType(lowerBound)}`); + loggedConstraint = true; + } + if (upperBound) { + console.log(`${typeVarName} ≥ ${evaluator.printType(upperBound)}`); + loggedConstraint = true; + } + } + }); + + if (!loggedConstraint) { + console.log(`${indent}no constraints`); + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/constraintTracker.ts b/python-parser/packages/pyright-internal/src/analyzer/constraintTracker.ts new file mode 100644 index 00000000..cb77fc49 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/constraintTracker.ts @@ -0,0 +1,287 @@ +/* + * constraintTracker.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Module that tracks the constraints for a set of type variables. 
+ * It is used by the constraint solver to solve for the type of + * each type variable. + */ + +import { assert } from '../common/debug'; +import { getComplexityScoreForType } from './typeComplexity'; +import { Type, TypeVarScopeId, TypeVarType, isTypeSame } from './types'; + +// The maximum number of constraint sets that can be associated +// with a constraint tracker. This equates to the number of overloads +// that can be captured by a ParamSpec (or multiple ParamSpecs). +// We should never hit this limit in practice, but there are certain +// pathological cases where we could, and we need to protect against +// this so it doesn't completely exhaust memory. This was previously +// set to 64, but we have seen cases where a library uses in excess +// of 300 overloads on a single function. +const maxConstraintSetCount = 1024; + +// Records constraint information about a single type variable. +export interface TypeVarConstraints { + typeVar: TypeVarType; + + // Bounds for solved type variable as constraints are added. + lowerBound?: Type | undefined; + upperBound?: Type | undefined; + + // Should the lower bound include literal values? + retainLiterals?: boolean; +} + +// Records the constraints information for a set of type variables +// associated with a callee's signature. +export class ConstraintSet { + // Maps type variable IDs to their current constraints. + private _typeVarMap: Map; + + // A set of one or more TypeVar scope IDs that identify this constraint set. + // This corresponds to the scope ID of the overload signature. Normally + // there will be only one scope ID associated with each signature, but + // we can have multiple if we are solving for multiple ParamSpecs. If + // there are two ParamSpecs P1 and P2 and both are bound to 3 overloads, + // we'll have 9 sets of TypeVars that we're solving, for all combinations + // of P1 and P2). 
+ private _scopeIds: Set | undefined; + + constructor() { + this._typeVarMap = new Map(); + } + + clone() { + const constraintSet = new ConstraintSet(); + + this._typeVarMap.forEach((value) => { + constraintSet.setBounds(value.typeVar, value.lowerBound, value.upperBound, value.retainLiterals); + }); + + if (this._scopeIds) { + this._scopeIds.forEach((scopeId) => constraintSet.addScopeId(scopeId)); + } + + return constraintSet; + } + + isSame(other: ConstraintSet) { + if (this._typeVarMap.size !== other._typeVarMap.size) { + return false; + } + + function typesMatch(type1: Type | undefined, type2: Type | undefined) { + if (!type1 || !type2) { + return type1 === type2; + } + + return isTypeSame(type1, type2, { honorIsTypeArgExplicit: true, honorTypeForm: true }); + } + + let isSame = true; + this._typeVarMap.forEach((value, key) => { + const otherValue = other._typeVarMap.get(key); + if ( + !otherValue || + !typesMatch(value.lowerBound, otherValue.lowerBound) || + !typesMatch(value.upperBound, otherValue.upperBound) + ) { + isSame = false; + } + }); + + return isSame; + } + + isEmpty() { + return this._typeVarMap.size === 0; + } + + // Provides a "score" - a value that values completeness (number + // of type variables that are assigned) and simplicity. + getScore() { + let score = 0; + + // Sum the scores for the defined type vars. + this._typeVarMap.forEach((entry) => { + // Add 1 to the score for each type variable defined. + score += 1; + + // Add a fractional amount based on the simplicity of the definition. + // The more complex, the lower the score. In the spirit of Occam's + // Razor, we always want to favor simple answers. + const typeVarType = entry.lowerBound ?? 
entry.upperBound; + if (typeVarType) { + score += 1.0 - getComplexityScoreForType(typeVarType); + } + }); + + return score; + } + + setBounds(typeVar: TypeVarType, lowerBound: Type | undefined, upperBound?: Type, retainLiterals?: boolean) { + const key = TypeVarType.getNameWithScope(typeVar); + this._typeVarMap.set(key, { + typeVar, + lowerBound, + upperBound, + retainLiterals, + }); + } + + doForEachTypeVar(cb: (entry: TypeVarConstraints) => void) { + this._typeVarMap.forEach(cb); + } + + getTypeVar(typeVar: TypeVarType): TypeVarConstraints | undefined { + const key = TypeVarType.getNameWithScope(typeVar); + return this._typeVarMap.get(key); + } + + getTypeVars(): TypeVarConstraints[] { + const entries: TypeVarConstraints[] = []; + + this._typeVarMap.forEach((entry) => { + entries.push(entry); + }); + + return entries; + } + + addScopeId(scopeId: TypeVarScopeId) { + if (!this._scopeIds) { + this._scopeIds = new Set(); + } + + this._scopeIds.add(scopeId); + } + + hasScopeId(scopeId: TypeVarScopeId) { + if (!this._scopeIds) { + return false; + } + + return this._scopeIds.has(scopeId); + } + + getScopeIds() { + return new Set(this._scopeIds); + } + + hasUnificationVars() { + for (const entry of this._typeVarMap.values()) { + if (TypeVarType.isUnification(entry.typeVar)) { + return true; + } + } + + return false; + } +} + +export class ConstraintTracker { + private _constraintSets: ConstraintSet[]; + + constructor() { + this._constraintSets = [new ConstraintSet()]; + } + + clone() { + const newTypeVarMap = new ConstraintTracker(); + + newTypeVarMap._constraintSets = this._constraintSets.map((set) => set.clone()); + + return newTypeVarMap; + } + + cloneWithSignature(scopeId: TypeVarScopeId): ConstraintTracker { + const cloned = this.clone(); + + if (scopeId) { + const filteredSets = this._constraintSets.filter((context) => context.hasScopeId(scopeId)); + + if (filteredSets.length > 0) { + cloned._constraintSets = filteredSets; + } else { + 
cloned._constraintSets.forEach((context) => { + context.addScopeId(scopeId); + }); + } + } + + return cloned; + } + + // Copies a cloned type var context back into this object. + copyFromClone(clone: ConstraintTracker) { + this._constraintSets = clone._constraintSets.map((context) => context.clone()); + } + + copyBounds(entry: TypeVarConstraints) { + this._constraintSets.forEach((set) => { + set.setBounds(entry.typeVar, entry.lowerBound, entry.upperBound, entry.retainLiterals); + }); + } + + // Copy the specified constraint sets into this type var context. + addConstraintSets(contexts: ConstraintSet[]) { + assert(contexts.length > 0); + + // Limit the number of constraint sets. There are rare circumstances + // where this can grow to unbounded numbers and exhaust memory. + if (contexts.length < maxConstraintSetCount) { + this._constraintSets = Array.from(contexts); + } + } + + isSame(other: ConstraintTracker) { + if (other._constraintSets.length !== this._constraintSets.length) { + return false; + } + + return this._constraintSets.every((set, index) => set.isSame(other._constraintSets[index])); + } + + isEmpty() { + return this._constraintSets.every((set) => set.isEmpty()); + } + + setBounds(typeVar: TypeVarType, lowerBound: Type | undefined, upperBound?: Type, retainLiterals?: boolean) { + return this._constraintSets.forEach((set) => { + set.setBounds(typeVar, lowerBound, upperBound, retainLiterals); + }); + } + + getScore() { + let total = 0; + + this._constraintSets.forEach((set) => { + total += set.getScore(); + }); + + // Return the average score among all constraint sets. 
+ return total / this._constraintSets.length; + } + + getMainConstraintSet() { + return this._constraintSets[0]; + } + + getConstraintSets() { + return this._constraintSets; + } + + doForEachConstraintSet(callback: (constraintSet: ConstraintSet, index: number) => void) { + this.getConstraintSets().forEach((set, index) => { + callback(set, index); + }); + } + + getConstraintSet(index: number) { + assert(index >= 0 && index < this._constraintSets.length); + return this._constraintSets[index]; + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/constructorTransform.ts b/python-parser/packages/pyright-internal/src/analyzer/constructorTransform.ts new file mode 100644 index 00000000..66ad3877 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/constructorTransform.ts @@ -0,0 +1,487 @@ +/* + * constructorTransform.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Code that transforms a newly-created object after a call to the + * constructor is evaluated. It allows for special-case behavior that + * cannot otherwise be described in the Python type system. 
+ * + */ + +import { appendArray } from '../common/collectionUtils'; +import { DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { LocMessage } from '../localization/localize'; +import { ArgCategory, ExpressionNode, ParamCategory } from '../parser/parseNodes'; +import { ConstraintTracker } from './constraintTracker'; +import { createFunctionFromConstructor } from './constructors'; +import { getParamListDetails, ParamKind } from './parameterUtils'; +import { getTypedDictMembersForClass } from './typedDicts'; +import { Arg, FunctionResult, TypeEvaluator } from './typeEvaluatorTypes'; +import { + AnyType, + ClassType, + FunctionParam, + FunctionType, + FunctionTypeFlags, + isClassInstance, + isFunction, + isInstantiableClass, + isOverloaded, + isTypeSame, + isTypeVar, + isUnpackedClass, + OverloadedType, + Type, + TypedDictEntry, +} from './types'; +import { convertToInstance, lookUpObjectMember, makeInferenceContext, MemberAccessFlags } from './typeUtils'; + +export function hasConstructorTransform(classType: ClassType): boolean { + if (classType.shared.fullName === 'functools.partial') { + return true; + } + + return false; +} + +export function applyConstructorTransform( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: Arg[], + classType: ClassType, + result: FunctionResult +): FunctionResult | undefined { + if (classType.shared.fullName === 'functools.partial') { + return applyPartialTransform(evaluator, errorNode, argList, result); + } + + // By default, return the result unmodified. + return result; +} + +// Applies a transform for the functools.partial class constructor. +function applyPartialTransform( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: Arg[], + result: FunctionResult +): FunctionResult | undefined { + // We assume that the normal return result is a functools.partial class instance. 
+ if (!isClassInstance(result.returnType) || result.returnType.shared.fullName !== 'functools.partial') { + return undefined; + } + + const callMemberResult = lookUpObjectMember(result.returnType, '__call__', MemberAccessFlags.SkipInstanceMembers); + if (!callMemberResult || !isTypeSame(convertToInstance(callMemberResult.classType), result.returnType)) { + return undefined; + } + + const callMemberType = evaluator.getTypeOfMember(callMemberResult); + if (!isFunction(callMemberType) || callMemberType.shared.parameters.length < 1) { + return undefined; + } + + if (argList.length < 1) { + return undefined; + } + + const origFunctionTypeResult = evaluator.getTypeOfArg(argList[0], /* inferenceContext */ undefined); + let origFunctionType = origFunctionTypeResult.type; + const origFunctionTypeConcrete = evaluator.makeTopLevelTypeVarsConcrete(origFunctionType); + + if (isInstantiableClass(origFunctionTypeConcrete)) { + const constructor = createFunctionFromConstructor( + evaluator, + origFunctionTypeConcrete, + isTypeVar(origFunctionType) ? convertToInstance(origFunctionType) : undefined + ); + + if (constructor) { + origFunctionType = constructor; + } + } + + // Evaluate the inferred return type if necessary. + evaluator.inferReturnTypeIfNecessary(origFunctionType); + + // We don't currently handle unpacked arguments. + if (argList.some((arg) => arg.argCategory !== ArgCategory.Simple)) { + return undefined; + } + + // Make sure the first argument is a simple function. + if (isFunction(origFunctionType)) { + const transformResult = applyPartialTransformToFunction( + evaluator, + errorNode, + argList, + callMemberType, + origFunctionType + ); + if (!transformResult) { + return undefined; + } + + // Create a new copy of the functools.partial class that overrides the __call__ method. 
+ const newPartialClass = ClassType.cloneForPartial(result.returnType, transformResult.returnType); + + return { + returnType: newPartialClass, + isTypeIncomplete: result.isTypeIncomplete, + argumentErrors: transformResult.argumentErrors, + }; + } + + if (isOverloaded(origFunctionType)) { + const applicableOverloads: FunctionType[] = []; + const overloads = OverloadedType.getOverloads(origFunctionType); + let sawArgErrors = false; + + // Apply the partial transform to each of the functions in the overload. + overloads.forEach((overload) => { + // Apply the transform to this overload, but don't report errors. + const transformResult = applyPartialTransformToFunction( + evaluator, + /* errorNode */ undefined, + argList, + callMemberType, + overload + ); + + if (transformResult) { + if (transformResult.argumentErrors) { + sawArgErrors = true; + } else if (isFunction(transformResult.returnType)) { + applicableOverloads.push(transformResult.returnType); + } + } + }); + + if (applicableOverloads.length === 0) { + if (sawArgErrors && overloads.length > 0) { + evaluator.addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.noOverload().format({ + name: overloads[0].shared.name, + }), + errorNode + ); + } + + return undefined; + } + + let synthesizedCallType: Type; + if (applicableOverloads.length === 1) { + synthesizedCallType = applicableOverloads[0]; + } else { + synthesizedCallType = OverloadedType.create( + // Set the "overloaded" flag for each of the __call__ overloads. + applicableOverloads.map((overload) => + FunctionType.cloneWithNewFlags(overload, overload.shared.flags | FunctionTypeFlags.Overloaded) + ) + ); + } + + // Create a new copy of the functools.partial class that overrides the __call__ method. 
+ const newPartialClass = ClassType.cloneForPartial(result.returnType, synthesizedCallType); + + return { + returnType: newPartialClass, + isTypeIncomplete: result.isTypeIncomplete, + argumentErrors: false, + }; + } + + return undefined; +} + +function applyPartialTransformToFunction( + evaluator: TypeEvaluator, + errorNode: ExpressionNode | undefined, + argList: Arg[], + partialCallMemberType: FunctionType, + origFunctionType: FunctionType +): FunctionResult | undefined { + // Create a map to track which parameters have supplied arguments. + const paramMap = new Map(); + + const paramListDetails = getParamListDetails(origFunctionType); + + // Verify the types of the provided arguments. + let argumentErrors = false; + let reportedPositionalError = false; + const constraints = new ConstraintTracker(); + + const remainingArgsList = argList.slice(1); + remainingArgsList.forEach((arg, argIndex) => { + if (!arg.valueExpression) { + return; + } + + // Is it a positional argument or a keyword argument? + if (!arg.name) { + // Does this positional argument map to a positional parameter? 
+ if ( + argIndex >= paramListDetails.params.length || + paramListDetails.params[argIndex].kind === ParamKind.Keyword + ) { + if (paramListDetails.argsIndex !== undefined) { + const paramType = FunctionType.getParamType( + origFunctionType, + paramListDetails.params[paramListDetails.argsIndex].index + ); + const diag = new DiagnosticAddendum(); + + const argTypeResult = evaluator.getTypeOfExpression( + arg.valueExpression, + /* flags */ undefined, + makeInferenceContext(paramType) + ); + + if (!evaluator.assignType(paramType, argTypeResult.type, diag, constraints)) { + if (errorNode) { + evaluator.addDiagnostic( + DiagnosticRule.reportArgumentType, + LocMessage.argAssignmentParamFunction().format({ + argType: evaluator.printType(argTypeResult.type), + paramType: evaluator.printType(paramType), + functionName: origFunctionType.shared.name, + paramName: paramListDetails.params[paramListDetails.argsIndex].param.name ?? '', + }), + arg.valueExpression ?? errorNode + ); + } + + argumentErrors = true; + } + } else { + // Don't report multiple positional errors. + if (!reportedPositionalError) { + if (errorNode) { + evaluator.addDiagnostic( + DiagnosticRule.reportCallIssue, + paramListDetails.positionParamCount === 1 + ? LocMessage.argPositionalExpectedOne() + : LocMessage.argPositionalExpectedCount().format({ + expected: paramListDetails.positionParamCount, + }), + arg.valueExpression ?? errorNode + ); + } + } + + reportedPositionalError = true; + argumentErrors = true; + } + } else { + const paramInfo = paramListDetails.params[argIndex]; + const paramType = paramInfo.type; + const diag = new DiagnosticAddendum(); + const paramName = paramInfo.param.name ?? 
''; + + const argTypeResult = evaluator.getTypeOfExpression( + arg.valueExpression, + /* flags */ undefined, + makeInferenceContext(paramType) + ); + + if (!evaluator.assignType(paramType, argTypeResult.type, diag, constraints)) { + if (errorNode) { + evaluator.addDiagnostic( + DiagnosticRule.reportArgumentType, + LocMessage.argAssignmentParamFunction().format({ + argType: evaluator.printType(argTypeResult.type), + paramType: evaluator.printType(paramType), + functionName: origFunctionType.shared.name, + paramName, + }), + arg.valueExpression ?? errorNode + ); + } + + argumentErrors = true; + } + + // Mark the parameter as assigned. + paramMap.set(paramName, false); + } + } else { + const matchingParam = paramListDetails.params.find( + (paramInfo) => paramInfo.param.name === arg.name?.d.value && paramInfo.kind !== ParamKind.Positional + ); + + if (!matchingParam) { + // Is there a kwargs parameter? + if (paramListDetails.kwargsIndex === undefined) { + if (errorNode) { + evaluator.addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.paramNameMissing().format({ name: arg.name.d.value }), + arg.name + ); + } + argumentErrors = true; + } else { + const paramType = FunctionType.getParamType( + origFunctionType, + paramListDetails.params[paramListDetails.kwargsIndex].index + ); + const diag = new DiagnosticAddendum(); + + const argTypeResult = evaluator.getTypeOfExpression( + arg.valueExpression, + /* flags */ undefined, + makeInferenceContext(paramType) + ); + + if (!evaluator.assignType(paramType, argTypeResult.type, diag, constraints)) { + if (errorNode) { + evaluator.addDiagnostic( + DiagnosticRule.reportArgumentType, + LocMessage.argAssignmentParamFunction().format({ + argType: evaluator.printType(argTypeResult.type), + paramType: evaluator.printType(paramType), + functionName: origFunctionType.shared.name, + paramName: paramListDetails.params[paramListDetails.kwargsIndex].param.name ?? '', + }), + arg.valueExpression ?? 
errorNode + ); + } + + argumentErrors = true; + } + } + } else { + const paramName = matchingParam.param.name!; + const paramType = matchingParam.type; + + if (paramMap.has(paramName)) { + if (errorNode) { + evaluator.addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.paramAlreadyAssigned().format({ name: arg.name.d.value }), + arg.name + ); + } + + argumentErrors = true; + } else { + const diag = new DiagnosticAddendum(); + + const argTypeResult = evaluator.getTypeOfExpression( + arg.valueExpression, + /* flags */ undefined, + makeInferenceContext(paramType) + ); + + if (!evaluator.assignType(paramType, argTypeResult.type, diag, constraints)) { + if (errorNode) { + evaluator.addDiagnostic( + DiagnosticRule.reportArgumentType, + LocMessage.argAssignmentParamFunction().format({ + argType: evaluator.printType(argTypeResult.type), + paramType: evaluator.printType(paramType), + functionName: origFunctionType.shared.name, + paramName, + }), + arg.valueExpression ?? errorNode + ); + } + + argumentErrors = true; + } + paramMap.set(paramName, true); + } + } + } + }); + + const specializedFunctionType = evaluator.solveAndApplyConstraints(origFunctionType, constraints); + if (!isFunction(specializedFunctionType)) { + return undefined; + } + + // Create a new parameter list that omits parameters that have been + // populated already. + const updatedParamList: FunctionParam[] = specializedFunctionType.shared.parameters.map((param, index) => { + let newType = FunctionType.getParamType(specializedFunctionType, index); + + // If this is an **kwargs with an unpacked TypedDict, mark the provided + // TypedDict entries as provided. + if ( + param.category === ParamCategory.KwargsDict && + isClassInstance(newType) && + isUnpackedClass(newType) && + ClassType.isTypedDictClass(newType) + ) { + const typedDictEntries = getTypedDictMembersForClass(evaluator, newType); + const narrowedEntriesMap = new Map(newType.priv.typedDictNarrowedEntries ?? 
[]); + + typedDictEntries.knownItems.forEach((entry, name) => { + if (paramMap.has(name)) { + narrowedEntriesMap.set(name, { ...entry, isRequired: false }); + } + }); + + newType = ClassType.cloneAsInstance( + ClassType.cloneForNarrowedTypedDictEntries(newType, narrowedEntriesMap) + ); + } + + // If it's a keyword parameter that has been assigned a value through + // the "partial" mechanism, mark it has having a default value. + let newDefaultType = FunctionType.getParamDefaultType(specializedFunctionType, index); + if (param.name && paramMap.get(param.name)) { + newDefaultType = AnyType.create(/* isEllipsis */ true); + } + return FunctionParam.create(param.category, newType, param.flags, param.name, newDefaultType); + }); + const unassignedParamList = updatedParamList.filter((param) => { + if (param.category === ParamCategory.KwargsDict) { + return false; + } + if (param.category === ParamCategory.ArgsList) { + return true; + } + return !param.name || !paramMap.has(param.name); + }); + const assignedKeywordParamList = updatedParamList.filter((param) => { + return param.name && paramMap.get(param.name); + }); + const kwargsParam = updatedParamList.filter((param) => { + return param.category === ParamCategory.KwargsDict; + }); + + const newParamList: FunctionParam[] = []; + appendArray(newParamList, unassignedParamList); + appendArray(newParamList, assignedKeywordParamList); + appendArray(newParamList, kwargsParam); + + // Create a new __call__ method that uses the remaining parameters. 
+ const newCallMemberType = FunctionType.createInstance( + partialCallMemberType.shared.name, + partialCallMemberType.shared.fullName, + partialCallMemberType.shared.moduleName, + partialCallMemberType.shared.flags, + specializedFunctionType.shared.docString + ); + + if (partialCallMemberType.shared.parameters.length > 0) { + FunctionType.addParam(newCallMemberType, partialCallMemberType.shared.parameters[0]); + } + newParamList.forEach((param) => { + FunctionType.addParam(newCallMemberType, param); + }); + + newCallMemberType.shared.declaredReturnType = specializedFunctionType.shared.declaredReturnType + ? FunctionType.getEffectiveReturnType(specializedFunctionType) + : specializedFunctionType.shared.inferredReturnType?.type; + newCallMemberType.shared.declaration = partialCallMemberType.shared.declaration; + newCallMemberType.shared.typeVarScopeId = specializedFunctionType.shared.typeVarScopeId; + + return { returnType: newCallMemberType, isTypeIncomplete: false, argumentErrors }; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/constructors.ts b/python-parser/packages/pyright-internal/src/analyzer/constructors.ts new file mode 100644 index 00000000..d96eb1d5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/constructors.ts @@ -0,0 +1,1122 @@ +/* + * constructors.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides type evaluation logic for constructors. A constructor + * in Python is implemented by a `__call__` method on the metaclass, + * which is typically the `type` class. The default implementation + * calls the `__new__` method on the class to allocate the object. + * If the resulting object is an instance of the class, it then calls + * the `__init__` method on the resulting object with the same arguments. 
+ */ + +import { appendArray } from '../common/collectionUtils'; +import { DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { ExpressionNode, ParamCategory } from '../parser/parseNodes'; +import { ConstraintSolution } from './constraintSolution'; +import { addConstraintsForExpectedType } from './constraintSolver'; +import { ConstraintTracker } from './constraintTracker'; +import { applyConstructorTransform, hasConstructorTransform } from './constructorTransform'; +import { Arg, CallResult, TypeEvaluator, TypeResult } from './typeEvaluatorTypes'; +import { + ClassType, + FunctionType, + FunctionTypeFlags, + InheritanceChain, + OverloadedType, + Type, + TypeVarType, + UnknownType, + combineTypes, + findSubtype, + isAny, + isClass, + isClassInstance, + isFunction, + isFunctionOrOverloaded, + isInstantiableClass, + isNever, + isOverloaded, + isTypeVar, + isUnknown, +} from './types'; +import { + InferenceContext, + MemberAccessFlags, + addTypeVarsToListIfUnique, + applySolvedTypeVars, + convertToInstance, + doForEachSignature, + doForEachSubtype, + getTypeVarArgsRecursive, + getTypeVarScopeId, + getTypeVarScopeIds, + isTupleClass, + lookUpClassMember, + mapSubtypes, + selfSpecializeClass, + specializeTupleClass, +} from './typeUtils'; + +// Fetches and binds the __new__ method from a class. +export function getBoundNewMethod( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + type: ClassType, + diag: DiagnosticAddendum | undefined = undefined, + additionalFlags = MemberAccessFlags.SkipObjectBaseClass +) { + const flags = + MemberAccessFlags.SkipClassMembers | + MemberAccessFlags.SkipAttributeAccessOverride | + MemberAccessFlags.TreatConstructorAsClassMethod | + additionalFlags; + + return evaluator.getTypeOfBoundMember(errorNode, type, '__new__', { method: 'get' }, diag, flags); +} + +// Fetches and binds the __init__ method from a class instance. 
+export function getBoundInitMethod( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + type: ClassType, + diag: DiagnosticAddendum | undefined = undefined, + additionalFlags = MemberAccessFlags.SkipObjectBaseClass +) { + const flags = + MemberAccessFlags.SkipInstanceMembers | MemberAccessFlags.SkipAttributeAccessOverride | additionalFlags; + + return evaluator.getTypeOfBoundMember(errorNode, type, '__init__', { method: 'get' }, diag, flags); +} + +// Fetches and binds the __call__ method from a class or its metaclass. +export function getBoundCallMethod(evaluator: TypeEvaluator, errorNode: ExpressionNode, type: ClassType) { + return evaluator.getTypeOfBoundMember( + errorNode, + type, + '__call__', + { method: 'get' }, + /* diag */ undefined, + MemberAccessFlags.SkipInstanceMembers | + MemberAccessFlags.SkipTypeBaseClass | + MemberAccessFlags.SkipAttributeAccessOverride + ); +} + +// Matches the arguments of a call to the constructor for a class. +// If successful, it returns the resulting (specialized) object type that +// is allocated by the constructor. If unsuccessful, it reports diagnostics. +export function validateConstructorArgs( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: Arg[], + type: ClassType, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined +): CallResult { + // If this is an unspecialized generic type alias, specialize it now + // using default type argument values. 
+ const aliasInfo = type.props?.typeAliasInfo; + if (aliasInfo?.shared.typeParams && !aliasInfo.typeArgs) { + type = applySolvedTypeVars(type, new ConstraintSolution(), { + replaceUnsolved: { + scopeIds: [aliasInfo.shared.typeVarScopeId], + tupleClassType: evaluator.getTupleClassType(), + }, + }) as ClassType; + } + + const metaclassResult = validateMetaclassCall( + evaluator, + errorNode, + argList, + type, + skipUnknownArgCheck, + inferenceContext, + /* useSpeculativeModeForArgs */ true + ); + + if (metaclassResult) { + const metaclassReturnType = metaclassResult.returnType ?? UnknownType.create(); + + // If there a custom `__call__` method on the metaclass that returns + // something other than an instance of the class, assume that it + // overrides the normal `type.__call__` logic and don't perform the usual + // __new__ and __init__ validation. + if (metaclassResult.argumentErrors || shouldSkipNewAndInitEvaluation(evaluator, type, metaclassReturnType)) { + validateMetaclassCall( + evaluator, + errorNode, + argList, + type, + skipUnknownArgCheck, + inferenceContext, + /* useSpeculativeModeForArgs */ false + ); + + return metaclassResult; + } + } + + // Determine whether the class overrides the object.__new__ method. + const newMethodDiag = new DiagnosticAddendum(); + const newMethodTypeResult = getBoundNewMethod(evaluator, errorNode, type, newMethodDiag); + if (newMethodTypeResult?.typeErrors) { + evaluator.addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, newMethodDiag.getString(), errorNode); + } + + const useConstructorTransform = hasConstructorTransform(type); + + // If there is a constructor transform, evaluate all arguments speculatively + // so we can later re-evaluate them in the context of the transform. + const returnResult = evaluator.useSpeculativeMode(useConstructorTransform ? 
errorNode : undefined, () => { + return validateNewAndInitMethods( + evaluator, + errorNode, + argList, + type, + skipUnknownArgCheck, + inferenceContext, + newMethodTypeResult + ); + }); + + let validatedArgExpressions = !useConstructorTransform || returnResult.argumentErrors; + + // Apply a constructor transform if applicable. + if (useConstructorTransform) { + if (returnResult.argumentErrors) { + // If there were errors when validating the __new__ and __init__ methods, + // we need to re-evaluate the arguments to generate error messages because + // we previously evaluated them speculatively. + validateNewAndInitMethods( + evaluator, + errorNode, + argList, + type, + skipUnknownArgCheck, + inferenceContext, + newMethodTypeResult + ); + + validatedArgExpressions = true; + } else if (returnResult.returnType) { + const transformed = applyConstructorTransform(evaluator, errorNode, argList, type, { + argumentErrors: !!returnResult.argumentErrors, + returnType: returnResult.returnType, + isTypeIncomplete: !!returnResult.isTypeIncomplete, + }); + + if (transformed) { + returnResult.returnType = transformed.returnType; + + if (transformed.isTypeIncomplete) { + returnResult.isTypeIncomplete = true; + } + + if (transformed.argumentErrors) { + returnResult.argumentErrors = true; + } + + validatedArgExpressions = true; + } + } + } + + // If we weren't able to validate the args, analyze the expressions here + // to mark symbols referenced and report expression evaluation errors. 
+ if (!validatedArgExpressions) { + argList.forEach((arg) => { + if (arg.valueExpression && !evaluator.isSpeculativeModeInUse(arg.valueExpression)) { + evaluator.getTypeOfExpression(arg.valueExpression); + } + }); + } + + return returnResult; +} + +function validateNewAndInitMethods( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: Arg[], + type: ClassType, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined, + newMethodTypeResult: TypeResult | undefined +): CallResult { + let returnType: Type | undefined; + let validatedArgExpressions = false; + let argumentErrors = false; + let isTypeIncomplete = false; + const overloadsUsedForCall: FunctionType[] = []; + let newMethodReturnType: Type | undefined; + + // Validate __new__ if it is present. + if (newMethodTypeResult) { + // Use speculative mode for arg expressions because we don't know whether + // we'll need to re-evaluate these expressions later for __init__. + const newCallResult = validateNewMethod( + evaluator, + errorNode, + argList, + type, + skipUnknownArgCheck, + inferenceContext, + newMethodTypeResult, + /* useSpeculativeModeForArgs */ true + ); + + if (newCallResult.argumentErrors) { + argumentErrors = true; + } else { + appendArray(overloadsUsedForCall, newCallResult.overloadsUsedForCall ?? []); + } + + if (newCallResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + + newMethodReturnType = newCallResult.returnType; + } + + if (!newMethodReturnType || isDefaultNewMethod(newMethodTypeResult?.type)) { + // If there is no __new__ method or it uses a default signature, + // (cls, *args, **kwargs) -> Self, allow the __init__ method to + // determine the specialized type of the class. 
+ newMethodReturnType = ClassType.cloneAsInstance(type); + } else if (isUnknown(newMethodReturnType) || (newMethodTypeResult && isAny(newMethodTypeResult.type))) { + // If the __new__ method returns Unknown, we'll ignore its return + // type and assume that it returns Self. + newMethodReturnType = applySolvedTypeVars(ClassType.cloneAsInstance(type), new ConstraintSolution(), { + replaceUnsolved: { + scopeIds: getTypeVarScopeIds(type), + tupleClassType: evaluator.getTupleClassType(), + }, + }) as ClassType; + } + + let initMethodTypeResult: TypeResult | undefined; + + // If there were errors evaluating the __new__ method, assume that __new__ + // returns the class instance and proceed accordingly. This may produce + // false positives in some cases, but it will prevent false negatives + // if the __init__ method also produces type errors (perhaps unrelated + // to the errors in the __new__ method). + if (argumentErrors) { + initMethodTypeResult = { type: convertToInstance(type) }; + } + + // Validate __init__ if it's present. + if ( + !isNever(newMethodReturnType) && + !shouldSkipInitEvaluation(evaluator, type, newMethodReturnType) && + isClassInstance(newMethodReturnType) + ) { + // If the __new__ method returned the same type as the class it's constructing + // but didn't supply solved type arguments, we'll ignore its specialized return + // type and rely on the __init__ method to supply the type arguments instead. + let initMethodBindToType = newMethodReturnType; + if ( + initMethodBindToType.priv.typeArgs && + initMethodBindToType.priv.typeArgs.some((typeArg) => isUnknown(typeArg)) + ) { + initMethodBindToType = ClassType.cloneAsInstance(type); + } + + // Determine whether the class overrides the object.__init__ method. 
+ const initMethodDiag = new DiagnosticAddendum(); + initMethodTypeResult = getBoundInitMethod(evaluator, errorNode, initMethodBindToType, initMethodDiag); + if (initMethodTypeResult?.typeErrors) { + evaluator.addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, initMethodDiag.getString(), errorNode); + } + + // Validate __init__ if it's present. + if (initMethodTypeResult) { + const initCallResult = validateInitMethod( + evaluator, + errorNode, + argList, + initMethodBindToType, + skipUnknownArgCheck, + inferenceContext, + initMethodTypeResult.type + ); + + if (initCallResult.argumentErrors) { + argumentErrors = true; + } else if (initCallResult.overloadsUsedForCall) { + overloadsUsedForCall.push(...initCallResult.overloadsUsedForCall); + } + + if (initCallResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + + returnType = initCallResult.returnType; + validatedArgExpressions = true; + skipUnknownArgCheck = true; + } + } + + if (!validatedArgExpressions && newMethodTypeResult) { + // If we skipped the __init__ method and the __new__ method was evaluated only + // speculatively, evaluate it non-speculatively now so we can report errors. + if (!evaluator.isSpeculativeModeInUse(errorNode)) { + validateNewMethod( + evaluator, + errorNode, + argList, + type, + skipUnknownArgCheck, + inferenceContext, + newMethodTypeResult, + /* useSpeculativeModeForArgs */ false + ); + } + + validatedArgExpressions = true; + returnType = newMethodReturnType; + } + + // If the class doesn't override object.__new__ or object.__init__, use the + // fallback constructor type evaluation for the `object` class. 
+ if (!newMethodTypeResult && !initMethodTypeResult) { + const callResult = validateFallbackConstructorCall(evaluator, errorNode, argList, type, inferenceContext); + + if (callResult.argumentErrors) { + argumentErrors = true; + } else if (callResult.overloadsUsedForCall) { + appendArray(overloadsUsedForCall, callResult.overloadsUsedForCall); + } + + if (callResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + + returnType = callResult.returnType ?? UnknownType.create(); + } + + return { argumentErrors, returnType, isTypeIncomplete, overloadsUsedForCall }; +} + +// Evaluates the __new__ method for type correctness. If useSpeculativeModeForArgs +// is true, use speculative mode to evaluate the arguments (unless an argument +// error is produced, in which case it's OK to use speculative mode). +function validateNewMethod( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: Arg[], + type: ClassType, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined, + newMethodTypeResult: TypeResult, + useSpeculativeModeForArgs: boolean +): CallResult { + let newReturnType: Type | undefined; + let isTypeIncomplete = false; + let argumentErrors = false; + const overloadsUsedForCall: FunctionType[] = []; + + const constraints = new ConstraintTracker(); + + const callResult = evaluator.useSpeculativeMode( + useSpeculativeModeForArgs ? errorNode : undefined, + () => { + return evaluator.validateCallArgs( + errorNode, + argList, + newMethodTypeResult, + constraints, + skipUnknownArgCheck, + inferenceContext + ); + }, + { dependentType: newMethodTypeResult.type } + ); + + if (callResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + + if (callResult.argumentErrors) { + argumentErrors = true; + + // Evaluate the arguments in a non-speculative manner to generate any diagnostics. 
+ evaluator.validateCallArgs( + errorNode, + argList, + newMethodTypeResult, + constraints, + skipUnknownArgCheck, + inferenceContext + ); + } else { + newReturnType = callResult.returnType; + + if (overloadsUsedForCall.length === 0 && callResult.overloadsUsedForCall) { + overloadsUsedForCall.push(...callResult.overloadsUsedForCall); + } + } + + if (newReturnType) { + // Special-case the 'tuple' type specialization to use the homogenous + // arbitrary-length form. + if (isClassInstance(newReturnType) && isTupleClass(newReturnType) && !newReturnType.priv.tupleTypeArgs) { + if (newReturnType.priv.typeArgs && newReturnType.priv.typeArgs.length === 1) { + newReturnType = specializeTupleClass(newReturnType, [ + { type: newReturnType.priv.typeArgs[0], isUnbounded: true }, + ]); + } + + newReturnType = applyExpectedTypeForTupleConstructor(newReturnType, inferenceContext); + } + } else { + newReturnType = applyExpectedTypeForConstructor(evaluator, type, inferenceContext, constraints); + } + + return { argumentErrors, returnType: newReturnType, isTypeIncomplete, overloadsUsedForCall }; +} + +function validateInitMethod( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: Arg[], + type: ClassType, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined, + initMethodType: Type +): CallResult { + let isTypeIncomplete = false; + let argumentErrors = false; + const overloadsUsedForCall: FunctionType[] = []; + + const constraints = new ConstraintTracker(); + if (type.priv.typeArgs) { + addConstraintsForExpectedType(evaluator, type, type, constraints, /* liveTypeVarScopes */ undefined); + } + + const returnTypeOverride = selfSpecializeClass(type); + const callResult = evaluator.validateCallArgs( + errorNode, + argList, + { type: initMethodType }, + constraints, + skipUnknownArgCheck, + inferenceContext ? 
{ ...inferenceContext, returnTypeOverride } : undefined + ); + + let adjustedClassType = type; + if ( + callResult.specializedInitSelfType && + isClassInstance(callResult.specializedInitSelfType) && + ClassType.isSameGenericClass(callResult.specializedInitSelfType, adjustedClassType) + ) { + adjustedClassType = ClassType.cloneAsInstantiable(callResult.specializedInitSelfType); + } + + const returnType = applyExpectedTypeForConstructor( + evaluator, + adjustedClassType, + /* inferenceContext */ undefined, + constraints + ); + + if (callResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + + if (callResult.argumentErrors) { + argumentErrors = true; + } else if (callResult.overloadsUsedForCall) { + overloadsUsedForCall.push(...callResult.overloadsUsedForCall); + } + + return { argumentErrors, returnType, isTypeIncomplete, overloadsUsedForCall }; +} + +function validateFallbackConstructorCall( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: Arg[], + type: ClassType, + inferenceContext: InferenceContext | undefined +): CallResult { + // Bind the __new__ method from the object class. + const newMethodType = getBoundNewMethod( + evaluator, + errorNode, + type, + /* diag */ undefined, + /* additionalFlags */ MemberAccessFlags.Default + )?.type; + + // If there was no object.__new__ or it's not a callable, then something has + // gone terribly wrong in the typeshed stubs. To avoid crashing, simply + // return the instance. 
+ if (!newMethodType || !isFunctionOrOverloaded(newMethodType)) { + return { returnType: convertToInstance(type) }; + } + + return validateNewMethod( + evaluator, + errorNode, + argList, + type, + /* skipUnknownArgCheck */ false, + inferenceContext, + { type: newMethodType }, + /* useSpeculativeModeForArgs */ false + ); +} + +function validateMetaclassCall( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: Arg[], + type: ClassType, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined, + useSpeculativeModeForArgs: boolean +): CallResult | undefined { + const metaclassCallMethodInfo = getBoundCallMethod(evaluator, errorNode, type); + + if (!metaclassCallMethodInfo) { + return undefined; + } + + const callResult = evaluator.useSpeculativeMode(useSpeculativeModeForArgs ? errorNode : undefined, () => { + return evaluator.validateCallArgs( + errorNode, + argList, + metaclassCallMethodInfo, + /* constraints */ undefined, + skipUnknownArgCheck, + inferenceContext + ); + }); + + if (!callResult.argumentErrors) { + // If the return type is unannotated, don't use the inferred return type. + const callType = metaclassCallMethodInfo.type; + if (isFunction(callType) && !callType.shared.declaredReturnType) { + return undefined; + } + + // If the return type is unknown, ignore it. + if (callResult.returnType && isUnknown(callResult.returnType)) { + return undefined; + } + } + + return callResult; +} + +function applyExpectedSubtypeForConstructor( + evaluator: TypeEvaluator, + type: ClassType, + expectedSubtype: Type, + constraints: ConstraintTracker +): Type | undefined { + const specializedType = evaluator.solveAndApplyConstraints(ClassType.cloneAsInstance(type), constraints, { + replaceUnsolved: { + scopeIds: [], + tupleClassType: evaluator.getTupleClassType(), + }, + }); + + if (!evaluator.assignType(expectedSubtype, specializedType)) { + return undefined; + } + + // If the expected type is "Any", transform it to an Any. 
+ if (isAny(expectedSubtype)) { + return expectedSubtype; + } + + return specializedType; +} + +// Handles the case where a constructor is a generic type and the type +// arguments are not specified but can be provided by the expected type. +function applyExpectedTypeForConstructor( + evaluator: TypeEvaluator, + type: ClassType, + inferenceContext: InferenceContext | undefined, + constraints: ConstraintTracker +): Type { + let defaultIfNotFound = true; + + // If this isn't a generic type or it's a type that has already been + // explicitly specialized, the expected type isn't applicable. + if (type.shared.typeParams.length === 0 || type.priv.typeArgs) { + return evaluator.solveAndApplyConstraints(ClassType.cloneAsInstance(type), constraints, { + replaceUnsolved: { + scopeIds: [], + tupleClassType: evaluator.getTupleClassType(), + }, + }); + } + + if (inferenceContext) { + const specializedExpectedType = mapSubtypes(inferenceContext.expectedType, (expectedSubtype) => { + return applyExpectedSubtypeForConstructor(evaluator, type, expectedSubtype, constraints); + }); + + if (!isNever(specializedExpectedType)) { + return specializedExpectedType; + } + + // If the expected type didn't provide TypeVar values, remaining + // unsolved TypeVars should be considered Unknown unless they were + // provided explicitly in the constructor call. + if (type.priv.typeArgs) { + defaultIfNotFound = false; + } + } + + const specializedType = evaluator.solveAndApplyConstraints(type, constraints, { + replaceUnsolved: defaultIfNotFound + ? { + scopeIds: getTypeVarScopeIds(type), + tupleClassType: evaluator.getTupleClassType(), + } + : undefined, + }) as ClassType; + return ClassType.cloneAsInstance(specializedType); +} + +// Similar to applyExpectedTypeForConstructor, this function handles the +// special case of the tuple class. 
+function applyExpectedTypeForTupleConstructor(type: ClassType, inferenceContext: InferenceContext | undefined) { + let specializedType = type; + + if ( + inferenceContext && + isClassInstance(inferenceContext.expectedType) && + isTupleClass(inferenceContext.expectedType) && + inferenceContext.expectedType.priv.tupleTypeArgs + ) { + specializedType = specializeTupleClass(type, inferenceContext.expectedType.priv.tupleTypeArgs); + } + + return specializedType; +} + +// Synthesize a function that represents the constructor for this class +// taking into consideration the __init__ and __new__ methods. +export function createFunctionFromConstructor( + evaluator: TypeEvaluator, + classType: ClassType, + selfType: ClassType | TypeVarType | undefined = undefined, + recursionCount = 0 +): Type | undefined { + const fromMetaclassCall = createFunctionFromMetaclassCall(evaluator, classType, recursionCount); + if (fromMetaclassCall) { + return fromMetaclassCall; + } + + let fromNew = createFunctionFromNewMethod(evaluator, classType, selfType, recursionCount); + + if (fromNew) { + let skipInitMethod = false; + + doForEachSignature(fromNew, (signature) => { + const newMethodReturnType = FunctionType.getEffectiveReturnType(signature); + if (newMethodReturnType && shouldSkipInitEvaluation(evaluator, classType, newMethodReturnType)) { + skipInitMethod = true; + } + }); + + if (skipInitMethod) { + return fromNew; + } + } + + const fromInit = createFunctionFromInitMethod(evaluator, classType, selfType, recursionCount); + + // If there is a valid __init__ method and the __new__ method + // is the default __new__ method provided by the object class, + // discard the __new__ method. + if (fromInit && fromNew && isDefaultNewMethod(fromNew)) { + fromNew = undefined; + } + + // If there is both a __new__ and __init__ method, return a union + // comprised of both resulting function types. 
+ if (fromNew && fromInit) { + return combineTypes([fromInit, fromNew]); + } + + if (fromNew || fromInit) { + return fromNew ?? fromInit; + } + + return fromNew ?? createFunctionFromObjectNewMethod(classType); +} + +function createFunctionFromMetaclassCall( + evaluator: TypeEvaluator, + classType: ClassType, + recursionCount: number +): FunctionType | OverloadedType | undefined { + const metaclass = classType.shared.effectiveMetaclass; + if (!metaclass || !isClass(metaclass)) { + return undefined; + } + + const callInfo = lookUpClassMember( + metaclass, + '__call__', + MemberAccessFlags.SkipInstanceMembers | + MemberAccessFlags.SkipTypeBaseClass | + MemberAccessFlags.SkipAttributeAccessOverride + ); + + if (!callInfo) { + return undefined; + } + + const callType = evaluator.getTypeOfMember(callInfo); + if (!isFunctionOrOverloaded(callType)) { + return undefined; + } + + const boundCallType = evaluator.bindFunctionToClassOrObject( + classType, + callType, + callInfo && isInstantiableClass(callInfo.classType) ? callInfo.classType : undefined, + /* treatConstructorAsClassMethod */ false, + classType, + /* diag */ undefined, + recursionCount + ); + + if (!boundCallType) { + return undefined; + } + + let useMetaclassCall = false; + + // Look at the signatures of all the __call__ methods to determine whether + // any of them returns something other than the instance of the class being + // constructed. + doForEachSignature(boundCallType, (signature) => { + if (signature.shared.declaredReturnType) { + const returnType = FunctionType.getEffectiveReturnType(signature); + if (returnType && shouldSkipNewAndInitEvaluation(evaluator, classType, returnType)) { + useMetaclassCall = true; + } + } + }); + + return useMetaclassCall ? 
boundCallType : undefined; +} + +function createFunctionFromNewMethod( + evaluator: TypeEvaluator, + classType: ClassType, + selfType: ClassType | TypeVarType | undefined, + recursionCount: number +): FunctionType | OverloadedType | undefined { + const newInfo = lookUpClassMember( + classType, + '__new__', + MemberAccessFlags.SkipInstanceMembers | + MemberAccessFlags.SkipAttributeAccessOverride | + MemberAccessFlags.SkipObjectBaseClass + ); + + if (!newInfo) { + return undefined; + } + + const newType = evaluator.getTypeOfMember(newInfo); + + const convertNewToConstructor = (newSubtype: FunctionType) => { + // If there are no parameters that include class-scoped type parameters, + // self-specialize the class because the type arguments for the class + // can't be solved if there are no parameters to supply them. + const hasParamsWithTypeVars = newSubtype.shared.parameters.some((param, index) => { + if (index === 0 || !param.name) { + return false; + } + + const paramType = FunctionType.getParamType(newSubtype, index); + const typeVars = getTypeVarArgsRecursive(paramType); + return typeVars.some((typeVar) => typeVar.priv.scopeId === getTypeVarScopeId(classType)); + }); + + const boundNew = evaluator.bindFunctionToClassOrObject( + hasParamsWithTypeVars ? selfSpecializeClass(classType) : classType, + newSubtype, + newInfo && isInstantiableClass(newInfo.classType) ? 
newInfo.classType : undefined, + /* treatConstructorAsClassMethod */ true, + selfType, + /* diag */ undefined, + recursionCount + ) as FunctionType | undefined; + + if (!boundNew) { + return undefined; + } + + const convertedNew = FunctionType.clone(boundNew); + convertedNew.shared.typeVarScopeId = newSubtype.shared.typeVarScopeId; + + if (!convertedNew.shared.docString && classType.shared.docString) { + convertedNew.shared.docString = classType.shared.docString; + } + + convertedNew.shared.flags &= ~(FunctionTypeFlags.StaticMethod | FunctionTypeFlags.ConstructorMethod); + convertedNew.priv.constructorTypeVarScopeId = getTypeVarScopeId(classType); + + return convertedNew; + }; + + if (isFunction(newType)) { + return convertNewToConstructor(newType); + } + + if (!isOverloaded(newType)) { + return undefined; + } + + const newOverloads: FunctionType[] = []; + OverloadedType.getOverloads(newType).forEach((overload) => { + const converted = convertNewToConstructor(overload); + if (converted) { + newOverloads.push(converted); + } + }); + + if (newOverloads.length === 0) { + return undefined; + } + + if (newOverloads.length === 1) { + return newOverloads[0]; + } + + return OverloadedType.create(newOverloads); +} + +function createFunctionFromObjectNewMethod(classType: ClassType) { + // Return a fallback constructor based on the object.__new__ method. + const constructorFunction = FunctionType.createSynthesizedInstance('__new__', FunctionTypeFlags.None); + constructorFunction.shared.declaredReturnType = ClassType.cloneAsInstance(classType); + + // If this is type[T] or a protocol, we don't know what parameters are accepted + // by the constructor, so add the default parameters. 
+ if (classType.priv.includeSubclasses || ClassType.isProtocolClass(classType)) { + FunctionType.addDefaultParams(constructorFunction); + } + + if (!constructorFunction.shared.docString && classType.shared.docString) { + constructorFunction.shared.docString = classType.shared.docString; + } + + return constructorFunction; +} + +function createFunctionFromInitMethod( + evaluator: TypeEvaluator, + classType: ClassType, + selfType: ClassType | TypeVarType | undefined, + recursionCount: number +): FunctionType | OverloadedType | undefined { + // Use the __init__ method if available. It's usually more detailed. + const initInfo = lookUpClassMember( + classType, + '__init__', + MemberAccessFlags.SkipInstanceMembers | + MemberAccessFlags.SkipAttributeAccessOverride | + MemberAccessFlags.SkipObjectBaseClass + ); + + if (!initInfo) { + return undefined; + } + + const initType = evaluator.getTypeOfMember(initInfo); + const objectType = ClassType.cloneAsInstance(classType); + + function convertInitToConstructor(initSubtype: FunctionType) { + const boundInit = evaluator.bindFunctionToClassOrObject( + objectType, + initSubtype, + initInfo && isInstantiableClass(initInfo.classType) ? initInfo.classType : undefined, + /* treatConstructorAsClassMethod */ undefined, + selfType, + /* diag */ undefined, + recursionCount + ) as FunctionType | undefined; + + if (!boundInit) { + return undefined; + } + + const convertedInit = FunctionType.clone(boundInit); + let returnType = selfType; + if (!returnType) { + returnType = objectType; + + // If this is a generic type, self-specialize the class (i.e. fill in + // its own type parameters as type arguments). + if (objectType.shared.typeParams.length > 0 && !objectType.priv.typeArgs) { + const constraints = new ConstraintTracker(); + + // If a TypeVar is not used in any of the parameter types, it should take + // on its default value (typically Unknown) in the resulting specialized type. 
+ const typeVarsInParams: TypeVarType[] = []; + + convertedInit.shared.parameters.forEach((param, index) => { + const paramType = FunctionType.getParamType(convertedInit, index); + addTypeVarsToListIfUnique(typeVarsInParams, getTypeVarArgsRecursive(paramType)); + }); + + typeVarsInParams.forEach((typeVar) => { + constraints.setBounds(typeVar, typeVar); + }); + + returnType = evaluator.solveAndApplyConstraints(objectType, constraints, { + replaceUnsolved: { + scopeIds: getTypeVarScopeIds(objectType), + tupleClassType: evaluator.getTupleClassType(), + }, + }) as ClassType; + } + } + + convertedInit.shared.declaredReturnType = boundInit.priv.strippedFirstParamType ?? returnType; + + if (convertedInit.priv.specializedTypes) { + convertedInit.priv.specializedTypes.returnType = returnType; + } + + if (!convertedInit.shared.docString && classType.shared.docString) { + convertedInit.shared.docString = classType.shared.docString; + } + + convertedInit.shared.flags &= ~FunctionTypeFlags.StaticMethod; + convertedInit.priv.constructorTypeVarScopeId = getTypeVarScopeId(classType); + + return convertedInit; + } + + if (isFunction(initType)) { + return convertInitToConstructor(initType); + } + + if (!isOverloaded(initType)) { + return undefined; + } + + const initOverloads: FunctionType[] = []; + OverloadedType.getOverloads(initType).forEach((overload) => { + const converted = convertInitToConstructor(overload); + if (converted) { + initOverloads.push(converted); + } + }); + + if (initOverloads.length === 0) { + return undefined; + } + + if (initOverloads.length === 1) { + return initOverloads[0]; + } + + return OverloadedType.create(initOverloads); +} + +// If the __call__ method returns a type that is not an instance of the class, +// skip the __new__ and __init__ method evaluation. 
+function shouldSkipNewAndInitEvaluation( + evaluator: TypeEvaluator, + classType: ClassType, + callMethodReturnType: Type +): boolean { + if ( + !evaluator.assignType(convertToInstance(classType), callMethodReturnType) || + isNever(callMethodReturnType) || + findSubtype(callMethodReturnType, (subtype) => isAny(subtype)) + ) { + return true; + } + + // Handle the special case of an enum class, where the __new__ and __init__ + // methods are replaced at runtime by the metaclass. + if (ClassType.isEnumClass(classType)) { + return true; + } + + return false; +} + +// If __new__ returns a type that is not an instance of the class, skip the +// __init__ method evaluation. This is consistent with the behavior of the +// type.__call__ runtime behavior. +function shouldSkipInitEvaluation(evaluator: TypeEvaluator, classType: ClassType, newMethodReturnType: Type): boolean { + const returnType = evaluator.makeTopLevelTypeVarsConcrete(newMethodReturnType); + + let skipInitCheck = false; + doForEachSubtype(returnType, (subtype) => { + if (isUnknown(subtype)) { + return; + } + + if (isClassInstance(subtype)) { + const inheritanceChain: InheritanceChain = []; + const isDerivedFrom = ClassType.isDerivedFrom( + ClassType.cloneAsInstantiable(subtype), + classType, + inheritanceChain + ); + + if (!isDerivedFrom) { + skipInitCheck = true; + } + + return; + } + + skipInitCheck = true; + }); + + return skipInitCheck; +} + +// Determine whether the __new__ method is the placeholder signature +// of "def __new__(cls, *args, **kwargs) -> Self". 
+function isDefaultNewMethod(newMethod?: Type): boolean { + if (!newMethod || !isFunction(newMethod)) { + return false; + } + + const params = newMethod.shared.parameters; + if (params.length !== 2) { + return false; + } + + if (params[0].category !== ParamCategory.ArgsList || params[1].category !== ParamCategory.KwargsDict) { + return false; + } + + let returnType: Type | undefined; + if (newMethod.shared.declaredReturnType) { + returnType = newMethod.shared.declaredReturnType; + } else { + returnType = newMethod.priv.specializedTypes?.returnType ?? newMethod.shared.inferredReturnType?.type; + } + + if (!returnType || !isTypeVar(returnType) || !TypeVarType.isSelf(returnType)) { + return false; + } + + return true; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/dataClasses.ts b/python-parser/packages/pyright-internal/src/analyzer/dataClasses.ts new file mode 100644 index 00000000..7c873763 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/dataClasses.ts @@ -0,0 +1,1597 @@ +/* + * dataClasses.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides special-case logic for the construction of dataclass + * classes and dataclass transform. 
+ */ + +import { assert } from '../common/debug'; +import { DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { PythonVersion, pythonVersion3_13 } from '../common/pythonVersion'; +import { LocMessage } from '../localization/localize'; +import { + ArgCategory, + ArgumentNode, + CallNode, + ClassNode, + ExpressionNode, + NameNode, + ParamCategory, + ParseNode, + ParseNodeType, + TypeAnnotationNode, +} from '../parser/parseNodes'; +import * as AnalyzerNodeInfo from './analyzerNodeInfo'; +import { getFileInfo } from './analyzerNodeInfo'; +import { ConstraintSolution } from './constraintSolution'; +import { ConstraintTracker } from './constraintTracker'; +import { createFunctionFromConstructor, getBoundInitMethod } from './constructors'; +import { DeclarationType } from './declaration'; +import { updateNamedTupleBaseClass } from './namedTuples'; +import { + getClassFullName, + getEnclosingClassOrFunction, + getScopeIdForNode, + getTypeSourceId, + getTypeVarScopesForNode, +} from './parseTreeUtils'; +import { evaluateStaticBoolExpression } from './staticExpressions'; +import { Symbol, SymbolFlags } from './symbol'; +import { isPrivateName } from './symbolNameUtils'; +import { Arg, EvalFlags, TypeEvaluator, TypeResult } from './typeEvaluatorTypes'; +import { + AnyType, + ClassType, + ClassTypeFlags, + combineTypes, + DataClassBehaviors, + DataClassEntry, + FunctionParam, + FunctionParamFlags, + FunctionType, + FunctionTypeFlags, + isClass, + isClassInstance, + isFunction, + isFunctionOrOverloaded, + isInstantiableClass, + isOverloaded, + isUnion, + OverloadedType, + TupleTypeArg, + Type, + TypeVarScopeType, + TypeVarType, + UnknownType, + Variance, +} from './types'; +import { + addSolutionForSelfType, + applySolvedTypeVars, + buildSolution, + buildSolutionFromSpecializedClass, + computeMroLinearization, + convertToInstance, + doForEachSignature, + getTypeVarScopeId, + getTypeVarScopeIds, + 
isLiteralType, + isMetaclassInstance, + makeInferenceContext, + makeTypeVarsBound, + makeTypeVarsFree, + requiresSpecialization, + specializeTupleClass, + synthesizeTypeVarForSelfCls, +} from './typeUtils'; + +// Validates fields for compatibility with a dataclass and synthesizes +// an appropriate __new__ and __init__ methods plus __dataclass_fields__ +// and __match_args__ class variables. +export function synthesizeDataClassMethods( + evaluator: TypeEvaluator, + node: ClassNode, + classType: ClassType, + isNamedTuple: boolean, + skipSynthesizeInit: boolean, + hasExistingInitMethod: boolean, + skipSynthesizeHash: boolean +) { + assert(ClassType.isDataClass(classType) || isNamedTuple); + + const classTypeVar = synthesizeTypeVarForSelfCls(classType, /* isClsParam */ true); + const newType = FunctionType.createSynthesizedInstance('__new__', FunctionTypeFlags.ConstructorMethod); + newType.priv.constructorTypeVarScopeId = getTypeVarScopeId(classType); + const initType = FunctionType.createSynthesizedInstance('__init__'); + initType.priv.constructorTypeVarScopeId = getTypeVarScopeId(classType); + + // Generate both a __new__ and an __init__ method. The parameters of the + // __new__ method are based on field definitions for NamedTuple classes, + // and the parameters of the __init__ method are based on field definitions + // in other cases. 
+ FunctionType.addParam( + newType, + FunctionParam.create(ParamCategory.Simple, classTypeVar, FunctionParamFlags.TypeDeclared, 'cls') + ); + if (!isNamedTuple) { + FunctionType.addDefaultParams(newType); + newType.shared.flags |= FunctionTypeFlags.GradualCallableForm; + } + newType.shared.declaredReturnType = convertToInstance(classTypeVar); + + const selfType = synthesizeTypeVarForSelfCls(classType, /* isClsParam */ false); + const selfParam = FunctionParam.create(ParamCategory.Simple, selfType, FunctionParamFlags.TypeDeclared, 'self'); + FunctionType.addParam(initType, selfParam); + if (isNamedTuple) { + FunctionType.addDefaultParams(initType); + initType.shared.flags |= FunctionTypeFlags.GradualCallableForm; + } + initType.shared.declaredReturnType = evaluator.getNoneType(); + + // For Python 3.13 and newer, synthesize a __replace__ method. + let replaceType: FunctionType | undefined; + if ( + PythonVersion.isGreaterOrEqualTo( + AnalyzerNodeInfo.getFileInfo(node).executionEnvironment.pythonVersion, + pythonVersion3_13 + ) + ) { + replaceType = FunctionType.createSynthesizedInstance('__replace__'); + FunctionType.addParam(replaceType, selfParam); + FunctionType.addKeywordOnlyParamSeparator(replaceType); + replaceType.shared.declaredReturnType = selfType; + } + + // Maintain a list of all dataclass entries (including + // those from inherited classes) plus a list of only those + // entries added by this class. + const localDataClassEntries: DataClassEntry[] = []; + const fullDataClassEntries: DataClassEntry[] = []; + const namedTupleEntries = new Set(); + const allAncestorsKnown = addInheritedDataClassEntries(classType, fullDataClassEntries); + + if (!allAncestorsKnown) { + // If one or more ancestor classes have an unknown type, we cannot + // safely determine the parameter list, so we'll accept any parameters + // to avoid a false positive. 
+ FunctionType.addDefaultParams(initType); + + if (replaceType) { + FunctionType.addDefaultParams(replaceType); + } + } + + // Add field-based parameters to either the __new__ or __init__ method + // based on whether this is a NamedTuple or a dataclass. + const constructorType = isNamedTuple ? newType : initType; + + // Maintain a list of "type evaluators". + type EntryTypeEvaluator = () => Type; + const localEntryTypeEvaluator: { entry: DataClassEntry; evaluator: EntryTypeEvaluator }[] = []; + let sawKeywordOnlySeparator = false; + + ClassType.getSymbolTable(classType).forEach((symbol, name) => { + if (symbol.isIgnoredForProtocolMatch()) { + return; + } + + // Apparently, `__hash__` is special-cased in a dataclass. I can't find + // this in the spec, but the runtime seems to treat is specially. + if (name === '__hash__') { + return; + } + + let isInferredFinal = false; + + // Only variables (not functions, classes, etc.) are considered. + let classVarDecl = symbol.getTypedDeclarations().find((decl) => { + if (decl.type !== DeclarationType.Variable) { + return false; + } + + const container = getEnclosingClassOrFunction(decl.node); + if (!container || container.nodeType !== ParseNodeType.Class) { + return false; + } + + return true; + }); + + // See if this is an unannotated (inferred) Final value. 
+ if (!classVarDecl) { + classVarDecl = symbol.getDeclarations().find((decl) => { + return decl.type === DeclarationType.Variable && !decl.typeAnnotationNode && decl.isFinal; + }); + + isInferredFinal = true; + } + + if (classVarDecl) { + let statement: ParseNode | undefined = classVarDecl.node; + + while (statement) { + if (statement.nodeType === ParseNodeType.Assignment) { + break; + } + + if (statement.nodeType === ParseNodeType.TypeAnnotation) { + if (statement.parent?.nodeType === ParseNodeType.Assignment) { + statement = statement.parent; + } + break; + } + + statement = statement.parent; + } + + if (!statement) { + return; + } + + let variableNameNode: NameNode | undefined; + let typeAnnotationNode: TypeAnnotationNode | undefined; + let aliasName: string | undefined; + let variableTypeEvaluator: EntryTypeEvaluator | undefined; + let hasDefault = false; + let isDefaultFactory = false; + let isKeywordOnly = ClassType.isDataClassKeywordOnly(classType) || sawKeywordOnlySeparator; + let defaultExpr: ExpressionNode | undefined; + let includeInInit = true; + let converter: ArgumentNode | undefined; + + if (statement.nodeType === ParseNodeType.Assignment) { + if ( + statement.d.leftExpr.nodeType === ParseNodeType.TypeAnnotation && + statement.d.leftExpr.d.valueExpr.nodeType === ParseNodeType.Name + ) { + variableNameNode = statement.d.leftExpr.d.valueExpr; + typeAnnotationNode = statement.d.leftExpr; + const assignmentStatement = statement; + variableTypeEvaluator = () => { + if (isInferredFinal && defaultExpr) { + return evaluator.getTypeOfExpression(defaultExpr).type; + } + + return evaluator.getTypeOfAnnotation( + (assignmentStatement.d.leftExpr as TypeAnnotationNode).d.annotation, + { + varTypeAnnotation: true, + allowFinal: !isNamedTuple, + allowClassVar: !isNamedTuple, + } + ); + }; + } + + hasDefault = true; + defaultExpr = statement.d.rightExpr; + + // If the RHS of the assignment is assigning a field instance where the + // "init" parameter is set to false, 
do not include it in the init method. + if (!isNamedTuple && statement.d.rightExpr.nodeType === ParseNodeType.Call) { + const callTypeResult = evaluator.getTypeOfExpression( + statement.d.rightExpr.d.leftExpr, + EvalFlags.CallBaseDefaults + ); + const callType = callTypeResult.type; + + if ( + !isNamedTuple && + isDataclassFieldConstructor( + callType, + classType.shared.dataClassBehaviors?.fieldDescriptorNames || [] + ) + ) { + const initArg = statement.d.rightExpr.d.args.find((arg) => arg.d.name?.d.value === 'init'); + if (initArg && initArg.d.valueExpr) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + includeInInit = + evaluateStaticBoolExpression( + initArg.d.valueExpr, + fileInfo.executionEnvironment, + fileInfo.definedConstants + ) ?? includeInInit; + } else { + includeInInit = + getDefaultArgValueForFieldSpecifier( + evaluator, + statement.d.rightExpr, + callTypeResult, + 'init' + ) ?? includeInInit; + } + + const kwOnlyArg = statement.d.rightExpr.d.args.find((arg) => arg.d.name?.d.value === 'kw_only'); + if (kwOnlyArg && kwOnlyArg.d.valueExpr) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + isKeywordOnly = + evaluateStaticBoolExpression( + kwOnlyArg.d.valueExpr, + fileInfo.executionEnvironment, + fileInfo.definedConstants + ) ?? isKeywordOnly; + } else { + isKeywordOnly = + getDefaultArgValueForFieldSpecifier( + evaluator, + statement.d.rightExpr, + callTypeResult, + 'kw_only' + ) ?? 
isKeywordOnly; + } + + const defaultValueArg = statement.d.rightExpr.d.args.find( + (arg) => arg.d.name?.d.value === 'default' + ); + hasDefault = !!defaultValueArg; + if (defaultValueArg?.d.valueExpr) { + defaultExpr = defaultValueArg.d.valueExpr; + } + + const defaultFactoryArg = statement.d.rightExpr.d.args.find( + (arg) => arg.d.name?.d.value === 'default_factory' || arg.d.name?.d.value === 'factory' + ); + if (defaultFactoryArg) { + hasDefault = true; + isDefaultFactory = true; + } + if (defaultFactoryArg?.d.valueExpr) { + defaultExpr = defaultFactoryArg.d.valueExpr; + } + + const aliasArg = statement.d.rightExpr.d.args.find((arg) => arg.d.name?.d.value === 'alias'); + if (aliasArg) { + const valueType = evaluator.getTypeOfExpression(aliasArg.d.valueExpr).type; + if ( + isClassInstance(valueType) && + ClassType.isBuiltIn(valueType, 'str') && + isLiteralType(valueType) + ) { + aliasName = valueType.priv.literalValue as string; + } + } + + const converterArg = statement.d.rightExpr.d.args.find( + (arg) => arg.d.name?.d.value === 'converter' + ); + if (converterArg && converterArg.d.valueExpr) { + converter = converterArg; + } + } + } + } else if (statement.nodeType === ParseNodeType.TypeAnnotation) { + if (statement.d.valueExpr.nodeType === ParseNodeType.Name) { + variableNameNode = statement.d.valueExpr; + typeAnnotationNode = statement; + const annotationStatement = statement; + variableTypeEvaluator = () => + evaluator.getTypeOfAnnotation(annotationStatement.d.annotation, { + varTypeAnnotation: true, + allowFinal: !isNamedTuple, + allowClassVar: !isNamedTuple, + }); + + // Is this a KW_ONLY separator introduced in Python 3.10? 
+ if (!isNamedTuple && statement.d.valueExpr.d.value === '_') { + const annotatedType = variableTypeEvaluator(); + + if (isClassInstance(annotatedType) && ClassType.isBuiltIn(annotatedType, 'KW_ONLY')) { + sawKeywordOnlySeparator = true; + variableNameNode = undefined; + typeAnnotationNode = undefined; + variableTypeEvaluator = undefined; + } + } + } + } + + if (variableNameNode && variableTypeEvaluator) { + const variableName = variableNameNode.d.value; + + // Named tuples don't allow attributes that begin with an underscore. + if (isNamedTuple && variableName.startsWith('_')) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.namedTupleFieldUnderscore(), + variableNameNode + ); + return; + } + + // Don't include class vars. PEP 557 indicates that they shouldn't + // be considered data class entries. + const variableSymbol = ClassType.getSymbolTable(classType).get(variableName); + namedTupleEntries.add(variableName); + + if (variableSymbol?.isClassVar()) { + // If an ancestor class declared an instance variable but this dataclass + // declares a ClassVar, delete the older one from the full data class entries. + const index = fullDataClassEntries.findIndex((p) => p.name === variableName); + if (index >= 0) { + fullDataClassEntries.splice(index, 1); + } + const dataClassEntry: DataClassEntry = { + name: variableName, + classType, + alias: aliasName, + isKeywordOnly: false, + hasDefault, + isDefaultFactory, + defaultExpr, + includeInInit, + nameNode: variableNameNode, + typeAnnotationNode: typeAnnotationNode, + type: UnknownType.create(), + isClassVar: true, + converter, + }; + localDataClassEntries.push(dataClassEntry); + } else { + // Create a new data class entry, but defer evaluation of the type until + // we've compiled the full list of data class entries for this class. This + // allows us to handle circular references in types. 
+ const dataClassEntry: DataClassEntry = { + name: variableName, + classType, + alias: aliasName, + isKeywordOnly, + hasDefault, + isDefaultFactory, + defaultExpr, + includeInInit, + nameNode: variableNameNode, + typeAnnotationNode: typeAnnotationNode, + type: UnknownType.create(), + isClassVar: false, + converter, + }; + localEntryTypeEvaluator.push({ entry: dataClassEntry, evaluator: variableTypeEvaluator }); + + // Add the new entry to the local entry list. + let insertIndex = localDataClassEntries.findIndex((e) => e.name === variableName); + if (insertIndex >= 0) { + localDataClassEntries[insertIndex] = dataClassEntry; + } else { + localDataClassEntries.push(dataClassEntry); + } + + // Add the new entry to the full entry list. + insertIndex = fullDataClassEntries.findIndex((p) => p.name === variableName); + if (insertIndex >= 0) { + const oldEntry = fullDataClassEntries[insertIndex]; + + // While this isn't documented behavior, it appears that the dataclass implementation + // causes overridden variables to "inherit" default values from parent classes. + if (!dataClassEntry.hasDefault && oldEntry.hasDefault && oldEntry.includeInInit) { + dataClassEntry.hasDefault = true; + dataClassEntry.defaultExpr = oldEntry.defaultExpr; + hasDefault = true; + + // Warn the user of this case because it can result in type errors if the + // default value is incompatible with the new type. + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassFieldInheritedDefault().format({ fieldName: variableName }), + variableNameNode + ); + } + + fullDataClassEntries[insertIndex] = dataClassEntry; + } else { + fullDataClassEntries.push(dataClassEntry); + insertIndex = fullDataClassEntries.length - 1; + } + + // If we've already seen a entry with a default value defined, + // all subsequent entries must also have default values. 
+ if (!isKeywordOnly && includeInInit && !skipSynthesizeInit && !hasDefault) { + const firstDefaultValueIndex = fullDataClassEntries.findIndex( + (p) => p.hasDefault && p.includeInInit && !p.isKeywordOnly + ); + if (firstDefaultValueIndex >= 0 && firstDefaultValueIndex < insertIndex) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassFieldWithDefault(), + variableNameNode + ); + } + } + } + } + } else { + // The symbol had no declared type, so it is (mostly) ignored by dataclasses. + // However, if it is assigned a field descriptor, it will result in a + // runtime exception. + const declarations = symbol.getDeclarations(); + if (declarations.length === 0) { + return; + } + const lastDecl = declarations[declarations.length - 1]; + if (lastDecl.type !== DeclarationType.Variable) { + return; + } + + const statement = lastDecl.node.parent; + if (!statement || statement.nodeType !== ParseNodeType.Assignment) { + return; + } + + // If the RHS of the assignment is assigning a field instance where the + // "init" parameter is set to false, do not include it in the init method. + if (!isNamedTuple && statement.d.rightExpr.nodeType === ParseNodeType.Call) { + const callType = evaluator.getTypeOfExpression( + statement.d.rightExpr.d.leftExpr, + EvalFlags.CallBaseDefaults + ).type; + + if ( + isDataclassFieldConstructor( + callType, + classType.shared.dataClassBehaviors?.fieldDescriptorNames || [] + ) + ) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassFieldWithoutAnnotation(), + statement.d.rightExpr + ); + } + } + } + }); + + if (isNamedTuple) { + classType.shared.namedTupleEntries = namedTupleEntries; + } else { + classType.shared.dataClassEntries = localDataClassEntries; + } + + // Now that the dataClassEntries field has been set with a complete list + // of local data class entries for this class, perform deferred type + // evaluations. 
This could involve circular type dependencies, so it's + // required that the list be complete (even if types are not yet accurate) + // before we perform the type evaluations. + localEntryTypeEvaluator.forEach((entryEvaluator) => { + entryEvaluator.entry.type = entryEvaluator.evaluator(); + }); + + const symbolTable = ClassType.getSymbolTable(classType); + const keywordOnlyParams: FunctionParam[] = []; + + if (!skipSynthesizeInit && !hasExistingInitMethod) { + if (allAncestorsKnown) { + fullDataClassEntries.forEach((entry) => { + if (entry.includeInInit) { + let defaultType: Type | undefined; + + // If the type refers to Self of the parent class, we need to + // transform it to refer to the Self of this subclass. + let effectiveType = entry.type; + if (entry.classType !== classType && requiresSpecialization(effectiveType)) { + const solution = new ConstraintSolution(); + addSolutionForSelfType(solution, entry.classType, classType); + effectiveType = applySolvedTypeVars(effectiveType, solution); + } + + // Is the field type a descriptor object? If so, we need to extract the corresponding + // type of the __init__ method parameter from the __set__ method. + effectiveType = transformDescriptorType(evaluator, effectiveType); + + if (entry.converter) { + const fieldType = effectiveType; + effectiveType = getConverterInputType(evaluator, entry.converter, effectiveType, entry.name); + symbolTable.set( + entry.name, + getDescriptorForConverterField( + evaluator, + classType, + node, + entry.nameNode, + entry.converter, + entry.name, + fieldType, + effectiveType + ) + ); + + if (entry.hasDefault) { + defaultType = entry.type; + } + } else { + if (entry.hasDefault) { + if (entry.isDefaultFactory || !entry.defaultExpr) { + defaultType = entry.type; + } else { + const defaultExpr = entry.defaultExpr; + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + const flags = fileInfo.isStubFile ? 
EvalFlags.ConvertEllipsisToAny : EvalFlags.None; + const liveTypeVars = getTypeVarScopesForNode(entry.defaultExpr); + const boundEffectiveType = makeTypeVarsBound(effectiveType, liveTypeVars); + + // Use speculative mode here so we don't cache the results. + // We'll want to re-evaluate this expression later, potentially + // with different evaluation flags. + defaultType = evaluator.useSpeculativeMode(defaultExpr, () => { + return evaluator.getTypeOfExpression( + defaultExpr, + flags, + makeInferenceContext(boundEffectiveType) + ).type; + }); + + defaultType = makeTypeVarsFree(defaultType, liveTypeVars); + + if (entry.mroClass && requiresSpecialization(defaultType)) { + const solution = buildSolutionFromSpecializedClass(entry.mroClass); + defaultType = applySolvedTypeVars(defaultType, solution); + } + } + } + } + + const effectiveName = entry.alias || entry.name; + + if (!entry.alias && entry.nameNode && isPrivateName(entry.nameNode.d.value)) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassFieldWithPrivateName(), + entry.nameNode + ); + } + + const param = FunctionParam.create( + ParamCategory.Simple, + effectiveType, + FunctionParamFlags.TypeDeclared, + effectiveName, + defaultType, + entry.defaultExpr + ); + + if (entry.isKeywordOnly) { + keywordOnlyParams.push(param); + } else { + FunctionType.addParam(constructorType, param); + } + + if (replaceType) { + const paramWithDefault = FunctionParam.create( + param.category, + param._type, + param.flags, + param.name, + AnyType.create(/* isEllipsis */ true) + ); + + FunctionType.addParam(replaceType, paramWithDefault); + } + } + }); + + if (keywordOnlyParams.length > 0) { + FunctionType.addKeywordOnlyParamSeparator(constructorType); + keywordOnlyParams.forEach((param) => { + FunctionType.addParam(constructorType, param); + }); + } + } + + symbolTable.set('__init__', Symbol.createWithType(SymbolFlags.ClassMember, initType)); + symbolTable.set('__new__', 
Symbol.createWithType(SymbolFlags.ClassMember, newType)); + + if (replaceType) { + symbolTable.set('__replace__', Symbol.createWithType(SymbolFlags.ClassMember, replaceType)); + } + } + + // Synthesize the __match_args__ class variable if it doesn't exist + // and match_args behavior is not explicitly disabled. + const strType = evaluator.getBuiltInType(node, 'str'); + const tupleClassType = evaluator.getBuiltInType(node, 'tuple'); + if ( + tupleClassType && + isInstantiableClass(tupleClassType) && + strType && + isInstantiableClass(strType) && + !symbolTable.has('__match_args__') && + (classType.shared.dataClassBehaviors?.matchArgs ?? true) + ) { + const matchArgsNames: string[] = []; + fullDataClassEntries.forEach((entry) => { + if (entry.includeInInit && !entry.isKeywordOnly) { + // Use the field name, not its alias (if it has one). + matchArgsNames.push(entry.name); + } + }); + const literalTypes: TupleTypeArg[] = matchArgsNames.map((name) => { + return { type: ClassType.cloneAsInstance(ClassType.cloneWithLiteral(strType, name)), isUnbounded: false }; + }); + const matchArgsType = ClassType.cloneAsInstance(specializeTupleClass(tupleClassType, literalTypes)); + symbolTable.set('__match_args__', Symbol.createWithType(SymbolFlags.ClassMember, matchArgsType)); + } + + const synthesizeComparisonMethod = (operator: string, paramType: Type) => { + const operatorMethod = FunctionType.createSynthesizedInstance(operator); + FunctionType.addParam(operatorMethod, selfParam); + FunctionType.addParam( + operatorMethod, + FunctionParam.create(ParamCategory.Simple, paramType, FunctionParamFlags.TypeDeclared, 'other') + ); + operatorMethod.shared.declaredReturnType = evaluator.getBuiltInObject(node, 'bool'); + // If a method of this name already exists, don't override it. + if (!symbolTable.get(operator)) { + symbolTable.set(operator, Symbol.createWithType(SymbolFlags.ClassMember, operatorMethod)); + } + }; + + // Synthesize comparison operators. 
+ if (!ClassType.isDataClassSkipGenerateEq(classType)) { + synthesizeComparisonMethod('__eq__', evaluator.getBuiltInObject(node, 'object')); + } + + if (ClassType.isDataClassGenerateOrder(classType)) { + ['__lt__', '__le__', '__gt__', '__ge__'].forEach((operator) => { + synthesizeComparisonMethod(operator, selfType); + }); + } + + let synthesizeHashFunction = ClassType.isDataClassFrozen(classType); + const synthesizeHashNone = + !isNamedTuple && !ClassType.isDataClassSkipGenerateEq(classType) && !ClassType.isDataClassFrozen(classType); + + if (skipSynthesizeHash) { + synthesizeHashFunction = false; + } + + // If the user has indicated that a hash function should be generated even if it's unsafe + // to do so or there is already a hash function present, override the default logic. + if (ClassType.isDataClassGenerateHash(classType)) { + synthesizeHashFunction = true; + } + + if (synthesizeHashFunction) { + const hashMethod = FunctionType.createSynthesizedInstance('__hash__'); + FunctionType.addParam(hashMethod, selfParam); + hashMethod.shared.declaredReturnType = evaluator.getBuiltInObject(node, 'int'); + symbolTable.set( + '__hash__', + Symbol.createWithType(SymbolFlags.ClassMember | SymbolFlags.IgnoredForOverrideChecks, hashMethod) + ); + } else if (synthesizeHashNone && !skipSynthesizeHash) { + symbolTable.set( + '__hash__', + Symbol.createWithType( + SymbolFlags.ClassMember | SymbolFlags.IgnoredForOverrideChecks, + evaluator.getNoneType() + ) + ); + } + + let dictType = evaluator.getBuiltInType(node, 'dict'); + if (isInstantiableClass(dictType)) { + dictType = ClassType.cloneAsInstance( + ClassType.specialize(dictType, [evaluator.getBuiltInObject(node, 'str'), AnyType.create()]) + ); + } + + if (!isNamedTuple) { + symbolTable.set( + '__dataclass_fields__', + Symbol.createWithType(SymbolFlags.ClassMember | SymbolFlags.ClassVar, dictType) + ); + } + + if (ClassType.isDataClassGenerateSlots(classType) && classType.shared.localSlotsNames === undefined) { + 
classType.shared.localSlotsNames = localDataClassEntries.map((entry) => entry.name); + } + + // Should we synthesize a __slots__ symbol? + if (ClassType.isDataClassGenerateSlots(classType)) { + let iterableType = evaluator.getTypingType(node, 'Iterable') ?? UnknownType.create(); + + if (isInstantiableClass(iterableType)) { + iterableType = ClassType.cloneAsInstance( + ClassType.specialize(iterableType, [evaluator.getBuiltInObject(node, 'str')]) + ); + } + + symbolTable.set( + '__slots__', + Symbol.createWithType(SymbolFlags.ClassMember | SymbolFlags.ClassVar, iterableType) + ); + } + + // If this dataclass derived from a NamedTuple, update the NamedTuple with + // the specialized entry types. + if ( + updateNamedTupleBaseClass( + classType, + fullDataClassEntries.map((entry) => entry.type), + /* isTypeArgExplicit */ true + ) + ) { + // Recompute the MRO based on the updated NamedTuple base class. + computeMroLinearization(classType); + } +} + +// If a field specifier is used to define a field, it may define a default +// argument value (either True or False) for a supported keyword parameter. +// This function extracts that default value if present and returns it. If +// it's not present, it returns undefined. 
+function getDefaultArgValueForFieldSpecifier( + evaluator: TypeEvaluator, + callNode: CallNode, + callTypeResult: TypeResult, + paramName: string +): boolean | undefined { + const callType = callTypeResult.type; + let callTarget: FunctionType | undefined; + + if (isFunction(callType)) { + callTarget = callType; + } else if (isOverloaded(callType)) { + callTarget = evaluator.getBestOverloadForArgs( + callNode, + { type: callType, isIncomplete: callTypeResult.isIncomplete }, + callNode.d.args.map((arg) => evaluator.convertNodeToArg(arg)) + ); + } else if (isInstantiableClass(callType)) { + const initMethodResult = getBoundInitMethod(evaluator, callNode, callType); + if (initMethodResult) { + if (isFunction(initMethodResult.type)) { + callTarget = initMethodResult.type; + } else if (isOverloaded(initMethodResult.type)) { + callTarget = evaluator.getBestOverloadForArgs( + callNode, + { type: initMethodResult.type }, + callNode.d.args.map((arg) => evaluator.convertNodeToArg(arg)) + ); + } + } + } + + if (callTarget) { + const initParamIndex = callTarget.shared.parameters.findIndex((p) => p.name === paramName); + if (initParamIndex >= 0) { + const initParam = callTarget.shared.parameters[initParamIndex]; + + // Is the parameter type a literal bool? + const initParamType = FunctionType.getParamType(callTarget, initParamIndex); + if ( + FunctionParam.isTypeDeclared(initParam) && + isClass(initParamType) && + typeof initParamType.priv.literalValue === 'boolean' + ) { + return initParamType.priv.literalValue; + } + + // Is the default argument value a literal bool? + const initParamDefaultType = FunctionType.getParamDefaultType(callTarget, initParamIndex); + if ( + initParamDefaultType && + isClass(initParamDefaultType) && + typeof initParamDefaultType.priv.literalValue === 'boolean' + ) { + return initParamDefaultType.priv.literalValue; + } + } + } + + return undefined; +} + +// Validates converter and, if valid, returns its input type. 
If invalid, +// fieldType is returned. +function getConverterInputType( + evaluator: TypeEvaluator, + converterNode: ArgumentNode, + fieldType: Type, + fieldName: string +): Type { + // Use speculative mode here so we don't cache the results. + // We'll want to re-evaluate this expression later, potentially + // with different evaluation flags. + const valueType = evaluator.useSpeculativeMode(converterNode.d.valueExpr, () => { + return evaluator.getTypeOfExpression(converterNode.d.valueExpr, EvalFlags.NoSpecialize).type; + }); + + const converterType = getConverterAsFunction(evaluator, valueType); + + if (!converterType) { + return fieldType; + } + + // Create synthesized function of the form Callable[[T], fieldType] which + // will be used to check compatibility of the provided converter. + const typeVar = TypeVarType.createInstance('__converterInput'); + typeVar.priv.scopeId = getScopeIdForNode(converterNode); + const targetFunction = FunctionType.createSynthesizedInstance(''); + targetFunction.shared.typeVarScopeId = typeVar.priv.scopeId; + targetFunction.shared.declaredReturnType = fieldType; + FunctionType.addParam( + targetFunction, + FunctionParam.create( + ParamCategory.Simple, + typeVar, + FunctionParamFlags.TypeDeclared | FunctionParamFlags.NameSynthesized, + '__input' + ) + ); + FunctionType.addPositionOnlyParamSeparator(targetFunction); + + if (isFunctionOrOverloaded(converterType)) { + const acceptedTypes: Type[] = []; + const diagAddendum = new DiagnosticAddendum(); + + doForEachSignature(converterType, (signature) => { + const returnConstraints = new ConstraintTracker(); + + if ( + evaluator.assignType( + FunctionType.getEffectiveReturnType(signature) ?? 
UnknownType.create(), + fieldType, + /* diag */ undefined, + returnConstraints + ) + ) { + signature = evaluator.solveAndApplyConstraints(signature, returnConstraints) as FunctionType; + } + + const inputConstraints = new ConstraintTracker(); + + if (evaluator.assignType(targetFunction, signature, diagAddendum, inputConstraints)) { + const overloadSolution = evaluator.solveAndApplyConstraints(typeVar, inputConstraints, { + replaceUnsolved: { + scopeIds: getTypeVarScopeIds(typeVar), + tupleClassType: evaluator.getTupleClassType(), + }, + }); + acceptedTypes.push(overloadSolution); + } + }); + + if (acceptedTypes.length > 0) { + return combineTypes(acceptedTypes); + } + + if (isFunction(converterType)) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassConverterFunction().format({ + argType: evaluator.printType(converterType), + fieldType: evaluator.printType(fieldType), + fieldName: fieldName, + }) + diagAddendum.getString(), + converterNode, + diagAddendum.getEffectiveTextRange() ?? converterNode + ); + } else { + const overloads = OverloadedType.getOverloads(converterType); + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassConverterOverloads().format({ + funcName: + overloads.length > 0 && overloads[0].shared.name + ? 
overloads[0].shared.name + : '', + fieldType: evaluator.printType(fieldType), + fieldName: fieldName, + }) + diagAddendum.getString(), + converterNode + ); + } + } + + return fieldType; +} + +function getConverterAsFunction( + evaluator: TypeEvaluator, + converterType: Type +): FunctionType | OverloadedType | undefined { + if (isFunctionOrOverloaded(converterType)) { + return converterType; + } + + if (isClassInstance(converterType)) { + return evaluator.getBoundMagicMethod(converterType, '__call__'); + } + + if (isInstantiableClass(converterType)) { + let fromConstructor = createFunctionFromConstructor(evaluator, converterType); + if (fromConstructor) { + // If conversion to a constructor resulted in a union type, we'll + // choose the first of the two subtypes, which typically corresponds + // to the __init__ method (rather than the __new__ method). + if (isUnion(fromConstructor)) { + fromConstructor = fromConstructor.priv.subtypes[0]; + } + + if (isFunctionOrOverloaded(fromConstructor)) { + return fromConstructor; + } + } + } + + return undefined; +} + +// Synthesizes an asymmetric descriptor class to be used in place of the +// annotated type of a field with a converter. The descriptor's __get__ method +// returns the declared type of the field and its __set__ method accepts the +// converter's input type. Returns the symbol for an instance of this descriptor +// type. 
+function getDescriptorForConverterField( + evaluator: TypeEvaluator, + dataclass: ClassType, + dataclassNode: ParseNode, + fieldNameNode: NameNode | undefined, + converterNode: ParseNode, + fieldName: string, + getType: Type, + setType: Type +): Symbol { + const fileInfo = getFileInfo(dataclassNode); + const typeMetaclass = evaluator.getBuiltInType(dataclassNode, 'type'); + const descriptorName = `__converterDescriptor_${fieldName}`; + + const descriptorClass = ClassType.createInstantiable( + descriptorName, + getClassFullName(converterNode, fileInfo.moduleName, descriptorName), + fileInfo.moduleName, + fileInfo.fileUri, + ClassTypeFlags.None, + getTypeSourceId(converterNode), + /* declaredMetaclass */ undefined, + isInstantiableClass(typeMetaclass) ? typeMetaclass : UnknownType.create() + ); + + const scopeId = getScopeIdForNode(converterNode); + descriptorClass.shared.typeVarScopeId = scopeId; + + // Make the descriptor generic, copying the type parameters from the dataclass. + descriptorClass.shared.typeParams = dataclass.shared.typeParams.map((typeParm) => { + const typeParam = TypeVarType.cloneForScopeId( + typeParm, + scopeId, + descriptorClass.shared.name, + TypeVarScopeType.Class + ); + typeParam.priv.computedVariance = Variance.Covariant; + return typeParam; + }); + + const solution = buildSolution(dataclass.shared.typeParams, descriptorClass.shared.typeParams); + getType = applySolvedTypeVars(getType, solution); + setType = applySolvedTypeVars(setType, solution); + + descriptorClass.shared.baseClasses.push(evaluator.getBuiltInType(dataclassNode, 'object')); + computeMroLinearization(descriptorClass); + + const fields = ClassType.getSymbolTable(descriptorClass); + const selfType = synthesizeTypeVarForSelfCls(descriptorClass, /* isClsParam */ false); + + const setFunction = FunctionType.createSynthesizedInstance('__set__'); + FunctionType.addParam( + setFunction, + FunctionParam.create(ParamCategory.Simple, selfType, FunctionParamFlags.TypeDeclared, 
'self') + ); + FunctionType.addParam( + setFunction, + FunctionParam.create(ParamCategory.Simple, AnyType.create(), FunctionParamFlags.TypeDeclared, 'obj') + ); + FunctionType.addParam( + setFunction, + FunctionParam.create(ParamCategory.Simple, setType, FunctionParamFlags.TypeDeclared, 'value') + ); + setFunction.shared.declaredReturnType = evaluator.getNoneType(); + const setSymbol = Symbol.createWithType(SymbolFlags.ClassMember, setFunction); + fields.set('__set__', setSymbol); + + const getFunction = FunctionType.createSynthesizedInstance('__get__'); + FunctionType.addParam( + getFunction, + FunctionParam.create(ParamCategory.Simple, selfType, FunctionParamFlags.TypeDeclared, 'self') + ); + FunctionType.addParam( + getFunction, + FunctionParam.create(ParamCategory.Simple, AnyType.create(), FunctionParamFlags.TypeDeclared, 'obj') + ); + FunctionType.addParam( + getFunction, + FunctionParam.create(ParamCategory.Simple, AnyType.create(), FunctionParamFlags.TypeDeclared, 'objtype') + ); + getFunction.shared.declaredReturnType = getType; + const getSymbol = Symbol.createWithType(SymbolFlags.ClassMember, getFunction); + fields.set('__get__', getSymbol); + + const descriptorInstance = ClassType.specialize(ClassType.cloneAsInstance(descriptorClass), [ + ...dataclass.shared.typeParams, + ]); + + return Symbol.createWithType(SymbolFlags.ClassMember, descriptorInstance, fieldNameNode); +} + +// If the specified type is a descriptor — in particular, if it implements a +// __set__ method, this method transforms the type into the input parameter +// for the set method. +function transformDescriptorType(evaluator: TypeEvaluator, type: Type): Type { + if (!isClassInstance(type) || isMetaclassInstance(type)) { + return type; + } + + const setMethodType = evaluator.getBoundMagicMethod(type, '__set__'); + if (!setMethodType) { + return type; + } + + if (!isFunction(setMethodType)) { + return type; + } + + // The value parameter for a bound __set__ method is parameter index 1. 
+ return FunctionType.getParamType(setMethodType, 1); +} + +// Builds a sorted list of dataclass entries that are inherited by +// the specified class. These entries must be unique and in reverse-MRO +// order. Returns true if all of the class types in the hierarchy are +// known, false if one or more are unknown. +export function addInheritedDataClassEntries(classType: ClassType, entries: DataClassEntry[]) { + let allAncestorsAreKnown = true; + + ClassType.getReverseMro(classType).forEach((mroClass) => { + if (isInstantiableClass(mroClass)) { + const solution = buildSolutionFromSpecializedClass(mroClass); + const dataClassEntries = ClassType.getDataClassEntries(mroClass); + + // Add the entries to the end of the list, replacing same-named + // entries if found. + dataClassEntries.forEach((entry) => { + const existingIndex = entries.findIndex((e) => e.name === entry.name); + + // If the type from the parent class is generic, we need to convert + // to the type parameter namespace of child class. + const updatedEntry = { ...entry, mroClass }; + updatedEntry.type = applySolvedTypeVars(updatedEntry.type, solution); + + if (entry.isClassVar) { + // If this entry is a class variable, it overrides an existing + // instance variable, so delete it. 
+ if (existingIndex >= 0) { + entries.splice(existingIndex, 1); + } + } else if (existingIndex >= 0) { + entries[existingIndex] = updatedEntry; + } else { + entries.push(updatedEntry); + } + }); + } else { + allAncestorsAreKnown = false; + } + }); + + return allAncestorsAreKnown; +} + +function isDataclassFieldConstructor(type: Type, fieldDescriptorNames: string[]) { + let callName: string | undefined; + + if (isFunction(type)) { + callName = type.shared.fullName; + } else if (isOverloaded(type)) { + const overloads = OverloadedType.getOverloads(type); + if (overloads.length > 0) { + callName = overloads[0].shared.fullName; + } + } else if (isInstantiableClass(type)) { + callName = type.shared.fullName; + } + + if (!callName) { + return false; + } + + return fieldDescriptorNames.some((name) => name === callName); +} + +export function validateDataClassTransformDecorator( + evaluator: TypeEvaluator, + node: CallNode +): DataClassBehaviors | undefined { + const behaviors: DataClassBehaviors = { + skipGenerateInit: false, + skipGenerateEq: false, + generateOrder: false, + generateSlots: false, + generateHash: false, + keywordOnly: false, + frozen: false, + frozenDefault: false, + fieldDescriptorNames: [], + }; + + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + + // Parse the arguments to the call. 
+ node.d.args.forEach((arg) => { + if (!arg.d.name || arg.d.argCategory !== ArgCategory.Simple) { + evaluator.addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.dataClassTransformPositionalParam(), + arg + ); + return; + } + + switch (arg.d.name.d.value) { + case 'kw_only_default': { + const value = evaluateStaticBoolExpression( + arg.d.valueExpr, + fileInfo.executionEnvironment, + fileInfo.definedConstants + ); + if (value === undefined) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassTransformExpectedBoolLiteral(), + arg.d.valueExpr + ); + return; + } + + behaviors.keywordOnly = value; + break; + } + + case 'eq_default': { + const value = evaluateStaticBoolExpression( + arg.d.valueExpr, + fileInfo.executionEnvironment, + fileInfo.definedConstants + ); + if (value === undefined) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassTransformExpectedBoolLiteral(), + arg.d.valueExpr + ); + return; + } + + behaviors.skipGenerateEq = !value; + break; + } + + case 'order_default': { + const value = evaluateStaticBoolExpression( + arg.d.valueExpr, + fileInfo.executionEnvironment, + fileInfo.definedConstants + ); + if (value === undefined) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassTransformExpectedBoolLiteral(), + arg.d.valueExpr + ); + return; + } + + behaviors.generateOrder = value; + break; + } + + case 'frozen_default': { + const value = evaluateStaticBoolExpression( + arg.d.valueExpr, + fileInfo.executionEnvironment, + fileInfo.definedConstants + ); + if (value === undefined) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassTransformExpectedBoolLiteral(), + arg.d.valueExpr + ); + return; + } + + behaviors.frozen = value; + + // Store the frozen default separately because any class that + // doesn't explicitly specify a frozen value will inherit this + // value rather than the 
value from its parent. + behaviors.frozenDefault = value; + break; + } + + // Earlier versions of the dataclass_transform spec used the name "field_descriptors" + // rather than "field_specifiers". The older name is now deprecated but still supported + // for the time being because some libraries shipped with the older __dataclass_transform__ + // form that supported this older parameter name. + case 'field_descriptors': + case 'field_specifiers': { + const valueType = evaluator.getTypeOfExpression(arg.d.valueExpr).type; + if ( + !isClassInstance(valueType) || + !ClassType.isBuiltIn(valueType, 'tuple') || + !valueType.priv.tupleTypeArgs || + valueType.priv.tupleTypeArgs.some( + (entry) => !isInstantiableClass(entry.type) && !isFunctionOrOverloaded(entry.type) + ) + ) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassTransformFieldSpecifier().format({ + type: evaluator.printType(valueType), + }), + arg.d.valueExpr + ); + return; + } + + valueType.priv.tupleTypeArgs.forEach((arg) => { + if (isInstantiableClass(arg.type) || isFunction(arg.type)) { + behaviors.fieldDescriptorNames.push(arg.type.shared.fullName); + } else if (isOverloaded(arg.type)) { + const overloads = OverloadedType.getOverloads(arg.type); + if (overloads.length > 0) { + behaviors.fieldDescriptorNames.push(overloads[0].shared.fullName); + } + } + }); + break; + } + + default: + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassTransformUnknownArgument().format({ name: arg.d.name.d.value }), + arg.d.valueExpr + ); + break; + } + }); + + return behaviors; +} + +export function getDataclassDecoratorBehaviors(type: Type): DataClassBehaviors | undefined { + let functionType: FunctionType | undefined; + if (isFunction(type)) { + functionType = type; + } else if (isOverloaded(type)) { + // Find the first overload or implementation that contains a + // dataclass_transform decorator. 
If more than one have such a decorator, + // only the first one will be honored, as per PEP 681. + const overloads = OverloadedType.getOverloads(type); + const impl = OverloadedType.getImplementation(type); + + functionType = overloads.find((overload) => !!overload.shared.decoratorDataClassBehaviors); + + if (!functionType && impl && isFunction(impl) && impl.shared.decoratorDataClassBehaviors) { + functionType = impl; + } + + if (!functionType && overloads.length > 0) { + functionType = overloads[0]; + } + } + + if (!functionType) { + return undefined; + } + + if (functionType.shared.decoratorDataClassBehaviors) { + return functionType.shared.decoratorDataClassBehaviors; + } + + // Is this the built-in dataclass? If so, return the default behaviors. + if (functionType.shared.fullName === 'dataclasses.dataclass') { + return { + fieldDescriptorNames: ['dataclasses.field', 'dataclasses.Field'], + }; + } + + return undefined; +} + +function applyDataClassBehaviorOverride( + evaluator: TypeEvaluator, + errorNode: ParseNode, + classType: ClassType, + argName: string, + argValueExpr: ExpressionNode, + behaviors: DataClassBehaviors +) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); + const value = evaluateStaticBoolExpression(argValueExpr, fileInfo.executionEnvironment, fileInfo.definedConstants); + + applyDataClassBehaviorOverrideValue(evaluator, errorNode, classType, argName, value, behaviors); +} + +function applyDataClassBehaviorOverrideValue( + evaluator: TypeEvaluator, + errorNode: ParseNode, + classType: ClassType, + argName: string, + argValue: boolean | undefined, + behaviors: DataClassBehaviors +) { + switch (argName) { + case 'order': + if (argValue !== undefined) { + behaviors.generateOrder = argValue; + } + break; + + case 'kw_only': + if (argValue !== undefined) { + behaviors.keywordOnly = argValue; + } + break; + + case 'match_args': + if (argValue !== undefined) { + behaviors.matchArgs = argValue; + } + break; + + case 'frozen': { + let 
hasUnfrozenBaseClass = false; + let hasFrozenBaseClass = false; + + if (argValue !== undefined) { + behaviors.frozen = argValue; + } + + classType.shared.baseClasses.forEach((baseClass) => { + if (isInstantiableClass(baseClass) && ClassType.isDataClass(baseClass)) { + if (ClassType.isDataClassFrozen(baseClass)) { + hasFrozenBaseClass = true; + } else if ( + !baseClass.shared.classDataClassTransform && + !( + baseClass.shared.declaredMetaclass && + isInstantiableClass(baseClass.shared.declaredMetaclass) && + !!baseClass.shared.declaredMetaclass.shared.classDataClassTransform + ) + ) { + // If this base class is unfrozen and isn't the class that directly + // references the metaclass that provides dataclass-like behaviors, + // we'll assume we're deriving from an unfrozen dataclass. + hasUnfrozenBaseClass = true; + } + } + }); + + if (argValue) { + // A frozen dataclass cannot derive from a non-frozen dataclass. + if (hasUnfrozenBaseClass) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassBaseClassNotFrozen(), + errorNode + ); + } + } else { + // A non-frozen dataclass cannot derive from a frozen dataclass. 
+ if (hasFrozenBaseClass) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassBaseClassFrozen(), + errorNode + ); + } + } + break; + } + + case 'init': + if (argValue !== undefined) { + behaviors.skipGenerateInit = !argValue; + } + break; + + case 'eq': + if (argValue !== undefined) { + behaviors.skipGenerateEq = !argValue; + } + break; + + case 'slots': + if (argValue === true) { + behaviors.generateSlots = true; + + if (classType.shared.localSlotsNames) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dataClassSlotsOverwrite(), + errorNode + ); + } + } else if (argValue === false) { + behaviors.generateSlots = false; + } + break; + + case 'hash': + case 'unsafe_hash': + if (argValue === true) { + behaviors.generateHash = true; + } + break; + } +} + +export function applyDataClassClassBehaviorOverrides( + evaluator: TypeEvaluator, + errorNode: ParseNode, + classType: ClassType, + args: Arg[], + defaultBehaviors: DataClassBehaviors +) { + let sawFrozenArg = false; + + const behaviors = { ...defaultBehaviors }; + + // The "frozen" behavior is not inherited from the parent class. + // Instead, it comes from the default. + behaviors.frozen = behaviors.frozenDefault; + + classType.shared.dataClassBehaviors = behaviors; + + args.forEach((arg) => { + if (arg.valueExpression && arg.name) { + applyDataClassBehaviorOverride( + evaluator, + arg.name, + classType, + arg.name.d.value, + arg.valueExpression, + behaviors + ); + + if (arg.name.d.value === 'frozen') { + sawFrozenArg = true; + } + } + }); + + // If there was no frozen argument, it is implicitly set to the frozenDefault. + // This check validates that we're not overriding a frozen class with a + // non-frozen class or vice versa. 
+ if (!sawFrozenArg) { + applyDataClassBehaviorOverrideValue( + evaluator, + errorNode, + classType, + 'frozen', + defaultBehaviors.frozenDefault, + behaviors + ); + } +} + +export function applyDataClassDecorator( + evaluator: TypeEvaluator, + errorNode: ParseNode, + classType: ClassType, + defaultBehaviors: DataClassBehaviors, + callNode: CallNode | undefined +) { + applyDataClassClassBehaviorOverrides( + evaluator, + errorNode, + classType, + (callNode?.d.args ?? []).map((arg) => evaluator.convertNodeToArg(arg)), + defaultBehaviors + ); +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/declaration.ts b/python-parser/packages/pyright-internal/src/analyzer/declaration.ts new file mode 100644 index 00000000..74c54231 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/declaration.ts @@ -0,0 +1,300 @@ +/* + * declaration.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Tracks the location within the code where a named entity + * is declared and its associated declared type (if the type + * is explicitly declared). 
+ */ + +import { Range } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { + ClassNode, + ExpressionNode, + FunctionNode, + ImportAsNode, + ImportFromAsNode, + ImportFromNode, + ModuleNode, + NameNode, + ParameterNode, + ParseNode, + RaiseNode, + ReturnNode, + StringListNode, + TypeAliasNode, + TypeAnnotationNode, + TypeParameterNode, + YieldFromNode, + YieldNode, +} from '../parser/parseNodes'; + +export const UnresolvedModuleMarker = Uri.constant('*** unresolved module ***'); + +export const enum DeclarationType { + Intrinsic, + Variable, + Param, + TypeParam, + TypeAlias, + Function, + Class, + SpecialBuiltInClass, + Alias, +} + +export type IntrinsicType = + | 'Any' + | 'str' + | 'str | None' + | 'int' + | 'MutableSequence[str]' + | '__class__' + | 'dict[str, Any]'; + +export interface DeclarationBase { + // Category of this symbol (function, variable, etc.). + // Used by hover provider to display helpful text. + type: DeclarationType; + + // Parse node associated with the declaration. Does not necessarily match + // the path and range. + node: ParseNode; + + // The file and range within that file that + // contains the declaration. Unless this is an alias, then uri refers to the + // file the alias is referring to. + uri: Uri; + range: Range; + + // The dot-separated import name for the file that + // contains the declaration (may not be definitive + // because a source file can be accessed via different + // import names in some cases). + moduleName: string; + + // The declaration is within an except clause of a try + // statement. We may want to ignore such declarations. + isInExceptSuite: boolean; + + // This declaration is within an inlined TypedDict definition. 
+ isInInlinedTypedDict?: boolean; +} + +export interface IntrinsicDeclaration extends DeclarationBase { + type: DeclarationType.Intrinsic; + name: string; + node: ModuleNode | FunctionNode | ClassNode; + intrinsicType: IntrinsicType; +} + +export interface ClassDeclaration extends DeclarationBase { + type: DeclarationType.Class; + node: ClassNode; +} + +// This declaration form is used only for a few special +// built-in class types defined in typing.pyi. +export interface SpecialBuiltInClassDeclaration extends DeclarationBase { + type: DeclarationType.SpecialBuiltInClass; + node: TypeAnnotationNode; +} + +export interface FunctionDeclaration extends DeclarationBase { + type: DeclarationType.Function; + node: FunctionNode; + isMethod: boolean; + isGenerator: boolean; + returnStatements?: ReturnNode[]; + yieldStatements?: (YieldNode | YieldFromNode)[]; + raiseStatements?: RaiseNode[]; +} + +export interface ParamDeclaration extends DeclarationBase { + type: DeclarationType.Param; + node: ParameterNode; + + // Inferred parameters can be inferred from pieces of an actual NameNode, so this + // value represents the actual 'name' as the user thinks of it. + inferredName?: string; + + // Nodes that potentially makeup the type of an inferred parameter. + inferredTypeNodes?: ExpressionNode[]; +} + +export interface TypeParamDeclaration extends DeclarationBase { + type: DeclarationType.TypeParam; + node: TypeParameterNode; +} + +export interface TypeAliasDeclaration extends DeclarationBase { + type: DeclarationType.TypeAlias; + node: TypeAliasNode; + + // If a docstring (based on PEP 258) is present... 
+ docString?: string | undefined; +} + +export interface VariableDeclaration extends DeclarationBase { + type: DeclarationType.Variable; + node: NameNode | StringListNode; + + // An explicit type annotation, if provided + typeAnnotationNode?: ExpressionNode | undefined; + + // A source of the inferred type + inferredTypeSource?: ParseNode | undefined; + + // Is the declaration considered "constant" (i.e. + // reassignment is not permitted)? + isConstant?: boolean | undefined; + + // Is the declaration considered "final" (similar to + // constant in that reassignment is not permitted)? + isFinal?: boolean; + + // Is the declaration an entry in __slots__? + isDefinedBySlots?: boolean; + + // For most symbols in a "py.typed" file, type inference is not + // allowed. But in certain cases (as with __match_args__ or __slots__), + // inference is permitted. + isInferenceAllowedInPyTyped?: boolean; + + // Is the declaration using a runtime-evaluated type expression + // rather than an annotation? This is used for TypedDicts, NamedTuples, + // and other complex (more dynamic) class definitions with typed variables. + isRuntimeTypeExpression?: boolean; + + // If the declaration is a type alias, points to the alias name. + typeAliasName?: NameNode | undefined; + + // Is the declaration a class or instance variable defined + // by a member access, or is it a direct variable declaration + // within the class? + isDefinedByMemberAccess?: boolean; + + // If an "attribute docstring" (as defined in PEP 258) is present... + docString?: string | undefined; + + // If set, indicates an alternative node to use to determine the type of the variable. + alternativeTypeNode?: ExpressionNode; + + // Is the declaration an assignment through an explicit nonlocal or global binding? + isExplicitBinding?: boolean; +} + +// Alias declarations are used for imports. They are resolved +// after the binding phase. 
+export interface AliasDeclaration extends DeclarationBase { + type: DeclarationType.Alias; + node: ImportAsNode | ImportFromAsNode | ImportFromNode; + + // Does this declaration use a local name or use the + // imported symbol directly? This is used to find and + // rename references. + usesLocalName: boolean; + + // Indicate whether symbols can be loaded from the path. + loadSymbolsFromPath: boolean; + + // The name of the symbol being imported (used for "from X import Y" + // statements, not applicable to "import X" statements). + symbolName?: string | undefined; + + // If there is a symbol name that can't be resolved within + // the target module (defined by "path"), the symbol might + // refer to a submodule with the same name. + submoduleFallback?: AliasDeclaration | undefined; + + // The first part of the multi-part name used in the import + // statement (e.g. for "import a.b.c", firstNamePart would + // be "a"). + firstNamePart?: string | undefined; + + // If the alias is targeting a module, multiple other modules + // may also need to be resolved and inserted implicitly into + // the module's namespace to emulate the behavior of the python + // module loader. This can be recursive (e.g. in the case of + // an "import a.b.c.d" statement). + implicitImports?: Map; + + // Is this a dummy entry for an unresolved import? + isUnresolved?: boolean; + + // Is this a dummy entry for an import that cannot be resolved + // directly because it targets a native library? + isNativeLib?: boolean; +} + +// This interface represents a set of actions that the python loader +// performs when a module import is encountered. +export interface ModuleLoaderActions { + // The resolved uri of the implicit import. This can be empty + // if the resolved uri doesn't reference a module (e.g. it's + // a directory). + uri: Uri; + + // Is this a dummy entry for an unresolved import? + isUnresolved?: boolean; + + // Indicate whether symbols can be loaded from the path. 
+ loadSymbolsFromPath: boolean; + + // See comment for "implicitImports" field in AliasDeclaration. + implicitImports?: Map; +} + +export type Declaration = + | IntrinsicDeclaration + | ClassDeclaration + | SpecialBuiltInClassDeclaration + | FunctionDeclaration + | ParamDeclaration + | TypeParamDeclaration + | TypeAliasDeclaration + | VariableDeclaration + | AliasDeclaration; + +export function isFunctionDeclaration(decl: Declaration): decl is FunctionDeclaration { + return decl.type === DeclarationType.Function; +} + +export function isClassDeclaration(decl: Declaration): decl is ClassDeclaration { + return decl.type === DeclarationType.Class; +} + +export function isParamDeclaration(decl: Declaration): decl is ParamDeclaration { + return decl.type === DeclarationType.Param; +} + +export function isTypeParamDeclaration(decl: Declaration): decl is TypeParamDeclaration { + return decl.type === DeclarationType.TypeParam; +} + +export function isTypeAliasDeclaration(decl: Declaration): decl is TypeAliasDeclaration { + return decl.type === DeclarationType.TypeAlias; +} + +export function isVariableDeclaration(decl: Declaration): decl is VariableDeclaration { + return decl.type === DeclarationType.Variable; +} + +export function isAliasDeclaration(decl: Declaration): decl is AliasDeclaration { + return decl.type === DeclarationType.Alias; +} + +export function isSpecialBuiltInClassDeclaration(decl: Declaration): decl is SpecialBuiltInClassDeclaration { + return decl.type === DeclarationType.SpecialBuiltInClass; +} + +export function isIntrinsicDeclaration(decl: Declaration): decl is IntrinsicDeclaration { + return decl.type === DeclarationType.Intrinsic; +} + +export function isUnresolvedAliasDeclaration(decl: Declaration): boolean { + return isAliasDeclaration(decl) && decl.uri.equals(UnresolvedModuleMarker); +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/declarationUtils.ts 
b/python-parser/packages/pyright-internal/src/analyzer/declarationUtils.ts new file mode 100644 index 00000000..3b762385 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/declarationUtils.ts @@ -0,0 +1,420 @@ +/* + * declarationUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Collection of static methods that operate on declarations. + */ + +import { assertNever } from '../common/debug'; +import { getEmptyRange } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { NameNode, ParseNodeType } from '../parser/parseNodes'; +import { ImportLookup, ImportLookupResult } from './analyzerFileInfo'; +import { AliasDeclaration, Declaration, DeclarationType, ModuleLoaderActions, isAliasDeclaration } from './declaration'; +import { getFileInfoFromNode } from './parseTreeUtils'; +import { Symbol } from './symbol'; + +export interface ResolvedAliasInfo { + declaration: Declaration | undefined; + isPrivate: boolean; + privatePyTypedImported?: string; + privatePyTypedImporter?: string; +} + +export function hasTypeForDeclaration(declaration: Declaration): boolean { + switch (declaration.type) { + case DeclarationType.Intrinsic: + case DeclarationType.Class: + case DeclarationType.SpecialBuiltInClass: + case DeclarationType.Function: + case DeclarationType.TypeParam: + case DeclarationType.TypeAlias: + return true; + + case DeclarationType.Param: { + if (declaration.node.d.annotation || declaration.node.d.annotationComment) { + return true; + } + + // Handle function type comments. 
+ const parameterParent = declaration.node.parent; + if (parameterParent?.nodeType === ParseNodeType.Function) { + if (parameterParent.d.funcAnnotationComment && !parameterParent.d.funcAnnotationComment.d.isEllipsis) { + const paramAnnotations = parameterParent.d.funcAnnotationComment.d.paramAnnotations; + + // Handle the case where the annotation comment is missing an + // annotation for the first parameter (self or cls). + if ( + parameterParent.d.params.length > paramAnnotations.length && + declaration.node === parameterParent.d.params[0] + ) { + return false; + } + + return true; + } + } + return false; + } + + case DeclarationType.Variable: + return !!declaration.typeAnnotationNode; + + case DeclarationType.Alias: + return false; + } +} + +export function areDeclarationsSame( + decl1: Declaration, + decl2: Declaration, + treatModuleInImportAndFromImportSame = false, + skipRangeForAliases = false +): boolean { + if (decl1.type !== decl2.type) { + return false; + } + + if (!decl1.uri.equals(decl2.uri)) { + return false; + } + + if (!skipRangeForAliases || decl1.type !== DeclarationType.Alias) { + if ( + decl1.range.start.line !== decl2.range.start.line || + decl1.range.start.character !== decl2.range.start.character + ) { + return false; + } + } + + // Alias declarations refer to the entire import statement. + // We need to further differentiate. + if (decl1.type === DeclarationType.Alias && decl2.type === DeclarationType.Alias) { + if (decl1.symbolName !== decl2.symbolName || decl1.usesLocalName !== decl2.usesLocalName) { + return false; + } + + if (treatModuleInImportAndFromImportSame) { + // Treat "module" in "import [|module|]", "from [|module|] import ..." + // or "from ... import [|module|]" same in IDE services. 
+ // + // Some case such as "from [|module|] import ...", symbol for [|module|] doesn't even + // exist and it can't be referenced inside of a module, but nonetheless, IDE still + // needs these sometimes for things like hover tooltip, highlight references, + // find all references and etc. + return true; + } + + if (decl1.node !== decl2.node) { + return false; + } + } + + return true; +} + +export function getNameFromDeclaration(declaration: Declaration) { + switch (declaration.type) { + case DeclarationType.Alias: + return declaration.symbolName; + + case DeclarationType.Class: + case DeclarationType.Function: + case DeclarationType.TypeParam: + case DeclarationType.TypeAlias: + return declaration.node.d.name.d.value; + + case DeclarationType.Param: + return declaration.node.d.name?.d.value; + + case DeclarationType.Variable: + return declaration.node.nodeType === ParseNodeType.Name ? declaration.node.d.value : undefined; + + case DeclarationType.Intrinsic: + case DeclarationType.SpecialBuiltInClass: + return declaration.node.nodeType === ParseNodeType.TypeAnnotation && + declaration.node.d.valueExpr.nodeType === ParseNodeType.Name + ? declaration.node.d.valueExpr.d.value + : undefined; + + default: { + assertNever(declaration); + } + } + + throw new Error(`Shouldn't reach here`); +} + +export function getNameNodeForDeclaration(declaration: Declaration): NameNode | undefined { + if (declaration.node === undefined) { + return undefined; + } + + switch (declaration.type) { + case DeclarationType.Alias: + if (declaration.node.nodeType === ParseNodeType.ImportAs) { + return declaration.node.d.alias ?? declaration.node.d.module.d.nameParts[0]; + } else if (declaration.node.nodeType === ParseNodeType.ImportFromAs) { + return declaration.node.d.alias ?? 
declaration.node.d.name; + } else { + return declaration.node.d.module.d.nameParts[0]; + } + + case DeclarationType.Class: + case DeclarationType.Function: + case DeclarationType.TypeParam: + case DeclarationType.Param: + case DeclarationType.TypeAlias: + return declaration.node.d.name; + + case DeclarationType.Variable: + return declaration.node.nodeType === ParseNodeType.Name ? declaration.node : undefined; + + case DeclarationType.Intrinsic: + case DeclarationType.SpecialBuiltInClass: + return undefined; + + default: { + assertNever(declaration); + } + } + + throw new Error(`Shouldn't reach here`); +} + +export function isDefinedInFile(decl: Declaration, fileUri: Uri) { + if (isAliasDeclaration(decl)) { + // Alias decl's path points to the original symbol + // the alias is pointing to. So, we need to get the + // filepath in that the alias is defined from the node. + return getFileInfoFromNode(decl.node)?.fileUri.equals(fileUri); + } + + // Other decls, the path points to the file the symbol is defined in. + return decl.uri.equals(fileUri); +} + +export function getDeclarationsWithUsesLocalNameRemoved(decls: Declaration[]) { + // Make a shallow copy and clear the "usesLocalName" field. + return decls.map((localDecl) => { + if (localDecl.type !== DeclarationType.Alias) { + return localDecl; + } + + const nonLocalDecl: AliasDeclaration = { ...localDecl }; + nonLocalDecl.usesLocalName = false; + return nonLocalDecl; + }); +} + +export function synthesizeAliasDeclaration(uri: Uri): AliasDeclaration { + // The only time this decl is used is for IDE services such as + // the find all references, hover provider and etc. 
+ return { + type: DeclarationType.Alias, + node: undefined!, + uri, + loadSymbolsFromPath: false, + range: getEmptyRange(), + implicitImports: new Map(), + usesLocalName: false, + moduleName: '', + isInExceptSuite: false, + }; +} + +export interface ResolveAliasOptions { + resolveLocalNames: boolean; + allowExternallyHiddenAccess: boolean; + skipFileNeededCheck: boolean; +} + +// If the specified declaration is an alias declaration that points to a symbol, +// it resolves the alias and looks up the symbol, then returns a declaration +// (typically the last) associated with that symbol. It does this recursively if +// necessary. If a symbol lookup fails, undefined is returned. If resolveLocalNames +// is true, the method resolves aliases through local renames ("as" clauses found +// in import statements). +export function resolveAliasDeclaration( + importLookup: ImportLookup, + declaration: Declaration, + options: ResolveAliasOptions +): ResolvedAliasInfo | undefined { + let curDeclaration: Declaration | undefined = declaration; + const alreadyVisited: Declaration[] = []; + let isPrivate = false; + + // These variables are used to find a transition from a non-py.typed to + // a py.typed resolution chain. In this case, if the imported symbol + // is a private symbol (i.e. not intended to be re-exported), we store + // the name of the importer and imported modules so the caller can + // report an error. + let sawPyTypedTransition = false; + let privatePyTypedImported: string | undefined; + let privatePyTypedImporter: string | undefined; + + while (true) { + if (curDeclaration.type !== DeclarationType.Alias || !curDeclaration.symbolName) { + return { + declaration: curDeclaration, + isPrivate, + privatePyTypedImported, + privatePyTypedImporter, + }; + } + + // If we are not supposed to follow local alias names and this + // is a local name, don't continue to follow the alias. 
+ if (!options.resolveLocalNames && curDeclaration.usesLocalName) { + return { + declaration: curDeclaration, + isPrivate, + privatePyTypedImported, + privatePyTypedImporter, + }; + } + + let lookupResult: ImportLookupResult | undefined; + if (!curDeclaration.uri.isEmpty() && curDeclaration.loadSymbolsFromPath) { + lookupResult = importLookup(curDeclaration.uri, { + skipFileNeededCheck: options.skipFileNeededCheck, + }); + } + + const symbol: Symbol | undefined = lookupResult + ? lookupResult.symbolTable.get(curDeclaration.symbolName) + : undefined; + if (!symbol) { + if (curDeclaration.submoduleFallback) { + if (curDeclaration.symbolName) { + // See if we are resolving a specific imported symbol name and the submodule + // fallback cannot be resolved. For example, `from a import b`. If b is both + // a symbol in `a/__init__.py` and a submodule `a/b.py` and we are not using + // type information from this library (e.g. a non-py.typed library source file + // when useLibraryCodeForTypes is disabled), b should be evaluated as Unknown, + // not as a module. 
+ if ( + !curDeclaration.uri.isEmpty() && + curDeclaration.submoduleFallback.type === DeclarationType.Alias && + !curDeclaration.submoduleFallback.uri.isEmpty() + ) { + const lookupResult = importLookup(curDeclaration.submoduleFallback.uri, { + skipFileNeededCheck: options.skipFileNeededCheck, + skipParsing: true, + }); + if (!lookupResult) { + return undefined; + } + } + } + + let submoduleFallback = curDeclaration.submoduleFallback; + if (curDeclaration.symbolName) { + submoduleFallback = { ...curDeclaration.submoduleFallback }; + let baseModuleName = submoduleFallback.moduleName; + + if (baseModuleName) { + baseModuleName = `${baseModuleName}.`; + } + + submoduleFallback.moduleName = `${baseModuleName}${curDeclaration.symbolName}`; + } + + return resolveAliasDeclaration(importLookup, submoduleFallback, options); + } + + // If the symbol comes from a native library, we won't + // be able to resolve its type directly. + if (curDeclaration.isNativeLib) { + return { + declaration: undefined, + isPrivate, + }; + } + + return undefined; + } + + if (symbol.isPrivateMember() && !sawPyTypedTransition) { + isPrivate = true; + } + + if (symbol.isExternallyHidden() && !options.allowExternallyHiddenAccess) { + return undefined; + } + + // Prefer declarations with specified types. If we don't have any of those, + // fall back on declarations with inferred types. + let declarations: Declaration[] = symbol.getTypedDeclarations(); + + // Try not to use declarations within an except suite even if it's a typed + // declaration. These are typically used for fallback exception handling. + declarations = declarations.filter((decl) => !decl.isInExceptSuite); + + if (declarations.length === 0) { + declarations = symbol.getDeclarations(); + declarations = declarations.filter((decl) => !decl.isInExceptSuite); + } + + if (declarations.length === 0) { + // Use declarations within except clauses if there are no alternatives. 
+ declarations = symbol.getDeclarations(); + } + + if (declarations.length === 0) { + return undefined; + } + + const prevDeclaration = curDeclaration; + + // Prefer the last unvisited declaration in the list. This ensures that + // we use all of the overloads if it's an overloaded function. + const unvisitedDecls = declarations.filter((decl) => !alreadyVisited.includes(decl)); + if (unvisitedDecls.length > 0) { + curDeclaration = unvisitedDecls[unvisitedDecls.length - 1]; + } else { + curDeclaration = declarations[declarations.length - 1]; + } + + if (lookupResult?.isInPyTypedPackage) { + if (!sawPyTypedTransition) { + if (symbol.isPrivatePyTypedImport()) { + privatePyTypedImporter = prevDeclaration?.moduleName; + } + + // Note that we've seen a transition from a non-py.typed to a py.typed + // import. No further check is needed. + sawPyTypedTransition = true; + } else { + // If we've already seen a transition, look for the first non-private + // symbol that is resolved so we can tell the user to import from this + // location instead. + if (!symbol.isPrivatePyTypedImport()) { + privatePyTypedImported = privatePyTypedImported ?? curDeclaration?.moduleName; + } + } + } + + // Make sure we don't follow a circular list indefinitely. + if (alreadyVisited.find((decl) => decl === curDeclaration)) { + // If the path path of the alias points back to the original path, use the submodule + // fallback instead. This happens in the case where a module's __init__.py file + // imports a submodule using itself as the import target. For example, if + // the module is foo, and the foo.__init__.py file contains the statement + // "from foo import bar", we want to import the foo/bar.py submodule. 
+ if (curDeclaration.type === DeclarationType.Alias && curDeclaration.submoduleFallback) { + return resolveAliasDeclaration(importLookup, curDeclaration.submoduleFallback, options); + } + return { + declaration, + isPrivate, + privatePyTypedImported, + privatePyTypedImporter, + }; + } + alreadyVisited.push(curDeclaration); + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/decorators.ts b/python-parser/packages/pyright-internal/src/analyzer/decorators.ts new file mode 100644 index 00000000..43b2814b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/decorators.ts @@ -0,0 +1,606 @@ +/* + * decorators.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides type evaluation logic that is specific to the application of + * function or class decorators. + */ + +import { appendArray } from '../common/collectionUtils'; +import { ArgCategory, CallNode, DecoratorNode, FunctionNode, ParamCategory, ParseNodeType } from '../parser/parseNodes'; +import { getDeclaration, getFileInfo } from './analyzerNodeInfo'; +import { + applyDataClassDecorator, + getDataclassDecoratorBehaviors, + validateDataClassTransformDecorator, +} from './dataClasses'; +import { DeclarationType, FunctionDeclaration } from './declaration'; +import { convertDocStringToPlainText } from './docStringConversion'; +import { + clonePropertyWithDeleter, + clonePropertyWithSetter, + createProperty, + validatePropertyMethod, +} from './properties'; +import { Arg, EvalFlags, TypeEvaluator } from './typeEvaluatorTypes'; +import { isPartlyUnknown, isProperty } from './typeUtils'; +import { + ClassType, + ClassTypeFlags, + DataClassBehaviors, + FunctionParam, + FunctionType, + FunctionTypeFlags, + OverloadedType, + Type, + TypeBase, + UnknownType, + isClass, + isClassInstance, + isFunction, + isInstantiableClass, + isOverloaded, +} from './types'; + +export interface FunctionDecoratorInfo { + flags: 
FunctionTypeFlags; + deprecationMessage: string | undefined; +} + +// Scans through the decorators to find a few built-in decorators +// that affect the function flags. +export function getFunctionInfoFromDecorators( + evaluator: TypeEvaluator, + node: FunctionNode, + isInClass: boolean +): FunctionDecoratorInfo { + const fileInfo = getFileInfo(node); + let flags = FunctionTypeFlags.None; + let deprecationMessage: string | undefined; + + if (isInClass) { + // The "__new__" magic method is not an instance method. + // It acts as a static method instead. + if (node.d.name.d.value === '__new__') { + flags |= FunctionTypeFlags.ConstructorMethod; + } + + // Several magic methods are treated as class methods implicitly + // by the runtime. Check for these here. + const implicitClassMethods = ['__init_subclass__', '__class_getitem__']; + if (implicitClassMethods.some((name) => node.d.name.d.value === name)) { + flags |= FunctionTypeFlags.ClassMethod; + } + } + + for (const decoratorNode of node.d.decorators) { + // Some stub files (e.g. builtins.pyi) rely on forward declarations of decorators. + let evaluatorFlags = fileInfo.isStubFile ? 
EvalFlags.ForwardRefs : EvalFlags.None; + if (decoratorNode.d.expr.nodeType !== ParseNodeType.Call) { + evaluatorFlags |= EvalFlags.CallBaseDefaults; + } + + const decoratorTypeResult = evaluator.getTypeOfExpression(decoratorNode.d.expr, evaluatorFlags); + const decoratorType = decoratorTypeResult.type; + + if (isFunction(decoratorType)) { + if (FunctionType.isBuiltIn(decoratorType, 'abstractmethod')) { + if (isInClass) { + flags |= FunctionTypeFlags.AbstractMethod; + } + } else if (FunctionType.isBuiltIn(decoratorType, 'final')) { + flags |= FunctionTypeFlags.Final; + } else if (FunctionType.isBuiltIn(decoratorType, 'override')) { + flags |= FunctionTypeFlags.Overridden; + } else if (FunctionType.isBuiltIn(decoratorType, 'type_check_only')) { + flags |= FunctionTypeFlags.TypeCheckOnly; + } else if (FunctionType.isBuiltIn(decoratorType, 'no_type_check')) { + flags |= FunctionTypeFlags.NoTypeCheck; + } else if (FunctionType.isBuiltIn(decoratorType, 'overload')) { + flags |= FunctionTypeFlags.Overloaded; + } + } else if (isClass(decoratorType)) { + if (TypeBase.isInstantiable(decoratorType)) { + if (ClassType.isBuiltIn(decoratorType, 'staticmethod')) { + if (isInClass) { + flags |= FunctionTypeFlags.StaticMethod; + } + } else if (ClassType.isBuiltIn(decoratorType, 'classmethod')) { + if (isInClass) { + flags |= FunctionTypeFlags.ClassMethod; + } + } + } else { + if (ClassType.isBuiltIn(decoratorType, 'deprecated')) { + deprecationMessage = decoratorType.priv.deprecatedInstanceMessage; + } + } + } + } + + return { flags, deprecationMessage }; +} + +// Transforms the input function type into an output type based on the +// decorator function described by the decoratorNode. +export function applyFunctionDecorator( + evaluator: TypeEvaluator, + inputFunctionType: Type, + undecoratedType: FunctionType, + decoratorNode: DecoratorNode, + functionNode: FunctionNode +): Type { + const fileInfo = getFileInfo(decoratorNode); + + // Some stub files (e.g. 
builtins.pyi) rely on forward declarations of decorators. + let evaluatorFlags = fileInfo.isStubFile ? EvalFlags.ForwardRefs : EvalFlags.None; + if (decoratorNode.d.expr.nodeType !== ParseNodeType.Call) { + evaluatorFlags |= EvalFlags.CallBaseDefaults; + } + + const decoratorTypeResult = evaluator.getTypeOfExpression(decoratorNode.d.expr, evaluatorFlags); + const decoratorType = decoratorTypeResult.type; + + // Special-case the "overload" because it has no definition. Older versions of typeshed + // defined "overload" as an object, but newer versions define it as a function. + if ( + (isInstantiableClass(decoratorType) && ClassType.isSpecialBuiltIn(decoratorType, 'overload')) || + (isFunction(decoratorType) && FunctionType.isBuiltIn(decoratorType, 'overload')) + ) { + if (isFunction(inputFunctionType)) { + inputFunctionType.shared.flags |= FunctionTypeFlags.Overloaded; + undecoratedType.shared.flags |= FunctionTypeFlags.Overloaded; + return inputFunctionType; + } + } + + if (decoratorNode.d.expr.nodeType === ParseNodeType.Call) { + const decoratorCallType = evaluator.getTypeOfExpression( + decoratorNode.d.expr.d.leftExpr, + evaluatorFlags | EvalFlags.CallBaseDefaults + ).type; + + if (isFunction(decoratorCallType)) { + if ( + decoratorCallType.shared.name === '__dataclass_transform__' || + FunctionType.isBuiltIn(decoratorCallType, 'dataclass_transform') + ) { + undecoratedType.shared.decoratorDataClassBehaviors = validateDataClassTransformDecorator( + evaluator, + decoratorNode.d.expr + ); + return inputFunctionType; + } + } + } + + // Clear the PartiallyEvaluated flag in the input if it's set so + // it doesn't propagate to the decorated type. + const decoratorArg = + isFunction(inputFunctionType) && FunctionType.isPartiallyEvaluated(inputFunctionType) + ? 
FunctionType.cloneWithNewFlags( + inputFunctionType, + inputFunctionType.shared.flags & ~FunctionTypeFlags.PartiallyEvaluated + ) + : inputFunctionType; + + let returnType = getTypeOfDecorator(evaluator, decoratorNode, decoratorArg); + + // Check for some built-in decorator types with known semantics. + if (isFunction(decoratorType)) { + if (FunctionType.isBuiltIn(decoratorType, 'abstractmethod')) { + return inputFunctionType; + } + + if (FunctionType.isBuiltIn(decoratorType, 'type_check_only')) { + undecoratedType.shared.flags |= FunctionTypeFlags.TypeCheckOnly; + return inputFunctionType; + } + + // Handle property setters and deleters. + if (decoratorNode.d.expr.nodeType === ParseNodeType.MemberAccess) { + const baseType = evaluator.getTypeOfExpression( + decoratorNode.d.expr.d.leftExpr, + evaluatorFlags | EvalFlags.MemberAccessBaseDefaults + ).type; + + if (isProperty(baseType)) { + const memberName = decoratorNode.d.expr.d.member.d.value; + if (memberName === 'setter') { + if (isFunction(inputFunctionType)) { + validatePropertyMethod(evaluator, inputFunctionType, decoratorNode); + return clonePropertyWithSetter(evaluator, baseType, inputFunctionType, functionNode); + } else { + return inputFunctionType; + } + } else if (memberName === 'deleter') { + if (isFunction(inputFunctionType)) { + validatePropertyMethod(evaluator, inputFunctionType, decoratorNode); + return clonePropertyWithDeleter(evaluator, baseType, inputFunctionType, functionNode); + } else { + return inputFunctionType; + } + } + } + } + } else if (isInstantiableClass(decoratorType)) { + if (ClassType.isBuiltIn(decoratorType)) { + switch (decoratorType.shared.name) { + case 'classmethod': + case 'staticmethod': { + const requiredFlag = + decoratorType.shared.name === 'classmethod' + ? 
FunctionTypeFlags.ClassMethod + : FunctionTypeFlags.StaticMethod; + + // If the function isn't currently a class method or static method + // (which can happen if the function was wrapped in a decorator), + // add the appropriate flag. + if (isFunction(inputFunctionType) && (inputFunctionType.shared.flags & requiredFlag) === 0) { + const newFunction = FunctionType.clone(inputFunctionType); + newFunction.shared.flags &= ~( + FunctionTypeFlags.ConstructorMethod | + FunctionTypeFlags.StaticMethod | + FunctionTypeFlags.ClassMethod + ); + newFunction.shared.flags |= requiredFlag; + return newFunction; + } + + return inputFunctionType; + } + + case 'decorator': { + return inputFunctionType; + } + } + } + + // Handle properties and subclasses of properties specially. + if (ClassType.isPropertyClass(decoratorType)) { + if (isFunction(inputFunctionType)) { + validatePropertyMethod(evaluator, inputFunctionType, decoratorNode); + return createProperty(evaluator, decoratorNode, decoratorType, inputFunctionType); + } else if (isClassInstance(inputFunctionType)) { + const boundMethod = evaluator.getBoundMagicMethod(inputFunctionType, '__call__'); + + if (boundMethod && isFunction(boundMethod)) { + return createProperty(evaluator, decoratorNode, decoratorType, boundMethod); + } + + return UnknownType.create(); + } + } + } + + if (isFunction(inputFunctionType) && isFunction(returnType)) { + returnType = FunctionType.clone(returnType); + + // Copy the overload flag from the input function type. + if (FunctionType.isOverloaded(inputFunctionType)) { + returnType.shared.flags |= FunctionTypeFlags.Overloaded; + } + + // Copy the docstrings from the input function type if the + // decorator didn't have its own docstring. 
+ if (!returnType.shared.docString) { + returnType.shared.docString = inputFunctionType.shared.docString; + } + } + + return returnType; +} + +export function applyClassDecorator( + evaluator: TypeEvaluator, + inputClassType: Type, + originalClassType: ClassType, + decoratorNode: DecoratorNode +): Type { + const fileInfo = getFileInfo(decoratorNode); + let flags = fileInfo.isStubFile ? EvalFlags.ForwardRefs : EvalFlags.None; + if (decoratorNode.d.expr.nodeType !== ParseNodeType.Call) { + flags |= EvalFlags.CallBaseDefaults; + } + const decoratorType = evaluator.getTypeOfExpression(decoratorNode.d.expr, flags).type; + + if (decoratorNode.d.expr.nodeType === ParseNodeType.Call) { + const decoratorCallType = evaluator.getTypeOfExpression( + decoratorNode.d.expr.d.leftExpr, + flags | EvalFlags.CallBaseDefaults + ).type; + + if (isFunction(decoratorCallType)) { + if ( + decoratorCallType.shared.name === '__dataclass_transform__' || + FunctionType.isBuiltIn(decoratorCallType, 'dataclass_transform') + ) { + originalClassType.shared.classDataClassTransform = validateDataClassTransformDecorator( + evaluator, + decoratorNode.d.expr + ); + } + } + } + + const applyDataclassTransform = (): boolean => { + // Is this a dataclass decorator? 
+ let dataclassBehaviors: DataClassBehaviors | undefined; + let callNode: CallNode | undefined; + + if (decoratorNode.d.expr.nodeType === ParseNodeType.Call) { + callNode = decoratorNode.d.expr; + const decoratorCallType = evaluator.getTypeOfExpression( + callNode.d.leftExpr, + flags | EvalFlags.CallBaseDefaults + ).type; + dataclassBehaviors = getDataclassDecoratorBehaviors(decoratorCallType); + } else { + const decoratorType = evaluator.getTypeOfExpression(decoratorNode.d.expr, flags).type; + dataclassBehaviors = getDataclassDecoratorBehaviors(decoratorType); + } + + if (dataclassBehaviors) { + applyDataClassDecorator(evaluator, decoratorNode, originalClassType, dataclassBehaviors, callNode); + return true; + } + + return false; + }; + + if (isOverloaded(decoratorType)) { + const dataclassBehaviors = getDataclassDecoratorBehaviors(decoratorType); + if (dataclassBehaviors) { + applyDataClassDecorator( + evaluator, + decoratorNode, + originalClassType, + dataclassBehaviors, + /* callNode */ undefined + ); + return inputClassType; + } + } else if (isFunction(decoratorType)) { + if (FunctionType.isBuiltIn(decoratorType, 'final')) { + originalClassType.shared.flags |= ClassTypeFlags.Final; + + // Don't call getTypeOfDecorator for final. We'll hard-code its + // behavior because its function definition results in a cyclical + // dependency between builtins, typing and _typeshed stubs. + return inputClassType; + } + + if (FunctionType.isBuiltIn(decoratorType, 'type_check_only')) { + originalClassType.shared.flags |= ClassTypeFlags.TypeCheckOnly; + return inputClassType; + } + + if (FunctionType.isBuiltIn(decoratorType, 'runtime_checkable')) { + originalClassType.shared.flags |= ClassTypeFlags.RuntimeCheckable; + + // Don't call getTypeOfDecorator for runtime_checkable. It appears + // frequently in stubs, and it's a waste of time to validate its + // parameters. 
+ return inputClassType; + } + + if (applyDataclassTransform()) { + return inputClassType; + } + } else if (isClassInstance(decoratorType)) { + if (ClassType.isBuiltIn(decoratorType, 'deprecated')) { + originalClassType.shared.deprecatedMessage = decoratorType.priv.deprecatedInstanceMessage; + return inputClassType; + } + + if (applyDataclassTransform()) { + return inputClassType; + } + } + + return getTypeOfDecorator(evaluator, decoratorNode, inputClassType); +} + +function getTypeOfDecorator(evaluator: TypeEvaluator, node: DecoratorNode, functionOrClassType: Type): Type { + // Evaluate the type of the decorator expression. + let flags = getFileInfo(node).isStubFile ? EvalFlags.ForwardRefs : EvalFlags.None; + if (node.d.expr.nodeType !== ParseNodeType.Call) { + flags |= EvalFlags.CallBaseDefaults; + } + + const decoratorTypeResult = evaluator.getTypeOfExpression(node.d.expr, flags); + + // Special-case the combination of a classmethod decorator applied + // to a property. This is allowed in Python 3.9, but it's not reflected + // in the builtins.pyi stub for classmethod. + if ( + isInstantiableClass(decoratorTypeResult.type) && + ClassType.isBuiltIn(decoratorTypeResult.type, 'classmethod') && + isProperty(functionOrClassType) + ) { + return functionOrClassType; + } + + const argList: Arg[] = [ + { + argCategory: ArgCategory.Simple, + typeResult: { type: functionOrClassType }, + }, + ]; + + const callTypeResult = evaluator.validateCallArgs( + node.d.expr, + argList, + decoratorTypeResult, + /* constraints */ undefined, + /* skipUnknownArgCheck */ true, + /* inferenceContext */ undefined + ); + + evaluator.setTypeResultForNode(node, { + type: callTypeResult.returnType ?? UnknownType.create(), + overloadsUsedForCall: callTypeResult.overloadsUsedForCall, + isIncomplete: callTypeResult.isTypeIncomplete, + }); + + const returnType = callTypeResult.returnType ?? 
UnknownType.create(); + + // If the return type is a function that has no annotations + // and just *args and **kwargs parameters, assume that it + // preserves the type of the input function. + if (isFunction(returnType) && !returnType.shared.declaredReturnType) { + if ( + !returnType.shared.parameters.some((param, index) => { + // Don't allow * or / separators or params with declared types. + if (!param.name || FunctionParam.isTypeDeclared(param)) { + return true; + } + + // Allow *args or **kwargs parameters. + if (param.category !== ParamCategory.Simple) { + return false; + } + + // Allow inferred "self" or "cls" parameters. + return index !== 0 || !FunctionParam.isTypeInferred(param); + }) + ) { + return functionOrClassType; + } + } + + // If the decorator is completely unannotated and the return type + // includes unknowns, assume that it preserves the type of the input + // function. + if (isPartlyUnknown(returnType)) { + if (isFunction(decoratorTypeResult.type)) { + if ( + !decoratorTypeResult.type.shared.parameters.find((param) => FunctionParam.isTypeDeclared(param)) && + decoratorTypeResult.type.shared.declaredReturnType === undefined + ) { + return functionOrClassType; + } + } + } + + return returnType; +} + +// Given a function node and the function type associated with it, this +// method searches for prior function nodes that are marked as @overload +// and creates an OverloadedType that includes this function and +// all previous ones. 
+export function addOverloadsToFunctionType(evaluator: TypeEvaluator, node: FunctionNode, type: Type): Type { + let functionDecl: FunctionDeclaration | undefined; + let implementation: Type | undefined; + + const decl = getDeclaration(node); + if (decl) { + functionDecl = decl as FunctionDeclaration; + } + const symbolWithScope = evaluator.lookUpSymbolRecursive(node, node.d.name.d.value, /* honorCodeFlow */ false); + if (symbolWithScope) { + const decls = symbolWithScope.symbol.getDeclarations(); + + // Find this function's declaration. + const declIndex = decls.findIndex((decl) => decl === functionDecl); + if (declIndex > 0) { + // Evaluate all of the previous function declarations. They will + // be cached. We do it in this order to avoid a stack overflow due + // to recursion if there is a large number (1000's) of overloads. + for (let i = 0; i < declIndex; i++) { + const decl = decls[i]; + if (decl.type === DeclarationType.Function) { + evaluator.getTypeOfFunction(decl.node); + } + } + + let overloadedTypes: FunctionType[] = []; + + // Look at the previous declaration's type. + const prevDecl = decls[declIndex - 1]; + if (prevDecl.type === DeclarationType.Function) { + const prevDeclDeclTypeInfo = evaluator.getTypeOfFunction(prevDecl.node); + if (prevDeclDeclTypeInfo) { + if (isFunction(prevDeclDeclTypeInfo.decoratedType)) { + if (FunctionType.isOverloaded(prevDeclDeclTypeInfo.decoratedType)) { + overloadedTypes.push(prevDeclDeclTypeInfo.decoratedType); + } + } else if (isOverloaded(prevDeclDeclTypeInfo.decoratedType)) { + implementation = OverloadedType.getImplementation(prevDeclDeclTypeInfo.decoratedType); + // If the previous overloaded function already had an implementation, + // this new function completely replaces the previous one. + if (implementation) { + return type; + } + + // If the previous declaration was itself an overloaded function, + // copy the entries from it. 
+ appendArray(overloadedTypes, OverloadedType.getOverloads(prevDeclDeclTypeInfo.decoratedType)); + } + } + } + + if (isFunction(type) && FunctionType.isOverloaded(type)) { + overloadedTypes.push(type); + } else { + implementation = type; + } + + if (overloadedTypes.length === 1 && !implementation) { + return overloadedTypes[0]; + } + + if (overloadedTypes.length === 0 && implementation) { + return implementation; + } + + // Apply the implementation's docstring to any overloads that don't + // have their own docstrings. + if (implementation && isFunction(implementation) && implementation.shared.docString) { + const docString = implementation.shared.docString; + overloadedTypes = overloadedTypes.map((overload) => { + if (FunctionType.isOverloaded(overload) && !overload.shared.docString) { + return FunctionType.cloneWithDocString(overload, docString); + } + return overload; + }); + } + + // PEP 702 indicates that if the implementation of an overloaded + // function is marked deprecated, all of the overloads should be + // treated as deprecated as well. + if (implementation && isFunction(implementation) && implementation.shared.deprecatedMessage !== undefined) { + const deprecationMessage = implementation.shared.deprecatedMessage; + overloadedTypes = overloadedTypes.map((overload) => { + if (FunctionType.isOverloaded(overload) && overload.shared.deprecatedMessage === undefined) { + return FunctionType.cloneWithDeprecatedMessage(overload, deprecationMessage); + } + return overload; + }); + } + + return OverloadedType.create(overloadedTypes, implementation); + } + } + + return type; +} + +// Given a @typing.deprecated call node, returns either '' or a custom +// deprecation message if one is provided. 
+export function getDeprecatedMessageFromCall(node: CallNode): string { + if ( + node.d.args.length > 0 && + node.d.args[0].d.argCategory === ArgCategory.Simple && + node.d.args[0].d.valueExpr.nodeType === ParseNodeType.StringList + ) { + const stringListNode = node.d.args[0].d.valueExpr; + const message = stringListNode.d.strings.map((s) => s.d.value).join(''); + return convertDocStringToPlainText(message); + } + + return ''; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/deprecatedSymbols.ts b/python-parser/packages/pyright-internal/src/analyzer/deprecatedSymbols.ts new file mode 100644 index 00000000..b5c65c93 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/deprecatedSymbols.ts @@ -0,0 +1,315 @@ +/* + * deprecatedSymbols.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * A list of implicitly-deprecated symbols as defined in PEP 585, etc. + */ + +import { PythonVersion, pythonVersion3_10, pythonVersion3_9 } from '../common/pythonVersion'; + +export interface DeprecatedForm { + // The version of Python where this symbol becomes deprecated + version: PythonVersion; + + // The full name of the deprecated type + fullName: string; + + // The replacement form + replacementText: string; + + // Indicates that the symbol is deprecated only if imported from `typing` + typingImportOnly?: boolean; +} + +export const deprecatedAliases = new Map([ + ['Tuple', { version: pythonVersion3_9, fullName: 'builtins.tuple', replacementText: 'tuple' }], + ['List', { version: pythonVersion3_9, fullName: 'builtins.list', replacementText: 'list' }], + ['Dict', { version: pythonVersion3_9, fullName: 'builtins.dict', replacementText: 'dict' }], + ['Set', { version: pythonVersion3_9, fullName: 'builtins.set', replacementText: 'set' }], + ['FrozenSet', { version: pythonVersion3_9, fullName: 'builtins.frozenset', replacementText: 'frozenset' }], + ['Type', { version: pythonVersion3_9, 
fullName: 'builtins.type', replacementText: 'type' }], + ['Deque', { version: pythonVersion3_9, fullName: 'collections.deque', replacementText: 'collections.deque' }], + [ + 'DefaultDict', + { + version: pythonVersion3_9, + fullName: 'collections.defaultdict', + replacementText: 'collections.defaultdict', + }, + ], + [ + 'OrderedDict', + { + version: pythonVersion3_9, + fullName: 'collections.OrderedDict', + replacementText: 'collections.OrderedDict', + typingImportOnly: true, + }, + ], + [ + 'Counter', + { + version: pythonVersion3_9, + fullName: 'collections.Counter', + replacementText: 'collections.Counter', + typingImportOnly: true, + }, + ], + [ + 'ChainMap', + { + version: pythonVersion3_9, + fullName: 'collections.ChainMap', + replacementText: 'collections.ChainMap', + typingImportOnly: true, + }, + ], + [ + 'Awaitable', + { + version: pythonVersion3_9, + fullName: 'typing.Awaitable', + replacementText: 'collections.abc.Awaitable', + typingImportOnly: true, + }, + ], + [ + 'Coroutine', + { + version: pythonVersion3_9, + fullName: 'typing.Coroutine', + replacementText: 'collections.abc.Coroutine', + typingImportOnly: true, + }, + ], + [ + 'AsyncIterable', + { + version: pythonVersion3_9, + fullName: 'typing.AsyncIterable', + replacementText: 'collections.abc.AsyncIterable', + typingImportOnly: true, + }, + ], + [ + 'AsyncIterator', + { + version: pythonVersion3_9, + fullName: 'typing.AsyncIterator', + replacementText: 'collections.abc.AsyncIterator', + typingImportOnly: true, + }, + ], + [ + 'AsyncGenerator', + { + version: pythonVersion3_9, + fullName: 'typing.AsyncGenerator', + replacementText: 'collections.abc.AsyncGenerator', + typingImportOnly: true, + }, + ], + [ + 'Iterable', + { + version: pythonVersion3_9, + fullName: 'typing.Iterable', + replacementText: 'collections.abc.Iterable', + typingImportOnly: true, + }, + ], + [ + 'Iterator', + { + version: pythonVersion3_9, + fullName: 'typing.Iterator', + replacementText: 'collections.abc.Iterator', + 
typingImportOnly: true, + }, + ], + [ + 'Generator', + { + version: pythonVersion3_9, + fullName: 'typing.Generator', + replacementText: 'collections.abc.Generator', + typingImportOnly: true, + }, + ], + [ + 'Reversible', + { + version: pythonVersion3_9, + fullName: 'typing.Reversible', + replacementText: 'collections.abc.Reversible', + typingImportOnly: true, + }, + ], + [ + 'Container', + { + version: pythonVersion3_9, + fullName: 'typing.Container', + replacementText: 'collections.abc.Container', + typingImportOnly: true, + }, + ], + [ + 'Collection', + { + version: pythonVersion3_9, + fullName: 'typing.Collection', + replacementText: 'collections.abc.Collection', + typingImportOnly: true, + }, + ], + [ + 'AbstractSet', + { + version: pythonVersion3_9, + fullName: 'typing.AbstractSet', + replacementText: 'collections.abc.Set', + typingImportOnly: true, + }, + ], + [ + 'MutableSet', + { + version: pythonVersion3_9, + fullName: 'typing.MutableSet', + replacementText: 'collections.abc.MutableSet', + typingImportOnly: true, + }, + ], + [ + 'Mapping', + { + version: pythonVersion3_9, + fullName: 'typing.Mapping', + replacementText: 'collections.abc.Mapping', + typingImportOnly: true, + }, + ], + [ + 'MutableMapping', + { + version: pythonVersion3_9, + fullName: 'typing.MutableMapping', + replacementText: 'collections.abc.MutableMapping', + typingImportOnly: true, + }, + ], + [ + 'Sequence', + { + version: pythonVersion3_9, + fullName: 'typing.Sequence', + replacementText: 'collections.abc.Sequence', + typingImportOnly: true, + }, + ], + [ + 'MutableSequence', + { + version: pythonVersion3_9, + fullName: 'typing.MutableSequence', + replacementText: 'collections.abc.MutableSequence', + typingImportOnly: true, + }, + ], + [ + 'ByteString', + { + version: pythonVersion3_9, + fullName: 'typing.ByteString', + replacementText: 'collections.abc.ByteString', + typingImportOnly: true, + }, + ], + [ + 'MappingView', + { + version: pythonVersion3_9, + fullName: 
'typing.MappingView', + replacementText: 'collections.abc.MappingView', + typingImportOnly: true, + }, + ], + [ + 'KeysView', + { + version: pythonVersion3_9, + fullName: 'typing.KeysView', + replacementText: 'collections.abc.KeysView', + typingImportOnly: true, + }, + ], + [ + 'ItemsView', + { + version: pythonVersion3_9, + fullName: 'typing.ItemsView', + replacementText: 'collections.abc.ItemsView', + typingImportOnly: true, + }, + ], + [ + 'ValuesView', + { + version: pythonVersion3_9, + fullName: 'typing.ValuesView', + replacementText: 'collections.abc.ValuesView', + typingImportOnly: true, + }, + ], + [ + 'ContextManager', + { + version: pythonVersion3_9, + fullName: 'typing.ContextManager', + replacementText: 'contextlib.AbstractContextManager', + }, + ], + [ + 'AsyncContextManager', + { + version: pythonVersion3_9, + fullName: 'typing.AsyncContextManager', + replacementText: 'contextlib.AbstractAsyncContextManager', + }, + ], + [ + 'Pattern', + { + version: pythonVersion3_9, + fullName: 're.Pattern', + replacementText: 're.Pattern', + typingImportOnly: true, + }, + ], + [ + 'Match', + { + version: pythonVersion3_9, + fullName: 're.Match', + replacementText: 're.Match', + typingImportOnly: true, + }, + ], +]); + +export const deprecatedSpecialForms = new Map([ + ['Optional', { version: pythonVersion3_10, fullName: 'typing.Optional', replacementText: '| None' }], + ['Union', { version: pythonVersion3_10, fullName: 'typing.Union', replacementText: '|' }], + [ + 'Callable', + { + version: pythonVersion3_9, + fullName: 'typing.Callable', + replacementText: 'collections.abc.Callable', + typingImportOnly: true, + }, + ], +]); diff --git a/python-parser/packages/pyright-internal/src/analyzer/docStringConversion.ts b/python-parser/packages/pyright-internal/src/analyzer/docStringConversion.ts new file mode 100644 index 00000000..dbd2203d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/docStringConversion.ts @@ -0,0 +1,872 @@ +/* + * 
docStringConversion.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Python doc string to markdown/plain text format conversion. + */ + +import { cleanAndSplitDocString } from './docStringUtils'; + +// Converts a docstring to markdown format. +// +// This does various things, including removing common indention, escaping +// characters, handling code blocks, and more. +// +// This is a straight port of +// https://github.com/microsoft/python-language-server/blob/master/src/LanguageServer/Impl/Documentation/DocstringConverter.cs +// +// The restructured npm library was evaluated, and while it worked well for +// parsing valid input, it was going to be more difficult to handle invalid +// RST input. +export function convertDocStringToMarkdown(docString: string): string { + return new DocStringConverter(docString).convert(); +} + +// Converts a docstring to a plaintext, human readable form. This will +// first strip any common leading indention (like inspect.cleandoc), +// then remove duplicate empty/whitespace lines. +export function convertDocStringToPlainText(docString: string): string { + const lines = cleanAndSplitDocString(docString); + const output: string[] = []; + + for (const line of lines) { + const last = output.length > 0 ? 
output[output.length - 1] : undefined; + if (_isUndefinedOrWhitespace(line) && _isUndefinedOrWhitespace(last)) { + continue; + } + + output.push(line); + } + + return output.join('\n').trimEnd(); +} + +interface RegExpReplacement { + exp: RegExp; + replacement: string; +} + +// Regular expressions for one match +const LeadingSpaceCountRegExp = /\S|$/; +const NonWhitespaceRegExp = /\S/; +const TildaHeaderRegExp = /^\s*~~~+$/; +const PlusHeaderRegExp = /^\s*\+\+\++$/; +const EqualHeaderRegExp = /^\s*===+\s+===+$/; +const DashHeaderRegExp = /^\s*---+\s+---+$/; +const LeadingDashListRegExp = /^(\s*)-\s/; +const LeadingAsteriskListRegExp = /^(\s*)\*\s/; +const LeadingNumberListRegExp = /^(\s*)\d+\.\s/; +const LeadingAsteriskRegExp = /^(\s+\* )(.*)$/; +const SpaceDotDotRegExp = /^\s*\.\. /; +const DirectiveLikeRegExp = /^\s*\.\.\s+(.*)::\s*(.*)$/; +const DoctestRegExp = / *>>> /; +const DirectivesExtraNewlineRegExp = /^\s*:(param|arg|type|return|rtype|raise|except|var|ivar|cvar|copyright|license)/; +const epyDocFieldTokensRegExp = /^\.[\s\t]+(@\w)/gm; // cv2 has leading '.' 
http://epydoc.sourceforge.net/manual-epytext.html +const epyDocCv2FixRegExp = /^(\.\s{3})|^(\.)/; + +const PotentialHeaders: RegExpReplacement[] = [ + { exp: /^\s*=+(\s+=+)+$/, replacement: '=' }, + { exp: /^\s*-+(\s+-+)+$/, replacement: '-' }, + { exp: /^\s*~+(\s+-+)+$/, replacement: '~' }, + { exp: /^\s*\++(\s+\++)+$/, replacement: '+' }, +]; + +// Regular expressions for replace all +const WhitespaceRegExp = /\s/g; +const DoubleTickRegExp = /``/g; +const TildeRegExp = /~/g; +const PlusRegExp = /\+/g; +const UnescapedMarkdownCharsRegExp = /(?`{3}(?!`)|~{3}(?!~))(\w*)/; +const CodeBlockEndRegExp = /^\s*(?`{3}(?!`)|~{3}(?!~))/; + +const HtmlEscapes: RegExpReplacement[] = [ + { exp: //g, replacement: '>' }, +]; + +const MarkdownLineBreak = ' \n'; +// http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#literal-blocks +const LiteralBlockEmptyRegExp = /^\s*::$/; +const LiteralBlockReplacements: RegExpReplacement[] = [ + { exp: /\s+::$/g, replacement: '' }, + { exp: /(\S)\s*::$/g, replacement: '$1:' }, + // http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#interpreted-text + { exp: /:[\w_\-+:.]+:`/g, replacement: '`' }, + { exp: /`:[\w_\-+:.]+:/g, replacement: '`' }, +]; + +// Converter is a state machine, where the current state is a function which +// will be run (and change the state again) until completion. 
+type State = () => void; + +interface RestTableState { + header: string; + inHeader: boolean; +} + +class DocStringConverter { + private _builder = ''; + private _skipAppendEmptyLine = true; + private _insideInlineCode = false; + private _appendDirectiveBlock = false; + + private _state: State; + private _stateStack: State[] = []; + private _input: string; + + private _lines: string[]; + private _lineNum = 0; + + private _blockIndent = 0; + + private _tableState: RestTableState | undefined; + private _lastBacktickString: string | undefined; + + constructor(input: string) { + this._state = this._parseText; + this._input = input; + this._lines = cleanAndSplitDocString(input); + } + + convert(): string { + const isEpyDoc = epyDocFieldTokensRegExp.test(this._input); + if (isEpyDoc) { + // fixup cv2 leading '.' + this._lines = this._lines.map((v) => v.replace(epyDocCv2FixRegExp, '')); + } + + while (this._currentLineOrUndefined() !== undefined) { + const before = this._state; + const beforeLine = this._lineNum; + + this._state(); + + // Parser must make progress; either the state or line number must change. + if (this._state === before && this._lineNum === beforeLine) { + break; + } + } + + // Close out any outstanding code blocks. + if ( + this._state === this._parseBacktickBlock || + this._state === this._parseDocTest || + this._state === this._parseLiteralBlock + ) { + // See what the current backtick block is. We want to match it. + this._trimOutputAndAppendLine(this._lastBacktickString || '```'); + } else if (this._insideInlineCode) { + this._trimOutputAndAppendLine('`', /* noNewLine */ true); + } + + return this._builder.trim(); + } + + private _eatLine() { + this._lineNum++; + } + + private _currentLineOrUndefined(): string | undefined { + return this._lineNum < this._lines.length ? 
this._lines[this._lineNum] : undefined; + } + + private _currentLine(): string { + return this._currentLineOrUndefined() || ''; + } + + private _currentIndent(): number { + return _countLeadingSpaces(this._currentLine()); + } + + private _prevIndent(): number { + return _countLeadingSpaces(this._lineAt(this._lineNum - 1) ?? ''); + } + + private _lineAt(i: number): string | undefined { + return i < this._lines.length ? this._lines[i] : undefined; + } + + private _nextBlockIndent(): number { + return _countLeadingSpaces( + this._lines.slice(this._lineNum + 1).find((v) => !_isUndefinedOrWhitespace(v)) || '' + ); + } + + private _currentLineIsOutsideBlock(): boolean { + return this._currentIndent() < this._blockIndent; + } + + private _currentLineWithinBlock(): string { + return this._currentLine().substr(this._blockIndent); + } + + private _pushAndSetState(next: State): void { + if (this._state === this._parseText) { + this._insideInlineCode = false; + } + + this._stateStack.push(this._state); + this._state = next; + } + + private _popState(): void { + this._state = this._stateStack.splice(0, 1)[0]; + + if (this._state === this._parseText) { + // Terminate inline code when leaving a block. 
+ this._insideInlineCode = false; + } + } + + private _parseText(): void { + if (_isUndefinedOrWhitespace(this._currentLineOrUndefined())) { + this._state = this._parseEmpty; + return; + } + + if (this._beginBacktickBlock()) { + return; + } + + if (this._beginLiteralBlock()) { + return; + } + + if (this._beginDocTest()) { + return; + } + + if (this._beginDirective()) { + return; + } + + if (this._beginList()) { + return; + } + + if (this._beginFieldList()) { + return; + } + + if (this._beginTableBlock()) { + return; + } + + const line = this._formatPlainTextIndent(this._currentLine()); + + this._appendTextLine(line); + this._eatLine(); + } + + private _formatPlainTextIndent(line: string) { + const prev = this._lineAt(this._lineNum - 1); + const prevIndent = this._prevIndent(); + const currIndent = this._currentIndent(); + + if ( + currIndent > prevIndent && + !_isUndefinedOrWhitespace(prev) && + !this._builder.endsWith(MarkdownLineBreak) && + !this._builder.endsWith('\n\n') && + !_isHeader(prev) + ) { + this._builder = this._builder.slice(0, -1) + MarkdownLineBreak; + } + + if ( + prevIndent > currIndent && + !_isUndefinedOrWhitespace(prev) && + !this._builder.endsWith(MarkdownLineBreak) && + !this._builder.endsWith('\n\n') + ) { + this._builder = this._builder.slice(0, -1) + MarkdownLineBreak; + } + + if (prevIndent === 0 || this._builder.endsWith(MarkdownLineBreak) || this._builder.endsWith('\n\n')) { + line = this._convertIndent(line); + } else { + line = line.trimStart(); + } + return line; + } + + private _convertIndent(line: string) { + line = line.replace(/^([ \t]+)(.+)$/g, (_match, g1, g2) => ' '.repeat(g1.length) + g2); + return line; + } + + private _escapeHtml(line: string): string { + HtmlEscapes.forEach((escape) => { + line = line.replace(escape.exp, escape.replacement); + }); + + return line; + } + + private _appendTextLine(line: string): void { + line = this._preprocessTextLine(line); + + const parts = line.split('`'); + + for (let i = 0; i < 
parts.length; i++) { + let part = parts[i]; + + if (i > 0) { + this._insideInlineCode = !this._insideInlineCode; + this._append('`'); + } + + if (this._insideInlineCode) { + this._append(part); + continue; + } + + part = this._escapeHtml(part); + + if (i === 0) { + // Only one part, and not inside code, so check header cases. + if (parts.length === 1) { + // Handle weird separator lines which contain random spaces. + for (const expReplacement of PotentialHeaders) { + if (expReplacement.exp.test(part)) { + part = part.replace(WhitespaceRegExp, expReplacement.replacement); + break; + } + } + + // Replace ReST style ~~~ header to prevent it being interpreted as a code block + // (an alternative in Markdown to triple backtick blocks). + if (TildaHeaderRegExp.test(part)) { + this._append(part.replace(TildeRegExp, '-')); + continue; + } + + // Replace +++ heading too. + // TODO: Handle the rest of these, and the precedence order (which depends on the + // order heading lines are seen, not what the line contains). + // http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#sections + if (PlusHeaderRegExp.test(part)) { + this._append(part.replace(PlusRegExp, '-')); + continue; + } + } + + // Don't strip away asterisk-based bullet point lists. + // + // TODO: Replace this with real list parsing. This may have + // false positives and cause random italics when the ReST list + // doesn't match Markdown's specification. + const match = LeadingAsteriskRegExp.exec(part); + if (match !== null && match.length === 3) { + this._append(match[1]); + part = match[2]; + } + } + + // TODO: Find a better way to handle this; the below breaks escaped + // characters which appear at the beginning or end of a line. + // Applying this only when i == 0 or i == parts.Length-1 may work. 
+ + // http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#hyperlink-references + // part = RegExp.Replace(part, @"^_+", ""); + // http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#inline-internal-targets + // part = RegExp.Replace(part, @"_+$", ""); + + // TODO: Strip footnote/citation references. + + // Escape _, *, and ~, but ignore things like ":param \*\*kwargs:". + const subparts = part.split(linkRegExp); + subparts.forEach((item) => { + // Don't escape links + if (linkRegExp.test(item)) { + this._append(item); + } else { + this._append(item.replace(UnescapedMarkdownCharsRegExp, '\\$1')); + } + }); + } + + // Go straight to the builder so that _appendLine doesn't think + // we're actually trying to insert an extra blank line and skip + // future whitespace. Empty line deduplication is already handled + // because Append is used above. + this._builder += '\n'; + } + + private _preprocessTextLine(line: string): string { + // http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#literal-blocks + if (LiteralBlockEmptyRegExp.test(line)) { + return ''; + } + + LiteralBlockReplacements.forEach((item) => (line = line.replace(item.exp, item.replacement))); + + line = line.replace(DoubleTickRegExp, '`'); + return line; + } + + private _parseEmpty(): void { + if (_isUndefinedOrWhitespace(this._currentLineOrUndefined())) { + this._appendLine(); + this._eatLine(); + return; + } + + this._state = this._parseText; + } + + private _beginMinIndentCodeBlock(state: State): void { + this._appendLine('```'); + this._pushAndSetState(state); + this._blockIndent = this._currentIndent(); + } + + private _beginBacktickBlock(): boolean { + const match = this._currentLine().match(CodeBlockStartRegExp); + if (match !== null) { + this._blockIndent = this._currentIndent(); + this._lastBacktickString = match[1]; + + // Remove indentation and preserve language tag. 
+ this._appendLine(match[1] + match[2]); + + this._pushAndSetState(this._parseBacktickBlock); + this._eatLine(); + return true; + } + return false; + } + + private _parseBacktickBlock(): void { + // Only match closing ``` at same indent level of opening. + if (CodeBlockEndRegExp.test(this._currentLine()) && this._currentIndent() === this._blockIndent) { + const match = this._currentLine().match(CodeBlockEndRegExp); + this._lastBacktickString = match ? match[1] : '```'; + this._appendLine(this._lastBacktickString); + this._appendLine(); + this._popState(); + } else { + this._appendLine(this._currentLine()); + } + + this._eatLine(); + } + + private _beginDocTest(): boolean { + if (!DoctestRegExp.test(this._currentLine())) { + return false; + } + + this._beginMinIndentCodeBlock(this._parseDocTest); + this._appendLine(this._currentLineWithinBlock()); + this._eatLine(); + return true; + } + + private _parseDocTest(): void { + if (this._currentLineIsOutsideBlock() || _isUndefinedOrWhitespace(this._currentLine())) { + this._trimOutputAndAppendLine('```'); + this._appendLine(); + this._popState(); + return; + } + + this._appendLine(this._currentLineWithinBlock()); + this._eatLine(); + } + + private _beginLiteralBlock(): boolean { + // The previous line must be empty. + const prev = this._lineAt(this._lineNum - 1); + if (prev === undefined) { + return false; + } else if (!_isUndefinedOrWhitespace(prev)) { + return false; + } + + // Find the previous paragraph and check that it ends with :: + let i = this._lineNum - 2; + for (; i >= 0; i--) { + const line = this._lineAt(i); + if (_isUndefinedOrWhitespace(line)) { + continue; + } + + // Safe to ignore whitespace after the :: because all lines have been trimRight'd. + if (line!.endsWith('::')) { + break; + } + + return false; + } + + if (i < 0) { + return false; + } + + // Special case: allow one-liners at the same indent level. 
+ if (this._currentIndent() === 0) { + this._appendLine('```'); + this._pushAndSetState(this._parseLiteralBlockSingleLine); + return true; + } + + this._beginMinIndentCodeBlock(this._parseLiteralBlock); + return true; + } + + private _parseLiteralBlock(): void { + // Slightly different than doctest, wait until the first non-empty unindented line to exit. + if (_isUndefinedOrWhitespace(this._currentLineOrUndefined())) { + this._appendLine(); + this._eatLine(); + return; + } + + const prev = this._lineAt(this._lineNum - 1); + if (this._currentLineIsOutsideBlock() && _isUndefinedOrWhitespace(prev)) { + this._trimOutputAndAppendLine('```'); + this._appendLine(); + this._popState(); + return; + } + + this._appendLine(this._currentLine()); + this._eatLine(); + } + + private _parseLiteralBlockSingleLine(): void { + this._appendLine(this._currentLine()); + this._appendLine('```'); + this._appendLine(); + this._popState(); + this._eatLine(); + } + + private _beginDirective(): boolean { + if (!SpaceDotDotRegExp.test(this._currentLine())) { + return false; + } + + this._pushAndSetState(this._parseDirective); + this._blockIndent = this._nextBlockIndent(); + this._appendDirectiveBlock = false; + return true; + } + + // https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#field-lists + // Python doesn't have a single standard for param documentation. There are four + // popular styles. + // + // 1. Epytext: + // @param param1: description + // 2. reST: + // :param param1: description + // 3. Google (variant 1): + // Args: + // param1: description + // 4. 
Google (variant 2): + // Args: + // param1 (type): description + private _beginFieldList(): boolean { + if (this._insideInlineCode) { + return false; + } + + let line = this._currentLine(); + + // Handle epyDocs + if (line.startsWith('@')) { + this._appendLine(); + this._appendTextLine(line); + this._eatLine(); + return true; + } + + // catch-all for styles except reST + const hasArgs = !line?.endsWith(':') && !line?.endsWith('::') && !!line.match(/.*?\s*:\s*(.+)/gu); + + // reSt params. Attempt to put directives lines into their own paragraphs. + const restDirective = DirectivesExtraNewlineRegExp.test(line); //line.match(/^\s*:param/); + + if (hasArgs || restDirective) { + const prev = this._lineAt(this._lineNum - 1); + // Force a line break, if previous line doesn't already have a break or is blank + if (!this._builder.endsWith(MarkdownLineBreak) && !this._builder.endsWith(`\n\n`) && !_isHeader(prev)) { + this._builder = this._builder.slice(0, -1) + MarkdownLineBreak; + } + + // force indent for fields + line = this._convertIndent(line); + this._appendTextLine(line); + this._eatLine(); + return true; + } + + return false; + } + + private _beginTableBlock(): boolean { + if (this._insideInlineCode) { + return false; + } + + const line = this._currentLine(); + + if (EqualHeaderRegExp.test(line)) { + this._tableState = { header: line.trimStart(), inHeader: true }; + this._eatLine(); + this._pushAndSetState(this._parseTableBlock); + return true; + } + + return false; + } + + // Converts ReST style tables to ones that vscode will render. 
+ // + // ReST: + // ========= ============ + // Syntax Description + // --------- ------------ + // Header Title + // Paragraph Text + // ========= ============ + // + // Markdown: + // | Syntax | Description | + // | ----------- | ----------- | + // | Header | Title | + // | Paragraph | Text | + private _parseTableBlock(): void { + if (_isUndefinedOrWhitespace(this._currentLineOrUndefined()) || !this._tableState) { + this._tableState = undefined; + this._popState(); + return; + } + + let line = this._currentLine(); + + if (EqualHeaderRegExp.test(line)) { + this._eatLine(); + this._appendLine('\n
\n'); + this._popState(); + this._tableState = undefined; + return; + } else { + let formattedLine = '|'; + const columnParts = this._tableState.header.split(' '); + const headerStrings: string[] = []; + + if (this._tableState.inHeader) { + do { + // Special header parsing to handle multiline headers + // for now we just append the multi header rows into a single line + // using the html
to signify newlines, but vscode doesn't seem to support it yet + // So headers will appear as a single line for now + let colStart = 0; + for (let i = 0; i < columnParts.length; i++) { + const equalStr = columnParts[i]; + const len = equalStr.length + 1; + const columnStr = line.slice(colStart, colStart + len); + + if (headerStrings[i] === undefined) { + headerStrings[i] = `${columnStr} `; + } else { + headerStrings[i] = headerStrings[i].concat(`
${columnStr} `); + } + colStart += len; + } + this._eatLine(); + line = this._currentLine(); + } while ( + !_isUndefinedOrWhitespace(this._currentLineOrUndefined()) && + !DashHeaderRegExp.test(line) && + !EqualHeaderRegExp.test(line) + ); + + this._tableState.inHeader = false; + + // Append header + headerStrings.forEach((h) => { + formattedLine += `${h}|`; + }); + this._appendLine(formattedLine); + + // Convert header end + const endHeaderStr = line.trimStart().replace(/=/g, '-').replace(' ', '|'); + this._appendLine(`|${endHeaderStr}|`); + this._eatLine(); + } else { + // Normal row parsing + let colStart = 0; + columnParts.forEach((column) => { + const len = column.length + 1; + const columnStr = line.slice(colStart, colStart + len); + formattedLine += `${columnStr}|`; + + colStart += len; + }); + + this._appendLine(formattedLine); + this._eatLine(); + } + } + } + + private _beginList(): boolean { + if (this._insideInlineCode) { + return false; + } + + let line = this._currentLine(); + const dashMatch = LeadingDashListRegExp.exec(line); + if (dashMatch?.length === 2) { + // Prevent list item from being see as code, by halving leading spaces + if (dashMatch[1].length >= 4) { + line = ' '.repeat(dashMatch[1].length / 2) + line.trimLeft(); + } + + this._appendTextLine(line); + this._eatLine(); + + if (this._state !== this._parseList) { + this._pushAndSetState(this._parseList); + } + return true; + } + + const asteriskMatch = LeadingAsteriskListRegExp.exec(line); + if (asteriskMatch?.length === 2) { + if (asteriskMatch[1].length === 0) { + line = line = ' ' + line; + } else if (asteriskMatch[1].length >= 4) { + // Prevent list item from being see as code, by halving leading spaces + line = ' '.repeat(asteriskMatch[1].length / 2) + line.trimLeft(); + } + + this._appendTextLine(line); + this._eatLine(); + if (this._state !== this._parseList) { + this._pushAndSetState(this._parseList); + } + return true; + } + + const leadingNumberList = 
LeadingNumberListRegExp.exec(line); + if (leadingNumberList?.length === 2) { + this._appendTextLine(line); + this._eatLine(); + return true; + } + + return false; + } + + private _parseList(): void { + if (_isUndefinedOrWhitespace(this._currentLineOrUndefined()) || this._currentLineIsOutsideBlock()) { + this._popState(); + return; + } + + // Check for the start of a new list item + const isMultiLineItem = !this._beginList(); + + // Remove leading spaces so that multiline items get appear in a single block + if (isMultiLineItem) { + const line = this._currentLine().trimStart(); + this._appendTextLine(line); + this._eatLine(); + } + } + + private _parseDirective(): void { + // http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#directives + + const match = DirectiveLikeRegExp.exec(this._currentLine()); + if (match !== null && match.length === 3) { + const directiveType = match[1]; + const directive = match[2]; + + if (directiveType === 'class') { + this._appendDirectiveBlock = true; + this._appendLine(); + this._appendLine('```'); + this._appendLine(directive); + this._appendLine('```'); + this._appendLine(); + } else if (directiveType === 'code-block') { + this._appendDirectiveBlock = true; + this._beginMinIndentCodeBlock(this._parseLiteralBlock); + this._eatLine(); + return; + } + } + + if (this._blockIndent === 0) { + // This is a one-liner directive, so pop back. + this._popState(); + } else { + this._state = this._parseDirectiveBlock; + } + + this._eatLine(); + } + + private _parseDirectiveBlock(): void { + if (!_isUndefinedOrWhitespace(this._currentLineOrUndefined()) && this._currentLineIsOutsideBlock()) { + this._popState(); + return; + } + + if (this._appendDirectiveBlock) { + // This is a bit of a hack. This just trims the text and appends it + // like top-level text, rather than doing actual indent-based recursion. 
+ this._appendTextLine(this._currentLine().trimLeft()); + } + + this._eatLine(); + } + + private _appendLine(line?: string): void { + if (!_isUndefinedOrWhitespace(line)) { + this._builder += line + '\n'; + this._skipAppendEmptyLine = false; + } else if (!this._skipAppendEmptyLine) { + this._builder += '\n'; + this._skipAppendEmptyLine = true; + } + } + + private _append(text: string): void { + this._builder += text; + this._skipAppendEmptyLine = false; + } + + private _trimOutputAndAppendLine(line: string, noNewLine = false): void { + this._builder = this._builder.trimRight(); + this._skipAppendEmptyLine = false; + + if (!noNewLine) { + this._appendLine(); + } + + this._appendLine(line); + } +} + +function _countLeadingSpaces(s: string): number { + return s.search(LeadingSpaceCountRegExp); +} + +function _isUndefinedOrWhitespace(s: string | undefined): boolean { + return s === undefined || !NonWhitespaceRegExp.test(s); +} + +function _isHeader(line: string | undefined): boolean { + return line !== undefined && (line.match(/^\s*[#`~=-]{3,}/)?.length ?? 0) > 0; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/docStringUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/docStringUtils.ts new file mode 100644 index 00000000..e6c94fbd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/docStringUtils.ts @@ -0,0 +1,152 @@ +/* + * docStringUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Static methods that format and parse doc strings based on + * the rules specified in PEP 257 + * (https://www.python.org/dev/peps/pep-0257/). + */ + +const docStringCrRegEx = /\r/g; +const docStringTabRegEx = /\t/g; + +export function cleanAndSplitDocString(rawString: string): string[] { + // Remove carriage returns and replace tabs. + const unescaped = rawString.replace(docStringCrRegEx, '').replace(docStringTabRegEx, ' '); + + // Split into lines. 
+ const lines = unescaped.split('\n'); + + // Determine the max indent amount. + let leftSpacesToRemove = Number.MAX_VALUE; + lines.forEach((line, index) => { + // First line is special. + if (lines.length <= 1 || index > 0) { + const trimmed = line.trimLeft(); + if (trimmed) { + leftSpacesToRemove = Math.min(leftSpacesToRemove, line.length - trimmed.length); + } + } + }); + + // Handle the case where there were only empty lines. + if (leftSpacesToRemove >= Number.MAX_VALUE) { + leftSpacesToRemove = 0; + } + + // Trim the lines. + const trimmedLines: string[] = []; + lines.forEach((line, index) => { + if (index === 0) { + trimmedLines.push(line.trim()); + } else { + trimmedLines.push(line.substr(leftSpacesToRemove).trimRight()); + } + }); + + // Strip off leading and trailing blank lines. + while (trimmedLines.length > 0 && trimmedLines[0].length === 0) { + trimmedLines.shift(); + } + + while (trimmedLines.length > 0 && trimmedLines[trimmedLines.length - 1].length === 0) { + trimmedLines.pop(); + } + + return trimmedLines; +} + +export function extractParameterDocumentation(functionDocString: string, paramName: string): string | undefined { + if (!functionDocString || !paramName) { + return undefined; + } + + // Python doesn't have a single standard for param documentation. There are three + // popular styles. + // + // 1. Epytext: + // @param param1: description + // 2. reST: + // :param param1: description + // 3. Google (variant 1): + // Args: + // param1: description + // 4. 
Google (variant 2): + // Args: + // param1 (type): description + + const docStringLines = cleanAndSplitDocString(functionDocString); + for (const line of docStringLines) { + const trimmedLine = line.trim(); + + // Check for Epytext + let paramOffset = trimmedLine.indexOf('@param ' + paramName); + if (paramOffset >= 0) { + return trimmedLine.substr(paramOffset + 7); + } + + // Check for reST format + paramOffset = trimmedLine.indexOf(':param ' + paramName); + if (paramOffset >= 0) { + return trimmedLine.substr(paramOffset + 7); + } + + // Check for Google (variant 1) format + paramOffset = trimmedLine.indexOf(paramName + ': '); + if (paramOffset >= 0) { + return trimmedLine.substr(paramOffset); + } + + // Check for Google (variant 2) format + paramOffset = trimmedLine.indexOf(paramName + ' ('); + if (paramOffset >= 0) { + return trimmedLine.substr(paramOffset); + } + } + + return undefined; +} + +export function extractAttributeDocumentation(classDocString: string, attrName: string): string | undefined { + if (!classDocString || !attrName) { + return undefined; + } + + // Python documentation styles for attributes: + // + // 1. reST: + // :ivar attr1: description + // 2. Google: + // Attributes: + // attr1: description + // 3. 
Google (with type): + // Attributes: + // attr1 (type): description + + const docStringLines = cleanAndSplitDocString(classDocString); + for (const line of docStringLines) { + const trimmedLine = line.trim(); + + // Check for reST format + let attrOffset = trimmedLine.indexOf(':ivar ' + attrName); + if (attrOffset >= 0) { + return trimmedLine.substr(attrOffset + 6); + } + + // Check for Google (variant 1) format + attrOffset = trimmedLine.indexOf(attrName + ': '); + if (attrOffset >= 0) { + return trimmedLine.substr(attrOffset); + } + + // Check for Google (variant 2) format + attrOffset = trimmedLine.indexOf(attrName + ' ('); + if (attrOffset >= 0) { + return trimmedLine.substr(attrOffset); + } + } + + return undefined; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/enums.ts b/python-parser/packages/pyright-internal/src/analyzer/enums.ts new file mode 100644 index 00000000..16bab708 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/enums.ts @@ -0,0 +1,750 @@ +/* + * enums.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides special-case logic for the Enum class. 
+ */ + +import { assert } from '../common/debug'; +import { PythonVersion, pythonVersion3_13 } from '../common/pythonVersion'; +import { ArgCategory, ExpressionNode, NameNode, ParseNode, ParseNodeType } from '../parser/parseNodes'; +import { getFileInfo } from './analyzerNodeInfo'; +import { VariableDeclaration } from './declaration'; +import { getClassFullName, getEnclosingClass, getTypeSourceId } from './parseTreeUtils'; +import { Symbol, SymbolFlags } from './symbol'; +import { isPrivateName, isSingleDunderName } from './symbolNameUtils'; +import { Arg, EvalFlags, TypeEvaluator, TypeResult } from './typeEvaluatorTypes'; +import { enumerateLiteralsForType } from './typeGuards'; +import { MemberAccessFlags, computeMroLinearization, lookUpClassMember, makeInferenceContext } from './typeUtils'; +import { + AnyType, + ClassType, + ClassTypeFlags, + EnumLiteral, + Type, + TypeBase, + UnknownType, + combineTypes, + findSubtype, + isAny, + isClass, + isClassInstance, + isFunction, + isFunctionOrOverloaded, + isInstantiableClass, + maxTypeRecursionCount, +} from './types'; + +interface EnumEvalStackEntry { + classType: ClassType; + memberName: string; +} + +// This stack is used to prevent infinite recursion when evaluating +// enum members that refer to other enum members. +const enumEvalStack: EnumEvalStackEntry[] = []; + +// Determines whether the class is an Enum metaclass or a subclass thereof. +export function isEnumMetaclass(classType: ClassType) { + return classType.shared.mro.some( + (mroClass) => isClass(mroClass) && ClassType.isBuiltIn(mroClass, ['EnumMeta', 'EnumType']) + ); +} + +// Determines whether this is an enum class that has at least one enum +// member defined. +export function isEnumClassWithMembers(evaluator: TypeEvaluator, classType: ClassType) { + if (!isClass(classType) || !ClassType.isEnumClass(classType)) { + return false; + } + + // Determine whether the enum class defines a member. 
+ const symbolTable = ClassType.getSymbolTable(classType); + for (const name of symbolTable.keys()) { + const symbolType = transformTypeForEnumMember(evaluator, classType, name); + if ( + symbolType && + isClassInstance(symbolType) && + ClassType.isSameGenericClass(symbolType, ClassType.cloneAsInstance(classType)) + ) { + return true; + } + } + + return false; +} + +// Creates a new custom enum class with named values. +export function createEnumType( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + enumClass: ClassType, + argList: Arg[] +): ClassType | undefined { + const fileInfo = getFileInfo(errorNode); + const isReprEnum = isReprEnumClass(enumClass); + + if (argList.length === 0) { + return undefined; + } + + const nameArg = argList[0]; + if ( + nameArg.argCategory !== ArgCategory.Simple || + !nameArg.valueExpression || + nameArg.valueExpression.nodeType !== ParseNodeType.StringList || + nameArg.valueExpression.d.strings.length !== 1 || + nameArg.valueExpression.d.strings[0].nodeType !== ParseNodeType.String + ) { + return undefined; + } + + const className = nameArg.valueExpression.d.strings.map((s) => s.d.value).join(''); + const classType = ClassType.createInstantiable( + className, + getClassFullName(errorNode, fileInfo.moduleName, className), + fileInfo.moduleName, + fileInfo.fileUri, + ClassTypeFlags.EnumClass | ClassTypeFlags.ValidTypeAliasClass, + getTypeSourceId(errorNode), + /* declaredMetaclass */ undefined, + enumClass.shared.effectiveMetaclass + ); + classType.shared.baseClasses.push(enumClass); + computeMroLinearization(classType); + + const classFields = ClassType.getSymbolTable(classType); + classFields.set( + '__class__', + Symbol.createWithType(SymbolFlags.ClassMember | SymbolFlags.IgnoredForProtocolMatch, classType) + ); + + if (argList.length < 2) { + return undefined; + } + + const initArg = argList[1]; + if (initArg.argCategory !== ArgCategory.Simple || !initArg.valueExpression) { + return undefined; + } + + const intClassType = 
evaluator.getBuiltInType(errorNode, 'int'); + if (!intClassType || !isInstantiableClass(intClassType)) { + return undefined; + } + const classInstanceType = ClassType.cloneAsInstance(classType); + + // The Enum functional form supports various forms of arguments: + // Enum('name', 'a b c') + // Enum('name', 'a,b,c') + // Enum('name', ['a', 'b', 'c']) + // Enum('name', ('a', 'b', 'c')) + // Enum('name', (('a', 1), ('b', 2), ('c', 3))) + // Enum('name', [('a', 1), ('b', 2), ('c', 3))] + // Enum('name', {'a': 1, 'b': 2, 'c': 3}) + if (initArg.valueExpression.nodeType === ParseNodeType.StringList) { + // Don't allow format strings in the init arg. + if (!initArg.valueExpression.d.strings.every((str) => str.nodeType === ParseNodeType.String)) { + return undefined; + } + + const initStr = initArg.valueExpression.d.strings + .map((s) => s.d.value) + .join('') + .trim(); + + // Split by comma or whitespace. + const entryNames = initStr.split(/[\s,]+/); + + for (const [index, entryName] of entryNames.entries()) { + if (!entryName) { + return undefined; + } + + const valueType = ClassType.cloneWithLiteral(ClassType.cloneAsInstance(intClassType), index + 1); + + const enumLiteral = new EnumLiteral( + classType.shared.fullName, + classType.shared.name, + entryName, + valueType, + isReprEnum + ); + + const newSymbol = Symbol.createWithType( + SymbolFlags.ClassMember, + ClassType.cloneWithLiteral(classInstanceType, enumLiteral) + ); + + classFields.set(entryName, newSymbol); + } + + return classType; + } + + if ( + initArg.valueExpression.nodeType === ParseNodeType.List || + initArg.valueExpression.nodeType === ParseNodeType.Tuple + ) { + const entries = + initArg.valueExpression.nodeType === ParseNodeType.List + ? initArg.valueExpression.d.items + : initArg.valueExpression.d.items; + + if (entries.length === 0) { + return undefined; + } + + // Entries can be either string literals or tuples of a string + // literal and a value. All entries must follow the same pattern. 
+ let isSimpleString = false; + for (const [index, entry] of entries.entries()) { + if (index === 0) { + isSimpleString = entry.nodeType === ParseNodeType.StringList; + } + + let nameNode: ParseNode | undefined; + let valueType: Type | undefined; + + if (entry.nodeType === ParseNodeType.StringList) { + if (!isSimpleString) { + return undefined; + } + + nameNode = entry; + valueType = ClassType.cloneWithLiteral(ClassType.cloneAsInstance(intClassType), index + 1); + } else if (entry.nodeType === ParseNodeType.Tuple) { + if (isSimpleString) { + return undefined; + } + + if (entry.d.items.length !== 2) { + return undefined; + } + nameNode = entry.d.items[0]; + valueType = evaluator.getTypeOfExpression(entry.d.items[1]).type; + } else { + return undefined; + } + + if ( + nameNode.nodeType !== ParseNodeType.StringList || + nameNode.d.strings.length !== 1 || + nameNode.d.strings[0].nodeType !== ParseNodeType.String + ) { + return undefined; + } + + const entryName = nameNode.d.strings[0].d.value; + + const enumLiteral = new EnumLiteral( + classType.shared.fullName, + classType.shared.name, + entryName, + valueType, + isReprEnum + ); + + const newSymbol = Symbol.createWithType( + SymbolFlags.ClassMember, + ClassType.cloneWithLiteral(classInstanceType, enumLiteral) + ); + + classFields.set(entryName, newSymbol); + } + } + + if (initArg.valueExpression.nodeType === ParseNodeType.Dictionary) { + const entries = initArg.valueExpression.d.items; + if (entries.length === 0) { + return undefined; + } + + for (const entry of entries) { + // Don't support dictionary expansion expressions. 
+ if (entry.nodeType !== ParseNodeType.DictionaryKeyEntry) { + return undefined; + } + + const nameNode = entry.d.keyExpr; + const valueType = evaluator.getTypeOfExpression(entry.d.valueExpr).type; + + if ( + nameNode.nodeType !== ParseNodeType.StringList || + nameNode.d.strings.length !== 1 || + nameNode.d.strings[0].nodeType !== ParseNodeType.String + ) { + return undefined; + } + + const entryName = nameNode.d.strings[0].d.value; + const enumLiteral = new EnumLiteral( + classType.shared.fullName, + classType.shared.name, + entryName, + valueType, + isReprEnum + ); + + const newSymbol = Symbol.createWithType( + SymbolFlags.ClassMember, + ClassType.cloneWithLiteral(classInstanceType, enumLiteral) + ); + + classFields.set(entryName, newSymbol); + } + } + + return classType; +} + +// Performs the "magic" that the Enum metaclass does at runtime when it +// transforms a value into an enum instance. If the specified name isn't +// an enum member, this function returns undefined indicating that the +// Enum metaclass does not transform the value. +// By default, if a type annotation is present, the member is not treated +// as a member of the enumeration, but the Enum metaclass ignores such +// annotations. The typing spec indicates that the use of an annotation is +// illegal, so we need to detect this case and report an error. +export function transformTypeForEnumMember( + evaluator: TypeEvaluator, + classType: ClassType, + memberName: string, + ignoreAnnotation = false, + recursionCount = 0 +): Type | undefined { + if (!ClassType.isEnumClass(classType)) { + return undefined; + } + + if (recursionCount > maxTypeRecursionCount) { + return undefined; + } + recursionCount++; + + // Avoid infinite recursion. 
+ if ( + enumEvalStack.find( + (entry) => ClassType.isSameGenericClass(entry.classType, classType) && entry.memberName === memberName + ) + ) { + return undefined; + } + + enumEvalStack.push({ classType, memberName }); + + try { + const memberInfo = lookUpClassMember(classType, memberName); + if (!memberInfo || !isClass(memberInfo.classType) || !ClassType.isEnumClass(memberInfo.classType)) { + return undefined; + } + + const decls = memberInfo.symbol.getDeclarations(); + if (decls.length < 1) { + return undefined; + } + + const primaryDecl = decls[0]; + + let isMemberOfEnumeration = false; + let isUnpackedTuple = false; + let valueTypeExprNode: ExpressionNode | undefined; + let declaredTypeNode: ExpressionNode | undefined; + let nameNode: NameNode | undefined; + + if (primaryDecl.node.nodeType === ParseNodeType.Name) { + nameNode = primaryDecl.node; + } else if ( + primaryDecl.node.nodeType === ParseNodeType.Function || + primaryDecl.node.nodeType === ParseNodeType.Class + ) { + // Handle the case where a method or class is decorated with @enum.member. + nameNode = primaryDecl.node.d.name; + } else { + return undefined; + } + + if (nameNode.parent?.nodeType === ParseNodeType.Assignment && nameNode.parent.d.leftExpr === nameNode) { + isMemberOfEnumeration = true; + valueTypeExprNode = nameNode.parent.d.rightExpr; + } else if ( + nameNode.parent?.nodeType === ParseNodeType.Tuple && + nameNode.parent.parent?.nodeType === ParseNodeType.Assignment + ) { + isMemberOfEnumeration = true; + isUnpackedTuple = true; + valueTypeExprNode = nameNode.parent.parent.d.rightExpr; + } else if ( + nameNode.parent?.nodeType === ParseNodeType.TypeAnnotation && + nameNode.parent.d.valueExpr === nameNode + ) { + if (ignoreAnnotation) { + isMemberOfEnumeration = true; + } + declaredTypeNode = nameNode.parent.d.annotation; + } + + // The spec specifically excludes names that start and end with a single underscore. + // This also includes dunder names. 
+ if (isSingleDunderName(memberName)) { + return undefined; + } + + // Specifically exclude "value" and "name". These are reserved by the enum metaclass. + if (memberName === 'name' || memberName === 'value') { + return undefined; + } + + const declaredType = declaredTypeNode ? evaluator.getTypeOfAnnotation(declaredTypeNode) : undefined; + let assignedType: Type | undefined; + + if (valueTypeExprNode) { + const evalFlags = getFileInfo(valueTypeExprNode).isStubFile ? EvalFlags.ConvertEllipsisToAny : undefined; + assignedType = evaluator.getTypeOfExpression(valueTypeExprNode, evalFlags).type; + } + + // Handle aliases to other enum members within the same enum. + if (valueTypeExprNode?.nodeType === ParseNodeType.Name && valueTypeExprNode.d.value !== memberName) { + const aliasedEnumType = transformTypeForEnumMember( + evaluator, + classType, + valueTypeExprNode.d.value, + /* ignoreAnnotation */ false, + recursionCount + ); + + if ( + aliasedEnumType && + isClassInstance(aliasedEnumType) && + ClassType.isSameGenericClass(aliasedEnumType, ClassType.cloneAsInstance(memberInfo.classType)) && + aliasedEnumType.priv.literalValue !== undefined + ) { + return aliasedEnumType; + } + } + + if (primaryDecl.node.nodeType === ParseNodeType.Function) { + const functionTypeInfo = evaluator.getTypeOfFunction(primaryDecl.node); + if (functionTypeInfo) { + assignedType = functionTypeInfo.decoratedType; + } + } else if (primaryDecl.node.nodeType === ParseNodeType.Class) { + const classTypeInfo = evaluator.getTypeOfClass(primaryDecl.node); + if (classTypeInfo) { + assignedType = classTypeInfo.decoratedType; + + // If the class is not marked as a member or a non-member, the behavior + // depends on the version of Python. In versions prior to 3.13, classes + // are treated as members. 
+ if (isInstantiableClass(assignedType)) { + const fileInfo = getFileInfo(primaryDecl.node); + isMemberOfEnumeration = PythonVersion.isLessThan( + fileInfo.executionEnvironment.pythonVersion, + pythonVersion3_13 + ); + } + } + } + + let valueType = declaredType ?? assignedType ?? UnknownType.create(); + + // If the LHS is an unpacked tuple, we need to handle this as + // a special case. + if (isUnpackedTuple) { + valueType = + evaluator.getTypeOfIterator( + { type: valueType }, + /* isAsync */ false, + nameNode, + /* emitNotIterableError */ false + )?.type ?? UnknownType.create(); + } + + // The spec excludes descriptors. + if (isClassInstance(valueType) && ClassType.getSymbolTable(valueType).get('__get__')) { + return undefined; + } + + // The spec excludes private (mangled) names. + if (isPrivateName(memberName)) { + return undefined; + } + + // The enum spec doesn't explicitly specify this, but it + // appears that callables are excluded. + if (!findSubtype(valueType, (subtype) => !isFunctionOrOverloaded(subtype))) { + return undefined; + } + + if ( + !assignedType && + nameNode.parent?.nodeType === ParseNodeType.Assignment && + nameNode.parent.d.leftExpr === nameNode + ) { + assignedType = evaluator.getTypeOfExpression( + nameNode.parent.d.rightExpr, + /* flags */ undefined, + makeInferenceContext(declaredType) + ).type; + } + + // Handle the Python 3.11 "enum.member()" and "enum.nonmember()" features. + if (assignedType && isClassInstance(assignedType) && ClassType.isBuiltIn(assignedType)) { + if (assignedType.shared.fullName === 'enum.nonmember') { + const nonMemberType = + assignedType.priv.typeArgs && assignedType.priv.typeArgs.length > 0 + ? assignedType.priv.typeArgs[0] + : UnknownType.create(); + + // If the type of the nonmember is declared and the assigned value has + // a compatible type, use the declared type. 
+ if (declaredType && evaluator.assignType(declaredType, nonMemberType)) { + return declaredType; + } + + return nonMemberType; + } + + if (assignedType.shared.fullName === 'enum.member') { + valueType = + assignedType.priv.typeArgs && assignedType.priv.typeArgs.length > 0 + ? assignedType.priv.typeArgs[0] + : UnknownType.create(); + isMemberOfEnumeration = true; + } + } + + if (!isMemberOfEnumeration) { + return undefined; + } + + const enumLiteral = new EnumLiteral( + memberInfo.classType.shared.fullName, + memberInfo.classType.shared.name, + memberName, + valueType, + isReprEnumClass(classType) + ); + + return ClassType.cloneAsInstance(ClassType.cloneWithLiteral(memberInfo.classType, enumLiteral)); + } finally { + enumEvalStack.pop(); + } +} + +export function isDeclInEnumClass(evaluator: TypeEvaluator, decl: VariableDeclaration): boolean { + const classNode = getEnclosingClass(decl.node, /* stopAtFunction */ true); + if (!classNode) { + return false; + } + + const classInfo = evaluator.getTypeOfClass(classNode); + if (!classInfo) { + return false; + } + + return ClassType.isEnumClass(classInfo.classType); +} + +export function getEnumDeclaredValueType( + evaluator: TypeEvaluator, + classType: ClassType, + declaredTypesOnly = false +): Type | undefined { + // See if there is a declared type for "_value_". + let valueType: Type | undefined; + + const declaredValueMember = lookUpClassMember( + classType, + '_value_', + declaredTypesOnly ? MemberAccessFlags.DeclaredTypesOnly : MemberAccessFlags.Default + ); + + // If the declared type comes from the 'Enum' base class, ignore it + // because it will be "Any", which isn't useful to us here. 
+ if ( + declaredValueMember && + declaredValueMember.classType && + isClass(declaredValueMember.classType) && + !ClassType.isBuiltIn(declaredValueMember.classType, 'Enum') + ) { + valueType = evaluator.getTypeOfMember(declaredValueMember); + } + + return valueType; +} + +export function getTypeOfEnumMember( + evaluator: TypeEvaluator, + errorNode: ParseNode, + classType: ClassType, + memberName: string, + isIncomplete: boolean +): TypeResult | undefined { + if (!ClassType.isEnumClass(classType)) { + return undefined; + } + + const type = transformTypeForEnumMember(evaluator, classType, memberName); + if (type) { + return { type, isIncomplete }; + } + + if (TypeBase.isInstantiable(classType)) { + return undefined; + } + + // Handle the special case of 'name' and 'value' members within an enum. + const literalValue = classType.priv.literalValue; + + if (memberName === 'name' || memberName === '_name_') { + // Does the class explicitly override this member? Or it it using the + // standard behavior provided by the "Enum" class? + const memberInfo = lookUpClassMember(classType, memberName); + if (memberInfo && isClass(memberInfo.classType) && !ClassType.isBuiltIn(memberInfo.classType, 'Enum')) { + return undefined; + } + + const strClass = evaluator.getBuiltInType(errorNode, 'str'); + if (!isInstantiableClass(strClass)) { + return undefined; + } + + const makeNameType = (value: EnumLiteral) => { + return ClassType.cloneAsInstance(ClassType.cloneWithLiteral(strClass, value.itemName)); + }; + + if (literalValue) { + assert(literalValue instanceof EnumLiteral); + return { type: makeNameType(literalValue), isIncomplete }; + } + + // The type wasn't associated with a particular enum literal, so return + // a union of all possible enum literals. 
+ const literalValues = enumerateLiteralsForType(evaluator, classType); + if (literalValues && literalValues.length > 0) { + return { + type: combineTypes( + literalValues.map((literalClass) => { + const literalValue = literalClass.priv.literalValue; + assert(literalValue instanceof EnumLiteral); + return makeNameType(literalValue); + }) + ), + isIncomplete, + }; + } + } + + // See if there is a declared type for "_value_". + const valueType = getEnumDeclaredValueType(evaluator, classType); + + if (memberName === 'value' || memberName === '_value_') { + // Does the class explicitly override this member? Or it it using the + // standard behavior provided by the "Enum" class and other built-in + // subclasses like "StrEnum" and "IntEnum"? + const memberInfo = lookUpClassMember(classType, memberName); + if (memberInfo && isClass(memberInfo.classType) && !ClassType.isBuiltIn(memberInfo.classType)) { + return undefined; + } + + // If the enum class has a custom metaclass, it may implement some + // "magic" that computes different values for the "_value_" attribute. + // This occurs, for example, in the django TextChoices class. If we + // detect a custom metaclass, we'll use the declared type of _value_ + // if it is declared. + const metaclass = classType.shared.effectiveMetaclass; + if (metaclass && isClass(metaclass) && !ClassType.isBuiltIn(metaclass)) { + return { type: valueType ?? AnyType.create(), isIncomplete }; + } + + // If the enum class has a custom __new__ or __init__ method, + // it may implement some magic that computes different values for + // the "_value_" attribute. If we see a customer __new__ or __init__, + // we'll assume the value type is what we computed above, or Any. 
+ const newMember = lookUpClassMember(classType, '__new__', MemberAccessFlags.SkipObjectBaseClass); + const initMember = lookUpClassMember(classType, '__init__', MemberAccessFlags.SkipObjectBaseClass); + + if (newMember && isClass(newMember.classType) && !ClassType.isBuiltIn(newMember.classType)) { + return { type: valueType ?? AnyType.create(), isIncomplete }; + } + + if (initMember && isClass(initMember.classType) && !ClassType.isBuiltIn(initMember.classType)) { + return { type: valueType ?? AnyType.create(), isIncomplete }; + } + + // There were no explicit assignments to the "_value_" attribute, so we can + // assume that the values are assigned directly to the "_value_" by + // the EnumMeta metaclass. + if (literalValue) { + assert(literalValue instanceof EnumLiteral); + + // If there is no known value type for this literal value, + // return undefined. This will cause the caller to fall back + // on the definition of "_value_" within the class definition + // (if present). + if (isAny(literalValue.itemType)) { + return valueType ? { type: valueType, isIncomplete } : undefined; + } + + return { type: literalValue.itemType, isIncomplete }; + } + + // The type wasn't associated with a particular enum literal, so return + // a union of all possible enum literals. 
+ const literalValues = enumerateLiteralsForType(evaluator, classType); + if (literalValues && literalValues.length > 0) { + return { + type: combineTypes( + literalValues.map((literalClass) => { + const literalValue = literalClass.priv.literalValue; + assert(literalValue instanceof EnumLiteral); + return literalValue.itemType; + }) + ), + isIncomplete, + }; + } + } + + return undefined; +} + +export function getEnumAutoValueType(evaluator: TypeEvaluator, node: ExpressionNode) { + const containingClassNode = getEnclosingClass(node); + + if (containingClassNode) { + const classTypeInfo = evaluator.getTypeOfClass(containingClassNode); + if (classTypeInfo) { + const memberInfo = evaluator.getTypeOfBoundMember( + node, + ClassType.cloneAsInstance(classTypeInfo.classType), + '_generate_next_value_' + ); + + // Did we find a custom _generate_next_value_ sunder override? + // Ignore if this comes from Enum because it is declared as + // returning an "Any" type in the typeshed stubs. + if ( + memberInfo && + !memberInfo.typeErrors && + isFunction(memberInfo.type) && + memberInfo.classType && + isClass(memberInfo.classType) && + !ClassType.isBuiltIn(memberInfo.classType, 'Enum') + ) { + if (memberInfo.type.shared.declaredReturnType) { + return memberInfo.type.shared.declaredReturnType; + } + } + } + } + + return evaluator.getBuiltInObject(node, 'int'); +} + +function isReprEnumClass(enumClass: ClassType) { + return enumClass.shared.mro.some((mroClass) => isClass(mroClass) && ClassType.isBuiltIn(mroClass, 'ReprEnum')); +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/functionTransform.ts b/python-parser/packages/pyright-internal/src/analyzer/functionTransform.ts new file mode 100644 index 00000000..925622e8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/functionTransform.ts @@ -0,0 +1,139 @@ +/* + * functionTransform.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * Author: Eric Traut + * + * Code that transforms the return result of a function. + * + */ + +import { DiagnosticRule } from '../common/diagnosticRules'; +import { LocMessage } from '../localization/localize'; +import { ExpressionNode, ParamCategory } from '../parser/parseNodes'; +import { Symbol, SymbolFlags } from './symbol'; +import { Arg, FunctionResult, TypeEvaluator } from './typeEvaluatorTypes'; +import { + ClassType, + FunctionParam, + FunctionParamFlags, + FunctionType, + isClassInstance, + isFunction, + isInstantiableClass, + OverloadedType, + Type, +} from './types'; +import { ClassMember, lookUpObjectMember, MemberAccessFlags, synthesizeTypeVarForSelfCls } from './typeUtils'; + +export function applyFunctionTransform( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: Arg[], + functionType: FunctionType | OverloadedType, + result: FunctionResult +): FunctionResult { + if (isFunction(functionType)) { + if (functionType.shared.fullName === 'functools.total_ordering') { + return applyTotalOrderingTransform(evaluator, errorNode, argList, result); + } + } + + // By default, return the result unmodified. + return result; +} + +function applyTotalOrderingTransform( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: Arg[], + result: FunctionResult +) { + if (argList.length !== 1) { + return result; + } + + // This function is meant to apply to a concrete instantiable class. + const classType = argList[0].typeResult?.type; + if (!classType || !isInstantiableClass(classType) || classType.priv.includeSubclasses) { + return result; + } + + const orderingMethods = ['__lt__', '__le__', '__gt__', '__ge__']; + const instanceType = ClassType.cloneAsInstance(classType); + + // Verify that the class has at least one of the required functions. 
+ let firstMemberFound: ClassMember | undefined; + const missingMethods = orderingMethods.filter((methodName) => { + const memberInfo = lookUpObjectMember(instanceType, methodName, MemberAccessFlags.SkipInstanceMembers); + if (memberInfo && !firstMemberFound) { + firstMemberFound = memberInfo; + } + return !memberInfo; + }); + + if (!firstMemberFound) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.totalOrderingMissingMethod(), + errorNode + ); + return result; + } + + // Determine what type to use for the parameter corresponding to + // the second operand. This will be taken from the existing method. + let operandType: Type | undefined; + + const firstMemberType = evaluator.getTypeOfMember(firstMemberFound); + if ( + isFunction(firstMemberType) && + firstMemberType.shared.parameters.length >= 2 && + FunctionParam.isTypeDeclared(firstMemberType.shared.parameters[1]) + ) { + operandType = FunctionType.getParamType(firstMemberType, 1); + } + + // If there was no provided operand type, fall back to object. + if (!operandType) { + const objectType = evaluator.getBuiltInObject(errorNode, 'object'); + if (!objectType || !isClassInstance(objectType)) { + return result; + } + operandType = objectType; + } + + const boolType = evaluator.getBuiltInObject(errorNode, 'bool'); + if (!boolType || !isClassInstance(boolType)) { + return result; + } + + const selfParam = FunctionParam.create( + ParamCategory.Simple, + synthesizeTypeVarForSelfCls(classType, /* isClsParam */ false), + FunctionParamFlags.TypeDeclared, + 'self' + ); + + const objParam = FunctionParam.create( + ParamCategory.Simple, + operandType, + FunctionParamFlags.TypeDeclared, + '__value' + ); + + // Add the missing members to the class's symbol table. 
+ missingMethods.forEach((methodName) => { + const methodToAdd = FunctionType.createSynthesizedInstance(methodName); + FunctionType.addParam(methodToAdd, selfParam); + FunctionType.addParam(methodToAdd, objParam); + methodToAdd.shared.declaredReturnType = boolType; + + ClassType.getSymbolTable(classType).set( + methodName, + Symbol.createWithType(SymbolFlags.ClassMember, methodToAdd) + ); + }); + + return result; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/importLogger.ts b/python-parser/packages/pyright-internal/src/analyzer/importLogger.ts new file mode 100644 index 00000000..413e2ff0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/importLogger.ts @@ -0,0 +1,20 @@ +/* + * importLogging.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Utilities for logging information about import resolution failures. + */ + +export class ImportLogger { + private _logs: string[] = []; + + log(message: string) { + this._logs.push(message); + } + + getLogs() { + return this._logs; + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/importResolver.ts b/python-parser/packages/pyright-internal/src/analyzer/importResolver.ts new file mode 100644 index 00000000..cb21e59d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/importResolver.ts @@ -0,0 +1,2866 @@ +/* + * importResolver.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides the logic for resolving imports according to the + * runtime rules of Python. 
+ */ + +import type { Dirent } from 'fs'; + +import { appendArray, flatten, getMapValues, getOrAdd } from '../common/collectionUtils'; +import { ConfigOptions, ExecutionEnvironment, matchFileSpecs } from '../common/configOptions'; +import { Host } from '../common/host'; +import { stubsSuffix } from '../common/pathConsts'; +import { getFileExtension, stripFileExtension } from '../common/pathUtils'; +import { PythonVersion, pythonVersion3_0 } from '../common/pythonVersion'; +import { ServiceProvider } from '../common/serviceProvider'; +import * as StringUtils from '../common/stringUtils'; +import { equateStringsCaseInsensitive } from '../common/stringUtils'; +import { Uri } from '../common/uri/uri'; +import { getFileSystemEntriesFromDirEntries, isDirectory, isFile, tryRealpath, tryStat } from '../common/uri/uriUtils'; +import { Tokenizer } from '../parser/tokenizer'; +import { ImportLogger } from './importLogger'; +import { ImplicitImport, ImportResult, ImportType } from './importResult'; +import { getDirectoryLeadingDotsPointsTo } from './importStatementUtils'; +import { ImportPath, ParentDirectoryCache } from './parentDirectoryCache'; +import { PyTypedInfo, getPyTypedInfoForPyTypedFile } from './pyTypedUtils'; +import * as PythonPathUtils from './pythonPathUtils'; +import * as SymbolNameUtils from './symbolNameUtils'; +import { isDunderName } from './symbolNameUtils'; + +export interface ImportedModuleDescriptor { + leadingDots: number; + nameParts: string[]; + hasTrailingDot?: boolean | undefined; + importedSymbols: Set | undefined; +} + +export interface ModuleNameAndType { + moduleName: string; + importType: ImportType; + isLocalTypingsFile: boolean; +} + +export interface ModuleImportInfo extends ModuleNameAndType { + isTypeshedFile: boolean; + isThirdPartyPyTypedPresent: boolean; +} + +export interface ModuleNameInfoFromPath { + moduleName: string; + containsInvalidCharacters?: boolean; +} + +export function createImportedModuleDescriptor(moduleName: string): 
ImportedModuleDescriptor { + if (moduleName.length === 0) { + return { leadingDots: 0, nameParts: [], importedSymbols: new Set() }; + } + + let startIndex = 0; + let leadingDots = 0; + for (; startIndex < moduleName.length; startIndex++) { + if (moduleName[startIndex] !== '.') { + break; + } + + leadingDots++; + } + + return { + leadingDots, + nameParts: moduleName.slice(startIndex).split('.'), + importedSymbols: new Set(), + }; +} + +type CachedImportResults = Map; +interface SupportedVersionInfo { + min: PythonVersion; + max?: PythonVersion | undefined; + unsupportedPlatforms?: string[]; + supportedPlatforms?: string[]; +} + +interface CachedDir { + entries: Map; + + // A set of names in this directory (either subdirectories or + // file names without extensions) that could potentially resolve + // a module import. This is useful for quickly checking whether a full search should be done. + resolvableNames: Set; +} + +const supportedNativeLibExtensions = ['.pyd', '.so', '.dylib']; +export const supportedSourceFileExtensions = ['.py', '.pyi']; +export const supportedFileExtensions = [...supportedSourceFileExtensions, ...supportedNativeLibExtensions]; + +// Should we allow partial resolution for third-party packages? Some use tricks +// to populate their package namespaces, so we might be able to partially resolve +// a multi - part import(e.g. "a.b.c") but not fully resolve it. If this is set to +// false, we will have some false positives. If it is set to true, we won't report +// errors when these partial-resolutions fail. 
+const allowPartialResolutionForThirdPartyPackages = false; + +export class ImportResolver { + private _cachedPythonSearchPaths: { paths: Uri[]; failureInfo?: ImportLogger } | undefined; + private _cachedImportResults = new Map(); + private _cachedModuleNameResults = new Map>(); + private _cachedTypeshedRoot: Uri | undefined; + private _cachedTypeshedStdLibPath: Uri | undefined; + private _cachedTypeshedStdLibModuleVersionInfo: Map | undefined; + private _cachedTypeshedThirdPartyPath: Uri | undefined; + private _cachedTypeshedThirdPartyPackagePaths: Map | undefined; + private _cachedTypeshedThirdPartyPackageRoots: Uri[] | undefined; + private _cachedEntriesForPath = new Map(); + private _cachedFilesForPath = new Map(); + private _cachedDirExistenceForRoot = new Map(); + private _stdlibModules: Set | undefined; + + protected readonly cachedParentImportResults: ParentDirectoryCache; + + constructor(readonly serviceProvider: ServiceProvider, private _configOptions: ConfigOptions, readonly host: Host) { + this.cachedParentImportResults = new ParentDirectoryCache(() => this.getPythonSearchPaths()); + } + + get fileSystem() { + return this.serviceProvider.fs(); + } + + get tmp() { + return this.serviceProvider.tmp(); + } + + get partialStubs() { + return this.serviceProvider.partialStubs(); + } + + static isSupportedImportSourceFile(uri: Uri) { + const fileExtension = uri.lastExtension.toLowerCase(); + return supportedSourceFileExtensions.some((ext) => fileExtension === ext); + } + + static isSupportedImportFile(uri: Uri) { + const fileExtension = uri.lastExtension.toLowerCase(); + return supportedFileExtensions.some((ext) => fileExtension === ext); + } + + invalidateCache() { + this._cachedImportResults = new Map(); + this._cachedModuleNameResults = new Map>(); + this.cachedParentImportResults.reset(); + this._stdlibModules = undefined; + + this._invalidateFileSystemCache(); + + this.partialStubs?.clearPartialStubs(); + } + + // Resolves the import and returns the path 
if it exists, otherwise + // returns undefined. + resolveImport( + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor + ): ImportResult { + // Wrap internal call to resolveImportInternal() to prevent calling any + // child class version of resolveImport(). + return this.resolveImportInternal(sourceFileUri, execEnv, moduleDescriptor); + } + + getCompletionSuggestions( + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor + ) { + const suggestions = this._getCompletionSuggestionsStrict(sourceFileUri, execEnv, moduleDescriptor); + + // We only do parent import resolution for absolute path. + if (moduleDescriptor.leadingDots > 0) { + return suggestions; + } + + const root = getParentImportResolutionRoot(sourceFileUri, execEnv.root); + const origin = sourceFileUri.getDirectory(); + + let current: Uri | undefined = origin; + while (this._shouldWalkUp(current, root, execEnv) && current) { + this._getCompletionSuggestionsAbsolute( + sourceFileUri, + execEnv, + current, + moduleDescriptor, + suggestions, + /* strictOnly */ false + ); + + current = this._tryWalkUp(current); + } + + return suggestions; + } + + getConfigOptions() { + return this._configOptions; + } + + setConfigOptions(configOptions: ConfigOptions): void { + this._configOptions = configOptions; + this.invalidateCache(); + } + + // Returns the implementation file(s) for the given stub file. + getSourceFilesFromStub(stubFileUri: Uri, execEnv: ExecutionEnvironment, _mapCompiled: boolean): Uri[] { + const sourceFileUris: Uri[] = []; + + // When ImportResolver resolves an import to a stub file, a second resolve is done + // ignoring stub files, which gives us an approximation of where the implementation + // for that stub is located. 
+ this._cachedImportResults.forEach((map) => { + map.forEach((result) => { + if (result.isStubFile && result.isImportFound && result.nonStubImportResult) { + if (result.resolvedUris[result.resolvedUris.length - 1].equals(stubFileUri)) { + if (result.nonStubImportResult.isImportFound) { + const nonEmptyUri = + result.nonStubImportResult.resolvedUris[ + result.nonStubImportResult.resolvedUris.length - 1 + ]; + + if (nonEmptyUri.hasExtension('.py') || nonEmptyUri.hasExtension('.pyi')) { + // We allow pyi in case there are multiple pyi for a compiled module such as + // numpy.random.mtrand + sourceFileUris.push(nonEmptyUri); + } + } + } + } + }); + }); + + // We haven't seen an import of that stub, attempt to find the source + // in some other ways. + if (sourceFileUris.length === 0) { + // Simple case where the stub and source files are next to each other. + const sourceFileUri = stubFileUri.replaceExtension('.py'); + if (this.dirExistsCached(sourceFileUri)) { + sourceFileUris.push(sourceFileUri); + } + } + + if (sourceFileUris.length === 0) { + // The stub and the source file may have the same name, but be located + // in different folder hierarchies. + // Example: + // \package\module.pyi + // \package\module.py + // We get the relative path(s) of the stub to its import root(s), + // in theory there can be more than one, then look for source + // files in all the import roots using the same relative path(s). + const importRoots = this.getImportRoots(execEnv); + + const relativeStubPaths: string[] = []; + for (const importRootUri of importRoots) { + if (stubFileUri.isChild(importRootUri)) { + const parts = Array.from(importRootUri.getRelativePathComponents(stubFileUri)); + + if (parts.length >= 1) { + // Handle the case where the symbol was resolved to a stubs package + // rather than the real package. We'll strip off the "-stubs" suffix + // in this case. 
+ if (parts[0].endsWith(stubsSuffix)) { + parts[0] = parts[0].slice(0, parts[0].length - stubsSuffix.length); + } + + relativeStubPaths.push(parts.join('/')); + } + } + } + + for (const relativeStubPath of relativeStubPaths) { + for (const importRootUri of importRoots) { + const absoluteStubPath = importRootUri.resolvePaths(relativeStubPath); + let absoluteSourcePath = absoluteStubPath.replaceExtension('.py'); + if (this.fileExistsCached(absoluteSourcePath)) { + sourceFileUris.push(absoluteSourcePath); + } else { + const filePathWithoutExtension = absoluteSourcePath.stripExtension(); + + if (filePathWithoutExtension.pathEndsWith('__init__')) { + // Did not match: /package/__init__.py + // Try equivalent: /package.py + absoluteSourcePath = filePathWithoutExtension.getDirectory().packageUri; + if (this.fileExistsCached(absoluteSourcePath)) { + sourceFileUris.push(absoluteSourcePath); + } + } else { + // Did not match: /package.py + // Try equivalent: /package/__init__.py + absoluteSourcePath = filePathWithoutExtension.initPyUri; + if (this.fileExistsCached(absoluteSourcePath)) { + sourceFileUris.push(absoluteSourcePath); + } + } + } + } + } + } + + return sourceFileUris; + } + + // Returns the module name (of the form X.Y.Z) that needs to be imported + // from the current context to access the module with the specified file path. + // In a sense, it's performing the inverse of resolveImport. + getModuleNameForImport( + fileUri: Uri, + execEnv: ExecutionEnvironment, + allowInvalidModuleName = false, + detectPyTyped = false + ) { + // Cache results of the reverse of resolveImport as we cache resolveImport. 
+ const cache = getOrAdd( + this._cachedModuleNameResults, + execEnv.root?.key, + () => new Map() + ); + const key = `${allowInvalidModuleName}.${detectPyTyped}.${fileUri.key}`; + return getOrAdd(cache, key, () => + this._getModuleNameForImport(fileUri, execEnv, allowInvalidModuleName, detectPyTyped) + ); + } + + getTypeshedStdLibPath(execEnv: ExecutionEnvironment) { + return this._getStdlibTypeshedPath( + this._configOptions.typeshedPath, + execEnv.pythonVersion, + execEnv.pythonPlatform, + /* logger */ undefined + ); + } + + getTypeshedThirdPartyPath(execEnv: ExecutionEnvironment) { + return this._getThirdPartyTypeshedPath(this._configOptions.typeshedPath); + } + + isStdlibModule(module: ImportedModuleDescriptor, execEnv: ExecutionEnvironment): boolean { + if (!this._stdlibModules) { + this._stdlibModules = this._buildStdlibCache(this.getTypeshedStdLibPath(execEnv), execEnv); + } + + return this._stdlibModules.has(module.nameParts.join('.')); + } + + getImportRoots(execEnv: ExecutionEnvironment, forLogging = false) { + const roots = []; + + const stdTypeshed = this._getStdlibTypeshedPath( + this._configOptions.typeshedPath, + execEnv.pythonVersion, + execEnv.pythonPlatform + ); + if (stdTypeshed) { + roots.push(stdTypeshed); + } + + // The "default" workspace has a root-less execution environment; ignore it. + if (execEnv.root) { + roots.push(execEnv.root); + } + + appendArray(roots, execEnv.extraPaths); + + if (this._configOptions.stubPath) { + roots.push(this._configOptions.stubPath); + } + + if (forLogging) { + // There's one path for each third party package, which blows up logging. + // Just get the root directly and show it with `...` to indicate that this + // is where the third party folder is in the roots. 
+ const thirdPartyRoot = this._getThirdPartyTypeshedPath(this._configOptions.typeshedPath); + if (thirdPartyRoot) { + roots.push(thirdPartyRoot.resolvePaths('...')); + } + } else { + const thirdPartyPaths = this._getThirdPartyTypeshedPackageRoots(); + appendArray(roots, thirdPartyPaths); + } + + const typeshedPathEx = this.getTypeshedPathEx(execEnv); + if (typeshedPathEx) { + roots.push(typeshedPathEx); + } + + const pythonSearchPaths = this.getPythonSearchPaths(); + if (pythonSearchPaths.length > 0) { + appendArray(roots, pythonSearchPaths); + } + + return roots; + } + + ensurePartialStubPackages(execEnv: ExecutionEnvironment) { + if (!this.partialStubs) { + return false; + } + + if (this.partialStubs.isPartialStubPackagesScanned(execEnv)) { + return false; + } + + const ps = this.partialStubs; + const paths: Uri[] = []; + const typeshedPathEx = this.getTypeshedPathEx(execEnv); + + // Add paths to search stub packages. + addPaths(this._configOptions.stubPath); + addPaths(execEnv.root ?? this._configOptions.projectRoot); + execEnv.extraPaths.forEach((p) => addPaths(p)); + addPaths(typeshedPathEx); + + this.getPythonSearchPaths().forEach((p) => addPaths(p)); + + this.partialStubs.processPartialStubPackages(paths, this.getImportRoots(execEnv), typeshedPathEx); + this._invalidateFileSystemCache(); + return true; + + function addPaths(path?: Uri) { + if (!path || ps.isPathScanned(path)) { + return; + } + + paths.push(path); + } + } + + getPythonSearchPaths(importLogger?: ImportLogger): Uri[] { + // Find the site packages for the configured virtual environment. + if (!this._cachedPythonSearchPaths) { + const paths = ( + PythonPathUtils.findPythonSearchPaths(this.fileSystem, this._configOptions, this.host, importLogger) || + [] + ).map((p) => this.fileSystem.realCasePath(p)); + + // Remove duplicates (yes, it happens). 
+ this._cachedPythonSearchPaths = { paths: Array.from(new Set(paths)), failureInfo: importLogger }; + } + + return this._cachedPythonSearchPaths.paths; + } + + getTypeshedStdlibExcludeList( + customTypeshedPath: Uri | undefined, + pythonVersion: PythonVersion, + pythonPlatform: string | undefined + ): Uri[] { + const typeshedStdlibPath = this._getStdlibTypeshedPath( + customTypeshedPath, + pythonVersion, + pythonPlatform, + /* importLogger */ undefined + ); + const excludes: Uri[] = []; + + if (!typeshedStdlibPath) { + return excludes; + } + + if (!this._cachedTypeshedStdLibModuleVersionInfo) { + this._cachedTypeshedStdLibModuleVersionInfo = this._readTypeshedStdLibVersions(customTypeshedPath); + } + + this._cachedTypeshedStdLibModuleVersionInfo.forEach((versionInfo, moduleName) => { + let shouldExcludeModule = false; + + if (versionInfo.max !== undefined && PythonVersion.isGreaterThan(pythonVersion, versionInfo.max)) { + shouldExcludeModule = true; + } + + if (pythonPlatform !== undefined) { + const pythonPlatformLower = pythonPlatform.toLowerCase(); + + // If there are supported platforms listed, and we are not using one + // of those supported platforms, exclude it. + if (versionInfo.supportedPlatforms) { + if (versionInfo.supportedPlatforms.every((p) => p.toLowerCase() !== pythonPlatformLower)) { + shouldExcludeModule = true; + } + } + + // If there are unsupported platforms listed, see if we're using one of them. + if (versionInfo.unsupportedPlatforms) { + if (versionInfo.unsupportedPlatforms.some((p) => p.toLowerCase() === pythonPlatformLower)) { + shouldExcludeModule = true; + } + } + } + + if (shouldExcludeModule) { + // Add excludes for both the ".pyi" file and the directory that contains it + // (in case it's using a "__init__.pyi" file). 
+ const moduleDirPath = typeshedStdlibPath.combinePaths(...moduleName.split('.')); + excludes.push(moduleDirPath); + + const moduleFilePath = moduleDirPath.replaceExtension('.pyi'); + excludes.push(moduleFilePath); + } + }); + + return excludes; + } + + // Intended to be overridden by subclasses to provide additional stub + // path capabilities. Return undefined if no extra stub path were found. + getTypeshedPathEx(execEnv: ExecutionEnvironment, importLogger?: ImportLogger): Uri | undefined { + return undefined; + } + + protected readdirEntriesCached(uri: Uri): CachedDir { + const cachedValue = this._cachedEntriesForPath.get(uri.key); + if (cachedValue) { + return cachedValue; + } + + const newCachedDir: CachedDir = { + entries: new Map(), + resolvableNames: new Set(), + }; + try { + const entries = this.fileSystem.readdirEntriesSync(uri); + entries.forEach((entry) => { + newCachedDir.entries.set(entry.name, entry); + let isFile = entry.isFile(); + let isDirectory = entry.isDirectory(); + if (entry.isSymbolicLink()) { + const stat = tryStat(this.fileSystem, uri.combinePaths(entry.name)); + isFile = !!stat?.isFile(); + isDirectory = !!stat?.isDirectory(); + } + const resolvableName = isFile + ? stripFileExtension(entry.name, /* multiDotExtension */ true) + : entry.name; + newCachedDir.resolvableNames.add(resolvableName); + + if (isDirectory && entry.name.endsWith(stubsSuffix)) { + newCachedDir.resolvableNames.add( + resolvableName.substring(0, resolvableName.length - stubsSuffix.length) + ); + } + }); + } catch { + // Swallow error + } + + // Populate cache. + this._cachedEntriesForPath.set(uri.key, newCachedDir); + return newCachedDir; + } + + // Resolves the import and returns the path if it exists, otherwise + // returns undefined. 
+ protected resolveImportInternal( + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor + ): ImportResult { + const importName = formatImportName(moduleDescriptor); + const importResult = this._resolveImportStrict(importName, sourceFileUri, execEnv, moduleDescriptor); + + if (importResult.isImportFound || moduleDescriptor.leadingDots > 0) { + return importResult; + } + + // If the import is absolute and no other method works, try resolving the + // absolute in the importing file's directory, then the parent directory, + // and so on, until the import root is reached. + const origin = sourceFileUri.getDirectory(); + + const result = this.cachedParentImportResults.getImportResult(origin, importName, importResult); + if (result) { + // Already ran the parent directory resolution for this import name on this location. + return this.filterImplicitImports(result, moduleDescriptor.importedSymbols); + } + + // Check whether the given file is in the parent directory import resolution cache. + const root = getParentImportResolutionRoot(sourceFileUri, execEnv.root); + if (!this.cachedParentImportResults.checkValidPath(this.fileSystem, sourceFileUri, root)) { + return importResult; + } + + const importLogger = this._configOptions.verboseOutput ? new ImportLogger() : undefined; + importLogger?.log(`Attempting to resolve using local imports: ${importName}`); + + const importPath: ImportPath = { importPath: undefined }; + + // Going up the given folder one by one until we can resolve the import. 
+ let current: Uri | undefined = origin; + while (this._shouldWalkUp(current, root, execEnv) && current) { + const result = this.resolveAbsoluteImport( + sourceFileUri, + current, + execEnv, + moduleDescriptor, + importName, + importLogger, + /* allowPartial */ undefined, + /* allowNativeLib */ undefined, + /* useStubPackage */ false, + /* allowPyi */ true + ); + + this.cachedParentImportResults.checked(current!, importName, importPath); + + if (result?.isImportFound) { + // This will make cache to point to actual path that contains the module we found + importPath.importPath = current; + + this.cachedParentImportResults.add({ + importResult: result, + path: current, + importName, + }); + + return this.filterImplicitImports(result, moduleDescriptor.importedSymbols); + } + + current = this._tryWalkUp(current); + } + + if (current) { + this.cachedParentImportResults.checked(current, importName, importPath); + } + + if (importLogger) { + const console = this.serviceProvider.console(); + importLogger.getLogs().forEach((diag) => console.log(diag)); + } + + return importResult; + } + + protected fileExistsCached(uri: Uri): boolean { + const directory = uri.getDirectory(); + if (directory.equals(uri)) { + // Started at root, so this can't be a file. + return false; + } + const cachedDir = this.readdirEntriesCached(directory); + const entry = cachedDir.entries.get(uri.fileName); + if (entry?.isFile()) { + return true; + } + + if (entry?.isSymbolicLink()) { + const realPath = tryRealpath(this.fileSystem, uri); + if (realPath && this.fileSystem.existsSync(realPath) && isFile(this.fileSystem, realPath)) { + return true; + } + } + + return false; + } + + protected dirExistsCached(uri: Uri): boolean { + const parent = uri.getDirectory(); + if (parent.equals(uri)) { + // Started at root. No entries to read, so have to check ourselves. + let cachedExistence = this._cachedDirExistenceForRoot.get(uri.key); + // Check if the value was in the cache or not. Undefined means it wasn't. 
+ if (cachedExistence === undefined) { + cachedExistence = tryStat(this.fileSystem, uri)?.isDirectory() ?? false; + this._cachedDirExistenceForRoot.set(uri.key, cachedExistence); + } + return cachedExistence; + } + + // Otherwise not a root, so read the entries we have cached to see if + // the directory exists or not. + const cachedDir = this.readdirEntriesCached(parent); + const entry = cachedDir.entries.get(uri.fileName); + if (entry?.isDirectory()) { + return true; + } + + if (entry?.isSymbolicLink()) { + const realPath = tryRealpath(this.fileSystem, uri); + if (realPath && this.fileSystem.existsSync(realPath) && isDirectory(this.fileSystem, realPath)) { + return true; + } + } + + return false; + } + + protected addResultsToCache( + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + importName: string, + importResult: ImportResult, + moduleDescriptor: ImportedModuleDescriptor | undefined, + fromUserFile: boolean + ) { + // If the import is relative, include the source file path in the key. + const relativeSourceFileUri = moduleDescriptor && moduleDescriptor.leadingDots > 0 ? sourceFileUri : undefined; + + getOrAdd(this._cachedImportResults, execEnv.root?.key, () => new Map()).set( + this._getImportCacheKey(relativeSourceFileUri, importName, fromUserFile), + importResult + ); + + return this.filterImplicitImports(importResult, moduleDescriptor?.importedSymbols); + } + + // Follows import resolution algorithm defined in PEP-420: + // https://www.python.org/dev/peps/pep-0420/ + protected resolveAbsoluteImport( + sourceFileUri: Uri | undefined, + rootPath: Uri, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor, + importName: string, + importLogger: ImportLogger | undefined, + allowPartial = false, + allowNativeLib = false, + useStubPackage = false, + allowPyi = true, + lookForPyTyped = false + ): ImportResult | undefined { + // Before we do additional work, see if this directory can possibly + // resolve this import. 
+ if (!this._isPossibleImportDir(rootPath, moduleDescriptor)) { + return undefined; + } + + if (allowPyi && useStubPackage) { + // Look for packaged stubs first. PEP 561 indicates that package authors can ship + // their stubs separately from their package implementation by appending the string + // '-stubs' to its top-level directory name. We'll look there first. + const importResult = this._resolveAbsoluteImport( + rootPath, + execEnv, + moduleDescriptor, + importName, + importLogger, + allowPartial, + /* allowNativeLib */ false, + /* useStubPackage */ true, + /* allowPyi */ true, + /* lookForPyTyped */ true + ); + + // We found fully typed stub packages. + if (importResult.packageDirectory) { + // If this is a namespace package that wasn't resolved, assume that + // it's a partial stub package and continue looking for a real package. + if (!importResult.isNamespacePackage || importResult.isImportFound) { + return importResult; + } + } + } + + return this._resolveAbsoluteImport( + rootPath, + execEnv, + moduleDescriptor, + importName, + importLogger, + allowPartial, + allowNativeLib, + /* useStubPackage */ false, + allowPyi, + lookForPyTyped + ); + } + + // Intended to be overridden by subclasses to provide additional stub + // resolving capabilities. Return undefined if no stubs were found for + // this import. + protected resolveImportEx( + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor, + importName: string, + importLogger?: ImportLogger, + allowPyi = true + ): ImportResult | undefined { + return undefined; + } + + // Intended to be overridden by subclasses to provide additional stub + // resolving capabilities for native (compiled) modules. Returns undefined + // if no stubs were found for this import.
+ protected resolveNativeImportEx( + libraryFileUri: Uri, + importName: string, + importLogger?: ImportLogger + ): Uri | undefined { + return undefined; + } + + protected getNativeModuleName(uri: Uri): string | undefined { + const fileExtension = uri.lastExtension.toLowerCase(); + if (_isNativeModuleFileExtension(fileExtension)) { + return stripFileExtension(uri.fileName, /* multiDotExtension */ true); + } + return undefined; + } + + // Potentially modifies the ImportResult by removing some or all of the + // implicit import entries. Only the imported symbols should be included. + protected filterImplicitImports( + importResult: ImportResult, + importedSymbols: Set<string> | undefined + ): ImportResult { + if (importedSymbols === undefined) { + const newImportResult = Object.assign({}, importResult); + newImportResult.filteredImplicitImports = undefined; + return newImportResult; + } + + if (importedSymbols === undefined || importedSymbols.size === 0) { + return importResult; + } + + if (importResult.implicitImports === undefined || importResult.implicitImports.size === 0) { + return importResult; + } + + const filteredImplicitImports = new Map(); + importResult.implicitImports.forEach((implicitImport) => { + if (importedSymbols.has(implicitImport.name)) { + filteredImplicitImports.set(implicitImport.name, implicitImport); + } + }); + + if (filteredImplicitImports.size === importResult.implicitImports.size) { + return importResult; + } + + const newImportResult = Object.assign({}, importResult); + newImportResult.filteredImplicitImports = filteredImplicitImports; + return newImportResult; + } + + protected findImplicitImports( + importingModuleName: string, + dirPath: Uri, + exclusions: Uri[] + ): Map<string, ImplicitImport> | undefined { + const implicitImportMap = new Map(); + + // Enumerate all of the files and directories in the path, expanding links.
+ const entries = getFileSystemEntriesFromDirEntries( + this.readdirEntriesCached(dirPath).entries.values(), + this.fileSystem, + dirPath + ); + + // Add implicit file-based modules. + for (const filePath of entries.files) { + const fileExt = filePath.lastExtension; + let strippedFileName: string; + let isNativeLib = false; + + if (fileExt === '.py' || fileExt === '.pyi') { + strippedFileName = stripFileExtension(filePath.fileName); + } else if ( + _isNativeModuleFileExtension(fileExt) && + !this.fileExistsCached(filePath.packageUri) && + !this.fileExistsCached(filePath.packageStubUri) + ) { + // Native module. + strippedFileName = filePath.stripAllExtensions().fileName; + isNativeLib = true; + } else { + continue; + } + + if (!exclusions.find((exclusion) => exclusion.equals(filePath))) { + const implicitImport: ImplicitImport = { + isStubFile: filePath.hasExtension('.pyi'), + isNativeLib, + name: strippedFileName, + uri: filePath, + }; + + // Always prefer stub files over non-stub files. + const entry = implicitImportMap.get(implicitImport.name); + if (!entry || !entry.isStubFile) { + // Try resolving native lib to a custom stub. + if (isNativeLib) { + const nativeLibPath = filePath; + const nativeStubPath = this.resolveNativeImportEx( + nativeLibPath, + `${importingModuleName}.${strippedFileName}` + ); + if (nativeStubPath) { + implicitImport.uri = nativeStubPath; + implicitImport.isNativeLib = false; + } + } + implicitImportMap.set(implicitImport.name, implicitImport); + } + } + } + + // Add implicit directory-based modules.
+ for (const dirPath of entries.directories) { + const pyFilePath = dirPath.initPyUri; + const pyiFilePath = dirPath.initPyiUri; + let isStubFile = false; + let path: Uri | undefined; + + if (this.fileExistsCached(pyiFilePath)) { + isStubFile = true; + path = pyiFilePath; + } else if (this.fileExistsCached(pyFilePath)) { + path = pyFilePath; + } + + if (path) { + if (!exclusions.find((exclusion) => exclusion.equals(path))) { + const implicitImport: ImplicitImport = { + isStubFile, + isNativeLib: false, + name: dirPath.fileName, + uri: path, + pyTypedInfo: this._getPyTypedInfo(dirPath), + }; + + implicitImportMap.set(implicitImport.name, implicitImport); + } + } + } + + return implicitImportMap.size > 0 ? implicitImportMap : undefined; + } + + private _isPossibleImportDir(rootPath: Uri, moduleDescriptor: ImportedModuleDescriptor): boolean { + const cachedDir = this.readdirEntriesCached(rootPath); + + const isPotentialMatch = (name: string): boolean => { + return cachedDir.resolvableNames.has(name); + }; + + if (moduleDescriptor.nameParts.length > 0) { + return isPotentialMatch(moduleDescriptor.nameParts[0]); + } + + if (moduleDescriptor.importedSymbols) { + for (const key of moduleDescriptor.importedSymbols) { + if (isPotentialMatch(key)) { + return true; + } + } + } + + return isPotentialMatch('__init__'); + } + + private _resolveImportStrict( + importName: string, + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor, + importLogger?: ImportLogger + ) { + const fromUserFile = matchFileSpecs(this._configOptions, sourceFileUri); + const notFoundResult: ImportResult = { + importName, + isRelative: false, + isImportFound: false, + isPartlyResolved: false, + isNamespacePackage: false, + isInitFilePresent: false, + isStubPackage: false, + importFailureInfo: importLogger?.getLogs(), + resolvedUris: [], + importType: ImportType.Local, + isStubFile: false, + isNativeLib: false, + implicitImports: undefined, + 
filteredImplicitImports: undefined, + nonStubImportResult: undefined, + }; + + this.ensurePartialStubPackages(execEnv); + + // Is it a relative import? + if (moduleDescriptor.leadingDots > 0) { + const cachedResults = this._lookUpResultsInCache( + sourceFileUri, + execEnv, + importName, + moduleDescriptor, + fromUserFile + ); + + if (cachedResults) { + return cachedResults; + } + + const relativeImport = this._resolveRelativeImport( + sourceFileUri, + execEnv, + moduleDescriptor, + importName, + importLogger + ); + + if (relativeImport) { + relativeImport.isRelative = true; + + return this.addResultsToCache( + sourceFileUri, + execEnv, + importName, + relativeImport, + moduleDescriptor, + fromUserFile + ); + } + } else { + const cachedResults = this._lookUpResultsInCache( + sourceFileUri, + execEnv, + importName, + moduleDescriptor, + fromUserFile + ); + + if (cachedResults) { + // In most cases, we can simply return a cached entry. However, there are cases + // where the cached entry refers to a previously-resolved namespace package + // that does not resolve the symbols specified in the module descriptor. + // In this case, we will ignore the cached value and run the full import + // resolution again to try to find a package that resolves the import. 
+ const isUnresolvedNamespace = + cachedResults.isImportFound && + cachedResults.isNamespacePackage && + !this._isNamespacePackageResolved(moduleDescriptor, cachedResults.implicitImports); + + if (!isUnresolvedNamespace) { + return cachedResults; + } + } + + const bestImport = this._resolveBestAbsoluteImport( + sourceFileUri, + execEnv, + moduleDescriptor, + /* allowPyi */ true + ); + + if (bestImport) { + if (bestImport.isStubFile) { + bestImport.nonStubImportResult = + this._resolveBestAbsoluteImport( + sourceFileUri, + execEnv, + moduleDescriptor, + /* allowPyi */ false + ) || notFoundResult; + } + + return this.addResultsToCache( + sourceFileUri, + execEnv, + importName, + bestImport, + moduleDescriptor, + fromUserFile + ); + } + } + + return this.addResultsToCache( + sourceFileUri, + execEnv, + importName, + notFoundResult, + /* moduleDescriptor */ undefined, + fromUserFile + ); + } + + private _getCompletionSuggestionsStrict( + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor + ): Map { + const suggestions = new Map(); + + // Is it a relative import? + if (moduleDescriptor.leadingDots > 0) { + this._getCompletionSuggestionsRelative(sourceFileUri, execEnv, moduleDescriptor, suggestions); + } else { + // First check for a typeshed file. + if (moduleDescriptor.nameParts.length > 0) { + this._getCompletionSuggestionsTypeshedPath(sourceFileUri, execEnv, moduleDescriptor, true, suggestions); + } + + // Look for it in the root directory of the execution environment. + if (execEnv.root) { + this._getCompletionSuggestionsAbsolute( + sourceFileUri, + execEnv, + execEnv.root, + moduleDescriptor, + suggestions + ); + } + + for (const extraPath of execEnv.extraPaths) { + this._getCompletionSuggestionsAbsolute( + sourceFileUri, + execEnv, + extraPath, + moduleDescriptor, + suggestions + ); + } + + // Check for a typings file. 
+ if (this._configOptions.stubPath) { + this._getCompletionSuggestionsAbsolute( + sourceFileUri, + execEnv, + this._configOptions.stubPath, + moduleDescriptor, + suggestions + ); + } + + // Check for a typeshed file. + this._getCompletionSuggestionsTypeshedPath(sourceFileUri, execEnv, moduleDescriptor, false, suggestions); + + // Look for the import in the list of third-party packages. + const pythonSearchPaths = this.getPythonSearchPaths(); + for (const searchPath of pythonSearchPaths) { + this._getCompletionSuggestionsAbsolute( + sourceFileUri, + execEnv, + searchPath, + moduleDescriptor, + suggestions + ); + } + } + + return suggestions; + } + + private _getModuleNameForImport( + fileUri: Uri, + execEnv: ExecutionEnvironment, + allowInvalidModuleName: boolean, + detectPyTyped: boolean + ): ModuleImportInfo { + let moduleName: string | undefined; + let importType = ImportType.BuiltIn; + let isLocalTypingsFile = false; + let isThirdPartyPyTypedPresent = false; + let isTypeshedFile = false; + + // If we cannot find a fully-qualified module name with legal characters, + // look for one with invalid characters (e.g. "-"). This is important to + // differentiate between different modules in a project in case they + // declare types with the same (local) name. + let moduleNameWithInvalidCharacters: string | undefined; + + // Is this a stdlib typeshed path? 
+ const stdLibTypeshedPath = this._getStdlibTypeshedPath( + this._configOptions.typeshedPath, + execEnv.pythonVersion, + execEnv.pythonPlatform + ); + + if (stdLibTypeshedPath) { + moduleName = getModuleNameFromPath(stdLibTypeshedPath, fileUri); + if (moduleName) { + const moduleDescriptor: ImportedModuleDescriptor = { + leadingDots: 0, + nameParts: moduleName.split('.'), + importedSymbols: undefined, + }; + + if ( + this._isStdlibTypeshedStubValidForVersion( + moduleDescriptor, + this._configOptions.typeshedPath, + execEnv.pythonVersion, + execEnv.pythonPlatform + ) + ) { + return { + moduleName, + importType, + isTypeshedFile: true, + isLocalTypingsFile, + isThirdPartyPyTypedPresent, + }; + } + } + } + + // Look for it in the root directory of the execution environment. + if (execEnv.root) { + const candidateModuleNameInfo = _getModuleNameInfoFromPath(execEnv.root, fileUri); + + if (candidateModuleNameInfo) { + if (candidateModuleNameInfo.containsInvalidCharacters) { + moduleNameWithInvalidCharacters = candidateModuleNameInfo.moduleName; + } else { + moduleName = candidateModuleNameInfo.moduleName; + } + } + + importType = ImportType.Local; + } + + for (const extraPath of execEnv.extraPaths) { + const candidateModuleNameInfo = _getModuleNameInfoFromPath(extraPath, fileUri); + + if (candidateModuleNameInfo) { + if (candidateModuleNameInfo.containsInvalidCharacters) { + moduleNameWithInvalidCharacters = candidateModuleNameInfo.moduleName; + } else { + // Does this candidate look better than the previous best module name? + // We'll always try to use the shortest version. + const candidateModuleName = candidateModuleNameInfo.moduleName; + if (!moduleName || (candidateModuleName && candidateModuleName.length < moduleName.length)) { + moduleName = candidateModuleName; + importType = ImportType.Local; + } + } + } + } + + // Check for a typings file. 
+ if (this._configOptions.stubPath) { + const candidateModuleNameInfo = _getModuleNameInfoFromPath(this._configOptions.stubPath, fileUri); + + if (candidateModuleNameInfo) { + if (candidateModuleNameInfo.containsInvalidCharacters) { + moduleNameWithInvalidCharacters = candidateModuleNameInfo.moduleName; + } else { + // Does this candidate look better than the previous best module name? + // We'll always try to use the shortest version. + const candidateModuleName = candidateModuleNameInfo.moduleName; + if (!moduleName || (candidateModuleName && candidateModuleName.length < moduleName.length)) { + moduleName = candidateModuleName; + + // Treat the typings path as a local import so errors are reported for it. + importType = ImportType.Local; + isLocalTypingsFile = true; + } + } + } + } + + // Check for a typeshed file. + const thirdPartyTypeshedPath = this._getThirdPartyTypeshedPath(this._configOptions.typeshedPath); + + if (thirdPartyTypeshedPath) { + const candidateModuleName = getModuleNameFromPath( + thirdPartyTypeshedPath, + fileUri, + /* stripTopContainerDir */ true + ); + + // Does this candidate look better than the previous best module name? + // We'll always try to use the shortest version. + if (!moduleName || (candidateModuleName && candidateModuleName.length < moduleName.length)) { + moduleName = candidateModuleName; + importType = ImportType.ThirdParty; + isTypeshedFile = true; + } + } + + const thirdPartyTypeshedPathEx = this.getTypeshedPathEx(execEnv); + if (thirdPartyTypeshedPathEx) { + const candidateModuleName = getModuleNameFromPath(thirdPartyTypeshedPathEx, fileUri); + + // Does this candidate look better than the previous best module name? + // We'll always try to use the shortest version. 
+ if (!moduleName || (candidateModuleName && candidateModuleName.length < moduleName.length)) { + moduleName = candidateModuleName; + importType = ImportType.ThirdParty; + isTypeshedFile = true; + } + } + + // Look for the import in the list of third-party packages. + const pythonSearchPaths = this.getPythonSearchPaths(); + + for (const searchPath of pythonSearchPaths) { + const candidateModuleNameInfo = _getModuleNameInfoFromPath(searchPath, fileUri); + + if (candidateModuleNameInfo) { + if (candidateModuleNameInfo.containsInvalidCharacters) { + moduleNameWithInvalidCharacters = candidateModuleNameInfo.moduleName; + } else { + // Does this candidate look better than the previous best module name? + // We'll always try to use the shortest version. + const candidateModuleName = candidateModuleNameInfo.moduleName; + if (!moduleName || (candidateModuleName && candidateModuleName.length < moduleName.length)) { + moduleName = candidateModuleName; + importType = ImportType.ThirdParty; + isTypeshedFile = false; + } + } + } + } + + if (detectPyTyped && importType === ImportType.ThirdParty) { + const root = getParentImportResolutionRoot(fileUri, execEnv.root); + + // Go up directories one by one looking for a py.typed file. + let current: Uri | undefined = fileUri.getDirectory(); + while (this._shouldWalkUp(current, root, execEnv)) { + const pyTypedInfo = this._getPyTypedInfo(current!); + if (pyTypedInfo) { + if (!pyTypedInfo.isPartiallyTyped) { + isThirdPartyPyTypedPresent = true; + } + break; + } + + current = this._tryWalkUp(current); + } + } + + if (moduleName) { + return { moduleName, importType, isTypeshedFile, isLocalTypingsFile, isThirdPartyPyTypedPresent }; + } + + if (allowInvalidModuleName && moduleNameWithInvalidCharacters) { + return { + moduleName: moduleNameWithInvalidCharacters, + isTypeshedFile, + importType, + isLocalTypingsFile, + isThirdPartyPyTypedPresent, + }; + } + + // We didn't find any module name. 
+ return { + moduleName: '', + isTypeshedFile, + importType: ImportType.Local, + isLocalTypingsFile, + isThirdPartyPyTypedPresent, + }; + } + + private _invalidateFileSystemCache() { + this._cachedEntriesForPath.clear(); + this._cachedFilesForPath.clear(); + this._cachedDirExistenceForRoot.clear(); + } + + private _resolveAbsoluteImport( + rootPath: Uri, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor, + importName: string, + importLogger: ImportLogger | undefined, + allowPartial: boolean, + allowNativeLib: boolean, + useStubPackage: boolean, + allowPyi: boolean, + lookForPyTyped: boolean + ): ImportResult { + if (useStubPackage) { + importLogger?.log(`Attempting to resolve stub package using root path '${rootPath}'`); + } else { + importLogger?.log(`Attempting to resolve using root path '${rootPath}'`); + } + + // Starting at the specified path, walk the file system to find the + // specified module. + const resolvedPaths: Uri[] = []; + let dirPath = rootPath; + let isNamespacePackage = false; + let isInitFilePresent = false; + let isStubPackage = false; + let isStubFile = false; + let isNativeLib = false; + let implicitImports: Map | undefined; + let packageDirectory: Uri | undefined; + let pyTypedInfo: PyTypedInfo | undefined; + + // Handle the "from . import XXX" case. 
+ if (moduleDescriptor.nameParts.length === 0) { + const pyFilePath = dirPath.initPyUri; + const pyiFilePath = dirPath.initPyiUri; + + if (allowPyi && this.fileExistsCached(pyiFilePath)) { + importLogger?.log(`Resolved import with file '${pyiFilePath}'`); + resolvedPaths.push(pyiFilePath); + isStubFile = true; + } else if (this.fileExistsCached(pyFilePath)) { + importLogger?.log(`Resolved import with file '${pyFilePath}'`); + resolvedPaths.push(pyFilePath); + } else { + importLogger?.log(`Partially resolved import with directory '${dirPath}'`); + resolvedPaths.push(Uri.empty()); + isNamespacePackage = true; + } + + implicitImports = this.findImplicitImports(importName, dirPath, [pyFilePath, pyiFilePath]); + } else { + for (let i = 0; i < moduleDescriptor.nameParts.length; i++) { + const isFirstPart = i === 0; + const isLastPart = i === moduleDescriptor.nameParts.length - 1; + dirPath = dirPath.combinePaths(moduleDescriptor.nameParts[i]); + + if (useStubPackage && isFirstPart) { + dirPath = dirPath.addPath(stubsSuffix); + isStubPackage = true; + } + + const foundDirectory = this.dirExistsCached(dirPath); + + if (foundDirectory) { + if (isFirstPart) { + packageDirectory = dirPath; + } + + // See if we can find an __init__.py[i] in this directory. 
+ const pyFilePath = dirPath.initPyUri; + const pyiFilePath = dirPath.initPyiUri; + isInitFilePresent = false; + + if (allowPyi && this.fileExistsCached(pyiFilePath)) { + importLogger?.log(`Resolved import with file '${pyiFilePath}'`); + resolvedPaths.push(pyiFilePath); + if (isLastPart) { + isStubFile = true; + } + isInitFilePresent = true; + } else if (this.fileExistsCached(pyFilePath)) { + importLogger?.log(`Resolved import with file '${pyFilePath}'`); + resolvedPaths.push(pyFilePath); + isInitFilePresent = true; + } + + if (!pyTypedInfo && lookForPyTyped) { + pyTypedInfo = this._getPyTypedInfo(dirPath); + } + + if (isInitFilePresent) { + if (!isLastPart) { + // We are not at the last part, and we found a directory, + // so continue to look for the next part. + continue; + } + + implicitImports = this.findImplicitImports(moduleDescriptor.nameParts.join('.'), dirPath, [ + pyFilePath, + pyiFilePath, + ]); + break; + } + } + + // We weren't able to find a directory or we found a directory with + // no __init__.py[i] file. See if we can find a ".py" or ".pyi" file + // with this name. 
+ const pyFilePath = dirPath.packageUri; + const pyiFilePath = dirPath.packageStubUri; + const fileDirectory = dirPath.getDirectory(); + + if (allowPyi && this.fileExistsCached(pyiFilePath)) { + importLogger?.log(`Resolved import with file '${pyiFilePath}'`); + resolvedPaths.push(pyiFilePath); + if (isLastPart) { + isStubFile = true; + } + } else if (this.fileExistsCached(pyFilePath)) { + importLogger?.log(`Resolved import with file '${pyFilePath}'`); + resolvedPaths.push(pyFilePath); + } else if ( + allowNativeLib && + this._findAndResolveNativeModule( + fileDirectory, + dirPath, + execEnv, + importName, + moduleDescriptor, + importLogger, + resolvedPaths + ) + ) { + isNativeLib = true; + importLogger?.log(`Did not find file '${pyiFilePath}' or '${pyFilePath}'`); + } else if (foundDirectory) { + if (!isLastPart) { + // We are not at the last part, and we found a directory, + // so continue to look for the next part assuming this is + // a namespace package. + resolvedPaths.push(Uri.empty()); + isNamespacePackage = true; + pyTypedInfo = undefined; + continue; + } + + importLogger?.log(`Partially resolved import with directory '${dirPath}'`); + resolvedPaths.push(Uri.empty()); + + if (isLastPart) { + implicitImports = this.findImplicitImports(importName, dirPath, [pyFilePath, pyiFilePath]); + isNamespacePackage = true; + } + } + + if (!pyTypedInfo && lookForPyTyped) { + pyTypedInfo = this._getPyTypedInfo(fileDirectory); + } + break; + } + } + + let importFound: boolean; + const isPartlyResolved = resolvedPaths.length > 0 && resolvedPaths.length < moduleDescriptor.nameParts.length; + if (allowPartial) { + importFound = resolvedPaths.length > 0; + } else { + importFound = resolvedPaths.length >= moduleDescriptor.nameParts.length; + } + + return { + importName, + isRelative: false, + isNamespacePackage, + isInitFilePresent, + isStubPackage, + isImportFound: importFound, + isPartlyResolved, + importFailureInfo: importLogger?.getLogs(), + importType: ImportType.Local, + 
resolvedUris: resolvedPaths, + searchPath: rootPath, + isStubFile, + isNativeLib, + implicitImports, + pyTypedInfo, + filteredImplicitImports: implicitImports, + packageDirectory, + }; + } + + private _getImportCacheKey(sourceFileUri: Uri | undefined, importName: string, fromUserFile: boolean) { + return `${sourceFileUri?.key ?? ''}-${importName}-${fromUserFile}`; + } + + private _lookUpResultsInCache( + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + importName: string, + moduleDescriptor: ImportedModuleDescriptor, + fromUserFile: boolean + ) { + const cacheForExecEnv = this._cachedImportResults.get(execEnv.root?.key ?? ''); + if (!cacheForExecEnv) { + return undefined; + } + + // If the import is relative, include the source file path in the key. + const relativeSourceFileUri = moduleDescriptor.leadingDots > 0 ? sourceFileUri : undefined; + + const cachedEntry = cacheForExecEnv.get( + this._getImportCacheKey(relativeSourceFileUri, importName, fromUserFile) + ); + + if (!cachedEntry) { + return undefined; + } + + return this.filterImplicitImports(cachedEntry, moduleDescriptor.importedSymbols); + } + + // Determines whether a namespace package resolves all of the symbols + // requested in the module descriptor. Namespace packages have no "__init__.py" + // file, so the only way that symbols can be resolved is if submodules + // are present. If specific symbols were requested, make sure they + // are all satisfied by submodules (as listed in the implicit imports). 
+ private _isNamespacePackageResolved( + moduleDescriptor: ImportedModuleDescriptor, + implicitImports: Map | undefined + ) { + if (moduleDescriptor.importedSymbols) { + if (!Array.from(moduleDescriptor.importedSymbols.keys()).some((symbol) => implicitImports?.has(symbol))) { + return false; + } + } else if (!implicitImports || implicitImports.size === 0) { + return false; + } + return true; + } + + private _resolveBestAbsoluteImport( + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor, + allowPyi: boolean + ): ImportResult | undefined { + const importName = formatImportName(moduleDescriptor); + const importLogger = this._configOptions.verboseOutput ? new ImportLogger() : undefined; + + // Check for a local stub file using stubPath. + if (allowPyi && this._configOptions.stubPath) { + importLogger?.log(`Looking in stubPath '${this._configOptions.stubPath}'`); + const typingsImport = this.resolveAbsoluteImport( + sourceFileUri, + this._configOptions.stubPath, + execEnv, + moduleDescriptor, + importName, + importLogger, + /* allowPartial */ undefined, + /* allowNativeLib */ false, + /* useStubPackage */ true, + allowPyi, + /* lookForPyTyped */ false + ); + + if (typingsImport?.isImportFound) { + // We will treat typings files as "local" rather than "third party". + typingsImport.importType = ImportType.Local; + typingsImport.isLocalTypingsFile = true; + + // If it's a namespace package that didn't resolve to a file, make sure that + // the imported symbols are present in the implicit imports. If not, we'll + // skip the typings import and continue searching. 
+ if ( + typingsImport.isNamespacePackage && + typingsImport.resolvedUris[typingsImport.resolvedUris.length - 1].isEmpty() + ) { + if (this._isNamespacePackageResolved(moduleDescriptor, typingsImport.implicitImports)) { + return typingsImport; + } + } else { + return typingsImport; + } + } + } + + let bestResultSoFar: ImportResult | undefined; + let localImport: ImportResult | undefined; + + // Look for it in the root directory of the execution environment. + if (execEnv.root) { + importLogger?.log(`Looking in root directory of execution environment ` + `'${execEnv.root}'`); + + localImport = this.resolveAbsoluteImport( + sourceFileUri, + execEnv.root, + execEnv, + moduleDescriptor, + importName, + importLogger, + /* allowPartial */ undefined, + /* allowNativeLib */ true, + /* useStubPackage */ true, + allowPyi, + /* lookForPyTyped */ false + ); + bestResultSoFar = localImport; + } + + for (const extraPath of execEnv.extraPaths) { + importLogger?.log(`Looking in extraPath '${extraPath}'`); + localImport = this.resolveAbsoluteImport( + sourceFileUri, + extraPath, + execEnv, + moduleDescriptor, + importName, + importLogger, + /* allowPartial */ undefined, + /* allowNativeLib */ true, + /* useStubPackage */ true, + allowPyi, + /* lookForPyTyped */ false + ); + bestResultSoFar = this._pickBestImport(bestResultSoFar, localImport, moduleDescriptor); + } + + // Check for a stdlib typeshed file. + if (allowPyi && moduleDescriptor.nameParts.length > 0) { + importLogger?.log(`Looking for typeshed stdlib path`); + const typeshedStdlibImport = this._findTypeshedPath( + execEnv, + moduleDescriptor, + importName, + /* isStdLib */ true, + importLogger + ); + + if (typeshedStdlibImport) { + typeshedStdlibImport.isStdlibTypeshedFile = true; + return typeshedStdlibImport; + } + } + + // Look for the import in the list of third-party packages. 
+ const pythonSearchPaths = this.getPythonSearchPaths(importLogger); + if (pythonSearchPaths.length > 0) { + for (const searchPath of pythonSearchPaths) { + importLogger?.log(`Looking in python search path '${searchPath}'`); + + const thirdPartyImport = this.resolveAbsoluteImport( + sourceFileUri, + searchPath, + execEnv, + moduleDescriptor, + importName, + importLogger, + /* allowPartial */ allowPartialResolutionForThirdPartyPackages, + /* allowNativeLib */ true, + /* useStubPackage */ true, + allowPyi, + /* lookForPyTyped */ true + ); + + if (thirdPartyImport) { + thirdPartyImport.importType = ImportType.ThirdParty; + + bestResultSoFar = this._pickBestImport(bestResultSoFar, thirdPartyImport, moduleDescriptor); + } + } + } else { + importLogger?.log('No python interpreter search path'); + } + + // If a library is fully py.typed, then we have found the best match, + // unless the execution environment is typeshed itself, in which case + // we don't want to favor py.typed libraries. Use the typeshed lookup below. + if (execEnv.root !== this._getTypeshedRoot(this._configOptions.typeshedPath, importLogger)) { + if (bestResultSoFar?.pyTypedInfo && !bestResultSoFar.isPartlyResolved) { + return bestResultSoFar; + } + } + + // Call the extensibility hook for subclasses. + const extraResults = this.resolveImportEx( + sourceFileUri, + execEnv, + moduleDescriptor, + importName, + importLogger, + allowPyi + ); + + if (extraResults) { + return extraResults; + } + + // Check for a third-party typeshed file. 
+ if (allowPyi && moduleDescriptor.nameParts.length > 0) { + importLogger?.log(`Looking for typeshed third-party path`); + const typeshedImport = this._findTypeshedPath( + execEnv, + moduleDescriptor, + importName, + /* isStdLib */ false, + importLogger + ); + + if (typeshedImport) { + typeshedImport.isThirdPartyTypeshedFile = true; + bestResultSoFar = this._pickBestImport(bestResultSoFar, typeshedImport, moduleDescriptor); + } + } + + // We weren't able to find an exact match, so return the best + // partial match. + return bestResultSoFar; + } + + private _pickBestImport( + bestImportSoFar: ImportResult | undefined, + newImport: ImportResult | undefined, + moduleDescriptor: ImportedModuleDescriptor + ) { + if (!bestImportSoFar) { + return newImport; + } + + if (!newImport) { + return bestImportSoFar; + } + + if (newImport.isImportFound) { + // Prefer traditional packages over namespace packages. + const soFarIndex = bestImportSoFar.resolvedUris.findIndex((path) => !path.isEmpty()); + const newIndex = newImport.resolvedUris.findIndex((path) => !path.isEmpty()); + if (soFarIndex !== newIndex) { + if (soFarIndex < 0) { + return newImport; + } else if (newIndex < 0) { + return bestImportSoFar; + } + return soFarIndex < newIndex ? bestImportSoFar : newImport; + } + + // Prefer found over not found. + if (!bestImportSoFar.isImportFound) { + return newImport; + } + + // If both are namespace imports, select the one that resolves the symbols. + if (bestImportSoFar.isNamespacePackage && newImport.isNamespacePackage) { + if (moduleDescriptor.importedSymbols) { + if (!this._isNamespacePackageResolved(moduleDescriptor, bestImportSoFar.implicitImports)) { + if (this._isNamespacePackageResolved(moduleDescriptor, newImport.implicitImports)) { + return newImport; + } + + // Prefer the namespace package that has an __init__.py(i) file present + // in the final directory over one that does not. 
+ if (bestImportSoFar.isInitFilePresent && !newImport.isInitFilePresent) { + return bestImportSoFar; + } else if (!bestImportSoFar.isInitFilePresent && newImport.isInitFilePresent) { + return newImport; + } + } + } + } + + // Prefer local over third-party. We check local first, so we should never + // see the reverse. + if (bestImportSoFar.importType === ImportType.Local && newImport.importType === ImportType.ThirdParty) { + return bestImportSoFar; + } + + // Prefer py.typed over non-py.typed. + if (bestImportSoFar.pyTypedInfo && !newImport.pyTypedInfo) { + return bestImportSoFar; + } else if (!bestImportSoFar.pyTypedInfo && newImport.pyTypedInfo) { + if (bestImportSoFar.importType === newImport.importType) { + return newImport; + } + } + + // Prefer pyi over py. + if (bestImportSoFar.isStubFile && !newImport.isStubFile) { + return bestImportSoFar; + } else if (!bestImportSoFar.isStubFile && newImport.isStubFile) { + return newImport; + } + + // All else equal, prefer shorter resolution paths. + if (bestImportSoFar.resolvedUris.length > newImport.resolvedUris.length) { + return newImport; + } + } else if (newImport.isPartlyResolved) { + // If the new import is a traditional package but only partly resolves + // the import but the best import so far is a namespace package, we need + // to consider whether the best import so far also resolves the first part + // of the import with a traditional package. 
Using the example "import a.b.c.d" + // and the symbol ~ to represent a namespace package, consider the following + // cases: + // bestSoFar: a/~b/~c/~d new: a Result: bestSoFar wins + // bestSoFar: ~a/~b/~c/~d new: a Result: new wins + // bestSoFar: a/~b/~c/~d new: a/b Result: new wins + const soFarIndex = bestImportSoFar.resolvedUris.findIndex((path) => !path.isEmpty()); + const newIndex = newImport.resolvedUris.findIndex((path) => !path.isEmpty()); + + if (soFarIndex !== newIndex) { + if (soFarIndex < 0) { + return newImport; + } else if (newIndex < 0) { + return bestImportSoFar; + } + return soFarIndex < newIndex ? bestImportSoFar : newImport; + } + } + + return bestImportSoFar; + } + + private _findTypeshedPath( + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor, + importName: string, + isStdLib: boolean, + importLogger?: ImportLogger + ): ImportResult | undefined { + importLogger?.log( + `Looking for typeshed ${ + isStdLib ? PythonPathUtils.stdLibFolderName : PythonPathUtils.thirdPartyFolderName + } path` + ); + + let typeshedPaths: Uri[] | undefined; + if (isStdLib) { + const path = this._getStdlibTypeshedPath( + this._configOptions.typeshedPath, + execEnv.pythonVersion, + execEnv.pythonPlatform, + importLogger, + moduleDescriptor + ); + + if (path) { + typeshedPaths = [path]; + } + } else { + typeshedPaths = this._getThirdPartyTypeshedPackagePaths(moduleDescriptor, importLogger); + } + + if (typeshedPaths) { + for (const typeshedPath of typeshedPaths) { + if (this.dirExistsCached(typeshedPath)) { + const importInfo = this.resolveAbsoluteImport( + undefined, + typeshedPath, + execEnv, + moduleDescriptor, + importName, + importLogger + ); + + if (importInfo?.isImportFound) { + let importType = isStdLib ? ImportType.BuiltIn : ImportType.ThirdParty; + + // Handle 'typing_extensions' as a special case because it's + // part of stdlib typeshed stubs, but it's not part of stdlib. 
+ if (importName === 'typing_extensions') { + importType = ImportType.ThirdParty; + } + + importInfo.importType = importType; + return importInfo; + } + } + } + } + + importLogger?.log(`Typeshed path not found`); + return undefined; + } + + // Finds all of the stdlib modules and returns a Set containing all of their names. + private _buildStdlibCache(stdlibRoot: Uri | undefined, executionEnvironment: ExecutionEnvironment): Set { + const cache = new Set(); + + if (stdlibRoot) { + const readDir = (root: Uri, prefix: string | undefined) => { + this.readdirEntriesCached(root).entries.forEach((entry) => { + if (entry.isDirectory()) { + const dirRoot = root.combinePaths(entry.name); + readDir(dirRoot, prefix ? `${prefix}.${entry.name}` : entry.name); + } else if (entry.name.includes('.py')) { + const stripped = stripFileExtension(entry.name); + // Skip anything starting with an underscore. + if (!stripped.startsWith('_')) { + if ( + this._isStdlibTypeshedStubValidForVersion( + createImportedModuleDescriptor(stripped), + root, + executionEnvironment.pythonVersion, + executionEnvironment.pythonPlatform + ) + ) { + cache.add(prefix ? `${prefix}.${stripped}` : stripped); + } + } + } + }); + }; + readDir(stdlibRoot, undefined); + } + + return cache; + } + + // Populates a cache of third-party packages found within the typeshed + // directory. They are organized such that top-level directories contain + // the pypi-registered name of the package and an inner directory contains + // the name of the package as it is referenced by import statements. These + // don't always match. 
+ private _buildTypeshedThirdPartyPackageMap(thirdPartyDir: Uri | undefined) { + this._cachedTypeshedThirdPartyPackagePaths = new Map(); + + if (thirdPartyDir) { + this.readdirEntriesCached(thirdPartyDir).entries.forEach((outerEntry) => { + if (outerEntry.isDirectory()) { + const innerDirPath = thirdPartyDir.combinePaths(outerEntry.name); + + this.readdirEntriesCached(innerDirPath).entries.forEach((innerEntry) => { + if (innerEntry.name === '@python2') { + return; + } + + if (innerEntry.isDirectory()) { + const pathList = this._cachedTypeshedThirdPartyPackagePaths!.get(innerEntry.name); + if (pathList) { + pathList.push(innerDirPath); + } else { + this._cachedTypeshedThirdPartyPackagePaths!.set(innerEntry.name, [innerDirPath]); + } + } else if (innerEntry.isFile()) { + if (innerEntry.name.endsWith('.pyi')) { + const strippedFileName = stripFileExtension(innerEntry.name); + const pathList = this._cachedTypeshedThirdPartyPackagePaths!.get(strippedFileName); + if (pathList) { + pathList.push(innerDirPath); + } else { + this._cachedTypeshedThirdPartyPackagePaths!.set(strippedFileName, [innerDirPath]); + } + } + } + }); + } + }); + } + + const flattenPaths = Array.from(this._cachedTypeshedThirdPartyPackagePaths.values()).flatMap((v) => v); + this._cachedTypeshedThirdPartyPackageRoots = Array.from(new Set(flattenPaths)).sort(); + } + + private _getCompletionSuggestionsTypeshedPath( + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor, + isStdLib: boolean, + suggestions: Map + ) { + let typeshedPaths: Uri[] | undefined; + if (isStdLib) { + const path = this._getStdlibTypeshedPath( + this._configOptions.typeshedPath, + execEnv.pythonVersion, + execEnv.pythonPlatform, + /* importLogger */ undefined, + moduleDescriptor + ); + if (path) { + typeshedPaths = [path]; + } + } else { + typeshedPaths = this._getThirdPartyTypeshedPackagePaths( + moduleDescriptor, + /* importLogger */ undefined, + /* includeMatchOnly */ false + ); + + 
const typeshedPathEx = this.getTypeshedPathEx(execEnv); + if (typeshedPathEx) { + typeshedPaths = typeshedPaths ?? []; + typeshedPaths.push(typeshedPathEx); + } + } + + if (!typeshedPaths) { + return; + } + + typeshedPaths.forEach((typeshedPath) => { + if (this.dirExistsCached(typeshedPath)) { + this._getCompletionSuggestionsAbsolute( + sourceFileUri, + execEnv, + typeshedPath, + moduleDescriptor, + suggestions + ); + } + }); + } + + // Returns the directory for a module within the stdlib typeshed directory. + // If moduleDescriptor is provided, it is filtered based on the VERSIONS + // file in the typeshed stubs. + private _getStdlibTypeshedPath( + customTypeshedPath: Uri | undefined, + pythonVersion: PythonVersion, + pythonPlatform: string | undefined, + importLogger?: ImportLogger, + moduleDescriptor?: ImportedModuleDescriptor + ) { + const subdirectory = this._getTypeshedSubdirectory(/* isStdLib */ true, customTypeshedPath, importLogger); + if ( + subdirectory && + moduleDescriptor && + !this._isStdlibTypeshedStubValidForVersion( + moduleDescriptor, + customTypeshedPath, + pythonVersion, + pythonPlatform, + importLogger + ) + ) { + return undefined; + } + + return subdirectory; + } + + private _getThirdPartyTypeshedPath(customTypeshedPath: Uri | undefined, importLogger?: ImportLogger) { + return this._getTypeshedSubdirectory(/* isStdLib */ false, customTypeshedPath, importLogger); + } + + private _isStdlibTypeshedStubValidForVersion( + moduleDescriptor: ImportedModuleDescriptor, + customTypeshedPath: Uri | undefined, + pythonVersion: PythonVersion, + pythonPlatform: string | undefined, + importLogger?: ImportLogger + ) { + if (!this._cachedTypeshedStdLibModuleVersionInfo) { + this._cachedTypeshedStdLibModuleVersionInfo = this._readTypeshedStdLibVersions( + customTypeshedPath, + importLogger + ); + } + + // Loop through the name parts to make sure the module and submodules + // referenced in the import statement are valid for this version of Python. 
+ for (let namePartCount = 1; namePartCount <= moduleDescriptor.nameParts.length; namePartCount++) { + const namePartsToConsider = moduleDescriptor.nameParts.slice(0, namePartCount); + const versionInfo = this._cachedTypeshedStdLibModuleVersionInfo.get(namePartsToConsider.join('.')); + + if (versionInfo) { + if (PythonVersion.isLessThan(pythonVersion, versionInfo.min)) { + return false; + } + + if (versionInfo.max !== undefined && PythonVersion.isGreaterThan(pythonVersion, versionInfo.max)) { + return false; + } + + if (pythonPlatform !== undefined) { + const pythonPlatformLower = pythonPlatform.toLowerCase(); + + if (versionInfo.supportedPlatforms) { + if (versionInfo.supportedPlatforms.every((p) => p.toLowerCase() !== pythonPlatformLower)) { + return false; + } + } + + if (versionInfo.unsupportedPlatforms) { + if (versionInfo.unsupportedPlatforms.some((p) => p.toLowerCase() === pythonPlatformLower)) { + return false; + } + } + } + } + } + + return true; + } + + private _readTypeshedStdLibVersions( + customTypeshedPath: Uri | undefined, + importLogger?: ImportLogger + ): Map { + const versionRangeMap = new Map(); + + // Read the VERSIONS file from typeshed. + const typeshedStdLibPath = this._getTypeshedSubdirectory(/* isStdLib */ true, customTypeshedPath, importLogger); + + if (typeshedStdLibPath) { + const versionsFilePath = typeshedStdLibPath.combinePaths('VERSIONS'); + try { + const fileStats = this.fileSystem.statSync(versionsFilePath); + if (fileStats.size > 0 && fileStats.size < 256 * 1024) { + const fileContents = this.fileSystem.readFileSync(versionsFilePath, 'utf8'); + fileContents.split(/\r?\n/).forEach((line) => { + const commentSplit = line.split('#'); + + // Platform-specific information can be specified after a semicolon. + const semicolonSplit = commentSplit[0].split(';').map((s) => s.trim()); + + // Version information is found after a colon. 
+ const colonSplit = semicolonSplit[0].split(':'); + if (colonSplit.length !== 2) { + return; + } + + const versionSplit = colonSplit[1].split('-'); + if (versionSplit.length > 2) { + return; + } + + const moduleName = colonSplit[0].trim(); + if (!moduleName) { + return; + } + + let minVersionString = versionSplit[0].trim(); + if (minVersionString.endsWith('+')) { + // If the version ends in "+", strip it off. + minVersionString = minVersionString.substr(0, minVersionString.length - 1); + } + let minVersion = PythonVersion.fromString(minVersionString); + if (!minVersion) { + minVersion = pythonVersion3_0; + } + + let maxVersion: PythonVersion | undefined; + if (versionSplit.length > 1) { + maxVersion = PythonVersion.fromString(versionSplit[1].trim()); + } + + // A semicolon can be followed by a semicolon-delimited list of other + // exclusions. The "platform" exclusion is a comma delimited list platforms + // that are supported or not supported. + let supportedPlatforms: string[] | undefined; + let unsupportedPlatforms: string[] | undefined; + const platformsHeader = 'platforms='; + let platformExclusions = semicolonSplit.slice(1).find((s) => s.startsWith(platformsHeader)); + + if (platformExclusions) { + platformExclusions = platformExclusions.trim().substring(platformsHeader.length); + const commaSplit = platformExclusions.split(','); + for (let platform of commaSplit) { + platform = platform.trim(); + let isUnsupported = false; + + // Remove the '!' from the start if it's an exclusion. + if (platform.startsWith('!')) { + isUnsupported = true; + platform = platform.substring(1); + } + + if (isUnsupported) { + unsupportedPlatforms = unsupportedPlatforms ?? []; + unsupportedPlatforms.push(platform); + } else { + supportedPlatforms = supportedPlatforms ?? 
[]; + supportedPlatforms.push(platform); + } + } + } + + versionRangeMap.set(moduleName, { + min: minVersion, + max: maxVersion, + supportedPlatforms, + unsupportedPlatforms, + }); + }); + } else { + importLogger?.log(`Typeshed stdlib VERSIONS file is unexpectedly large`); + } + } catch (e: any) { + importLogger?.log(`Could not read typeshed stdlib VERSIONS file: '${JSON.stringify(e)}'`); + } + } + + return versionRangeMap; + } + + private _getThirdPartyTypeshedPackagePaths( + moduleDescriptor: ImportedModuleDescriptor, + importLogger?: ImportLogger, + includeMatchOnly = true + ): Uri[] | undefined { + const typeshedPath = this._getThirdPartyTypeshedPath(this._configOptions.typeshedPath, importLogger); + + if (!this._cachedTypeshedThirdPartyPackagePaths) { + this._buildTypeshedThirdPartyPackageMap(typeshedPath); + } + + const firstNamePart = moduleDescriptor.nameParts.length > 0 ? moduleDescriptor.nameParts[0] : ''; + if (includeMatchOnly) { + return this._cachedTypeshedThirdPartyPackagePaths!.get(firstNamePart); + } + + if (firstNamePart) { + return flatten( + getMapValues(this._cachedTypeshedThirdPartyPackagePaths!, (k) => k.startsWith(firstNamePart)) + ); + } + + return []; + } + + private _getThirdPartyTypeshedPackageRoots(importLogger?: ImportLogger) { + const typeshedPath = this._getThirdPartyTypeshedPath(this._configOptions.typeshedPath, importLogger); + + if (!this._cachedTypeshedThirdPartyPackagePaths) { + this._buildTypeshedThirdPartyPackageMap(typeshedPath); + } + + return this._cachedTypeshedThirdPartyPackageRoots!; + } + + private _getTypeshedRoot(customTypeshedPath: Uri | undefined, importLogger?: ImportLogger) { + if (this._cachedTypeshedRoot === undefined) { + let typeshedPath = undefined; + + // Did the user specify a typeshed path? If not, we'll look in the + // python search paths, then in the typeshed-fallback directory. 
+ if (customTypeshedPath) { + if (this.dirExistsCached(customTypeshedPath)) { + typeshedPath = customTypeshedPath; + } + } + + // If typeshed directory wasn't found in other locations, use the fallback. + if (!typeshedPath) { + typeshedPath = PythonPathUtils.getTypeShedFallbackPath(this.fileSystem) ?? Uri.empty(); + } + + this._cachedTypeshedRoot = typeshedPath; + } + + return this._cachedTypeshedRoot.isEmpty() ? undefined : this._cachedTypeshedRoot; + } + + private _getTypeshedSubdirectory( + isStdLib: boolean, + customTypeshedPath: Uri | undefined, + importLogger?: ImportLogger + ) { + // See if we have it cached. + if (isStdLib) { + if (this._cachedTypeshedStdLibPath !== undefined) { + return this._cachedTypeshedStdLibPath; + } + } else { + if (this._cachedTypeshedThirdPartyPath !== undefined) { + return this._cachedTypeshedThirdPartyPath; + } + } + + let typeshedPath = this._getTypeshedRoot(customTypeshedPath, importLogger); + if (typeshedPath === undefined) { + return undefined; + } + + typeshedPath = PythonPathUtils.getTypeshedSubdirectory(typeshedPath, isStdLib); + if (!this.dirExistsCached(typeshedPath)) { + return undefined; + } + + // Cache the results. + if (isStdLib) { + this._cachedTypeshedStdLibPath = typeshedPath; + } else { + this._cachedTypeshedThirdPartyPath = typeshedPath; + } + + return typeshedPath; + } + + private _resolveRelativeImport( + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor, + importName: string, + importLogger?: ImportLogger + ): ImportResult | undefined { + importLogger?.log('Attempting to resolve relative import'); + + // Determine which search path this file is part of. + const directory = getDirectoryLeadingDotsPointsTo(sourceFileUri.getDirectory(), moduleDescriptor.leadingDots); + if (!directory) { + importLogger?.log(`Invalid relative path '${importName}'`); + return undefined; + } + + // Now try to match the module parts from the current directory location. 
+ const absImport = this.resolveAbsoluteImport( + sourceFileUri, + directory, + execEnv, + moduleDescriptor, + importName, + importLogger, + /* allowPartial */ false, + /* allowNativeLib */ true + ); + + if (absImport && absImport.isStubFile) { + // If we found a stub for a relative import, only search + // the same folder for the real module. Otherwise, it will + // error out on runtime. + absImport.nonStubImportResult = this.resolveAbsoluteImport( + sourceFileUri, + directory, + execEnv, + moduleDescriptor, + importName, + importLogger, + /* allowPartial */ false, + /* allowNativeLib */ true, + /* useStubPackage */ false, + /* allowPyi */ false + ) ?? { + importName, + isRelative: true, + isImportFound: false, + isPartlyResolved: false, + isNamespacePackage: false, + isStubPackage: false, + isInitFilePresent: false, + resolvedUris: [], + importType: ImportType.Local, + isStubFile: false, + isNativeLib: false, + implicitImports: undefined, + filteredImplicitImports: undefined, + nonStubImportResult: undefined, + }; + } + + return absImport; + } + + private _getCompletionSuggestionsRelative( + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor, + suggestions: Map + ) { + // Determine which search path this file is part of. + const directory = getDirectoryLeadingDotsPointsTo(sourceFileUri.getDirectory(), moduleDescriptor.leadingDots); + if (!directory) { + return; + } + + // Now try to match the module parts from the current directory location. + this._getCompletionSuggestionsAbsolute(sourceFileUri, execEnv, directory, moduleDescriptor, suggestions); + } + + private _getFilesInDirectory(dirPath: Uri): Uri[] { + const cachedValue = this._cachedFilesForPath.get(dirPath.key); + if (cachedValue) { + return cachedValue; + } + + let newCacheValue: Uri[] = []; + try { + const entriesInDir = this.readdirEntriesCached(dirPath); + const filesInDir: Dirent[] = []; + + // Add any files or symbolic links that point to files. 
+ entriesInDir.entries.forEach((f) => { + if (f.isFile()) { + filesInDir.push(f); + } else if (f.isSymbolicLink() && tryStat(this.fileSystem, dirPath.combinePaths(f.name))?.isFile()) { + filesInDir.push(f); + } + }); + + newCacheValue = filesInDir.map((f) => dirPath.combinePaths(f.name)); + } catch { + newCacheValue = []; + } + + this._cachedFilesForPath.set(dirPath.key, newCacheValue); + return newCacheValue; + } + + private _getCompletionSuggestionsAbsolute( + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + rootPath: Uri, + moduleDescriptor: ImportedModuleDescriptor, + suggestions: Map, + strictOnly = true + ) { + // Starting at the specified path, walk the file system to find the + // specified module. + let dirPath = rootPath; + + // Copy the nameParts into a new directory and add an extra empty + // part if there is a trailing dot. + const nameParts = moduleDescriptor.nameParts.map((name) => name); + if (moduleDescriptor.hasTrailingDot) { + nameParts.push(''); + } + + // We need to track this since a module might be resolvable using relative path + // but can't resolved by absolute path. + const leadingDots = moduleDescriptor.leadingDots; + const parentNameParts = nameParts.slice(0, -1); + + // Handle the case where the user has typed the first + // dot (or multiple) in a relative path. + if (nameParts.length === 0) { + this._addFilteredSuggestionsAbsolute( + sourceFileUri, + execEnv, + dirPath, + '', + suggestions, + leadingDots, + parentNameParts, + strictOnly + ); + } else { + for (let i = 0; i < nameParts.length; i++) { + // Provide completions only if we're on the last part + // of the name. 
+ if (i === nameParts.length - 1) { + this._addFilteredSuggestionsAbsolute( + sourceFileUri, + execEnv, + dirPath, + nameParts[i], + suggestions, + leadingDots, + parentNameParts, + strictOnly + ); + } + + dirPath = dirPath.combinePaths(nameParts[i]); + if (!this.dirExistsCached(dirPath)) { + break; + } + } + } + } + + private _addFilteredSuggestionsAbsolute( + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + currentPath: Uri, + filter: string, + suggestions: Map, + leadingDots: number, + parentNameParts: string[], + strictOnly: boolean + ) { + // Enumerate all of the files and directories in the path, expanding links. + const entries = getFileSystemEntriesFromDirEntries( + this.readdirEntriesCached(currentPath).entries.values(), + this.fileSystem, + currentPath + ); + + entries.files.forEach((file) => { + // Strip multi-dot extensions to handle file names like "foo.cpython-32m.so". We want + // to detect the ".so" but strip off the entire ".cpython-32m.so" extension. + const fileWithoutExtension = file.stripAllExtensions().fileName; + + if (ImportResolver.isSupportedImportFile(file)) { + if (fileWithoutExtension === '__init__') { + return; + } + + if (filter && !StringUtils.isPatternInSymbol(filter, fileWithoutExtension)) { + return; + } + + if ( + !this._isUniqueValidSuggestion(fileWithoutExtension, suggestions) || + !this._isResolvableSuggestion( + fileWithoutExtension, + leadingDots, + parentNameParts, + sourceFileUri, + execEnv, + strictOnly + ) + ) { + return; + } + + suggestions.set(fileWithoutExtension, file); + } + }); + + entries.directories.forEach((dir) => { + const dirSuggestion = dir.fileName; + if (filter && !dirSuggestion.startsWith(filter)) { + return; + } + + if ( + !this._isUniqueValidSuggestion(dirSuggestion, suggestions) || + !this._isResolvableSuggestion( + dirSuggestion, + leadingDots, + parentNameParts, + sourceFileUri, + execEnv, + strictOnly + ) + ) { + return; + } + + const initPyiPath = dir.initPyiUri; + if 
(this.fileExistsCached(initPyiPath)) { + suggestions.set(dirSuggestion, initPyiPath); + return; + } + + const initPyPath = dir.initPyUri; + if (this.fileExistsCached(initPyPath)) { + suggestions.set(dirSuggestion, initPyPath); + return; + } + + // It is a namespace package. there is no corresponding module path. + suggestions.set(dirSuggestion, Uri.empty()); + }); + } + + // Fix for editable installed submodules where the suggested directory was a namespace directory that wouldn't resolve. + // only used for absolute imports + private _isResolvableSuggestion( + name: string, + leadingDots: number, + parentNameParts: string[], + sourceFileUri: Uri, + execEnv: ExecutionEnvironment, + strictOnly: boolean + ) { + // We always resolve names based on sourceFileUri. + const moduleDescriptor: ImportedModuleDescriptor = { + leadingDots: leadingDots, + nameParts: [...parentNameParts, name], + importedSymbols: new Set(), + }; + + // Make sure we don't use parent folder resolution when checking whether the given name is resolvable. + let importResult: ImportResult | undefined; + if (strictOnly) { + const importName = formatImportName(moduleDescriptor); + importResult = this._resolveImportStrict(importName, sourceFileUri, execEnv, moduleDescriptor); + } else { + importResult = this.resolveImportInternal(sourceFileUri, execEnv, moduleDescriptor); + } + + if (importResult && importResult.isImportFound) { + // Check the import isn't for a private or protected module. If it is, then + // only allow it if there's no py.typed file. + if (!SymbolNameUtils.isPrivateOrProtectedName(name) || importResult.pyTypedInfo === undefined) { + return true; + } + } + return false; + } + + private _isUniqueValidSuggestion(suggestionToAdd: string, suggestions: Map) { + if (suggestions.has(suggestionToAdd)) { + return false; + } + + // Don't add directories with illegal module names. 
+ if (/[.-]/.test(suggestionToAdd)) { + return false; + } + + // Don't add directories with dunder names like "__pycache__". + if (isDunderName(suggestionToAdd) && suggestionToAdd !== '__future__') { + return false; + } + + return true; + } + + // Retrieves the pytyped info for a directory if it exists. This is a small perf optimization + // that allows skipping the search when the pytyped file doesn't exist. + private _getPyTypedInfo(filePath: Uri): PyTypedInfo | undefined { + if (!this.fileExistsCached(filePath.pytypedUri)) { + return undefined; + } + + return getPyTypedInfoForPyTypedFile(this.fileSystem, filePath.pytypedUri); + } + + private _findAndResolveNativeModule( + fileDirectory: Uri, + dirPath: Uri, + execEnv: ExecutionEnvironment, + importName: string, + moduleDescriptor: ImportedModuleDescriptor, + importLogger: ImportLogger | undefined, + resolvedPaths: Uri[] + ): boolean { + let isNativeLib = false; + + if (!execEnv.skipNativeLibraries && this.dirExistsCached(fileDirectory)) { + const filesInDir = this._getFilesInDirectory(fileDirectory); + const dirName = dirPath.fileName; + const nativeLibPath = filesInDir.find((f) => this._isNativeModuleFileName(dirName, f)); + + if (nativeLibPath) { + // Try resolving native library to a custom stub. + isNativeLib = this._resolveNativeModuleWithStub( + nativeLibPath, + execEnv, + importName, + moduleDescriptor, + importLogger, + resolvedPaths + ); + + if (isNativeLib) { + importLogger?.log(`Resolved with native lib '${nativeLibPath.toUserVisibleString()}'`); + } + } + } + + return isNativeLib; + } + + private _resolveNativeModuleWithStub( + nativeLibPath: Uri, + execEnv: ExecutionEnvironment, + importName: string, + moduleDescriptor: ImportedModuleDescriptor, + importLogger: ImportLogger | undefined, + resolvedPaths: Uri[] + ): boolean { + let moduleFullName = importName; + + if (moduleDescriptor.leadingDots > 0) { + // Relative path. Convert `.mtrand` to `numpy.random.mtrand` based on search path. 
+ const info = this.getModuleNameForImport(nativeLibPath, execEnv); + moduleFullName = info.moduleName.length > 0 ? info.moduleName : moduleFullName; + } + + const compiledStubPath = this.resolveNativeImportEx(nativeLibPath, moduleFullName, importLogger); + if (compiledStubPath) { + importLogger?.log(`Resolved native import ${importName} with stub '${compiledStubPath}'`); + resolvedPaths.push(compiledStubPath); + return false; // Resolved to a stub. + } + + importLogger?.log(`Resolved import with file '${nativeLibPath}'`); + resolvedPaths.push(nativeLibPath); + return true; + } + + private _isNativeModuleFileName(moduleName: string, fileUri: Uri): boolean { + // Strip off the final file extension and the part of the file name + // that excludes all (multi-part) file extensions. This allows us to + // handle file names like "foo.cpython-32m.so". + const fileExtension = fileUri.lastExtension.toLowerCase(); + const withoutExtension = stripFileExtension(fileUri.fileName, /* multiDotExtension */ true); + return ( + _isNativeModuleFileExtension(fileExtension) && equateStringsCaseInsensitive(moduleName, withoutExtension) + ); + } + + private _tryWalkUp(current: Uri | undefined): Uri | undefined { + if (!current || current.isEmpty() || current.isRoot()) { + return undefined; + } + + // Ensure we don't go around forever even if isRoot returns false. 
+ const next = current.resolvePaths('..'); + if (next.equals(current)) { + return undefined; + } + return next; + } + + private _shouldWalkUp(current: Uri | undefined, root: Uri, execEnv: ExecutionEnvironment) { + return ( + current && + !current.isEmpty() && + (current.isChild(root) || (current.equals(root) && isDefaultWorkspace(execEnv.root))) + ); + } +} + +export type ImportResolverFactory = ( + serviceProvider: ServiceProvider, + options: ConfigOptions, + host: Host +) => ImportResolver; + +export function formatImportName(moduleDescriptor: ImportedModuleDescriptor) { + return '.'.repeat(moduleDescriptor.leadingDots) + moduleDescriptor.nameParts.join('.'); +} + +export function getParentImportResolutionRoot(sourceFileUri: Uri, executionRoot: Uri | undefined): Uri { + if (!isDefaultWorkspace(executionRoot)) { + return executionRoot!; + } + + return sourceFileUri.getDirectory(); +} + +export function getModuleNameFromPath( + containerPath: Uri, + fileUri: Uri, + stripTopContainerDir = false +): string | undefined { + const moduleNameInfo = _getModuleNameInfoFromPath(containerPath, fileUri, stripTopContainerDir); + if (!moduleNameInfo || moduleNameInfo.containsInvalidCharacters) { + return undefined; + } + + return moduleNameInfo.moduleName; +} + +function _getModuleNameInfoFromPath( + containerPath: Uri, + fileUri: Uri, + stripTopContainerDir = false +): ModuleNameInfoFromPath | undefined { + if (!fileUri.startsWith(containerPath)) { + return undefined; + } + + const parts = Array.from(containerPath.getRelativePathComponents(fileUri)); + if (parts.length > 0) { + const origLastPart = parts[parts.length - 1]; + + // Strip the file extension from the last part. + let newLastPart = stripFileExtension(origLastPart); + + // If module is native, strip platform part, such as 'cp36-win_amd64' in 'mtrand.cp36-win_amd64'. 
+ if (_isNativeModuleFileExtension(getFileExtension(origLastPart))) { + newLastPart = stripFileExtension(newLastPart); + } + + parts[parts.length - 1] = newLastPart; + + // Strip off the '/__init__' if it's present. + if (newLastPart === '__init__') { + parts.pop(); + } + } + + if (stripTopContainerDir) { + if (parts.length === 0) { + return undefined; + } + parts.shift(); + } + + if (parts.length === 0) { + return undefined; + } + + // Handle the case where the symbol was resolved to a stubs package + // rather than the real package. We'll strip off the "-stubs" suffix + // in this case. + if (parts[0].endsWith(stubsSuffix)) { + parts[0] = parts[0].substr(0, parts[0].length - stubsSuffix.length); + } + + // Check whether parts contains invalid characters. + const containsInvalidCharacters = parts.some((p) => !Tokenizer.isPythonIdentifier(p)); + + return { + moduleName: parts.join('.'), + containsInvalidCharacters, + }; +} + +function _isNativeModuleFileExtension(fileExtension: string): boolean { + return supportedNativeLibExtensions.some((ext) => ext === fileExtension); +} + +export function isDefaultWorkspace(uri: Uri | undefined) { + return !uri || uri.isEmpty() || Uri.isDefaultWorkspace(uri); +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/importResult.ts b/python-parser/packages/pyright-internal/src/analyzer/importResult.ts new file mode 100644 index 00000000..c1a2d4ef --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/importResult.ts @@ -0,0 +1,108 @@ +/* + * importResult.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Interface that describes the output of the import resolver. 
+ */ + +import { Uri } from '../common/uri/uri'; +import { PyTypedInfo } from './pyTypedUtils'; + +export const enum ImportType { + BuiltIn, + ThirdParty, + Local, +} + +export interface ImplicitImport { + isStubFile: boolean; + isNativeLib: boolean; + name: string; + uri: Uri; + pyTypedInfo?: PyTypedInfo | undefined; +} + +export interface ImportResult { + // The formatted import name. Useful for error messages. + importName: string; + + // Indicates whether the import name was relative (starts + // with one or more dots). + isRelative: boolean; + + // True if import was resolved to a module or file. + isImportFound: boolean; + + // The specific submodule was not found but a part of + // its path was resolved. + isPartlyResolved: boolean; + + // True if the import refers to a namespace package (a + // folder without an __init__.py(i) file at the last level). + // To determine if any intermediate level is a namespace + // package, look at the resolvedPaths array. Namespace package + // entries will have an empty string for the resolvedPath. + isNamespacePackage: boolean; + + // True if there is an __init__.py(i) file in the final + // directory resolved. + isInitFilePresent: boolean; + + // Did it resolve to a stub within a stub package? + isStubPackage: boolean; + + // If isImportFound is false, may contain strings that help + // diagnose the import resolution failure. + importFailureInfo?: string[]; + + // Type of import (built-in, local, third-party). + importType: ImportType; + + // The resolved absolute paths for each of the files in the module name. + // Parts that have no files (e.g. directories within a namespace + // package) have empty strings for a resolvedPath. + resolvedUris: Uri[]; + + // For absolute imports, the search path that was used to resolve + // (or partially resolve) the module. + searchPath?: Uri; + + // True if resolved file is a type hint (.pyi) file rather than + // a python (.py) file. 
+ isStubFile: boolean; + + // True if resolved file is a native DLL. + isNativeLib: boolean; + + // True if the resolved file is a type hint (.pyi) file that comes + // from typeshed in the stdlib or third-party stubs. + isStdlibTypeshedFile?: boolean; + isThirdPartyTypeshedFile?: boolean; + + // True if the resolved file is a type hint (.pyi) file that comes + // from the configured typings directory. + isLocalTypingsFile?: boolean; + + // List of files within the final resolved path that are implicitly + // imported as part of the package - used for both traditional and + // namespace packages. + implicitImports?: Map; + + // Implicit imports that have been filtered to include only + // those symbols that are explicitly imported in a "from x import y" + // statement. + filteredImplicitImports?: Map; + + // If resolved from a type hint (.pyi), then store the import result + // from .py here. + nonStubImportResult?: ImportResult | undefined; + + // Is there a "py.typed" file (as described in PEP 561) present in + // the package that was used to resolve the import? + pyTypedInfo?: PyTypedInfo | undefined; + + // The directory of the package, if found. + packageDirectory?: Uri | undefined; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/importStatementUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/importStatementUtils.ts new file mode 100644 index 00000000..02785ea4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/importStatementUtils.ts @@ -0,0 +1,1004 @@ +/* + * importStatementUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Utility routines for summarizing and manipulating + * import statements in a Python source file. 
+ */ + +import { CancellationToken } from 'vscode-languageserver'; + +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { addIfUnique, appendArray, createMapFromItems } from '../common/collectionUtils'; +import { ConfigOptions } from '../common/configOptions'; +import { TextEditAction } from '../common/editAction'; +import { ReadOnlyFileSystem } from '../common/fileSystem'; +import { convertOffsetToPosition, convertPositionToOffset } from '../common/positionUtils'; +import { compareStringsCaseSensitive } from '../common/stringUtils'; +import { Position, Range, TextRange } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { isFile } from '../common/uri/uriUtils'; +import { + ImportAsNode, + ImportFromAsNode, + ImportFromNode, + ImportNode, + ModuleNameNode, + ModuleNode, + ParseNode, + ParseNodeType, +} from '../parser/parseNodes'; +import { ParseFileResults } from '../parser/parser'; +import { TokenType } from '../parser/tokenizerTypes'; +import * as AnalyzerNodeInfo from './analyzerNodeInfo'; +import { ImportLookupResult } from './analyzerFileInfo'; +import { ModuleNameAndType } from './importResolver'; +import { ImportResult, ImportType } from './importResult'; +import { getTokenAfter, getTokenAt } from './parseTreeUtils'; +import * as SymbolNameUtils from './symbolNameUtils'; + +const underscoreRegEx = /_/g; +const indentTextRegEx = /^\s*$/; + +export interface ImportStatement { + node: ImportNode | ImportFromNode; + subnode?: ImportAsNode; + importResult: ImportResult | undefined; + resolvedPath: Uri | undefined; + moduleName: string; + followsNonImportStatement: boolean; +} + +export interface ImportStatements { + orderedImports: ImportStatement[]; + mapByFilePath: Map; + implicitImports?: Map; +} + +export const enum ImportGroup { + // The ordering here is important because this is the order + // in which PEP8 specifies that imports should be ordered. 
+ BuiltIn = 0, + ThirdParty = 1, + Local = 2, + LocalRelative = 3, +} + +export interface ImportNameInfo { + name?: string; + alias?: string; +} + +export interface ImportNameWithModuleInfo extends ImportNameInfo { + module: ModuleNameAndType; + nameForImportFrom?: string; +} + +export interface ModuleNameInfo { + name: string; + nameForImportFrom?: string; +} + +// Determines which import grouping should be used when sorting imports. +export function getImportGroup(statement: ImportStatement): ImportGroup { + if (statement.importResult) { + if (statement.importResult.importType === ImportType.BuiltIn) { + return ImportGroup.BuiltIn; + } else if ( + statement.importResult.importType === ImportType.ThirdParty || + statement.importResult.isLocalTypingsFile + ) { + return ImportGroup.ThirdParty; + } + + if (statement.importResult.isRelative) { + return ImportGroup.LocalRelative; + } + + return ImportGroup.Local; + } else { + return ImportGroup.Local; + } +} + +// Compares sort order of two import statements. +export function compareImportStatements(a: ImportStatement, b: ImportStatement) { + const aImportGroup = getImportGroup(a); + const bImportGroup = getImportGroup(b); + + if (aImportGroup < bImportGroup) { + return -1; + } else if (aImportGroup > bImportGroup) { + return 1; + } + + return a.moduleName < b.moduleName ? -1 : 1; +} + +// Looks for top-level 'import' and 'import from' statements and provides +// an ordered list and a map (by file path). 
+export function getTopLevelImports(parseTree: ModuleNode, includeImplicitImports = false): ImportStatements { + const localImports: ImportStatements = { + orderedImports: [], + mapByFilePath: new Map(), + }; + + let followsNonImportStatement = false; + let foundFirstImportStatement = false; + + parseTree.d.statements.forEach((statement) => { + if (statement.nodeType === ParseNodeType.StatementList) { + statement.d.statements.forEach((subStatement) => { + if (subStatement.nodeType === ParseNodeType.Import) { + foundFirstImportStatement = true; + _processImportNode(subStatement, localImports, followsNonImportStatement); + followsNonImportStatement = false; + } else if (subStatement.nodeType === ParseNodeType.ImportFrom) { + foundFirstImportStatement = true; + _processImportFromNode( + subStatement, + localImports, + followsNonImportStatement, + includeImplicitImports + ); + followsNonImportStatement = false; + } else { + followsNonImportStatement = foundFirstImportStatement; + } + }); + } else { + followsNonImportStatement = foundFirstImportStatement; + } + }); + + return localImports; +} + +// Return import symbol type to allow sorting similar to isort +// CONSTANT_VARIABLE, CamelCaseClass, variable_or_function +function _getImportSymbolNameType(symbolName: string): number { + if (SymbolNameUtils.isConstantName(symbolName)) { + return 0; + } + if (SymbolNameUtils.isTypeAliasName(symbolName)) { + return 1; + } + return 2; +} + +export function getTextEditsForAutoImportSymbolAddition( + importNameInfo: ImportNameInfo | ImportNameInfo[], + importStatement: ImportStatement, + parseFileResults: ParseFileResults +): TextEditAction[] { + const additionEdits: AdditionEdit[] = []; + if ( + !importStatement.node || + importStatement.node.nodeType !== ParseNodeType.ImportFrom || + importStatement.node.d.isWildcardImport + ) { + return additionEdits; + } + + // Make sure we're not attempting to auto-import a symbol that + // already exists in the import list. 
+ const importFrom = importStatement.node; + importNameInfo = (Array.isArray(importNameInfo) ? importNameInfo : [importNameInfo]).filter( + (info) => + !!info.name && + !importFrom.d.imports.some( + (importAs) => importAs.d.name.d.value === info.name && importAs.d.alias?.d.value === info.alias + ) + ); + + if (importNameInfo.length === 0) { + return additionEdits; + } + + for (const nameInfo of importNameInfo) { + additionEdits.push( + _getTextEditsForAutoImportSymbolAddition( + nameInfo.name!, + nameInfo.alias, + importStatement.node, + parseFileResults + ) + ); + } + + // Merge edits with the same insertion point. + const editsMap = createMapFromItems(additionEdits, (e) => Range.print(e.range)); + const textEditList: TextEditAction[] = []; + for (const editGroup of editsMap.values()) { + if (editGroup.length === 1) { + textEditList.push(editGroup[0]); + } else { + textEditList.push({ + range: editGroup[0].range, + replacementText: editGroup + .sort((a, b) => _compareImportNames(a.importName, b.importName)) + .map((e) => e.replacementText) + .join(''), + }); + } + } + + return textEditList; +} + +function _compareImportNames(name1: string, name2: string) { + // Compare import name by import symbol type and then alphabetical order. + // Match isort default behavior. + const name1Type = _getImportSymbolNameType(name1); + const name2Type = _getImportSymbolNameType(name2); + const compare = name1Type - name2Type; + if (compare !== 0) { + return compare; + } + + // isort will prefer '_' over alphanumerical chars + // This can't be reproduced by a normal string compare in TypeScript, since '_' > 'A'. + // Replace all '_' with '=' which guarantees '=' < 'A'. + // Safe to do as '=' is an invalid char in Python names. 
+ const name1toCompare = name1.replace(underscoreRegEx, '='); + const name2toCompare = name2.replace(underscoreRegEx, '='); + return compareStringsCaseSensitive(name1toCompare, name2toCompare); +} + +interface AdditionEdit extends TextEditAction { + importName: string; +} + +function _getTextEditsForAutoImportSymbolAddition( + importName: string, + alias: string | undefined, + node: ImportFromNode, + parseFileResults: ParseFileResults +): AdditionEdit { + // Scan through the import symbols to find the right insertion point, + // assuming we want to keep the imports alphabetized. + let priorImport: ImportFromAsNode | undefined; + for (const curImport of node.d.imports) { + if (_compareImportNames(curImport.d.name.d.value, importName) > 0) { + break; + } + + priorImport = curImport; + } + + // Are import symbols formatted one per line or multiple per line? We + // will honor the existing formatting. We'll use a heuristic to determine + // whether symbols are one per line or multiple per line. + // from x import a, b, c + // or + // from x import ( + // a + // ) + let useOnePerLineFormatting = false; + let indentText = ''; + if (node.d.imports.length > 0) { + const importStatementPos = convertOffsetToPosition(node.start, parseFileResults.tokenizerOutput.lines); + const firstSymbolPos = convertOffsetToPosition(node.d.imports[0].start, parseFileResults.tokenizerOutput.lines); + const secondSymbolPos = + node.d.imports.length > 1 + ? convertOffsetToPosition(node.d.imports[1].start, parseFileResults.tokenizerOutput.lines) + : undefined; + + if ( + firstSymbolPos.line > importStatementPos.line && + (secondSymbolPos === undefined || secondSymbolPos.line > firstSymbolPos.line) + ) { + const firstSymbolLineRange = parseFileResults.tokenizerOutput.lines.getItemAt(firstSymbolPos.line); + + // Use the same combination of spaces or tabs to match + // existing formatting. 
+ indentText = parseFileResults.text.substr(firstSymbolLineRange.start, firstSymbolPos.character); + + // Is the indent text composed of whitespace only? + if (indentTextRegEx.test(indentText)) { + useOnePerLineFormatting = true; + } + } + } + + const insertionOffset = priorImport + ? TextRange.getEnd(priorImport) + : node.d.imports.length > 0 + ? node.d.imports[0].start + : node.start + node.length; + const insertionPosition = convertOffsetToPosition(insertionOffset, parseFileResults.tokenizerOutput.lines); + + const insertText = alias ? `${importName} as ${alias}` : `${importName}`; + let replacementText: string; + + if (useOnePerLineFormatting) { + const eol = parseFileResults.tokenizerOutput.predominantEndOfLineSequence; + replacementText = priorImport ? `,${eol}${indentText}${insertText}` : `${insertText},${eol}${indentText}`; + } else { + replacementText = priorImport ? `, ${insertText}` : `${insertText}, `; + } + + return { + range: { start: insertionPosition, end: insertionPosition }, + importName, + replacementText, + }; +} + +interface InsertionEdit { + range: Range; + preChange: string; + importStatement: string; + postChange: string; + importGroup: ImportGroup; +} + +export function getTextEditsForAutoImportInsertions( + importNameInfo: ImportNameWithModuleInfo[] | ImportNameWithModuleInfo, + importStatements: ImportStatements, + parseFileResults: ParseFileResults, + invocationPosition: Position +): TextEditAction[] { + const insertionEdits: InsertionEdit[] = []; + + importNameInfo = Array.isArray(importNameInfo) ? importNameInfo : [importNameInfo]; + if (importNameInfo.length === 0) { + return []; + } + + const map = createMapFromItems(importNameInfo, (i) => `${i.module.moduleName}-${i.nameForImportFrom ?? 
''}`); + for (const importInfo of map.values()) { + appendArray( + insertionEdits, + _getInsertionEditsForAutoImportInsertion( + importInfo, + { name: importInfo[0].module.moduleName, nameForImportFrom: importInfo[0].nameForImportFrom }, + importStatements, + getImportGroupFromModuleNameAndType(importInfo[0].module), + parseFileResults, + invocationPosition + ) + ); + } + + return _convertInsertionEditsToTextEdits(parseFileResults, insertionEdits); +} + +export function getTextEditsForAutoImportInsertion( + importNameInfo: ImportNameInfo[] | ImportNameInfo, + moduleNameInfo: ModuleNameInfo, + importStatements: ImportStatements, + importGroup: ImportGroup, + parseFileResults: ParseFileResults, + invocationPosition: Position +): TextEditAction[] { + const insertionEdits = _getInsertionEditsForAutoImportInsertion( + importNameInfo, + moduleNameInfo, + importStatements, + importGroup, + parseFileResults, + invocationPosition + ); + + return _convertInsertionEditsToTextEdits(parseFileResults, insertionEdits); +} + +function _convertInsertionEditsToTextEdits(parseFileResults: ParseFileResults, insertionEdits: InsertionEdit[]) { + if (insertionEdits.length < 2) { + return insertionEdits.map((e) => getTextEdit(e)); + } + + // Merge edits with the same insertion point. 
+ const editsMap = [...createMapFromItems(insertionEdits, (e) => `${e.importGroup} ${Range.print(e.range)}`)] + .sort((a, b) => compareStringsCaseSensitive(a[0], b[0])) + .map((v) => v[1]); + + const textEditList: TextEditAction[] = []; + for (const editGroup of editsMap) { + if (editGroup.length === 1) { + textEditList.push(getTextEdit(editGroup[0])); + } else { + textEditList.push({ + range: editGroup[0].range, + replacementText: + editGroup[0].preChange + + editGroup + .map((e) => e.importStatement) + .sort((a, b) => compareImports(a, b)) + .join(parseFileResults.tokenizerOutput.predominantEndOfLineSequence) + + editGroup[0].postChange, + }); + } + } + + return textEditList; + + function getTextEdit(edit: InsertionEdit): TextEditAction { + return { range: edit.range, replacementText: edit.preChange + edit.importStatement + edit.postChange }; + } + + function compareImports(a: string, b: string) { + const isImport1 = a.startsWith('import'); + const isImport2 = b.startsWith('import'); + + if (isImport1 === isImport2) { + return a < b ? -1 : 1; + } + + return isImport1 ? -1 : 1; + } +} + +function _getInsertionEditsForAutoImportInsertion( + importNameInfo: ImportNameInfo[] | ImportNameInfo, + moduleNameInfo: ModuleNameInfo, + importStatements: ImportStatements, + importGroup: ImportGroup, + parseFileResults: ParseFileResults, + invocationPosition: Position +): InsertionEdit[] { + const insertionEdits: InsertionEdit[] = []; + + importNameInfo = Array.isArray(importNameInfo) ? importNameInfo : [importNameInfo]; + if (importNameInfo.length === 0) { + // This will let "import [moduleName]" to be generated. + importNameInfo.push({}); + } + + // We need to emit a new 'from import' statement if symbolName is given. otherwise, use 'import' statement. + const map = createMapFromItems(importNameInfo, (i) => (i.name ? 'from' : 'import')); + + // Add import statements first. 
+ const imports = map.get('import'); + if (imports) { + appendToEdits(imports, (names) => `import ${names.join(', ')}`); + } + + // Add from import statements next. + const fromImports = map.get('from'); + if (fromImports) { + appendToEdits( + fromImports, + (names) => `from ${moduleNameInfo.nameForImportFrom ?? moduleNameInfo.name} import ${names.join(', ')}` + ); + } + + return insertionEdits; + + function getImportAsText(nameInfo: ImportNameInfo, moduleName: string) { + const importText = nameInfo.name ? nameInfo.name : moduleName; + return { + sortText: importText, + text: nameInfo.alias ? `${importText} as ${nameInfo.alias}` : importText, + }; + } + + function appendToEdits(importNameInfo: ImportNameInfo[], importStatementGetter: (n: string[]) => string) { + const importNames = importNameInfo + .map((i) => getImportAsText(i, moduleNameInfo.name)) + .sort((a, b) => _compareImportNames(a.sortText, b.sortText)) + .reduce((set, v) => addIfUnique(set, v.text), [] as string[]); + + insertionEdits.push( + _getInsertionEditForAutoImportInsertion( + importStatementGetter(importNames), + importStatements, + moduleNameInfo.name, + importGroup, + parseFileResults, + invocationPosition + ) + ); + } +} + +function _getInsertionEditForAutoImportInsertion( + importStatement: string, + importStatements: ImportStatements, + moduleName: string, + importGroup: ImportGroup, + parseFileResults: ParseFileResults, + invocationPosition: Position +): InsertionEdit { + let preChange = ''; + let postChange = ''; + + let insertionPosition: Position; + const invocation = convertPositionToOffset(invocationPosition, parseFileResults.tokenizerOutput.lines)!; + if (importStatements.orderedImports.length > 0 && invocation > importStatements.orderedImports[0].node.start) { + let insertBefore = true; + let insertionImport = importStatements.orderedImports[0]; + + // Find a good spot to insert the new import statement. 
Follow + // the PEP8 standard sorting order whereby built-in imports are + // followed by third-party, which are followed by local. + let prevImportGroup = ImportGroup.BuiltIn; + for (const curImport of importStatements.orderedImports) { + // If the import was resolved, use its import type. If it wasn't + // resolved, assume that it's the same import type as the previous + // one. + const curImportGroup: ImportGroup = curImport.importResult ? getImportGroup(curImport) : prevImportGroup; + + if (importGroup < curImportGroup) { + if (!insertBefore && prevImportGroup < importGroup) { + // Add an extra line to create a new group. + preChange = parseFileResults.tokenizerOutput.predominantEndOfLineSequence + preChange; + } + break; + } + + if (importGroup === curImportGroup && curImport.moduleName > moduleName) { + insertBefore = true; + insertionImport = curImport; + break; + } + + // If we're about to hit the end of the import statements, don't go + // any further. + if (curImport.followsNonImportStatement) { + if (importGroup > prevImportGroup) { + // Add an extra line to create a new group. + preChange = parseFileResults.tokenizerOutput.predominantEndOfLineSequence + preChange; + } + break; + } + + // If this is the last import, see if we need to create a new group. + if (curImport === importStatements.orderedImports[importStatements.orderedImports.length - 1]) { + if (importGroup > curImportGroup) { + // Add an extra line to create a new group. + preChange = parseFileResults.tokenizerOutput.predominantEndOfLineSequence + preChange; + } + } + + // Are we starting a new group? 
+ if (!insertBefore && importGroup < prevImportGroup && importGroup === curImportGroup) { + insertBefore = true; + } else { + insertBefore = false; + } + + prevImportGroup = curImportGroup; + insertionImport = curImport; + } + + if (insertionImport) { + if (insertBefore) { + postChange = postChange + parseFileResults.tokenizerOutput.predominantEndOfLineSequence; + } else { + preChange = parseFileResults.tokenizerOutput.predominantEndOfLineSequence + preChange; + } + + insertionPosition = convertOffsetToPosition( + insertBefore ? insertionImport.node.start : TextRange.getEnd(insertionImport.node), + parseFileResults.tokenizerOutput.lines + ); + } else { + insertionPosition = { line: 0, character: 0 }; + } + } else { + // Insert at or near the top of the file. See if there's a doc string and + // copyright notice, etc. at the top. If so, move past those. + insertionPosition = { line: 0, character: 0 }; + let addNewLineBefore = false; + + for (const statement of parseFileResults.parserOutput.parseTree.d.statements) { + let stopHere = true; + if (statement.nodeType === ParseNodeType.StatementList && statement.d.statements.length === 1) { + const simpleStatement = statement.d.statements[0]; + + if (simpleStatement.nodeType === ParseNodeType.StringList) { + // Assume that it's a file header doc string. + stopHere = false; + } else if (simpleStatement.nodeType === ParseNodeType.Assignment) { + if (simpleStatement.d.leftExpr.nodeType === ParseNodeType.Name) { + if (SymbolNameUtils.isDunderName(simpleStatement.d.leftExpr.d.value)) { + // Assume that it's an assignment of __copyright__, __author__, etc. 
+ stopHere = false; + } + } + } + } + + if (stopHere) { + insertionPosition = convertOffsetToPosition(statement.start, parseFileResults.tokenizerOutput.lines); + addNewLineBefore = false; + break; + } else { + insertionPosition = convertOffsetToPosition( + statement.start + statement.length, + parseFileResults.tokenizerOutput.lines + ); + addNewLineBefore = true; + } + } + + postChange = + postChange + + parseFileResults.tokenizerOutput.predominantEndOfLineSequence + + parseFileResults.tokenizerOutput.predominantEndOfLineSequence; + if (addNewLineBefore) { + preChange = parseFileResults.tokenizerOutput.predominantEndOfLineSequence + preChange; + } else { + postChange = postChange + parseFileResults.tokenizerOutput.predominantEndOfLineSequence; + } + } + + const range = { start: insertionPosition, end: insertionPosition }; + return { range, preChange, importStatement, postChange, importGroup }; +} + +function _processImportNode(node: ImportNode, localImports: ImportStatements, followsNonImportStatement: boolean) { + node.d.list.forEach((importAsNode) => { + const importResult = AnalyzerNodeInfo.getImportInfo(importAsNode.d.module); + let resolvedPath: Uri | undefined; + + if (importResult && importResult.isImportFound) { + resolvedPath = importResult.resolvedUris[importResult.resolvedUris.length - 1]; + } + + const localImport: ImportStatement = { + node, + subnode: importAsNode, + importResult, + resolvedPath, + moduleName: formatModuleName(importAsNode.d.module), + followsNonImportStatement, + }; + + localImports.orderedImports.push(localImport); + + // Add it to the map. + if (resolvedPath && !resolvedPath.isEmpty()) { + // Don't overwrite existing import or import from statements + // because we always want to prefer 'import from' over 'import' + // in the map. 
+ if (!localImports.mapByFilePath.has(resolvedPath.key)) { + localImports.mapByFilePath.set(resolvedPath.key, localImport); + } + } + }); +} + +function _processImportFromNode( + node: ImportFromNode, + localImports: ImportStatements, + followsNonImportStatement: boolean, + includeImplicitImports: boolean +) { + const importResult = AnalyzerNodeInfo.getImportInfo(node.d.module); + let resolvedPath: Uri | undefined; + + if (importResult && importResult.isImportFound) { + resolvedPath = importResult.resolvedUris[importResult.resolvedUris.length - 1]; + } + + if (includeImplicitImports && importResult) { + localImports.implicitImports = localImports.implicitImports ?? new Map(); + + if (importResult.implicitImports) { + for (const implicitImport of importResult.implicitImports.values()) { + const importFromAs = node.d.imports.find((i) => i.d.name.d.value === implicitImport.name); + if (importFromAs) { + localImports.implicitImports.set(implicitImport.uri.key, importFromAs); + } + } + } + } + + const localImport: ImportStatement = { + node, + importResult, + resolvedPath, + moduleName: formatModuleName(node.d.module), + followsNonImportStatement, + }; + + localImports.orderedImports.push(localImport); + + // Add it to the map. + if (resolvedPath && !resolvedPath.isEmpty()) { + const prevEntry = localImports.mapByFilePath.get(resolvedPath.key); + // Overwrite existing import statements because we always want to prefer + // 'import from' over 'import'. Also, overwrite existing 'import from' if + // the module name is shorter. 
+ if ( + !prevEntry || + prevEntry.node.nodeType === ParseNodeType.Import || + prevEntry.moduleName.length > localImport.moduleName.length + ) { + localImports.mapByFilePath.set(resolvedPath.key, localImport); + } + } +} + +export function formatModuleName(node: ModuleNameNode): string { + let moduleName = ''; + for (let i = 0; i < node.d.leadingDots; i++) { + moduleName = moduleName + '.'; + } + + moduleName += node.d.nameParts.map((part) => part.d.value).join('.'); + + return moduleName; +} + +export function getContainingImportStatement(node: ParseNode | undefined, token: CancellationToken) { + while (node) { + throwIfCancellationRequested(token); + + if (node.nodeType === ParseNodeType.Import || node.nodeType === ParseNodeType.ImportFrom) { + break; + } + + node = node.parent; + } + + return node; +} + +export function getAllImportNames(node: ImportNode | ImportFromNode) { + if (node.nodeType === ParseNodeType.Import) { + const importNode = node as ImportNode; + return importNode.d.list; + } + + const importFromNode = node as ImportFromNode; + return importFromNode.d.imports; +} + +export function getImportGroupFromModuleNameAndType(moduleNameAndType: ModuleNameAndType): ImportGroup { + let importGroup = ImportGroup.Local; + if (moduleNameAndType.isLocalTypingsFile || moduleNameAndType.importType === ImportType.ThirdParty) { + importGroup = ImportGroup.ThirdParty; + } else if (moduleNameAndType.importType === ImportType.BuiltIn) { + importGroup = ImportGroup.BuiltIn; + } + + return importGroup; +} + +export function getTextRangeForImportNameDeletion( + parseFileResults: ParseFileResults, + nameNodes: ImportAsNode[] | ImportFromAsNode[], + ...nameNodeIndexToDelete: number[] +): TextRange[] { + const editSpans: TextRange[] = []; + for (const pair of getConsecutiveNumberPairs(nameNodeIndexToDelete)) { + const startNode = nameNodes[pair.start]; + const endNode = nameNodes[pair.end]; + + if (pair.start === 0 && nameNodes.length === pair.end + 1) { + // get span of 
whole statement. ex) "import [|A|]" or "import [|A, B|]" + editSpans.push(TextRange.fromBounds(startNode.start, TextRange.getEnd(endNode))); + } else if (pair.end === nameNodes.length - 1) { + // get span of "import A[|, B|]" or "import A[|, B, C|]" + const previousNode = nameNodes[pair.start - 1]; + editSpans.push( + ...getEditsPreservingFirstCommentAfterCommaIfExist(parseFileResults, previousNode, startNode, endNode) + ); + } else { + // get span of "import [|A, |]B" or "import [|A, B,|] C" + const start = startNode.start; + const length = nameNodes[pair.end + 1].start - start; + editSpans.push({ start, length }); + } + } + return editSpans; +} + +function getEditsPreservingFirstCommentAfterCommaIfExist( + parseFileResults: ParseFileResults, + previousNode: ParseNode, + startNode: ParseNode, + endNode: ParseNode +): TextRange[] { + const offsetOfPreviousNodeEnd = TextRange.getEnd(previousNode); + const startingToken = getTokenAt(parseFileResults.tokenizerOutput.tokens, startNode.start); + if (!startingToken || !startingToken.comments || startingToken.comments.length === 0) { + const length = TextRange.getEnd(endNode) - offsetOfPreviousNodeEnd; + return [{ start: offsetOfPreviousNodeEnd, length }]; + } + + const commaToken = getTokenAfter( + parseFileResults.tokenizerOutput.tokens, + TextRange.getEnd(previousNode), + (t) => t.type === TokenType.Comma + ); + if (!commaToken) { + const length = TextRange.getEnd(endNode) - offsetOfPreviousNodeEnd; + return [{ start: offsetOfPreviousNodeEnd, length }]; + } + + // We have code something like + // previousNode, #comment + // startNode, + // endNode + // + // Make sure we preserve #comment when deleting start/end nodes so we have + // previousNode #comment + // as final result. 
+ const lengthToComma = TextRange.getEnd(commaToken) - offsetOfPreviousNodeEnd; + const offsetToCommentEnd = TextRange.getEnd(startingToken.comments[startingToken.comments.length - 1]); + const length = TextRange.getEnd(endNode) - offsetToCommentEnd; + return [ + { start: offsetOfPreviousNodeEnd, length: lengthToComma }, + { start: offsetToCommentEnd, length }, + ]; +} + +function getConsecutiveNumberPairs(indices: number[]) { + if (indices.length === 0) { + return []; + } + + if (indices.length === 1) { + return [{ start: indices[0], end: indices[0] }]; + } + + const pairs: { start: number; end: number }[] = []; + + let start = indices[0]; + let current = start; + for (const index of indices) { + if (current === index) { + continue; + } + + if (current + 1 === index) { + current = index; + continue; + } + + pairs.push({ start, end: current }); + + start = index; + current = index; + } + + pairs.push({ start, end: current }); + return pairs; +} + +export function getRelativeModuleName( + fs: ReadOnlyFileSystem, + sourcePath: Uri, + targetPath: Uri, + configOptions: ConfigOptions, + ignoreFolderStructure = false, + sourceIsFile?: boolean +) { + let srcPath = sourcePath; + sourceIsFile = sourceIsFile !== undefined ? sourceIsFile : isFile(fs, sourcePath); + if (sourceIsFile) { + srcPath = sourcePath.getDirectory(); + } + + let symbolName: string | undefined; + let destPath = targetPath; + if ( + (configOptions.stubPath && destPath.isChild(configOptions.stubPath)) || + (configOptions.typeshedPath && destPath.isChild(configOptions.typeshedPath)) + ) { + // Always use absolute imports for files in these library-like directories. + return undefined; + } + if (sourceIsFile) { + destPath = targetPath.getDirectory(); + + const fileName = targetPath.stripAllExtensions().fileName; + if (fileName !== '__init__') { + // ex) src: a.py, dest: b.py -> ".b" will be returned. 
+ symbolName = fileName; + } else if (ignoreFolderStructure) { + // ex) src: nested1/nested2/__init__.py, dest: nested1/__init__.py -> "...nested1" will be returned + // like how it would return for sibling folder. + // + // if folder structure is not ignored, ".." will be returned + symbolName = destPath.fileName; + destPath = destPath.getDirectory(); + } + } + + const relativePaths = srcPath.getRelativePathComponents(destPath); + + // This assumes both file paths are under the same importing root. + // So this doesn't handle paths pointing to 2 different import roots. + // ex) user file A to library file B + let currentPaths = '.'; + for (let i = 0; i < relativePaths.length; i++) { + const relativePath = relativePaths[i]; + if (relativePath === '..') { + currentPaths += '.'; + } else { + currentPaths += relativePath; + } + + if (relativePath !== '..' && i !== relativePaths.length - 1) { + currentPaths += '.'; + } + } + + if (symbolName) { + currentPaths = + currentPaths[currentPaths.length - 1] === '.' ? currentPaths + symbolName : currentPaths + '.' + symbolName; + } + + return currentPaths; +} + +export function getDirectoryLeadingDotsPointsTo(fromDirectory: Uri, leadingDots: number) { + let currentDirectory = fromDirectory; + for (let i = 1; i < leadingDots; i++) { + if (currentDirectory.isRoot()) { + return undefined; + } + + currentDirectory = currentDirectory.getDirectory(); + } + + return currentDirectory; +} + +export function getResolvedFilePath(importResult: ImportResult | undefined) { + if (!importResult || !importResult.isImportFound || importResult.resolvedUris.length === 0) { + return undefined; + } + + if (importResult.resolvedUris.length === 1 && importResult.resolvedUris[0].equals(Uri.empty())) { + // Import is resolved to namespace package folder. + if (importResult.packageDirectory) { + return importResult.packageDirectory; + } + + // Absolute import is partially resolved from the path. 
+ if (importResult.searchPath) { + return importResult.searchPath; + } + + return undefined; + } + + // Regular case. + return importResult.resolvedUris[importResult.resolvedUris.length - 1]; +} + +export function haveSameParentModule(module1: string[], module2: string[]) { + if (module1.length !== module2.length) { + return false; + } + + let i = 0; + for (i = 0; i < module1.length - 1; i++) { + if (module1[i] !== module2[i]) { + break; + } + } + + return i === module1.length - 1; +} + +// Helper function to get the list of names that would be imported by a wildcard import +export function getWildcardImportNames(lookupInfo: ImportLookupResult): string[] { + const namesToImport: string[] = []; + + // If a dunder all symbol is defined, it takes precedence. + if (lookupInfo.dunderAllNames) { + if (!lookupInfo.usesUnsupportedDunderAllForm) { + return lookupInfo.dunderAllNames; + } + + appendArray(namesToImport, lookupInfo.dunderAllNames); + } + + lookupInfo.symbolTable.forEach((symbol, name) => { + if (!symbol.isExternallyHidden() && !name.startsWith('_')) { + namesToImport!.push(name); + } + }); + + return namesToImport; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/namedTuples.ts b/python-parser/packages/pyright-internal/src/analyzer/namedTuples.ts new file mode 100644 index 00000000..ea3dfcdb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/namedTuples.ts @@ -0,0 +1,513 @@ +/* + * namedTuples.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides special-case logic for the construction of named tuple + * classes with defined entry names and types. 
+ */ + +import { DiagnosticRule } from '../common/diagnosticRules'; +import { convertOffsetsToRange } from '../common/positionUtils'; +import { TextRange } from '../common/textRange'; +import { LocMessage } from '../localization/localize'; +import { ArgCategory, ExpressionNode, ParamCategory, ParseNodeType } from '../parser/parseNodes'; +import { Tokenizer } from '../parser/tokenizer'; +import { getFileInfo } from './analyzerNodeInfo'; +import { DeclarationType, VariableDeclaration } from './declaration'; +import * as ParseTreeUtils from './parseTreeUtils'; +import { evaluateStaticBoolExpression } from './staticExpressions'; +import { Symbol, SymbolFlags } from './symbol'; +import { Arg, TypeEvaluator } from './typeEvaluatorTypes'; +import { + computeMroLinearization, + convertToInstance, + getTypeVarScopeId, + isLiteralType, + isTupleClass, + isUnboundedTupleClass, + specializeTupleClass, + synthesizeTypeVarForSelfCls, +} from './typeUtils'; +import { + AnyType, + ClassType, + ClassTypeFlags, + FunctionParam, + FunctionParamFlags, + FunctionType, + FunctionTypeFlags, + TupleTypeArg, + Type, + UnknownType, + combineTypes, + isClassInstance, + isInstantiableClass, +} from './types'; + +// Creates a new custom tuple factory class with named values. +// Supports both typed and untyped variants. + +export function createNamedTupleType( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: Arg[], + includesTypes: boolean +): ClassType { + const fileInfo = getFileInfo(errorNode); + let className = 'namedtuple'; + const namedTupleEntries = new Set(); + + // The "rename" parameter is supported only in the untyped version. 
+ let allowRename = false; + if (!includesTypes) { + const renameArg = argList.find( + (arg) => arg.argCategory === ArgCategory.Simple && arg.name?.d.value === 'rename' + ); + + if (renameArg?.valueExpression) { + const renameValue = evaluateStaticBoolExpression( + renameArg.valueExpression, + fileInfo.executionEnvironment, + fileInfo.definedConstants + ); + if (renameValue === true) { + allowRename = true; + } + } + } + + if (argList.length === 0) { + evaluator.addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.namedTupleFirstArg(), errorNode); + } else { + const nameArg = argList[0]; + if (nameArg.argCategory !== ArgCategory.Simple) { + evaluator.addDiagnostic( + DiagnosticRule.reportArgumentType, + LocMessage.namedTupleFirstArg(), + argList[0].valueExpression || errorNode + ); + } else if (nameArg.valueExpression && nameArg.valueExpression.nodeType === ParseNodeType.StringList) { + className = nameArg.valueExpression.d.strings.map((s) => s.d.value).join(''); + } + } + + // Is there is a default arg? If so, is it defined in a way that we + // can determine its length statically? 
+ const defaultsArg = argList.find((arg) => arg.name?.d.value === 'defaults'); + let defaultArgCount: number | undefined = 0; + if (defaultsArg && defaultsArg.valueExpression) { + const defaultsArgType = evaluator.getTypeOfExpression(defaultsArg.valueExpression).type; + if ( + isClassInstance(defaultsArgType) && + isTupleClass(defaultsArgType) && + !isUnboundedTupleClass(defaultsArgType) && + defaultsArgType.priv.tupleTypeArgs + ) { + defaultArgCount = defaultsArgType.priv.tupleTypeArgs.length; + } else { + defaultArgCount = undefined; + } + } + + const namedTupleType = evaluator.getTypingType(errorNode, 'NamedTuple') || UnknownType.create(); + + const classType = ClassType.createInstantiable( + className, + ParseTreeUtils.getClassFullName(errorNode, fileInfo.moduleName, className), + fileInfo.moduleName, + fileInfo.fileUri, + ClassTypeFlags.ValidTypeAliasClass, + ParseTreeUtils.getTypeSourceId(errorNode), + /* declaredMetaclass */ undefined, + isInstantiableClass(namedTupleType) ? namedTupleType.shared.effectiveMetaclass : UnknownType.create() + ); + classType.shared.baseClasses.push(namedTupleType); + classType.shared.typeVarScopeId = ParseTreeUtils.getScopeIdForNode(errorNode); + + const classFields = ClassType.getSymbolTable(classType); + classFields.set( + '__class__', + Symbol.createWithType(SymbolFlags.ClassMember | SymbolFlags.IgnoredForProtocolMatch, classType) + ); + + const classTypeVar = synthesizeTypeVarForSelfCls(classType, /* isClsParam */ true); + const constructorType = FunctionType.createSynthesizedInstance('__new__', FunctionTypeFlags.ConstructorMethod); + constructorType.shared.declaredReturnType = convertToInstance(classTypeVar); + constructorType.priv.constructorTypeVarScopeId = getTypeVarScopeId(classType); + if (ParseTreeUtils.isAssignmentToDefaultsFollowingNamedTuple(errorNode)) { + constructorType.shared.flags |= FunctionTypeFlags.DisableDefaultChecks; + } + constructorType.shared.typeVarScopeId = classType.shared.typeVarScopeId; + 
FunctionType.addParam( + constructorType, + FunctionParam.create(ParamCategory.Simple, classTypeVar, FunctionParamFlags.TypeDeclared, 'cls') + ); + + const matchArgsNames: string[] = []; + + const selfParam = FunctionParam.create( + ParamCategory.Simple, + synthesizeTypeVarForSelfCls(classType, /* isClsParam */ false), + FunctionParamFlags.TypeDeclared, + 'self' + ); + + let addGenericGetAttribute = false; + const entryTypes: Type[] = []; + + if (argList.length < 2) { + evaluator.addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.namedTupleSecondArg(), errorNode); + addGenericGetAttribute = true; + } else { + const entriesArg = argList[1]; + if (entriesArg.argCategory !== ArgCategory.Simple) { + addGenericGetAttribute = true; + } else { + if ( + !includesTypes && + entriesArg.valueExpression && + entriesArg.valueExpression.nodeType === ParseNodeType.StringList + ) { + const entryNameNode = entriesArg.valueExpression; + const entries = entriesArg.valueExpression.d.strings + .map((s) => s.d.value) + .join('') + .split(/[,\s]+/); + const firstParamWithDefaultIndex = + defaultArgCount === undefined ? 0 : Math.max(0, entries.length - defaultArgCount); + entries.forEach((entryName, index) => { + entryName = entryName.trim(); + if (entryName) { + entryName = renameUnderscore(evaluator, entryName, allowRename, entryNameNode, index); + entryName = renameKeyword(evaluator, entryName, allowRename, entryNameNode, index); + + const entryType = UnknownType.create(); + const paramInfo = FunctionParam.create( + ParamCategory.Simple, + entryType, + FunctionParamFlags.TypeDeclared, + entryName, + index >= firstParamWithDefaultIndex ? entryType : undefined + ); + + FunctionType.addParam(constructorType, paramInfo); + const newSymbol = Symbol.createWithType(SymbolFlags.InstanceMember, entryType); + matchArgsNames.push(entryName); + + // We need to associate the declaration with a parse node. + // In this case it's just part of a string literal value. 
+ // The definition provider won't necessarily take the + // user to the exact spot in the string, but it's close enough. + const declaration: VariableDeclaration = { + type: DeclarationType.Variable, + node: entryNameNode, + isRuntimeTypeExpression: true, + uri: fileInfo.fileUri, + range: convertOffsetsToRange( + entryNameNode.start, + TextRange.getEnd(entryNameNode), + fileInfo.lines + ), + moduleName: fileInfo.moduleName, + isInExceptSuite: false, + }; + newSymbol.addDeclaration(declaration); + classFields.set(entryName, newSymbol); + entryTypes.push(entryType); + } + }); + } else if ( + entriesArg.valueExpression?.nodeType === ParseNodeType.List || + entriesArg.valueExpression?.nodeType === ParseNodeType.Tuple + ) { + const entryList = entriesArg.valueExpression; + const entryMap = new Map(); + const entryExpressions = + entriesArg.valueExpression?.nodeType === ParseNodeType.List + ? entriesArg.valueExpression.d.items + : entriesArg.valueExpression.d.items; + + const firstParamWithDefaultIndex = + defaultArgCount === undefined ? 0 : Math.max(0, entryExpressions.length - defaultArgCount); + + entryExpressions.forEach((entry, index) => { + let entryTypeNode: ExpressionNode | undefined; + let entryType: Type | undefined; + let entryNameNode: ExpressionNode | undefined; + let entryName = ''; + + if (includesTypes) { + // Handle the variant that includes name/type tuples. 
+ if (entry.nodeType === ParseNodeType.Tuple && entry.d.items.length === 2) { + entryNameNode = entry.d.items[0]; + entryTypeNode = entry.d.items[1]; + entryType = convertToInstance( + evaluator.getTypeOfExpressionExpectingType(entryTypeNode).type + ); + } else { + evaluator.addDiagnostic( + DiagnosticRule.reportArgumentType, + LocMessage.namedTupleNameType(), + entry + ); + } + } else { + entryNameNode = entry; + entryType = UnknownType.create(); + } + + if (entryNameNode) { + const nameTypeResult = evaluator.getTypeOfExpression(entryNameNode); + if ( + isClassInstance(nameTypeResult.type) && + ClassType.isBuiltIn(nameTypeResult.type, 'str') && + isLiteralType(nameTypeResult.type) + ) { + entryName = nameTypeResult.type.priv.literalValue as string; + + if (!entryName) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.namedTupleEmptyName(), + entryNameNode + ); + } else { + entryName = renameUnderscore(evaluator, entryName, allowRename, entryNameNode, index); + entryName = renameKeyword(evaluator, entryName, allowRename, entryNameNode, index); + } + } else { + addGenericGetAttribute = true; + } + } else { + addGenericGetAttribute = true; + } + + if (!entryName) { + entryName = `_${index.toString()}`; + } + + if (entryMap.has(entryName)) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.namedTupleNameUnique(), + entryNameNode || entry + ); + } + + // Record names in a map to detect duplicates. + entryMap.set(entryName, entryName); + + if (!entryType) { + entryType = UnknownType.create(); + } + + const paramInfo = FunctionParam.create( + ParamCategory.Simple, + entryType, + includesTypes ? FunctionParamFlags.TypeDeclared : FunctionParamFlags.None, + entryName, + index >= firstParamWithDefaultIndex ? 
entryType : undefined + ); + + FunctionType.addParam(constructorType, paramInfo); + entryTypes.push(entryType); + matchArgsNames.push(entryName); + + const newSymbol = Symbol.createWithType( + SymbolFlags.InstanceMember | SymbolFlags.NamedTupleMember, + entryType + ); + if (entryNameNode && entryNameNode.nodeType === ParseNodeType.StringList) { + const declaration: VariableDeclaration = { + type: DeclarationType.Variable, + node: entryNameNode, + uri: fileInfo.fileUri, + typeAnnotationNode: entryTypeNode, + range: convertOffsetsToRange( + entryNameNode.start, + TextRange.getEnd(entryNameNode), + fileInfo.lines + ), + moduleName: fileInfo.moduleName, + isInExceptSuite: false, + }; + newSymbol.addDeclaration(declaration); + } + classFields.set(entryName, newSymbol); + namedTupleEntries.add(entryName); + }); + + // Set the type in the type cache for the dict node so it + // doesn't get evaluated again. + evaluator.setTypeResultForNode(entryList, { type: UnknownType.create() }); + } else { + // A dynamic expression was used, so we can't evaluate + // the named tuple statically. + addGenericGetAttribute = true; + } + + if (entriesArg.valueExpression && !addGenericGetAttribute) { + // Set the type of the value expression node to Any so we don't attempt to + // re-evaluate it later, potentially generating "partially unknown" errors + // in strict mode. + evaluator.setTypeResultForNode(entriesArg.valueExpression, { type: AnyType.create() }); + } + } + } + + classType.shared.namedTupleEntries = namedTupleEntries; + + if (addGenericGetAttribute) { + constructorType.shared.parameters = []; + FunctionType.addDefaultParams(constructorType); + entryTypes.push(AnyType.create(/* isEllipsis */ false)); + entryTypes.push(AnyType.create(/* isEllipsis */ true)); + } + + // Always use generic parameters for __init__. 
+ const initType = FunctionType.createSynthesizedInstance('__init__'); + FunctionType.addParam(initType, selfParam); + FunctionType.addDefaultParams(initType); + initType.shared.declaredReturnType = evaluator.getNoneType(); + initType.priv.constructorTypeVarScopeId = getTypeVarScopeId(classType); + + classFields.set('__new__', Symbol.createWithType(SymbolFlags.ClassMember, constructorType)); + classFields.set('__init__', Symbol.createWithType(SymbolFlags.ClassMember, initType)); + + const lenType = FunctionType.createSynthesizedInstance('__len__'); + lenType.shared.declaredReturnType = evaluator.getBuiltInObject(errorNode, 'int'); + FunctionType.addParam(lenType, selfParam); + classFields.set('__len__', Symbol.createWithType(SymbolFlags.ClassMember, lenType)); + + if (addGenericGetAttribute) { + const getAttribType = FunctionType.createSynthesizedInstance('__getattribute__'); + getAttribType.shared.declaredReturnType = AnyType.create(); + FunctionType.addParam(getAttribType, selfParam); + FunctionType.addParam( + getAttribType, + FunctionParam.create( + ParamCategory.Simple, + evaluator.getBuiltInObject(errorNode, 'str'), + FunctionParamFlags.TypeDeclared, + 'name' + ) + ); + classFields.set('__getattribute__', Symbol.createWithType(SymbolFlags.ClassMember, getAttribType)); + } + + const tupleClassType = evaluator.getBuiltInType(errorNode, 'tuple'); + + // Synthesize the __match_args__ class variable. 
+ const strType = evaluator.getBuiltInType(errorNode, 'str'); + if ( + !addGenericGetAttribute && + strType && + isInstantiableClass(strType) && + tupleClassType && + isInstantiableClass(tupleClassType) + ) { + const literalTypes: TupleTypeArg[] = matchArgsNames.map((name) => { + return { type: ClassType.cloneAsInstance(ClassType.cloneWithLiteral(strType, name)), isUnbounded: false }; + }); + const matchArgsType = ClassType.cloneAsInstance(specializeTupleClass(tupleClassType, literalTypes)); + classFields.set('__match_args__', Symbol.createWithType(SymbolFlags.ClassMember, matchArgsType)); + } + + updateNamedTupleBaseClass(classType, entryTypes, !addGenericGetAttribute); + + computeMroLinearization(classType); + + return classType; +} + +export function updateNamedTupleBaseClass(classType: ClassType, typeArgs: Type[], isTypeArgExplicit: boolean): boolean { + let isUpdateNeeded = false; + + classType.shared.baseClasses = classType.shared.baseClasses.map((baseClass) => { + if (!isInstantiableClass(baseClass) || !ClassType.isBuiltIn(baseClass, 'NamedTuple')) { + return baseClass; + } + + const tupleTypeArgs: TupleTypeArg[] = []; + + if (!isTypeArgExplicit) { + tupleTypeArgs.push({ + type: typeArgs.length > 0 ? combineTypes(typeArgs) : UnknownType.create(), + isUnbounded: true, + }); + } else { + typeArgs.forEach((t) => { + tupleTypeArgs.push({ type: t, isUnbounded: false }); + }); + } + + // Create a copy of the NamedTuple class that replaces the tuple base class. 
+ const clonedNamedTupleClass = ClassType.specialize(baseClass, /* typeArgs */ undefined, isTypeArgExplicit); + clonedNamedTupleClass.shared = { ...clonedNamedTupleClass.shared }; + + clonedNamedTupleClass.shared.baseClasses = clonedNamedTupleClass.shared.baseClasses.map( + (namedTupleBaseClass) => { + if (!isInstantiableClass(namedTupleBaseClass) || !ClassType.isBuiltIn(namedTupleBaseClass, 'tuple')) { + return namedTupleBaseClass; + } + + return specializeTupleClass(namedTupleBaseClass, tupleTypeArgs, isTypeArgExplicit); + } + ); + + computeMroLinearization(clonedNamedTupleClass); + + isUpdateNeeded = true; + return clonedNamedTupleClass; + }); + + return isUpdateNeeded; +} + +function renameKeyword( + evaluator: TypeEvaluator, + name: string, + allowRename: boolean, + errorNode: ExpressionNode, + index: number +): string { + // Determine whether the name is a keyword in python. + const isKeyword = Tokenizer.isPythonKeyword(name); + + if (!isKeyword) { + // No rename necessary. + return name; + } + + if (allowRename) { + // Rename based on index. + return `_${index}`; + } + + evaluator.addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.namedTupleNameKeyword(), errorNode); + return name; +} + +function renameUnderscore( + evaluator: TypeEvaluator, + name: string, + allowRename: boolean, + errorNode: ExpressionNode, + index: number +): string { + if (!name.startsWith('_')) { + // No rename necessary. + return name; + } + + if (allowRename) { + // Rename based on index. 
+ return `_${index}`; + } + + evaluator.addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.namedTupleFieldUnderscore(), errorNode); + + return name; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/operations.ts b/python-parser/packages/pyright-internal/src/analyzer/operations.ts new file mode 100644 index 00000000..d4ed00b3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/operations.ts @@ -0,0 +1,1404 @@ +/* + * operations.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides type evaluation logic for unary, binary, augmented assignment, + * and ternary operators. + */ + +import { DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { PythonVersion, pythonVersion3_10 } from '../common/pythonVersion'; +import { LocMessage } from '../localization/localize'; +import { + AugmentedAssignmentNode, + BinaryOperationNode, + ExpressionNode, + ParseNodeType, + TernaryNode, + UnaryOperationNode, +} from '../parser/parseNodes'; +import { OperatorType } from '../parser/tokenizerTypes'; +import { getFileInfo } from './analyzerNodeInfo'; +import { getEnclosingLambda, isWithinLoop, operatorSupportsChaining, printOperator } from './parseTreeUtils'; +import { getScopeForNode } from './scopeUtils'; +import { evaluateStaticBoolExpression } from './staticExpressions'; +import { EvalFlags, MagicMethodDeprecationInfo, TypeEvaluator, TypeResult } from './typeEvaluatorTypes'; +import { + InferenceContext, + convertToInstantiable, + getLiteralTypeClassName, + getTypeCondition, + getUnionSubtypeCount, + isNoneInstance, + isOptionalType, + isTupleClass, + isUnboundedTupleClass, + isUnionableType, + lookUpClassMember, + makeInferenceContext, + mapSubtypes, + preserveUnknown, + removeNoneFromUnion, + someSubtypes, + specializeTupleClass, + specializeWithDefaultTypeArgs, + 
transformPossibleRecursiveTypeAlias, +} from './typeUtils'; +import { + ClassType, + NeverType, + Type, + TypeBase, + UnknownType, + combineTypes, + isAnyOrUnknown, + isClass, + isClassInstance, + isFunctionOrOverloaded, + isInstantiableClass, + isNever, + isUnion, +} from './types'; + +// Maps binary operators to the magic methods that implement them. +const binaryOperatorMap: { [operator: number]: [string, string] } = { + [OperatorType.Add]: ['__add__', '__radd__'], + [OperatorType.Subtract]: ['__sub__', '__rsub__'], + [OperatorType.Multiply]: ['__mul__', '__rmul__'], + [OperatorType.FloorDivide]: ['__floordiv__', '__rfloordiv__'], + [OperatorType.Divide]: ['__truediv__', '__rtruediv__'], + [OperatorType.Mod]: ['__mod__', '__rmod__'], + [OperatorType.Power]: ['__pow__', '__rpow__'], + [OperatorType.MatrixMultiply]: ['__matmul__', '__rmatmul__'], + [OperatorType.BitwiseAnd]: ['__and__', '__rand__'], + [OperatorType.BitwiseOr]: ['__or__', '__ror__'], + [OperatorType.BitwiseXor]: ['__xor__', '__rxor__'], + [OperatorType.LeftShift]: ['__lshift__', '__rlshift__'], + [OperatorType.RightShift]: ['__rshift__', '__rrshift__'], + [OperatorType.Equals]: ['__eq__', '__eq__'], + [OperatorType.NotEquals]: ['__ne__', '__ne__'], + [OperatorType.LessThan]: ['__lt__', '__gt__'], + [OperatorType.LessThanOrEqual]: ['__le__', '__ge__'], + [OperatorType.GreaterThan]: ['__gt__', '__lt__'], + [OperatorType.GreaterThanOrEqual]: ['__ge__', '__le__'], +}; + +// Map of operators that always return a bool result. 
+const booleanOperatorMap: { [operator: number]: true } = { + [OperatorType.And]: true, + [OperatorType.Or]: true, + [OperatorType.Is]: true, + [OperatorType.IsNot]: true, + [OperatorType.In]: true, + [OperatorType.NotIn]: true, +}; + +interface BinaryOperationOptions { + isLiteralMathAllowed?: boolean; + isTupleAddAllowed?: boolean; +} + +// If the number of subtypes starts to explode when applying "literal math", +// cut off the literal union and fall back to the non-literal supertype. +const maxLiteralMathSubtypeCount = 64; + +export function validateBinaryOperation( + evaluator: TypeEvaluator, + operator: OperatorType, + leftTypeResult: TypeResult, + rightTypeResult: TypeResult, + errorNode: ExpressionNode, + inferenceContext: InferenceContext | undefined, + diag: DiagnosticAddendum, + options: BinaryOperationOptions +): TypeResult { + const leftType = leftTypeResult.type; + const rightType = rightTypeResult.type; + const isIncomplete = !!leftTypeResult.isIncomplete || !!rightTypeResult.isIncomplete; + let type: Type | undefined; + let concreteLeftType = evaluator.makeTopLevelTypeVarsConcrete(leftType); + let deprecatedInfo: MagicMethodDeprecationInfo | undefined; + + if (booleanOperatorMap[operator] !== undefined) { + // If it's an AND or OR, we need to handle short-circuiting by + // eliminating any known-truthy or known-falsy types. + if (operator === OperatorType.And) { + // If the LHS evaluates to falsy, the And expression will + // always return the type of the left-hand side. + if (!evaluator.canBeTruthy(concreteLeftType)) { + return { type: leftType }; + } + + // If the LHS evaluates to truthy, the And expression will + // always return the type of the right-hand side. 
+ if (!evaluator.canBeFalsy(concreteLeftType)) { + return { type: rightType }; + } + + concreteLeftType = evaluator.removeTruthinessFromType(concreteLeftType); + + if (isNever(rightType)) { + return { type: concreteLeftType }; + } + } else if (operator === OperatorType.Or) { + // If the LHS evaluates to truthy, the Or expression will + // always return the type of the left-hand side. + if (!evaluator.canBeFalsy(concreteLeftType)) { + return { type: leftType }; + } + + // If the LHS evaluates to falsy, the Or expression will + // always return the type of the right-hand side. + if (!evaluator.canBeTruthy(concreteLeftType)) { + return { type: rightType }; + } + + concreteLeftType = evaluator.removeFalsinessFromType(concreteLeftType); + + if (isNever(rightType)) { + return { type: concreteLeftType }; + } + } + + if (isNever(leftType) || isNever(rightType)) { + return { type: NeverType.createNever() }; + } + + // The "in" and "not in" operators make use of the __contains__ + // magic method. + if (operator === OperatorType.In || operator === OperatorType.NotIn) { + const result = validateContainmentOperation( + evaluator, + operator, + leftTypeResult, + concreteLeftType, + rightTypeResult, + errorNode, + diag + ); + + if (result.magicMethodDeprecationInfo) { + deprecatedInfo = result.magicMethodDeprecationInfo; + } + + type = result.type; + + // Assume that a bool is returned even if the type is unknown. + if (type && !isNever(type)) { + type = evaluator.getBuiltInObject(errorNode, 'bool'); + } + } else { + type = evaluator.mapSubtypesExpandTypeVars( + concreteLeftType, + /* options */ undefined, + (leftSubtypeExpanded, leftSubtypeUnexpanded) => { + return evaluator.mapSubtypesExpandTypeVars( + rightType, + { conditionFilter: getTypeCondition(leftSubtypeExpanded) }, + (rightSubtypeExpanded, rightSubtypeUnexpanded) => { + // If the operator is an AND or OR, we need to combine the two types. 
+ if (operator === OperatorType.And || operator === OperatorType.Or) { + return combineTypes([leftSubtypeUnexpanded, rightSubtypeUnexpanded]); + } + // The other boolean operators always return a bool value. + return evaluator.getBuiltInObject(errorNode, 'bool'); + } + ); + } + ); + } + } else if (binaryOperatorMap[operator]) { + if (isNever(leftType) || isNever(rightType)) { + return { type: NeverType.createNever() }; + } + + // Handle certain operations on certain homogenous literal types + // using special-case math. For example, Literal[1, 2] + Literal[3, 4] + // should result in Literal[4, 5, 6]. + if (options.isLiteralMathAllowed) { + type = calcLiteralForBinaryOp(operator, leftType, rightType); + } + + if (!type) { + const result = validateArithmeticOperation( + evaluator, + operator, + leftTypeResult, + rightTypeResult, + errorNode, + inferenceContext, + diag, + options + ); + + if (result.magicMethodDeprecationInfo) { + deprecatedInfo = result.magicMethodDeprecationInfo; + } + + type = result.type; + } + } + + return { type: type ?? UnknownType.create(isIncomplete), magicMethodDeprecationInfo: deprecatedInfo }; +} + +export function getTypeOfBinaryOperation( + evaluator: TypeEvaluator, + node: BinaryOperationNode, + flags: EvalFlags, + inferenceContext: InferenceContext | undefined +): TypeResult { + const leftExpression = node.d.leftExpr; + let rightExpression = node.d.rightExpr; + let isIncomplete = false; + let typeErrors = false; + + // If this is a comparison and the left expression is also a comparison, + // we need to change the behavior to accommodate python's "chained + // comparisons" feature. + if (operatorSupportsChaining(node.d.operator)) { + if ( + rightExpression.nodeType === ParseNodeType.BinaryOperation && + !rightExpression.d.hasParens && + operatorSupportsChaining(rightExpression.d.operator) + ) { + // Evaluate the right expression so it is type checked. 
+ getTypeOfBinaryOperation(evaluator, rightExpression, flags, inferenceContext); + + // Use the left side of the right expression for comparison purposes. + rightExpression = rightExpression.d.leftExpr; + } + } + + // For most binary operations, the "expected type" is applied to the output + // of the magic method for that operation. However, the "or" and "and" operators + // have no magic method, so we apply the expected type directly to both operands. + let expectedOperandType = + node.d.operator === OperatorType.Or || node.d.operator === OperatorType.And + ? inferenceContext?.expectedType + : undefined; + + // Handle the very special case where the expected type is a list + // and the operator is a multiply. This comes up in the common case + // of "x: List[Optional[X]] = [None] * y" where y is an integer literal. + let expectedLeftOperandType: Type | undefined; + if ( + node.d.operator === OperatorType.Multiply && + inferenceContext && + isClassInstance(inferenceContext.expectedType) && + ClassType.isBuiltIn(inferenceContext.expectedType, 'list') && + inferenceContext.expectedType.priv.typeArgs && + inferenceContext.expectedType.priv.typeArgs.length >= 1 && + node.d.leftExpr.nodeType === ParseNodeType.List + ) { + expectedLeftOperandType = inferenceContext.expectedType; + } + + const effectiveExpectedType = expectedOperandType ?? expectedLeftOperandType; + const leftTypeResult = evaluator.getTypeOfExpression( + leftExpression, + flags, + makeInferenceContext(effectiveExpectedType) + ); + let leftType = leftTypeResult.type; + + if (!expectedOperandType) { + if (node.d.operator === OperatorType.Or || node.d.operator === OperatorType.And) { + // For "or" and "and", use the type of the left operand under certain + // circumstances. This allows us to infer a better type for expressions + // like `x or []`. Do this only if it's a generic class (like list or dict) + // or a TypedDict. 
+ if ( + someSubtypes(leftType, (subtype) => { + if (!isClassInstance(subtype)) { + return false; + } + + return ClassType.isTypedDictClass(subtype) || subtype.shared.typeParams.length > 0; + }) + ) { + expectedOperandType = leftType; + } + } else if (node.d.operator === OperatorType.Add && node.d.rightExpr.nodeType === ParseNodeType.List) { + // For the "+" operator , use this technique only if the right operand is + // a list expression. This heuristic handles the common case of `my_list + [0]`. + expectedOperandType = leftType; + } else if (node.d.operator === OperatorType.BitwiseOr) { + // If this is a bitwise or ("|"), use the type of the left operand. This allows + // us to support the case where a TypedDict is being updated with a dict expression. + if (isClassInstance(leftType) && ClassType.isTypedDictClass(leftType)) { + expectedOperandType = leftType; + } + } + } + + const rightTypeResult = evaluator.getTypeOfExpression( + rightExpression, + flags, + makeInferenceContext(expectedOperandType) + ); + let rightType = rightTypeResult.type; + + if (leftTypeResult.isIncomplete || rightTypeResult.isIncomplete) { + isIncomplete = true; + } + + // Is this a "|" operator used in a context where it is supposed to be + // interpreted as a union operator? + if ( + node.d.operator === OperatorType.BitwiseOr && + !customMetaclassSupportsMethod(leftType, '__or__') && + !customMetaclassSupportsMethod(rightType, '__ror__') + ) { + let adjustedRightType = rightType; + let adjustedLeftType = leftType; + if (!isNoneInstance(leftType) && isNoneInstance(rightType)) { + // Handle the special case where "None" is being added to the union + // with something else. Even though "None" will normally be interpreted + // as the None singleton object in contexts where a type annotation isn't + // assumed, we'll allow it here. 
+ adjustedRightType = convertToInstantiable(evaluator.getNoneType()); + } else if (!isNoneInstance(rightType) && isNoneInstance(leftType)) { + adjustedLeftType = convertToInstantiable(evaluator.getNoneType()); + } + + if (isUnionableType([adjustedLeftType, adjustedRightType])) { + if (isInstantiableClass(adjustedLeftType)) { + adjustedLeftType = specializeWithDefaultTypeArgs(adjustedLeftType); + } + + if (isInstantiableClass(adjustedRightType)) { + adjustedRightType = specializeWithDefaultTypeArgs(adjustedRightType); + } + + return createUnionType( + evaluator, + node, + flags, + leftTypeResult, + rightTypeResult, + adjustedRightType, + adjustedLeftType + ); + } + } + + if ((flags & EvalFlags.TypeExpression) !== 0) { + // Exempt "|" because it might be a union operation involving unknowns. + if (node.d.operator !== OperatorType.BitwiseOr) { + evaluator.addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.binaryOperationNotAllowed(), node); + return { type: UnknownType.create() }; + } + } + + // Optional checks apply to all operations except for boolean operations. + let isLeftOptionalType = false; + if (booleanOperatorMap[node.d.operator] === undefined) { + // None is a valid operand for == and != even if the type stub says otherwise. + if (node.d.operator === OperatorType.Equals || node.d.operator === OperatorType.NotEquals) { + leftType = removeNoneFromUnion(leftType); + } else { + isLeftOptionalType = isOptionalType(leftType); + } + + // None is a valid operand for == and != even if the type stub says otherwise. + if (node.d.operator === OperatorType.Equals || node.d.operator === OperatorType.NotEquals) { + rightType = removeNoneFromUnion(rightType); + } + } + + const diag = new DiagnosticAddendum(); + + // Don't use literal math if the operation is within a loop + // because the literal values may change each time. 
We also don't want to + // apply literal math within the body of a lambda because they are often + // used as callbacks where the value changes each time they are called. + const isLiteralMathAllowed = !isWithinLoop(node) && !getEnclosingLambda(node); + + // Don't special-case tuple __add__ if the left type is a union. This + // can result in an infinite loop if we keep creating new tuple types + // within a loop construct using __add__. + const isTupleAddAllowed = !isUnion(leftType); + + const typeResult = validateBinaryOperation( + evaluator, + node.d.operator, + { type: leftType, isIncomplete: leftTypeResult.isIncomplete }, + { type: rightType, isIncomplete: rightTypeResult.isIncomplete }, + node, + inferenceContext, + diag, + { isLiteralMathAllowed, isTupleAddAllowed } + ); + + if (typeResult.isIncomplete) { + isIncomplete = true; + } + + if (!diag.isEmpty()) { + typeErrors = true; + + if (!isIncomplete) { + if (isLeftOptionalType && diag.getMessages().length === 1) { + // If the left was an optional type and there is just one diagnostic, + // assume that it was due to a "None" not being supported. Report + // this as a reportOptionalOperand diagnostic rather than a + // reportGeneralTypeIssues diagnostic. + evaluator.addDiagnostic( + DiagnosticRule.reportOptionalOperand, + LocMessage.noneOperator().format({ + operator: printOperator(node.d.operator), + }), + node.d.leftExpr + ); + } else { + // If neither the LHS or RHS are unions, don't include a diagnostic addendum + // because it will be redundant with the main diagnostic message. The addenda + // are useful only if union expansion was used for one or both operands. 
+ let diagString = ''; + if ( + isUnion(evaluator.makeTopLevelTypeVarsConcrete(leftType)) || + isUnion(evaluator.makeTopLevelTypeVarsConcrete(rightType)) + ) { + diagString = diag.getString(); + } + + evaluator.addDiagnostic( + DiagnosticRule.reportOperatorIssue, + LocMessage.typeNotSupportBinaryOperator().format({ + operator: printOperator(node.d.operator), + leftType: evaluator.printType(leftType), + rightType: evaluator.printType(rightType), + }) + diagString, + node + ); + } + } + } + + return { + type: typeResult.type, + isIncomplete, + typeErrors, + magicMethodDeprecationInfo: typeResult.magicMethodDeprecationInfo, + }; +} + +export function getTypeOfAugmentedAssignment( + evaluator: TypeEvaluator, + node: AugmentedAssignmentNode, + inferenceContext: InferenceContext | undefined +): TypeResult { + const operatorMap: { [operator: number]: [string, OperatorType] } = { + [OperatorType.AddEqual]: ['__iadd__', OperatorType.Add], + [OperatorType.SubtractEqual]: ['__isub__', OperatorType.Subtract], + [OperatorType.MultiplyEqual]: ['__imul__', OperatorType.Multiply], + [OperatorType.FloorDivideEqual]: ['__ifloordiv__', OperatorType.FloorDivide], + [OperatorType.DivideEqual]: ['__itruediv__', OperatorType.Divide], + [OperatorType.ModEqual]: ['__imod__', OperatorType.Mod], + [OperatorType.PowerEqual]: ['__ipow__', OperatorType.Power], + [OperatorType.MatrixMultiplyEqual]: ['__imatmul__', OperatorType.MatrixMultiply], + [OperatorType.BitwiseAndEqual]: ['__iand__', OperatorType.BitwiseAnd], + [OperatorType.BitwiseOrEqual]: ['__ior__', OperatorType.BitwiseOr], + [OperatorType.BitwiseXorEqual]: ['__ixor__', OperatorType.BitwiseXor], + [OperatorType.LeftShiftEqual]: ['__ilshift__', OperatorType.LeftShift], + [OperatorType.RightShiftEqual]: ['__irshift__', OperatorType.RightShift], + }; + + let type: Type | undefined; + let typeResult: TypeResult | undefined; + const diag = new DiagnosticAddendum(); + let deprecatedInfo: MagicMethodDeprecationInfo | undefined; + + const 
leftTypeResult = evaluator.getTypeOfExpression(node.d.leftExpr); + const leftType = leftTypeResult.type; + + let expectedOperandType: Type | undefined; + if (node.d.operator === OperatorType.BitwiseOrEqual) { + // If this is a bitwise or ("|="), use the type of the left operand. This allows + // us to support the case where a TypedDict is being updated with a dict expression. + expectedOperandType = leftType; + } + + const rightTypeResult = evaluator.getTypeOfExpression( + node.d.rightExpr, + /* flags */ undefined, + makeInferenceContext(expectedOperandType) + ); + const rightType = rightTypeResult.type; + const isIncomplete = !!rightTypeResult.isIncomplete || !!leftTypeResult.isIncomplete; + + if (isNever(leftType) || isNever(rightType)) { + typeResult = { type: NeverType.createNever(), isIncomplete }; + } else { + type = evaluator.mapSubtypesExpandTypeVars( + leftType, + /* options */ undefined, + (leftSubtypeExpanded, leftSubtypeUnexpanded) => { + return evaluator.mapSubtypesExpandTypeVars( + rightType, + { conditionFilter: getTypeCondition(leftSubtypeExpanded) }, + (rightSubtypeExpanded, rightSubtypeUnexpanded) => { + if (isAnyOrUnknown(leftSubtypeUnexpanded) || isAnyOrUnknown(rightSubtypeUnexpanded)) { + return preserveUnknown(leftSubtypeUnexpanded, rightSubtypeUnexpanded); + } + + const magicMethodName = operatorMap[node.d.operator][0]; + let returnTypeResult = evaluator.getTypeOfMagicMethodCall( + leftSubtypeUnexpanded, + magicMethodName, + [{ type: rightSubtypeUnexpanded, isIncomplete: rightTypeResult.isIncomplete }], + node, + inferenceContext + ); + + if (!returnTypeResult && leftSubtypeUnexpanded !== leftSubtypeExpanded) { + // Try with the expanded left type. 
+ returnTypeResult = evaluator.getTypeOfMagicMethodCall( + leftSubtypeExpanded, + magicMethodName, + [{ type: rightSubtypeUnexpanded, isIncomplete: rightTypeResult.isIncomplete }], + node, + inferenceContext + ); + } + + if (!returnTypeResult && rightSubtypeUnexpanded !== rightSubtypeExpanded) { + // Try with the expanded left and right type. + returnTypeResult = evaluator.getTypeOfMagicMethodCall( + leftSubtypeExpanded, + magicMethodName, + [{ type: rightSubtypeExpanded, isIncomplete: rightTypeResult.isIncomplete }], + node, + inferenceContext + ); + } + + if (!returnTypeResult) { + // If the LHS class didn't support the magic method for augmented + // assignment, fall back on the normal binary expression evaluator. + const binaryOperator = operatorMap[node.d.operator][1]; + + // Don't use literal math if the operation is within a loop + // because the literal values may change each time. + const isLiteralMathAllowed = + !isWithinLoop(node) && + isExpressionLocalVariable(evaluator, node.d.leftExpr) && + getUnionSubtypeCount(leftType) * getUnionSubtypeCount(rightType) < + maxLiteralMathSubtypeCount; + + // Don't special-case tuple __add__ if the left type is a union. This + // can result in an infinite loop if we keep creating new tuple types + // within a loop construct using __add__. + const isTupleAddAllowed = !isUnion(leftType); + + returnTypeResult = validateBinaryOperation( + evaluator, + binaryOperator, + { type: leftSubtypeUnexpanded, isIncomplete: leftTypeResult.isIncomplete }, + { type: rightSubtypeUnexpanded, isIncomplete: rightTypeResult.isIncomplete }, + node, + inferenceContext, + diag, + { isLiteralMathAllowed, isTupleAddAllowed } + ); + } + + if (returnTypeResult?.magicMethodDeprecationInfo) { + deprecatedInfo = returnTypeResult.magicMethodDeprecationInfo; + } + + return returnTypeResult?.type; + } + ); + } + ); + + // If the LHS class didn't support the magic method for augmented + // assignment, fall back on the normal binary expression evaluator. 
+ if (!diag.isEmpty() || !type || isNever(type)) { + if (!isIncomplete) { + evaluator.addDiagnostic( + DiagnosticRule.reportOperatorIssue, + LocMessage.typeNotSupportBinaryOperator().format({ + operator: printOperator(node.d.operator), + leftType: evaluator.printType(leftType), + rightType: evaluator.printType(rightType), + }) + diag.getString(), + node + ); + } + } + + typeResult = { type, isIncomplete, magicMethodDeprecationInfo: deprecatedInfo }; + } + + evaluator.assignTypeToExpression(node.d.destExpr, typeResult, node.d.rightExpr); + + return typeResult; +} + +export function getTypeOfUnaryOperation( + evaluator: TypeEvaluator, + node: UnaryOperationNode, + flags: EvalFlags, + inferenceContext: InferenceContext | undefined +): TypeResult { + if ((flags & EvalFlags.TypeExpression) !== 0) { + evaluator.addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.unaryOperationNotAllowed(), node); + return { type: UnknownType.create() }; + } + + const exprTypeResult = evaluator.getTypeOfExpression(node.d.expr); + let exprType = evaluator.makeTopLevelTypeVarsConcrete(transformPossibleRecursiveTypeAlias(exprTypeResult.type)); + + const isIncomplete = exprTypeResult.isIncomplete; + + if (isNever(exprType)) { + return { type: NeverType.createNever(), isIncomplete }; + } + + // Map unary operators to magic functions. Note that the bitwise + // invert has two magic functions that are aliases of each other. 
+ const unaryOperatorMap: { [operator: number]: string } = { + [OperatorType.Add]: '__pos__', + [OperatorType.Subtract]: '__neg__', + [OperatorType.BitwiseInvert]: '__invert__', + [OperatorType.Not]: '__bool__', + }; + + let type: Type | undefined; + let deprecatedInfo: MagicMethodDeprecationInfo | undefined; + + if (node.d.operator !== OperatorType.Not) { + if (isOptionalType(exprType)) { + evaluator.addDiagnostic( + DiagnosticRule.reportOptionalOperand, + LocMessage.noneOperator().format({ + operator: printOperator(node.d.operator), + }), + node.d.expr + ); + exprType = removeNoneFromUnion(exprType); + } + } + + // Handle certain operations on certain literal types + // using special-case math. Do not apply this if the input type + // is incomplete because we may be evaluating an expression within + // a loop, so the literal value may change each time. + if (!exprTypeResult.isIncomplete) { + type = calcLiteralForUnaryOp(node.d.operator, exprType); + } + + if (!type) { + if (isAnyOrUnknown(exprType)) { + type = exprType; + } else { + const magicMethodName = unaryOperatorMap[node.d.operator]; + let isResultValid = true; + + type = evaluator.mapSubtypesExpandTypeVars(exprType, /* options */ undefined, (subtypeExpanded) => { + const typeResult = evaluator.getTypeOfMagicMethodCall( + subtypeExpanded, + magicMethodName, + [], + node, + inferenceContext + ); + + if (!typeResult) { + isResultValid = false; + } + + if (typeResult?.magicMethodDeprecationInfo) { + deprecatedInfo = typeResult.magicMethodDeprecationInfo; + } + + return typeResult?.type; + }); + + if (!isResultValid) { + type = undefined; + } + } + + // __not__ always returns a boolean. 
+ if (node.d.operator === OperatorType.Not) { + type = evaluator.getBuiltInObject(node, 'bool'); + if (!type) { + type = UnknownType.create(); + } + } + + if (!type) { + if (!isIncomplete) { + if (inferenceContext && !isAnyOrUnknown(inferenceContext.expectedType)) { + evaluator.addDiagnostic( + DiagnosticRule.reportOperatorIssue, + LocMessage.typeNotSupportUnaryOperatorBidirectional().format({ + operator: printOperator(node.d.operator), + type: evaluator.printType(exprType), + expectedType: evaluator.printType(inferenceContext.expectedType), + }), + node + ); + } else { + evaluator.addDiagnostic( + DiagnosticRule.reportOperatorIssue, + LocMessage.typeNotSupportUnaryOperator().format({ + operator: printOperator(node.d.operator), + type: evaluator.printType(exprType), + }), + node + ); + } + } + + type = UnknownType.create(isIncomplete); + } + } + + return { type, isIncomplete, magicMethodDeprecationInfo: deprecatedInfo }; +} + +export function getTypeOfTernaryOperation( + evaluator: TypeEvaluator, + node: TernaryNode, + flags: EvalFlags, + inferenceContext: InferenceContext | undefined +): TypeResult { + const fileInfo = getFileInfo(node); + + if ((flags & EvalFlags.TypeExpression) !== 0) { + evaluator.addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.ternaryNotAllowed(), node); + return { type: UnknownType.create() }; + } + + evaluator.getTypeOfExpression(node.d.testExpr); + + const typesToCombine: Type[] = []; + let isIncomplete = false; + let typeErrors = false; + + const constExprValue = evaluateStaticBoolExpression( + node.d.testExpr, + fileInfo.executionEnvironment, + fileInfo.definedConstants + ); + + if (constExprValue !== false && evaluator.isNodeReachable(node.d.ifExpr)) { + const ifType = evaluator.getTypeOfExpression(node.d.ifExpr, flags, inferenceContext); + typesToCombine.push(ifType.type); + if (ifType.isIncomplete) { + isIncomplete = true; + } + if (ifType.typeErrors) { + typeErrors = true; + } + } + + if (constExprValue !== true && 
evaluator.isNodeReachable(node.d.elseExpr)) { + const elseType = evaluator.getTypeOfExpression(node.d.elseExpr, flags, inferenceContext); + typesToCombine.push(elseType.type); + if (elseType.isIncomplete) { + isIncomplete = true; + } + if (elseType.typeErrors) { + typeErrors = true; + } + } + + return { type: combineTypes(typesToCombine), isIncomplete, typeErrors }; +} + +function createUnionType( + evaluator: TypeEvaluator, + node: BinaryOperationNode, + flags: EvalFlags, + leftTypeResult: TypeResult, + rightTypeResult: TypeResult, + adjustedRightType: Type, + adjustedLeftType: Type +): TypeResult { + const leftExpression = node.d.leftExpr; + const rightExpression = node.d.rightExpr; + const fileInfo = getFileInfo(node); + const unionNotationSupported = + fileInfo.isStubFile || + (flags & EvalFlags.ForwardRefs) !== 0 || + PythonVersion.isGreaterOrEqualTo(fileInfo.executionEnvironment.pythonVersion, pythonVersion3_10); + + if (!unionNotationSupported) { + // If the left type is Any, we can't say for sure whether this + // is an illegal syntax or a valid application of the "|" operator. 
+ if (!isAnyOrUnknown(adjustedLeftType)) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.unionSyntaxIllegal(), + node, + node.d.operatorToken + ); + } + } + + const isLeftTypeArgValid = evaluator.validateTypeArg({ ...leftTypeResult, node: leftExpression }); + const isRightTypeArgValid = evaluator.validateTypeArg({ ...rightTypeResult, node: rightExpression }); + + if (!isLeftTypeArgValid || !isRightTypeArgValid) { + return { type: UnknownType.create() }; + } + + adjustedLeftType = evaluator.reportMissingTypeArgs( + node.d.leftExpr, + adjustedLeftType, + flags | EvalFlags.InstantiableType + ); + adjustedRightType = evaluator.reportMissingTypeArgs( + node.d.rightExpr, + adjustedRightType, + flags | EvalFlags.InstantiableType + ); + + let newUnion = combineTypes([adjustedLeftType, adjustedRightType], { skipElideRedundantLiterals: true }); + + const unionClass = evaluator.getUnionClassType(); + if (unionClass && isInstantiableClass(unionClass) && (flags & EvalFlags.IsinstanceArg) === 0) { + newUnion = TypeBase.cloneAsSpecialForm(newUnion, ClassType.cloneAsInstance(unionClass)); + } + + if (leftTypeResult.type.props?.typeForm && rightTypeResult.type.props?.typeForm) { + const newTypeForm = combineTypes([leftTypeResult.type.props.typeForm, rightTypeResult.type.props.typeForm]); + newUnion = TypeBase.cloneWithTypeForm(newUnion, newTypeForm); + } + + // Check for "stringified" forward reference type expressions. The "|" operator + // doesn't support these except in certain circumstances. Notably, it can't be used + // with other strings or with types that are not specialized using an index form. 
+ if (!fileInfo.isStubFile) { + let stringNode: ExpressionNode | undefined; + let otherNode: ExpressionNode | undefined; + let otherType: Type | undefined; + + if (leftExpression.nodeType === ParseNodeType.StringList) { + stringNode = leftExpression; + otherNode = rightExpression; + otherType = rightTypeResult.type; + } else if (rightExpression.nodeType === ParseNodeType.StringList) { + stringNode = rightExpression; + otherNode = leftExpression; + otherType = leftTypeResult.type; + } + + if (stringNode && otherNode && otherType) { + let isAllowed = true; + if (isClass(otherType)) { + if (!otherType.priv.isTypeArgExplicit || isClassInstance(otherType)) { + isAllowed = false; + } + } + + if (!isAllowed) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.unionForwardReferenceNotAllowed(), + stringNode + ); + } + } + } + + return { type: newUnion }; +} + +// Attempts to apply "literal math" for a literal operands. +function calcLiteralForUnaryOp(operator: OperatorType, operandType: Type): Type | undefined { + let type: Type | undefined; + + if (getUnionSubtypeCount(operandType) >= maxLiteralMathSubtypeCount) { + return undefined; + } + + if (!!getTypeCondition(operandType) || someSubtypes(operandType, (subtype) => !!getTypeCondition(subtype))) { + return undefined; + } + + const literalClassName = getLiteralTypeClassName(operandType); + + if (literalClassName === 'int') { + if (operator === OperatorType.Add) { + type = operandType; + } else if (operator === OperatorType.Subtract) { + type = mapSubtypes(operandType, (subtype) => { + const classSubtype = subtype as ClassType; + return ClassType.cloneWithLiteral(classSubtype, -(classSubtype.priv.literalValue as number | bigint)); + }); + } else if (operator === OperatorType.BitwiseInvert) { + // Python defines bitwise invert (~x) as -(x + 1). Use BigInt math + // to avoid JavaScript's 32-bit truncation when using the ~ operator + // on Number values. 
+ type = mapSubtypes(operandType, (subtype) => { + const classSubtype = subtype as ClassType; + const literalValue = classSubtype.priv.literalValue as number | bigint; + const bigVal = typeof literalValue === 'bigint' ? literalValue : BigInt(literalValue as number); + let newValue: number | bigint = -(bigVal + BigInt(1)); + if (newValue >= BigInt(Number.MIN_SAFE_INTEGER) && newValue <= BigInt(Number.MAX_SAFE_INTEGER)) { + newValue = Number(newValue); + } + return ClassType.cloneWithLiteral(classSubtype, newValue); + }); + } + } else if (literalClassName === 'bool') { + if (operator === OperatorType.Not) { + type = mapSubtypes(operandType, (subtype) => { + const classSubtype = subtype as ClassType; + return ClassType.cloneWithLiteral(classSubtype, !(classSubtype.priv.literalValue as boolean)); + }); + } + } + + return type; +} + +// Attempts to apply "literal math" for two literal operands. +function calcLiteralForBinaryOp(operator: OperatorType, leftType: Type, rightType: Type): Type | undefined { + const leftLiteralClassName = getLiteralTypeClassName(leftType); + if ( + !leftLiteralClassName || + getTypeCondition(leftType) || + someSubtypes(leftType, (subtype) => !!getTypeCondition(subtype)) + ) { + return undefined; + } + + const rightLiteralClassName = getLiteralTypeClassName(rightType); + if ( + leftLiteralClassName !== rightLiteralClassName || + getTypeCondition(rightType) || + someSubtypes(rightType, (subtype) => !!getTypeCondition(subtype)) || + getUnionSubtypeCount(leftType) * getUnionSubtypeCount(rightType) >= maxLiteralMathSubtypeCount + ) { + return undefined; + } + + // Handle str and bytes literals. 
+ if (leftLiteralClassName === 'str' || leftLiteralClassName === 'bytes') { + if (operator === OperatorType.Add) { + return mapSubtypes(leftType, (leftSubtype) => { + return mapSubtypes(rightType, (rightSubtype) => { + const leftClassSubtype = leftSubtype as ClassType; + const rightClassSubtype = rightSubtype as ClassType; + + return ClassType.cloneWithLiteral( + leftClassSubtype, + ((leftClassSubtype.priv.literalValue as string) + rightClassSubtype.priv.literalValue) as string + ); + }); + }); + } + } + + // Handle int literals. + if (leftLiteralClassName === 'int') { + const supportedOps = [ + OperatorType.Add, + OperatorType.Subtract, + OperatorType.Multiply, + OperatorType.FloorDivide, + OperatorType.Mod, + OperatorType.Power, + OperatorType.LeftShift, + OperatorType.RightShift, + OperatorType.BitwiseAnd, + OperatorType.BitwiseOr, + OperatorType.BitwiseXor, + ]; + if (!supportedOps.includes(operator)) { + return undefined; + } + + let isValidResult = true; + + const type = mapSubtypes(leftType, (leftSubtype) => { + return mapSubtypes(rightType, (rightSubtype) => { + try { + const leftClassSubtype = leftSubtype as ClassType; + const rightClassSubtype = rightSubtype as ClassType; + const leftLiteralValue = BigInt(leftClassSubtype.priv.literalValue as number | bigint); + const rightLiteralValue = BigInt(rightClassSubtype.priv.literalValue as number | bigint); + + let newValue: number | bigint | undefined; + if (operator === OperatorType.Add) { + newValue = leftLiteralValue + rightLiteralValue; + } else if (operator === OperatorType.Subtract) { + newValue = leftLiteralValue - rightLiteralValue; + } else if (operator === OperatorType.Multiply) { + newValue = leftLiteralValue * rightLiteralValue; + } else if (operator === OperatorType.FloorDivide) { + if (rightLiteralValue !== BigInt(0)) { + newValue = leftLiteralValue / rightLiteralValue; + + // BigInt rounds to zero, but floor divide rounds to negative + // infinity, so we need to adjust the result if the signs + 
// of the operands are different. + if ( + newValue * rightLiteralValue !== leftLiteralValue && + leftLiteralValue < BigInt(0) !== rightLiteralValue < BigInt(0) + ) { + newValue -= BigInt(1); + } + } + } else if (operator === OperatorType.Mod) { + if (rightLiteralValue !== BigInt(0)) { + // BigInt always produces a remainder, but Python produces + // a modulo result whose sign is always the same as the + // right operand. + newValue = ((leftLiteralValue % rightLiteralValue) + rightLiteralValue) % rightLiteralValue; + } + } else if (operator === OperatorType.Power) { + if (rightLiteralValue >= BigInt(0)) { + try { + newValue = leftLiteralValue ** rightLiteralValue; + } catch { + // Don't allow if we exceed max bigint integer value. + } + } + } else if (operator === OperatorType.LeftShift) { + if (rightLiteralValue >= BigInt(0)) { + newValue = leftLiteralValue << rightLiteralValue; + } + } else if (operator === OperatorType.RightShift) { + if (rightLiteralValue >= BigInt(0)) { + newValue = leftLiteralValue >> rightLiteralValue; + } + } else if (operator === OperatorType.BitwiseAnd) { + newValue = leftLiteralValue & rightLiteralValue; + } else if (operator === OperatorType.BitwiseOr) { + newValue = leftLiteralValue | rightLiteralValue; + } else if (operator === OperatorType.BitwiseXor) { + newValue = leftLiteralValue ^ rightLiteralValue; + } + + if (newValue === undefined) { + isValidResult = false; + return undefined; + } else if (typeof newValue === 'number' && isNaN(newValue)) { + isValidResult = false; + return undefined; + } else { + // Convert back to a simple number if it fits. Leave as a bigint + // if it doesn't. 
+ if (newValue >= Number.MIN_SAFE_INTEGER && newValue <= Number.MAX_SAFE_INTEGER) { + newValue = Number(newValue); + } + + return ClassType.cloneWithLiteral(leftClassSubtype, newValue); + } + } catch { + isValidResult = false; + return undefined; + } + }); + }); + + if (isValidResult) { + return type; + } + } + + return undefined; +} + +function customMetaclassSupportsMethod(type: Type, methodName: string): boolean { + if (!isInstantiableClass(type)) { + return false; + } + + const metaclass = type.shared.effectiveMetaclass; + if (!metaclass || !isInstantiableClass(metaclass)) { + return false; + } + + if (ClassType.isBuiltIn(metaclass, 'type')) { + return false; + } + + const memberInfo = lookUpClassMember(metaclass, methodName); + if (!memberInfo) { + return false; + } + + // If the metaclass inherits from Any or Unknown, we have to guess + // whether the method is supported. We'll assume it's not, since this + // is the most likely case. + if (isAnyOrUnknown(memberInfo.classType)) { + return false; + } + + if (isInstantiableClass(memberInfo.classType) && ClassType.isBuiltIn(memberInfo.classType, 'type')) { + return false; + } + + return true; +} + +// All functions in Python derive from object, so they inherit all +// of the capabilities of an object. This function converts a function +// to an object instance. +function convertFunctionToObject(evaluator: TypeEvaluator, type: Type) { + if (isFunctionOrOverloaded(type)) { + return evaluator.getObjectType(); + } + + return type; +} + +// Determines whether the expression refers to a variable that +// is defined within the current scope or some outer scope. 
+function isExpressionLocalVariable(evaluator: TypeEvaluator, node: ExpressionNode): boolean { + if (node.nodeType !== ParseNodeType.Name) { + return false; + } + + const symbolWithScope = evaluator.lookUpSymbolRecursive(node, node.d.value, /* honorCodeFlow */ false); + if (!symbolWithScope) { + return false; + } + + const currentScope = getScopeForNode(node); + return currentScope === symbolWithScope.scope; +} + +function validateContainmentOperation( + evaluator: TypeEvaluator, + operator: OperatorType, + leftTypeResult: TypeResult, + concreteLeftType: Type, + rightTypeResult: TypeResult, + errorNode: ExpressionNode, + diag: DiagnosticAddendum +): TypeResult { + let deprecatedInfo: MagicMethodDeprecationInfo | undefined; + + const type = evaluator.mapSubtypesExpandTypeVars( + rightTypeResult.type, + /* options */ undefined, + (rightSubtypeExpanded, rightSubtypeUnexpanded) => { + return evaluator.mapSubtypesExpandTypeVars( + concreteLeftType, + { conditionFilter: getTypeCondition(rightSubtypeExpanded) }, + (leftSubtype) => { + if (isAnyOrUnknown(leftSubtype) || isAnyOrUnknown(rightSubtypeUnexpanded)) { + return preserveUnknown(leftSubtype, rightSubtypeExpanded); + } + + let returnTypeResult = evaluator.getTypeOfMagicMethodCall( + rightSubtypeExpanded, + '__contains__', + [{ type: leftSubtype, isIncomplete: leftTypeResult.isIncomplete }], + errorNode, + /* inferenceContext */ undefined + ); + + if (!returnTypeResult) { + // If __contains__ was not supported, fall back + // on an iterable. 
+ const iteratorType = evaluator.getTypeOfIterator( + { type: rightSubtypeExpanded, isIncomplete: rightTypeResult.isIncomplete }, + /* isAsync */ false, + errorNode, + /* emitNotIterableError */ false + )?.type; + + if (iteratorType && evaluator.assignType(iteratorType, leftSubtype)) { + returnTypeResult = { type: evaluator.getBuiltInObject(errorNode, 'bool') }; + } + } + + if (!returnTypeResult) { + diag.addMessage( + LocMessage.typeNotSupportBinaryOperator().format({ + operator: printOperator(operator), + leftType: evaluator.printType(leftSubtype), + rightType: evaluator.printType(rightSubtypeExpanded), + }) + ); + } + + if (returnTypeResult?.magicMethodDeprecationInfo) { + deprecatedInfo = returnTypeResult.magicMethodDeprecationInfo; + } + + return returnTypeResult?.type ?? evaluator.getBuiltInObject(errorNode, 'bool'); + } + ); + } + ); + + return { type, magicMethodDeprecationInfo: deprecatedInfo }; +} + +function validateArithmeticOperation( + evaluator: TypeEvaluator, + operator: OperatorType, + leftTypeResult: TypeResult, + rightTypeResult: TypeResult, + errorNode: ExpressionNode, + inferenceContext: InferenceContext | undefined, + diag: DiagnosticAddendum, + options: BinaryOperationOptions +): TypeResult { + let deprecatedInfo: MagicMethodDeprecationInfo | undefined; + const isIncomplete = !!leftTypeResult.isIncomplete || !!rightTypeResult.isIncomplete; + + const type = evaluator.mapSubtypesExpandTypeVars( + leftTypeResult.type, + /* options */ undefined, + (leftSubtypeExpanded, leftSubtypeUnexpanded) => { + return evaluator.mapSubtypesExpandTypeVars( + rightTypeResult.type, + { conditionFilter: getTypeCondition(leftSubtypeExpanded) }, + (rightSubtypeExpanded, rightSubtypeUnexpanded) => { + if (isAnyOrUnknown(leftSubtypeUnexpanded) || isAnyOrUnknown(rightSubtypeUnexpanded)) { + return preserveUnknown(leftSubtypeUnexpanded, rightSubtypeUnexpanded); + } + + const tupleClassType = evaluator.getTupleClassType(); + + // Special-case __add__ for tuples when the 
types for both tuples are known. + if ( + options.isTupleAddAllowed && + operator === OperatorType.Add && + isClassInstance(leftSubtypeExpanded) && + isTupleClass(leftSubtypeExpanded) && + leftSubtypeExpanded.priv.tupleTypeArgs && + isClassInstance(rightSubtypeExpanded) && + isTupleClass(rightSubtypeExpanded) && + rightSubtypeExpanded.priv.tupleTypeArgs && + tupleClassType && + isInstantiableClass(tupleClassType) + ) { + // If at least one of the tuples is of fixed size, we can + // combine them into a precise new type. If both are unbounded + // (or contain an unbounded element), we cannot combine them + // in this manner because tuples can contain at most one + // unbounded element. + if ( + !isUnboundedTupleClass(leftSubtypeExpanded) || + !isUnboundedTupleClass(rightSubtypeExpanded) + ) { + return ClassType.cloneAsInstance( + specializeTupleClass(tupleClassType, [ + ...leftSubtypeExpanded.priv.tupleTypeArgs, + ...rightSubtypeExpanded.priv.tupleTypeArgs, + ]) + ); + } + } + + const magicMethodName = binaryOperatorMap[operator][0]; + let resultTypeResult = evaluator.getTypeOfMagicMethodCall( + convertFunctionToObject(evaluator, leftSubtypeUnexpanded), + magicMethodName, + [{ type: rightSubtypeUnexpanded, isIncomplete: rightTypeResult.isIncomplete }], + errorNode, + inferenceContext + ); + + if (!resultTypeResult && leftSubtypeUnexpanded !== leftSubtypeExpanded) { + // Try the expanded left type. + resultTypeResult = evaluator.getTypeOfMagicMethodCall( + convertFunctionToObject(evaluator, leftSubtypeExpanded), + magicMethodName, + [{ type: rightSubtypeUnexpanded, isIncomplete: rightTypeResult.isIncomplete }], + errorNode, + inferenceContext + ); + } + + if (!resultTypeResult && rightSubtypeUnexpanded !== rightSubtypeExpanded) { + // Try the expanded left and right type. 
+ resultTypeResult = evaluator.getTypeOfMagicMethodCall( + convertFunctionToObject(evaluator, leftSubtypeExpanded), + magicMethodName, + [{ type: rightSubtypeExpanded, isIncomplete: rightTypeResult.isIncomplete }], + errorNode, + inferenceContext + ); + } + + if (!resultTypeResult) { + // Try the alternate form (swapping right and left). + const altMagicMethodName = binaryOperatorMap[operator][1]; + resultTypeResult = evaluator.getTypeOfMagicMethodCall( + convertFunctionToObject(evaluator, rightSubtypeUnexpanded), + altMagicMethodName, + [{ type: leftSubtypeUnexpanded, isIncomplete: leftTypeResult.isIncomplete }], + errorNode, + inferenceContext + ); + + if (!resultTypeResult && rightSubtypeUnexpanded !== rightSubtypeExpanded) { + // Try the expanded right type. + resultTypeResult = evaluator.getTypeOfMagicMethodCall( + convertFunctionToObject(evaluator, rightSubtypeExpanded), + altMagicMethodName, + [ + { + type: leftSubtypeUnexpanded, + isIncomplete: leftTypeResult.isIncomplete, + }, + ], + errorNode, + inferenceContext + ); + } + + if (!resultTypeResult && leftSubtypeUnexpanded !== leftSubtypeExpanded) { + // Try the expanded right and left type. 
+ resultTypeResult = evaluator.getTypeOfMagicMethodCall( + convertFunctionToObject(evaluator, rightSubtypeExpanded), + altMagicMethodName, + [{ type: leftSubtypeExpanded, isIncomplete: leftTypeResult.isIncomplete }], + errorNode, + inferenceContext + ); + } + } + + if (!resultTypeResult) { + if (inferenceContext && !isAnyOrUnknown(inferenceContext.expectedType)) { + diag.addMessage( + LocMessage.typeNotSupportBinaryOperatorBidirectional().format({ + operator: printOperator(operator), + leftType: evaluator.printType(leftSubtypeExpanded), + rightType: evaluator.printType(rightSubtypeExpanded), + expectedType: evaluator.printType(inferenceContext.expectedType), + }) + ); + } else { + diag.addMessage( + LocMessage.typeNotSupportBinaryOperator().format({ + operator: printOperator(operator), + leftType: evaluator.printType(leftSubtypeExpanded), + rightType: evaluator.printType(rightSubtypeExpanded), + }) + ); + } + } + + if (resultTypeResult?.magicMethodDeprecationInfo) { + deprecatedInfo = resultTypeResult.magicMethodDeprecationInfo; + } + + return resultTypeResult?.type ?? UnknownType.create(isIncomplete); + } + ); + } + ); + + return { type, magicMethodDeprecationInfo: deprecatedInfo }; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/packageTypeReport.ts b/python-parser/packages/pyright-internal/src/analyzer/packageTypeReport.ts new file mode 100644 index 00000000..9301475e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/packageTypeReport.ts @@ -0,0 +1,112 @@ +/* + * packageTypeReport.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Encapsulates the output of the package type verifier, + * storing information about the public symbols and whether + * they have known types. 
+ */ + +import { Diagnostic, DiagnosticWithinFile } from '../common/diagnostic'; +import { Uri } from '../common/uri/uri'; +import { ScopeType } from './scope'; + +export enum SymbolCategory { + Indeterminate, + Module, + Class, + Variable, + Constant, + Function, + Method, + TypeVar, + TypeAlias, +} + +// The order of these is important. Status values with higher numbers are +// considered "worse" than status values with lower numbers. +export const enum TypeKnownStatus { + Known = 0, // Type is fully known (declared) + Ambiguous = 1, // Type is inferred and potentially ambiguous (may differ by type checker) + PartiallyUnknown = 2, // Part of the type is unknown + Unknown = 3, // The type is completely unknown +} + +export interface SymbolInfo { + category: SymbolCategory; + name: string; + fullName: string; + fileUri: Uri; + isExported: boolean; + typeKnownStatus: TypeKnownStatus; + referenceCount: number; + diagnostics: DiagnosticWithinFile[]; + scopeType: ScopeType; +} + +export interface ModuleInfo { + name: string; + uri: Uri; + isExported: boolean; +} + +export type AlternateSymbolNameMap = Map; + +export interface PackageTypeReport { + packageName: string; + moduleName: string; + ignoreExternal: boolean; + packageRootDirectoryUri: Uri | undefined; + moduleRootDirectoryUri: Uri | undefined; + isModuleSingleFile: boolean; + pyTypedPathUri: Uri | undefined; + missingFunctionDocStringCount: number; + missingClassDocStringCount: number; + missingDefaultParamCount: number; + + // Tracks types that are re-exported from other modules + // and therefore have "aliased" full names that don't + // match the full name of the original declaration. + alternateSymbolNames: AlternateSymbolNameMap; + + // Information about each module in the package and + // imported by modules in the package. + modules: Map; + + // Diagnostics that are not related to specific types + // (e.g. missing py.typed file). 
+ generalDiagnostics: Diagnostic[]; + + // Information about all public symbols and the symbols + // they depend upon, indexed by the full name. + symbols: Map; +} + +export function getEmptyReport( + packageName: string, + packageRootUri: Uri, + moduleName: string, + moduleRootUri: Uri, + isModuleSingleFile: boolean, + ignoreExternal: boolean +) { + const report: PackageTypeReport = { + packageName, + ignoreExternal, + packageRootDirectoryUri: packageRootUri, + moduleName, + moduleRootDirectoryUri: moduleRootUri, + isModuleSingleFile, + pyTypedPathUri: undefined, + missingFunctionDocStringCount: 0, + missingClassDocStringCount: 0, + missingDefaultParamCount: 0, + alternateSymbolNames: new Map(), + modules: new Map(), + generalDiagnostics: [], + symbols: new Map(), + }; + return report; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/packageTypeVerifier.ts b/python-parser/packages/pyright-internal/src/analyzer/packageTypeVerifier.ts new file mode 100644 index 00000000..2fcd4672 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/packageTypeVerifier.ts @@ -0,0 +1,1581 @@ +/* + * packageTypeVerifier.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Validates the public symbols exported by a package to ensure + * that the types are complete. 
+ */ + +import { CommandLineOptions } from '../common/commandLineOptions'; +import { ConfigOptions, ExecutionEnvironment } from '../common/configOptions'; +import { NullConsole } from '../common/console'; +import { assert } from '../common/debug'; +import { Diagnostic, DiagnosticAddendum, DiagnosticCategory } from '../common/diagnostic'; +import { FullAccessHost } from '../common/fullAccessHost'; +import { Host } from '../common/host'; +import { getFileExtension, stripFileExtension } from '../common/pathUtils'; +import { ServiceProvider } from '../common/serviceProvider'; +import { getEmptyRange, Range } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { tryStat } from '../common/uri/uriUtils'; +import { DeclarationType, FunctionDeclaration, VariableDeclaration } from './declaration'; +import { createImportedModuleDescriptor, ImportResolver } from './importResolver'; +import { + AlternateSymbolNameMap, + getEmptyReport, + ModuleInfo, + PackageTypeReport, + SymbolCategory, + SymbolInfo, + TypeKnownStatus, +} from './packageTypeReport'; +import { Program } from './program'; +import { getPyTypedInfo, PyTypedInfo } from './pyTypedUtils'; +import { ScopeType } from './scope'; +import { getScopeForNode } from './scopeUtils'; +import { Symbol, SymbolTable } from './symbol'; +import { isDunderName, isPrivateOrProtectedName } from './symbolNameUtils'; +import { + ClassType, + FunctionParam, + FunctionType, + FunctionTypeFlags, + isClass, + isFunction, + isInstantiableClass, + isModule, + isTypeSame, + isUnknown, + ModuleType, + OverloadedType, + Type, + TypeBase, + TypeCategory, +} from './types'; +import { + doForEachSubtype, + getFullNameOfType, + isDescriptorInstance, + isEllipsisType, + isPartlyUnknown, + partiallySpecializeType, + specializeForBaseClass, +} from './typeUtils'; + +type PublicSymbolSet = Set; + +interface ModuleDirectoryInfo { + moduleDirectory: Uri; + isModuleSingleFile: boolean; +} + +export class PackageTypeVerifier { + 
private _configOptions: ConfigOptions; + private _execEnv: ExecutionEnvironment; + private _importResolver: ImportResolver; + private _program: Program; + + constructor( + private _serviceProvider: ServiceProvider, + private _host: Host, + commandLineOptions: CommandLineOptions, + private _packageName: string, + private _ignoreExternal = false + ) { + const host = new FullAccessHost(_serviceProvider); + this._configOptions = new ConfigOptions(Uri.empty()); + const console = new NullConsole(); + + // Make sure we have a default python platform and version. + // Allow the command-line parameters to override the normal defaults. + if (commandLineOptions.configSettings.pythonPlatform) { + this._configOptions.defaultPythonPlatform = commandLineOptions.configSettings.pythonPlatform; + } else { + this._configOptions.ensureDefaultPythonPlatform(host, console); + } + + if (commandLineOptions.configSettings.pythonVersion) { + this._configOptions.defaultPythonVersion = commandLineOptions.configSettings.pythonVersion; + } else { + this._configOptions.ensureDefaultPythonVersion(host, console); + } + + if (_ignoreExternal) { + this._configOptions.evaluateUnknownImportsAsAny = true; + } + + this._execEnv = this._configOptions.findExecEnvironment(Uri.file('.', _serviceProvider)); + this._importResolver = new ImportResolver(this._serviceProvider, this._configOptions, this._host); + this._program = new Program(this._importResolver, this._configOptions, this._serviceProvider); + } + + verify(): PackageTypeReport { + const trimmedModuleName = this._packageName.trim(); + const moduleNameParts = trimmedModuleName.split('.'); + + const packageDirectoryInfo = this._getDirectoryInfoForModule(moduleNameParts[0]); + const moduleDirectoryInfo = this._getDirectoryInfoForModule(trimmedModuleName); + + const report = getEmptyReport( + moduleNameParts[0], + packageDirectoryInfo?.moduleDirectory ?? Uri.empty(), + trimmedModuleName, + moduleDirectoryInfo?.moduleDirectory ?? 
Uri.empty(), + moduleDirectoryInfo?.isModuleSingleFile ?? false, + this._ignoreExternal + ); + const commonDiagnostics = report.generalDiagnostics; + + try { + if (!trimmedModuleName) { + commonDiagnostics.push( + new Diagnostic( + DiagnosticCategory.Error, + `Module name "${trimmedModuleName}" is invalid`, + getEmptyRange() + ) + ); + } else if (!report.moduleRootDirectoryUri) { + commonDiagnostics.push( + new Diagnostic( + DiagnosticCategory.Error, + `Module "${trimmedModuleName}" cannot be resolved`, + getEmptyRange() + ) + ); + } else { + let pyTypedInfo: PyTypedInfo | undefined; + if (report.moduleRootDirectoryUri) { + pyTypedInfo = this._getDeepestPyTypedInfo(report.moduleRootDirectoryUri, moduleNameParts); + } + + // If we couldn't find any "py.typed" info in the module path, search again + // starting at the package root. + if (!pyTypedInfo && report.packageRootDirectoryUri) { + pyTypedInfo = this._getDeepestPyTypedInfo(report.packageRootDirectoryUri, moduleNameParts); + } + + if (!pyTypedInfo) { + commonDiagnostics.push( + new Diagnostic(DiagnosticCategory.Error, 'No py.typed file found', getEmptyRange()) + ); + } else { + report.pyTypedPathUri = pyTypedInfo.pyTypedPath; + + const publicModules = this._getListOfPublicModules( + report.moduleRootDirectoryUri, + report.isModuleSingleFile, + trimmedModuleName + ); + + // If the filter eliminated all modules, report an error. + if (publicModules.length === 0) { + commonDiagnostics.push( + new Diagnostic( + DiagnosticCategory.Error, + `Module "${trimmedModuleName}" cannot be resolved`, + getEmptyRange() + ) + ); + } + + // Build a set of all public symbols exported by this package. We'll + // use this map to determine which diagnostics to report. We don't want + // to report diagnostics many times for types that include public types. 
+ const publicSymbols = new Set(); + publicModules.forEach((moduleName) => { + this._getPublicSymbolsForModule(moduleName, publicSymbols, report.alternateSymbolNames); + }); + + publicModules.forEach((moduleName) => { + this._verifyTypesOfModule(moduleName, publicSymbols, report); + }); + } + } + } catch (e: any) { + const message: string = + (e.stack ? e.stack.toString() : undefined) || + (typeof e.message === 'string' ? e.message : undefined) || + JSON.stringify(e); + commonDiagnostics.push( + new Diagnostic( + DiagnosticCategory.Error, + `An internal error occurred while verifying types: "${message}"`, + getEmptyRange() + ) + ); + } + + return report; + } + + static getSymbolCategoryString(symbolType: SymbolCategory): string { + switch (symbolType) { + case SymbolCategory.Class: + return 'class'; + + case SymbolCategory.Function: + return 'function'; + + case SymbolCategory.Method: + return 'method'; + + case SymbolCategory.Constant: + return 'constant'; + + case SymbolCategory.Variable: + return 'variable'; + + case SymbolCategory.Module: + return 'module'; + + case SymbolCategory.TypeAlias: + return 'type alias'; + + case SymbolCategory.TypeVar: + return 'type variable'; + + case SymbolCategory.Indeterminate: + return 'symbol'; + } + } + + private _getDeepestPyTypedInfo(rootDirectory: Uri, packageNameParts: string[]) { + let subNameParts = Array.from(packageNameParts); + + // Find the deepest py.typed file that corresponds to the requested submodule. 
+ while (subNameParts.length >= 1) { + const packageSubdir = rootDirectory.combinePaths(...subNameParts.slice(1)); + const pyTypedInfo = getPyTypedInfo(this._serviceProvider.fs(), packageSubdir); + if (pyTypedInfo) { + return pyTypedInfo; + } + + subNameParts = subNameParts.slice(0, subNameParts.length - 1); + } + + return undefined; + } + + private _resolveImport(moduleName: string) { + return this._importResolver.resolveImport( + Uri.empty(), + this._execEnv, + createImportedModuleDescriptor(moduleName) + ); + } + + private _getPublicSymbolsForModule( + moduleName: string, + publicSymbols: PublicSymbolSet, + alternateSymbolNames: AlternateSymbolNameMap + ) { + const importResult = this._resolveImport(moduleName); + + if (importResult.isImportFound) { + const modulePath = importResult.resolvedUris[importResult.resolvedUris.length - 1]; + this._program.addTrackedFiles([modulePath], /* isThirdPartyImport */ true, /* isInPyTypedPackage */ true); + + const sourceFile = this._program.getBoundSourceFile(modulePath); + + if (sourceFile) { + const module: ModuleInfo = { + name: moduleName, + uri: modulePath, + isExported: true, + }; + + const parseTree = sourceFile.getParserOutput()!.parseTree; + const moduleScope = getScopeForNode(parseTree)!; + + this._getPublicSymbolsInSymbolTable( + publicSymbols, + alternateSymbolNames, + module, + module.name, + moduleScope.symbolTable, + ScopeType.Module + ); + } + } + } + + private _getPublicSymbolsInSymbolTable( + publicSymbols: PublicSymbolSet, + alternateSymbolNames: AlternateSymbolNameMap, + module: ModuleInfo, + scopeName: string, + symbolTable: SymbolTable, + scopeType: ScopeType + ) { + symbolTable.forEach((symbol, name) => { + if ( + !isPrivateOrProtectedName(name) && + !symbol.isIgnoredForProtocolMatch() && + !this._isSymbolTypeImplied(scopeType, name) + ) { + const fullName = `${scopeName}.${name}`; + + if (!symbol.isExternallyHidden() && !symbol.isPrivateMember() && !symbol.isPrivatePyTypedImport()) { + const symbolType 
= this._program.getTypeOfSymbol(symbol); + publicSymbols.add(fullName); + + const typedDecls = symbol.getTypedDeclarations(); + + if (typedDecls.length > 0) { + // Is this a class declared within this module or class? + // If so, add the symbols declared within it. + const classDecl = typedDecls.find((decl) => decl.type === DeclarationType.Class); + if (classDecl) { + if (isInstantiableClass(symbolType)) { + this._getPublicSymbolsInSymbolTable( + publicSymbols, + alternateSymbolNames, + module, + fullName, + ClassType.getSymbolTable(symbolType), + ScopeType.Class + ); + } + } + } + + // Is this the re-export of an import? If so, record the alternate name. + const importDecl = symbol.getDeclarations().find((decl) => decl.type === DeclarationType.Alias); + if (importDecl && importDecl.type === DeclarationType.Alias) { + const typeName = getFullNameOfType(this._program.getTypeOfSymbol(symbol)); + if (typeName) { + this._addAlternateSymbolName(alternateSymbolNames, typeName, fullName); + } + } + } + } + }); + } + + private _addAlternateSymbolName(map: AlternateSymbolNameMap, name: string, altName: string) { + if (name !== altName) { + let altNameList = map.get(name); + + if (!altNameList) { + altNameList = []; + map.set(name, altNameList); + } + + // Add the alternate name if it's unique. 
+ if (!altNameList.some((name) => name === altName)) { + altNameList.push(altName); + } + } + } + + private _verifyTypesOfModule(moduleName: string, publicSymbols: PublicSymbolSet, report: PackageTypeReport) { + const importResult = this._resolveImport(moduleName); + if (!importResult.isImportFound) { + report.generalDiagnostics.push( + new Diagnostic(DiagnosticCategory.Error, `Could not resolve module "${moduleName}"`, getEmptyRange()) + ); + } else if (importResult.isStubPackage) { + report.generalDiagnostics.push( + new Diagnostic( + DiagnosticCategory.Error, + `No inlined types found for module "${moduleName}" because stub package was present`, + getEmptyRange() + ) + ); + } else { + const modulePath = importResult.resolvedUris[importResult.resolvedUris.length - 1]; + + const module: ModuleInfo = { + name: moduleName, + uri: modulePath, + isExported: true, + }; + + report.modules.set(modulePath.key, module); + this._program.addTrackedFiles([modulePath], /* isThirdPartyImport */ true, /* isInPyTypedPackage */ true); + + const sourceFile = this._program.getBoundSourceFile(modulePath); + + if (sourceFile) { + const parseTree = sourceFile.getParserOutput()!.parseTree; + const moduleScope = getScopeForNode(parseTree)!; + + this._getTypeKnownStatusForSymbolTable( + report, + module.name, + moduleScope.symbolTable, + ScopeType.Module, + publicSymbols + ); + } else { + report.generalDiagnostics.push( + new Diagnostic(DiagnosticCategory.Error, `Could not bind file "${modulePath}"`, getEmptyRange()) + ); + } + } + } + + // Scans the directory structure for a list of public modules + // within the package. + private _getListOfPublicModules(moduleRoot: Uri, isModuleSingleFile: boolean, moduleName: string): string[] { + const publicModules: string[] = []; + this._addPublicModulesRecursive(moduleRoot, isModuleSingleFile, moduleName, publicModules); + + // Make sure modules are unique. There may be duplicates if a ".py" and ".pyi" + // exist for some modules. 
+ const uniqueModules: string[] = []; + const moduleMap = new Map(); + + publicModules.forEach((module) => { + if (!moduleMap.has(module)) { + uniqueModules.push(module); + moduleMap.set(module, module); + } + }); + + return uniqueModules; + } + + private _addPublicModulesRecursive( + dirPath: Uri, + isModuleSingleFile: boolean, + modulePath: string, + publicModules: string[] + ) { + const dirEntries = this._serviceProvider.fs().readdirEntriesSync(dirPath); + + dirEntries.forEach((entry) => { + let isFile = entry.isFile(); + let isDirectory = entry.isDirectory(); + if (entry.isSymbolicLink()) { + const stat = tryStat(this._serviceProvider.fs(), dirPath.combinePaths(entry.name)); + if (stat) { + isFile = stat.isFile(); + isDirectory = stat.isDirectory(); + } + } + + if (isFile) { + const fileExtension = getFileExtension(entry.name); + + if (fileExtension === '.py' || fileExtension === '.pyi') { + const nameWithoutExtension = stripFileExtension(entry.name); + + if (nameWithoutExtension === '__init__') { + if (!isModuleSingleFile) { + publicModules.push(modulePath); + } + } else { + if ( + !isPrivateOrProtectedName(nameWithoutExtension) && + this._isLegalModulePartName(nameWithoutExtension) + ) { + if (isModuleSingleFile) { + if (modulePath.endsWith(`.${nameWithoutExtension}`)) { + publicModules.push(modulePath); + } + } else { + publicModules.push(`${modulePath}.${nameWithoutExtension}`); + } + } + } + } + } else if (isDirectory && !isModuleSingleFile) { + if (!isPrivateOrProtectedName(entry.name) && this._isLegalModulePartName(entry.name)) { + this._addPublicModulesRecursive( + dirPath.combinePaths(entry.name), + isModuleSingleFile, + `${modulePath}.${entry.name}`, + publicModules + ); + } + } + }); + } + + private _isLegalModulePartName(name: string): boolean { + // PEP8 indicates that all module names should be lowercase + // with underscores. It doesn't talk about non-ASCII + // characters, but it appears that's the convention. 
+ return !!name.match(/[a-z_]+/); + } + + private _shouldIgnoreType(report: PackageTypeReport, fullTypeName: string) { + // If we're ignoring unknown types from other packages, see if we should skip. + return report.ignoreExternal && !fullTypeName.startsWith(report.packageName); + } + + private _getTypeKnownStatusForSymbolTable( + report: PackageTypeReport, + scopeName: string, + symbolTable: SymbolTable, + scopeType: ScopeType, + publicSymbols: PublicSymbolSet, + overrideSymbolCallback?: (name: string, symbol: Symbol) => Type | undefined + ): TypeKnownStatus { + if (this._shouldIgnoreType(report, scopeName)) { + return TypeKnownStatus.Known; + } + + let knownStatus = TypeKnownStatus.Known; + + symbolTable.forEach((symbol, name) => { + if ( + !isPrivateOrProtectedName(name) && + !symbol.isExternallyHidden() && + !symbol.isPrivateMember() && + !symbol.isPrivatePyTypedImport() && + !symbol.isIgnoredForProtocolMatch() && + !this._isSymbolTypeImplied(scopeType, name) + ) { + const fullName = `${scopeName}.${name}`; + + // If the symbol was already cached, update its reference count + // and skip the rest. + const cachedSymbolInfo = report.symbols.get(fullName); + if (cachedSymbolInfo) { + cachedSymbolInfo.referenceCount++; + return; + } + + let symbolType = this._program.getTypeOfSymbol(symbol); + + let usesAmbiguousOverride = false; + let baseSymbolType: Type | undefined; + let childSymbolType: Type | undefined; + + if (overrideSymbolCallback) { + const baseSymbolType = overrideSymbolCallback(name, symbol); + + if (baseSymbolType) { + childSymbolType = symbolType; + + // If the inferred type is ambiguous or the declared base class type is + // not the same type as the inferred type, mark it as ambiguous because + // different type checkers will get different results. 
+ if (TypeBase.isAmbiguous(childSymbolType) || !isTypeSame(baseSymbolType, childSymbolType)) { + // If the base type is known to be a descriptor with a setter, + // assume that the child class is simply writing to the base class's setter. + if (!isDescriptorInstance(baseSymbolType, /* requireSetter */ true)) { + usesAmbiguousOverride = true; + } + } + + symbolType = baseSymbolType; + } + } + + const typedDecls = symbol.getTypedDeclarations(); + const primaryDecl = typedDecls.length > 0 ? typedDecls[typedDecls.length - 1] : undefined; + let symbolInfo: SymbolInfo; + + if (primaryDecl?.type === DeclarationType.Class && isInstantiableClass(symbolType)) { + symbolInfo = this._getSymbolForClass(report, symbolType, publicSymbols); + } else if (primaryDecl?.type === DeclarationType.Alias && isModule(symbolType)) { + symbolInfo = this._getSymbolForModule(report, symbolType, publicSymbols); + } else { + const decls = symbol.getDeclarations(); + const primaryDecl = decls.length > 0 ? decls[decls.length - 1] : undefined; + const declRange = primaryDecl?.range || getEmptyRange(); + const declPath = primaryDecl?.uri || Uri.empty(); + const symbolCategory = this._getSymbolCategory(symbol, symbolType); + const isExported = publicSymbols.has(fullName); + + // If the only reference to this symbol is a "__slots__" entry, we will + // skip it when considering type completeness. + if ( + decls.length === 1 && + primaryDecl?.type === DeclarationType.Variable && + primaryDecl.isDefinedBySlots + ) { + return; + } + + symbolInfo = { + category: symbolCategory, + name, + fullName, + fileUri: declPath, + isExported, + typeKnownStatus: TypeKnownStatus.Known, + referenceCount: 1, + diagnostics: [], + scopeType, + }; + + this._addSymbol(report, symbolInfo); + + if (primaryDecl) { + let resolvedDecl = primaryDecl; + if (resolvedDecl.type === DeclarationType.Alias) { + resolvedDecl = + this._program.evaluator?.resolveAliasDeclaration( + resolvedDecl, + /* resolveLocalNames */ true + ) ?? 
resolvedDecl; + } + + if (resolvedDecl.type === DeclarationType.Class && isClass(symbolType)) { + this._reportMissingClassDocstring(symbolInfo, symbolType, report); + } + + if (resolvedDecl.type === DeclarationType.Function && isFunction(symbolType)) { + this._reportMissingFunctionDocstring(symbolInfo, symbolType, declRange, declPath, report); + } + } + + if (!this._isSymbolTypeImplied(scopeType, name)) { + this._getSymbolTypeKnownStatus( + report, + symbolInfo, + symbolType, + declRange, + declPath, + publicSymbols + ); + } + } + + if (usesAmbiguousOverride) { + const decls = symbol.getDeclarations(); + const primaryDecl = decls.length > 0 ? decls[decls.length - 1] : undefined; + const declRange = primaryDecl?.range || getEmptyRange(); + const declPath = primaryDecl?.uri || Uri.empty(); + + const extraInfo = new DiagnosticAddendum(); + if (baseSymbolType) { + extraInfo.addMessage( + `Type declared in base class is "${this._program.printType(baseSymbolType)}"` + ); + } + + if (childSymbolType) { + extraInfo.addMessage( + `Type inferred in child class is "${this._program.printType(childSymbolType)}"` + ); + + if (TypeBase.isAmbiguous(childSymbolType)) { + extraInfo.addMessage( + 'Inferred child class type is missing type annotation and could be inferred differently by type checkers' + ); + } + } + + this._addSymbolError( + symbolInfo, + `Ambiguous base class override` + extraInfo.getString(), + declRange, + declPath + ); + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + TypeKnownStatus.Ambiguous + ); + } + + knownStatus = this._updateKnownStatusIfWorse(knownStatus, symbolInfo.typeKnownStatus); + } + }); + + return knownStatus; + } + + private _reportMissingClassDocstring(symbolInfo: SymbolInfo, type: ClassType, report: PackageTypeReport) { + if (type.shared.docString) { + return; + } + + this._addSymbolWarning( + symbolInfo, + `No docstring found for class "${symbolInfo.fullName}"`, + getEmptyRange(), + Uri.empty() + ); + 
+ report.missingClassDocStringCount++; + } + + private _reportMissingFunctionDocstring( + symbolInfo: SymbolInfo | undefined, + type: FunctionType, + declRange: Range | undefined, + declFileUri: Uri | undefined, + report: PackageTypeReport + ) { + if ( + type.shared.parameters.find((_, index) => { + const defaultType = FunctionType.getParamDefaultType(type, index); + return defaultType && isEllipsisType(defaultType); + }) + ) { + if (symbolInfo) { + this._addSymbolWarning( + symbolInfo, + `One or more default values in function "${symbolInfo.fullName}" is specified as "..."`, + declRange ?? getEmptyRange(), + declFileUri ?? Uri.empty() + ); + } + + report.missingDefaultParamCount++; + } + + if (type.shared.docString) { + return; + } + + // Don't require docstrings for dunder methods. + if (symbolInfo && isDunderName(symbolInfo.name)) { + return; + } + + // Don't require docstrings for overloads. + if (FunctionType.isOverloaded(type)) { + return; + } + + if (symbolInfo) { + this._addSymbolWarning( + symbolInfo, + `No docstring found for function "${symbolInfo.fullName}"`, + declRange ?? getEmptyRange(), + declFileUri ?? Uri.empty() + ); + } + + report.missingFunctionDocStringCount++; + } + + // Determines whether the type for the symbol in question is fully known. + // If not, it adds diagnostics to the symbol information and updates the + // typeKnownStatus field. 
+ private _getSymbolTypeKnownStatus( + report: PackageTypeReport, + symbolInfo: SymbolInfo, + type: Type, + declRange: Range, + declFileUri: Uri, + publicSymbols: PublicSymbolSet + ): TypeKnownStatus { + let knownStatus = TypeKnownStatus.Known; + + const aliasInfo = type.props?.typeAliasInfo; + if (aliasInfo?.typeArgs) { + aliasInfo.typeArgs.forEach((typeArg, index) => { + if (isUnknown(typeArg)) { + this._addSymbolError( + symbolInfo, + `Type argument ${index + 1} for type alias "${aliasInfo!.shared.name}" has unknown type`, + declRange, + declFileUri + ); + knownStatus = TypeKnownStatus.Unknown; + } else if (isPartlyUnknown(typeArg)) { + this._addSymbolError( + symbolInfo, + `Type argument ${index + 1} for type alias "${ + aliasInfo!.shared.name + }" has partially unknown type`, + declRange, + declFileUri + ); + knownStatus = TypeKnownStatus.PartiallyUnknown; + } + }); + } + + if (TypeBase.isAmbiguous(type) && !isUnknown(type)) { + const ambiguousDiag = new DiagnosticAddendum(); + ambiguousDiag.addMessage(`Inferred type is "${this._program.printType(type)}"`); + this._addSymbolError( + symbolInfo, + 'Type is missing type annotation and could be inferred differently by type checkers' + + ambiguousDiag.getString(), + declRange, + declFileUri + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Ambiguous); + } + + switch (type.category) { + case TypeCategory.Unbound: + case TypeCategory.Any: + case TypeCategory.Never: + case TypeCategory.TypeVar: + break; + + case TypeCategory.Unknown: { + this._addSymbolError( + symbolInfo, + `Type unknown for ${PackageTypeVerifier.getSymbolCategoryString(symbolInfo.category)} "${ + symbolInfo.fullName + }"`, + declRange, + declFileUri + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + break; + } + + case TypeCategory.Union: { + doForEachSubtype(type, (subtype) => { + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getSymbolTypeKnownStatus( + 
report, + symbolInfo, + subtype, + declRange, + declFileUri, + publicSymbols + ) + ); + }); + break; + } + + case TypeCategory.Overloaded: { + for (const overload of OverloadedType.getOverloads(type)) { + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getSymbolTypeKnownStatus( + report, + symbolInfo, + overload, + declRange, + declFileUri, + publicSymbols + ) + ); + } + break; + } + + case TypeCategory.Function: { + if (!this._shouldIgnoreType(report, type.shared.fullName)) { + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getFunctionTypeKnownStatus( + report, + type, + publicSymbols, + symbolInfo, + declRange, + declFileUri, + undefined /* diag */ + ) + ); + } + + break; + } + + case TypeCategory.Class: { + // Properties require special handling. + if (TypeBase.isInstance(type) && ClassType.isPropertyClass(type)) { + const propMethodInfo: [string, (c: ClassType) => FunctionType | undefined][] = [ + ['fget', (c) => c.priv.fgetInfo?.methodType], + ['fset', (c) => c.priv.fsetInfo?.methodType], + ['fdel', (c) => c.priv.fdelInfo?.methodType], + ]; + + const propertyClass = type; + + propMethodInfo.forEach((info) => { + const methodAccessor = info[1]; + let accessType = methodAccessor(propertyClass); + + if (!accessType) { + return; + } + + if (isFunction(accessType)) { + // The processing for fget, fset and fdel mark the methods as "static" so they + // work properly when accessed directly from the property object. We need + // to remove this flag here so the method is seen as an instance method rather than + // static. Otherwise we'll incorrectly report that "self" is not annotated. 
+ accessType = FunctionType.cloneWithNewFlags( + accessType, + accessType.shared.flags & ~FunctionTypeFlags.StaticMethod + ); + } + + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getSymbolTypeKnownStatus( + report, + symbolInfo, + accessType, + getEmptyRange(), + Uri.empty(), + publicSymbols + ) + ); + }); + + break; + } + + if (!this._shouldIgnoreType(report, type.shared.fullName)) { + // Don't bother type-checking built-in types. + if (!ClassType.isBuiltIn(type)) { + const symbolInfo = this._getSymbolForClass(report, type, publicSymbols); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, symbolInfo.typeKnownStatus); + } + } + + // Analyze type arguments if present to make sure they are known. + if (type.priv.typeArgs) { + type.priv.typeArgs!.forEach((typeArg, index) => { + if (isUnknown(typeArg)) { + this._addSymbolError( + symbolInfo, + `Type argument ${index + 1} for class "${type.shared.name}" has unknown type`, + declRange, + declFileUri + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + } else if (isPartlyUnknown(typeArg)) { + const diag = new DiagnosticAddendum(); + diag.addMessage(`Type is ${(this._program.printType(typeArg), { expandTypeAlias: true })}`); + this._addSymbolError( + symbolInfo, + `Type argument ${index + 1} for class "${ + type.shared.name + }" has partially unknown type` + diag.getString(), + declRange, + declFileUri + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.PartiallyUnknown); + } + }); + } + + break; + } + + case TypeCategory.Module: { + if (!this._shouldIgnoreType(report, type.priv.moduleName)) { + const moduleSymbol = this._getSymbolForModule(report, type, publicSymbols); + if (moduleSymbol.typeKnownStatus !== TypeKnownStatus.Known) { + this._addSymbolError( + symbolInfo, + `Module "${moduleSymbol.fullName}" is partially unknown`, + declRange, + declFileUri + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, 
moduleSymbol.typeKnownStatus); + } + } + + break; + } + } + + // Downgrade the symbol's type known status info. + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse(symbolInfo.typeKnownStatus, knownStatus); + + return knownStatus; + } + + private _getFunctionTypeKnownStatus( + report: PackageTypeReport, + type: FunctionType, + publicSymbols: PublicSymbolSet, + symbolInfo?: SymbolInfo, + declRange?: Range, + declFileUri?: Uri, + diag?: DiagnosticAddendum + ): TypeKnownStatus { + let knownStatus = TypeKnownStatus.Known; + + // If the file path wasn't provided, try to get it from the type. + if (type.shared.declaration && !declFileUri) { + declFileUri = type.shared.declaration.uri; + } + + type.shared.parameters.forEach((param, index) => { + const paramType = FunctionType.getParamType(type, index); + + // Skip nameless parameters like "*" and "/". + if (param.name) { + if (!FunctionParam.isTypeDeclared(param)) { + // Allow params (like "self" and "cls") to skip declarations because + // we're able to synthesize these. + const isSynthesized = + index === 0 && + symbolInfo?.scopeType === ScopeType.Class && + (FunctionType.isClassMethod(type) || + FunctionType.isInstanceMethod(type) || + FunctionType.isConstructorMethod(type)); + + if (!isSynthesized) { + if (symbolInfo) { + this._addSymbolError( + symbolInfo, + `Type annotation for parameter "${param.name}" is missing`, + declRange ?? getEmptyRange(), + declFileUri ?? Uri.empty() + ); + } + diag?.createAddendum().addMessage(`Type annotation for parameter "${param.name}" is missing`); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + } + } else if (isUnknown(paramType)) { + if (symbolInfo) { + this._addSymbolError( + symbolInfo, + `Type of parameter "${param.name}" is unknown`, + declRange ?? getEmptyRange(), + declFileUri ?? 
Uri.empty() + ); + diag?.createAddendum().addMessage(`Type of parameter "${param.name}" is unknown`); + } + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + } else { + const extraInfo = new DiagnosticAddendum(); + const paramKnownStatus = this._getTypeKnownStatus( + report, + paramType, + publicSymbols, + extraInfo.createAddendum() + ); + + if (paramKnownStatus !== TypeKnownStatus.Known) { + extraInfo.addMessage( + `Parameter type is "${this._program.printType(paramType, { expandTypeAlias: true })}"` + ); + + if (symbolInfo) { + this._addSymbolError( + symbolInfo, + `Type of parameter "${param.name}" is partially unknown` + extraInfo.getString(), + declRange ?? getEmptyRange(), + declFileUri ?? Uri.empty() + ); + } + + if (diag) { + const subDiag = diag.createAddendum(); + subDiag.addMessage(`Type of parameter "${param.name}" is partially unknown`); + subDiag.addAddendum(extraInfo); + } + + knownStatus = this._updateKnownStatusIfWorse(knownStatus, paramKnownStatus); + } + } + } + }); + + if (type.shared.declaredReturnType) { + if (isUnknown(type.shared.declaredReturnType)) { + if (symbolInfo) { + this._addSymbolError( + symbolInfo, + `Return type is unknown`, + declRange ?? getEmptyRange(), + declFileUri ?? Uri.empty() + ); + } + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + } else { + const extraInfo = new DiagnosticAddendum(); + const returnTypeKnownStatus = this._getTypeKnownStatus( + report, + type.shared.declaredReturnType, + publicSymbols, + extraInfo.createAddendum() + ); + + if (returnTypeKnownStatus !== TypeKnownStatus.Known) { + extraInfo.addMessage( + `Return type is "${this._program.printType(type.shared.declaredReturnType, { + expandTypeAlias: true, + })}"` + ); + + if (symbolInfo) { + this._addSymbolError( + symbolInfo, + `Return type is partially unknown` + extraInfo.getString(), + declRange ?? getEmptyRange(), + declFileUri ?? 
Uri.empty() + ); + } + + if (diag) { + const subDiag = diag.createAddendum(); + subDiag.addMessage(`Return type is partially unknown`); + subDiag.addAddendum(extraInfo); + } + + knownStatus = this._updateKnownStatusIfWorse(knownStatus, returnTypeKnownStatus); + } + } + } else { + // Init methods have an implied return type. + if (type.shared.name !== '__init__') { + if (symbolInfo) { + this._addSymbolError( + symbolInfo, + `Return type annotation is missing`, + declRange ?? getEmptyRange(), + declFileUri ?? Uri.empty() + ); + } + diag?.createAddendum().addMessage(`Return type annotation is missing`); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + } + } + + if (symbolInfo) { + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse(symbolInfo.typeKnownStatus, knownStatus); + } + + return knownStatus; + } + + private _getSymbolForClass(report: PackageTypeReport, type: ClassType, publicSymbols: PublicSymbolSet): SymbolInfo { + // See if this type is already analyzed. + const cachedType = report.symbols.get(type.shared.fullName); + if (cachedType) { + cachedType.referenceCount++; + return cachedType; + } + + const symbolInfo: SymbolInfo = { + category: SymbolCategory.Class, + name: type.shared.name, + fullName: type.shared.fullName, + fileUri: type.shared.fileUri, + isExported: publicSymbols.has(type.shared.fullName), + typeKnownStatus: TypeKnownStatus.Known, + referenceCount: 1, + diagnostics: [], + scopeType: ScopeType.Class, + }; + + this._addSymbol(report, symbolInfo); + + // Determine whether the class has a proper doc string. 
+ this._reportMissingClassDocstring(symbolInfo, type, report); + + const symbolTableTypeKnownStatus = this._getTypeKnownStatusForSymbolTable( + report, + type.shared.fullName, + ClassType.getSymbolTable(type), + ScopeType.Class, + publicSymbols, + (name: string, symbol: Symbol) => { + // If the symbol within this class is lacking a type declaration, + // see if we can find a same-named symbol in a parent class with + // a type declaration. + if (symbol.hasTypedDeclarations()) { + return undefined; + } + + for (const mroClass of type.shared.mro.slice(1)) { + if (isClass(mroClass)) { + const overrideSymbol = ClassType.getSymbolTable(mroClass).get(name); + if (overrideSymbol && overrideSymbol.hasTypedDeclarations()) { + const baseSymbolType = this._program.getTypeOfSymbol(overrideSymbol); + const baseClassType = specializeForBaseClass(type, mroClass); + + return partiallySpecializeType(baseSymbolType, baseClassType, /* typeClass */ undefined); + } + } + } + + return undefined; + } + ); + + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + symbolTableTypeKnownStatus + ); + + // Add information for the metaclass. 
+ if (type.shared.effectiveMetaclass) { + if (!isInstantiableClass(type.shared.effectiveMetaclass)) { + this._addSymbolError(symbolInfo, `Type of metaclass unknown`, getEmptyRange(), Uri.empty()); + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + TypeKnownStatus.PartiallyUnknown + ); + } else { + const diag = new DiagnosticAddendum(); + const metaclassKnownStatus = this._getTypeKnownStatus( + report, + type.shared.effectiveMetaclass, + publicSymbols, + diag + ); + + if (metaclassKnownStatus !== TypeKnownStatus.Known) { + this._addSymbolError( + symbolInfo, + `Type of metaclass "${type.shared.effectiveMetaclass.shared.name}" is partially unknown` + + diag.getString(), + getEmptyRange(), + Uri.empty() + ); + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + metaclassKnownStatus + ); + } + } + } + + // Add information for base classes. + type.shared.baseClasses.forEach((baseClass) => { + if (!isInstantiableClass(baseClass)) { + this._addSymbolError(symbolInfo, `Type of base class unknown`, getEmptyRange(), Uri.empty()); + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + TypeKnownStatus.PartiallyUnknown + ); + } else { + // Handle "tuple" specially. Even though it's a generic class, it + // doesn't require a type argument. 
+ if (ClassType.isBuiltIn(baseClass, 'tuple')) { + return; + } + + const diag = new DiagnosticAddendum(); + const baseClassTypeStatus = this._getTypeKnownStatus(report, baseClass, publicSymbols, diag); + + if (baseClassTypeStatus !== TypeKnownStatus.Known) { + this._addSymbolError( + symbolInfo, + `Type of base class "${baseClass.shared.fullName}" is partially unknown` + diag.getString(), + getEmptyRange(), + Uri.empty() + ); + + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + baseClassTypeStatus + ); + } + } + }); + + return symbolInfo; + } + + private _getSymbolForModule( + report: PackageTypeReport, + type: ModuleType, + publicSymbols: PublicSymbolSet + ): SymbolInfo { + // See if this type is already analyzed. + const cachedType = report.symbols.get(type.priv.moduleName); + if (cachedType) { + cachedType.referenceCount++; + return cachedType; + } + + const symbolInfo: SymbolInfo = { + category: SymbolCategory.Module, + name: type.priv.moduleName, + fullName: type.priv.moduleName, + fileUri: type.priv.fileUri, + isExported: publicSymbols.has(type.priv.moduleName), + typeKnownStatus: TypeKnownStatus.Known, + referenceCount: 1, + diagnostics: [], + scopeType: ScopeType.Module, + }; + + // Add the symbol for the module if the name isn't relative. 
+ if (!type.priv.moduleName.startsWith('.')) { + this._addSymbol(report, symbolInfo); + } + + const symbolTableTypeKnownStatus = this._getTypeKnownStatusForSymbolTable( + report, + type.priv.moduleName, + type.priv.fields, + ScopeType.Module, + publicSymbols + ); + + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + symbolTableTypeKnownStatus + ); + + return symbolInfo; + } + + private _getTypeKnownStatus( + report: PackageTypeReport, + type: Type, + publicSymbols: PublicSymbolSet, + diag: DiagnosticAddendum + ): TypeKnownStatus { + let knownStatus = TypeKnownStatus.Known; + + const aliasInfo = type.props?.typeAliasInfo; + if (aliasInfo?.typeArgs) { + aliasInfo.typeArgs.forEach((typeArg, index) => { + if (isUnknown(typeArg)) { + diag.addMessage( + `Type argument ${index + 1} for type alias "${aliasInfo!.shared.name}" has unknown type` + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + } else if (isPartlyUnknown(typeArg)) { + diag.addMessage( + `Type argument ${index + 1} for type alias "${ + aliasInfo!.shared.name + }" has partially unknown type` + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.PartiallyUnknown); + } + }); + } + + if (TypeBase.isAmbiguous(type)) { + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Ambiguous); + } + + switch (type.category) { + case TypeCategory.Unbound: + case TypeCategory.Any: + case TypeCategory.Never: + case TypeCategory.TypeVar: + break; + + case TypeCategory.Unknown: { + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + break; + } + + case TypeCategory.Union: { + doForEachSubtype(type, (subtype) => { + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getTypeKnownStatus(report, subtype, publicSymbols, diag.createAddendum()) + ); + }); + + break; + } + + case TypeCategory.Overloaded: { + for (const overload of 
OverloadedType.getOverloads(type)) { + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getTypeKnownStatus(report, overload, publicSymbols, diag.createAddendum()) + ); + } + + break; + } + + case TypeCategory.Function: { + if (!this._shouldIgnoreType(report, type.shared.fullName)) { + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getFunctionTypeKnownStatus( + report, + type, + publicSymbols, + /* symbolInfo */ undefined, + /* declRange */ undefined, + /* declFilePath */ undefined, + diag + ) + ); + } + + break; + } + + case TypeCategory.Class: { + if (!this._shouldIgnoreType(report, type.shared.fullName)) { + // Don't bother type-checking built-in types. + if (!ClassType.isBuiltIn(type)) { + const symbolInfo = this._getSymbolForClass(report, type, publicSymbols); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, symbolInfo.typeKnownStatus); + } + } + + // Analyze type arguments if present to make sure they are known. + if (type.priv.typeArgs) { + type.priv.typeArgs!.forEach((typeArg, index) => { + if (isUnknown(typeArg)) { + diag.addMessage( + `Type argument ${index + 1} for class "${type.shared.name}" has unknown type` + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + } else if (isPartlyUnknown(typeArg)) { + diag.addMessage( + `Type argument ${index + 1} for class "${type.shared.name}" has partially unknown type` + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.PartiallyUnknown); + } + }); + } + + break; + } + + case TypeCategory.Module: { + if (!this._shouldIgnoreType(report, type.priv.moduleName)) { + const moduleSymbol = this._getSymbolForModule(report, type, publicSymbols); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, moduleSymbol.typeKnownStatus); + } + + break; + } + } + + return knownStatus; + } + + private _getSymbolCategory(symbol: Symbol, type: Type): SymbolCategory { + if (type.props?.typeAliasInfo) { + return 
SymbolCategory.TypeAlias; + } + + switch (type.category) { + case TypeCategory.Function: + case TypeCategory.Overloaded: { + const funcDecl = symbol + .getDeclarations() + .find((decl) => decl.type === DeclarationType.Function) as FunctionDeclaration; + if (funcDecl && funcDecl.isMethod) { + return SymbolCategory.Method; + } + + return SymbolCategory.Function; + } + + case TypeCategory.Class: { + if (TypeBase.isInstantiable(type)) { + return SymbolCategory.Class; + } + + const varDecl = symbol + .getDeclarations() + .find((decl) => decl.type === DeclarationType.Variable) as VariableDeclaration; + if (varDecl && (varDecl.isConstant || varDecl.isFinal)) { + return SymbolCategory.Constant; + } + return SymbolCategory.Variable; + } + + case TypeCategory.Module: { + return SymbolCategory.Module; + } + + case TypeCategory.TypeVar: { + return SymbolCategory.TypeVar; + } + + default: { + const varDecl = symbol + .getDeclarations() + .find((decl) => decl.type === DeclarationType.Variable) as VariableDeclaration; + if (varDecl) { + if (varDecl.isConstant || varDecl.isFinal) { + return SymbolCategory.Constant; + } else { + return SymbolCategory.Variable; + } + } + + return SymbolCategory.Indeterminate; + } + } + } + + private _getDirectoryInfoForModule(moduleName: string): ModuleDirectoryInfo | undefined { + const importResult = this._importResolver.resolveImport( + Uri.empty(), + this._execEnv, + createImportedModuleDescriptor(moduleName) + ); + + if (importResult.isImportFound) { + const resolvedPath = importResult.resolvedUris[importResult.resolvedUris.length - 1]; + + // If it's a namespace package with no __init__.py(i), use the package + // directory instead. + const moduleDirectory = resolvedPath + ? resolvedPath.getDirectory() + : importResult.packageDirectory ?? 
Uri.empty();
+ let isModuleSingleFile = false;
+ if (resolvedPath && !resolvedPath.isEmpty() && stripFileExtension(resolvedPath.fileName) !== '__init__') {
+ isModuleSingleFile = true;
+ }
+
+ return {
+ moduleDirectory,
+ isModuleSingleFile,
+ };
+ }
+
+ return undefined;
+ }
+
+ private _isSymbolTypeImplied(scopeType: ScopeType, name: string) {
+ if (scopeType === ScopeType.Class) {
+ const knownClassSymbols = [
+ '__class__',
+ '__dict__',
+ '__doc__',
+ '__module__',
+ '__qualname__',
+ '__slots__',
+ '__all__',
+ '__weakref__',
+ ];
+ return knownClassSymbols.some((sym) => sym === name);
+ } else if (scopeType === ScopeType.Module) {
+ const knownModuleSymbols = [
+ '__all__',
+ '__author__',
+ '__copyright__',
+ '__email__',
+ '__license__',
+ '__title__',
+ '__uri__',
+ '__version__',
+ ];
+ return knownModuleSymbols.some((sym) => sym === name);
+ }
+
+ return false;
+ }
+
+ private _addSymbol(report: PackageTypeReport, symbolInfo: SymbolInfo) {
+ assert(!report.symbols.has(symbolInfo.fullName));
+ report.symbols.set(symbolInfo.fullName, symbolInfo);
+ }
+
+ private _addSymbolError(symbolInfo: SymbolInfo, message: string, declRange: Range, declUri: Uri) {
+ symbolInfo.diagnostics.push({
+ diagnostic: new Diagnostic(DiagnosticCategory.Error, message, declRange),
+ uri: declUri,
+ });
+ }
+
+ private _addSymbolWarning(symbolInfo: SymbolInfo, message: string, declRange: Range, declUri: Uri) {
+ symbolInfo.diagnostics.push({
+ diagnostic: new Diagnostic(DiagnosticCategory.Warning, message, declRange),
+ uri: declUri,
+ });
+ }
+
+ private _updateKnownStatusIfWorse(currentStatus: TypeKnownStatus, newStatus: TypeKnownStatus) {
+ // Return the worse (numerically greater) of the two statuses.
+ return newStatus > currentStatus ?
newStatus : currentStatus; + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/parameterUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/parameterUtils.ts new file mode 100644 index 00000000..e7c2f69d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/parameterUtils.ts @@ -0,0 +1,493 @@ +/* + * parameterUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Utility functions for parameters. + */ + +import { assert } from '../common/debug'; +import { ParamCategory } from '../parser/parseNodes'; +import { isDunderName } from './symbolNameUtils'; +import { + AnyType, + ClassType, + FunctionParam, + FunctionParamFlags, + FunctionType, + isAnyOrUnknown, + isClassInstance, + isNever, + isParamSpec, + isPositionOnlySeparator, + isTypeSame, + isTypeVarTuple, + isUnpackedClass, + Type, + TypeVarType, +} from './types'; +import { doForEachSubtype, partiallySpecializeType } from './typeUtils'; + +export function isTypedKwargs(param: FunctionParam, effectiveParamType: Type): boolean { + return ( + param.category === ParamCategory.KwargsDict && + isClassInstance(effectiveParamType) && + isUnpackedClass(effectiveParamType) && + ClassType.isTypedDictClass(effectiveParamType) && + !!effectiveParamType.shared.typedDictEntries + ); +} + +export enum ParamKind { + Positional, + Standard, + Keyword, + ExpandedArgs, +} + +export interface VirtualParamDetails { + param: FunctionParam; + type: Type; + declaredType: Type; + defaultType?: Type | undefined; + index: number; + kind: ParamKind; +} + +export interface ParamListDetails { + // Virtual parameter list that refers to original parameters + params: VirtualParamDetails[]; + + // Counts of virtual parameters + positionOnlyParamCount: number; + positionParamCount: number; + + // Indexes into virtual parameter list + kwargsIndex?: number; + argsIndex?: number; + firstKeywordOnlyIndex?: number; + firstPositionOrKeywordIndex: number; + + // 
Other information + hasUnpackedTypeVarTuple: boolean; + hasUnpackedTypedDict: boolean; + unpackedKwargsTypedDictType?: ClassType; + paramSpec?: TypeVarType; +} + +export interface ParamListDetailsOptions { + // Should we disallow extra keyword arguments to be passed + // if the function uses a **kwargs annotated with a (non-closed) + // unpacked TypedDict? By default, this is allowed, but PEP 692 + // suggests that this should be disallowed for calls whereas it + // explicitly says this is allowed for callable assignment rules. + disallowExtraKwargsForTd?: boolean; +} + +// Examines the input parameters within a function signature and creates a +// "virtual list" of parameters, stripping out any markers and expanding +// any *args with unpacked tuples. +export function getParamListDetails(type: FunctionType, options?: ParamListDetailsOptions): ParamListDetails { + const result: ParamListDetails = { + firstPositionOrKeywordIndex: 0, + positionParamCount: 0, + positionOnlyParamCount: 0, + params: [], + hasUnpackedTypeVarTuple: false, + hasUnpackedTypedDict: false, + }; + + let positionOnlyIndex = type.shared.parameters.findIndex((p) => isPositionOnlySeparator(p)); + + // Handle the old (pre Python 3.8) way of specifying positional-only + // parameters by naming them with "__". + if (positionOnlyIndex < 0) { + for (let i = 0; i < type.shared.parameters.length; i++) { + const p = type.shared.parameters[i]; + if (p.category !== ParamCategory.Simple) { + break; + } + + if (!p.name) { + break; + } + + if (isDunderName(p.name) || !p.name.startsWith('__')) { + // We exempt "self" and "cls" in class and instance methods. 
+ if (i > 0 || FunctionType.isStaticMethod(type)) { + break; + } + + continue; + } + + positionOnlyIndex = i + 1; + } + } + + for (let i = 0; i < positionOnlyIndex; i++) { + if (FunctionType.getParamDefaultType(type, i)) { + break; + } + + result.positionOnlyParamCount++; + } + + let sawKeywordOnlySeparator = false; + + const addVirtualParam = ( + param: FunctionParam, + index: number, + typeOverride?: Type, + defaultTypeOverride?: Type, + sourceOverride?: ParamKind + ) => { + if (param.name) { + let kind: ParamKind; + if (sourceOverride !== undefined) { + kind = sourceOverride; + } else if (param.category === ParamCategory.ArgsList) { + kind = ParamKind.Positional; + } else if (sawKeywordOnlySeparator) { + kind = ParamKind.Keyword; + } else if (positionOnlyIndex >= 0 && index < positionOnlyIndex) { + kind = ParamKind.Positional; + } else { + kind = ParamKind.Standard; + } + + result.params.push({ + param, + index, + type: typeOverride ?? FunctionType.getParamType(type, index), + declaredType: FunctionType.getDeclaredParamType(type, index), + defaultType: defaultTypeOverride ?? FunctionType.getParamDefaultType(type, index), + kind, + }); + } + }; + + type.shared.parameters.forEach((param, index) => { + if (param.category === ParamCategory.ArgsList) { + // If this is an unpacked tuple, expand the entries. + const paramType = FunctionType.getParamType(type, index); + if (param.name && isUnpackedClass(paramType) && paramType.priv.tupleTypeArgs) { + const addToPositionalOnly = index < result.positionOnlyParamCount; + + paramType.priv.tupleTypeArgs.forEach((tupleArg, tupleIndex) => { + const category = + isTypeVarTuple(tupleArg.type) || tupleArg.isUnbounded + ? 
ParamCategory.ArgsList + : ParamCategory.Simple; + + if (category === ParamCategory.ArgsList) { + result.argsIndex = result.params.length; + } + + if (isTypeVarTuple(FunctionType.getParamType(type, index))) { + result.hasUnpackedTypeVarTuple = true; + } + + addVirtualParam( + FunctionParam.create( + category, + tupleArg.type, + FunctionParamFlags.NameSynthesized | FunctionParamFlags.TypeDeclared, + `${param.name}[${tupleIndex.toString()}]` + ), + index, + tupleArg.type, + /* defaultArgTypeOverride */ undefined, + ParamKind.ExpandedArgs + ); + + if (category === ParamCategory.Simple) { + result.positionParamCount++; + } + + if (tupleIndex > 0 && addToPositionalOnly) { + result.positionOnlyParamCount++; + } + }); + + // Normally, a VarArgList parameter (either named or as an unnamed separator) + // would signify the start of keyword-only parameters. However, we can construct + // callable signatures that defy this rule by using Callable and TypeVarTuples + // or unpacked tuples. + if (!sawKeywordOnlySeparator && (positionOnlyIndex < 0 || index >= positionOnlyIndex)) { + result.firstKeywordOnlyIndex = result.params.length; + sawKeywordOnlySeparator = true; + } + } else { + if (param.name && result.argsIndex === undefined) { + result.argsIndex = result.params.length; + + if (isTypeVarTuple(paramType)) { + result.hasUnpackedTypeVarTuple = true; + } + } + + // Normally, a VarArgList parameter (either named or as an unnamed separator) + // would signify the start of keyword-only parameters. However, we can construct + // callable signatures that defy this rule by using Callable and TypeVarTuples + // or unpacked tuples. 
+ if (!sawKeywordOnlySeparator && (positionOnlyIndex < 0 || index >= positionOnlyIndex)) { + result.firstKeywordOnlyIndex = result.params.length; + if (param.name) { + result.firstKeywordOnlyIndex++; + } + sawKeywordOnlySeparator = true; + } + + addVirtualParam(param, index); + } + } else if (param.category === ParamCategory.KwargsDict) { + sawKeywordOnlySeparator = true; + + const paramType = FunctionType.getParamType(type, index); + + // Is this an unpacked TypedDict? If so, expand the entries. + if (isClassInstance(paramType) && isUnpackedClass(paramType) && paramType.shared.typedDictEntries) { + if (result.firstKeywordOnlyIndex === undefined) { + result.firstKeywordOnlyIndex = result.params.length; + } + + const typedDictType = paramType; + paramType.shared.typedDictEntries.knownItems.forEach((entry, name) => { + entry = paramType.priv.typedDictNarrowedEntries?.get(name) ?? entry; + + const specializedParamType = partiallySpecializeType( + entry.valueType, + typedDictType, + /* typeClassType */ undefined + ); + + const defaultParamType = !entry.isRequired ? specializedParamType : undefined; + addVirtualParam( + FunctionParam.create( + ParamCategory.Simple, + specializedParamType, + FunctionParamFlags.TypeDeclared, + name, + defaultParamType + ), + index, + specializedParamType, + defaultParamType + ); + }); + + const extraItemsType = paramType.shared.typedDictEntries.extraItems?.valueType; + + let addKwargsForExtraItems: boolean; + if (extraItemsType) { + addKwargsForExtraItems = !isNever(extraItemsType); + } else { + addKwargsForExtraItems = !options?.disallowExtraKwargsForTd; + } + + // Unless the TypedDict is completely closed (i.e. is not allowed to + // have any extra items), add a virtual **kwargs parameter to represent + // any additional items. + if (addKwargsForExtraItems) { + addVirtualParam( + FunctionParam.create( + ParamCategory.KwargsDict, + extraItemsType ?? 
AnyType.create(), + FunctionParamFlags.TypeDeclared, + 'kwargs' + ), + index, + extraItemsType + ); + + result.kwargsIndex = result.params.length - 1; + } + + result.hasUnpackedTypedDict = true; + result.unpackedKwargsTypedDictType = paramType; + } else if (param.name) { + if (result.kwargsIndex === undefined) { + result.kwargsIndex = result.params.length; + } + + if (result.firstKeywordOnlyIndex === undefined) { + result.firstKeywordOnlyIndex = result.params.length; + } + + addVirtualParam(param, index); + } + } else if (param.category === ParamCategory.Simple) { + if (param.name && !sawKeywordOnlySeparator) { + result.positionParamCount++; + } + + addVirtualParam( + param, + index, + /* typeOverride */ undefined, + type.priv.specializedTypes?.parameterDefaultTypes + ? type.priv.specializedTypes?.parameterDefaultTypes[index] + : undefined + ); + } + }); + + // If the signature ends in `*args: P.args, **kwargs: P.kwargs`, + // extract the ParamSpec P. + result.paramSpec = FunctionType.getParamSpecFromArgsKwargs(type); + + result.firstPositionOrKeywordIndex = result.params.findIndex( + (p) => p.kind !== ParamKind.Positional && p.kind !== ParamKind.ExpandedArgs + ); + if (result.firstPositionOrKeywordIndex < 0) { + result.firstPositionOrKeywordIndex = result.params.length; + } + + return result; +} + +// Returns true if the type of the argument type is "*args: P.args" or +// "*args: Any". Both of these match a parameter of type "*args: P.args". 
+export function isParamSpecArgs(paramSpec: TypeVarType, argType: Type) { + let isCompatible = true; + + doForEachSubtype(argType, (argSubtype) => { + if ( + isParamSpec(argSubtype) && + argSubtype.priv.paramSpecAccess === 'args' && + isTypeSame(argSubtype, paramSpec, { ignoreTypeFlags: true }) + ) { + return; + } + + if ( + isClassInstance(argSubtype) && + argSubtype.priv.tupleTypeArgs && + argSubtype.priv.tupleTypeArgs.length === 1 && + argSubtype.priv.tupleTypeArgs[0].isUnbounded && + isAnyOrUnknown(argSubtype.priv.tupleTypeArgs[0].type) + ) { + return; + } + + if (isAnyOrUnknown(argSubtype)) { + return; + } + + isCompatible = false; + }); + + return isCompatible; +} + +// Returns true if the type of the argument type is "**kwargs: P.kwargs" or +// "*kwargs: Any". Both of these match a parameter of type "*kwargs: P.kwargs". +export function isParamSpecKwargs(paramSpec: TypeVarType, argType: Type) { + let isCompatible = true; + + doForEachSubtype(argType, (argSubtype) => { + if ( + isParamSpec(argSubtype) && + argSubtype.priv.paramSpecAccess === 'kwargs' && + isTypeSame(argSubtype, paramSpec, { ignoreTypeFlags: true }) + ) { + return; + } + + if ( + isClassInstance(argSubtype) && + ClassType.isBuiltIn(argSubtype, 'dict') && + argSubtype.priv.typeArgs && + argSubtype.priv.typeArgs.length === 2 && + isClassInstance(argSubtype.priv.typeArgs[0]) && + ClassType.isBuiltIn(argSubtype.priv.typeArgs[0], 'str') && + isAnyOrUnknown(argSubtype.priv.typeArgs[1]) + ) { + return; + } + + if (isAnyOrUnknown(argSubtype)) { + return; + } + + isCompatible = false; + }); + + return isCompatible; +} + +export interface ParamAssignmentInfo { + paramDetails: VirtualParamDetails; + keywordName?: string; + argsNeeded: number; + argsReceived: number; +} + +// A class that tracks which parameters in a signature +// have been assigned arguments. 
+export class ParamAssignmentTracker { + params: ParamAssignmentInfo[]; + + constructor(paramInfos: VirtualParamDetails[]) { + this.params = paramInfos.map((p) => { + const argsNeeded = !!p.defaultType || p.param.category !== ParamCategory.Simple ? 0 : 1; + return { paramDetails: p, argsNeeded, argsReceived: 0 }; + }); + } + + // Add a virtual keyword parameter for a keyword argument that + // targets a **kwargs parameter. This allows us to detect duplicate + // keyword arguments. + addKeywordParam(name: string, info: VirtualParamDetails): void { + this.params.push({ + paramDetails: info, + keywordName: name, + argsNeeded: 1, + argsReceived: 1, + }); + } + + lookupName(name: string): ParamAssignmentInfo | undefined { + return this.params.find((p) => { + // Don't return positional parameters because their names are irrelevant. + const kind = p.paramDetails.kind; + if (kind === ParamKind.Positional || kind === ParamKind.ExpandedArgs) { + return false; + } + + const effectiveName = p.keywordName ?? p.paramDetails.param.name; + return effectiveName === name; + }); + } + + lookupDetails(paramInfo: VirtualParamDetails): ParamAssignmentInfo { + const info = this.params.find((p) => p.paramDetails === paramInfo); + assert(info !== undefined); + return info; + } + + markArgReceived(paramInfo: VirtualParamDetails) { + const entry = this.lookupDetails(paramInfo); + entry.argsReceived++; + } + + // Returns a list of params that have not received their + // required number of arguments. 
+ getUnassignedParams(): string[] { + const unassignedParams: string[] = []; + this.params.forEach((p) => { + if (!p.paramDetails.param.name) { + return; + } + + if (p.argsReceived >= p.argsNeeded) { + return; + } + + unassignedParams.push(p.paramDetails.param.name); + }); + + return unassignedParams; + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/parentDirectoryCache.ts b/python-parser/packages/pyright-internal/src/analyzer/parentDirectoryCache.ts new file mode 100644 index 00000000..92b9471d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/parentDirectoryCache.ts @@ -0,0 +1,87 @@ +/* + * parentDirectoryCache.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Cache to hold parent directory import result to make sure + * we don't repeatedly search folders. + */ + +import { getOrAdd } from '../common/collectionUtils'; +import { FileSystem } from '../common/fileSystem'; +import { Uri } from '../common/uri/uri'; +import { ImportResult } from './importResult'; + +export type ImportPath = { importPath: Uri | undefined }; + +type CacheEntry = { importResult: ImportResult; path: Uri; importName: string }; + +export class ParentDirectoryCache { + private readonly _importChecked = new Map>(); + private readonly _cachedResults = new Map>(); + + private _libPathCache: Uri[] | undefined = undefined; + + constructor(private _importRootGetter: () => Uri[]) { + // empty + } + + getImportResult(path: Uri, importName: string, importResult: ImportResult): ImportResult | undefined { + const result = this._cachedResults.get(importName)?.get(path.key); + if (result) { + // We already checked for the importName at the path. + return result; + } + + const checked = this._importChecked.get(importName)?.get(path.key); + if (checked) { + // We already checked for the importName at the path. 
+ if (!checked.importPath) { + return importResult; + } + + return this._cachedResults.get(importName)?.get(checked.importPath.key) ?? importResult; + } + + return undefined; + } + + checkValidPath(fs: FileSystem, sourceFileUri: Uri, root: Uri): boolean { + if (!sourceFileUri.startsWith(root)) { + // We don't search containing folders for libs. + return false; + } + + this._libPathCache = + this._libPathCache ?? + this._importRootGetter() + .map((r) => fs.realCasePath(r)) + .filter((r) => !r.equals(root)) + .filter((r) => r.startsWith(root)); + + if (this._libPathCache.some((p) => sourceFileUri.startsWith(p))) { + // Make sure it is not lib folders under user code root. + // ex) .venv folder + return false; + } + + return true; + } + + checked(path: Uri, importName: string, importPath: ImportPath) { + getOrAdd(this._importChecked, importName, () => new Map()).set(path.key, importPath); + } + + add(result: CacheEntry) { + getOrAdd(this._cachedResults, result.importName, () => new Map()).set( + result.path.key, + result.importResult + ); + } + + reset() { + this._importChecked.clear(); + this._cachedResults.clear(); + this._libPathCache = undefined; + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/parseTreeCleaner.ts b/python-parser/packages/pyright-internal/src/analyzer/parseTreeCleaner.ts new file mode 100644 index 00000000..df16589c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/parseTreeCleaner.ts @@ -0,0 +1,35 @@ +/* + * parseTreeCleaner.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * A parse tree walker that's used to clean any analysis + * information hanging off the parse tree. It's used when + * dependent files have been modified and the file requires + * reanalysis. Without this, we'd need to generate a fresh + * parse tree from scratch. 
+ */ + +import { ModuleNode, ParseNode } from '../parser/parseNodes'; +import * as AnalyzerNodeInfo from './analyzerNodeInfo'; +import { ParseTreeWalker } from './parseTreeWalker'; + +export class ParseTreeCleanerWalker extends ParseTreeWalker { + private _parseTree: ModuleNode; + + constructor(parseTree: ModuleNode) { + super(); + + this._parseTree = parseTree; + } + + clean() { + this.walk(this._parseTree); + } + + override visitNode(node: ParseNode) { + AnalyzerNodeInfo.cleanNodeAnalysisInfo(node); + return super.visitNode(node); + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/parseTreeUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/parseTreeUtils.ts new file mode 100644 index 00000000..6498709c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/parseTreeUtils.ts @@ -0,0 +1,2726 @@ +/* + * parseTreeUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Utility routines for traversing a parse tree. 
+ */ + +import * as AnalyzerNodeInfo from '../analyzer/analyzerNodeInfo'; +import { containsOnlyWhitespace } from '../common/core'; +import { assert, assertNever, fail } from '../common/debug'; +import { convertPositionToOffset, convertTextRangeToRange } from '../common/positionUtils'; +import { Position, Range, TextRange } from '../common/textRange'; +import { TextRangeCollection, getIndexContaining } from '../common/textRangeCollection'; +import { + ArgCategory, + ArgumentNode, + AssignmentExpressionNode, + AwaitNode, + CallNode, + ClassNode, + DecoratorNode, + EvaluationScopeNode, + ExecutionScopeNode, + ExpressionNode, + FunctionNode, + ImportFromNode, + IndexNode, + LambdaNode, + MemberAccessNode, + ModuleNode, + NameNode, + ParamCategory, + ParameterNode, + ParseNode, + ParseNodeType, + StatementListNode, + StatementNode, + StringListNode, + StringNode, + SuiteNode, + TypeAnnotationNode, + TypeParameterScopeNode, + isExpressionNode, +} from '../parser/parseNodes'; +import { OperatorTypeNameMap, ParseNodeTypeNameMap } from '../parser/parseNodeUtils'; +import { ParseFileResults } from '../parser/parser'; +import { Tokenizer, TokenizerOutput } from '../parser/tokenizer'; +import { KeywordType, OperatorType, StringToken, StringTokenFlags, Token, TokenType } from '../parser/tokenizerTypes'; +import { getScope } from './analyzerNodeInfo'; +import { ParseTreeWalker, getChildNodes } from './parseTreeWalker'; +import { TypeVarScopeId } from './types'; + +export const enum PrintExpressionFlags { + None = 0, + + // Don't use string literals for forward declarations. + ForwardDeclarations = 1 << 0, + + // By default, strings are truncated. If this flag + // is specified, the full original string is used. + DoNotLimitStringLength = 1 << 1, +} + +export interface EvaluationScopeInfo { + node: EvaluationScopeNode; + useProxyScope?: boolean; +} + +// Returns the depth of the node as measured from the root +// of the parse tree. 
+export function getNodeDepth(node: ParseNode): number { + let depth = 0; + let curNode: ParseNode | undefined = node; + + while (curNode) { + depth++; + curNode = curNode.parent; + } + + return depth; +} + +// Returns the deepest node that contains the specified position. +export function findNodeByPosition( + node: ParseNode, + position: Position, + lines: TextRangeCollection +): ParseNode | undefined { + const offset = convertPositionToOffset(position, lines); + if (offset === undefined) { + return undefined; + } + + return findNodeByOffset(node, offset); +} + +// Returns the deepest node that contains the specified offset. +export function findNodeByOffset(node: ParseNode, offset: number): ParseNode | undefined { + if (!TextRange.overlaps(node, offset)) { + return undefined; + } + + // The range is found within this node. See if we can localize it + // further by checking its children. + let children = getChildNodes(node); + if (isCompliantWithNodeRangeRules(node) && children.length > 20) { + // Use binary search to find the child to visit. This should be helpful + // when there are many siblings, such as statements in a module/suite + // or expressions in a list, etc. Otherwise, we will have to traverse + // every sibling before finding the correct one. + let index = getIndexContaining(children, offset, TextRange.overlaps); + + if (index >= 0) { + // Find first sibling that overlaps with the offset. This ensures that + // our binary search result matches what we would have returned via a + // linear search. 
+ let searchIndex = index - 1; + while (searchIndex >= 0) { + const previousChild = children[searchIndex]; + if (previousChild) { + if (TextRange.overlaps(previousChild, offset)) { + index = searchIndex; + } else { + break; + } + } + + searchIndex--; + } + + children = [children[index]]; + } + } + + for (const child of children) { + if (!child) { + continue; + } + + const containingChild = findNodeByOffset(child, offset); + if (containingChild) { + // For augmented assignments, prefer the dest expression, which is a clone + // of the left expression but is used to hold the type of the operation result. + if (node.nodeType === ParseNodeType.AugmentedAssignment && containingChild === node.d.leftExpr) { + return node.d.destExpr; + } + + return containingChild; + } + } + + return node; +} + +export function isCompliantWithNodeRangeRules(node: ParseNode) { + // ParseNode range rules are + // 1. Children are all contained within the parent. + // 2. Children have non-overlapping ranges. + // 3. Children are listed in increasing order. + return node.nodeType !== ParseNodeType.Assignment && node.nodeType !== ParseNodeType.StringList; +} + +export function getClassFullName(classNode: ParseNode, moduleName: string, className: string): string { + const nameParts: string[] = [className]; + + let curNode: ParseNode | undefined = classNode; + + // Walk the parse tree looking for classes. + while (curNode) { + curNode = getEnclosingClass(curNode); + if (curNode) { + nameParts.push(curNode.d.name.d.value); + } + } + + nameParts.push(moduleName); + + return nameParts.reverse().join('.'); +} + +// Create an ID that is based on the location within the file. +// This allows us to disambiguate between different types that +// don't have unique names (those that are not created with class +// declarations). 
+export function getTypeSourceId(node: ParseNode): number { + return node.start; +} + +export function printArg(node: ArgumentNode, flags: PrintExpressionFlags) { + let argStr = ''; + if (node.d.argCategory === ArgCategory.UnpackedList) { + argStr = '*'; + } else if (node.d.argCategory === ArgCategory.UnpackedDictionary) { + argStr = '**'; + } + if (node.d.name) { + argStr += node.d.name.d.value + '='; + } + argStr += printExpression(node.d.valueExpr, flags); + return argStr; +} + +export function printExpression(node: ExpressionNode, flags = PrintExpressionFlags.None): string { + switch (node.nodeType) { + case ParseNodeType.Name: { + return node.d.value; + } + + case ParseNodeType.MemberAccess: { + return printExpression(node.d.leftExpr, flags) + '.' + node.d.member.d.value; + } + + case ParseNodeType.Call: { + let lhs = printExpression(node.d.leftExpr, flags); + + // Some left-hand expressions must be parenthesized. + if ( + node.d.leftExpr.nodeType !== ParseNodeType.MemberAccess && + node.d.leftExpr.nodeType !== ParseNodeType.Name && + node.d.leftExpr.nodeType !== ParseNodeType.Index && + node.d.leftExpr.nodeType !== ParseNodeType.Call + ) { + lhs = `(${lhs})`; + } + + return lhs + '(' + node.d.args.map((arg) => printArg(arg, flags)).join(', ') + ')'; + } + + case ParseNodeType.Index: { + return ( + printExpression(node.d.leftExpr, flags) + + '[' + + node.d.items.map((item) => printArg(item, flags)).join(', ') + + (node.d.trailingComma ? ',' : '') + + ']' + ); + } + + case ParseNodeType.UnaryOperation: { + const exprStr = printOperator(node.d.operator) + printExpression(node.d.expr, flags); + return node.d.hasParens ? `(${exprStr})` : exprStr; + } + + case ParseNodeType.BinaryOperation: { + const exprStr = + printExpression(node.d.leftExpr, flags) + + ' ' + + printOperator(node.d.operator) + + ' ' + + printExpression(node.d.rightExpr, flags); + + return node.d.hasParens ? 
`(${exprStr})` : exprStr; + } + + case ParseNodeType.Number: { + let value = node.d.value.toString(); + + // If it's stored as a bigint, strip off the "n". + if (value.endsWith('n')) { + value = value.substring(0, value.length - 1); + } + + if (node.d.isImaginary) { + value += 'j'; + } + return value; + } + + case ParseNodeType.StringList: { + if (flags & PrintExpressionFlags.ForwardDeclarations && node.d.annotation) { + return printExpression(node.d.annotation, flags); + } else { + return node.d.strings + .map((str) => { + return printExpression(str, flags); + }) + .join(' '); + } + } + + case ParseNodeType.String: { + let exprString = ''; + if (node.d.token.flags & StringTokenFlags.Raw) { + exprString += 'r'; + } + + if (node.d.token.flags & StringTokenFlags.Unicode) { + exprString += 'u'; + } + + if (node.d.token.flags & StringTokenFlags.Bytes) { + exprString += 'b'; + } + + if (node.d.token.flags & StringTokenFlags.Format) { + exprString += 'f'; + } + + if (node.d.token.flags & StringTokenFlags.Template) { + exprString += 't'; + } + + let escapedString = node.d.token.escapedValue; + if ((flags & PrintExpressionFlags.DoNotLimitStringLength) === 0) { + const maxStringLength = 32; + escapedString = escapedString.slice(0, maxStringLength); + } + + if (node.d.token.flags & StringTokenFlags.Triplicate) { + if (node.d.token.flags & StringTokenFlags.SingleQuote) { + exprString += `'''${escapedString}'''`; + } else { + exprString += `"""${escapedString}"""`; + } + } else { + if (node.d.token.flags & StringTokenFlags.SingleQuote) { + exprString += `'${escapedString}'`; + } else { + exprString += `"${escapedString}"`; + } + } + + return exprString; + } + + case ParseNodeType.FormatString: { + let exprString = 'f'; + + let escapedString = ''; + const itemsToPrint = [...node.d.middleTokens, ...node.d.fieldExprs].sort((a, b) => a.start - b.start); + + while (itemsToPrint.length > 0) { + const itemToPrint = itemsToPrint.shift()!; + + if ('nodeType' in itemToPrint) { + 
escapedString += `{${printExpression(itemToPrint)}}`; + } else { + escapedString += itemToPrint.escapedValue; + } + } + + if (node.d.token.flags & StringTokenFlags.Triplicate) { + if (node.d.token.flags & StringTokenFlags.SingleQuote) { + exprString += `'''${escapedString}'''`; + } else { + exprString += `"""${escapedString}"""`; + } + } else { + if (node.d.token.flags & StringTokenFlags.SingleQuote) { + exprString += `'${escapedString}'`; + } else { + exprString += `"${escapedString}"`; + } + } + + return exprString; + } + + case ParseNodeType.Assignment: { + return printExpression(node.d.leftExpr, flags) + ' = ' + printExpression(node.d.rightExpr, flags); + } + + case ParseNodeType.AssignmentExpression: { + return printExpression(node.d.name, flags) + ' := ' + printExpression(node.d.rightExpr, flags); + } + + case ParseNodeType.TypeAnnotation: { + return printExpression(node.d.valueExpr, flags) + ': ' + printExpression(node.d.annotation, flags); + } + + case ParseNodeType.AugmentedAssignment: { + return ( + printExpression(node.d.leftExpr, flags) + + ' ' + + printOperator(node.d.operator) + + ' ' + + printExpression(node.d.rightExpr, flags) + ); + } + + case ParseNodeType.Await: { + const exprStr = 'await ' + printExpression(node.d.expr, flags); + return node.d.hasParens ? 
`(${exprStr})` : exprStr; + } + + case ParseNodeType.Ternary: { + return ( + printExpression(node.d.ifExpr, flags) + + ' if ' + + printExpression(node.d.testExpr, flags) + + ' else ' + + printExpression(node.d.elseExpr, flags) + ); + } + + case ParseNodeType.List: { + const expressions = node.d.items.map((expr) => { + return printExpression(expr, flags); + }); + return `[${expressions.join(', ')}]`; + } + + case ParseNodeType.Unpack: { + return '*' + printExpression(node.d.expr, flags); + } + + case ParseNodeType.Tuple: { + const expressions = node.d.items.map((expr) => { + return printExpression(expr, flags); + }); + if (expressions.length === 1) { + return `(${expressions[0]}, )`; + } + return `(${expressions.join(', ')})`; + } + + case ParseNodeType.Yield: { + if (node.d.expr) { + return 'yield ' + printExpression(node.d.expr, flags); + } else { + return 'yield'; + } + } + + case ParseNodeType.YieldFrom: { + return 'yield from ' + printExpression(node.d.expr, flags); + } + + case ParseNodeType.Ellipsis: { + return '...'; + } + + case ParseNodeType.Comprehension: { + let listStr = ''; + + if (isExpressionNode(node.d.expr)) { + listStr = printExpression(node.d.expr as ExpressionNode, flags); + } else if (node.d.expr.nodeType === ParseNodeType.DictionaryKeyEntry) { + const keyStr = printExpression(node.d.expr.d.keyExpr, flags); + const valueStr = printExpression(node.d.expr.d.valueExpr, flags); + listStr = `${keyStr}: ${valueStr}`; + } + + listStr = + listStr + + ' ' + + node.d.forIfNodes + .map((expr) => { + if (expr.nodeType === ParseNodeType.ComprehensionFor) { + return ( + `${expr.d.isAsync ? 'async ' : ''}for ` + + printExpression(expr.d.targetExpr, flags) + + ` in ${printExpression(expr.d.iterableExpr, flags)}` + ); + } else { + return `if ${printExpression(expr.d.testExpr, flags)}`; + } + }) + .join(' '); + + return node.d.hasParens ? 
`(${listStr})` : listStr; + } + + case ParseNodeType.Slice: { + let result = ''; + + if (node.d.startValue || node.d.endValue || node.d.stepValue) { + if (node.d.startValue) { + result += printExpression(node.d.startValue, flags); + } + if (node.d.endValue) { + result += ': ' + printExpression(node.d.endValue, flags); + } + if (node.d.stepValue) { + result += ': ' + printExpression(node.d.stepValue, flags); + } + } else { + result += ':'; + } + + return result; + } + + case ParseNodeType.Lambda: { + return ( + 'lambda ' + + node.d.params + .map((param) => { + let paramStr = ''; + + if (param.d.category === ParamCategory.ArgsList) { + paramStr += '*'; + } else if (param.d.category === ParamCategory.KwargsDict) { + paramStr += '**'; + } + + if (param.d.name) { + paramStr += param.d.name.d.value; + } else if (param.d.category === ParamCategory.Simple) { + paramStr += '/'; + } + + if (param.d.defaultValue) { + paramStr += ' = ' + printExpression(param.d.defaultValue, flags); + } + return paramStr; + }) + .join(', ') + + ': ' + + printExpression(node.d.expr, flags) + ); + } + + case ParseNodeType.Constant: { + if (node.d.constType === KeywordType.True) { + return 'True'; + } else if (node.d.constType === KeywordType.False) { + return 'False'; + } else if (node.d.constType === KeywordType.Debug) { + return '__debug__'; + } else if (node.d.constType === KeywordType.None) { + return 'None'; + } + break; + } + + case ParseNodeType.Dictionary: { + const dictContents = `${node.d.items.map((entry) => { + if (entry.nodeType === ParseNodeType.DictionaryKeyEntry) { + return ( + `${printExpression(entry.d.keyExpr, flags)}: ` + `${printExpression(entry.d.valueExpr, flags)}` + ); + } else if (entry.nodeType === ParseNodeType.DictionaryExpandEntry) { + return `**${printExpression(entry.d.expr, flags)}`; + } else { + return printExpression(entry, flags); + } + })}`; + + if (dictContents) { + return `{ ${dictContents} }`; + } + + return '{}'; + } + + case ParseNodeType.Set: { + return 
node.d.items.map((entry) => printExpression(entry, flags)).join(', '); + } + + case ParseNodeType.Error: { + return ''; + } + + default: { + assertNever(node); + } + } + + return ''; +} + +export function printOperator(operator: OperatorType): string { + const operatorName = OperatorTypeNameMap[operator]; + if (operatorName) { + return operatorName; + } else { + return 'unknown'; + } +} + +// If the name node is the LHS of a call expression or is a member +// name in the LHS of a call expression, returns the call node. +export function getCallForName(node: NameNode): CallNode | undefined { + if (node.parent?.nodeType === ParseNodeType.Call && node.parent.d.leftExpr === node) { + return node.parent; + } + + if ( + node.parent?.nodeType === ParseNodeType.MemberAccess && + node.parent.d.member === node && + node.parent.parent?.nodeType === ParseNodeType.Call && + node.parent.parent.d.leftExpr === node.parent + ) { + return node.parent.parent; + } + + return undefined; +} + +export function getDecoratorForName(node: NameNode): DecoratorNode | undefined { + if (node.parent?.nodeType === ParseNodeType.Decorator && node.parent.d.expr === node) { + return node.parent; + } + + if ( + node.parent?.nodeType === ParseNodeType.MemberAccess && + node.parent.d.member === node && + node.parent.parent?.nodeType === ParseNodeType.Decorator && + node.parent.parent.d.expr === node.parent + ) { + return node.parent.parent; + } + + return undefined; +} + +export function getEnclosingSuite(node: ParseNode): SuiteNode | undefined { + let curNode = node.parent; + + while (curNode) { + if (curNode.nodeType === ParseNodeType.Suite) { + return curNode; + } + curNode = curNode.parent; + } + + return undefined; +} + +export function getEnclosingClass(node: ParseNode, stopAtFunction = false): ClassNode | undefined { + let curNode = node.parent; + while (curNode) { + if (curNode.nodeType === ParseNodeType.Class) { + return curNode; + } + + if (curNode.nodeType === ParseNodeType.Module) { + return 
undefined; + } + + if (curNode.nodeType === ParseNodeType.Function) { + if (stopAtFunction) { + return undefined; + } + } + + curNode = curNode.parent; + } + + return undefined; +} + +export function getEnclosingModule(node: ParseNode): ModuleNode { + let curNode = node.parent; + while (curNode) { + if (curNode.nodeType === ParseNodeType.Module) { + return curNode; + } + + curNode = curNode.parent; + } + + fail('Module node not found'); + return undefined!; +} + +export function getEnclosingClassOrModule(node: ParseNode, stopAtFunction = false): ClassNode | ModuleNode | undefined { + let curNode = node.parent; + while (curNode) { + if (curNode.nodeType === ParseNodeType.Class) { + return curNode; + } + + if (curNode.nodeType === ParseNodeType.Module) { + return curNode; + } + + if (curNode.nodeType === ParseNodeType.Function) { + if (stopAtFunction) { + return undefined; + } + } + + curNode = curNode.parent; + } + + return undefined; +} + +export function getEnclosingFunction(node: ParseNode): FunctionNode | undefined { + let curNode = node.parent; + let prevNode: ParseNode | undefined; + + while (curNode) { + if (curNode.nodeType === ParseNodeType.Function) { + // Don't treat a decorator as being "enclosed" in the function. + if (!curNode.d.decorators.some((decorator) => decorator === prevNode)) { + return curNode; + } + } + + if (curNode.nodeType === ParseNodeType.Class) { + return undefined; + } + + prevNode = curNode; + curNode = curNode.parent; + } + + return undefined; +} + +// This is similar to getEnclosingFunction except that it uses evaluation +// scopes rather than the parse tree to determine whether the specified node +// is within the scope. That means if the node is within a class decorator +// (for example), it will be considered part of its parent node rather than +// the class node. 
// Starting from the node's evaluation scope, walks outward and returns the
// closest enclosing function scope. Returns undefined if a class scope (or
// the module) is reached first.
export function getEnclosingFunctionEvaluationScope(node: ParseNode): FunctionNode | undefined {
    let curNode = getEvaluationScopeNode(node).node;

    while (curNode) {
        if (curNode.nodeType === ParseNodeType.Function) {
            return curNode;
        }

        // A class scope does not count as a function evaluation scope,
        // and running out of parents means we hit the module level.
        if (curNode.nodeType === ParseNodeType.Class || !curNode.parent) {
            return undefined;
        }

        curNode = getEvaluationScopeNode(curNode.parent).node;
    }

    return undefined;
}

// Returns the closest enclosing lambda, or undefined if a suite is
// encountered first (a lambda body cannot contain a suite, so hitting a
// suite means the node is not inside a lambda).
export function getEnclosingLambda(node: ParseNode): LambdaNode | undefined {
    let curNode = node.parent;
    while (curNode) {
        if (curNode.nodeType === ParseNodeType.Lambda) {
            return curNode;
        }

        if (curNode.nodeType === ParseNodeType.Suite) {
            return undefined;
        }

        curNode = curNode.parent;
    }

    return undefined;
}

// Returns the closest enclosing function or class node, whichever is
// encountered first when walking up the parse tree.
export function getEnclosingClassOrFunction(node: ParseNode): FunctionNode | ClassNode | undefined {
    let curNode = node.parent;
    while (curNode) {
        if (curNode.nodeType === ParseNodeType.Function) {
            return curNode;
        }

        if (curNode.nodeType === ParseNodeType.Class) {
            return curNode;
        }

        curNode = curNode.parent;
    }

    return undefined;
}

// Returns the closest enclosing suite whose parent is a function or class
// (i.e. the body suite of the enclosing function or class definition).
export function getEnclosingClassOrFunctionSuite(node: ParseNode): SuiteNode | undefined {
    let curNode = node.parent;
    while (curNode) {
        if (curNode.nodeType === ParseNodeType.Suite) {
            if (
                curNode.parent?.nodeType === ParseNodeType.Function ||
                curNode.parent?.nodeType === ParseNodeType.Class
            ) {
                return curNode;
            }
        }

        curNode = curNode.parent;
    }

    return undefined;
}

// Returns the closest enclosing suite or module node. If stopAtLambda
// (default true) or stopAtFunction is set, the walk aborts with undefined
// when the corresponding node type is encountered first.
export function getEnclosingSuiteOrModule(
    node: ParseNode,
    stopAtFunction = false,
    stopAtLambda = true
): SuiteNode | ModuleNode | undefined {
    let curNode = node.parent;
    while (curNode) {
        if (curNode.nodeType === ParseNodeType.Suite) {
            return curNode;
        }

        if (curNode.nodeType === ParseNodeType.Module) {
            return curNode;
        }

        if (curNode.nodeType === ParseNodeType.Lambda) {
            if (stopAtLambda) {
                return undefined;
            }
        }

        if (curNode.nodeType === ParseNodeType.Function) {
            if (stopAtFunction) {
                return undefined;
            }
        }

        curNode = curNode.parent;
    }

    return undefined;
}

export function getEvaluationNodeForAssignmentExpression(
    node: AssignmentExpressionNode
): LambdaNode | FunctionNode | ModuleNode | ClassNode | undefined {
    // PEP 572 indicates that the evaluation node for an assignment expression
    // target within a list comprehension is contained within a lambda,
    // function or module, but not a class.
    let sawComprehension = false;
    let curNode: ParseNode | undefined = getEvaluationScopeNode(node).node;

    while (curNode !== undefined) {
        switch (curNode.nodeType) {
            case ParseNodeType.Function:
            case ParseNodeType.Lambda:
            case ParseNodeType.Module:
                return curNode;

            case ParseNodeType.Class:
                // A class scope is a valid binding target only when the
                // walrus expression appeared directly in the class body,
                // not when it was hoisted out of a comprehension.
                return sawComprehension ? undefined : curNode;

            case ParseNodeType.Comprehension:
                sawComprehension = true;
                curNode = getEvaluationScopeNode(curNode.parent!).node;
                break;

            default:
                return undefined;
        }
    }

    return undefined;
}

// Returns the parse node corresponding to the scope that is used to evaluate
// a symbol referenced in the specified node.
export function getEvaluationScopeNode(node: ParseNode): EvaluationScopeInfo {
    let prevNode: ParseNode | undefined;
    let prevPrevNode: ParseNode | undefined;
    let curNode: ParseNode | undefined = node;
    let isParamNameNode = false;
    let isParamDefaultNode = false;

    while (curNode) {
        if (curNode.nodeType === ParseNodeType.Parameter) {
            if (prevNode !== undefined && prevNode === curNode.d.name) {
                // Note that we passed through a parameter name node.
                isParamNameNode = true;
            } else if (prevNode !== undefined && prevNode === curNode.d.defaultValue) {
                // Note that we passed through a parameter default value node.
                isParamDefaultNode = true;
            }
        }

        // We found a scope associated with this node. In most cases,
        // we'll return this scope, but in a few cases we need to return
        // the enclosing scope instead.
        switch (curNode.nodeType) {
            case ParseNodeType.TypeParameterList: {
                return { node: curNode, useProxyScope: true };
            }

            case ParseNodeType.Function: {
                if (!prevNode) {
                    break;
                }

                // Decorators are always evaluated outside of the function scope.
                if (curNode.d.decorators.some((decorator) => decorator === prevNode)) {
                    break;
                }

                // The name of the function is evaluated within the containing scope.
                if (prevNode === curNode.d.name) {
                    break;
                }

                if (curNode.d.params.some((param) => param === prevNode)) {
                    // Default argument expressions are evaluated outside of the function scope.
                    if (isParamDefaultNode) {
                        break;
                    }

                    if (isParamNameNode) {
                        if (getScope(curNode) !== undefined) {
                            return { node: curNode };
                        }
                    }
                }

                if (prevNode === curNode.d.suite) {
                    if (getScope(curNode) !== undefined) {
                        return { node: curNode };
                    }
                }

                // All other nodes in the function are evaluated in the context
                // of the type parameter scope if it's present. Otherwise,
                // they are evaluated within the function's parent scope.
                if (curNode.d.typeParams) {
                    const scopeNode = curNode.d.typeParams;
                    if (getScope(scopeNode) !== undefined) {
                        return { node: scopeNode, useProxyScope: true };
                    }
                }
                break;
            }

            case ParseNodeType.Lambda: {
                if (curNode.d.params.some((param) => param === prevNode)) {
                    if (isParamNameNode) {
                        if (getScope(curNode) !== undefined) {
                            return { node: curNode };
                        }
                    }
                } else if (!prevNode || prevNode === curNode.d.expr) {
                    if (getScope(curNode) !== undefined) {
                        return { node: curNode };
                    }
                }
                break;
            }

            case ParseNodeType.Class: {
                if (!prevNode) {
                    break;
                }

                // Decorators are always evaluated outside of the class scope.
                if (curNode.d.decorators.some((decorator) => decorator === prevNode)) {
                    break;
                }

                if (prevNode === curNode.d.suite) {
                    if (getScope(curNode) !== undefined) {
                        return { node: curNode };
                    }
                }

                // All other nodes in the class are evaluated in the context
                // of the type parameter scope if it's present. Otherwise,
                // they are evaluated within the class' parent scope.
                if (curNode.d.typeParams) {
                    const scopeNode = curNode.d.typeParams;
                    if (getScope(scopeNode) !== undefined) {
                        return { node: scopeNode, useProxyScope: true };
                    }
                }
                break;
            }

            case ParseNodeType.Comprehension: {
                if (getScope(curNode) !== undefined) {
                    // The iterable expression of the first subnode of a list comprehension
                    // is evaluated within the scope of its parent.
                    const isFirstIterableExpr =
                        prevNode === curNode.d.forIfNodes[0] &&
                        curNode.d.forIfNodes[0].nodeType === ParseNodeType.ComprehensionFor &&
                        curNode.d.forIfNodes[0].d.iterableExpr === prevPrevNode;

                    if (!isFirstIterableExpr) {
                        return { node: curNode };
                    }
                }
                break;
            }

            case ParseNodeType.TypeAlias: {
                if (prevNode === curNode.d.expr && curNode.d.typeParams) {
                    const scopeNode = curNode.d.typeParams;
                    if (getScope(scopeNode) !== undefined) {
                        return { node: scopeNode };
                    }
                }
                break;
            }

            case ParseNodeType.Module: {
                if (getScope(curNode) !== undefined) {
                    return { node: curNode };
                }
                break;
            }
        }

        prevPrevNode = prevNode;
        prevNode = curNode;
        curNode = curNode.parent;
    }

    // Every node should ultimately be rooted in a module scope; reaching
    // here indicates a malformed or detached parse tree.
    fail('Did not find evaluation scope');
    return undefined!;
}

// Returns the parse node corresponding to the function, class, or type alias
// that potentially provides the scope for a type parameter.
+export function getTypeVarScopeNode(node: ParseNode): TypeParameterScopeNode | undefined { + let prevNode: ParseNode | undefined; + let curNode: ParseNode | undefined = node; + + while (curNode) { + switch (curNode.nodeType) { + case ParseNodeType.Function: { + if (!curNode.d.decorators.some((decorator) => decorator === prevNode)) { + return curNode; + } + break; + } + + case ParseNodeType.Class: { + if (!curNode.d.decorators.some((decorator) => decorator === prevNode)) { + return curNode; + } + break; + } + + case ParseNodeType.TypeAlias: { + return curNode; + } + } + + prevNode = curNode; + curNode = curNode.parent; + } + + return undefined; +} + +// Returns the parse node corresponding to the scope that is used +// for executing the code referenced in the specified node. +export function getExecutionScopeNode(node: ParseNode): ExecutionScopeNode { + let evaluationScope = getEvaluationScopeNode(node).node; + + // Classes are not considered execution scope because they are executed + // within the context of their containing module or function. Likewise, + // list comprehensions are executed within their container. Type parameter + // scopes are special because they act as proxies for their containing + // function or class scope. + while ( + evaluationScope.nodeType === ParseNodeType.TypeParameterList || + evaluationScope.nodeType === ParseNodeType.Class || + evaluationScope.nodeType === ParseNodeType.Comprehension + ) { + evaluationScope = getEvaluationScopeNode(evaluationScope.parent!).node; + } + + return evaluationScope; +} + +// Given a node within a type annotation expression, returns the type annotation +// node that contains it (if applicable). 
+export function getTypeAnnotationNode(node: ParseNode): TypeAnnotationNode | undefined { + let prevNode = node; + let curNode = node.parent; + + while (curNode) { + if (curNode.nodeType === ParseNodeType.TypeAnnotation) { + if (curNode.d.annotation === prevNode) { + return curNode; + } + + break; + } + + prevNode = curNode; + curNode = curNode.parent; + } + + return undefined; +} + +// In general, arguments passed to a call are evaluated by the runtime in +// left-to-right order. There is one exception, however, when an unpacked +// iterable is used after a keyword argument. +export function getArgsByRuntimeOrder(node: CallNode) { + const positionalArgs = node.d.args.filter( + (arg) => !arg.d.name && arg.d.argCategory !== ArgCategory.UnpackedDictionary + ); + const keywordArgs = node.d.args.filter( + (arg) => !!arg.d.name || arg.d.argCategory === ArgCategory.UnpackedDictionary + ); + return positionalArgs.concat(keywordArgs); +} + +// PEP 591 spells out certain limited cases where an assignment target +// can be annotated with a "Final" annotation. This function determines +// whether Final is allowed for the specified node. +export function isFinalAllowedForAssignmentTarget(targetNode: ExpressionNode): boolean { + // Simple names always support Final. + if (targetNode.nodeType === ParseNodeType.Name) { + return true; + } + + // Member access expressions like "self.x" are permitted only + // within __init__ methods. 
+ if (targetNode.nodeType === ParseNodeType.MemberAccess) { + if (targetNode.d.leftExpr.nodeType !== ParseNodeType.Name) { + return false; + } + + const classNode = getEnclosingClass(targetNode); + if (!classNode) { + return false; + } + + const methodNode = getEnclosingFunction(targetNode); + if (!methodNode) { + return false; + } + + if (methodNode.d.name.d.value !== '__init__') { + return false; + } + + return true; + } + + return false; +} + +export function isRequiredAllowedForAssignmentTarget(targetNode: ExpressionNode): boolean { + const classNode = getEnclosingClass(targetNode, /* stopAtFunction */ true); + if (!classNode) { + return false; + } + + return true; +} + +export function isNodeContainedWithin(node: ParseNode, potentialContainer: ParseNode): boolean { + let curNode: ParseNode | undefined = node; + while (curNode) { + if (curNode === potentialContainer) { + return true; + } + + curNode = curNode.parent; + } + + return false; +} + +export function getParentNodeOfType(node: ParseNode, containerType: ParseNodeType): T | undefined { + let curNode: ParseNode | undefined = node; + + while (curNode) { + if (curNode.nodeType === containerType) { + return curNode as T; + } + + curNode = curNode.parent; + } + + return undefined; +} + +// If the specified node is contained within an expression that is intended to be +// interpreted as a type annotation, this function returns the annotation node. 
+export function getParentAnnotationNode(node: ExpressionNode): ExpressionNode | undefined { + let curNode: ParseNode | undefined = node; + let prevNode: ParseNode | undefined; + + while (curNode) { + if (curNode.nodeType === ParseNodeType.Function) { + if (prevNode === curNode.d.returnAnnotation) { + return prevNode; + } + return undefined; + } + + if (curNode.nodeType === ParseNodeType.Parameter) { + if (prevNode === curNode.d.annotation || prevNode === curNode.d.annotationComment) { + return prevNode; + } + return undefined; + } + + if (curNode.nodeType === ParseNodeType.Assignment) { + if (prevNode === curNode.d.annotationComment) { + return prevNode; + } + return undefined; + } + + if (curNode.nodeType === ParseNodeType.TypeAnnotation) { + if (prevNode === curNode.d.annotation) { + return prevNode; + } + return undefined; + } + + if (curNode.nodeType === ParseNodeType.FunctionAnnotation) { + if (prevNode === curNode.d.returnAnnotation || curNode.d.paramAnnotations.some((p) => p === prevNode)) { + assert(!prevNode || isExpressionNode(prevNode)); + return prevNode; + } + return undefined; + } + + prevNode = curNode; + curNode = curNode.parent; + } + + return undefined; +} + +export function isNodeContainedWithinNodeType(node: ParseNode, containerType: ParseNodeType): boolean { + return getParentNodeOfType(node, containerType) !== undefined; +} + +export function isSuiteEmpty(node: SuiteNode): boolean { + let sawEllipsis = false; + + for (const statement of node.d.statements) { + if (statement.nodeType === ParseNodeType.StatementList) { + for (const substatement of statement.d.statements) { + if (substatement.nodeType === ParseNodeType.Ellipsis) { + // Allow an ellipsis + sawEllipsis = true; + } else if (substatement.nodeType === ParseNodeType.StringList) { + // Allow doc strings + } else { + return false; + } + } + } else { + return false; + } + } + + return sawEllipsis; +} + +export function containsAwaitNode(node: ParseNode): boolean { + let foundAwait = 
false; + + class AwaitNodeWalker extends ParseTreeWalker { + override visitAwait(node: AwaitNode) { + foundAwait = true; + return false; + } + } + + const walker = new AwaitNodeWalker(); + walker.walk(node); + return foundAwait; +} + +// Determines whether two expressions match. Names are compared by value only +// unless an optional compareName function is provided. In that case, the +// compareName function is called to determine whether the two names match. +// This allows the caller to distinguish between names that are identical +// but defined in different scopes. +export function isMatchingExpression( + reference: ExpressionNode, + expression: ExpressionNode, + compareName?: (reference: NameNode, expression: NameNode) => boolean +): boolean { + if (reference.nodeType === ParseNodeType.Name) { + let nameToCompare: NameNode | undefined; + + if (expression.nodeType === ParseNodeType.Name) { + nameToCompare = expression; + } else if (expression.nodeType === ParseNodeType.AssignmentExpression) { + nameToCompare = expression.d.name; + } + + if (nameToCompare) { + if (reference.d.value !== nameToCompare.d.value) { + return false; + } + + if (compareName) { + return compareName(reference, nameToCompare); + } + + return true; + } + + return false; + } else if ( + reference.nodeType === ParseNodeType.MemberAccess && + expression.nodeType === ParseNodeType.MemberAccess + ) { + return ( + isMatchingExpression(reference.d.leftExpr, expression.d.leftExpr) && + reference.d.member.d.value === expression.d.member.d.value + ); + } else if (reference.nodeType === ParseNodeType.Index && expression.nodeType === ParseNodeType.Index) { + if (!isMatchingExpression(reference.d.leftExpr, expression.d.leftExpr)) { + return false; + } + + if ( + expression.d.items.length !== 1 || + expression.d.trailingComma || + expression.d.items[0].d.name || + expression.d.items[0].d.argCategory !== ArgCategory.Simple + ) { + return false; + } + + const expr = reference.d.items[0].d.valueExpr; + if 
(expr.nodeType === ParseNodeType.Number) { + const subscriptNode = expression.d.items[0].d.valueExpr; + if ( + subscriptNode.nodeType !== ParseNodeType.Number || + subscriptNode.d.isImaginary || + !subscriptNode.d.isInteger + ) { + return false; + } + + return expr.d.value === subscriptNode.d.value; + } + + if ( + expr.nodeType === ParseNodeType.UnaryOperation && + expr.d.operator === OperatorType.Subtract && + expr.d.expr.nodeType === ParseNodeType.Number + ) { + const subscriptNode = expression.d.items[0].d.valueExpr; + if ( + subscriptNode.nodeType !== ParseNodeType.UnaryOperation || + subscriptNode.d.operator !== OperatorType.Subtract || + subscriptNode.d.expr.nodeType !== ParseNodeType.Number || + subscriptNode.d.expr.d.isImaginary || + !subscriptNode.d.expr.d.isInteger + ) { + return false; + } + + return expr.d.expr.d.value === subscriptNode.d.expr.d.value; + } + + if (expr.nodeType === ParseNodeType.StringList) { + const referenceStringListNode = expr; + const subscriptNode = expression.d.items[0].d.valueExpr; + if ( + referenceStringListNode.d.strings.length === 1 && + referenceStringListNode.d.strings[0].nodeType === ParseNodeType.String && + subscriptNode.nodeType === ParseNodeType.StringList && + subscriptNode.d.strings.length === 1 && + subscriptNode.d.strings[0].nodeType === ParseNodeType.String + ) { + return referenceStringListNode.d.strings[0].d.value === subscriptNode.d.strings[0].d.value; + } + } + + return false; + } + + return false; +} + +export function isPartialMatchingExpression(reference: ExpressionNode, expression: ExpressionNode): boolean { + if (reference.nodeType === ParseNodeType.MemberAccess) { + return ( + isMatchingExpression(reference.d.leftExpr, expression) || + isPartialMatchingExpression(reference.d.leftExpr, expression) + ); + } else if (reference.nodeType === ParseNodeType.Index) { + return ( + isMatchingExpression(reference.d.leftExpr, expression) || + isPartialMatchingExpression(reference.d.leftExpr, expression) + ); + } + 
+ return false; +} + +export function isWithinDefaultParamInitializer(node: ParseNode) { + let curNode: ParseNode | undefined = node; + let prevNode: ParseNode | undefined; + + while (curNode) { + if (curNode.nodeType === ParseNodeType.Parameter && prevNode === curNode.d.defaultValue) { + return true; + } + + if ( + curNode.nodeType === ParseNodeType.Lambda || + curNode.nodeType === ParseNodeType.Function || + curNode.nodeType === ParseNodeType.Class || + curNode.nodeType === ParseNodeType.Module + ) { + return false; + } + + prevNode = curNode; + curNode = curNode.parent; + } + + return false; +} + +export function isWithinTypeAnnotation(node: ParseNode, requireQuotedAnnotation: boolean) { + let curNode: ParseNode | undefined = node; + let prevNode: ParseNode | undefined; + let isQuoted = false; + + while (curNode) { + if ( + curNode.nodeType === ParseNodeType.Parameter && + (prevNode === curNode.d.annotation || prevNode === curNode.d.annotationComment) + ) { + return isQuoted || !requireQuotedAnnotation; + } + + if (curNode.nodeType === ParseNodeType.Function && prevNode === curNode.d.returnAnnotation) { + return isQuoted || !requireQuotedAnnotation; + } + + if (curNode.nodeType === ParseNodeType.Function && prevNode === curNode.d.funcAnnotationComment) { + // Type comments are always considered forward declarations even though + // they're not "quoted". + return true; + } + + if (curNode.nodeType === ParseNodeType.TypeAnnotation && prevNode === curNode.d.annotation) { + return isQuoted || !requireQuotedAnnotation; + } + + if (curNode.nodeType === ParseNodeType.Assignment && prevNode === curNode.d.annotationComment) { + // Type comments are always considered forward declarations even though + // they're not "quoted". 
+ return true; + } + + if (curNode.nodeType === ParseNodeType.StringList && prevNode === curNode.d.annotation) { + isQuoted = true; + } + + if ( + curNode.nodeType === ParseNodeType.Lambda || + curNode.nodeType === ParseNodeType.Function || + curNode.nodeType === ParseNodeType.Class || + curNode.nodeType === ParseNodeType.Module + ) { + return false; + } + + prevNode = curNode; + curNode = curNode.parent; + } + + return false; +} + +export function isWithinAnnotationComment(node: ParseNode) { + let curNode: ParseNode | undefined = node; + let prevNode: ParseNode | undefined; + + while (curNode) { + if (curNode.nodeType === ParseNodeType.Function && prevNode === curNode.d.funcAnnotationComment) { + // Type comments are always considered forward declarations even though + // they're not "quoted". + return true; + } + + if (curNode.nodeType === ParseNodeType.Assignment && prevNode === curNode.d.annotationComment) { + // Type comments are always considered forward declarations even though + // they're not "quoted". 
+ return true; + } + + if ( + curNode.nodeType === ParseNodeType.Lambda || + curNode.nodeType === ParseNodeType.Function || + curNode.nodeType === ParseNodeType.Class || + curNode.nodeType === ParseNodeType.Module + ) { + return false; + } + + prevNode = curNode; + curNode = curNode.parent; + } + + return false; +} + +export function isWithinLoop(node: ParseNode): boolean { + let curNode: ParseNode | undefined = node; + + while (curNode) { + switch (curNode.nodeType) { + case ParseNodeType.For: + case ParseNodeType.While: { + return true; + } + + case ParseNodeType.Module: { + break; + } + } + + curNode = curNode.parent; + } + + return false; +} + +export function isWithinAssertExpression(node: ParseNode): boolean { + let curNode: ParseNode | undefined = node; + let prevNode: ParseNode | undefined; + + while (curNode) { + switch (curNode.nodeType) { + case ParseNodeType.Assert: { + return curNode.d.testExpr === prevNode; + } + } + + prevNode = curNode; + curNode = curNode.parent; + } + + return false; +} + +export function getDocString(statements: StatementNode[]): string | undefined { + // See if the first statement in the suite is a triple-quote string. + if (statements.length === 0) { + return undefined; + } + + if (statements[0].nodeType !== ParseNodeType.StatementList) { + return undefined; + } + + if (!isDocString(statements[0])) { + return undefined; + } + + // It's up to the user to convert normalize/convert this as needed. + const strings = (statements[0].d.statements[0] as StringListNode).d.strings; + if (strings.length === 1) { + return strings[0].d.value; + } + + return strings.map((s) => s.d.value).join(''); +} + +export function isDocString(statementList: StatementListNode): boolean { + // If the first statement in the suite isn't a StringNode, + // assume there is no docString. 
+ if ( + statementList.d.statements.length === 0 || + statementList.d.statements[0].nodeType !== ParseNodeType.StringList + ) { + return false; + } + + // A docstring can consist of multiple joined strings in a single expression. + const strings = statementList.d.statements[0].d.strings; + if (strings.length === 0) { + return false; + } + + // Any f-strings invalidate the entire docstring. + if (strings.some((n) => n.nodeType === ParseNodeType.FormatString)) { + return false; + } + + // It's up to the user to convert normalize/convert this as needed. + return true; +} + +// Sometimes a NamedTuple assignment statement is followed by a statement +// that looks like the following: +// MyNamedTuple.__new__.__defaults__ = ... +// This pattern is commonly used to set the default values that are +// not specified in the original list. +export function isAssignmentToDefaultsFollowingNamedTuple(callNode: ParseNode): boolean { + if ( + callNode.nodeType !== ParseNodeType.Call || + !callNode.parent || + callNode.parent.nodeType !== ParseNodeType.Assignment || + callNode.parent.d.leftExpr.nodeType !== ParseNodeType.Name || + !callNode.parent.parent || + callNode.parent.parent.nodeType !== ParseNodeType.StatementList + ) { + return false; + } + + const namedTupleAssignedName = callNode.parent.d.leftExpr.d.value; + const statementList = callNode.parent.parent; + if ( + statementList.d.statements[0] !== callNode.parent || + !statementList.parent || + !( + statementList.parent.nodeType === ParseNodeType.Module || + statementList.parent.nodeType === ParseNodeType.Suite + ) + ) { + return false; + } + + const moduleOrSuite = statementList.parent; + let statementIndex = moduleOrSuite.d.statements.findIndex((s) => s === statementList); + + if (statementIndex < 0) { + return false; + } + statementIndex++; + + while (statementIndex < moduleOrSuite.d.statements.length) { + const nextStatement = moduleOrSuite.d.statements[statementIndex]; + if (nextStatement.nodeType !== 
ParseNodeType.StatementList) { + break; + } + + if (nextStatement.d.statements[0]?.nodeType === ParseNodeType.StringList) { + // Skip over comments + statementIndex++; + continue; + } + + if (nextStatement.d.statements[0]?.nodeType === ParseNodeType.Assignment) { + const assignNode = nextStatement.d.statements[0]; + if ( + assignNode.d.leftExpr.nodeType === ParseNodeType.MemberAccess && + assignNode.d.leftExpr.d.member.d.value === '__defaults__' + ) { + const defaultTarget = assignNode.d.leftExpr.d.leftExpr; + if ( + defaultTarget.nodeType === ParseNodeType.MemberAccess && + defaultTarget.d.member.d.value === '__new__' && + defaultTarget.d.leftExpr.nodeType === ParseNodeType.Name && + defaultTarget.d.leftExpr.d.value === namedTupleAssignedName + ) { + return true; + } + } + } + + break; + } + + return false; +} + +// This simple parse tree walker calls a callback function +// for each NameNode it encounters. +export class NameNodeWalker extends ParseTreeWalker { + private _subscriptIndex: number | undefined; + private _baseExpression: ExpressionNode | undefined; + + constructor( + private _callback: ( + node: NameNode, + subscriptIndex: number | undefined, + baseExpression: ExpressionNode | undefined + ) => void + ) { + super(); + } + + override visitName(node: NameNode) { + this._callback(node, this._subscriptIndex, this._baseExpression); + return true; + } + + override visitIndex(node: IndexNode) { + this.walk(node.d.leftExpr); + + const prevSubscriptIndex = this._subscriptIndex; + const prevBaseExpression = this._baseExpression; + this._baseExpression = node.d.leftExpr; + + node.d.items.forEach((item, index) => { + this._subscriptIndex = index; + this.walk(item); + }); + + this._subscriptIndex = prevSubscriptIndex; + this._baseExpression = prevBaseExpression; + + return false; + } +} + +export class CallNodeWalker extends ParseTreeWalker { + constructor(private _callback: (node: CallNode) => void) { + super(); + } + + override visitCall(node: CallNode) { + 
this._callback(node); + return true; + } +} + +export function getEnclosingParam(node: ParseNode): ParameterNode | undefined { + let curNode: ParseNode | undefined = node; + + while (curNode) { + if (curNode.nodeType === ParseNodeType.Parameter) { + return curNode; + } + + if (curNode.nodeType === ParseNodeType.Function) { + return undefined; + } + + curNode = curNode.parent; + } + + return undefined; +} + +export function getCallNodeAndActiveParamIndex( + node: ParseNode, + insertionOffset: number, + tokens: TextRangeCollection +) { + // Find the call node that contains the specified node. + let curNode: ParseNode | undefined = node; + let callNode: CallNode | undefined; + + while (curNode !== undefined) { + // make sure we only look at callNodes when we are inside their arguments + if (curNode.nodeType === ParseNodeType.Call) { + if (isOffsetInsideCallArgs(tokens, curNode, insertionOffset)) { + callNode = curNode; + break; + } + } + curNode = curNode.parent; + } + + if (!callNode || !callNode.d.args) { + return undefined; + } + + const endPosition = TextRange.getEnd(callNode); + if (insertionOffset > endPosition) { + return undefined; + } + + const tokenAtEnd = getTokenAt(tokens, endPosition - 1); + if (insertionOffset === endPosition && tokenAtEnd?.type === TokenType.CloseParenthesis) { + return undefined; + } + + let addedActive = false; + let activeIndex = -1; + let activeOrFake = false; + callNode.d.args.forEach((arg, index) => { + if (addedActive) { + return; + } + + // Calculate the argument's bounds including whitespace and colons. + let start = arg.start; + const startTokenIndex = tokens.getItemAtPosition(start); + if (startTokenIndex >= 0) { + start = TextRange.getEnd(tokens.getItemAt(startTokenIndex - 1)); + } + + let end = TextRange.getEnd(arg); + const endTokenIndex = tokens.getItemAtPosition(end); + if (endTokenIndex >= 0) { + // Find the true end of the argument by searching for the + // terminating comma or parenthesis. 
+ for (let i = endTokenIndex; i < tokens.count; i++) { + const tok = tokens.getItemAt(i); + + switch (tok.type) { + case TokenType.Comma: + case TokenType.CloseParenthesis: + break; + default: + continue; + } + + end = TextRange.getEnd(tok); + break; + } + } + + if (insertionOffset < end) { + activeIndex = index; + activeOrFake = insertionOffset >= start; + addedActive = true; + } + }); + + if (!addedActive) { + activeIndex = callNode.d.args.length + 1; + } + + return { + callNode, + activeIndex, + activeOrFake, + }; + + function isOffsetInsideCallArgs(tokens: TextRangeCollection, node: CallNode, offset: number) { + const argumentStart = + node.d.leftExpr.length > 0 ? TextRange.getEnd(node.d.leftExpr) - 1 : node.d.leftExpr.start; + + // Handle obvious case first. + const callEndOffset = TextRange.getEnd(node); + if (offset < argumentStart || callEndOffset < offset) { + return false; + } + + if (node.d.args.length > 0) { + const start = node.d.args[0].start; + const end = TextRange.getEnd(node.d.args[node.d.args.length - 1]); + if (start <= offset && offset < end) { + return true; + } + } + + const index = tokens.getItemAtPosition(argumentStart); + if (index < 0 || tokens.count <= index) { + return true; + } + + const nextToken = tokens.getItemAt(index + 1); + if (nextToken.type === TokenType.OpenParenthesis && offset < TextRange.getEnd(nextToken)) { + // Position must be after '('. 
+ return false; + } + + return true; + } +} + +export function getTokenIndexAtLeft( + tokens: TextRangeCollection, + position: number, + includeWhitespace = false, + includeZeroLengthToken = false +) { + const index = tokens.getItemAtPosition(position); + if (index < 0) { + return -1; + } + + for (let i = index; i >= 0; i--) { + const token = tokens.getItemAt(i); + if (!includeZeroLengthToken && token.length === 0) { + continue; + } + + if (!includeWhitespace && Tokenizer.isWhitespace(token)) { + continue; + } + + if (TextRange.getEnd(token) <= position) { + return i; + } + } + + return -1; +} + +export function getTokenAtLeft( + tokens: TextRangeCollection, + position: number, + includeWhitespace = false, + includeZeroLengthToken = false +) { + const index = getTokenIndexAtLeft(tokens, position, includeWhitespace, includeZeroLengthToken); + if (index < 0) { + return undefined; + } + + return tokens.getItemAt(index); +} + +export function getTokenIndexAfter( + tokens: TextRangeCollection, + position: number, + predicate: (t: Token) => boolean +) { + const index = tokens.getItemAtPosition(position); + if (index < 0) { + return -1; + } + + for (let i = index; i < tokens.length; i++) { + const token = tokens.getItemAt(i); + if (predicate(token)) { + return i; + } + } + + return -1; +} + +export function getTokenAfter(tokens: TextRangeCollection, position: number, predicate: (t: Token) => boolean) { + const index = getTokenIndexAfter(tokens, position, predicate); + if (index < 0) { + return undefined; + } + + return tokens.getItemAt(index); +} + +export function getTokenAtIndex(tokens: TextRangeCollection, index: number) { + if (index < 0) { + return undefined; + } + + return tokens.getItemAt(index); +} + +export function getTokenAt(tokens: TextRangeCollection, position: number) { + return getTokenAtIndex(tokens, tokens.getItemAtPosition(position)); +} + +export function getTokenOverlapping(tokens: TextRangeCollection, position: number) { + const index = 
getIndexOfTokenOverlapping(tokens, position); + return getTokenAtIndex(tokens, index); +} + +export function getIndexOfTokenOverlapping(tokens: TextRangeCollection, position: number) { + const index = tokens.getItemAtPosition(position); + if (index < 0) { + return -1; + } + + const token = tokens.getItemAt(index); + + return TextRange.overlaps(token, position) ? index : -1; +} + +export function getCommentsAtTokenIndex(tokens: TextRangeCollection, index: number) { + let token = getTokenAtIndex(tokens, index); + if (!token) { + return undefined; + } + + // If the preceding token has the same start offset + // (in other words, when tokens have zero length and they're piled on top of each other) + // look back through the tokens until we find the first token with that start offset. + // That's where the comments (if any) will be. + for (let precedingIndex = index - 1; precedingIndex >= 0; --precedingIndex) { + const precedingToken = getTokenAtIndex(tokens, precedingIndex); + if (precedingToken && precedingToken.start === token.start) { + token = precedingToken; + } else { + break; + } + } + + return token.comments; +} + +export function printParseNodeType(type: ParseNodeType): string { + return ParseNodeTypeNameMap[type] ?? 
'Unknown'; +} + +export function isWriteAccess(node: NameNode) { + let prevNode: ParseNode = node; + let curNode: ParseNode | undefined = prevNode.parent; + + while (curNode) { + switch (curNode.nodeType) { + case ParseNodeType.Assignment: { + return prevNode === curNode.d.leftExpr; + } + + case ParseNodeType.AugmentedAssignment: { + return prevNode === curNode.d.leftExpr; + } + + case ParseNodeType.AssignmentExpression: { + return prevNode === curNode.d.name; + } + + case ParseNodeType.Del: { + return true; + } + + case ParseNodeType.For: { + return prevNode === curNode.d.targetExpr; + } + + case ParseNodeType.ImportAs: { + return ( + prevNode === curNode.d.alias || + (curNode.d.module.d.nameParts.length > 0 && prevNode === curNode.d.module.d.nameParts[0]) + ); + } + + case ParseNodeType.ImportFromAs: { + return prevNode === curNode.d.alias || (!curNode.d.alias && prevNode === curNode.d.name); + } + + case ParseNodeType.MemberAccess: { + if (prevNode !== curNode.d.member) { + return false; + } + break; + } + + case ParseNodeType.Except: { + return prevNode === curNode.d.name; + } + + case ParseNodeType.With: { + return curNode.d.withItems.some((item) => item === prevNode); + } + + case ParseNodeType.ComprehensionFor: { + return prevNode === curNode.d.targetExpr; + } + + case ParseNodeType.TypeAnnotation: { + if (prevNode === curNode.d.annotation) { + return false; + } + break; + } + + case ParseNodeType.Function: + case ParseNodeType.Class: + case ParseNodeType.Module: { + return false; + } + } + + prevNode = curNode; + curNode = curNode.parent; + } + + return false; +} + +export function getMatchingDescendants(node: ParseNode, match: (n: ParseNode) => boolean): ParseNode[] { + const matches: ParseNode[] = []; + const children = getChildNodes(node); + for (const child of children) { + if (child && match(child)) { + matches.push(child); + } + if (child) { + matches.push(...getMatchingDescendants(child, match)); + } + } + return matches; +} + +export function 
getModuleNode(node: ParseNode) { + let current: ParseNode | undefined = node; + while (current && current.nodeType !== ParseNodeType.Module) { + current = current.parent; + } + + return current; +} + +export function getFileInfoFromNode(node: ParseNode) { + const current = getModuleNode(node); + return current ? AnalyzerNodeInfo.getFileInfo(current) : undefined; +} + +export function isFunctionSuiteEmpty(node: FunctionNode) { + let isEmpty = true; + + node.d.suite.d.statements.forEach((statement) => { + if (statement.nodeType === ParseNodeType.Error) { + return; + } else if (statement.nodeType === ParseNodeType.StatementList) { + statement.d.statements.forEach((subStatement) => { + // Allow docstrings, ellipsis, and pass statements. + if ( + subStatement.nodeType !== ParseNodeType.Ellipsis && + subStatement.nodeType !== ParseNodeType.StringList && + subStatement.nodeType !== ParseNodeType.Pass + ) { + isEmpty = false; + } + }); + } else { + isEmpty = false; + } + }); + + return isEmpty; +} + +export function getTypeAnnotationForParam(node: FunctionNode, paramIndex: number): ExpressionNode | undefined { + if (paramIndex >= node.d.params.length) { + return undefined; + } + + const param = node.d.params[paramIndex]; + if (param.d.annotation) { + return param.d.annotation; + } else if (param.d.annotationComment) { + return param.d.annotationComment; + } + + if (!node.d.funcAnnotationComment || node.d.funcAnnotationComment.d.isEllipsis) { + return undefined; + } + + let firstCommentAnnotationIndex = 0; + const paramAnnotations = node.d.funcAnnotationComment.d.paramAnnotations; + if (paramAnnotations.length < node.d.params.length) { + firstCommentAnnotationIndex = 1; + } + + const adjIndex = paramIndex - firstCommentAnnotationIndex; + if (adjIndex < 0 || adjIndex >= paramAnnotations.length) { + return undefined; + } + + return paramAnnotations[adjIndex]; +} + +export function isImportModuleName(node: ParseNode): boolean { + return getFirstAncestorOrSelfOfKind(node, 
ParseNodeType.ModuleName)?.parent?.nodeType === ParseNodeType.ImportAs; +} + +export function isImportAlias(node: ParseNode): boolean { + return node.parent?.nodeType === ParseNodeType.ImportAs && node.parent.d.alias === node; +} + +export function isFromImportModuleName(node: ParseNode): boolean { + return getFirstAncestorOrSelfOfKind(node, ParseNodeType.ModuleName)?.parent?.nodeType === ParseNodeType.ImportFrom; +} + +export function isFromImportName(node: ParseNode): boolean { + return node.parent?.nodeType === ParseNodeType.ImportFromAs && node.parent.d.name === node; +} + +export function isFromImportAlias(node: ParseNode): boolean { + return node.parent?.nodeType === ParseNodeType.ImportFromAs && node.parent.d.alias === node; +} + +export function isLastNameOfModuleName(node: NameNode): boolean { + if (node.parent?.nodeType !== ParseNodeType.ModuleName) { + return false; + } + + const module = node.parent; + if (module.d.nameParts.length === 0) { + return false; + } + + return module.d.nameParts[module.d.nameParts.length - 1] === node; +} + +export function* getAncestorsIncludingSelf(node: ParseNode | undefined) { + while (node !== undefined) { + yield node; + node = node.parent; + } +} + +type NodeForType = T extends ParseNode & { nodeType: NT } ? 
T : never; + +export function getFirstAncestorOrSelfOfKind( + node: ParseNode | undefined, + type: NT +): NodeForType | undefined { + return getFirstAncestorOrSelf(node, (n) => n.nodeType === type) as NodeForType | undefined; +} + +export function getFirstAncestorOrSelf( + node: ParseNode | undefined, + predicate: (node: ParseNode) => boolean +): ParseNode | undefined { + for (const current of getAncestorsIncludingSelf(node)) { + if (predicate(current)) { + return current; + } + } + + return undefined; +} + +export function getDottedNameWithGivenNodeAsLastName(node: NameNode): MemberAccessNode | NameNode { + // Shape of dotted name is + // MemberAccess (ex, a.b) + // Name Name + // or + // MemberAccess (ex, a.b.c) + // MemberAccess Name + // Name Name + if (node.parent?.nodeType !== ParseNodeType.MemberAccess) { + return node; + } + + if (node.parent.d.leftExpr === node) { + return node; + } + + return node.parent; +} + +// +// Returns the dotted name that makes up the expression for the decorator. +// Example: +// @pytest.fixture() +// def my_fixture(): +// pass +// +// would return `pytest.fixture` +export function getDecoratorName(decorator: DecoratorNode): string | undefined { + function getExpressionName(node: ExpressionNode): string | undefined { + if (node.nodeType === ParseNodeType.Name || node.nodeType === ParseNodeType.MemberAccess) { + return getDottedName(node) + ?.map((n) => n.d.value) + .join('.'); + } + if (node.nodeType === ParseNodeType.Call) { + return getExpressionName(node.d.leftExpr); + } + + return undefined; + } + + return getExpressionName(decorator.d.expr); +} + +export function getDottedName(node: MemberAccessNode | NameNode): NameNode[] | undefined { + // ex) [a] or [a].b + // simple case, [a] + if (node.nodeType === ParseNodeType.Name) { + return [node]; + } + + // dotted name case. 
+ const names: NameNode[] = []; + if (_getDottedName(node, names)) { + return names.reverse(); + } + + return undefined; + + function _getDottedName(node: MemberAccessNode | NameNode, names: NameNode[]): boolean { + if (node.nodeType === ParseNodeType.Name) { + names.push(node); + return true; + } + + names.push(node.d.member); + + if ( + node.d.leftExpr.nodeType === ParseNodeType.Name || + node.d.leftExpr.nodeType === ParseNodeType.MemberAccess + ) { + return _getDottedName(node.d.leftExpr, names); + } + + return false; + } +} + +export function getFirstNameOfDottedName(node: MemberAccessNode | NameNode): NameNode | undefined { + // ex) [a] or [a].b + if (node.nodeType === ParseNodeType.Name) { + return node; + } + + if (node.d.leftExpr.nodeType === ParseNodeType.Name || node.d.leftExpr.nodeType === ParseNodeType.MemberAccess) { + return getFirstNameOfDottedName(node.d.leftExpr); + } + + return undefined; +} + +export function isFirstNameOfDottedName(node: NameNode): boolean { + // ex) [A] or [A].B.C.D + if (node.parent?.nodeType !== ParseNodeType.MemberAccess) { + return true; + } + + if (node.parent.d.leftExpr === node) { + return true; + } + + return false; +} + +export function isLastNameOfDottedName(node: NameNode): boolean { + // ex) A or D.C.B.[A] + if (node.parent?.nodeType !== ParseNodeType.MemberAccess) { + return true; + } + + if ( + node.parent.d.leftExpr.nodeType !== ParseNodeType.Name && + node.parent.d.leftExpr.nodeType !== ParseNodeType.MemberAccess + ) { + return false; + } + + if (node.parent.d.leftExpr === node) { + return false; + } + + return node.parent.parent?.nodeType !== ParseNodeType.MemberAccess; +} + +export function getStringNodeValueRange(node: StringNode) { + return getStringValueRange(node.d.token); +} + +export function getStringValueRange(token: StringToken) { + const length = token.quoteMarkLength; + const hasEnding = !(token.flags & StringTokenFlags.Unterminated); + return TextRange.create(token.start + length, token.length - 
length - (hasEnding ? length : 0)); +} + +export function getFullStatementRange( + statementNode: ParseNode, + parseFileResults: ParseFileResults, + options?: { includeTrailingBlankLines: boolean } +): Range { + const range = convertTextRangeToRange(statementNode, parseFileResults.tokenizerOutput.lines); + + const start = _getStartPositionIfMultipleStatementsAreOnSameLine( + range, + statementNode.start, + parseFileResults.tokenizerOutput + ) ?? { + line: range.start.line, + character: 0, + }; + + // First, see whether there are other tokens except semicolon or new line on the same line. + const end = _getEndPositionIfMultipleStatementsAreOnSameLine( + range, + TextRange.getEnd(statementNode), + parseFileResults.tokenizerOutput + ); + + if (end) { + return { start, end }; + } + + // If not, delete the whole line. + if (range.end.line === parseFileResults.tokenizerOutput.lines.count - 1) { + return { start, end: range.end }; + } + + let lineDeltaToAdd = 1; + if (options) { + if (options.includeTrailingBlankLines) { + for (let i = lineDeltaToAdd; range.end.line + i < parseFileResults.tokenizerOutput.lines.count; i++) { + if (!isBlankLine(parseFileResults.tokenizerOutput, parseFileResults.text, range.end.line + i)) { + lineDeltaToAdd = i; + break; + } + } + } + } + + return { start, end: { line: range.end.line + lineDeltaToAdd, character: 0 } }; +} + +export function isBlankLine(tokenizerOutput: TokenizerOutput, text: string, line: number) { + const span = tokenizerOutput.lines.getItemAt(line); + return containsOnlyWhitespace(text, span.start, TextRange.getEnd(span)); +} + +export function isUnannotatedFunction(node: FunctionNode) { + return ( + node.d.returnAnnotation === undefined && + node.d.params.every((param) => param.d.annotation === undefined && param.d.annotationComment === undefined) + ); +} + +// Verifies that an import of the form "from __future__ import x" +// occurs only at the top of a file. 
This mirrors the algorithm used +// in the CPython interpreter. +export function isValidLocationForFutureImport(node: ImportFromNode): boolean { + const module = getModuleNode(node); + assert(module); + + let sawDocString = false; + + for (const statement of module.d.statements) { + if (statement.nodeType !== ParseNodeType.StatementList) { + return false; + } + + for (const simpleStatement of statement.d.statements) { + if (simpleStatement === node) { + return true; + } + + if (simpleStatement.nodeType === ParseNodeType.StringList) { + if (sawDocString) { + return false; + } + sawDocString = true; + } else if (simpleStatement.nodeType === ParseNodeType.ImportFrom) { + if ( + simpleStatement.d.module.d.leadingDots !== 0 || + simpleStatement.d.module.d.nameParts.length !== 1 || + simpleStatement.d.module.d.nameParts[0].d.value !== '__future__' + ) { + return false; + } + } else { + return false; + } + } + } + + return false; +} + +// "Chaining" is when binary operators can be chained together +// as a shorthand. For example, "a < b < c" is shorthand for +// "a < b and b < c". +export function operatorSupportsChaining(op: OperatorType) { + switch (op) { + case OperatorType.Equals: + case OperatorType.NotEquals: + case OperatorType.LessThan: + case OperatorType.LessThanOrEqual: + case OperatorType.GreaterThan: + case OperatorType.GreaterThanOrEqual: + case OperatorType.Is: + case OperatorType.IsNot: + case OperatorType.In: + case OperatorType.NotIn: + return true; + } + + return false; +} + +// If the statement is a part of multiple statements on the same line +// and the statement is not the first statement on the line, then it will return +// appropriate start position. otherwise, return undefined. 
+// ex) a = 1; [|b = 1|] +function _getStartPositionIfMultipleStatementsAreOnSameLine( + range: Range, + tokenPosition: number, + tokenizerOutput: TokenizerOutput +): Position | undefined { + const tokenIndex = tokenizerOutput.tokens.getItemAtPosition(tokenPosition); + if (tokenIndex < 0) { + return undefined; + } + + // Find the last token index on the previous line or the first token. + let currentIndex = tokenIndex; + for (; currentIndex > 0; currentIndex--) { + const token = tokenizerOutput.tokens.getItemAt(currentIndex); + const tokenRange = convertTextRangeToRange(token, tokenizerOutput.lines); + if (tokenRange.end.line !== range.start.line) { + break; + } + } + + // Find the previous token of the first token of the statement. + for (let index = tokenIndex - 1; index > currentIndex; index--) { + const token = tokenizerOutput.tokens.getItemAt(index); + + // Eat up indentation + if (token.type === TokenType.Indent || token.type === TokenType.Dedent) { + continue; + } + + // If previous token is new line, use default. + if (token.type === TokenType.NewLine) { + return undefined; + } + + // Anything else (ex, semicolon), use statement start as it is. + return range.start; + } + + return undefined; +} + +// If the statement is a part of multiple statements on the same line +// and the statement is not the last statement on the line, then it will return +// appropriate end position. otherwise, return undefined. +// ex) [|a = 1|]; b = 1 +function _getEndPositionIfMultipleStatementsAreOnSameLine( + range: Range, + tokenPosition: number, + tokenizerOutput: TokenizerOutput +): Position | undefined { + const tokenIndex = tokenizerOutput.tokens.getItemAtPosition(tokenPosition); + if (tokenIndex < 0) { + return undefined; + } + + // Find the first token index on the next line or the last token. 
+ let currentIndex = tokenIndex; + for (; currentIndex < tokenizerOutput.tokens.count; currentIndex++) { + const token = tokenizerOutput.tokens.getItemAt(currentIndex); + const tokenRange = convertTextRangeToRange(token, tokenizerOutput.lines); + if (range.end.line !== tokenRange.start.line) { + break; + } + } + + // Find the next token of the last token of the statement. + let foundStatementEnd = false; + for (let index = tokenIndex; index < currentIndex; index++) { + const token = tokenizerOutput.tokens.getItemAt(index); + + // Eat up semicolon or new line. + if (token.type === TokenType.Semicolon || token.type === TokenType.NewLine) { + foundStatementEnd = true; + continue; + } + + if (!foundStatementEnd) { + continue; + } + + const tokenRange = convertTextRangeToRange(token, tokenizerOutput.lines); + return tokenRange.start; + } + + return undefined; +} + +export function getVariableDocStringNode(node: ExpressionNode): StringListNode | undefined { + // Walk up the parse tree to find an assignment or type alias statement. + let curNode: ParseNode | undefined = node; + let annotationNode: TypeAnnotationNode | undefined; + + while (curNode) { + if (curNode.nodeType === ParseNodeType.Assignment) { + break; + } + + if (curNode.nodeType === ParseNodeType.TypeAlias) { + break; + } + + if (curNode.nodeType === ParseNodeType.Suite) { + break; + } + + if (curNode.nodeType === ParseNodeType.TypeAnnotation && !annotationNode) { + annotationNode = curNode; + } + + curNode = curNode.parent; + } + + if (curNode?.nodeType !== ParseNodeType.Assignment && curNode?.nodeType !== ParseNodeType.TypeAlias) { + // Allow a simple annotation statement to have a docstring even + // though PEP 258 doesn't mention this case. This PEP pre-dated + // PEP 526, so it didn't contemplate this situation. 
+ if (annotationNode) { + curNode = annotationNode; + } else { + return undefined; + } + } + + const parentNode = curNode.parent; + if (parentNode?.nodeType !== ParseNodeType.StatementList) { + return undefined; + } + + const suiteOrModule = parentNode.parent; + if ( + !suiteOrModule || + (suiteOrModule.nodeType !== ParseNodeType.Module && suiteOrModule.nodeType !== ParseNodeType.Suite) + ) { + return undefined; + } + + const assignmentIndex = suiteOrModule.d.statements.findIndex((node) => node === parentNode); + if (assignmentIndex < 0 || assignmentIndex === suiteOrModule.d.statements.length - 1) { + return undefined; + } + + const nextStatement = suiteOrModule.d.statements[assignmentIndex + 1]; + + if (nextStatement.nodeType !== ParseNodeType.StatementList || !isDocString(nextStatement)) { + return undefined; + } + + // See if the assignment is within one of the contexts specified in PEP 258. + let isValidContext = false; + if (parentNode?.parent?.nodeType === ParseNodeType.Module) { + // If we're at the top level of a module, the attribute docstring is valid. + isValidContext = true; + } else if ( + parentNode?.parent?.nodeType === ParseNodeType.Suite && + parentNode?.parent?.parent?.nodeType === ParseNodeType.Class + ) { + // If we're at the top level of a class, the attribute docstring is valid. + isValidContext = true; + } else { + const func = getEnclosingFunction(parentNode); + + // If we're within an __init__ method, the attribute docstring is valid. + if (func && func.d.name.d.value === '__init__' && getEnclosingClass(func, /* stopAtFunction */ true)) { + isValidContext = true; + } + } + + if (!isValidContext) { + return undefined; + } + + // A docstring can consist of multiple joined strings in a single expression. + return nextStatement.d.statements[0] as StringListNode; +} + +// Creates an ID that identifies this parse node in a way that will +// not change each time the file is parsed (unless, of course, the +// file contents change). 
+export function getScopeIdForNode(node: ParseNode): string { + let name = ''; + if (node.nodeType === ParseNodeType.Class) { + name = node.d.name.d.value; + } else if (node.nodeType === ParseNodeType.Function) { + name = node.d.name.d.value; + } + + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + return `${fileInfo.fileId}.${node.start.toString()}-${name}`; +} + +// Walks up the parse tree and finds all scopes that can provide +// a context for a TypeVar and returns the scope ID for each. +export function getTypeVarScopesForNode(node: ParseNode): TypeVarScopeId[] { + const scopeIds: TypeVarScopeId[] = []; + + let curNode: ParseNode | undefined = node; + while (curNode) { + curNode = getTypeVarScopeNode(curNode); + if (!curNode) { + break; + } + + scopeIds.push(getScopeIdForNode(curNode)); + curNode = curNode.parent; + } + + return scopeIds; +} + +export function checkDecorator(node: DecoratorNode, value: string): boolean { + return node.d.expr.nodeType === ParseNodeType.Name && node.d.expr.d.value === value; +} + +export function isSimpleDefault(node: ExpressionNode): boolean { + switch (node.nodeType) { + case ParseNodeType.Number: + case ParseNodeType.Constant: + case ParseNodeType.MemberAccess: + return true; + + case ParseNodeType.String: + return (node.d.token.flags & (StringTokenFlags.Format | StringTokenFlags.Template)) === 0; + + case ParseNodeType.StringList: + return node.d.strings.every(isSimpleDefault); + + case ParseNodeType.UnaryOperation: + return isSimpleDefault(node.d.expr); + + case ParseNodeType.BinaryOperation: + return isSimpleDefault(node.d.leftExpr) && isSimpleDefault(node.d.rightExpr); + + default: + return false; + } +} + +export function getPreviousNonWhitespaceToken(tokens: TextRangeCollection, offset: number): Token | undefined { + let tokenIndex = tokens.getItemAtPosition(offset); + + while (tokenIndex >= 0) { + const token = tokens.getItemAt(tokenIndex); + if (!Tokenizer.isWhitespace(token)) { + return token; + } + + tokenIndex 
-= 1; + } + + return undefined; +} + +export function getNextNonWhitespaceToken(tokens: TextRangeCollection, offset: number): Token | undefined { + return getNextMatchingToken(tokens, offset, (token) => !Tokenizer.isWhitespace(token)); +} + +export function getNextMatchingToken( + tokens: TextRangeCollection, + offset: number, + match: (token: Token) => boolean, + exit: (token: Token) => boolean = () => false +): Token | undefined { + let tokenIndex = tokens.getItemAtPosition(offset) + 1; + while (tokenIndex < tokens.count) { + const token = tokens.getItemAt(tokenIndex); + if (match(token)) { + return token; + } + if (exit(token)) { + return undefined; + } + tokenIndex += 1; + } + + return undefined; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/parseTreeWalker.ts b/python-parser/packages/pyright-internal/src/analyzer/parseTreeWalker.ts new file mode 100644 index 00000000..f5a83ed0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/parseTreeWalker.ts @@ -0,0 +1,935 @@ +/* + * parseTreeWalker.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Class that traverses a parse tree. 
+ */ + +import * as debug from '../common/debug'; +import { + ArgumentNode, + AssertNode, + AssignmentExpressionNode, + AssignmentNode, + AugmentedAssignmentNode, + AwaitNode, + BinaryOperationNode, + BreakNode, + CallNode, + CaseNode, + ClassNode, + ComprehensionForNode, + ComprehensionIfNode, + ComprehensionNode, + ConstantNode, + ContinueNode, + DecoratorNode, + DelNode, + DictionaryExpandEntryNode, + DictionaryKeyEntryNode, + DictionaryNode, + EllipsisNode, + ErrorNode, + ExceptNode, + ForNode, + FormatStringNode, + FunctionAnnotationNode, + FunctionNode, + GlobalNode, + IfNode, + ImportAsNode, + ImportFromAsNode, + ImportFromNode, + ImportNode, + IndexNode, + LambdaNode, + ListNode, + MatchNode, + MemberAccessNode, + ModuleNameNode, + ModuleNode, + NameNode, + NonlocalNode, + NumberNode, + ParameterNode, + ParseNode, + ParseNodeArray, + ParseNodeType, + PassNode, + PatternAsNode, + PatternCaptureNode, + PatternClassArgumentNode, + PatternClassNode, + PatternLiteralNode, + PatternMappingExpandEntryNode, + PatternMappingKeyEntryNode, + PatternMappingNode, + PatternSequenceNode, + PatternValueNode, + RaiseNode, + ReturnNode, + SetNode, + SliceNode, + StatementListNode, + StringListNode, + StringNode, + SuiteNode, + TernaryNode, + TryNode, + TupleNode, + TypeAliasNode, + TypeAnnotationNode, + TypeParameterListNode, + TypeParameterNode, + UnaryOperationNode, + UnpackNode, + WhileNode, + WithItemNode, + WithNode, + YieldFromNode, + YieldNode, +} from '../parser/parseNodes'; + +// Get child nodes of the given node. +export function getChildNodes(node: ParseNode): (ParseNode | undefined)[] { + switch (node.nodeType) { + case ParseNodeType.Error: + return [node.d.child, ...(node.d.decorators ?? 
[])]; + + case ParseNodeType.Argument: + return [node.d.name, node.d.valueExpr]; + + case ParseNodeType.Assert: + return [node.d.testExpr, node.d.exceptionExpr]; + + case ParseNodeType.AssignmentExpression: + return [node.d.name, node.d.rightExpr]; + + case ParseNodeType.Assignment: + return [node.d.leftExpr, node.d.rightExpr, node.d.annotationComment]; + + case ParseNodeType.AugmentedAssignment: + return [node.d.leftExpr, node.d.rightExpr]; + + case ParseNodeType.Await: + return [node.d.expr]; + + case ParseNodeType.BinaryOperation: + return [node.d.leftExpr, node.d.rightExpr]; + + case ParseNodeType.Break: + return []; + + case ParseNodeType.Call: + return [node.d.leftExpr, ...node.d.args]; + + case ParseNodeType.Case: + return [node.d.pattern, node.d.guardExpr, node.d.suite]; + + case ParseNodeType.Class: + return [...node.d.decorators, node.d.name, node.d.typeParams, ...node.d.arguments, node.d.suite]; + + case ParseNodeType.Comprehension: + return [node.d.expr, ...node.d.forIfNodes]; + + case ParseNodeType.ComprehensionFor: + return [node.d.targetExpr, node.d.iterableExpr]; + + case ParseNodeType.ComprehensionIf: + return [node.d.testExpr]; + + case ParseNodeType.Constant: + return []; + + case ParseNodeType.Continue: + return []; + + case ParseNodeType.Decorator: + return [node.d.expr]; + + case ParseNodeType.Del: + return node.d.targets; + + case ParseNodeType.Dictionary: + return node.d.items; + + case ParseNodeType.DictionaryExpandEntry: + return [node.d.expr]; + + case ParseNodeType.DictionaryKeyEntry: + return [node.d.keyExpr, node.d.valueExpr]; + + case ParseNodeType.Ellipsis: + return []; + + case ParseNodeType.If: + return [node.d.testExpr, node.d.ifSuite, node.d.elseSuite]; + + case ParseNodeType.Import: + return node.d.list; + + case ParseNodeType.ImportAs: + return [node.d.module, node.d.alias]; + + case ParseNodeType.ImportFrom: + return [node.d.module, ...node.d.imports]; + + case ParseNodeType.ImportFromAs: + return [node.d.name, node.d.alias]; 
+ + case ParseNodeType.Index: + return [node.d.leftExpr, ...node.d.items]; + + case ParseNodeType.Except: + return [node.d.typeExpr, node.d.name, node.d.exceptSuite]; + + case ParseNodeType.For: + return [node.d.targetExpr, node.d.iterableExpr, node.d.forSuite, node.d.elseSuite]; + + case ParseNodeType.FormatString: + return [...node.d.fieldExprs, ...(node.d.formatExprs ?? [])]; + + case ParseNodeType.Function: + return [ + ...node.d.decorators, + node.d.name, + node.d.typeParams, + ...node.d.params, + node.d.returnAnnotation, + node.d.funcAnnotationComment, + node.d.suite, + ]; + + case ParseNodeType.FunctionAnnotation: + return [...node.d.paramAnnotations, node.d.returnAnnotation]; + + case ParseNodeType.Global: + return node.d.targets; + + case ParseNodeType.Lambda: + return [...node.d.params, node.d.expr]; + + case ParseNodeType.List: + return node.d.items; + + case ParseNodeType.Match: + return [node.d.expr, ...node.d.cases]; + + case ParseNodeType.MemberAccess: + return [node.d.leftExpr, node.d.member]; + + case ParseNodeType.ModuleName: + return node.d.nameParts; + + case ParseNodeType.Module: + return [...node.d.statements]; + + case ParseNodeType.Name: + return []; + + case ParseNodeType.Nonlocal: + return node.d.targets; + + case ParseNodeType.Number: + return []; + + case ParseNodeType.Parameter: + return [node.d.name, node.d.annotation, node.d.annotationComment, node.d.defaultValue]; + + case ParseNodeType.Pass: + return []; + + case ParseNodeType.PatternAs: + return [...node.d.orPatterns, node.d.target]; + + case ParseNodeType.PatternClass: + return [node.d.className, ...node.d.args]; + + case ParseNodeType.PatternClassArgument: + return [node.d.name, node.d.pattern]; + + case ParseNodeType.PatternCapture: + return [node.d.target]; + + case ParseNodeType.PatternLiteral: + return [node.d.expr]; + + case ParseNodeType.PatternMappingExpandEntry: + return [node.d.target]; + + case ParseNodeType.PatternMappingKeyEntry: + return [node.d.keyPattern, 
node.d.valuePattern]; + + case ParseNodeType.PatternMapping: + return [...node.d.entries]; + + case ParseNodeType.PatternSequence: + return [...node.d.entries]; + + case ParseNodeType.PatternValue: + return [node.d.expr]; + + case ParseNodeType.Raise: + return [node.d.expr, node.d.fromExpr]; + + case ParseNodeType.Return: + return [node.d.expr]; + + case ParseNodeType.Set: + return node.d.items; + + case ParseNodeType.Slice: + return [node.d.startValue, node.d.endValue, node.d.stepValue]; + + case ParseNodeType.StatementList: + return node.d.statements; + + case ParseNodeType.StringList: + return [node.d.annotation, ...node.d.strings]; + + case ParseNodeType.String: + return []; + + case ParseNodeType.Suite: + return [...node.d.statements]; + + case ParseNodeType.Ternary: + return [node.d.ifExpr, node.d.testExpr, node.d.elseExpr]; + + case ParseNodeType.Tuple: + return node.d.items; + + case ParseNodeType.Try: + return [node.d.trySuite, ...node.d.exceptClauses, node.d.elseSuite, node.d.finallySuite]; + + case ParseNodeType.TypeAlias: + return [node.d.name, node.d.typeParams, node.d.expr]; + + case ParseNodeType.TypeAnnotation: + return [node.d.valueExpr, node.d.annotation]; + + case ParseNodeType.TypeParameter: + return [node.d.name, node.d.boundExpr, node.d.defaultExpr]; + + case ParseNodeType.TypeParameterList: + return [...node.d.params]; + + case ParseNodeType.UnaryOperation: + return [node.d.expr]; + + case ParseNodeType.Unpack: + return [node.d.expr]; + + case ParseNodeType.While: + return [node.d.testExpr, node.d.whileSuite, node.d.elseSuite]; + + case ParseNodeType.With: + return [...node.d.withItems, node.d.suite]; + + case ParseNodeType.WithItem: + return [node.d.expr, node.d.target]; + + case ParseNodeType.Yield: + return [node.d.expr]; + + case ParseNodeType.YieldFrom: + return [node.d.expr]; + + default: + debug.assertNever(node, `Unknown node type ${node}`); + } +} + +// To use this class, create a subclass and override the +// visitXXX methods that 
you want to handle. +export class ParseTreeVisitor { + constructor(private readonly _default: T) { + // empty + } + + visit(node: ParseNode): T { + switch (node.nodeType) { + case ParseNodeType.Error: + return this.visitError(node); + + case ParseNodeType.Argument: + return this.visitArgument(node); + + case ParseNodeType.Assert: + return this.visitAssert(node); + + case ParseNodeType.AssignmentExpression: + return this.visitAssignmentExpression(node); + + case ParseNodeType.Assignment: + return this.visitAssignment(node); + + case ParseNodeType.AugmentedAssignment: + return this.visitAugmentedAssignment(node); + + case ParseNodeType.Await: + return this.visitAwait(node); + + case ParseNodeType.BinaryOperation: + return this.visitBinaryOperation(node); + + case ParseNodeType.Break: + return this.visitBreak(node); + + case ParseNodeType.Call: + return this.visitCall(node); + + case ParseNodeType.Case: + return this.visitCase(node); + + case ParseNodeType.Class: + return this.visitClass(node); + + case ParseNodeType.Comprehension: + return this.visitComprehension(node); + + case ParseNodeType.ComprehensionFor: + return this.visitComprehensionFor(node); + + case ParseNodeType.ComprehensionIf: + return this.visitComprehensionIf(node); + + case ParseNodeType.Constant: + return this.visitConstant(node); + + case ParseNodeType.Continue: + return this.visitContinue(node); + + case ParseNodeType.Decorator: + return this.visitDecorator(node); + + case ParseNodeType.Del: + return this.visitDel(node); + + case ParseNodeType.Dictionary: + return this.visitDictionary(node); + + case ParseNodeType.DictionaryExpandEntry: + return this.visitDictionaryExpandEntry(node); + + case ParseNodeType.DictionaryKeyEntry: + return this.visitDictionaryKeyEntry(node); + + case ParseNodeType.Ellipsis: + return this.visitEllipsis(node); + + case ParseNodeType.If: + return this.visitIf(node); + + case ParseNodeType.Import: + return this.visitImport(node); + + case ParseNodeType.ImportAs: + return 
this.visitImportAs(node); + + case ParseNodeType.ImportFrom: + return this.visitImportFrom(node); + + case ParseNodeType.ImportFromAs: + return this.visitImportFromAs(node); + + case ParseNodeType.Index: + return this.visitIndex(node); + + case ParseNodeType.Except: + return this.visitExcept(node); + + case ParseNodeType.For: + return this.visitFor(node); + + case ParseNodeType.FormatString: + return this.visitFormatString(node); + + case ParseNodeType.Function: + return this.visitFunction(node); + + case ParseNodeType.FunctionAnnotation: + return this.visitFunctionAnnotation(node); + + case ParseNodeType.Global: + return this.visitGlobal(node); + + case ParseNodeType.Lambda: + return this.visitLambda(node); + + case ParseNodeType.List: + return this.visitList(node); + + case ParseNodeType.Match: + return this.visitMatch(node); + + case ParseNodeType.MemberAccess: + return this.visitMemberAccess(node); + + case ParseNodeType.ModuleName: + return this.visitModuleName(node); + + case ParseNodeType.Module: + return this.visitModule(node); + + case ParseNodeType.Name: + return this.visitName(node); + + case ParseNodeType.Nonlocal: + return this.visitNonlocal(node); + + case ParseNodeType.Number: + return this.visitNumber(node); + + case ParseNodeType.Parameter: + return this.visitParameter(node); + + case ParseNodeType.Pass: + return this.visitPass(node); + + case ParseNodeType.PatternAs: + return this.visitPatternAs(node); + + case ParseNodeType.PatternClass: + return this.visitPatternClass(node); + + case ParseNodeType.PatternClassArgument: + return this.visitPatternClassArgument(node); + + case ParseNodeType.PatternCapture: + return this.visitPatternCapture(node); + + case ParseNodeType.PatternLiteral: + return this.visitPatternLiteral(node); + + case ParseNodeType.PatternMappingExpandEntry: + return this.visitPatternMappingExpandEntry(node); + + case ParseNodeType.PatternMappingKeyEntry: + return this.visitPatternMappingKeyEntry(node); + + case 
ParseNodeType.PatternMapping: + return this.visitPatternMapping(node); + + case ParseNodeType.PatternSequence: + return this.visitPatternSequence(node); + + case ParseNodeType.PatternValue: + return this.visitPatternValue(node); + + case ParseNodeType.Raise: + return this.visitRaise(node); + + case ParseNodeType.Return: + return this.visitReturn(node); + + case ParseNodeType.Set: + return this.visitSet(node); + + case ParseNodeType.Slice: + return this.visitSlice(node); + + case ParseNodeType.StatementList: + return this.visitStatementList(node); + + case ParseNodeType.StringList: + return this.visitStringList(node); + + case ParseNodeType.String: + return this.visitString(node); + + case ParseNodeType.Suite: + return this.visitSuite(node); + + case ParseNodeType.Ternary: + return this.visitTernary(node); + + case ParseNodeType.Tuple: + return this.visitTuple(node); + + case ParseNodeType.Try: + return this.visitTry(node); + + case ParseNodeType.TypeAlias: + return this.visitTypeAlias(node); + + case ParseNodeType.TypeAnnotation: + return this.visitTypeAnnotation(node); + + case ParseNodeType.TypeParameter: + return this.visitTypeParameter(node); + + case ParseNodeType.TypeParameterList: + return this.visitTypeParameterList(node); + + case ParseNodeType.UnaryOperation: + return this.visitUnaryOperation(node); + + case ParseNodeType.Unpack: + return this.visitUnpack(node); + + case ParseNodeType.While: + return this.visitWhile(node); + + case ParseNodeType.With: + return this.visitWith(node); + + case ParseNodeType.WithItem: + return this.visitWithItem(node); + + case ParseNodeType.Yield: + return this.visitYield(node); + + case ParseNodeType.YieldFrom: + return this.visitYieldFrom(node); + + default: + debug.assertNever(node, `Unknown node type ${node}`); + } + } + + // Override these methods as necessary. 
+ visitArgument(node: ArgumentNode) { + return this._default; + } + + visitAssert(node: AssertNode) { + return this._default; + } + + visitAssignment(node: AssignmentNode) { + return this._default; + } + + visitAssignmentExpression(node: AssignmentExpressionNode) { + return this._default; + } + + visitAugmentedAssignment(node: AugmentedAssignmentNode) { + return this._default; + } + + visitAwait(node: AwaitNode) { + return this._default; + } + + visitBinaryOperation(node: BinaryOperationNode) { + return this._default; + } + + visitBreak(node: BreakNode) { + return this._default; + } + + visitCall(node: CallNode) { + return this._default; + } + + visitCase(node: CaseNode) { + return this._default; + } + + visitClass(node: ClassNode) { + return this._default; + } + + visitComprehension(node: ComprehensionNode) { + return this._default; + } + + visitComprehensionFor(node: ComprehensionForNode) { + return this._default; + } + + visitComprehensionIf(node: ComprehensionIfNode) { + return this._default; + } + + visitContinue(node: ContinueNode) { + return this._default; + } + + visitConstant(node: ConstantNode) { + return this._default; + } + + visitDecorator(node: DecoratorNode) { + return this._default; + } + + visitDel(node: DelNode) { + return this._default; + } + + visitDictionary(node: DictionaryNode) { + return this._default; + } + + visitDictionaryKeyEntry(node: DictionaryKeyEntryNode) { + return this._default; + } + + visitDictionaryExpandEntry(node: DictionaryExpandEntryNode) { + return this._default; + } + + visitError(node: ErrorNode) { + return this._default; + } + + visitEllipsis(node: EllipsisNode) { + return this._default; + } + + visitIf(node: IfNode) { + return this._default; + } + + visitImport(node: ImportNode) { + return this._default; + } + + visitImportAs(node: ImportAsNode) { + return this._default; + } + + visitImportFrom(node: ImportFromNode) { + return this._default; + } + + visitImportFromAs(node: ImportFromAsNode) { + return this._default; + } 
+ + visitIndex(node: IndexNode) { + return this._default; + } + + visitExcept(node: ExceptNode) { + return this._default; + } + + visitFor(node: ForNode) { + return this._default; + } + + visitFormatString(node: FormatStringNode) { + return this._default; + } + + visitFunction(node: FunctionNode) { + return this._default; + } + + visitFunctionAnnotation(node: FunctionAnnotationNode) { + return this._default; + } + + visitGlobal(node: GlobalNode) { + return this._default; + } + + visitLambda(node: LambdaNode) { + return this._default; + } + + visitList(node: ListNode) { + return this._default; + } + + visitMatch(node: MatchNode) { + return this._default; + } + + visitMemberAccess(node: MemberAccessNode) { + return this._default; + } + + visitModule(node: ModuleNode) { + return this._default; + } + + visitModuleName(node: ModuleNameNode) { + return this._default; + } + + visitName(node: NameNode) { + return this._default; + } + + visitNonlocal(node: NonlocalNode) { + return this._default; + } + + visitNumber(node: NumberNode) { + return this._default; + } + + visitParameter(node: ParameterNode) { + return this._default; + } + + visitPass(node: PassNode) { + return this._default; + } + + visitPatternCapture(node: PatternCaptureNode) { + return this._default; + } + + visitPatternClass(node: PatternClassNode) { + return this._default; + } + + visitPatternClassArgument(node: PatternClassArgumentNode) { + return this._default; + } + + visitPatternAs(node: PatternAsNode) { + return this._default; + } + + visitPatternLiteral(node: PatternLiteralNode) { + return this._default; + } + + visitPatternMapping(node: PatternMappingNode) { + return this._default; + } + + visitPatternMappingExpandEntry(node: PatternMappingExpandEntryNode) { + return this._default; + } + + visitPatternMappingKeyEntry(node: PatternMappingKeyEntryNode) { + return this._default; + } + + visitPatternSequence(node: PatternSequenceNode) { + return this._default; + } + + visitPatternValue(node: 
PatternValueNode) { + return this._default; + } + + visitRaise(node: RaiseNode) { + return this._default; + } + + visitReturn(node: ReturnNode) { + return this._default; + } + + visitSet(node: SetNode) { + return this._default; + } + + visitSlice(node: SliceNode) { + return this._default; + } + + visitStatementList(node: StatementListNode) { + return this._default; + } + + visitString(node: StringNode) { + return this._default; + } + + visitStringList(node: StringListNode) { + return this._default; + } + + visitSuite(node: SuiteNode) { + return this._default; + } + + visitTernary(node: TernaryNode) { + return this._default; + } + + visitTuple(node: TupleNode) { + return this._default; + } + + visitTry(node: TryNode) { + return this._default; + } + + visitTypeAlias(node: TypeAliasNode) { + return this._default; + } + + visitTypeAnnotation(node: TypeAnnotationNode) { + return this._default; + } + + visitTypeParameter(node: TypeParameterNode) { + return this._default; + } + + visitTypeParameterList(node: TypeParameterListNode) { + return this._default; + } + + visitUnaryOperation(node: UnaryOperationNode) { + return this._default; + } + + visitUnpack(node: UnpackNode) { + return this._default; + } + + visitWhile(node: WhileNode) { + return this._default; + } + + visitWith(node: WithNode) { + return this._default; + } + + visitWithItem(node: WithItemNode) { + return this._default; + } + + visitYield(node: YieldNode) { + return this._default; + } + + visitYieldFrom(node: YieldFromNode) { + return this._default; + } +} + +// To use this class, create a subclass and override the +// visitXXX methods that you want to handle. 
+export class ParseTreeWalker extends ParseTreeVisitor { + constructor() { + super(/* default */ true); + } + + walk(node: ParseNode): void { + const childrenToWalk = this.visitNode(node); + if (childrenToWalk.length > 0) { + this.walkMultiple(childrenToWalk); + } + } + + walkMultiple(nodes: ParseNodeArray) { + nodes.forEach((node) => { + if (node) { + this.walk(node); + } + }); + } + + // If this.visit(node) returns true, all child nodes for the node are returned. + // If the method returns false, we assume that the handler has already handled the + // child nodes, so an empty list is returned. + visitNode(node: ParseNode): ParseNodeArray { + return this.visit(node) ? getChildNodes(node) : []; + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/patternMatching.ts b/python-parser/packages/pyright-internal/src/analyzer/patternMatching.ts new file mode 100644 index 00000000..4e4f05e9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/patternMatching.ts @@ -0,0 +1,2240 @@ +/* + * patternMatching.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Type evaluation logic for evaluating and narrowing types + * related to "match" and "case" statements as documented in + * PEP 634. 
+ */ + +import { appendArray } from '../common/collectionUtils'; +import { assert } from '../common/debug'; +import { DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { LocAddendum, LocMessage } from '../localization/localize'; +import { + ArgCategory, + ExpressionNode, + ParseNode, + ParseNodeType, + PatternAsNode, + PatternAtomNode, + PatternClassArgumentNode, + PatternClassNode, + PatternLiteralNode, + PatternMappingNode, + PatternSequenceNode, + PatternValueNode, +} from '../parser/parseNodes'; +import { CodeFlowReferenceExpressionNode } from './codeFlowTypes'; +import { addConstraintsForExpectedType } from './constraintSolver'; +import { ConstraintTracker } from './constraintTracker'; +import { getTypeVarScopesForNode, isMatchingExpression } from './parseTreeUtils'; +import { getTypedDictMembersForClass } from './typedDicts'; +import { EvalFlags, TypeEvaluator, TypeResult } from './typeEvaluatorTypes'; +import { + enumerateLiteralsForType, + narrowTypeForDiscriminatedDictEntryComparison, + narrowTypeForDiscriminatedLiteralFieldComparison, + narrowTypeForDiscriminatedTupleComparison, +} from './typeGuards'; +import { + AnyType, + ClassType, + FunctionType, + FunctionTypeFlags, + NeverType, + Type, + TypeBase, + TypedDictEntry, + UnknownType, + combineTypes, + isAnyOrUnknown, + isClass, + isClassInstance, + isInstantiableClass, + isNever, + isSameWithoutLiteralValue, + isTypeSame, + isTypeVarTuple, + isUnknown, + isUnpackedTypeVar, + isUnpackedTypeVarTuple, +} from './types'; +import { + addConditionToType, + containsAnyOrUnknown, + convertToInstance, + doForEachSubtype, + getTypeCondition, + getTypeVarScopeIds, + getUnknownTypeForCallable, + isLiteralType, + isLiteralTypeOrUnion, + isMetaclassInstance, + isNoneInstance, + isPartlyUnknown, + isTupleClass, + isUnboundedTupleClass, + lookUpClassMember, + mapSubtypes, + partiallySpecializeType, + preserveUnknown, + specializeTupleClass, + 
specializeWithUnknownTypeArgs, + transformPossibleRecursiveTypeAlias, +} from './typeUtils'; + +// PEP 634 indicates that several built-in classes are handled differently +// when used with class pattern matching. +const classPatternSpecialCases = [ + 'builtins.bool', + 'builtins.bytearray', + 'builtins.bytes', + 'builtins.dict', + 'builtins.float', + 'builtins.frozenset', + 'builtins.int', + 'builtins.list', + 'builtins.set', + 'builtins.str', + 'builtins.tuple', +]; + +// There are cases where sequence pattern matching of tuples with +// large unions can blow up and cause hangs. This constant limits +// the total number of subtypes that can be generated during type +// narrowing for sequence patterns before the narrowed type is +// converted to Any. This is tuned empirically to provide a reasonable +// performance cutoff. +const maxSequencePatternTupleExpansionSubtypes = 128; + +interface SequencePatternInfo { + subtype: Type; + isDefiniteNoMatch: boolean; + isPotentialNoMatch?: boolean; + entryTypes: Type[]; + isIndeterminateLength?: boolean; + isTuple?: boolean; + isUnboundedTuple?: boolean; +} + +interface MappingPatternInfo { + subtype: Type; + isDefinitelyMapping: boolean; + isDefinitelyNotMapping: boolean; + typedDict?: ClassType; + dictTypeArgs?: { + key: Type; + value: Type; + }; +} + +type PatternSubtypeNarrowingCallback = (type: Type) => TypeResult | undefined; + +export function narrowTypeBasedOnPattern( + evaluator: TypeEvaluator, + type: Type, + pattern: PatternAtomNode, + isPositiveTest: boolean +): Type { + switch (pattern.nodeType) { + case ParseNodeType.PatternSequence: { + return narrowTypeBasedOnSequencePattern(evaluator, type, pattern, isPositiveTest); + } + + case ParseNodeType.PatternLiteral: { + return narrowTypeBasedOnLiteralPattern(evaluator, type, pattern, isPositiveTest); + } + + case ParseNodeType.PatternClass: { + return narrowTypeBasedOnClassPattern(evaluator, type, pattern, isPositiveTest); + } + + case ParseNodeType.PatternAs: { + 
return narrowTypeBasedOnAsPattern(evaluator, type, pattern, isPositiveTest); + } + + case ParseNodeType.PatternMapping: { + return narrowTypeBasedOnMappingPattern(evaluator, type, pattern, isPositiveTest); + } + + case ParseNodeType.PatternValue: { + return narrowTypeBasedOnValuePattern(evaluator, type, pattern, isPositiveTest); + } + + case ParseNodeType.PatternCapture: { + // A capture captures everything, so nothing remains in the negative case. + return isPositiveTest ? type : NeverType.createNever(); + } + + case ParseNodeType.Error: { + return type; + } + } +} + +// Determines whether this pattern (or part of the pattern) in +// this case statement will never be matched. +export function checkForUnusedPattern(evaluator: TypeEvaluator, pattern: PatternAtomNode, subjectType: Type): void { + if (isNever(subjectType)) { + reportUnnecessaryPattern(evaluator, pattern, subjectType); + } else if (pattern.nodeType === ParseNodeType.PatternAs && pattern.d.orPatterns.length > 1) { + // Check each of the or patterns separately. 
+ pattern.d.orPatterns.forEach((orPattern) => { + const subjectTypeMatch = narrowTypeBasedOnPattern( + evaluator, + subjectType, + orPattern, + /* isPositiveTest */ true + ); + + if (isNever(subjectTypeMatch)) { + reportUnnecessaryPattern(evaluator, orPattern, subjectType); + } + + subjectType = narrowTypeBasedOnPattern(evaluator, subjectType, orPattern, /* isPositiveTest */ false); + }); + } else { + const subjectTypeMatch = narrowTypeBasedOnPattern(evaluator, subjectType, pattern, /* isPositiveTest */ true); + + if (isNever(subjectTypeMatch)) { + reportUnnecessaryPattern(evaluator, pattern, subjectType); + } + } +} + +function narrowTypeBasedOnSequencePattern( + evaluator: TypeEvaluator, + type: Type, + pattern: PatternSequenceNode, + isPositiveTest: boolean +): Type { + let usingTupleExpansion = false; + type = transformPossibleRecursiveTypeAlias(type); + let sequenceInfo = getSequencePatternInfo(evaluator, pattern, type); + + // Further narrow based on pattern entry types. + sequenceInfo = sequenceInfo.filter((entry) => { + if (entry.isDefiniteNoMatch) { + if (isPositiveTest) { + return false; + } else { + return true; + } + } + + let isPlausibleMatch = true; + let isDefiniteMatch = true; + const narrowedEntryTypes: Type[] = []; + const unnarrowedEntryTypes: Type[] = []; + let canNarrowTuple = entry.isTuple; + + // Don't attempt to narrow tuples in the negative case if the subject + // contains indeterminate-length entries or the tuple is of indeterminate + // length. + if (!isPositiveTest) { + if (entry.isIndeterminateLength || entry.isUnboundedTuple) { + canNarrowTuple = false; + } + + if (isClassInstance(entry.subtype) && entry.subtype.priv.tupleTypeArgs) { + const unboundedIndex = entry.subtype.priv.tupleTypeArgs.findIndex((typeArg) => typeArg.isUnbounded); + + if (unboundedIndex >= 0) { + // If the pattern includes a "star" entry that aligns exactly with + // the corresponding unbounded entry in the tuple, we can narrow + // the tuple type. 
+ if (pattern.d.starEntryIndex === undefined || pattern.d.starEntryIndex !== unboundedIndex) { + canNarrowTuple = false; + } + } + } + } + + // If the subject has an indeterminate length but the pattern does not accept + // an arbitrary number of entries or accepts at least one non-star entry, + // we can't prove that it's a definite match. + if (entry.isIndeterminateLength) { + if (pattern.d.entries.length !== 1 || pattern.d.starEntryIndex !== 0) { + isDefiniteMatch = false; + } + } + + const negativeNarrowedDims: number[] = []; + pattern.d.entries.forEach((sequenceEntry, index) => { + const entryType = getTypeOfPatternSequenceEntry( + evaluator, + pattern, + entry, + index, + pattern.d.entries.length, + pattern.d.starEntryIndex, + /* unpackStarEntry */ true + ); + + unnarrowedEntryTypes.push(entryType); + const narrowedEntryType = narrowTypeBasedOnPattern(evaluator, entryType, sequenceEntry, isPositiveTest); + + if (isPositiveTest) { + if (index === pattern.d.starEntryIndex) { + if ( + isClassInstance(narrowedEntryType) && + narrowedEntryType.priv.tupleTypeArgs && + !isUnboundedTupleClass(narrowedEntryType) && + narrowedEntryType.priv.tupleTypeArgs + ) { + appendArray( + narrowedEntryTypes, + narrowedEntryType.priv.tupleTypeArgs.map((t) => t.type) + ); + } else { + narrowedEntryTypes.push(narrowedEntryType); + canNarrowTuple = false; + } + } else { + narrowedEntryTypes.push(narrowedEntryType); + + if (isNever(narrowedEntryType)) { + isPlausibleMatch = false; + } + } + } else { + if (entry.isPotentialNoMatch) { + isDefiniteMatch = false; + } + + if (!isNever(narrowedEntryType)) { + isDefiniteMatch = false; + + // Record which entries were narrowed in the negative case + // by storing their indexes. If more than one is narrowed, + // we need to perform tuple expansion to represent the + // resulting narrowed type. 
+ negativeNarrowedDims.push(index); + narrowedEntryTypes.push(narrowedEntryType); + } else { + narrowedEntryTypes.push(entryType); + } + + if (index === pattern.d.starEntryIndex) { + canNarrowTuple = false; + } + } + }); + + if (pattern.d.entries.length === 0) { + // If the pattern is an empty sequence, use the entry types. + if (entry.entryTypes.length > 0) { + narrowedEntryTypes.push(combineTypes(entry.entryTypes)); + } + + if (entry.isPotentialNoMatch) { + isDefiniteMatch = false; + } + } + + if (!isPositiveTest) { + // If the positive case is a definite match, the negative case can + // eliminate this subtype entirely. + if (isDefiniteMatch) { + return false; + } + + // Can we narrow a tuple? + if (canNarrowTuple && negativeNarrowedDims.length > 0) { + const tupleClassType = evaluator.getBuiltInType(pattern, 'tuple'); + if (tupleClassType && isInstantiableClass(tupleClassType)) { + // Expand the tuple in the dimensions that were narrowed. + // Start with the fully-narrowed set of entries. + const expandedEntryTypes = []; + + for (const dim of negativeNarrowedDims) { + const newEntryTypes = [...unnarrowedEntryTypes]; + newEntryTypes[dim] = narrowedEntryTypes[dim]; + expandedEntryTypes.push(newEntryTypes); + } + + entry.subtype = combineTypes( + expandedEntryTypes.map((entryTypes) => { + return ClassType.cloneAsInstance( + specializeTupleClass( + tupleClassType, + entryTypes.map((t) => { + return { type: t, isUnbounded: false }; + }) + ) + ); + }) + ); + + // Note that we're using tuple expansion in case we + // need to limit the number of subtypes generated. + usingTupleExpansion = true; + } + } + + return true; + } + + if (isPlausibleMatch) { + // If this is a tuple, we can narrow it to a specific tuple type. + // Other sequences cannot be narrowed because we don't know if they + // are immutable (covariant). 
+ if (canNarrowTuple) { + const tupleClassType = evaluator.getBuiltInType(pattern, 'tuple'); + if (tupleClassType && isInstantiableClass(tupleClassType)) { + entry.subtype = ClassType.cloneAsInstance( + specializeTupleClass( + tupleClassType, + narrowedEntryTypes.map((t) => { + return { type: t, isUnbounded: false }; + }) + ) + ); + } + } + + // If this is a supertype of Sequence, we can narrow it to a Sequence type. + if (entry.isPotentialNoMatch && !entry.isTuple) { + const sequenceType = evaluator.getTypingType(pattern, 'Sequence'); + if (sequenceType && isInstantiableClass(sequenceType)) { + let typeArgType = evaluator.stripLiteralValue(combineTypes(narrowedEntryTypes)); + + // If the type is a union that contains Any or Unknown, remove the other types + // before wrapping it in a Sequence. + typeArgType = containsAnyOrUnknown(typeArgType, /* recurse */ false) ?? typeArgType; + + entry.subtype = ClassType.cloneAsInstance(ClassType.specialize(sequenceType, [typeArgType])); + } + } + } + + return isPlausibleMatch; + }); + + return combineTypes( + sequenceInfo.map((entry) => entry.subtype), + { maxSubtypeCount: usingTupleExpansion ? 
maxSequencePatternTupleExpansionSubtypes : undefined } + ); +} + +function narrowTypeBasedOnAsPattern( + evaluator: TypeEvaluator, + type: Type, + pattern: PatternAsNode, + isPositiveTest: boolean +): Type { + let remainingType = type; + + if (!isPositiveTest) { + pattern.d.orPatterns.forEach((subpattern) => { + remainingType = narrowTypeBasedOnPattern(evaluator, remainingType, subpattern, /* isPositiveTest */ false); + }); + return remainingType; + } + + const narrowedTypes = pattern.d.orPatterns.map((subpattern) => { + const narrowedSubtype = narrowTypeBasedOnPattern( + evaluator, + remainingType, + subpattern, + /* isPositiveTest */ true + ); + remainingType = narrowTypeBasedOnPattern(evaluator, remainingType, subpattern, /* isPositiveTest */ false); + return narrowedSubtype; + }); + return combineTypes(narrowedTypes); +} + +function narrowTypeBasedOnMappingPattern( + evaluator: TypeEvaluator, + type: Type, + pattern: PatternMappingNode, + isPositiveTest: boolean +): Type { + type = transformPossibleRecursiveTypeAlias(type); + + if (!isPositiveTest) { + // Handle the case where the pattern consists only of a "**x" entry. + if ( + pattern.d.entries.length === 1 && + pattern.d.entries[0].nodeType === ParseNodeType.PatternMappingExpandEntry + ) { + const mappingInfo = getMappingPatternInfo(evaluator, type, pattern); + return combineTypes(mappingInfo.filter((m) => !m.isDefinitelyMapping).map((m) => m.subtype)); + } + + if (pattern.d.entries.length !== 1 || pattern.d.entries[0].nodeType !== ParseNodeType.PatternMappingKeyEntry) { + return type; + } + + // Handle the case where the type is a union that includes a TypedDict with + // a field discriminated by a literal. 
+ const keyPattern = pattern.d.entries[0].d.keyPattern; + const valuePattern = pattern.d.entries[0].d.valuePattern; + if ( + keyPattern.nodeType !== ParseNodeType.PatternLiteral || + valuePattern.nodeType !== ParseNodeType.PatternAs || + !valuePattern.d.orPatterns.every((orPattern) => orPattern.nodeType === ParseNodeType.PatternLiteral) + ) { + return type; + } + + const keyType = evaluator.getTypeOfExpression(keyPattern.d.expr).type; + + // The key type must be a str literal. + if ( + !isClassInstance(keyType) || + !ClassType.isBuiltIn(keyType, 'str') || + keyType.priv.literalValue === undefined + ) { + return type; + } + const keyValue = keyType.priv.literalValue as string; + + const valueTypes = valuePattern.d.orPatterns.map( + (orPattern) => evaluator.getTypeOfExpression((orPattern as PatternLiteralNode).d.expr).type + ); + + return mapSubtypes(type, (subtype) => { + if (isClassInstance(subtype) && ClassType.isTypedDictClass(subtype)) { + const typedDictMembers = getTypedDictMembersForClass(evaluator, subtype, /* allowNarrowed */ true); + const member = typedDictMembers.knownItems.get(keyValue); + + if (member && (member.isRequired || member.isProvided) && isClassInstance(member.valueType)) { + const memberValueType = member.valueType; + + // If there's at least one literal value pattern that matches + // the literal type of the member, we can eliminate this type. + if ( + valueTypes.some( + (valueType) => + isClassInstance(valueType) && + ClassType.isSameGenericClass(valueType, memberValueType) && + valueType.priv.literalValue === memberValueType.priv.literalValue + ) + ) { + return undefined; + } + } + } + + return subtype; + }); + } + + let mappingInfo = getMappingPatternInfo(evaluator, type, pattern); + + // Further narrow based on pattern entry types. 
+ mappingInfo = mappingInfo.filter((mappingSubtypeInfo) => { + if (mappingSubtypeInfo.isDefinitelyNotMapping) { + return false; + } + + let isPlausibleMatch = true; + + pattern.d.entries.forEach((mappingEntry) => { + if (mappingSubtypeInfo.typedDict) { + if (mappingEntry.nodeType === ParseNodeType.PatternMappingKeyEntry) { + const narrowedKeyType = narrowTypeBasedOnPattern( + evaluator, + evaluator.getBuiltInObject(pattern, 'str'), + mappingEntry.d.keyPattern, + isPositiveTest + ); + + if (isNever(narrowedKeyType)) { + isPlausibleMatch = false; + } + + const valueType = mapSubtypes(narrowedKeyType, (keySubtype) => { + if (isAnyOrUnknown(keySubtype)) { + return keySubtype; + } + + if (isClassInstance(keySubtype) && ClassType.isBuiltIn(keySubtype, 'str')) { + if (!isLiteralType(keySubtype)) { + return UnknownType.create(); + } + + const tdEntries = getTypedDictMembersForClass(evaluator, mappingSubtypeInfo.typedDict!); + const valueEntry = tdEntries.knownItems.get(keySubtype.priv.literalValue as string); + if (valueEntry) { + const narrowedValueType = narrowTypeBasedOnPattern( + evaluator, + valueEntry.valueType, + mappingEntry.d.valuePattern, + /* isPositiveTest */ true + ); + if (!isNever(narrowedValueType)) { + // If this is a "NotRequired" entry that has not yet been demonstrated + // to be present, we can mark it as "provided" at this point. + if ( + !valueEntry.isRequired && + !valueEntry.isProvided && + isTypeSame(mappingSubtypeInfo.subtype, mappingSubtypeInfo.typedDict!) + ) { + const newNarrowedEntriesMap = new Map( + mappingSubtypeInfo.typedDict!.priv.typedDictNarrowedEntries ?? [] + ); + newNarrowedEntriesMap.set(keySubtype.priv.literalValue as string, { + valueType: valueEntry.valueType, + isReadOnly: valueEntry.isReadOnly, + isRequired: false, + isProvided: true, + }); + + // Clone the TypedDict object with the new entries. 
+ mappingSubtypeInfo.subtype = ClassType.cloneAsInstance( + ClassType.cloneForNarrowedTypedDictEntries( + ClassType.cloneAsInstantiable(mappingSubtypeInfo.typedDict!), + newNarrowedEntriesMap + ) + ); + mappingSubtypeInfo.typedDict = mappingSubtypeInfo.subtype; + } + + return narrowedValueType; + } + } + } + + return undefined; + }); + + if (isNever(valueType)) { + isPlausibleMatch = false; + } + } + } else if (mappingSubtypeInfo.dictTypeArgs) { + if (mappingEntry.nodeType === ParseNodeType.PatternMappingKeyEntry) { + const narrowedKeyType = narrowTypeBasedOnPattern( + evaluator, + mappingSubtypeInfo.dictTypeArgs.key, + mappingEntry.d.keyPattern, + isPositiveTest + ); + const narrowedValueType = narrowTypeBasedOnPattern( + evaluator, + mappingSubtypeInfo.dictTypeArgs.value, + mappingEntry.d.valuePattern, + isPositiveTest + ); + if (isNever(narrowedKeyType) || isNever(narrowedValueType)) { + isPlausibleMatch = false; + } + } + } + }); + + return isPlausibleMatch; + }); + + return combineTypes(mappingInfo.map((entry) => entry.subtype)); +} + +// Looks up the "__match_args__" class member to determine the names of +// the attributes used for class pattern matching. +function getPositionalMatchArgNames(evaluator: TypeEvaluator, type: ClassType): string[] { + const matchArgsMemberInfo = lookUpClassMember(type, '__match_args__'); + if (matchArgsMemberInfo) { + const matchArgsType = evaluator.getTypeOfMember(matchArgsMemberInfo); + if ( + isClassInstance(matchArgsType) && + isTupleClass(matchArgsType) && + !isUnboundedTupleClass(matchArgsType) && + matchArgsType.priv.tupleTypeArgs + ) { + const tupleArgs = matchArgsType.priv.tupleTypeArgs; + + // Are all the args string literals? 
+ if ( + tupleArgs.every( + (arg) => + isClassInstance(arg.type) && ClassType.isBuiltIn(arg.type, 'str') && isLiteralType(arg.type) + ) + ) { + return tupleArgs.map((arg) => (arg.type as ClassType).priv.literalValue as string); + } + } + } + + return []; +} + +function narrowTypeBasedOnLiteralPattern( + evaluator: TypeEvaluator, + type: Type, + pattern: PatternLiteralNode, + isPositiveTest: boolean +): Type { + const literalType = evaluator.getTypeOfExpression(pattern.d.expr).type; + + if (!isPositiveTest) { + return evaluator.mapSubtypesExpandTypeVars(type, /* options */ undefined, (expandedSubtype) => { + if ( + isClassInstance(literalType) && + isLiteralType(literalType) && + isClassInstance(expandedSubtype) && + isLiteralType(expandedSubtype) && + evaluator.assignType(literalType, expandedSubtype) + ) { + return undefined; + } + + if (isNoneInstance(expandedSubtype) && isNoneInstance(literalType)) { + return undefined; + } + + // Narrow a non-literal bool based on a literal bool pattern. + if ( + isClassInstance(expandedSubtype) && + ClassType.isBuiltIn(expandedSubtype, 'bool') && + expandedSubtype.priv.literalValue === undefined && + isClassInstance(literalType) && + ClassType.isBuiltIn(literalType, 'bool') && + literalType.priv.literalValue !== undefined + ) { + return ClassType.cloneWithLiteral(literalType, !(literalType.priv.literalValue as boolean)); + } + + return expandedSubtype; + }); + } + + return evaluator.mapSubtypesExpandTypeVars(type, /* options */ undefined, (expandedSubtype, unexpandedSubtype) => { + if (evaluator.assignType(expandedSubtype, literalType)) { + // We have to be careful here because the runtime uses an equality + // check, but the expandedSubtype could be a superclass that is not + // the literal type. For example, the expanded subtype might be float + // and the literal type is Literal[3]. A value of 3.0 will match this + // pattern, but we cannot narrow it to Literal[3] in this case. 
+ if ( + !isClassInstance(literalType) || + !isLiteralType(literalType) || + isTypeSame(evaluator.stripLiteralValue(expandedSubtype), evaluator.stripLiteralValue(literalType)) + ) { + return literalType; + } + + return expandedSubtype; + } + + // See if the subtype is a subclass of the literal's class. For example, + // if it's a literal str, see if the subtype is subclass of str. + if (isClassInstance(literalType) && isClassInstance(expandedSubtype)) { + if (isLiteralType(literalType) && !isLiteralType(expandedSubtype)) { + if ( + evaluator.assignType( + ClassType.cloneWithLiteral(literalType, /* value */ undefined), + expandedSubtype + ) + ) { + return expandedSubtype; + } + } else if (evaluator.assignType(literalType, expandedSubtype)) { + return expandedSubtype; + } + } + return undefined; + }); +} + +function narrowTypeBasedOnClassPattern( + evaluator: TypeEvaluator, + type: Type, + pattern: PatternClassNode, + isPositiveTest: boolean +): Type { + let exprType = evaluator.getTypeOfExpression(pattern.d.className, EvalFlags.CallBaseDefaults).type; + + // If this is a class (but not a type alias that refers to a class), + // specialize it with Unknown type arguments. + if (isClass(exprType) && !exprType.props?.typeAliasInfo) { + exprType = ClassType.cloneRemoveTypePromotions(exprType); + exprType = specializeWithUnknownTypeArgs(exprType, evaluator.getTupleClassType()); + } + + // Are there any positional arguments? If so, try to get the mappings for + // these arguments by fetching the __match_args__ symbol from the class. + let positionalArgNames: string[] = []; + if (pattern.d.args.some((arg) => !arg.d.name) && isInstantiableClass(exprType)) { + positionalArgNames = getPositionalMatchArgNames(evaluator, exprType); + } + + if (!isPositiveTest) { + // Don't attempt to narrow if the class type is a more complex type (e.g. a TypeVar or union). 
+ if (!isInstantiableClass(exprType)) { + return type; + } + + let classType = exprType; + + if (classType.shared.typeParams.length > 0) { + classType = ClassType.specialize(classType, /* typeArgs */ undefined); + } + + const classInstance = ClassType.cloneAsInstance(classType); + const isPatternMetaclass = isMetaclassInstance(classInstance); + + return evaluator.mapSubtypesExpandTypeVars( + type, + { + expandCallback: (type) => evaluator.expandPromotionTypes(pattern, type), + }, + (subjectSubtypeExpanded, subjectSubtypeUnexpanded) => { + // Handle the case where the class pattern references type() or a subtype thereof + // and the subject type is an instantiable class itself. + if (isPatternMetaclass && isInstantiableClass(subjectSubtypeExpanded)) { + const metaclass = subjectSubtypeExpanded.shared.effectiveMetaclass ?? UnknownType.create(); + if (isInstantiableClass(classType) && evaluator.assignType(classType, metaclass)) { + return undefined; + } + + return subjectSubtypeExpanded; + } + + // Handle Callable specially. + if ( + !isAnyOrUnknown(subjectSubtypeExpanded) && + isInstantiableClass(classType) && + ClassType.isBuiltIn(classType, 'Callable') + ) { + if (evaluator.assignType(getUnknownTypeForCallable(), subjectSubtypeExpanded)) { + return undefined; + } + } + + if (!isNoneInstance(subjectSubtypeExpanded) && !isClassInstance(subjectSubtypeExpanded)) { + return subjectSubtypeUnexpanded; + } + + // Handle NoneType specially. + if ( + isNoneInstance(subjectSubtypeExpanded) && + isInstantiableClass(classType) && + ClassType.isBuiltIn(classType, 'NoneType') + ) { + return undefined; + } + + if (!evaluator.assignType(classInstance, subjectSubtypeExpanded)) { + return subjectSubtypeExpanded; + } + + // Handle literal types specially. 
+ if (isClassInstance(subjectSubtypeExpanded) && isLiteralType(subjectSubtypeExpanded)) { + return undefined; + } + + if (pattern.d.args.length === 0) { + if (isClass(classInstance) && isClass(subjectSubtypeExpanded)) { + // We know that this match will always succeed, so we can + // eliminate this subtype. + return undefined; + } + + return subjectSubtypeExpanded; + } + + // We might be able to narrow further based on arguments, but only + // if the types match exactly, the subject subtype is a final class (and + // therefore cannot be subclassed), or the pattern class is a protocol + // class. + if (!evaluator.assignType(subjectSubtypeExpanded, classInstance)) { + if ( + isClass(subjectSubtypeExpanded) && + !ClassType.isFinal(subjectSubtypeExpanded) && + !ClassType.isProtocolClass(classInstance) + ) { + return subjectSubtypeExpanded; + } + } + + for (let index = 0; index < pattern.d.args.length; index++) { + const narrowedArgType = narrowTypeOfClassPatternArg( + evaluator, + pattern.d.args[index], + index, + positionalArgNames, + subjectSubtypeExpanded, + isPositiveTest + ); + + if (!isNever(narrowedArgType)) { + return subjectSubtypeUnexpanded; + } + } + + // We've completely eliminated the type based on the arguments. + return undefined; + } + ); + } + + if (!TypeBase.isInstantiable(exprType) && !isNever(exprType)) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocAddendum.typeNotClass().format({ type: evaluator.printType(exprType) }), + pattern.d.className + ); + + return isPositiveTest ? UnknownType.create() : type; + } else if (isInstantiableClass(exprType)) { + if (ClassType.isProtocolClass(exprType) && !ClassType.isRuntimeCheckable(exprType)) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocAddendum.protocolRequiresRuntimeCheckable(), + pattern.d.className + ); + + return isPositiveTest ? 
UnknownType.create() : type; + } else if (ClassType.isTypedDictClass(exprType)) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictInClassPattern(), + pattern.d.className + ); + + return isPositiveTest ? UnknownType.create() : type; + } + } + + return evaluator.mapSubtypesExpandTypeVars( + exprType, + /* options */ undefined, + (expandedSubtype, unexpandedSubtype) => { + if (isAnyOrUnknown(expandedSubtype)) { + return unexpandedSubtype; + } + + if (isInstantiableClass(expandedSubtype)) { + const expandedSubtypeInstance = convertToInstance(expandedSubtype); + const isPatternMetaclass = isMetaclassInstance(expandedSubtypeInstance); + + return evaluator.mapSubtypesExpandTypeVars(type, /* options */ undefined, (subjectSubtypeExpanded) => { + if (isAnyOrUnknown(subjectSubtypeExpanded)) { + if (isInstantiableClass(expandedSubtype) && ClassType.isBuiltIn(expandedSubtype, 'Callable')) { + // Convert to an unknown callable type. + const unknownCallable = FunctionType.createSynthesizedInstance( + '', + FunctionTypeFlags.GradualCallableForm + ); + FunctionType.addDefaultParams( + unknownCallable, + /* useUnknown */ isUnknown(subjectSubtypeExpanded) + ); + unknownCallable.shared.declaredReturnType = subjectSubtypeExpanded; + return unknownCallable; + } + + return convertToInstance(unexpandedSubtype); + } + + // Handle the case where the class pattern references type() or a subtype thereof + // and the subject type is a class itself. + if (isPatternMetaclass && isInstantiableClass(subjectSubtypeExpanded)) { + const metaclass = subjectSubtypeExpanded.shared.effectiveMetaclass ?? UnknownType.create(); + if ( + evaluator.assignType(expandedSubtype, metaclass) || + evaluator.assignType(metaclass, expandedSubtype) + ) { + return subjectSubtypeExpanded; + } + + return undefined; + } + + // Handle NoneType specially. 
+ if ( + isNoneInstance(subjectSubtypeExpanded) && + isInstantiableClass(expandedSubtype) && + ClassType.isBuiltIn(expandedSubtype, 'NoneType') + ) { + return subjectSubtypeExpanded; + } + + // Handle Callable specially. + if (isInstantiableClass(expandedSubtype) && ClassType.isBuiltIn(expandedSubtype, 'Callable')) { + const callableType = getUnknownTypeForCallable(); + + if (evaluator.assignType(callableType, subjectSubtypeExpanded)) { + return subjectSubtypeExpanded; + } + + const subjObjType = convertToInstance(subjectSubtypeExpanded); + if (evaluator.assignType(subjObjType, callableType)) { + return callableType; + } + + return undefined; + } + + if (isClassInstance(subjectSubtypeExpanded)) { + let resultType: Type; + + if (evaluator.assignType(ClassType.cloneAsInstance(expandedSubtype), subjectSubtypeExpanded)) { + resultType = subjectSubtypeExpanded; + } else if ( + evaluator.assignType(subjectSubtypeExpanded, ClassType.cloneAsInstance(expandedSubtype)) + ) { + resultType = addConditionToType( + convertToInstance(unexpandedSubtype), + getTypeCondition(subjectSubtypeExpanded) + ); + + // Try to retain the type arguments for the pattern class type. 
+ if (isInstantiableClass(unexpandedSubtype) && isClassInstance(subjectSubtypeExpanded)) { + if ( + ClassType.isSpecialBuiltIn(unexpandedSubtype) || + unexpandedSubtype.shared.typeParams.length > 0 + ) { + const constraints = new ConstraintTracker(); + const unspecializedMatchType = ClassType.specialize( + unexpandedSubtype, + /* typeArgs */ undefined + ); + + const matchTypeInstance = ClassType.cloneAsInstance(unspecializedMatchType); + if ( + addConstraintsForExpectedType( + evaluator, + matchTypeInstance, + subjectSubtypeExpanded, + constraints, + /* liveTypeVarScopes */ undefined, + /* usageOffset */ undefined + ) + ) { + resultType = evaluator.solveAndApplyConstraints( + matchTypeInstance, + constraints, + { + replaceUnsolved: { + scopeIds: getTypeVarScopeIds(unexpandedSubtype), + tupleClassType: evaluator.getTupleClassType(), + }, + } + ) as ClassType; + } + } + } + } else { + return undefined; + } + + // Are there any positional arguments? If so, try to get the mappings for + // these arguments by fetching the __match_args__ symbol from the class. + let positionalArgNames: string[] = []; + if (pattern.d.args.some((arg) => !arg.d.name)) { + positionalArgNames = getPositionalMatchArgNames(evaluator, expandedSubtype); + } + + let isMatchValid = true; + pattern.d.args.forEach((arg, index) => { + // Narrow the arg pattern. It's possible that the actual type of the object + // being matched is a subtype of the resultType, so it might contain additional + // attributes that we don't know about. + const narrowedArgType = narrowTypeOfClassPatternArg( + evaluator, + arg, + index, + positionalArgNames, + resultType, + isPositiveTest + ); + + if (isNever(narrowedArgType)) { + isMatchValid = false; + } + }); + + if (isMatchValid) { + return resultType; + } + } + + return undefined; + }); + } + + return undefined; + } + ); +} + +// Some built-in classes are treated as special cases for the class pattern +// if a positional argument is used. 
+function isClassSpecialCaseForClassPattern(classType: ClassType) { + if (classPatternSpecialCases.some((className) => classType.shared.fullName === className)) { + return true; + } + + // If the class supplies its own `__match_args__`, it's not a special case. + const matchArgsMemberInfo = lookUpClassMember(classType, '__match_args__'); + if (matchArgsMemberInfo) { + return false; + } + + // If the class derives from a built-in class, it is considered a special case. + for (const mroClass of classType.shared.mro) { + if (isClass(mroClass) && classPatternSpecialCases.some((className) => mroClass.shared.fullName === className)) { + return true; + } + } + + return false; +} + +// Narrows the pattern provided for a class pattern argument. +function narrowTypeOfClassPatternArg( + evaluator: TypeEvaluator, + arg: PatternClassArgumentNode, + argIndex: number, + positionalArgNames: string[], + matchType: Type, + isPositiveTest: boolean +) { + let argName: string | undefined; + + if (arg.d.name) { + argName = arg.d.name.d.value; + } else if (argIndex < positionalArgNames.length) { + argName = positionalArgNames[argIndex]; + } + + if (isAnyOrUnknown(matchType)) { + return matchType; + } + + if (!isClass(matchType)) { + return UnknownType.create(); + } + + // According to PEP 634, some built-in types use themselves as the subject + // for the first positional argument to a class pattern. Although the PEP does + // state so explicitly, this is true of subclasses of these built-in classes + // if the subclass doesn't define its own __match_args__. 
+ let useSelfForPattern = false; + let selfForPatternType = matchType; + + if (!arg.d.name && isClass(matchType) && argIndex === 0) { + if (isClassSpecialCaseForClassPattern(matchType)) { + useSelfForPattern = true; + } else if (positionalArgNames.length === 0) { + matchType.shared.mro.forEach((mroClass) => { + if (isClass(mroClass) && isClassSpecialCaseForClassPattern(mroClass)) { + selfForPatternType = mroClass; + useSelfForPattern = true; + } + }); + } + } + + let argType: Type | undefined; + + if (useSelfForPattern) { + argType = ClassType.cloneAsInstance(selfForPatternType); + } else { + if (argName) { + argType = evaluator.useSpeculativeMode(arg, () => + // We need to apply a rather ugly cast here because PatternClassArgumentNode is + // not technically an ExpressionNode, but it is OK to use it in this context. + evaluator.getTypeOfBoundMember( + arg as any as ExpressionNode, + ClassType.cloneAsInstance(matchType), + argName! + ) + )?.type; + } + + if (!argType) { + if (!isPositiveTest) { + return matchType; + } + + // If the class type in question is "final", we know that no additional + // attributes can be added by subtypes, so it's safe to eliminate this + // type entirely. 
+ if (ClassType.isFinal(matchType)) { + return NeverType.createNever(); + } + + argType = UnknownType.create(); + } + } + + return narrowTypeBasedOnPattern(evaluator, argType, arg.d.pattern, isPositiveTest); +} + +function narrowTypeBasedOnValuePattern( + evaluator: TypeEvaluator, + subjectType: Type, + pattern: PatternValueNode, + isPositiveTest: boolean +): Type { + const valueType = evaluator.getTypeOfExpression(pattern.d.expr).type; + const narrowedSubtypes: Type[] = []; + + evaluator.mapSubtypesExpandTypeVars( + valueType, + /* options */ undefined, + (valueSubtypeExpanded, valueSubtypeUnexpanded) => { + narrowedSubtypes.push( + evaluator.mapSubtypesExpandTypeVars( + subjectType, + { conditionFilter: getTypeCondition(valueSubtypeExpanded) }, + (subjectSubtypeExpanded) => { + // If this is a negative test, see if it's an enum value. + if (!isPositiveTest) { + if ( + isClassInstance(subjectSubtypeExpanded) && + isClassInstance(valueSubtypeExpanded) && + isSameWithoutLiteralValue(subjectSubtypeExpanded, valueSubtypeExpanded) + ) { + if (!isLiteralType(subjectSubtypeExpanded) && isLiteralType(valueSubtypeExpanded)) { + const expandedLiterals = enumerateLiteralsForType( + evaluator, + subjectSubtypeExpanded + ); + if (expandedLiterals) { + return combineTypes( + expandedLiterals.filter( + (enumType) => + !ClassType.isLiteralValueSame(valueSubtypeExpanded, enumType) + ) + ); + } + } + + if ( + isLiteralType(subjectSubtypeExpanded) && + ClassType.isLiteralValueSame(valueSubtypeExpanded, subjectSubtypeExpanded) + ) { + return undefined; + } + } + + return subjectSubtypeExpanded; + } + + if (isNever(valueSubtypeExpanded) || isNever(subjectSubtypeExpanded)) { + return NeverType.createNever(); + } + + if (isAnyOrUnknown(valueSubtypeExpanded) || isAnyOrUnknown(subjectSubtypeExpanded)) { + // If either type is "Unknown" (versus Any), propagate the Unknown. + return isUnknown(valueSubtypeExpanded) || isUnknown(subjectSubtypeExpanded) + ? 
preserveUnknown(valueSubtypeExpanded, subjectSubtypeExpanded) + : AnyType.create(); + } + + // If both types are literals, we can compare the literal values directly. + if ( + isClassInstance(subjectSubtypeExpanded) && + isLiteralType(subjectSubtypeExpanded) && + isClassInstance(valueSubtypeExpanded) && + isLiteralType(valueSubtypeExpanded) + ) { + return isSameWithoutLiteralValue(subjectSubtypeExpanded, valueSubtypeExpanded) && + ClassType.isLiteralValueSame(valueSubtypeExpanded, subjectSubtypeExpanded) + ? valueSubtypeUnexpanded + : undefined; + } + + // Determine if assignment is supported for this combination of + // value subtype and matching subtype. + const returnType = evaluator.useSpeculativeMode(pattern.d.expr, () => + evaluator.getTypeOfMagicMethodCall( + valueSubtypeExpanded, + '__eq__', + [{ type: subjectSubtypeExpanded }], + pattern.d.expr, + /* expectedType */ undefined + ) + ); + + return returnType ? valueSubtypeUnexpanded : undefined; + } + ) + ); + + return undefined; + } + ); + + return combineTypes(narrowedSubtypes); +} + +// Returns information about all subtypes that match the definition of a "mapping" as +// specified in PEP 634. +function getMappingPatternInfo(evaluator: TypeEvaluator, type: Type, node: PatternAtomNode): MappingPatternInfo[] { + const mappingInfo: MappingPatternInfo[] = []; + + doForEachSubtype(type, (subtype) => { + const concreteSubtype = evaluator.makeTopLevelTypeVarsConcrete(subtype); + + if (isAnyOrUnknown(concreteSubtype)) { + mappingInfo.push({ + subtype, + isDefinitelyMapping: false, + isDefinitelyNotMapping: false, + dictTypeArgs: { + key: concreteSubtype, + value: concreteSubtype, + }, + }); + return; + } + + if (isClassInstance(concreteSubtype)) { + // Is it a TypedDict? 
+ if (ClassType.isTypedDictClass(concreteSubtype)) { + mappingInfo.push({ + subtype, + isDefinitelyMapping: true, + isDefinitelyNotMapping: false, + typedDict: concreteSubtype, + }); + return; + } + + const mappingType = evaluator.getTypingType(node, 'Mapping'); + if (!mappingType || !isInstantiableClass(mappingType)) { + return; + } + const mappingObject = ClassType.cloneAsInstance(mappingType); + + // Is it a subtype of Mapping? + const constraints = new ConstraintTracker(); + if (evaluator.assignType(mappingObject, subtype, /* diag */ undefined, constraints)) { + const specializedMapping = evaluator.solveAndApplyConstraints(mappingObject, constraints) as ClassType; + + if (specializedMapping.priv.typeArgs && specializedMapping.priv.typeArgs.length >= 2) { + mappingInfo.push({ + subtype, + isDefinitelyMapping: true, + isDefinitelyNotMapping: false, + dictTypeArgs: { + key: specializedMapping.priv.typeArgs[0], + value: specializedMapping.priv.typeArgs[1], + }, + }); + } + + return; + } + + // Is it a superclass of Mapping? + if (evaluator.assignType(subtype, mappingObject)) { + mappingInfo.push({ + subtype, + isDefinitelyMapping: false, + isDefinitelyNotMapping: false, + dictTypeArgs: { + key: UnknownType.create(), + value: UnknownType.create(), + }, + }); + return; + } + + mappingInfo.push({ + subtype, + isDefinitelyMapping: false, + isDefinitelyNotMapping: true, + }); + } + }); + + return mappingInfo; +} + +// Returns information about all subtypes that match the definition of a "sequence" as +// specified in PEP 634. For types that are not sequences or sequences that are not of +// sufficient length, it sets definiteNoMatch to true. 
+function getSequencePatternInfo( + evaluator: TypeEvaluator, + pattern: PatternSequenceNode, + type: Type +): SequencePatternInfo[] { + const patternEntryCount = pattern.d.entries.length; + const patternStarEntryIndex = pattern.d.starEntryIndex; + const sequenceInfo: SequencePatternInfo[] = []; + + doForEachSubtype(type, (subtype) => { + const concreteSubtype = evaluator.makeTopLevelTypeVarsConcrete(subtype); + let mroClassToSpecialize: ClassType | undefined; + + if (isClassInstance(concreteSubtype)) { + for (const mroClass of concreteSubtype.shared.mro) { + if (!isInstantiableClass(mroClass)) { + break; + } + + // Strings, bytes, and bytearray are explicitly excluded. + if ( + ClassType.isBuiltIn(mroClass, 'str') || + ClassType.isBuiltIn(mroClass, 'bytes') || + ClassType.isBuiltIn(mroClass, 'bytearray') + ) { + // This is definitely not a match. + sequenceInfo.push({ + subtype, + entryTypes: [], + isIndeterminateLength: true, + isDefiniteNoMatch: true, + }); + return; + } + + if (ClassType.isBuiltIn(mroClass, 'Sequence')) { + mroClassToSpecialize = mroClass; + break; + } + + if (isTupleClass(mroClass)) { + mroClassToSpecialize = mroClass; + break; + } + } + + if (mroClassToSpecialize) { + const specializedSequence = partiallySpecializeType( + mroClassToSpecialize, + concreteSubtype, + evaluator.getTypeClassType() + ) as ClassType; + + if (isTupleClass(specializedSequence)) { + const typeArgs = specializedSequence.priv.tupleTypeArgs ?? [ + { type: UnknownType.create(), isUnbounded: true }, + ]; + + let tupleIndeterminateIndex = typeArgs.findIndex( + (t) => t.isUnbounded || isUnpackedTypeVarTuple(t.type) || isUnpackedTypeVar(t.type) + ); + + let tupleDeterminateEntryCount = typeArgs.length; + + // If the tuple contains an indeterminate entry, expand or remove that + // entry to match the length of the pattern if possible. 
+ let expandedIndeterminate = false; + if (tupleIndeterminateIndex >= 0) { + tupleDeterminateEntryCount--; + + while (typeArgs.length < patternEntryCount) { + typeArgs.splice(tupleIndeterminateIndex, 0, typeArgs[tupleIndeterminateIndex]); + tupleDeterminateEntryCount++; + tupleIndeterminateIndex++; + expandedIndeterminate = true; + } + + if (typeArgs.length > patternEntryCount && patternStarEntryIndex === undefined) { + typeArgs.splice(tupleIndeterminateIndex, 1); + tupleIndeterminateIndex = -1; + } + } + + // If the pattern contains a star entry and there are too many entries + // in the tuple, we can collapse some of them into the star entry. + if ( + patternStarEntryIndex !== undefined && + typeArgs.length >= 2 && + typeArgs.length > patternEntryCount + ) { + const entriesToCombine = typeArgs.length - patternEntryCount + 1; + const removedEntries = typeArgs.splice(patternStarEntryIndex, entriesToCombine); + typeArgs.splice(patternStarEntryIndex, 0, { + type: combineTypes(removedEntries.map((t) => t.type)), + isUnbounded: removedEntries.every( + (t) => t.isUnbounded || isUnpackedTypeVarTuple(t.type) || isUnpackedTypeVar(t.type) + ), + }); + + tupleDeterminateEntryCount -= entriesToCombine; + if (!typeArgs[patternStarEntryIndex].isUnbounded) { + tupleDeterminateEntryCount++; + } + + // If the collapsed range included the tupleIndeterminateIndex, adjust + // it to reflect the new collapsed entry. + if ( + tupleIndeterminateIndex >= patternStarEntryIndex && + tupleIndeterminateIndex < patternStarEntryIndex + entriesToCombine + ) { + tupleIndeterminateIndex = patternStarEntryIndex; + } + } + + if (typeArgs.length === patternEntryCount) { + let isDefiniteNoMatch = false; + let isPotentialNoMatch = tupleIndeterminateIndex >= 0; + + // If the pattern includes a "star entry" and the tuple includes an + // indeterminate-length entry that aligns to the star entry, we can + // assume it will always match. 
+ if ( + !expandedIndeterminate && + patternStarEntryIndex !== undefined && + tupleIndeterminateIndex >= 0 && + pattern.d.entries.length - 1 === tupleDeterminateEntryCount && + patternStarEntryIndex === tupleIndeterminateIndex + ) { + isPotentialNoMatch = false; + } + + for (let i = 0; i < patternEntryCount; i++) { + const subPattern = pattern.d.entries[i]; + const typeArg = typeArgs[i].type; + const narrowedType = narrowTypeBasedOnPattern( + evaluator, + typeArg, + subPattern, + /* isPositiveTest */ true + ); + + if (isNever(narrowedType)) { + isDefiniteNoMatch = true; + } + } + + sequenceInfo.push({ + subtype, + entryTypes: isDefiniteNoMatch ? [] : typeArgs.map((t) => t.type), + isIndeterminateLength: false, + isTuple: true, + isUnboundedTuple: tupleIndeterminateIndex >= 0, + isDefiniteNoMatch, + isPotentialNoMatch, + }); + return; + } + + // If the pattern contains a star entry and the pattern associated with + // the star entry is unbounded, we can remove it completely under the + // assumption that the star pattern will capture nothing. + if (patternStarEntryIndex !== undefined) { + let tryMatchStarSequence = false; + + if (typeArgs.length === patternEntryCount - 1) { + tryMatchStarSequence = true; + typeArgs.splice(patternStarEntryIndex, 0, { + type: AnyType.create(), + isUnbounded: true, + }); + } else if ( + typeArgs.length === patternEntryCount && + typeArgs[patternStarEntryIndex].isUnbounded + ) { + tryMatchStarSequence = true; + } + + if (tryMatchStarSequence) { + let isDefiniteNoMatch = false; + + for (let i = 0; i < patternEntryCount; i++) { + if (i === patternStarEntryIndex) { + continue; + } + + const subPattern = pattern.d.entries[i]; + const typeArg = typeArgs[i].type; + const narrowedType = narrowTypeBasedOnPattern( + evaluator, + typeArg, + subPattern, + /* isPositiveTest */ true + ); + + if (isNever(narrowedType)) { + isDefiniteNoMatch = true; + } + } + + sequenceInfo.push({ + subtype, + entryTypes: isDefiniteNoMatch ? 
[] : typeArgs.map((t) => t.type), + isIndeterminateLength: false, + isTuple: true, + isUnboundedTuple: tupleIndeterminateIndex >= 0, + isDefiniteNoMatch, + }); + return; + } + } + } else { + sequenceInfo.push({ + subtype, + entryTypes: [ + specializedSequence.priv.typeArgs && specializedSequence.priv.typeArgs.length > 0 + ? specializedSequence.priv.typeArgs[0] + : UnknownType.create(), + ], + isIndeterminateLength: true, + isDefiniteNoMatch: false, + }); + return; + } + } + } + + if (!mroClassToSpecialize) { + const sequenceType = evaluator.getTypingType(pattern, 'Sequence'); + + if (sequenceType && isInstantiableClass(sequenceType)) { + const sequenceObject = ClassType.cloneAsInstance(sequenceType); + + // Is it a subtype of Sequence? + const constraints = new ConstraintTracker(); + if (evaluator.assignType(sequenceObject, subtype, /* diag */ undefined, constraints)) { + const specializedSequence = evaluator.solveAndApplyConstraints( + sequenceObject, + constraints + ) as ClassType; + + if (specializedSequence.priv.typeArgs && specializedSequence.priv.typeArgs.length > 0) { + sequenceInfo.push({ + subtype, + entryTypes: [specializedSequence.priv.typeArgs[0]], + isIndeterminateLength: true, + isDefiniteNoMatch: false, + isPotentialNoMatch: false, + }); + return; + } + } + + // If it wasn't a subtype of Sequence, see if it's a supertype. 
+ const sequenceConstraints = new ConstraintTracker(); + if ( + addConstraintsForExpectedType( + evaluator, + ClassType.cloneAsInstance(sequenceType), + subtype, + sequenceConstraints, + getTypeVarScopesForNode(pattern), + pattern.start + ) + ) { + const specializedSequence = evaluator.solveAndApplyConstraints( + ClassType.cloneAsInstantiable(sequenceType), + sequenceConstraints + ) as ClassType; + + if (specializedSequence.priv.typeArgs && specializedSequence.priv.typeArgs.length > 0) { + sequenceInfo.push({ + subtype, + entryTypes: [specializedSequence.priv.typeArgs[0]], + isIndeterminateLength: true, + isDefiniteNoMatch: false, + isPotentialNoMatch: true, + }); + return; + } + } + + if ( + evaluator.assignType( + subtype, + ClassType.specialize(ClassType.cloneAsInstance(sequenceType), [UnknownType.create()]) + ) + ) { + sequenceInfo.push({ + subtype, + entryTypes: [UnknownType.create()], + isIndeterminateLength: true, + isDefiniteNoMatch: false, + isPotentialNoMatch: true, + }); + return; + } + } + } + + // Push an entry that indicates that this is definitely not a match. + sequenceInfo.push({ + subtype, + entryTypes: [], + isIndeterminateLength: true, + isDefiniteNoMatch: true, + }); + }); + + return sequenceInfo; +} + +function getTypeOfPatternSequenceEntry( + evaluator: TypeEvaluator, + node: ParseNode, + sequenceInfo: SequencePatternInfo, + entryIndex: number, + entryCount: number, + starEntryIndex: number | undefined, + unpackStarEntry: boolean +): Type { + if (sequenceInfo.isIndeterminateLength) { + let entryType = sequenceInfo.entryTypes[0]; + + if (!unpackStarEntry && entryIndex === starEntryIndex && !isNever(entryType)) { + entryType = wrapTypeInList(evaluator, node, entryType); + } + + return entryType; + } + + if (starEntryIndex === undefined || entryIndex < starEntryIndex) { + return sequenceInfo.entryTypes[entryIndex]; + } + + if (entryIndex === starEntryIndex) { + // Create a list out of the entries that map to the star entry. 
+ // Note that we strip literal types here. + const starEntryTypes = sequenceInfo.entryTypes + .slice(starEntryIndex, starEntryIndex + sequenceInfo.entryTypes.length - entryCount + 1) + .map((type) => { + // If this is a TypeVarTuple, there's not much we can say about + // its type other than it's "Unknown". We could evaluate it as an + // "object", but that will cause problems given that this type will + // be wrapped in a "list" below, and lists are invariant. + if (isTypeVarTuple(type) && !type.priv.isInUnion) { + return UnknownType.create(); + } + + return evaluator.stripLiteralValue(type); + }); + + let entryType = combineTypes(starEntryTypes); + + if (!unpackStarEntry) { + entryType = wrapTypeInList(evaluator, node, entryType); + } + + return entryType; + } + + // The entry index is past the index of the star entry, so we need + // to index from the end of the sequence rather than the start. + const itemIndex = sequenceInfo.entryTypes.length - (entryCount - entryIndex); + assert(itemIndex >= 0 && itemIndex < sequenceInfo.entryTypes.length); + + return sequenceInfo.entryTypes[itemIndex]; +} + +// Recursively assigns the specified type to the pattern and any capture +// nodes within it. It returns the narrowed type, as dictated by the pattern. +export function assignTypeToPatternTargets( + evaluator: TypeEvaluator, + type: Type, + isTypeIncomplete: boolean, + pattern: PatternAtomNode +): Type { + // Further narrow the type based on this pattern. 
+ const narrowedType = narrowTypeBasedOnPattern(evaluator, type, pattern, /* positiveTest */ true); + + switch (pattern.nodeType) { + case ParseNodeType.PatternSequence: { + const sequenceInfo = getSequencePatternInfo(evaluator, pattern, narrowedType).filter( + (seqInfo) => !seqInfo.isDefiniteNoMatch + ); + + pattern.d.entries.forEach((entry, index) => { + const entryType = combineTypes( + sequenceInfo.map((info) => + getTypeOfPatternSequenceEntry( + evaluator, + pattern, + info, + index, + pattern.d.entries.length, + pattern.d.starEntryIndex, + /* unpackStarEntry */ false + ) + ) + ); + + assignTypeToPatternTargets(evaluator, entryType, isTypeIncomplete, entry); + }); + break; + } + + case ParseNodeType.PatternAs: { + if (pattern.d.target) { + evaluator.assignTypeToExpression( + pattern.d.target, + { type: narrowedType, isIncomplete: isTypeIncomplete }, + pattern.d.target + ); + } + + let runningNarrowedType = narrowedType; + pattern.d.orPatterns.forEach((orPattern) => { + assignTypeToPatternTargets(evaluator, runningNarrowedType, isTypeIncomplete, orPattern); + + // OR patterns are evaluated left to right, so we can narrow + // the type as we go. 
+ runningNarrowedType = narrowTypeBasedOnPattern( + evaluator, + runningNarrowedType, + orPattern, + /* positiveTest */ false + ); + }); + break; + } + + case ParseNodeType.PatternCapture: { + if (pattern.d.isWildcard) { + if (!isTypeIncomplete) { + if (isUnknown(narrowedType)) { + evaluator.addDiagnostic( + DiagnosticRule.reportUnknownVariableType, + LocMessage.wildcardPatternTypeUnknown(), + pattern.d.target + ); + } else if (isPartlyUnknown(narrowedType)) { + const diagAddendum = new DiagnosticAddendum(); + diagAddendum.addMessage( + LocAddendum.typeOfSymbol().format({ + name: '_', + type: evaluator.printType(narrowedType, { expandTypeAlias: true }), + }) + ); + evaluator.addDiagnostic( + DiagnosticRule.reportUnknownVariableType, + LocMessage.wildcardPatternTypePartiallyUnknown() + diagAddendum.getString(), + pattern.d.target + ); + } + } + } else { + evaluator.assignTypeToExpression( + pattern.d.target, + { type: narrowedType, isIncomplete: isTypeIncomplete }, + pattern.d.target + ); + } + break; + } + + case ParseNodeType.PatternMapping: { + const mappingInfo = getMappingPatternInfo(evaluator, narrowedType, pattern); + + pattern.d.entries.forEach((mappingEntry) => { + const keyTypes: Type[] = []; + const valueTypes: Type[] = []; + + mappingInfo.forEach((mappingSubtypeInfo) => { + if (mappingSubtypeInfo.typedDict) { + if (mappingEntry.nodeType === ParseNodeType.PatternMappingKeyEntry) { + const keyType = narrowTypeBasedOnPattern( + evaluator, + evaluator.getBuiltInObject(pattern, 'str'), + mappingEntry.d.keyPattern, + /* isPositiveTest */ true + ); + keyTypes.push(keyType); + + doForEachSubtype(keyType, (keySubtype) => { + if ( + isClassInstance(keySubtype) && + ClassType.isBuiltIn(keySubtype, 'str') && + isLiteralType(keySubtype) + ) { + const tdEntries = getTypedDictMembersForClass( + evaluator, + mappingSubtypeInfo.typedDict! + ); + const valueInfo = tdEntries.knownItems.get(keySubtype.priv.literalValue as string); + valueTypes.push(valueInfo ? 
valueInfo.valueType : UnknownType.create()); + } else { + valueTypes.push(UnknownType.create()); + } + }); + } else if (mappingEntry.nodeType === ParseNodeType.PatternMappingExpandEntry) { + keyTypes.push(evaluator.getBuiltInObject(pattern, 'str')); + valueTypes.push(evaluator.getObjectType()); + } + } else if (mappingSubtypeInfo.dictTypeArgs) { + if (mappingEntry.nodeType === ParseNodeType.PatternMappingKeyEntry) { + const keyType = narrowTypeBasedOnPattern( + evaluator, + mappingSubtypeInfo.dictTypeArgs.key, + mappingEntry.d.keyPattern, + /* isPositiveTest */ true + ); + keyTypes.push(keyType); + valueTypes.push( + narrowTypeBasedOnPattern( + evaluator, + mappingSubtypeInfo.dictTypeArgs.value, + mappingEntry.d.valuePattern, + /* isPositiveTest */ true + ) + ); + } else if (mappingEntry.nodeType === ParseNodeType.PatternMappingExpandEntry) { + keyTypes.push(mappingSubtypeInfo.dictTypeArgs.key); + valueTypes.push(mappingSubtypeInfo.dictTypeArgs.value); + } + } + }); + + const keyType = combineTypes(keyTypes); + const valueType = combineTypes(valueTypes); + + if (mappingEntry.nodeType === ParseNodeType.PatternMappingKeyEntry) { + assignTypeToPatternTargets(evaluator, keyType, isTypeIncomplete, mappingEntry.d.keyPattern); + assignTypeToPatternTargets(evaluator, valueType, isTypeIncomplete, mappingEntry.d.valuePattern); + } else if (mappingEntry.nodeType === ParseNodeType.PatternMappingExpandEntry) { + const dictClass = evaluator.getBuiltInType(pattern, 'dict'); + const strType = evaluator.getBuiltInObject(pattern, 'str'); + const dictType = + dictClass && isInstantiableClass(dictClass) && isClassInstance(strType) + ? 
ClassType.cloneAsInstance(ClassType.specialize(dictClass, [keyType, valueType])) + : UnknownType.create(); + evaluator.assignTypeToExpression( + mappingEntry.d.target, + { type: dictType, isIncomplete: isTypeIncomplete }, + mappingEntry.d.target + ); + } + }); + break; + } + + case ParseNodeType.PatternClass: { + const argTypes: Type[][] = pattern.d.args.map((arg) => []); + + evaluator.mapSubtypesExpandTypeVars(narrowedType, /* options */ undefined, (expandedSubtype) => { + if (isClassInstance(expandedSubtype)) { + doForEachSubtype(narrowedType, (subjectSubtype) => { + const concreteSubtype = evaluator.makeTopLevelTypeVarsConcrete(subjectSubtype); + + if (isAnyOrUnknown(concreteSubtype)) { + pattern.d.args.forEach((arg, index) => { + argTypes[index].push(concreteSubtype); + }); + } else if (isClassInstance(concreteSubtype)) { + // Are there any positional arguments? If so, try to get the mappings for + // these arguments by fetching the __match_args__ symbol from the class. + let positionalArgNames: string[] = []; + if (pattern.d.args.some((arg) => !arg.d.name)) { + positionalArgNames = getPositionalMatchArgNames( + evaluator, + ClassType.cloneAsInstantiable(expandedSubtype) + ); + } + + pattern.d.args.forEach((arg, index) => { + const narrowedArgType = narrowTypeOfClassPatternArg( + evaluator, + arg, + index, + positionalArgNames, + ClassType.cloneAsInstantiable(expandedSubtype), + /* isPositiveTest */ true + ); + argTypes[index].push(narrowedArgType); + }); + } + }); + } else { + pattern.d.args.forEach((arg, index) => { + argTypes[index].push(UnknownType.create()); + }); + } + + return undefined; + }); + + pattern.d.args.forEach((arg, index) => { + assignTypeToPatternTargets(evaluator, combineTypes(argTypes[index]), isTypeIncomplete, arg.d.pattern); + }); + break; + } + + case ParseNodeType.PatternLiteral: + case ParseNodeType.PatternValue: + case ParseNodeType.Error: { + // Nothing to do here. 
+ break; + } + } + + return narrowedType; +} + +function wrapTypeInList(evaluator: TypeEvaluator, node: ParseNode, type: Type): Type { + if (isNever(type)) { + return type; + } + + const listObjectType = convertToInstance(evaluator.getBuiltInObject(node, 'list')); + if (listObjectType && isClassInstance(listObjectType)) { + // If the type is a union that contains an Any or Unknown, eliminate the other + // types before wrapping it in a list. + type = containsAnyOrUnknown(type, /* recurse */ false) ?? type; + + return ClassType.specialize(listObjectType, [type]); + } + + return UnknownType.create(); +} + +export function validateClassPattern(evaluator: TypeEvaluator, pattern: PatternClassNode) { + let exprType = evaluator.getTypeOfExpression(pattern.d.className, EvalFlags.CallBaseDefaults).type; + + // If the expression is a type alias or other special form, treat it + // as the special form rather than the class. + if (exprType.props?.specialForm) { + exprType = exprType.props.specialForm; + } + + if (isAnyOrUnknown(exprType)) { + return; + } + + // Check for certain uses of type aliases that generate runtime exceptions. 
+ if ( + exprType.props?.typeAliasInfo && + isInstantiableClass(exprType) && + exprType.priv.typeArgs && + exprType.priv.isTypeArgExplicit + ) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.classPatternTypeAlias().format({ type: evaluator.printType(exprType) }), + pattern.d.className + ); + } else if (!isInstantiableClass(exprType)) { + if (!isNever(exprType)) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocAddendum.typeNotClass().format({ type: evaluator.printType(exprType) }), + pattern.d.className + ); + } + } else if (ClassType.isNewTypeClass(exprType)) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.classPatternNewType().format({ type: evaluator.printType(exprType) }), + pattern.d.className + ); + } else { + const isBuiltIn = isClassSpecialCaseForClassPattern(exprType); + + // If it's a special-case builtin class, only positional arguments are allowed. + if (isBuiltIn) { + if (pattern.d.args.length === 1 && pattern.d.args[0].d.name) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.classPatternBuiltInArgPositional(), + pattern.d.args[0].d.name + ); + } + } + + // Emits an error if the supplied number of positional patterns is less than + // expected for the given subject type. 
+ let positionalPatternCount = pattern.d.args.findIndex((arg) => arg.d.name !== undefined); + if (positionalPatternCount < 0) { + positionalPatternCount = pattern.d.args.length; + } + + let expectedPatternCount = 1; + if (!isBuiltIn) { + let positionalArgNames: string[] = []; + if (pattern.d.args.some((arg) => !arg.d.name)) { + positionalArgNames = getPositionalMatchArgNames(evaluator, exprType); + } + + expectedPatternCount = positionalArgNames.length; + } + + if (positionalPatternCount > expectedPatternCount) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.classPatternPositionalArgCount().format({ + type: exprType.shared.name, + expected: expectedPatternCount, + received: positionalPatternCount, + }), + pattern.d.args[expectedPatternCount] + ); + } + } +} + +// Determines whether the reference expression has a relationship to the subject expression +// in such a way that the type of the reference expression can be narrowed based +// on the narrowed type of the subject expression. +export function getPatternSubtypeNarrowingCallback( + evaluator: TypeEvaluator, + reference: CodeFlowReferenceExpressionNode, + subjectExpression: ExpressionNode +): PatternSubtypeNarrowingCallback | undefined { + // Look for a subject expression of the form [] where + // is either a str (for TypedDict discrimination) or an int + // (for tuple discrimination). 
+ if ( + subjectExpression.nodeType === ParseNodeType.Index && + subjectExpression.d.items.length === 1 && + !subjectExpression.d.trailingComma && + subjectExpression.d.items[0].d.argCategory === ArgCategory.Simple && + isMatchingExpression(reference, subjectExpression.d.leftExpr) + ) { + const indexTypeResult = evaluator.getTypeOfExpression(subjectExpression.d.items[0].d.valueExpr); + const indexType = indexTypeResult.type; + + if (isClassInstance(indexType) && isLiteralType(indexType)) { + if (ClassType.isBuiltIn(indexType, ['int', 'str'])) { + const unnarrowedReferenceTypeResult = evaluator.getTypeOfExpression( + subjectExpression.d.leftExpr, + EvalFlags.CallBaseDefaults + ); + const unnarrowedReferenceType = unnarrowedReferenceTypeResult.type; + + return (narrowedSubjectType: Type) => { + let canNarrow = true; + const typesToCombine: Type[] = []; + + doForEachSubtype(narrowedSubjectType, (subtype) => { + subtype = evaluator.makeTopLevelTypeVarsConcrete(subtype); + + if (isClassInstance(subtype) && subtype.priv.literalValue !== undefined) { + if (ClassType.isBuiltIn(indexType, 'str')) { + typesToCombine.push( + narrowTypeForDiscriminatedDictEntryComparison( + evaluator, + unnarrowedReferenceType, + indexType, + subtype, + /* isPositiveTest */ true + ) + ); + } else { + typesToCombine.push( + narrowTypeForDiscriminatedTupleComparison( + evaluator, + unnarrowedReferenceType, + indexType, + subtype, + /* isPositiveTest */ true + ) + ); + } + } else if (!isNever(subtype)) { + // We don't know how to narrow in this case. + canNarrow = false; + } + }); + + if (!canNarrow) { + return undefined; + } + + return { + type: combineTypes(typesToCombine), + isIncomplete: indexTypeResult.isIncomplete || unnarrowedReferenceTypeResult.isIncomplete, + }; + }; + } + } + } + + // Look for a subject expression that contains the reference + // expression as an entry in a tuple. 
+ if (subjectExpression.nodeType === ParseNodeType.Tuple) { + const matchingEntryIndex = subjectExpression.d.items.findIndex((expr) => isMatchingExpression(reference, expr)); + if (matchingEntryIndex >= 0) { + const typeResult = evaluator.getTypeOfExpression(subjectExpression.d.items[matchingEntryIndex]); + + return (narrowedSubjectType: Type) => { + let canNarrow = true; + const narrowedSubtypes: Type[] = []; + + doForEachSubtype(narrowedSubjectType, (subtype) => { + if ( + isClassInstance(subtype) && + ClassType.isBuiltIn(subtype, 'tuple') && + subtype.priv.tupleTypeArgs && + matchingEntryIndex < subtype.priv.tupleTypeArgs.length && + subtype.priv.tupleTypeArgs.every((e) => !e.isUnbounded) + ) { + narrowedSubtypes.push(subtype.priv.tupleTypeArgs[matchingEntryIndex].type); + } else if (isNever(narrowedSubjectType)) { + narrowedSubtypes.push(narrowedSubjectType); + } else { + canNarrow = false; + } + }); + + return canNarrow + ? { type: combineTypes(narrowedSubtypes), isIncomplete: typeResult.isIncomplete } + : undefined; + }; + } + } + + // Look for a subject expression of the form "a.b" where "b" is an attribute + // that is annotated with a literal type. 
+ if ( + subjectExpression.nodeType === ParseNodeType.MemberAccess && + isMatchingExpression(reference, subjectExpression.d.leftExpr) + ) { + const unnarrowedReferenceTypeResult = evaluator.getTypeOfExpression( + subjectExpression.d.leftExpr, + EvalFlags.CallBaseDefaults + ); + const unnarrowedReferenceType = unnarrowedReferenceTypeResult.type; + + return (narrowedSubjectType: Type) => { + if (isNever(narrowedSubjectType)) { + return { type: NeverType.createNever() }; + } + + if (!isLiteralTypeOrUnion(narrowedSubjectType)) { + return undefined; + } + + const resultType = mapSubtypes(narrowedSubjectType, (literalSubtype) => { + assert(isClassInstance(literalSubtype) && literalSubtype.priv.literalValue !== undefined); + + return narrowTypeForDiscriminatedLiteralFieldComparison( + evaluator, + unnarrowedReferenceType, + subjectExpression.d.member.d.value, + literalSubtype, + /* isPositiveTest */ true + ); + }); + + return { + type: resultType, + }; + }; + } + + return undefined; +} + +function reportUnnecessaryPattern(evaluator: TypeEvaluator, pattern: PatternAtomNode, subjectType: Type): void { + // If this is a simple wildcard pattern, exempt it from this diagnostic. + if ( + pattern.nodeType === ParseNodeType.PatternAs && + pattern.d.orPatterns.length === 1 && + pattern.d.orPatterns[0].nodeType === ParseNodeType.PatternCapture && + pattern.d.orPatterns[0].d.isWildcard + ) { + return; + } + + evaluator.addDiagnostic( + DiagnosticRule.reportUnnecessaryComparison, + LocMessage.patternNeverMatches().format({ type: evaluator.printType(subjectType) }), + pattern + ); +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/program.ts b/python-parser/packages/pyright-internal/src/analyzer/program.ts new file mode 100644 index 00000000..22439f0e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/program.ts @@ -0,0 +1,2305 @@ +/* + * program.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * Author: Eric Traut + * + * An object that tracks all of the source files being analyzed + * and all of their recursive imports. + */ + +import { CancellationToken, LSPErrorCodes } from 'vscode-languageserver'; + +import { TextDocument } from 'vscode-languageserver-textdocument'; +import { OperationCanceledException, throwIfCancellationRequested } from '../common/cancellationUtils'; +import { ConfigOptions, ExecutionEnvironment, matchFileSpecs } from '../common/configOptions'; +import { ConsoleInterface, StandardConsole } from '../common/console'; +import { isThenable } from '../common/core'; +import * as debug from '../common/debug'; +import { assert } from '../common/debug'; +import { Diagnostic, DiagnosticCategory } from '../common/diagnostic'; +import { FileDiagnostics } from '../common/diagnosticSink'; +import { FileEditAction } from '../common/editAction'; +import { EditableProgram, ProgramView } from '../common/extensibility'; +import { LogTracker } from '../common/logTracker'; +import { convertRangeToTextRange } from '../common/positionUtils'; +import { ServiceKeys } from '../common/serviceKeys'; +import { ServiceProvider } from '../common/serviceProvider'; +import '../common/serviceProviderExtensions'; +import { Range, TextRange, doRangesIntersect } from '../common/textRange'; +import { Duration, timingStats } from '../common/timing'; +import { Uri } from '../common/uri/uri'; +import { makeDirectories } from '../common/uri/uriUtils'; +import { ParseFileResults, ParserOutput } from '../parser/parser'; +import { RequiringAnalysisCount } from './analysis'; +import { AbsoluteModuleDescriptor, ImportLookupResult, LookupImportOptions } from './analyzerFileInfo'; +import * as AnalyzerNodeInfo from './analyzerNodeInfo'; +import { CacheManager } from './cacheManager'; +import { CircularDependency } from './circularDependency'; +import { ImportResolver } from './importResolver'; +import { ImportResult, ImportType } from './importResult'; +import { getDocString } 
from './parseTreeUtils'; +import { ISourceFileFactory } from './programTypes'; +import { Scope } from './scope'; +import { IPythonMode, SourceFile } from './sourceFile'; +import { SourceFileInfo } from './sourceFileInfo'; +import { createChainedByList, isUserCode, verifyNoCyclesInChainedFiles } from './sourceFileInfoUtils'; +import { SourceMapper } from './sourceMapper'; +import { Symbol, SymbolTable } from './symbol'; +import { createTracePrinter } from './tracePrinter'; +import { PrintTypeOptions, TypeEvaluator } from './typeEvaluatorTypes'; +import { createTypeEvaluatorWithTracker } from './typeEvaluatorWithTracker'; +import { getPrintTypeFlags } from './typePrinter'; +import { TypeStubWriter } from './typeStubWriter'; +import { Type } from './types'; + +const _maxImportDepth = 256; + +// Helper function to check if a diagnostic should be filtered due to disableTaggedHints. +// Tagged hints include unreachable code, unused code, and deprecated symbols. +function isTaggedHintDiagnostic(diag: Diagnostic): boolean { + return ( + diag.category === DiagnosticCategory.UnreachableCode || + diag.category === DiagnosticCategory.UnusedCode || + diag.category === DiagnosticCategory.Deprecated + ); +} + +export interface MaxAnalysisTime { + // Maximum number of ms to analyze when there are open files + // that require analysis. This number is usually kept relatively + // small to guarantee responsiveness during typing. + openFilesTimeInMs: number; + + // Maximum number of ms to analyze when all open files and their + // dependencies have been analyzed. This number can be higher + // to reduce overall analysis time but needs to be short enough + // to remain responsive if an open file is modified. 
+ noOpenFilesTimeInMs: number; +} + +interface UpdateImportInfo { + path: Uri; + isTypeshedFile: boolean; + isThirdPartyImport: boolean; + isPyTypedPresent: boolean; +} + +export type PreCheckCallback = (parserOutput: ParserOutput, evaluator: TypeEvaluator) => void; + +export interface ChangedRange { + range: TextRange; + delta: number; +} + +export interface OpenFileOptions { + isTracked: boolean; + ipythonMode: IPythonMode; + chainedFileUri: Uri | undefined; + changedRange?: ChangedRange; +} + +// Track edit mode related information. +class EditModeTracker { + private _isEditMode = false; + private _mutatedFiles: SourceFileInfo[] = []; + + get isEditMode() { + return this._isEditMode; + } + + addMutatedFiles(file: SourceFileInfo) { + this._mutatedFiles.push(file); + } + + enable() { + this._isEditMode = true; + this._mutatedFiles = []; + } + + disable() { + this._isEditMode = false; + + const files = this._mutatedFiles; + this._mutatedFiles = []; + + return files; + } +} + +// Container for all of the files that are being analyzed. 
Files +// can fall into one or more of the following categories: +// Tracked - specified by the config options +// Referenced - part of the transitive closure +// Opened - temporarily opened in the editor +// Shadowed - implementation file that shadows a type stub file +export class Program { + private static _nextId = 0; + + private readonly _console: ConsoleInterface; + private readonly _sourceFileList: SourceFileInfo[] = []; + private readonly _sourceFileMap = new Map(); + + private readonly _logTracker: LogTracker; + private readonly _cacheManager: CacheManager; + private readonly _id: string; + + private _allowedThirdPartyImports: string[] | undefined; + private _configOptions: ConfigOptions; + private _importResolver: ImportResolver; + private _evaluator: TypeEvaluator | undefined; + private _disposed = false; + private _parsedFileCount = 0; + private _preCheckCallback: PreCheckCallback | undefined; + private _editModeTracker = new EditModeTracker(); + private _sourceFileFactory: ISourceFileFactory; + + constructor( + initialImportResolver: ImportResolver, + initialConfigOptions: ConfigOptions, + readonly serviceProvider: ServiceProvider, + logTracker?: LogTracker, + private _disableChecker?: boolean, + id?: string + ) { + this._console = serviceProvider.tryGet(ServiceKeys.console) || new StandardConsole(); + this._logTracker = logTracker ?? new LogTracker(this._console, 'FG'); + this._importResolver = initialImportResolver; + this._configOptions = initialConfigOptions; + + this._sourceFileFactory = serviceProvider.sourceFileFactory(); + + this._cacheManager = serviceProvider.tryGet(ServiceKeys.cacheManager) ?? new CacheManager(); + this._cacheManager.registerCacheOwner(this); + this._createNewEvaluator(); + + this._id = id ?? 
`Prog_${Program._nextId}`; + Program._nextId += 1; + } + + get id() { + return this._id; + } + + get console(): ConsoleInterface { + return this._console; + } + + get rootPath(): Uri { + return this._configOptions.projectRoot; + } + + get evaluator(): TypeEvaluator | undefined { + return this._evaluator; + } + + get configOptions(): ConfigOptions { + return this._configOptions; + } + + get importResolver(): ImportResolver { + return this._importResolver; + } + + get fileSystem() { + return this._importResolver.fileSystem; + } + + get isDisposed() { + return this._disposed; + } + get lookUpImport() { + return this._lookUpImport; + } + + dispose() { + this.disposeInternal(this._disposed); + + this._cacheManager.unregisterCacheOwner(this); + this._disposed = true; + } + + enterEditMode() { + this._editModeTracker.enable(); + } + + exitEditMode() { + // Stop applying edit mode to new source files. + const mutatedFiles = this._editModeTracker.disable(); + + const filesToDelete = new Set(); + const edits: FileEditAction[] = []; + + // Tell all source files we're no longer in edit mode. Gather + // up all of their edits and find files that no longer needed. + mutatedFiles.forEach((fileInfo) => { + if (fileInfo.isCreatedInEditMode) { + filesToDelete.add(fileInfo); + } + + const newContents = fileInfo.restore(); + if (newContents) { + // Create a text document so we can compute the edits. + const textDocument = TextDocument.create(fileInfo.uri.toString(), 'python', 1, fileInfo.contents || ''); + + // Add an edit action to the list. + edits.push({ + fileUri: fileInfo.uri, + range: { + start: { line: 0, character: 0 }, + end: { line: textDocument.lineCount, character: 0 }, + }, + replacementText: newContents, + }); + } + }); + + // Delete files added while in edit mode + if (filesToDelete.size > 0) { + // delete from the back to make sure index is valid. 
+ for (let i = this._sourceFileList.length - 1; i >= 0; i--) { + const v = this._sourceFileList[i]; + if (filesToDelete.has(v)) { + // We don't need to care about file diagnostics since in edit mode + // checker won't run. + v.sourceFile.prepareForClose(); + this._removeSourceFileFromListAndMap(v.uri, i); + } + } + } + + if (mutatedFiles.length > 0) { + // All cache is invalid now. + this._createNewEvaluator(); + } + + return edits; + } + + setConfigOptions(configOptions: ConfigOptions) { + this._configOptions = configOptions; + this._importResolver.setConfigOptions(configOptions); + + // Create a new evaluator with the updated config options. + this._createNewEvaluator(); + } + + setImportResolver(importResolver: ImportResolver) { + this._importResolver = importResolver; + + // Create a new evaluator with the updated import resolver. + // Otherwise, lookup import passed to type evaluator might use + // older import resolver when resolving imports after parsing. + this._createNewEvaluator(); + } + + // Sets the list of tracked files that make up the program. + setTrackedFiles(fileUris: Uri[]): FileDiagnostics[] { + if (this._sourceFileList.length > 0) { + // We need to determine which files to remove from the existing file list. + const newFileMap = new Map(); + fileUris.forEach((path) => { + newFileMap.set(path.key, path); + }); + + // Files that are not in the tracked file list are + // marked as no longer tracked. + this._sourceFileList.forEach((oldFile) => { + const fileUri = oldFile.uri; + if (!newFileMap.has(fileUri.key)) { + oldFile.isTracked = false; + } + }); + } + + // Add the new files. Only the new items will be added. + this.addTrackedFiles(fileUris); + + return this._removeUnneededFiles(); + } + + // Allows a caller to set a callback that is called right before + // a source file is type checked. It is intended for testing only. 
+ setPreCheckCallback(preCheckCallback: PreCheckCallback) { + this._preCheckCallback = preCheckCallback; + } + + // By default, no third-party imports are allowed. This enables + // third-party imports for a specified import and its children. + // For example, if importNames is ['tensorflow'], then third-party + // (absolute) imports are allowed for 'import tensorflow', + // 'import tensorflow.optimizers', etc. + setAllowedThirdPartyImports(importNames: string[]) { + this._allowedThirdPartyImports = importNames; + } + + addTrackedFiles(fileUris: Uri[], isThirdPartyImport = false, isInPyTypedPackage = false) { + fileUris.forEach((fileUri) => { + this.addTrackedFile(fileUri, isThirdPartyImport, isInPyTypedPackage); + }); + } + + addInterimFile(fileUri: Uri): SourceFileInfo { + // Double check not already there. + let fileInfo = this.getSourceFileInfo(fileUri); + if (!fileInfo) { + fileInfo = this._createInterimFileInfo(fileUri); + this._addToSourceFileListAndMap(fileInfo); + } + return fileInfo; + } + + addTrackedFile(fileUri: Uri, isThirdPartyImport = false, isInPyTypedPackage = false): SourceFile { + let sourceFileInfo = this.getSourceFileInfo(fileUri); + + if (sourceFileInfo) { + // The module name may have changed based on updates to the + // search paths. Clear any cached module name so it is recomputed. + sourceFileInfo.sourceFile.clearCachedModuleName(); + sourceFileInfo.isTracked = true; + return sourceFileInfo.sourceFile; + } + + // Detect py.typed status if not explicitly provided. This ensures that + // files from py.typed packages are correctly marked even when added + // directly to check paths (e.g., via command line). 
+ let effectiveIsInPyTypedPackage = isInPyTypedPackage; + if (!isInPyTypedPackage) { + const moduleImportInfo = this._getModuleImportInfoForFile(fileUri); + effectiveIsInPyTypedPackage = moduleImportInfo.isThirdPartyPyTypedPresent; + } + + const sourceFile = this._sourceFileFactory.createSourceFile( + this.serviceProvider, + fileUri, + (uri) => this._getModuleName(uri), + isThirdPartyImport, + effectiveIsInPyTypedPackage, + this._editModeTracker, + this._console, + this._logTracker + ); + + // Set the initial diagnostic rule set from the execution environment + // so the file has config-level overrides (e.g. reportPrivateImportUsage: + // false) from the start. Without this, files added via positional args + // (which override configOptions.include) would use the basic defaults + // until parse() runs. + const execEnv = this._configOptions.findExecEnvironment(fileUri); + sourceFile.setInitialDiagnosticRuleSet(execEnv.diagnosticRuleSet); + sourceFileInfo = new SourceFileInfo( + sourceFile, + sourceFile.isTypingStubFile() || sourceFile.isTypeshedStubFile() || sourceFile.isBuiltInStubFile(), + isThirdPartyImport, + effectiveIsInPyTypedPackage, + this._editModeTracker, + { + isTracked: true, + } + ); + this._addToSourceFileListAndMap(sourceFileInfo); + return sourceFile; + } + + setFileOpened(fileUri: Uri, version: number | null, contents: string, options?: OpenFileOptions) { + let sourceFileInfo = this.getSourceFileInfo(fileUri); + if (!sourceFileInfo) { + const moduleImportInfo = this._getModuleImportInfoForFile(fileUri); + const sourceFile = this._sourceFileFactory.createSourceFile( + this.serviceProvider, + fileUri, + (uri) => this._getModuleName(uri), + /* isThirdPartyImport */ false, + moduleImportInfo.isThirdPartyPyTypedPresent, + this._editModeTracker, + this._console, + this._logTracker, + options?.ipythonMode ?? 
IPythonMode.None + ); + const chainedFilePath = options?.chainedFileUri; + sourceFileInfo = new SourceFileInfo( + sourceFile, + /* isTypeshedFile */ false, + /* isThirdPartyImport */ false, + /* isThirdPartyPyTypedPresent */ false, + this._editModeTracker, + { + isTracked: options?.isTracked ?? false, + chainedSourceFile: chainedFilePath ? this.getSourceFileInfo(chainedFilePath) : undefined, + isOpenByClient: true, + } + ); + this._addToSourceFileListAndMap(sourceFileInfo); + } else { + sourceFileInfo.isOpenByClient = true; + + // Reset the diagnostic version so we force an update to the + // diagnostics, which can change based on whether the file is open. + // We do not set the version to undefined here because that implies + // there are no diagnostics currently reported for this file. + sourceFileInfo.diagnosticsVersion = 0; + } + + verifyNoCyclesInChainedFiles(this, sourceFileInfo); + sourceFileInfo.sourceFile.setClientVersion(version, contents); + } + + getChainedUri(fileUri: Uri): Uri | undefined { + const sourceFileInfo = this.getSourceFileInfo(fileUri); + return sourceFileInfo?.chainedSourceFile?.uri; + } + + updateChainedUri(fileUri: Uri, chainedFileUri: Uri | undefined) { + const sourceFileInfo = this.getSourceFileInfo(fileUri); + if (!sourceFileInfo) { + return; + } + + sourceFileInfo.chainedSourceFile = chainedFileUri ? this.getSourceFileInfo(chainedFileUri) : undefined; + sourceFileInfo.sourceFile.markDirty(); + this._markFileDirtyRecursive(sourceFileInfo, new Set()); + + verifyNoCyclesInChainedFiles(this, sourceFileInfo); + } + + setFileClosed(fileUri: Uri, isTracked?: boolean): FileDiagnostics[] { + const sourceFileInfo = this.getSourceFileInfo(fileUri); + if (sourceFileInfo) { + sourceFileInfo.isOpenByClient = false; + sourceFileInfo.isTracked = isTracked ?? sourceFileInfo.isTracked; + sourceFileInfo.sourceFile.setClientVersion(null, ''); + + // There is no guarantee that content is saved before the file is closed. 
+ // We need to mark the file dirty so we can re-analyze next time. + // This won't matter much for OpenFileOnly users, but it will matter for + // people who use diagnosticMode Workspace. + if (sourceFileInfo.sourceFile.didContentsChangeOnDisk()) { + sourceFileInfo.sourceFile.markDirty(); + this._markFileDirtyRecursive(sourceFileInfo, new Set()); + } + } + + return this._removeUnneededFiles(); + } + + markAllFilesDirty(evenIfContentsAreSame: boolean) { + const markDirtySet = new Set(); + + this._sourceFileList.forEach((sourceFileInfo) => { + if (evenIfContentsAreSame) { + sourceFileInfo.sourceFile.markDirty(); + } else if (sourceFileInfo.sourceFile.didContentsChangeOnDisk()) { + sourceFileInfo.sourceFile.markDirty(); + + // Mark any files that depend on this file as dirty + // also. This will retrigger analysis of these other files. + this._markFileDirtyRecursive(sourceFileInfo, markDirtySet); + } + }); + + if (markDirtySet.size > 0) { + this._createNewEvaluator(); + } + } + + markFilesDirty(fileUris: Uri[], evenIfContentsAreSame: boolean) { + const markDirtySet = new Set(); + fileUris.forEach((fileUri) => { + const sourceFileInfo = this.getSourceFileInfo(fileUri); + if (sourceFileInfo) { + const fileName = fileUri.fileName; + + // Handle builtins and __builtins__ specially. They are implicitly + // included by all source files. + if (fileName === 'builtins.pyi' || fileName === '__builtins__.pyi') { + this.markAllFilesDirty(evenIfContentsAreSame); + return; + } + + // If !evenIfContentsAreSame, see if the on-disk contents have + // changed. If the file is open, the on-disk contents don't matter + // because we'll receive updates directly from the client. + if ( + evenIfContentsAreSame || + (!sourceFileInfo.isOpenByClient && sourceFileInfo.sourceFile.didContentsChangeOnDisk()) + ) { + sourceFileInfo.sourceFile.markDirty(); + + // Mark any files that depend on this file as dirty + // also. This will retrigger analysis of these other files. 
+ this._markFileDirtyRecursive(sourceFileInfo, markDirtySet); + } + } + }); + + if (markDirtySet.size > 0) { + this._createNewEvaluator(); + } + } + + getFileCount(userFileOnly = true) { + if (userFileOnly) { + return this._sourceFileList.filter((f) => isUserCode(f)).length; + } + + return this._sourceFileList.length; + } + + // Returns the number of files that are considered "user" files and therefore + // are checked. + getUserFileCount() { + return this._sourceFileList.filter((s) => isUserCode(s)).length; + } + + getUserFiles(): SourceFileInfo[] { + return this._sourceFileList.filter((s) => isUserCode(s)); + } + + getOpened(): SourceFileInfo[] { + return this._sourceFileList.filter((s) => s.isOpenByClient); + } + + getOwnedFiles(): SourceFileInfo[] { + return this._sourceFileList.filter((s) => isUserCode(s) && this.owns(s.uri)); + } + + getCheckingRequiredFiles(): SourceFileInfo[] { + return this._sourceFileList.filter( + (s) => s.isOpenByClient && this.owns(s.uri) && s.sourceFile.isCheckingRequired() + ); + } + + getFilesToAnalyzeCount(): RequiringAnalysisCount { + let filesToAnalyzeCount = 0; + let cellsToAnalyzeCount = 0; + + if (this._disableChecker) { + return { files: 0, cells: 0 }; + } + + this._sourceFileList.forEach((fileInfo) => { + const sourceFile = fileInfo.sourceFile; + if (sourceFile.isCheckingRequired()) { + if (this._shouldCheckFile(fileInfo)) { + sourceFile.getIPythonMode() === IPythonMode.CellDocs + ? 
cellsToAnalyzeCount++ + : filesToAnalyzeCount++; + } + } + }); + + return { + files: filesToAnalyzeCount, + cells: cellsToAnalyzeCount, + }; + } + + isCheckingOnlyOpenFiles() { + return this._configOptions.checkOnlyOpenFiles || false; + } + + functionSignatureDisplay() { + return this._configOptions.functionSignatureDisplay; + } + + containsSourceFileIn(folder: Uri): boolean { + for (const normalizedSourceFilePath of this._sourceFileMap.values()) { + if (normalizedSourceFilePath.uri.startsWith(folder)) { + return true; + } + } + + return false; + } + + owns(uri: Uri) { + const fileInfo = this.getSourceFileInfo(uri); + if (fileInfo) { + // If we already determined whether the file is tracked or not, don't do it again. + // This will make sure we have consistent look at the state once it is loaded to the memory. + return fileInfo.isTracked; + } + + return matchFileSpecs(this._configOptions, uri); + } + + getSourceFile(uri: Uri): SourceFile | undefined { + const sourceFileInfo = this.getSourceFileInfo(uri); + if (!sourceFileInfo) { + return undefined; + } + + return sourceFileInfo.sourceFile; + } + + getBoundSourceFile(uri: Uri): SourceFile | undefined { + return this.getBoundSourceFileInfo(uri)?.sourceFile; + } + + getSourceFileInfoList(): readonly SourceFileInfo[] { + return this._sourceFileList; + } + + getSourceFileInfo(uri: Uri): SourceFileInfo | undefined { + if (!uri.isEmpty()) { + return this._sourceFileMap.get(uri.key); + } + return undefined; + } + + getModuleSymbolTable(fileUri: Uri): SymbolTable | undefined { + const sourceFileInfo = this.getSourceFileInfo(fileUri); + if (sourceFileInfo) { + return sourceFileInfo.sourceFile.getModuleSymbolTable(); + } + return undefined; + } + + getBoundSourceFileInfo(uri: Uri, content?: string, force?: boolean): SourceFileInfo | undefined { + const sourceFileInfo = this.getSourceFileInfo(uri); + if (!sourceFileInfo) { + return undefined; + } + + this._bindFile(sourceFileInfo, content, force); + return sourceFileInfo; + } 
+ + // Performs parsing and analysis of any source files in the program + // that require it. If a limit time is specified, the operation + // is interrupted when the time expires. The return value indicates + // whether the method needs to be called again to complete the + // analysis. In interactive mode, the timeout is always limited + // to the smaller value to maintain responsiveness. + analyze(maxTime?: MaxAnalysisTime, token: CancellationToken = CancellationToken.None): boolean { + return this._runEvaluatorWithCancellationToken(token, () => { + const elapsedTime = new Duration(); + + const openFiles = this._sourceFileList.filter( + (sf) => sf.isOpenByClient && sf.sourceFile.isCheckingRequired() + ); + + if (openFiles.length > 0) { + const effectiveMaxTime = maxTime ? maxTime.openFilesTimeInMs : Number.MAX_VALUE; + + // Check the open files. + for (const sourceFileInfo of openFiles) { + if (this._checkTypes(sourceFileInfo)) { + if (elapsedTime.getDurationInMilliseconds() > effectiveMaxTime) { + return true; + } + } + } + + // If the caller specified a maxTime, return at this point + // since we've finalized all open files. We want to get + // the results to the user as quickly as possible. + if (maxTime !== undefined) { + return true; + } + } + + if (!this._configOptions.checkOnlyOpenFiles) { + const effectiveMaxTime = maxTime ? maxTime.noOpenFilesTimeInMs : Number.MAX_VALUE; + + // Now do type parsing and analysis of the remaining. + for (const sourceFileInfo of this._sourceFileList) { + if (!isUserCode(sourceFileInfo)) { + continue; + } + + if (this._checkTypes(sourceFileInfo)) { + if (elapsedTime.getDurationInMilliseconds() > effectiveMaxTime) { + return true; + } + } + } + } + + return false; + }); + } + + // Performs parsing and analysis of a single file in the program. If the file is not part of + // the program returns false to indicate analysis was not performed. 
+ analyzeFile(fileUri: Uri, token: CancellationToken = CancellationToken.None): boolean { + return this._runEvaluatorWithCancellationToken(token, () => { + const sourceFileInfo = this.getSourceFileInfo(fileUri); + if (sourceFileInfo && this._checkTypes(sourceFileInfo, { skipFileNeededCheck: true })) { + return true; + } + return false; + }); + } + + analyzeFileAndGetDiagnostics(fileUri: Uri, token: CancellationToken = CancellationToken.None): Diagnostic[] { + throwIfCancellationRequested(token); + this.analyzeFile(fileUri, token); + throwIfCancellationRequested(token); + const sourceFile = this.getSourceFile(fileUri); + if (!sourceFile) { + return []; + } + return this.getDiagnosticsForRange(fileUri, sourceFile.getRange()); + } + + // This will allow the callback to execute a type evaluator with an associated + // cancellation token and provide a read-only program. + run(callback: (p: ProgramView) => T, token: CancellationToken): T { + return this._runEvaluatorWithCancellationToken(token, () => callback(this)); + } + + // This will allow the callback to execute a type evaluator with an associated + // cancellation token and provide a mutable program. Should already be in edit mode when called. 
+ runEditMode(callback: (v: EditableProgram) => void, token: CancellationToken): void; + runEditMode(callback: (v: EditableProgram) => Promise, token: CancellationToken): Promise; + runEditMode( + callback: (v: EditableProgram) => void | Promise, + token: CancellationToken + ): void | Promise { + if (this._editModeTracker.isEditMode) { + return this._runEvaluatorWithCancellationToken(token, () => callback(this)); + } + } + + getSourceMapper( + fileUri: Uri, + token: CancellationToken, + mapCompiled?: boolean, + preferStubs?: boolean + ): SourceMapper { + const sourceFileInfo = this.getSourceFileInfo(fileUri); + const execEnv = this._configOptions.findExecEnvironment(fileUri); + return this._createSourceMapper(execEnv, token, sourceFileInfo, mapCompiled, preferStubs); + } + + getParserOutput(fileUri: Uri): ParserOutput | undefined { + return this.getBoundSourceFileInfo( + fileUri, + /* content */ undefined, + /* force */ true + )?.sourceFile.getParserOutput(); + } + + getParseResults(fileUri: Uri): ParseFileResults | undefined { + return this.getBoundSourceFileInfo( + fileUri, + /* content */ undefined, + /* force */ true + )?.sourceFile.getParseResults(); + } + + getParseDiagnostics(fileUri: Uri): Diagnostic[] | undefined { + return this.getBoundSourceFileInfo( + fileUri, + /* content */ undefined, + /* force */ true + )?.sourceFile.getParseDiagnostics(); + } + + handleMemoryHighUsage() { + this._handleMemoryHighUsage(); + } + + // Prints a detailed list of files that have been checked and the times associated + // with each of them, sorted greatest to least. + printDetailedAnalysisTimes() { + const sortedFiles = this._sourceFileList + .filter((s) => s.sourceFile.getCheckTime() !== undefined) + .sort((a, b) => { + return b.sourceFile.getCheckTime()! 
- a.sourceFile.getCheckTime()!; + }); + + this._console.info(''); + this._console.info('Analysis time by file'); + + sortedFiles.forEach((sfInfo) => { + const checkTimeInMs = sfInfo.sourceFile.getCheckTime()!; + this._console.info(`${checkTimeInMs}ms: ${sfInfo.uri}`); + }); + } + + // Prints import dependency information for each of the files in + // the program, skipping any typeshed files. + printDependencies(projectRootDir: Uri, verbose: boolean) { + const fs = this._importResolver.fileSystem; + const sortedFiles = this._sourceFileList + .filter((s) => !s.isTypeshedFile) + .sort((a, b) => { + return fs.getOriginalUri(a.uri) < fs.getOriginalUri(b.uri) ? 1 : -1; + }); + + const zeroImportFiles: SourceFile[] = []; + + sortedFiles.forEach((sfInfo) => { + this._console.info(''); + const fileUri = fs.getOriginalUri(sfInfo.uri); + let fileString = fileUri.toString(); + const relPath = projectRootDir.getRelativePathComponents(fileUri); + if (relPath) { + fileString = relPath.join('/'); + } + + this._console.info(`${fileString}`); + + this._console.info( + ` Imports ${sfInfo.imports.length} ` + `file${sfInfo.imports.length === 1 ? '' : 's'}` + ); + if (verbose) { + sfInfo.imports.forEach((importInfo) => { + this._console.info(` ${fs.getOriginalUri(importInfo.uri)}`); + }); + } + + this._console.info( + ` Imported by ${sfInfo.importedBy.length} ` + `file${sfInfo.importedBy.length === 1 ? '' : 's'}` + ); + if (verbose) { + sfInfo.importedBy.forEach((importInfo) => { + this._console.info(` ${fs.getOriginalUri(importInfo.uri)}`); + }); + } + + if (sfInfo.importedBy.length === 0) { + zeroImportFiles.push(sfInfo.sourceFile); + } + }); + + if (zeroImportFiles.length > 0) { + this._console.info(''); + this._console.info( + `${zeroImportFiles.length} file${zeroImportFiles.length === 1 ? 
'' : 's'}` + ` not explicitly imported` + ); + zeroImportFiles.forEach((importFile) => { + this._console.info(` ${fs.getOriginalUri(importFile.getUri())}`); + }); + } + } + + writeTypeStub(targetImportPath: Uri, targetIsSingleFile: boolean, stubPath: Uri, token: CancellationToken) { + for (const sourceFileInfo of this._sourceFileList) { + throwIfCancellationRequested(token); + + const fileUri = sourceFileInfo.uri; + + // Generate type stubs only for the files within the target path, + // not any files that the target module happened to import. + const relativePath = targetImportPath.getRelativePath(fileUri); + if (relativePath !== undefined) { + let typeStubPath = stubPath.resolvePaths(relativePath); + + // If the target is a single file implementation, as opposed to + // a package in a directory, transform the name of the type stub + // to __init__.pyi because we're placing it in a directory. + if (targetIsSingleFile) { + typeStubPath = typeStubPath.getDirectory().initPyiUri; + } else { + typeStubPath = typeStubPath.replaceExtension('.pyi'); + } + + const typeStubDir = typeStubPath.getDirectory(); + + try { + makeDirectories(this.fileSystem, typeStubDir, stubPath); + } catch (e: any) { + const errMsg = `Could not create directory for '${typeStubDir}'`; + throw new Error(errMsg); + } + + this._bindFile(sourceFileInfo); + + this._runEvaluatorWithCancellationToken(token, () => { + const writer = new TypeStubWriter(typeStubPath, sourceFileInfo.sourceFile, this._evaluator!); + writer.write(); + }); + + // This operation can consume significant memory, so check + // for situations where we need to discard the type cache. 
+ this._handleMemoryHighUsage(); + } + } + } + + getTypeOfSymbol(symbol: Symbol) { + this._handleMemoryHighUsage(); + + const evaluator = this._evaluator || this._createNewEvaluator(); + return evaluator.getEffectiveTypeOfSymbol(symbol); + } + + printType(type: Type, options?: PrintTypeOptions): string { + this._handleMemoryHighUsage(); + + const evaluator = this._evaluator || this._createNewEvaluator(); + return evaluator.printType(type, options); + } + + getTextOnRange(fileUri: Uri, range: Range, token: CancellationToken): string | undefined { + const sourceFileInfo = this.getSourceFileInfo(fileUri); + if (!sourceFileInfo) { + return undefined; + } + + const sourceFile = sourceFileInfo.sourceFile; + const fileContents = sourceFile.getOpenFileContents(); + if (fileContents === undefined) { + // this only works with opened file + return undefined; + } + + return this._runEvaluatorWithCancellationToken(token, () => { + this._parseFile(sourceFileInfo); + + const parseResults = sourceFile.getParseResults()!; + const textRange = convertRangeToTextRange(range, parseResults.tokenizerOutput.lines); + if (!textRange) { + return undefined; + } + + return fileContents.substr(textRange.start, textRange.length); + }); + } + + getDiagnostics(options: ConfigOptions, reportDeltasOnly = true): FileDiagnostics[] { + const fileDiagnostics: FileDiagnostics[] = this._removeUnneededFiles(); + + this._sourceFileList.forEach((sourceFileInfo) => { + if (this._shouldCheckFile(sourceFileInfo)) { + let diagnostics = sourceFileInfo.sourceFile.getDiagnostics( + options, + reportDeltasOnly ? sourceFileInfo.diagnosticsVersion : undefined + ); + + if (diagnostics !== undefined) { + // Filter out all categories that are translated to tagged hints? 
+ if (options.disableTaggedHints) { + diagnostics = diagnostics.filter((diag) => !isTaggedHintDiagnostic(diag)); + } + + fileDiagnostics.push({ + fileUri: sourceFileInfo.uri, + version: sourceFileInfo.sourceFile.getClientVersion(), + diagnostics, + }); + + // Update the cached diagnosticsVersion so we can determine + // whether there are any updates next time we call getDiagnostics. + sourceFileInfo.diagnosticsVersion = sourceFileInfo.sourceFile.getDiagnosticVersion(); + } + } else if ( + !sourceFileInfo.isOpenByClient && + options.checkOnlyOpenFiles && + sourceFileInfo.diagnosticsVersion !== undefined + ) { + // This condition occurs when the user switches from workspace to + // "open files only" mode. Clear all diagnostics for this file. + fileDiagnostics.push({ + fileUri: sourceFileInfo.uri, + version: sourceFileInfo.sourceFile.getClientVersion(), + diagnostics: [], + }); + sourceFileInfo.diagnosticsVersion = undefined; + } + }); + + return fileDiagnostics; + } + + getDiagnosticsForRange(fileUri: Uri, range: Range): Diagnostic[] { + const sourceFile = this.getSourceFile(fileUri); + if (!sourceFile) { + return []; + } + + const unfilteredDiagnostics = sourceFile.getDiagnostics(this._configOptions); + if (!unfilteredDiagnostics) { + return []; + } + + return unfilteredDiagnostics.filter((diag) => { + if (!doRangesIntersect(diag.range, range)) { + return false; + } + + // Filter out all categories that are translated to tagged hints? + if (this._configOptions.disableTaggedHints && isTaggedHintDiagnostic(diag)) { + return false; + } + + return true; + }); + } + + clone() { + const program = new Program( + this._importResolver, + this._configOptions, + this.serviceProvider, + new LogTracker(this._console, 'Cloned'), + this._disableChecker + ); + + // Cloned program will use whatever user files the program currently has. 
+ const userFiles = this.getUserFiles(); + program.setTrackedFiles(userFiles.map((i) => i.uri)); + program.markAllFilesDirty(/* evenIfContentsAreSame */ true); + + // Make sure we keep editor content (open file) which could be different than one in the file system. + for (const fileInfo of this.getOpened()) { + const version = fileInfo.sourceFile.getClientVersion(); + if (version === undefined) { + continue; + } + + program.setFileOpened(fileInfo.uri, version, fileInfo.sourceFile.getOpenFileContents() ?? '', { + chainedFileUri: fileInfo.chainedSourceFile?.uri, + ipythonMode: fileInfo.ipythonMode, + isTracked: fileInfo.isTracked, + }); + } + + return program; + } + + // Returns a value from 0 to 1 (or more) indicating how "full" the cache is + // relative to some predetermined high-water mark. We'll compute this value + // based on two easy-to-compute metrics: the number of entries in the type + // cache and the number of parsed files. + getCacheUsage() { + const typeCacheEntryCount = this._evaluator!.getTypeCacheEntryCount(); + const entryCountRatio = typeCacheEntryCount / 750000; + const fileCountRatio = this._parsedFileCount / 1000; + + return Math.max(entryCountRatio, fileCountRatio); + } + + // Discards any cached information associated with this program. + emptyCache() { + this._createNewEvaluator(); + this._discardCachedParseResults(); + this._parsedFileCount = 0; + + this.serviceProvider.tryGet(ServiceKeys.stateMutationListeners)?.forEach((l) => l.onClearCache?.()); + } + + bindShadowFile(stubFileUri: Uri, shadowFile: Uri): SourceFile | undefined { + let stubFileInfo = this.getSourceFileInfo(stubFileUri); + if (!stubFileInfo) { + // make sure uri exits before adding interimFile + if (!this.fileSystem.existsSync(stubFileUri)) { + return undefined; + } + + // Special case for import statement like "import X.Y". The SourceFile + // for X might not be in memory since import `X.Y` only brings in Y. 
+ stubFileInfo = this.addInterimFile(stubFileUri); + } + + this._addShadowedFile(stubFileInfo, shadowFile); + return this.getBoundSourceFile(shadowFile); + } + + protected disposeInternal(disposed: boolean) { + // Empty + } + + protected createNewEvaluatorInternal() { + // Empty + } + + private _handleMemoryHighUsage() { + const cacheUsage = this._cacheManager.getCacheUsage(); + const usedHeapRatio = this._cacheManager.getUsedHeapRatio( + this._configOptions.verboseOutput ? this._console : undefined + ); + + const heapRatioHighWaterMark = 0.9; + + // If the total cache has exceeded 75%, determine whether we should empty + // the cache. If the usedHeapRatio has exceeded our high-water mark, we should + // definitely empty the cache. This can happen before the cacheUsage maxes out because + // we might be on the background thread and a bunch of the cacheUsage is on the main + // thread. + if (cacheUsage > 0.75 || usedHeapRatio > heapRatioHighWaterMark) { + // The type cache uses a Map, which has an absolute limit of 2^24 entries + // before it will fail. If we cross the 90% mark, we'll empty the cache. + const absoluteMaxCacheEntryCount = (1 << 24) * 0.9; + const typeCacheEntryCount = this._evaluator!.getTypeCacheEntryCount(); + + if (typeCacheEntryCount > absoluteMaxCacheEntryCount || usedHeapRatio > heapRatioHighWaterMark) { + this._cacheManager.emptyCache(this._console); + } + } + } + + // Discards all cached parse results and file contents to free up memory. + // It does not discard cached index results or diagnostics for files. + private _discardCachedParseResults() { + for (const sourceFileInfo of this._sourceFileList) { + sourceFileInfo.sourceFile.dropParseAndBindInfo(); + } + } + + // Wrapper function that should be used when invoking this._evaluator + // with a cancellation token. It handles cancellation exceptions and + // any other unexpected exceptions. 
+ private _runEvaluatorWithCancellationToken(token: CancellationToken | undefined, callback: () => T): T; + private _runEvaluatorWithCancellationToken( + token: CancellationToken | undefined, + callback: () => Promise + ): Promise; + private _runEvaluatorWithCancellationToken( + token: CancellationToken | undefined, + callback: () => T | Promise + ): T | Promise { + try { + const result = token ? this._evaluator!.runWithCancellationToken(token, callback) : callback(); + if (!isThenable(result)) { + return result; + } + + return result.catch((e) => { + if ( + !OperationCanceledException.is(e) || + e.isTypeCacheInvalid || + e.code === LSPErrorCodes.ServerCancelled + ) { + this._createNewEvaluator(); + } + + throw e; + }); + } catch (e: any) { + // An unexpected exception occurred, potentially leaving the current evaluator + // in an inconsistent state. Discard it and replace it with a fresh one. It is + // Cancellation exceptions are known to handle this correctly. + if (!OperationCanceledException.is(e) || e.isTypeCacheInvalid || e.code === LSPErrorCodes.ServerCancelled) { + this._createNewEvaluator(); + } + throw e; + } + } + + // Returns a list of empty file diagnostic entries for the files + // that have been removed. This is needed to clear out the + // errors for files that have been deleted or closed. + private _removeUnneededFiles(): FileDiagnostics[] { + const fileDiagnostics: FileDiagnostics[] = []; + + // If a file is no longer tracked, opened or shadowed, it can + // be removed from the program. + for (let i = 0; i < this._sourceFileList.length; ) { + const fileInfo = this._sourceFileList[i]; + if (!this._isFileNeeded(fileInfo)) { + // Clear only if there are any errors for this file. 
+ if (fileInfo.diagnosticsVersion !== undefined) { + fileDiagnostics.push({ + fileUri: fileInfo.uri, + version: fileInfo.sourceFile.getClientVersion(), + diagnostics: [], + }); + } + + fileInfo.sourceFile.prepareForClose(); + this._removeSourceFileFromListAndMap(fileInfo.uri, i); + + // Unlink any imports and remove them from the list if + // they are no longer referenced. + fileInfo.imports.forEach((importedFile) => { + const indexToRemove = importedFile.importedBy.findIndex((fi) => fi === fileInfo); + if (indexToRemove < 0) { + return; + } + + importedFile.mutate((s) => s.importedBy.splice(indexToRemove, 1)); + + // See if we need to remove the imported file because it + // is no longer needed. If its index is >= i, it will be + // removed when we get to it. + if (!this._isFileNeeded(importedFile)) { + const indexToRemove = this._sourceFileList.findIndex((fi) => fi === importedFile); + if (indexToRemove >= 0 && indexToRemove < i) { + // Clear if there are any errors for this import. + if (importedFile.diagnosticsVersion !== undefined) { + fileDiagnostics.push({ + fileUri: importedFile.uri, + version: importedFile.sourceFile.getClientVersion(), + diagnostics: [], + }); + } + + importedFile.sourceFile.prepareForClose(); + this._removeSourceFileFromListAndMap(importedFile.uri, indexToRemove); + i--; + } + } + }); + + // Remove any shadowed files corresponding to this file. + fileInfo.shadowedBy.forEach((shadowedFile) => { + shadowedFile.mutate((s) => (s.shadows = s.shadows.filter((f) => f !== fileInfo))); + }); + fileInfo.mutate((s) => (s.shadowedBy = [])); + } else { + // If we're showing the user errors only for open files, clear + // out the errors for the now-closed file. 
+ if (!this._shouldCheckFile(fileInfo) && fileInfo.diagnosticsVersion !== undefined) { + fileDiagnostics.push({ + fileUri: fileInfo.uri, + version: fileInfo.sourceFile.getClientVersion(), + diagnostics: [], + }); + fileInfo.diagnosticsVersion = undefined; + } + + i++; + } + } + + return fileDiagnostics; + } + + private _isFileNeeded(fileInfo: SourceFileInfo, skipFileNeededCheck?: boolean) { + if (fileInfo.sourceFile.isFileDeleted()) { + return false; + } + + if (!!skipFileNeededCheck || fileInfo.isTracked || fileInfo.isOpenByClient) { + return true; + } + + if (fileInfo.shadows.length > 0) { + return true; + } + + if (fileInfo.importedBy.length === 0) { + return false; + } + + // It's possible for a cycle of files to be imported + // by a tracked file but then abandoned. The import cycle + // will keep the entire group "alive" if we don't detect + // the condition and garbage collect them. + return this._isImportNeededRecursive(fileInfo, new Set()); + } + + private _isImportNeededRecursive(fileInfo: SourceFileInfo, recursionSet: Set) { + if (fileInfo.isTracked || fileInfo.isOpenByClient || fileInfo.shadows.length > 0) { + return true; + } + + const fileUri = fileInfo.uri; + + // Avoid infinite recursion. 
+ if (recursionSet.has(fileUri.key)) { + return false; + } + + recursionSet.add(fileUri.key); + + for (const importerInfo of fileInfo.importedBy) { + if (this._isImportNeededRecursive(importerInfo, recursionSet)) { + return true; + } + } + + return false; + } + + private _createSourceMapper( + execEnv: ExecutionEnvironment, + token: CancellationToken, + from?: SourceFileInfo, + mapCompiled?: boolean, + preferStubs?: boolean + ) { + const sourceMapper = new SourceMapper( + this._importResolver, + execEnv, + this._evaluator!, + (stubFileUri: Uri, implFileUri: Uri) => this.bindShadowFile(stubFileUri, implFileUri), + (f) => { + let fileInfo = this.getBoundSourceFileInfo(f); + if (!fileInfo) { + // make sure uri exits before adding interimFile + if (!this.fileSystem.existsSync(f)) { + return undefined; + } + + // Special case for import statement like "import X.Y". The SourceFile + // for X might not be in memory since import `X.Y` only brings in Y. + fileInfo = this.addInterimFile(f); + + // Even though this file is not referenced by anything, make sure + // we have a parse tree for the doc string. + this._parseFile(fileInfo, /* content */ undefined, /* force */ true); + } + + return fileInfo; + }, + mapCompiled ?? false, + preferStubs ?? false, + from, + token + ); + return sourceMapper; + } + + private _isImportAllowed(importer: SourceFileInfo, importResult: ImportResult, isImportStubFile: boolean): boolean { + // Don't import native libs. We don't want to track these files, + // and we definitely don't want to attempt to parse them. 
+ if (importResult.isNativeLib) { + return false; + } + + let thirdPartyImportAllowed = + this._configOptions.useLibraryCodeForTypes || + (importResult.importType === ImportType.ThirdParty && !!importResult.pyTypedInfo) || + (importResult.importType === ImportType.Local && importer.isThirdPartyPyTypedPresent); + + if ( + importResult.importType === ImportType.ThirdParty || + (importer.isThirdPartyImport && importResult.importType === ImportType.Local) + ) { + if (this._allowedThirdPartyImports) { + if (importResult.isRelative) { + // If it's a relative import, we'll allow it because the + // importer was already deemed to be allowed. + thirdPartyImportAllowed = true; + } else if ( + this._allowedThirdPartyImports.some((importName: string) => { + // If this import name is the one that was explicitly + // allowed or is a child of that import name, + // it's considered allowed. + if (importResult.importName === importName) { + return true; + } + + if (importResult.importName.startsWith(importName + '.')) { + return true; + } + + return false; + }) + ) { + thirdPartyImportAllowed = true; + } + } else if (importer.isThirdPartyImport && this._configOptions.useLibraryCodeForTypes) { + // If the importing file is a third-party import, allow importing of + // additional third-party imports. This supports the case where the importer + // is in a py.typed library but is importing from another non-py.typed + // library. It also supports the case where someone explicitly opens a + // library source file in their editor. + thirdPartyImportAllowed = true; + } else if ( + importResult.isNamespacePackage && + importResult.filteredImplicitImports && + Array.from(importResult.filteredImplicitImports.values()).some( + (implicitImport) => !!implicitImport.pyTypedInfo + ) + ) { + // Handle the case where the import targets a namespace package, and a + // submodule contained within it has a py.typed marker. 
+ thirdPartyImportAllowed = true; + } + + // Some libraries ship with stub files that import from non-stubs. Don't + // explore those. + // Don't explore any third-party files unless they're type stub files + // or we've been told explicitly that third-party imports are OK. + if (!isImportStubFile) { + return thirdPartyImportAllowed; + } + } + + return true; + } + + private _getSourceFileInfoFromKey(key: string) { + return this._sourceFileMap.get(key); + } + + private _updateSourceFileImports(sourceFileInfo: SourceFileInfo, options: ConfigOptions): SourceFileInfo[] { + const filesAdded: SourceFileInfo[] = []; + + // Get the new list of imports and see if it changed from the last + // list of imports for this file. + const imports = sourceFileInfo.sourceFile.getImports(); + + // Create a local function that determines whether the import should + // be considered a "third-party import" and whether it is coming from + // a third-party package that claims to be typed. An import is + // considered third-party if it is external to the importer + // or is internal but the importer is itself a third-party package. + const getThirdPartyImportInfo = (importResult: ImportResult) => { + let isThirdPartyImport = false; + let isPyTypedPresent = false; + + if (importResult.importType === ImportType.ThirdParty) { + isThirdPartyImport = true; + if (importResult.pyTypedInfo) { + isPyTypedPresent = true; + } + } else if (sourceFileInfo.isThirdPartyImport && importResult.importType === ImportType.Local) { + isThirdPartyImport = true; + if (sourceFileInfo.isThirdPartyPyTypedPresent) { + isPyTypedPresent = true; + } + } + + return { + isThirdPartyImport, + isPyTypedPresent, + }; + }; + + // Create a map of unique imports, since imports can appear more than once. + const newImportPathMap = new Map(); + + // Add chained source file as import if it exists. 
+ if (sourceFileInfo.chainedSourceFile) { + if (sourceFileInfo.chainedSourceFile.sourceFile.isFileDeleted()) { + sourceFileInfo.chainedSourceFile = undefined; + } else { + const fileUri = sourceFileInfo.chainedSourceFile.uri; + newImportPathMap.set(fileUri.key, { + path: fileUri, + isTypeshedFile: false, + isThirdPartyImport: false, + isPyTypedPresent: false, + }); + } + } + + imports.forEach((importResult) => { + if (importResult.isImportFound) { + if (this._isImportAllowed(sourceFileInfo, importResult, importResult.isStubFile)) { + if (importResult.resolvedUris.length > 0) { + const fileUri = importResult.resolvedUris[importResult.resolvedUris.length - 1]; + if (!fileUri.isEmpty()) { + const thirdPartyTypeInfo = getThirdPartyImportInfo(importResult); + newImportPathMap.set(fileUri.key, { + path: fileUri, + isTypeshedFile: + !!importResult.isStdlibTypeshedFile || !!importResult.isThirdPartyTypeshedFile, + isThirdPartyImport: thirdPartyTypeInfo.isThirdPartyImport, + isPyTypedPresent: thirdPartyTypeInfo.isPyTypedPresent, + }); + } + } + } + + importResult.filteredImplicitImports?.forEach((implicitImport) => { + if (this._isImportAllowed(sourceFileInfo, importResult, implicitImport.isStubFile)) { + if (!implicitImport.isNativeLib) { + const thirdPartyTypeInfo = getThirdPartyImportInfo(importResult); + newImportPathMap.set(implicitImport.uri.key, { + path: implicitImport.uri, + isTypeshedFile: + !!importResult.isStdlibTypeshedFile || !!importResult.isThirdPartyTypeshedFile, + isThirdPartyImport: thirdPartyTypeInfo.isThirdPartyImport, + isPyTypedPresent: thirdPartyTypeInfo.isPyTypedPresent, + }); + } + } + }); + + // If the stub was found but the non-stub (source) file was not, dump + // the failure to the log for diagnostic purposes. 
+ if (importResult.nonStubImportResult && !importResult.nonStubImportResult.isImportFound) { + // We'll skip this for imports from within stub files and imports that target + // stdlib typeshed stubs because many of these are known to not have + // associated source files, and we don't want to fill the logs with noise. + if (!sourceFileInfo.sourceFile.isStubFile() && !importResult.isStdlibTypeshedFile) { + if (options.verboseOutput) { + this._console.info( + `Could not resolve source for '${importResult.importName}' ` + + `in file '${sourceFileInfo.uri.toUserVisibleString()}'` + ); + + if (importResult.nonStubImportResult.importFailureInfo) { + importResult.nonStubImportResult.importFailureInfo.forEach((diag) => { + this._console.info(` ${diag}`); + }); + } + } + } + } + } else if (options.verboseOutput) { + this._console.info( + `Could not import '${importResult.importName}' ` + + `in file '${sourceFileInfo.uri.toUserVisibleString()}'` + ); + if (importResult.importFailureInfo) { + importResult.importFailureInfo.forEach((diag) => { + this._console.info(` ${diag}`); + }); + } + } + }); + + const updatedImportMap = new Map(); + sourceFileInfo.imports.forEach((importInfo) => { + const oldFilePath = importInfo.uri; + + // A previous import was removed. + if (!newImportPathMap.has(oldFilePath.key)) { + importInfo.mutate((s) => { + s.importedBy = s.importedBy.filter((fi) => !fi.uri.equals(sourceFileInfo.uri)); + }); + } else { + updatedImportMap.set(oldFilePath.key, importInfo); + } + }); + + // See if there are any new imports to be added. + newImportPathMap.forEach((importInfo, normalizedImportPath) => { + if (!updatedImportMap.has(normalizedImportPath)) { + // We found a new import to add. See if it's already part + // of the program. 
+ let importedFileInfo = this.getSourceFileInfo(importInfo.path); + if (!importedFileInfo) { + const sourceFile = this._sourceFileFactory.createSourceFile( + this.serviceProvider, + importInfo.path, + (uri) => this._getModuleName(uri), + importInfo.isThirdPartyImport, + importInfo.isPyTypedPresent, + this._editModeTracker, + this._console, + this._logTracker + ); + importedFileInfo = new SourceFileInfo( + sourceFile, + importInfo.isTypeshedFile, + importInfo.isThirdPartyImport, + importInfo.isPyTypedPresent, + this._editModeTracker + ); + + this._addToSourceFileListAndMap(importedFileInfo); + filesAdded.push(importedFileInfo); + } + + importedFileInfo.mutate((s) => s.importedBy.push(sourceFileInfo)); + updatedImportMap.set(normalizedImportPath, importedFileInfo); + } + }); + + // Update the imports list. It should now map the set of imports + // specified by the source file. + const newImports: SourceFileInfo[] = []; + newImportPathMap.forEach((_, key) => { + const newImport = this._getSourceFileInfoFromKey(key); + if (newImport) { + newImports.push(newImport); + } + }); + + // Mutate only when necessary to avoid extra binding operations. + if ( + newImports.length !== sourceFileInfo.imports.length || + !newImports.every((i) => sourceFileInfo.imports.includes(i)) + ) { + sourceFileInfo.mutate((s) => (s.imports = newImports)); + } + + // Resolve the builtins import for the file. This needs to be + // analyzed before the file can be analyzed. 
+ sourceFileInfo.builtinsImport = undefined; + const builtinsImport = sourceFileInfo.sourceFile.getBuiltinsImport(); + if (builtinsImport && builtinsImport.isImportFound) { + const resolvedBuiltinsPath = builtinsImport.resolvedUris[builtinsImport.resolvedUris.length - 1]; + sourceFileInfo.builtinsImport = this.getSourceFileInfo(resolvedBuiltinsPath); + } + + return filesAdded; + } + + private _removeSourceFileFromListAndMap(fileUri: Uri, indexToRemove: number) { + this._sourceFileMap.delete(fileUri.key); + this._sourceFileList.splice(indexToRemove, 1); + } + + private _addToSourceFileListAndMap(fileInfo: SourceFileInfo) { + const fileUri = fileInfo.uri; + + // We should never add a file with the same path twice. + assert(!this._sourceFileMap.has(fileUri.key)); + + // We should never have an empty URI for a source file. + assert(!fileUri.isEmpty()); + + this._sourceFileList.push(fileInfo); + this._sourceFileMap.set(fileUri.key, fileInfo); + } + + private _getModuleName(fileUri: Uri): string { + const moduleInfo = this._getModuleImportInfoForFile(fileUri); + return moduleInfo.moduleName; + } + + private _getModuleImportInfoForFile(fileUri: Uri) { + // We allow illegal module names (e.g. names that include "-" in them) + // because we want a unique name for each module even if it cannot be + // imported through an "import" statement. It's important to have a + // unique name in case two modules declare types with the same local + // name. The type checker uses the fully-qualified (unique) module name + // to differentiate between such types. + const moduleNameAndType = this._importResolver.getModuleNameForImport( + fileUri, + this._configOptions.getDefaultExecEnvironment(), + /* allowIllegalModuleName */ true, + /* detectPyTyped */ true + ); + + return moduleNameAndType; + } + + // A "shadowed" file is a python source file that has been added to the program because + // it "shadows" a type stub file for purposes of finding doc strings and definitions. 
+ // We need to track the relationship so if the original type stub is removed from the + // program, we can remove the corresponding shadowed file and any files it imports. + private _addShadowedFile(stubFile: SourceFileInfo, shadowImplPath: Uri): SourceFile { + let shadowFileInfo = this.getSourceFileInfo(shadowImplPath); + + if (!shadowFileInfo) { + shadowFileInfo = this.addInterimFile(shadowImplPath); + } + + if (!shadowFileInfo.shadows.includes(stubFile)) { + shadowFileInfo.mutate((s) => s.shadows.push(stubFile)); + } + + if (!stubFile.shadowedBy.includes(shadowFileInfo)) { + stubFile.mutate((s) => s.shadowedBy.push(shadowFileInfo!)); + } + + return shadowFileInfo.sourceFile; + } + + private _createInterimFileInfo(fileUri: Uri) { + const moduleImportInfo = this._getModuleImportInfoForFile(fileUri); + const sourceFile = this._sourceFileFactory.createSourceFile( + this.serviceProvider, + fileUri, + (uri) => this._getModuleName(uri), + /* isThirdPartyImport */ false, + moduleImportInfo.isThirdPartyPyTypedPresent, + this._editModeTracker, + this._console, + this._logTracker + ); + const sourceFileInfo = new SourceFileInfo( + sourceFile, + moduleImportInfo.isTypeshedFile, + /* isThirdPartyImport */ false, + moduleImportInfo.isThirdPartyPyTypedPresent, + this._editModeTracker + ); + + return sourceFileInfo; + } + + private _createNewEvaluator() { + this.createNewEvaluatorInternal(); + + if (this._evaluator) { + // We shouldn't need to call this, but there appears to be a bug + // in the v8 garbage collector where it's unable to resolve orphaned + // objects without us giving it some assistance. 
+ this._evaluator.disposeEvaluator(); + } + + this._evaluator = createTypeEvaluatorWithTracker( + this._lookUpImport, + { + printTypeFlags: getPrintTypeFlags(this._configOptions), + logCalls: this._configOptions.logTypeEvaluationTime, + minimumLoggingThreshold: this._configOptions.typeEvaluationTimeThreshold, + evaluateUnknownImportsAsAny: !!this._configOptions.evaluateUnknownImportsAsAny, + verifyTypeCacheEvaluatorFlags: !!this._configOptions.internalTestMode, + }, + this._logTracker, + this._configOptions.logTypeEvaluationTime + ? createTracePrinter( + this._importResolver.getImportRoots( + this._configOptions.findExecEnvironment(this._configOptions.projectRoot) + ) + ) + : undefined + ); + + return this._evaluator; + } + + private _parseFile(fileToParse: SourceFileInfo, content?: string, skipFileNeededCheck?: boolean) { + if (!this._isFileNeeded(fileToParse, skipFileNeededCheck) || !fileToParse.sourceFile.isParseRequired()) { + return; + } + + // SourceFile.parse should only be called here in the program, as calling it + // elsewhere could break the entire dependency graph maintained by the program. + // Other parts of the program should use _parseFile to create ParseResults from + // the sourceFile. For standalone parseResults, use parseFile or the Parser directly. + if (fileToParse.sourceFile.parse(this._configOptions, this._importResolver, content)) { + this._parsedFileCount++; + this._updateSourceFileImports(fileToParse, this._configOptions); + } + + if (fileToParse.sourceFile.isFileDeleted()) { + fileToParse.isTracked = false; + + // Mark any files that depend on this file as dirty + // also. This will retrigger analysis of these other files. + const markDirtySet = new Set(); + this._markFileDirtyRecursive(fileToParse, markDirtySet); + + // Invalidate the import resolver's cache as well. 
+ this._importResolver.invalidateCache(); + } + } + + private _getImplicitImports(file: SourceFileInfo) { + // If file is builtins.pyi, then chainedSourceFile might not exist or be incorrect. + if (file.builtinsImport === file) { + return undefined; + } + + if (file.chainedSourceFile && !file.chainedSourceFile.sourceFile.isFileDeleted()) { + return file.chainedSourceFile; + } + + return file.builtinsImport; + } + + private _bindImplicitImports(fileToAnalyze: SourceFileInfo, skipFileNeededCheck?: boolean) { + // Get all of the potential imports for this file. + const implicitImports: SourceFileInfo[] = []; + const implicitSet = new Set(); + + let nextImplicitImport = this._getImplicitImports(fileToAnalyze); + while (nextImplicitImport) { + const implicitPath = nextImplicitImport.uri; + + if (implicitSet.has(implicitPath.key)) { + // We've found a cycle. Break out of the loop. + debug.fail( + this.serviceProvider + .tryGet(ServiceKeys.debugInfoInspector) + ?.getCycleDetail(this, nextImplicitImport) ?? `Found a cycle in implicit imports files` + ); + } + + implicitSet.add(implicitPath.key); + implicitImports.push(nextImplicitImport); + + this._parseFile(nextImplicitImport, /* content */ undefined, skipFileNeededCheck); + nextImplicitImport = this._getImplicitImports(nextImplicitImport); + } + + if (implicitImports.length === 0) { + return; + } + + // Reverse order, so top of chain is first. + let implicitImport = implicitImports.pop(); + while (implicitImport) { + // Bind this file but don't recurse into its imports. + this._bindFile(implicitImport, /* content */ undefined, skipFileNeededCheck, /* isImplicitImport */ true); + implicitImport = implicitImports.pop(); + } + } + + // Binds the specified file. Returns true if the file was bound or it + // didn't need to be bound. 
+ private _bindFile( + fileToBind: SourceFileInfo, + content?: string, + skipFileNeededCheck = false, + isImplicitImport = false + ): boolean { + if (!this._isFileNeeded(fileToBind, skipFileNeededCheck) || !fileToBind.sourceFile.isBindingRequired()) { + return !fileToBind.sourceFile.isBindingRequired(); + } + + this._parseFile(fileToBind, content, skipFileNeededCheck); + + // Create a function to get the scope info. + const getScopeIfAvailable = (fileInfo: SourceFileInfo | undefined) => { + if (!fileInfo || fileInfo === fileToBind) { + return undefined; + } + + // If the file was deleted, there's no scope to return. + if (fileInfo.sourceFile.isFileDeleted()) { + return undefined; + } + + const parseResults = fileInfo.sourceFile.getParserOutput(); + if (!parseResults) { + return undefined; + } + + // File should already be bound because of the chained file binding above. + const scope = AnalyzerNodeInfo.getScope(parseResults.parseTree); + return scope; + }; + + let builtinsScope: Scope | undefined; + if (fileToBind.builtinsImport && fileToBind.builtinsImport !== fileToBind) { + // Bind all of the implicit imports first. So we don't recurse into them. + if (!isImplicitImport) { + this._bindImplicitImports(fileToBind); + + // Binding the implicit imports may indirectly cause the current file to be bound. + // If so, return now to avoid "Bind called unnecessarily" assert in sourceFile.bind(). + if (!fileToBind.sourceFile.isBindingRequired()) { + return true; + } + } + + // If it is not builtin module itself, we need to parse and bind + // the builtin module. + builtinsScope = + getScopeIfAvailable(fileToBind.chainedSourceFile) ?? getScopeIfAvailable(fileToBind.builtinsImport); + } + + if (fileToBind.sourceFile.isParseRequired()) { + // Ensure the file is parsed before binding. 
+ this._parseFile(fileToBind, content, skipFileNeededCheck); + } + + let futureImports = fileToBind.sourceFile.getParserOutput()!.futureImports; + if (fileToBind.chainedSourceFile) { + futureImports = this._getEffectiveFutureImports(futureImports, fileToBind.chainedSourceFile); + } + fileToBind.effectiveFutureImports = futureImports.size > 0 ? futureImports : undefined; + + fileToBind.sourceFile.bind(this._configOptions, this._lookUpImport, builtinsScope, futureImports); + return true; + } + + private _getEffectiveFutureImports(futureImports: Set, chainedSourceFile: SourceFileInfo): Set { + const effectiveFutureImports = new Set(futureImports); + + chainedSourceFile.effectiveFutureImports?.forEach((value) => { + effectiveFutureImports.add(value); + }); + + return effectiveFutureImports; + } + + private _lookUpImport = ( + fileUriOrModule: Uri | AbsoluteModuleDescriptor, + options?: LookupImportOptions + ): ImportLookupResult | undefined => { + let sourceFileInfo: SourceFileInfo | undefined; + + if (Uri.is(fileUriOrModule)) { + sourceFileInfo = this.getSourceFileInfo(fileUriOrModule); + } else { + // Resolve the import. + const importResult = this._importResolver.resolveImport( + fileUriOrModule.importingFileUri, + this._configOptions.findExecEnvironment(fileUriOrModule.importingFileUri), + { + leadingDots: 0, + nameParts: fileUriOrModule.nameParts, + importedSymbols: undefined, + } + ); + + if (importResult.isImportFound && !importResult.isNativeLib && importResult.resolvedUris.length > 0) { + const resolvedPath = importResult.resolvedUris[importResult.resolvedUris.length - 1]; + if (!resolvedPath.isEmpty()) { + // See if the source file already exists in the program. + sourceFileInfo = this.getSourceFileInfo(resolvedPath); + + if (!sourceFileInfo) { + // Start tracking the source file. 
+ this.addTrackedFile(resolvedPath); + sourceFileInfo = this.getSourceFileInfo(resolvedPath); + } + } + } + } + + if (!sourceFileInfo) { + return undefined; + } + + if (options?.skipParsing) { + // Return dummy information if the caller has indicated that parsing is + // unnecessary. This is used in cases where the caller simply wants to + // know if the source file exists but is not interested in the contents. + return { + symbolTable: new Map(), + dunderAllNames: undefined, + usesUnsupportedDunderAllForm: false, + get docString() { + return undefined; + }, + isInPyTypedPackage: false, + }; + } + + if (sourceFileInfo.sourceFile.isBindingRequired()) { + // If we're running low on memory, free up some space. + this._handleMemoryHighUsage(); + + // Bind the file if it's not already bound. Don't count this time + // against the type checker. + timingStats.typeCheckerTime.subtractFromTime(() => { + this._bindFile(sourceFileInfo!, /* content */ undefined, options?.skipFileNeededCheck); + }); + } + + const symbolTable = sourceFileInfo.sourceFile.getModuleSymbolTable(); + if (!symbolTable) { + return undefined; + } + + const parseResults = sourceFileInfo.sourceFile.getParserOutput(); + const moduleNode = parseResults!.parseTree; + const fileInfo = AnalyzerNodeInfo.getFileInfo(moduleNode); + + const dunderAllInfo = AnalyzerNodeInfo.getDunderAllInfo(parseResults!.parseTree); + + return { + symbolTable, + dunderAllNames: dunderAllInfo?.names, + usesUnsupportedDunderAllForm: dunderAllInfo?.usesUnsupportedDunderAllForm ?? false, + get docString() { + return getDocString(moduleNode.d.statements); + }, + isInPyTypedPackage: fileInfo.isInPyTypedPackage, + }; + }; + + private _shouldCheckFile(fileInfo: SourceFileInfo) { + // Always do a full checking for a file that's open in the editor. 
+ if (fileInfo.isOpenByClient) { + return true; + } + + // If the file isn't currently open, only perform full checking for + // files that are tracked, and only if the checkOnlyOpenFiles is disabled. + if (!this._configOptions.checkOnlyOpenFiles && fileInfo.isTracked) { + return true; + } + + return false; + } + + private _checkTypes( + fileToCheck: SourceFileInfo, + options?: { chainedByList?: SourceFileInfo[]; skipFileNeededCheck?: boolean } + ) { + // For very large programs, we may need to discard the evaluator and + // its cached types to avoid running out of heap space. + this._handleMemoryHighUsage(); + + return this._logTracker.log(`analyzing: ${fileToCheck.uri}`, (logState) => { + // If the file isn't needed because it was eliminated from the + // transitive closure or deleted, skip the file rather than wasting + // time on it. + if (!this._isFileNeeded(fileToCheck)) { + logState.suppress(); + return false; + } + + if (!fileToCheck.sourceFile.isCheckingRequired()) { + logState.suppress(); + return false; + } + + if (!options?.skipFileNeededCheck && !this._shouldCheckFile(fileToCheck)) { + logState.suppress(); + return false; + } + + // Bind the file if necessary even if we're not going to run the checker. + // disableChecker means disable semantic errors, not syntax errors. We need to bind again + // in order to generate syntax errors. + const boundFile = this._bindFile( + fileToCheck, + undefined, + // If binding is required we want to make sure to bind the file, otherwise + // the sourceFile.check below will fail. + /* skipFileNeededCheck */ fileToCheck.sourceFile.isBindingRequired() + ); + + if (!this._disableChecker) { + // For ipython, make sure we check all its dependent files first since + // their results can affect this file's result. 
+ const dependentFiles = this._checkDependentFiles(fileToCheck, options?.chainedByList); + + if (this._preCheckCallback) { + const parseResults = fileToCheck.sourceFile.getParserOutput(); + if (parseResults) { + this._preCheckCallback(parseResults, this._evaluator!); + } + } + + if (boundFile) { + fileToCheck.sourceFile.check( + this.configOptions, + this._lookUpImport, + this._importResolver, + this._evaluator!, + dependentFiles + ); + } + } + + // Detect import cycles that involve the file. + if (this._configOptions.diagnosticRuleSet.reportImportCycles !== 'none') { + // Don't detect import cycles when doing type stub generation. Some + // third-party modules are pretty convoluted. + // Or if the file is the notebook cell. notebook cell can't have cycles. + if (!this._allowedThirdPartyImports && fileToCheck.ipythonMode !== IPythonMode.CellDocs) { + // We need to force all of the files to be parsed and build + // a closure map for the files. + const closureMap = new Map(); + this._getImportsRecursive(fileToCheck, closureMap, 0); + + closureMap.forEach((file) => { + timingStats.cycleDetectionTime.timeOperation(() => { + const filesVisitedMap = new Map(); + + if (!this._detectAndReportImportCycles(file, filesVisitedMap)) { + // If no cycles were found in any of the files we visited, + // set a flag to indicates that we don't need to visit them again + // on subsequent cycle checks. + filesVisitedMap.forEach((sourceFileInfo) => { + sourceFileInfo.sourceFile.setNoCircularDependencyConfirmed(); + }); + } + }); + }); + } + } + + return true; + }); + } + + private _checkDependentFiles(fileToCheck: SourceFileInfo, chainedByList: SourceFileInfo[] | undefined) { + if (fileToCheck.ipythonMode !== IPythonMode.CellDocs) { + return undefined; + } + + // If we don't have chainedByList, it means none of them are checked yet. + const needToRunChecker = !chainedByList; + + chainedByList = chainedByList ?? 
createChainedByList(this, fileToCheck); + const index = chainedByList.findIndex((v) => v === fileToCheck); + if (index < 0) { + return undefined; + } + + const startIndex = index + 1; + if (startIndex >= chainedByList.length) { + return undefined; + } + + if (needToRunChecker) { + // If the file is already analyzed, it will be no op. + // And make sure we don't dump parse tree and etc while + // calling checker. Otherwise, checkType can dump parse + // tree required by outer check. + const handle = this._cacheManager.pauseTracking(); + try { + for (let i = chainedByList.length - 1; i >= startIndex; i--) { + this._checkTypes(chainedByList[i], { chainedByList }); + } + } finally { + handle.dispose(); + } + } + + const dependentFiles = []; + for (let i = startIndex; i < chainedByList.length; i++) { + const file = chainedByList[i]; + const parseResults = file?.sourceFile.getParserOutput(); + if (!parseResults) { + continue; + } + + // We might not have the file info if binding failed for whatever reasons. + // Check whether the file has been bound + if (file.sourceFile.isBindingRequired()) { + continue; + } + + const fileInfo = AnalyzerNodeInfo.getFileInfo(parseResults.parseTree); + if (fileInfo.accessedSymbolSet) { + dependentFiles.push(parseResults); + } + } + + return dependentFiles; + } + + // Builds a map of files that includes the specified file and all of the files + // it imports (recursively) and ensures that all such files. If any of these files + // have already been checked (they and their recursive imports have completed the + // check phase), they are not included in the results. + private _getImportsRecursive( + file: SourceFileInfo, + closureMap: Map, + recursionCount: number + ) { + // If the file is already in the closure map, we found a cyclical + // dependency. Don't recur further. + const fileUri = file.uri; + if (closureMap.has(fileUri.key)) { + return; + } + + // If the import chain is too long, emit an error. 
Otherwise we + // risk blowing the stack. + if (recursionCount > _maxImportDepth) { + file.sourceFile.setHitMaxImportDepth(_maxImportDepth); + return; + } + + // Add the file to the closure map. + closureMap.set(fileUri.key, file); + + // If this file hasn't already been parsed, parse it now. This will + // discover any files it imports. Skip this if the file is part + // of a library. We'll assume that no cycles will be generated from + // library code or typeshed stubs. + if (isUserCode(file)) { + this._parseFile(file); + } + + // Recursively add the file's imports. + for (const importedFileInfo of file.imports) { + this._getImportsRecursive(importedFileInfo, closureMap, recursionCount + 1); + } + } + + private _detectAndReportImportCycles( + sourceFileInfo: SourceFileInfo, + filesVisited: Map, + dependencyChain: SourceFileInfo[] = [], + dependencyMap = new Map() + ): boolean { + // Don't bother checking for typestub files or third-party files. + if (sourceFileInfo.sourceFile.isStubFile() || sourceFileInfo.isThirdPartyImport) { + return false; + } + + // If we've already confirmed that this source file isn't part of a + // cycle, we can skip it entirely. + if (sourceFileInfo.sourceFile.isNoCircularDependencyConfirmed()) { + return false; + } + + const fileUri = sourceFileInfo.uri; + + filesVisited.set(fileUri.key, sourceFileInfo); + + let detectedCycle = false; + + if (dependencyMap.has(fileUri.key)) { + // We detect a cycle (partial or full). A full cycle is one that is + // rooted in the file at the start of our dependency chain. A partial + // cycle loops back on some other file in the dependency chain. We + // will report only full cycles here and leave the reporting of + // partial cycles to other passes. + detectedCycle = true; + + // Look for chains at least two in length. A file that contains + // an "import . from X" will technically create a cycle with + // itself, but those are not interesting to report. 
+ if (dependencyChain.length > 1 && sourceFileInfo === dependencyChain[0]) { + this._logImportCycle(dependencyChain); + } + } else { + // If we've already checked this dependency along + // some other path, we can skip it. + if (dependencyMap.has(fileUri.key)) { + return false; + } + + // We use both a map (for fast lookups) and a list + // (for ordering information). Set the dependency map + // entry to true to indicate that we're actively exploring + // that dependency. + dependencyMap.set(fileUri.key, true); + dependencyChain.push(sourceFileInfo); + + for (const imp of sourceFileInfo.imports) { + if (this._detectAndReportImportCycles(imp, filesVisited, dependencyChain, dependencyMap)) { + detectedCycle = true; + } + } + + // Set the dependencyMap entry to false to indicate that we have + // already explored this file and don't need to explore it again. + dependencyMap.set(fileUri.key, false); + dependencyChain.pop(); + } + + return detectedCycle; + } + + private _logImportCycle(dependencyChain: SourceFileInfo[]) { + const circDep = new CircularDependency(); + dependencyChain.forEach((sourceFileInfo) => { + circDep.appendPath(sourceFileInfo.uri); + }); + + circDep.normalizeOrder(); + const firstFilePath = circDep.getPaths()[0]; + const firstSourceFile = this.getSourceFileInfo(firstFilePath)!; + assert(firstSourceFile !== undefined); + firstSourceFile.sourceFile.addCircularDependency(this.configOptions, circDep); + } + + private _markFileDirtyRecursive(sourceFileInfo: SourceFileInfo, markSet: Set, forceRebinding = false) { + const fileUri = sourceFileInfo.uri; + + // Don't mark it again if it's already been visited. + if (markSet.has(fileUri.key)) { + return; + } + + sourceFileInfo.sourceFile.markReanalysisRequired(forceRebinding); + markSet.add(fileUri.key); + + sourceFileInfo.importedBy.forEach((dep) => { + // Changes on chained source file can change symbols in the symbol table and + // dependencies on the dependent file. Force rebinding. 
+ const forceRebinding = dep.chainedSourceFile === sourceFileInfo; + this._markFileDirtyRecursive(dep, markSet, forceRebinding); + }); + + // Change in the current file could impact checker result of chainedSourceFile such as unused symbols. + let reevaluationRequired = false; + let chainedSourceFile = sourceFileInfo.chainedSourceFile; + while (chainedSourceFile) { + if (chainedSourceFile.sourceFile.isCheckingRequired()) { + // If the file is marked for checking, its chained one should be marked + // as well. Stop here. + break; + } + + reevaluationRequired = true; + chainedSourceFile.sourceFile.markReanalysisRequired(/* forceRebinding */ false); + chainedSourceFile = chainedSourceFile.chainedSourceFile; + } + + // If the checker is going to run again, we have to recreate the type evaluator so + // that it actually reevaluates all the nodes (instead of using the cache). + // This is necessary because the original file change may not recreate the TypeEvaluator. + // For example, it might be a file delete. + if (reevaluationRequired) { + this._createNewEvaluator(); + } + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/programTypes.ts b/python-parser/packages/pyright-internal/src/analyzer/programTypes.ts new file mode 100644 index 00000000..cc628aba --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/programTypes.ts @@ -0,0 +1,32 @@ +/* + * programTypes.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Various interfaces/types used in + */ +import { ConsoleInterface } from '../common/console'; +import { LogTracker } from '../common/logTracker'; +import { ServiceProvider } from '../common/serviceProvider'; +import { Uri } from '../common/uri/uri'; +import { IPythonMode, SourceFile, SourceFileEditMode } from './sourceFile'; + +export interface ISourceFileFactory { + createSourceFile( + serviceProvider: ServiceProvider, + fileUri: Uri, + moduleNameGetter: (file: Uri) => string, + isThirdPartyImport: boolean, + isThirdPartyPyTypedPresent: boolean, + editMode: SourceFileEditMode, + console?: ConsoleInterface, + logTracker?: LogTracker, + ipythonMode?: IPythonMode + ): SourceFile; +} + +export namespace ISourceFileFactory { + export function is(obj: any): obj is ISourceFileFactory { + return obj.createSourceFile !== undefined; + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/properties.ts b/python-parser/packages/pyright-internal/src/analyzer/properties.ts new file mode 100644 index 00000000..1a0d0565 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/properties.ts @@ -0,0 +1,562 @@ +/* + * properties.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides type evaluation logic that is specific to properties. 
+ */ + +import { DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { LocAddendum, LocMessage } from '../localization/localize'; +import { DecoratorNode, FunctionNode, ParamCategory, ParseNode } from '../parser/parseNodes'; +import { getFileInfo } from './analyzerNodeInfo'; +import { ConstraintSolution } from './constraintSolution'; +import { ConstraintTracker } from './constraintTracker'; +import { getClassFullName, getTypeAnnotationForParam, getTypeSourceId } from './parseTreeUtils'; +import { Symbol, SymbolFlags } from './symbol'; +import { AssignTypeFlags, TypeEvaluator } from './typeEvaluatorTypes'; +import { + AnyType, + ClassType, + ClassTypeFlags, + combineTypes, + FunctionParam, + FunctionParamFlags, + FunctionType, + FunctionTypeFlags, + isAnyOrUnknown, + isClass, + isFunction, + isInstantiableClass, + isTypeSame, + isTypeVar, + ModuleType, + OverloadedType, + Type, + TypeVarType, + UnknownType, +} from './types'; +import { applySolvedTypeVars, computeMroLinearization, getTypeVarScopeId, isProperty } from './typeUtils'; + +export function validatePropertyMethod(evaluator: TypeEvaluator, method: FunctionType, errorNode: ParseNode) { + if (FunctionType.isStaticMethod(method)) { + evaluator.addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.propertyStaticMethod(), errorNode); + } +} + +export function createProperty( + evaluator: TypeEvaluator, + decoratorNode: DecoratorNode, + decoratorType: ClassType, + fget: FunctionType +): ClassType { + const fileInfo = getFileInfo(decoratorNode); + const typeMetaclass = evaluator.getBuiltInType(decoratorNode, 'type'); + const typeSourceId = ClassType.isBuiltIn(decoratorType, 'property') + ? 
getTypeSourceId(decoratorNode) + : decoratorType.shared.typeSourceId; + + const propertyClass = ClassType.createInstantiable( + decoratorType.shared.name, + getClassFullName(decoratorNode, fileInfo.moduleName, `__property_${fget.shared.name}`), + fileInfo.moduleName, + fileInfo.fileUri, + ClassTypeFlags.PropertyClass | ClassTypeFlags.BuiltIn, + typeSourceId, + /* declaredMetaclass */ undefined, + isInstantiableClass(typeMetaclass) ? typeMetaclass : UnknownType.create() + ); + + propertyClass.shared.declaration = decoratorType.shared.declaration; + propertyClass.shared.typeVarScopeId = decoratorType.shared.typeVarScopeId; + const objectType = evaluator.getBuiltInType(decoratorNode, 'object'); + propertyClass.shared.baseClasses.push(isInstantiableClass(objectType) ? objectType : UnknownType.create()); + computeMroLinearization(propertyClass); + + // Clone the symbol table of the old class type. + const fields = ClassType.getSymbolTable(propertyClass); + ClassType.getSymbolTable(decoratorType).forEach((symbol, name) => { + const ignoredMethods = ['__get__', '__set__', '__delete__']; + + if (!symbol.isIgnoredForProtocolMatch()) { + if (!ignoredMethods.some((m) => m === name)) { + fields.set(name, symbol); + } + } + }); + + const propertyObject = ClassType.cloneAsInstance(propertyClass); + propertyClass.priv.isAsymmetricDescriptor = false; + + // Update the __set__ and __delete__ methods if present. + updateGetSetDelMethodForClonedProperty(evaluator, propertyObject); + + // Fill in the fget method. + propertyObject.priv.fgetInfo = { + methodType: fget, + classType: fget.shared.methodClass, + }; + + if (FunctionType.isClassMethod(fget)) { + propertyClass.shared.flags |= ClassTypeFlags.ClassProperty; + } + + // Fill in the __get__ method with an overload. + addGetMethodToPropertySymbolTable(evaluator, propertyObject, fget); + + // Fill in the getter, setter and deleter methods. 
+ addDecoratorMethodsToPropertySymbolTable(propertyObject); + + return propertyObject; +} + +export function clonePropertyWithSetter( + evaluator: TypeEvaluator, + prop: Type, + fset: FunctionType, + errorNode: FunctionNode +): Type { + if (!isProperty(prop)) { + return prop; + } + + const classType = prop as ClassType; + const flagsToClone = classType.shared.flags; + let isAsymmetricDescriptor = !!classType.priv.isAsymmetricDescriptor; + + // Verify parameters for fset. + // We'll skip this test if the diagnostic rule is disabled because it + // can be somewhat expensive, especially in code that is not annotated. + const fileInfo = getFileInfo(errorNode); + if (errorNode.d.params.length >= 2) { + const typeAnnotation = getTypeAnnotationForParam(errorNode, 1); + if (typeAnnotation) { + // Verify consistency of the type. + const fgetType = evaluator.getGetterTypeFromProperty(classType); + if (fgetType && !isAnyOrUnknown(fgetType)) { + const fsetType = evaluator.getTypeOfAnnotation(typeAnnotation, { + typeVarGetsCurScope: true, + }); + + // The setter type should be assignable to the getter type. 
+ if (fileInfo.diagnosticRuleSet.reportPropertyTypeMismatch !== 'none') { + const diag = new DiagnosticAddendum(); + if (!evaluator.assignType(fgetType, fsetType, diag)) { + evaluator.addDiagnostic( + DiagnosticRule.reportPropertyTypeMismatch, + LocMessage.setterGetterTypeMismatch() + diag.getString(), + typeAnnotation + ); + } + } + + if (!isTypeSame(fgetType, fsetType)) { + isAsymmetricDescriptor = true; + } + } + } + } + + const propertyClass = ClassType.createInstantiable( + classType.shared.name, + classType.shared.fullName, + classType.shared.moduleName, + getFileInfo(errorNode).fileUri, + flagsToClone, + classType.shared.typeSourceId, + classType.shared.declaredMetaclass, + classType.shared.effectiveMetaclass + ); + + propertyClass.shared.declaration = classType.shared.declaration; + propertyClass.shared.typeVarScopeId = classType.shared.typeVarScopeId; + const objectType = evaluator.getBuiltInType(errorNode, 'object'); + propertyClass.shared.baseClasses.push(isInstantiableClass(objectType) ? objectType : UnknownType.create()); + computeMroLinearization(propertyClass); + + propertyClass.priv.fgetInfo = classType.priv.fgetInfo; + propertyClass.priv.fdelInfo = classType.priv.fdelInfo; + propertyClass.priv.isAsymmetricDescriptor = isAsymmetricDescriptor; + const propertyObject = ClassType.cloneAsInstance(propertyClass); + + // Clone the symbol table of the old class type. + const fields = ClassType.getSymbolTable(propertyClass); + ClassType.getSymbolTable(classType).forEach((symbol, name) => { + if (!symbol.isIgnoredForProtocolMatch()) { + fields.set(name, symbol); + } + }); + + // Update the __get__ and __delete__ methods if present. + updateGetSetDelMethodForClonedProperty(evaluator, propertyObject); + + // Fill in the new fset method. + propertyObject.priv.fsetInfo = { + methodType: fset, + classType: fset.shared.methodClass, + }; + + // Fill in the __set__ method. 
+ addSetMethodToPropertySymbolTable(evaluator, propertyObject, fset); + + // Fill in the getter, setter and deleter methods. + addDecoratorMethodsToPropertySymbolTable(propertyObject); + + return propertyObject; +} + +export function clonePropertyWithDeleter( + evaluator: TypeEvaluator, + prop: Type, + fdel: FunctionType, + errorNode: FunctionNode +): Type { + if (!isProperty(prop)) { + return prop; + } + + const classType = prop as ClassType; + const propertyClass = ClassType.createInstantiable( + classType.shared.name, + classType.shared.fullName, + classType.shared.moduleName, + getFileInfo(errorNode).fileUri, + classType.shared.flags, + classType.shared.typeSourceId, + classType.shared.declaredMetaclass, + classType.shared.effectiveMetaclass + ); + + propertyClass.shared.declaration = classType.shared.declaration; + propertyClass.shared.typeVarScopeId = classType.shared.typeVarScopeId; + const objectType = evaluator.getBuiltInType(errorNode, 'object'); + propertyClass.shared.baseClasses.push(isInstantiableClass(objectType) ? objectType : UnknownType.create()); + computeMroLinearization(propertyClass); + + propertyClass.priv.fgetInfo = classType.priv.fgetInfo; + propertyClass.priv.fsetInfo = classType.priv.fsetInfo; + const propertyObject = ClassType.cloneAsInstance(propertyClass); + propertyClass.priv.isAsymmetricDescriptor = classType.priv.isAsymmetricDescriptor ?? false; + + // Clone the symbol table of the old class type. + const fields = ClassType.getSymbolTable(propertyClass); + ClassType.getSymbolTable(classType).forEach((symbol, name) => { + if (!symbol.isIgnoredForProtocolMatch()) { + fields.set(name, symbol); + } + }); + + // Update the __get__ and __set__ methods if present. + updateGetSetDelMethodForClonedProperty(evaluator, propertyObject); + + // Fill in the fdel method. + propertyObject.priv.fdelInfo = { + methodType: fdel, + classType: fdel.shared.methodClass, + }; + + // Fill in the __delete__ method. 
+ addDelMethodToPropertySymbolTable(evaluator, propertyObject, fdel); + + // Fill in the getter, setter and deleter methods. + addDecoratorMethodsToPropertySymbolTable(propertyObject); + + return propertyObject; +} + +function addGetMethodToPropertySymbolTable(evaluator: TypeEvaluator, propertyObject: ClassType, fget: FunctionType) { + const fields = ClassType.getSymbolTable(propertyObject); + + // The first overload is for accesses through a class object (where + // the instance argument is None). + const getFunction1 = FunctionType.createSynthesizedInstance('__get__', FunctionTypeFlags.Overloaded); + FunctionType.addParam( + getFunction1, + FunctionParam.create(ParamCategory.Simple, AnyType.create(), FunctionParamFlags.TypeDeclared, 'self') + ); + FunctionType.addParam( + getFunction1, + FunctionParam.create(ParamCategory.Simple, evaluator.getNoneType(), FunctionParamFlags.TypeDeclared, 'obj') + ); + FunctionType.addParam( + getFunction1, + FunctionParam.create( + ParamCategory.Simple, + AnyType.create(), + FunctionParamFlags.TypeDeclared, + 'objtype', + AnyType.create(/* isEllipsis */ true) + ) + ); + getFunction1.shared.declaredReturnType = FunctionType.isClassMethod(fget) + ? FunctionType.getEffectiveReturnType(fget) + : propertyObject; + getFunction1.shared.declaration = fget.shared.declaration; + getFunction1.shared.deprecatedMessage = fget.shared.deprecatedMessage; + getFunction1.shared.methodClass = fget.shared.methodClass; + + // Override the scope ID since we're using parameter types from the + // decorated function. + getFunction1.shared.typeVarScopeId = getTypeVarScopeId(fget); + + // The second overload is for accesses through a class instance. + const getFunction2 = FunctionType.createSynthesizedInstance('__get__', FunctionTypeFlags.Overloaded); + FunctionType.addParam( + getFunction2, + FunctionParam.create(ParamCategory.Simple, AnyType.create(), FunctionParamFlags.TypeDeclared, 'self') + ); + + const objType = fget.shared.parameters.length > 0 ? 
FunctionType.getParamType(fget, 0) : AnyType.create(); + + FunctionType.addParam( + getFunction2, + FunctionParam.create(ParamCategory.Simple, objType, FunctionParamFlags.TypeDeclared, 'obj') + ); + + FunctionType.addParam( + getFunction2, + FunctionParam.create( + ParamCategory.Simple, + AnyType.create(), + FunctionParamFlags.TypeDeclared, + 'objtype', + AnyType.create(/* isEllipsis */ true) + ) + ); + getFunction2.shared.declaredReturnType = FunctionType.getEffectiveReturnType(fget); + getFunction2.shared.declaration = fget.shared.declaration; + getFunction2.shared.deprecatedMessage = fget.shared.deprecatedMessage; + getFunction2.shared.methodClass = fget.shared.methodClass; + + // Override the scope ID since we're using parameter types from the + // decorated function. + getFunction2.shared.typeVarScopeId = getTypeVarScopeId(fget); + + // We previously placed getFunction1 before getFunction2, but this creates + // problems specifically for the `NoneType` class because None.__class__ + // is a property, and both overloads match in this case because None + // is passed for the "obj" parameter. + const getFunctionOverload = OverloadedType.create([getFunction2, getFunction1]); + const getSymbol = Symbol.createWithType(SymbolFlags.ClassMember, getFunctionOverload); + fields.set('__get__', getSymbol); +} + +function addSetMethodToPropertySymbolTable(evaluator: TypeEvaluator, propertyObject: ClassType, fset: FunctionType) { + const fields = ClassType.getSymbolTable(propertyObject); + + const setFunction = FunctionType.createSynthesizedInstance('__set__'); + FunctionType.addParam( + setFunction, + FunctionParam.create(ParamCategory.Simple, AnyType.create(), FunctionParamFlags.TypeDeclared, 'self') + ); + + let objType = fset.shared.parameters.length > 0 ? 
FunctionType.getParamType(fset, 0) : AnyType.create(); + if (isTypeVar(objType) && TypeVarType.isSelf(objType)) { + objType = evaluator.makeTopLevelTypeVarsConcrete(objType); + } + + FunctionType.addParam( + setFunction, + FunctionParam.create( + ParamCategory.Simple, + combineTypes([objType, evaluator.getNoneType()]), + FunctionParamFlags.TypeDeclared, + 'obj' + ) + ); + + setFunction.shared.declaredReturnType = evaluator.getNoneType(); + + // Adopt the TypeVarScopeId of the fset function in case it has any + // TypeVars that need to be solved. + setFunction.shared.typeVarScopeId = getTypeVarScopeId(fset); + setFunction.shared.deprecatedMessage = fset.shared.deprecatedMessage; + setFunction.shared.methodClass = fset.shared.methodClass; + + let setParamType: Type = UnknownType.create(); + + if ( + fset.shared.parameters.length >= 2 && + fset.shared.parameters[1].category === ParamCategory.Simple && + fset.shared.parameters[1].name + ) { + setParamType = FunctionType.getParamType(fset, 1); + } + FunctionType.addParam( + setFunction, + FunctionParam.create(ParamCategory.Simple, setParamType, FunctionParamFlags.TypeDeclared, 'value') + ); + const setSymbol = Symbol.createWithType(SymbolFlags.ClassMember, setFunction); + fields.set('__set__', setSymbol); +} + +function addDelMethodToPropertySymbolTable(evaluator: TypeEvaluator, propertyObject: ClassType, fdel: FunctionType) { + const fields = ClassType.getSymbolTable(propertyObject); + + const delFunction = FunctionType.createSynthesizedInstance('__delete__'); + FunctionType.addParam( + delFunction, + FunctionParam.create(ParamCategory.Simple, AnyType.create(), FunctionParamFlags.TypeDeclared, 'self') + ); + + // Adopt the TypeVarScopeId of the fdel function in case it has any + // TypeVars that need to be solved. 
+ delFunction.shared.typeVarScopeId = getTypeVarScopeId(fdel); + delFunction.shared.deprecatedMessage = fdel.shared.deprecatedMessage; + delFunction.shared.methodClass = fdel.shared.methodClass; + + let objType = fdel.shared.parameters.length > 0 ? FunctionType.getParamType(fdel, 0) : AnyType.create(); + + if (isTypeVar(objType) && TypeVarType.isSelf(objType)) { + objType = evaluator.makeTopLevelTypeVarsConcrete(objType); + } + + FunctionType.addParam( + delFunction, + FunctionParam.create( + ParamCategory.Simple, + combineTypes([objType, evaluator.getNoneType()]), + FunctionParamFlags.TypeDeclared, + 'obj' + ) + ); + delFunction.shared.declaredReturnType = evaluator.getNoneType(); + const delSymbol = Symbol.createWithType(SymbolFlags.ClassMember, delFunction); + fields.set('__delete__', delSymbol); +} + +function updateGetSetDelMethodForClonedProperty(evaluator: TypeEvaluator, propertyObject: ClassType) { + const fgetInfo = propertyObject.priv.fgetInfo; + if (fgetInfo && isFunction(fgetInfo.methodType)) { + addGetMethodToPropertySymbolTable(evaluator, propertyObject, fgetInfo.methodType); + } + + const fsetInfo = propertyObject.priv.fsetInfo; + if (fsetInfo && isFunction(fsetInfo.methodType)) { + addSetMethodToPropertySymbolTable(evaluator, propertyObject, fsetInfo.methodType); + } + + const fdelInfo = propertyObject.priv.fdelInfo; + if (fdelInfo && isFunction(fdelInfo.methodType)) { + addDelMethodToPropertySymbolTable(evaluator, propertyObject, fdelInfo.methodType); + } +} + +function addDecoratorMethodsToPropertySymbolTable(propertyObject: ClassType) { + const fields = ClassType.getSymbolTable(propertyObject); + + // Fill in the getter, setter and deleter methods. 
+ ['getter', 'setter', 'deleter'].forEach((accessorName) => { + const accessorFunction = FunctionType.createSynthesizedInstance(accessorName); + FunctionType.addParam( + accessorFunction, + FunctionParam.create(ParamCategory.Simple, AnyType.create(), FunctionParamFlags.TypeDeclared, 'self') + ); + FunctionType.addParam( + accessorFunction, + FunctionParam.create(ParamCategory.Simple, AnyType.create(), FunctionParamFlags.TypeDeclared, 'accessor') + ); + accessorFunction.shared.declaredReturnType = propertyObject; + const accessorSymbol = Symbol.createWithType(SymbolFlags.ClassMember, accessorFunction); + fields.set(accessorName, accessorSymbol); + }); +} + +export function assignProperty( + evaluator: TypeEvaluator, + destPropertyType: ClassType, + srcPropertyType: ClassType, + destClass: ClassType, + srcClass: ClassType | ModuleType, + diag: DiagnosticAddendum | undefined, + constraints?: ConstraintTracker, + selfSolution?: ConstraintSolution, + recursionCount = 0 +): boolean { + const srcObjectToBind = isClass(srcClass) ? 
ClassType.cloneAsInstance(srcClass) : undefined; + const destObjectToBind = ClassType.cloneAsInstance(destClass); + let isAssignable = true; + const accessors: { + getFunction: (c: ClassType) => FunctionType | undefined; + missingDiagMsg: () => string; + incompatibleDiagMsg: () => string; + }[] = [ + { + getFunction: (c: ClassType) => c.priv.fgetInfo?.methodType, + missingDiagMsg: LocAddendum.missingGetter, + incompatibleDiagMsg: LocAddendum.incompatibleGetter, + }, + { + getFunction: (c: ClassType) => c.priv.fsetInfo?.methodType, + missingDiagMsg: LocAddendum.missingSetter, + incompatibleDiagMsg: LocAddendum.incompatibleSetter, + }, + { + getFunction: (c: ClassType) => c.priv.fdelInfo?.methodType, + missingDiagMsg: LocAddendum.missingDeleter, + incompatibleDiagMsg: LocAddendum.incompatibleDeleter, + }, + ]; + + accessors.forEach((accessorInfo) => { + let destAccessType = accessorInfo.getFunction(destPropertyType); + + if (destAccessType && isFunction(destAccessType)) { + const srcAccessType = accessorInfo.getFunction(srcPropertyType); + + if (!srcAccessType || !isFunction(srcAccessType)) { + diag?.addMessage(accessorInfo.missingDiagMsg()); + isAssignable = false; + return; + } + + evaluator.inferReturnTypeIfNecessary(srcAccessType); + evaluator.inferReturnTypeIfNecessary(destAccessType); + + // If the caller provided a "self" TypeVar context, replace any Self types. + // This is needed during protocol matching. + if (selfSolution) { + destAccessType = applySolvedTypeVars(destAccessType, selfSolution) as FunctionType; + } + + const boundDestAccessType = + evaluator.bindFunctionToClassOrObject( + destObjectToBind, + destAccessType, + /* memberClass */ undefined, + /* treatConstructorAsClassMethod */ undefined, + /* firstParamType */ undefined, + diag?.createAddendum(), + recursionCount + ) ?? 
destAccessType; + + const boundSrcAccessType = + evaluator.bindFunctionToClassOrObject( + srcObjectToBind, + srcAccessType, + /* memberClass */ undefined, + /* treatConstructorAsClassMethod */ undefined, + /* firstParamType */ undefined, + diag?.createAddendum(), + recursionCount + ) ?? srcAccessType; + + if ( + !evaluator.assignType( + boundDestAccessType, + boundSrcAccessType, + diag, + constraints, + AssignTypeFlags.Default, + recursionCount + ) + ) { + isAssignable = false; + } + } + }); + + return isAssignable; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/protocols.ts b/python-parser/packages/pyright-internal/src/analyzer/protocols.ts new file mode 100644 index 00000000..4245507c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/protocols.ts @@ -0,0 +1,880 @@ +/* + * protocols.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides type evaluation logic that is specific to protocol + * (structural subtyping) classes. 
+ */ + +import { assert } from '../common/debug'; +import { defaultMaxDiagnosticDepth, DiagnosticAddendum } from '../common/diagnostic'; +import { LocAddendum } from '../localization/localize'; +import { ConstraintSolution } from './constraintSolution'; +import { assignTypeVar } from './constraintSolver'; +import { ConstraintTracker } from './constraintTracker'; +import { DeclarationType } from './declaration'; +import { assignProperty } from './properties'; +import { Symbol } from './symbol'; +import { getLastTypedDeclarationForSymbol, isEffectivelyClassVar } from './symbolUtils'; +import { AssignTypeFlags, TypeEvaluator } from './typeEvaluatorTypes'; +import { + ClassType, + FunctionType, + isClass, + isClassInstance, + isFunction, + isFunctionOrOverloaded, + isInstantiableClass, + isTypeSame, + ModuleType, + OverloadedType, + Type, + TypeBase, + TypeVarType, + UnknownType, + Variance, +} from './types'; +import { + addSolutionForSelfType, + applySolvedTypeVars, + ClassMember, + containsLiteralType, + lookUpClassMember, + makeFunctionTypeVarsBound, + MemberAccessFlags, + partiallySpecializeType, + requiresSpecialization, + requiresTypeArgs, + selfSpecializeClass, + synthesizeTypeVarForSelfCls, +} from './typeUtils'; + +interface ProtocolAssignmentStackEntry { + srcType: ClassType; + destType: ClassType; +} + +interface ProtocolCompatibility { + // Specialized source type or undefined if this entry applies + // to all specializations + srcType: ClassType | undefined; + + // Specialized dest type + destType: ClassType; + + flags: AssignTypeFlags; + preConstraints: ConstraintTracker | undefined; + postConstraints: ConstraintTracker | undefined; + isCompatible: boolean; +} + +const protocolAssignmentStack: ProtocolAssignmentStackEntry[] = []; + +// Maximum number of different types that are cached with a protocol. 
+const maxProtocolCompatibilityCacheEntries = 64; + +export function assignClassToProtocol( + evaluator: TypeEvaluator, + destType: ClassType, + srcType: ClassType, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + flags: AssignTypeFlags, + recursionCount: number +): boolean { + // We assume that destType is an instantiable class that is a protocol. The + // srcType can be an instantiable class or a class instance. + assert(isInstantiableClass(destType) && ClassType.isProtocolClass(destType)); + + // A literal source type should never affect protocol matching, so strip + // the literal type if it's present. This helps conserve on cache entries. + if (srcType.priv.literalValue !== undefined) { + srcType = evaluator.stripLiteralValue(srcType) as ClassType; + } + + const enforceInvariance = (flags & AssignTypeFlags.Invariant) !== 0; + + // Use a stack of pending protocol class evaluations to detect recursion. + // This can happen when a protocol class refers to itself. + if ( + protocolAssignmentStack.some((entry) => { + return isTypeSame(entry.srcType, srcType) && isTypeSame(entry.destType, destType); + }) + ) { + return !enforceInvariance; + } + + // See if we've already determined that this class is compatible with this protocol. + const compat = getProtocolCompatibility(destType, srcType, flags, constraints); + + if (compat !== undefined) { + if (compat.isCompatible) { + if (compat.postConstraints) { + constraints?.copyFromClone(compat.postConstraints); + } + return true; + } + + // If it's known not to be compatible and the caller hasn't requested + // any detailed diagnostic information or we've already exceeded the + // depth of diagnostic information that will be displayed, we can + // return false immediately. 
+ if (!diag || diag.getNestLevel() > defaultMaxDiagnosticDepth) { + return false; + } + } + + protocolAssignmentStack.push({ srcType, destType }); + let isCompatible = true; + const clonedConstraints = constraints?.clone(); + + try { + isCompatible = assignToProtocolInternal(evaluator, destType, srcType, diag, constraints, flags, recursionCount); + } catch (e) { + // We'd normally use "finally" here, but the TS debugger does such + // a poor job dealing with finally, we'll use a catch instead. + protocolAssignmentStack.pop(); + throw e; + } + + protocolAssignmentStack.pop(); + + // Cache the results for next time. + if (!compat) { + setProtocolCompatibility( + evaluator, + destType, + srcType, + flags, + clonedConstraints, + constraints?.clone(), + isCompatible, + recursionCount + ); + } + + return isCompatible; +} + +export function assignModuleToProtocol( + evaluator: TypeEvaluator, + destType: ClassType, + srcType: ModuleType, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + flags: AssignTypeFlags, + recursionCount: number +): boolean { + return assignToProtocolInternal(evaluator, destType, srcType, diag, constraints, flags, recursionCount); +} + +// Determines whether the specified class is a protocol class that has +// only methods, no other symbol types like variables. +export function isMethodOnlyProtocol(classType: ClassType): boolean { + if (!ClassType.isProtocolClass(classType)) { + return false; + } + + // First check for data members in any protocol base classes. 
+ for (const baseClass of classType.shared.baseClasses) { + if (isClass(baseClass) && ClassType.isProtocolClass(baseClass) && !isMethodOnlyProtocol(baseClass)) { + return false; + } + } + + for (const [, symbol] of ClassType.getSymbolTable(classType)) { + if (symbol.isIgnoredForProtocolMatch()) { + continue; + } + + if (symbol.getDeclarations().some((decl) => decl.type !== DeclarationType.Function)) { + return false; + } + } + + return true; +} + +// Determines whether the classType has "unsafe overlap" with a runtime checkable protocol. +// This can occur because the runtime doesn't do full type comparisons. It simply looks at +// the presence of specific attributes. +export function isProtocolUnsafeOverlap(evaluator: TypeEvaluator, protocol: ClassType, classType: ClassType): boolean { + // If the classType is compatible with the protocol, then it doesn't overlap unsafely. + if (evaluator.assignType(protocol, classType)) { + return false; + } + + let isUnsafeOverlap = true; + + protocol.shared.mro.forEach((mroClass) => { + if (!isUnsafeOverlap || !isInstantiableClass(mroClass) || !ClassType.isProtocolClass(mroClass)) { + return; + } + + ClassType.getSymbolTable(mroClass).forEach((destSymbol, name) => { + if (!isUnsafeOverlap || !destSymbol.isClassMember() || destSymbol.isIgnoredForProtocolMatch()) { + return; + } + + // Does the classType have a member with the same name? + const srcMemberInfo = lookUpClassMember(classType, name); + if (!srcMemberInfo) { + isUnsafeOverlap = false; + } + }); + }); + + return isUnsafeOverlap; +} + +function makeProtocolCompatibilityCacheClassKey(classType: ClassType): string { + // Create a unique key based on the full name of the class and its type source ID, + // which is derived from the character offset of the class in the source file. + return `${classType.shared.fullName}.${classType.shared.typeSourceId}`; +} + +// Looks up the protocol compatibility in the cache. If it's not found, +// return undefined. 
+function getProtocolCompatibility( + destType: ClassType, + srcType: ClassType, + flags: AssignTypeFlags, + constraints: ConstraintTracker | undefined +): ProtocolCompatibility | undefined { + const map = srcType.shared.protocolCompatibility as Map<string, ProtocolCompatibility[]> | undefined; + if (!map) { + return undefined; + } + + const classKey = makeProtocolCompatibilityCacheClassKey(destType); + const entries = map.get(classKey); + if (entries === undefined) { + return undefined; + } + + for (const entry of entries) { + if (entry.flags !== flags) { + continue; + } + + if (entry.srcType === undefined) { + if (ClassType.isSameGenericClass(entry.destType, destType)) { + return entry; + } + + continue; + } + + if ( + isTypeSame(entry.destType, destType, { honorIsTypeArgExplicit: true, honorTypeForm: true }) && + isTypeSame(entry.srcType, srcType, { honorIsTypeArgExplicit: true, honorTypeForm: true }) && + isConstraintTrackerSame(constraints, entry.preConstraints) + ) { + return entry; + } + } + + return undefined; +} + +function setProtocolCompatibility( + evaluator: TypeEvaluator, + destType: ClassType, + srcType: ClassType, + flags: AssignTypeFlags, + preConstraints: ConstraintTracker | undefined, + postConstraints: ConstraintTracker | undefined, + isCompatible: boolean, + recursionCount: number +) { + let map = srcType.shared.protocolCompatibility as Map<string, ProtocolCompatibility[]> | undefined; + if (!map) { + map = new Map<string, ProtocolCompatibility[]>(); + srcType.shared.protocolCompatibility = map; + } + + const classKey = makeProtocolCompatibilityCacheClassKey(destType); + let entries = map.get(classKey); + if (!entries) { + entries = []; + map.set(classKey, entries); + } + + // See if the srcType is always incompatible regardless of how it + // and the destType are specialized. + let isAlwaysIncompatible = false; + + if ( + !isCompatible && + !entries.some((entry) => entry.flags === flags && ClassType.isSameGenericClass(entry.destType, destType)) + ) { + const genericDestType = requiresTypeArgs(destType) + ? 
selfSpecializeClass(destType, { overrideTypeArgs: true }) + : destType; + const genericSrcType = requiresTypeArgs(srcType) + ? selfSpecializeClass(srcType, { overrideTypeArgs: true }) + : srcType; + + if ( + !assignToProtocolInternal( + evaluator, + genericDestType, + genericSrcType, + /* diag */ undefined, + /* constraints */ undefined, + flags, + recursionCount + ) + ) { + isAlwaysIncompatible = true; + } + } + + const newEntry: ProtocolCompatibility = { + destType, + srcType: isAlwaysIncompatible ? undefined : srcType, + flags, + preConstraints, + postConstraints, + isCompatible, + }; + + entries.push(newEntry); + + // Make sure the cache doesn't grow too large. + if (entries.length > maxProtocolCompatibilityCacheEntries) { + entries.shift(); + } +} + +function isConstraintTrackerSame(context1: ConstraintTracker | undefined, context2: ConstraintTracker | undefined) { + if (!context1 || !context2) { + return context1 === context2; + } + + return context1.isSame(context2); +} + +function assignToProtocolInternal( + evaluator: TypeEvaluator, + destType: ClassType, + srcType: ClassType | ModuleType, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + flags: AssignTypeFlags, + recursionCount: number +): boolean { + if ((flags & AssignTypeFlags.Invariant) !== 0) { + return isTypeSame(destType, srcType); + } + + evaluator.inferVarianceForClass(destType); + + const sourceIsClassObject = isClass(srcType) && TypeBase.isInstantiable(srcType); + const protocolConstraints = createProtocolConstraints(evaluator, destType, constraints); + const selfSolution = new ConstraintSolution(); + + let selfType: ClassType | TypeVarType | undefined; + if (isClass(srcType)) { + // If the srcType is conditioned on "self", use "Self" as the selfType. + // Otherwise use the class type for selfType. 
+ const synthCond = srcType.props?.condition?.find((c) => TypeVarType.isSelf(c.typeVar)); + if (synthCond) { + selfType = synthesizeTypeVarForSelfCls( + TypeBase.cloneForCondition(srcType, undefined), + /* isClsType */ false + ); + + if (TypeVarType.isBound(synthCond.typeVar)) { + selfType = TypeVarType.cloneAsBound(selfType); + } + } else { + selfType = srcType; + } + + addSolutionForSelfType(selfSolution, destType, selfType); + } + + // If the source is a TypedDict, use the _TypedDict placeholder class + // instead. We don't want to use the TypedDict members for protocol + // comparison. + if (isClass(srcType) && ClassType.isTypedDictClass(srcType)) { + const typedDictClassType = evaluator.getTypedDictClassType(); + if (typedDictClassType && isInstantiableClass(typedDictClassType)) { + srcType = typedDictClassType; + } + } + + let typesAreConsistent = true; + const checkedSymbolSet = new Set(); + let assignTypeFlags = flags & (AssignTypeFlags.OverloadOverlap | AssignTypeFlags.PartialOverloadOverlap); + + assignTypeFlags |= containsLiteralType(srcType, /* includeTypeArgs */ true) + ? AssignTypeFlags.RetainLiteralsForTypeVar + : AssignTypeFlags.Default; + + destType.shared.mro.forEach((mroClass) => { + if (!isInstantiableClass(mroClass) || !ClassType.isProtocolClass(mroClass)) { + return; + } + + // If we've already determined that the types are not consistent and the caller + // hasn't requested detailed diagnostic output, we can shortcut the remainder. + if (!typesAreConsistent && !diag) { + return; + } + + ClassType.getSymbolTable(mroClass).forEach((destSymbol, name) => { + // If we've already determined that the types are not consistent and the caller + // hasn't requested detailed diagnostic output, we can shortcut the remainder. 
+ if (!typesAreConsistent && !diag) { + return; + } + + if (!destSymbol.isClassMember() || destSymbol.isIgnoredForProtocolMatch() || checkedSymbolSet.has(name)) { + return; + } + + let isMemberFromMetaclass = false; + let srcMemberInfo: ClassMember | undefined; + let srcSymbol: Symbol | undefined; + + // Special-case the `__class_getitem__` for normal protocol comparison. + // This is a convention agreed upon by typeshed maintainers. + if (!sourceIsClassObject && name === '__class_getitem__') { + return; + } + + // Special-case the `__slots__` entry for all protocol comparisons. + // This is a convention agreed upon by typeshed maintainers. + if (name === '__slots__') { + return; + } + + // Note that we've already checked this symbol. It doesn't need to + // be checked again even if it is declared by a subclass. + checkedSymbolSet.add(name); + + let destMemberType = evaluator.getDeclaredTypeOfSymbol(destSymbol)?.type; + if (!destMemberType) { + return; + } + + let srcMemberType: Type; + let isSrcReadOnly = false; + let isDestReadOnly = false; + + if (isClass(srcType)) { + // Look in the metaclass first if we're treating the source as an instantiable class. + if ( + sourceIsClassObject && + srcType.shared.effectiveMetaclass && + isInstantiableClass(srcType.shared.effectiveMetaclass) + ) { + srcMemberInfo = lookUpClassMember(srcType.shared.effectiveMetaclass, name); + if (srcMemberInfo) { + isMemberFromMetaclass = true; + } + } + + if (!srcMemberInfo) { + srcMemberInfo = lookUpClassMember(srcType, name); + } + + if (!srcMemberInfo) { + diag?.addMessage(LocAddendum.protocolMemberMissing().format({ name })); + typesAreConsistent = false; + return; + } + + srcSymbol = srcMemberInfo.symbol; + + // Partially specialize the type of the symbol based on the MRO class. + // We can skip this if it's the dest class because it is already + // specialized. 
+ if (!ClassType.isSameGenericClass(mroClass, destType)) { + destMemberType = partiallySpecializeType( + destMemberType, + mroClass, + evaluator.getTypeClassType(), + selfType + ); + } + + if (isInstantiableClass(srcMemberInfo.classType)) { + const symbolType = evaluator.getEffectiveTypeOfSymbol(srcMemberInfo.symbol); + + // If this is a function, infer its return type prior to specializing it. + if (isFunction(symbolType)) { + evaluator.inferReturnTypeIfNecessary(symbolType); + } + + srcMemberType = partiallySpecializeType( + symbolType, + srcMemberInfo.classType, + evaluator.getTypeClassType(), + selfType + ); + } else { + srcMemberType = UnknownType.create(); + } + + // If the source is a method, bind it. + if (isFunctionOrOverloaded(srcMemberType)) { + if (isMemberFromMetaclass || isInstantiableClass(srcMemberInfo.classType)) { + let isInstanceMember = !srcMemberInfo.symbol.isClassMember(); + + // Special-case dataclasses whose entries act like instance members. + if (ClassType.isDataClass(srcType)) { + const dataClassFields = ClassType.getDataClassEntries(srcType); + if (dataClassFields.some((entry) => entry.name === name)) { + isInstanceMember = true; + } + } + + if (isMemberFromMetaclass) { + isInstanceMember = false; + } + + // If this is a callable stored in an instance member, skip binding. + if (!isInstanceMember) { + const boundSrcFunction = evaluator.bindFunctionToClassOrObject( + sourceIsClassObject && !isMemberFromMetaclass + ? srcType + : ClassType.cloneAsInstance(srcType), + srcMemberType, + isMemberFromMetaclass ? undefined : (srcMemberInfo.classType as ClassType), + /* treatConstructorAsClassMethod */ undefined, + isMemberFromMetaclass ? 
srcType : selfType, + diag?.createAddendum(), + recursionCount + ); + + if (boundSrcFunction) { + srcMemberType = boundSrcFunction; + } else { + typesAreConsistent = false; + return; + } + } + } + } + + if (srcMemberInfo.isReadOnly) { + isSrcReadOnly = true; + } + } else { + srcSymbol = srcType.priv.fields.get(name); + + if (!srcSymbol) { + diag?.addMessage(LocAddendum.protocolMemberMissing().format({ name })); + typesAreConsistent = false; + return; + } + + srcMemberType = evaluator.getEffectiveTypeOfSymbol(srcSymbol); + } + + // Replace any "Self" TypeVar within the dest with the source type. + destMemberType = applySolvedTypeVars(destMemberType, selfSolution); + + // If the dest is a method, bind it. + if (!destSymbol.isInstanceMember() && isFunctionOrOverloaded(destMemberType)) { + let boundDeclaredType: FunctionType | OverloadedType | undefined; + + // Functions are considered read-only. + isDestReadOnly = true; + + if (isClass(srcType)) { + assert(srcMemberInfo); + + if (isMemberFromMetaclass || isInstantiableClass(srcMemberInfo.classType)) { + boundDeclaredType = evaluator.bindFunctionToClassOrObject( + ClassType.cloneAsInstance(srcType), + destMemberType, + isMemberFromMetaclass ? undefined : (srcMemberInfo.classType as ClassType), + /* treatConstructorAsClassMethod */ undefined, + isMemberFromMetaclass ? 
srcType : selfType, + diag, + recursionCount + ); + } + } else { + boundDeclaredType = evaluator.bindFunctionToClassOrObject( + ClassType.cloneAsInstance(destType), + destMemberType, + destType, + /* treatConstructorAsClassMethod */ undefined, + /* firstParamType */ undefined, + diag, + recursionCount + ); + } + + if (boundDeclaredType) { + boundDeclaredType = makeFunctionTypeVarsBound(boundDeclaredType); + destMemberType = boundDeclaredType; + } else { + typesAreConsistent = false; + return; + } + } + + const subDiag = diag?.createAddendum(); + + const isDestFinal = destSymbol + .getTypedDeclarations() + .some((decl) => decl.type === DeclarationType.Variable && !!decl.isFinal); + const isSrcFinal = srcSymbol + .getTypedDeclarations() + .some((decl) => decl.type === DeclarationType.Variable && !!decl.isFinal); + + if (isSrcFinal) { + isSrcReadOnly = true; + } + + if (isDestFinal) { + isDestReadOnly = true; + } + + // Properties require special processing. + if (isClassInstance(destMemberType) && ClassType.isPropertyClass(destMemberType)) { + if ( + isClassInstance(srcMemberType) && + ClassType.isPropertyClass(srcMemberType) && + !sourceIsClassObject + ) { + if ( + !assignProperty( + evaluator, + ClassType.cloneAsInstantiable(destMemberType), + ClassType.cloneAsInstantiable(srcMemberType), + mroClass, + srcType, + subDiag?.createAddendum(), + protocolConstraints, + selfSolution, + recursionCount + ) + ) { + if (subDiag) { + subDiag.addMessage(LocAddendum.memberTypeMismatch().format({ name })); + } + typesAreConsistent = false; + } + } else { + // Extract the property type from the property class. 
+ let getterType = evaluator.getGetterTypeFromProperty(destMemberType); + + if (getterType) { + getterType = partiallySpecializeType(getterType, mroClass, evaluator.getTypeClassType()); + } + + if ( + !getterType || + !evaluator.assignType( + getterType, + srcMemberType, + subDiag?.createAddendum(), + protocolConstraints, + assignTypeFlags, + recursionCount + ) + ) { + if (subDiag) { + subDiag.addMessage(LocAddendum.memberTypeMismatch().format({ name })); + } + typesAreConsistent = false; + } + + if ( + !lookUpClassMember(destMemberType, '__set__', MemberAccessFlags.SkipInstanceMembers) && + !lookUpClassMember(destMemberType, '__delete__', MemberAccessFlags.SkipInstanceMembers) + ) { + isDestReadOnly = true; + } + + if (isSrcReadOnly) { + // The source attribute is read-only. Make sure the setter + // is not defined in the dest property. + if (!isDestReadOnly) { + if (subDiag) { + subDiag.addMessage(LocAddendum.memberIsWritableInProtocol().format({ name })); + } + typesAreConsistent = false; + } + } + } + } else { + // Class and instance variables that are mutable need to enforce invariance. + const primaryDecl = destSymbol.getDeclarations()[0]; + const isInvariant = primaryDecl?.type === DeclarationType.Variable && !primaryDecl.isFinal; + + // Temporarily add the TypeVar scope ID for this method to handle method-scoped TypeVars. + const protocolConstraintsClone = protocolConstraints.clone(); + + if ( + !evaluator.assignType( + destMemberType, + srcMemberType, + subDiag?.createAddendum(), + protocolConstraintsClone, + isInvariant ? 
assignTypeFlags | AssignTypeFlags.Invariant : assignTypeFlags, + recursionCount + ) + ) { + if (subDiag) { + if (isInvariant) { + subDiag.addMessage(LocAddendum.memberIsInvariant().format({ name })); + } + subDiag.addMessage(LocAddendum.memberTypeMismatch().format({ name })); + } + typesAreConsistent = false; + } else { + protocolConstraints.copyFromClone(protocolConstraintsClone); + } + } + + if (!isDestReadOnly && isSrcReadOnly) { + if (subDiag) { + subDiag.addMessage(LocAddendum.memberIsNotReadOnlyInProtocol().format({ name })); + } + typesAreConsistent = false; + } + + const isDestClassVar = isEffectivelyClassVar(destSymbol, /* isDataclass */ false); + const isSrcClassVar = isEffectivelyClassVar( + srcSymbol, + /* isDataclass */ isClass(srcType) && ClassType.isDataClass(srcType) + ); + const isSrcVariable = srcSymbol.getDeclarations().some((decl) => decl.type === DeclarationType.Variable); + + if (sourceIsClassObject) { + // If the source is not marked as a ClassVar or the dest (the protocol) is, + // the types are not consistent given that the source is a class object. + if (isDestClassVar) { + subDiag?.addMessage(LocAddendum.memberIsClassVarInProtocol().format({ name })); + typesAreConsistent = false; + } else if (isSrcVariable && !isSrcClassVar) { + if (!isMemberFromMetaclass) { + subDiag?.addMessage(LocAddendum.memberIsNotClassVarInClass().format({ name })); + typesAreConsistent = false; + } + } + } else { + // If the source is marked as a ClassVar but the dest (the protocol) is not, + // or vice versa, the types are not consistent. 
+ if (isDestClassVar !== isSrcClassVar) { + if (isDestClassVar) { + subDiag?.addMessage(LocAddendum.memberIsClassVarInProtocol().format({ name })); + } else { + subDiag?.addMessage(LocAddendum.memberIsNotClassVarInProtocol().format({ name })); + } + typesAreConsistent = false; + } + } + + const destPrimaryDecl = getLastTypedDeclarationForSymbol(destSymbol); + const srcPrimaryDecl = getLastTypedDeclarationForSymbol(srcSymbol); + + if ( + destPrimaryDecl?.type === DeclarationType.Variable && + srcPrimaryDecl?.type === DeclarationType.Variable + ) { + const isDestReadOnly = !!destPrimaryDecl.isConstant || !!destPrimaryDecl.isFinal; + let isSrcReadOnly = !!srcPrimaryDecl.isConstant; + if (srcMemberInfo && isClass(srcMemberInfo.classType)) { + if (srcMemberInfo.isReadOnly) { + isSrcReadOnly = true; + } + } + + if (!isDestReadOnly && isSrcReadOnly) { + if (subDiag) { + subDiag.addMessage(LocAddendum.memberIsWritableInProtocol().format({ name })); + } + typesAreConsistent = false; + } + } + }); + }); + + // If the dest protocol has type parameters, make sure the source type arguments match. + if (typesAreConsistent && destType.shared.typeParams.length > 0) { + // Create a specialized version of the protocol defined by the dest and + // make sure the resulting type args can be assigned. 
+ const genericProtocolType = ClassType.specialize(destType, undefined); + const specializedProtocolType = evaluator.solveAndApplyConstraints( + genericProtocolType, + protocolConstraints + ) as ClassType; + + if (destType.priv.typeArgs) { + if ( + !evaluator.assignTypeArgs(destType, specializedProtocolType, diag, constraints, flags, recursionCount) + ) { + typesAreConsistent = false; + } + } else if (constraints) { + for (const typeParam of destType.shared.typeParams) { + const typeArgEntry = protocolConstraints.getMainConstraintSet().getTypeVar(typeParam); + + if (typeArgEntry) { + constraints.copyBounds(typeArgEntry); + } + } + } + } + + return typesAreConsistent; +} + +// Given a (possibly-specialized) destType and an optional constraint tracker, +// creates a new constraint tracker that combines the constraints from both the +// destType and the destConstraints. +function createProtocolConstraints( + evaluator: TypeEvaluator, + destType: ClassType, + constraints: ConstraintTracker | undefined +): ConstraintTracker { + const protocolConstraints = new ConstraintTracker(); + + destType.shared.typeParams.forEach((typeParam, index) => { + const entry = constraints?.getMainConstraintSet().getTypeVar(typeParam); + + if (entry) { + protocolConstraints.copyBounds(entry); + } else if (destType.priv.typeArgs && index < destType.priv.typeArgs.length) { + let typeArg = destType.priv.typeArgs[index]; + let flags: AssignTypeFlags; + let hasUnsolvedTypeVars = requiresSpecialization(typeArg); + + // If the type argument has unsolved TypeVars, see if they have + // solved values in the destConstraints. 
+ if (hasUnsolvedTypeVars && constraints) { + typeArg = evaluator.solveAndApplyConstraints(typeArg, constraints, /* applyOptions */ undefined, { + useLowerBoundOnly: true, + }); + flags = AssignTypeFlags.Default; + hasUnsolvedTypeVars = requiresSpecialization(typeArg); + } else { + flags = AssignTypeFlags.PopulateExpectedType; + + const variance = TypeVarType.getVariance(typeParam); + if (variance === Variance.Invariant) { + flags |= AssignTypeFlags.Invariant; + } else if (variance === Variance.Contravariant) { + flags |= AssignTypeFlags.Contravariant; + } + } + + if (!hasUnsolvedTypeVars) { + assignTypeVar(evaluator, typeParam, typeArg, /* diag */ undefined, protocolConstraints, flags); + } + } + }); + + return protocolConstraints; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/pyTypedUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/pyTypedUtils.ts new file mode 100644 index 00000000..569e8fd4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/pyTypedUtils.ts @@ -0,0 +1,65 @@ +/* + * pyTypedUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Parser for py.typed files. + */ + +import { FileSystem } from '../common/fileSystem'; +import { Uri } from '../common/uri/uri'; +import { isDirectory, isFile } from '../common/uri/uriUtils'; + +export interface PyTypedInfo { + pyTypedPath: Uri; + isPartiallyTyped: boolean; +} + +// +// Retrieves information about a py.typed file, if it exists, under the given path. 
+// +export function getPyTypedInfo(fileSystem: FileSystem, dirPath: Uri): PyTypedInfo | undefined { + if (!fileSystem.existsSync(dirPath) || !isDirectory(fileSystem, dirPath)) { + return undefined; + } + + const pyTypedPath = dirPath.pytypedUri; + if (!fileSystem.existsSync(pyTypedPath) || !isFile(fileSystem, pyTypedPath)) { + return undefined; + } + + return getPyTypedInfoForPyTypedFile(fileSystem, pyTypedPath); +} + +// +// Retrieves information about a py.typed file. The pyTypedPath provided must be a valid path. +// +export function getPyTypedInfoForPyTypedFile(fileSystem: FileSystem, pyTypedPath: Uri) { + // This function intentionally doesn't check whether the given py.typed path exists or not, + // as filesystem access is expensive if done repeatedly. + // The caller should verify the file's validity before calling this method and use a cache if possible + // to avoid high filesystem access costs. + let isPartiallyTyped = false; + + // Read the contents of the file as text. + const fileStats = fileSystem.statSync(pyTypedPath); + + // Do a quick sanity check on the size before we attempt to read it. This + // file should always be really small - typically zero bytes in length. + if (fileStats.size > 0 && fileStats.size < 64 * 1024) { + const pyTypedContents = fileSystem.readFileSync(pyTypedPath, 'utf8'); + + // PEP 561 doesn't specify the format of "py.typed" in any detail other than + // to say that "If a stub package is partial it MUST include partial\n in a top + // level py.typed file." 
+ if (pyTypedContents.match(/partial\n/) || pyTypedContents.match(/partial\r\n/)) { + isPartiallyTyped = true; + } + } + + return { + pyTypedPath, + isPartiallyTyped, + }; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/pythonPathUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/pythonPathUtils.ts new file mode 100644 index 00000000..4901cc19 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/pythonPathUtils.ts @@ -0,0 +1,228 @@ +/* + * pythonPathUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Utility routines used to resolve various paths in Python. + */ + +import { ConfigOptions } from '../common/configOptions'; +import { compareComparableValues } from '../common/core'; +import { FileSystem } from '../common/fileSystem'; +import { Host } from '../common/host'; +import * as pathConsts from '../common/pathConsts'; +import { PythonVersion } from '../common/pythonVersion'; +import { Uri } from '../common/uri/uri'; +import { getFileSystemEntries, isDirectory, tryStat } from '../common/uri/uriUtils'; +import { ImportLogger } from './importLogger'; + +export interface PythonPathResult { + paths: Uri[]; + prefix: Uri | undefined; +} + +export const stdLibFolderName = 'stdlib'; +export const thirdPartyFolderName = 'stubs'; + +export function getTypeShedFallbackPath(fs: FileSystem) { + const moduleDirectory = fs.getModulePath(); + if (!moduleDirectory || moduleDirectory.isEmpty()) { + return undefined; + } + + const typeshedPath = moduleDirectory.combinePaths(pathConsts.typeshedFallback); + if (fs.existsSync(typeshedPath)) { + return fs.realCasePath(typeshedPath); + } + + // In the debug version of Pyright, the code is one level + // deeper, so we need to look one level up for the typeshed fallback. 
+ const debugTypeshedPath = moduleDirectory.getDirectory().combinePaths(pathConsts.typeshedFallback); + if (fs.existsSync(debugTypeshedPath)) { + return fs.realCasePath(debugTypeshedPath); + } + + return undefined; +} + +export function getTypeshedSubdirectory(typeshedPath: Uri, isStdLib: boolean) { + return typeshedPath.combinePaths(isStdLib ? stdLibFolderName : thirdPartyFolderName); +} + +export function findPythonSearchPaths( + fs: FileSystem, + configOptions: ConfigOptions, + host: Host, + importLogger?: ImportLogger | undefined, + includeWatchPathsOnly?: boolean | undefined, + workspaceRoot?: Uri | undefined +): Uri[] { + importLogger?.log('Finding python search paths'); + + if (configOptions.venvPath !== undefined && configOptions.venv) { + const venvDir = configOptions.venv; + const venvPath = configOptions.venvPath.combinePaths(venvDir); + + const foundPaths: Uri[] = []; + const sitePackagesPaths: Uri[] = []; + + [pathConsts.lib, pathConsts.lib64, pathConsts.libAlternate].forEach((libPath) => { + const sitePackagesPath = findSitePackagesPath( + fs, + venvPath.combinePaths(libPath), + configOptions.defaultPythonVersion, + importLogger + ); + if (sitePackagesPath) { + addPathIfUnique(foundPaths, sitePackagesPath); + sitePackagesPaths.push(fs.realCasePath(sitePackagesPath)); + } + }); + + // Now add paths from ".pth" files located in each of the site packages folders. + sitePackagesPaths.forEach((sitePackagesPath) => { + const pthPaths = getPathsFromPthFiles(fs, sitePackagesPath); + pthPaths.forEach((path) => { + addPathIfUnique(foundPaths, path); + }); + }); + + if (foundPaths.length > 0) { + importLogger?.log(`Found the following '${pathConsts.sitePackages}' dirs`); + foundPaths.forEach((path) => { + importLogger?.log(` ${path}`); + }); + return foundPaths; + } + + importLogger?.log(`Did not find any '${pathConsts.sitePackages}' dirs. Falling back on python interpreter.`); + } + + // Fall back on the python interpreter. 
+ const pathResult = host.getPythonSearchPaths(configOptions.pythonPath, importLogger); + if (includeWatchPathsOnly && workspaceRoot && !workspaceRoot.isEmpty()) { + const paths = pathResult.paths + .filter((p) => !p.startsWith(workspaceRoot) || p.startsWith(pathResult.prefix)) + .map((p) => fs.realCasePath(p)); + + return paths; + } + + return pathResult.paths.map((p) => fs.realCasePath(p)); +} + +export function isPythonBinary(p: string): boolean { + p = p.trim(); + return p === 'python' || p === 'python3'; +} + +function findSitePackagesPath( + fs: FileSystem, + libPath: Uri, + pythonVersion: PythonVersion | undefined, + importLogger?: ImportLogger | undefined +): Uri | undefined { + if (fs.existsSync(libPath)) { + importLogger?.log(`Found path '${libPath}'; looking for ${pathConsts.sitePackages}`); + } else { + importLogger?.log(`Did not find '${libPath}'`); + return undefined; + } + + const sitePackagesPath = libPath.combinePaths(pathConsts.sitePackages); + if (fs.existsSync(sitePackagesPath)) { + importLogger?.log(`Found path '${sitePackagesPath}'`); + return sitePackagesPath; + } else { + importLogger?.log(`Did not find '${sitePackagesPath}', so looking for python subdirectory`); + } + + // We didn't find a site-packages directory directly in the lib + // directory. Scan for a "python3.X" directory instead. + const entries = getFileSystemEntries(fs, libPath); + + // Candidate directories start with "python3.". + const candidateDirs = entries.directories.filter((dirName) => { + if (dirName.fileName.startsWith('python3.')) { + const dirPath = dirName.combinePaths(pathConsts.sitePackages); + return fs.existsSync(dirPath); + } + return false; + }); + + // If there is a python3.X directory (where 3.X matches the configured python + // version), prefer that over other python directories. 
+ if (pythonVersion) { + const preferredDir = candidateDirs.find( + (dirName) => dirName.fileName === `python${PythonVersion.toMajorMinorString(pythonVersion)}` + ); + if (preferredDir) { + const dirPath = preferredDir.combinePaths(pathConsts.sitePackages); + importLogger?.log(`Found path '${dirPath}'`); + return dirPath; + } + } + + // If there was no python version or we didn't find an exact match, use the + // first directory that starts with "python". Most of the time, there will be + // only one. + if (candidateDirs.length > 0) { + const dirPath = candidateDirs[0].combinePaths(pathConsts.sitePackages); + importLogger?.log(`Found path '${dirPath}'`); + return dirPath; + } + + return undefined; +} + +export function readPthSearchPaths(pthFile: Uri, fs: FileSystem): Uri[] { + const searchPaths: Uri[] = []; + + if (fs.existsSync(pthFile)) { + const data = fs.readFileSync(pthFile, 'utf8'); + const lines = data.split(/\r?\n/); + lines.forEach((line) => { + const trimmedLine = line.trim(); + if (trimmedLine.length > 0 && !trimmedLine.startsWith('#') && !trimmedLine.match(/^import\s/)) { + const pthPath = pthFile.getDirectory().combinePaths(trimmedLine); + if (fs.existsSync(pthPath) && isDirectory(fs, pthPath)) { + searchPaths.push(fs.realCasePath(pthPath)); + } + } + }); + } + + return searchPaths; +} + +export function getPathsFromPthFiles(fs: FileSystem, parentDir: Uri): Uri[] { + const searchPaths: Uri[] = []; + + // Get a list of all *.pth files within the specified directory. + const pthFiles = fs + .readdirEntriesSync(parentDir) + .filter((entry) => (entry.isFile() || entry.isSymbolicLink()) && entry.name.endsWith('.pth')) + .sort((a, b) => compareComparableValues(a.name, b.name)); + + pthFiles.forEach((pthFile) => { + const filePath = fs.realCasePath(parentDir.combinePaths(pthFile.name)); + const fileStats = tryStat(fs, filePath); + + // Skip all files that are much larger than expected. 
+ if (fileStats?.isFile() && fileStats.size > 0 && fileStats.size < 64 * 1024) { + searchPaths.push(...readPthSearchPaths(filePath, fs)); + } + }); + + return searchPaths; +} + +export function addPathIfUnique(pathList: Uri[], pathToAdd: Uri) { + if (!pathList.some((path) => path.key === pathToAdd.key)) { + pathList.push(pathToAdd); + return true; + } + + return false; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/scope.ts b/python-parser/packages/pyright-internal/src/analyzer/scope.ts new file mode 100644 index 00000000..5cb5588e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/scope.ts @@ -0,0 +1,230 @@ +/* + * scope.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Represents an evaluation scope and its defined symbols. + * It also contains a link to a parent scope (except for the + * top-most built-in scope). + */ + +import { fail } from '../common/debug'; +import { DeclarationType } from './declaration'; +import { Symbol, SymbolFlags, SymbolTable } from './symbol'; + +export const enum ScopeType { + // Used for PEP 695-style type parameters. + TypeParameter, + + // Used for comprehension nodes. + Comprehension, + + // Function scopes are used for lambdas and functions. + Function, + + // Class scopes are used for classes. + Class, + + // Module scopes are used for modules. + Module, + + // Built-in scopes are used for all ambient symbols provided + // by the Python environment. + Builtin, +} + +export const enum NameBindingType { + // With "nonlocal" keyword + Nonlocal, + + // With "global" keyword + Global, +} + +// Provides information for recursive scope lookups. +export interface SymbolWithScope { + // Found symbol + symbol: Symbol; + + // Scope in which symbol was found + scope: Scope; + + // Indicates that the recursion needed to proceed + // outside of the module's scope into the builtins + // scope. 
+ isOutsideCallerModule: boolean; + + // Indicates that the recursion needed to proceed + // to a scope that is beyond the current execution + // scope. An execution scope is defined as a function, + // module, or lambda. Classes are not considered execution + // scopes because they are "executed" immediately as + // part of the scope in which they are contained. + isBeyondExecutionScope: boolean; + + // The symbol was accessed through a nonlocal or global binding. + usesNonlocalBinding: boolean; + usesGlobalBinding: boolean; +} + +export interface GlobalScopeResult { + scope: Scope; + isBeyondExecutionScope: boolean; +} + +export interface LookupSymbolOptions { + isOutsideCallerModule?: boolean; + isBeyondExecutionScope?: boolean; + useProxyScope?: boolean; + usesNonlocalBinding?: boolean; + usesGlobalBinding?: boolean; +} + +export class Scope { + // The scope type, as defined in the enumeration. + readonly type: ScopeType; + + // The next scope in the hierarchy or undefined if it's the + // top-most scope. + readonly parent: Scope | undefined; + + // An alternate parent scope that can be used to resolve symbols + // in certain contexts. Used for TypeParam scopes. + readonly proxy: Scope | undefined; + + // Association between names and symbols. + readonly symbolTable: SymbolTable = new Map(); + + // Names within this scope that are bound to other scopes + // (either nonlocal or global). + readonly notLocalBindings = new Map(); + + // Names defined by __slots__ within this scope (used only + // for class scopes). 
+ slotsNames: string[] | undefined; + + constructor(type: ScopeType, parent?: Scope, proxy?: Scope) { + this.type = type; + this.parent = parent; + this.proxy = proxy; + } + + getGlobalScope(): GlobalScopeResult { + let curScope: Scope | undefined = this; + let isBeyondExecutionScope = false; + + while (curScope) { + if (curScope.type === ScopeType.Module || curScope.type === ScopeType.Builtin) { + return { scope: curScope, isBeyondExecutionScope }; + } + + if (curScope.type === ScopeType.Function) { + isBeyondExecutionScope = true; + } + + curScope = curScope.parent; + } + + fail('failed to find scope'); + return { scope: this, isBeyondExecutionScope }; + } + + // Independently-executable scopes are those that are executed independently + // of their parent scopes. Classes are executed in the context of their parent + // scope, so they don't fit this category. + isIndependentlyExecutable(): boolean { + return this.type === ScopeType.Module || this.type === ScopeType.Function; + } + + lookUpSymbol(name: string): Symbol | undefined { + return this.symbolTable.get(name); + } + + lookUpSymbolRecursive(name: string, options?: LookupSymbolOptions): SymbolWithScope | undefined { + let effectiveScope: Scope = this; + let symbol = this.symbolTable.get(name); + + if (!symbol && options?.useProxyScope && this.proxy) { + symbol = this.proxy.symbolTable.get(name); + effectiveScope = this.proxy; + } + + if (symbol) { + // If we're searching outside of the original caller's module (global) scope, + // hide any names that are not meant to be visible to importers. + if (options?.isOutsideCallerModule && symbol.isExternallyHidden()) { + return undefined; + } + + // If the symbol is a class variable that is defined only in terms of + // member accesses, it is not accessible directly by name, so hide it. 
+ const decls = symbol.getDeclarations(); + if ( + decls.length === 0 || + decls.some((decl) => decl.type !== DeclarationType.Variable || !decl.isDefinedByMemberAccess) + ) { + return { + symbol, + isOutsideCallerModule: !!options?.isOutsideCallerModule, + isBeyondExecutionScope: !!options?.isBeyondExecutionScope, + scope: effectiveScope, + usesNonlocalBinding: !!options?.usesNonlocalBinding, + usesGlobalBinding: !!options?.usesGlobalBinding, + }; + } + } + + let parentScope: Scope | undefined; + let isNextScopeBeyondExecutionScope = options?.isBeyondExecutionScope || this.isIndependentlyExecutable(); + + const notLocalBinding = this.notLocalBindings.get(name); + if (notLocalBinding === NameBindingType.Global) { + const globalScopeResult = this.getGlobalScope(); + if (globalScopeResult.scope !== this) { + parentScope = globalScopeResult.scope; + if (globalScopeResult.isBeyondExecutionScope) { + isNextScopeBeyondExecutionScope = true; + } + } + } else { + parentScope = this.parent; + } + + if (parentScope) { + // If our recursion is about to take us outside the scope of the current + // module (i.e. into a built-in scope), indicate as such with the second + // parameter. 
+ return parentScope.lookUpSymbolRecursive(name, { + isOutsideCallerModule: !!options?.isOutsideCallerModule || this.type === ScopeType.Module, + isBeyondExecutionScope: isNextScopeBeyondExecutionScope, + usesNonlocalBinding: notLocalBinding === NameBindingType.Nonlocal || !!options?.usesNonlocalBinding, + usesGlobalBinding: notLocalBinding === NameBindingType.Global || !!options?.usesGlobalBinding, + }); + } + + return undefined; + } + + addSymbol(name: string, flags: SymbolFlags): Symbol { + const symbol = new Symbol(flags); + this.symbolTable.set(name, symbol); + return symbol; + } + + getBindingType(name: string) { + return this.notLocalBindings.get(name); + } + + setBindingType(name: string, bindingType: NameBindingType) { + return this.notLocalBindings.set(name, bindingType); + } + + setSlotsNames(names: string[]) { + this.slotsNames = names; + } + + getSlotsNames(): string[] | undefined { + return this.slotsNames; + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/scopeUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/scopeUtils.ts new file mode 100644 index 00000000..666d924e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/scopeUtils.ts @@ -0,0 +1,96 @@ +/* + * scopeUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Static utility methods related to scopes and their related + * symbol tables. + */ + +import { EvaluationScopeNode, ParseNode } from '../parser/parseNodes'; +import { getScope } from './analyzerNodeInfo'; +import { getEvaluationScopeNode } from './parseTreeUtils'; +import { Scope, ScopeType } from './scope'; + +export function getBuiltInScope(currentScope: Scope): Scope { + // Starting at the current scope, find the built-in scope, which should + // be the top-most parent. 
+ let builtInScope = currentScope; + + while (builtInScope.type !== ScopeType.Builtin) { + builtInScope = builtInScope.parent!; + } + + return builtInScope; +} + +// Locates the evaluation scope associated with the specified parse node. +export function getScopeForNode(node: ParseNode): Scope | undefined { + const scopeNode = getEvaluationScopeNode(node).node; + return getScope(scopeNode); +} + +// Returns a list of scopes associated with the node and its ancestor nodes. +// If stopScope is provided, the search will stop at that scope. +// Returns undefined if stopScope is not found. +export function getScopeHierarchy(node: ParseNode, stopScope?: Scope): Scope[] | undefined { + const scopeHierarchy: Scope[] = []; + let curNode: ParseNode | undefined = node; + + while (curNode) { + const scopeNode: EvaluationScopeNode = getEvaluationScopeNode(curNode).node; + const curScope = getScope(scopeNode); + + if (!curScope) { + return undefined; + } + + if (scopeHierarchy.length === 0 || scopeHierarchy[scopeHierarchy.length - 1] !== curScope) { + scopeHierarchy.push(curScope); + } + + if (curScope === stopScope) { + return scopeHierarchy; + } + + curNode = scopeNode.parent; + } + + return stopScope ? undefined : scopeHierarchy; +} + +// Walks up the parse tree from the specified node to find the top-most node +// that is within specified scope. 
+export function findTopNodeInScope(node: ParseNode, scope: Scope): ParseNode | undefined { + let curNode: ParseNode | undefined = node; + let prevNode: ParseNode | undefined; + let foundScope = false; + + while (curNode) { + if (getScope(curNode) === scope) { + foundScope = true; + } else if (foundScope) { + return prevNode; + } + + prevNode = curNode; + curNode = curNode.parent; + } + + return undefined; +} + +export function isScopeContainedWithin(scope: Scope, potentialParentScope: Scope): boolean { + let curScope: Scope | undefined = scope; + + while (curScope) { + if (curScope.parent === potentialParentScope) { + return true; + } + + curScope = curScope.parent; + } + + return false; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/sentinel.ts b/python-parser/packages/pyright-internal/src/analyzer/sentinel.ts new file mode 100644 index 00000000..79b679cf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/sentinel.ts @@ -0,0 +1,87 @@ +/* + * sentinels.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides type evaluation logic that is specific to PEP 661 Sentinels. 
+ */ + +import { DiagnosticRule } from '../common/diagnosticRules'; +import { LocMessage } from '../localization/localize'; +import { ArgCategory, ExpressionNode, ParseNodeType } from '../parser/parseNodes'; +import { getFileInfo } from './analyzerNodeInfo'; +import { getClassFullName, getTypeSourceId } from './parseTreeUtils'; +import { Arg, TypeEvaluator } from './typeEvaluatorTypes'; +import { ClassType, ClassTypeFlags, SentinelLiteral, Type, TypeBase } from './types'; +import { computeMroLinearization } from './typeUtils'; + +export function createSentinelType( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: Arg[] +): Type | undefined { + let className = ''; + + if (argList.length !== 1) { + evaluator.addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.sentinelParamCount(), errorNode); + return undefined; + } + + const nameArg = argList[0]; + if ( + nameArg.argCategory === ArgCategory.Simple && + nameArg.valueExpression && + nameArg.valueExpression.nodeType === ParseNodeType.StringList + ) { + className = nameArg.valueExpression.d.strings.map((s) => s.d.value).join(''); + } + + if (!className) { + evaluator.addDiagnostic( + DiagnosticRule.reportArgumentType, + LocMessage.sentinelBadName(), + argList[0].node ?? 
errorNode + ); + return undefined; + } + + if ( + errorNode.parent?.nodeType === ParseNodeType.Assignment && + errorNode.parent.d.leftExpr.nodeType === ParseNodeType.Name && + errorNode.parent.d.leftExpr.d.value !== className + ) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.sentinelNameMismatch(), + errorNode.parent.d.leftExpr + ); + return undefined; + } + + const fileInfo = getFileInfo(errorNode); + const fullClassName = getClassFullName(errorNode, fileInfo.moduleName, className); + let classType = ClassType.createInstantiable( + className, + fullClassName, + fileInfo.moduleName, + fileInfo.fileUri, + ClassTypeFlags.Final | ClassTypeFlags.ValidTypeAliasClass, + getTypeSourceId(errorNode), + /* declaredMetaclass */ undefined, + evaluator.getTypeClassType() + ); + + classType.shared.baseClasses.push(evaluator.getObjectType()); + computeMroLinearization(classType); + classType = ClassType.cloneWithLiteral(classType, new SentinelLiteral(fullClassName, className)); + + let instanceType = ClassType.cloneAsInstance(classType); + + // Is TypeForm supported? + if (fileInfo.diagnosticRuleSet.enableExperimentalFeatures) { + instanceType = TypeBase.cloneWithTypeForm(instanceType, instanceType); + } + + return instanceType; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/service.ts b/python-parser/packages/pyright-internal/src/analyzer/service.ts new file mode 100644 index 00000000..89eb73a9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/service.ts @@ -0,0 +1,1935 @@ +/* + * service.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * A service that is able to analyze a collection of + * Python files. 
+ */ + +import * as JSONC from 'jsonc-parser'; +import { AbstractCancellationTokenSource, CancellationToken } from 'vscode-languageserver'; +import { parse } from '../common/tomlUtils'; + +import { IBackgroundAnalysis, RefreshOptions } from '../backgroundAnalysisBase'; +import { + CommandLineConfigOptions, + CommandLineLanguageServerOptions, + CommandLineOptions, +} from '../common/commandLineOptions'; +import { ConfigOptions, matchFileSpecs } from '../common/configOptions'; +import { ConsoleInterface, LogLevel, StandardConsole, log } from '../common/console'; +import { isPromise, isString } from '../common/core'; +import { Diagnostic } from '../common/diagnostic'; +import { FileEditAction } from '../common/editAction'; +import { EditableProgram, ProgramView } from '../common/extensibility'; +import { FileSystem } from '../common/fileSystem'; +import { FileWatcher, FileWatcherEventType, ignoredWatchEventFunction } from '../common/fileWatcher'; +import { Host, HostFactory, NoAccessHost } from '../common/host'; +import { configFileName, defaultStubsDirectory } from '../common/pathConsts'; +import { getFileName, isRootedDiskPath, normalizeSlashes } from '../common/pathUtils'; +import { PythonVersion } from '../common/pythonVersion'; +import { ServiceKeys } from '../common/serviceKeys'; +import { ServiceProvider } from '../common/serviceProvider'; +import { Range } from '../common/textRange'; +import { timingStats } from '../common/timing'; +import { Uri } from '../common/uri/uri'; +import { UriMap } from '../common/uri/uriMap'; +import { + FileSpec, + deduplicateFolders, + getFileSpec, + hasPythonExtension, + isDirectory, + isFile, + makeDirectories, + tryStat, +} from '../common/uri/uriUtils'; +import { AnalysisCompleteCallback } from './analysis'; +import { + BackgroundAnalysisProgram, + BackgroundAnalysisProgramFactory, + InvalidatedReason, +} from './backgroundAnalysisProgram'; +import { ImportLogger } from './importLogger'; +import { ImportResolver, 
ImportResolverFactory, createImportedModuleDescriptor } from './importResolver'; +import { ChangedRange, MaxAnalysisTime, Program } from './program'; +import { findPythonSearchPaths } from './pythonPathUtils'; +import { + findConfigFile, + findConfigFileHereOrUp, + findPyprojectTomlFile, + findPyprojectTomlFileHereOrUp, +} from './serviceUtils'; +import { SourceEnumerator } from './sourceEnumerator'; +import { IPythonMode } from './sourceFile'; + +// How long since the last user activity should we wait until running +// the analyzer on any files that have not yet been analyzed? +const _userActivityBackoffTimeInMs = 250; + +const _gitDirectory = normalizeSlashes('/.git/'); + +export interface LibraryReanalysisTimeProvider { + (): number; + libraryReanalysisStarted?: () => void; + libraryUpdated?: (cancelled: boolean) => void; +} + +export interface AnalyzerServiceOptions { + console?: ConsoleInterface; + hostFactory?: HostFactory; + importResolverFactory?: ImportResolverFactory; + configOptions?: ConfigOptions; + backgroundAnalysis?: IBackgroundAnalysis; + maxAnalysisTime?: MaxAnalysisTime; + backgroundAnalysisProgramFactory?: BackgroundAnalysisProgramFactory; + libraryReanalysisTimeProvider?: LibraryReanalysisTimeProvider; + serviceId?: string; + skipScanningUserFiles?: boolean; + fileSystem?: FileSystem; + onInvalidated?: (reason: InvalidatedReason) => void; + // Optional callback fired once when initial source file enumeration completes. + onSourceEnumerationComplete?: () => void; + shouldRunAnalysis: () => boolean; +} + +interface ConfigFileContents { + configFileDirUri: Uri; + configFileJsonObj: object; +} + +// Hold uniqueId for this service. It can be used to distinguish each service later. 
+let _nextServiceId = 1; + +export function getNextServiceId(name: string) { + return `${name}_${_nextServiceId++}`; +} + +export class AnalyzerService { + protected readonly options: AnalyzerServiceOptions; + private readonly _backgroundAnalysisProgram: BackgroundAnalysisProgram; + private readonly _serviceProvider: ServiceProvider; + + private _instanceName: string; + private _executionRootUri: Uri; + private _typeStubTargetUri: Uri | undefined; + private _typeStubTargetIsSingleFile = false; + private _sourceFileWatcher: FileWatcher | undefined; + private _reloadConfigTimer: any; + private _libraryReanalysisTimer: any; + private _primaryConfigFileUri: Uri | undefined; + private _extendedConfigFileUris: Uri[] = []; + private _configFileWatcher: FileWatcher | undefined; + private _libraryFileWatcher: FileWatcher | undefined; + private _librarySearchUrisToWatch: Uri[] | undefined; + private _onCompletionCallback: AnalysisCompleteCallback | undefined; + private _commandLineOptions: CommandLineOptions | undefined; + private _analyzeTimer: any; + private _requireTrackedFileUpdate = true; + private _lastUserInteractionTime = 0; + private _backgroundAnalysisCancellationSource: AbstractCancellationTokenSource | undefined; + private _sourceEnumerator: SourceEnumerator | undefined; + + private _disposed = false; + private _pendingLibraryChanges: RefreshOptions = { changesOnly: true }; + + constructor(instanceName: string, serviceProvider: ServiceProvider, options: AnalyzerServiceOptions) { + this._instanceName = instanceName; + + this._executionRootUri = Uri.empty(); + this.options = options; + + this.options.serviceId = this.options.serviceId ?? getNextServiceId(instanceName); + this.options.console = options.console || new StandardConsole(); + + // Create local copy of the given service provider. + this._serviceProvider = serviceProvider.clone(); + + // Override the console and the file system if they were explicitly provided. 
+ if (this.options.console) { + this._serviceProvider.add(ServiceKeys.console, this.options.console); + } + if (this.options.fileSystem) { + this._serviceProvider.add(ServiceKeys.fs, this.options.fileSystem); + } + + this.options.importResolverFactory = options.importResolverFactory ?? AnalyzerService.createImportResolver; + this.options.hostFactory = options.hostFactory ?? (() => new NoAccessHost()); + + this.options.configOptions = + options.configOptions ?? new ConfigOptions(Uri.file(process.cwd(), this._serviceProvider)); + const importResolver = this.options.importResolverFactory( + this._serviceProvider, + this.options.configOptions, + this.options.hostFactory() + ); + + this._backgroundAnalysisProgram = + this.options.backgroundAnalysisProgramFactory !== undefined + ? this.options.backgroundAnalysisProgramFactory( + this.options.serviceId, + this._serviceProvider, + this.options.configOptions, + importResolver, + this.options.backgroundAnalysis, + this.options.maxAnalysisTime + ) + : new BackgroundAnalysisProgram( + this.options.serviceId, + this._serviceProvider, + this.options.configOptions, + importResolver, + this.options.backgroundAnalysis, + this.options.maxAnalysisTime, + /* disableChecker */ undefined + ); + } + + get fs() { + return this._backgroundAnalysisProgram.importResolver.fileSystem; + } + + get serviceProvider() { + return this._serviceProvider; + } + + get cancellationProvider() { + return this.serviceProvider.cancellationProvider(); + } + + get librarySearchUrisToWatch() { + return this._librarySearchUrisToWatch; + } + + get backgroundAnalysisProgram(): BackgroundAnalysisProgram { + return this._backgroundAnalysisProgram; + } + + get test_program() { + return this._program; + } + + get id() { + return this.options.serviceId!; + } + + get checkOnlyOpenFiles() { + return !!this._commandLineOptions?.languageServerSettings.checkOnlyOpenFiles; + } + + setServiceName(instanceName: string) { + this._instanceName = instanceName; + } + + clone( + 
instanceName: string, + serviceId: string, + backgroundAnalysis?: IBackgroundAnalysis, + fileSystem?: FileSystem + ): AnalyzerService { + const service = new AnalyzerService(instanceName, this._serviceProvider, { + ...this.options, + serviceId, + backgroundAnalysis, + skipScanningUserFiles: true, + fileSystem, + }); + + // Cloned service will use whatever user files the service currently has. + const userFiles = this.getUserFiles(); + service.backgroundAnalysisProgram.setTrackedFiles(userFiles); + service.backgroundAnalysisProgram.markAllFilesDirty(true); + + // Make sure we keep editor content (open file) which could be different than one in the file system. + for (const fileInfo of this.backgroundAnalysisProgram.program.getOpened()) { + const version = fileInfo.sourceFile.getClientVersion(); + if (version !== undefined) { + service.setFileOpened( + fileInfo.uri, + version, + fileInfo.sourceFile.getOpenFileContents()!, + fileInfo.ipythonMode, + fileInfo.chainedSourceFile?.uri + ); + } + } + + return service; + } + + runEditMode(callback: (e: EditableProgram) => void, token: CancellationToken): FileEditAction[]; + runEditMode(callback: (e: EditableProgram) => Promise, token: CancellationToken): Promise; + runEditMode( + callback: (e: EditableProgram) => void | Promise, + token: CancellationToken + ): FileEditAction[] | Promise { + let edits: FileEditAction[] = []; + this._backgroundAnalysisProgram.enterEditMode(); + try { + const result = this._program.runEditMode(callback, token); + if (!isPromise(result)) { + edits = this._backgroundAnalysisProgram.exitEditMode(); + return token.isCancellationRequested ? [] : edits; + } + + return result.then( + (r) => { + edits = this._backgroundAnalysisProgram.exitEditMode(); + return token.isCancellationRequested ? 
[] : edits; + }, + (e) => { + this._backgroundAnalysisProgram.exitEditMode(); + throw e; + } + ); + } catch (e) { + this._backgroundAnalysisProgram.exitEditMode(); + throw e; + } + } + + dispose() { + if (!this._disposed) { + // Make sure we dispose program, otherwise, entire program + // will leak. + this._backgroundAnalysisProgram.dispose(); + } + + this._disposed = true; + this._removeSourceFileWatchers(); + this._removeConfigFileWatcher(); + this._removeLibraryFileWatcher(); + this._clearReloadConfigTimer(); + this._clearReanalysisTimer(); + this._clearLibraryReanalysisTimer(); + } + + static createImportResolver(serviceProvider: ServiceProvider, options: ConfigOptions, host: Host): ImportResolver { + return new ImportResolver(serviceProvider, options, host); + } + + setCompletionCallback(callback: AnalysisCompleteCallback | undefined): void { + this._onCompletionCallback = callback; + this._backgroundAnalysisProgram.setCompletionCallback(callback); + } + + setOptions(commandLineOptions: CommandLineOptions): void { + this._commandLineOptions = commandLineOptions; + + const host = this._hostFactory(); + const configOptions = this._getConfigOptions(host, commandLineOptions); + + this._backgroundAnalysisProgram.setConfigOptions(configOptions); + + this._executionRootUri = configOptions.projectRoot; + this.applyConfigOptions(host); + } + + hasSourceFile(uri: Uri): boolean { + return this.backgroundAnalysisProgram.hasSourceFile(uri); + } + + isTracked(uri: Uri): boolean { + return this._program.owns(uri); + } + + getUserFiles() { + return this._program.getUserFiles().map((i) => i.uri); + } + + getOpenFiles() { + return this._program.getOpened().map((i) => i.uri); + } + + getOwnedFiles() { + return this._program.getOwnedFiles().map((i) => i.uri); + } + + setFileOpened( + uri: Uri, + version: number | null, + contents: string, + ipythonMode = IPythonMode.None, + chainedFileUri?: Uri + ) { + // Open the file. 
Notebook cells are always tracked as they aren't 3rd party library files. + // This is how it's worked in the past since each notebook used to have its own + // workspace and the workspace include setting marked all cells as tracked. + // In check-only-open-files mode, treat all opened documents as tracked even if they + // are not owned by this workspace. This ensures in-memory edits to dependency files + // (e.g. files imported via extraPaths in another workspace) invalidate the program + // and are reflected immediately in language features like hover/rename. + this._backgroundAnalysisProgram.setFileOpened(uri, version, contents, { + isTracked: this.isTracked(uri) || this.checkOnlyOpenFiles || ipythonMode !== IPythonMode.None, + ipythonMode, + chainedFileUri: chainedFileUri, + }); + this.scheduleReanalysis(/* requireTrackedFileUpdate */ false); + } + + getChainedUri(uri: Uri): Uri | undefined { + return this._backgroundAnalysisProgram.getChainedUri(uri); + } + + updateChainedUri(uri: Uri, chainedFileUri: Uri | undefined) { + this._backgroundAnalysisProgram.updateChainedUri(uri, chainedFileUri); + this.scheduleReanalysis(/* requireTrackedFileUpdate */ false); + } + + updateOpenFileContents( + uri: Uri, + version: number | null, + contents: string, + ipythonMode = IPythonMode.None, + changedRange?: ChangedRange + ) { + this._backgroundAnalysisProgram.updateOpenFileContents(uri, version, contents, { + isTracked: this.isTracked(uri) || this.checkOnlyOpenFiles || ipythonMode !== IPythonMode.None, + ipythonMode, + chainedFileUri: undefined, + changedRange, + }); + this.scheduleReanalysis(/* requireTrackedFileUpdate */ false); + } + + setFileClosed(uri: Uri, isTracked?: boolean) { + this._backgroundAnalysisProgram.setFileClosed(uri, isTracked); + this.scheduleReanalysis(/* requireTrackedFileUpdate */ false); + } + + addInterimFile(uri: Uri) { + this._backgroundAnalysisProgram.addInterimFile(uri); + } + + getParserOutput(uri: Uri) { + return 
this._program.getParserOutput(uri); + } + + getParseResults(uri: Uri) { + return this._program.getParseResults(uri); + } + + getSourceFile(uri: Uri) { + return this._program.getBoundSourceFile(uri); + } + + getTextOnRange(fileUri: Uri, range: Range, token: CancellationToken) { + return this._program.getTextOnRange(fileUri, range, token); + } + + run(callback: (p: ProgramView) => T, token: CancellationToken): T { + return this._program.run(callback, token); + } + + printStats() { + this._console.info(''); + this._console.info('Analysis stats'); + + const boundFileCount = this._program.getFileCount(/* userFileOnly */ false); + this._console.info('Total files parsed and bound: ' + boundFileCount.toString()); + + const checkedFileCount = this._program.getUserFileCount(); + this._console.info('Total files checked: ' + checkedFileCount.toString()); + } + + printDetailedAnalysisTimes() { + this._program.printDetailedAnalysisTimes(); + } + + printDependencies(verbose: boolean) { + this._program.printDependencies(this._executionRootUri, verbose); + } + + analyzeFile(fileUri: Uri, token: CancellationToken): Promise { + return this._backgroundAnalysisProgram.analyzeFile(fileUri, token); + } + + analyzeFileAndGetDiagnostics(fileUri: Uri, token: CancellationToken): Promise { + return this._backgroundAnalysisProgram.analyzeFileAndGetDiagnostics(fileUri, token); + } + + getDiagnosticsForRange(fileUri: Uri, range: Range, token: CancellationToken): Promise { + return this._backgroundAnalysisProgram.getDiagnosticsForRange(fileUri, range, token); + } + + getConfigOptions() { + return this._configOptions; + } + + getImportResolver(): ImportResolver { + return this._backgroundAnalysisProgram.importResolver; + } + + recordUserInteractionTime() { + this._lastUserInteractionTime = Date.now(); + + // If we have a pending timer for reanalysis, cancel it + // and reschedule for some time in the future. 
+ if (this._analyzeTimer) { + this.scheduleReanalysis(/* requireTrackedFileUpdate */ false); + } + } + + test_getConfigOptions(commandLineOptions: CommandLineOptions): ConfigOptions { + return this._getConfigOptions(this._backgroundAnalysisProgram.host, commandLineOptions); + } + + test_getFileNamesFromFileSpecs(): Uri[] { + const enumerator = new SourceEnumerator( + this._configOptions.include, + this._configOptions.exclude, + !!this._configOptions.autoExcludeVenv, + this.fs, + this._console + ); + + const results = enumerator.enumerate(0); + return this._getTrackedFileList(results.matches); + } + + test_shouldHandleSourceFileWatchChanges(uri: Uri, isFile: boolean) { + return this._shouldHandleSourceFileWatchChanges(uri, isFile); + } + + test_shouldHandleLibraryFileWatchChanges(uri: Uri, libSearchUris: Uri[]) { + return this._shouldHandleLibraryFileWatchChanges(uri, libSearchUris); + } + + writeTypeStub(token: CancellationToken): void { + const typingsSubdirUri = this._getTypeStubFolder(); + + this._program.writeTypeStub( + this._typeStubTargetUri ?? Uri.empty(), + this._typeStubTargetIsSingleFile, + typingsSubdirUri, + token + ); + } + + writeTypeStubInBackground(token: CancellationToken): Promise { + const typingsSubdirUri = this._getTypeStubFolder(); + + return this._backgroundAnalysisProgram.writeTypeStub( + this._typeStubTargetUri ?? Uri.empty(), + this._typeStubTargetIsSingleFile, + typingsSubdirUri, + token + ); + } + + invalidateAndScheduleReanalysis(reason: InvalidatedReason) { + this.invalidateAndForceReanalysis(reason); + this.scheduleReanalysis(/* requireTrackedFileUpdate */ false); + } + + invalidateAndForceReanalysis(reason: InvalidatedReason, refreshOptions?: RefreshOptions) { + if (this.options.onInvalidated) { + this.options.onInvalidated(reason); + } + + this._backgroundAnalysisProgram.invalidateAndForceReanalysis(reason, refreshOptions); + } + + // Forces the service to stop all analysis, discard all its caches, + // and research for files. 
+ restart() { + this.applyConfigOptions(this._hostFactory()); + + this._backgroundAnalysisProgram.restart(); + } + + // Attempts to make progress on source file enumeration if there is an active + // source enumerator associated with the service. Returns true if complete. + enumerateSourceFiles(maxSourceEnumeratorTime: number): boolean { + // If there is no active source enumerator, we're done. + if (!this._sourceEnumerator) { + return true; + } + + let fileMap: Map; + + if (this._executionRootUri.isEmpty()) { + // No user files for default workspace. + fileMap = new Map(); + } else { + const enumerator = this._sourceEnumerator; + const enumResults = timingStats.findFilesTime.timeOperation(() => + enumerator.enumerate(maxSourceEnumeratorTime) + ); + + if (!enumResults.isComplete) { + return false; + } + + // Update the config options to include the auto-excluded directories. + const excludes = this.options.configOptions?.exclude; + if (enumResults.autoExcludedDirs && excludes) { + enumResults.autoExcludedDirs.forEach((excludedDir) => { + if (!FileSpec.isInPath(excludedDir, excludes)) { + excludes.push(getFileSpec(this._configOptions.projectRoot, `${excludedDir}/**`)); + } + }); + this._backgroundAnalysisProgram.setConfigOptions(this._configOptions); + } + + fileMap = enumResults.matches; + + const fileList = this._getTrackedFileList(fileMap); + this._backgroundAnalysisProgram.setTrackedFiles(fileList); + + // Source file enumeration is complete. Proceed with analysis. + this._sourceEnumerator = undefined; + + if (this.options.onSourceEnumerationComplete) { + try { + this.options.onSourceEnumerationComplete(); + } catch (e) { + // Swallow exceptions to avoid impacting normal analysis. + this._console.error( + `onSourceEnumerationComplete callback failed: ${(e as Error)?.message ?? String(e)}` + ); + } + } + } + + return true; + } + + protected runAnalysis(token: CancellationToken) { + // Double check we're allowed to run analysis now. 
We might be in pull mode or + // we might not have a workspace response callback. The creation of the workspace + // callback will cause this to rerun, so no need to start polling. + if (this.options.shouldRunAnalysis()) { + const moreToAnalyze = this._backgroundAnalysisProgram.startAnalysis(token); + if (moreToAnalyze) { + this.scheduleReanalysis(/* requireTrackedFileUpdate */ false); + } + } else if (this.options.onInvalidated) { + // Just cause a refresh. + this.options.onInvalidated(InvalidatedReason.Reanalyzed); + } + } + + protected scheduleReanalysis(requireTrackedFileUpdate: boolean) { + if (this._disposed || !this._commandLineOptions?.languageServerSettings.enableAmbientAnalysis) { + // already disposed + return; + } + + if (requireTrackedFileUpdate) { + this._requireTrackedFileUpdate = true; + } + + this._backgroundAnalysisCancellationSource?.cancel(); + + // Remove any existing analysis timer. + this._clearReanalysisTimer(); + + // How long has it been since the user interacted with the service? + // If the user is actively typing, back off to let him or her finish. + const timeSinceLastUserInteractionInMs = Date.now() - this._lastUserInteractionTime; + const minBackoffTimeInMs = _userActivityBackoffTimeInMs; + + // We choose a small non-zero value here. If this value + // is too small (like zero), the VS Code extension becomes + // unresponsive during heavy analysis. If this number is too + // large, analysis takes longer. + const minTimeBetweenAnalysisPassesInMs = 5; + + const timeUntilNextAnalysisInMs = Math.max( + minBackoffTimeInMs - timeSinceLastUserInteractionInMs, + minTimeBetweenAnalysisPassesInMs + ); + + // Schedule a new timer. + this._analyzeTimer = setTimeout(() => { + this._analyzeTimer = undefined; + + if (this._requireTrackedFileUpdate) { + this._updateTrackedFileList(/* markFilesDirtyUnconditionally */ false); + } + + // Continue to enumerate sources if we haven't finished doing so. 
+ // Use the "noOpenFilesTimeInMs" limit if it's provided. Otherwise + // do all enumeration in one shot. The latter is used for the CLI + // and other environments where the user is not blocked on the operation. + const maxSourceEnumeratorTime = this.options.maxAnalysisTime?.noOpenFilesTimeInMs ?? 0; + if (!this.enumerateSourceFiles(maxSourceEnumeratorTime)) { + this.scheduleReanalysis(/* requireTrackedFileUpdate */ false); + return; + } + + // Recreate the cancellation token every time we start analysis. + this._backgroundAnalysisCancellationSource = this.cancellationProvider.createCancellationTokenSource(); + + // Now that the timer has fired, actually send the message to the BG thread to + // start the analysis. + this.runAnalysis(this._backgroundAnalysisCancellationSource.token); + }, timeUntilNextAnalysisInMs); + } + + protected applyConfigOptions(host: Host) { + // Indicate that we are about to reanalyze because of this config change. + if (this.options.onInvalidated) { + this.options.onInvalidated(InvalidatedReason.Reanalyzed); + } + // Allocate a new import resolver because the old one has information + // cached based on the previous config options. + const importResolver = this._importResolverFactory( + this._serviceProvider, + this._backgroundAnalysisProgram.configOptions, + host + ); + + this._backgroundAnalysisProgram.setImportResolver(importResolver); + + if (this._commandLineOptions?.fromLanguageServer || this._configOptions.verboseOutput) { + const logLevel = this._configOptions.verboseOutput ? 
LogLevel.Info : LogLevel.Log; + + const execEnvs = this._configOptions.getExecutionEnvironments(); + + for (const execEnv of execEnvs) { + log(this._console, logLevel, `Execution environment: ${execEnv.name}`); + log(this._console, logLevel, ` Extra paths:`); + if (execEnv.extraPaths.length > 0) { + execEnv.extraPaths.forEach((path) => { + log(this._console, logLevel, ` ${path.toUserVisibleString()}`); + }); + } else { + log(this._console, logLevel, ` (none)`); + } + log(this._console, logLevel, ` Python version: ${PythonVersion.toString(execEnv.pythonVersion)}`); + log(this._console, logLevel, ` Python platform: ${execEnv.pythonPlatform ?? 'All'}`); + log(this._console, logLevel, ` Search paths:`); + const roots = importResolver.getImportRoots(execEnv, /* forLogging */ true); + roots.forEach((path) => { + log(this._console, logLevel, ` ${path.toUserVisibleString()}`); + }); + } + } + + this._updateLibraryFileWatcher(); + this._updateConfigFileWatcher(); + this._updateSourceFileWatchers(); + this._updateTrackedFileList(/* markFilesDirtyUnconditionally */ true); + + this.scheduleReanalysis(/* requireTrackedFileUpdate */ false); + } + + private get _console() { + return this.options.console!; + } + + private get _hostFactory() { + return this.options.hostFactory!; + } + + private get _importResolverFactory() { + return this.options.importResolverFactory!; + } + + private get _program() { + return this._backgroundAnalysisProgram.program; + } + + private get _configOptions() { + return this._backgroundAnalysisProgram.configOptions; + } + + private get _watchForSourceChanges() { + return !!this._commandLineOptions?.languageServerSettings.watchForSourceChanges; + } + + private get _watchForLibraryChanges() { + return ( + !!this._commandLineOptions?.languageServerSettings.watchForLibraryChanges && + !!this.options.libraryReanalysisTimeProvider + ); + } + + private get _watchForConfigChanges() { + return 
!!this._commandLineOptions?.languageServerSettings.watchForConfigChanges; + } + + private get _typeCheckingMode() { + return this._commandLineOptions?.configSettings.typeCheckingMode; + } + + private get _verboseOutput(): boolean { + return !!this._configOptions.verboseOutput; + } + + private get _typeStubTargetImportName() { + return this._commandLineOptions?.languageServerSettings.typeStubTargetImportName; + } + + // Calculates the effective options based on the command-line options, + // an optional config file, and default values. + private _getConfigOptions(host: Host, commandLineOptions: CommandLineOptions): ConfigOptions { + const optionRoot = commandLineOptions.executionRoot; + const executionRootUri = Uri.is(optionRoot) + ? optionRoot + : isString(optionRoot) && optionRoot.length > 0 + ? Uri.file(optionRoot, this.serviceProvider, /* checkRelative */ true) + : Uri.defaultWorkspace(this.serviceProvider); + + const executionRoot = this.fs.realCasePath(executionRootUri); + let projectRoot = executionRoot; + let configFilePath: Uri | undefined; + let pyprojectFilePath: Uri | undefined; + + if (commandLineOptions.configFilePath) { + // If the config file path was specified, determine whether it's + // a directory (in which case the default config file name is assumed) + // or a file. + configFilePath = this.fs.realCasePath( + isRootedDiskPath(commandLineOptions.configFilePath) + ? 
Uri.file(commandLineOptions.configFilePath, this.serviceProvider, /* checkRelative */ true) + : projectRoot.resolvePaths(commandLineOptions.configFilePath) + ); + + if (!this.fs.existsSync(configFilePath)) { + this._console.info(`Configuration file not found at ${configFilePath.toUserVisibleString()}.`); + configFilePath = projectRoot; + } else { + if (configFilePath.lastExtension.endsWith('.json') || configFilePath.lastExtension.endsWith('.toml')) { + projectRoot = configFilePath.getDirectory(); + } else { + projectRoot = configFilePath; + configFilePath = findConfigFile(this.fs, configFilePath); + if (!configFilePath) { + this._console.info(`Configuration file not found at ${projectRoot.toUserVisibleString()}.`); + } + } + } + } else if (commandLineOptions.executionRoot) { + // In a project-based IDE like VS Code, we should assume that the + // project root directory contains the config file. + configFilePath = findConfigFile(this.fs, projectRoot); + + // If pyright is being executed from the command line, the working + // directory may be deep within a project, and we need to walk up the + // directory hierarchy to find the project root. + if (!configFilePath && !commandLineOptions.fromLanguageServer) { + configFilePath = findConfigFileHereOrUp(this.fs, projectRoot); + } + + if (configFilePath) { + projectRoot = configFilePath.getDirectory(); + } else { + this._console.log(`No configuration file found.`); + configFilePath = undefined; + } + } + + if (!configFilePath) { + // See if we can find a pyproject.toml file in this directory. 
+ pyprojectFilePath = findPyprojectTomlFile(this.fs, projectRoot); + + if (!pyprojectFilePath && !commandLineOptions.fromLanguageServer) { + pyprojectFilePath = findPyprojectTomlFileHereOrUp(this.fs, projectRoot); + } + + if (pyprojectFilePath) { + projectRoot = pyprojectFilePath.getDirectory(); + this._console.log(`pyproject.toml file found at ${projectRoot.toUserVisibleString()}.`); + } else { + this._console.log(`No pyproject.toml file found.`); + } + } + + const configOptions = new ConfigOptions(projectRoot); + + // If we found a config file, load it and apply its settings. + const configs = this._getExtendedConfigurations(configFilePath ?? pyprojectFilePath); + if (configs && configs.length > 0) { + // With a pyrightconfig.json set, we want the typeCheckingMode to always be standard + // as that's what the Pyright CLI will expect. Command line options (if not a language server) and + // the config file can override this. + configOptions.initializeTypeCheckingMode('standard'); + + // Then we apply the config file settings. This can update the + // the typeCheckingMode. + for (const config of configs) { + configOptions.initializeFromJson( + config.configFileJsonObj, + config.configFileDirUri, + this.serviceProvider, + host + ); + } + + // Set the configFileSource since we have a config file. + configOptions.configFileSource = configFilePath ?? pyprojectFilePath; + + // When not in language server mode, command line options override config file options. + if (!commandLineOptions.fromLanguageServer) { + this._applyCommandLineOverrides(configOptions, commandLineOptions.configSettings, projectRoot, false); + } + } else { + // Initialize the type checking mode based on if this is for a language server or not. Language + // servers default to 'off' when no config file is found. + configOptions.initializeTypeCheckingMode(commandLineOptions.fromLanguageServer ? 'off' : 'standard'); + + // If there are no config files, we can then directly apply the command line options. 
+ this._applyCommandLineOverrides( + configOptions, + commandLineOptions.configSettings, + projectRoot, + commandLineOptions.fromLanguageServer + ); + } + + // Apply the command line options that are not in the config file. These settings + // only apply to the language server. + this._applyLanguageServerOptions(configOptions, projectRoot, commandLineOptions.languageServerSettings); + + // Ensure that if no command line or config options were applied, we have some defaults. + this._ensureDefaultOptions(host, configOptions, projectRoot, executionRoot, commandLineOptions); + + // Once we have defaults, we can then setup the execution environments. Execution environments + // inherit from the defaults. + if (configs) { + for (const config of configs) { + configOptions.setupExecutionEnvironments( + config.configFileJsonObj, + config.configFileDirUri, + this.serviceProvider.console() + ); + } + } + + return configOptions; + } + + private _ensureDefaultOptions( + host: Host, + configOptions: ConfigOptions, + projectRoot: Uri, + executionRoot: Uri, + commandLineOptions: CommandLineOptions + ) { + const defaultExcludes = ['**/node_modules', '**/__pycache__', '**/.*']; + + // If no include paths were provided, assume that all files within + // the project should be included. + if (configOptions.include.length === 0) { + this._console.info(`No include entries specified; assuming ${projectRoot.toUserVisibleString()}`); + configOptions.include.push(getFileSpec(projectRoot, '.')); + } + + // If there was no explicit set of excludes, add a few common ones to + // avoid long scan times. 
+ if (configOptions.exclude.length === 0) { + defaultExcludes.forEach((exclude) => { + this._console.info(`Auto-excluding ${exclude}`); + configOptions.exclude.push(getFileSpec(projectRoot, exclude)); + }); + + if (configOptions.autoExcludeVenv === undefined) { + configOptions.autoExcludeVenv = true; + } + } + + if (!configOptions.defaultExtraPaths) { + configOptions.ensureDefaultExtraPaths( + this.fs, + commandLineOptions.configSettings.autoSearchPaths ?? false, + commandLineOptions.configSettings.extraPaths + ); + } + + if (configOptions.defaultPythonPlatform === undefined) { + configOptions.defaultPythonPlatform = commandLineOptions.configSettings.pythonPlatform; + } + if (configOptions.defaultPythonVersion === undefined) { + configOptions.defaultPythonVersion = commandLineOptions.configSettings.pythonVersion; + } + + // If the caller specified that "typeshedPath" is the root of the project, + // then we're presumably running in the typeshed project itself. Auto-exclude + // stdlib packages that don't match the current Python version. + if ( + configOptions.typeshedPath && + configOptions.typeshedPath === projectRoot && + configOptions.defaultPythonVersion !== undefined + ) { + const excludeList = this.getImportResolver().getTypeshedStdlibExcludeList( + configOptions.typeshedPath, + configOptions.defaultPythonVersion, + configOptions.defaultPythonPlatform + ); + + this._console.info(`Excluding typeshed stdlib stubs according to VERSIONS file:`); + excludeList.forEach((exclude) => { + this._console.info(` ${exclude}`); + configOptions.exclude.push(getFileSpec(executionRoot, exclude.getFilePath())); + }); + } + + // If useLibraryCodeForTypes is unspecified, default it to true. + if (configOptions.useLibraryCodeForTypes === undefined) { + configOptions.useLibraryCodeForTypes = true; + } + if (configOptions.stubPath) { + // If there was a stub path specified, validate it. 
+ if (!this.fs.existsSync(configOptions.stubPath) || !isDirectory(this.fs, configOptions.stubPath)) { + this._console.warn(`stubPath ${configOptions.stubPath} is not a valid directory.`); + } + } else { + // If no stub path was specified, use a default path. + configOptions.stubPath = configOptions.projectRoot.resolvePaths(defaultStubsDirectory); + } + + // Do some sanity checks on the specified settings and report missing + // or inconsistent information. + if (configOptions.venvPath) { + if (!this.fs.existsSync(configOptions.venvPath) || !isDirectory(this.fs, configOptions.venvPath)) { + this._console.error( + `venvPath ${configOptions.venvPath.toUserVisibleString()} is not a valid directory.` + ); + } + + // venvPath without venv means it won't do anything while resolveImport. + // so first, try to set venv from existing configOption if it is null. if both are null, + // then, resolveImport won't consider venv + configOptions.venv = configOptions.venv ?? this._configOptions.venv; + if (configOptions.venv && configOptions.venvPath) { + const fullVenvPath = configOptions.venvPath.resolvePaths(configOptions.venv); + + if (!this.fs.existsSync(fullVenvPath) || !isDirectory(this.fs, fullVenvPath)) { + this._console.error( + `venv ${ + configOptions.venv + } subdirectory not found in venv path ${configOptions.venvPath.toUserVisibleString()}.` + ); + } else { + const importLogger = configOptions.verboseOutput ? new ImportLogger() : undefined; + if (findPythonSearchPaths(this.fs, configOptions, host, importLogger) === undefined) { + this._console.error( + `site-packages directory cannot be located for venvPath ` + + `${configOptions.venvPath.toUserVisibleString()} and venv ${configOptions.venv}.` + ); + + importLogger?.getLogs().forEach((diag) => { + this._console.error(` ${diag}`); + }); + } + } + } + } + + // Is there a reference to a venv? If so, there needs to be a valid venvPath. 
+ if (configOptions.venv) { + if (!configOptions.venvPath) { + this._console.warn(`venvPath not specified, so venv settings will be ignored.`); + } + } + + if (configOptions.typeshedPath) { + if (!this.fs.existsSync(configOptions.typeshedPath) || !isDirectory(this.fs, configOptions.typeshedPath)) { + this._console.error( + `typeshedPath ${configOptions.typeshedPath.toUserVisibleString()} is not a valid directory.` + ); + } + } + + // This is a special case. It can be set in the config file, but if it's set on the command line, we should always + // override it. + if (commandLineOptions.configSettings.verboseOutput !== undefined) { + configOptions.verboseOutput = commandLineOptions.configSettings.verboseOutput; + } + + // Ensure default python version and platform. + configOptions.ensureDefaultPythonVersion(host, this._console); + configOptions.ensureDefaultPythonPlatform(host, this._console); + } + + private _applyLanguageServerOptions( + configOptions: ConfigOptions, + projectRoot: Uri, + languageServerOptions: CommandLineLanguageServerOptions + ) { + configOptions.disableTaggedHints = !!languageServerOptions.disableTaggedHints; + if (languageServerOptions.checkOnlyOpenFiles !== undefined) { + configOptions.checkOnlyOpenFiles = languageServerOptions.checkOnlyOpenFiles; + } + if (languageServerOptions.autoImportCompletions !== undefined) { + configOptions.autoImportCompletions = languageServerOptions.autoImportCompletions; + } + if (languageServerOptions.indexing !== undefined) { + configOptions.indexing = languageServerOptions.indexing; + } + if (languageServerOptions.taskListTokens) { + configOptions.taskListTokens = languageServerOptions.taskListTokens; + } + if (languageServerOptions.logTypeEvaluationTime !== undefined) { + configOptions.logTypeEvaluationTime = languageServerOptions.logTypeEvaluationTime; + } + configOptions.typeEvaluationTimeThreshold = languageServerOptions.typeEvaluationTimeThreshold; + + // Special case, the language service can also set a 
pythonPath. It should override any other setting. + if (languageServerOptions.pythonPath) { + this._console.info( + `Setting pythonPath for service "${this._instanceName}": ` + `"${languageServerOptions.pythonPath}"` + ); + configOptions.pythonPath = this.fs.realCasePath( + Uri.file(languageServerOptions.pythonPath, this.serviceProvider, /* checkRelative */ true) + ); + } + if (languageServerOptions.venvPath) { + if (!configOptions.venvPath) { + configOptions.venvPath = projectRoot.resolvePaths(languageServerOptions.venvPath); + } + } + } + + private _applyCommandLineOverrides( + configOptions: ConfigOptions, + commandLineOptions: CommandLineConfigOptions, + projectRoot: Uri, + fromLanguageServer: boolean + ) { + if (commandLineOptions.typeCheckingMode) { + configOptions.initializeTypeCheckingMode(commandLineOptions.typeCheckingMode); + } + + if (commandLineOptions.extraPaths) { + configOptions.ensureDefaultExtraPaths( + this.fs, + commandLineOptions.autoSearchPaths ?? false, + commandLineOptions.extraPaths + ); + } + + if (commandLineOptions.pythonVersion || commandLineOptions.pythonPlatform) { + configOptions.defaultPythonVersion = commandLineOptions.pythonVersion ?? configOptions.defaultPythonVersion; + configOptions.defaultPythonPlatform = + commandLineOptions.pythonPlatform ?? 
configOptions.defaultPythonPlatform; + } + + if (commandLineOptions.pythonPath) { + this._console.info( + `Setting pythonPath for service "${this._instanceName}": ` + `"${commandLineOptions.pythonPath}"` + ); + configOptions.pythonPath = this.fs.realCasePath( + Uri.file(commandLineOptions.pythonPath, this.serviceProvider, /* checkRelative */ true) + ); + } + + if (commandLineOptions.pythonEnvironmentName) { + this._console.info( + `Setting environmentName for service "${this._instanceName}": ` + + `"${commandLineOptions.pythonEnvironmentName}"` + ); + configOptions.pythonEnvironmentName = commandLineOptions.pythonEnvironmentName; + } + + commandLineOptions.includeFileSpecs.forEach((fileSpec) => { + configOptions.include.push(getFileSpec(projectRoot, fileSpec)); + }); + + commandLineOptions.excludeFileSpecs.forEach((fileSpec) => { + configOptions.exclude.push(getFileSpec(projectRoot, fileSpec)); + }); + + commandLineOptions.ignoreFileSpecs.forEach((fileSpec) => { + configOptions.ignore.push(getFileSpec(projectRoot, fileSpec)); + }); + + configOptions.applyDiagnosticOverrides(commandLineOptions.diagnosticSeverityOverrides); + configOptions.applyDiagnosticOverrides(commandLineOptions.diagnosticBooleanOverrides); + + // Override the analyzeUnannotatedFunctions setting based on the command-line setting. + if (commandLineOptions.analyzeUnannotatedFunctions !== undefined) { + configOptions.diagnosticRuleSet.analyzeUnannotatedFunctions = + commandLineOptions.analyzeUnannotatedFunctions; + } + + // Override the include based on command-line settings. + if (commandLineOptions.includeFileSpecsOverride) { + configOptions.include = []; + commandLineOptions.includeFileSpecsOverride.forEach((include) => { + configOptions.include.push( + getFileSpec(Uri.file(include, this.serviceProvider, /* checkRelative */ true), '.') + ); + }); + } + + // Override the venvPath based on the command-line setting. 
+ if (commandLineOptions.venvPath) { + configOptions.venvPath = projectRoot.resolvePaths(commandLineOptions.venvPath); + } + + const reportDuplicateSetting = (settingName: string, configValue: number | string | boolean) => { + const settingSource = fromLanguageServer ? 'the client settings' : 'a command-line option'; + this._console.warn( + `The ${settingName} has been specified in both the config file and ` + + `${settingSource}. The value in the config file (${configValue}) ` + + `will take precedence` + ); + }; + + // Apply the command-line options if the corresponding + // item wasn't already set in the config file. Report any + // duplicates. + + if (commandLineOptions.typeshedPath) { + if (!configOptions.typeshedPath) { + configOptions.typeshedPath = projectRoot.resolvePaths(commandLineOptions.typeshedPath); + } else { + reportDuplicateSetting('typeshedPath', configOptions.typeshedPath.toUserVisibleString()); + } + } + + // If useLibraryCodeForTypes was not specified in the config, allow the command line to override it. + if (configOptions.useLibraryCodeForTypes === undefined) { + configOptions.useLibraryCodeForTypes = commandLineOptions.useLibraryCodeForTypes; + } else if (commandLineOptions.useLibraryCodeForTypes !== undefined) { + reportDuplicateSetting('useLibraryCodeForTypes', configOptions.useLibraryCodeForTypes); + } + + if (commandLineOptions.stubPath) { + if (!configOptions.stubPath) { + configOptions.stubPath = this.fs.realCasePath(projectRoot.resolvePaths(commandLineOptions.stubPath)); + } else { + reportDuplicateSetting('stubPath', configOptions.stubPath.toUserVisibleString()); + } + } + } + + // Loads the config JSON object from the specified config file along with any + // chained config files specified in the "extends" property (recursively). 
+ private _getExtendedConfigurations(primaryConfigFileUri: Uri | undefined): ConfigFileContents[] | undefined { + this._primaryConfigFileUri = primaryConfigFileUri; + this._extendedConfigFileUris = []; + + if (!primaryConfigFileUri) { + return undefined; + } + + let curConfigFileUri = primaryConfigFileUri; + + const configJsonObjs: ConfigFileContents[] = []; + + while (true) { + this._extendedConfigFileUris.push(curConfigFileUri); + + let configFileJsonObj: object | undefined; + + // Is this a TOML or JSON file? + if (curConfigFileUri.lastExtension.endsWith('.toml')) { + this._console.info(`Loading pyproject.toml file at ${curConfigFileUri.toUserVisibleString()}`); + configFileJsonObj = this._parsePyprojectTomlFile(curConfigFileUri); + } else { + this._console.info(`Loading configuration file at ${curConfigFileUri.toUserVisibleString()}`); + configFileJsonObj = this._parseJsonConfigFile(curConfigFileUri); + } + + if (!configFileJsonObj) { + break; + } + + // Push onto the start of the array so base configs are processed first. + configJsonObjs.unshift({ configFileJsonObj, configFileDirUri: curConfigFileUri.getDirectory() }); + + const baseConfigUri = ConfigOptions.resolveExtends(configFileJsonObj, curConfigFileUri.getDirectory()); + if (!baseConfigUri) { + break; + } + + // Check for circular references. + if (this._extendedConfigFileUris.some((uri) => uri.equals(baseConfigUri))) { + this._console.error( + `Circular reference in configuration file "extends" setting: ${curConfigFileUri.toUserVisibleString()} ` + + `extends ${baseConfigUri.toUserVisibleString()}` + ); + break; + } + + curConfigFileUri = baseConfigUri; + } + + return configJsonObjs; + } + + private _getTypeStubFolder() { + const stubPath = + this._configOptions.stubPath ?? 
+ this.fs.realCasePath(this._configOptions.projectRoot.resolvePaths(defaultStubsDirectory)); + + if (!this._typeStubTargetUri || !this._typeStubTargetImportName) { + const errMsg = `Import '${this._typeStubTargetImportName}'` + ` could not be resolved`; + this._console.error(errMsg); + throw new Error(errMsg); + } + + const typeStubInputTargetParts = this._typeStubTargetImportName.split('.'); + if (typeStubInputTargetParts[0].length === 0) { + // We should never get here because the import resolution + // would have failed. + const errMsg = `Import '${this._typeStubTargetImportName}'` + ` could not be resolved`; + this._console.error(errMsg); + throw new Error(errMsg); + } + + try { + // Generate a new typings directory if necessary. + if (!this.fs.existsSync(stubPath)) { + this.fs.mkdirSync(stubPath); + } + } catch (e: any) { + const errMsg = `Could not create typings directory '${stubPath.toUserVisibleString()}'`; + this._console.error(errMsg); + throw new Error(errMsg); + } + + // Generate a typings subdirectory hierarchy. + const typingsSubdirPath = stubPath.resolvePaths(typeStubInputTargetParts[0]); + const typingsSubdirHierarchy = stubPath.resolvePaths(...typeStubInputTargetParts); + + try { + // Generate a new typings subdirectory if necessary. 
+ if (!this.fs.existsSync(typingsSubdirHierarchy)) { + makeDirectories(this.fs, typingsSubdirHierarchy, stubPath); + } + } catch (e: any) { + const errMsg = `Could not create typings subdirectory '${typingsSubdirHierarchy.toUserVisibleString()}'`; + this._console.error(errMsg); + throw new Error(errMsg); + } + + return typingsSubdirPath; + } + + private _parseJsonConfigFile(configPath: Uri): object | undefined { + return this._attemptParseFile(configPath, (fileContents) => { + const errors: JSONC.ParseError[] = []; + const result = JSONC.parse(fileContents, errors, { allowTrailingComma: true }); + if (errors.length > 0) { + throw new Error('Errors parsing JSON file'); + } + + return result; + }); + } + + private _parsePyprojectTomlFile(pyprojectPath: Uri): object | undefined { + return this._attemptParseFile(pyprojectPath, (fileContents, attemptCount) => { + try { + const configObj = parse(fileContents); + if (configObj && 'tool' in configObj) { + return (configObj.tool as Record).pyright as object; + } + } catch (e: any) { + this._console.error(`Pyproject file parse attempt ${attemptCount} error: ${JSON.stringify(e)}`); + throw e; + } + + this._console.info( + `Pyproject file "${pyprojectPath.toUserVisibleString()}" has no "[tool.pyright]" section.` + ); + return undefined; + }); + } + + private _attemptParseFile( + fileUri: Uri, + parseCallback: (contents: string, attempt: number) => object | undefined + ): object | undefined { + let fileContents = ''; + let parseAttemptCount = 0; + + while (true) { + // Attempt to read the file contents. + try { + fileContents = this.fs.readFileSync(fileUri, 'utf8'); + } catch { + this._console.error(`Config file "${fileUri.toUserVisibleString()}" could not be read.`); + this._reportConfigParseError(); + return undefined; + } + + // Attempt to parse the file. 
+ let parseFailed = false; + try { + return parseCallback(fileContents, parseAttemptCount + 1); + } catch (e: any) { + parseFailed = true; + } + + if (!parseFailed) { + break; + } + + // If we attempt to read the file immediately after it was saved, it + // may have been partially written when we read it, resulting in parse + // errors. We'll give it a little more time and try again. + if (parseAttemptCount++ >= 5) { + this._console.error( + `Config file "${fileUri.toUserVisibleString()}" could not be parsed. Verify that format is correct.` + ); + this._reportConfigParseError(); + return undefined; + } + } + + return undefined; + } + + // Given a file map returned by the source enumerator, this function + // adds any open files that match the include file spec and returns a + // final deduped file list. + private _getTrackedFileList(fileMap: Map): Uri[] { + // And scan all matching open files. We need to do this since some of files are not backed by + // files in file system but only exist in memory (ex, virtual workspace) + this._backgroundAnalysisProgram.program + .getOpened() + .map((o) => o.uri) + .filter((f) => f.isUntitled() || matchFileSpecs(this._program.configOptions, f)) + .forEach((f) => fileMap.set(f.key, f)); + + const fileList = Array.from(fileMap.values()); + return fileList; + } + + // If markFilesDirtyUnconditionally is true, we need to reparse + // and reanalyze all files in the program. If false, we will + // reparse and reanalyze only those files whose on-disk contents + // have changed. Unconditional dirtying is needed in the case where + // configuration options have changed. + private _updateTrackedFileList(markFilesDirtyUnconditionally: boolean) { + // Are we in type stub generation mode? If so, we need to search + // for a different set of files. 
+ if (this._typeStubTargetImportName) { + const execEnv = this._configOptions.findExecEnvironment(this._executionRootUri); + const moduleDescriptor = createImportedModuleDescriptor(this._typeStubTargetImportName); + const importResult = this._backgroundAnalysisProgram.importResolver.resolveImport( + Uri.empty(), + execEnv, + moduleDescriptor + ); + + if (importResult.isImportFound) { + const filesToImport: Uri[] = []; + + // Determine the directory that contains the root package. + const finalResolvedPath = importResult.resolvedUris[importResult.resolvedUris.length - 1]; + const isFinalPathFile = isFile(this.fs, finalResolvedPath); + const isFinalPathInitFile = + isFinalPathFile && finalResolvedPath.stripAllExtensions().fileName === '__init__'; + + let rootPackagePath = finalResolvedPath; + + if (isFinalPathFile) { + // If the module is a __init__.pyi? file, use its parent directory instead. + rootPackagePath = rootPackagePath.getDirectory(); + } + + for (let i = importResult.resolvedUris.length - 2; i >= 0; i--) { + if (!importResult.resolvedUris[i].isEmpty()) { + rootPackagePath = importResult.resolvedUris[i]; + } else { + // If there was no file corresponding to this portion + // of the name path, assume that it's contained + // within its parent directory. + rootPackagePath = rootPackagePath.getDirectory(); + } + } + + if (isDirectory(this.fs, rootPackagePath)) { + this._typeStubTargetUri = rootPackagePath; + } else if (isFile(this.fs, rootPackagePath)) { + // This can occur if there is a "dir/__init__.py" at the same level as a + // module "dir/module.py" that is specifically targeted for stub generation. + this._typeStubTargetUri = rootPackagePath.getDirectory(); + } + + if (finalResolvedPath.isEmpty()) { + this._typeStubTargetIsSingleFile = false; + } else { + filesToImport.push(finalResolvedPath); + this._typeStubTargetIsSingleFile = importResult.resolvedUris.length === 1 && !isFinalPathInitFile; + } + + // Add the implicit import paths. 
+ importResult.filteredImplicitImports?.forEach((implicitImport) => { + if (ImportResolver.isSupportedImportSourceFile(implicitImport.uri)) { + filesToImport.push(implicitImport.uri); + } + }); + + this._backgroundAnalysisProgram.setAllowedThirdPartyImports([this._typeStubTargetImportName]); + this._backgroundAnalysisProgram.setTrackedFiles(filesToImport); + } else { + this._console.error(`Import '${this._typeStubTargetImportName}' not found`); + } + + this._requireTrackedFileUpdate = false; + } else if (!this.options.skipScanningUserFiles) { + // Allocate a new source enumerator. We'll call this + // repeatedly until all source files are found. + this._sourceEnumerator = new SourceEnumerator( + this._configOptions.include, + this._configOptions.exclude, + !!this._configOptions.autoExcludeVenv, + this.fs, + this._console + ); + + this._backgroundAnalysisProgram.markAllFilesDirty(markFilesDirtyUnconditionally); + this._requireTrackedFileUpdate = false; + } + } + + private _removeSourceFileWatchers() { + if (this._sourceFileWatcher) { + this._sourceFileWatcher.close(); + this._sourceFileWatcher = undefined; + } + } + + private _updateSourceFileWatchers() { + this._removeSourceFileWatchers(); + + if (!this._watchForSourceChanges) { + return; + } + + if (this._configOptions.include.length > 0) { + const fileList = this._configOptions.include.map((spec) => { + return spec.wildcardRoot; + }); + + try { + if (this._verboseOutput) { + this._console.info(`Adding fs watcher for directories:\n ${fileList.join('\n')}`); + } + + const isIgnored = ignoredWatchEventFunction(fileList.map((f) => f.getFilePath())); + this._sourceFileWatcher = this.fs.createFileSystemWatcher(fileList, (event, path) => { + if (!path) { + return; + } + + if (this._verboseOutput) { + this._console.info(`SourceFile: Received fs event '${event}' for path '${path}'`); + } + + if (isIgnored(path)) { + return; + } + + // Wholesale ignore events that appear to be from tmp file / .git modification. 
+ if (path.endsWith('.tmp') || path.endsWith('.git') || path.includes(_gitDirectory)) { + return; + } + + let uri = Uri.file(path, this.serviceProvider, /* checkRelative */ true); + + // Make sure path is the true case. + uri = this.fs.realCasePath(uri); + + const eventInfo = getEventInfo(this.fs, this._console, this._program, event, uri); + if (!eventInfo) { + // no-op event, return. + return; + } + + if (!this._shouldHandleSourceFileWatchChanges(uri, eventInfo.isFile)) { + return; + } + + // This is for performance optimization. If the change only pertains to the content of one file, + // then it can't affect the 'import resolution' result. All we need to do is reanalyze the related files + // (those that have a transitive dependency on this file). + if (eventInfo.isFile && eventInfo.event === 'change') { + this._backgroundAnalysisProgram.markFilesDirty([uri], /* evenIfContentsAreSame */ false); + this.scheduleReanalysis(/* requireTrackedFileUpdate */ false); + return; + } + + // When the file system structure changes, like when files are added or removed, + // this can affect how we resolve imports. This requires us to reset caches and reanalyze everything. + // + // However, we don't need to rebuild any indexes in this situation. Changes to workspace files don't affect library indices. + this.invalidateAndForceReanalysis(InvalidatedReason.SourceWatcherChanged); + this.scheduleReanalysis(/* requireTrackedFileUpdate */ true); + }); + } catch { + this._console.error( + `Exception caught when installing fs watcher for:\n ${fileList + .map((f) => f.toUserVisibleString()) + .join('\n')}` + ); + } + } + + function getEventInfo( + fs: FileSystem, + console: ConsoleInterface, + program: Program, + event: FileWatcherEventType, + path: Uri + ) { + // Due to the way we implemented file watcher, we will only get 2 events; 'add' and 'change'. + // Here, we will convert those 2 to 3 events. 
'add', 'change' and 'unlink'; + const stats = tryStat(fs, path); + if (event === 'add') { + if (!stats) { + // If we are told that the path is added, but if we can't access it, then consider it as already deleted. + // there is nothing we need to do. + return undefined; + } + + return { event, isFile: stats.isFile() }; + } + + if (event === 'change') { + // If we got 'change', but can't access the path, then we consider it as delete. + if (!stats) { + // See whether it is a file that got deleted. + const isFile = !!program.getSourceFile(path); + + // If not, check whether it is a part of the workspace at all. + if (!isFile && !program.containsSourceFileIn(path)) { + // There is no source file under the given path. There is nothing we need to do. + return undefined; + } + + return { event: 'unlink', isFile }; + } + + return { event, isFile: stats.isFile() }; + } + + // We have unknown event. + console.warn(`Received unknown file change event: '${event}' for '${path}'`); + return undefined; + } + } + + private _shouldHandleSourceFileWatchChanges(path: Uri, isFile: boolean) { + if (isFile) { + if (!hasPythonExtension(path) || isTemporaryFile(path)) { + return false; + } + + // Check whether the file change can affect semantics. If the file changed is not a user file or already a part of + // the program (since we lazily load library files or extra path files when they are used), then the change can't + // affect semantics. so just bail out. + if (!this.isTracked(path) && !this._program.getSourceFileInfo(path)) { + return false; + } + + return true; + } + + // The fs change is on a folder. + if (!matchFileSpecs(this._program.configOptions, path, /* isFile */ false)) { + // First, make sure the folder is included. By default, we exclude any folder whose name starts with '.' 
+ return false; + } + + const parentPath = path.getDirectory(); + const hasInit = + parentPath.startsWith(this._configOptions.projectRoot) && + (this.fs.existsSync(parentPath.initPyUri) || this.fs.existsSync(parentPath.initPyiUri)); + + // We don't have any file under the given path and its parent folder doesn't have __init__ then this folder change + // doesn't have any meaning to us. + if (!hasInit && !this._program.containsSourceFileIn(path)) { + return false; + } + + return true; + + function isTemporaryFile(path: Uri) { + // Determine if this is an add or delete event related to a temporary + // file. Some tools (like auto-formatters) create temporary files + // alongside the original file and name them "x.py..py" where + // is a 32-character random string of hex digits. We don't + // want these events to trigger a full reanalysis. + const fileName = path.fileName; + const fileNameSplit = fileName.split('.'); + if (fileNameSplit.length === 4) { + if (fileNameSplit[3] === fileNameSplit[1] && fileNameSplit[2].length === 32) { + return true; + } + } + + return false; + } + } + + private _removeLibraryFileWatcher() { + if (this._libraryFileWatcher) { + this._libraryFileWatcher.close(); + this._libraryFileWatcher = undefined; + } + } + + private _updateLibraryFileWatcher() { + this._removeLibraryFileWatcher(); + + if (!this._watchForLibraryChanges) { + this._librarySearchUrisToWatch = undefined; + return; + } + + // Watch the library paths for package install/uninstall. + this._librarySearchUrisToWatch = findPythonSearchPaths( + this.fs, + this._backgroundAnalysisProgram.configOptions, + this._backgroundAnalysisProgram.host, + /* importLogger */ undefined, + /* includeWatchPathsOnly */ true, + this._executionRootUri + ); + + // Make sure the watch list includes extra paths that are not part of user files. + // Sometimes, nested folders of the workspace are added as extra paths to import modules as top-level modules. 
+ const extraPaths = this._configOptions + .getExecutionEnvironments() + .map((e) => e.extraPaths.filter((p) => !matchFileSpecs(this._configOptions, p, /* isFile */ false))) + .flat(); + + const watchList = deduplicateFolders([this._librarySearchUrisToWatch, extraPaths]); + if (watchList.length > 0) { + try { + if (this._verboseOutput) { + this._console.info(`Adding fs watcher for library directories:\n ${watchList.join('\n')}`); + } + const isIgnored = ignoredWatchEventFunction(watchList.map((f) => f.getFilePath())); + this._libraryFileWatcher = this.fs.createFileSystemWatcher(watchList, (event, path) => { + if (!path) { + return; + } + + if (this._verboseOutput) { + this._console.info(`LibraryFile: Received fs event '${event}' for path '${path}'`); + } + + if (isIgnored(path)) { + return; + } + + const uri = Uri.file(path, this.serviceProvider, /* checkRelative */ true); + + if (!this._shouldHandleLibraryFileWatchChanges(uri, watchList)) { + return; + } + + // If file doesn't exist, it is delete. 
+ const isChange = event === 'change' && this.fs.existsSync(uri); + this._scheduleLibraryAnalysis(isChange, uri); + }); + } catch { + this._console.error( + `Exception caught when installing fs watcher for:\n ${watchList + .map((w) => w.toUserVisibleString()) + .join('\n')}` + ); + } + } + } + + private _shouldHandleLibraryFileWatchChanges(path: Uri, libSearchPaths: Uri[]) { + if (this._program.getSourceFileInfo(path)) { + return true; + } + + // find the innermost matching search path + let matchingSearchPath; + for (const libSearchPath of libSearchPaths) { + if ( + path.isChild(libSearchPath) && + (!matchingSearchPath || matchingSearchPath.getPathLength() < libSearchPath.getPathLength()) + ) { + matchingSearchPath = libSearchPath; + } + } + + if (!matchingSearchPath) { + return true; + } + + const parentComponents = matchingSearchPath.getPathComponents(); + const childComponents = path.getPathComponents(); + + for (let i = parentComponents.length; i < childComponents.length; i++) { + if (childComponents[i].startsWith('.')) { + return false; + } + } + + return true; + } + + private _clearLibraryReanalysisTimer() { + if (this._libraryReanalysisTimer) { + clearTimeout(this._libraryReanalysisTimer); + this._libraryReanalysisTimer = undefined; + + const handled = this._backgroundAnalysisProgram?.libraryUpdated(); + this.options.libraryReanalysisTimeProvider?.libraryUpdated?.(handled); + } + } + + private _scheduleLibraryAnalysis(isChange: boolean, changedFileUri?: Uri) { + if (this._disposed) { + // Already disposed. + return; + } + + this._clearLibraryReanalysisTimer(); + + const reanalysisTimeProvider = this.options.libraryReanalysisTimeProvider; + const backOffTimeInMS = reanalysisTimeProvider?.(); + if (!backOffTimeInMS) { + // We don't support library reanalysis. + return; + } + + // Add pending library files/folders changes. 
+ this._pendingLibraryChanges.changesOnly = this._pendingLibraryChanges.changesOnly && isChange; + + // Track the specific file that changed only if all accumulated changes are content-only. + // If any change is structural (add/delete), clear the map since all files need updating. + if (this._pendingLibraryChanges.changesOnly && changedFileUri) { + if (!this._pendingLibraryChanges.changedFileUris) { + this._pendingLibraryChanges.changedFileUris = new UriMap(); + } + // Add to map (automatically handles duplicates via O(1) lookup) + this._pendingLibraryChanges.changedFileUris.set(changedFileUri, true); + } else if (!this._pendingLibraryChanges.changesOnly) { + // Clear the map if we've encountered a structural change + this._pendingLibraryChanges.changedFileUris = undefined; + } + + // Wait for a little while, since library changes + // tend to happen in big batches when packages + // are installed or uninstalled. + this._libraryReanalysisTimer = setTimeout(() => { + this._clearLibraryReanalysisTimer(); + + // Invalidate import resolver, mark files dirty (specific files if available), + // and reanalyze. + this.invalidateAndForceReanalysis( + this._pendingLibraryChanges.changesOnly + ? InvalidatedReason.LibraryWatcherContentOnlyChanged + : InvalidatedReason.LibraryWatcherChanged, + this._pendingLibraryChanges + ); + this.scheduleReanalysis(/* requireTrackedFileUpdate */ false); + + // No more pending changes. 
+ reanalysisTimeProvider!.libraryReanalysisStarted?.(); + this._pendingLibraryChanges.changesOnly = true; + this._pendingLibraryChanges.changedFileUris = undefined; + }, backOffTimeInMS); + } + + private _removeConfigFileWatcher() { + if (this._configFileWatcher) { + this._configFileWatcher.close(); + this._configFileWatcher = undefined; + } + } + + private _updateConfigFileWatcher() { + this._removeConfigFileWatcher(); + + if (!this._watchForConfigChanges) { + return; + } + + if (this._primaryConfigFileUri) { + this._configFileWatcher = this.fs.createFileSystemWatcher(this._extendedConfigFileUris, (event) => { + if (this._verboseOutput) { + this._console.info(`Received fs event '${event}' for config file`); + } + this._scheduleReloadConfigFile(); + }); + } else if (!this._executionRootUri.isEmpty()) { + this._configFileWatcher = this.fs.createFileSystemWatcher([this._executionRootUri], (event, path) => { + if (!path) { + return; + } + + if (event === 'add' || event === 'change') { + const fileName = getFileName(path); + if (fileName === configFileName) { + if (this._verboseOutput) { + this._console.info(`Received fs event '${event}' for config file`); + } + if (this._commandLineOptions) { + this.setOptions(this._commandLineOptions); + } + } + } + }); + } + } + + private _clearReloadConfigTimer() { + if (this._reloadConfigTimer) { + clearTimeout(this._reloadConfigTimer); + this._reloadConfigTimer = undefined; + } + } + + private _scheduleReloadConfigFile() { + this._clearReloadConfigTimer(); + + // Wait for a little while after we receive the + // change update event because it may take a while + // for the file to be written out. Plus, there may + // be multiple changes. 
+ this._reloadConfigTimer = setTimeout(() => { + this._clearReloadConfigTimer(); + this._reloadConfigFile(); + }, 100); + } + + private _reloadConfigFile() { + this._updateConfigFileWatcher(); + + if (this._primaryConfigFileUri) { + this._console.info(`Reloading configuration file at ${this._primaryConfigFileUri.toUserVisibleString()}`); + + const host = this._backgroundAnalysisProgram.host; + + // We can't just reload config file when it is changed; we need to consider + // command line options as well to construct new config Options. + const configOptions = this._getConfigOptions(host, this._commandLineOptions!); + this._backgroundAnalysisProgram.setConfigOptions(configOptions); + + this.applyConfigOptions(host); + } + } + + private _clearReanalysisTimer() { + if (this._analyzeTimer) { + clearTimeout(this._analyzeTimer); + this._analyzeTimer = undefined; + } + } + + private _reportConfigParseError() { + if (this._onCompletionCallback) { + this._onCompletionCallback({ + diagnostics: [], + filesInProgram: 0, + requiringAnalysisCount: { files: 0, cells: 0 }, + checkingOnlyOpenFiles: true, + fatalErrorOccurred: false, + configParseErrorOccurred: true, + elapsedTime: 0, + reason: 'analysis', + }); + } + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/serviceUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/serviceUtils.ts new file mode 100644 index 00000000..43d631bf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/serviceUtils.ts @@ -0,0 +1,29 @@ +import { ReadOnlyFileSystem } from '../common/fileSystem'; +import { configFileName, pyprojectTomlName } from '../common/pathConsts'; +import { Uri } from '../common/uri/uri'; +import { forEachAncestorDirectory } from '../common/uri/uriUtils'; + +export function findPyprojectTomlFileHereOrUp(fs: ReadOnlyFileSystem, searchPath: Uri): Uri | undefined { + return forEachAncestorDirectory(searchPath, (ancestor) => findPyprojectTomlFile(fs, ancestor)); +} + +export 
function findPyprojectTomlFile(fs: ReadOnlyFileSystem, searchPath: Uri) { + const fileName = searchPath.resolvePaths(pyprojectTomlName); + if (fs.existsSync(fileName)) { + return fs.realCasePath(fileName); + } + return undefined; +} + +export function findConfigFileHereOrUp(fs: ReadOnlyFileSystem, searchPath: Uri): Uri | undefined { + return forEachAncestorDirectory(searchPath, (ancestor) => findConfigFile(fs, ancestor)); +} + +export function findConfigFile(fs: ReadOnlyFileSystem, searchPath: Uri): Uri | undefined { + const fileName = searchPath.resolvePaths(configFileName); + if (fs.existsSync(fileName)) { + return fs.realCasePath(fileName); + } + + return undefined; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/sourceEnumerator.ts b/python-parser/packages/pyright-internal/src/analyzer/sourceEnumerator.ts new file mode 100644 index 00000000..e586c399 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/sourceEnumerator.ts @@ -0,0 +1,198 @@ +/* + * sourceEnumerator.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Logic for enumerating all of the Python source files in + * a project. 
+ */ + +import { ConsoleInterface } from '../common/console'; +import { FileSystem } from '../common/fileSystem'; +import { Uri } from '../common/uri/uri'; +import { FileSpec, getFileSystemEntries, tryRealpath, tryStat } from '../common/uri/uriUtils'; + +export interface SourceEnumerateResult { + matches: Map; + autoExcludedDirs: Uri[]; + isComplete: boolean; +} + +const envMarkers = [['bin', 'activate'], ['Scripts', 'activate'], ['pyvenv.cfg'], ['conda-meta']]; + +interface DirToExplore { + uri: Uri; + includeRegExp: RegExp; + hasDirectoryWildcard: boolean; +} + +export class SourceEnumerator { + private _elapsedTimeInMs = 0; + private _includesToExplore: FileSpec[]; + private _dirsToExplore: DirToExplore[] = []; + private _matches = new Map(); + private _autoExcludeDirs: Uri[] = []; + private _isComplete = false; + private _numFilesVisited = 0; + private _loggedLongOperationError = false; + private _seenDirs = new Set(); + + constructor( + include: FileSpec[], + private _excludes: FileSpec[], + private _autoExcludeVenv: boolean, + private _fs: FileSystem, + private _console: ConsoleInterface + ) { + this._includesToExplore = include.slice(0).reverse(); + + this._console.log(`Searching for source files`); + } + + // Enumerates as many files as possible within the specified + // time limit and returns all matching files. + enumerate(timeLimitInMs: number): SourceEnumerateResult { + const startTime = Date.now(); + + while (!this._isComplete) { + if (this._doNext()) { + if (!this._isComplete) { + this._finish(); + } + } + + const elapsedTime = Date.now() - startTime; + if (timeLimitInMs > 0 && elapsedTime > timeLimitInMs) { + break; + } + } + + this._elapsedTimeInMs += Date.now() - startTime; + + if (!this._loggedLongOperationError) { + const longOperationLimitInMs = 10000; + const nFilesToSuggestSubfolder = 50; + + // If this is taking a long time, log an error to help the user + // diagnose and mitigate the problem. 
+ if (this._elapsedTimeInMs >= longOperationLimitInMs && this._numFilesVisited >= nFilesToSuggestSubfolder) { + this._console.error( + `Enumeration of workspace source files is taking longer than ${ + longOperationLimitInMs * 0.001 + } seconds.\n` + + 'This may be because:\n' + + '* You have opened your home directory or entire hard drive as a workspace\n' + + '* Your workspace contains a very large number of directories and files\n' + + '* Your workspace contains a symlink to a directory with many files\n' + + '* Your workspace is remote, and file enumeration is slow\n' + + 'To reduce this time, open a workspace directory with fewer files ' + + 'or add a pyrightconfig.json configuration file with an "exclude" section to exclude ' + + 'subdirectories from your workspace. For more details, refer to ' + + 'https://github.com/microsoft/pyright/blob/main/docs/configuration.md.' + ); + + this._loggedLongOperationError = true; + } + } + + return { + matches: this._matches, + autoExcludedDirs: this._autoExcludeDirs, + isComplete: this._isComplete, + }; + } + + // Performs the next enumeration action. Returns true if complete. 
+ private _doNext(): boolean { + const dirToExplore = this._dirsToExplore.pop(); + if (dirToExplore) { + this._exploreDir(dirToExplore); + return false; + } + + const includeToExplore = this._includesToExplore.pop(); + if (includeToExplore) { + this._exploreInclude(includeToExplore); + return false; + } + + return true; + } + + private _exploreDir(dir: DirToExplore) { + const realDirPath = tryRealpath(this._fs, dir.uri); + if (!realDirPath) { + this._console.warn(`Skipping broken link "${dir.uri}"`); + return; + } + + if (this._seenDirs.has(realDirPath.key)) { + this._console.info(`Skipping recursive symlink "${dir.uri}" -> "${realDirPath}"`); + return; + } + this._seenDirs.add(realDirPath.key); + + if (this._autoExcludeVenv) { + if (envMarkers.some((f) => this._fs.existsSync(dir.uri.resolvePaths(...f)))) { + this._autoExcludeDirs.push(dir.uri); + this._console.info(`Auto-excluding ${dir.uri.toUserVisibleString()}`); + return; + } + } + + const { files, directories } = getFileSystemEntries(this._fs, dir.uri); + + for (const file of files) { + if (FileSpec.matchIncludeFileSpec(dir.includeRegExp, this._excludes, file)) { + this._numFilesVisited++; + this._matches.set(file.key, file); + } + } + + for (const subDir of directories.slice().reverse()) { + if (subDir.matchesRegex(dir.includeRegExp) || dir.hasDirectoryWildcard) { + if (!FileSpec.isInPath(subDir, this._excludes)) { + this._dirsToExplore.push({ + uri: subDir, + includeRegExp: dir.includeRegExp, + hasDirectoryWildcard: dir.hasDirectoryWildcard, + }); + } + } + } + } + + private _exploreInclude(includeSpec: FileSpec) { + if (FileSpec.isInPath(includeSpec.wildcardRoot, this._excludes)) { + return; + } + + this._seenDirs.clear(); + + const stat = tryStat(this._fs, includeSpec.wildcardRoot); + if (stat?.isFile()) { + this._matches.set(includeSpec.wildcardRoot.key, includeSpec.wildcardRoot); + } else if (stat?.isDirectory()) { + this._dirsToExplore.push({ + uri: includeSpec.wildcardRoot, + includeRegExp: 
includeSpec.regExp, + hasDirectoryWildcard: includeSpec.hasDirectoryWildcard, + }); + } else { + this._console.error( + `File or directory "${includeSpec.wildcardRoot.toUserVisibleString()}" does not exist.` + ); + } + } + + private _finish() { + this._isComplete = true; + + const fileCount = this._matches.size; + if (fileCount === 0) { + this._console.info(`No source files found.`); + } else { + this._console.info(`Found ${fileCount} ` + `source ${fileCount === 1 ? 'file' : 'files'}`); + } + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/sourceFile.ts b/python-parser/packages/pyright-internal/src/analyzer/sourceFile.ts new file mode 100644 index 00000000..776801ec --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/sourceFile.ts @@ -0,0 +1,1563 @@ +/* + * sourceFile.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Class that represents a single Python source or stub file. + */ + +import { isMainThread } from 'worker_threads'; + +import { OperationCanceledException } from '../common/cancellationUtils'; +import { appendArray } from '../common/collectionUtils'; +import { + ConfigOptions, + DiagnosticRuleSet, + ExecutionEnvironment, + getBasicDiagnosticRuleSet, +} from '../common/configOptions'; +import { ConsoleInterface, StandardConsole } from '../common/console'; +import { assert } from '../common/debug'; +import { Diagnostic, DiagnosticCategory, TaskListToken, convertLevelToCategory } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { DiagnosticSink, TextRangeDiagnosticSink } from '../common/diagnosticSink'; +import { FileSystem } from '../common/fileSystem'; +import { LogTracker, getPathForLogging } from '../common/logTracker'; +import { stripFileExtension } from '../common/pathUtils'; +import { convertOffsetsToRange, convertTextRangeToRange } from '../common/positionUtils'; +import { ServiceKeys } from 
'../common/serviceKeys'; +import { ServiceProvider } from '../common/serviceProvider'; +import '../common/serviceProviderExtensions'; +import * as StringUtils from '../common/stringUtils'; +import { Range, TextRange, getEmptyRange } from '../common/textRange'; +import { TextRangeCollection } from '../common/textRangeCollection'; +import { Duration, timingStats } from '../common/timing'; +import { Uri } from '../common/uri/uri'; +import { LocMessage } from '../localization/localize'; +import { ModuleNode } from '../parser/parseNodes'; +import { ModuleImport, ParseFileResults, ParseOptions, Parser, ParserOutput } from '../parser/parser'; +import { IgnoreComment, Tokenizer, TokenizerOutput } from '../parser/tokenizer'; +import { Token } from '../parser/tokenizerTypes'; +import { AnalyzerFileInfo, ImportLookup } from './analyzerFileInfo'; +import * as AnalyzerNodeInfo from './analyzerNodeInfo'; +import { Binder } from './binder'; +import { Checker } from './checker'; +import { CircularDependency } from './circularDependency'; +import * as CommentUtils from './commentUtils'; +import { ImportResolver } from './importResolver'; +import { ImportResult } from './importResult'; +import { ParseTreeCleanerWalker } from './parseTreeCleaner'; +import { Scope } from './scope'; +import { SymbolTable } from './symbol'; +import { TestWalker } from './testWalker'; +import { TypeEvaluator } from './typeEvaluatorTypes'; + +// Limit the number of import cycles tracked per source file. +const _maxImportCyclesPerFile = 4; + +// Allow files up to 50MB in length, same as VS Code. +// https://github.com/microsoft/vscode/blob/1e750a7514f365585d8dab1a7a82e0938481ea2f/src/vs/editor/common/model/textModel.ts#L194 +export const maxSourceFileSize = 50 * 1024 * 1024; + +interface ResolveImportResult { + imports: ImportResult[]; + builtinsImportResult?: ImportResult | undefined; +} + +// Indicates whether IPython syntax is supported and if so, what +// type of notebook support is in use. 
+export enum IPythonMode { + // Not a notebook. This is the only falsy enum value, so you + // can test if IPython is supported via "if (ipythonMode)" + None = 0, + // Each cell is its own document. + CellDocs, +} + +// A monotonically increasing number used to create unique file IDs. +let nextUniqueFileId = 1; + +class WriteableData { + // Number that is incremented every time the diagnostics + // are updated. + diagnosticVersion = 0; + + // Generation count of the file contents. When the contents + // change, this is incremented. + fileContentsVersion = 0; + + // Number that is incremented every time semantic of the file + // might have changed. + semanticVersion = 0; + + // Length and hash of the file the last time it was read from disk. + lastFileContentLength: number | undefined = undefined; + lastFileContentHash: number | undefined = undefined; + + // Client's version of the file. Undefined implies that contents + // need to be read from disk. + clientDocumentContents: string | undefined; + clientDocumentVersion: number | undefined; + + // Version of file contents that have been analyzed. + analyzedFileContentsVersion = -1; + + // Do we need to walk the parse tree and clean + // the binder information hanging from it? + parseTreeNeedsCleaning = false; + + parsedFileContents: string | undefined; + tokenizerLines: TextRangeCollection | undefined; + tokenizerOutput: TokenizerOutput | undefined; + lineCount: number | undefined; + + moduleSymbolTable: SymbolTable | undefined; + + // Reentrancy check for binding and checking. + isBindingInProgress = false; + isCheckingInProgress = false; + + // Diagnostics generated during different phases of analysis. 
+ parseDiagnostics: Diagnostic[] = []; + commentDiagnostics: Diagnostic[] = []; + bindDiagnostics: Diagnostic[] = []; + checkerDiagnostics: Diagnostic[] = []; + taskListDiagnostics: Diagnostic[] = []; + typeIgnoreLines = new Map(); + typeIgnoreAll: IgnoreComment | undefined; + pyrightIgnoreLines = new Map(); + + // Accumulated and filtered diagnostics that combines all of the + // above information. This needs to be recomputed any time the + // above change. + accumulatedDiagnostics: Diagnostic[] = []; + + // Circular dependencies that have been reported in this file. + circularDependencies: CircularDependency[] = []; + noCircularDependencyConfirmed = false; + + // Did we hit the maximum import depth? + hitMaxImportDepth: number | undefined; + + // Do we need to perform a binding step? + isBindingNeeded = true; + + // Do we have valid diagnostic results from a checking pass? + isCheckingNeeded = true; + + // Time (in ms) that the last check() call required for this file. + checkTime: number | undefined; + + // Information about implicit and explicit imports from this file. + imports: ImportResult[] | undefined; + builtinsImport: ImportResult | undefined; + // True if the file appears to have been deleted. 
+ isFileDeleted = false; + + parserOutput: ParserOutput | undefined; + + constructor() { + // Empty + } + + debugPrint() { + return `WritableData: + diagnosticVersion=${this.diagnosticVersion}, + noCircularDependencyConfirmed=${this.noCircularDependencyConfirmed}, + isBindingNeeded=${this.isBindingNeeded}, + isBindingInProgress=${this.isBindingInProgress}, + isCheckingInProgress=${this.isCheckingInProgress}, + isCheckingNeeded=${this.isCheckingNeeded}, + isFileDeleted=${this.isFileDeleted}, + hitMaxImportDepth=${this.hitMaxImportDepth}, + parseTreeNeedsCleaning=${this.parseTreeNeedsCleaning}, + fileContentsVersion=${this.fileContentsVersion}, + analyzedFileContentsVersion=${this.analyzedFileContentsVersion}, + clientDocumentVersion=${this.clientDocumentVersion}, + lastFileContentLength=${this.lastFileContentLength}, + lastFileContentHash=${this.lastFileContentHash}, + typeIgnoreAll=${this.typeIgnoreAll}, + imports=${this.imports?.length}, + builtinsImport=${this.builtinsImport?.importName}, + circularDependencies=${this.circularDependencies?.length}, + parseDiagnostics=${this.parseDiagnostics?.length}, + commentDiagnostics=${this.commentDiagnostics?.length}, + bindDiagnostics=${this.bindDiagnostics?.length}, + checkerDiagnostics=${this.checkerDiagnostics?.length}, + taskListDiagnostics=${this.taskListDiagnostics?.length}, + accumulatedDiagnostics=${this.accumulatedDiagnostics?.length}, + typeIgnoreLines=${this.typeIgnoreLines?.size}, + pyrightIgnoreLines=${this.pyrightIgnoreLines?.size}, + checkTime=${this.checkTime}, + clientDocumentContents=${this.clientDocumentContents?.length}, + parseResults=${this.parserOutput?.parseTree.length}, + semanticVersion=${this.semanticVersion}, `; + } +} + +export interface SourceFileEditMode { + readonly isEditMode: boolean; +} + +export class SourceFile { + // Console interface to use for debugging. + private _console: ConsoleInterface; + + // Uri unique to this file within the workspace. 
May not represent + // a real file on disk. + private readonly _uri: Uri; + + // A short string that is guaranteed to uniquely + // identify this file. + private readonly _fileId: string; + + // Getter to lazily compute the module name from the file URI. + private _moduleNameGetter: (file: Uri) => string; + + // Period-delimited import path for the module. + private _cachedModuleName: string | undefined; + + // True if file is a type-hint (.pyi) file versus a python + // (.py) file. + private readonly _isStubFile: boolean; + + // True if the file was imported as a third-party import. + private readonly _isThirdPartyImport: boolean; + + // True if the file is the "typing.pyi" file, which needs + // special-case handling. + private readonly _isTypingStubFile: boolean; + + // True if the file is the "typing_extensions.pyi" file, which needs + // special-case handling. + private readonly _isTypingExtensionsStubFile: boolean; + + // True if the file is the "_typeshed.pyi" file, which needs special- + // case handling. + private readonly _isTypeshedStubFile: boolean; + + // True if the file one of the other built-in stub files + // that require special-case handling: "collections.pyi", + // "dataclasses.pyi", "abc.pyi", "asyncio/coroutines.pyi". + private readonly _isBuiltInStubFile: boolean; + + // True if the file is part of a package that contains a + // "py.typed" file. + private readonly _isThirdPartyPyTypedPresent: boolean; + + private readonly _editMode: SourceFileEditMode; + + // Settings that control which diagnostics should be output. The rules + // are initialized to the basic set. They should be updated after the + // the file is parsed. + private _diagnosticRuleSet = getBasicDiagnosticRuleSet(); + + // Indicate whether this file is for ipython or not. + private _ipythonMode = IPythonMode.None; + private _logTracker: LogTracker; + private _preEditData: WriteableData | undefined; + + // Data that changes when the source file changes. 
+ private _writableData: WriteableData; + + readonly fileSystem: FileSystem; + + constructor( + readonly serviceProvider: ServiceProvider, + uri: Uri, + moduleNameGetter: (file: Uri) => string, + isThirdPartyImport: boolean, + isThirdPartyPyTypedPresent: boolean, + editMode: SourceFileEditMode, + console?: ConsoleInterface, + logTracker?: LogTracker, + ipythonMode?: IPythonMode + ) { + this.fileSystem = serviceProvider.get(ServiceKeys.fs); + this._console = console || new StandardConsole(); + this._writableData = new WriteableData(); + + this._editMode = editMode; + this._uri = uri; + this._fileId = this._makeFileId(uri); + this._moduleNameGetter = moduleNameGetter; + this._isStubFile = uri.hasExtension('.pyi'); + this._isThirdPartyImport = isThirdPartyImport; + this._isThirdPartyPyTypedPresent = isThirdPartyPyTypedPresent; + const fileName = uri.fileName; + this._isTypingStubFile = + this._isStubFile && (this._uri.pathEndsWith('stdlib/typing.pyi') || fileName === 'typing_extensions.pyi'); + this._isTypingExtensionsStubFile = this._isStubFile && fileName === 'typing_extensions.pyi'; + this._isTypeshedStubFile = + this._isStubFile && + (this._uri.pathEndsWith('stdlib/_typeshed/__init__.pyi') || + this._uri.pathEndsWith('stdlib/_typeshed/_type_checker_internals.pyi')); + + this._isBuiltInStubFile = false; + if (this._isStubFile) { + if ( + this._uri.pathEndsWith('stdlib/collections/__init__.pyi') || + this._uri.pathEndsWith('stdlib/asyncio/futures.pyi') || + this._uri.pathEndsWith('stdlib/asyncio/tasks.pyi') || + this._uri.pathEndsWith('stdlib/builtins.pyi') || + this._uri.pathEndsWith('stdlib/_importlib_modulespec.pyi') || + this._uri.pathEndsWith('stdlib/dataclasses.pyi') || + this._uri.pathEndsWith('stdlib/abc.pyi') || + this._uri.pathEndsWith('stdlib/enum.pyi') || + this._uri.pathEndsWith('stdlib/queue.pyi') || + this._uri.pathEndsWith('stdlib/string/templatelib.pyi') || + this._uri.pathEndsWith('stdlib/types.pyi') || + 
this._uri.pathEndsWith('stdlib/warnings.pyi') + ) { + this._isBuiltInStubFile = true; + } + } + + // 'FG' or 'BG' based on current thread. + this._logTracker = logTracker ?? new LogTracker(console, isMainThread ? 'FG' : 'BG'); + this._ipythonMode = ipythonMode ?? IPythonMode.None; + } + + // Sets the initial diagnostic rule set from the execution environment's + // config-level overrides. This should be called immediately after + // construction so the file has the correct rules before parse/bind. + setInitialDiagnosticRuleSet(ruleSet: DiagnosticRuleSet) { + this._diagnosticRuleSet = { ...ruleSet }; + } + + getIPythonMode(): IPythonMode { + return this._ipythonMode; + } + + getUri(): Uri { + return this._uri; + } + + getModuleName(): string { + if (!this._cachedModuleName) { + // Call the module name getter. If it returns '' (which can happen if the file is not part + // of the project), fall back to the file name.) + return this._moduleNameGetter(this._uri) || stripFileExtension(this._uri.fileName); + } + + return this._cachedModuleName; + } + + clearCachedModuleName() { + this._cachedModuleName = undefined; + } + + getDiagnosticVersion(): number { + return this._writableData.diagnosticVersion; + } + + getParseDiagnostics(): Diagnostic[] { + return this._writableData.parseDiagnostics; + } + + isStubFile() { + return this._isStubFile; + } + + isTypingStubFile() { + return this._isTypingStubFile; + } + + isTypeshedStubFile() { + return this._isTypeshedStubFile; + } + + isBuiltInStubFile() { + return this._isBuiltInStubFile; + } + + isThirdPartyPyTypedPresent() { + return this._isThirdPartyPyTypedPresent; + } + + // Returns a list of cached diagnostics from the latest analysis job. + // If the prevVersion is specified, the method returns undefined if + // the diagnostics haven't changed. 
+ getDiagnostics(options: ConfigOptions, prevDiagnosticVersion?: number): Diagnostic[] | undefined { + if (this._writableData.diagnosticVersion === prevDiagnosticVersion) { + return undefined; + } + + return this._writableData.accumulatedDiagnostics; + } + + getImports(): ImportResult[] { + return this._writableData.imports || []; + } + + getBuiltinsImport(): ImportResult | undefined { + return this._writableData.builtinsImport; + } + + getModuleSymbolTable(): SymbolTable | undefined { + return this._writableData.moduleSymbolTable; + } + + getCheckTime() { + return this._writableData.checkTime; + } + + restore(): string | undefined { + // If we had an edit, return our text. + if (this._preEditData) { + const text = this._writableData.clientDocumentContents!; + this._writableData = this._preEditData; + this._preEditData = undefined; + + return text; + } + + return undefined; + } + + // Indicates whether the contents of the file have changed since + // the last analysis was performed. + didContentsChangeOnDisk(): boolean { + // If this is an open file any content changes will be + // provided through the editor. We can assume contents + // didn't change without us knowing about them. + if (this._writableData.clientDocumentContents) { + return false; + } + + // If the file was never read previously we can't tell if the file has changed or not so + // we'll assume that it has. Otherwise, we may fail to analyze a file that was changed. + if (this._writableData.lastFileContentLength === undefined) { + return true; + } + + // Read in the latest file contents and see if the hash matches + // that of the previous contents. + try { + // Read the file's contents. 
+ if (this.fileSystem.existsSync(this._uri)) { + const fileContents = this.fileSystem.readFileSync(this._uri, 'utf8'); + + if (fileContents.length !== this._writableData.lastFileContentLength) { + return true; + } + + if (StringUtils.hashString(fileContents) !== this._writableData.lastFileContentHash) { + return true; + } + } else { + // No longer exists, so yes it has changed. + return true; + } + } catch (error) { + return true; + } + + return false; + } + + // Drop parse and binding info to save memory. It is used + // in cases where memory is low. When info is needed, the file + // will be re-parsed and rebound. + dropParseAndBindInfo(): void { + // If we are actively binding or checking this file, we can't + // safely drop parse and binding info. + if (this._writableData.isBindingInProgress || this._writableData.isCheckingInProgress) { + return; + } + + this._fireFileDirtyEvent(); + + this._writableData.parserOutput = undefined; + this._writableData.tokenizerLines = undefined; + this._writableData.tokenizerOutput = undefined; + this._writableData.parsedFileContents = undefined; + this._writableData.moduleSymbolTable = undefined; + this._writableData.isBindingNeeded = true; + this._writableData.imports = []; + } + + markDirty(): void { + this._writableData.fileContentsVersion++; + this._writableData.semanticVersion++; + this._writableData.noCircularDependencyConfirmed = false; + this._writableData.isCheckingNeeded = true; + this._writableData.isBindingNeeded = true; + this._writableData.moduleSymbolTable = undefined; + this._writableData.lineCount = undefined; + + this._fireFileDirtyEvent(); + } + + markReanalysisRequired(forceRebinding: boolean): void { + // Keep the parse info, but reset the analysis to the beginning. 
+ this._writableData.semanticVersion++; + this._writableData.isCheckingNeeded = true; + this._writableData.noCircularDependencyConfirmed = false; + + // If the file contains a wildcard import or __all__ symbols, + // we need to rebind because a dependent import may have changed. + if (this._writableData.parserOutput) { + if ( + this._writableData.parserOutput.containsWildcardImport || + AnalyzerNodeInfo.getDunderAllInfo(this._writableData.parserOutput.parseTree) !== undefined || + forceRebinding + ) { + // We don't need to rebuild index data since wildcard + // won't affect user file indices. User file indices + // don't contain import alias info. + this._writableData.parseTreeNeedsCleaning = true; + this._writableData.isBindingNeeded = true; + this._writableData.moduleSymbolTable = undefined; + } + } + } + + getFileContentsVersion() { + return this._writableData.fileContentsVersion; + } + + getClientVersion() { + return this._writableData.clientDocumentVersion; + } + + getSemanticVersion() { + return this._writableData.semanticVersion; + } + + getRange() { + return { start: { line: 0, character: 0 }, end: { line: this._writableData.lineCount ?? 0, character: 0 } }; + } + + getOpenFileContents() { + return this._writableData.clientDocumentContents; + } + + getFileContent(): string | undefined { + // Get current buffer content if the file is opened. + const openFileContent = this.getOpenFileContents(); + if (openFileContent !== undefined) { + return openFileContent; + } + + // Ensure that the content used here is identical to the content obtained from the parse results. + if (!this.isParseRequired() && this._writableData.parsedFileContents !== undefined) { + return this._writableData.parsedFileContents; + } + + // Otherwise, get content from file system. + try { + // Check the file's length before attempting to read its full contents. 
+ const fileStat = this.fileSystem.statSync(this._uri); + if (fileStat.size > maxSourceFileSize) { + this._console.error( + `File length of "${this._uri}" is ${fileStat.size} ` + + `which exceeds the maximum supported file size of ${maxSourceFileSize}` + ); + throw new Error('File larger than max'); + } + + return this.fileSystem.readFileSync(this._uri, 'utf8'); + } catch (error) { + return undefined; + } + } + + setClientVersion(version: number | null, contents: string): void { + // Save pre edit state if in edit mode. + this._cachePreEditState(); + + if (version === null) { + this._writableData.clientDocumentVersion = undefined; + this._writableData.clientDocumentContents = undefined; + + // Since the file is no longer open, dump the tokenizer output + // so it doesn't consume memory. + this._writableData.tokenizerOutput = undefined; + } else { + this._writableData.clientDocumentVersion = version; + this._writableData.clientDocumentContents = contents; + + const contentsHash = StringUtils.hashString(contents); + + // Have the contents of the file changed? 
+ if ( + contents.length !== this._writableData.lastFileContentLength || + contentsHash !== this._writableData.lastFileContentHash + ) { + this.markDirty(); + } + + this._writableData.lastFileContentLength = contents.length; + this._writableData.lastFileContentHash = contentsHash; + this._writableData.isFileDeleted = false; + } + } + + prepareForClose() { + this._fireFileDirtyEvent(); + } + + isFileDeleted() { + return this._writableData.isFileDeleted; + } + + isParseRequired() { + return ( + !this._writableData.parserOutput || + this._writableData.analyzedFileContentsVersion !== this._writableData.fileContentsVersion + ); + } + + isBindingRequired() { + if (this._writableData.isBindingInProgress) { + return false; + } + + if (this.isParseRequired()) { + return true; + } + + return this._writableData.isBindingNeeded; + } + + isCheckingRequired() { + return this._writableData.isCheckingNeeded; + } + + getParseResults(): ParseFileResults | undefined { + if (this.isParseRequired()) { + return undefined; + } + + assert(this._writableData.parserOutput !== undefined && this._writableData.parsedFileContents !== undefined); + + // If we've cached the tokenizer output, use the cached version. + // Otherwise re-tokenize the contents on demand. + const tokenizeContents = this._tokenizeContents.bind(this); + const parsedFileContents = this._writableData.parsedFileContents; + const contentHash = + this._writableData.lastFileContentHash || StringUtils.hashString(this._writableData.parsedFileContents); + let tokenizerOutput: TokenizerOutput | undefined = this._writableData.tokenizerOutput; + + return { + contentHash, + parserOutput: this._writableData.parserOutput, + get tokenizerOutput(): TokenizerOutput { + // Lazily tokenize the file contents only when accessed for the first time. 
+ if (!tokenizerOutput) { + tokenizerOutput = tokenizeContents(parsedFileContents, contentHash); + } + return tokenizerOutput!; + }, + text: this._writableData.parsedFileContents, + }; + } + + getParserOutput(): ParserOutput | undefined { + if (this.isParseRequired()) { + return undefined; + } + + assert(this._writableData.parserOutput !== undefined); + + return this._writableData.parserOutput; + } + + // Adds a new circular dependency for this file but only if + // it hasn't already been added. + addCircularDependency(configOptions: ConfigOptions, circDependency: CircularDependency) { + let updatedDependencyList = false; + + // Some topologies can result in a massive number of cycles. We'll cut it off. + if (this._writableData.circularDependencies.length < _maxImportCyclesPerFile) { + if (!this._writableData.circularDependencies.some((dep) => dep.isEqual(circDependency))) { + this._writableData.circularDependencies.push(circDependency); + updatedDependencyList = true; + } + } + + if (updatedDependencyList) { + this._recomputeDiagnostics(configOptions); + } + } + + setNoCircularDependencyConfirmed() { + this._writableData.noCircularDependencyConfirmed = true; + } + + isNoCircularDependencyConfirmed() { + return !this.isParseRequired() && this._writableData.noCircularDependencyConfirmed; + } + + setHitMaxImportDepth(maxImportDepth: number) { + this._writableData.hitMaxImportDepth = maxImportDepth; + } + + // Parse the file and update the state. Callers should wait for completion + // (or at least cancel) prior to calling again. It returns true if a parse + // was required and false if the parse information was up to date already. + parse(configOptions: ConfigOptions, importResolver: ImportResolver, content?: string): boolean { + return this._logTracker.log(`parsing: ${this._getPathForLogging(this._uri)}`, (logState) => { + // If the file is already parsed, we can skip. 
+ if (!this.isParseRequired()) { + logState.suppress(); + return false; + } + + const diagSink = this.createDiagnosticSink(); + let fileContents = this.getOpenFileContents(); + if (fileContents === undefined) { + try { + const startTime = timingStats.readFileTime.totalTime; + timingStats.readFileTime.timeOperation(() => { + // Read the file's contents. + fileContents = content ?? this.getFileContent(); + if (fileContents === undefined) { + throw new Error("Can't get file content"); + } + + // Remember the length and hash for comparison purposes. + this._writableData.lastFileContentLength = fileContents.length; + this._writableData.lastFileContentHash = StringUtils.hashString(fileContents); + }); + logState.add(`fs read ${timingStats.readFileTime.totalTime - startTime}ms`); + } catch (error) { + diagSink.addError(`Source file could not be read`, getEmptyRange()); + fileContents = ''; + + if (!this.fileSystem.existsSync(this._uri)) { + this._writableData.isFileDeleted = true; + } + } + } + + try { + // Parse the token stream, building the abstract syntax tree. + const parseFileResults = this._parseFile( + configOptions, + this._uri, + fileContents!, + this._ipythonMode !== IPythonMode.None, + diagSink + ); + + assert(parseFileResults !== undefined && parseFileResults.tokenizerOutput !== undefined); + this._writableData.parserOutput = parseFileResults.parserOutput; + this._writableData.tokenizerLines = parseFileResults.tokenizerOutput.lines; + this._writableData.parsedFileContents = fileContents; + this._writableData.typeIgnoreLines = parseFileResults.tokenizerOutput.typeIgnoreLines; + this._writableData.typeIgnoreAll = parseFileResults.tokenizerOutput.typeIgnoreAll; + this._writableData.pyrightIgnoreLines = parseFileResults.tokenizerOutput.pyrightIgnoreLines; + this._writableData.lineCount = parseFileResults.tokenizerOutput.lines.length; + + // Cache the tokenizer output only if this file is open. 
+ if (this._writableData.clientDocumentContents !== undefined) { + this._writableData.tokenizerOutput = parseFileResults.tokenizerOutput; + } + + // Resolve imports. + const execEnvironment = configOptions.findExecEnvironment(this._uri); + timingStats.resolveImportsTime.timeOperation(() => { + const importResult = this._resolveImports( + importResolver, + parseFileResults.parserOutput.importedModules, + execEnvironment + ); + + this._writableData.imports = importResult.imports; + this._writableData.builtinsImport = importResult.builtinsImportResult; + + this._writableData.parseDiagnostics = diagSink.fetchAndClear(); + + this._writableData.taskListDiagnostics = []; + this._addTaskListDiagnostics( + configOptions.taskListTokens, + parseFileResults.tokenizerOutput, + this._writableData.taskListDiagnostics + ); + }); + + // Is this file in a "strict" path? + const useStrict = + configOptions.strict.find((strictFileSpec) => this._uri.matchesRegex(strictFileSpec.regExp)) !== + undefined; + + const commentDiags: CommentUtils.CommentDiagnostic[] = []; + this._diagnosticRuleSet = CommentUtils.getFileLevelDirectives( + parseFileResults.tokenizerOutput.tokens, + parseFileResults.tokenizerOutput.lines, + execEnvironment.diagnosticRuleSet, + useStrict, + commentDiags + ); + + this._writableData.commentDiagnostics = []; + + commentDiags.forEach((commentDiag) => { + this._writableData.commentDiagnostics.push( + new Diagnostic( + DiagnosticCategory.Error, + commentDiag.message, + convertTextRangeToRange(commentDiag.range, parseFileResults.tokenizerOutput.lines) + ) + ); + }); + } catch (e: any) { + const message: string = + (e.stack ? e.stack.toString() : undefined) || + (typeof e.message === 'string' ? e.message : undefined) || + JSON.stringify(e); + this._console.error( + LocMessage.internalParseError().format({ + file: this.getUri().toUserVisibleString(), + message, + }) + ); + + // Create dummy parse results. 
+ this._writableData.parsedFileContents = ''; + this._writableData.tokenizerLines = new TextRangeCollection([]); + + this._writableData.parserOutput = { + parseTree: ModuleNode.create({ start: 0, length: 0 }), + importedModules: [], + futureImports: new Set(), + containsWildcardImport: false, + typingSymbolAliases: new Map(), + hasTypeAnnotations: false, + lines: this._writableData.tokenizerLines, + }; + + this._writableData.tokenizerOutput = { + tokens: new TextRangeCollection([]), + lines: this._writableData.tokenizerLines, + typeIgnoreAll: undefined, + typeIgnoreLines: new Map(), + pyrightIgnoreLines: new Map(), + predominantEndOfLineSequence: '\n', + hasPredominantTabSequence: false, + predominantTabSequence: ' ', + predominantSingleQuoteCharacter: "'", + }; + + this._writableData.imports = undefined; + this._writableData.builtinsImport = undefined; + + const diagSink = this.createDiagnosticSink(); + diagSink.addError( + LocMessage.internalParseError().format({ + file: this.getUri().toUserVisibleString(), + message, + }), + getEmptyRange() + ); + this._writableData.parseDiagnostics = diagSink.fetchAndClear(); + this._writableData.taskListDiagnostics = diagSink.fetchAndClear(); + + // Do not rethrow the exception, swallow it here. Callers are not + // prepared to handle an exception. 
+ } + + this._writableData.analyzedFileContentsVersion = this._writableData.fileContentsVersion; + this._writableData.isBindingNeeded = true; + this._writableData.isCheckingNeeded = true; + this._writableData.parseTreeNeedsCleaning = false; + this._writableData.hitMaxImportDepth = undefined; + + this._recomputeDiagnostics(configOptions); + + return true; + }); + } + + bind( + configOptions: ConfigOptions, + importLookup: ImportLookup, + builtinsScope: Scope | undefined, + futureImports: Set + ) { + assert(!this.isParseRequired(), 'Bind called before parsing'); + assert(this.isBindingRequired(), 'Bind called unnecessarily'); + assert(!this._writableData.isBindingInProgress, 'Bind called while binding in progress'); + assert(this._writableData.parserOutput !== undefined, 'Parse results not available'); + + return this._logTracker.log(`binding: ${this._getPathForLogging(this._uri)}`, () => { + try { + // Perform name binding. + timingStats.bindTime.timeOperation(() => { + this._cleanParseTreeIfRequired(); + + const fileInfo = this._buildFileInfo(configOptions, importLookup, builtinsScope, futureImports); + AnalyzerNodeInfo.setFileInfo(this._writableData.parserOutput!.parseTree, fileInfo); + + const binder = new Binder(fileInfo, configOptions.indexGenerationMode); + this._writableData.isBindingInProgress = true; + binder.bindModule(this._writableData.parserOutput!.parseTree); + + // If we're in "test mode" (used for unit testing), run an additional + // "test walker" over the parse tree to validate its internal consistency. 
+ if (configOptions.internalTestMode) { + const testWalker = new TestWalker(); + testWalker.walk(this._writableData.parserOutput!.parseTree); + } + + this._writableData.bindDiagnostics = fileInfo.diagnosticSink.fetchAndClear(); + const moduleScope = AnalyzerNodeInfo.getScope(this._writableData.parserOutput!.parseTree); + assert(moduleScope !== undefined, 'Module scope not returned by binder'); + this._writableData.moduleSymbolTable = moduleScope!.symbolTable; + }); + } catch (e: any) { + const message: string = + (e.stack ? e.stack.toString() : undefined) || + (typeof e.message === 'string' ? e.message : undefined) || + JSON.stringify(e); + this._console.error( + LocMessage.internalBindError().format({ + file: this.getUri().toUserVisibleString(), + message, + }) + ); + + const diagSink = this.createDiagnosticSink(); + diagSink.addError( + LocMessage.internalBindError().format({ + file: this.getUri().toUserVisibleString(), + message, + }), + getEmptyRange() + ); + this._writableData.bindDiagnostics = diagSink.fetchAndClear(); + + // Do not rethrow the exception, swallow it here. Callers are not + // prepared to handle an exception. + } finally { + this._writableData.isBindingInProgress = false; + } + + // Prepare for the next stage of the analysis. 
+ this._writableData.isCheckingNeeded = true; + this._writableData.isBindingNeeded = false; + + this._recomputeDiagnostics(configOptions); + }); + } + + check( + configOptions: ConfigOptions, + importLookup: ImportLookup, + importResolver: ImportResolver, + evaluator: TypeEvaluator, + dependentFiles?: ParserOutput[] + ) { + assert(!this.isParseRequired(), `Check called before parsing: state=${this._writableData.debugPrint()}`); + assert(!this.isBindingRequired(), `Check called before binding: state=${this._writableData.debugPrint()}`); + assert(!this._writableData.isBindingInProgress, 'Check called while binding in progress'); + assert(!this._writableData.isCheckingInProgress, 'Check called while checking in progress'); + assert(this.isCheckingRequired(), 'Check called unnecessarily'); + assert(this._writableData.parserOutput !== undefined, 'Parse results not available'); + + return this._logTracker.log(`checking: ${this._getPathForLogging(this._uri)}`, () => { + try { + timingStats.typeCheckerTime.timeOperation(() => { + const checkDuration = new Duration(); + const checker = new Checker( + importResolver, + evaluator, + this._writableData.parserOutput!, + dependentFiles + ); + this._writableData.isCheckingInProgress = true; + checker.check(); + this._writableData.isCheckingNeeded = false; + + const fileInfo = AnalyzerNodeInfo.getFileInfo(this._writableData.parserOutput!.parseTree)!; + this._writableData.checkerDiagnostics = fileInfo.diagnosticSink.fetchAndClear(); + this._writableData.checkTime = checkDuration.getDurationInMilliseconds(); + }); + } catch (e: any) { + const isCancellation = OperationCanceledException.is(e); + if (!isCancellation) { + const message: string = + (e.stack ? e.stack.toString() : undefined) || + (typeof e.message === 'string' ? 
e.message : undefined) || + JSON.stringify(e); + this._console.error( + LocMessage.internalTypeCheckingError().format({ + file: this.getUri().toUserVisibleString(), + message, + }) + ); + const diagSink = this.createDiagnosticSink(); + diagSink.addError( + LocMessage.internalTypeCheckingError().format({ + file: this.getUri().toUserVisibleString(), + message, + }), + getEmptyRange() + ); + + this._writableData.checkerDiagnostics = diagSink.fetchAndClear(); + + // Mark the file as complete so we don't get into an infinite loop. + this._writableData.isCheckingNeeded = false; + } + + throw e; + } finally { + this._writableData.isCheckingInProgress = false; + + // Clear any circular dependencies associated with this file. + // These will be detected by the program module and associated + // with the source file right before it is finalized. + this._writableData.circularDependencies = []; + + this._recomputeDiagnostics(configOptions); + } + }); + } + + test_enableIPythonMode(enable: boolean) { + this._ipythonMode = enable ? IPythonMode.CellDocs : IPythonMode.None; + } + + protected createDiagnosticSink(): DiagnosticSink { + return new DiagnosticSink(); + } + + protected createTextRangeDiagnosticSink(lines: TextRangeCollection): TextRangeDiagnosticSink { + return new TextRangeDiagnosticSink(lines); + } + + // Creates a short string that can be used to uniquely identify + // this file from all other files. It is used in the type evaluator + // to distinguish between types that are defined in different files + // or scopes. + private _makeFileId(uri: Uri) { + const maxNameLength = 8; + + // Use a small portion of the file name to help with debugging. + let fileName = uri.fileNameWithoutExtensions; + if (fileName.length > maxNameLength) { + fileName = fileName.substring(fileName.length - maxNameLength); + } + + // Append a number to guarantee uniqueness. 
+ const uniqueNumber = nextUniqueFileId++; + + // Use a "/" to separate the two components, since this + // character will never appear in a file name. + return `${fileName}/${uniqueNumber.toString()}`; + } + + // Computes an updated set of accumulated diagnostics for the file + // based on the partial diagnostics from various analysis stages. + private _recomputeDiagnostics(configOptions: ConfigOptions) { + this._writableData.diagnosticVersion++; + + let includeWarningsAndErrors = true; + + // If a file was imported as a third-party file, don't report + // any errors for it. The user can't fix them anyway. + if (this._isThirdPartyImport) { + includeWarningsAndErrors = false; + } + + let diagList: Diagnostic[] = []; + appendArray(diagList, this._writableData.parseDiagnostics); + appendArray(diagList, this._writableData.commentDiagnostics); + appendArray(diagList, this._writableData.bindDiagnostics); + appendArray(diagList, this._writableData.checkerDiagnostics); + appendArray(diagList, this._writableData.taskListDiagnostics); + + const prefilteredDiagList = diagList; + const typeIgnoreLinesClone = new Map(this._writableData.typeIgnoreLines); + const pyrightIgnoreLinesClone = new Map(this._writableData.pyrightIgnoreLines); + + // Filter the diagnostics based on "type: ignore" lines. + if (this._diagnosticRuleSet.enableTypeIgnoreComments) { + if (this._writableData.typeIgnoreLines.size > 0) { + diagList = diagList.filter((d) => { + if ( + d.category !== DiagnosticCategory.UnusedCode && + d.category !== DiagnosticCategory.UnreachableCode && + d.category !== DiagnosticCategory.Deprecated + ) { + for (let line = d.range.start.line; line <= d.range.end.line; line++) { + if (this._writableData.typeIgnoreLines.has(line)) { + typeIgnoreLinesClone.delete(line); + return false; + } + } + } + + return true; + }); + } + } + + // Filter the diagnostics based on "pyright: ignore" lines. 
+ if (this._writableData.pyrightIgnoreLines.size > 0) { + diagList = diagList.filter((d) => { + if ( + d.category !== DiagnosticCategory.UnusedCode && + d.category !== DiagnosticCategory.UnreachableCode && + d.category !== DiagnosticCategory.Deprecated + ) { + for (let line = d.range.start.line; line <= d.range.end.line; line++) { + const pyrightIgnoreComment = this._writableData.pyrightIgnoreLines.get(line); + if (pyrightIgnoreComment) { + if (!pyrightIgnoreComment.rulesList) { + pyrightIgnoreLinesClone.delete(line); + return false; + } + + const diagRule = d.getRule(); + if (!diagRule) { + // If there's no diagnostic rule, it won't match + // against a rules list. + return true; + } + + // Did we find this rule in the list? + if (pyrightIgnoreComment.rulesList.find((rule) => rule.text === diagRule)) { + // Update the pyrightIgnoreLinesClone to remove this rule. + const oldClone = pyrightIgnoreLinesClone.get(line); + if (oldClone?.rulesList) { + const filteredRulesList = oldClone.rulesList.filter( + (rule) => rule.text !== diagRule + ); + if (filteredRulesList.length === 0) { + pyrightIgnoreLinesClone.delete(line); + } else { + pyrightIgnoreLinesClone.set(line, { + range: oldClone.range, + rulesList: filteredRulesList, + }); + } + } + + return false; + } + + return true; + } + } + } + + return true; + }); + } + + const unnecessaryTypeIgnoreDiags: Diagnostic[] = []; + + // Skip this step if type checking is needed. Otherwise we'll likely produce + // incorrect (false positive) reportUnnecessaryTypeIgnoreComment diagnostics + // until checking is performed on this file. 
+ if ( + this._diagnosticRuleSet.reportUnnecessaryTypeIgnoreComment !== 'none' && + !this._writableData.isCheckingNeeded + ) { + const diagCategory = convertLevelToCategory(this._diagnosticRuleSet.reportUnnecessaryTypeIgnoreComment); + + const prefilteredErrorList = prefilteredDiagList.filter( + (diag) => + diag.category === DiagnosticCategory.Error || + diag.category === DiagnosticCategory.Warning || + diag.category === DiagnosticCategory.Information + ); + + const isUnreachableCodeRange = (range: Range) => { + return prefilteredDiagList.find( + (diag) => + diag.category === DiagnosticCategory.UnreachableCode && + diag.range.start.line <= range.start.line && + diag.range.end.line >= range.end.line + ); + }; + + if (prefilteredErrorList.length === 0 && this._writableData.typeIgnoreAll !== undefined) { + const rangeStart = this._writableData.typeIgnoreAll.range.start; + const rangeEnd = rangeStart + this._writableData.typeIgnoreAll.range.length; + const range = convertOffsetsToRange(rangeStart, rangeEnd, this._writableData.tokenizerLines!); + + if (!isUnreachableCodeRange(range) && this._diagnosticRuleSet.enableTypeIgnoreComments) { + const diag = new Diagnostic(diagCategory, LocMessage.unnecessaryTypeIgnore(), range); + diag.setRule(DiagnosticRule.reportUnnecessaryTypeIgnoreComment); + unnecessaryTypeIgnoreDiags.push(diag); + } + } + + typeIgnoreLinesClone.forEach((ignoreComment) => { + if (this._writableData.tokenizerLines!) 
{ + const rangeStart = ignoreComment.range.start; + const rangeEnd = rangeStart + ignoreComment.range.length; + const range = convertOffsetsToRange(rangeStart, rangeEnd, this._writableData.tokenizerLines!); + + if (!isUnreachableCodeRange(range) && this._diagnosticRuleSet.enableTypeIgnoreComments) { + const diag = new Diagnostic(diagCategory, LocMessage.unnecessaryTypeIgnore(), range); + diag.setRule(DiagnosticRule.reportUnnecessaryTypeIgnoreComment); + unnecessaryTypeIgnoreDiags.push(diag); + } + } + }); + + pyrightIgnoreLinesClone.forEach((ignoreComment) => { + if (this._writableData.tokenizerLines!) { + if (!ignoreComment.rulesList) { + const rangeStart = ignoreComment.range.start; + const rangeEnd = rangeStart + ignoreComment.range.length; + const range = convertOffsetsToRange(rangeStart, rangeEnd, this._writableData.tokenizerLines!); + + if (!isUnreachableCodeRange(range)) { + const diag = new Diagnostic(diagCategory, LocMessage.unnecessaryTypeIgnore(), range); + diag.setRule(DiagnosticRule.reportUnnecessaryTypeIgnoreComment); + unnecessaryTypeIgnoreDiags.push(diag); + } + } else { + ignoreComment.rulesList.forEach((unusedRule) => { + const rangeStart = unusedRule.range.start; + const rangeEnd = rangeStart + unusedRule.range.length; + const range = convertOffsetsToRange( + rangeStart, + rangeEnd, + this._writableData.tokenizerLines! 
+ ); + + if (!isUnreachableCodeRange(range)) { + const diag = new Diagnostic( + diagCategory, + LocMessage.unnecessaryPyrightIgnoreRule().format({ + name: unusedRule.text, + }), + range + ); + diag.setRule(DiagnosticRule.reportUnnecessaryTypeIgnoreComment); + unnecessaryTypeIgnoreDiags.push(diag); + } + }); + } + } + }); + } + + if ( + this._diagnosticRuleSet.reportImportCycles !== 'none' && + this._writableData.circularDependencies.length > 0 + ) { + const category = convertLevelToCategory(this._diagnosticRuleSet.reportImportCycles); + + this._writableData.circularDependencies.forEach((cirDep) => { + const diag = new Diagnostic( + category, + LocMessage.importCycleDetected() + + '\n' + + cirDep + .getPaths() + .map((path) => ' ' + path.toUserVisibleString()) + .join('\n'), + getEmptyRange() + ); + diag.setRule(DiagnosticRule.reportImportCycles); + diagList.push(diag); + }); + } + + if (this._writableData.hitMaxImportDepth !== undefined) { + diagList.push( + new Diagnostic( + DiagnosticCategory.Error, + LocMessage.importDepthExceeded().format({ depth: this._writableData.hitMaxImportDepth }), + getEmptyRange() + ) + ); + } + + // If there is a "type: ignore" comment at the top of the file, clear + // the diagnostic list of all error, warning, and information diagnostics. + if (this._diagnosticRuleSet.enableTypeIgnoreComments) { + if (this._writableData.typeIgnoreAll !== undefined) { + diagList = diagList.filter( + (diag) => + diag.category !== DiagnosticCategory.Error && + diag.category !== DiagnosticCategory.Warning && + diag.category !== DiagnosticCategory.Information + ); + } + } + + // Now add in the "unnecessary type ignore" diagnostics. + diagList = diagList.concat(unnecessaryTypeIgnoreDiags); + + // If we're not returning any diagnostics, filter out all of + // the errors and warnings, leaving only the unreachable code + // and deprecated diagnostics. 
+ if (!includeWarningsAndErrors) { + diagList = diagList.filter( + (diag) => + diag.category === DiagnosticCategory.UnusedCode || + diag.category === DiagnosticCategory.UnreachableCode || + diag.category === DiagnosticCategory.Deprecated + ); + } + + // If the file is in the ignore list, clear the diagnostic list. + if (configOptions.ignore.find((ignoreFileSpec) => this._uri.matchesRegex(ignoreFileSpec.regExp))) { + diagList = []; + } + + this._writableData.accumulatedDiagnostics = diagList; + } + + private _cachePreEditState() { + // If this is our first write, then make a copy of the writable data. + if (!this._editMode.isEditMode || this._preEditData) { + return; + } + + // Copy over the writable data. + this._preEditData = this._writableData; + + // Recreate all the writable data from scratch. + this._writableData = new WriteableData(); + } + + // Get all task list diagnostics for the current file and add them + // to the specified diagnostic list. + private _addTaskListDiagnostics( + taskListTokens: TaskListToken[] | undefined, + tokenizerOutput: TokenizerOutput, + diagList: Diagnostic[] + ) { + if (!taskListTokens || taskListTokens.length === 0 || !diagList) { + return; + } + + for (let i = 0; i < tokenizerOutput.tokens.count; i++) { + const token = tokenizerOutput.tokens.getItemAt(i); + + // If there are no comments, skip this token. + if (!token.comments || token.comments.length === 0) { + continue; + } + + for (const comment of token.comments) { + for (const token of taskListTokens) { + // Check if the comment matches the task list token. + // The comment must start with zero or more whitespace characters, + // followed by the taskListToken (case insensitive), + // followed by (0+ whitespace + EOL) OR (1+ NON-alphanumeric characters) + const regexStr = '^[\\s]*' + token.text + '([\\s]*$|[\\W]+)'; + const regex = RegExp(regexStr, 'i'); // case insensitive + + // If the comment doesn't match, skip it. 
+ if (!regex.test(comment.value)) { + continue; + } + + // Calculate the range for the diagnostic. This allows navigation + // to the comment via double clicking the item in the task list pane. + let rangeStart = comment.start; + + // The comment technically starts right after the comment identifier(#), + // but we want the caret right before the task list token (since there + // might be whitespace before it). + const indexOfToken = comment.value.toLowerCase().indexOf(token.text.toLowerCase()); + rangeStart += indexOfToken; + + const rangeEnd = TextRange.getEnd(comment); + const range = convertOffsetsToRange(rangeStart, rangeEnd, tokenizerOutput.lines!); + + // Add the diagnostic to the list and trim whitespace from the comment so + // it's easier to read in the task list. + diagList.push( + new Diagnostic(DiagnosticCategory.TaskItem, comment.value.trim(), range, token.priority) + ); + } + } + } + } + + private _buildFileInfo( + configOptions: ConfigOptions, + importLookup: ImportLookup, + builtinsScope: Scope | undefined, + futureImports: Set + ) { + assert(this._writableData.parserOutput !== undefined, 'Parse results not available'); + const analysisDiagnostics = this.createTextRangeDiagnosticSink(this._writableData.tokenizerLines!); + + const fileInfo: AnalyzerFileInfo = { + importLookup, + futureImports, + builtinsScope, + diagnosticSink: analysisDiagnostics, + executionEnvironment: configOptions.findExecEnvironment(this._uri), + diagnosticRuleSet: this._diagnosticRuleSet, + lines: this._writableData.tokenizerLines!, + typingSymbolAliases: this._writableData.parserOutput!.typingSymbolAliases, + definedConstants: configOptions.defineConstant, + fileId: this._fileId, + fileUri: this._uri, + moduleName: this.getModuleName(), + isStubFile: this._isStubFile, + isTypingStubFile: this._isTypingStubFile, + isTypingExtensionsStubFile: this._isTypingExtensionsStubFile, + isTypeshedStubFile: this._isTypeshedStubFile, + isBuiltInStubFile: this._isBuiltInStubFile, + 
isInPyTypedPackage: this._isThirdPartyPyTypedPresent, + ipythonMode: this._ipythonMode, + accessedSymbolSet: new Set(), + }; + return fileInfo; + } + + private _cleanParseTreeIfRequired() { + if (this._writableData.parserOutput) { + if (this._writableData.parseTreeNeedsCleaning) { + const cleanerWalker = new ParseTreeCleanerWalker(this._writableData.parserOutput.parseTree); + cleanerWalker.clean(); + this._writableData.parseTreeNeedsCleaning = false; + } + } + } + + private _resolveImports( + importResolver: ImportResolver, + moduleImports: ModuleImport[], + execEnv: ExecutionEnvironment + ): ResolveImportResult { + const imports: ImportResult[] = []; + + const resolveAndAddIfNotSelf = (nameParts: string[], skipMissingImport = false) => { + const importResult = importResolver.resolveImport(this._uri, execEnv, { + leadingDots: 0, + nameParts, + importedSymbols: undefined, + }); + + if (skipMissingImport && !importResult.isImportFound) { + return undefined; + } + + // Avoid importing module from the module file itself. + if (importResult.resolvedUris.length === 0 || importResult.resolvedUris[0] !== this._uri) { + imports.push(importResult); + return importResult; + } + + return undefined; + }; + + // Always include an implicit import of the builtins module. + let builtinsImportResult: ImportResult | undefined; + + // If this is a project source file (not a stub), try to resolve + // the __builtins__ stub first. 
+ if (!this._isThirdPartyImport && !this._isStubFile) { + builtinsImportResult = resolveAndAddIfNotSelf(['__builtins__'], /* skipMissingImport */ true); + } + + if (!builtinsImportResult) { + builtinsImportResult = resolveAndAddIfNotSelf(['builtins']); + } + + resolveAndAddIfNotSelf(['_typeshed', '_type_checker_internals'], /* skipMissingImport */ true); + + for (const moduleImport of moduleImports) { + const importResult = importResolver.resolveImport(this._uri, execEnv, { + leadingDots: moduleImport.leadingDots, + nameParts: moduleImport.nameParts, + importedSymbols: moduleImport.importedSymbols, + }); + + imports.push(importResult); + + // Associate the import results with the module import + // name node in the parse tree so we can access it later + // (for hover and definition support). + if (moduleImport.nameParts.length === moduleImport.nameNode.d.nameParts.length) { + AnalyzerNodeInfo.setImportInfo(moduleImport.nameNode, importResult); + } else { + // For implicit imports of higher-level modules within a multi-part + // module name, the moduleImport.nameParts will refer to the subset + // of the multi-part name rather than the full multi-part name. In this + // case, store the import info on the name part node. + assert(moduleImport.nameParts.length > 0); + assert(moduleImport.nameParts.length - 1 < moduleImport.nameNode.d.nameParts.length); + AnalyzerNodeInfo.setImportInfo( + moduleImport.nameNode.d.nameParts[moduleImport.nameParts.length - 1], + importResult + ); + } + } + + return { + imports, + builtinsImportResult, + }; + } + + private _getPathForLogging(fileUri: Uri) { + return getPathForLogging(this.fileSystem, fileUri); + } + + private _parseFile( + configOptions: ConfigOptions, + fileUri: Uri, + fileContents: string, + useNotebookMode: boolean, + diagSink: DiagnosticSink + ): ParseFileResults { + // Use the configuration options to determine the environment zin which + // this source file will be executed. 
+ const execEnvironment = configOptions.findExecEnvironment(fileUri); + + const parseOptions = new ParseOptions(); + parseOptions.useNotebookMode = useNotebookMode; + if (fileUri.pathEndsWith('pyi')) { + parseOptions.isStubFile = true; + } + parseOptions.pythonVersion = execEnvironment.pythonVersion; + parseOptions.skipFunctionAndClassBody = configOptions.indexGenerationMode ?? false; + + // Parse the token stream, building the abstract syntax tree. + const parser = new Parser(); + return parser.parseSourceFile(fileContents, parseOptions, diagSink); + } + + private _tokenizeContents(fileContents: string, contentHash: number): TokenizerOutput { + const tokenizer = new Tokenizer(); + const output = tokenizer.tokenize(fileContents); + + // When the file is open, cache the tokenizer results. + // Because the tokenizer is lazy, ensure that the state remains unchanged before caching its output. + if ( + this._writableData.clientDocumentContents !== undefined && + this._writableData.lastFileContentHash === contentHash + ) { + this._writableData.tokenizerOutput = output; + + // Replace the existing tokenizerLines with the newly-returned + // version. They should have the same contents, but we want to use + // the same object so the older object can be deallocated. 
+ this._writableData.tokenizerLines = output.lines; + } + + return output; + } + + private _fireFileDirtyEvent() { + this.serviceProvider.tryGet(ServiceKeys.stateMutationListeners)?.forEach((l) => { + try { + l.onFileDirty?.(this._uri); + } catch (ex: any) { + const console = this.serviceProvider.tryGet(ServiceKeys.console); + if (console) { + console.error(`State mutation listener exception: ${ex.message}`); + } + } + }); + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/sourceFileInfo.ts b/python-parser/packages/pyright-internal/src/analyzer/sourceFileInfo.ts new file mode 100644 index 00000000..f5f17069 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/sourceFileInfo.ts @@ -0,0 +1,245 @@ +/* + * sourceFileInfo.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Class that represents information around single source file. + */ + +import { SourceFile } from './sourceFile'; +import * as extensibility from '../common/extensibility'; + +// Tracks information about each source file in a program, +// including the reason it was added to the program and any +// dependencies that it has on other files in the program. 
+export class SourceFileInfo implements extensibility.SourceFileInfo { + private _writableData: WriteableData; + private _preEditData?: WriteableData; + + readonly isCreatedInEditMode: boolean; + + constructor( + readonly sourceFile: SourceFile, + readonly isTypeshedFile: boolean, + readonly isThirdPartyImport: boolean, + readonly isThirdPartyPyTypedPresent: boolean, + private readonly _editModeTracker: EditModeTracker, + args: OptionalArguments = {} + ) { + this.isCreatedInEditMode = this._editModeTracker.isEditMode; + + this._writableData = this._createWriteableData(args); + + this._cachePreEditState(); + } + + get diagnosticsVersion() { + return this._writableData.diagnosticsVersion; + } + + get builtinsImport() { + return this._writableData.builtinsImport; + } + + // Information about the chained source file + // Chained source file is not supposed to exist on file system but + // must exist in the program's source file list. Module level + // scope of the chained source file will be inserted before + // current file's scope. + get chainedSourceFile() { + return this._writableData.chainedSourceFile; + } + + get effectiveFutureImports() { + return this._writableData.effectiveFutureImports; + } + + // Information about why the file is included in the program + // and its relation to other source files in the program. + get isTracked() { + return this._writableData.isTracked; + } + + get isOpenByClient() { + return this._writableData.isOpenByClient; + } + get uri() { + return this.sourceFile.getUri(); + } + + get contents() { + return this.sourceFile.getFileContent() ?? 
''; + } + + get ipythonMode() { + return this.sourceFile.getIPythonMode(); + } + + get isStubFile() { + return this.sourceFile.isStubFile(); + } + + get isTypingStubFile() { + return this.sourceFile.isTypingStubFile(); + } + + get hasTypeAnnotations() { + const parseResults = this.sourceFile.getParserOutput(); + if (parseResults) { + return parseResults.hasTypeAnnotations; + } + return false; + } + + get imports(): readonly SourceFileInfo[] { + return this._writableData.imports; + } + + get importedBy(): readonly SourceFileInfo[] { + return this._writableData.importedBy; + } + + get shadows(): readonly SourceFileInfo[] { + return this._writableData.shadows; + } + + get shadowedBy(): readonly SourceFileInfo[] { + return this._writableData.shadowedBy; + } + + get clientVersion() { + return this.sourceFile.getClientVersion(); + } + + get semanticVersion() { + return this.sourceFile.getSemanticVersion(); + } + + set diagnosticsVersion(value: number | undefined) { + this._cachePreEditState(); + this._writableData.diagnosticsVersion = value; + } + + set builtinsImport(value: SourceFileInfo | undefined) { + this._cachePreEditState(); + this._writableData.builtinsImport = value; + } + + set chainedSourceFile(value: SourceFileInfo | undefined) { + this._cachePreEditState(); + this._writableData.chainedSourceFile = value; + } + + set effectiveFutureImports(value: ReadonlySet | undefined) { + this._cachePreEditState(); + this._writableData.effectiveFutureImports = value; + } + + set isTracked(value: boolean) { + this._cachePreEditState(); + this._writableData.isTracked = value; + } + + set isOpenByClient(value: boolean) { + this._cachePreEditState(); + this._writableData.isOpenByClient = value; + } + + mutate(callback: (s: WriteableData) => void) { + this._cachePreEditState(); + callback(this._writableData); + } + + restore() { + if (this._preEditData) { + this._writableData = this._preEditData; + this._preEditData = undefined; + + // Some states have changed. 
Force some of info to be re-calculated. + this.sourceFile.dropParseAndBindInfo(); + } + + return this.sourceFile.restore(); + } + + private _cachePreEditState() { + if (!this._editModeTracker.isEditMode || this._preEditData) { + return; + } + + this._preEditData = this._writableData; + this._writableData = this._cloneWriteableData(this._writableData); + + this._editModeTracker.addMutatedFiles(this); + } + + private _createWriteableData(args: OptionalArguments): WriteableData { + return { + isTracked: args.isTracked ?? false, + isOpenByClient: args.isOpenByClient ?? false, + builtinsImport: args.builtinsImport, + chainedSourceFile: args.chainedSourceFile, + diagnosticsVersion: args.diagnosticsVersion, + effectiveFutureImports: args.effectiveFutureImports, + imports: [], + importedBy: [], + shadows: [], + shadowedBy: [], + }; + } + + private _cloneWriteableData(data: WriteableData): WriteableData { + return { + isTracked: data.isTracked, + isOpenByClient: data.isOpenByClient, + builtinsImport: data.builtinsImport, + chainedSourceFile: data.chainedSourceFile, + diagnosticsVersion: data.diagnosticsVersion, + effectiveFutureImports: data.effectiveFutureImports, + imports: data.imports.slice(), + importedBy: data.importedBy.slice(), + shadows: data.shadows.slice(), + shadowedBy: data.shadowedBy.slice(), + }; + } +} + +interface EditModeTracker { + readonly isEditMode: boolean; + addMutatedFiles(file: SourceFileInfo): void; +} + +interface OptionalArguments { + isTracked?: boolean; + isOpenByClient?: boolean; + diagnosticsVersion?: number | undefined; + builtinsImport?: SourceFileInfo | undefined; + chainedSourceFile?: SourceFileInfo | undefined; + effectiveFutureImports?: ReadonlySet; +} + +interface WriteableData { + // Reference to the source file + // Information about the source file + diagnosticsVersion?: number | undefined; + + builtinsImport?: SourceFileInfo | undefined; + + // Information about the chained source file + // Chained source file is not supposed to 
exist on file system but + // must exist in the program's source file list. Module level + // scope of the chained source file will be inserted before + // current file's scope. + chainedSourceFile?: SourceFileInfo | undefined; + + effectiveFutureImports?: ReadonlySet; + + // Information about why the file is included in the program + // and its relation to other source files in the program. + isTracked: boolean; + isOpenByClient: boolean; + imports: SourceFileInfo[]; + importedBy: SourceFileInfo[]; + shadows: SourceFileInfo[]; + shadowedBy: SourceFileInfo[]; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/sourceFileInfoUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/sourceFileInfoUtils.ts new file mode 100644 index 00000000..bb7440a9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/sourceFileInfoUtils.ts @@ -0,0 +1,108 @@ +/* + * sourceFileInfoUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Functions that operate on SourceFileInfo objects. + */ + +import { fail } from '../common/debug'; +import { ProgramView, SourceFileInfo } from '../common/extensibility'; +import { ServiceKeys } from '../common/serviceKeys'; +import { IPythonMode } from './sourceFile'; + +export function isUserCode(fileInfo: SourceFileInfo | undefined) { + return !!fileInfo && fileInfo.isTracked && !fileInfo.isThirdPartyImport && !fileInfo.isTypeshedFile; +} + +export function collectImportedByCells(program: ProgramView, fileInfo: T): Set { + // The ImportedBy only works when files are parsed. Due to the lazy-loading nature of our system, + // we can't ensure that all files within the program are parsed, which might lead to an incomplete dependency graph. 
+ // Parsing all regular files goes against our lazy-nature, but for notebook cells, which we open by default, + // it makes sense to force complete parsing since they'll be parsed at some point anyway due to things like + // `semantic tokens` or `checkers`. + _parseAllOpenCells(program); + + const importedByCells = new Set(); + collectImportedByRecursively(fileInfo, importedByCells); + return importedByCells; +} + +export function collectImportedByRecursively(fileInfo: SourceFileInfo, importedBy: Set) { + fileInfo.importedBy.forEach((dep) => { + if (importedBy.has(dep)) { + // Already visited. + return; + } + + importedBy.add(dep); + collectImportedByRecursively(dep, importedBy); + }); +} + +export function verifyNoCyclesInChainedFiles(program: ProgramView, fileInfo: T): void { + let nextChainedFile = fileInfo.chainedSourceFile; + if (!nextChainedFile) { + return; + } + + const set = new Set([fileInfo.uri.key]); + while (nextChainedFile) { + const path = nextChainedFile.uri.key; + if (set.has(path)) { + // We found a cycle. + fail( + program.serviceProvider + .tryGet(ServiceKeys.debugInfoInspector) + ?.getCycleDetail(program, nextChainedFile) ?? `Found a cycle in implicit imports files for ${path}` + ); + } + + set.add(path); + nextChainedFile = nextChainedFile.chainedSourceFile; + } +} + +export function createChainedByList(program: ProgramView, fileInfo: T): T[] { + // We want to create reverse map of all chained files. + const map = new Map(); + for (const file of program.getSourceFileInfoList()) { + if (!file.chainedSourceFile) { + continue; + } + + map.set(file.chainedSourceFile, file); + } + + const visited = new Set(); + + const chainedByList: SourceFileInfo[] = [fileInfo]; + let current: SourceFileInfo | undefined = fileInfo; + while (current) { + if (visited.has(current)) { + fail( + program.serviceProvider.tryGet(ServiceKeys.debugInfoInspector)?.getCycleDetail(program, current) ?? 
+ 'detected a cycle in chained files' + ); + } + visited.add(current); + + current = map.get(current); + if (current) { + chainedByList.push(current); + } + } + + return chainedByList as T[]; +} + +function _parseAllOpenCells(program: ProgramView): void { + for (const file of program.getSourceFileInfoList()) { + if (file.ipythonMode !== IPythonMode.CellDocs) { + continue; + } + + program.getParserOutput(file.uri); + program.handleMemoryHighUsage(); + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/sourceMapper.ts b/python-parser/packages/pyright-internal/src/analyzer/sourceMapper.ts new file mode 100644 index 00000000..c51aebf8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/sourceMapper.ts @@ -0,0 +1,808 @@ +/* + * sourceMapper.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Logic that maps a ".pyi" stub to its ".py" source file. + */ + +import { CancellationToken } from 'vscode-jsonrpc'; + +import * as AnalyzerNodeInfo from '../analyzer/analyzerNodeInfo'; +import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; +import { appendArray } from '../common/collectionUtils'; +import { ExecutionEnvironment } from '../common/configOptions'; +import { isDefined } from '../common/core'; +import { assert, assertNever } from '../common/debug'; +import { Uri } from '../common/uri/uri'; +import { ClassNode, ModuleNode, ParseNode, ParseNodeType } from '../parser/parseNodes'; +import { + AliasDeclaration, + ClassDeclaration, + Declaration, + FunctionDeclaration, + isAliasDeclaration, + isClassDeclaration, + isFunctionDeclaration, + isParamDeclaration, + isSpecialBuiltInClassDeclaration, + isVariableDeclaration, + ParamDeclaration, + SpecialBuiltInClassDeclaration, + VariableDeclaration, +} from './declaration'; +import { ImportResolver } from './importResolver'; +import { SourceFile } from './sourceFile'; +import { SourceFileInfo } from './sourceFileInfo'; +import { isUserCode } from 
'./sourceFileInfoUtils'; +import { buildImportTree } from './sourceMapperUtils'; +import { TypeEvaluator } from './typeEvaluatorTypes'; +import { lookUpClassMember } from './typeUtils'; +import { ClassType, isFunction, isInstantiableClass, isOverloaded, OverloadedType } from './types'; + +type ClassOrFunctionOrVariableDeclaration = + | ClassDeclaration + | SpecialBuiltInClassDeclaration + | FunctionDeclaration + | VariableDeclaration; + +// Creates and binds a shadowed file within the program. +export type ShadowFileBinder = (stubFileUri: Uri, implFileUri: Uri) => SourceFile | undefined; +export type BoundSourceGetter = (fileUri: Uri) => SourceFileInfo | undefined; + +export class SourceMapper { + constructor( + private _importResolver: ImportResolver, + private _execEnv: ExecutionEnvironment, + private _evaluator: TypeEvaluator, + private _fileBinder: ShadowFileBinder, + private _boundSourceGetter: BoundSourceGetter, + private _mapCompiled: boolean, + private _preferStubs: boolean, + private _fromFile: SourceFileInfo | undefined, + private _cancelToken: CancellationToken + ) {} + + findModules(stubFileUri: Uri): ModuleNode[] { + const sourceFiles = this._isStubThatShouldBeMappedToImplementation(stubFileUri) + ? 
this._getBoundSourceFilesFromStubFile(stubFileUri) + : [this._boundSourceGetter(stubFileUri)?.sourceFile]; + + return sourceFiles + .filter(isDefined) + .map((sf) => sf.getParserOutput()?.parseTree) + .filter(isDefined); + } + + getModuleNode(fileUri: Uri): ModuleNode | undefined { + return this._boundSourceGetter(fileUri)?.sourceFile.getParserOutput()?.parseTree; + } + + findDeclarations(stubDecl: Declaration): Declaration[] { + if (isClassDeclaration(stubDecl)) { + return this._findClassOrTypeAliasDeclarations(stubDecl); + } else if (isFunctionDeclaration(stubDecl)) { + return this._findFunctionOrTypeAliasDeclarations(stubDecl); + } else if (isVariableDeclaration(stubDecl)) { + return this._findVariableDeclarations(stubDecl); + } else if (isParamDeclaration(stubDecl)) { + return this._findParamDeclarations(stubDecl); + } else if (isSpecialBuiltInClassDeclaration(stubDecl)) { + return this._findSpecialBuiltInClassDeclarations(stubDecl); + } + + return []; + } + + findDeclarationsByType(originatedPath: Uri, type: ClassType, useTypeAlias = false): Declaration[] { + const result: ClassOrFunctionOrVariableDeclaration[] = []; + this._addClassTypeDeclarations(originatedPath, type, result, new Set(), useTypeAlias); + return result; + } + + findClassDeclarationsByType(originatedPath: Uri, type: ClassType): ClassDeclaration[] { + const result = this.findDeclarationsByType(originatedPath, type); + return result.filter((r) => isClassDeclaration(r)).map((r) => r); + } + + findFunctionDeclarations(stubDecl: FunctionDeclaration): FunctionDeclaration[] { + return this._findFunctionOrTypeAliasDeclarations(stubDecl) + .filter((d) => isFunctionDeclaration(d)) + .map((d) => d); + } + + isUserCode(uri: Uri): boolean { + return isUserCode(this._boundSourceGetter(uri)); + } + + getNextFileName(uri: Uri) { + const withoutExtension = uri.stripExtension(); + let suffix = 1; + let result = withoutExtension.addExtension(`_${suffix}.py`); + while (this.isUserCode(result) && suffix < 1000) { 
+ suffix += 1; + result = withoutExtension.addExtension(`_${suffix}.py`); + } + return result; + } + + getSourcePathsFromStub(stubFileUri: Uri, fromFile: Uri | undefined): Uri[] { + // Attempt our stubFileUri to see if we can resolve it as a source file path + let results = this._importResolver.getSourceFilesFromStub(stubFileUri, this._execEnv, this._mapCompiled); + if (results.length > 0) { + return results; + } + + // If that didn't work, try looking through the graph up to our fromFile. + // One of them should be able to resolve to an actual file. + const stubFileImportTree = this._getStubFileImportTree(stubFileUri, fromFile); + + // Go through the items in this tree until we find at least one path. + for (let i = 0; i < stubFileImportTree.length; i++) { + results = this._importResolver.getSourceFilesFromStub( + stubFileImportTree[i], + this._execEnv, + this._mapCompiled + ); + if (results.length > 0) { + return results; + } + } + + return []; + } + + private _findSpecialBuiltInClassDeclarations( + stubDecl: SpecialBuiltInClassDeclaration, + recursiveDeclCache = new Set() + ) { + if (stubDecl.node.d.valueExpr.nodeType === ParseNodeType.Name) { + const className = stubDecl.node.d.valueExpr.d.value; + const sourceFiles = this._getBoundSourceFilesFromStubFile(stubDecl.uri); + + return sourceFiles.flatMap((sourceFile) => + this._findClassDeclarationsByName(sourceFile, className, recursiveDeclCache) + ); + } + + return []; + } + + private _findClassOrTypeAliasDeclarations(stubDecl: ClassDeclaration, recursiveDeclCache = new Set()) { + const className = this._getFullClassName(stubDecl.node); + const sourceFiles = this._getBoundSourceFilesFromStubFile(stubDecl.uri); + + return sourceFiles.flatMap((sourceFile) => + this._findClassDeclarationsByName(sourceFile, className, recursiveDeclCache) + ); + } + + private _findFunctionOrTypeAliasDeclarations( + stubDecl: FunctionDeclaration, + recursiveDeclCache = new Set() + ): ClassOrFunctionOrVariableDeclaration[] { + const 
functionName = stubDecl.node.d.name.d.value; + const sourceFiles = this._getBoundSourceFilesFromStubFile(stubDecl.uri); + + if (stubDecl.isMethod) { + const classNode = ParseTreeUtils.getEnclosingClass(stubDecl.node); + if (classNode === undefined) { + return []; + } + + const className = this._getFullClassName(classNode); + return sourceFiles.flatMap((sourceFile) => + this._findMethodDeclarationsByName(sourceFile, className, functionName, recursiveDeclCache) + ); + } else { + return sourceFiles.flatMap((sourceFile) => + this._findFunctionDeclarationsByName(sourceFile, functionName, recursiveDeclCache) + ); + } + } + + private _findVariableDeclarations( + stubDecl: VariableDeclaration, + recursiveDeclCache = new Set() + ): ClassOrFunctionOrVariableDeclaration[] { + if (stubDecl.node.nodeType !== ParseNodeType.Name) { + return []; + } + + const variableName = stubDecl.node.d.value; + const sourceFiles = this._getBoundSourceFilesFromStubFile(stubDecl.uri); + const classNode = ParseTreeUtils.getEnclosingClass(stubDecl.node); + + if (classNode) { + const className = this._getFullClassName(classNode); + + return sourceFiles.flatMap((sourceFile) => + this._findFieldDeclarationsByName(sourceFile, className, variableName, recursiveDeclCache) + ); + } else { + return sourceFiles.flatMap((sourceFile) => + this._findVariableDeclarationsByName(sourceFile, variableName, recursiveDeclCache) + ); + } + } + + private _findParamDeclarations(stubDecl: ParamDeclaration): ParamDeclaration[] { + const result: ParamDeclaration[] = []; + + if (!stubDecl.node.d.name) { + return result; + } + + const functionNode = ParseTreeUtils.getEnclosingFunction(stubDecl.node); + if (!functionNode) { + return result; + } + + const functionStubDecls = this._evaluator.getDeclInfoForNameNode(functionNode.d.name)?.decls; + if (!functionStubDecls) { + return result; + } + + const recursiveDeclCache = new Set(); + for (const functionStubDecl of functionStubDecls) { + if 
(isFunctionDeclaration(functionStubDecl)) { + for (const functionDecl of this._findFunctionOrTypeAliasDeclarations( + functionStubDecl, + recursiveDeclCache + )) { + appendArray( + result, + this._lookUpSymbolDeclarations(functionDecl.node, stubDecl.node.d.name.d.value) + .filter((d) => isParamDeclaration(d)) + .map((d) => d) + ); + } + } + } + + return result; + } + + private _findMemberDeclarationsByName( + sourceFile: SourceFile, + className: string, + memberName: string, + declAdder: (d: Declaration, c: Set, r: T[]) => void, + recursiveDeclCache: Set + ): T[] { + const result: T[] = []; + const classDecls = this._findClassDeclarationsByName(sourceFile, className, recursiveDeclCache); + + for (const classDecl of classDecls.filter((d) => isClassDeclaration(d)).map((d) => d)) { + const classResults = this._evaluator.getTypeOfClass(classDecl.node); + if (!classResults) { + continue; + } + + const member = lookUpClassMember(classResults.classType, memberName); + if (member) { + for (const decl of member.symbol.getDeclarations()) { + declAdder(decl, recursiveDeclCache, result); + } + } + } + + return result; + } + + private _findFieldDeclarationsByName( + sourceFile: SourceFile, + className: string, + variableName: string, + recursiveDeclCache: Set + ): VariableDeclaration[] { + let result: VariableDeclaration[] = []; + + const uniqueId = `@${sourceFile.getUri()}/c/${className}/v/${variableName}`; + if (recursiveDeclCache.has(uniqueId)) { + return result; + } + + recursiveDeclCache.add(uniqueId); + + result = this._findMemberDeclarationsByName( + sourceFile, + className, + variableName, + (decl, cache, result) => { + if (isVariableDeclaration(decl)) { + if (this._isStubThatShouldBeMappedToImplementation(decl.uri)) { + for (const implDecl of this._findVariableDeclarations(decl, cache)) { + if (isVariableDeclaration(implDecl)) { + result.push(implDecl); + } + } + } else { + result.push(decl); + } + } + }, + recursiveDeclCache + ); + + 
recursiveDeclCache.delete(uniqueId); + return result; + } + + private _findMethodDeclarationsByName( + sourceFile: SourceFile, + className: string, + functionName: string, + recursiveDeclCache: Set + ): ClassOrFunctionOrVariableDeclaration[] { + let result: ClassOrFunctionOrVariableDeclaration[] = []; + + const uniqueId = `@${sourceFile.getUri()}/c/${className}/f/${functionName}`; + if (recursiveDeclCache.has(uniqueId)) { + return result; + } + + recursiveDeclCache.add(uniqueId); + + result = this._findMemberDeclarationsByName( + sourceFile, + className, + functionName, + (decl, cache, result) => { + if (isFunctionDeclaration(decl)) { + if (this._isStubThatShouldBeMappedToImplementation(decl.uri)) { + appendArray(result, this._findFunctionOrTypeAliasDeclarations(decl, cache)); + } else { + result.push(decl); + } + } + }, + recursiveDeclCache + ); + + recursiveDeclCache.delete(uniqueId); + return result; + } + + private _findVariableDeclarationsByName( + sourceFile: SourceFile, + variableName: string, + recursiveDeclCache: Set + ): ClassOrFunctionOrVariableDeclaration[] { + const result: ClassOrFunctionOrVariableDeclaration[] = []; + + const uniqueId = `@${sourceFile.getUri()}/v/${variableName}`; + if (recursiveDeclCache.has(uniqueId)) { + return result; + } + + recursiveDeclCache.add(uniqueId); + + const moduleNode = sourceFile.getParserOutput()?.parseTree; + if (!moduleNode) { + // Don't bother deleting from the cache; we'll never get any info from this + // file if it has no tree. 
+ return result; + } + + const decls = this._lookUpSymbolDeclarations(moduleNode, variableName); + if (decls.length === 0) { + this._addDeclarationsFollowingWildcardImports(moduleNode, variableName, result, recursiveDeclCache); + } else { + for (const decl of decls) { + this._addVariableDeclarations(decl, result, recursiveDeclCache); + } + } + + recursiveDeclCache.delete(uniqueId); + return result; + } + + private _findFunctionDeclarationsByName( + sourceFile: SourceFile, + functionName: string, + recursiveDeclCache: Set + ): ClassOrFunctionOrVariableDeclaration[] { + const result: ClassOrFunctionOrVariableDeclaration[] = []; + + const uniqueId = `@${sourceFile.getUri()}/f/${functionName}`; + if (recursiveDeclCache.has(uniqueId)) { + return result; + } + + recursiveDeclCache.add(uniqueId); + + const moduleNode = sourceFile.getParserOutput()?.parseTree; + if (!moduleNode) { + // Don't bother deleting from the cache; we'll never get any info from this + // file if it has no tree. + return result; + } + + const decls = this._lookUpSymbolDeclarations(moduleNode, functionName); + if (decls.length === 0) { + this._addDeclarationsFollowingWildcardImports(moduleNode, functionName, result, recursiveDeclCache); + } else { + for (const decl of decls) { + this._addClassOrFunctionDeclarations(decl, result, recursiveDeclCache); + } + } + + recursiveDeclCache.delete(uniqueId); + return result; + } + + private _findClassDeclarationsByName( + sourceFile: SourceFile, + fullClassName: string, + recursiveDeclCache: Set + ): ClassOrFunctionOrVariableDeclaration[] { + let classDecls: ClassOrFunctionOrVariableDeclaration[] = []; + + // fullClassName is period delimited, for example: 'OuterClass.InnerClass' + const parentNode = sourceFile.getParserOutput()?.parseTree; + if (parentNode) { + let classNameParts = fullClassName.split('.'); + if (classNameParts.length > 0) { + classDecls = this._findClassDeclarations(sourceFile, classNameParts[0], parentNode, recursiveDeclCache); + 
classNameParts = classNameParts.slice(1); + } + + for (const classNamePart of classNameParts) { + classDecls = classDecls.flatMap((parentDecl) => + this._findClassDeclarations(sourceFile, classNamePart, parentDecl.node, recursiveDeclCache) + ); + } + } + + return classDecls; + } + + private _findClassDeclarations( + sourceFile: SourceFile, + className: string, + parentNode: ParseNode, + recursiveDeclCache: Set + ): ClassOrFunctionOrVariableDeclaration[] { + const result: ClassOrFunctionOrVariableDeclaration[] = []; + + const uniqueId = `@${sourceFile.getUri()}[${parentNode.start}]${className}`; + if (recursiveDeclCache.has(uniqueId)) { + return result; + } + + recursiveDeclCache.add(uniqueId); + + const decls = this._lookUpSymbolDeclarations(parentNode, className); + if (decls.length === 0 && parentNode.nodeType === ParseNodeType.Module) { + this._addDeclarationsFollowingWildcardImports(parentNode, className, result, recursiveDeclCache); + } else { + for (const decl of decls) { + this._addClassOrFunctionDeclarations(decl, result, recursiveDeclCache); + } + } + + recursiveDeclCache.delete(uniqueId); + return result; + } + + private _addVariableDeclarations( + decl: Declaration, + result: ClassOrFunctionOrVariableDeclaration[], + recursiveDeclCache: Set + ) { + if (isVariableDeclaration(decl)) { + if (this._isStubThatShouldBeMappedToImplementation(decl.uri)) { + appendArray(result, this._findVariableDeclarations(decl, recursiveDeclCache)); + } else { + result.push(decl); + } + } else if (isAliasDeclaration(decl)) { + const resolvedDecl = this._evaluator.resolveAliasDeclaration(decl, /* resolveLocalNames */ true); + if (resolvedDecl) { + if (isVariableDeclaration(resolvedDecl)) { + this._addVariableDeclarations(resolvedDecl, result, recursiveDeclCache); + } else if (isClassDeclaration(resolvedDecl) || isFunctionDeclaration(resolvedDecl)) { + this._addClassOrFunctionDeclarations(resolvedDecl, result, recursiveDeclCache); + } + } + } + } + + private 
_addClassOrFunctionDeclarations( + decl: Declaration, + result: ClassOrFunctionOrVariableDeclaration[], + recursiveDeclCache: Set + ) { + if (isClassDeclaration(decl)) { + if (this._isStubThatShouldBeMappedToImplementation(decl.uri)) { + appendArray(result, this._findClassOrTypeAliasDeclarations(decl, recursiveDeclCache)); + } else { + result.push(decl); + } + } else if (isSpecialBuiltInClassDeclaration(decl)) { + result.push(decl); + } else if (isFunctionDeclaration(decl)) { + if (this._isStubThatShouldBeMappedToImplementation(decl.uri)) { + appendArray(result, this._findFunctionOrTypeAliasDeclarations(decl, recursiveDeclCache)); + } else { + result.push(decl); + } + } else if (isAliasDeclaration(decl)) { + const adjustedDecl = this._handleSpecialBuiltInModule(decl); + const resolvedDecl = this._evaluator.resolveAliasDeclaration(adjustedDecl, /* resolveLocalNames */ true); + if (resolvedDecl && !isAliasDeclaration(resolvedDecl)) { + this._addClassOrFunctionDeclarations(resolvedDecl, result, recursiveDeclCache); + } + } else if (isVariableDeclaration(decl)) { + // Always add decl. This handles a case where function is dynamically generated such as pandas.read_csv or type alias. + this._addVariableDeclarations(decl, result, recursiveDeclCache); + + // And try to add the real decl if we can. Sometimes, we can't since import resolver can't follow up the type alias or assignment. + // Import resolver can't resolve an import that only exists in the lib but not in the stub in certain circumstance. + const nodeToBind = decl.typeAliasName ?? 
decl.node; + const type = this._evaluator.getType(nodeToBind); + if (!type) { + return; + } + + if (isFunction(type) && type.shared.declaration) { + this._addClassOrFunctionDeclarations(type.shared.declaration, result, recursiveDeclCache); + } else if (isOverloaded(type)) { + const overloads = OverloadedType.getOverloads(type); + for (const overloadDecl of overloads.map((o) => o.shared.declaration).filter(isDefined)) { + this._addClassOrFunctionDeclarations(overloadDecl, result, recursiveDeclCache); + } + } else if (isInstantiableClass(type)) { + this._addClassTypeDeclarations(decl.uri, type, result, recursiveDeclCache); + } + } + } + + private _handleSpecialBuiltInModule(decl: AliasDeclaration) { + // Some stdlib modules import builtin modules that don't actually exist as a file. + // For example, io.py has an import statement such as from _io import (..., ByteIO) + // but _io doesn't actually exist on disk so, decl.path will be empty. + // That means for symbols that belong to _io such as ByteIO, our regular method + // won't work. to make it work, this method does 2 things, first, it fakes path + // to _io in stdlib path which doesn't actually exist and call getSourceFiles to + // generate or extract builtin module info from runtime, the same way we do for builtin.pyi, + // and second, clone the given decl and set path to the generated pyi for the + // builtin module (ex, _io) to make resolveAliasDeclaration to work. + // once the path is set, our regular code path will work as expected. + if (!decl.uri.isEmpty() || !decl.node) { + // If module actually exists, nothing we need to do. + return decl; + } + + // See if it is one of those special cases. 
+ if (decl.moduleName !== 'io' && decl.moduleName !== 'collections') { + return decl; + } + + const stdLibPath = this._importResolver.getTypeshedStdLibPath(this._execEnv); + if (!stdLibPath) { + return decl; + } + + const fileInfo = ParseTreeUtils.getFileInfoFromNode(decl.node); + if (!fileInfo) { + return decl; + } + + // ImportResolver might be able to generate or extract builtin module's info + // from runtime if we provide right synthesized stub path. + const fakeStubPath = stdLibPath.combinePaths( + getModuleName() + .d.nameParts.map((n) => n.d.value) + .join('.') + '.pyi' + ); + + const sources = this._getSourceFiles(fakeStubPath, fileInfo.fileUri); + if (sources.length === 0) { + return decl; + } + + const synthesizedDecl = { ...decl }; + synthesizedDecl.uri = sources[0].getUri(); + + return synthesizedDecl; + + function getModuleName() { + switch (decl.node.nodeType) { + case ParseNodeType.ImportAs: + return decl.node.d.module; + case ParseNodeType.ImportFromAs: + assert(decl.node.parent?.nodeType === ParseNodeType.ImportFrom); + return decl.node.parent.d.module; + case ParseNodeType.ImportFrom: + return decl.node.d.module; + default: + return assertNever(decl.node); + } + } + } + + private _addClassTypeDeclarations( + originated: Uri, + type: ClassType, + result: ClassOrFunctionOrVariableDeclaration[], + recursiveDeclCache: Set, + useTypeAlias = false + ) { + const fileUri = + useTypeAlias && type.props?.typeAliasInfo ? type.props.typeAliasInfo.shared.fileUri : type.shared.fileUri; + const sourceFiles = this._getSourceFiles(fileUri, /* stubToShadow */ undefined, originated); + + const fullName = + useTypeAlias && type.props?.typeAliasInfo ? type.props.typeAliasInfo.shared.fullName : type.shared.fullName; + const moduleName = + useTypeAlias && type.props?.typeAliasInfo + ? 
type.props.typeAliasInfo.shared.moduleName + : type.shared.moduleName; + const fullClassName = fullName.substring(moduleName.length + 1 /* +1 for trailing dot */); + + for (const sourceFile of sourceFiles) { + appendArray(result, this._findClassDeclarationsByName(sourceFile, fullClassName, recursiveDeclCache)); + } + } + + private _getSourceFiles(fileUri: Uri, stubToShadow?: Uri, originated?: Uri) { + const sourceFiles: SourceFile[] = []; + + if (this._isStubThatShouldBeMappedToImplementation(fileUri)) { + appendArray(sourceFiles, this._getBoundSourceFilesFromStubFile(fileUri, stubToShadow, originated)); + } else { + const sourceFileInfo = this._boundSourceGetter(fileUri); + if (sourceFileInfo) { + sourceFiles.push(sourceFileInfo.sourceFile); + } + } + + return sourceFiles; + } + + private _addDeclarationsFollowingWildcardImports( + moduleNode: ModuleNode, + symbolName: string, + result: ClassOrFunctionOrVariableDeclaration[], + recursiveDeclCache: Set + ) { + // Symbol exists in a stub doesn't exist in a python file. Use some heuristic + // to find one from sources. + const table = AnalyzerNodeInfo.getScope(moduleNode)?.symbolTable; + if (!table) { + return; + } + + // Dig down imports with wildcard imports. + for (const symbol of table.values()) { + for (const decl of symbol.getDeclarations()) { + if ( + !isAliasDeclaration(decl) || + decl.uri.isEmpty() || + decl.node.nodeType !== ParseNodeType.ImportFrom || + !decl.node.d.isWildcardImport + ) { + continue; + } + + const uniqueId = `@${decl.uri.key}/l/${symbolName}`; + if (recursiveDeclCache.has(uniqueId)) { + continue; + } + + // While traversing these tables, we may encounter the same decl + // more than once (via different files' wildcard imports). To avoid this, + // add an ID unique to this function to the recursiveDeclCache to deduplicate + // them. + // + // The ID is not deleted to avoid needing a second Set to track all decls + // seen in this function. 
This is safe because the ID here is unique to this + // function. + recursiveDeclCache.add(uniqueId); + + const sourceFiles = this._getSourceFiles(decl.uri); + for (const sourceFile of sourceFiles) { + const moduleNode = sourceFile.getParserOutput()?.parseTree; + if (!moduleNode) { + continue; + } + + const decls = this._lookUpSymbolDeclarations(moduleNode, symbolName); + if (decls.length === 0) { + this._addDeclarationsFollowingWildcardImports( + moduleNode, + symbolName, + result, + recursiveDeclCache + ); + } else { + for (const decl of decls) { + const resolvedDecl = this._evaluator.resolveAliasDeclaration( + decl, + /* resolveLocalNames */ true + ); + if (!resolvedDecl) { + continue; + } + + if (isFunctionDeclaration(resolvedDecl) || isClassDeclaration(resolvedDecl)) { + this._addClassOrFunctionDeclarations(resolvedDecl, result, recursiveDeclCache); + } else if (isVariableDeclaration(resolvedDecl)) { + this._addVariableDeclarations(resolvedDecl, result, recursiveDeclCache); + } + } + } + } + } + } + } + + private _lookUpSymbolDeclarations(node: ParseNode | undefined, symbolName: string): Declaration[] { + if (node === undefined) { + return []; + } + + const containingScope = AnalyzerNodeInfo.getScope(node); + const symbol = containingScope?.lookUpSymbol(symbolName); + const decls = symbol?.getDeclarations(); + + return decls ?? []; + } + + private _getFullClassName(node: ClassNode) { + const fullName: string[] = []; + + let current: ClassNode | undefined = node; + while (current !== undefined) { + fullName.push(current.d.name.d.value); + current = ParseTreeUtils.getEnclosingClass(current); + } + + return fullName.reverse().join('.'); + } + + private _getBoundSourceFilesFromStubFile(stubFileUri: Uri, stubToShadow?: Uri, originated?: Uri): SourceFile[] { + const paths = this.getSourcePathsFromStub(stubFileUri, originated ?? this._fromFile?.uri); + return paths.map((fp) => this._fileBinder(stubToShadow ?? 
stubFileUri, fp)).filter(isDefined); + } + + private _getStubFileImportTree(stubFileUri: Uri, fromFile: Uri | undefined): Uri[] { + if (!fromFile || !this._isStubThatShouldBeMappedToImplementation(stubFileUri)) { + // No path to search, just return the starting point. + return [stubFileUri]; + } else { + // Otherwise recurse through the importedBy list up to our 'fromFile'. + return buildImportTree( + fromFile, + stubFileUri, + (p) => { + const boundSourceInfo = this._boundSourceGetter(p); + return boundSourceInfo ? boundSourceInfo.importedBy.map((info) => info.uri) : []; + }, + this._cancelToken + ).filter((p) => this._isStubThatShouldBeMappedToImplementation(p)); + } + } + + private _isStubThatShouldBeMappedToImplementation(fileUri: Uri): boolean { + if (this._preferStubs) { + return false; + } + + const stub = isStubFile(fileUri); + if (!stub) { + return false; + } + + // If we get the same file as a source file, then we treat the file as a regular file even if it has "pyi" extension. + return this._importResolver + .getSourceFilesFromStub(fileUri, this._execEnv, this._mapCompiled) + .every((f) => f !== fileUri); + } +} + +export function isStubFile(uri: Uri): boolean { + return uri.lastExtension === '.pyi'; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/sourceMapperUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/sourceMapperUtils.ts new file mode 100644 index 00000000..8f407908 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/sourceMapperUtils.ts @@ -0,0 +1,63 @@ +/* + * sourceMapperUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ */ + +import { CancellationToken } from 'vscode-jsonrpc'; +import { Uri } from '../common/uri/uri'; + +const MAX_TREE_SEARCH_COUNT = 1000; + +class NumberReference { + value = 0; +} + +// Builds an array of imports from the 'from' to the 'to' entry where 'from' +// is on the front of the array and the item just before 'to' is on the +// back of the array. +export function buildImportTree(to: Uri, from: Uri, next: (from: Uri) => Uri[], token: CancellationToken): Uri[] { + const totalCountRef = new NumberReference(); + const results = _buildImportTreeImpl(to, from, next, [], totalCountRef, token); + + // Result should always have the 'from' node in it. + return results.length > 0 ? results : [from]; +} + +function _buildImportTreeImpl( + to: Uri, + from: Uri, + next: (from: Uri) => Uri[], + previous: Uri[], + totalSearched: NumberReference, + token: CancellationToken +): Uri[] { + // Exit early if cancellation is requested or we've exceeded max count + if (totalSearched.value > MAX_TREE_SEARCH_COUNT || token.isCancellationRequested) { + return []; + } + totalSearched.value += 1; + + if (from.equals(to)) { + // At the top, previous should have our way into this recursion. + return previous.length ? previous : [from]; + } + + if (previous.length > 1 && previous.find((s) => s.equals(from))) { + // Fail the search, we're stuck in a loop. + return []; + } + + const nextEntries = next(from); + for (let i = 0; i < nextEntries.length && !token.isCancellationRequested; i++) { + // Do a search through the next level to get to the 'to' entry. + const subentries = _buildImportTreeImpl(to, nextEntries[i], next, [...previous, from], totalSearched, token); + + if (subentries.length > 0) { + return subentries; + } + } + + // Search failed on this tree. Fail so we can exit recursion. 
+ return []; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/staticExpressions.ts b/python-parser/packages/pyright-internal/src/analyzer/staticExpressions.ts new file mode 100644 index 00000000..80eac94c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/staticExpressions.ts @@ -0,0 +1,376 @@ +/* + * staticExpressions.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Functions that operate on expressions (parse node trees) + * whose values can be evaluated statically. + */ + +import { ExecutionEnvironment, PythonPlatform } from '../common/configOptions'; +import { PythonReleaseLevel, PythonVersion } from '../common/pythonVersion'; +import { ArgCategory, ExpressionNode, NameNode, NumberNode, ParseNodeType, TupleNode } from '../parser/parseNodes'; +import { KeywordType, OperatorType } from '../parser/tokenizerTypes'; + +// Returns undefined if the expression cannot be evaluated +// statically as a bool value or true/false if it can. +export function evaluateStaticBoolExpression( + node: ExpressionNode, + execEnv: ExecutionEnvironment, + definedConstants: Map, + typingImportAliases?: string[], + sysImportAliases?: string[] +): boolean | undefined { + if (node.nodeType === ParseNodeType.AssignmentExpression) { + return evaluateStaticBoolExpression( + node.d.rightExpr, + execEnv, + definedConstants, + typingImportAliases, + sysImportAliases + ); + } + + if (node.nodeType === ParseNodeType.UnaryOperation) { + if (node.d.operator === OperatorType.Not) { + const value = evaluateStaticBoolLikeExpression( + node.d.expr, + execEnv, + definedConstants, + typingImportAliases, + sysImportAliases + ); + if (value !== undefined) { + return !value; + } + } + } else if (node.nodeType === ParseNodeType.BinaryOperation) { + // Is it an OR or AND expression? 
+ if (node.d.operator === OperatorType.Or || node.d.operator === OperatorType.And) { + const leftValue = evaluateStaticBoolExpression( + node.d.leftExpr, + execEnv, + definedConstants, + typingImportAliases, + sysImportAliases + ); + const rightValue = evaluateStaticBoolExpression( + node.d.rightExpr, + execEnv, + definedConstants, + typingImportAliases, + sysImportAliases + ); + + if (leftValue === undefined || rightValue === undefined) { + return undefined; + } + + if (node.d.operator === OperatorType.Or) { + return leftValue || rightValue; + } else { + return leftValue && rightValue; + } + } + + if ( + _isSysVersionInfoExpression(node.d.leftExpr, sysImportAliases) && + node.d.rightExpr.nodeType === ParseNodeType.Tuple + ) { + // Handle the special case of "sys.version_info >= (3, x)" + const comparisonVersion = _convertTupleToVersion(node.d.rightExpr); + return _evaluateVersionBinaryOperation(node.d.operator, execEnv.pythonVersion, comparisonVersion); + } + + if ( + node.d.leftExpr.nodeType === ParseNodeType.Index && + _isSysVersionInfoExpression(node.d.leftExpr.d.leftExpr, sysImportAliases) && + node.d.leftExpr.d.items.length === 1 && + !node.d.leftExpr.d.trailingComma && + !node.d.leftExpr.d.items[0].d.name && + node.d.leftExpr.d.items[0].d.argCategory === ArgCategory.Simple && + node.d.leftExpr.d.items[0].d.valueExpr.nodeType === ParseNodeType.Number && + !node.d.leftExpr.d.items[0].d.valueExpr.d.isImaginary && + node.d.leftExpr.d.items[0].d.valueExpr.d.value === 0 && + node.d.rightExpr.nodeType === ParseNodeType.Number && + node.d.rightExpr.d.isInteger && + typeof node.d.rightExpr.d.value === 'number' + ) { + // Handle the special case of "sys.version_info[0] >= X" + return _evaluateVersionBinaryOperation( + node.d.operator, + PythonVersion.create(execEnv.pythonVersion.major, 0), + PythonVersion.create(node.d.rightExpr.d.value, 0) + ); + } + + if ( + _isSysPlatformInfoExpression(node.d.leftExpr, sysImportAliases) && + node.d.rightExpr.nodeType === 
ParseNodeType.StringList + ) { + // Handle the special case of "sys.platform != 'X'" + const comparisonPlatform = node.d.rightExpr.d.strings.map((s) => s.d.value).join(''); + const expectedPlatformName = _getExpectedPlatformNameFromPlatform(execEnv); + return _evaluateStringBinaryOperation(node.d.operator, expectedPlatformName, comparisonPlatform); + } + + if (_isOsNameInfoExpression(node.d.leftExpr) && node.d.rightExpr.nodeType === ParseNodeType.StringList) { + // Handle the special case of "os.name == 'X'" + const comparisonOsName = node.d.rightExpr.d.strings.map((s) => s.d.value).join(''); + const expectedOsName = _getExpectedOsNameFromPlatform(execEnv); + if (expectedOsName !== undefined) { + return _evaluateStringBinaryOperation(node.d.operator, expectedOsName, comparisonOsName); + } + } else { + // Handle the special case of == 'X' or != 'X'. + if (node.d.rightExpr.nodeType === ParseNodeType.StringList) { + let constantValue: string | number | boolean | undefined; + + if (node.d.leftExpr.nodeType === ParseNodeType.Name) { + constantValue = definedConstants.get(node.d.leftExpr.d.value); + } else if (node.d.leftExpr.nodeType === ParseNodeType.MemberAccess) { + constantValue = definedConstants.get(node.d.leftExpr.d.member.d.value); + } + + if (constantValue !== undefined && typeof constantValue === 'string') { + const comparisonStringName = node.d.rightExpr.d.strings.map((s) => s.d.value).join(''); + return _evaluateStringBinaryOperation(node.d.operator, constantValue, comparisonStringName); + } + } + } + } else if (node.nodeType === ParseNodeType.Constant) { + if (node.d.constType === KeywordType.True) { + return true; + } else if (node.d.constType === KeywordType.False) { + return false; + } + } else if (node.nodeType === ParseNodeType.Name) { + if (node.d.value === 'TYPE_CHECKING') { + return true; + } + + const constant = definedConstants.get(node.d.value); + if (constant !== undefined) { + return !!constant; + } + } else if (node.nodeType === 
ParseNodeType.MemberAccess) { + if ( + typingImportAliases && + node.d.member.d.value === 'TYPE_CHECKING' && + node.d.leftExpr.nodeType === ParseNodeType.Name && + typingImportAliases.some((alias) => alias === (node.d.leftExpr as NameNode).d.value) + ) { + return true; + } + + const constant = definedConstants.get(node.d.member.d.value); + if (constant !== undefined) { + return !!constant; + } + } + + return undefined; +} + +// Similar to evaluateStaticBoolExpression except that it handles +// other non-bool values that are statically falsy or truthy +// (like "None"). +export function evaluateStaticBoolLikeExpression( + node: ExpressionNode, + execEnv: ExecutionEnvironment, + definedConstants: Map, + typingImportAliases?: string[], + sysImportAliases?: string[] +): boolean | undefined { + if (node.nodeType === ParseNodeType.Constant) { + if (node.d.constType === KeywordType.None) { + return false; + } + } + + return evaluateStaticBoolExpression(node, execEnv, definedConstants, typingImportAliases, sysImportAliases); +} + +function _convertTupleToVersion(node: TupleNode): PythonVersion | undefined { + if (node.d.items.length >= 2) { + if ( + node.d.items[0].nodeType === ParseNodeType.Number && + !node.d.items[0].d.isImaginary && + node.d.items[1].nodeType === ParseNodeType.Number && + !node.d.items[1].d.isImaginary + ) { + const majorNode = node.d.items[0]; + const minorNode = node.d.items[1]; + if (typeof majorNode.d.value !== 'number' || typeof minorNode.d.value !== 'number') { + return undefined; + } + + const major = majorNode.d.value; + const minor = minorNode.d.value; + let micro: number | undefined; + if ( + node.d.items.length >= 3 && + node.d.items[2].nodeType === ParseNodeType.Number && + !node.d.items[2].d.isImaginary && + typeof node.d.items[2].d.value === 'number' + ) { + micro = node.d.items[2].d.value; + } + + let releaseLevel: PythonReleaseLevel | undefined; + if ( + node.d.items.length >= 4 && + node.d.items[3].nodeType === ParseNodeType.StringList 
&& + node.d.items[3].d.strings.length === 1 && + node.d.items[3].d.strings[0].nodeType === ParseNodeType.String + ) { + releaseLevel = node.d.items[3].d.strings[0].d.value as PythonReleaseLevel; + } + + let serial: number | undefined; + if ( + node.d.items.length >= 5 && + node.d.items[4].nodeType === ParseNodeType.Number && + !node.d.items[4].d.isImaginary && + typeof node.d.items[4].d.value === 'number' + ) { + serial = node.d.items[4].d.value; + } + + return PythonVersion.create(major, minor, micro, releaseLevel, serial); + } + } else if (node.d.items.length === 1) { + const major = node.d.items[0] as NumberNode; + if (typeof major.d.value === 'number') { + return PythonVersion.create(major.d.value, 0); + } + } + + return undefined; +} + +function _evaluateVersionBinaryOperation( + operatorType: OperatorType, + leftValue: PythonVersion | undefined, + rightValue: PythonVersion | undefined +): any | undefined { + if (leftValue !== undefined && rightValue !== undefined) { + if (operatorType === OperatorType.LessThan) { + return PythonVersion.isLessThan(leftValue, rightValue); + } + + if (operatorType === OperatorType.LessThanOrEqual) { + return PythonVersion.isLessOrEqualTo(leftValue, rightValue); + } + + if (operatorType === OperatorType.GreaterThan) { + return PythonVersion.isGreaterThan(leftValue, rightValue); + } + + if (operatorType === OperatorType.GreaterThanOrEqual) { + return PythonVersion.isGreaterOrEqualTo(leftValue, rightValue); + } + + if (operatorType === OperatorType.Equals) { + return PythonVersion.isEqualTo(leftValue, rightValue); + } + + if (operatorType === OperatorType.NotEquals) { + return !PythonVersion.isEqualTo(leftValue, rightValue); + } + } + + return undefined; +} + +function _evaluateStringBinaryOperation( + operatorType: OperatorType, + leftValue: string | undefined, + rightValue: string | undefined +): any | undefined { + if (leftValue !== undefined && rightValue !== undefined) { + if (operatorType === OperatorType.Equals) { + return 
leftValue === rightValue; + } else if (operatorType === OperatorType.NotEquals) { + return leftValue !== rightValue; + } + } + + return undefined; +} + +function _isSysVersionInfoExpression(node: ExpressionNode, sysImportAliases: string[] = ['sys']): boolean { + if (node.nodeType === ParseNodeType.MemberAccess) { + if (node.d.leftExpr.nodeType === ParseNodeType.Name && node.d.member.d.value === 'version_info') { + if (sysImportAliases.some((alias) => alias === (node.d.leftExpr as NameNode).d.value)) { + return true; + } + } + } + + return false; +} + +function _isSysPlatformInfoExpression(node: ExpressionNode, sysImportAliases: string[] = ['sys']): boolean { + if (node.nodeType === ParseNodeType.MemberAccess) { + if (node.d.leftExpr.nodeType === ParseNodeType.Name && node.d.member.d.value === 'platform') { + if (sysImportAliases.some((alias) => alias === (node.d.leftExpr as NameNode).d.value)) { + return true; + } + } + } + + return false; +} + +function _isOsNameInfoExpression(node: ExpressionNode): boolean { + if (node.nodeType === ParseNodeType.MemberAccess) { + if ( + node.d.leftExpr.nodeType === ParseNodeType.Name && + node.d.leftExpr.d.value === 'os' && + node.d.member.d.value === 'name' + ) { + return true; + } + } + + return false; +} + +function _getExpectedPlatformNameFromPlatform(execEnv: ExecutionEnvironment): string | undefined { + if (execEnv.pythonPlatform === PythonPlatform.Darwin) { + return 'darwin'; + } else if (execEnv.pythonPlatform === PythonPlatform.Windows) { + return 'win32'; + } else if (execEnv.pythonPlatform === PythonPlatform.Linux) { + return 'linux'; + } else if (execEnv.pythonPlatform === PythonPlatform.iOS) { + return 'ios'; + } else if (execEnv.pythonPlatform === PythonPlatform.Android) { + // Python >= 3.13 reports Android as 'android', earlier used to report it as 'linux' + if (execEnv.pythonVersion.major === 3 && execEnv.pythonVersion.minor >= 13) { + return 'android'; + } else { + return 'linux'; + } + } + + return undefined; 
+} + +function _getExpectedOsNameFromPlatform(execEnv: ExecutionEnvironment): string | undefined { + if (execEnv.pythonPlatform === PythonPlatform.Darwin) { + return 'posix'; + } else if (execEnv.pythonPlatform === PythonPlatform.Windows) { + return 'nt'; + } else if (execEnv.pythonPlatform === PythonPlatform.Linux) { + return 'posix'; + } else if (execEnv.pythonPlatform === PythonPlatform.iOS) { + return 'posix'; + } else if (execEnv.pythonPlatform === PythonPlatform.Android) { + return 'posix'; + } + + return undefined; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/symbol.ts b/python-parser/packages/pyright-internal/src/analyzer/symbol.ts new file mode 100644 index 00000000..57ab6fc5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/symbol.ts @@ -0,0 +1,311 @@ +/* + * symbol.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Represents an association between a name and the type + * (or multiple types) that the symbol is associated with + * in the program. + */ + +import { NameNode } from '../parser/parseNodes'; +import { Declaration, DeclarationType } from './declaration'; +import { areDeclarationsSame, hasTypeForDeclaration } from './declarationUtils'; +import { Type } from './types'; + +export const enum SymbolFlags { + None = 0, + + // Indicates that the symbol is unbound at the start of + // execution. Some symbols are initialized by the module + // loader, so they are bound even before the first statement + // in the module is executed. + InitiallyUnbound = 1 << 0, + + // Indicates that the symbol is not visible from other files. + // Used for module-level symbols. + ExternallyHidden = 1 << 1, + + // Indicates that the symbol is a class member of a class. + ClassMember = 1 << 2, + + // Indicates that the symbol is an instance member of a class. 
+ InstanceMember = 1 << 3, + + // Indicates that the symbol is specified in the __slots__ + // declaration of a class. Such symbols act like instance members + // in some respects but are actually implemented as class members + // using descriptor objects. + SlotsMember = 1 << 4, + + // Indicates that the symbol is considered "private" to the + // class or module and should not be accessed outside or overridden. + PrivateMember = 1 << 5, + + // Indicates that the symbol is not considered for protocol + // matching. This applies to some built-in symbols like __module__. + IgnoredForProtocolMatch = 1 << 6, + + // Indicates that the symbol is a ClassVar, so it cannot be + // set when accessed through a class instance. + ClassVar = 1 << 7, + + // Indicates that the symbol is in __all__. + InDunderAll = 1 << 8, + + // Indicates that the symbol is a private import in a py.typed module. + PrivatePyTypedImport = 1 << 9, + + // Indicates that the symbol is an InitVar as specified in PEP 557. + InitVar = 1 << 10, + + // Indicates that the symbol is a field in a NamedTuple class, which + // is modeled as an instance member but in some respects acts as a + // class member. + NamedTupleMember = 1 << 11, + + // Indicates that the symbol should be exempt from override type checks. + IgnoredForOverrideChecks = 1 << 12, + + // Indicates that the symbol is marked Final and is assigned a value + // in the class body. The typing spec indicates that these should be + // considered ClassVars unless they are found in a dataclass. + FinalVarInClassBody = 1 << 13, +} + +let nextSymbolId = 1; +function getUniqueSymbolId() { + return nextSymbolId++; +} + +// Symbol ID that indicates that there is no specific symbol. +export const indeterminateSymbolId = 0; + +export interface SynthesizedTypeInfo { + type: Type; + + // An optional node that is not used by the type evaluator + // but can be used by language services to provide additional + // functionality (such as go-to-definition). 
+ node?: NameNode; +} + +export class Symbol { + // Information about the node that declared the value - + // i.e. where the editor will take the user if "show definition" + // is selected. Multiple declarations can exist for variables, + // properties, and functions (in the case of @overload). + private _declarations?: Declaration[]; + + // Flags that provide information about the symbol. + private _flags: SymbolFlags; + + // Unique numeric ID for each symbol allocated. + readonly id: number; + + // Symbols that are completely synthesized (i.e. have no + // corresponding declarations in the program) can have + // a specified type. + private _synthesizedTypeInfo?: SynthesizedTypeInfo; + + // Is this symbol an alias for a symbol originally imported from + // the typing or typing_extensions module (e.g. "Final")? + private _typingSymbolAlias?: string; + + constructor(flags: SymbolFlags) { + this.id = getUniqueSymbolId(); + this._flags = flags; + } + + static createWithType(flags: SymbolFlags, type: Type, node?: NameNode): Symbol { + const newSymbol = new Symbol(flags); + newSymbol._synthesizedTypeInfo = { type, node }; + return newSymbol; + } + + isInitiallyUnbound() { + return !!(this._flags & SymbolFlags.InitiallyUnbound); + } + + setIsExternallyHidden() { + this._flags |= SymbolFlags.ExternallyHidden; + } + + isExternallyHidden() { + return !!(this._flags & SymbolFlags.ExternallyHidden); + } + + setIsIgnoredForProtocolMatch() { + this._flags |= SymbolFlags.IgnoredForProtocolMatch; + } + + isIgnoredForProtocolMatch() { + return !!(this._flags & SymbolFlags.IgnoredForProtocolMatch); + } + + setIsClassMember() { + this._flags |= SymbolFlags.ClassMember; + } + + isClassMember() { + return !!(this._flags & SymbolFlags.ClassMember); + } + + setIsInstanceMember() { + this._flags |= SymbolFlags.InstanceMember; + } + + isInstanceMember() { + return !!(this._flags & SymbolFlags.InstanceMember); + } + + setIsSlotsMember() { + this._flags |= SymbolFlags.ClassMember | 
SymbolFlags.InstanceMember | SymbolFlags.SlotsMember; + } + + isSlotsMember() { + return !!(this._flags & SymbolFlags.SlotsMember); + } + + setIsClassVar() { + this._flags |= SymbolFlags.ClassVar; + } + + isClassVar() { + return !!(this._flags & SymbolFlags.ClassVar); + } + + setIsFinalVarInClassBody() { + this._flags |= SymbolFlags.FinalVarInClassBody; + } + + isFinalVarInClassBody() { + return !!(this._flags & SymbolFlags.FinalVarInClassBody); + } + + setIsInitVar() { + this._flags |= SymbolFlags.InitVar; + } + + isInitVar() { + return !!(this._flags & SymbolFlags.InitVar); + } + + setIsInDunderAll() { + this._flags |= SymbolFlags.InDunderAll; + } + + isInDunderAll() { + return !!(this._flags & SymbolFlags.InDunderAll); + } + + setIsPrivateMember() { + this._flags |= SymbolFlags.PrivateMember; + } + + isPrivateMember() { + return !!(this._flags & SymbolFlags.PrivateMember); + } + + setPrivatePyTypedImport() { + this._flags |= SymbolFlags.PrivatePyTypedImport; + } + + isPrivatePyTypedImport() { + return !!(this._flags & SymbolFlags.PrivatePyTypedImport); + } + + isNamedTupleMemberMember() { + return !!(this._flags & SymbolFlags.NamedTupleMember); + } + + isIgnoredForOverrideChecks() { + return !!(this._flags & SymbolFlags.IgnoredForOverrideChecks); + } + + setTypingSymbolAlias(aliasedName: string) { + this._typingSymbolAlias = aliasedName; + } + + getTypingSymbolAlias(): string | undefined { + return this._typingSymbolAlias; + } + + addDeclaration(declaration: Declaration) { + if (this._declarations) { + // See if this node was already identified as a declaration. If so, + // replace it. Otherwise, add it as a new declaration to the end of + // the list. + const declIndex = this._declarations.findIndex((decl) => areDeclarationsSame(decl, declaration)); + if (declIndex < 0) { + this._declarations.push(declaration); + + // If there is more than one declaration for a symbol, we will + // assume it is not a type alias. 
+ this._declarations.forEach((decl) => { + if (decl.type === DeclarationType.Variable && decl.typeAliasName) { + delete decl.typeAliasName; + } + }); + } else { + // If the new declaration has a defined type, it should replace + // the existing one. + const curDecl = this._declarations[declIndex]; + if (hasTypeForDeclaration(declaration)) { + this._declarations[declIndex] = declaration; + if (curDecl.type === DeclarationType.Variable && declaration.type === DeclarationType.Variable) { + if (!declaration.inferredTypeSource && curDecl.inferredTypeSource) { + declaration.inferredTypeSource = curDecl.inferredTypeSource; + } + } + } else if (declaration.type === DeclarationType.Variable) { + // If it's marked "final" or "type alias", this should be reflected + // in the existing declaration. Likewise, if the existing declaration + // doesn't have a type source, add it. + if (curDecl.type === DeclarationType.Variable) { + if (declaration.isFinal) { + curDecl.isFinal = true; + } + + curDecl.typeAliasName = declaration.typeAliasName; + + if (!curDecl.inferredTypeSource && declaration.inferredTypeSource) { + curDecl.inferredTypeSource = declaration.inferredTypeSource; + } + } + } + } + } else { + this._declarations = [declaration]; + } + } + + hasDeclarations() { + return this._declarations ? this._declarations.length > 0 : false; + } + + getDeclarations() { + return this._declarations ? this._declarations : []; + } + + hasTypedDeclarations() { + // We'll treat an synthesized type as an implicit declaration. + if (this._synthesizedTypeInfo) { + return true; + } + + return this.getDeclarations().some((decl) => hasTypeForDeclaration(decl)); + } + + getTypedDeclarations() { + return this.getDeclarations().filter((decl) => hasTypeForDeclaration(decl)); + } + + getSynthesizedType() { + return this._synthesizedTypeInfo; + } +} + +// Maps names to symbol information. 
+export type SymbolTable = Map; diff --git a/python-parser/packages/pyright-internal/src/analyzer/symbolNameUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/symbolNameUtils.ts new file mode 100644 index 00000000..e344563e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/symbolNameUtils.ts @@ -0,0 +1,50 @@ +/* + * symbolNameUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Static methods that apply to symbols or symbol names. + */ + +const _constantRegEx = /^[A-Z0-9_]+$/; +const _underscoreOnlyRegEx = /^[_]+$/; +const _camelCaseRegEx = /^_{0,2}[A-Z][A-Za-z0-9_]+$/; + +// Private symbol names start with a double underscore. +export function isPrivateName(name: string) { + return name.length > 2 && name.startsWith('__') && !name.endsWith('__'); +} + +// Protected symbol names start with a single underscore. +export function isProtectedName(name: string) { + return name.length > 1 && name.startsWith('_') && !name.startsWith('__'); +} + +export function isPrivateOrProtectedName(name: string) { + return isPrivateName(name) || isProtectedName(name); +} + +// "Dunder" names start and end with two underscores. +export function isDunderName(name: string) { + return name.length > 4 && name.startsWith('__') && name.endsWith('__'); +} + +// "Single Dunder" names start and end with single underscores. +export function isSingleDunderName(name: string) { + return name.length > 2 && name.startsWith('_') && name.endsWith('_'); +} + +// Constants are all-caps with possible numbers and underscores. +export function isConstantName(name: string) { + return !!name.match(_constantRegEx) && !name.match(_underscoreOnlyRegEx); +} + +// Type aliases are CamelCase with possible numbers and underscores. 
+export function isTypeAliasName(name: string) { + return !!name.match(_camelCaseRegEx); +} + +export function isPublicConstantOrTypeAlias(name: string) { + return !isPrivateOrProtectedName(name) && (isConstantName(name) || isTypeAliasName(name)); +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/symbolUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/symbolUtils.ts new file mode 100644 index 00000000..c265eade --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/symbolUtils.ts @@ -0,0 +1,52 @@ +/* + * symbolUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Functions that operate on Symbol objects. + */ + +import { Declaration, DeclarationType } from './declaration'; +import { Symbol } from './symbol'; + +export function getLastTypedDeclarationForSymbol(symbol: Symbol): Declaration | undefined { + const typedDecls = symbol.getTypedDeclarations(); + + if (typedDecls.length > 0) { + return typedDecls[typedDecls.length - 1]; + } + + return undefined; +} + +// Within TypedDict classes, member variables are not accessible as +// normal attributes. Instead, they are accessed through index operations. 
+export function isTypedDictMemberAccessedThroughIndex(symbol: Symbol): boolean { + const typedDecls = symbol.getTypedDeclarations(); + + if (typedDecls.length > 0) { + const lastDecl = typedDecls[typedDecls.length - 1]; + if (lastDecl.type === DeclarationType.Variable) { + return true; + } + } + + return false; +} + +export function isVisibleExternally(symbol: Symbol) { + return !symbol.isExternallyHidden() && !symbol.isPrivatePyTypedImport(); +} + +export function isEffectivelyClassVar(symbol: Symbol, isInDataclass: boolean) { + if (symbol.isClassVar()) { + return true; + } + + if (symbol.isFinalVarInClassBody()) { + return !isInDataclass; + } + + return false; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/testWalker.ts b/python-parser/packages/pyright-internal/src/analyzer/testWalker.ts new file mode 100644 index 00000000..b9689f26 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/testWalker.ts @@ -0,0 +1,122 @@ +/* + * testWalker.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Walks a parse tree to validate internal consistency and completeness. + */ + +import { ParseTreeWalker } from '../analyzer/parseTreeWalker'; +import { assertNever, fail } from '../common/debug'; +import { TextRange } from '../common/textRange'; +import { NameNode, ParseNode, ParseNodeArray, ParseNodeType } from '../parser/parseNodes'; +import { isCompliantWithNodeRangeRules } from './parseTreeUtils'; +import { TypeEvaluator } from './typeEvaluatorTypes'; + +export class TestWalker extends ParseTreeWalker { + constructor() { + super(); + } + + override visitNode(node: ParseNode) { + const children = super.visitNode(node); + this._verifyParentChildLinks(node, children); + this._verifyChildRanges(node, children); + + return children; + } + + // Make sure that all of the children point to their parent. 
+ private _verifyParentChildLinks(node: ParseNode, children: ParseNodeArray) { + children.forEach((child) => { + if (child) { + if (child.parent !== node) { + fail( + `Child node ${child.nodeType} does not ` + `contain a reference to its parent ${node.nodeType}` + ); + } + } + }); + } + + // Verify that: + // Children are all contained within the parent + // Children have non-overlapping ranges + // Children are listed in increasing order + private _verifyChildRanges(node: ParseNode, children: ParseNodeArray) { + let prevNode: ParseNode | undefined; + + const compliant = isCompliantWithNodeRangeRules(node); + children.forEach((child) => { + if (child) { + let skipCheck = false; + + if (!compliant) { + switch (node.nodeType) { + case ParseNodeType.Assignment: + // There are a few exceptions we need to deal with here. Comment + // annotations can occur outside of an assignment node's range. + if (child === node.d.annotationComment) { + skipCheck = true; + } + + // Portions of chained assignments can occur outside of an + // assignment node's range. + if (child.nodeType === ParseNodeType.Assignment) { + skipCheck = true; + } + break; + + case ParseNodeType.StringList: + if (child === node.d.annotation) { + skipCheck = true; + } + break; + + default: + assertNever(node); + } + } + + if (!skipCheck) { + // Make sure the child is contained within the parent. + if (child.start < node.start || TextRange.getEnd(child) > TextRange.getEnd(node)) { + fail(`Child node ${child.nodeType} is not contained within its parent ${node.nodeType}`); + } + + if (prevNode) { + // Make sure the child is after the previous child. + if (child.start < TextRange.getEnd(prevNode)) { + // Special-case the function annotation which can "bleed" into the suite. 
+ const exempted = prevNode.nodeType === ParseNodeType.FunctionAnnotation; + + if (!exempted) { + fail(`Child node is not after previous child node`); + } + } + } + + prevNode = child; + } + } + }); + } +} + +// Custom parse node walker that evaluates the types of all +// NameNodes. This helps find bugs in evaluation ordering. +export class NameTypeWalker extends ParseTreeWalker { + constructor(private _evaluator: TypeEvaluator) { + super(); + } + + override visitName(node: NameNode) { + if (node.parent?.nodeType !== ParseNodeType.ImportFromAs && node.parent?.nodeType !== ParseNodeType.ImportAs) { + if (this._evaluator.isNodeReachable(node, /* sourceNode */ undefined)) { + this._evaluator.getType(node); + } + } + return true; + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/tracePrinter.ts b/python-parser/packages/pyright-internal/src/analyzer/tracePrinter.ts new file mode 100644 index 00000000..5d7985a5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/tracePrinter.ts @@ -0,0 +1,272 @@ +/* + * tracePrinter.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Converts various types into string representations. 
+ */ + +import { isNumber, isString } from '../common/core'; +import { assertNever } from '../common/debug'; +import { stripFileExtension } from '../common/pathUtils'; +import { convertOffsetToPosition } from '../common/positionUtils'; +import { Uri } from '../common/uri/uri'; +import { ParseNode, ParseNodeType, isExpressionNode } from '../parser/parseNodes'; +import { AbsoluteModuleDescriptor } from './analyzerFileInfo'; +import * as AnalyzerNodeInfo from './analyzerNodeInfo'; +import { Declaration, DeclarationType } from './declaration'; +import * as ParseTreeUtils from './parseTreeUtils'; +import { Symbol } from './symbol'; +import { OverloadedType, Type, TypeBase, TypeCategory } from './types'; + +const separatorRegExp = /[\\/]/g; + +export type PrintableType = ParseNode | Declaration | Symbol | Type | undefined; + +export interface TracePrinter { + print(o: PrintableType): string; + printFileOrModuleName(fileUriOrModule: Uri | AbsoluteModuleDescriptor): string; +} + +export function createTracePrinter(roots: Uri[], includeRoots: boolean = false): TracePrinter { + function wrap(value: string | undefined, ch = "'") { + return value ? `${ch}${value}${ch}` : ''; + } + + // Sort roots in desc order so that we compare longer path first + // when getting relative path. + // ex) d:/root/.env/lib/site-packages, d:/root/.env + roots = roots.sort((a, b) => a.key.localeCompare(b.key)).reverse(); + + function printFileOrModuleName(fileUriOrModule: Uri | AbsoluteModuleDescriptor | undefined) { + if (fileUriOrModule) { + if (Uri.is(fileUriOrModule)) { + for (const root of roots) { + if (fileUriOrModule.isChild(root)) { + const subFile = root.getRelativePath(fileUriOrModule); + const stripped = stripFileExtension(subFile!).replace(separatorRegExp, '.'); + return includeRoots ? 
`${root.fileName}:${stripped}` : stripped; + } + } + + return fileUriOrModule.toUserVisibleString(); + } else if (fileUriOrModule.nameParts) { + return fileUriOrModule.nameParts.join('.'); + } + } + return ''; + } + + function printType(type: Type | undefined): string { + if (type) { + switch (type.category) { + case TypeCategory.Any: + return `Any ${wrap(type.props?.typeAliasInfo?.shared.fullName)}`; + + case TypeCategory.Class: + if (TypeBase.isInstantiable(type)) { + return `Class '${type.shared.name}' (${type.shared.moduleName})`; + } else { + return `Object '${type.shared.name}' (${type.shared.moduleName})`; + } + + case TypeCategory.Function: + return `Function '${type.shared.name}' (${type.shared.moduleName})`; + + case TypeCategory.Module: + return `Module '${type.priv.moduleName}' (${type.priv.moduleName})`; + + case TypeCategory.Never: + return `Never ${wrap(type.props?.typeAliasInfo?.shared.fullName)}`; + + case TypeCategory.Overloaded: + return `Overloaded [${OverloadedType.getOverloads(type) + .map((o) => wrap(printType(o), '"')) + .join(',')}]`; + + case TypeCategory.TypeVar: + return `TypeVar '${type.shared.name}' ${wrap(type.props?.typeAliasInfo?.shared.fullName)}`; + + case TypeCategory.Unbound: + return `Unbound ${wrap(type.props?.typeAliasInfo?.shared.fullName)}`; + + case TypeCategory.Union: + return `Union [${type.priv.subtypes.map((o) => wrap(printType(o), '"')).join(',')}]`; + + case TypeCategory.Unknown: + return `Unknown ${wrap(type.props?.typeAliasInfo?.shared.fullName)}`; + + default: + assertNever(type); + } + } + return ''; + } + + function printSymbol(symbol: Symbol | undefined) { + if (symbol) { + if (symbol.hasDeclarations()) { + return `symbol ${printDeclaration(symbol.getDeclarations()[0])}`; + } + + return ``; + } + + return ''; + } + + function printDeclaration(decl: Declaration | undefined) { + if (decl) { + switch (decl.type) { + case DeclarationType.Alias: + return `Alias, ${printNode(decl.node)} 
(${printFileOrModuleName(decl.uri)})`; + + case DeclarationType.Class: + return `Class, ${printNode(decl.node)} (${printFileOrModuleName(decl.uri)})`; + + case DeclarationType.Function: + return `Function, ${printNode(decl.node)} (${printFileOrModuleName(decl.uri)})`; + + case DeclarationType.Intrinsic: + return `Intrinsic, ${printNode(decl.node)} ${decl.intrinsicType} (${printFileOrModuleName( + decl.uri + )})`; + + case DeclarationType.Param: + return `Param, ${printNode(decl.node)} (${printFileOrModuleName(decl.uri)})`; + + case DeclarationType.TypeParam: + return `TypeParam, ${printNode(decl.node)} (${printFileOrModuleName(decl.uri)})`; + + case DeclarationType.SpecialBuiltInClass: + return `SpecialBuiltInClass, ${printNode(decl.node)} (${printFileOrModuleName(decl.uri)})`; + + case DeclarationType.Variable: + return `Variable, ${printNode(decl.node)} (${printFileOrModuleName(decl.uri)})`; + + case DeclarationType.TypeAlias: + return `TypeAlias, ${printNode(decl.node)} (${printFileOrModuleName(decl.uri)})`; + + default: + assertNever(decl); + } + } + + return ''; + } + + function getFileInfo(node: ParseNode) { + while (node.nodeType !== ParseNodeType.Module && node.parent) { + node = node.parent; + } + + return node.nodeType === ParseNodeType.Module ? AnalyzerNodeInfo.getFileInfo(node) : undefined; + } + + function getText(value: string, max = 30) { + if (value.length < max) { + return value; + } + + return value.substring(0, max) + ' '; + } + + function printNode(node: ParseNode | undefined, printPath = false): string { + if (!node) { + return ''; + } + + let path = printPath ? 
`(${printFileOrModuleName(getFileInfo(node)?.fileUri)})` : ''; + + const fileInfo = getFileInfo(node); + if (fileInfo?.lines) { + const position = convertOffsetToPosition(node.start, fileInfo.lines); + path += ` [${position.line + 1}:${position.character + 1}]`; + } + + if (isExpressionNode(node)) { + return wrap(getText(ParseTreeUtils.printExpression(node)), '"') + ` ${path}`; + } + + switch (node.nodeType) { + case ParseNodeType.ImportAs: + return `importAs '${printNode(node.d.module)}' ${wrap( + node.d.alias ? printNode(node.d.alias) : '' + )} ${path}`; + + case ParseNodeType.ImportFrom: + return `importFrom [${node.d.imports.map((i) => wrap(printNode(i), '"')).join(',')}]`; + + case ParseNodeType.ImportFromAs: + return `ImportFromAs '${printNode(node.d.name)}' ${wrap( + node.d.alias ? printNode(node.d.alias) : '' + )} ${path}`; + + case ParseNodeType.Module: + return `module ${path}`; + + case ParseNodeType.Class: + return `class '${printNode(node.d.name)}' ${path}`; + + case ParseNodeType.Function: + return `function '${printNode(node.d.name)}' ${path}`; + + case ParseNodeType.ModuleName: + return `moduleName '${node.d.nameParts.map((n) => printNode(n)).join('.')}' ${path}`; + + case ParseNodeType.Argument: + return `argument '${node.d.name ? printNode(node.d.name) : 'N/A'}' ${path}`; + + case ParseNodeType.Parameter: + return `parameter '${node.d.name ? 
printNode(node.d.name) : 'N/A'}' ${path}`; + + default: + return `${ParseTreeUtils.printParseNodeType(node.nodeType)} ${path}`; + } + } + + function isNode(o: any): o is ParseNode { + const n = o as ParseNode; + return n && isNumber(n.nodeType); + } + + function isDeclaration(o: any): o is Declaration { + const d = o as Declaration; + return d && isNumber(d.type) && Uri.is(d.uri) && isString(d.moduleName); + } + + function isType(o: any): o is Type { + const t = o as Type; + return t && isNumber(t.category) && isNumber(t.flags); + } + + function print(o: PrintableType) { + if (!o) { + return ''; + } + + if (isNode(o)) { + return printNode(o, /* printPath */ true); + } + + if (isDeclaration(o)) { + return printDeclaration(o as Declaration); + } + + if (o instanceof Symbol) { + return printSymbol(o); + } + + if (isType(o)) { + return printType(o as Type); + } + + // Do nothing, we can't print it. + return ''; + } + + return { + print: print, + printFileOrModuleName: printFileOrModuleName, + }; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/tuples.ts b/python-parser/packages/pyright-internal/src/analyzer/tuples.ts new file mode 100644 index 00000000..f6e68468 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/tuples.ts @@ -0,0 +1,636 @@ +/* + * tuples.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides special-case logic for type analysis of tuples. 
+ */ + +import { DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { LocAddendum, LocMessage } from '../localization/localize'; +import { ExpressionNode, ParseNodeType, SliceNode, TupleNode } from '../parser/parseNodes'; +import { addConstraintsForExpectedType } from './constraintSolver'; +import { ConstraintTracker } from './constraintTracker'; +import { getTypeVarScopesForNode } from './parseTreeUtils'; +import { AssignTypeFlags, EvalFlags, maxInferredContainerDepth, TypeEvaluator, TypeResult } from './typeEvaluatorTypes'; +import { + AnyType, + ClassType, + combineTypes, + isAny, + isAnyOrUnknown, + isClassInstance, + isInstantiableClass, + isTypeVar, + isTypeVarTuple, + isUnion, + isUnpackedTypeVar, + isUnpackedTypeVarTuple, + TupleTypeArg, + Type, + TypeVarType, + UnknownType, +} from './types'; +import { + convertToInstance, + doForEachSubtype, + getContainerDepth, + InferenceContext, + isLiteralType, + isTupleClass, + isTupleGradualForm, + makeInferenceContext, + specializeTupleClass, + transformPossibleRecursiveTypeAlias, +} from './typeUtils'; + +// If a tuple expression with no declared type contains a large number +// of elements, it can cause performance issues. This value limits the +// number of elements that will be included in the tuple type before +// we default to tuple[Unknown, ...]. 
+const maxInferredTupleEntryCount = 256; + +export function makeTupleObject(evaluator: TypeEvaluator, typeArgs: TupleTypeArg[], isUnpacked = false) { + const tupleClass = evaluator.getTupleClassType(); + if (tupleClass && isInstantiableClass(tupleClass)) { + return convertToInstance(specializeTupleClass(tupleClass, typeArgs, /* isTypeArgExplicit */ true, isUnpacked)); + } + + return UnknownType.create(); +} + +export function getTypeOfTuple( + evaluator: TypeEvaluator, + node: TupleNode, + flags: EvalFlags, + inferenceContext?: InferenceContext | undefined +): TypeResult { + if ((flags & EvalFlags.TypeExpression) !== 0 && node.parent?.nodeType !== ParseNodeType.Argument) { + // This is allowed inside of an index trailer, specifically + // to support Tuple[()], which is the documented way to annotate + // a zero-length tuple. + const diag = new DiagnosticAddendum(); + diag.addMessage(LocAddendum.useTupleInstead()); + evaluator.addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.tupleInAnnotation() + diag.getString(), + node + ); + + return { type: UnknownType.create() }; + } + + if ((flags & EvalFlags.InstantiableType) !== 0 && node.d.items.length === 0 && !inferenceContext) { + return { type: makeTupleObject(evaluator, []), isEmptyTupleShorthand: true }; + } + + flags &= ~(EvalFlags.TypeExpression | EvalFlags.StrLiteralAsType | EvalFlags.InstantiableType); + + // If the expected type is a union, recursively call for each of the subtypes + // to find one that matches. 
+ let expectedType = inferenceContext?.expectedType; + let expectedTypeContainsAny = inferenceContext && isAny(inferenceContext.expectedType); + + if (inferenceContext && isUnion(inferenceContext.expectedType)) { + let matchingSubtype: Type | undefined; + + doForEachSubtype( + inferenceContext.expectedType, + (subtype) => { + if (isAny(subtype)) { + expectedTypeContainsAny = true; + } + + if (!matchingSubtype) { + const subtypeResult = evaluator.useSpeculativeMode(node, () => { + return getTypeOfTupleWithContext(evaluator, node, flags, makeInferenceContext(subtype)); + }); + + if (subtypeResult && evaluator.assignType(subtype, subtypeResult.type)) { + matchingSubtype = subtype; + } + } + }, + /* sortSubtypes */ true + ); + + expectedType = matchingSubtype; + } + + let expectedTypeDiagAddendum: DiagnosticAddendum | undefined; + if (expectedType) { + const result = getTypeOfTupleWithContext(evaluator, node, flags, makeInferenceContext(expectedType)); + + if (result && !result.typeErrors) { + return result; + } + + expectedTypeDiagAddendum = result?.expectedTypeDiagAddendum; + } + + const typeResult = getTypeOfTupleInferred(evaluator, node, flags); + + // If there was an expected type of Any, replace the resulting type + // with Any rather than return a type with unknowns. + if (expectedTypeContainsAny) { + typeResult.type = AnyType.create(); + } + + return { ...typeResult, expectedTypeDiagAddendum }; +} + +export function getTypeOfTupleWithContext( + evaluator: TypeEvaluator, + node: TupleNode, + flags: EvalFlags, + inferenceContext: InferenceContext +): TypeResult | undefined { + inferenceContext.expectedType = transformPossibleRecursiveTypeAlias(inferenceContext.expectedType); + if (!isClassInstance(inferenceContext.expectedType)) { + return undefined; + } + + const tupleClass = evaluator.getTupleClassType(); + if (!tupleClass || !isInstantiableClass(tupleClass)) { + return undefined; + } + + // Build an array of expected types. 
+ let expectedTypes: Type[] = []; + + if (isTupleClass(inferenceContext.expectedType) && inferenceContext.expectedType.priv.tupleTypeArgs) { + expectedTypes = inferenceContext.expectedType.priv.tupleTypeArgs.map((t) => + transformPossibleRecursiveTypeAlias(t.type) + ); + const unboundedIndex = inferenceContext.expectedType.priv.tupleTypeArgs.findIndex((t) => t.isUnbounded); + if (unboundedIndex >= 0) { + if (expectedTypes.length > node.d.items.length) { + expectedTypes.splice(unboundedIndex, 1); + } else { + while (expectedTypes.length < node.d.items.length) { + expectedTypes.splice(unboundedIndex, 0, expectedTypes[unboundedIndex]); + } + } + } + } else { + const tupleConstraints = new ConstraintTracker(); + if ( + !addConstraintsForExpectedType( + evaluator, + ClassType.cloneAsInstance(tupleClass), + inferenceContext.expectedType, + tupleConstraints, + getTypeVarScopesForNode(node), + node.start + ) + ) { + return undefined; + } + + const specializedTuple = evaluator.solveAndApplyConstraints(tupleClass, tupleConstraints) as ClassType; + if (!specializedTuple.priv.typeArgs || specializedTuple.priv.typeArgs.length !== 1) { + return undefined; + } + + const homogenousType = transformPossibleRecursiveTypeAlias(specializedTuple.priv.typeArgs[0]); + for (let i = 0; i < node.d.items.length; i++) { + expectedTypes.push(homogenousType); + } + } + + const entryTypeResults = node.d.items.map((expr, index) => + evaluator.getTypeOfExpression( + expr, + flags | EvalFlags.StripTupleLiterals, + makeInferenceContext( + index < expectedTypes.length ? expectedTypes[index] : undefined, + inferenceContext.isTypeIncomplete + ) + ) + ); + const isIncomplete = entryTypeResults.some((result) => result.isIncomplete); + + // Copy any expected type diag addenda for precision error reporting. 
+ let expectedTypeDiagAddendum: DiagnosticAddendum | undefined; + if (entryTypeResults.some((result) => result.expectedTypeDiagAddendum)) { + expectedTypeDiagAddendum = new DiagnosticAddendum(); + entryTypeResults.forEach((result) => { + if (result.expectedTypeDiagAddendum) { + expectedTypeDiagAddendum!.addAddendum(result.expectedTypeDiagAddendum); + } + }); + } + + // If the tuple contains a very large number of entries, it's probably + // generated code. If we encounter type errors, don't bother building + // the full tuple type. + let type: Type; + if (node.d.items.length > maxInferredTupleEntryCount && entryTypeResults.some((result) => result.typeErrors)) { + type = makeTupleObject(evaluator, [{ type: UnknownType.create(), isUnbounded: true }]); + } else { + type = makeTupleObject( + evaluator, + evaluator.buildTupleTypesList(entryTypeResults, /* stripLiterals */ false, /* convertModule */ false) + ); + } + + return { type, expectedTypeDiagAddendum, isIncomplete }; +} + +export function getTypeOfTupleInferred(evaluator: TypeEvaluator, node: TupleNode, flags: EvalFlags): TypeResult { + const entryTypeResults = node.d.items.map((expr) => + evaluator.getTypeOfExpression(expr, flags | EvalFlags.StripTupleLiterals) + ); + const isIncomplete = entryTypeResults.some((result) => result.isIncomplete); + + // If the tuple contains a very large number of entries, it's probably + // generated code. Rather than taking the time to evaluate every entry, + // simply return an unknown type in this case. 
+ if (node.d.items.length > maxInferredTupleEntryCount) { + return { type: makeTupleObject(evaluator, [{ type: UnknownType.create(), isUnbounded: true }]) }; + } + + const type = makeTupleObject( + evaluator, + evaluator.buildTupleTypesList( + entryTypeResults, + (flags & EvalFlags.StripTupleLiterals) !== 0, + /* convertModule */ true + ) + ); + + if (isIncomplete) { + if (getContainerDepth(type) > maxInferredContainerDepth) { + return { type: UnknownType.create() }; + } + } + + return { type, isIncomplete }; +} + +// Assigns the source type arguments to the dest type arguments. It assumed +// the the caller has already verified that both the dest and source are +// tuple classes. +export function assignTupleTypeArgs( + evaluator: TypeEvaluator, + destType: ClassType, + srcType: ClassType, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + flags: AssignTypeFlags, + recursionCount: number +) { + const destTypeArgs = [...(destType.priv.tupleTypeArgs ?? [])]; + const srcTypeArgs = [...(srcType.priv.tupleTypeArgs ?? [])]; + + if (adjustTupleTypeArgs(evaluator, destTypeArgs, srcTypeArgs, flags)) { + for (let argIndex = 0; argIndex < srcTypeArgs.length; argIndex++) { + const entryDiag = diag?.createAddendum(); + const destArgType = destTypeArgs[argIndex].type; + const srcArgType = srcTypeArgs[argIndex].type; + + // Handle the special case where the dest is a TypeVarTuple + // and the source is a `*tuple[Any, ...]`. This is allowed. 
+ if ( + isTypeVarTuple(destArgType) && + destArgType.priv.isUnpacked && + !destArgType.priv.isInUnion && + isTupleGradualForm(srcArgType) + ) { + return true; + } + + if ( + !evaluator.assignType( + destArgType, + srcArgType, + entryDiag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + if (entryDiag) { + entryDiag.addMessage( + LocAddendum.tupleEntryTypeMismatch().format({ + entry: argIndex + 1, + }) + ); + } + return false; + } + } + } else { + const isDestIndeterminate = destTypeArgs.some((t) => t.isUnbounded || isTypeVarTuple(t.type)); + + if (srcTypeArgs.some((t) => t.isUnbounded || isTypeVarTuple(t.type))) { + if (isDestIndeterminate) { + diag?.addMessage( + LocAddendum.tupleSizeIndeterminateSrcDest().format({ + expected: destTypeArgs.length - 1, + }) + ); + } else { + diag?.addMessage( + LocAddendum.tupleSizeIndeterminateSrc().format({ + expected: destTypeArgs.length, + }) + ); + } + } else { + if (isDestIndeterminate) { + diag?.addMessage( + LocAddendum.tupleSizeMismatchIndeterminateDest().format({ + expected: destTypeArgs.length - 1, + received: srcTypeArgs.length, + }) + ); + } else { + diag?.addMessage( + LocAddendum.tupleSizeMismatch().format({ + expected: destTypeArgs.length, + received: srcTypeArgs.length, + }) + ); + } + } + + return false; + } + + return true; +} + +// Adjusts the source and/or dest type arguments list to attempt to match +// the length of the src type arguments list if the dest or source contain +// entries with indeterminate length or unpacked TypeVarTuple entries. +// It returns true if the source is potentially compatible with the dest +// type, false otherwise. 
export function adjustTupleTypeArgs(
    evaluator: TypeEvaluator,
    destTypeArgs: TupleTypeArg[],
    srcTypeArgs: TupleTypeArg[],
    flags: AssignTypeFlags
): boolean {
    // NOTE: both destTypeArgs and srcTypeArgs are mutated in place (via splice)
    // so the caller observes the adjusted lists after this call returns.
    const destUnboundedOrVariadicIndex = destTypeArgs.findIndex(
        (t) => t.isUnbounded || isUnpackedTypeVarTuple(t.type) || isUnpackedTypeVar(t.type)
    );
    const srcUnboundedIndex = srcTypeArgs.findIndex((t) => t.isUnbounded);
    const srcVariadicIndex = srcTypeArgs.findIndex((t) => isUnpackedTypeVarTuple(t.type) || isUnpackedTypeVar(t.type));

    if (srcUnboundedIndex >= 0) {
        if (isAnyOrUnknown(srcTypeArgs[srcUnboundedIndex].type)) {
            // If the source contains an unbounded Any, expand it to match the dest length.
            const typeToReplicate = srcTypeArgs.length > 0 ? srcTypeArgs[srcUnboundedIndex].type : AnyType.create();

            while (srcTypeArgs.length < destTypeArgs.length) {
                srcTypeArgs.splice(srcUnboundedIndex, 0, { type: typeToReplicate, isUnbounded: true });
            }

            // If the source is now longer than the dest, drop one copy of the
            // unbounded entry so the lengths can line up.
            if (srcTypeArgs.length > destTypeArgs.length) {
                srcTypeArgs.splice(srcUnboundedIndex, 1);
            }
        } else if (destUnboundedOrVariadicIndex < 0) {
            // If the source contains an unbounded type but the dest does not, it's incompatible.
            return false;
        }
    }

    // If the dest contains an unbounded Any, expand it to match the source length.
    if (
        destUnboundedOrVariadicIndex >= 0 &&
        destTypeArgs[destUnboundedOrVariadicIndex].isUnbounded &&
        isAnyOrUnknown(destTypeArgs[destUnboundedOrVariadicIndex].type)
    ) {
        while (destTypeArgs.length < srcTypeArgs.length) {
            destTypeArgs.splice(destUnboundedOrVariadicIndex, 0, destTypeArgs[destUnboundedOrVariadicIndex]);
        }
    }

    // Remove any optional parameters from the end of the two lists until the lengths match.
    while (srcTypeArgs.length > destTypeArgs.length && srcTypeArgs[srcTypeArgs.length - 1].isOptional) {
        srcTypeArgs.splice(srcTypeArgs.length - 1, 1);
    }

    while (destTypeArgs.length > srcTypeArgs.length && destTypeArgs[destTypeArgs.length - 1].isOptional) {
        destTypeArgs.splice(destTypeArgs.length - 1, 1);
    }

    // Number of source entries that map onto the dest's single
    // unbounded/variadic slot (if any); >= 0 means the mapping is possible.
    const srcArgsToCapture = srcTypeArgs.length - destTypeArgs.length + 1;
    let skipAdjustSrc = false;

    // If we're doing reverse type mappings and the source contains a TypeVarTuple,
    // we need to adjust the dest so the reverse type mapping assignment
    // can be performed.
    if ((flags & AssignTypeFlags.Contravariant) !== 0) {
        const destArgsToCapture = destTypeArgs.length - srcTypeArgs.length + 1;

        if (srcVariadicIndex >= 0 && destArgsToCapture >= 0) {
            // If the only removed arg from the dest type args is itself a variadic,
            // don't bother adjusting it.
            // NOTE(review): destTypeArgs is indexed with srcVariadicIndex here
            // (and in the splices below) — appears intentional for the reverse
            // mapping, but worth confirming against upstream.
            const skipAdjustment = destArgsToCapture === 1 && isTypeVarTuple(destTypeArgs[srcVariadicIndex].type);
            const tupleClass = evaluator.getTupleClassType();

            if (!skipAdjustment && tupleClass && isInstantiableClass(tupleClass)) {
                const removedArgs = destTypeArgs.splice(srcVariadicIndex, destArgsToCapture);

                // Package up the remaining type arguments into a tuple object.
                const variadicTuple = ClassType.cloneAsInstance(
                    specializeTupleClass(
                        tupleClass,
                        removedArgs.map((typeArg) => {
                            return {
                                type: typeArg.type,
                                isUnbounded: typeArg.isUnbounded,
                                isOptional: typeArg.isOptional,
                            };
                        }),
                        /* isTypeArgExplicit */ true,
                        /* isUnpacked */ true
                    )
                );

                destTypeArgs.splice(srcVariadicIndex, 0, {
                    type: variadicTuple,
                    isUnbounded: false,
                });
            }

            skipAdjustSrc = true;
        }
    } else {
        if (destUnboundedOrVariadicIndex >= 0 && srcArgsToCapture >= 0) {
            // If the dest contains a variadic element, determine which source
            // args map to this element and package them up into an unpacked tuple.
            if (isTypeVarTuple(destTypeArgs[destUnboundedOrVariadicIndex].type)) {
                const tupleClass = evaluator.getTupleClassType();

                if (tupleClass && isInstantiableClass(tupleClass)) {
                    const removedArgs = srcTypeArgs.splice(destUnboundedOrVariadicIndex, srcArgsToCapture);

                    let variadicTuple: Type;

                    // If we're left with a single unpacked variadic type var, there's no
                    // need to wrap it in a nested tuple.
                    if (removedArgs.length === 1 && isUnpackedTypeVarTuple(removedArgs[0].type)) {
                        variadicTuple = removedArgs[0].type;
                    } else {
                        // Package up the remaining type arguments into a tuple object.
                        variadicTuple = ClassType.cloneAsInstance(
                            specializeTupleClass(
                                tupleClass,
                                removedArgs.map((typeArg) => {
                                    return {
                                        type: typeArg.type,
                                        isUnbounded: typeArg.isUnbounded,
                                        isOptional: typeArg.isOptional,
                                    };
                                }),
                                /* isTypeArgExplicit */ true,
                                /* isUnpacked */ true
                            )
                        );
                    }

                    srcTypeArgs.splice(destUnboundedOrVariadicIndex, 0, {
                        type: variadicTuple,
                        isUnbounded: false,
                    });
                }

                skipAdjustSrc = true;
            }
        }
    }

    if (!skipAdjustSrc && destUnboundedOrVariadicIndex >= 0 && srcArgsToCapture >= 0) {
        // If possible, package up the source entries that correspond to
        // the dest unbounded tuple. This isn't possible if the source contains
        // an unbounded tuple outside of this range.
        if (
            srcUnboundedIndex < 0 ||
            (srcUnboundedIndex >= destUnboundedOrVariadicIndex &&
                srcUnboundedIndex < destUnboundedOrVariadicIndex + srcArgsToCapture)
        ) {
            const removedArgTypes = srcTypeArgs.splice(destUnboundedOrVariadicIndex, srcArgsToCapture).map((t) => {
                // An unpacked TypeVarTuple captured into the combined entry is
                // re-tagged as "in union" form before combining.
                if (isTypeVar(t.type) && isUnpackedTypeVarTuple(t.type)) {
                    return TypeVarType.cloneForUnpacked(t.type, /* isInUnion */ true);
                }
                return t.type;
            });

            srcTypeArgs.splice(destUnboundedOrVariadicIndex, 0, {
                type: removedArgTypes.length > 0 ? combineTypes(removedArgTypes) : AnyType.create(),
                isUnbounded: false,
            });
        }
    }

    // Compatible only if the adjusted lists ended up the same length.
    return destTypeArgs.length === srcTypeArgs.length;
}

// Given a tuple type and a slice expression, determines the resulting
// type if it can be determined. If not, it returns undefined.
// Start/end bounds are resolved by getTupleSliceParam, which requires the
// slice operands to be literal ints; step values are not supported.
export function getSlicedTupleType(
    evaluator: TypeEvaluator,
    tupleType: ClassType,
    sliceNode: SliceNode
): Type | undefined {
    // We don't handle step values.
    if (sliceNode.d.stepValue || !tupleType.priv.tupleTypeArgs) {
        return undefined;
    }

    const tupleTypeArgs = tupleType.priv.tupleTypeArgs;
    const startValue = getTupleSliceParam(evaluator, sliceNode.d.startValue, 0, tupleTypeArgs);
    const endValue = getTupleSliceParam(evaluator, sliceNode.d.endValue, tupleTypeArgs.length, tupleTypeArgs);

    if (startValue === undefined || endValue === undefined || endValue < startValue) {
        return undefined;
    }

    const slicedTypeArgs = tupleTypeArgs.slice(startValue, endValue);
    return ClassType.cloneAsInstance(specializeTupleClass(tupleType, slicedTypeArgs));
}

// If the type is a fixed-length tuple instance and one or more of the element types
// is a union, this function expands the tuple into a union of tuples where each
// element is a union of the corresponding element types. This is done for all
// element combinations until the total number of tuples exceeds maxExpansion,
// at which point the function returns undefined.
+export function expandTuple(tupleType: ClassType, maxExpansion: number): Type[] | undefined { + if ( + !isTupleClass(tupleType) || + !tupleType.priv.tupleTypeArgs || + tupleType.priv.tupleTypeArgs.some((typeArg) => typeArg.isUnbounded || isTypeVarTuple(typeArg.type)) + ) { + return undefined; + } + + let typesToCombine: ClassType[] = [tupleType]; + let index = 0; + + while (index < tupleType.priv.tupleTypeArgs.length) { + const elemType = tupleType.priv.tupleTypeArgs[index].type; + if (isUnion(elemType)) { + const newTypesToCombine: ClassType[] = []; + + for (const typeToCombine of typesToCombine) { + doForEachSubtype(elemType, (subtype) => { + const newTypeArgs = [...typeToCombine.priv.tupleTypeArgs!]; + newTypeArgs[index] = { type: subtype, isUnbounded: false }; + newTypesToCombine.push(ClassType.cloneAsInstance(specializeTupleClass(typeToCombine, newTypeArgs))); + }); + } + typesToCombine = newTypesToCombine; + } + + if (typesToCombine.length > maxExpansion) { + return undefined; + } + + index++; + } + + return typesToCombine.length === 1 ? 
undefined : typesToCombine; +} + +function getTupleSliceParam( + evaluator: TypeEvaluator, + expression: ExpressionNode | undefined, + defaultValue: number, + tupleTypeArgs: TupleTypeArg[] +): number | undefined { + let value = defaultValue; + + if (expression) { + const valType = evaluator.getTypeOfExpression(expression).type; + if (!isClassInstance(valType) || !ClassType.isBuiltIn(valType, 'int') || !isLiteralType(valType)) { + return undefined; + } + + value = valType.priv.literalValue as number; + const unboundedIndex = tupleTypeArgs.findIndex( + (typeArg) => typeArg.isUnbounded || isTypeVarTuple(typeArg.type) + ); + + if (value < 0) { + value = tupleTypeArgs.length + value; + if (unboundedIndex >= 0 && value <= unboundedIndex) { + return undefined; + } else if (value < 0) { + return 0; + } + } else { + if (unboundedIndex >= 0 && value > unboundedIndex) { + return undefined; + } else if (value > tupleTypeArgs.length) { + return tupleTypeArgs.length; + } + } + } + + return value; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/typeCacheUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/typeCacheUtils.ts new file mode 100644 index 00000000..c893f8cf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/typeCacheUtils.ts @@ -0,0 +1,253 @@ +/* + * typeCacheUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Utilities for managing type caches. + */ + +import { assert } from '../common/debug'; +import { ParseNode } from '../parser/parseNodes'; +import * as ParseTreeUtils from './parseTreeUtils'; +import { isTypeSame, Type } from './types'; + +// Define an interface to track speculative entries that need to +// be cleaned up when they go out of scope. 
+interface SpeculativeEntry { + cache: Map; + id: number; +} + +interface SpeculativeContext { + speculativeRootNode: ParseNode; + entriesToUndo: SpeculativeEntry[]; + dependentType: Type | undefined; + allowDiagnostics?: boolean; +} + +interface DependentType { + speculativeRootNode: ParseNode; + dependentType: Type; +} + +export interface TypeResult { + type: Type; + isIncomplete?: boolean; +} + +export interface SpeculativeTypeEntry { + typeResult: TypeResult; + expectedType: Type | undefined; + incompleteGenerationCount: number; + dependentTypes?: DependentType[]; +} + +export interface SpeculativeModeOptions { + // If specified, the type cached speculative result depends on + // this dependent type. + dependentType?: Type; + + // Normally, diagnostics are suppressed for nodes under + // a speculative root, but this can be overridden by specifying + // this option. + allowDiagnostics?: boolean; +} + +// This class maintains a stack of "speculative type contexts". When +// a context is popped off the stack, all of the speculative type cache +// entries that were created within that context are removed from the +// corresponding type caches because they are no longer valid. +// The tracker also also contains a map of "speculative types" that are +// contextually evaluated based on an "expected type" and potentially +// one or more "dependent types". The "expected type" applies in cases +// where the speculative root node is being evaluated with bidirectional +// type inference. Dependent types apply in cases where the type of +// many subnodes depends on the expected type of a parent node, as in the +// case of lambda type inference. 
+export class SpeculativeTypeTracker { + private _speculativeContextStack: SpeculativeContext[] = []; + private _speculativeTypeCache = new Map(); + private _activeDependentTypes: DependentType[] = []; + + enterSpeculativeContext(speculativeRootNode: ParseNode, options?: SpeculativeModeOptions) { + this._speculativeContextStack.push({ + speculativeRootNode, + entriesToUndo: [], + dependentType: options?.dependentType, + allowDiagnostics: options?.allowDiagnostics, + }); + + // Retain a list of active dependent types. This information is already + // contained within the speculative context stack, but we retain a copy + // in this alternate form for performance reasons. + if (options?.dependentType) { + this._activeDependentTypes.push({ + speculativeRootNode, + dependentType: options.dependentType, + }); + } + } + + leaveSpeculativeContext() { + assert(this._speculativeContextStack.length > 0); + const context = this._speculativeContextStack.pop(); + + if (context?.dependentType) { + assert(this._activeDependentTypes.length > 0); + this._activeDependentTypes.pop(); + } + + // Delete all of the speculative type cache entries + // that were tracked in this context. 
+ context!.entriesToUndo.forEach((entry) => { + entry.cache.delete(entry.id); + }); + } + + isSpeculative(node: ParseNode | undefined, ignoreIfDiagnosticsAllowed = false) { + if (this._speculativeContextStack.length === 0) { + return false; + } + + if (!node) { + return true; + } + + for (let i = this._speculativeContextStack.length - 1; i >= 0; i--) { + const stackEntry = this._speculativeContextStack[i]; + if (ParseTreeUtils.isNodeContainedWithin(node, stackEntry.speculativeRootNode)) { + if (!ignoreIfDiagnosticsAllowed || !stackEntry.allowDiagnostics) { + return true; + } + } + } + + return false; + } + + trackEntry(cache: Map, id: number) { + const stackSize = this._speculativeContextStack.length; + if (stackSize > 0) { + this._speculativeContextStack[stackSize - 1].entriesToUndo.push({ + cache, + id, + }); + } + } + + // Temporarily disables speculative mode, clearing the stack + // of speculative contexts. It returns the stack so the caller + // can later restore it by calling enableSpeculativeMode. + disableSpeculativeMode() { + const stack = this._speculativeContextStack; + this._speculativeContextStack = []; + return stack; + } + + enableSpeculativeMode(stack: SpeculativeContext[]) { + assert(this._speculativeContextStack.length === 0); + this._speculativeContextStack = stack; + } + + addSpeculativeType( + node: ParseNode, + typeResult: TypeResult, + incompleteGenerationCount: number, + expectedType: Type | undefined + ) { + assert(this._speculativeContextStack.length > 0); + + const maxCacheEntriesPerNode = 8; + let cacheEntries = this._speculativeTypeCache.get(node.id); + + if (!cacheEntries) { + cacheEntries = []; + } else { + cacheEntries = cacheEntries.filter((entry) => { + // Filter out any incomplete entries that no longer match the generation count. + // These are obsolete and cannot be used. 
+ if (entry.typeResult.isIncomplete && entry.incompleteGenerationCount !== incompleteGenerationCount) { + return false; + } + + // Filter out any entries that match the expected type of the + // new entry. The new entry replaces the old in this case. + if (expectedType) { + if (!entry.expectedType) { + return true; + } + return !isTypeSame(entry.expectedType, expectedType); + } + + return !!entry.expectedType; + }); + + // Don't allow the cache to grow too large. + if (cacheEntries.length >= maxCacheEntriesPerNode) { + cacheEntries.slice(1); + } + } + + // Add the new entry. + const newEntry: SpeculativeTypeEntry = { + typeResult, + expectedType, + incompleteGenerationCount, + }; + + if (this._activeDependentTypes.length > 0) { + newEntry.dependentTypes = Array.from(this._activeDependentTypes); + } + + cacheEntries.push(newEntry); + + this._speculativeTypeCache.set(node.id, cacheEntries); + } + + getSpeculativeType(node: ParseNode, expectedType: Type | undefined): SpeculativeTypeEntry | undefined { + if ( + this._speculativeContextStack.some((context) => + ParseTreeUtils.isNodeContainedWithin(node, context.speculativeRootNode) + ) + ) { + const entries = this._speculativeTypeCache.get(node.id); + if (entries) { + for (const entry of entries) { + if (!expectedType) { + if (!entry.expectedType && this._dependentTypesMatch(entry)) { + return entry; + } + } else if ( + entry.expectedType && + isTypeSame(expectedType, entry.expectedType) && + this._dependentTypesMatch(entry) + ) { + return entry; + } + } + } + } + + return undefined; + } + + // Determines whether a cache entry matches the current set of + // active dependent types. If not, the cache entry can't be used + // in the current context. + private _dependentTypesMatch(entry: SpeculativeTypeEntry): boolean { + const cachedDependentTypes = entry.dependentTypes ?? 
[]; + if (cachedDependentTypes.length !== this._activeDependentTypes.length) { + return false; + } + + return cachedDependentTypes.every((cachedDepType, index) => { + const activeDepType = this._activeDependentTypes[index]; + if (cachedDepType.speculativeRootNode !== activeDepType.speculativeRootNode) { + return false; + } + + return isTypeSame(cachedDepType.dependentType, activeDepType.dependentType); + }); + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/typeComplexity.ts b/python-parser/packages/pyright-internal/src/analyzer/typeComplexity.ts new file mode 100644 index 00000000..30f66b9d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/typeComplexity.ts @@ -0,0 +1,102 @@ +/* + * typeComplexity.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Routines that compute a "complexity score" for a type. This is used + * during constraint solving to pick a "best" type when multiple types + * meet the constraints. + */ + +import { AnyType, ClassType, isInstantiableClass, maxTypeRecursionCount, Type, TypeBase, TypeCategory } from './types'; + +// Returns a "score" for a type that captures the relative complexity +// of the type. Scores should all be between 0 and 1 where 0 means +// very simple and 1 means complex. This is a heuristic, so there's +// often no objectively correct answer. +export function getComplexityScoreForType(type: Type, recursionCount = 0): number { + if (recursionCount > maxTypeRecursionCount) { + return 1; + } + recursionCount++; + + switch (type.category) { + case TypeCategory.Unknown: + case TypeCategory.Any: { + return 0.5; + } + + case TypeCategory.TypeVar: { + // Assume type[T] is more complex than T. + return TypeBase.isInstantiable(type) ? 0.55 : 0.5; + } + + case TypeCategory.Function: + case TypeCategory.Overloaded: { + // Classes and unions should be preferred over functions, + // so make this relatively high (more than 0.75). 
+ return TypeBase.isInstantiable(type) ? 0.85 : 0.8; + } + + case TypeCategory.Unbound: + case TypeCategory.Never: + return 1.0; + + case TypeCategory.Union: { + let maxScore = 0; + + // If this union has a very large number of subtypes, don't bother + // accurately computing the score. Assume a fixed value. + if (type.priv.subtypes.length < 16) { + type.priv.subtypes.forEach((subtype) => { + const subtypeScore = getComplexityScoreForType(subtype, recursionCount); + maxScore = Math.max(maxScore, subtypeScore); + }); + } else { + maxScore = 0.5; + } + + return maxScore; + } + + case TypeCategory.Class: { + return getComplexityScoreForClass(type, recursionCount); + } + } + + // For all other types, return a score of 0. + return 0; +} + +function getComplexityScoreForClass(classType: ClassType, recursionCount: number): number { + let typeArgScoreSum = 0; + let typeArgCount = 0; + + if (classType.priv.tupleTypeArgs) { + classType.priv.tupleTypeArgs.forEach((typeArg) => { + typeArgScoreSum += getComplexityScoreForType(typeArg.type, recursionCount); + typeArgCount++; + }); + } else if (classType.priv.typeArgs) { + classType.priv.typeArgs.forEach((type) => { + typeArgScoreSum += getComplexityScoreForType(type, recursionCount); + typeArgCount++; + }); + } else if (classType.shared.typeParams) { + classType.shared.typeParams.forEach((type) => { + typeArgScoreSum += getComplexityScoreForType(AnyType.create(), recursionCount); + typeArgCount++; + }); + } + + const averageTypeArgComplexity = typeArgCount > 0 ? typeArgScoreSum / typeArgCount : 0; + let result = 0.5 + averageTypeArgComplexity * 0.25; + + // Assume type[T] is more complex than T. 
+ if (isInstantiableClass(classType)) { + result += 0.05; + } + + return result; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/typeDocStringUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/typeDocStringUtils.ts new file mode 100644 index 00000000..e4736c92 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/typeDocStringUtils.ts @@ -0,0 +1,412 @@ +/* + * typeDocStringUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Logic that obtains the doc string for types by looking + * at the declaration in the type stub, and if needed, in + * the source file. + */ + +import { + ClassDeclaration, + Declaration, + DeclarationBase, + FunctionDeclaration, + isClassDeclaration, + isFunctionDeclaration, + isSpecialBuiltInClassDeclaration, + isVariableDeclaration, + SpecialBuiltInClassDeclaration, + VariableDeclaration, +} from '../analyzer/declaration'; +import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; +import { isStubFile, SourceMapper } from '../analyzer/sourceMapper'; +import { + ClassType, + FunctionType, + isFunction, + isInstantiableClass, + isOverloaded, + ModuleType, + OverloadedType, + Type, + TypeCategory, +} from '../analyzer/types'; +import { addIfNotNull, appendArray } from '../common/collectionUtils'; +import { Uri } from '../common/uri/uri'; +import { ModuleNode, ParseNodeType } from '../parser/parseNodes'; +import { TypeEvaluator } from './typeEvaluatorTypes'; +import { + ClassIteratorFlags, + getClassIterator, + getClassMemberIterator, + isMaybeDescriptorInstance, + MemberAccessFlags, +} from './typeUtils'; + +export const DefaultClassIteratorFlagsForFunctions = + MemberAccessFlags.SkipObjectBaseClass | + MemberAccessFlags.SkipInstanceMembers | + MemberAccessFlags.SkipOriginalClass | + MemberAccessFlags.DeclaredTypesOnly; + +export function isInheritedFromBuiltin(type: FunctionType | OverloadedType, classType?: ClassType): boolean { + if 
(type.category === TypeCategory.Overloaded) { + const overloads = OverloadedType.getOverloads(type); + if (overloads.length === 0) { + return false; + } + type = overloads[0]; + } + + // Functions that are bound to a different type than where they + // were declared are inherited. + return ( + !!type.shared.methodClass && + ClassType.isBuiltIn(type.shared.methodClass) && + !!type.priv.boundToType && + !ClassType.isBuiltIn(type.priv.boundToType) + ); +} + +export function getFunctionDocStringInherited( + type: FunctionType, + resolvedDecl: Declaration | undefined, + sourceMapper: SourceMapper, + classType?: ClassType +) { + let docString: string | undefined; + + // Don't allow docs to be inherited from the builtins to other classes; + // they typically not helpful (and object's __init__ doc causes issues + // with our current docstring traversal). + if (!isInheritedFromBuiltin(type, classType) && resolvedDecl && isFunctionDeclaration(resolvedDecl)) { + docString = _getFunctionDocString(type, resolvedDecl, sourceMapper); + } + + // Search mro + if (!docString && classType) { + const funcName = type.shared.name; + const memberIterator = getClassMemberIterator(classType, funcName, DefaultClassIteratorFlagsForFunctions); + + for (const classMember of memberIterator) { + const decls = classMember.symbol.getDeclarations(); + if (decls.length > 0) { + const inheritedDecl = classMember.symbol.getDeclarations().slice(-1)[0]; + if (isFunctionDeclaration(inheritedDecl)) { + docString = _getFunctionDocStringFromDeclaration(inheritedDecl, sourceMapper); + if (docString) { + break; + } + } + } + } + } + + return docString || type.shared.docString; +} + +export function getOverloadedDocStringsInherited( + type: OverloadedType, + resolvedDecls: Declaration[], + sourceMapper: SourceMapper, + evaluator: TypeEvaluator, + classType?: ClassType +) { + let docStrings: string[] | undefined; + + // Don't allow docs to be inherited from the builtins to other classes; + // they typically not 
helpful (and object's __init__ doc causes issues + // with our current docstring traversal). + if (!isInheritedFromBuiltin(type, classType)) { + for (const resolvedDecl of resolvedDecls) { + docStrings = getOverloadedDocStrings(type, resolvedDecl, sourceMapper); + if (docStrings && docStrings.length > 0) { + return docStrings; + } + } + } + + // Search mro + const overloads = OverloadedType.getOverloads(type); + if (classType && overloads.length > 0) { + const funcName = overloads[0].shared.name; + const memberIterator = getClassMemberIterator(classType, funcName, DefaultClassIteratorFlagsForFunctions); + + for (const classMember of memberIterator) { + const inheritedDecl = classMember.symbol.getDeclarations().slice(-1)[0]; + const declType = evaluator.getTypeForDeclaration(inheritedDecl)?.type; + if (declType) { + docStrings = getOverloadedDocStrings(declType, inheritedDecl, sourceMapper); + if (docStrings && docStrings.length > 0) { + break; + } + } + } + } + + return docStrings ?? []; +} + +export function getPropertyDocStringInherited( + decl: FunctionDeclaration, + sourceMapper: SourceMapper, + evaluator: TypeEvaluator +) { + const enclosingClass = ParseTreeUtils.getEnclosingClass(decl.node.d.name, /* stopAtFunction */ false); + const classResults = enclosingClass ? 
evaluator.getTypeOfClass(enclosingClass) : undefined; + if (classResults) { + return _getPropertyDocStringInherited(decl, sourceMapper, evaluator, classResults.classType); + } + return undefined; +} + +export function getVariableInStubFileDocStrings(decl: VariableDeclaration, sourceMapper: SourceMapper) { + const docStrings: string[] = []; + if (!isStubFile(decl.uri)) { + return docStrings; + } + + for (const implDecl of sourceMapper.findDeclarations(decl)) { + if (isVariableDeclaration(implDecl) && !!implDecl.docString) { + docStrings.push(implDecl.docString); + } else if (isClassDeclaration(implDecl) || isFunctionDeclaration(implDecl)) { + // It is possible that the variable on the stub is not actually a variable on the corresponding py file. + // in that case, get the doc string from original symbol if possible. + const docString = getFunctionOrClassDeclDocString(implDecl); + if (docString) { + docStrings.push(docString); + } + } + } + + return docStrings; +} + +export function isBuiltInModule(uri: Uri | undefined) { + if (uri) { + return uri.getPath().includes('typeshed-fallback/stdlib'); + } + return false; +} + +export function getModuleDocStringFromModuleNodes(modules: ModuleNode[]): string | undefined { + for (const module of modules) { + if (module.d.statements) { + const docString = ParseTreeUtils.getDocString(module.d.statements); + if (docString) { + return docString; + } + } + } + + return undefined; +} + +export function getModuleDocStringFromUris(uris: Uri[], sourceMapper: SourceMapper) { + const modules: ModuleNode[] = []; + for (const uri of uris) { + if (isStubFile(uri)) { + addIfNotNull(modules, sourceMapper.getModuleNode(uri)); + } + + appendArray(modules, sourceMapper.findModules(uri)); + } + + return getModuleDocStringFromModuleNodes(modules); +} + +export function getModuleDocString( + type: ModuleType, + resolvedDecl: DeclarationBase | undefined, + sourceMapper: SourceMapper +) { + let docString = type.priv.docString; + if (!docString) { + 
const uri = resolvedDecl?.uri ?? type.priv.fileUri; + docString = getModuleDocStringFromUris([uri], sourceMapper); + } + + return docString; +} + +export function getClassDocString( + classType: ClassType, + resolvedDecl: Declaration | undefined, + sourceMapper: SourceMapper +) { + let docString = classType.shared.docString; + if (!docString && resolvedDecl && _isAnyClassDeclaration(resolvedDecl)) { + docString = isClassDeclaration(resolvedDecl) ? _getFunctionOrClassDeclsDocString([resolvedDecl]) : undefined; + if (!docString && resolvedDecl && isStubFile(resolvedDecl.uri)) { + for (const implDecl of sourceMapper.findDeclarations(resolvedDecl)) { + if (isVariableDeclaration(implDecl) && !!implDecl.docString) { + docString = implDecl.docString; + break; + } + + if (isClassDeclaration(implDecl) || isFunctionDeclaration(implDecl)) { + docString = getFunctionOrClassDeclDocString(implDecl); + break; + } + } + } + } + + if (!docString && resolvedDecl) { + const implDecls = sourceMapper.findClassDeclarationsByType(resolvedDecl.uri, classType); + if (implDecls) { + const classDecls = implDecls.filter((d) => isClassDeclaration(d)).map((d) => d); + docString = _getFunctionOrClassDeclsDocString(classDecls); + } + } + + return docString; +} + +export function getFunctionOrClassDeclDocString(decl: FunctionDeclaration | ClassDeclaration): string | undefined { + return ParseTreeUtils.getDocString(decl.node?.d.suite?.d.statements ?? 
[]); +} + +export function getVariableDocString( + decl: VariableDeclaration | undefined, + sourceMapper: SourceMapper +): string | undefined { + if (!decl) { + return undefined; + } + + if (decl.docString !== undefined) { + return decl.docString; + } else { + return getVariableInStubFileDocStrings(decl, sourceMapper).find((doc) => doc); + } +} + +export function getOverloadedDocStrings(type: Type, resolvedDecl: Declaration | undefined, sourceMapper: SourceMapper) { + if (!isOverloaded(type)) { + return undefined; + } + + const docStrings: string[] = []; + const overloads = OverloadedType.getOverloads(type); + const impl = OverloadedType.getImplementation(type); + + if (overloads.some((o) => o.shared.docString)) { + overloads.forEach((overload) => { + if (overload.shared.docString) { + docStrings.push(overload.shared.docString); + } + }); + } + + if (impl && isFunction(impl) && impl.shared.docString) { + docStrings.push(impl.shared.docString); + } + + if ( + docStrings.length === 0 && + resolvedDecl && + isStubFile(resolvedDecl.uri) && + isFunctionDeclaration(resolvedDecl) + ) { + const implDecls = sourceMapper.findFunctionDeclarations(resolvedDecl); + const docString = _getFunctionOrClassDeclsDocString(implDecls); + if (docString) { + docStrings.push(docString); + } + } + + return docStrings; +} + +function _getPropertyDocStringInherited( + decl: Declaration | undefined, + sourceMapper: SourceMapper, + evaluator: TypeEvaluator, + classType: ClassType +) { + if (!decl || !isFunctionDeclaration(decl)) { + return; + } + + const declaredType = evaluator.getTypeForDeclaration(decl)?.type; + if (!declaredType || !isMaybeDescriptorInstance(declaredType)) { + return; + } + + const fieldName = decl.node.nodeType === ParseNodeType.Function ? 
decl.node.d.name.d.value : undefined; + if (!fieldName) { + return; + } + + const classItr = getClassIterator(classType, ClassIteratorFlags.Default); + // Walk the inheritance list starting with the current class searching for docStrings + for (const [mroClass] of classItr) { + if (!isInstantiableClass(mroClass)) { + continue; + } + + const symbol = ClassType.getSymbolTable(mroClass).get(fieldName); + // Get both the setter and getter declarations + const decls = symbol?.getDeclarations(); + if (decls) { + for (const decl of decls) { + if (isFunctionDeclaration(decl)) { + const declaredType = evaluator.getTypeForDeclaration(decl)?.type; + if (declaredType && isMaybeDescriptorInstance(declaredType)) { + const docString = _getFunctionDocStringFromDeclaration(decl, sourceMapper); + if (docString) { + return docString; + } + } + } + } + } + } + + return; +} + +function _getFunctionDocString(type: Type, resolvedDecl: FunctionDeclaration | undefined, sourceMapper: SourceMapper) { + if (!isFunction(type)) { + return undefined; + } + + let docString = type.shared.docString; + if (!docString && resolvedDecl) { + docString = _getFunctionDocStringFromDeclaration(resolvedDecl, sourceMapper); + } + + if (!docString && type.shared.declaration) { + docString = _getFunctionDocStringFromDeclaration(type.shared.declaration, sourceMapper); + } + + return docString; +} + +function _getFunctionDocStringFromDeclaration(resolvedDecl: FunctionDeclaration, sourceMapper: SourceMapper) { + let docString = _getFunctionOrClassDeclsDocString([resolvedDecl]); + if (!docString && isStubFile(resolvedDecl.uri)) { + const implDecls = sourceMapper.findFunctionDeclarations(resolvedDecl); + docString = _getFunctionOrClassDeclsDocString(implDecls); + } + + return docString; +} + +function _getFunctionOrClassDeclsDocString(decls: FunctionDeclaration[] | ClassDeclaration[]): string | undefined { + for (const decl of decls) { + const docString = getFunctionOrClassDeclDocString(decl); + if (docString) { + 
return docString; + } + } + + return undefined; +} + +function _isAnyClassDeclaration(decl: Declaration): decl is ClassDeclaration | SpecialBuiltInClassDeclaration { + return isClassDeclaration(decl) || isSpecialBuiltInClassDeclaration(decl); +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/typeEvaluator.ts b/python-parser/packages/pyright-internal/src/analyzer/typeEvaluator.ts new file mode 100644 index 00000000..6cd8a1b8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/typeEvaluator.ts @@ -0,0 +1,28880 @@ +/* + * typeEvaluator.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Module that evaluates types of parse tree nodes within + * a program. + * + * Note: This is a gargantuan module - much larger than I would + * normally create. It is written this way primarily for performance, + * with the internal methods having access to the full closure of + * the createTypeEvaluator function. This is the same approach + * taken by the TypeScript compiler. 
+ */ + +import { CancellationToken } from 'vscode-languageserver'; + +import { invalidateTypeCacheIfCanceled, throwIfCancellationRequested } from '../common/cancellationUtils'; +import { appendArray } from '../common/collectionUtils'; +import { DiagnosticLevel } from '../common/configOptions'; +import { ConsoleInterface } from '../common/console'; +import { isThenable } from '../common/core'; +import { assert, assertNever, fail } from '../common/debug'; +import { DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { convertOffsetsToRange, convertOffsetToPosition } from '../common/positionUtils'; +import { + PythonVersion, + pythonVersion3_13, + pythonVersion3_14, + pythonVersion3_6, + pythonVersion3_7, + pythonVersion3_9, +} from '../common/pythonVersion'; +import { TextRange } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { LocAddendum, LocMessage, ParameterizedString } from '../localization/localize'; +import { + ArgCategory, + ArgumentNode, + AssignmentNode, + AugmentedAssignmentNode, + AwaitNode, + CallNode, + CaseNode, + ClassNode, + ComprehensionForIfNode, + ComprehensionNode, + ConstantNode, + DecoratorNode, + DictionaryNode, + ErrorExpressionCategory, + ExceptNode, + ExecutionScopeNode, + ExpressionNode, + FormatStringNode, + ForNode, + FunctionNode, + ImportAsNode, + ImportFromAsNode, + ImportFromNode, + IndexNode, + isExpressionNode, + LambdaNode, + ListNode, + MatchNode, + MemberAccessNode, + NameNode, + NumberNode, + ParamCategory, + ParameterNode, + ParseNode, + ParseNodeType, + SetNode, + SliceNode, + StringListNode, + StringNode, + TupleNode, + TypeAliasNode, + TypeAnnotationNode, + TypeParameterListNode, + TypeParameterNode, + TypeParameterScopeNode, + TypeParamKind, + UnpackNode, + WithItemNode, + YieldFromNode, + YieldNode, +} from '../parser/parseNodes'; +import { ParseOptions, Parser, ParseTextMode } from '../parser/parser'; +import { KeywordType, 
OperatorType, StringTokenFlags } from '../parser/tokenizerTypes'; +import { AnalyzerFileInfo, ImportLookup, isAnnotationEvaluationPostponed } from './analyzerFileInfo'; +import * as AnalyzerNodeInfo from './analyzerNodeInfo'; +import { CodeFlowAnalyzer, FlowNodeTypeOptions, FlowNodeTypeResult, getCodeFlowEngine } from './codeFlowEngine'; +import { + CodeFlowReferenceExpressionNode, + createKeyForReference, + FlowFlags, + FlowNode, + FlowWildcardImport, + isCodeFlowSupportedForReference, + wildcardImportReferenceKey, +} from './codeFlowTypes'; +import { ConstraintSolution } from './constraintSolution'; +import { + addConstraintsForExpectedType, + applySourceSolutionToConstraints, + assignTypeVar, + solveConstraints, + solveConstraintSet, +} from './constraintSolver'; +import { ConstraintSet, ConstraintTracker } from './constraintTracker'; +import { createFunctionFromConstructor, getBoundInitMethod, validateConstructorArgs } from './constructors'; +import { applyDataClassClassBehaviorOverrides, synthesizeDataClassMethods } from './dataClasses'; +import { + ClassDeclaration, + Declaration, + DeclarationType, + FunctionDeclaration, + isVariableDeclaration, + ModuleLoaderActions, + SpecialBuiltInClassDeclaration, + VariableDeclaration, +} from './declaration'; +import { + getDeclarationsWithUsesLocalNameRemoved, + getNameNodeForDeclaration, + resolveAliasDeclaration as resolveAliasDeclarationUtil, + ResolvedAliasInfo, + synthesizeAliasDeclaration, +} from './declarationUtils'; +import { + addOverloadsToFunctionType, + applyClassDecorator, + applyFunctionDecorator, + FunctionDecoratorInfo, + getDeprecatedMessageFromCall, + getFunctionInfoFromDecorators, +} from './decorators'; +import { + createEnumType, + getEnumAutoValueType, + getTypeOfEnumMember, + isDeclInEnumClass, + isEnumClassWithMembers, + isEnumMetaclass, +} from './enums'; +import { applyFunctionTransform } from './functionTransform'; +import { createNamedTupleType } from './namedTuples'; +import { + 
getTypeOfAugmentedAssignment, + getTypeOfBinaryOperation, + getTypeOfTernaryOperation, + getTypeOfUnaryOperation, +} from './operations'; +import { + getParamListDetails, + isParamSpecArgs, + isParamSpecKwargs, + ParamAssignmentTracker, + ParamKind, + ParamListDetails, + VirtualParamDetails, +} from './parameterUtils'; +import * as ParseTreeUtils from './parseTreeUtils'; +import { assignTypeToPatternTargets, checkForUnusedPattern, narrowTypeBasedOnPattern } from './patternMatching'; +import { assignProperty } from './properties'; +import { assignClassToProtocol, assignModuleToProtocol } from './protocols'; +import { Scope, ScopeType, SymbolWithScope } from './scope'; +import * as ScopeUtils from './scopeUtils'; +import { createSentinelType } from './sentinel'; +import { evaluateStaticBoolExpression } from './staticExpressions'; +import { indeterminateSymbolId, Symbol, SymbolFlags, SynthesizedTypeInfo } from './symbol'; +import { isConstantName, isPrivateName, isPrivateOrProtectedName } from './symbolNameUtils'; +import { getLastTypedDeclarationForSymbol, isEffectivelyClassVar } from './symbolUtils'; +import { assignTupleTypeArgs, expandTuple, getSlicedTupleType, getTypeOfTuple, makeTupleObject } from './tuples'; +import { SpeculativeModeOptions, SpeculativeTypeTracker } from './typeCacheUtils'; +import { + assignToTypedDict, + assignTypedDictToTypedDict, + createTypedDictType, + createTypedDictTypeInlined, + getTypedDictDictEquivalent, + getTypedDictMappingEquivalent, + getTypedDictMembersForClass, + getTypeOfIndexedTypedDict, + synthesizeTypedDictClassMethods, +} from './typedDicts'; +import { + AbstractSymbol, + Arg, + ArgResult, + ArgWithExpression, + AssignTypeFlags, + CallResult, + CallSignature, + CallSignatureInfo, + CallSiteEvaluationInfo, + ClassMemberLookup, + ClassTypeResult, + DeclaredSymbolTypeInfo, + EffectiveTypeResult, + EvalFlags, + EvaluatorUsage, + ExpectedTypeOptions, + ExpectedTypeResult, + FunctionTypeResult, + MagicMethodDeprecationInfo, + 
MapSubtypesOptions, + maxInferredContainerDepth, + maxSubtypesForInferredType, + MemberAccessDeprecationInfo, + PrefetchedTypes, + PrintTypeOptions, + Reachability, + ResolveAliasOptions, + SolveConstraintsOptions, + SymbolDeclInfo, + TypeEvaluator, + TypeResult, + TypeResultWithNode, + ValidateArgTypeParams, + ValidateTypeArgsOptions, +} from './typeEvaluatorTypes'; +import { enumerateLiteralsForType } from './typeGuards'; +import * as TypePrinter from './typePrinter'; +import { + AnyType, + ClassType, + ClassTypeFlags, + combineTypes, + DataClassBehaviors, + EnumLiteral, + findSubtype, + FunctionParam, + FunctionParamFlags, + FunctionType, + FunctionTypeFlags, + InheritanceChain, + isAny, + isAnyOrUnknown, + isClass, + isClassInstance, + isFunction, + isFunctionOrOverloaded, + isInstantiableClass, + isMethodType, + isModule, + isNever, + isOverloaded, + isParamSpec, + isPositionOnlySeparator, + isTypeSame, + isTypeVar, + isTypeVarTuple, + isUnbound, + isUnion, + isUnknown, + isUnpacked, + isUnpackedClass, + isUnpackedTypeVarTuple, + LiteralValue, + maxTypeRecursionCount, + ModuleType, + NeverType, + OverloadedType, + ParamSpecType, + removeFromUnion, + removeUnbound, + SentinelLiteral, + TupleTypeArg, + Type, + TypeAliasInfo, + TypeBase, + TypeCategory, + TypeCondition, + TypedDictEntries, + TypeVarKind, + TypeVarScopeId, + TypeVarScopeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UnionType, + UnknownType, + Variance, +} from './types'; +import { + addConditionToType, + addTypeVarsToListIfUnique, + applySolvedTypeVars, + ApplyTypeVarOptions, + areTypesSame, + buildSolutionFromSpecializedClass, + ClassMember, + combineSameSizedTuples, + combineTupleTypeArgs, + combineVariances, + computeMroLinearization, + containsAnyOrUnknown, + containsLiteralType, + convertToInstance, + convertToInstantiable, + convertTypeToParamSpecValue, + derivesFromAnyOrUnknown, + derivesFromClassRecursive, + derivesFromStdlibClass, + doForEachSubtype, + 
ensureSignaturesAreUnique, + explodeGenericClass, + getContainerDepth, + getDeclaredGeneratorReturnType, + getGeneratorTypeArgs, + getGeneratorYieldType, + getSpecializedTupleType, + getTypeCondition, + getTypeVarArgsRecursive, + getTypeVarScopeId, + getTypeVarScopeIds, + getUnknownForTypeVar, + getUnknownTypeForCallable, + InferenceContext, + invertVariance, + isDescriptorInstance, + isEffectivelyInstantiable, + isEllipsisType, + isIncompleteUnknown, + isInstantiableMetaclass, + isLiteralLikeType, + isLiteralType, + isMaybeDescriptorInstance, + isMemberReadOnly, + isMetaclassInstance, + isNoneInstance, + isNoneTypeClass, + isOptionalType, + isPartlyUnknown, + isProperty, + isSentinelLiteral, + isTupleClass, + isTupleIndexUnambiguous, + isTypeAliasPlaceholder, + isTypeAliasRecursive, + isTypeVarSame, + isUnboundedTupleClass, + isVarianceOfTypeArgCompatible, + lookUpClassMember, + lookUpObjectMember, + makeFunctionTypeVarsBound, + makeInferenceContext, + makePacked, + makeTypeVarsBound, + makeTypeVarsFree, + mapSignatures, + mapSubtypes, + MemberAccessFlags, + partiallySpecializeType, + preserveUnknown, + removeNoneFromUnion, + requiresSpecialization, + requiresTypeArgs, + selfSpecializeClass, + simplifyFunctionToParamSpec, + sortTypes, + specializeForBaseClass, + specializeTupleClass, + specializeWithDefaultTypeArgs, + specializeWithUnknownTypeArgs, + stripTypeForm, + stripTypeFormRecursive, + synthesizeTypeVarForSelfCls, + transformExpectedType, + transformPossibleRecursiveTypeAlias, + UniqueSignatureTracker, + validateTypeVarDefault, +} from './typeUtils'; + +interface GetTypeArgsOptions { + isAnnotatedClass?: boolean; + hasCustomClassGetItem?: boolean; + isFinalAnnotation?: boolean; + isClassVarAnnotation?: boolean; + supportsTypedDictTypeArg?: boolean; +} + +interface MatchArgsToParamsResult { + overload: FunctionType; + overloadIndex: number; + + argumentErrors: boolean; + isTypeIncomplete: boolean; + argParams: ValidateArgTypeParams[]; + activeParam?: 
FunctionParam | undefined; + paramSpecTarget?: ParamSpecType | undefined; + paramSpecArgList?: Arg[] | undefined; + + // Was there an unpacked argument of unknown length? + unpackedArgOfUnknownLength?: boolean; + + // Did that unpacked argument map to a variadic parameter? + unpackedArgMapsToVariadic?: boolean; + + // A score that indicates how well the overload matches with + // supplied arguments. Used to pick the "best" for purposes + // of error reporting when no matches are found. The higher + // the score, the worse the match. + argumentMatchScore: number; +} + +export interface MemberAccessTypeResult { + type: Type; + isDescriptorApplied?: boolean; + isAsymmetricAccessor?: boolean; + memberAccessDeprecationInfo?: MemberAccessDeprecationInfo; + typeErrors?: boolean; +} + +interface ScopedTypeVarResult { + type: TypeVarType; + scopeNode: TypeParameterScopeNode | AssignmentNode | undefined; + foundInterveningClass: boolean; +} + +interface AliasMapEntry { + alias: string; + module: 'builtins' | 'collections' | 'internals'; + implicitBaseClass?: string; + isSpecialForm?: boolean; + isIllegalInIsinstance?: boolean; + typeParamVariance?: Variance; +} + +interface AssignClassToSelfInfo { + class: ClassType; + assumedVariance: Variance; +} + +interface MatchedOverloadInfo { + overload: FunctionType; + matchResults: MatchArgsToParamsResult; + constraints: ConstraintTracker; + argResults: ArgResult[]; + returnType: Type; +} + +interface ValidateArgTypeOptions { + skipUnknownArgCheck?: boolean; + isArgFirstPass?: boolean; + conditionFilter?: TypeCondition[]; + skipReportError?: boolean; +} + +interface EffectiveReturnTypeOptions { + callSiteInfo?: CallSiteEvaluationInfo; +} + +interface SignatureTrackerStackEntry { + tracker: UniqueSignatureTracker; + rootNode: ParseNode; +} + +// This table contains the names of several built-in types that +// are not subscriptable at runtime on older versions of Python. 
+// It lists the first version of Python where subscripting is +// allowed. +const nonSubscriptableBuiltinTypes: Map = new Map([ + ['asyncio.futures.Future', pythonVersion3_9], + ['asyncio.tasks.Task', pythonVersion3_9], + ['builtins.dict', pythonVersion3_9], + ['builtins.frozenset', pythonVersion3_9], + ['builtins.list', pythonVersion3_9], + ['builtins._PathLike', pythonVersion3_9], + ['builtins.set', pythonVersion3_9], + ['builtins.tuple', pythonVersion3_9], + ['collections.ChainMap', pythonVersion3_9], + ['collections.Counter', pythonVersion3_9], + ['collections.defaultdict', pythonVersion3_9], + ['collections.DefaultDict', pythonVersion3_9], + ['collections.deque', pythonVersion3_9], + ['collections.OrderedDict', pythonVersion3_9], + ['queue.Queue', pythonVersion3_9], +]); + +// Some types that do not inherit from others are still considered +// "compatible" based on the Python spec. These are sometimes referred +// to as "type promotions". +const typePromotions: Map = new Map([ + ['builtins.float', ['builtins.int']], + ['builtins.complex', ['builtins.float', 'builtins.int']], + ['builtins.bytes', ['builtins.bytearray', 'builtins.memoryview']], +]); + +interface SymbolResolutionStackEntry { + // The symbol ID and declaration being resolved. + symbolId: number; + declaration: Declaration; + + // Initially true, it's set to false if a recursion + // is detected. + isResultValid: boolean; + + // Some limited forms of recursion are allowed. In these + // cases, a partially-constructed type can be registered. + partialType?: Type | undefined; +} + +interface ReturnTypeInferenceContext { + functionNode: FunctionNode; + codeFlowAnalyzer: CodeFlowAnalyzer; +} + +interface ParamSpecArgResult { + argumentErrors: boolean; + constraintTrackers: (ConstraintTracker | undefined)[]; +} + +// How many levels deep should we attempt to infer return +// types based on call-site argument types? 
The deeper we go, +// the more types we may be able to infer, but the worse the +// performance. +const maxReturnTypeInferenceStackSize = 2; + +// What is the max number of input arguments we should allow +// for call-site return type inference? We've found that large, +// complex functions with many arguments can take too long to +// analyze. +const maxReturnTypeInferenceArgCount = 6; + +// What is the max complexity of the code flow graph that +// we will analyze to determine the return type of a function +// when its parameters are unannotated? We want to keep this +// pretty low because this can be very costly. +const maxReturnTypeInferenceCodeFlowComplexity = 32; + +// What is the max complexity of the code flow graph for +// call-site type inference? This is very expensive, so we +// want to keep this very low. +const maxReturnCallSiteTypeInferenceCodeFlowComplexity = 8; + +// What is the max number of return types cached per function +// when using call-site inference? +const maxCallSiteReturnTypeCacheSize = 8; + +// How many entries in a list, set, or dict should we examine +// when inferring the type? We need to cut it off at some point +// to avoid excessive computation. +const maxEntriesToUseForInference = 64; + +// How many times should attempt to infer a return type of a +// function before giving up and assuming that it won't converge +// due to recursion? +const maxReturnTypeInferenceAttempts = 8; + +// How many assignments to an unannotated variable should be used +// when inferring its type? We need to cut it off at some point +// to avoid excessive computation. +const maxDeclarationsToUseForInference = 64; + +// Maximum number of times to attempt effective type evaluation +// of a variable that has no type declaration. +const maxEffectiveTypeEvaluationAttempts = 16; + +// Maximum number of combinatoric argument type expansions allowed +// when resolving an overload. 
+const maxTotalOverloadArgTypeExpansionCount = 256; + +// Maximum size of an enum that will be expanded during overload +// argument type expansion. +const maxSingleOverloadArgTypeExpansionCount = 64; + +// Maximum number of recursive function return type inference attempts +// that can be concurrently pending before we give up. +const maxInferFunctionReturnRecursionCount = 12; + +// Maximum recursion amount when comparing two recursive type aliases. +// Increasing this can greatly increase the time required to evaluate +// two recursive type aliases that have the same definition. Decreasing +// it can increase the chance of false negatives for such recursive +// type aliases. +const maxRecursiveTypeAliasRecursionCount = 10; + +// Normally a symbol can have only one type declaration, but there are +// cases where multiple are possible (e.g. a property with a setter +// and a deleter). In extreme cases, we need to limit the number of +// type declarations we consider to avoid excessive computation. +const maxTypedDeclsPerSymbol = 16; + +// This switch enables a special debug mode that attempts to catch +// bugs due to inconsistent evaluation flags used when reading types +// from the type cache. +const verifyTypeCacheEvaluatorFlags = false; + +// This debugging option prints each expression and its evaluated type. +const printExpressionTypes = false; + +// The following number is chosen somewhat arbitrarily. We need to cut +// off code flow analysis at some point for code flow graphs that are too +// complex. Otherwise we risk overflowing the stack or incurring extremely +// long analysis times. This number has been tuned empirically. 
+export const maxCodeComplexity = 768; + +export interface EvaluatorOptions { + printTypeFlags: TypePrinter.PrintTypeFlags; + logCalls: boolean; + minimumLoggingThreshold: number; + evaluateUnknownImportsAsAny: boolean; + verifyTypeCacheEvaluatorFlags: boolean; +} + +// Describes a "deferred class completion" that is run when a class type is +// fully created and the "PartiallyEvaluated" flag has just been cleared. +// This allows us to properly compute information like the MRO which +// depends on a full understanding of base classes. +interface DeferredClassCompletion { + dependsUpon: ClassType; + classesToComplete: ClassNode[]; +} + +interface TypeCacheEntry { + typeResult: TypeResult; + incompleteGenCount: number; + flags: EvalFlags | undefined; +} + +interface CodeFlowAnalyzerCacheEntry { + typeAtStart: TypeResult | undefined; + codeFlowAnalyzer: CodeFlowAnalyzer; +} + +interface FunctionRecursionInfo { + callerNode: ExpressionNode | undefined; +} + +type LogWrapper = any>(func: T) => (...args: Parameters) => ReturnType; + +interface SuppressedNodeStackEntry { + node: ParseNode; + suppressedDiags: string[] | undefined; +} + +export function createTypeEvaluator( + importLookup: ImportLookup, + evaluatorOptions: EvaluatorOptions, + wrapWithLogger: LogWrapper +): TypeEvaluator { + const symbolResolutionStack: SymbolResolutionStackEntry[] = []; + const speculativeTypeTracker = new SpeculativeTypeTracker(); + const suppressedNodeStack: SuppressedNodeStackEntry[] = []; + const assignClassToSelfStack: AssignClassToSelfInfo[] = []; + + let functionRecursionMap = new Map(); + let codeFlowAnalyzerCache = new Map(); + let typeCache = new Map(); + let effectiveTypeCache = new Map>(); + let expectedTypeCache = new Map(); + let asymmetricAccessorAssignmentCache = new Set(); + let deferredClassCompletions: DeferredClassCompletion[] = []; + let cancellationToken: CancellationToken | undefined; + let printExpressionSpaceCount = 0; + let incompleteGenCount = 0; + const 
returnTypeInferenceContextStack: ReturnTypeInferenceContext[] = []; + let returnTypeInferenceTypeCache: Map | undefined; + const signatureTrackerStack: SignatureTrackerStackEntry[] = []; + let prefetched: Partial | undefined; + + function runWithCancellationToken(token: CancellationToken, callback: () => T): T; + function runWithCancellationToken(token: CancellationToken, callback: () => Promise): Promise; + function runWithCancellationToken(token: CancellationToken, callback: () => T | Promise): T | Promise { + // Save the current token and restore it after the callback to support nested calls + const oldToken = cancellationToken; + let result: T | Promise | undefined = undefined; + try { + cancellationToken = token; + result = callback(); + + if (!isThenable(result)) { + return result; + } + + return result.finally(() => { + cancellationToken = oldToken; + }); + } finally { + if (!isThenable(result)) { + cancellationToken = oldToken; + } + } + } + + function checkForCancellation() { + if (cancellationToken) { + throwIfCancellationRequested(cancellationToken); + } + } + + function getTypeCacheEntryCount(): number { + return typeCache.size; + } + + // This function should be called immediately prior to discarding + // the type evaluator. It forcibly replaces existing cache maps + // with empty equivalents. This shouldn't be necessary, but there + // is apparently a bug in the v8 GC where it is unable to detect + // circular references in complex data structures, so it fails + // to clean up the objects if we don't help it out. + function disposeEvaluator() { + functionRecursionMap = new Map(); + codeFlowAnalyzerCache = new Map(); + typeCache = new Map(); + effectiveTypeCache = new Map>(); + expectedTypeCache = new Map(); + asymmetricAccessorAssignmentCache = new Set(); + } + + function readTypeCacheEntry(node: ParseNode) { + // Should we use a temporary cache associated with a contextual + // analysis of a function, contextualized based on call-site argument types? 
+ if (returnTypeInferenceTypeCache && isNodeInReturnTypeInferenceContext(node)) { + return returnTypeInferenceTypeCache.get(node.id); + } else { + return typeCache.get(node.id); + } + } + + function isTypeCached(node: ParseNode) { + const cacheEntry = readTypeCacheEntry(node); + if (!cacheEntry) { + return false; + } + + return !cacheEntry.typeResult.isIncomplete || cacheEntry.incompleteGenCount === incompleteGenCount; + } + + function readTypeCache(node: ParseNode, flags: EvalFlags | undefined): Type | undefined { + const cacheEntry = readTypeCacheEntry(node); + if (!cacheEntry || cacheEntry.typeResult.isIncomplete) { + return undefined; + } + + if (evaluatorOptions.verifyTypeCacheEvaluatorFlags || verifyTypeCacheEvaluatorFlags) { + if (flags !== undefined) { + const expectedFlags = cacheEntry.flags; + + if (expectedFlags !== undefined && flags !== expectedFlags) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + const position = convertOffsetToPosition(node.start, fileInfo.lines); + + const message = + `Type cache flag mismatch for node type ${node.nodeType} ` + + `(parent ${node.parent?.nodeType ?? 'none'}): ` + + `cached flags = ${expectedFlags}, access flags = ${flags}, ` + + `file = {${fileInfo.fileUri} [${position.line + 1}:${position.character + 1}]}`; + if (evaluatorOptions.verifyTypeCacheEvaluatorFlags) { + fail(message); + } else { + console.log(message); + } + } + } + } + + return cacheEntry.typeResult.type; + } + + function writeTypeCache( + node: ParseNode, + typeResult: TypeResult, + flags: EvalFlags | undefined, + inferenceContext?: InferenceContext, + allowSpeculativeCaching = false + ) { + // Should we use a temporary cache associated with a contextual + // analysis of a function, contextualized based on call-site argument types? + const typeCacheToUse = + returnTypeInferenceTypeCache && isNodeInReturnTypeInferenceContext(node) + ? 
returnTypeInferenceTypeCache + : typeCache; + + if (!typeResult.isIncomplete) { + incompleteGenCount++; + } else { + const oldValue = typeCacheToUse.get(node.id); + if (oldValue !== undefined && !isTypeSame(typeResult.type, oldValue.typeResult.type)) { + incompleteGenCount++; + } + } + + typeCacheToUse.set(node.id, { typeResult, flags, incompleteGenCount }); + + // If the entry is located within a part of the parse tree that is currently being + // "speculatively" evaluated, track it so we delete the cached entry when we leave + // this speculative context. + if (isSpeculativeModeInUse(node)) { + speculativeTypeTracker.trackEntry(typeCacheToUse, node.id); + if (allowSpeculativeCaching) { + speculativeTypeTracker.addSpeculativeType( + node, + typeResult, + incompleteGenCount, + inferenceContext?.expectedType + ); + } + } + } + + function setTypeResultForNode(node: ParseNode, typeResult: TypeResult, flags = EvalFlags.None) { + writeTypeCache(node, typeResult, flags); + } + + function setAsymmetricDescriptorAssignment(node: ParseNode) { + if (isSpeculativeModeInUse(/* node */ undefined)) { + return; + } + + asymmetricAccessorAssignmentCache.add(node.id); + } + + function isAsymmetricAccessorAssignment(node: ParseNode) { + return asymmetricAccessorAssignmentCache.has(node.id); + } + + // Determines whether the specified node is contained within + // the function node corresponding to the function that we + // are currently analyzing in the context of parameter types + // defined by a call site. 
+ function isNodeInReturnTypeInferenceContext(node: ParseNode) { + const stackSize = returnTypeInferenceContextStack.length; + if (stackSize === 0) { + return false; + } + + const contextNode = returnTypeInferenceContextStack[stackSize - 1]; + + let curNode: ParseNode | undefined = node; + while (curNode) { + if (curNode === contextNode.functionNode) { + return true; + } + curNode = curNode.parent; + } + + return false; + } + + function getCodeFlowAnalyzerForReturnTypeInferenceContext() { + const stackSize = returnTypeInferenceContextStack.length; + assert(stackSize > 0); + const contextNode = returnTypeInferenceContextStack[stackSize - 1]; + return contextNode.codeFlowAnalyzer; + } + + function getIndexOfSymbolResolution(symbol: Symbol, declaration: Declaration) { + return symbolResolutionStack.findIndex( + (entry) => entry.symbolId === symbol.id && entry.declaration === declaration + ); + } + + function pushSymbolResolution(symbol: Symbol, declaration: Declaration) { + const index = getIndexOfSymbolResolution(symbol, declaration); + if (index >= 0) { + // Mark all of the entries between these two as invalid. 
+ for (let i = index + 1; i < symbolResolutionStack.length; i++) { + symbolResolutionStack[i].isResultValid = false; + } + return false; + } + + symbolResolutionStack.push({ + symbolId: symbol.id, + declaration, + isResultValid: true, + }); + return true; + } + + function popSymbolResolution(symbol: Symbol) { + const poppedEntry = symbolResolutionStack.pop()!; + assert(poppedEntry.symbolId === symbol.id); + return poppedEntry.isResultValid; + } + + function setSymbolResolutionPartialType(symbol: Symbol, declaration: Declaration, type: Type) { + const index = getIndexOfSymbolResolution(symbol, declaration); + if (index >= 0) { + symbolResolutionStack[index].partialType = type; + } + } + + function getSymbolResolutionPartialType(symbol: Symbol, declaration: Declaration): Type | undefined { + const index = getIndexOfSymbolResolution(symbol, declaration); + if (index >= 0) { + return symbolResolutionStack[index].partialType; + } + + return undefined; + } + + // Determines the type of the specified node by evaluating it in + // context, logging any errors in the process. This may require the + // type of surrounding statements to be evaluated. + function getType(node: ExpressionNode): Type | undefined { + initializePrefetchedTypes(node); + + let type = evaluateTypeForSubnode(node, () => { + evaluateTypesForExpressionInContext(node); + })?.type; + + // If this is a type parameter with a calculated variance, see if we + // can swap it out for a version that has a computed variance. 
+ if (type && isTypeVar(type) && type.shared.declaredVariance === Variance.Auto) { + const typeVarType = type; + const typeParamListNode = ParseTreeUtils.getParentNodeOfType( + node, + ParseNodeType.TypeParameterList + ); + + if (typeParamListNode?.parent?.nodeType === ParseNodeType.Class) { + const classTypeResult = getTypeOfClass(typeParamListNode.parent); + + if (classTypeResult) { + inferVarianceForClass(classTypeResult.classType); + + const typeParam = classTypeResult.classType.shared.typeParams.find((param) => + isTypeSame(param, typeVarType, { ignoreTypeFlags: true }) + ); + + if (typeParam?.priv.computedVariance !== undefined) { + type = TypeVarType.cloneWithComputedVariance(type, typeParam.priv.computedVariance); + } + } + } else if (typeParamListNode?.parent?.nodeType === ParseNodeType.TypeAlias) { + const typeAliasType = getTypeOfTypeAlias(typeParamListNode.parent); + const typeParamIndex = typeParamListNode.d.params.findIndex((param) => param.d.name === node); + + if (typeParamIndex >= 0) { + inferVarianceForTypeAlias(typeAliasType); + + const typeAliasInfo = typeAliasType.props?.typeAliasInfo; + if (typeAliasInfo?.shared.computedVariance) { + const computedVariance = typeAliasInfo.shared.computedVariance[typeParamIndex]; + + type = TypeVarType.cloneWithComputedVariance(type, computedVariance); + } + } + } + } + + if (type) { + type = transformPossibleRecursiveTypeAlias(type); + } + + return type; + } + + function getTypeResult(node: ExpressionNode): TypeResult | undefined { + return evaluateTypeForSubnode(node, () => { + evaluateTypesForExpressionInContext(node); + }); + } + + function getTypeResultForDecorator(node: DecoratorNode): TypeResult | undefined { + return evaluateTypeForSubnode(node, () => { + evaluateTypesForExpressionInContext(node.d.expr); + }); + } + + // Reads the type of the node from the cache. 
+ function getCachedType(node: ExpressionNode | DecoratorNode): Type | undefined { + return readTypeCache(node, EvalFlags.None); + } + + // Determines the expected type of a specified node based on surrounding + // context. For example, if it's a subexpression of an argument expression, + // the associated parameter type might inform the expected type. + function getExpectedType(node: ExpressionNode): ExpectedTypeResult | undefined { + // This is a primary entry point called by language server providers, + // and it might be called before any other type evaluation has occurred. + // Use this opportunity to do some initialization. + initializePrefetchedTypes(node); + + // Scan up the parse tree to find the top-most expression node + // so we can evaluate the entire expression. + let topExpression = node; + let curNode: ParseNode | undefined = node; + while (curNode) { + if (isExpressionNode(curNode)) { + topExpression = curNode; + } + + curNode = curNode.parent; + } + + // Evaluate the expression. This will have the side effect of + // storing an expected type in the expected type cache. + evaluateTypesForExpressionInContext(topExpression); + + // Look for the resulting expected type by scanning up the parse tree. + curNode = node; + while (curNode) { + const expectedType = expectedTypeCache.get(curNode.id); + if (expectedType) { + return { + type: expectedType, + node: curNode, + }; + } + + if (curNode === topExpression) { + break; + } + + curNode = curNode.parent; + } + + return undefined; + } + + function initializePrefetchedTypes(node: ParseNode) { + if (!prefetched) { + // Some of these types have cyclical dependencies on each other, + // so don't re-enter this block once we start executing it. + prefetched = {}; + + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + + prefetched.objectClass = getBuiltInType(node, 'object'); + prefetched.typeClass = getBuiltInType(node, 'type'); + prefetched.functionClass = getTypesType(node, 'FunctionType') ?? 
getBuiltInType(node, 'function'); + prefetched.methodClass = getTypesType(node, 'MethodType'); + + prefetched.unionTypeClass = getTypesType(node, 'UnionType'); + if (prefetched.unionTypeClass && isClass(prefetched.unionTypeClass)) { + prefetched.unionTypeClass.shared.flags |= ClassTypeFlags.SpecialFormClass; + } + + // Initialize and cache "Collection" to break a cyclical dependency + // that occurs when resolving tuple below. + getTypingType(node, 'Collection'); + + prefetched.noneTypeClass = getTypeshedType(node, 'NoneType') ?? UnknownType.create(); + prefetched.tupleClass = getBuiltInType(node, 'tuple'); + prefetched.boolClass = getBuiltInType(node, 'bool'); + prefetched.intClass = getBuiltInType(node, 'int'); + prefetched.strClass = getBuiltInType(node, 'str'); + prefetched.dictClass = getBuiltInType(node, 'dict'); + prefetched.moduleTypeClass = getTypingType(node, 'ModuleType'); + prefetched.typedDictPrivateClass = + getTypeCheckerInternalsType(node, 'TypedDictFallback') ?? getTypingType(node, '_TypedDict'); + prefetched.typedDictClass = getTypingType(node, 'TypedDict'); + prefetched.awaitableClass = getTypingType(node, 'Awaitable'); + prefetched.mappingClass = getTypingType(node, 'Mapping'); + + // Don't attempt to resolve the string.templatelib if pyright is configured for + // Python 3.13 or older. Doing so will either fail to resolve (if running on Python 3.13 + // or older) or resolve to the templatelib.py source file (if running on Python 3.14). + if (PythonVersion.isGreaterOrEqualTo(fileInfo.executionEnvironment.pythonVersion, pythonVersion3_14)) { + prefetched.templateClass = getTypeOfModule(node, 'Template', ['string', 'templatelib']); + } else { + prefetched.templateClass = UnknownType.create(); + } + + prefetched.supportsKeysAndGetItemClass = getTypeshedType(node, 'SupportsKeysAndGetItem'); + if (!prefetched.supportsKeysAndGetItemClass) { + // Fall back on 'Mapping' if 'SupportsKeysAndGetItem' is not available. 
+ prefetched.supportsKeysAndGetItemClass = prefetched.mappingClass; + } + + // Wire up the `Any` class to the special-form version of our internal AnyType. + if ( + prefetched.objectClass && + isInstantiableClass(prefetched.objectClass) && + prefetched.typeClass && + isInstantiableClass(prefetched.typeClass) + ) { + const anyClass = ClassType.createInstantiable( + 'Any', + 'typing.Any', + 'typing', + Uri.empty(), + ClassTypeFlags.BuiltIn | ClassTypeFlags.SpecialFormClass | ClassTypeFlags.IllegalIsinstanceClass, + /* typeSourceId */ -1, + /* declaredMetaclass */ undefined, + /* effectiveMetaclass */ prefetched.typeClass + ); + anyClass.shared.baseClasses.push(prefetched.objectClass); + computeMroLinearization(anyClass); + const anySpecialForm = AnyType.createSpecialForm(); + + if (isAny(anySpecialForm)) { + TypeBase.setSpecialForm(anySpecialForm, anyClass); + + if (isTypeFormSupported(node)) { + TypeBase.setTypeForm(anySpecialForm, convertToInstance(anySpecialForm)); + } + } + } + } + } + + function getTypeOfExpression( + node: ExpressionNode, + flags = EvalFlags.None, + inferenceContext?: InferenceContext + ): TypeResult { + // Is this type already cached? + const cacheEntry = readTypeCacheEntry(node); + if (cacheEntry) { + if (!cacheEntry.typeResult.isIncomplete || cacheEntry.incompleteGenCount === incompleteGenCount) { + if (printExpressionTypes) { + console.log( + `${getPrintExpressionTypesSpaces()}${ParseTreeUtils.printExpression(node)} (${getLineNum( + node + )}): Cached ${printType(cacheEntry.typeResult.type)} ${ + cacheEntry.typeResult.typeErrors ? ' Errors' : '' + }` + ); + } + + return cacheEntry.typeResult; + } + } + + // Is it cached in the speculative type cache? 
+ const specCacheEntry = speculativeTypeTracker.getSpeculativeType(node, inferenceContext?.expectedType); + if (specCacheEntry) { + if ( + !specCacheEntry.typeResult.isIncomplete || + specCacheEntry.incompleteGenerationCount === incompleteGenCount + ) { + if (printExpressionTypes) { + console.log( + `${getPrintExpressionTypesSpaces()}${ParseTreeUtils.printExpression(node)} (${getLineNum( + node + )}): Speculative ${printType(specCacheEntry.typeResult.type)}` + ); + } + + return specCacheEntry.typeResult; + } + } + + if (printExpressionTypes) { + console.log( + `${getPrintExpressionTypesSpaces()}${ParseTreeUtils.printExpression(node)} (${getLineNum(node)}): Pre` + ); + printExpressionSpaceCount++; + } + + // This is a frequently-called routine, so it's a good place to call + // the cancellation check. If the operation is canceled, an exception + // will be thrown at this point. + checkForCancellation(); + + if (inferenceContext) { + inferenceContext.expectedType = transformPossibleRecursiveTypeAlias(inferenceContext.expectedType); + } + + // If we haven't already fetched some core type definitions from the + // typeshed stubs, do so here. It would be better to fetch this when it's + // needed in assignType, but we don't have access to the parse tree + // at that point. + initializePrefetchedTypes(node); + + let typeResult = getTypeOfExpressionCore(node, flags, inferenceContext); + + // Should we disable type promotions for bytes? + if ( + isInstantiableClass(typeResult.type) && + typeResult.type.priv.includePromotions && + !typeResult.type.priv.includeSubclasses && + ClassType.isBuiltIn(typeResult.type, 'bytes') + ) { + if (AnalyzerNodeInfo.getFileInfo(node).diagnosticRuleSet.disableBytesTypePromotions) { + typeResult = { + ...typeResult, + type: ClassType.cloneRemoveTypePromotions(typeResult.type), + }; + } + } + + if (inferenceContext) { + // Handle TypeForm assignments. 
+ typeResult.type = convertToTypeFormType(inferenceContext.expectedType, typeResult.type); + } + + // Don't allow speculative caching for assignment expressions because + // the target name node won't have a corresponding type cached speculatively. + const allowSpeculativeCaching = node.nodeType !== ParseNodeType.AssignmentExpression; + + writeTypeCache(node, typeResult, flags, inferenceContext, allowSpeculativeCaching); + + if (node.nodeType === ParseNodeType.Name || node.nodeType === ParseNodeType.MemberAccess) { + // If this is a generic function and there is a signature tracker, + // make sure the signature is unique. + typeResult.type = ensureSignatureIsUnique(typeResult.type, node); + } + + // If there was an expected type, make sure that the result type is compatible. + if ( + inferenceContext && + !isAnyOrUnknown(inferenceContext.expectedType) && + !isNever(inferenceContext.expectedType) + ) { + expectedTypeCache.set(node.id, inferenceContext.expectedType); + + if (!typeResult.isIncomplete && !typeResult.expectedTypeDiagAddendum) { + const diag = new DiagnosticAddendum(); + + // Make sure the resulting type is assignable to the expected type. + if ( + !assignType( + inferenceContext.expectedType, + typeResult.type, + diag, + /* constraints */ undefined, + AssignTypeFlags.Default + ) + ) { + // Set the typeErrors to true, but first make a copy of the + // type result because the (non-error) version may already + // be cached. + typeResult = { ...typeResult, typeErrors: true }; + typeResult.expectedTypeDiagAddendum = diag; + diag.addTextRange(node); + } + } + } + + if (printExpressionTypes) { + printExpressionSpaceCount--; + console.log( + `${getPrintExpressionTypesSpaces()}${ParseTreeUtils.printExpression(node)} (${getLineNum( + node + )}): Post ${printType(typeResult.type)}${typeResult.isIncomplete ? ' Incomplete' : ''}` + ); + } + + return typeResult; + } + + // This is a helper function that implements the core of getTypeOfExpression. 
+ function getTypeOfExpressionCore( + node: ExpressionNode, + flags = EvalFlags.None, + inferenceContext?: InferenceContext + ): TypeResult { + let typeResult: TypeResult | undefined; + let expectingInstantiable = (flags & EvalFlags.InstantiableType) !== 0; + + switch (node.nodeType) { + case ParseNodeType.Name: { + typeResult = getTypeOfName(node, flags); + break; + } + + case ParseNodeType.MemberAccess: { + typeResult = getTypeOfMemberAccess(node, flags); + break; + } + + case ParseNodeType.Index: { + typeResult = getTypeOfIndex(node, flags); + break; + } + + case ParseNodeType.Call: { + typeResult = useSignatureTracker(node, () => getTypeOfCall(node, flags, inferenceContext)); + break; + } + + case ParseNodeType.Tuple: { + typeResult = getTypeOfTuple(evaluatorInterface, node, flags, inferenceContext); + break; + } + + case ParseNodeType.Constant: { + typeResult = getTypeOfConstant(node, flags); + break; + } + + case ParseNodeType.StringList: { + if ((flags & EvalFlags.StrLiteralAsType) !== 0) { + // Don't report expecting type errors again. We will have already + // reported them when analyzing the contents of the string. + expectingInstantiable = false; + } + + typeResult = getTypeOfStringList(node, flags); + break; + } + + case ParseNodeType.Number: { + typeResult = getTypeOfNumber(node, typeResult); + break; + } + + case ParseNodeType.Ellipsis: { + typeResult = getTypeOfEllipsis(flags, typeResult, node); + break; + } + + case ParseNodeType.UnaryOperation: { + typeResult = getTypeOfUnaryOperation(evaluatorInterface, node, flags, inferenceContext); + break; + } + + case ParseNodeType.BinaryOperation: { + let effectiveFlags = flags; + + // If we're expecting an instantiable type and this isn't a union operator, + // don't require that the two operands are also instantiable types. 
+ if (expectingInstantiable && node.d.operator !== OperatorType.BitwiseOr) { + effectiveFlags &= ~EvalFlags.InstantiableType; + } + + typeResult = getTypeOfBinaryOperation(evaluatorInterface, node, effectiveFlags, inferenceContext); + break; + } + + case ParseNodeType.AugmentedAssignment: { + typeResult = getTypeOfAugmentedAssignment(evaluatorInterface, node, inferenceContext); + break; + } + + case ParseNodeType.List: + case ParseNodeType.Set: { + typeResult = getTypeOfListOrSet(node, flags, inferenceContext); + break; + } + + case ParseNodeType.Slice: { + typeResult = getTypeOfSlice(node); + break; + } + + case ParseNodeType.Await: { + typeResult = getTypeOfAwaitOperator(node, flags, inferenceContext); + break; + } + + case ParseNodeType.Ternary: { + typeResult = getTypeOfTernaryOperation(evaluatorInterface, node, flags, inferenceContext); + break; + } + + case ParseNodeType.Comprehension: { + typeResult = getTypeOfComprehension(node, flags, inferenceContext); + break; + } + + case ParseNodeType.Dictionary: { + typeResult = getTypeOfDictionary(node, flags, inferenceContext); + break; + } + + case ParseNodeType.Lambda: { + typeResult = getTypeOfLambda(node, inferenceContext); + break; + } + + case ParseNodeType.Assignment: { + typeResult = getTypeOfExpression(node.d.rightExpr, flags, inferenceContext); + assignTypeToExpression( + node.d.leftExpr, + typeResult, + node.d.rightExpr, + /* ignoreEmptyContainers */ true, + /* allowAssignmentToFinalVar */ true + ); + break; + } + + case ParseNodeType.AssignmentExpression: { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.walrusNotAllowed(), node); + } + + typeResult = getTypeOfExpression(node.d.rightExpr, flags, inferenceContext); + assignTypeToExpression(node.d.name, typeResult, node.d.rightExpr, /* ignoreEmptyContainers */ true); + break; + } + + case ParseNodeType.Yield: { + typeResult = getTypeOfYield(node); + break; + } + + case 
ParseNodeType.YieldFrom: { + typeResult = getTypeOfYieldFrom(node); + break; + } + + case ParseNodeType.Unpack: { + typeResult = getTypeOfUnpackOperator(node, flags, inferenceContext); + break; + } + + case ParseNodeType.TypeAnnotation: { + typeResult = getTypeOfExpression( + node.d.annotation, + EvalFlags.InstantiableType | + EvalFlags.TypeExpression | + EvalFlags.StrLiteralAsType | + EvalFlags.NoParamSpec | + EvalFlags.NoTypeVarTuple | + EvalFlags.VarTypeAnnotation + ); + break; + } + + case ParseNodeType.String: + case ParseNodeType.FormatString: { + typeResult = getTypeOfString(node); + break; + } + + case ParseNodeType.Error: { + // Evaluate the child expression as best we can so the + // type information is cached for the completion handler. + suppressDiagnostics(node, () => { + if (node.d.child) { + getTypeOfExpression(node.d.child); + } + }); + typeResult = { type: UnknownType.create() }; + break; + } + + default: + assertNever(node, `Illegal node type: ${(node as any).nodeType}`); + } + + if (!typeResult) { + // We shouldn't get here. If we do, report an error. + fail(`Unhandled expression type '${ParseTreeUtils.printExpression(node)}'`); + } + + // Do we need to validate that the type is instantiable? + if (expectingInstantiable) { + validateTypeIsInstantiable(typeResult, flags, node); + } + + // If this is a PEP 695 type alias, remove the special form so the type + // printer prints it as its aliased type rather than TypeAliasType. + if ((flags & EvalFlags.TypeExpression) !== 0 && typeResult.type.props?.typeForm === undefined) { + const specialForm = typeResult.type.props?.specialForm; + if (specialForm && ClassType.isBuiltIn(specialForm, 'TypeAliasType')) { + typeResult.type = TypeBase.cloneAsSpecialForm(typeResult.type, undefined); + } + } + + return typeResult; + } + + // Reports the case where a function or class has been decorated with + // @type_check_only and is used in a value expression. 
+ function reportUseOfTypeCheckOnly(type: Type, node: ExpressionNode) { + let isTypeCheckingOnly = false; + let name = ''; + + if (isInstantiableClass(type) && !type.priv.includeSubclasses) { + isTypeCheckingOnly = ClassType.isTypeCheckOnly(type); + name = type.shared.name; + } else if (isFunction(type)) { + isTypeCheckingOnly = FunctionType.isTypeCheckOnly(type); + name = type.shared.name; + } + + if (isTypeCheckingOnly) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + + if (!fileInfo.isStubFile) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeCheckOnly().format({ name }), + node + ); + } + } + } + + function validateTypeIsInstantiable(typeResult: TypeResult, flags: EvalFlags, node: ExpressionNode) { + // If the type is incomplete, don't log any diagnostics yet. + if (typeResult.isIncomplete) { + return; + } + + if ((flags & EvalFlags.NoTypeVarTuple) !== 0) { + if (isTypeVarTuple(typeResult.type) && !typeResult.type.priv.isInUnion) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.typeVarTupleContext(), node); + typeResult.type = UnknownType.create(); + } + } + + if (isEffectivelyInstantiable(typeResult.type, { honorTypeVarBounds: true })) { + return; + } + + // Exempt ellipses. + if (isClassInstance(typeResult.type) && ClassType.isBuiltIn(typeResult.type, ['EllipsisType', 'ellipsis'])) { + return; + } + + // Emit these errors only if we know we're evaluating a type expression. 
+ if ((flags & EvalFlags.TypeExpression) !== 0) { + const diag = new DiagnosticAddendum(); + if (isUnion(typeResult.type)) { + doForEachSubtype(typeResult.type, (subtype) => { + if (!isEffectivelyInstantiable(subtype, { honorTypeVarBounds: true })) { + diag.addMessage(LocAddendum.typeNotClass().format({ type: printType(subtype) })); + } + }); + } + + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeExpectedClass().format({ type: printType(typeResult.type) }) + diag.getString(), + node + ); + + typeResult.type = UnknownType.create(); + } + + typeResult.typeErrors = true; + } + + function getTypeOfAwaitOperator(node: AwaitNode, flags: EvalFlags, inferenceContext?: InferenceContext) { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.awaitNotAllowed(), node); + return { type: UnknownType.create() }; + } + + const expectedType = inferenceContext + ? createAwaitableReturnType( + node, + inferenceContext.expectedType, + /* isGenerator */ false, + /* useCoroutine */ false + ) + : undefined; + + const exprTypeResult = getTypeOfExpression(node.d.expr, flags, makeInferenceContext(expectedType)); + const awaitableResult = getTypeOfAwaitable(exprTypeResult, node.d.expr); + const typeResult: TypeResult = { + type: awaitableResult.type, + isIncomplete: exprTypeResult.isIncomplete || awaitableResult.isIncomplete, + typeErrors: exprTypeResult.typeErrors, + }; + + if (exprTypeResult.isIncomplete) { + typeResult.isIncomplete = true; + } + return typeResult; + } + + function getTypeOfEllipsis(flags: EvalFlags, typeResult: TypeResult | undefined, node: ExpressionNode) { + if ((flags & EvalFlags.ConvertEllipsisToAny) !== 0) { + typeResult = { type: AnyType.create(/* isEllipsis */ true) }; + } else { + if ((flags & EvalFlags.TypeExpression) !== 0 && (flags & EvalFlags.AllowEllipsis) === 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.ellipsisContext(), node); + typeResult = { 
type: UnknownType.create() }; + } else { + const ellipsisType = + getBuiltInObject(node, 'EllipsisType') ?? getBuiltInObject(node, 'ellipsis') ?? AnyType.create(); + typeResult = { type: ellipsisType }; + } + } + return typeResult; + } + + function getTypeOfNumber(node: NumberNode, typeResult: TypeResult | undefined) { + if (node.d.isImaginary) { + typeResult = { type: getBuiltInObject(node, 'complex') }; + } else if (node.d.isInteger) { + typeResult = { type: cloneBuiltinObjectWithLiteral(node, 'int', node.d.value) }; + } else { + typeResult = { type: getBuiltInObject(node, 'float') }; + } + return typeResult; + } + + function getTypeOfUnpackOperator(node: UnpackNode, flags: EvalFlags, inferenceContext?: InferenceContext) { + let typeResult: TypeResult | undefined; + let iterExpectedType: Type | undefined; + + if (inferenceContext) { + const iterableType = getBuiltInType(node, 'Iterable'); + if (iterableType && isInstantiableClass(iterableType)) { + iterExpectedType = ClassType.cloneAsInstance( + ClassType.specialize(iterableType, [inferenceContext.expectedType]) + ); + } + } + + const iterTypeResult = getTypeOfExpression(node.d.expr, flags, makeInferenceContext(iterExpectedType)); + const iterType = iterTypeResult.type; + if ((flags & EvalFlags.NoTypeVarTuple) === 0 && isTypeVarTuple(iterType) && !iterType.priv.isUnpacked) { + typeResult = { type: TypeVarType.cloneForUnpacked(iterType) }; + } else if ( + (flags & EvalFlags.AllowUnpackedTuple) !== 0 && + isInstantiableClass(iterType) && + ClassType.isBuiltIn(iterType, 'tuple') + ) { + typeResult = { type: ClassType.cloneForUnpacked(iterType) }; + } else if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.unpackInAnnotation(), + node, + node.d.starToken + ); + typeResult = { type: UnknownType.create() }; + } else { + const iteratorTypeResult = getTypeOfIterator(iterTypeResult, /* isAsync */ false, node) ?? 
{ + type: UnknownType.create(!!iterTypeResult.isIncomplete), + isIncomplete: iterTypeResult.isIncomplete, + }; + typeResult = { + type: iteratorTypeResult.type, + typeErrors: iterTypeResult.typeErrors, + unpackedType: iterType, + isIncomplete: iteratorTypeResult.isIncomplete, + }; + } + + return typeResult; + } + + function getTypeOfStringList(node: StringListNode, flags: EvalFlags): TypeResult { + let typeResult: TypeResult | undefined; + + if ((flags & EvalFlags.StrLiteralAsType) !== 0 && (flags & EvalFlags.TypeFormArg) === 0) { + return getTypeOfStringListAsType(node, flags); + } + + const isBytesNode = (node: StringNode | FormatStringNode) => + (node.d.token.flags & StringTokenFlags.Bytes) !== 0; + + // Check for mixing of bytes and str, which is not allowed. + const firstStrIndex = node.d.strings.findIndex((str) => !isBytesNode(str)); + const firstBytesIndex = node.d.strings.findIndex((str) => isBytesNode(str)); + if (firstStrIndex >= 0 && firstBytesIndex >= 0) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.mixingBytesAndStr(), + node.d.strings[Math.max(firstBytesIndex, firstStrIndex)] + ); + + return { type: UnknownType.create() }; + } + + const isBytes = firstBytesIndex >= 0; + let isLiteralString = true; + let isIncomplete = false; + let isTemplate = false; + + node.d.strings.forEach((expr) => { + // Handle implicit concatenation. 
+ const typeResult = getTypeOfString(expr); + + if (typeResult.isIncomplete) { + isIncomplete = true; + } + + let isExprLiteralString = false; + + if (isClassInstance(typeResult.type)) { + if (ClassType.isBuiltIn(typeResult.type, 'str') && typeResult.type.priv.literalValue !== undefined) { + isExprLiteralString = true; + } else if (ClassType.isBuiltIn(typeResult?.type, 'LiteralString')) { + isExprLiteralString = true; + } + + if (typeResult.type.shared.name === 'Template') { + isTemplate = true; + } + } + + if (!isExprLiteralString) { + isLiteralString = false; + } + }); + + if (isTemplate) { + const templateType = + prefetched?.templateClass && isInstantiableClass(prefetched?.templateClass) + ? ClassType.cloneAsInstance(prefetched.templateClass) + : UnknownType.create(); + + typeResult = { type: templateType, isIncomplete }; + } else if (node.d.strings.some((str) => str.nodeType === ParseNodeType.FormatString)) { + if (isLiteralString) { + const literalStringType = getTypingType(node, 'LiteralString'); + if (literalStringType && isInstantiableClass(literalStringType)) { + typeResult = { type: ClassType.cloneAsInstance(literalStringType) }; + } + } + + if (!typeResult) { + typeResult = { + type: getBuiltInObject(node, isBytes ? 'bytes' : 'str'), + isIncomplete, + }; + } + } else { + typeResult = { + type: cloneBuiltinObjectWithLiteral( + node, + isBytes ? 'bytes' : 'str', + node.d.strings.map((s) => s.d.value).join('') + ), + isIncomplete, + }; + } + + if ( + node.d.strings.length !== 1 || + node.d.strings[0].nodeType !== ParseNodeType.String || + !isTypeFormSupported(node) + ) { + return typeResult; + } + + // For performance reasons, do not attempt to treat the string literal + // as a TypeForm if it's going to fail anyway or is unlikely to be a + // TypeForm (really long, triple-quoted, etc.). 
+ const stringNode = node.d.strings[0]; + const tokenFlags = stringNode.d.token.flags; + const disallowedTokenFlags = + StringTokenFlags.Bytes | + StringTokenFlags.Raw | + StringTokenFlags.Format | + StringTokenFlags.Template | + StringTokenFlags.Triplicate; + const maxTypeFormStringLength = 256; + + if ( + (tokenFlags & disallowedTokenFlags) !== 0 || + stringNode.d.token.escapedValue.length >= maxTypeFormStringLength + ) { + return typeResult; + } + + const typeFormResult = getTypeOfStringListAsType(node, flags); + if (typeFormResult.type.props?.typeForm) { + typeResult.type = TypeBase.cloneWithTypeForm(typeResult.type, typeFormResult.type.props.typeForm); + } + + return typeResult; + } + + function getTypeOfStringListAsType(node: StringListNode, flags: EvalFlags): TypeResult { + const reportTypeErrors = (flags & EvalFlags.StrLiteralAsType) !== 0; + let updatedFlags = flags | EvalFlags.ForwardRefs | EvalFlags.InstantiableType; + let typeResult: TypeResult | undefined; + + // In most cases, annotations within a string are not parsed by the interpreter. + // There are a few exceptions (e.g. the "bound" value for a TypeVar constructor). 
+ if ((flags & EvalFlags.ParsesStringLiteral) === 0) { + updatedFlags |= EvalFlags.NotParsed; + } + + updatedFlags &= ~EvalFlags.TypeFormArg; + + if (node.d.annotation && (flags & EvalFlags.TypeExpression) !== 0) { + return getTypeOfExpression(node.d.annotation, updatedFlags); + } + + if (node.d.strings.length === 1) { + const tokenFlags = node.d.strings[0].d.token.flags; + + if (tokenFlags & StringTokenFlags.Bytes) { + if (reportTypeErrors) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.annotationBytesString(), node); + } + return { type: UnknownType.create() }; + } + + if (tokenFlags & StringTokenFlags.Raw) { + if (reportTypeErrors) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.annotationRawString(), node); + } + return { type: UnknownType.create() }; + } + + if (tokenFlags & StringTokenFlags.Format) { + if (reportTypeErrors) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.annotationFormatString(), node); + } + return { type: UnknownType.create() }; + } + + if (tokenFlags & StringTokenFlags.Template) { + if (reportTypeErrors) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.annotationTemplateString(), node); + } + return { type: UnknownType.create() }; + } + + // We didn't know at parse time that this string node was going + // to be evaluated as a forward-referenced type. We need + // to re-invoke the parser at this stage. + const expr = parseStringAsTypeAnnotation(node, reportTypeErrors); + if (expr) { + typeResult = useSpeculativeMode(reportTypeErrors ? 
undefined : node, () => { + return getTypeOfExpression(expr, updatedFlags); + }); + } + } + + if (!typeResult) { + if (reportTypeErrors) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.expectedTypeNotString(), node); + } + typeResult = { type: UnknownType.create() }; + } + + return typeResult; + } + + function getTypeOfString(node: StringNode | FormatStringNode): TypeResult { + const isBytes = (node.d.token.flags & StringTokenFlags.Bytes) !== 0; + let typeResult: TypeResult | undefined; + let isIncomplete = false; + + if (node.nodeType === ParseNodeType.String) { + typeResult = { + type: cloneBuiltinObjectWithLiteral(node, isBytes ? 'bytes' : 'str', node.d.value), + isIncomplete, + }; + } else { + const isTemplateString = (node.d.token.flags & StringTokenFlags.Template) !== 0; + let isLiteralString = true; + + // If all of the format expressions are of type LiteralString, then + // the resulting formatted string is also LiteralString. + node.d.fieldExprs.forEach((expr) => { + const exprTypeResult = getTypeOfExpression(expr); + const exprType = exprTypeResult.type; + + if (exprTypeResult.isIncomplete) { + isIncomplete = true; + } + + doForEachSubtype(exprType, (exprSubtype) => { + if (!isClassInstance(exprSubtype)) { + isLiteralString = false; + return; + } + + if (ClassType.isBuiltIn(exprSubtype, 'LiteralString')) { + return; + } + + if (ClassType.isBuiltIn(exprSubtype, 'str') && exprSubtype.priv.literalValue !== undefined) { + return; + } + + isLiteralString = false; + }); + }); + + if (isTemplateString) { + const templateType = + prefetched?.templateClass && isInstantiableClass(prefetched?.templateClass) + ? 
ClassType.cloneAsInstance(prefetched.templateClass) + : UnknownType.create(); + + typeResult = { type: templateType, isIncomplete }; + } else if (!isBytes && isLiteralString) { + const literalStringType = getTypingType(node, 'LiteralString'); + if (literalStringType && isInstantiableClass(literalStringType)) { + typeResult = { type: ClassType.cloneAsInstance(literalStringType), isIncomplete }; + } + } + + if (!typeResult) { + typeResult = { + type: getBuiltInObject(node, isBytes ? 'bytes' : 'str'), + isIncomplete, + }; + + if (isClass(typeResult.type) && typeResult.type.priv.includePromotions) { + typeResult.type = ClassType.cloneRemoveTypePromotions(typeResult.type); + } + } + } + + return typeResult; + } + + function stripLiteralValue(type: Type): Type { + // Handle the not-uncommon case where the type is a union that consists + // only of literal values. + if (isUnion(type) && type.priv.subtypes.length > 0) { + if ( + type.priv.literalInstances.literalStrMap?.size === type.priv.subtypes.length || + type.priv.literalInstances.literalIntMap?.size === type.priv.subtypes.length || + type.priv.literalInstances.literalEnumMap?.size === type.priv.subtypes.length + ) { + return stripLiteralValue(type.priv.subtypes[0]); + } + } + + return mapSubtypes(type, (subtype) => { + if (isClass(subtype)) { + if (subtype.priv.literalValue !== undefined) { + subtype = ClassType.cloneWithLiteral(subtype, /* value */ undefined); + } + + if (ClassType.isBuiltIn(subtype, 'LiteralString')) { + // Handle "LiteralString" specially. 
+ if (prefetched?.strClass && isInstantiableClass(prefetched.strClass)) { + let strInstance = ClassType.cloneAsInstance(prefetched.strClass); + strInstance = TypeBase.cloneForCondition(strInstance, getTypeCondition(subtype)); + return strInstance; + } + } + } + + return subtype; + }); + } + + function getTypeOfParamAnnotation(paramTypeNode: ExpressionNode, paramCategory: ParamCategory) { + return getTypeOfAnnotation(paramTypeNode, { + typeVarGetsCurScope: true, + allowUnpackedTuple: paramCategory === ParamCategory.ArgsList, + allowUnpackedTypedDict: paramCategory === ParamCategory.KwargsDict, + }); + } + + function getTypeOfAnnotation(node: ExpressionNode, options?: ExpectedTypeOptions): Type { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + + // Special-case the typing.pyi file, which contains some special + // types that the type analyzer needs to interpret differently. + if (fileInfo.isTypingStubFile || fileInfo.isTypingExtensionsStubFile) { + const specialType = handleTypingStubTypeAnnotation(node); + if (specialType) { + return specialType; + } + } + + const adjustedOptions: ExpectedTypeOptions = options ? { ...options } : {}; + + adjustedOptions.typeExpression = true; + adjustedOptions.convertEllipsisToAny = true; + + // If the annotation is part of a comment, allow forward references + // even if it's not enclosed in quotes. 
+ if (node?.parent?.nodeType === ParseNodeType.Assignment && node.parent.d.annotationComment === node) { + adjustedOptions.forwardRefs = true; + adjustedOptions.notParsed = true; + } else if (node?.parent?.nodeType === ParseNodeType.FunctionAnnotation) { + if (node.parent.d.returnAnnotation === node || node.parent.d.paramAnnotations.some((n) => n === node)) { + adjustedOptions.forwardRefs = true; + adjustedOptions.notParsed = true; + } + } else if (node?.parent?.nodeType === ParseNodeType.Parameter) { + if (node.parent.d.annotationComment === node) { + adjustedOptions.forwardRefs = true; + adjustedOptions.notParsed = true; + } + } + + const annotationType = getTypeOfExpressionExpectingType(node, adjustedOptions).type; + + if (isModule(annotationType)) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.moduleAsType(), node); + } + + return convertToInstance(annotationType); + } + + function canBeFalsy(type: Type, recursionCount = 0): boolean { + type = makeTopLevelTypeVarsConcrete(type); + + if (recursionCount > maxTypeRecursionCount) { + return true; + } + recursionCount++; + + switch (type.category) { + case TypeCategory.Unbound: + case TypeCategory.Unknown: + case TypeCategory.Any: + case TypeCategory.Never: { + return true; + } + + case TypeCategory.Union: { + return findSubtype(type, (subtype) => canBeFalsy(subtype, recursionCount)) !== undefined; + } + + case TypeCategory.Function: + case TypeCategory.Overloaded: + case TypeCategory.Module: + case TypeCategory.TypeVar: { + return false; + } + + case TypeCategory.Class: { + if (TypeBase.isInstantiable(type)) { + return false; + } + + // Sentinels are always truthy. + if (isSentinelLiteral(type)) { + return false; + } + + // Handle tuples specially. + if (isTupleClass(type) && type.priv.tupleTypeArgs) { + return isUnboundedTupleClass(type) || type.priv.tupleTypeArgs.length === 0; + } + + // Handle subclasses of tuple, such as NamedTuple. 
+ const tupleBaseClass = type.shared.mro.find((mroClass) => !isClass(mroClass) || isTupleClass(mroClass)); + if (tupleBaseClass && isClass(tupleBaseClass) && tupleBaseClass.priv.tupleTypeArgs) { + return isUnboundedTupleClass(tupleBaseClass) || tupleBaseClass.priv.tupleTypeArgs.length === 0; + } + + // Handle TypedDicts specially. If one or more entries are required + // or known to exist, we can say for sure that the type is not falsy. + if (ClassType.isTypedDictClass(type)) { + const tdEntries = getTypedDictMembersForClass(evaluatorInterface, type, /* allowNarrowed */ true); + if (tdEntries) { + for (const tdEntry of tdEntries.knownItems.values()) { + if (tdEntry.isRequired || tdEntry.isProvided) { + return false; + } + } + } + } + + // Check for bool, int, str and bytes literals that are never falsy. + if (type.priv.literalValue !== undefined) { + if (ClassType.isBuiltIn(type, ['bool', 'int', 'str', 'bytes'])) { + return !type.priv.literalValue || type.priv.literalValue === BigInt(0); + } + + if (type.priv.literalValue instanceof EnumLiteral) { + // Does the Enum class forward the truthiness check to the + // underlying member type? + if (type.priv.literalValue.isReprEnum) { + return canBeFalsy(type.priv.literalValue.itemType, recursionCount); + } + } + } + + // If this is a protocol class, don't make any assumptions about the absence + // of specific methods. These could be provided by a class that conforms + // to the protocol. + if (ClassType.isProtocolClass(type)) { + return true; + } + + const lenMethod = lookUpObjectMember(type, '__len__'); + if (lenMethod) { + return true; + } + + const boolMethod = lookUpObjectMember(type, '__bool__'); + if (boolMethod) { + const boolMethodType = getTypeOfMember(boolMethod); + + // If the __bool__ function unconditionally returns True, it can never be falsy. 
+ if (isFunction(boolMethodType) && boolMethodType.shared.declaredReturnType) { + const returnType = boolMethodType.shared.declaredReturnType; + if ( + isClassInstance(returnType) && + ClassType.isBuiltIn(returnType, 'bool') && + returnType.priv.literalValue === true + ) { + return false; + } + } + + return true; + } + + // If the class is not final, it's possible that it could be overridden + // such that it is falsy. To be fully correct, we'd need to do the + // following: + // return !ClassType.isFinal(type); + // However, pragmatically if the class is not an `object`, it's typically + // OK to assume that it will not be overridden in this manner. + return ClassType.isBuiltIn(type, 'object'); + } + } + } + + function canBeTruthy(type: Type, recursionCount = 0): boolean { + type = makeTopLevelTypeVarsConcrete(type); + + if (recursionCount > maxTypeRecursionCount) { + return true; + } + recursionCount++; + + switch (type.category) { + case TypeCategory.Unknown: + case TypeCategory.Function: + case TypeCategory.Overloaded: + case TypeCategory.Module: + case TypeCategory.TypeVar: + case TypeCategory.Never: + case TypeCategory.Any: { + return true; + } + + case TypeCategory.Union: { + return findSubtype(type, (subtype) => canBeTruthy(subtype, recursionCount)) !== undefined; + } + + case TypeCategory.Unbound: { + return false; + } + + case TypeCategory.Class: { + if (TypeBase.isInstantiable(type)) { + return true; + } + + if (isNoneInstance(type)) { + return false; + } + + // // Check for tuple[()] (an empty tuple). + if (type.priv.tupleTypeArgs && type.priv.tupleTypeArgs.length === 0) { + return false; + } + + // Check for bool, int, str and bytes literals that are never falsy. 
+ if (type.priv.literalValue !== undefined) { + if (ClassType.isBuiltIn(type, ['bool', 'int', 'str', 'bytes'])) { + return !!type.priv.literalValue && type.priv.literalValue !== BigInt(0); + } + + if (type.priv.literalValue instanceof EnumLiteral) { + // Does the Enum class forward the truthiness check to the + // underlying member type? + if (type.priv.literalValue.isReprEnum) { + return canBeTruthy(type.priv.literalValue.itemType, recursionCount); + } + } + } + + // If this is a protocol class, don't make any assumptions about the absence + // of specific methods. These could be provided by a class that conforms + // to the protocol. + if (ClassType.isProtocolClass(type)) { + return true; + } + + const boolMethod = lookUpObjectMember(type, '__bool__'); + if (boolMethod) { + const boolMethodType = getTypeOfMember(boolMethod); + + // If the __bool__ function unconditionally returns False, it can never be truthy. + if (isFunction(boolMethodType) && boolMethodType.shared.declaredReturnType) { + const returnType = boolMethodType.shared.declaredReturnType; + if ( + isClassInstance(returnType) && + ClassType.isBuiltIn(returnType, 'bool') && + returnType.priv.literalValue === false + ) { + return false; + } + } + } + + return true; + } + } + } + + // Filters a type such that that no part of it is definitely + // truthy. For example, if a type is a union of None + // and a custom class "Foo" that has no __len__ or __nonzero__ + // method, this method would strip off the "Foo" + // and return only the "None". 
+ function removeTruthinessFromType(type: Type): Type { + return mapSubtypes(type, (subtype) => { + const concreteSubtype = makeTopLevelTypeVarsConcrete(subtype); + + if (isClassInstance(concreteSubtype)) { + if (concreteSubtype.priv.literalValue !== undefined) { + let isLiteralFalsy: boolean; + + if (concreteSubtype.priv.literalValue instanceof EnumLiteral) { + isLiteralFalsy = !canBeTruthy(concreteSubtype); + } else { + isLiteralFalsy = !concreteSubtype.priv.literalValue; + } + + // If the object is already definitely falsy, it's fine to + // include, otherwise it should be removed. + return isLiteralFalsy ? subtype : undefined; + } + + // If the object is a sentinel, we can eliminate it. + if (isSentinelLiteral(concreteSubtype)) { + return undefined; + } + + // If the object is a bool, make it "false", since + // "true" is a truthy value. + if (ClassType.isBuiltIn(concreteSubtype, 'bool')) { + return ClassType.cloneWithLiteral(concreteSubtype, /* value */ false); + } + + // If the object is an int, str or bytes, narrow to a literal type. + // This is slightly unsafe in that someone could subclass `int`, `str` + // or `bytes` and override the `__bool__` method to change its behavior, + // but this is extremely unlikely (and ill advised). + if (ClassType.isBuiltIn(concreteSubtype, 'int')) { + return ClassType.cloneWithLiteral(concreteSubtype, /* value */ 0); + } else if (ClassType.isBuiltIn(concreteSubtype, ['str', 'bytes'])) { + return ClassType.cloneWithLiteral(concreteSubtype, /* value */ ''); + } + } + + // If it's possible for the type to be falsy, include it. + if (canBeFalsy(subtype)) { + return subtype; + } + + return undefined; + }); + } + + // Filters a type such that that no part of it is definitely + // falsy. For example, if a type is a union of None + // and an "int", this method would strip off the "None" + // and return only the "int". 
+ function removeFalsinessFromType(type: Type): Type { + return mapSubtypes(type, (subtype) => { + const concreteSubtype = makeTopLevelTypeVarsConcrete(subtype); + + if (isClassInstance(concreteSubtype)) { + if (concreteSubtype.priv.literalValue !== undefined) { + let isLiteralTruthy: boolean; + + if (concreteSubtype.priv.literalValue instanceof EnumLiteral) { + isLiteralTruthy = !canBeFalsy(concreteSubtype); + } else if (concreteSubtype.priv.literalValue instanceof SentinelLiteral) { + isLiteralTruthy = true; + } else { + isLiteralTruthy = !!concreteSubtype.priv.literalValue; + } + + // If the object is already definitely truthy, it's fine to + // include, otherwise it should be removed. + return isLiteralTruthy ? subtype : undefined; + } + + // If the object is a bool, make it "true", since + // "false" is a falsy value. + if (ClassType.isBuiltIn(concreteSubtype, 'bool')) { + return ClassType.cloneWithLiteral(concreteSubtype, /* value */ true); + } + + // If the object is a "None" instance, we can eliminate it. + if (isNoneInstance(concreteSubtype)) { + return undefined; + } + + // If this is an instance of a class that cannot be subclassed, + // we cannot say definitively that it's not falsy because a subclass + // could override `__bool__`. For this reason, the code should not + // remove any classes that are not final. + // if (!ClassType.isFinal(concreteSubtype)) { + // return subtype; + // } + // However, we're going to pragmatically assume that any classes + // other than `object` will not be overridden in this manner. + if (ClassType.isBuiltIn(concreteSubtype, 'object')) { + return subtype; + } + } + + // If it's possible for the type to be truthy, include it. + if (canBeTruthy(subtype)) { + return subtype; + } + + return undefined; + }); + } + + // If a type contains a TypeGuard or TypeIs, convert it to a bool. 
+ function stripTypeGuard(type: Type): Type { + return mapSubtypes(type, (subtype) => { + if (isClassInstance(subtype) && ClassType.isBuiltIn(subtype, ['TypeGuard', 'TypeIs'])) { + return prefetched?.boolClass ? convertToInstance(prefetched.boolClass) : UnknownType.create(); + } + + return subtype; + }); + } + + function solveAndApplyConstraints( + type: Type, + constraints: ConstraintTracker, + applyOptions?: ApplyTypeVarOptions, + solveOptions?: SolveConstraintsOptions + ): Type { + const solution = solveConstraints(evaluatorInterface, constraints, solveOptions); + return applySolvedTypeVars(type, solution, applyOptions); + } + + // Gets a member type from an object or class. If it's a function, binds + // it to the object or class. If selfType is undefined, the binding is done + // using the objectType parameter. Callers can specify these separately + // to handle the case where we're fetching the object member from a + // metaclass but binding to the class. + function getTypeOfBoundMember( + errorNode: ExpressionNode | undefined, + objectType: ClassType, + memberName: string, + usage: EvaluatorUsage = { method: 'get' }, + diag: DiagnosticAddendum | undefined = undefined, + flags = MemberAccessFlags.Default, + selfType?: ClassType | TypeVarType, + recursionCount = 0 + ): TypeResult | undefined { + if (ClassType.isPartiallyEvaluated(objectType)) { + if (errorNode) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.classDefinitionCycle().format({ name: objectType.shared.name }), + errorNode + ); + } + return { type: UnknownType.create() }; + } + + // If this is an unspecialized generic class, specialize it using the default + // values for its type parameters. + if ( + isInstantiableClass(objectType) && + !objectType.priv.includeSubclasses && + objectType.shared.typeParams.length > 0 + ) { + // Skip this if we're suppressing the use of attribute access override, + // such as with dundered methods (like __call__). 
+ if ((flags & MemberAccessFlags.SkipAttributeAccessOverride) === 0) { + objectType = specializeWithDefaultTypeArgs(objectType); + } + } + + // Determine the class that was used to instantiate the objectType. + // If the objectType is a class itself, then the class used to instantiate + // it is the metaclass. + const objectTypeIsInstantiable = TypeBase.isInstantiable(objectType); + const metaclass = objectType.shared.effectiveMetaclass; + + let memberInfo: ClassMemberLookup | undefined; + + // If the object type is an instantiable (i.e. it derives from "type") and + // we've been asked not to consider instance members, don't look in the class. + // Consider only the metaclass class variables in this case. + let skipObjectTypeLookup = objectTypeIsInstantiable && (flags & MemberAccessFlags.SkipInstanceMembers) !== 0; + + // Look up the attribute in the metaclass first. If the member is a descriptor + // (an object with a __get__ and __set__ method) and the access is a 'get', + // the Python runtime uses this descriptor to satisfy the lookup. Skip this + // costly lookup in the common case where the metaclass is 'type' since we know + // that `type` doesn't have any attributes that are descriptors. 
+ if ( + usage.method === 'get' && + objectTypeIsInstantiable && + metaclass && + isInstantiableClass(metaclass) && + !ClassType.isBuiltIn(metaclass, 'type') && + !ClassType.isSameGenericClass(metaclass, objectType) + ) { + const descMemberInfo = getTypeOfClassMemberName( + errorNode, + metaclass, + memberName, + usage, + /* diag */ undefined, + flags | MemberAccessFlags.SkipAttributeAccessOverride | MemberAccessFlags.SkipTypedDictEntries, + objectType, + recursionCount + ); + + if (descMemberInfo) { + const isProperty = + isClassInstance(descMemberInfo.type) && ClassType.isPropertyClass(descMemberInfo.type); + if (isDescriptorInstance(descMemberInfo.type, /* requireSetter */ true) || isProperty) { + skipObjectTypeLookup = true; + } + } + } + + let subDiag: DiagnosticAddendum | undefined; + + if (!skipObjectTypeLookup) { + let effectiveFlags = flags | MemberAccessFlags.SkipTypedDictEntries; + + if (objectTypeIsInstantiable) { + effectiveFlags |= + MemberAccessFlags.SkipInstanceMembers | + MemberAccessFlags.SkipAttributeAccessOverride | + MemberAccessFlags.DisallowGenericInstanceVariableAccess; + effectiveFlags &= ~MemberAccessFlags.SkipClassMembers; + } else { + effectiveFlags |= MemberAccessFlags.DisallowClassVarWrites; + } + + subDiag = diag ? new DiagnosticAddendum() : undefined; + + // See if the member is present in the object itself. + memberInfo = getTypeOfClassMemberName( + errorNode, + objectType, + memberName, + usage, + subDiag, + effectiveFlags, + selfType, + recursionCount + ); + } + + // If it wasn't found on the object, see if it's part of the metaclass. + if (!memberInfo && metaclass && isInstantiableClass(metaclass)) { + let effectiveFlags = flags; + + // Class members cannot be accessed on a class's metaclass through + // an instance of a class. Limit access to metaclass instance members + // in this case. 
+ if (!objectTypeIsInstantiable) { + effectiveFlags |= + MemberAccessFlags.SkipClassMembers | + MemberAccessFlags.SkipAttributeAccessOverride | + MemberAccessFlags.SkipTypeBaseClass; + effectiveFlags &= ~MemberAccessFlags.SkipInstanceMembers; + } + + const metaclassDiag = diag ? new DiagnosticAddendum() : undefined; + memberInfo = getTypeOfClassMemberName( + errorNode, + ClassType.cloneAsInstance(metaclass), + memberName, + usage, + metaclassDiag, + effectiveFlags, + objectTypeIsInstantiable ? objectType : ClassType.cloneAsInstantiable(objectType), + recursionCount + ); + + // If there was a descriptor error (as opposed to an error where the members + // was simply not found), use this diagnostic message. + if (memberInfo?.isDescriptorError) { + subDiag = metaclassDiag; + } + } + + if (memberInfo) { + if (memberInfo.isDescriptorError && diag && subDiag) { + diag.addAddendum(subDiag); + } + + return { + type: memberInfo.type, + classType: memberInfo.classType, + isIncomplete: !!memberInfo.isTypeIncomplete, + isAsymmetricAccessor: memberInfo.isAsymmetricAccessor, + narrowedTypeForSet: memberInfo.narrowedTypeForSet, + memberAccessDeprecationInfo: memberInfo.memberAccessDeprecationInfo, + typeErrors: memberInfo.isDescriptorError, + }; + } + + // If this is a type[Any] or type[Unknown], allow any other members. + if ( + isClassInstance(objectType) && + ClassType.isBuiltIn(objectType, 'type') && + objectType.priv.includeSubclasses + ) { + if ((flags & (MemberAccessFlags.SkipTypeBaseClass | MemberAccessFlags.SkipAttributeAccessOverride)) === 0) { + const typeArg = + objectType.priv.typeArgs && objectType.priv.typeArgs.length >= 1 + ? 
objectType.priv.typeArgs[0] + : UnknownType.create(); + + if (isAnyOrUnknown(typeArg)) { + return { type: typeArg, classType: UnknownType.create() }; + } + } + } + + if (diag && subDiag) { + diag.addAddendum(subDiag); + } + + return undefined; + } + + function getBoundMagicMethod( + classType: ClassType, + memberName: string, + selfType?: ClassType | TypeVarType | undefined, + errorNode?: ExpressionNode | undefined, + diag?: DiagnosticAddendum, + recursionCount = 0 + ): FunctionType | OverloadedType | undefined { + const boundMethodResult = getTypeOfBoundMember( + errorNode, + classType, + memberName, + /* usage */ undefined, + diag, + MemberAccessFlags.SkipInstanceMembers | MemberAccessFlags.SkipAttributeAccessOverride, + selfType, + recursionCount + ); + + if (!boundMethodResult || boundMethodResult.typeErrors) { + return undefined; + } + + if (isFunctionOrOverloaded(boundMethodResult.type)) { + return boundMethodResult.type; + } + + if (isClassInstance(boundMethodResult.type)) { + if (recursionCount > maxTypeRecursionCount) { + return undefined; + } + recursionCount++; + + return getBoundMagicMethod( + boundMethodResult.type, + '__call__', + /* selfType */ undefined, + errorNode, + diag, + recursionCount + ); + } + + if (isAnyOrUnknown(boundMethodResult.type)) { + return getUnknownTypeForCallable(); + } + + return undefined; + } + + // Returns the signature(s) associated with a call node that contains + // the specified node. It also returns the index of the argument + // that contains the node. + function getCallSignatureInfo( + callNode: CallNode, + activeIndex: number, + activeOrFake: boolean + ): CallSignatureInfo | undefined { + const exprNode = callNode.d.leftExpr; + const callType = getType(exprNode); + if (!callType) { + return undefined; + } + + const argList: Arg[] = []; + let previousCategory = ArgCategory.Simple; + + // Empty arguments do not enter the AST as nodes, but instead are left blank. 
+ // Instead, we detect when we appear to be between two known arguments or at the + // end of the argument list and insert a fake argument of an unknown type to have + // something to match later. + function addFakeArg() { + argList.push({ + argCategory: previousCategory, + typeResult: { type: UnknownType.create() }, + active: true, + }); + } + + callNode.d.args.forEach((arg, index) => { + let active = false; + if (index === activeIndex) { + if (activeOrFake) { + active = true; + } else { + addFakeArg(); + } + } + + previousCategory = arg.d.argCategory; + + argList.push({ + valueExpression: arg.d.valueExpr, + argCategory: arg.d.argCategory, + name: arg.d.name, + active: active, + }); + }); + + if (callNode.d.args.length < activeIndex) { + addFakeArg(); + } + + let signatures: CallSignature[] = []; + + function addOneFunctionToSignature(type: FunctionType) { + let callResult: CallResult | undefined; + + useSpeculativeMode(callNode, () => { + callResult = validateArgs( + exprNode, + argList, + { type }, + /* constraints */ undefined, + /* skipUnknownArgCheck */ true, + /* inferenceContext */ undefined + ); + }); + + signatures.push({ + type: expandTypedKwargs(type), + activeParam: callResult?.activeParam, + }); + } + + function addFunctionToSignature(type: FunctionType | OverloadedType) { + if (isFunction(type)) { + addOneFunctionToSignature(type); + } else { + OverloadedType.getOverloads(type).forEach((func) => { + addOneFunctionToSignature(func); + }); + } + } + + doForEachSubtype(callType, (subtype) => { + switch (subtype.category) { + case TypeCategory.Function: + case TypeCategory.Overloaded: { + addFunctionToSignature(subtype); + break; + } + + case TypeCategory.Class: { + if (TypeBase.isInstantiable(subtype)) { + const constructorType = createFunctionFromConstructor(evaluatorInterface, subtype); + + if (constructorType) { + doForEachSubtype(constructorType, (subtype) => { + if (isFunctionOrOverloaded(subtype)) { + addFunctionToSignature(subtype); + } + }); + 
+ // It's common for either the `__new__` or `__init__` methods to be + // simple (*args: Any, **kwargs: Any) signatures. If so, we'll try + // to filter out these signatures if they add nothing of value. + const filteredSignatures = signatures.filter( + (sig) => + !FunctionType.isGradualCallableForm(sig.type) || + sig.type.shared.parameters.length > 2 || + sig.type.shared.docString || + sig.type.shared.deprecatedMessage + ); + + if (filteredSignatures.length > 0) { + signatures = filteredSignatures; + } + } + } else { + const methodType = getBoundMagicMethod(subtype, '__call__'); + if (methodType) { + addFunctionToSignature(methodType); + } + } + break; + } + } + }); + + if (signatures.length === 0) { + return undefined; + } + + return { callNode, signatures }; + } + + // If the function includes a `**kwargs: Unpack[TypedDict]` parameter, the + // parameter is expanded to include individual keyword args. + function expandTypedKwargs(functionType: FunctionType): FunctionType { + const kwargsIndex = functionType.shared.parameters.findIndex( + (param) => param.category === ParamCategory.KwargsDict + ); + if (kwargsIndex < 0) { + return functionType; + } + assert(kwargsIndex === functionType.shared.parameters.length - 1); + + const kwargsType = FunctionType.getParamType(functionType, kwargsIndex); + if (!isClassInstance(kwargsType) || !ClassType.isTypedDictClass(kwargsType) || !kwargsType.priv.isUnpacked) { + return functionType; + } + + const tdEntries = kwargsType.priv.typedDictNarrowedEntries ?? 
kwargsType.shared.typedDictEntries?.knownItems; + if (!tdEntries) { + return functionType; + } + + const newFunction = FunctionType.clone(functionType); + newFunction.shared.parameters.splice(kwargsIndex); + if (newFunction.priv.specializedTypes) { + newFunction.priv.specializedTypes.parameterTypes.splice(kwargsIndex); + } + + const kwSeparatorIndex = functionType.shared.parameters.findIndex( + (param) => param.category === ParamCategory.ArgsList + ); + + // Add a keyword separator if necessary. + if (kwSeparatorIndex < 0 && tdEntries.size > 0) { + FunctionType.addKeywordOnlyParamSeparator(newFunction); + } + + tdEntries.forEach((tdEntry, name) => { + FunctionType.addParam( + newFunction, + FunctionParam.create( + ParamCategory.Simple, + tdEntry.valueType, + FunctionParamFlags.TypeDeclared, + name, + tdEntry.isRequired ? undefined : tdEntry.valueType + ) + ); + }); + + const extraItemsType = kwargsType.shared.typedDictEntries?.extraItems?.valueType; + + if (extraItemsType && !isNever(extraItemsType)) { + FunctionType.addParam( + newFunction, + FunctionParam.create( + ParamCategory.KwargsDict, + extraItemsType, + FunctionParamFlags.TypeDeclared, + 'kwargs' + ) + ); + } + + return newFunction; + } + + // Determines whether the specified expression is an explicit TypeAlias declaration. + function isDeclaredTypeAlias(expression: ExpressionNode): boolean { + if (expression.nodeType === ParseNodeType.TypeAnnotation) { + if (expression.d.valueExpr.nodeType === ParseNodeType.Name) { + const symbolWithScope = lookUpSymbolRecursive( + expression, + expression.d.valueExpr.d.value, + /* honorCodeFlow */ false + ); + if (symbolWithScope) { + const symbol = symbolWithScope.symbol; + return symbol.getDeclarations().find((decl) => isExplicitTypeAliasDeclaration(decl)) !== undefined; + } + } + } + + return false; + } + + // Determines whether the specified expression is a symbol with a declared type. 
+ function getDeclaredTypeForExpression(expression: ExpressionNode, usage?: EvaluatorUsage): Type | undefined { + let symbol: Symbol | undefined; + let selfType: ClassType | TypeVarType | undefined; + let classOrObjectBase: ClassType | undefined; + let memberAccessClass: Type | undefined; + let bindFunction = true; + let useDescriptorSetterType = false; + + switch (expression.nodeType) { + case ParseNodeType.Name: { + const symbolWithScope = lookUpSymbolRecursive(expression, expression.d.value, /* honorCodeFlow */ true); + if (symbolWithScope) { + symbol = symbolWithScope.symbol; + + // Handle the case where the symbol is a class-level variable + // where the type isn't declared in this class but is in + // a parent class. + if ( + !getDeclaredTypeOfSymbol(symbol, expression)?.type && + symbolWithScope.scope.type === ScopeType.Class + ) { + const enclosingClass = ParseTreeUtils.getEnclosingClassOrFunction(expression); + if (enclosingClass && enclosingClass.nodeType === ParseNodeType.Class) { + const classTypeInfo = getTypeOfClass(enclosingClass); + if (classTypeInfo) { + const classMemberInfo = lookUpClassMember( + classTypeInfo.classType, + expression.d.value, + MemberAccessFlags.SkipInstanceMembers | MemberAccessFlags.DeclaredTypesOnly + ); + if (classMemberInfo) { + symbol = classMemberInfo.symbol; + } + } + } + } + } + break; + } + + case ParseNodeType.TypeAnnotation: { + return getDeclaredTypeForExpression(expression.d.valueExpr, usage); + } + + case ParseNodeType.MemberAccess: { + const baseType = getTypeOfExpression(expression.d.leftExpr, EvalFlags.MemberAccessBaseDefaults).type; + const baseTypeConcrete = makeTopLevelTypeVarsConcrete(baseType); + const memberName = expression.d.member.d.value; + + // Normally, baseTypeConcrete will not be a composite type (a union), + // but this can occur. In this case, it's not clear how to handle this + // correctly. For now, we'll just loop through the subtypes and + // use one of them. 
We'll sort the subtypes for determinism. + doForEachSubtype( + baseTypeConcrete, + (baseSubtype) => { + if (isClassInstance(baseSubtype)) { + const classMemberInfo = lookUpObjectMember( + baseSubtype, + memberName, + MemberAccessFlags.DeclaredTypesOnly + ); + + classOrObjectBase = baseSubtype; + memberAccessClass = classMemberInfo?.classType; + symbol = classMemberInfo?.symbol; + useDescriptorSetterType = true; + + // If this is an instance member (e.g. a dataclass field), don't + // bind it to the object if it's a function. + bindFunction = !classMemberInfo?.isInstanceMember; + } else if (isInstantiableClass(baseSubtype)) { + const classMemberInfo = lookUpClassMember( + baseSubtype, + memberName, + MemberAccessFlags.SkipInstanceMembers | MemberAccessFlags.DeclaredTypesOnly + ); + + classOrObjectBase = baseSubtype; + memberAccessClass = classMemberInfo?.classType; + symbol = classMemberInfo?.symbol; + useDescriptorSetterType = false; + bindFunction = true; + } else if (isModule(baseSubtype)) { + classOrObjectBase = undefined; + memberAccessClass = undefined; + symbol = ModuleType.getField(baseSubtype, memberName); + if (symbol && !symbol.hasTypedDeclarations()) { + // Do not use inferred types for the declared type. 
+ symbol = undefined; + } + useDescriptorSetterType = false; + bindFunction = false; + } + }, + /* sortSubtypes */ true + ); + + if (isTypeVar(baseType)) { + selfType = baseType; + } + break; + } + + case ParseNodeType.Index: { + const baseType = makeTopLevelTypeVarsConcrete( + getTypeOfExpression(expression.d.leftExpr, EvalFlags.IndexBaseDefaults).type + ); + + if (baseType && isClassInstance(baseType)) { + if (ClassType.isTypedDictClass(baseType)) { + const typeFromTypedDict = getTypeOfIndexedTypedDict( + evaluatorInterface, + expression, + baseType, + usage || { method: 'get' } + ); + if (typeFromTypedDict) { + return typeFromTypedDict.type; + } + } + + let setItemType = getBoundMagicMethod(baseType, '__setitem__'); + if (!setItemType) { + break; + } + + if (isOverloaded(setItemType)) { + // Determine whether we need to use the slice overload. + const expectsSlice = + expression.d.items.length === 1 && + expression.d.items[0].d.valueExpr.nodeType === ParseNodeType.Slice; + const overloads = OverloadedType.getOverloads(setItemType); + setItemType = overloads.find((overload) => { + if (overload.shared.parameters.length < 2) { + return false; + } + + const keyType = FunctionType.getParamType(overload, 0); + const isSlice = isClassInstance(keyType) && ClassType.isBuiltIn(keyType, 'slice'); + return expectsSlice === isSlice; + }); + + if (!setItemType) { + break; + } + } + + if (isFunction(setItemType) && setItemType.shared.parameters.length >= 2) { + const paramType = FunctionType.getParamType(setItemType, 1); + if (!isAnyOrUnknown(paramType)) { + return paramType; + } + } + } + break; + } + + case ParseNodeType.Tuple: { + // If this is a tuple expression with at least one item and no + // unpacked items, and all of the items have declared types, + // we can assume a declared type for the resulting tuple. This + // is needed to enable bidirectional type inference when assigning + // to an unpacked tuple. 
+ if ( + expression.d.items.length > 0 && + !expression.d.items.some((item) => item.nodeType === ParseNodeType.Unpack) + ) { + const itemTypes: Type[] = []; + expression.d.items.forEach((expr) => { + const itemType = getDeclaredTypeForExpression(expr, usage); + if (itemType) { + itemTypes.push(itemType); + } + }); + + if (itemTypes.length === expression.d.items.length) { + // If all items have a declared type, return a tuple of those types. + return makeTupleObject( + evaluatorInterface, + itemTypes.map((t) => { + return { type: t, isUnbounded: false }; + }) + ); + } + } + break; + } + } + + if (symbol) { + let declaredType = getDeclaredTypeOfSymbol(symbol)?.type; + if (declaredType) { + // If it's a descriptor, we need to get the setter type. + if (useDescriptorSetterType && isClassInstance(declaredType)) { + const setter = getBoundMagicMethod(declaredType, '__set__'); + if (setter && isFunction(setter) && setter.shared.parameters.length >= 2) { + declaredType = FunctionType.getParamType(setter, 1); + + if (isAnyOrUnknown(declaredType)) { + return undefined; + } + } + } + + if (classOrObjectBase) { + if (memberAccessClass && isInstantiableClass(memberAccessClass)) { + declaredType = partiallySpecializeType( + declaredType, + memberAccessClass, + getTypeClassType(), + selfType + ); + } + + if (isFunctionOrOverloaded(declaredType)) { + if (bindFunction) { + declaredType = bindFunctionToClassOrObject( + classOrObjectBase, + declaredType, + /* memberClass */ undefined, + /* treatConstructorAsClassMethod */ undefined, + selfType + ); + } + } + } + + return declaredType; + } + } + + return undefined; + } + + // Applies an "await" operation to the specified type and returns + // the result. According to PEP 492, await operates on an Awaitable + // (object that provides an __await__ that returns a generator object). + // If errorNode is undefined, no errors are reported. 
+ function getTypeOfAwaitable(typeResult: TypeResult, errorNode?: ExpressionNode): TypeResult { + if ( + !prefetched?.awaitableClass || + !isInstantiableClass(prefetched.awaitableClass) || + prefetched.awaitableClass.shared.typeParams.length !== 1 + ) { + return { type: UnknownType.create(), isIncomplete: typeResult.isIncomplete }; + } + + const awaitableProtocolObj = ClassType.cloneAsInstance(prefetched.awaitableClass); + const isIncomplete = !!typeResult.isIncomplete; + + const type = mapSubtypes(typeResult.type, (subtype) => { + subtype = makeTopLevelTypeVarsConcrete(subtype); + + if (isAnyOrUnknown(subtype)) { + return subtype; + } + + const diag = errorNode ? new DiagnosticAddendum() : undefined; + + if (isClassInstance(subtype)) { + const constraints = new ConstraintTracker(); + + if (assignType(awaitableProtocolObj, subtype, diag, constraints)) { + const specializedType = solveAndApplyConstraints(awaitableProtocolObj, constraints); + + if ( + isClass(specializedType) && + specializedType.priv.typeArgs && + specializedType.priv.typeArgs.length > 0 + ) { + return specializedType.priv.typeArgs[0]; + } + + return UnknownType.create(); + } + } + + if (errorNode && !typeResult.isIncomplete) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeNotAwaitable().format({ type: printType(subtype) }) + diag?.getString(), + errorNode + ); + } + + return UnknownType.create(); + }); + + return { type, isIncomplete }; + } + + // Validates that the type is an iterator and returns the iterated type + // (i.e. the type returned from the '__next__' or '__anext__' method). + function getTypeOfIterator( + typeResult: TypeResult, + isAsync: boolean, + errorNode: ExpressionNode, + emitNotIterableError = true + ): TypeResult | undefined { + const iterMethodName = isAsync ? '__aiter__' : '__iter__'; + const nextMethodName = isAsync ? 
'__anext__' : '__next__'; + let isValidIterator = true; + let isIncomplete = typeResult.isIncomplete; + + let type = transformPossibleRecursiveTypeAlias(typeResult.type); + type = makeTopLevelTypeVarsConcrete(type); + type = removeUnbound(type); + + if (isOptionalType(type) && emitNotIterableError) { + if (!typeResult.isIncomplete) { + addDiagnostic(DiagnosticRule.reportOptionalIterable, LocMessage.noneNotIterable(), errorNode); + } + type = removeNoneFromUnion(type); + } + + const iterableType = mapSubtypes(type, (subtype) => { + subtype = makeTopLevelTypeVarsConcrete(subtype); + + if (isAnyOrUnknown(subtype)) { + return subtype; + } + + const diag = new DiagnosticAddendum(); + if (isClass(subtype)) { + // Handle an empty tuple specially. + if ( + TypeBase.isInstance(subtype) && + isTupleClass(subtype) && + subtype.priv.tupleTypeArgs && + subtype.priv.tupleTypeArgs.length === 0 + ) { + return NeverType.createNever(); + } + + const iterReturnType = getTypeOfMagicMethodCall(subtype, iterMethodName, [], errorNode)?.type; + + if (!iterReturnType) { + // There was no __iter__. See if we can fall back to + // the __getitem__ method instead. + if (!isAsync && isClassInstance(subtype)) { + const getItemReturnType = getTypeOfMagicMethodCall( + subtype, + '__getitem__', + [ + { + type: + prefetched?.intClass && isInstantiableClass(prefetched.intClass) + ? 
ClassType.cloneAsInstance(prefetched.intClass) + : UnknownType.create(), + }, + ], + errorNode + )?.type; + if (getItemReturnType) { + return getItemReturnType; + } + } + + diag.addMessage(LocMessage.methodNotDefined().format({ name: iterMethodName })); + } else { + const iterReturnTypeDiag = new DiagnosticAddendum(); + + const returnType = mapSubtypesExpandTypeVars(iterReturnType, /* options */ undefined, (subtype) => { + if (isAnyOrUnknown(subtype)) { + return subtype; + } + + let nextReturnType = getTypeOfMagicMethodCall(subtype, nextMethodName, [], errorNode)?.type; + + if (!nextReturnType) { + iterReturnTypeDiag.addMessage( + LocMessage.methodNotDefinedOnType().format({ + name: nextMethodName, + type: printType(subtype), + }) + ); + } else { + // Convert any unpacked TypeVarTuples into object instances. We don't + // know anything more about them. + nextReturnType = mapSubtypes(nextReturnType, (returnSubtype) => { + if (isTypeVar(returnSubtype) && isUnpackedTypeVarTuple(returnSubtype)) { + return getObjectType(); + } + + return returnSubtype; + }); + + if (!isAsync) { + return nextReturnType; + } + + // If it's an async iteration, there's an implicit + // 'await' operator applied. + const awaitableResult = getTypeOfAwaitable( + { type: nextReturnType, isIncomplete: typeResult.isIncomplete }, + errorNode + ); + if (awaitableResult.isIncomplete) { + isIncomplete = true; + } + return awaitableResult.type; + } + + return undefined; + }); + + if (iterReturnTypeDiag.isEmpty()) { + return returnType; + } + + diag.addAddendum(iterReturnTypeDiag); + } + } + + if (!isIncomplete && emitNotIterableError) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeNotIterable().format({ type: printType(subtype) }) + diag.getString(), + errorNode + ); + } + + isValidIterator = false; + return undefined; + }); + + return isValidIterator ? 
{ type: iterableType, isIncomplete } : undefined; + } + + // Validates that the type is an iterable and returns the iterable type argument. + function getTypeOfIterable( + typeResult: TypeResult, + isAsync: boolean, + errorNode: ExpressionNode, + emitNotIterableError = true + ): TypeResult | undefined { + const iterMethodName = isAsync ? '__aiter__' : '__iter__'; + let isValidIterable = true; + + let type = makeTopLevelTypeVarsConcrete(typeResult.type); + + if (isOptionalType(type)) { + if (!typeResult.isIncomplete && emitNotIterableError) { + addDiagnostic(DiagnosticRule.reportOptionalIterable, LocMessage.noneNotIterable(), errorNode); + } + type = removeNoneFromUnion(type); + } + + const iterableType = mapSubtypes(type, (subtype) => { + if (isAnyOrUnknown(subtype)) { + return subtype; + } + + if (isClass(subtype)) { + const iterReturnType = getTypeOfMagicMethodCall(subtype, iterMethodName, [], errorNode)?.type; + + if (iterReturnType) { + return makeTopLevelTypeVarsConcrete(iterReturnType); + } + } + + if (emitNotIterableError) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeNotIterable().format({ type: printType(subtype) }), + errorNode + ); + } + + isValidIterable = false; + return undefined; + }); + + return isValidIterable ? { type: iterableType, isIncomplete: typeResult.isIncomplete } : undefined; + } + + function isTypeHashable(type: Type): boolean { + let isTypeHashable = true; + + doForEachSubtype(makeTopLevelTypeVarsConcrete(type), (subtype) => { + if (isClassInstance(subtype)) { + // Assume the class is hashable. + let isObjectHashable = true; + + // Have we already computed and cached the hashability? 
+ if (subtype.shared.isInstanceHashable !== undefined) { + isObjectHashable = subtype.shared.isInstanceHashable; + } else { + const hashMember = lookUpObjectMember(subtype, '__hash__', MemberAccessFlags.SkipObjectBaseClass); + + if (hashMember && hashMember.isTypeDeclared) { + const decls = hashMember.symbol.getTypedDeclarations(); + const synthesizedType = hashMember.symbol.getSynthesizedType(); + + // Handle the case where the type is synthesized (used for + // dataclasses). + if (synthesizedType) { + isObjectHashable = !isNoneInstance(synthesizedType.type); + } else { + // Assume that if '__hash__' is declared as a variable, it is + // not hashable. If it's declared as a function, it is. We'll + // skip evaluating its full type because that's not needed in + // this case. + if (decls.every((decl) => decl.type === DeclarationType.Variable)) { + isObjectHashable = false; + } + } + } + + // Cache the hashability for next time. + subtype.shared.isInstanceHashable = isObjectHashable; + } + + if (!isObjectHashable) { + isTypeHashable = false; + } + } + }); + + return isTypeHashable; + } + + function getTypedDictClassType(): ClassType | undefined { + return prefetched?.typedDictPrivateClass && isInstantiableClass(prefetched.typedDictPrivateClass) + ? prefetched.typedDictPrivateClass + : undefined; + } + + function getTupleClassType(): ClassType | undefined { + return prefetched?.tupleClass && isInstantiableClass(prefetched.tupleClass) ? prefetched.tupleClass : undefined; + } + + function getDictClassType(): ClassType | undefined { + return prefetched?.dictClass && isInstantiableClass(prefetched.dictClass) ? prefetched.dictClass : undefined; + } + + function getStrClassType(): ClassType | undefined { + return prefetched?.strClass && isInstantiableClass(prefetched.strClass) ? prefetched.strClass : undefined; + } + + function getObjectType(): Type { + return prefetched?.objectClass ? 
convertToInstance(prefetched.objectClass) : UnknownType.create(); + } + + function getNoneType(): Type { + return prefetched?.noneTypeClass ? convertToInstance(prefetched.noneTypeClass) : UnknownType.create(); + } + + function getUnionClassType(): Type { + return prefetched?.unionTypeClass ?? UnknownType.create(); + } + + function getTypeClassType(): ClassType | undefined { + if (prefetched?.typeClass && isInstantiableClass(prefetched.typeClass)) { + return prefetched.typeClass; + } + return undefined; + } + + function getTypingType(node: ParseNode, symbolName: string): Type | undefined { + return ( + getTypeOfModule(node, symbolName, ['typing']) ?? getTypeOfModule(node, symbolName, ['typing_extensions']) + ); + } + + function getTypeCheckerInternalsType(node: ParseNode, symbolName: string): Type | undefined { + return getTypeOfModule(node, symbolName, ['_typeshed', '_type_checker_internals']); + } + + function getTypesType(node: ParseNode, symbolName: string): Type | undefined { + return getTypeOfModule(node, symbolName, ['types']); + } + + function getTypeshedType(node: ParseNode, symbolName: string): Type | undefined { + return getTypeOfModule(node, symbolName, ['_typeshed']); + } + + function getTypeOfModule(node: ParseNode, symbolName: string, nameParts: string[]) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + const lookupResult = importLookup({ nameParts, importingFileUri: fileInfo.fileUri }); + + if (!lookupResult) { + return undefined; + } + + const symbol = lookupResult.symbolTable.get(symbolName); + if (!symbol) { + return undefined; + } + + return getEffectiveTypeOfSymbol(symbol); + } + + function checkCodeFlowTooComplex(node: ParseNode): boolean { + const scopeNode = node.nodeType === ParseNodeType.Function ? 
node : ParseTreeUtils.getExecutionScopeNode(node); + const codeComplexity = AnalyzerNodeInfo.getCodeFlowComplexity(scopeNode); + + if (codeComplexity > maxCodeComplexity) { + let errorRange: TextRange = scopeNode; + if (scopeNode.nodeType === ParseNodeType.Function) { + errorRange = scopeNode.d.name; + } else if (scopeNode.nodeType === ParseNodeType.Module) { + errorRange = { start: 0, length: 0 }; + } + + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + addDiagnosticForTextRange( + fileInfo, + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.codeTooComplexToAnalyze(), + errorRange + ); + + return true; + } + + return false; + } + + function isNodeReachable(node: ParseNode, sourceNode?: ParseNode): boolean { + return getNodeReachability(node, sourceNode) === Reachability.Reachable; + } + + function isAfterNodeReachable(node: ParseNode): boolean { + return getAfterNodeReachability(node) === Reachability.Reachable; + } + + function getNodeReachability(node: ParseNode, sourceNode?: ParseNode): Reachability { + if (checkCodeFlowTooComplex(node)) { + return Reachability.Reachable; + } + + const flowNode = AnalyzerNodeInfo.getFlowNode(node); + if (!flowNode) { + if (node.parent) { + return getNodeReachability(node.parent, sourceNode); + } + return Reachability.UnreachableStructural; + } + + const sourceFlowNode = sourceNode ? 
AnalyzerNodeInfo.getFlowNode(sourceNode) : undefined; + + return codeFlowEngine.getFlowNodeReachability(flowNode, sourceFlowNode); + } + + function getAfterNodeReachability(node: ParseNode): Reachability { + const returnFlowNode = AnalyzerNodeInfo.getAfterFlowNode(node); + if (!returnFlowNode) { + return Reachability.UnreachableStructural; + } + + if (checkCodeFlowTooComplex(node)) { + return Reachability.Reachable; + } + + const reachability = codeFlowEngine.getFlowNodeReachability(returnFlowNode); + if (reachability !== Reachability.Reachable) { + return reachability; + } + + const executionScopeNode = ParseTreeUtils.getExecutionScopeNode(node); + if (!isFlowNodeReachableUsingNeverNarrowing(executionScopeNode, returnFlowNode)) { + return Reachability.UnreachableByAnalysis; + } + + return Reachability.Reachable; + } + + // Although isFlowNodeReachable indicates that the node is reachable, it + // may not be reachable if we apply "never narrowing". + function isFlowNodeReachableUsingNeverNarrowing(node: ExecutionScopeNode, flowNode: FlowNode) { + const analyzer = getCodeFlowAnalyzerForNode(node, /* typeAtStart */ undefined); + + if (checkCodeFlowTooComplex(node)) { + return true; + } + + const codeFlowResult = analyzer.getTypeFromCodeFlow(flowNode, /* reference */ undefined, { + typeAtStart: { type: UnboundType.create() }, + }); + + return codeFlowResult.type !== undefined && !isNever(codeFlowResult.type); + } + + // Determines whether there is a code flow path from sourceNode to sinkNode. 
+ function isFlowPathBetweenNodes(sourceNode: ParseNode, sinkNode: ParseNode, allowSelf = true) { + if (checkCodeFlowTooComplex(sourceNode)) { + return true; + } + + const sourceFlowNode = AnalyzerNodeInfo.getFlowNode(sourceNode); + const sinkFlowNode = AnalyzerNodeInfo.getFlowNode(sinkNode); + if (!sourceFlowNode || !sinkFlowNode) { + return false; + } + if (sourceFlowNode === sinkFlowNode) { + return allowSelf; + } + + return ( + codeFlowEngine.getFlowNodeReachability(sinkFlowNode, sourceFlowNode, /* ignoreNoReturn */ true) === + Reachability.Reachable + ); + } + + function addInformation(message: string, node: ParseNode, range?: TextRange) { + return addDiagnosticWithSuppressionCheck('information', message, node, range); + } + + function addUnreachableCode(node: ParseNode, reachability: Reachability, textRange: TextRange) { + if (reachability === Reachability.Reachable) { + return; + } + + if (!isDiagnosticSuppressedForNode(node)) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + const reportTypeReachability = fileInfo.diagnosticRuleSet.enableReachabilityAnalysis; + + if ( + reachability === Reachability.UnreachableStructural || + reachability === Reachability.UnreachableStaticCondition || + reportTypeReachability + ) { + fileInfo.diagnosticSink.addUnreachableCodeWithTextRange( + reachability === Reachability.UnreachableStructural + ? LocMessage.unreachableCodeStructure() + : reachability === Reachability.UnreachableStaticCondition + ? 
LocMessage.unreachableCodeCondition() + : LocMessage.unreachableCodeType(), + textRange + ); + } + } + } + + function addDeprecated(message: string, node: ParseNode) { + if (!isDiagnosticSuppressedForNode(node)) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + fileInfo.diagnosticSink.addDeprecatedWithTextRange(message, node); + } + } + + function addDiagnosticWithSuppressionCheck( + diagLevel: DiagnosticLevel, + message: string, + node: ParseNode, + range?: TextRange + ) { + if (isDiagnosticSuppressedForNode(node)) { + // See if this node is suppressed but the diagnostic should be generated + // anyway so it can be used by the caller that requested the suppression. + const suppressionEntry = suppressedNodeStack.find( + (suppressedNode) => + ParseTreeUtils.isNodeContainedWithin(node, suppressedNode.node) && suppressedNode.suppressedDiags + ); + suppressionEntry?.suppressedDiags?.push(message); + + return undefined; + } + + if (isNodeReachable(node)) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + return fileInfo.diagnosticSink.addDiagnosticWithTextRange(diagLevel, message, range ?? node); + } + + return undefined; + } + + function isDiagnosticSuppressedForNode(node: ParseNode) { + if (speculativeTypeTracker.isSpeculative(node, /* ignoreIfDiagnosticsAllowed */ true)) { + return true; + } + + return suppressedNodeStack.some((suppressedNode) => + ParseTreeUtils.isNodeContainedWithin(node, suppressedNode.node) + ); + } + + // This function is similar to isDiagnosticSuppressedForNode except that it + // returns false if diagnostics are suppressed for the node but the caller + // has requested that diagnostics be generated anyway. 
+ function canSkipDiagnosticForNode(node: ParseNode) { + if (speculativeTypeTracker.isSpeculative(node, /* ignoreIfDiagnosticsAllowed */ true)) { + return true; + } + + const suppressedEntries = suppressedNodeStack.filter((suppressedNode) => + ParseTreeUtils.isNodeContainedWithin(node, suppressedNode.node) + ); + + if (suppressedEntries.length === 0) { + return false; + } + + return suppressedEntries.every((entry) => !entry.suppressedDiags); + } + + function addDiagnostic(rule: DiagnosticRule, message: string, node: ParseNode, range?: TextRange) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + const diagLevel = fileInfo.diagnosticRuleSet[rule] as DiagnosticLevel; + + if (diagLevel === 'none') { + return undefined; + } + + const containingFunction = ParseTreeUtils.getEnclosingFunction(node); + + if (containingFunction) { + // Should we suppress this diagnostic because it's within an unannotated function? + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + if (!fileInfo.diagnosticRuleSet.analyzeUnannotatedFunctions) { + // Is the target node within the body of the function? If so, suppress the diagnostic. + if ( + ParseTreeUtils.isUnannotatedFunction(containingFunction) && + ParseTreeUtils.isNodeContainedWithin(node, containingFunction.d.suite) + ) { + return undefined; + } + } + + // Should we suppress this diagnostic because it's within a no_type_check function? 
+ const containingClassNode = ParseTreeUtils.getEnclosingClass(containingFunction, /* stopAtFunction */ true); + const functionInfo = getFunctionInfoFromDecorators( + evaluatorInterface, + containingFunction, + !!containingClassNode + ); + + if ((functionInfo.flags & FunctionTypeFlags.NoTypeCheck) !== 0) { + return undefined; + } + } + + const diagnostic = addDiagnosticWithSuppressionCheck(diagLevel, message, node, range); + if (diagnostic) { + diagnostic.setRule(rule); + } + + return diagnostic; + } + + function addDiagnosticForTextRange( + fileInfo: AnalyzerFileInfo, + rule: DiagnosticRule, + message: string, + range: TextRange + ) { + const diagLevel = fileInfo.diagnosticRuleSet[rule] as DiagnosticLevel; + + if (diagLevel === 'none') { + return undefined; + } + + const diagnostic = fileInfo.diagnosticSink.addDiagnosticWithTextRange(diagLevel, message, range); + if (rule) { + diagnostic.setRule(rule); + } + + return diagnostic; + } + + function assignTypeToNameNode( + nameNode: NameNode, + typeResult: TypeResult, + ignoreEmptyContainers: boolean, + srcExpression?: ParseNode, + allowAssignmentToFinalVar = false, + expectedTypeDiagAddendum?: DiagnosticAddendum + ) { + const nameValue = nameNode.d.value; + + const symbolWithScope = lookUpSymbolRecursive(nameNode, nameValue, /* honorCodeFlow */ false); + if (!symbolWithScope) { + // This can happen when we are evaluating a piece of code that was + // determined to be unreachable by the binder. + return; + } + + const declarations = symbolWithScope.symbol.getDeclarations(); + let declaredType = getDeclaredTypeOfSymbol(symbolWithScope.symbol)?.type; + const fileInfo = AnalyzerNodeInfo.getFileInfo(nameNode); + + // If this is a class scope and there is no type declared for this class variable, + // see if a parent class has a type declared. 
+ if (declaredType === undefined && symbolWithScope.scope.type === ScopeType.Class) { + const containingClass = ParseTreeUtils.getEnclosingClass(nameNode); + if (containingClass) { + const classType = getTypeOfClass(containingClass); + if (classType) { + const memberInfo = lookUpClassMember( + classType.classType, + nameNode.d.value, + MemberAccessFlags.SkipOriginalClass + ); + if (memberInfo?.isTypeDeclared) { + declaredType = getTypeOfMember(memberInfo); + } + } + } + } + + // We found an existing declared type. Make sure the type is assignable. + let destType = typeResult.type; + const isTypeAlias = + !!declaredType && isClassInstance(declaredType) && ClassType.isBuiltIn(declaredType, 'TypeAlias'); + + if (declaredType && !isTypeAlias) { + let diagAddendum = new DiagnosticAddendum(); + + const liveScopeIds = ParseTreeUtils.getTypeVarScopesForNode(nameNode); + const boundDeclaredType = makeTypeVarsBound(declaredType, liveScopeIds); + const srcType = makeTypeVarsBound(typeResult.type, liveScopeIds); + + if (!assignType(boundDeclaredType, srcType, diagAddendum)) { + // If there was an expected type mismatch, use that diagnostic + // addendum because it will be more informative. + if (expectedTypeDiagAddendum) { + diagAddendum = expectedTypeDiagAddendum; + } + + if (!typeResult.isIncomplete) { + addDiagnostic( + DiagnosticRule.reportAssignmentType, + LocMessage.typeAssignmentMismatch().format(printSrcDestTypes(typeResult.type, declaredType)) + + diagAddendum.getString(), + srcExpression ?? nameNode, + diagAddendum.getEffectiveTextRange() ?? srcExpression ?? nameNode + ); + } + + // Replace the assigned type with the (unnarrowed) declared type. + destType = declaredType; + } else { + // Constrain the resulting type to match the declared type. + destType = narrowTypeBasedOnAssignment(declaredType, typeResult).type; + } + } else { + // If this is a member name (within a class scope) and the member name + // appears to be a constant, use the strict source type. 
If it's a member + // variable that can be overridden by a child class, use the more general + // version by stripping off the literal and TypeForm. + const scope = ScopeUtils.getScopeForNode(nameNode); + if (scope?.type === ScopeType.Class) { + if ( + TypeBase.isInstance(destType) && + !isConstantName(nameValue) && + !isFinalVariable(symbolWithScope.symbol) + ) { + destType = stripTypeForm(stripLiteralValue(destType)); + } + } + } + + const varDeclIndex = declarations.findIndex((decl) => decl.type === DeclarationType.Variable); + const varDecl = varDeclIndex >= 0 ? declarations[varDeclIndex] : undefined; + + // Are there any non-var decls before the var decl? + const nonVarDecl = declarations.find( + (decl, index) => varDeclIndex < index && decl.type !== DeclarationType.Variable + ); + + if (varDecl && varDecl.type === DeclarationType.Variable) { + if (varDecl.isConstant) { + // A constant variable can be assigned only once. If this + // isn't the first assignment, generate an error. + if (nameNode !== getNameNodeForDeclaration(declarations[0]) || !!nonVarDecl) { + addDiagnostic( + DiagnosticRule.reportConstantRedefinition, + LocMessage.constantRedefinition().format({ name: nameValue }), + nameNode + ); + } + } else if (isFinalVariableDeclaration(varDecl) && !allowAssignmentToFinalVar) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.finalReassigned().format({ name: nameValue }), + nameNode + ); + } + } + + if (!typeResult.isIncomplete) { + reportPossibleUnknownAssignment( + fileInfo.diagnosticRuleSet.reportUnknownVariableType, + DiagnosticRule.reportUnknownVariableType, + nameNode, + typeResult.type, + nameNode, + ignoreEmptyContainers + ); + } + + writeTypeCache(nameNode, { type: destType, isIncomplete: typeResult.isIncomplete }, EvalFlags.None); + } + + function assignTypeToMemberAccessNode( + target: MemberAccessNode, + typeResult: TypeResult, + srcExpr?: ExpressionNode, + expectedTypeDiagAddendum?: DiagnosticAddendum + ) { + const 
baseTypeResult = getTypeOfExpression(target.d.leftExpr, EvalFlags.MemberAccessBaseDefaults); + const baseType = makeTopLevelTypeVarsConcrete(baseTypeResult.type); + let enclosingClass: ClassType | undefined; + + // Handle member accesses (e.g. self.x or cls.y). + if (target.d.leftExpr.nodeType === ParseNodeType.Name) { + // Determine whether we're writing to a class or instance member. + const enclosingClassNode = ParseTreeUtils.getEnclosingClass(target); + + if (enclosingClassNode) { + const classTypeResults = getTypeOfClass(enclosingClassNode); + + if (classTypeResults && isInstantiableClass(classTypeResults.classType)) { + enclosingClass = classTypeResults.classType; + + if (isClassInstance(baseType)) { + if ( + ClassType.isSameGenericClass( + ClassType.cloneAsInstantiable(baseType), + classTypeResults.classType + ) + ) { + assignTypeToMemberVariable(target, typeResult, /* isInstanceMember */ true, srcExpr); + } + } else if (isInstantiableClass(baseType)) { + if (ClassType.isSameGenericClass(baseType, classTypeResults.classType)) { + assignTypeToMemberVariable(target, typeResult, /* isInstanceMember */ false, srcExpr); + } + } + + // Assignments to instance or class variables through "self" or "cls" is not + // allowed for protocol classes unless it is also declared within the class. 
+ if (ClassType.isProtocolClass(classTypeResults.classType)) { + const memberSymbol = ClassType.getSymbolTable(classTypeResults.classType).get( + target.d.member.d.value + ); + if (memberSymbol) { + const classLevelDecls = memberSymbol.getDeclarations().filter((decl) => { + return !ParseTreeUtils.getEnclosingFunction(decl.node); + }); + if (classLevelDecls.length === 0) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.assignmentInProtocol(), + target.d.member + ); + } + } + } + } + } + } + + const setTypeResult = getTypeOfMemberAccessWithBaseType( + target, + baseTypeResult, + { + method: 'set', + setType: typeResult, + setErrorNode: srcExpr, + setExpectedTypeDiag: expectedTypeDiagAddendum, + }, + EvalFlags.None + ); + + if (setTypeResult.isAsymmetricAccessor) { + setAsymmetricDescriptorAssignment(target); + } + + const resultToCache: TypeResult = { + type: setTypeResult.narrowedTypeForSet ?? typeResult.type, + isIncomplete: typeResult.isIncomplete, + memberAccessDeprecationInfo: setTypeResult.memberAccessDeprecationInfo, + }; + writeTypeCache(target, resultToCache, EvalFlags.None); + + // If the target is an instance or class variable, update any class-scoped + // type variables so the inferred type of the variable uses "external" + // type variables. 
+ let memberResultToCache = resultToCache; + if (enclosingClass?.shared.typeVarScopeId) { + memberResultToCache = { + ...resultToCache, + type: makeTypeVarsFree(resultToCache.type, [enclosingClass.shared.typeVarScopeId]), + memberAccessDeprecationInfo: setTypeResult.memberAccessDeprecationInfo, + }; + } + writeTypeCache(target.d.member, memberResultToCache, EvalFlags.None); + } + + function assignTypeToMemberVariable( + node: MemberAccessNode, + typeResult: TypeResult, + isInstanceMember: boolean, + srcExprNode?: ExpressionNode + ) { + const memberName = node.d.member.d.value; + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + + const classDef = ParseTreeUtils.getEnclosingClass(node); + if (!classDef) { + return; + } + + const classTypeInfo = getTypeOfClass(classDef); + if (classTypeInfo && isInstantiableClass(classTypeInfo.classType)) { + let memberInfo = lookUpClassMember( + classTypeInfo.classType, + memberName, + isInstanceMember ? MemberAccessFlags.Default : MemberAccessFlags.SkipInstanceMembers + ); + + const memberFields = ClassType.getSymbolTable(classTypeInfo.classType); + if (memberInfo) { + // Are we accessing an existing member on this class, or is + // it a member on a parent class? + const memberClass = isInstantiableClass(memberInfo.classType) ? memberInfo.classType : undefined; + const isThisClass = memberClass && ClassType.isSameGenericClass(classTypeInfo.classType, memberClass); + + // Check for an attempt to write to an instance variable that is + // not defined by __slots__. + if (isThisClass && isInstanceMember && memberClass) { + const inheritedSlotsNames = ClassType.getInheritedSlotsNames(memberClass); + + if (inheritedSlotsNames && memberClass.shared.localSlotsNames) { + // Skip this check if the local slots is specified but empty + // and the class isn't final. This pattern is used in a + // legitimate manner for mix-in classes. 
+ if ( + (memberClass.shared.localSlotsNames.length > 0 || ClassType.isFinal(memberClass)) && + !inheritedSlotsNames.some((name) => name === memberName) + ) { + // Determine whether the assignment corresponds to a descriptor + // that was assigned as a class variable. If so, then slots will not + // apply in this case. + const classMemberDetails = lookUpClassMember( + memberClass, + memberName, + MemberAccessFlags.SkipInstanceMembers + ); + let isPotentiallyDescriptor = false; + + if (classMemberDetails) { + const classMemberSymbolType = getEffectiveTypeOfSymbol(classMemberDetails.symbol); + if ( + isAnyOrUnknown(classMemberSymbolType) || + isUnbound(classMemberSymbolType) || + isMaybeDescriptorInstance(classMemberSymbolType) + ) { + isPotentiallyDescriptor = true; + } + } + + if (!isPotentiallyDescriptor) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.slotsAttributeError().format({ name: memberName }), + node.d.member + ); + } + } + } + } + + if (isThisClass && memberInfo.isInstanceMember === isInstanceMember) { + const symbol = memberFields.get(memberName)!; + assert(symbol !== undefined); + + const typedDecls = symbol.getDeclarations(); + + // Check for an attempt to overwrite a constant member variable. + if ( + typedDecls.length > 0 && + typedDecls[0].type === DeclarationType.Variable && + srcExprNode && + node.d.member !== typedDecls[0].node + ) { + if (typedDecls[0].isConstant) { + addDiagnostic( + DiagnosticRule.reportConstantRedefinition, + LocMessage.constantRedefinition().format({ name: node.d.member.d.value }), + node.d.member + ); + } + } + } else { + // Is the target a property? + const declaredType = getDeclaredTypeOfSymbol(memberInfo.symbol)?.type; + if (declaredType && !isProperty(declaredType)) { + // Handle the case where there is a class variable defined with the same + // name, but there's also now an instance variable introduced. Combine the + // type of the class variable with that of the new instance variable. 
+ if (!memberInfo.isInstanceMember && isInstanceMember) { + // The class variable is accessed in this case. + setSymbolAccessed(fileInfo, memberInfo.symbol, node.d.member); + const memberType = getTypeOfMember(memberInfo); + typeResult = { ...typeResult, type: combineTypes([typeResult.type, memberType]) }; + } + } + } + } + + // Look up the member info again, now that we've potentially updated it. + memberInfo = lookUpClassMember(classTypeInfo.classType, memberName, MemberAccessFlags.DeclaredTypesOnly); + + if (!memberInfo && srcExprNode && !typeResult.isIncomplete) { + reportPossibleUnknownAssignment( + fileInfo.diagnosticRuleSet.reportUnknownMemberType, + DiagnosticRule.reportUnknownMemberType, + node.d.member, + typeResult.type, + node, + /* ignoreEmptyContainers */ true + ); + } + } + } + + function assignTypeToTupleOrListNode( + target: TupleNode | ListNode, + typeResult: TypeResult, + srcExpr: ExpressionNode + ) { + const targetExpressions = target.nodeType === ParseNodeType.List ? target.d.items : target.d.items; + + // Initialize the array of target types, one for each target. + const targetTypes: Type[][] = new Array(targetExpressions.length); + for (let i = 0; i < targetExpressions.length; i++) { + targetTypes[i] = []; + } + const targetUnpackIndex = targetExpressions.findIndex((expr) => expr.nodeType === ParseNodeType.Unpack); + + // Do any of the targets use an unpack operator? If so, it will consume all of the + // entries at that location. + const unpackIndex = targetExpressions.findIndex((expr) => expr.nodeType === ParseNodeType.Unpack); + + typeResult = { ...typeResult, type: makeTopLevelTypeVarsConcrete(typeResult.type) }; + + const diagAddendum = new DiagnosticAddendum(); + + doForEachSubtype(typeResult.type, (subtype) => { + // Is this subtype a tuple? 
+ const tupleType = getSpecializedTupleType(subtype); + if (tupleType && tupleType.priv.tupleTypeArgs) { + const sourceEntryTypes = tupleType.priv.tupleTypeArgs.map((t) => + addConditionToType(t.type, getTypeCondition(subtype), { skipSelfCondition: true }) + ); + + const unboundedIndex = tupleType.priv.tupleTypeArgs.findIndex((t) => t.isUnbounded); + + if (unboundedIndex >= 0) { + if (sourceEntryTypes.length < targetTypes.length) { + const typeToReplicate = + sourceEntryTypes.length > 0 ? sourceEntryTypes[unboundedIndex] : AnyType.create(); + + // Add elements to make the count match the target count. + while (sourceEntryTypes.length < targetTypes.length) { + sourceEntryTypes.splice(unboundedIndex, 0, typeToReplicate); + } + } + + if (sourceEntryTypes.length > targetTypes.length) { + // Remove elements to make the count match the target count. + sourceEntryTypes.splice(unboundedIndex, 1); + } + } + + // If there's an unpack operator in the target and we have too many source elements, + // combine them to assign to the unpacked target. + if (targetUnpackIndex >= 0) { + if (sourceEntryTypes.length > targetTypes.length) { + const removedEntries = sourceEntryTypes.splice( + targetUnpackIndex, + sourceEntryTypes.length - targetTypes.length + 1 + ); + let combinedTypes = combineTypes(removedEntries); + if (target.nodeType === ParseNodeType.List) { + combinedTypes = stripLiteralValue(combinedTypes); + } + sourceEntryTypes.splice(targetUnpackIndex, 0, combinedTypes); + } else if (sourceEntryTypes.length === targetTypes.length - 1) { + sourceEntryTypes.splice(targetUnpackIndex, 0, NeverType.createNever()); + } + } + + sourceEntryTypes.forEach((type, targetIndex) => { + if (targetIndex < targetTypes.length) { + targetTypes[targetIndex].push(type); + } + }); + + // Have we accounted for all of the targets and sources? If not, we have a size mismatch. 
+ if (sourceEntryTypes.length !== targetExpressions.length) { + const subDiag = diagAddendum.createAddendum(); + subDiag.addMessage( + (target.nodeType === ParseNodeType.List + ? LocAddendum.listAssignmentMismatch() + : LocAddendum.tupleAssignmentMismatch() + ).format({ + type: printType(subtype), + }) + ); + + subDiag.createAddendum().addMessage( + (unpackIndex >= 0 + ? LocAddendum.tupleSizeMismatchIndeterminateDest() + : LocAddendum.tupleSizeMismatch() + ).format({ + expected: unpackIndex >= 0 ? targetExpressions.length - 1 : targetExpressions.length, + received: sourceEntryTypes.length, + }) + ); + } + } else { + // The assigned expression isn't a tuple, so it had better + // be some iterable type. + const iterableType = + getTypeOfIterator( + { type: subtype, isIncomplete: typeResult.isIncomplete }, + /* isAsync */ false, + srcExpr + )?.type ?? UnknownType.create(); + for (let index = 0; index < targetExpressions.length; index++) { + targetTypes[index].push(addConditionToType(iterableType, getTypeCondition(subtype))); + } + } + }); + + if (!diagAddendum.isEmpty()) { + addDiagnostic( + DiagnosticRule.reportAssignmentType, + (target.nodeType === ParseNodeType.List + ? LocMessage.listAssignmentMismatch() + : LocMessage.tupleAssignmentMismatch() + ).format({ + type: printType(typeResult.type), + }) + diagAddendum.getString(), + target + ); + } + + // Assign the resulting types to the individual names in the tuple + // or list target expression. + targetExpressions.forEach((expr, index) => { + const typeList = targetTypes[index]; + const targetType = typeList.length === 0 ? UnknownType.create() : combineTypes(typeList); + + assignTypeToExpression( + expr, + { type: targetType, isIncomplete: typeResult.isIncomplete }, + srcExpr, + /* ignoreEmptyContainers */ true + ); + }); + + writeTypeCache(target, typeResult, EvalFlags.None); + } + + // If the type includes promotion types, expand these to their constituent types. 
+ function expandPromotionTypes(node: ParseNode, type: Type, excludeBytes = false): Type { + return mapSubtypes(type, (subtype) => { + if (!isClass(subtype) || !subtype.priv.includePromotions || subtype.priv.literalValue !== undefined) { + return subtype; + } + + if (excludeBytes && ClassType.isBuiltIn(subtype, 'bytes')) { + return subtype; + } + + const typesToCombine: Type[] = [ClassType.cloneRemoveTypePromotions(subtype)]; + + const promotionTypeNames = typePromotions.get(subtype.shared.fullName); + if (promotionTypeNames) { + for (const promotionTypeName of promotionTypeNames) { + const nameSplit = promotionTypeName.split('.'); + let promotionSubtype = getBuiltInType(node, nameSplit[nameSplit.length - 1]); + + if (promotionSubtype && isInstantiableClass(promotionSubtype)) { + promotionSubtype = ClassType.cloneRemoveTypePromotions(promotionSubtype); + + if (isClassInstance(subtype)) { + promotionSubtype = ClassType.cloneAsInstance(promotionSubtype); + } + + promotionSubtype = addConditionToType(promotionSubtype, subtype.props?.condition); + typesToCombine.push(promotionSubtype); + } + } + } + + return combineTypes(typesToCombine); + }); + } + + // Replaces all of the top-level TypeVars (as opposed to TypeVars + // used as type arguments in other types) with their concrete form. + // If conditionFilter is specified and the TypeVar is a constrained + // TypeVar, only the conditions that match the filter will be included. 
+ function makeTopLevelTypeVarsConcrete( + type: Type, + makeParamSpecsConcrete = false, + conditionFilter?: TypeCondition[] + ): Type { + type = transformPossibleRecursiveTypeAlias(type); + + return mapSubtypes(type, (subtype) => { + if (isParamSpec(subtype)) { + if (subtype.priv.paramSpecAccess === 'args') { + return makeTupleObject(evaluatorInterface, [{ type: getObjectType(), isUnbounded: true }]); + } else if (subtype.priv.paramSpecAccess === 'kwargs') { + if ( + prefetched?.dictClass && + isInstantiableClass(prefetched.dictClass) && + prefetched?.strClass && + isInstantiableClass(prefetched.strClass) + ) { + return ClassType.cloneAsInstance( + ClassType.specialize(prefetched.dictClass, [ + convertToInstance(prefetched.strClass), + getObjectType(), + ]) + ); + } + + return UnknownType.create(); + } + } + + // If this is a function that contains only a ParamSpec (no additional + // parameters), convert it to a concrete type of (*args: Unknown, **kwargs: Unknown). + if (makeParamSpecsConcrete && isFunction(subtype)) { + const convertedType = simplifyFunctionToParamSpec(subtype); + if (isParamSpec(convertedType)) { + return ParamSpecType.getUnknown(); + } + } + + if (isTypeVarTuple(subtype)) { + // If it's in a union, convert to type or object. + if (subtype.priv.isInUnion) { + if (TypeBase.isInstantiable(subtype)) { + if (prefetched?.typeClass && isInstantiableClass(prefetched.typeClass)) { + return prefetched.typeClass; + } + } else { + return getObjectType(); + } + + return AnyType.create(); + } + + // Fall back to "*tuple[object, ...]". + return makeTupleObject( + evaluatorInterface, + [{ type: getObjectType(), isUnbounded: true }], + /* isUnpacked */ true + ); + } + + if (isTypeVar(subtype)) { + // If this is a recursive type alias placeholder + // that hasn't yet been resolved, return it as is. 
+ if (subtype.shared.recursiveAlias) { + return subtype; + } + + if (TypeVarType.hasConstraints(subtype)) { + const typesToCombine: Type[] = []; + + // Expand the list of constrained subtypes, filtering out any that are + // disallowed by the conditionFilter. + subtype.shared.constraints.forEach((constraintType, constraintIndex) => { + if (conditionFilter) { + const typeVarName = TypeVarType.getNameWithScope(subtype); + const applicableConstraint = conditionFilter.find( + (filter) => filter.typeVar.priv.nameWithScope === typeVarName + ); + + // If this type variable is being constrained to a single index, + // don't include the other indices. + if (applicableConstraint && applicableConstraint.constraintIndex !== constraintIndex) { + return; + } + } + + if (TypeBase.isInstantiable(subtype)) { + constraintType = convertToInstantiable(constraintType); + } + + typesToCombine.push( + addConditionToType(constraintType, [{ typeVar: subtype, constraintIndex }]) + ); + }); + + return combineTypes(typesToCombine); + } + + if (subtype.shared.isExemptFromBoundCheck) { + return AnyType.create(); + } + + // Fall back to a bound of "object" if no bound is provided. + let boundType = subtype.shared.boundType ?? getObjectType(); + + // If this is a synthesized self/cls type var, self-specialize its type arguments. + if (TypeVarType.isSelf(subtype) && isClass(boundType) && !ClassType.isPseudoGenericClass(boundType)) { + boundType = selfSpecializeClass(boundType, { + useBoundTypeVars: TypeVarType.isBound(subtype), + }); + } + + if (subtype.priv.isUnpacked && isClass(boundType)) { + boundType = ClassType.cloneForUnpacked(boundType); + } + + boundType = TypeBase.isInstantiable(subtype) ? convertToInstantiable(boundType) : boundType; + + return addConditionToType(boundType, [{ typeVar: subtype, constraintIndex: 0 }]); + } + + return subtype; + }); + } + + // Creates a new type by mapping an existing type (which could be a union) + // to another type or types. 
The callback is called for each subtype. + // Top-level TypeVars are expanded (e.g. a bound TypeVar is expanded to + // its bound type and a constrained TypeVar is expanded to its individual + // constrained types). If conditionFilter is specified, conditions that + // do not match will be ignored. + function mapSubtypesExpandTypeVars( + type: Type, + options: MapSubtypesOptions | undefined, + callback: (expandedSubtype: Type, unexpandedSubtype: Type, isLastIteration: boolean) => Type | undefined, + recursionCount = 0 + ): Type { + const newSubtypes: Type[] = []; + let typeChanged = false; + + function expandSubtype(unexpandedType: Type, isLastSubtype: boolean) { + let expandedType = isUnion(unexpandedType) ? unexpandedType : makeTopLevelTypeVarsConcrete(unexpandedType); + + expandedType = transformPossibleRecursiveTypeAlias(expandedType); + if (options?.expandCallback) { + expandedType = options.expandCallback(expandedType); + } + + doForEachSubtype( + expandedType, + (subtype, index, allSubtypes) => { + if (options?.conditionFilter) { + const filteredType = applyConditionFilterToType( + subtype, + options.conditionFilter, + recursionCount + ); + if (!filteredType) { + return undefined; + } + + subtype = filteredType; + } + + let transformedType = callback( + subtype, + unexpandedType, + isLastSubtype && index === allSubtypes.length - 1 + ); + + if (transformedType !== unexpandedType) { + typeChanged = true; + } + + if (transformedType) { + // Apply the type condition if it's associated with a constrained TypeVar. + const typeCondition = getTypeCondition(subtype)?.filter((condition) => + TypeVarType.hasConstraints(condition.typeVar) + ); + + if (typeCondition && typeCondition.length > 0) { + transformedType = addConditionToType(transformedType, typeCondition); + } + + // This code path can often produce many duplicate subtypes. We can + // reduce the cost of the combineTypes call below by filtering out these + // duplicates proactively. 
+ if ( + newSubtypes.length === 0 || + !isTypeSame(transformedType, newSubtypes[newSubtypes.length - 1]) + ) { + newSubtypes.push(transformedType); + } + } + return undefined; + }, + options?.sortSubtypes + ); + } + + if (isUnion(type)) { + const subtypes = options?.sortSubtypes ? sortTypes(type.priv.subtypes) : type.priv.subtypes; + subtypes.forEach((subtype, index) => { + expandSubtype(subtype, index === type.priv.subtypes.length - 1); + }); + } else { + expandSubtype(type, /* isLastSubtype */ true); + } + + if (!typeChanged) { + return type; + } + + const newType = combineTypes(newSubtypes); + + // Do our best to retain type aliases. + if (newType.category === TypeCategory.Union) { + UnionType.addTypeAliasSource(newType, type); + } + return newType; + } + + function applyConditionFilterToType( + type: Type, + conditionFilter: TypeCondition[], + recursionCount: number + ): Type | undefined { + if (recursionCount > maxTypeRecursionCount) { + return type; + } + recursionCount++; + + // If the type has a condition associated with it, make sure it's compatible. + if (!TypeCondition.isCompatible(getTypeCondition(type), conditionFilter)) { + return undefined; + } + + // If the type is generic, see if any of its type arguments should be filtered. + // This is possible only in cases where the type parameter is covariant. + + // TODO - handle functions and tuples + if (isClass(type) && type.priv.typeArgs && !type.priv.tupleTypeArgs) { + inferVarianceForClass(type); + + let typeWasTransformed = false; + + const filteredTypeArgs = type.priv.typeArgs.map((typeArg, index) => { + if (index >= type.shared.typeParams.length) { + return typeArg; + } + + const variance = TypeVarType.getVariance(type.shared.typeParams[index]); + if (variance !== Variance.Covariant) { + return typeArg; + } + + // Don't expand recursive type aliases because they can + // cause infinite recursion. 
+ if (isTypeVar(typeArg) && typeArg.shared.recursiveAlias) { + return typeArg; + } + + const filteredTypeArg = mapSubtypesExpandTypeVars( + typeArg, + { conditionFilter }, + (expandedSubtype) => { + return expandedSubtype; + }, + recursionCount + ); + + if (filteredTypeArg !== typeArg) { + typeWasTransformed = true; + } + + return filteredTypeArg; + }); + + if (typeWasTransformed) { + return ClassType.specialize(type, filteredTypeArgs); + } + } + + return type; + } + + function markNamesAccessed(node: ParseNode, names: string[]) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + const scope = ScopeUtils.getScopeForNode(node); + + if (scope) { + names.forEach((symbolName) => { + const symbolInScope = scope.lookUpSymbolRecursive(symbolName); + if (symbolInScope) { + setSymbolAccessed(fileInfo, symbolInScope.symbol, node); + } + }); + } + } + + function assignTypeToExpression( + target: ExpressionNode, + typeResult: TypeResult, + srcExpr: ExpressionNode, + ignoreEmptyContainers = false, + allowAssignmentToFinalVar = false, + expectedTypeDiagAddendum?: DiagnosticAddendum + ) { + // Is the source expression a TypeVar() call? + if (isTypeVar(typeResult.type)) { + if (srcExpr && srcExpr.nodeType === ParseNodeType.Call) { + const callType = getTypeOfExpression(srcExpr.d.leftExpr, EvalFlags.CallBaseDefaults).type; + if ( + isInstantiableClass(callType) && + (ClassType.isBuiltIn(callType, 'TypeVar') || + ClassType.isBuiltIn(callType, 'TypeVarTuple') || + ClassType.isBuiltIn(callType, 'ParamSpec')) + ) { + const typeVarTarget = + target.nodeType === ParseNodeType.TypeAnnotation ? target.d.valueExpr : target; + if ( + typeVarTarget.nodeType !== ParseNodeType.Name || + typeVarTarget.d.value !== typeResult.type.shared.name + ) { + const name = TypeVarType.getReadableName(typeResult.type); + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + isParamSpec(typeResult.type) + ? 
LocMessage.paramSpecAssignedName().format({ name }) + : LocMessage.typeVarAssignedName().format({ name }), + typeVarTarget + ); + } + } + } + } + + // If the type was partially unbound, an error will have already been logged. + // Remove the unbound before assigning to the target expression so the unbound + // error doesn't propagate. + if (findSubtype(typeResult.type, (subtype) => isUnbound(subtype))) { + typeResult = { ...typeResult, type: removeUnbound(typeResult.type) }; + } + + switch (target.nodeType) { + case ParseNodeType.Name: { + assignTypeToNameNode( + target, + typeResult, + ignoreEmptyContainers, + srcExpr, + allowAssignmentToFinalVar, + expectedTypeDiagAddendum + ); + break; + } + + case ParseNodeType.MemberAccess: { + assignTypeToMemberAccessNode(target, typeResult, srcExpr, expectedTypeDiagAddendum); + break; + } + + case ParseNodeType.Index: { + const baseTypeResult = getTypeOfExpression(target.d.leftExpr, EvalFlags.IndexBaseDefaults); + + getTypeOfIndexWithBaseType( + target, + baseTypeResult, + { + method: 'set', + setType: typeResult, + setErrorNode: srcExpr, + setExpectedTypeDiag: expectedTypeDiagAddendum, + }, + EvalFlags.None + ); + + writeTypeCache(target, typeResult, EvalFlags.None); + break; + } + + case ParseNodeType.List: + case ParseNodeType.Tuple: { + assignTypeToTupleOrListNode(target, typeResult, srcExpr); + break; + } + + case ParseNodeType.TypeAnnotation: { + getTypeOfAnnotation(target.d.annotation, { + varTypeAnnotation: true, + allowFinal: isFinalAllowedForAssignmentTarget(target.d.valueExpr), + allowClassVar: isClassVarAllowedForAssignmentTarget(target.d.valueExpr), + }); + + assignTypeToExpression( + target.d.valueExpr, + typeResult, + srcExpr, + ignoreEmptyContainers, + allowAssignmentToFinalVar, + expectedTypeDiagAddendum + ); + break; + } + + case ParseNodeType.Unpack: { + assignTypeToExpression( + target.d.expr, + { + type: getBuiltInObject(target.d.expr, 'list', [typeResult.type]), + isIncomplete: typeResult.isIncomplete, 
+ }, + srcExpr, + ignoreEmptyContainers, + allowAssignmentToFinalVar, + expectedTypeDiagAddendum + ); + break; + } + + case ParseNodeType.Error: { + // Evaluate the child expression as best we can so the + // type information is cached for the completion handler. + if (target.d.child) { + suppressDiagnostics(target.d.child, () => { + getTypeOfExpression(target.d.child!); + }); + } + break; + } + + default: { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.assignmentTargetExpr(), target); + break; + } + } + } + + function isClassVarAllowedForAssignmentTarget(targetNode: ExpressionNode): boolean { + // ClassVar is allowed only in a class body. + const classNode = ParseTreeUtils.getEnclosingClass(targetNode, /* stopAtFunction */ true); + if (!classNode) { + return false; + } + + // ClassVar is not allowed in a TypedDict or a NamedTuple class. + return !isInTypedDictOrNamedTuple(classNode); + } + + function isFinalAllowedForAssignmentTarget(targetNode: ExpressionNode): boolean { + const classNode = ParseTreeUtils.getEnclosingClass(targetNode, /* stopAtFunction */ true); + + // Final is not allowed in the body of a TypedDict or NamedTuple class. + if (classNode && isInTypedDictOrNamedTuple(classNode)) { + return false; + } + + return ParseTreeUtils.isFinalAllowedForAssignmentTarget(targetNode); + } + + function isInTypedDictOrNamedTuple(classNode: ClassNode): boolean { + const classType = getTypeOfClass(classNode)?.classType; + if (!classType) { + return false; + } + + return ClassType.isTypedDictClass(classType) || !!classType.shared.namedTupleEntries; + } + + function verifyRaiseExceptionType(node: ExpressionNode, allowNone: boolean) { + const baseExceptionType = getBuiltInType(node, 'BaseException'); + const exceptionType = getTypeOfExpression(node).type; + + // Validate that the argument of "raise" is an exception object or class. + // If it is a class, validate that the class's constructor accepts zero + // arguments. 
+ if (exceptionType && baseExceptionType && isInstantiableClass(baseExceptionType)) { + const diag = new DiagnosticAddendum(); + + doForEachSubtype(exceptionType, (subtype) => { + const concreteSubtype = makeTopLevelTypeVarsConcrete(subtype); + + if (isAnyOrUnknown(concreteSubtype) || isNever(concreteSubtype)) { + return; + } + + if (allowNone && isNoneInstance(concreteSubtype)) { + return; + } + + if (isInstantiableClass(concreteSubtype) && concreteSubtype.priv.literalValue === undefined) { + if (!derivesFromClassRecursive(concreteSubtype, baseExceptionType, /* ignoreUnknown */ false)) { + diag.addMessage( + LocMessage.exceptionTypeIncorrect().format({ + type: printType(subtype), + }) + ); + } else { + let callResult: CallResult | undefined; + suppressDiagnostics(node, () => { + callResult = validateConstructorArgs( + evaluatorInterface, + node, + [], + concreteSubtype, + /* skipUnknownArgCheck */ false, + /* inferenceContext */ undefined + ); + }); + + if (callResult && callResult.argumentErrors) { + diag.addMessage( + LocMessage.exceptionTypeNotInstantiable().format({ + type: printType(subtype), + }) + ); + } + } + } else if (isClassInstance(concreteSubtype)) { + if ( + !derivesFromClassRecursive( + ClassType.cloneAsInstantiable(concreteSubtype), + baseExceptionType, + /* ignoreUnknown */ false + ) + ) { + diag.addMessage( + LocMessage.exceptionTypeIncorrect().format({ + type: printType(subtype), + }) + ); + } + } else { + diag.addMessage( + LocMessage.exceptionTypeIncorrect().format({ + type: printType(subtype), + }) + ); + } + }); + + if (!diag.isEmpty()) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.expectedExceptionClass() + diag.getString(), + node + ); + } + } + } + + function verifyDeleteExpression(node: ExpressionNode) { + switch (node.nodeType) { + case ParseNodeType.Name: { + // Get the type to evaluate whether it's bound + // and to mark it accessed. 
+ getTypeOfExpression(node); + break; + } + + case ParseNodeType.MemberAccess: { + const baseTypeResult = getTypeOfExpression(node.d.leftExpr, EvalFlags.MemberAccessBaseDefaults); + const delAccessResult = getTypeOfMemberAccessWithBaseType( + node, + baseTypeResult, + { method: 'del' }, + EvalFlags.None + ); + const resultToCache: TypeResult = { + type: delAccessResult.type, + memberAccessDeprecationInfo: delAccessResult.memberAccessDeprecationInfo, + }; + writeTypeCache(node.d.member, resultToCache, EvalFlags.None); + writeTypeCache(node, resultToCache, EvalFlags.None); + break; + } + + case ParseNodeType.Index: { + const baseTypeResult = getTypeOfExpression(node.d.leftExpr, EvalFlags.IndexBaseDefaults); + getTypeOfIndexWithBaseType(node, baseTypeResult, { method: 'del' }, EvalFlags.None); + writeTypeCache(node, { type: UnboundType.create() }, EvalFlags.None); + break; + } + + case ParseNodeType.Tuple: { + node.d.items.forEach((expr) => { + verifyDeleteExpression(expr); + }); + break; + } + + case ParseNodeType.Error: { + // Evaluate the child expression as best we can so the + // type information is cached for the completion handler. 
+ if (node.d.child) { + suppressDiagnostics(node.d.child, () => { + getTypeOfExpression(node.d.child!); + }); + } + break; + } + + default: { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.delTargetExpr(), node); + break; + } + } + } + + function setSymbolAccessed(fileInfo: AnalyzerFileInfo, symbol: Symbol, node: ParseNode) { + if (!isSpeculativeModeInUse(node)) { + fileInfo.accessedSymbolSet.add(symbol.id); + } + } + + function getTypeOfName(node: NameNode, flags: EvalFlags): TypeResult { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + const name = node.d.value; + let symbol: Symbol | undefined; + let type: Type | undefined; + let isIncomplete = false; + const allowForwardReferences = (flags & EvalFlags.ForwardRefs) !== 0 || fileInfo.isStubFile; + + // Look for the scope that contains the value definition and + // see if it has a declared type. + let symbolWithScope = lookUpSymbolRecursive( + node, + name, + !allowForwardReferences, + allowForwardReferences && (flags & EvalFlags.TypeExpression) !== 0 + ); + + if (!symbolWithScope) { + // If the node is part of a "from X import Y as Z" statement and the node + // is the "Y" (non-aliased) name, we need to look up the alias symbol + // since the non-aliased name is not in the symbol table. + const alias = getAliasFromImport(node); + if (alias) { + symbolWithScope = lookUpSymbolRecursive( + alias, + alias.d.value, + !allowForwardReferences, + allowForwardReferences && (flags & EvalFlags.TypeExpression) !== 0 + ); + } + } + + if (symbolWithScope) { + let useCodeFlowAnalysis = !allowForwardReferences; + + // If the symbol is implicitly imported from the builtin + // scope, there's no need to use code flow analysis. 
+ if (symbolWithScope.scope.type === ScopeType.Builtin) { + useCodeFlowAnalysis = false; + } + + symbol = symbolWithScope.symbol; + setSymbolAccessed(fileInfo, symbol, node); + + // If we're not supposed to be analyzing this function, skip the remaining work + // to determine the name's type. Simply evaluate its type as Any. + if (!fileInfo.diagnosticRuleSet.analyzeUnannotatedFunctions) { + const containingFunction = ParseTreeUtils.getEnclosingFunction(node); + if (containingFunction && ParseTreeUtils.isUnannotatedFunction(containingFunction)) { + return { + type: AnyType.create(), + isIncomplete: false, + }; + } + } + + // Get the effective type (either the declared type or the inferred type). + // If we're using code flow analysis, pass the usage node so we consider + // only the assignment nodes that are reachable from this usage. + const effectiveTypeInfo = getEffectiveTypeOfSymbolForUsage(symbol, useCodeFlowAnalysis ? node : undefined); + let effectiveType = transformPossibleRecursiveTypeAlias(effectiveTypeInfo.type); + + if (effectiveTypeInfo.isIncomplete) { + if (isUnbound(effectiveType)) { + effectiveType = UnknownType.create(/* isIncomplete */ true); + } + isIncomplete = true; + } + + if (effectiveTypeInfo.isRecursiveDefinition && isNodeReachable(node)) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.recursiveDefinition().format({ name }), + node + ); + } + + const isSpecialBuiltIn = + !!effectiveType && isInstantiableClass(effectiveType) && ClassType.isSpecialBuiltIn(effectiveType); + + type = effectiveType; + if (useCodeFlowAnalysis && !isSpecialBuiltIn) { + // See if code flow analysis can tell us anything more about the type. + // If the symbol is declared outside of our execution scope, use its effective + // type. If it's declared inside our execution scope, it generally starts + // as unbound at the start of the code flow. 
+ let typeAtStart = effectiveType; + let isTypeAtStartIncomplete = false; + + if (!symbolWithScope.isBeyondExecutionScope && symbol.isInitiallyUnbound()) { + typeAtStart = UnboundType.create(); + + // Is this a module-level scope? If so, see if it's an alias of a builtin. + if (symbolWithScope.scope.type === ScopeType.Module) { + assert(symbolWithScope.scope.parent); + const builtInSymbol = symbolWithScope.scope.parent.lookUpSymbol(name); + if (builtInSymbol) { + const builtInEffectiveType = getEffectiveTypeOfSymbolForUsage(builtInSymbol); + typeAtStart = builtInEffectiveType.type; + } + } + } + + if (symbolWithScope.isBeyondExecutionScope) { + const outerScopeTypeResult = getCodeFlowTypeForCapturedVariable( + node, + symbolWithScope, + effectiveType + ); + + if (outerScopeTypeResult?.type) { + type = outerScopeTypeResult.type; + typeAtStart = type; + isTypeAtStartIncomplete = !!outerScopeTypeResult.isIncomplete; + } + } + + const codeFlowTypeResult = getFlowTypeOfReference(node, /* startNode */ undefined, { + targetSymbolId: symbol.id, + typeAtStart: { type: typeAtStart, isIncomplete: isTypeAtStartIncomplete }, + skipConditionalNarrowing: (flags & EvalFlags.TypeExpression) !== 0, + }); + + if (codeFlowTypeResult.type) { + type = codeFlowTypeResult.type; + } + + if (codeFlowTypeResult.isIncomplete) { + isIncomplete = true; + } + } + + // Detect, report, and fill in missing type arguments if appropriate. + type = reportMissingTypeArgs(node, type, flags); + + // Report inappropriate use of variables in type expressions. + if ((flags & EvalFlags.TypeExpression) !== 0) { + type = validateSymbolIsTypeExpression(node, type, !!effectiveTypeInfo.includesVariableDecl); + } + + if (isTypeVar(type) && !type.shared.isSynthesized) { + type = validateTypeVarUsage(node, type, flags); + } + + // Add TypeForm details if appropriate. 
+ type = addTypeFormForSymbol(node, type, flags, !!effectiveTypeInfo.includesVariableDecl); + } else { + // Handle the special case of "reveal_type" and "reveal_locals". + if (name === 'reveal_type' || name === 'reveal_locals') { + type = AnyType.create(); + } else { + addDiagnostic( + DiagnosticRule.reportUndefinedVariable, + LocMessage.symbolIsUndefined().format({ name }), + node + ); + + type = UnknownType.create(); + } + } + + if (isParamSpec(type) && type.priv.scopeId) { + if (flags & EvalFlags.NoParamSpec) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.paramSpecContext(), node); + type = UnknownType.create(); + } + } + + // If we're expecting a type expression and got a sentinel literal instance, + // treat it as its instantiable counterpart. This is similar to how None + // is treated in a type expression context. + if ((flags & EvalFlags.InstantiableType) !== 0 && isClassInstance(type) && isSentinelLiteral(type)) { + type = ClassType.cloneAsInstantiable(type); + } + + type = convertSpecialFormToRuntimeValue(type, flags); + + if ((flags & EvalFlags.TypeExpression) === 0) { + reportUseOfTypeCheckOnly(type, node); + } + + if ((flags & EvalFlags.InstantiableType) !== 0) { + if ((flags & EvalFlags.AllowGeneric) === 0) { + if (isInstantiableClass(type) && ClassType.isBuiltIn(type, 'Generic')) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.genericNotAllowed(), node); + } + } + } + + return { type, isIncomplete }; + } + + function addTypeFormForSymbol(node: ExpressionNode, type: Type, flags: EvalFlags, includesVarDecl: boolean): Type { + if (!isTypeFormSupported(node)) { + return type; + } + + const isValid = isSymbolValidTypeExpression(type, includesVarDecl); + + // If the type already has type information associated with it, don't replace. + if (type.props?.typeForm) { + // If the NoConvertSpecialForm flag is set, we are evaluating in + // the interior of a type expression, so variables are not allowed. 
+ // Clear any existing type form type for this symbol in this case. + if ((flags & EvalFlags.NoConvertSpecialForm) !== 0 && !isValid) { + type = TypeBase.cloneWithTypeForm(type, undefined); + } + return type; + } + + // If the symbol is not valid for a type expression (e.g. it's a variable), + // don't add TypeForm info. + if (!isValid) { + return type; + } + + if (isTypeVar(type) && type.priv.scopeId && !type.shared.isSynthesized) { + if (!isTypeVarTuple(type) || !type.priv.isInUnion) { + const liveScopeIds = ParseTreeUtils.getTypeVarScopesForNode(node); + type = TypeBase.cloneWithTypeForm(type, convertToInstance(makeTypeVarsBound(type, liveScopeIds))); + } + } else if (isInstantiableClass(type) && !type.priv.includeSubclasses && !ClassType.isSpecialBuiltIn(type)) { + if (ClassType.isBuiltIn(type, 'Any')) { + type = TypeBase.cloneWithTypeForm(type, AnyType.create()); + } else { + type = TypeBase.cloneWithTypeForm(type, ClassType.cloneAsInstance(specializeWithDefaultTypeArgs(type))); + } + } + + if (type.props?.typeAliasInfo && TypeBase.isInstantiable(type)) { + let typeFormType = type; + if ((flags & EvalFlags.NoSpecialize) === 0) { + typeFormType = specializeTypeAliasWithDefaults(typeFormType, /* errorNode */ undefined); + } + + type = TypeBase.cloneWithTypeForm(type, convertToInstance(typeFormType)); + } + + return type; + } + + function isSymbolValidTypeExpression(type: Type, includesVarDecl: boolean): boolean { + // Verify that the name does not refer to a (non type alias) variable. + if (!includesVarDecl || type.props?.typeAliasInfo) { + return true; + } + + if (isTypeAliasPlaceholder(type)) { + return true; + } + + if (isTypeVar(type)) { + if (type.props?.specialForm || type.props?.typeAliasInfo) { + return true; + } + } + + // Exempts class types that are created by calling NewType, NamedTuple, etc. 
+ if (isClass(type) && !type.priv.includeSubclasses && ClassType.isValidTypeAliasClass(type)) { + return true; + } + + if (isSentinelLiteral(type)) { + return true; + } + + return false; + } + + // Reports diagnostics if type isn't valid within a type expression. + function validateSymbolIsTypeExpression(node: ExpressionNode, type: Type, includesVarDecl: boolean): Type { + if (isSymbolValidTypeExpression(type, includesVarDecl)) { + return type; + } + + // Disable for assignments in the typings.pyi file, since it defines special forms. + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + if (fileInfo.isTypingStubFile) { + return type; + } + + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.typeAnnotationVariable(), node); + return UnknownType.create(); + } + + // If the value is a special form (like a TypeVar or `Any`) and is being + // evaluated in a value expression context, convert it from its special + // meaning to its runtime value. If convertModule is true, a module is + // converted to an instance of types.ModuleType. + function convertSpecialFormToRuntimeValue(type: Type, flags: EvalFlags, convertModule = false) { + const exemptFlags = EvalFlags.TypeExpression | EvalFlags.InstantiableType | EvalFlags.NoConvertSpecialForm; + + if ((flags & exemptFlags) !== 0) { + return type; + } + + if ( + convertModule && + isModule(type) && + prefetched?.moduleTypeClass && + isInstantiableClass(prefetched.moduleTypeClass) + ) { + return ClassType.cloneAsInstance(prefetched.moduleTypeClass); + } + + // Isinstance treats traditional (non-PEP 695) type aliases that are unions + // as tuples of classes rather than unions. + if ((flags & EvalFlags.IsinstanceArg) !== 0) { + if (isUnion(type) && type.props?.typeAliasInfo && !type.props.typeAliasInfo.shared.isTypeAliasType) { + return type; + } + } + + if (!type.props?.specialForm) { + return type; + } + + // If this is a type alias and we are not supposed to specialize it, return it as is. 
+ if ((flags & EvalFlags.NoSpecialize) !== 0 && type.props?.typeAliasInfo) { + // Special-case TypeAliasType which should be converted in this case. + if (!ClassType.isBuiltIn(type.props.specialForm, 'TypeAliasType')) { + return type; + } + } + + if (type.props?.typeForm) { + return TypeBase.cloneWithTypeForm(type.props.specialForm, type.props.typeForm); + } + + return type.props.specialForm; + } + + // Handles the case where a variable or parameter is defined in an outer + // scope and captured by an inner scope (a function, lambda, or comprehension). + function getCodeFlowTypeForCapturedVariable( + node: NameNode, + symbolWithScope: SymbolWithScope, + effectiveType: Type + ): FlowNodeTypeResult | undefined { + // This function applies only to captured variables, not those that + // are accessed via an explicit nonlocal or global binding. + if (symbolWithScope.usesGlobalBinding || symbolWithScope.usesNonlocalBinding) { + return undefined; + } + + // This function applies only to variables, parameters, and imports, not to other + // types of symbols. + const decls = symbolWithScope.symbol.getDeclarations(); + if ( + !decls.every( + (decl) => + decl.type === DeclarationType.Variable || + decl.type === DeclarationType.Param || + decl.type === DeclarationType.Alias + ) + ) { + return undefined; + } + + // If the symbol is modified in scopes other than the one in which it is + // declared (e.g. through a nonlocal or global binding), it is not eligible + // for code flow analysis. + if ( + !decls.every( + (decl) => + decl.type === DeclarationType.Param || + ScopeUtils.getScopeForNode(decl.node) === symbolWithScope.scope + ) + ) { + return undefined; + } + + // If the symbol is a non-final variable in the global scope, it is not + // eligible because it could be modified by other modules. 
+ if ( + !decls.every( + (decl) => + decl.type !== DeclarationType.Variable || + decl.isFinal || + ScopeUtils.getScopeForNode(decl.node)?.type !== ScopeType.Module + ) + ) { + return undefined; + } + + // If the symbol is a variable captured by an inner function + // or lambda, see if we can infer the type from the outer scope. + const scopeHierarchy = ScopeUtils.getScopeHierarchy(node, symbolWithScope.scope); + + if (scopeHierarchy && scopeHierarchy.length >= 2) { + // Find the parse node associated with the scope that is just inside of the + // scope that declares the captured variable. + const innerScopeNode = ScopeUtils.findTopNodeInScope(node, scopeHierarchy[scopeHierarchy.length - 2]); + if ( + innerScopeNode?.nodeType === ParseNodeType.Function || + innerScopeNode?.nodeType === ParseNodeType.Lambda || + innerScopeNode?.nodeType === ParseNodeType.Class + ) { + const innerScopeCodeFlowNode = AnalyzerNodeInfo.getFlowNode(innerScopeNode); + if (innerScopeCodeFlowNode) { + // See if any of the assignments of the symbol are reachable + // from this node. If so, we cannot apply any narrowing because + // the type could change after the capture. + if ( + symbolWithScope.symbol.getDeclarations().every((decl) => { + // Parameter declarations always start life at the beginning + // of the execution scope, so they are always safe to narrow. 
+ if (decl.type === DeclarationType.Param) { + return true; + } + + const declCodeFlowNode = AnalyzerNodeInfo.getFlowNode(decl.node); + if (!declCodeFlowNode) { + return false; + } + + return ( + codeFlowEngine.getFlowNodeReachability( + declCodeFlowNode, + innerScopeCodeFlowNode, + /* ignoreNoReturn */ true + ) !== Reachability.Reachable + ); + }) + ) { + let typeAtStart = effectiveType; + if (symbolWithScope.symbol.isInitiallyUnbound()) { + typeAtStart = UnboundType.create(); + } + + return getFlowTypeOfReference(node, innerScopeNode, { + targetSymbolId: symbolWithScope.symbol.id, + typeAtStart: { type: typeAtStart }, + }); + } + } + } + } + + return undefined; + } + + // Validates that a TypeVar is valid in this context. If so, it clones it + // and provides a scope ID defined by its containing scope (class, function + // or type alias). If not, it emits errors indicating why the TypeVar + // cannot be used in this location. + function validateTypeVarUsage(node: ExpressionNode, type: TypeVarType, flags: EvalFlags) { + if (!TypeBase.isInstantiable(type) || isTypeAliasPlaceholder(type)) { + return type; + } + + // If the TypeVar doesn't have a scope ID, try to assign one. + if (!type.priv.scopeId) { + type = assignTypeVarScopeId(node, type, flags); + } + + // If this is a free type var, see if we need to make it into a bound type var. + if (type.priv.scopeId && !TypeVarType.isBound(type)) { + // If this is a reference to a TypeVar defined in an outer scope, + // mark it as bound. + const scopedNode = findScopedTypeVar(node, type)?.scopeNode; + + if (scopedNode) { + const enclosingSuite = ParseTreeUtils.getEnclosingClassOrFunctionSuite(node); + + if (enclosingSuite && ParseTreeUtils.isNodeContainedWithin(enclosingSuite, scopedNode)) { + if (scopedNode.nodeType !== ParseNodeType.Class || scopedNode.d.suite !== enclosingSuite) { + type = TypeVarType.cloneAsBound(type); + } + } + } + } + + // If this is a TypeVarTuple, the name refers to the packed form. 
It + // must be unpacked in most contexts. + if (isUnpackedTypeVarTuple(type)) { + type = TypeVarType.cloneForPacked(type); + } + + if ((flags & EvalFlags.EnforceClassTypeVarScope) !== 0 && !enforceClassTypeVarScope(node, type)) { + return UnknownType.create(); + } + + return type; + } + + function assignTypeVarScopeId(node: ExpressionNode, type: TypeVarType, flags: EvalFlags): TypeVarType { + const scopedTypeVarInfo = findScopedTypeVar(node, type); + type = scopedTypeVarInfo.type; + + if ((flags & EvalFlags.NoTypeVarWithScopeId) !== 0 && !!type.priv.scopeId) { + if (type.shared.isSynthesized || isParamSpec(type)) { + return type; + } + + // This TypeVar already has a scope ID assigned to it. See if it + // originates from type parameter syntax. If so, allow it. + if (type.shared.isTypeParamSyntax) { + return type; + } + + // If this type variable expression is used within a generic class, + // function, or type alias that uses type parameter syntax, there is + // no need to report an error here. + const typeVarScopeNode = ParseTreeUtils.getTypeVarScopeNode(node); + if ( + typeVarScopeNode && + typeVarScopeNode.d.typeParams && + !typeVarScopeNode.d.typeParams.d.params.some((t) => t.d.name === node) + ) { + return type; + } + + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarUsedByOuterScope().format({ name: type.shared.name }), + node + ); + + return type; + } + + if ((flags & EvalFlags.TypeVarGetsCurScope) !== 0) { + if (type.priv.scopeId) { + return type; + } + + if (scopedTypeVarInfo.foundInterveningClass) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarUsedByOuterScope().format({ name: type.shared.name }), + node + ); + return type; + } + + let enclosingScope = ParseTreeUtils.getEnclosingClassOrFunction(node); + + // Handle P.args and P.kwargs as a special case for inner functions. 
+ if ( + enclosingScope && + node.parent?.nodeType === ParseNodeType.MemberAccess && + node.parent.d.leftExpr === node + ) { + const memberName = node.parent.d.member.d.value; + if (memberName === 'args' || memberName === 'kwargs') { + const outerFunctionScope = ParseTreeUtils.getEnclosingClassOrFunction(enclosingScope); + + if (outerFunctionScope?.nodeType === ParseNodeType.Function) { + enclosingScope = outerFunctionScope; + } else if (!scopedTypeVarInfo.type.priv.scopeId) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.paramSpecNotUsedByOuterScope().format({ + name: type.shared.name, + }), + node + ); + } + } + } + + if (!enclosingScope) { + fail('AssociateTypeVarsWithCurrentScope flag was set but enclosing scope not found'); + } + + // If the enclosing scope is using type parameter syntax, traditional + // type variables can't be used in this context. + if ( + enclosingScope.d.typeParams && + !enclosingScope.d.typeParams.d.params.some((param) => param.d.name.d.value === type.shared.name) + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeParameterNotDeclared().format({ + name: type.shared.name, + container: enclosingScope.d.name.d.value, + }), + node + ); + } + + const scopeIdToAssign = ParseTreeUtils.getScopeIdForNode(enclosingScope); + + return TypeVarType.cloneForScopeId( + type, + scopeIdToAssign, + enclosingScope.d.name.d.value, + enclosingScope.nodeType === ParseNodeType.Function ? TypeVarScopeType.Function : TypeVarScopeType.Class + ); + } + + if ((flags & EvalFlags.AllowTypeVarWithoutScopeId) === 0) { + if (type.priv.scopeId && !scopedTypeVarInfo.foundInterveningClass) { + return type; + } + + if (!type.shared.isSynthesized && (flags & EvalFlags.InstantiableType) !== 0) { + const message = isParamSpec(type) + ? 
LocMessage.paramSpecNotUsedByOuterScope() + : LocMessage.typeVarNotUsedByOuterScope(); + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, message.format({ name: type.shared.name }), node); + } + } + + return type; + } + + // Enforce that the type variable is scoped to the enclosing class or + // an outer class that contains the class definition. + function enforceClassTypeVarScope(node: ExpressionNode, type: TypeVarType): boolean { + const scopeId = type.priv.freeTypeVar?.priv.scopeId ?? type.priv.scopeId; + if (!scopeId) { + return true; + } + + const enclosingClass = ParseTreeUtils.getEnclosingClass(node); + if (enclosingClass) { + const liveTypeVarScopeIds = ParseTreeUtils.getTypeVarScopesForNode(enclosingClass); + if (!liveTypeVarScopeIds.includes(scopeId)) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarInvalidForMemberVariable().format({ + name: TypeVarType.getReadableName(type), + }), + node + ); + + return false; + } + } + + return true; + } + + // Determines if the type is a generic class or type alias with missing + // type arguments. If so, it fills in these type arguments with Unknown + // and optionally reports an error. + function reportMissingTypeArgs(node: ExpressionNode, type: Type, flags: EvalFlags): Type { + if ((flags & EvalFlags.NoSpecialize) !== 0) { + return type; + } + + // Is this a generic class that needs to be specialized? 
+ if (isInstantiableClass(type)) { + if ((flags & EvalFlags.InstantiableType) !== 0 && (flags & EvalFlags.AllowMissingTypeArgs) === 0) { + if (!type.props?.typeAliasInfo && requiresTypeArgs(type)) { + if (!type.priv.typeArgs || !type.priv.isTypeArgExplicit) { + addDiagnostic( + DiagnosticRule.reportMissingTypeArgument, + LocMessage.typeArgsMissingForClass().format({ + name: type.priv.aliasName || type.shared.name, + }), + node + ); + } + } + } + + if (!type.priv.typeArgs) { + type = createSpecializedClassType(type, /* typeArgs */ undefined, flags, node)?.type; + } + } + + // Is this a generic type alias that needs to be specialized? + if ((flags & EvalFlags.InstantiableType) !== 0) { + type = specializeTypeAliasWithDefaults(type, node); + } + + return type; + } + + // Walks up the parse tree to find a function, class, or type alias + // declaration that provides the context for a type variable. + function findScopedTypeVar(node: ExpressionNode, type: TypeVarType): ScopedTypeVarResult { + let curNode: ParseNode | undefined = node; + let nestedClassCount = 0; + + assert(TypeBase.isInstantiable(type)); + + while (curNode) { + const scopeNode = ParseTreeUtils.getTypeVarScopeNode(curNode); + if (!scopeNode) { + break; + } + curNode = scopeNode; + + let typeParamsForScope: TypeVarType[] | undefined; + let scopeUsesTypeParamSyntax = false; + + if (curNode.nodeType === ParseNodeType.Class) { + const classTypeInfo = getTypeOfClass(curNode); + if (classTypeInfo && !ClassType.isPartiallyEvaluated(classTypeInfo.classType)) { + typeParamsForScope = classTypeInfo.classType.shared.typeParams; + } + + scopeUsesTypeParamSyntax = !!curNode.d.typeParams; + nestedClassCount++; + } else if (curNode.nodeType === ParseNodeType.Function) { + const functionType = getTypeOfFunctionPredecorated(curNode); + if (functionType) { + const functionDetails = functionType.shared; + typeParamsForScope = functionDetails.typeParams; + } + + scopeUsesTypeParamSyntax = !!curNode.d.typeParams; + } else if 
(curNode.nodeType === ParseNodeType.TypeAlias) { + scopeUsesTypeParamSyntax = !!curNode.d.typeParams; + } + + if (typeParamsForScope) { + const match = typeParamsForScope.find((typeVar) => typeVar.shared.name === type.shared.name); + + if ( + match?.priv.scopeId !== undefined && + match.priv.scopeName !== undefined && + match.priv.scopeType !== undefined + ) { + // Use the scoped version of the TypeVar rather than the (unscoped) original type. + type = TypeVarType.cloneForScopeId( + type, + match.priv.scopeId, + match.priv.scopeName, + match.priv.scopeType + ); + type.shared.declaredVariance = match.shared.declaredVariance; + return { + type, + scopeNode, + foundInterveningClass: nestedClassCount > 1 && !scopeUsesTypeParamSyntax, + }; + } + } + + curNode = curNode.parent; + } + + // See if this is part of an assignment statement that is defining a type alias. + curNode = node; + while (curNode) { + let leftType: Type | undefined; + let typeAliasNode: TypeAliasNode | undefined; + let scopeNode: TypeAliasNode | AssignmentNode | undefined; + + if (curNode.nodeType === ParseNodeType.TypeAlias) { + leftType = readTypeCache(curNode.d.name, EvalFlags.None); + typeAliasNode = curNode; + scopeNode = curNode; + } else if (curNode.nodeType === ParseNodeType.Assignment) { + leftType = readTypeCache(curNode.d.leftExpr, EvalFlags.None); + scopeNode = curNode; + } + + if (leftType && scopeNode) { + // Is this a placeholder that was temporarily written to the cache for + // purposes of resolving type aliases? + if (leftType && isTypeVar(leftType) && leftType.shared.recursiveAlias) { + // Type alias statements cannot be used with old-style type variables. 
+ if (typeAliasNode && !type.shared.isTypeParamSyntax && !type.props?.typeAliasInfo) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeParameterNotDeclared().format({ + name: type.shared.name, + container: typeAliasNode.d.name.d.value, + }), + node + ); + } else { + // If this is a TypeAliasType call, the recursive type parameters will already + // be populated, and we need to verify that the type parameter is in the + // list of allowed type parameters. + const allowedTypeParams = leftType.shared.recursiveAlias?.typeParams; + if (allowedTypeParams) { + if (!allowedTypeParams.some((param) => param.shared.name === type.shared.name)) { + // Return the original type. + return { type, scopeNode, foundInterveningClass: false }; + } + } + } + + return { + type: TypeVarType.cloneForScopeId( + type, + leftType.shared.recursiveAlias.typeVarScopeId, + leftType.shared.recursiveAlias.name, + TypeVarScopeType.TypeAlias + ), + scopeNode, + foundInterveningClass: false, + }; + } + } + + curNode = curNode.parent; + } + + // Return the original type. + return { type, scopeNode: undefined, foundInterveningClass: false }; + } + + function getTypeOfMemberAccess(node: MemberAccessNode, flags: EvalFlags): TypeResult { + // Compute flags specifically for evaluating the left expression. + let leftExprFlags = EvalFlags.MemberAccessBaseDefaults; + leftExprFlags |= + flags & + (EvalFlags.TypeExpression | + EvalFlags.VarTypeAnnotation | + EvalFlags.ForwardRefs | + EvalFlags.NotParsed | + EvalFlags.NoTypeVarWithScopeId | + EvalFlags.TypeVarGetsCurScope); + + // Handle special casing for ParamSpec "args" and "kwargs" accesses. 
+ if ((flags & EvalFlags.InstantiableType) !== 0) { + const memberName = node.d.member.d.value; + if (memberName === 'args' || memberName === 'kwargs') { + leftExprFlags |= EvalFlags.NoConvertSpecialForm; + } + } + const baseTypeResult = getTypeOfExpression(node.d.leftExpr, leftExprFlags); + + if (isTypeAliasPlaceholder(baseTypeResult.type)) { + return { + type: UnknownType.create(/* isIncomplete */ true), + isIncomplete: true, + }; + } + + const typeResult = getTypeOfMemberAccessWithBaseType( + node, + baseTypeResult, + { method: 'get' }, + flags | EvalFlags.NoSpecialize + ); + + if (isCodeFlowSupportedForReference(node)) { + // Before performing code flow analysis, update the cache to prevent recursion. + writeTypeCache(node, { ...typeResult, isIncomplete: true }, flags); + writeTypeCache(node.d.member, { ...typeResult, isIncomplete: true }, flags); + + // If the type is initially unbound, see if there's a parent class that + // potentially initialized the value. + let typeAtStart = typeResult.type; + let isTypeAtStartIncomplete = !!typeResult.isIncomplete; + if (isUnbound(typeAtStart)) { + const baseType = makeTopLevelTypeVarsConcrete(baseTypeResult.type); + + let classMemberInfo: ClassMember | undefined; + if (isInstantiableClass(baseType)) { + classMemberInfo = lookUpClassMember( + baseType, + node.d.member.d.value, + MemberAccessFlags.SkipOriginalClass + ); + } else if (isClassInstance(baseType)) { + classMemberInfo = lookUpObjectMember( + baseType, + node.d.member.d.value, + MemberAccessFlags.SkipOriginalClass + ); + } + + if (classMemberInfo) { + typeAtStart = getTypeOfMember(classMemberInfo); + isTypeAtStartIncomplete = false; + } + } + + // See if we can refine the type based on code flow analysis. 
+ const codeFlowTypeResult = getFlowTypeOfReference(node, /* startNode */ undefined, { + targetSymbolId: indeterminateSymbolId, + typeAtStart: { type: typeAtStart, isIncomplete: isTypeAtStartIncomplete }, + skipConditionalNarrowing: (flags & EvalFlags.TypeExpression) !== 0, + }); + + if (codeFlowTypeResult.type) { + typeResult.type = codeFlowTypeResult.type; + } + + if (codeFlowTypeResult.isIncomplete) { + typeResult.isIncomplete = true; + } + + // Detect, report, and fill in missing type arguments if appropriate. + typeResult.type = reportMissingTypeArgs(node, typeResult.type, flags); + + // Add TypeForm details if appropriate. + typeResult.type = addTypeFormForSymbol(node, typeResult.type, flags, /* includesVarDecl */ false); + } + + if (baseTypeResult.isIncomplete) { + typeResult.isIncomplete = true; + } + + // See if we need to log an "unknown member access" diagnostic. + let skipPartialUnknownCheck = typeResult.isIncomplete; + + // Don't report an error if the type is a partially-specialized + // class being passed as an argument. This comes up frequently in + // cases where a type is passed as an argument (e.g. "defaultdict(list)"). + // It can also come up in cases like "isinstance(x, (list, dict))". + // We need to check for functions as well to handle Callable. + if ( + (isInstantiableClass(typeResult.type) && !typeResult.type.priv.includeSubclasses) || + typeResult.type.props?.specialForm + ) { + const argNode = ParseTreeUtils.getParentNodeOfType(node, ParseNodeType.Argument); + if (argNode && argNode?.parent?.nodeType === ParseNodeType.Call) { + skipPartialUnknownCheck = true; + } + } + + if (!skipPartialUnknownCheck) { + reportPossibleUnknownAssignment( + AnalyzerNodeInfo.getFileInfo(node).diagnosticRuleSet.reportUnknownMemberType, + DiagnosticRule.reportUnknownMemberType, + node.d.member, + typeResult.type, + node, + /* ignoreEmptyContainers */ false + ); + } + + // Cache the type information in the member name node. 
+ writeTypeCache(node.d.member, typeResult, flags); + + return typeResult; + } + + function getTypeOfMemberAccessWithBaseType( + node: MemberAccessNode, + baseTypeResult: TypeResult, + usage: EvaluatorUsage, + flags: EvalFlags + ): TypeResult { + let baseType = transformPossibleRecursiveTypeAlias(baseTypeResult.type); + const memberName = node.d.member.d.value; + let diag = new DiagnosticAddendum(); + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + let type: Type | undefined; + let narrowedTypeForSet: Type | undefined; + let typeErrors = false; + let isIncomplete = !!baseTypeResult.isIncomplete; + let isAsymmetricAccessor: boolean | undefined; + const isRequired = false; + const isNotRequired = false; + let memberAccessDeprecationInfo: MemberAccessDeprecationInfo | undefined; + + if (usage?.setType?.isIncomplete) { + isIncomplete = true; + } + + // If the base type was incomplete and unbound, don't proceed + // because false positive errors will be generated. + if (baseTypeResult.isIncomplete && isUnbound(baseType)) { + return { type: UnknownType.create(/* isIncomplete */ true), isIncomplete: true }; + } + + if (baseType.props?.specialForm && (flags & EvalFlags.TypeExpression) === 0) { + baseType = baseType.props.specialForm; + } + + if (isParamSpec(baseType) && baseType.priv.paramSpecAccess) { + baseType = makeTopLevelTypeVarsConcrete(baseType); + } + + switch (baseType.category) { + case TypeCategory.Any: + case TypeCategory.Unknown: + case TypeCategory.Never: { + type = baseType; + break; + } + + case TypeCategory.Unbound: { + break; + } + + case TypeCategory.TypeVar: { + if (isParamSpec(baseType)) { + // Handle special cases for "P.args" and "P.kwargs". + if (memberName === 'args' || memberName === 'kwargs') { + const isArgs = memberName === 'args'; + const paramNode = ParseTreeUtils.getEnclosingParam(node); + const expectedCategory = isArgs ? 
ParamCategory.ArgsList : ParamCategory.KwargsDict; + + if (!paramNode || paramNode.d.category !== expectedCategory) { + const errorMessage = isArgs + ? LocMessage.paramSpecArgsUsage() + : LocMessage.paramSpecKwargsUsage(); + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, errorMessage, node); + type = UnknownType.create(isIncomplete); + break; + } + + type = TypeVarType.cloneForParamSpecAccess(baseType, memberName); + break; + } + + if (!isIncomplete) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.paramSpecUnknownMember().format({ name: memberName }), + node + ); + } + + type = UnknownType.create(isIncomplete); + break; + } + + // It's illegal to reference a member from a type variable. + if ((flags & EvalFlags.TypeExpression) !== 0) { + if (!isIncomplete) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarNoMember().format({ + type: printType(baseType), + name: memberName, + }), + node.d.leftExpr + ); + } + + type = UnknownType.create(isIncomplete); + break; + } + + if (baseType.shared.recursiveAlias) { + type = UnknownType.create(/* isIncomplete */ true); + isIncomplete = true; + break; + } + + if (isTypeVarTuple(baseType)) { + break; + } + + return getTypeOfMemberAccessWithBaseType( + node, + { + type: makeTopLevelTypeVarsConcrete(baseType), + bindToSelfType: TypeBase.isInstantiable(baseType) ? convertToInstance(baseType) : baseType, + isIncomplete, + }, + usage, + EvalFlags.None + ); + } + + case TypeCategory.Class: { + let typeResult: TypeResult | undefined; + + // If this is a class-like function created via NewType, treat + // it like a function for purposes of member accesses. 
+ if ( + ClassType.isNewTypeClass(baseType) && + !baseType.priv.includeSubclasses && + prefetched?.functionClass && + isClass(prefetched.functionClass) + ) { + baseType = ClassType.cloneAsInstance(prefetched.functionClass); + } + + const enumMemberResult = getTypeOfEnumMember( + evaluatorInterface, + node, + baseType, + memberName, + isIncomplete + ); + + if (enumMemberResult) { + if (usage.method === 'get') { + typeResult = enumMemberResult; + } else { + // Is this an attempt to delete or overwrite an enum member? + if ( + isClassInstance(enumMemberResult.type) && + ClassType.isSameGenericClass(enumMemberResult.type, ClassType.cloneAsInstance(baseType)) && + enumMemberResult.type.priv.literalValue !== undefined + ) { + const diagMessage = + usage.method === 'set' ? LocMessage.enumMemberSet() : LocMessage.enumMemberDelete(); + addDiagnostic( + DiagnosticRule.reportAttributeAccessIssue, + diagMessage.format({ name: memberName }) + diag.getString(), + node.d.member, + diag.getEffectiveTextRange() ?? node.d.member + ); + } + } + } + + if (!typeResult) { + typeResult = getTypeOfBoundMember( + node.d.member, + baseType, + memberName, + usage, + diag, + (flags & EvalFlags.TypeExpression) === 0 ? 
undefined : MemberAccessFlags.TypeExpression, + baseTypeResult.bindToSelfType + ); + } + + if (typeResult) { + if (!typeResult.typeErrors) { + type = addConditionToType(typeResult.type, getTypeCondition(baseType), { + skipSelfCondition: true, + skipBoundTypeVars: true, + }); + } else { + typeErrors = true; + } + + if (typeResult.isAsymmetricAccessor) { + isAsymmetricAccessor = true; + } + + if (typeResult.isIncomplete) { + isIncomplete = true; + } + + if (typeResult.narrowedTypeForSet) { + narrowedTypeForSet = addConditionToType( + typeResult.narrowedTypeForSet, + getTypeCondition(baseType), + { skipSelfCondition: true, skipBoundTypeVars: true } + ); + } + + if (typeResult.memberAccessDeprecationInfo) { + memberAccessDeprecationInfo = typeResult.memberAccessDeprecationInfo; + } + } + break; + } + + case TypeCategory.Module: { + let symbol = ModuleType.getField(baseType, memberName); + + // If the symbol isn't found in the module's symbol table, + // see if it's defined in the `ModuleType` class. This is + // needed for modules that are synthesized for namespace + // packages. + if (!symbol && prefetched?.moduleTypeClass && isInstantiableClass(prefetched.moduleTypeClass)) { + symbol = ClassType.getSymbolTable(prefetched.moduleTypeClass).get(memberName); + } + + if (symbol && !symbol.isExternallyHidden()) { + if (usage.method === 'get') { + setSymbolAccessed(fileInfo, symbol, node.d.member); + } + + const typeResult = getEffectiveTypeOfSymbolForUsage( + symbol, + /* usageNode */ undefined, + /* useLastDecl */ true + ); + type = typeResult.type; + + if ((flags & EvalFlags.TypeExpression) !== 0) { + type = validateSymbolIsTypeExpression(node, type, !!typeResult.includesVariableDecl); + } + + // Add TypeForm details if appropriate. 
+ type = addTypeFormForSymbol(node, type, flags, !!typeResult.includesVariableDecl); + + if (isTypeVar(type)) { + type = validateTypeVarUsage(node, type, flags); + } + + // If the type resolved to "unbound", treat it as "unknown" in + // the case of a module reference because if it's truly unbound, + // that error will be reported within the module and should not + // leak into other modules that import it. + if (isUnbound(type)) { + type = UnknownType.create(/* isIncomplete */ true); + } + + if (symbol.isPrivateMember()) { + addDiagnostic( + DiagnosticRule.reportPrivateUsage, + LocMessage.privateUsedOutsideOfModule().format({ + name: memberName, + }), + node.d.member + ); + } + + if (symbol.isPrivatePyTypedImport()) { + addDiagnostic( + DiagnosticRule.reportPrivateImportUsage, + LocMessage.privateImportFromPyTypedModule().format({ + name: memberName, + module: baseType.priv.moduleName, + }), + node.d.member + ); + } + } else { + // Does the module export a top-level __getattr__ function? + if (usage.method === 'get') { + const getAttrSymbol = ModuleType.getField(baseType, '__getattr__'); + if (getAttrSymbol) { + const isModuleGetAttrSupported = + PythonVersion.isGreaterOrEqualTo( + fileInfo.executionEnvironment.pythonVersion, + pythonVersion3_7 + ) || getAttrSymbol.getDeclarations().some((decl) => decl.uri.hasExtension('.pyi')); + + if (isModuleGetAttrSupported) { + const getAttrTypeResult = getEffectiveTypeOfSymbolForUsage(getAttrSymbol); + if (isFunction(getAttrTypeResult.type)) { + const returnTypeResult = getEffectiveReturnTypeResult(getAttrTypeResult.type); + type = returnTypeResult.type; + if (getAttrTypeResult.isIncomplete || returnTypeResult.isIncomplete) { + isIncomplete = true; + } + } + } + } + } + + // If the field was not found and the module type is marked + // such that all fields should be Any/Unknown, return that type. 
+ if (!type && baseType.priv.notPresentFieldType) { + type = baseType.priv.notPresentFieldType; + } + + if (!type) { + if (!isIncomplete) { + addDiagnostic( + DiagnosticRule.reportAttributeAccessIssue, + LocMessage.moduleUnknownMember().format({ + memberName, + moduleName: baseType.priv.moduleName, + }), + node.d.member + ); + } + type = evaluatorOptions.evaluateUnknownImportsAsAny ? AnyType.create() : UnknownType.create(); + } + } + break; + } + + case TypeCategory.Union: { + type = mapSubtypes(baseType, (subtype) => { + if (isUnbound(subtype)) { + // Don't do anything if it's unbound. The error will already + // be reported elsewhere. + return undefined; + } + + if (isNoneInstance(subtype)) { + assert(isClassInstance(subtype)); + const typeResult = getTypeOfBoundMember(node.d.member, subtype, memberName, usage, diag); + + if (typeResult && !typeResult.typeErrors) { + type = addConditionToType(typeResult.type, getTypeCondition(baseType), { + skipBoundTypeVars: true, + }); + if (typeResult.isIncomplete) { + isIncomplete = true; + } + + return type; + } + + if (!isIncomplete) { + addDiagnostic( + DiagnosticRule.reportOptionalMemberAccess, + LocMessage.noneUnknownMember().format({ name: memberName }), + node.d.member + ); + } + + return undefined; + } + + const typeResult = getTypeOfMemberAccessWithBaseType( + node, + { + type: subtype, + isIncomplete: baseTypeResult.isIncomplete, + }, + usage, + EvalFlags.None + ); + + if (typeResult.isIncomplete) { + isIncomplete = true; + } + + if (typeResult.memberAccessDeprecationInfo) { + memberAccessDeprecationInfo = typeResult.memberAccessDeprecationInfo; + } + + if (typeResult.typeErrors) { + typeErrors = true; + } + + return typeResult.type; + }); + break; + } + + case TypeCategory.Function: + case TypeCategory.Overloaded: { + const hasSelf = isMethodType(baseType); + + if (memberName === '__self__' && hasSelf) { + // Handle "__self__" specially because MethodType defines + // it simply as "object". We can do better here. 
+ let functionType: FunctionType | undefined; + + if (isFunction(baseType)) { + functionType = baseType; + } else { + const overloads = OverloadedType.getOverloads(baseType); + if (overloads.length > 0) { + functionType = overloads[0]; + } + } + + type = functionType?.priv.boundToType; + } else { + const altType = hasSelf ? prefetched?.methodClass : prefetched?.functionClass; + type = getTypeOfMemberAccessWithBaseType( + node, + { type: altType ? convertToInstance(altType) : UnknownType.create() }, + usage, + flags + ).type; + } + break; + } + + default: + assertNever(baseType); + } + + // If type is undefined, emit a general error message indicating that the + // member could not be accessed. + if (!type) { + const isFunctionRule = + isFunctionOrOverloaded(baseType) || + (isClassInstance(baseType) && ClassType.isBuiltIn(baseType, ['function', 'FunctionType'])); + + if (!baseTypeResult.isIncomplete) { + let diagMessage = LocMessage.memberAccess(); + if (usage.method === 'set') { + diagMessage = LocMessage.memberSet(); + } else if (usage.method === 'del') { + diagMessage = LocMessage.memberDelete(); + } + + // If there is an expected type diagnostic addendum (used for assignments), + // use that rather than the local diagnostic addendum because it will be + // more informative. + if (usage.setExpectedTypeDiag && !usage.setExpectedTypeDiag.isEmpty()) { + diag = usage.setExpectedTypeDiag; + } + + // If the class is a TypedDict, and there's a key with the same name, + // suggest that they user want to use ["key"] name instead. + if (isClass(baseType) && baseType.shared.typedDictEntries) { + const tdKey = baseType.shared.typedDictEntries.knownItems.get(memberName); + if (tdKey) { + const subDiag = new DiagnosticAddendum(); + subDiag.addMessage(LocAddendum.typedDictKeyAccess().format({ name: memberName })); + diag.addAddendum(subDiag); + } + } + + const rule = isFunctionRule + ? 
DiagnosticRule.reportFunctionMemberAccess + : DiagnosticRule.reportAttributeAccessIssue; + + addDiagnostic( + rule, + diagMessage.format({ name: memberName, type: printType(baseType) }) + diag.getString(), + node.d.member, + diag.getEffectiveTextRange() ?? node.d.member + ); + } + + // If this is member access on a function, use "Any" so if the + // reportFunctionMemberAccess rule is disabled, we don't trigger + // additional reportUnknownMemberType diagnostics. + type = isFunctionRule ? AnyType.create() : UnknownType.create(); + } + + if ((flags & EvalFlags.TypeExpression) === 0) { + reportUseOfTypeCheckOnly(type, node.d.member); + } + + type = convertSpecialFormToRuntimeValue(type, flags); + + return { + type, + isIncomplete, + isAsymmetricAccessor, + narrowedTypeForSet, + isRequired, + isNotRequired, + memberAccessDeprecationInfo, + typeErrors, + }; + } + + function getTypeOfClassMemberName( + errorNode: ExpressionNode | undefined, + classType: ClassType, + memberName: string, + usage: EvaluatorUsage, + diag: DiagnosticAddendum | undefined, + flags: MemberAccessFlags, + selfType?: ClassType | TypeVarType, + recursionCount = 0 + ): ClassMemberLookup | undefined { + const isAccessedThroughObject = TypeBase.isInstance(classType); + + // Always look for a member with a declared type first. + let memberInfo = lookUpClassMember(classType, memberName, flags | MemberAccessFlags.DeclaredTypesOnly); + + // If we couldn't find a symbol with a declared type, use + // a symbol with an inferred type. + if (!memberInfo) { + memberInfo = lookUpClassMember(classType, memberName, flags); + } + + if (!memberInfo) { + // No attribute of that name was found. If this is a member access + // through an object, see if there's an attribute access override + // method ("__getattr__", etc.). 
+ if ((flags & MemberAccessFlags.SkipAttributeAccessOverride) === 0 && errorNode) { + const generalAttrType = applyAttributeAccessOverride(errorNode, classType, usage, memberName, selfType); + if (generalAttrType) { + return { + symbol: undefined, + type: generalAttrType.type, + isTypeIncomplete: false, + isDescriptorError: false, + isClassMember: false, + isClassVar: false, + isAsymmetricAccessor: !!generalAttrType.isAsymmetricAccessor, + }; + } + } + + // Report that the member could not be accessed. + diag?.addMessage(LocAddendum.memberUnknown().format({ name: memberName })); + return undefined; + } + + let type: Type | undefined; + let isTypeIncomplete = false; + let narrowedTypeForSet: Type | undefined; + + if (memberInfo.symbol.isInitVar()) { + diag?.addMessage(LocAddendum.memberIsInitVar().format({ name: memberName })); + return undefined; + } + + if (usage.method !== 'get' && errorNode) { + // If the usage indicates a 'set' or 'delete' and the access is within the + // class definition itself, use only the declared type to avoid circular + // type evaluation. + const containingClass = ParseTreeUtils.getEnclosingClass(errorNode); + if (containingClass) { + const containingClassType = getTypeOfClass(containingClass)?.classType; + if ( + containingClassType && + isInstantiableClass(containingClassType) && + ClassType.isSameGenericClass( + isAccessedThroughObject ? ClassType.cloneAsInstance(containingClassType) : containingClassType, + classType + ) + ) { + type = getDeclaredTypeOfSymbol(memberInfo.symbol)?.type; + if (type && isInstantiableClass(memberInfo.classType)) { + type = partiallySpecializeType( + type, + memberInfo.classType, + /* typeClassType */ undefined, + selfType + ); + } + + // If we're setting a class variable via a write through an object, + // this is normally considered a type violation. But it is allowed + // if the class variable is a descriptor object. In this case, we will + // clear the flag that causes an error to be generated. 
+ if ( + usage.method === 'set' && + isEffectivelyClassVar(memberInfo.symbol, ClassType.isDataClass(containingClassType)) && + isAccessedThroughObject + ) { + const selfClass = selfType ?? memberName === '__new__' ? undefined : classType; + const typeResult = getTypeOfMemberInternal(errorNode, memberInfo, selfClass, flags); + + if (typeResult) { + if (isDescriptorInstance(typeResult.type, /* requireSetter */ true)) { + type = typeResult.type; + flags &= MemberAccessFlags.DisallowClassVarWrites; + } + } + } + + if (!type) { + type = UnknownType.create(); + } + } + } + } + + if (!type) { + let selfClass: ClassType | TypeVarType | undefined; + + if (selfType) { + selfClass = convertToInstantiable(selfType) as TypeVarType | ClassType; + } else { + // Skip this for __new__ methods because they are not bound + // to the class but rather assume the type of the cls argument. + if (memberName !== '__new__') { + selfClass = classType; + } + } + + const typeResult = getTypeOfMemberInternal(errorNode, memberInfo, selfClass, flags); + + type = typeResult?.type ?? UnknownType.create(); + if (typeResult?.isIncomplete) { + isTypeIncomplete = true; + } + } + + // Don't include variables within typed dict classes. + if (isClass(memberInfo.classType) && ClassType.isTypedDictClass(memberInfo.classType)) { + const typedDecls = memberInfo.symbol.getTypedDeclarations(); + if (typedDecls.length > 0 && typedDecls[0].type === DeclarationType.Variable) { + diag?.addMessage(LocAddendum.memberUnknown().format({ name: memberName })); + return undefined; + } + } + + if (usage.method === 'get') { + // Mark the member accessed if it's not coming from a parent class. + if ( + errorNode && + isInstantiableClass(memberInfo.classType) && + ClassType.isSameGenericClass( + memberInfo.classType, + isAccessedThroughObject ? 
ClassType.cloneAsInstantiable(classType) : classType + ) + ) { + setSymbolAccessed(AnalyzerNodeInfo.getFileInfo(errorNode), memberInfo.symbol, errorNode); + } + + // Special-case `__init_subclass` and `__class_getitem__` because + // these are always treated as class methods even if they're not + // decorated as such. + if (memberName === '__init_subclass__' || memberName === '__class_getitem__') { + if (isFunction(type) && !FunctionType.isClassMethod(type)) { + type = FunctionType.cloneWithNewFlags(type, type.shared.flags | FunctionTypeFlags.ClassMethod); + } + } + } + + // If the member is a descriptor object, apply the descriptor protocol + // now. If the member is an instance or class method, bind the method. + let isDescriptorError = false; + let isAsymmetricAccessor = false; + let isDescriptorApplied = false; + let memberAccessDeprecationInfo: MemberAccessDeprecationInfo | undefined; + + type = mapSubtypes( + type, + (subtype) => { + const concreteSubtype = makeTopLevelTypeVarsConcrete(subtype); + const isClassMember = !memberInfo || (memberInfo.isClassMember && !memberInfo.isSlotsMember); + let resultType: Type; + + if (isClass(concreteSubtype) && isClassMember && errorNode) { + const descResult = applyDescriptorAccessMethod( + subtype, + concreteSubtype, + memberInfo, + classType, + selfType, + flags, + errorNode, + memberName, + usage, + diag + ); + + if (descResult.isAsymmetricAccessor) { + isAsymmetricAccessor = true; + } + + if (descResult.memberAccessDeprecationInfo) { + memberAccessDeprecationInfo = descResult.memberAccessDeprecationInfo; + } + + if (descResult.typeErrors) { + isDescriptorError = true; + } + + if (descResult.isDescriptorApplied) { + isDescriptorApplied = true; + } + + resultType = descResult.type; + } else if (isFunctionOrOverloaded(concreteSubtype) && TypeBase.isInstance(concreteSubtype)) { + const typeResult = bindMethodForMemberAccess( + subtype, + concreteSubtype, + memberInfo, + classType, + selfType, + flags, + memberName, + 
usage, + diag, + recursionCount + ); + + resultType = typeResult.type; + if (typeResult.typeErrors) { + isDescriptorError = true; + } + } else { + resultType = subtype; + } + + // If this is a "set" or "delete" operation, we have a bit more work to do. + if (usage.method === 'get') { + return resultType; + } + + // Check for an attempt to overwrite or delete a ClassVar member from an instance. + if ( + !isDescriptorApplied && + memberInfo && + isEffectivelyClassVar(memberInfo.symbol, ClassType.isDataClass(classType)) && + (flags & MemberAccessFlags.DisallowClassVarWrites) !== 0 + ) { + diag?.addMessage(LocAddendum.memberSetClassVar().format({ name: memberName })); + isDescriptorError = true; + } + + // Check for an attempt to overwrite or delete a final member variable. + const finalVarTypeDecl = memberInfo?.symbol + .getDeclarations() + .find((decl) => isFinalVariableDeclaration(decl)); + + if ( + finalVarTypeDecl && + errorNode && + !ParseTreeUtils.isNodeContainedWithin(errorNode, finalVarTypeDecl.node) + ) { + // If a Final instance variable is declared in the class body but is + // being assigned within an __init__ method, it's allowed. + const enclosingFunctionNode = ParseTreeUtils.getEnclosingFunction(errorNode); + if ( + !enclosingFunctionNode || + enclosingFunctionNode.d.name.d.value !== '__init__' || + (finalVarTypeDecl as VariableDeclaration).inferredTypeSource !== undefined || + isInstantiableClass(classType) + ) { + diag?.addMessage(LocMessage.finalReassigned().format({ name: memberName })); + isDescriptorError = true; + } + } + + // Check for an attempt to overwrite or delete an instance variable that is + // read-only (e.g. in a named tuple). 
+ if (memberInfo?.isInstanceMember && isClass(memberInfo.classType) && memberInfo.isReadOnly) { + diag?.addMessage(LocAddendum.readOnlyAttribute().format({ name: memberName })); + isDescriptorError = true; + } + + return resultType; + }, + { retainTypeAlias: true } + ); + + if (!isDescriptorError && usage.method === 'set' && usage.setType) { + if (errorNode && memberInfo.symbol.hasTypedDeclarations()) { + // This is an assignment to a member with a declared type. Apply + // narrowing logic based on the assigned type. Skip this for + // descriptor-based accesses. + narrowedTypeForSet = isDescriptorApplied + ? usage.setType.type + : narrowTypeBasedOnAssignment(type, usage.setType).type; + } + + // Verify that the assigned type is compatible. + if (!assignType(type, usage.setType.type, diag?.createAddendum())) { + if (!usage.setType.isIncomplete) { + diag?.addMessage( + LocAddendum.memberAssignment().format({ + type: printType(usage.setType.type), + name: memberName, + classType: printObjectTypeForClass(classType), + }) + ); + } + + // Do not narrow the type in this case. Assume the declared type. + narrowedTypeForSet = type; + isDescriptorError = true; + } + + if ( + isInstantiableClass(memberInfo.classType) && + ClassType.isDataClassFrozen(memberInfo.classType) && + isAccessedThroughObject + ) { + diag?.addMessage( + LocAddendum.dataClassFrozen().format({ + name: printType(ClassType.cloneAsInstance(memberInfo.classType)), + }) + ); + + isDescriptorError = true; + } + } + + return { + symbol: memberInfo.symbol, + type, + isTypeIncomplete, + isDescriptorError, + isClassMember: !memberInfo.isInstanceMember, + isClassVar: memberInfo.isClassVar, + classType: memberInfo.classType, + isAsymmetricAccessor, + narrowedTypeForSet, + memberAccessDeprecationInfo, + }; + } + + // Applies descriptor access methods "__get__", "__set__", or "__delete__" + // if they apply. 
+ function applyDescriptorAccessMethod( + memberType: Type, + concreteMemberType: ClassType, + memberInfo: ClassMember | undefined, + classType: ClassType, + selfType: ClassType | TypeVarType | undefined, + flags: MemberAccessFlags, + errorNode: ExpressionNode, + memberName: string, + usage: EvaluatorUsage, + diag: DiagnosticAddendum | undefined + ): MemberAccessTypeResult { + const isAccessedThroughObject = TypeBase.isInstance(classType); + + let accessMethodName: string; + if (usage.method === 'get') { + accessMethodName = '__get__'; + } else if (usage.method === 'set') { + accessMethodName = '__set__'; + } else { + accessMethodName = '__delete__'; + } + + const subDiag = diag ? new DiagnosticAddendum() : undefined; + + const methodTypeResult = getTypeOfBoundMember( + errorNode, + concreteMemberType, + accessMethodName, + /* usage */ undefined, + subDiag, + MemberAccessFlags.SkipInstanceMembers | MemberAccessFlags.SkipAttributeAccessOverride + ); + + if (!methodTypeResult || methodTypeResult.typeErrors) { + // Provide special error messages for properties. + if (ClassType.isPropertyClass(concreteMemberType) && usage.method !== 'get') { + const message = + usage.method === 'set' ? 
LocAddendum.propertyMissingSetter() : LocAddendum.propertyMissingDeleter(); + diag?.addMessage(message.format({ name: memberName })); + return { type: AnyType.create(), typeErrors: true }; + } + + if (classType.shared.typeVarScopeId) { + memberType = makeTypeVarsBound(memberType, [classType.shared.typeVarScopeId]); + } + + return { type: memberType }; + } + + const methodClassType = methodTypeResult.classType; + let methodType = methodTypeResult.type; + + if (methodTypeResult.typeErrors || !methodClassType) { + if (diag && subDiag) { + diag.addAddendum(subDiag); + } + return { type: UnknownType.create(), typeErrors: true }; + } + + if (!isFunctionOrOverloaded(methodType)) { + if (isAnyOrUnknown(methodType)) { + return { type: methodType }; + } + + // TODO - emit an error for this condition. + return { type: memberType, typeErrors: true }; + } + + // Special-case logic for properties. + if ( + ClassType.isPropertyClass(concreteMemberType) && + memberInfo && + isInstantiableClass(memberInfo.classType) && + methodType + ) { + // If the property is being accessed from a protocol class (not an instance), + // flag this as an error because a property within a protocol is meant to be + // interpreted as a read-only attribute rather than a protocol, so accessing + // it directly from the class has an ambiguous meaning. + if ((flags & MemberAccessFlags.SkipInstanceMembers) !== 0 && ClassType.isProtocolClass(classType)) { + diag?.addMessage(LocAddendum.propertyAccessFromProtocolClass()); + return { type: memberType, typeErrors: true }; + } + + // Infer return types before specializing. Otherwise a generic inferred + // return type won't be properly specialized. + inferReturnTypeIfNecessary(methodType); + + // This specialization is required specifically for properties, which should be + // generic but are not defined that way. Because of this, we use type variables + // in the synthesized methods (e.g. 
__get__) for the property class that are + // defined in the class that declares the fget method. + let accessMethodClass: ClassType | undefined; + if (usage.method === 'get') { + accessMethodClass = concreteMemberType.priv.fgetInfo?.classType; + } else if (usage.method === 'set') { + accessMethodClass = concreteMemberType.priv.fsetInfo?.classType; + } else { + accessMethodClass = concreteMemberType.priv.fdelInfo?.classType; + } + + if (accessMethodClass) { + const constraints = new ConstraintTracker(); + accessMethodClass = selfSpecializeClass(accessMethodClass); + assignType( + ClassType.cloneAsInstance(accessMethodClass), + ClassType.cloneAsInstance(memberInfo.classType), + /* diag */ undefined, + constraints + ); + accessMethodClass = solveAndApplyConstraints(accessMethodClass, constraints) as ClassType; + + const specializedType = partiallySpecializeType( + methodType, + accessMethodClass, + getTypeClassType(), + selfType ? (convertToInstantiable(selfType) as ClassType | TypeVarType) : classType + ); + + if (isFunctionOrOverloaded(specializedType)) { + methodType = specializedType; + } + } + } + + // Determine if we're calling __set__ on an asymmetric descriptor or property. + let isAsymmetricAccessor = false; + if (usage.method === 'set' && isClass(methodClassType)) { + if (isAsymmetricDescriptorClass(methodClassType)) { + isAsymmetricAccessor = true; + } + } + + if (!methodType) { + diag?.addMessage( + LocAddendum.descriptorAccessBindingFailed().format({ + name: accessMethodName, + className: printType(convertToInstance(methodClassType)), + }) + ); + + return { + type: UnknownType.create(), + typeErrors: true, + isDescriptorApplied: true, + isAsymmetricAccessor, + }; + } + + // Simulate a call to the access method. + const argList: Arg[] = []; + + // Provide "obj" argument. + let objArgType: Type; + if (ClassType.isClassProperty(concreteMemberType)) { + // Handle "class properties" as a special case. 
We need to pass + // the class rather than the object instance in this case. + objArgType = isAccessedThroughObject ? ClassType.cloneAsInstantiable(classType) : classType; + } else if (isAccessedThroughObject) { + objArgType = selfType ?? ClassType.cloneAsInstance(classType); + } else { + objArgType = getNoneType(); + } + + argList.push({ + argCategory: ArgCategory.Simple, + typeResult: { type: objArgType }, + }); + + if (usage.method === 'get') { + let classArgType: Type; + if (selfType) { + classArgType = convertToInstantiable(selfType); + } else { + classArgType = isAccessedThroughObject ? ClassType.cloneAsInstantiable(classType) : classType; + } + + // Provide "owner" argument. + argList.push({ + argCategory: ArgCategory.Simple, + typeResult: { type: classArgType }, + }); + } else if (usage.method === 'set') { + // Provide "value" argument. + argList.push({ + argCategory: ArgCategory.Simple, + typeResult: { + type: usage.setType?.type ?? UnknownType.create(), + isIncomplete: !!usage.setType?.isIncomplete, + }, + }); + } + + // Suppress diagnostics for these method calls because they would be redundant. + const callResult = suppressDiagnostics( + errorNode, + () => { + return validateCallArgs( + errorNode, + argList, + { type: methodType }, + /* constraints */ undefined, + /* skipUnknownArgCheck */ true, + /* inferenceContext */ undefined + ); + }, + (suppressedDiags) => { + // If diagnostics were recorded when suppressed, add them to the + // diagnostic as messages. + if (diag) { + suppressedDiags.forEach((message) => { + diag?.addMessageMultiline(message); + }); + } + } + ); + + // Collect deprecation information associated with the member access method. 
+ let deprecationInfo: MemberAccessDeprecationInfo | undefined; + if (callResult.overloadsUsedForCall && callResult.overloadsUsedForCall.length >= 1) { + const overloadUsed = callResult.overloadsUsedForCall[0]; + if (overloadUsed.shared.deprecatedMessage) { + deprecationInfo = { + deprecatedMessage: overloadUsed.shared.deprecatedMessage, + accessType: ClassType.isPropertyClass(concreteMemberType) ? 'property' : 'descriptor', + accessMethod: usage.method, + }; + } + } + + if (!callResult.argumentErrors) { + return { + // For set or delete, always return Any. + type: usage.method === 'get' ? callResult.returnType ?? UnknownType.create() : AnyType.create(), + isDescriptorApplied: true, + isAsymmetricAccessor, + memberAccessDeprecationInfo: deprecationInfo, + }; + } + + return { + type: UnknownType.create(), + typeErrors: true, + isDescriptorApplied: true, + isAsymmetricAccessor, + memberAccessDeprecationInfo: deprecationInfo, + }; + } + + function bindMethodForMemberAccess( + type: Type, + concreteType: FunctionType | OverloadedType, + memberInfo: ClassMember | undefined, + classType: ClassType, + selfType: ClassType | TypeVarType | undefined, + flags: MemberAccessFlags, + memberName: string, + usage: EvaluatorUsage, + diag: DiagnosticAddendum | undefined, + recursionCount = 0 + ): TypeResult { + // Check for an attempt to overwrite a final method. + if (usage.method === 'set') { + const impl = isFunction(concreteType) ? concreteType : OverloadedType.getImplementation(concreteType); + + if (impl && isFunction(impl) && FunctionType.isFinal(impl) && memberInfo && isClass(memberInfo.classType)) { + diag?.addMessage( + LocMessage.finalMethodOverride().format({ + name: memberName, + className: memberInfo.classType.shared.name, + }) + ); + + return { type: UnknownType.create(), typeErrors: true }; + } + } + + // If this function is an instance member (e.g. a lambda that was + // assigned to an instance variable), don't perform any binding. 
+ if (TypeBase.isInstance(classType)) { + if (!memberInfo || memberInfo.isInstanceMember) { + return { type: type }; + } + } + + const boundType = bindFunctionToClassOrObject( + classType, + concreteType, + memberInfo && isInstantiableClass(memberInfo.classType) ? memberInfo.classType : undefined, + (flags & MemberAccessFlags.TreatConstructorAsClassMethod) !== 0, + selfType && isClass(selfType) ? ClassType.cloneIncludeSubclasses(selfType) : selfType, + diag, + recursionCount + ); + + return { type: boundType ?? UnknownType.create(), typeErrors: !boundType }; + } + + function isAsymmetricDescriptorClass(classType: ClassType): boolean { + // If the value has already been cached in this type, return the cached value. + if (classType.priv.isAsymmetricDescriptor !== undefined) { + return classType.priv.isAsymmetricDescriptor; + } + + let isAsymmetric = false; + + const getterSymbolResult = lookUpClassMember(classType, '__get__', MemberAccessFlags.SkipBaseClasses); + const setterSymbolResult = lookUpClassMember(classType, '__set__', MemberAccessFlags.SkipBaseClasses); + + if (!getterSymbolResult || !setterSymbolResult) { + isAsymmetric = false; + } else { + let getterType = getTypeOfMember(getterSymbolResult); + const setterType = getTypeOfMember(setterSymbolResult); + + // If this is an overload, find the appropriate overload. + if (isOverloaded(getterType)) { + const getOverloads = OverloadedType.getOverloads(getterType).filter((overload) => { + if (overload.shared.parameters.length < 2) { + return false; + } + const param1Type = FunctionType.getParamType(overload, 1); + return !isNoneInstance(param1Type); + }); + + if (getOverloads.length === 1) { + getterType = getOverloads[0]; + } else { + isAsymmetric = true; + } + } + + // If this is an overload, find the appropriate overload. 
+ if (isOverloaded(setterType)) { + isAsymmetric = true; + } + + // If either the setter or getter is an overload (or some other non-function type), + // conservatively assume that it's not asymmetric. + if (isFunction(getterType) && isFunction(setterType)) { + // If there's no declared return type on the getter, assume it's symmetric. + if (setterType.shared.parameters.length >= 3 && getterType.shared.declaredReturnType) { + const setterValueType = FunctionType.getParamType(setterType, 2); + const getterReturnType = FunctionType.getEffectiveReturnType(getterType) ?? UnknownType.create(); + + if (!isTypeSame(setterValueType, getterReturnType)) { + isAsymmetric = true; + } + } + } + } + + // Cache the value for next time. + classType.priv.isAsymmetricDescriptor = isAsymmetric; + return isAsymmetric; + } + + function isClassWithAsymmetricAttributeAccessor(classType: ClassType): boolean { + // If the value has already been cached in this type, return the cached value. + if (classType.priv.isAsymmetricAttributeAccessor !== undefined) { + return classType.priv.isAsymmetricAttributeAccessor; + } + + let isAsymmetric = false; + + const getterSymbolResult = lookUpClassMember(classType, '__getattr__', MemberAccessFlags.SkipBaseClasses); + const setterSymbolResult = lookUpClassMember(classType, '__setattr__', MemberAccessFlags.SkipBaseClasses); + + if (!getterSymbolResult || !setterSymbolResult) { + isAsymmetric = false; + } else { + const getterType = getEffectiveTypeOfSymbol(getterSymbolResult.symbol); + const setterType = getEffectiveTypeOfSymbol(setterSymbolResult.symbol); + + // If either the setter or getter is an overload (or some other non-function type), + // conservatively assume that it's not asymmetric. + if (isFunction(getterType) && isFunction(setterType)) { + // If there's no declared return type on the getter, assume it's symmetric. 
+ if (setterType.shared.parameters.length >= 3 && getterType.shared.declaredReturnType) { + const setterValueType = FunctionType.getParamType(setterType, 2); + const getterReturnType = FunctionType.getEffectiveReturnType(getterType) ?? UnknownType.create(); + + if (!isTypeSame(setterValueType, getterReturnType)) { + isAsymmetric = true; + } + } + } + } + + // Cache the value for next time. + classType.priv.isAsymmetricAttributeAccessor = isAsymmetric; + return isAsymmetric; + } + + // Applies the __getattr__, __setattr__ or __delattr__ method if present. + // If it's not applicable, returns undefined. + function applyAttributeAccessOverride( + errorNode: ExpressionNode, + classType: ClassType, + usage: EvaluatorUsage, + memberName: string, + selfType?: ClassType | TypeVarType + ): MemberAccessTypeResult | undefined { + const getAttributeAccessMember = (name: string) => { + return getTypeOfBoundMember( + errorNode, + classType, + name, + /* usage */ undefined, + /* diag */ undefined, + MemberAccessFlags.SkipInstanceMembers | + MemberAccessFlags.SkipObjectBaseClass | + MemberAccessFlags.SkipTypeBaseClass | + MemberAccessFlags.SkipAttributeAccessOverride, + selfType + )?.type; + }; + + let accessMemberType: Type | undefined; + if (usage.method === 'get') { + accessMemberType = getAttributeAccessMember('__getattribute__') ?? getAttributeAccessMember('__getattr__'); + } else if (usage.method === 'set') { + accessMemberType = getAttributeAccessMember('__setattr__'); + } else { + assert(usage.method === 'del'); + accessMemberType = getAttributeAccessMember('__delattr__'); + } + + if (!accessMemberType) { + return undefined; + } + + const argList: Arg[] = []; + + // Provide "name" argument. + argList.push({ + argCategory: ArgCategory.Simple, + typeResult: { + type: + prefetched?.strClass && isInstantiableClass(prefetched.strClass) + ? 
ClassType.cloneWithLiteral(ClassType.cloneAsInstance(prefetched.strClass), memberName) + : AnyType.create(), + }, + }); + + if (usage.method === 'set') { + // Provide "value" argument. + argList.push({ + argCategory: ArgCategory.Simple, + typeResult: { + type: usage.setType?.type ?? UnknownType.create(), + isIncomplete: !!usage.setType?.isIncomplete, + }, + }); + } + + if (!isFunctionOrOverloaded(accessMemberType)) { + if (isAnyOrUnknown(accessMemberType)) { + return { type: accessMemberType }; + } + + // TODO - emit an error for this condition. + return undefined; + } + + const callResult = validateCallArgs( + errorNode, + argList, + { type: accessMemberType }, + /* constraints */ undefined, + /* skipUnknownArgCheck */ true, + /* inferenceContext */ undefined + ); + + let isAsymmetricAccessor = false; + if (usage.method === 'set') { + isAsymmetricAccessor = isClassWithAsymmetricAttributeAccessor(classType); + } + + return { + type: callResult.returnType ?? UnknownType.create(), + typeErrors: callResult.argumentErrors, + isAsymmetricAccessor, + }; + } + + function getTypeOfIndex(node: IndexNode, flags = EvalFlags.None): TypeResult { + const baseTypeResult = getTypeOfExpression(node.d.leftExpr, flags | EvalFlags.IndexBaseDefaults); + + // If this is meant to be a type and the base expression is a string expression, + // emit an error because this is an illegal annotation form and will generate a + // runtime exception. + if (flags & EvalFlags.InstantiableType) { + if (node.d.leftExpr.nodeType === ParseNodeType.StringList) { + addDiagnostic(DiagnosticRule.reportIndexIssue, LocMessage.stringNotSubscriptable(), node.d.leftExpr); + } + } + + // Check for builtin classes that will generate runtime exceptions if subscripted. + if ((flags & EvalFlags.ForwardRefs) === 0) { + // We can skip this check if the class is used within a PEP 526 variable + // type annotation within a class or function. 
For some undocumented reason, + // they don't result in runtime exceptions when used in this manner. + let skipSubscriptCheck = (flags & EvalFlags.VarTypeAnnotation) !== 0; + if (skipSubscriptCheck) { + const scopeNode = ParseTreeUtils.getExecutionScopeNode(node); + if (scopeNode?.nodeType === ParseNodeType.Module) { + skipSubscriptCheck = false; + } + } + + if (!skipSubscriptCheck) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + if ( + isInstantiableClass(baseTypeResult.type) && + ClassType.isBuiltIn(baseTypeResult.type) && + !baseTypeResult.type.priv.aliasName + ) { + const minPythonVersion = nonSubscriptableBuiltinTypes.get(baseTypeResult.type.shared.fullName); + if ( + minPythonVersion !== undefined && + PythonVersion.isLessThan(fileInfo.executionEnvironment.pythonVersion, minPythonVersion) && + !fileInfo.isStubFile + ) { + addDiagnostic( + DiagnosticRule.reportIndexIssue, + LocMessage.classNotRuntimeSubscriptable().format({ + name: baseTypeResult.type.priv.aliasName || baseTypeResult.type.shared.name, + }), + node.d.leftExpr + ); + } + } + } + } + + const indexTypeResult = getTypeOfIndexWithBaseType(node, baseTypeResult, { method: 'get' }, flags); + + if (isCodeFlowSupportedForReference(node)) { + // We limit type narrowing for index expressions to built-in types that are + // known to have symmetric __getitem__ and __setitem__ methods (i.e. the value + // passed to __setitem__ is the same type as the value returned by __getitem__). + let baseTypeSupportsIndexNarrowing = !isAny(baseTypeResult.type); + mapSubtypesExpandTypeVars(baseTypeResult.type, /* options */ undefined, (subtype) => { + if ( + !isClassInstance(subtype) || + !(ClassType.isBuiltIn(subtype) || ClassType.isTypedDictClass(subtype)) + ) { + baseTypeSupportsIndexNarrowing = false; + } + + return undefined; + }); + + if (baseTypeSupportsIndexNarrowing) { + // Before performing code flow analysis, update the cache to prevent recursion. 
+ writeTypeCache(node, { ...indexTypeResult, isIncomplete: true }, flags); + + // See if we can refine the type based on code flow analysis. + const codeFlowTypeResult = getFlowTypeOfReference(node, /* startNode */ undefined, { + targetSymbolId: indeterminateSymbolId, + typeAtStart: { + type: indexTypeResult.type, + isIncomplete: !!baseTypeResult.isIncomplete || !!indexTypeResult.isIncomplete, + }, + skipConditionalNarrowing: (flags & EvalFlags.TypeExpression) !== 0, + }); + + if (codeFlowTypeResult.type) { + indexTypeResult.type = codeFlowTypeResult.type; + } + + if (codeFlowTypeResult.isIncomplete) { + indexTypeResult.isIncomplete = true; + } + } + } + + if (baseTypeResult.isIncomplete) { + indexTypeResult.isIncomplete = true; + } + + return indexTypeResult; + } + + // If the list of type parameters includes a TypeVarTuple, we may need to adjust + // the supplied type arguments to map to the type parameter list. + function adjustTypeArgsForTypeVarTuple( + typeArgs: TypeResultWithNode[], + typeParams: TypeVarType[], + errorNode: ExpressionNode + ): TypeResultWithNode[] { + const variadicIndex = typeParams.findIndex((param) => isTypeVarTuple(param)); + + // Is there a *tuple[T, ...] somewhere in the type arguments that we can expand if needed? + let srcUnboundedTupleType: Type | undefined; + const findUnboundedTupleIndex = (startArgIndex: number) => { + return typeArgs.findIndex((arg, index) => { + if (index < startArgIndex) { + return false; + } + if ( + isUnpackedClass(arg.type) && + arg.type.priv.tupleTypeArgs && + arg.type.priv.tupleTypeArgs.length === 1 && + arg.type.priv.tupleTypeArgs[0].isUnbounded + ) { + srcUnboundedTupleType = arg.type.priv.tupleTypeArgs[0].type; + return true; + } + + return false; + }); + }; + let srcUnboundedTupleIndex = findUnboundedTupleIndex(0); + + // Allow only one unpacked tuple that maps to a TypeVarTuple. 
+ if (srcUnboundedTupleIndex >= 0) { + const secondUnboundedTupleIndex = findUnboundedTupleIndex(srcUnboundedTupleIndex + 1); + if (secondUnboundedTupleIndex >= 0) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.variadicTypeArgsTooMany(), + typeArgs[secondUnboundedTupleIndex].node + ); + } + } + + if ( + srcUnboundedTupleType && + srcUnboundedTupleIndex >= 0 && + variadicIndex >= 0 && + typeArgs.length < typeParams.length + ) { + // "Smear" the tuple type across type argument slots prior to the TypeVarTuple. + while (variadicIndex > srcUnboundedTupleIndex) { + typeArgs = [ + ...typeArgs.slice(0, srcUnboundedTupleIndex), + { node: typeArgs[srcUnboundedTupleIndex].node, type: srcUnboundedTupleType }, + ...typeArgs.slice(srcUnboundedTupleIndex), + ]; + srcUnboundedTupleIndex++; + } + + // "Smear" the tuple type across type argument slots following the TypeVarTuple. + while (typeArgs.length < typeParams.length) { + typeArgs = [ + ...typeArgs.slice(0, srcUnboundedTupleIndex + 1), + { node: typeArgs[srcUnboundedTupleIndex].node, type: srcUnboundedTupleType }, + ...typeArgs.slice(srcUnboundedTupleIndex + 1), + ]; + } + } + + // Do we need to adjust the type arguments to map to a variadic type + // param somewhere in the list? + if (variadicIndex >= 0) { + const variadicTypeVar = typeParams[variadicIndex]; + + // If the type param list ends with a ParamSpec with a default value, + // we can ignore it for purposes of finding type args that map to the + // TypeVarTuple. + let typeParamCount = typeParams.length; + while (typeParamCount > 0) { + const lastTypeParam = typeParams[typeParamCount - 1]; + if (!isParamSpec(lastTypeParam) || !lastTypeParam.shared.isDefaultExplicit) { + break; + } + + typeParamCount--; + } + + if (variadicIndex < typeArgs.length) { + // If there are typeArg lists at the end, these should map to ParamSpecs rather + // than the TypeVarTuple, so exclude them. 
+ let variadicEndIndex = variadicIndex + 1 + typeArgs.length - typeParamCount; + while (variadicEndIndex > variadicIndex) { + if (!typeArgs[variadicEndIndex - 1].typeList) { + break; + } + variadicEndIndex--; + } + const variadicTypeResults = typeArgs.slice(variadicIndex, variadicEndIndex); + + // If the type args consist of a lone TypeVarTuple, don't wrap it in a tuple. + if (variadicTypeResults.length === 1 && isTypeVarTuple(variadicTypeResults[0].type)) { + validateTypeVarTupleIsUnpacked(variadicTypeResults[0].type, variadicTypeResults[0].node); + } else { + variadicTypeResults.forEach((arg, index) => { + validateTypeArg(arg, { + allowEmptyTuple: index === 0, + allowTypeVarTuple: true, + allowUnpackedTuples: true, + }); + }); + + const variadicTypes: TupleTypeArg[] = []; + if (variadicTypeResults.length !== 1 || !variadicTypeResults[0].isEmptyTupleShorthand) { + variadicTypeResults.forEach((typeResult) => { + if (isUnpackedClass(typeResult.type) && typeResult.type.priv.tupleTypeArgs) { + appendArray(variadicTypes, typeResult.type.priv.tupleTypeArgs); + } else { + variadicTypes.push({ + type: convertToInstance(typeResult.type), + isUnbounded: false, + }); + } + }); + } + + const tupleObject = makeTupleObject(evaluatorInterface, variadicTypes, /* isUnpacked */ true); + + typeArgs = [ + ...typeArgs.slice(0, variadicIndex), + { node: typeArgs[variadicIndex].node, type: tupleObject }, + ...typeArgs.slice(variadicEndIndex, typeArgs.length), + ]; + } + } else if (!variadicTypeVar.shared.isDefaultExplicit) { + // Add an empty tuple that maps to the TypeVarTuple type parameter. + typeArgs.push({ + node: errorNode, + type: makeTupleObject(evaluatorInterface, [], /* isUnpacked */ true), + }); + } + } + + return typeArgs; + } + + // If the variadic type variable is not unpacked, report an error. 
+ function validateTypeVarTupleIsUnpacked(type: TypeVarTupleType, node: ParseNode) { + if (!type.priv.isUnpacked) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.unpackedTypeVarTupleExpected().format({ + name1: type.shared.name, + name2: type.shared.name, + }), + node + ); + return false; + } + + return true; + } + + // If the type is a generic type alias that is not specialized, provides + // default type arguments for the type alias. It optionally logs diagnostics + // for missing type arguments. + function specializeTypeAliasWithDefaults(type: Type, errorNode: ExpressionNode | undefined) { + // Is this a type alias? + const aliasInfo = type.props?.typeAliasInfo; + if (!aliasInfo) { + return type; + } + + // Is this a generic type alias that needs specializing? + if (!aliasInfo.shared.typeParams || aliasInfo.shared.typeParams.length === 0 || aliasInfo.typeArgs) { + return type; + } + + let reportDiag = false; + const defaultTypeArgs: Type[] = []; + const constraints = new ConstraintTracker(); + + aliasInfo.shared.typeParams.forEach((param) => { + if (!param.shared.isDefaultExplicit) { + reportDiag = true; + } + + let defaultType: Type; + if (param.shared.isDefaultExplicit || isParamSpec(param)) { + defaultType = solveAndApplyConstraints(param, constraints, { + replaceUnsolved: { + scopeIds: [aliasInfo.shared.typeVarScopeId], + tupleClassType: getTupleClassType(), + }, + }); + } else if (isTypeVarTuple(param) && prefetched?.tupleClass && isInstantiableClass(prefetched.tupleClass)) { + defaultType = makeTupleObject( + evaluatorInterface, + [{ type: UnknownType.create(), isUnbounded: true }], + /* isUnpacked */ true + ); + } else { + defaultType = UnknownType.create(); + } + + defaultTypeArgs.push(defaultType); + constraints.setBounds(param, defaultType); + }); + + if (reportDiag && errorNode) { + addDiagnostic( + DiagnosticRule.reportMissingTypeArgument, + LocMessage.typeArgsMissingForAlias().format({ + name: aliasInfo.shared.name, + }), + 
errorNode + ); + } + + type = TypeBase.cloneForTypeAlias( + solveAndApplyConstraints(type, constraints, { + replaceUnsolved: { + scopeIds: [aliasInfo.shared.typeVarScopeId], + tupleClassType: getTupleClassType(), + }, + }), + { ...aliasInfo, typeArgs: defaultTypeArgs } + ); + + return type; + } + + // Handles index expressions that are providing type arguments for a + // generic type alias. + function createSpecializedTypeAlias( + node: IndexNode, + baseType: Type, + flags: EvalFlags + ): TypeResultWithNode | undefined { + let aliasInfo = baseType.props?.typeAliasInfo; + let aliasBaseType = baseType; + + if (!aliasInfo && baseType.props?.typeForm) { + aliasInfo = baseType.props.typeForm?.props?.typeAliasInfo; + aliasBaseType = convertToInstantiable(baseType.props.typeForm); + } + + if (!aliasInfo?.shared.typeParams || (aliasInfo.shared.typeParams.length === 0 && aliasInfo.typeArgs)) { + return undefined; + } + + // If this is not instantiable, then the index expression isn't a specialization. + if (!TypeBase.isInstantiable(aliasBaseType)) { + return undefined; + } + + // If this is already specialized, the index expression isn't a specialization. 
+ if (aliasInfo.typeArgs) { + return undefined; + } + + inferVarianceForTypeAlias(baseType); + + const typeParams = aliasInfo.shared.typeParams; + let typeArgs: TypeResultWithNode[] | undefined; + typeArgs = adjustTypeArgsForTypeVarTuple(getTypeArgs(node, flags), typeParams, node); + let reportedError = false; + + typeArgs = transformTypeArgsForParamSpec(typeParams, typeArgs, node); + if (!typeArgs) { + typeArgs = []; + reportedError = true; + } + + let minTypeArgCount = typeParams.length; + const firstDefaultParamIndex = typeParams.findIndex((param) => !!param.shared.isDefaultExplicit); + if (firstDefaultParamIndex >= 0) { + minTypeArgCount = firstDefaultParamIndex; + } + + if (typeArgs.length > typeParams.length) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeArgsTooMany().format({ + name: printType(aliasBaseType), + expected: typeParams.length, + received: typeArgs.length, + }), + typeArgs[typeParams.length].node + ); + reportedError = true; + } else if (typeArgs.length < minTypeArgCount) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeArgsTooFew().format({ + name: printType(aliasBaseType), + expected: typeParams.length, + received: typeArgs.length, + }), + node.d.items[node.d.items.length - 1] + ); + reportedError = true; + } + + // Handle the mypy_extensions.FlexibleAlias type specially. 
+ if ( + isInstantiableClass(aliasBaseType) && + aliasBaseType.shared.fullName === 'mypy_extensions.FlexibleAlias' && + typeArgs.length >= 1 + ) { + return { node, type: typeArgs[0].type }; + } + + const constraints = new ConstraintTracker(); + const diag = new DiagnosticAddendum(); + + typeParams.forEach((param, index) => { + if (isParamSpec(param) && index < typeArgs.length) { + const typeArgType = typeArgs[index].type; + const typeList = typeArgs[index].typeList; + + if (typeList) { + const functionType = FunctionType.createSynthesizedInstance('', FunctionTypeFlags.ParamSpecValue); + typeList.forEach((paramTypeResult, paramIndex) => { + let paramType = paramTypeResult.type; + + if (!validateTypeArg(paramTypeResult)) { + paramType = UnknownType.create(); + } + + FunctionType.addParam( + functionType, + FunctionParam.create( + ParamCategory.Simple, + convertToInstance(paramType), + FunctionParamFlags.NameSynthesized | FunctionParamFlags.TypeDeclared, + `__p${paramIndex}` + ) + ); + }); + + if (typeList.length > 0) { + FunctionType.addPositionOnlyParamSeparator(functionType); + } + + assignTypeVar( + evaluatorInterface, + param, + functionType, + diag, + constraints, + AssignTypeFlags.RetainLiteralsForTypeVar + ); + } else if (isParamSpec(typeArgType)) { + assignTypeVar( + evaluatorInterface, + param, + convertToInstance(typeArgType), + diag, + constraints, + AssignTypeFlags.RetainLiteralsForTypeVar + ); + } else if (isInstantiableClass(typeArgType) && ClassType.isBuiltIn(typeArgType, 'Concatenate')) { + const concatTypeArgs = typeArgType.priv.typeArgs; + const functionType = FunctionType.createInstance('', '', '', FunctionTypeFlags.None); + + if (concatTypeArgs && concatTypeArgs.length > 0) { + concatTypeArgs.forEach((typeArg, index) => { + if (index === concatTypeArgs.length - 1) { + FunctionType.addPositionOnlyParamSeparator(functionType); + + if (isParamSpec(typeArg)) { + FunctionType.addParamSpecVariadics(functionType, typeArg); + } else if 
(isEllipsisType(typeArg)) { + FunctionType.addDefaultParams(functionType); + functionType.shared.flags |= FunctionTypeFlags.GradualCallableForm; + } + } else { + FunctionType.addParam( + functionType, + FunctionParam.create( + ParamCategory.Simple, + typeArg, + FunctionParamFlags.NameSynthesized | FunctionParamFlags.TypeDeclared, + `__p${index}` + ) + ); + } + }); + } + + assignTypeVar( + evaluatorInterface, + param, + functionType, + diag, + constraints, + AssignTypeFlags.RetainLiteralsForTypeVar + ); + } else if (isEllipsisType(typeArgType)) { + const functionType = FunctionType.createSynthesizedInstance( + '', + FunctionTypeFlags.ParamSpecValue | FunctionTypeFlags.GradualCallableForm + ); + FunctionType.addDefaultParams(functionType); + assignTypeVar(evaluatorInterface, param, functionType, diag, constraints); + } else { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeArgListExpected(), + typeArgs[index].node + ); + reportedError = true; + } + } else { + if (index < typeArgs.length && typeArgs[index].typeList) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeArgListNotAllowed(), + typeArgs[index].node + ); + reportedError = true; + } + + let typeArgType: Type; + if (index < typeArgs.length) { + typeArgType = convertToInstance(typeArgs[index].type); + } else if (param.shared.isDefaultExplicit) { + typeArgType = solveAndApplyConstraints(param, constraints, { + replaceUnsolved: { + scopeIds: [aliasInfo.shared.typeVarScopeId], + tupleClassType: getTupleClassType(), + }, + }); + } else { + typeArgType = UnknownType.create(); + } + + if ((flags & EvalFlags.EnforceVarianceConsistency) !== 0) { + const usageVariances = inferVarianceForTypeAlias(aliasBaseType); + if (usageVariances && index < usageVariances.length) { + const usageVariance = usageVariances[index]; + + if (!isVarianceOfTypeArgCompatible(typeArgType, usageVariance)) { + const messageDiag = diag.createAddendum(); + messageDiag.addMessage( + 
LocAddendum.varianceMismatchForTypeAlias().format({ + typeVarName: printType(typeArgType), + typeAliasParam: printType(typeParams[index]), + }) + ); + messageDiag.addTextRange(typeArgs[index].node); + } + } + } + + if (isUnpacked(typeArgType) && !isTypeVarTuple(param)) { + const messageDiag = diag.createAddendum(); + messageDiag.addMessage(LocMessage.unpackedArgInTypeArgument()); + messageDiag.addTextRange(typeArgs[index].node); + typeArgType = UnknownType.create(); + } + + assignTypeVar( + evaluatorInterface, + param, + typeArgType, + diag, + constraints, + AssignTypeFlags.RetainLiteralsForTypeVar + ); + } + }); + + if (!diag.isEmpty()) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeNotSpecializable().format({ type: printType(aliasBaseType) }) + diag.getString(), + node, + diag.getEffectiveTextRange() ?? node + ); + reportedError = true; + } + + const solutionSet = solveConstraints(evaluatorInterface, constraints).getMainSolutionSet(); + const aliasTypeArgs: Type[] = []; + + aliasInfo.shared.typeParams?.forEach((typeParam) => { + let typeVarType = solutionSet.getType(typeParam); + + // Fill in any unsolved type arguments with unknown. + if (!typeVarType) { + typeVarType = getUnknownForTypeVar(typeParam, getTupleClassType()); + constraints.setBounds(typeParam, typeVarType); + } + + aliasTypeArgs.push(typeVarType); + }); + + let type = TypeBase.cloneForTypeAlias(solveAndApplyConstraints(aliasBaseType, constraints), { + ...aliasInfo, + typeArgs: aliasTypeArgs, + }); + + if (isTypeFormSupported(node)) { + type = TypeBase.cloneWithTypeForm(type, reportedError ? 
undefined : convertToInstance(type)); + } + + if (baseType.props?.typeAliasInfo) { + return { type, node }; + } + + return { type: TypeBase.cloneWithTypeForm(baseType, convertToInstance(type)), node }; + } + + function getTypeOfIndexWithBaseType( + node: IndexNode, + baseTypeResult: TypeResult, + usage: EvaluatorUsage, + flags: EvalFlags + ): TypeResult { + // Handle the case where we're specializing a generic type alias. + const typeAliasResult = createSpecializedTypeAlias(node, baseTypeResult.type, flags); + if (typeAliasResult) { + return typeAliasResult; + } + + // Handle the case where Never or NoReturn are being specialized. + if (isNever(baseTypeResult.type) && baseTypeResult.type.props?.specialForm) { + // Swap in the special form type, which is the Never or NoReturn class. + baseTypeResult = { ...baseTypeResult, type: baseTypeResult.type.props.specialForm }; + } + + // Handle the case where a TypeAliasType symbol is being specialized + // in a value expression. + if ( + isClassInstance(baseTypeResult.type) && + ClassType.isBuiltIn(baseTypeResult.type, 'TypeAliasType') && + baseTypeResult.type.props?.typeForm + ) { + const typeAliasInfo = baseTypeResult.type.props.typeForm.props?.typeAliasInfo; + if (typeAliasInfo && typeAliasInfo.shared.typeParams) { + const origTypeAlias = TypeBase.cloneForTypeAlias( + convertToInstantiable(baseTypeResult.type.props.typeForm), + { ...typeAliasInfo, typeArgs: undefined } + ); + const typeFormType = createSpecializedTypeAlias(node, origTypeAlias, flags); + if (typeFormType) { + return { + type: TypeBase.cloneWithTypeForm(baseTypeResult.type, convertToInstance(typeFormType.type)), + }; + } + } + } + + if (isTypeVar(baseTypeResult.type) && isTypeAliasPlaceholder(baseTypeResult.type)) { + const typeArgTypes = getTypeArgs(node, flags).map((t) => convertToInstance(t.type)); + const type = TypeBase.cloneForTypeAlias(baseTypeResult.type, { + shared: baseTypeResult.type.shared.recursiveAlias!, + typeArgs: typeArgTypes, + }); + 
return { type }; + } + + let isIncomplete = baseTypeResult.isIncomplete; + let isRequired = false; + let isNotRequired = false; + let isReadOnly = false; + + const type = mapSubtypesExpandTypeVars( + baseTypeResult.type, + /* options */ undefined, + (concreteSubtype, unexpandedSubtype) => { + const selfType = isTypeVar(unexpandedSubtype) ? unexpandedSubtype : undefined; + + if (isAnyOrUnknown(concreteSubtype)) { + if ((flags & EvalFlags.TypeExpression) !== 0) { + // If we are expecting a type annotation here, assume that + // the subscripts are type arguments and evaluate them + // accordingly. + getTypeArgs(node, flags); + } + + return concreteSubtype; + } + + if (flags & EvalFlags.InstantiableType) { + if (isTypeVar(unexpandedSubtype)) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarNotSubscriptable().format({ + type: printType(unexpandedSubtype), + }), + node.d.leftExpr + ); + + // Evaluate the index expressions as though they are type arguments for error-reporting. + getTypeArgs(node, flags); + + return UnknownType.create(); + } + } + + if (isInstantiableClass(concreteSubtype)) { + // See if the class has a custom metaclass that supports __getitem__, etc. + if ( + concreteSubtype.shared.effectiveMetaclass && + isInstantiableClass(concreteSubtype.shared.effectiveMetaclass) && + !ClassType.isBuiltIn(concreteSubtype.shared.effectiveMetaclass, ['type', '_InitVarMeta']) && + (flags & EvalFlags.InstantiableType) === 0 + ) { + const itemMethodType = getBoundMagicMethod( + concreteSubtype, + getIndexAccessMagicMethodName(usage), + /* selfType */ undefined, + node.d.leftExpr + ); + + if ((flags & EvalFlags.TypeExpression) !== 0) { + // If the class doesn't derive from Generic, a type argument should not be allowed. 
+ addDiagnostic( + DiagnosticRule.reportInvalidTypeArguments, + LocMessage.typeArgsExpectingNone().format({ + name: printType(ClassType.cloneAsInstance(concreteSubtype)), + }), + node + ); + } + + if (itemMethodType) { + return getTypeOfIndexedObjectOrClass(node, concreteSubtype, selfType, usage).type; + } + } + + // Setting the value of an indexed class will always result + // in an exception. + if (usage.method === 'set') { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.genericClassAssigned(), + node.d.leftExpr + ); + } else if (usage.method === 'del') { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.genericClassDeleted(), + node.d.leftExpr + ); + } + + if (ClassType.isSpecialBuiltIn(concreteSubtype, 'Literal')) { + // Special-case Literal types. + return createLiteralType(concreteSubtype, node, flags); + } + + if (ClassType.isBuiltIn(concreteSubtype, 'InitVar')) { + // Special-case InitVar, used in dataclasses. + const typeArgs = getTypeArgs(node, flags); + + if ((flags & EvalFlags.TypeExpression) !== 0) { + if ((flags & EvalFlags.VarTypeAnnotation) === 0) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.initVarNotAllowed(), + node.d.leftExpr + ); + } + } + + if (typeArgs.length === 1) { + return typeArgs[0].type; + } else { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeArgsMismatchOne().format({ received: typeArgs.length }), + node.d.leftExpr + ); + + return UnknownType.create(); + } + } + + if (ClassType.isEnumClass(concreteSubtype)) { + // Special-case Enum types. + // TODO - validate that there's only one index entry + // that is a str type. + // TODO - validate that literal strings are referencing + // a known enum member. 
+ return ClassType.cloneAsInstance(concreteSubtype); + } + + const isAnnotatedClass = + isInstantiableClass(concreteSubtype) && ClassType.isBuiltIn(concreteSubtype, 'Annotated'); + const hasCustomClassGetItem = + isInstantiableClass(concreteSubtype) && ClassType.hasCustomClassGetItem(concreteSubtype); + const isGenericClass = + concreteSubtype.shared.typeParams?.length > 0 || + ClassType.isSpecialBuiltIn(concreteSubtype) || + ClassType.isBuiltIn(concreteSubtype, 'type') || + ClassType.isPartiallyEvaluated(concreteSubtype); + const isFinalAnnotation = + isInstantiableClass(concreteSubtype) && ClassType.isBuiltIn(concreteSubtype, 'Final'); + const isClassVarAnnotation = + isInstantiableClass(concreteSubtype) && ClassType.isBuiltIn(concreteSubtype, 'ClassVar'); + + // This feature is currently experimental. + const supportsTypedDictTypeArg = + AnalyzerNodeInfo.getFileInfo(node).diagnosticRuleSet.enableExperimentalFeatures && + ClassType.isBuiltIn(concreteSubtype, 'TypedDict'); + + let typeArgs = getTypeArgs(node, flags, { + isAnnotatedClass, + hasCustomClassGetItem: hasCustomClassGetItem || !isGenericClass, + isFinalAnnotation, + isClassVarAnnotation, + supportsTypedDictTypeArg, + }); + + if (!isAnnotatedClass) { + typeArgs = adjustTypeArgsForTypeVarTuple(typeArgs, concreteSubtype.shared.typeParams, node); + } + + // If this is a custom __class_getitem__, there's no need to specialize the class. + // Just return it as is. 
+ if (hasCustomClassGetItem) { + return concreteSubtype; + } + + if (concreteSubtype.priv.typeArgs) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeArguments, + LocMessage.classAlreadySpecialized().format({ + type: printType(convertToInstance(concreteSubtype), { expandTypeAlias: true }), + }), + node.d.leftExpr + ); + return concreteSubtype; + } + + const result = createSpecializedClassType(concreteSubtype, typeArgs, flags, node); + if (result.isRequired) { + isRequired = true; + } else if (result.isNotRequired) { + isNotRequired = true; + } + + if (result.isReadOnly) { + isReadOnly = true; + } + + return result.type; + } + + if (isNoneInstance(concreteSubtype)) { + if (!isIncomplete) { + addDiagnostic( + DiagnosticRule.reportOptionalSubscript, + LocMessage.noneNotSubscriptable(), + node.d.leftExpr + ); + } + + return UnknownType.create(); + } + + if (isClassInstance(concreteSubtype)) { + const typeResult = getTypeOfIndexedObjectOrClass(node, concreteSubtype, selfType, usage); + if (typeResult.isIncomplete) { + isIncomplete = true; + } + return typeResult.type; + } + + if (isNever(concreteSubtype)) { + return NeverType.createNever(); + } + + if (isUnbound(concreteSubtype)) { + return UnknownType.create(); + } + + if (!isIncomplete) { + addDiagnostic( + DiagnosticRule.reportIndexIssue, + LocMessage.typeNotSubscriptable().format({ type: printType(concreteSubtype) }), + node.d.leftExpr + ); + } + + return UnknownType.create(); + } + ); + + // In case we didn't walk the list items above, do so now. + // If we have, this information will be cached. + if (!baseTypeResult.isIncomplete) { + node.d.items.forEach((item) => { + if (!isTypeCached(item.d.valueExpr)) { + getTypeOfExpression(item.d.valueExpr, flags & EvalFlags.ForwardRefs); + } + }); + } + + return { type, isIncomplete, isReadOnly, isRequired, isNotRequired }; + } + + // Determines the effective variance of the type parameters for a generic + // type alias. 
Normally, variance is not important for type aliases, but + // it can be important in cases where the type alias is used to specify + // a base class in a class definition. + function inferVarianceForTypeAlias(type: Type): Variance[] | undefined { + const aliasInfo = type.props?.typeAliasInfo; + + // If this isn't a generic type alias, there's nothing to do. + if (!aliasInfo || !aliasInfo.shared.typeParams) { + return undefined; + } + + // Is the computed variance info already cached? + if (aliasInfo.shared.computedVariance) { + return aliasInfo.shared.computedVariance; + } + + const typeParams = aliasInfo.shared.typeParams; + + // Start with all of the usage variances unknown. + const usageVariances: Variance[] = typeParams.map(() => Variance.Unknown); + + // Prepopulate the cached value for the type alias to handle + // recursive type aliases. + aliasInfo.shared.computedVariance = usageVariances; + + // Traverse the type alias type definition and adjust the usage + // variances accordingly. + updateUsageVariancesRecursive(type, typeParams, usageVariances, Variance.Covariant); + + return usageVariances; + } + + // Looks at uses of the type parameters within the type and adjusts the + // variances accordingly. For example, if the type is `Mapping[T1, T2]`, + // then T1 will be set to invariant and T2 will be set to covariant. + function updateUsageVariancesRecursive( + type: Type, + typeAliasTypeParams: TypeVarType[], + usageVariances: Variance[], + varianceContext: Variance, + pendingTypes: Type[] = [], + recursionCount = 0 + ) { + if (recursionCount > maxTypeRecursionCount) { + return; + } + + const transformedType = transformPossibleRecursiveTypeAlias(type); + const isRecursiveTypeAlias = transformedType !== type; + + // If this is a recursive type alias, see if we've already recursed + // seen it once before in the recursion stack. If so, don't recurse + // further. 
+ if (isRecursiveTypeAlias) { + const pendingOverlaps = pendingTypes.filter((pendingType) => isTypeSame(pendingType, type)); + if (pendingOverlaps.length > 1) { + return; + } + + pendingTypes.push(type); + } + + recursionCount++; + + // Define a helper function that performs the actual usage variant update. + function updateUsageVarianceForType(type: Type, variance: Variance) { + doForEachSubtype(type, (subtype) => { + const typeParamIndex = typeAliasTypeParams.findIndex((param) => isTypeSame(param, subtype)); + if (typeParamIndex >= 0) { + usageVariances[typeParamIndex] = combineVariances(usageVariances[typeParamIndex], variance); + } else { + updateUsageVariancesRecursive( + subtype, + typeAliasTypeParams, + usageVariances, + variance, + pendingTypes, + recursionCount + ); + } + }); + } + + doForEachSubtype(transformedType, (subtype) => { + if (subtype.category === TypeCategory.Function) { + subtype.shared.parameters.forEach((param, index) => { + const paramType = FunctionType.getParamType(subtype, index); + updateUsageVarianceForType(paramType, invertVariance(varianceContext)); + }); + + const returnType = FunctionType.getEffectiveReturnType(subtype); + if (returnType) { + updateUsageVarianceForType(returnType, varianceContext); + } + } else if (subtype.category === TypeCategory.Class) { + if (subtype.priv.typeArgs) { + // If the class includes type parameters that uses auto variance, + // compute the calculated variance. + inferVarianceForClass(subtype); + + // Is the class specialized using any type arguments that correspond to + // the type alias' type parameters? 
+ subtype.priv.typeArgs.forEach((typeArg, classParamIndex) => { + if (isTupleClass(subtype)) { + updateUsageVarianceForType(typeArg, varianceContext); + } else if (classParamIndex < subtype.shared.typeParams.length) { + const classTypeParam = subtype.shared.typeParams[classParamIndex]; + if (isUnpackedClass(typeArg) && typeArg.priv.tupleTypeArgs) { + typeArg.priv.tupleTypeArgs.forEach((tupleTypeArg) => { + updateUsageVarianceForType(tupleTypeArg.type, Variance.Invariant); + }); + } else { + const effectiveVariance = + classTypeParam.priv.computedVariance ?? classTypeParam.shared.declaredVariance; + updateUsageVarianceForType( + typeArg, + varianceContext === Variance.Contravariant + ? invertVariance(effectiveVariance) + : effectiveVariance + ); + } + } + }); + } + } + }); + + if (isRecursiveTypeAlias) { + pendingTypes.pop(); + } + } + + function getIndexAccessMagicMethodName(usage: EvaluatorUsage): string { + if (usage.method === 'get') { + return '__getitem__'; + } else if (usage.method === 'set') { + return '__setitem__'; + } else { + assert(usage.method === 'del'); + return '__delitem__'; + } + } + + function getTypeOfIndexedObjectOrClass( + node: IndexNode, + baseType: ClassType, + selfType: ClassType | TypeVarType | undefined, + usage: EvaluatorUsage + ): TypeResult { + // Handle index operations for TypedDict classes specially. 
+ if (isClassInstance(baseType) && ClassType.isTypedDictClass(baseType)) { + const typeFromTypedDict = getTypeOfIndexedTypedDict(evaluatorInterface, node, baseType, usage); + if (typeFromTypedDict) { + return typeFromTypedDict; + } + } + + const magicMethodName = getIndexAccessMagicMethodName(usage); + const itemMethodType = getBoundMagicMethod(baseType, magicMethodName, selfType, node.d.leftExpr); + + if (!itemMethodType) { + addDiagnostic( + DiagnosticRule.reportIndexIssue, + LocMessage.methodNotDefinedOnType().format({ + name: magicMethodName, + type: printType(baseType), + }), + node.d.leftExpr + ); + return { type: UnknownType.create() }; + } + + // Handle the special case where the object is a tuple and + // the index is a constant number (integer) or a slice with integer + // start and end values. In these cases, we can determine + // the exact type by indexing into the tuple type array. + if ( + node.d.items.length === 1 && + !node.d.trailingComma && + !node.d.items[0].d.name && + node.d.items[0].d.argCategory === ArgCategory.Simple && + isClassInstance(baseType) + ) { + const index0Expr = node.d.items[0].d.valueExpr; + const valueType = getTypeOfExpression(index0Expr).type; + + if ( + isClassInstance(valueType) && + ClassType.isBuiltIn(valueType, 'int') && + isLiteralType(valueType) && + typeof valueType.priv.literalValue === 'number' + ) { + const indexValue = valueType.priv.literalValue; + const tupleType = getSpecializedTupleType(baseType); + + if (tupleType && tupleType.priv.tupleTypeArgs) { + if (isTupleIndexUnambiguous(tupleType, indexValue)) { + if (indexValue >= 0 && indexValue < tupleType.priv.tupleTypeArgs.length) { + return { type: tupleType.priv.tupleTypeArgs[indexValue].type }; + } else if (indexValue < 0 && tupleType.priv.tupleTypeArgs.length + indexValue >= 0) { + return { + type: tupleType.priv.tupleTypeArgs[tupleType.priv.tupleTypeArgs.length + indexValue] + .type, + }; + } + } + } + } else if (isClassInstance(valueType) && 
ClassType.isBuiltIn(valueType, 'slice')) { + const tupleType = getSpecializedTupleType(baseType); + + if (tupleType && index0Expr.nodeType === ParseNodeType.Slice) { + const slicedTupleType = getSlicedTupleType(evaluatorInterface, tupleType, index0Expr); + if (slicedTupleType) { + return { type: slicedTupleType }; + } + } + } + } + + const positionalArgs = node.d.items.filter((item) => item.d.argCategory === ArgCategory.Simple); + const unpackedListArgs = node.d.items.filter((item) => item.d.argCategory === ArgCategory.UnpackedList); + + let positionalIndexType: Type; + let isPositionalIndexTypeIncomplete = false; + + if (positionalArgs.length === 1 && unpackedListArgs.length === 0 && !node.d.trailingComma) { + // Handle the common case where there is a single positional argument. + const typeResult = getTypeOfExpression(positionalArgs[0].d.valueExpr); + positionalIndexType = typeResult.type; + if (typeResult.isIncomplete) { + isPositionalIndexTypeIncomplete = true; + } + } else { + // Package up all of the positionals into a tuple. 
+ const tupleTypeArgs: TupleTypeArg[] = []; + + const getDeterministicTupleEntries = (type: Type): TupleTypeArg[] | undefined => { + let aggregatedArgs: TupleTypeArg[] | undefined; + let isDeterministic = true; + + doForEachSubtype(type, (subtype) => { + if (!isDeterministic) { + return; + } + + const tupleType = getSpecializedTupleType(subtype); + const tupleTypeArgs = tupleType?.priv.tupleTypeArgs; + + if ( + !tupleTypeArgs || + tupleTypeArgs.some((entry) => entry.isUnbounded || isTypeVarTuple(entry.type)) + ) { + isDeterministic = false; + return; + } + + if (!aggregatedArgs) { + aggregatedArgs = tupleTypeArgs.map((entry) => ({ type: entry.type, isUnbounded: false })); + return; + } + + if (aggregatedArgs.length !== tupleTypeArgs.length) { + isDeterministic = false; + return; + } + + for (let i = 0; i < aggregatedArgs.length; i++) { + aggregatedArgs[i] = { + type: combineTypes([aggregatedArgs[i].type, tupleTypeArgs[i].type]), + isUnbounded: false, + }; + } + }); + + if (!isDeterministic || !aggregatedArgs) { + return undefined; + } + + return aggregatedArgs; + }; + + node.d.items.forEach((arg) => { + if (arg.d.argCategory === ArgCategory.Simple) { + const typeResult = getTypeOfExpression(arg.d.valueExpr); + tupleTypeArgs.push({ type: typeResult.type, isUnbounded: false }); + if (typeResult.isIncomplete) { + isPositionalIndexTypeIncomplete = true; + } + return; + } + + if (arg.d.argCategory === ArgCategory.UnpackedList) { + const typeResult = getTypeOfExpression(arg.d.valueExpr); + if (typeResult.isIncomplete) { + isPositionalIndexTypeIncomplete = true; + } + + const deterministicEntries = getDeterministicTupleEntries(typeResult.type); + if (deterministicEntries) { + appendArray(tupleTypeArgs, deterministicEntries); + return; + } + + const iterableType = + getTypeOfIterator(typeResult, /* isAsync */ false, arg.d.valueExpr)?.type ?? 
+ UnknownType.create(); + tupleTypeArgs.push({ type: iterableType, isUnbounded: true }); + } + }); + + const unboundedCount = tupleTypeArgs.filter((typeArg) => typeArg.isUnbounded).length; + if (unboundedCount > 1) { + const firstUnboundedIndex = tupleTypeArgs.findIndex((typeArg) => typeArg.isUnbounded); + const removedEntries = tupleTypeArgs.splice(firstUnboundedIndex); + tupleTypeArgs.push({ + type: combineTypes(removedEntries.map((entry) => entry.type)), + isUnbounded: true, + }); + } + + positionalIndexType = makeTupleObject(evaluatorInterface, tupleTypeArgs); + } + + const argList: Arg[] = [ + { + argCategory: ArgCategory.Simple, + typeResult: { type: positionalIndexType, isIncomplete: isPositionalIndexTypeIncomplete }, + }, + ]; + + if (usage.method === 'set') { + let setType = usage.setType?.type ?? AnyType.create(); + + // Expand constrained type variables. + if (isTypeVar(setType) && TypeVarType.hasConstraints(setType)) { + const conditionFilter = isClassInstance(baseType) ? baseType.props?.condition : undefined; + setType = makeTopLevelTypeVarsConcrete( + setType, + /* makeParamSpecsConcrete */ undefined, + conditionFilter + ); + } + + argList.push({ + argCategory: ArgCategory.Simple, + typeResult: { + type: setType, + isIncomplete: !!usage.setType?.isIncomplete, + }, + }); + } + + const callResult = validateCallArgs( + node, + argList, + { type: itemMethodType }, + /* constraints */ undefined, + /* skipUnknownArgCheck */ true, + /* inferenceContext */ undefined + ); + + return { + type: callResult.returnType ?? 
UnknownType.create(), + isIncomplete: !!callResult.isTypeIncomplete, + }; + } + + function getTypeArgs(node: IndexNode, flags: EvalFlags, options?: GetTypeArgsOptions): TypeResultWithNode[] { + const typeArgs: TypeResultWithNode[] = []; + let adjFlags = flags | EvalFlags.NoConvertSpecialForm; + adjFlags &= ~EvalFlags.TypeFormArg; + + const allowFinalClassVar = () => { + // If the annotation is a variable within the body of a dataclass, a + // Final is allowed with a ClassVar annotation. In all other cases, + // it's disallowed. + const enclosingClassNode = ParseTreeUtils.getEnclosingClass(node, /* stopeAtFunction */ true); + if (enclosingClassNode) { + const classTypeInfo = getTypeOfClass(enclosingClassNode); + if (classTypeInfo && ClassType.isDataClass(classTypeInfo.classType)) { + return true; + } + } + return false; + }; + + if (options?.isFinalAnnotation) { + adjFlags |= EvalFlags.NoFinal; + + if (!allowFinalClassVar()) { + adjFlags |= EvalFlags.NoClassVar; + } + } else if (options?.isClassVarAnnotation) { + adjFlags |= EvalFlags.NoClassVar; + + if (!allowFinalClassVar()) { + adjFlags |= EvalFlags.NoFinal; + } + } else { + adjFlags &= ~( + EvalFlags.NoSpecialize | + EvalFlags.NoParamSpec | + EvalFlags.NoTypeVarTuple | + EvalFlags.AllowRequired | + EvalFlags.EnforceVarianceConsistency + ); + + if (!options?.isAnnotatedClass) { + adjFlags |= EvalFlags.NoClassVar | EvalFlags.NoFinal; + } + + adjFlags |= EvalFlags.AllowUnpackedTuple | EvalFlags.AllowConcatenate; + } + + // Create a local function that validates a single type argument. + const getTypeArgTypeResult = (expr: ExpressionNode, argIndex: number) => { + let typeResult: TypeResultWithNode; + + // If it's a custom __class_getitem__, none of the arguments should be + // treated as types. 
+ if (options?.hasCustomClassGetItem) { + adjFlags = + EvalFlags.NoParamSpec | EvalFlags.NoTypeVarTuple | EvalFlags.NoSpecialize | EvalFlags.NoClassVar; + typeResult = { + ...getTypeOfExpression(expr, adjFlags), + node: expr, + }; + } else if (options?.isAnnotatedClass && argIndex > 0) { + // If it's an Annotated[a, b, c], only the first index should be + // treated as a type. The others can be regular (non-type) objects. + adjFlags = + EvalFlags.NoParamSpec | EvalFlags.NoTypeVarTuple | EvalFlags.NoSpecialize | EvalFlags.NoClassVar; + if (isAnnotationEvaluationPostponed(AnalyzerNodeInfo.getFileInfo(node))) { + adjFlags |= EvalFlags.ForwardRefs; + } + + typeResult = { + ...getTypeOfExpression(expr, adjFlags), + node: expr, + }; + } else { + typeResult = getTypeArg(expr, adjFlags, !!options?.supportsTypedDictTypeArg && argIndex === 0); + } + + return typeResult; + }; + + // A tuple is treated the same as a list of items in the index. + if ( + node.d.items.length === 1 && + !node.d.trailingComma && + !node.d.items[0].d.name && + node.d.items[0].d.valueExpr.nodeType === ParseNodeType.Tuple + ) { + node.d.items[0].d.valueExpr.d.items.forEach((item, index) => { + typeArgs.push(getTypeArgTypeResult(item, index)); + }); + + // Set the node's type so it isn't reevaluated later. 
+ setTypeResultForNode(node.d.items[0].d.valueExpr, { type: UnknownType.create() }); + + return typeArgs; + } + + node.d.items.forEach((arg, index) => { + const typeResult = getTypeArgTypeResult(arg.d.valueExpr, index); + + if (arg.d.argCategory !== ArgCategory.Simple) { + if (arg.d.argCategory === ArgCategory.UnpackedList) { + if (!options?.isAnnotatedClass || index === 0) { + const unpackedType = applyUnpackToTupleLike(typeResult.type); + + if (unpackedType) { + typeResult.type = unpackedType; + } else { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.unpackNotAllowed(), + arg.d.valueExpr + ); + typeResult.typeErrors = true; + } else { + typeResult.type = UnknownType.create(); + } + } + } + } + } + + if (arg.d.name) { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.keywordArgInTypeArgument(), + arg.d.valueExpr + ); + typeResult.typeErrors = true; + } else { + typeResult.type = UnknownType.create(); + } + } + + if ( + arg.d.valueExpr.nodeType !== ParseNodeType.Error || + arg.d.valueExpr.d.category !== ErrorExpressionCategory.MissingIndexOrSlice + ) { + typeArgs.push(typeResult); + } + }); + + return typeArgs; + } + + function applyUnpackToTupleLike(type: Type): Type | undefined { + if (isTypeVarTuple(type)) { + if (!type.priv.isUnpacked) { + return TypeVarType.cloneForUnpacked(type); + } + + return undefined; + } + + if (isParamSpec(type)) { + return undefined; + } + + // Is this a TypeVar that has a tuple upper bound? 
+ if (isTypeVar(type)) { + const upperBound = type.shared.boundType; + + if (upperBound && isClassInstance(upperBound) && isTupleClass(upperBound)) { + return TypeVarType.cloneForUnpacked(type); + } + + return undefined; + } + + if (isInstantiableClass(type) && !type.priv.includeSubclasses) { + if (isTupleClass(type)) { + return ClassType.cloneForUnpacked(type); + } + } + + return undefined; + } + + function getTypeArg(node: ExpressionNode, flags: EvalFlags, supportsDictExpression: boolean): TypeResultWithNode { + let typeResult: TypeResultWithNode; + + let adjustedFlags = + flags | EvalFlags.InstantiableType | EvalFlags.ConvertEllipsisToAny | EvalFlags.StrLiteralAsType; + + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + if (fileInfo.isStubFile) { + adjustedFlags |= EvalFlags.ForwardRefs; + } + + if (node.nodeType === ParseNodeType.List) { + typeResult = { + type: UnknownType.create(), + typeList: node.d.items.map((entry) => { + return { ...getTypeOfExpression(entry, adjustedFlags), node: entry }; + }), + node, + }; + + // Set the node's type so it isn't reevaluated later. + setTypeResultForNode(node, { type: UnknownType.create() }); + } else if (node.nodeType === ParseNodeType.Dictionary && supportsDictExpression) { + const inlinedTypeDict = + prefetched?.typedDictClass && isInstantiableClass(prefetched.typedDictClass) + ? createTypedDictTypeInlined(evaluatorInterface, node, prefetched.typedDictClass) + : undefined; + const keyTypeFallback = + prefetched?.strClass && isInstantiableClass(prefetched.strClass) + ? prefetched.strClass + : UnknownType.create(); + + typeResult = { + type: keyTypeFallback, + inlinedTypeDict, + node, + }; + } else { + typeResult = { ...getTypeOfExpression(node, adjustedFlags), node }; + + if (node.nodeType === ParseNodeType.Dictionary) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.dictInAnnotation(), node); + } + + if ((flags & EvalFlags.NoClassVar) !== 0) { + // "ClassVar" is not allowed as a type argument. 
+                if (isClass(typeResult.type) && ClassType.isBuiltIn(typeResult.type, 'ClassVar')) {
+                    addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.classVarNotAllowed(), node);
+                }
+            }
+        }
+
+        return typeResult;
+    }
+
+    function buildTupleTypesList(
+        entryTypeResults: TypeResult[],
+        stripLiterals: boolean,
+        convertModule: boolean
+    ): TupleTypeArg[] {
+        const entryTypes: TupleTypeArg[] = [];
+
+        for (const typeResult of entryTypeResults) {
+            let possibleUnpackedTuple: Type | undefined;
+            if (typeResult.unpackedType) {
+                possibleUnpackedTuple = typeResult.unpackedType;
+            } else if (isUnpacked(typeResult.type)) {
+                possibleUnpackedTuple = typeResult.type;
+            }
+
+            // Is this an unpacked tuple? If so, we can append the individual
+            // unpacked entries onto the new tuple. If it's not an unpacked tuple
+            // but some other iterator (e.g. a List), we won't know the number of
+            // items, so we'll need to leave the Tuple open-ended.
+            if (
+                possibleUnpackedTuple &&
+                isClassInstance(possibleUnpackedTuple) &&
+                possibleUnpackedTuple.priv.tupleTypeArgs
+            ) {
+                const typeArgs = possibleUnpackedTuple.priv.tupleTypeArgs;
+
+                if (!typeArgs) {
+                    entryTypes.push({ type: UnknownType.create(), isUnbounded: true });
+                } else {
+                    appendArray(entryTypes, typeArgs);
+                }
+            } else if (isNever(typeResult.type) && typeResult.isIncomplete && !typeResult.unpackedType) {
+                entryTypes.push({ type: UnknownType.create(/* isIncomplete */ true), isUnbounded: false });
+            } else {
+                let entryType = convertSpecialFormToRuntimeValue(typeResult.type, EvalFlags.None, convertModule);
+                entryType = stripLiterals ? stripTypeForm(stripLiteralValue(entryType)) : entryType;
+                entryTypes.push({ type: entryType, isUnbounded: !!typeResult.unpackedType });
+            }
+        }
+
+        // If there are multiple unbounded entries, combine all of them into a single
+        // unbounded entry to avoid violating the invariant that there can be at most
+        // one unbounded entry in a tuple.
+ if (entryTypes.filter((t) => t.isUnbounded).length > 1) { + const firstUnboundedEntryIndex = entryTypes.findIndex((t) => t.isUnbounded); + const removedEntries = entryTypes.splice(firstUnboundedEntryIndex); + entryTypes.push({ type: combineTypes(removedEntries.map((t) => t.type)), isUnbounded: true }); + } + + return entryTypes; + } + + function getTypeOfCall( + node: CallNode, + flags: EvalFlags, + inferenceContext: InferenceContext | undefined + ): TypeResult { + let baseTypeResult: TypeResult | undefined; + + // Check for the use of `type(x)` within a type annotation. This isn't + // allowed, and it's a common mistake, so we want to emit a diagnostic + // that guides the user to the right solution. + if ( + (flags & EvalFlags.TypeExpression) !== 0 && + node.d.leftExpr.nodeType === ParseNodeType.Name && + node.d.leftExpr.d.value === 'type' + ) { + const diag = new DiagnosticAddendum(); + diag.addMessage(LocAddendum.useTypeInstead()); + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeCallNotAllowed() + diag.getString(), + node + ); + } + + // Handle immediate calls of lambdas specially. + if (node.d.leftExpr.nodeType === ParseNodeType.Lambda) { + baseTypeResult = getTypeOfLambdaForCall(node, inferenceContext); + } else { + baseTypeResult = getTypeOfExpression( + node.d.leftExpr, + EvalFlags.CallBaseDefaults | (flags & EvalFlags.ForwardRefs) + ); + } + + const argList = ParseTreeUtils.getArgsByRuntimeOrder(node).map((arg) => { + const functionArg: Arg = { + valueExpression: arg.d.valueExpr, + argCategory: arg.d.argCategory, + node: arg, + name: arg.d.name, + }; + return functionArg; + }); + + let typeResult: TypeResult = { type: UnknownType.create() }; + + baseTypeResult.type = ensureSignatureIsUnique(baseTypeResult.type, node); + + if (!isTypeAliasPlaceholder(baseTypeResult.type)) { + if (node.d.leftExpr.nodeType === ParseNodeType.Name && node.d.leftExpr.d.value === 'super') { + // Handle the built-in "super" call specially. 
+ typeResult = getTypeOfSuperCall(node); + } else if ( + isAnyOrUnknown(baseTypeResult.type) && + node.d.leftExpr.nodeType === ParseNodeType.Name && + node.d.leftExpr.d.value === 'reveal_type' + ) { + // Handle the implicit "reveal_type" call. + typeResult = getTypeOfRevealType(node, inferenceContext); + } else if (isFunction(baseTypeResult.type) && FunctionType.isBuiltIn(baseTypeResult.type, 'reveal_type')) { + // Handle the "typing.reveal_type" call. + typeResult = getTypeOfRevealType(node, inferenceContext); + } else if (isFunction(baseTypeResult.type) && FunctionType.isBuiltIn(baseTypeResult.type, 'assert_type')) { + // Handle the "typing.assert_type" call. + typeResult = getTypeOfAssertType(node, inferenceContext); + } else if (isClass(baseTypeResult.type) && ClassType.isBuiltIn(baseTypeResult.type, 'TypeForm')) { + // Handle the "typing.TypeForm" call. + typeResult = getTypeOfTypeForm(node, baseTypeResult.type); + } else if ( + isAnyOrUnknown(baseTypeResult.type) && + node.d.leftExpr.nodeType === ParseNodeType.Name && + node.d.leftExpr.d.value === 'reveal_locals' + ) { + if (node.d.args.length === 0) { + // Handle the special-case "reveal_locals" call. + typeResult.type = getTypeOfRevealLocals(node); + } else { + addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.revealLocalsArgs(), node); + } + } else { + const callResult = validateCallArgs( + node, + argList, + baseTypeResult, + /* constraints */ undefined, + /* skipUnknownArgCheck */ false, + inferenceContext + ); + + typeResult.type = callResult.returnType ?? 
UnknownType.create(); + + if (callResult.argumentErrors) { + typeResult.typeErrors = true; + } else { + typeResult.overloadsUsedForCall = callResult.overloadsUsedForCall; + } + + if (callResult.isTypeIncomplete) { + typeResult.isIncomplete = true; + } + } + + if (baseTypeResult.isIncomplete) { + typeResult.isIncomplete = true; + } + } else { + typeResult.isIncomplete = true; + } + + // Don't bother evaluating the arguments if we're speculatively evaluating the call + // or the base type is incomplete. + if (!isSpeculativeModeInUse(node) && !baseTypeResult.isIncomplete) { + // Touch all of the args so they're marked accessed even if there were errors. + // We skip this if it's a TypeVar() call in the typing.pyi module because + // this results in a cyclical type resolution problem whereby we try to + // retrieve the str class, which inherits from Sequence, which inherits from + // Iterable, which uses a TypeVar. Without this, Iterable and Sequence classes + // have invalid type parameters. + const isCyclicalTypeVarCall = + isInstantiableClass(baseTypeResult.type) && + ClassType.isBuiltIn(baseTypeResult.type, 'TypeVar') && + AnalyzerNodeInfo.getFileInfo(node).isTypingStubFile; + + if (!isCyclicalTypeVarCall) { + argList.forEach((arg) => { + if ( + arg.valueExpression && + arg.valueExpression.nodeType !== ParseNodeType.StringList && + !isTypeCached(arg.valueExpression) + ) { + getTypeOfExpression(arg.valueExpression); + } + }); + } + } + + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.typeAnnotationCall(), node); + + typeResult = { type: UnknownType.create() }; + } + + return typeResult; + } + + // This function is used in cases where a lambda is defined and immediately + // called. In this case, we can't use normal bidirectional type inference + // to determine the lambda's type. It needs to be inferred from the argument + // types instead. 
+ function getTypeOfLambdaForCall(node: CallNode, inferenceContext: InferenceContext | undefined): TypeResult { + assert(node.d.leftExpr.nodeType === ParseNodeType.Lambda); + + const expectedType = FunctionType.createSynthesizedInstance(''); + expectedType.shared.declaredReturnType = inferenceContext + ? inferenceContext.expectedType + : UnknownType.create(); + + let isArgTypeIncomplete = false; + node.d.args.forEach((arg, index) => { + const argTypeResult = getTypeOfExpression(arg.d.valueExpr); + if (argTypeResult.isIncomplete) { + isArgTypeIncomplete = true; + } + + FunctionType.addParam( + expectedType, + FunctionParam.create( + ParamCategory.Simple, + argTypeResult.type, + FunctionParamFlags.NameSynthesized | FunctionParamFlags.TypeDeclared, + `p${index.toString()}` + ) + ); + }); + + // If the lambda's param list ends with a "/" positional parameter separator, + // add a corresponding separator to the expected type. + const lambdaParams = node.d.leftExpr.d.params; + if (lambdaParams.length > 0) { + const lastParam = lambdaParams[lambdaParams.length - 1]; + if (lastParam.d.category === ParamCategory.Simple && !lastParam.d.name) { + FunctionType.addPositionOnlyParamSeparator(expectedType); + } + } + + function getLambdaType() { + return getTypeOfExpression(node.d.leftExpr, EvalFlags.CallBaseDefaults, makeInferenceContext(expectedType)); + } + + // If one or more of the arguments are incomplete, use speculative mode + // for the lambda evaluation because it may need to be reevaluated once + // the arg types are complete. + let typeResult = + isArgTypeIncomplete || isSpeculativeModeInUse(node) || inferenceContext?.isTypeIncomplete + ? useSpeculativeMode(node.d.leftExpr, getLambdaType) + : getLambdaType(); + + // If bidirectional type inference failed, use normal type inference instead. 
+        if (typeResult.typeErrors) {
+            typeResult = getTypeOfExpression(node.d.leftExpr, EvalFlags.CallBaseDefaults);
+        }
+
+        return typeResult;
+    }
+
+    function getTypeOfTypeForm(node: CallNode, typeFormClass: ClassType): TypeResult {
+        if (
+            node.d.args.length !== 1 ||
+            node.d.args[0].d.argCategory !== ArgCategory.Simple ||
+            node.d.args[0].d.name !== undefined
+        ) {
+            addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.typeFormArgs(), node);
+            return { type: UnknownType.create() };
+        }
+
+        const typeFormResult = getTypeOfArgExpectingType(convertNodeToArg(node.d.args[0]), {
+            typeFormArg: isTypeFormSupported(node),
+            noNonTypeSpecialForms: true,
+            typeExpression: true,
+        });
+
+        if (!typeFormResult.typeErrors && typeFormResult.type.props?.typeForm) {
+            typeFormResult.type = convertToInstance(
+                ClassType.specialize(typeFormClass, [convertToInstance(typeFormResult.type.props.typeForm)])
+            );
+        }
+
+        return typeFormResult;
+    }
+
+    function getTypeOfAssertType(node: CallNode, inferenceContext: InferenceContext | undefined): TypeResult {
+        if (
+            node.d.args.length !== 2 ||
+            node.d.args[0].d.argCategory !== ArgCategory.Simple ||
+            node.d.args[0].d.name !== undefined ||
+            node.d.args[1].d.argCategory !== ArgCategory.Simple ||
+            node.d.args[1].d.name !== undefined
+        ) {
+            addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.assertTypeArgs(), node);
+            return { type: UnknownType.create() };
+        }
+
+        const arg0TypeResult = getTypeOfExpression(node.d.args[0].d.valueExpr, /* flags */ undefined, inferenceContext);
+        if (arg0TypeResult.isIncomplete) {
+            return { type: UnknownType.create(/* isIncomplete */ true), isIncomplete: true };
+        }
+
+        const assertedType = convertToInstance(
+            getTypeOfArgExpectingType(convertNodeToArg(node.d.args[1]), {
+                typeExpression: true,
+            }).type
+        );
+
+        // We'll replace TypeGuard and TypeIs with bool for purposes of assert_type testing.
+ // The spec is unclear on whether this is the correct behavior, but it seems to be + // what mypy does -- and what various library authors expect. + const arg0Type = stripTypeGuard(arg0TypeResult.type); + + if ( + !isTypeSame(assertedType, arg0Type, { + treatAnySameAsUnknown: true, + ignorePseudoGeneric: true, + ignoreConditions: true, + }) + ) { + const srcDestTypes = printSrcDestTypes(arg0TypeResult.type, assertedType, { expandTypeAlias: true }); + + addDiagnostic( + DiagnosticRule.reportAssertTypeFailure, + LocMessage.assertTypeTypeMismatch().format({ + expected: srcDestTypes.destType, + received: srcDestTypes.sourceType, + }), + node.d.args[0].d.valueExpr + ); + } + + return { type: arg0TypeResult.type }; + } + + function convertNodeToArg(node: ArgumentNode): ArgWithExpression { + return { + argCategory: node.d.argCategory, + name: node.d.name, + valueExpression: node.d.valueExpr, + }; + } + + function getTypeOfRevealType(node: CallNode, inferenceContext: InferenceContext | undefined): TypeResult { + let arg0Value: ExpressionNode | undefined; + let expectedRevealTypeNode: ExpressionNode | undefined; + let expectedRevealType: Type | undefined; + let expectedTextNode: ExpressionNode | undefined; + let expectedText: string | undefined; + + // Make sure there is only one positional argument passed as arg 0. 
+ node.d.args.forEach((arg, index) => { + if (index === 0) { + if (arg.d.argCategory === ArgCategory.Simple && !arg.d.name) { + arg0Value = arg.d.valueExpr; + } + } else if (arg.d.argCategory !== ArgCategory.Simple || !arg.d.name) { + arg0Value = undefined; + } else if (arg.d.name.d.value === 'expected_text') { + expectedTextNode = arg.d.valueExpr; + const expectedTextType = getTypeOfExpression(arg.d.valueExpr).type; + + if ( + !isClassInstance(expectedTextType) || + !ClassType.isBuiltIn(expectedTextType, 'str') || + typeof expectedTextType.priv.literalValue !== 'string' + ) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.revealTypeExpectedTextArg(), + arg.d.valueExpr + ); + } else { + expectedText = expectedTextType.priv.literalValue; + } + } else if (arg.d.name.d.value === 'expected_type') { + expectedRevealTypeNode = arg.d.valueExpr; + expectedRevealType = convertToInstance( + getTypeOfArgExpectingType(convertNodeToArg(arg), { + typeExpression: true, + }).type + ); + } + }); + + if (!arg0Value) { + addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.revealTypeArgs(), node); + return { type: UnknownType.create() }; + } + + const typeResult = getTypeOfExpression(arg0Value, /* flags */ undefined, inferenceContext); + const type = typeResult.type; + + const exprString = ParseTreeUtils.printExpression(arg0Value); + const typeString = printType(type, { expandTypeAlias: true }); + + if (!typeResult.isIncomplete) { + if (expectedText !== undefined) { + if (expectedText !== typeString) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.revealTypeExpectedTextMismatch().format({ + expected: expectedText, + received: typeString, + }), + expectedTextNode ?? 
arg0Value + ); + } + } + + if (expectedRevealType) { + if (!isTypeSame(expectedRevealType, type, { ignorePseudoGeneric: true })) { + const expectedRevealTypeText = printType(expectedRevealType); + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.revealTypeExpectedTypeMismatch().format({ + expected: expectedRevealTypeText, + received: typeString, + }), + expectedRevealTypeNode ?? arg0Value + ); + } + } + + addInformation(LocAddendum.typeOfSymbol().format({ name: exprString, type: typeString }), node.d.args[0]); + } + + return { type, isIncomplete: typeResult.isIncomplete }; + } + + function getTypeOfRevealLocals(node: CallNode) { + let curNode: ParseNode | undefined = node; + let scope: Scope | undefined; + + while (curNode) { + scope = ScopeUtils.getScopeForNode(curNode); + + // Stop when we get a valid scope that's not a list comprehension + // scope. That includes lambdas, functions, classes, and modules. + if (scope && scope.type !== ScopeType.Comprehension) { + break; + } + + curNode = curNode.parent; + } + + const infoMessages: string[] = []; + + if (scope) { + scope.symbolTable.forEach((symbol, name) => { + if (!symbol.isIgnoredForProtocolMatch()) { + const typeOfSymbol = getEffectiveTypeOfSymbol(symbol); + infoMessages.push( + LocAddendum.typeOfSymbol().format({ + name, + type: printType(typeOfSymbol, { expandTypeAlias: true }), + }) + ); + } + }); + } + + if (infoMessages.length > 0) { + addInformation(infoMessages.join('\n'), node); + } else { + addInformation(LocMessage.revealLocalsNone(), node); + } + + return getNoneType(); + } + + function getTypeOfSuperCall(node: CallNode): TypeResult { + if (node.d.args.length > 2) { + addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.superCallArgCount(), node.d.args[2]); + } + + const enclosingFunction = ParseTreeUtils.getEnclosingFunctionEvaluationScope(node); + const enclosingClass = enclosingFunction ? 
ParseTreeUtils.getEnclosingClass(enclosingFunction) : undefined; + const enclosingClassType = enclosingClass ? getTypeOfClass(enclosingClass)?.classType : undefined; + + // Determine which class the "super" call is applied to. If + // there is no first argument, then the class is implicit. + let targetClassType: Type; + if (node.d.args.length > 0) { + targetClassType = getTypeOfExpression(node.d.args[0].d.valueExpr).type; + const concreteTargetClassType = makeTopLevelTypeVarsConcrete(targetClassType); + + if ( + !isAnyOrUnknown(concreteTargetClassType) && + !isInstantiableClass(concreteTargetClassType) && + !isMetaclassInstance(concreteTargetClassType) + ) { + addDiagnostic( + DiagnosticRule.reportArgumentType, + LocMessage.superCallFirstArg().format({ type: printType(targetClassType) }), + node.d.args[0].d.valueExpr + ); + } + } else { + if (enclosingClassType) { + targetClassType = enclosingClassType ?? UnknownType.create(); + + // Zero-argument forms of super are not allowed within static methods. + // This results in a runtime exception. + if (enclosingFunction) { + const functionInfo = getFunctionInfoFromDecorators( + evaluatorInterface, + enclosingFunction, + /* isInClass */ true + ); + + if ((functionInfo?.flags & FunctionTypeFlags.StaticMethod) !== 0) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.superCallZeroArgFormStaticMethod(), + node.d.leftExpr + ); + } + } + } else { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.superCallZeroArgForm(), + node.d.leftExpr + ); + targetClassType = UnknownType.create(); + } + } + + const concreteTargetClassType = makeTopLevelTypeVarsConcrete(targetClassType); + + // Determine whether to further narrow the type. 
+ let secondArgType: Type | undefined; + let bindToType: ClassType | undefined; + + if (node.d.args.length > 1) { + secondArgType = getTypeOfExpression(node.d.args[1].d.valueExpr).type; + const secondArgConcreteType = makeTopLevelTypeVarsConcrete(secondArgType); + + let reportError = false; + + doForEachSubtype(secondArgConcreteType, (secondArgSubtype) => { + if (isAnyOrUnknown(secondArgSubtype)) { + // Ignore unknown or any types. + } else if (isClassInstance(secondArgSubtype)) { + if (isInstantiableClass(concreteTargetClassType)) { + if ( + !derivesFromClassRecursive( + ClassType.cloneAsInstantiable(secondArgSubtype), + concreteTargetClassType, + /* ignoreUnknown */ true + ) + ) { + reportError = true; + } + } + bindToType = secondArgSubtype; + } else if (isInstantiableClass(secondArgSubtype)) { + if (isInstantiableClass(concreteTargetClassType)) { + if ( + !ClassType.isBuiltIn(concreteTargetClassType, 'type') && + !derivesFromClassRecursive( + secondArgSubtype, + concreteTargetClassType, + /* ignoreUnknown */ true + ) + ) { + reportError = true; + } + } + bindToType = secondArgSubtype; + } else { + reportError = true; + } + }); + + if (reportError) { + addDiagnostic( + DiagnosticRule.reportArgumentType, + LocMessage.superCallSecondArg().format({ type: printType(targetClassType) }), + node.d.args[1].d.valueExpr + ); + + return { type: UnknownType.create() }; + } + } else if (enclosingClassType) { + bindToType = ClassType.cloneAsInstance(enclosingClassType); + + // Get the type from the self or cls parameter if it is explicitly annotated. + // If it's a TypeVar, change the bindToType into a conditional type. 
+ const enclosingMethod = ParseTreeUtils.getEnclosingFunction(node); + let implicitBindToType: Type | undefined; + + if (enclosingMethod) { + const methodTypeInfo = getTypeOfFunction(enclosingMethod); + if (methodTypeInfo) { + const methodType = methodTypeInfo.functionType; + if ( + FunctionType.isClassMethod(methodType) || + FunctionType.isConstructorMethod(methodType) || + FunctionType.isInstanceMethod(methodType) + ) { + if ( + methodType.shared.parameters.length > 0 && + FunctionParam.isTypeDeclared(methodType.shared.parameters[0]) + ) { + let paramType = FunctionType.getParamType(methodType, 0); + const liveScopeIds = ParseTreeUtils.getTypeVarScopesForNode(node); + paramType = makeTypeVarsBound(paramType, liveScopeIds); + implicitBindToType = makeTopLevelTypeVarsConcrete(paramType); + } + } + } + } + + if (bindToType && implicitBindToType) { + const typeCondition = getTypeCondition(implicitBindToType); + if (typeCondition) { + bindToType = addConditionToType(bindToType, typeCondition); + } else if (isClass(implicitBindToType)) { + bindToType = implicitBindToType; + } + } + } + + // Determine whether super() should return an instance of the class or + // the class itself. It depends on whether the super() call is located + // within an instance method or not. + let resultIsInstance = true; + if (node.d.args.length <= 1) { + const enclosingMethod = ParseTreeUtils.getEnclosingFunction(node); + if (enclosingMethod) { + const methodType = getTypeOfFunction(enclosingMethod); + if (methodType) { + if ( + FunctionType.isStaticMethod(methodType.functionType) || + FunctionType.isConstructorMethod(methodType.functionType) || + FunctionType.isClassMethod(methodType.functionType) + ) { + resultIsInstance = false; + } + } + } + } + + // Python docs indicate that super() isn't valid for + // operations other than member accesses or attribute lookups. 
+        const parentNode = node.parent;
+        if (parentNode?.nodeType === ParseNodeType.MemberAccess) {
+            const memberName = parentNode.d.member.d.value;
+            let effectiveTargetClass = isClass(concreteTargetClassType) ? concreteTargetClassType : undefined;
+
+            // If the bind-to type is a protocol, don't use the effective target class.
+            // This pattern is used for mixins, where the mixin type is a protocol class
+            // that is used to decorate the "self" or "cls" parameter.
+            let isProtocolClass = false;
+            if (
+                bindToType &&
+                ClassType.isProtocolClass(bindToType) &&
+                effectiveTargetClass &&
+                !ClassType.isSameGenericClass(
+                    TypeBase.isInstance(bindToType) ? ClassType.cloneAsInstantiable(bindToType) : bindToType,
+                    effectiveTargetClass
+                )
+            ) {
+                isProtocolClass = true;
+                effectiveTargetClass = undefined;
+            }
+
+            if (bindToType) {
+                bindToType = selfSpecializeClass(bindToType, { useBoundTypeVars: true });
+            }
+
+            const lookupResults = bindToType
+                ? lookUpClassMember(bindToType, memberName, MemberAccessFlags.Default, effectiveTargetClass)
+                : undefined;
+
+            let resultType: Type;
+            if (lookupResults && isInstantiableClass(lookupResults.classType)) {
+                resultType = lookupResults.classType;
+
+                if (isProtocolClass) {
+                    // If the bindToType is a protocol class, set the "include subclasses" flag
+                    // so we don't enforce that called methods are implemented within the protocol.
+                    resultType = ClassType.cloneIncludeSubclasses(resultType);
+                }
+            } else if (
+                effectiveTargetClass &&
+                !isAnyOrUnknown(effectiveTargetClass) &&
+                !derivesFromAnyOrUnknown(effectiveTargetClass)
+            ) {
+                resultType = prefetched?.objectClass ?? UnknownType.create();
+            } else {
+                resultType = UnknownType.create();
+            }
+
+            let bindToSelfType: ClassType | TypeVarType | undefined;
+            if (bindToType) {
+                if (secondArgType) {
+                    // If a TypeVar was passed as the second argument, use it
+                    // to derive the self type.
+ if (isTypeVar(secondArgType)) { + bindToSelfType = convertToInstance(secondArgType); + } + } else { + // If this is a zero-argument form of super(), synthesize + // a Self type to bind to. + bindToSelfType = TypeBase.cloneForCondition( + TypeVarType.cloneAsBound( + synthesizeTypeVarForSelfCls( + ClassType.cloneIncludeSubclasses(bindToType, /* includeSubclasses */ false), + /* isClsParam */ false + ) + ), + bindToType.props?.condition + ); + } + } + + const type = resultIsInstance ? convertToInstance(resultType, /* includeSubclasses */ false) : resultType; + + return { type, bindToSelfType }; + } + + // Handle the super() call when used outside of a member access expression. + if (isInstantiableClass(concreteTargetClassType)) { + // We don't know which member is going to be accessed, so we cannot + // deterministically determine the correct type in this case. We'll + // use a heuristic that produces the "correct" (desired) behavior in + // most cases. If there's a bindToType and the targetClassType is one + // of the base classes of the bindToType, we'll return the next base + // class. + if (bindToType) { + let nextBaseClassType: Type | undefined; + + if ( + ClassType.isSameGenericClass( + TypeBase.isInstance(bindToType) ? 
ClassType.cloneAsInstantiable(bindToType) : bindToType, + concreteTargetClassType + ) + ) { + if (bindToType.shared.baseClasses.length > 0) { + nextBaseClassType = bindToType.shared.baseClasses[0]; + } + } else { + const baseClassIndex = bindToType.shared.baseClasses.findIndex( + (baseClass) => + isClass(baseClass) && + ClassType.isSameGenericClass(baseClass, concreteTargetClassType as ClassType) + ); + + if (baseClassIndex >= 0 && baseClassIndex < bindToType.shared.baseClasses.length - 1) { + nextBaseClassType = bindToType.shared.baseClasses[baseClassIndex + 1]; + } + } + + if (nextBaseClassType) { + if (isInstantiableClass(nextBaseClassType)) { + nextBaseClassType = specializeForBaseClass(bindToType, nextBaseClassType); + } + return { type: resultIsInstance ? convertToInstance(nextBaseClassType) : nextBaseClassType }; + } + + // There's not much we can say about the type. Simply return object or type. + if (prefetched?.typeClass && isInstantiableClass(prefetched.typeClass)) { + return { + type: resultIsInstance ? getObjectType() : convertToInstance(prefetched.typeClass), + }; + } + } else { + // If the class derives from one or more unknown classes, + // return unknown here to prevent spurious errors. + if (concreteTargetClassType.shared.mro.some((mroBase) => isAnyOrUnknown(mroBase))) { + return { type: UnknownType.create() }; + } + + const baseClasses = concreteTargetClassType.shared.baseClasses; + if (baseClasses.length > 0) { + const baseClassType = baseClasses[0]; + if (isInstantiableClass(baseClassType)) { + return { + type: resultIsInstance ? ClassType.cloneAsInstance(baseClassType) : baseClassType, + }; + } + } + } + } + + return { type: UnknownType.create() }; + } + + // When evaluating a call, the errorNode is typically the call node, which + // encompasses all of the argument expressions. This means we can normally + // use the errorNode as the root for speculative evaluation. However, there + // are some cases where we don't have a call node (e.g. 
in the case of an + // __init_subclass__ validation). Here we need to find some other parent + // node of the error node that encompasses all of the arguments. + function getSpeculativeNodeForCall(errorNode: ExpressionNode): ParseNode { + // If the error node is within an arg, expand to include the parent of the arg list. + const argParent = ParseTreeUtils.getParentNodeOfType(errorNode, ParseNodeType.Argument); + if (argParent?.parent) { + return argParent.parent; + } + + // If the error node is the name in a class declaration, expand to include the class node. + if ( + errorNode.nodeType === ParseNodeType.Name && + errorNode.parent?.nodeType === ParseNodeType.Class && + errorNode.parent.d.name === errorNode + ) { + return errorNode.parent; + } + + return errorNode; + } + + // Attempts to find an overloaded function for each set of argument + // types in the expandedArgTypes list. If an argument type is undefined, + // its type is evaluated from the argument's expression using the + // corresponding parameter's expected type. The first time this is called, + // there will be only one argument list in expandedArgTypes, and all entries + // (one for each argument) will be undefined. On subsequent calls, this + // list will grow to include union expansions. 
+ function validateOverloadsWithExpandedTypes( + errorNode: ExpressionNode, + expandedArgTypes: (Type | undefined)[][], + argParamMatches: MatchArgsToParamsResult[], + constraints: ConstraintTracker | undefined, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined + ): CallResult { + const returnTypes: Type[] = []; + let matchedOverloads: MatchedOverloadInfo[] = []; + let isTypeIncomplete = false; + let overloadsUsedForCall: FunctionType[] = []; + let isDefinitiveMatchFound = false; + const speculativeNode = getSpeculativeNodeForCall(errorNode); + + for (let expandedTypesIndex = 0; expandedTypesIndex < expandedArgTypes.length; expandedTypesIndex++) { + let matchedOverload: FunctionType | undefined; + const argTypeOverride = expandedArgTypes[expandedTypesIndex]; + const hasArgTypeOverride = argTypeOverride.some((a) => a !== undefined); + let possibleMatchResults: MatchedOverloadInfo[] = []; + let possibleMatchInvolvesIncompleteUnknown = false; + isDefinitiveMatchFound = false; + + for (let overloadIndex = 0; overloadIndex < argParamMatches.length; overloadIndex++) { + const overload = argParamMatches[overloadIndex].overload; + + let matchResults = argParamMatches[overloadIndex]; + if (hasArgTypeOverride) { + matchResults = { ...argParamMatches[overloadIndex] }; + matchResults.argParams = matchResults.argParams.map((argParam, argIndex) => { + if (!argTypeOverride[argIndex]) { + return argParam; + } + const argParamCopy = { ...argParam }; + argParamCopy.argType = argTypeOverride[argIndex]; + return argParamCopy; + }); + } + + // Clone the constraints so we don't modify the original. + const effectiveConstraints = constraints?.clone() ?? new ConstraintTracker(); + + // Use speculative mode so we don't output any diagnostics or + // record any final types in the type cache. 
+ const callResult = useSpeculativeMode(speculativeNode, () => { + return validateArgTypesWithContext( + errorNode, + matchResults, + effectiveConstraints, + /* skipUnknownArgCheck */ true, + inferenceContext + ); + }); + + if (callResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + + if (!callResult.argumentErrors && callResult.returnType) { + overloadsUsedForCall.push(overload); + + matchedOverload = overload; + const matchedOverloadInfo: MatchedOverloadInfo = { + overload: matchedOverload, + matchResults, + constraints: effectiveConstraints, + returnType: callResult.returnType, + argResults: callResult.argResults ?? [], + }; + matchedOverloads.push(matchedOverloadInfo); + + if (callResult.anyOrUnknownArg || matchResults.unpackedArgOfUnknownLength) { + possibleMatchResults.push(matchedOverloadInfo); + + if (callResult.anyOrUnknownArg) { + if (isIncompleteUnknown(callResult.anyOrUnknownArg)) { + possibleMatchInvolvesIncompleteUnknown = true; + } + } + } else { + returnTypes.push(callResult.returnType); + isDefinitiveMatchFound = true; + break; + } + } + } + + // If we didn't find a definitive match that doesn't depend on + // an Any or Unknown argument, fall back on the possible match. + // If there were multiple possible matches, evaluate the type as + // Unknown, but include the "possible types" to allow for completion + // suggestions. + if (!isDefinitiveMatchFound && possibleMatchResults.length > 0) { + possibleMatchResults = filterOverloadMatchesForUnpackedArgs(possibleMatchResults); + possibleMatchResults = filterOverloadMatchesForAnyArgs(possibleMatchResults); + + // Did the filtering produce a single result? If so, we're done. + if (possibleMatchResults.length === 1) { + overloadsUsedForCall = [possibleMatchResults[0].overload]; + returnTypes.push(possibleMatchResults[0].returnType); + matchedOverloads = [possibleMatchResults[0]]; + } else { + // Eliminate any return types that are subsumed by other return types. 
+ let dedupedMatchResults: Type[] = []; + let dedupedResultsIncludeAny = false; + + possibleMatchResults.forEach((result) => { + let isSubtypeSubsumed = false; + + for (let dedupedIndex = 0; dedupedIndex < dedupedMatchResults.length; dedupedIndex++) { + if (assignType(dedupedMatchResults[dedupedIndex], result.returnType)) { + const anyOrUnknown = containsAnyOrUnknown( + dedupedMatchResults[dedupedIndex], + /* recurse */ false + ); + if (!anyOrUnknown) { + isSubtypeSubsumed = true; + } else if (isAny(anyOrUnknown)) { + dedupedResultsIncludeAny = true; + } + break; + } else if (assignType(result.returnType, dedupedMatchResults[dedupedIndex])) { + const anyOrUnknown = containsAnyOrUnknown(result.returnType, /* recurse */ false); + if (!anyOrUnknown) { + dedupedMatchResults[dedupedIndex] = NeverType.createNever(); + } else if (isAny(anyOrUnknown)) { + dedupedResultsIncludeAny = true; + } + break; + } + } + + if (!isSubtypeSubsumed) { + dedupedMatchResults.push(result.returnType); + } + }); + + dedupedMatchResults = dedupedMatchResults.filter((t) => !isNever(t)); + const combinedTypes = combineTypes(dedupedMatchResults); + + let returnType = combinedTypes; + if (dedupedMatchResults.length > 1) { + // If one or more of the deduped types is Any or contains Any, + // we will assume that the person who defined the overload really + // wanted Any rather than Unknown. In cases where the deduped types + // simply contains conflicting results without an Any, we'll use + // an UnknownType. + if (dedupedResultsIncludeAny) { + returnType = AnyType.create(); + } else { + returnType = UnknownType.createPossibleType( + combinedTypes, + possibleMatchInvolvesIncompleteUnknown + ); + } + } + + returnTypes.push(returnType); + } + } + + if (!matchedOverload) { + return { argumentErrors: true, isTypeIncomplete, overloadsUsedForCall }; + } + } + + // We found a match for all of the expanded argument lists. Copy the + // resulting type var context back into the caller's type var context. 
+ // Use the type var context from the last matched overload because it + // includes the type var solutions for all earlier matched overloads. + if (constraints && isDefinitiveMatchFound) { + constraints.copyFromClone(matchedOverloads[matchedOverloads.length - 1].constraints); + } + + // And run through the first expanded argument list one more time to + // populate the type cache. + const finalConstraints = constraints ?? matchedOverloads[0].constraints; + const finalCallResult = validateArgTypesWithContext( + errorNode, + matchedOverloads[0].matchResults, + finalConstraints, + skipUnknownArgCheck, + inferenceContext + ); + + if (finalCallResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + + return { + argumentErrors: finalCallResult.argumentErrors, + anyOrUnknownArg: finalCallResult.anyOrUnknownArg, + returnType: combineTypes(returnTypes), + isTypeIncomplete, + specializedInitSelfType: finalCallResult.specializedInitSelfType, + overloadsUsedForCall, + }; + } + + // Determines whether one or more overloads can be eliminated because they + // rely on an unpacked argument of unknown length when there is at least + // one overload that doesn't because it maps to an *args parameter. + function filterOverloadMatchesForUnpackedArgs(matches: MatchedOverloadInfo[]): MatchedOverloadInfo[] { + if (matches.length < 2) { + return matches; + } + + // Is there at least one overload that relies on unpacked args for a match? + const unpackedArgsOverloads = matches.filter((match) => match.matchResults.unpackedArgMapsToVariadic); + if (unpackedArgsOverloads.length === matches.length || unpackedArgsOverloads.length === 0) { + return matches; + } + + return unpackedArgsOverloads; + } + + // Determines whether multiple incompatible overloads match + // due to an Any or Unknown argument type. 
+ function filterOverloadMatchesForAnyArgs(matches: MatchedOverloadInfo[]): MatchedOverloadInfo[] { + if (matches.length < 2) { + return matches; + } + + // If all of the return types match, select the first one. + if ( + areTypesSame( + matches.map((match) => match.returnType), + { treatAnySameAsUnknown: true } + ) + ) { + return [matches[0]]; + } + + const firstArgResults = matches[0].argResults; + if (!firstArgResults) { + return matches; + } + + let foundAmbiguousAnyArg = false; + for (let i = 0; i < firstArgResults.length; i++) { + // If the arg is Any or Unknown, see if the corresponding + // parameter types differ in any way. + if (isAnyOrUnknown(firstArgResults[i].argType)) { + const paramTypes = matches.map((match) => + i < match.matchResults.argParams.length + ? match.matchResults.argParams[i].paramType + : UnknownType.create() + ); + if (!areTypesSame(paramTypes, { treatAnySameAsUnknown: true })) { + foundAmbiguousAnyArg = true; + } + } + } + + // If the first overload has a different number of effective arguments + // than latter overloads, don't filter any of them. This typically means + // that one of the arguments is an unpacked iterator, and it maps to + // an indeterminate number of parameters, which means that the overload + // selection is ambiguous. + if (foundAmbiguousAnyArg || matches.some((match) => match.argResults.length !== firstArgResults.length)) { + return matches; + } + + return [matches[0]]; + } + + function getBestOverloadForArgs( + errorNode: ExpressionNode, + typeResult: TypeResult, + argList: Arg[] + ): FunctionType | undefined { + let overloadIndex = 0; + const matches: MatchArgsToParamsResult[] = []; + const speculativeNode = getSpeculativeNodeForCall(errorNode); + + useSignatureTracker(errorNode, () => { + // Create a list of potential overload matches based on arguments. 
+ OverloadedType.getOverloads(typeResult.type).forEach((overload) => { + useSpeculativeMode(speculativeNode, () => { + const matchResults = matchArgsToParams( + errorNode, + argList, + { type: overload, isIncomplete: typeResult.isIncomplete }, + overloadIndex + ); + + if (!matchResults.argumentErrors) { + matches.push(matchResults); + } + + overloadIndex++; + }); + }); + }); + + let winningOverloadIndex: number | undefined; + + matches.forEach((match, matchIndex) => { + if (winningOverloadIndex === undefined) { + useSpeculativeMode(speculativeNode, () => { + const callResult = validateArgTypes( + errorNode, + match, + new ConstraintTracker(), + /* skipUnknownArgCheck */ true + ); + + if (callResult && !callResult.argumentErrors) { + winningOverloadIndex = matchIndex; + } + }); + } + }); + + return winningOverloadIndex === undefined ? undefined : matches[winningOverloadIndex].overload; + } + + function validateOverloadedArgTypes( + errorNode: ExpressionNode, + argList: Arg[], + typeResult: TypeResult, + constraints: ConstraintTracker | undefined, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined + ): CallResult { + const filteredMatchResults: MatchArgsToParamsResult[] = []; + let contextFreeArgTypes: Type[] | undefined; + let isTypeIncomplete = !!typeResult.isIncomplete; + const type = typeResult.type; + const speculativeNode = getSpeculativeNodeForCall(errorNode); + + // Start by evaluating the types of the arguments without any expected + // type. Also, filter the list of overloads based on the number of + // positional and keyword arguments that are present. We do all of this + // speculatively because we don't want to record any types in the type + // cache or record any diagnostics at this stage. 
+ useSpeculativeMode(speculativeNode, () => { + let overloadIndex = 0; + OverloadedType.getOverloads(type).forEach((overload) => { + // Consider only the functions that have the @overload decorator, + // not the final function that omits the overload. This is the + // intended behavior according to PEP 484. + const matchResults = matchArgsToParams( + errorNode, + argList, + { type: overload, isIncomplete: typeResult.isIncomplete }, + overloadIndex + ); + + if (!matchResults.argumentErrors) { + filteredMatchResults.push(matchResults); + } + + overloadIndex++; + }); + }); + + // If there are no possible arg/param matches among the overloads, + // emit an error that includes the argument types. + if (filteredMatchResults.length === 0) { + // Skip the error message if we're in speculative mode because it's very + // expensive, and we're going to suppress the diagnostic anyway. + if (!canSkipDiagnosticForNode(errorNode)) { + const overloads = OverloadedType.getOverloads(type); + const functionName = + overloads.length > 0 && overloads[0].shared.name + ? overloads[0].shared.name + : ''; + const diagAddendum = new DiagnosticAddendum(); + const argTypes = argList.map((t) => { + const typeString = printType(getTypeOfArg(t, /* inferenceContext */ undefined).type); + + if (t.argCategory === ArgCategory.UnpackedList) { + return `*${typeString}`; + } + + if (t.argCategory === ArgCategory.UnpackedDictionary) { + return `**${typeString}`; + } + + return typeString; + }); + + diagAddendum.addMessage(LocAddendum.argumentTypes().format({ types: argTypes.join(', ') })); + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.noOverload().format({ name: functionName }) + diagAddendum.getString(), + errorNode + ); + } + + return { argumentErrors: true, isTypeIncomplete, overloadsUsedForCall: [] }; + } + + // Create a helper function that evaluates the overload that best + // matches the arg/param lists. 
+ function evaluateUsingBestMatchingOverload(skipUnknownArgCheck: boolean, emitNoOverloadFoundError: boolean) { + // Find the match with the smallest argument match score. If there + // are more than one with the same score, use the one with the + // largest index. Later overloads tend to be more general. + const bestMatch = filteredMatchResults.reduce((previous, current) => { + if (current.argumentMatchScore === previous.argumentMatchScore) { + return current.overloadIndex > previous.overloadIndex ? current : previous; + } + return current.argumentMatchScore < previous.argumentMatchScore ? current : previous; + }); + + // If there is more than one filtered match, report that no match + // was possible and emit a diagnostic that provides the most likely match. + if (emitNoOverloadFoundError) { + const functionName = bestMatch.overload.shared.name || ''; + const diagnostic = addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.noOverload().format({ name: functionName }), + errorNode + ); + + const overrideDecl = bestMatch.overload.shared.declaration; + if (diagnostic && overrideDecl) { + diagnostic.addRelatedInfo( + LocAddendum.overloadIndex().format({ index: bestMatch.overloadIndex + 1 }), + overrideDecl.uri, + overrideDecl.range + ); + } + } + + const effectiveConstraints = constraints ?? new ConstraintTracker(); + + return validateArgTypesWithContext( + errorNode, + bestMatch, + effectiveConstraints, + skipUnknownArgCheck, + inferenceContext + ); + } + + // If there is only one possible arg/param match among the overloads, + // use the normal type matching mechanism because it is faster and + // will provide a clearer error message. 
+ if (filteredMatchResults.length === 1) { + return evaluateUsingBestMatchingOverload( + /* skipUnknownArgCheck */ false, + /* emitNoOverloadFoundError */ false + ); + } + + let expandedArgTypes: (Type | undefined)[][] | undefined = [argList.map((arg) => undefined)]; + + while (true) { + const callResult = validateOverloadsWithExpandedTypes( + errorNode, + expandedArgTypes, + filteredMatchResults, + constraints, + skipUnknownArgCheck, + inferenceContext + ); + + if (callResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + + if (!callResult.argumentErrors) { + return callResult; + } + + // We didn't find an overload match. Try to expand the next union + // argument type into individual types and retry with the expanded types. + if (!contextFreeArgTypes) { + useSpeculativeMode(getSpeculativeNodeForCall(errorNode), () => { + // Evaluate the types of each argument expression without regard to + // the context. We'll use this to determine whether we need to do + // union expansion. + contextFreeArgTypes = argList.map((arg) => { + if (arg.typeResult) { + return arg.typeResult.type; + } + + if (arg.valueExpression) { + const valueExpressionNode = arg.valueExpression; + return useSpeculativeMode(valueExpressionNode, () => { + return getTypeOfExpression(valueExpressionNode).type; + }); + } + + return AnyType.create(); + }); + }); + } + + expandedArgTypes = expandArgTypes(contextFreeArgTypes!, expandedArgTypes); + + // Check for combinatoric explosion and break out of loop. + if (!expandedArgTypes || expandedArgTypes.length > maxTotalOverloadArgTypeExpansionCount) { + break; + } + } + + // We couldn't find any valid overloads. Skip the error message if we're + // in speculative mode because it's very expensive, and we're going to + // suppress the diagnostic anyway. 
+ if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + const result = evaluateUsingBestMatchingOverload( + /* skipUnknownArgCheck */ true, + /* emitNoOverloadFoundError */ true + ); + + // Replace the result with an unknown type since we don't know + // what overload should have been used. + result.returnType = UnknownType.create(); + return { ...result, argumentErrors: true }; + } + + return { argumentErrors: true, isTypeIncomplete, overloadsUsedForCall: [] }; + } + + // Replaces each item in the expandedArgTypes with n items where n is + // the number of subtypes in a union or other expandable type. + // The contextFreeArgTypes parameter represents the types of the arguments + // evaluated with no bidirectional type inference (i.e. without the help of + // the corresponding parameter's expected type). If the function returns + // undefined, that indicates that all types have been expanded, and no + // more expansion is possible. + function expandArgTypes( + contextFreeArgTypes: Type[], + expandedArgTypes: (Type | undefined)[][] + ): (Type | undefined)[][] | undefined { + // Find the rightmost already-expanded argument. + let indexToExpand = contextFreeArgTypes.length - 1; + while (indexToExpand >= 0 && !expandedArgTypes[0][indexToExpand]) { + indexToExpand--; + } + + // Move to the next candidate for expansion. + indexToExpand++; + + if (indexToExpand >= contextFreeArgTypes.length) { + return undefined; + } + + let expandedTypes: Type[] | undefined; + while (indexToExpand < contextFreeArgTypes.length) { + // Is this a union type? If so, we can expand it. + const argType = contextFreeArgTypes[indexToExpand]; + + expandedTypes = expandArgType(argType); + if (expandedTypes) { + break; + } + indexToExpand++; + } + + // We have nothing left to expand. + if (!expandedTypes) { + return undefined; + } + + // Expand entry indexToExpand. 
+ const newExpandedArgTypes: (Type | undefined)[][] = []; + + expandedArgTypes.forEach((preExpandedTypes) => { + expandedTypes.forEach((subtype) => { + const expandedTypes = [...preExpandedTypes]; + expandedTypes[indexToExpand] = subtype; + newExpandedArgTypes.push(expandedTypes); + }); + }); + + return newExpandedArgTypes; + } + + function expandArgType(type: Type): Type[] | undefined { + const expandedTypes: Type[] = []; + + // Expand any top-level type variables with constraints. + type = makeTopLevelTypeVarsConcrete(type); + + doForEachSubtype(type, (subtype) => { + if (isClassInstance(subtype)) { + // Expand any bool or Enum literals. + const expandedLiteralTypes = enumerateLiteralsForType(evaluatorInterface, subtype); + if (expandedLiteralTypes && expandedLiteralTypes.length <= maxSingleOverloadArgTypeExpansionCount) { + appendArray(expandedTypes, expandedLiteralTypes); + return; + } + + // Expand any fixed-size tuples. + const expandedTuples = expandTuple(subtype, maxSingleOverloadArgTypeExpansionCount); + if (expandedTuples) { + appendArray(expandedTypes, expandedTuples); + return; + } + } + + expandedTypes.push(subtype); + }); + + return expandedTypes.length > 1 ? expandedTypes : undefined; + } + + // Validates that the arguments can be assigned to the call's parameter + // list, specializes the call based on arg types, and returns the + // specialized type of the return value. If it detects an error along + // the way, it emits a diagnostic and sets argumentErrors to true. 
+ function validateCallArgs( + errorNode: ExpressionNode, + argList: Arg[], + callTypeResult: TypeResult, + constraints: ConstraintTracker | undefined, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined, + recursionCount = 0 + ): CallResult { + let argumentErrors = false; + let isTypeIncomplete = false; + let specializedInitSelfType: Type | undefined; + const overloadsUsedForCall: FunctionType[] = []; + + if (recursionCount > maxTypeRecursionCount) { + return { returnType: UnknownType.create(), argumentErrors: true, overloadsUsedForCall }; + } + recursionCount++; + + // Special forms are not callable. + if (callTypeResult.type.props?.specialForm) { + const exprNode = errorNode.nodeType === ParseNodeType.Call ? errorNode.d.leftExpr : errorNode; + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.objectNotCallable().format({ + type: printType(callTypeResult.type.props.specialForm, { expandTypeAlias: true }), + }), + exprNode + ); + return { returnType: UnknownType.create(), argumentErrors: true, overloadsUsedForCall }; + } + + let returnType = mapSubtypesExpandTypeVars( + callTypeResult.type, + { sortSubtypes: true }, + (expandedSubtype, unexpandedSubtype, isLastIteration) => { + return useSpeculativeMode( + isLastIteration ? 
undefined : getSpeculativeNodeForCall(errorNode), + () => { + const callResult = validateCallArgsForSubtype( + errorNode, + argList, + expandedSubtype, + unexpandedSubtype, + !!callTypeResult.isIncomplete, + constraints, + skipUnknownArgCheck, + inferenceContext, + recursionCount + ); + + if (callResult.argumentErrors) { + argumentErrors = true; + } + + if (callResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + + if (callResult.overloadsUsedForCall) { + appendArray(overloadsUsedForCall, callResult.overloadsUsedForCall); + } + + specializedInitSelfType = callResult.specializedInitSelfType; + + return callResult.returnType; + }, + { + allowDiagnostics: true, + } + ); + } + ); + + // If we ended up with a "Never" type because all code paths returned + // undefined due to argument errors, transform the result into an Unknown + // to avoid subsequent false positives. + if (argumentErrors && isNever(returnType) && !returnType.priv.isNoReturn) { + returnType = UnknownType.create(); + } + + return { + argumentErrors, + returnType, + isTypeIncomplete, + specializedInitSelfType, + overloadsUsedForCall, + }; + } + + function validateCallArgsForSubtype( + errorNode: ExpressionNode, + argList: Arg[], + expandedCallType: Type, + unexpandedCallType: Type, + isCallTypeIncomplete: boolean, + constraints: ConstraintTracker | undefined, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined, + recursionCount: number + ): CallResult { + function touchArgTypes() { + if (!isCallTypeIncomplete) { + argList.forEach((arg) => { + if (arg.valueExpression && !isSpeculativeModeInUse(arg.valueExpression)) { + getTypeOfArg(arg, /* inferenceContext */ undefined); + } + }); + } + } + + switch (expandedCallType.category) { + case TypeCategory.Never: + case TypeCategory.Unknown: + case TypeCategory.Any: { + // Create a dummy callable that accepts all arguments and validate + // that the argument expressions are valid. 
+ const dummyFunctionType = FunctionType.createInstance('', '', '', FunctionTypeFlags.None); + FunctionType.addDefaultParams(dummyFunctionType); + + const dummyCallResult = validateCallForFunction( + errorNode, + argList, + dummyFunctionType, + isCallTypeIncomplete, + constraints, + skipUnknownArgCheck, + inferenceContext + ); + + return { ...dummyCallResult, returnType: expandedCallType }; + } + + case TypeCategory.Function: { + return validateCallForFunction( + errorNode, + argList, + expandedCallType, + isCallTypeIncomplete, + constraints, + skipUnknownArgCheck, + inferenceContext + ); + } + + case TypeCategory.Overloaded: { + return validateCallForOverloaded( + errorNode, + argList, + expandedCallType, + isCallTypeIncomplete, + constraints, + skipUnknownArgCheck, + inferenceContext + ); + } + + case TypeCategory.Class: { + if (isNoneInstance(expandedCallType)) { + addDiagnostic(DiagnosticRule.reportOptionalCall, LocMessage.noneNotCallable(), errorNode); + + touchArgTypes(); + return { argumentErrors: true }; + } + + if (TypeBase.isInstantiable(expandedCallType)) { + return validateCallForInstantiableClass( + errorNode, + argList, + expandedCallType, + unexpandedCallType, + skipUnknownArgCheck, + inferenceContext + ); + } + + return validateCallForClassInstance( + errorNode, + argList, + expandedCallType, + unexpandedCallType, + constraints, + skipUnknownArgCheck, + inferenceContext, + recursionCount + ); + } + + // TypeVars should have been expanded in most cases, + // but we still need to handle the case of Type[T] where + // T is a constrained type that contains a union. We also + // need to handle recursive type aliases. 
+ case TypeCategory.TypeVar: { + return validateCallArgs( + errorNode, + argList, + { type: transformPossibleRecursiveTypeAlias(expandedCallType), isIncomplete: isCallTypeIncomplete }, + constraints, + skipUnknownArgCheck, + inferenceContext, + recursionCount + ); + } + + case TypeCategory.Module: { + addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.moduleNotCallable(), errorNode); + + touchArgTypes(); + return { argumentErrors: true }; + } + } + + touchArgTypes(); + return { argumentErrors: true }; + } + + function validateCallForFunction( + errorNode: ExpressionNode, + argList: Arg[], + type: FunctionType, + isCallTypeIncomplete: boolean, + constraints: ConstraintTracker | undefined, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined + ): CallResult { + if (TypeBase.isInstantiable(type)) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.callableNotInstantiable().format({ + type: printType(type), + }), + errorNode + ); + return { returnType: undefined, argumentErrors: true }; + } + + // The stdlib collections/__init__.pyi stub file defines namedtuple + // as a function rather than a class, so we need to check for it here. + if (FunctionType.isBuiltIn(type, 'namedtuple')) { + addDiagnostic(DiagnosticRule.reportUntypedNamedTuple, LocMessage.namedTupleNoTypes(), errorNode); + + const result: CallResult = { + returnType: createNamedTupleType(evaluatorInterface, errorNode, argList, /* includesTypes */ false), + }; + + validateArgs(errorNode, argList, { type: type }, constraints, skipUnknownArgCheck, inferenceContext); + + return result; + } + + // Handle the NewType specially, replacing the normal return type. 
+ if (FunctionType.isBuiltIn(type, 'NewType')) { + return { returnType: createNewType(errorNode, argList) }; + } + + const functionResult = validateArgs( + errorNode, + argList, + { type, isIncomplete: isCallTypeIncomplete }, + constraints, + skipUnknownArgCheck, + inferenceContext + ); + + let isTypeIncomplete = !!functionResult.isTypeIncomplete; + let returnType = functionResult.returnType; + + let argumentErrors = !!functionResult.argumentErrors; + if (!argumentErrors) { + // Call the function transform logic to handle special-cased functions. + const transformed = applyFunctionTransform(evaluatorInterface, errorNode, argList, type, { + argumentErrors: !!functionResult.argumentErrors, + returnType: functionResult.returnType ?? UnknownType.create(isTypeIncomplete), + isTypeIncomplete, + }); + + returnType = transformed.returnType; + if (transformed.isTypeIncomplete) { + isTypeIncomplete = true; + } + if (transformed.argumentErrors) { + argumentErrors = true; + } + } + + if (FunctionType.isBuiltIn(type, '__import__')) { + // For the special __import__ type, we'll override the return type to be "Any". + // This is required because we don't know what module was imported, and we don't + // want to fail type checks when accessing members of the resulting module type. + returnType = AnyType.create(); + } + + return { + returnType, + isTypeIncomplete, + argumentErrors, + overloadsUsedForCall: functionResult.overloadsUsedForCall, + specializedInitSelfType: functionResult.specializedInitSelfType, + }; + } + + // Determines whether a symbol is abstract. In an ABC class, this means a function + // is specifically decorated with @abstractmethod. In a protocol class, the rules + // are more complicated and depend on whether the method is defined in a stub file. 
+ function getAbstractSymbolInfo(classType: ClassType, symbolName: string): AbstractSymbol | undefined { + const isProtocolClass = ClassType.isProtocolClass(classType); + + const symbol = ClassType.getSymbolTable(classType).get(symbolName); + if (!symbol) { + return undefined; + } + + // Ignore instance variables. Also, ignore named tuple members, which are + // modeled in pyright as instance variables, but their runtime implementation + // uses a descriptor object. + if (!symbol.isClassMember() && !symbol.isNamedTupleMemberMember()) { + return undefined; + } + + const lastDecl = getLastTypedDeclarationForSymbol(symbol); + if (!lastDecl) { + return undefined; + } + + // Handle protocol variables specially. + if (isProtocolClass && lastDecl.type === DeclarationType.Variable) { + // If none of the declarations involve assignments, assume it's + // not implemented in the protocol. + const allDecls = symbol.getDeclarations(); + if (!allDecls.some((decl) => decl.type === DeclarationType.Variable && !!decl.inferredTypeSource)) { + return { symbol, symbolName, classType, hasImplementation: false }; + } + } + + if (lastDecl.type !== DeclarationType.Function) { + return undefined; + } + + let isAbstract = false; + const lastFunctionInfo = getFunctionInfoFromDecorators(evaluatorInterface, lastDecl.node, /* isInClass */ true); + if ((lastFunctionInfo.flags & FunctionTypeFlags.AbstractMethod) !== 0) { + isAbstract = true; + } + + const isStubFile = AnalyzerNodeInfo.getFileInfo(lastDecl.node).isStubFile; + + // In an overloaded method, the first overload can also be marked abstract. + // In stub files, there is no implementation, so this is the only way to mark + // an overloaded method as abstract. 
+ const firstDecl = symbol.getDeclarations()[0]; + let firstFunctionInfo: FunctionDecoratorInfo | undefined; + + if (firstDecl !== lastDecl && firstDecl.type === DeclarationType.Function) { + firstFunctionInfo = getFunctionInfoFromDecorators(evaluatorInterface, firstDecl.node, /* isInClass */ true); + if ((firstFunctionInfo.flags & FunctionTypeFlags.AbstractMethod) !== 0) { + isAbstract = true; + } + + // If there's no implementation, assume it's unimplemented. + if (isProtocolClass && (lastFunctionInfo.flags & FunctionTypeFlags.Overloaded) !== 0) { + // If this is a protocol class method defined in a stub file and + // it's not marked abstract, assume it's not abstract and implemented. + if (isProtocolClass && !isAbstract && isStubFile) { + return undefined; + } + + return { symbol, symbolName, classType, hasImplementation: false }; + } + } + + // In a non-protocol class, if the method isn't explicitly marked abstract, + // then it's not abstract. + if (!isProtocolClass && !isAbstract) { + return undefined; + } + + const hasImplementation = + !ParseTreeUtils.isSuiteEmpty(lastDecl.node.d.suite) && !methodAlwaysRaisesNotImplemented(lastDecl); + + // If this is a protocol class, the method isn't explicitly marked + // as abstract, and there is an implementation, then it's a default + // implementation, and it's not considered abstract. If it's in a stub + // file, assume it's implemented in this case. 
+ if (isProtocolClass && !isAbstract) { + if (hasImplementation || isStubFile) { + return undefined; + } + } + + return { symbol, symbolName, classType, hasImplementation }; + } + + function validateCallForOverloaded( + errorNode: ExpressionNode, + argList: Arg[], + expandedCallType: OverloadedType, + isCallTypeIncomplete: boolean, + constraints: ConstraintTracker | undefined, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined + ): CallResult { + const overloads = OverloadedType.getOverloads(expandedCallType); + // Handle the 'cast' call as a special case. + if ( + overloads.length > 0 && + FunctionType.isBuiltIn(overloads[0], ['typing.cast', 'typing_extensions.cast']) && + argList.length === 2 + ) { + return { returnType: evaluateCastCall(argList, errorNode) }; + } + + const callResult = validateOverloadedArgTypes( + errorNode, + argList, + { type: expandedCallType, isIncomplete: isCallTypeIncomplete }, + constraints, + skipUnknownArgCheck, + inferenceContext + ); + + let returnType = callResult.returnType ?? UnknownType.create(); + let isTypeIncomplete = !!callResult.isTypeIncomplete; + let argumentErrors = !!callResult.argumentErrors; + + if (!argumentErrors) { + // Call the function transform logic to handle special-cased functions. + const transformed = applyFunctionTransform(evaluatorInterface, errorNode, argList, expandedCallType, { + argumentErrors: !!callResult.argumentErrors, + returnType: callResult.returnType ?? 
UnknownType.create(isTypeIncomplete), + isTypeIncomplete, + }); + + returnType = transformed.returnType; + if (transformed.isTypeIncomplete) { + isTypeIncomplete = true; + } + + if (transformed.argumentErrors) { + argumentErrors = true; + } + } + + return { + returnType, + isTypeIncomplete, + argumentErrors, + overloadsUsedForCall: callResult.overloadsUsedForCall, + specializedInitSelfType: callResult.specializedInitSelfType, + }; + } + + function validateCallForInstantiableClass( + errorNode: ExpressionNode, + argList: Arg[], + expandedCallType: ClassType, + unexpandedCallType: Type, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined + ): CallResult { + if (expandedCallType.priv.literalValue !== undefined) { + addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.literalNotCallable(), errorNode); + + return { returnType: UnknownType.create(), argumentErrors: true }; + } + + if (ClassType.isBuiltIn(expandedCallType)) { + const className = expandedCallType.priv.aliasName ?? expandedCallType.shared.name; + + // Handle a call to a metaclass explicitly. + if (isInstantiableMetaclass(expandedCallType)) { + if (expandedCallType.priv.typeArgs && expandedCallType.priv.isTypeArgExplicit) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.objectNotCallable().format({ + type: printType(expandedCallType), + }), + errorNode + ); + return { returnType: UnknownType.create(), argumentErrors: true }; + } + + // Validate the constructor arguments. + validateConstructorArgs( + evaluatorInterface, + errorNode, + argList, + expandedCallType, + skipUnknownArgCheck, + inferenceContext + ); + + // The one-parameter form of "type" returns the class + // for the specified object. 
+ if (expandedCallType.shared.name === 'type' && argList.length === 1) { + const argTypeResult = getTypeOfArg(argList[0], /* inferenceContext */ undefined); + const argType = argTypeResult.type; + const returnType = mapSubtypes(argType, (subtype) => { + if (isNever(subtype)) { + return subtype; + } + + if (isClass(subtype)) { + // Specifically handle the case where the subtype is a class-like + // object created by calling NewType. At runtime, it's actually + // a FunctionType object. + if ( + isClassInstance(subtype) && + ClassType.isNewTypeClass(subtype) && + !subtype.priv.includeSubclasses + ) { + if (prefetched?.functionClass) { + return prefetched.functionClass; + } + } + + return convertToInstantiable(stripLiteralValue(subtype)); + } + + if (TypeBase.isInstance(subtype)) { + if (isFunction(subtype) || isTypeVar(subtype)) { + return convertToInstantiable(subtype); + } + } + + return ClassType.specialize(ClassType.cloneAsInstance(expandedCallType), [ + UnknownType.create(), + ]); + }); + + return { returnType, isTypeIncomplete: argTypeResult.isIncomplete }; + } + + if (argList.length >= 2) { + // The two-parameter form of a call to a metaclass returns a new class + // built from the specified base types. + return { + returnType: createClassFromMetaclass(errorNode, argList, expandedCallType) || AnyType.create(), + }; + } + + // If the parameter to type() is not statically known, + // fall back to Any. 
+ return { returnType: AnyType.create() }; + } + + if (className === 'TypeVar') { + return { + returnType: createTypeVarType(errorNode, expandedCallType, argList), + }; + } + + if (className === 'TypeVarTuple') { + return { + returnType: createTypeVarTupleType(errorNode, expandedCallType, argList), + }; + } + + if (className === 'ParamSpec') { + return { + returnType: createParamSpecType(errorNode, expandedCallType, argList), + }; + } + + if (className === 'TypeAliasType') { + const newTypeAlias = createTypeAliasType(errorNode, argList); + if (newTypeAlias) { + return { returnType: newTypeAlias }; + } + } + + if (className === 'NamedTuple') { + const result: CallResult = { + returnType: createNamedTupleType(evaluatorInterface, errorNode, argList, /* includesTypes */ true), + }; + + const initTypeResult = getBoundInitMethod( + evaluatorInterface, + errorNode, + ClassType.cloneAsInstance(expandedCallType), + /* diag */ undefined, + /* additionalFlags */ MemberAccessFlags.Default + ); + + if (initTypeResult && isOverloaded(initTypeResult.type)) { + validateOverloadedArgTypes( + errorNode, + argList, + { type: initTypeResult.type }, + /* constraints */ undefined, + skipUnknownArgCheck, + /* inferenceContext */ undefined + ); + } + + return result; + } + + if (className === 'NewType') { + return { returnType: createNewType(errorNode, argList) }; + } + + // Handle the Sentinel call specially. 
+ if (className === 'Sentinel') { + if (AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.enableExperimentalFeatures) { + return { returnType: createSentinelType(evaluatorInterface, errorNode, argList) }; + } + } + + if (ClassType.isSpecialFormClass(expandedCallType)) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.typeNotIntantiable().format({ type: className }), + errorNode + ); + + return { returnType: UnknownType.create(), argumentErrors: true }; + } + + if (className === 'TypedDict') { + return { returnType: createTypedDictType(evaluatorInterface, errorNode, expandedCallType, argList) }; + } + + if (className === 'auto' && argList.length === 0) { + return { returnType: getEnumAutoValueType(evaluatorInterface, errorNode) }; + } + } + + // Is it a call to an Enum class factory? + if ( + isClass(expandedCallType) && + expandedCallType.shared.effectiveMetaclass && + isClass(expandedCallType.shared.effectiveMetaclass) && + isEnumMetaclass(expandedCallType.shared.effectiveMetaclass) && + !isEnumClassWithMembers(evaluatorInterface, expandedCallType) + ) { + return { + returnType: + createEnumType(evaluatorInterface, errorNode, expandedCallType, argList) ?? + convertToInstance(unexpandedCallType), + }; + } + + if (ClassType.supportsAbstractMethods(expandedCallType)) { + const abstractSymbols = getAbstractSymbols(expandedCallType); + + if ( + abstractSymbols.length > 0 && + !expandedCallType.priv.includeSubclasses && + !isTypeVar(unexpandedCallType) + ) { + // If the class is abstract, it can't be instantiated. 
+ const diagAddendum = new DiagnosticAddendum(); + const errorsToDisplay = 2; + + abstractSymbols.forEach((abstractMethod, index) => { + if (index === errorsToDisplay) { + diagAddendum.addMessage( + LocAddendum.memberIsAbstractMore().format({ + count: abstractSymbols.length - errorsToDisplay, + }) + ); + } else if (index < errorsToDisplay) { + if (isInstantiableClass(abstractMethod.classType)) { + const className = abstractMethod.classType.shared.name; + diagAddendum.addMessage( + LocAddendum.memberIsAbstract().format({ + type: className, + name: abstractMethod.symbolName, + }) + ); + } + } + }); + + addDiagnostic( + DiagnosticRule.reportAbstractUsage, + LocMessage.instantiateAbstract().format({ + type: expandedCallType.shared.name, + }) + diagAddendum.getString(), + errorNode + ); + } + } + + if (ClassType.isProtocolClass(expandedCallType) && !expandedCallType.priv.includeSubclasses) { + // If the class is a protocol, it can't be instantiated. + addDiagnostic( + DiagnosticRule.reportAbstractUsage, + LocMessage.instantiateProtocol().format({ + type: expandedCallType.shared.name, + }), + errorNode + ); + } + + // Assume this is a call to the constructor. + const constructorResult = validateConstructorArgs( + evaluatorInterface, + errorNode, + argList, + expandedCallType, + skipUnknownArgCheck, + inferenceContext + ); + + const overloadsUsedForCall = constructorResult.overloadsUsedForCall; + const argumentErrors = constructorResult.argumentErrors; + const isTypeIncomplete = constructorResult.isTypeIncomplete; + + let returnType = constructorResult.returnType; + + // If the expandedCallType originated from a TypeVar, convert + // the constructed type back to the TypeVar. For example, if + // we have `cls: Type[_T]` followed by `_T()`. + if (isTypeVar(unexpandedCallType)) { + returnType = convertToInstance(unexpandedCallType); + } + + // If we instantiated the "deprecated" class, attach the deprecation + // message to the instance. 
+ if ( + errorNode.nodeType === ParseNodeType.Call && + returnType && + isClassInstance(returnType) && + ClassType.isBuiltIn(returnType, 'deprecated') + ) { + returnType = ClassType.cloneForDeprecatedInstance(returnType, getDeprecatedMessageFromCall(errorNode)); + } + + // If we instantiated a type, transform it into a class. + // This can happen if someone directly instantiates a metaclass + // deriving from type. + if ( + returnType && + isClassInstance(returnType) && + returnType.shared.mro.some( + (baseClass) => isInstantiableClass(baseClass) && ClassType.isBuiltIn(baseClass, 'type') + ) + ) { + let newClassName = '__class_' + returnType.shared.name; + if (argList.length === 3) { + const firstArgType = getTypeOfArg(argList[0], /* inferenceContext */ undefined).type; + + if ( + isClassInstance(firstArgType) && + ClassType.isBuiltIn(firstArgType, 'str') && + typeof firstArgType.priv.literalValue === 'string' + ) { + newClassName = firstArgType.priv.literalValue; + } + } + + const newClassType = ClassType.createInstantiable( + newClassName, + '', + '', + AnalyzerNodeInfo.getFileInfo(errorNode).fileUri, + ClassTypeFlags.None, + ParseTreeUtils.getTypeSourceId(errorNode), + ClassType.cloneAsInstantiable(returnType), + ClassType.cloneAsInstantiable(returnType) + ); + newClassType.shared.baseClasses.push(getBuiltInType(errorNode, 'object')); + newClassType.shared.effectiveMetaclass = expandedCallType; + newClassType.shared.declaration = returnType.shared.declaration; + + computeMroLinearization(newClassType); + returnType = newClassType; + } + + return { returnType, overloadsUsedForCall, argumentErrors, isTypeIncomplete }; + } + + function validateCallForClassInstance( + errorNode: ExpressionNode, + argList: Arg[], + expandedCallType: ClassType, + unexpandedCallType: Type, + constraints: ConstraintTracker | undefined, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined, + recursionCount: number + ): CallResult { + const callDiag 
= new DiagnosticAddendum(); + const callMethodResult = getTypeOfBoundMember( + errorNode, + expandedCallType, + '__call__', + /* usage */ undefined, + callDiag, + MemberAccessFlags.SkipInstanceMembers | MemberAccessFlags.SkipAttributeAccessOverride, + /* selfType */ undefined, + recursionCount + ); + const callMethodType = callMethodResult?.type; + + if (!callMethodType || callMethodResult.typeErrors) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.objectNotCallable().format({ + type: printType(expandedCallType), + }) + callDiag.getString(), + errorNode + ); + + return { returnType: UnknownType.create(), argumentErrors: true }; + } + + const callResult = validateCallArgs( + errorNode, + argList, + { type: callMethodType }, + constraints, + skipUnknownArgCheck, + inferenceContext, + recursionCount + ); + + let returnType = callResult.returnType ?? UnknownType.create(); + if ( + isTypeVar(unexpandedCallType) && + TypeBase.isInstantiable(unexpandedCallType) && + isClass(expandedCallType) && + ClassType.isBuiltIn(expandedCallType, 'type') + ) { + // Handle the case where a type[T] is being called. We presume this + // will instantiate an object of type T. + returnType = convertToInstance(unexpandedCallType); + } + + return { + returnType, + argumentErrors: callResult.argumentErrors, + overloadsUsedForCall: callResult.overloadsUsedForCall, + }; + } + + // Evaluates the type of the "cast" call. + function evaluateCastCall(argList: Arg[], errorNode: ExpressionNode) { + if (argList[0].argCategory !== ArgCategory.Simple && argList[0].valueExpression) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.unpackInAnnotation(), + argList[0].valueExpression + ); + } + + // Verify that the cast is necessary. 
+ let castToType = getTypeOfArgExpectingType(argList[0], { typeExpression: true }).type; + + const liveScopeIds = ParseTreeUtils.getTypeVarScopesForNode(errorNode); + castToType = makeTypeVarsBound(castToType, liveScopeIds); + + let castFromType = getTypeOfArg(argList[1], /* inferenceContext */ undefined).type; + + if (castFromType.props?.specialForm) { + castFromType = castFromType.props.specialForm; + } + + if (TypeBase.isInstantiable(castToType) && !isUnknown(castToType)) { + if ( + isTypeSame(convertToInstance(castToType), castFromType, { + ignorePseudoGeneric: true, + }) + ) { + addDiagnostic( + DiagnosticRule.reportUnnecessaryCast, + LocMessage.unnecessaryCast().format({ + type: printType(castFromType), + }), + errorNode + ); + } + } + + return convertToInstance(castToType); + } + + // Expands any unpacked tuples within an argument list. + function expandArgList(argList: Arg[]): Arg[] { + const expandedArgList: Arg[] = []; + + for (const arg of argList) { + if (arg.argCategory === ArgCategory.UnpackedList) { + const argType = getTypeOfArg(arg, /* inferenceContext */ undefined).type; + + // If this is a tuple with specified element types, use those + // specified types rather than using the more generic iterator + // type which will be a union of all element types. + const combinedArgType = combineSameSizedTuples( + makeTopLevelTypeVarsConcrete(argType), + prefetched?.tupleClass + ); + + if (isClassInstance(combinedArgType) && isTupleClass(combinedArgType)) { + const tupleTypeArgs = combinedArgType.priv.tupleTypeArgs ?? 
[]; + + if (tupleTypeArgs.length !== 1 || !tupleTypeArgs[0].isUnbounded) { + for (const tupleTypeArg of tupleTypeArgs) { + if (tupleTypeArg.isUnbounded) { + expandedArgList.push({ + ...arg, + argCategory: ArgCategory.UnpackedList, + valueExpression: undefined, + typeResult: { + type: makeTupleObject(evaluatorInterface, [tupleTypeArg]), + }, + }); + } else { + expandedArgList.push({ + ...arg, + argCategory: ArgCategory.Simple, + valueExpression: undefined, + typeResult: { + type: tupleTypeArg.type, + }, + }); + } + } + continue; + } + } + } + + expandedArgList.push(arg); + } + + return expandedArgList; + } + + // Matches the arguments passed to a function to the corresponding parameters in that + // function. This matching is done based on positions and keywords. Type evaluation and + // validation is left to the caller. + // This logic is based on PEP 3102: https://www.python.org/dev/peps/pep-3102/ + function matchArgsToParams( + errorNode: ExpressionNode, + argList: Arg[], + typeResult: TypeResult, + overloadIndex: number + ): MatchArgsToParamsResult { + const overload = typeResult.type; + const paramDetails = getParamListDetails(overload, { disallowExtraKwargsForTd: true }); + const paramSpec = FunctionType.getParamSpecFromArgsKwargs(overload); + + let argIndex = 0; + let unpackedArgOfUnknownLength = false; + let unpackedArgMapsToVariadic = false; + let reportedArgError = false; + let isTypeIncomplete = !!typeResult.isIncomplete; + let isTypeVarTupleFullyMatched = false; + + // Expand any unpacked tuples in the arg list. + argList = expandArgList(argList); + + // Construct an object that racks which parameters have been assigned arguments. + const paramTracker = new ParamAssignmentTracker(paramDetails.params); + + let positionalOnlyLimitIndex = paramDetails.positionOnlyParamCount; + let positionParamLimitIndex = paramDetails.firstKeywordOnlyIndex ?? 
paramDetails.params.length; + + const varArgListParamIndex = paramDetails.argsIndex; + const varArgDictParamIndex = paramDetails.kwargsIndex; + + // Is this an function that uses the *args and **kwargs + // from a param spec? If so, we need to treat all positional parameters + // prior to the *args as positional-only according to PEP 612. + let paramSpecArgList: Arg[] | undefined; + let paramSpecTarget: ParamSpecType | undefined; + let hasParamSpecArgsKwargs = false; + + // Determine how many positional args are being passed before + // we see a keyword arg. + let positionalArgCount = argList.findIndex( + (arg) => arg.argCategory === ArgCategory.UnpackedDictionary || arg.name !== undefined + ); + if (positionalArgCount < 0) { + positionalArgCount = argList.length; + } + + if (varArgListParamIndex !== undefined && varArgDictParamIndex !== undefined) { + assert(paramDetails.params[varArgListParamIndex], 'varArgListParamIndex params entry is undefined'); + const varArgListParamType = paramDetails.params[varArgListParamIndex].type; + assert(paramDetails.params[varArgDictParamIndex], 'varArgDictParamIndex params entry is undefined'); + const varArgDictParamType = paramDetails.params[varArgDictParamIndex].type; + + if ( + isParamSpec(varArgListParamType) && + varArgListParamType.priv.paramSpecAccess === 'args' && + isParamSpec(varArgDictParamType) && + varArgDictParamType.priv.paramSpecAccess === 'kwargs' && + varArgListParamType.shared.name === varArgDictParamType.shared.name + ) { + hasParamSpecArgsKwargs = true; + + // Does this function define the param spec, or is it an inner + // function nested within another function that defines the param + // spec? We need to handle these two cases differently. 
+ const paramSpecScopeId = varArgListParamType.priv.scopeId; + + if (getTypeVarScopeIds(overload).some((id) => id === paramSpecScopeId)) { + paramSpecArgList = []; + paramSpecTarget = TypeVarType.cloneForParamSpecAccess(varArgListParamType, /* access */ undefined); + } else { + positionalOnlyLimitIndex = varArgListParamIndex; + positionalArgCount = Math.min(varArgListParamIndex, positionalArgCount); + positionParamLimitIndex = varArgListParamIndex; + } + } + } else if (paramSpec) { + if (getTypeVarScopeIds(overload).some((id) => id === paramSpec.priv.scopeId)) { + hasParamSpecArgsKwargs = true; + paramSpecArgList = []; + paramSpecTarget = paramSpec; + } + } + + // If there are keyword arguments present after a *args argument, + // the keyword arguments may target one or more parameters that are positional. + // In this case, we will limit the number of positional parameters so the + // *args doesn't consume them all. + if (argList.some((arg) => arg.argCategory === ArgCategory.UnpackedList)) { + argList.forEach((arg) => { + if (arg.name) { + const keywordParamIndex = paramDetails.params.findIndex((paramInfo) => { + assert(paramInfo, 'paramInfo entry is undefined fork kwargs check'); + return ( + paramInfo.param.name === arg.name!.d.value && + paramInfo.param.category === ParamCategory.Simple + ); + }); + + // Is this a parameter that can be interpreted as either a keyword or a positional? + // If so, we'll treat it as a keyword parameter in this case because it's being + // targeted by a keyword argument. + if (keywordParamIndex >= 0 && keywordParamIndex >= positionalOnlyLimitIndex) { + if (positionParamLimitIndex < 0 || keywordParamIndex < positionParamLimitIndex) { + positionParamLimitIndex = keywordParamIndex; + } + } + } + }); + } + + // If we didn't see any special cases, then all parameters are positional. 
+ if (positionParamLimitIndex < 0) { + positionParamLimitIndex = paramDetails.params.length; + } + + let validateArgTypeParams: ValidateArgTypeParams[] = []; + + let activeParam: FunctionParam | undefined; + function trySetActive(arg: Arg, param: FunctionParam) { + if (arg.active) { + activeParam = param; + } + } + + const foundUnpackedListArg = argList.find((arg) => arg.argCategory === ArgCategory.UnpackedList) !== undefined; + + // Map the positional args to parameters. + let paramIndex = 0; + + while (argIndex < positionalArgCount) { + if (argIndex < positionalOnlyLimitIndex && argList[argIndex].name) { + const nameNode = argList[argIndex].name; + if (nameNode) { + addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.argPositional(), nameNode); + reportedArgError = true; + } + } + + const remainingArgCount = positionalArgCount - argIndex; + const remainingParamCount = positionParamLimitIndex - paramIndex - 1; + + if (paramIndex >= positionParamLimitIndex) { + if (paramSpecArgList) { + // Push the remaining positional args onto the param spec arg list. + while (argIndex < positionalArgCount) { + paramSpecArgList.push(argList[argIndex]); + argIndex++; + } + } else { + let tooManyPositionals = false; + + if (argList[argIndex].argCategory === ArgCategory.UnpackedList) { + // If this is an unpacked iterable, we will conservatively assume that it + // might have zero iterations unless we can tell from its type that it + // definitely has at least one iterable value. 
+ const argType = getTypeOfArg(argList[argIndex], /* inferenceContext */ undefined).type; + + if ( + isClassInstance(argType) && + isTupleClass(argType) && + !isUnboundedTupleClass(argType) && + argType.priv.tupleTypeArgs !== undefined && + argType.priv.tupleTypeArgs.length > 0 + ) { + tooManyPositionals = true; + } else { + unpackedArgOfUnknownLength = true; + } + } else { + tooManyPositionals = true; + } + + if (tooManyPositionals) { + if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + positionParamLimitIndex === 1 + ? LocMessage.argPositionalExpectedOne() + : LocMessage.argPositionalExpectedCount().format({ + expected: positionParamLimitIndex, + }), + argList[argIndex].valueExpression ?? errorNode + ); + } + reportedArgError = true; + } + } + break; + } + + if (paramIndex >= paramDetails.params.length) { + break; + } + + assert(paramDetails.params[paramIndex], 'paramIndex params entry is undefined'); + const paramInfo = paramDetails.params[paramIndex]; + const paramType = paramInfo.type; + const paramName = paramInfo.param.name; + + const isParamVariadic = paramInfo.param.category === ParamCategory.ArgsList && isUnpacked(paramType); + + if (argList[argIndex].argCategory === ArgCategory.UnpackedList) { + let isArgCompatibleWithVariadic = false; + + const argTypeResult = getTypeOfArg(argList[argIndex], /* inferenceContext */ undefined); + + let listElementType: Type | undefined; + let enforceIterable = false; + let advanceToNextArg = false; + + // Handle the case where *args is being passed to a function defined + // with a ParamSpec and a Concatenate operator. PEP 612 indicates that + // all positional parameters specified in the Concatenate must be + // filled explicitly. 
+ if (paramIndex < positionParamLimitIndex) { + if ( + isParamSpec(argTypeResult.type) && + argTypeResult.type.priv.paramSpecAccess === 'args' && + paramInfo.param.category !== ParamCategory.ArgsList + ) { + if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + positionParamLimitIndex === 1 + ? LocMessage.argPositionalExpectedOne() + : LocMessage.argPositionalExpectedCount().format({ + expected: positionParamLimitIndex, + }), + argList[argIndex].valueExpression ?? errorNode + ); + } + reportedArgError = true; + } + } + + const argType = argTypeResult.type; + + if (isParamVariadic && isUnpackedTypeVarTuple(argType)) { + // Allow an unpacked TypeVarTuple arg to satisfy an + // unpacked TypeVarTuple param. + listElementType = argType; + isArgCompatibleWithVariadic = true; + advanceToNextArg = true; + isTypeVarTupleFullyMatched = true; + } else if ( + isClassInstance(argType) && + isTupleClass(argType) && + argType.priv.tupleTypeArgs && + argType.priv.tupleTypeArgs.length === 1 && + isUnpackedTypeVarTuple(argType.priv.tupleTypeArgs[0].type) + ) { + // Handle the case where an unpacked TypeVarTuple has + // been packaged into a tuple. + listElementType = argType.priv.tupleTypeArgs[0].type; + isArgCompatibleWithVariadic = true; + advanceToNextArg = true; + isTypeVarTupleFullyMatched = true; + } else if (isParamVariadic && isClassInstance(argType) && isTupleClass(argType)) { + // Handle the case where an unpacked tuple argument is + // matched to a TypeVarTuple parameter. + isArgCompatibleWithVariadic = true; + advanceToNextArg = true; + + // Determine whether we should treat the variadic type as fully matched. + // This depends on how many args and unmatched parameters exist. 
+ if (remainingArgCount < remainingParamCount) { + isTypeVarTupleFullyMatched = true; + } + + listElementType = ClassType.cloneForUnpacked(argType); + } else if (isParamSpec(argType) && argType.priv.paramSpecAccess === 'args') { + listElementType = undefined; + } else { + listElementType = getTypeOfIterator( + { type: argType, isIncomplete: argTypeResult.isIncomplete }, + /* isAsync */ false, + errorNode, + /* emitNotIterableError */ false + )?.type; + + if (!listElementType) { + enforceIterable = true; + } + + unpackedArgOfUnknownLength = true; + + if (paramInfo.param.category === ParamCategory.ArgsList) { + unpackedArgMapsToVariadic = true; + } + + if (isParamVariadic && listElementType) { + isArgCompatibleWithVariadic = true; + listElementType = makeTupleObject( + evaluatorInterface, + [{ type: listElementType, isUnbounded: true }], + /* isUnpacked */ true + ); + } + } + + const funcArg: Arg | undefined = listElementType + ? { + argCategory: ArgCategory.Simple, + typeResult: { type: listElementType, isIncomplete: argTypeResult.isIncomplete }, + } + : { ...argList[argIndex], enforceIterable }; + + if (argTypeResult.isIncomplete) { + isTypeIncomplete = true; + } + + // It's not allowed to use unpacked arguments with a variadic *args + // parameter unless the argument is a variadic arg as well. + if (isParamVariadic && !isArgCompatibleWithVariadic) { + if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.unpackedArgWithVariadicParam(), + argList[argIndex].valueExpression || errorNode + ); + } + reportedArgError = true; + } else { + if (paramSpecArgList && paramInfo.param.category !== ParamCategory.Simple) { + paramSpecArgList.push(argList[argIndex]); + } + + if (funcArg) { + validateArgTypeParams.push({ + paramCategory: paramInfo.param.category, + paramType, + requiresTypeVarMatching: requiresSpecialization(paramType), + argument: funcArg, + errorNode: argList[argIndex].valueExpression ?? 
errorNode, + paramName, + isParamNameSynthesized: FunctionParam.isNameSynthesized(paramInfo.param), + mapsToVarArgList: isParamVariadic && remainingArgCount > remainingParamCount, + }); + } + } + + trySetActive(argList[argIndex], paramDetails.params[paramIndex].param); + + // Note that the parameter has received an argument. + if (paramName && paramDetails.params[paramIndex].param.category === ParamCategory.Simple) { + paramTracker.markArgReceived(paramInfo); + } + + if (advanceToNextArg || paramDetails.params[paramIndex].param.category === ParamCategory.ArgsList) { + argIndex++; + } + + if ( + isTypeVarTupleFullyMatched || + paramDetails.params[paramIndex].param.category !== ParamCategory.ArgsList + ) { + paramIndex++; + } + } else if (paramDetails.params[paramIndex].param.category === ParamCategory.ArgsList) { + trySetActive(argList[argIndex], paramDetails.params[paramIndex].param); + + if (paramSpecArgList) { + paramSpecArgList.push(argList[argIndex]); + argIndex++; + } else { + let paramCategory = paramDetails.params[paramIndex].param.category; + let effectiveParamType = paramType; + const paramName = paramDetails.params[paramIndex].param.name; + + if ( + isUnpackedClass(paramType) && + paramType.priv.tupleTypeArgs && + paramType.priv.tupleTypeArgs.length > 0 + ) { + effectiveParamType = paramType.priv.tupleTypeArgs[0].type; + } + + paramCategory = isUnpacked(effectiveParamType) ? ParamCategory.ArgsList : ParamCategory.Simple; + + if (remainingArgCount <= remainingParamCount) { + if (remainingArgCount < remainingParamCount) { + if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + // Have we run out of arguments and still have parameters left to fill? + addDiagnostic( + DiagnosticRule.reportCallIssue, + remainingArgCount === 1 + ? 
LocMessage.argMorePositionalExpectedOne() + : LocMessage.argMorePositionalExpectedCount().format({ + expected: remainingArgCount, + }), + argList[argIndex].valueExpression || errorNode + ); + } + reportedArgError = true; + } + + paramIndex++; + } else { + validateArgTypeParams.push({ + paramCategory, + paramType: effectiveParamType, + requiresTypeVarMatching: requiresSpecialization(paramType), + argument: argList[argIndex], + errorNode: argList[argIndex].valueExpression || errorNode, + paramName, + isParamNameSynthesized: FunctionParam.isNameSynthesized( + paramDetails.params[paramIndex].param + ), + mapsToVarArgList: true, + }); + + argIndex++; + } + } + } else { + const paramInfo = paramDetails.params[paramIndex]; + const paramName = paramInfo.param.name; + + validateArgTypeParams.push({ + paramCategory: paramInfo.param.category, + paramType, + requiresTypeVarMatching: requiresSpecialization(paramType), + argument: argList[argIndex], + errorNode: argList[argIndex].valueExpression || errorNode, + paramName, + isParamNameSynthesized: FunctionParam.isNameSynthesized(paramInfo.param), + }); + trySetActive(argList[argIndex], paramInfo.param); + + // Note that the parameter has received an argument. + paramTracker.markArgReceived(paramInfo); + + argIndex++; + paramIndex++; + } + } + + // If there weren't enough positional arguments to populate all of the + // positional-only parameters and the next positional-only parameter is + // an unbounded tuple, skip past it. + let skippedArgsParam = false; + if ( + positionalOnlyLimitIndex >= 0 && + paramIndex < positionalOnlyLimitIndex && + paramIndex < paramDetails.params.length && + paramDetails.params[paramIndex].param.category === ParamCategory.ArgsList && + !isParamSpec(paramDetails.params[paramIndex].type) + ) { + paramIndex++; + skippedArgsParam = true; + } + + // Check if there weren't enough positional arguments to populate all of + // the positional-only parameters. 
+ if ( + positionalOnlyLimitIndex >= 0 && + paramIndex < positionalOnlyLimitIndex && + (!foundUnpackedListArg || hasParamSpecArgsKwargs) + ) { + const firstParamWithDefault = paramDetails.params.findIndex((paramInfo) => !!paramInfo.defaultType); + const positionOnlyWithoutDefaultsCount = + firstParamWithDefault >= 0 && firstParamWithDefault < positionalOnlyLimitIndex + ? firstParamWithDefault + : positionalOnlyLimitIndex; + + // Calculate the number of remaining positional parameters to report. + let argsRemainingCount = positionOnlyWithoutDefaultsCount - positionalArgCount; + if (skippedArgsParam) { + // If we skipped an args parameter above, reduce the count by one + // because it's permitted to pass zero arguments to *args. + argsRemainingCount--; + } + + const firstArgsParam = paramDetails.params.findIndex( + (paramInfo) => paramInfo.param.category === ParamCategory.ArgsList && !isParamSpec(paramInfo.type) + ); + if (firstArgsParam >= paramIndex && firstArgsParam < positionalOnlyLimitIndex) { + // If there is another args parameter beyond the current param index, + // reduce the count by one because it's permitted to pass zero arguments + // to *args. + argsRemainingCount--; + } + + if (argsRemainingCount > 0) { + if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + argsRemainingCount === 1 + ? LocMessage.argMorePositionalExpectedOne() + : LocMessage.argMorePositionalExpectedCount().format({ + expected: argsRemainingCount, + }), + argList.length > positionalArgCount + ? argList[positionalArgCount].valueExpression || errorNode + : errorNode + ); + } + reportedArgError = true; + } + } + + if (!reportedArgError) { + let unpackedDictKeyNames: string[] | undefined; + let unpackedDictArgType: Type | undefined; + + // Now consume any keyword arguments. 
+ while (argIndex < argList.length) { + if (argList[argIndex].argCategory === ArgCategory.UnpackedDictionary) { + // Verify that the type used in this expression is a SupportsKeysAndGetItem[str, T]. + const argTypeResult = getTypeOfArg( + argList[argIndex], + makeInferenceContext(paramDetails.unpackedKwargsTypedDictType) + ); + const argType = argTypeResult.type; + + if (argTypeResult.isIncomplete) { + isTypeIncomplete = true; + } + + if (isAnyOrUnknown(argType)) { + unpackedDictArgType = argType; + } else if (isClassInstance(argType) && ClassType.isTypedDictClass(argType)) { + // Handle the special case where it is a TypedDict and we know which + // keys are present. + const tdEntries = getTypedDictMembersForClass(evaluatorInterface, argType); + const diag = new DiagnosticAddendum(); + + tdEntries.knownItems.forEach((entry, name) => { + const paramEntry = paramTracker.lookupName(name); + if (paramEntry) { + if (paramEntry.argsReceived > 0) { + diag.addMessage(LocMessage.paramAlreadyAssigned().format({ name })); + } else { + paramEntry.argsReceived++; + + const paramInfoIndex = paramDetails.params.findIndex( + (paramInfo) => paramInfo.param.name === name + ); + assert(paramInfoIndex >= 0); + const paramType = paramDetails.params[paramInfoIndex].type; + + validateArgTypeParams.push({ + paramCategory: ParamCategory.Simple, + paramType, + requiresTypeVarMatching: requiresSpecialization(paramType), + argument: { + argCategory: ArgCategory.Simple, + typeResult: { type: entry.valueType }, + }, + errorNode: argList[argIndex].valueExpression ?? 
errorNode, + paramName: name, + }); + } + } else if (paramDetails.kwargsIndex !== undefined) { + const paramType = paramDetails.params[paramDetails.kwargsIndex].type; + validateArgTypeParams.push({ + paramCategory: ParamCategory.KwargsDict, + paramType, + requiresTypeVarMatching: requiresSpecialization(paramType), + argument: { + argCategory: ArgCategory.Simple, + typeResult: { type: entry.valueType }, + }, + errorNode: argList[argIndex].valueExpression ?? errorNode, + paramName: name, + }); + + // Remember that this parameter has already received a value. + paramTracker.addKeywordParam(name, paramDetails.params[paramDetails.kwargsIndex]); + } else { + // If the function doesn't have a **kwargs parameter, we need to emit an error. + // However, it's possible that there was a **kwargs but it was eliminated by + // getParamListDetails because it was associated with an unpacked TypedDict. + // In this case, we can skip the error. + if (!paramDetails.hasUnpackedTypedDict) { + diag.addMessage(LocMessage.paramNameMissing().format({ name })); + } + } + }); + + const extraItemsType = tdEntries.extraItems?.valueType ?? getObjectType(); + if (!isNever(extraItemsType)) { + if (paramDetails.kwargsIndex !== undefined) { + const kwargsParam = paramDetails.params[paramDetails.kwargsIndex]; + + validateArgTypeParams.push({ + paramCategory: ParamCategory.KwargsDict, + paramType: kwargsParam.type, + requiresTypeVarMatching: requiresSpecialization(kwargsParam.type), + argument: { + argCategory: ArgCategory.UnpackedDictionary, + typeResult: { type: extraItemsType }, + }, + errorNode: argList[argIndex].valueExpression ?? 
errorNode, + paramName: kwargsParam.param.name, + }); + } + } + + if (!diag.isEmpty()) { + if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.unpackedTypedDictArgument() + diag.getString(), + argList[argIndex].valueExpression || errorNode + ); + } + reportedArgError = true; + } + } else if (paramSpec && isParamSpecKwargs(paramSpec, argType)) { + unpackedDictArgType = AnyType.create(); + + if (!paramSpecArgList) { + validateArgTypeParams.push({ + paramCategory: ParamCategory.KwargsDict, + paramType: paramSpec, + requiresTypeVarMatching: false, + argument: argList[argIndex], + argType: isParamSpec(argType) ? undefined : AnyType.create(), + errorNode: argList[argIndex].valueExpression || errorNode, + }); + } + } else { + const strObjType = getBuiltInObject(errorNode, 'str'); + + if ( + prefetched?.supportsKeysAndGetItemClass && + isInstantiableClass(prefetched.supportsKeysAndGetItemClass) && + strObjType && + isClassInstance(strObjType) + ) { + const mappingConstraints = new ConstraintTracker(); + let isValidMappingType = false; + + // If this was a TypeVar (e.g. for pseudo-generic classes), + // don't emit this error. 
+ if (isTypeVar(argType)) { + isValidMappingType = true; + } else if ( + assignType( + ClassType.cloneAsInstance(prefetched.supportsKeysAndGetItemClass), + argType, + /* diag */ undefined, + mappingConstraints + ) + ) { + const specializedMapping = solveAndApplyConstraints( + prefetched.supportsKeysAndGetItemClass, + mappingConstraints + ) as ClassType; + const typeArgs = specializedMapping.priv.typeArgs; + if (typeArgs && typeArgs.length >= 2) { + if (assignType(strObjType, typeArgs[0])) { + isValidMappingType = true; + } + + unpackedDictKeyNames = []; + doForEachSubtype(typeArgs[0], (keyType) => { + if (isClassInstance(keyType) && typeof keyType.priv.literalValue === 'string') { + unpackedDictKeyNames?.push(keyType.priv.literalValue); + } else { + unpackedDictKeyNames = undefined; + } + }); + + unpackedDictArgType = typeArgs[1]; + } else { + isValidMappingType = true; + unpackedDictArgType = UnknownType.create(); + } + } + + unpackedArgOfUnknownLength = true; + + if (paramDetails.kwargsIndex !== undefined && unpackedDictArgType) { + const paramType = paramDetails.params[paramDetails.kwargsIndex].type; + validateArgTypeParams.push({ + paramCategory: ParamCategory.Simple, + paramType, + requiresTypeVarMatching: requiresSpecialization(paramType), + argType: unpackedDictArgType, + argument: argList[argIndex], + errorNode: argList[argIndex].valueExpression || errorNode, + paramName: paramDetails.params[paramDetails.kwargsIndex].param.name, + }); + + unpackedArgMapsToVariadic = true; + } + + if (!isValidMappingType) { + if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.unpackedDictArgumentNotMapping(), + argList[argIndex].valueExpression || errorNode + ); + } + reportedArgError = true; + } + } + } + + if (paramSpecArgList) { + paramSpecArgList.push(argList[argIndex]); + } + } else { + // Protect against the case where a non-keyword argument appears after + // a keyword argument. 
This will have already been reported as a parse + // error, but we need to protect against it here. + const paramName = argList[argIndex].name; + if (paramName) { + const paramNameValue = paramName.d.value; + const paramEntry = paramTracker.lookupName(paramNameValue); + + if (paramEntry) { + if (paramEntry.argsReceived > 0) { + if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.paramAlreadyAssigned().format({ name: paramNameValue }), + paramName + ); + } + reportedArgError = true; + } else { + paramEntry.argsReceived++; + + const paramInfoIndex = paramDetails.params.findIndex( + (paramInfo) => + paramInfo.param.name === paramNameValue && + paramInfo.kind !== ParamKind.Positional + ); + assert(paramInfoIndex >= 0); + const paramType = paramDetails.params[paramInfoIndex].type; + + validateArgTypeParams.push({ + paramCategory: ParamCategory.Simple, + paramType, + requiresTypeVarMatching: requiresSpecialization(paramType), + argument: argList[argIndex], + errorNode: argList[argIndex].valueExpression ?? errorNode, + paramName: paramNameValue, + }); + trySetActive(argList[argIndex], paramDetails.params[paramInfoIndex].param); + } + } else if (paramSpecArgList) { + paramSpecArgList.push(argList[argIndex]); + } else if (paramDetails.kwargsIndex !== undefined) { + const paramType = paramDetails.params[paramDetails.kwargsIndex].type; + if (isParamSpec(paramType)) { + if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.paramNameMissing().format({ name: paramName.d.value }), + paramName + ); + } + reportedArgError = true; + } else { + validateArgTypeParams.push({ + paramCategory: ParamCategory.KwargsDict, + paramType, + requiresTypeVarMatching: requiresSpecialization(paramType), + argument: argList[argIndex], + errorNode: argList[argIndex].valueExpression ?? 
errorNode, + paramName: paramNameValue, + }); + + assert( + paramDetails.params[paramDetails.kwargsIndex], + 'paramDetails.kwargsIndex params entry is undefined' + ); + + // Remember that this parameter has already received a value. + paramTracker.addKeywordParam( + paramNameValue, + paramDetails.params[paramDetails.kwargsIndex] + ); + } + trySetActive(argList[argIndex], paramDetails.params[paramDetails.kwargsIndex].param); + } else { + if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.paramNameMissing().format({ name: paramName.d.value }), + paramName + ); + } + reportedArgError = true; + } + } else if (argList[argIndex].argCategory === ArgCategory.Simple) { + if (paramSpecArgList) { + paramSpecArgList.push(argList[argIndex]); + } else { + if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + positionParamLimitIndex === 1 + ? LocMessage.argPositionalExpectedOne() + : LocMessage.argPositionalExpectedCount().format({ + expected: positionParamLimitIndex, + }), + argList[argIndex].valueExpression || errorNode + ); + } + reportedArgError = true; + } + } else if (argList[argIndex].argCategory === ArgCategory.UnpackedList) { + // Handle the case where a *args: P.args (or *args: Any) is passed as an + // argument to a function that accepts a ParamSpec. + if (paramSpec) { + const argTypeResult = getTypeOfArg(argList[argIndex], /* inferenceContext */ undefined); + const argType = argTypeResult.type; + + if (argTypeResult.isIncomplete) { + isTypeIncomplete = true; + } + + if (isParamSpecArgs(paramSpec, argType)) { + validateArgTypeParams.push({ + paramCategory: ParamCategory.ArgsList, + paramType: paramSpec, + requiresTypeVarMatching: false, + argument: argList[argIndex], + argType: isParamSpec(argType) ? undefined : AnyType.create(), + errorNode: argList[argIndex].valueExpression ?? 
errorNode, + }); + } + } + } + } + + argIndex++; + } + + // If there are keyword-only parameters that haven't been matched but we + // have an unpacked dictionary arg, assume that it applies to them. + if (unpackedDictArgType && (!foundUnpackedListArg || paramDetails.argsIndex !== undefined)) { + // Don't consider any position-only parameters, since they cannot be matched to + // **kwargs arguments. Consider parameters that are either positional or keyword + // if there is no *args argument. + paramDetails.params.forEach((paramInfo, paramIndex) => { + const param = paramInfo.param; + if ( + paramIndex >= paramDetails.firstPositionOrKeywordIndex && + param.category === ParamCategory.Simple && + param.name && + paramTracker.lookupDetails(paramInfo).argsReceived === 0 + ) { + const paramType = paramDetails.params[paramIndex].type; + + if (!unpackedDictKeyNames || unpackedDictKeyNames.includes(param.name)) { + validateArgTypeParams.push({ + paramCategory: ParamCategory.Simple, + paramType, + requiresTypeVarMatching: requiresSpecialization(paramType), + argument: { + argCategory: ArgCategory.Simple, + typeResult: { type: unpackedDictArgType! }, + }, + errorNode: + argList.find((arg) => arg.argCategory === ArgCategory.UnpackedDictionary) + ?.valueExpression ?? errorNode, + paramName: param.name, + isParamNameSynthesized: FunctionParam.isNameSynthesized(param), + }); + + paramTracker.markArgReceived(paramDetails.params[paramIndex]); + } + } + }); + } + + // Determine whether there are any parameters that require arguments + // but have not yet received them. If we received a dictionary argument + // (i.e. an arg starting with a "**"), we will assume that all parameters + // are matched. 
+ if (!unpackedDictArgType && !FunctionType.isDefaultParamCheckDisabled(overload)) { + const unassignedParams = paramTracker.getUnassignedParams(); + + if (unassignedParams.length > 0) { + if (!canSkipDiagnosticForNode(errorNode)) { + const missingParamNames = unassignedParams.map((p) => `"${p}"`).join(', '); + if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + unassignedParams.length === 1 + ? LocMessage.argMissingForParam().format({ name: missingParamNames }) + : LocMessage.argMissingForParams().format({ names: missingParamNames }), + errorNode + ); + } + } + reportedArgError = true; + } + + // Add any implicit (default) arguments that are needed for resolving + // generic types. For example, if the function is defined as + // def foo(v1: _T = 'default') + // and _T is a TypeVar, we need to match the TypeVar to the default + // value's type if it's not provided by the caller. + paramDetails.params.forEach((paramInfo) => { + const param = paramInfo.param; + if (param.category === ParamCategory.Simple && param.name) { + const entry = paramTracker.lookupDetails(paramInfo); + + if (entry.argsNeeded === 0 && entry.argsReceived === 0) { + const defaultArgType = paramInfo.defaultType; + + if ( + defaultArgType && + !isEllipsisType(defaultArgType) && + requiresSpecialization(paramInfo.declaredType, { ignorePseudoGeneric: true }) + ) { + validateArgTypeParams.push({ + paramCategory: param.category, + paramType: paramInfo.type, + requiresTypeVarMatching: true, + argument: { + argCategory: ArgCategory.Simple, + typeResult: { type: defaultArgType }, + }, + isDefaultArg: true, + errorNode, + paramName: param.name, + isParamNameSynthesized: FunctionParam.isNameSynthesized(param), + }); + } + } + } + }); + } + } + + // If we're in speculative mode and an arg/param mismatch has already been reported, don't + // bother doing the extra work here. 
This occurs frequently when attempting to find the + // correct overload. + if (!reportedArgError || !isSpeculativeModeInUse(undefined)) { + // If there are arguments that map to a variadic *args parameter that hasn't + // already been matched, see if the type of that *args parameter is a + // TypeVarTuple. If so, we'll preprocess those arguments and combine them + // into a tuple. + assert( + paramDetails.argsIndex === undefined || paramDetails.argsIndex < paramDetails.params.length, + 'paramDetails.argsIndex params entry is invalid' + ); + if ( + paramDetails.argsIndex !== undefined && + paramDetails.argsIndex >= 0 && + FunctionParam.isTypeDeclared(paramDetails.params[paramDetails.argsIndex].param) && + !isTypeVarTupleFullyMatched + ) { + const paramType = paramDetails.params[paramDetails.argsIndex].type; + const variadicArgs = validateArgTypeParams.filter((argParam) => argParam.mapsToVarArgList); + + if (isUnpacked(paramType) && (!isTypeVarTuple(paramType) || !paramType.priv.isInUnion)) { + const tupleTypeArgs: TupleTypeArg[] = variadicArgs.map((argParam) => { + const argType = getTypeOfArg(argParam.argument, /* inferenceContext */ undefined).type; + + const containsTypeVarTuple = + isUnpackedTypeVarTuple(argType) || + (isClassInstance(argType) && + isTupleClass(argType) && + argType.priv.tupleTypeArgs && + argType.priv.tupleTypeArgs.length === 1 && + isUnpackedTypeVarTuple(argType.priv.tupleTypeArgs[0].type)); + + if ( + containsTypeVarTuple && + argParam.argument.argCategory !== ArgCategory.UnpackedList && + !argParam.mapsToVarArgList + ) { + if (!canSkipDiagnosticForNode(errorNode) && !isTypeIncomplete) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.typeVarTupleMustBeUnpacked(), + argParam.argument.valueExpression ?? 
errorNode + ); + } + reportedArgError = true; + } + + return { + type: argType, + isUnbounded: argParam.argument.argCategory === ArgCategory.UnpackedList, + }; + }); + + let specializedTuple: Type | undefined; + if (tupleTypeArgs.length === 1 && !tupleTypeArgs[0].isUnbounded) { + const entryType = tupleTypeArgs[0].type; + + if (isUnpacked(entryType)) { + specializedTuple = makePacked(entryType); + } + } + + if (!specializedTuple) { + specializedTuple = makeTupleObject(evaluatorInterface, tupleTypeArgs, /* isUnpacked */ false); + } + + const combinedArg: ValidateArgTypeParams = { + paramCategory: ParamCategory.Simple, + paramType: makePacked(paramType), + requiresTypeVarMatching: true, + argument: { + argCategory: ArgCategory.Simple, + typeResult: { type: specializedTuple }, + }, + errorNode, + paramName: paramDetails.params[paramDetails.argsIndex].param.name, + isParamNameSynthesized: FunctionParam.isNameSynthesized( + paramDetails.params[paramDetails.argsIndex].param + ), + mapsToVarArgList: true, + }; + + validateArgTypeParams = [ + ...validateArgTypeParams.filter((argParam) => !argParam.mapsToVarArgList), + combinedArg, + ]; + } + } + } + + // Special-case the builtin isinstance and issubclass functions. + if (FunctionType.isBuiltIn(overload, ['isinstance', 'issubclass']) && validateArgTypeParams.length === 2) { + validateArgTypeParams[1].isinstanceParam = true; + } + + return { + overload, + overloadIndex, + argumentErrors: reportedArgError, + isTypeIncomplete, + argParams: validateArgTypeParams, + paramSpecTarget, + paramSpecArgList, + activeParam, + unpackedArgOfUnknownLength, + unpackedArgMapsToVariadic, + argumentMatchScore: 0, + }; + } + + // After having matched arguments with parameters, this function evaluates the + // types of each argument expression and validates that the resulting type is + // compatible with the declared type of the corresponding parameter. 
+ function validateArgTypesWithContext( + errorNode: ExpressionNode, + matchResults: MatchArgsToParamsResult, + constraints: ConstraintTracker, + skipUnknownArgCheck = false, + inferenceContext: InferenceContext | undefined + ): CallResult { + const type = matchResults.overload; + + let expectedType: Type | undefined = inferenceContext?.expectedType; + + // Can we safely ignore the inference context, either because it's not provided + // or will have no effect? If so, avoid the extra work. + const returnType = inferenceContext?.returnTypeOverride ?? getEffectiveReturnType(type); + if (!returnType || !requiresSpecialization(returnType)) { + expectedType = undefined; + } + + const tryExpectedType = (expectedSubtype: Type): number => { + const clonedConstraints = constraints.clone(); + const callResult = validateArgTypesWithExpectedType( + errorNode, + matchResults, + clonedConstraints, + /* skipUnknownArgCheck */ true, + expectedSubtype, + returnType + ); + + // Use a heuristic to pick a subtype that is most likely to be correct. + // We'll look for a subtype that produces no argument errors and has + // no Unknowns in the return type. + if (!callResult.argumentErrors && callResult.returnType) { + const returnType = inferenceContext?.returnTypeOverride + ? solveAndApplyConstraints(inferenceContext.returnTypeOverride, clonedConstraints) + : callResult.returnType; + + if ( + assignType( + expectedSubtype, + returnType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default + ) + ) { + const anyOrUnknown = containsAnyOrUnknown(callResult.returnType, /* recurse */ true); + // Prefer return types that have no unknown or Any. + if (!anyOrUnknown) { + return 3; + } + + // Prefer Any over Unknown. + return isAny(anyOrUnknown) ? 2 : 1; + } + } + + return 0; + }; + + // Refine the expected type by speculatively evaluating arg types. 
If the + // expected type is a union, we may need to perform multiple evaluations + // to determine whether one of the subtypes works. + if (expectedType) { + expectedType = useSpeculativeMode(getSpeculativeNodeForCall(errorNode), () => { + let validExpectedSubtype: Type | undefined; + let bestSubtypeScore = -1; + + // If the expected type is a union, we don't know which type is expected. + // We may or may not be able to make use of the expected type. We'll evaluate + // speculatively to see if using one of the expected subtypes works. + if (isUnion(expectedType!)) { + doForEachSubtype( + expectedType!, + (expectedSubtype) => { + if (bestSubtypeScore < 3) { + const score = tryExpectedType(expectedSubtype); + if (score > 0 && score > bestSubtypeScore) { + validExpectedSubtype = expectedSubtype; + bestSubtypeScore = score; + } + } + }, + /* sortSubtypes */ true + ); + } + + if (bestSubtypeScore < 3) { + const score = tryExpectedType(expectedType!); + if (score > 0 && score > bestSubtypeScore) { + validExpectedSubtype = expectedType; + } + } + + return validExpectedSubtype; + }); + } + + // If there is no expected type, or the expected type is Any or Unknown, + // there's nothing left to do here. 
+ if (!expectedType || isAnyOrUnknown(expectedType) || isNever(expectedType)) { + return validateArgTypes(errorNode, matchResults, constraints, skipUnknownArgCheck); + } + + return validateArgTypesWithExpectedType( + errorNode, + matchResults, + constraints, + skipUnknownArgCheck, + expectedType, + returnType + ); + } + + function validateArgTypesWithExpectedType( + errorNode: ExpressionNode, + matchResults: MatchArgsToParamsResult, + constraints: ConstraintTracker, + skipUnknownArgCheck = false, + expectedType: Type, + returnType: Type + ): CallResult { + const liveTypeVarScopes = ParseTreeUtils.getTypeVarScopesForNode(errorNode); + let assignFlags = AssignTypeFlags.PopulateExpectedType; + if (containsLiteralType(expectedType, /* includeTypeArgs */ true)) { + assignFlags |= AssignTypeFlags.RetainLiteralsForTypeVar; + } + + // Prepopulate the constraints based on the specialized expected type. + // This will allow us to more closely match the expected type if possible. + if (isClassInstance(returnType) && isClassInstance(expectedType) && !isTypeSame(returnType, expectedType)) { + const tempConstraints = new ConstraintTracker(); + if ( + addConstraintsForExpectedType( + evaluatorInterface, + returnType, + expectedType, + tempConstraints, + liveTypeVarScopes, + errorNode.start + ) + ) { + const genericReturnType = selfSpecializeClass(returnType, { + overrideTypeArgs: true, + }); + + expectedType = solveAndApplyConstraints(genericReturnType, tempConstraints, { + replaceUnsolved: { + scopeIds: getTypeVarScopeIds(returnType), + useUnknown: true, + tupleClassType: getTupleClassType(), + }, + }); + + assignFlags |= AssignTypeFlags.SkipPopulateUnknownExpectedType; + } + } + + expectedType = transformExpectedType(expectedType, liveTypeVarScopes, errorNode.start); + + assignType(returnType, expectedType, /* diag */ undefined, constraints, assignFlags); + + return validateArgTypes(errorNode, matchResults, constraints, skipUnknownArgCheck); + } + + function validateArgTypes( + 
errorNode: ExpressionNode, + matchResults: MatchArgsToParamsResult, + constraints: ConstraintTracker, + skipUnknownArgCheck: boolean | undefined + ): CallResult { + const type = matchResults.overload; + let isTypeIncomplete = matchResults.isTypeIncomplete; + let argumentErrors = false; + let argumentMatchScore = 0; + let specializedInitSelfType: Type | undefined; + let anyOrUnknownArg: UnknownType | AnyType | undefined; + const speculativeNode = getSpeculativeNodeForCall(errorNode); + const typeCondition = getTypeCondition(type); + const paramSpec = FunctionType.getParamSpecFromArgsKwargs(type); + + // Check for an attempt to invoke an unimplemented abstract method. + if (type.priv.boundToType && !type.priv.boundToType.priv.includeSubclasses && type.shared.methodClass) { + const abstractSymbolInfo = getAbstractSymbolInfo(type.shared.methodClass, type.shared.name); + + if (abstractSymbolInfo && !abstractSymbolInfo.hasImplementation) { + addDiagnostic( + DiagnosticRule.reportAbstractUsage, + LocMessage.abstractMethodInvocation().format({ + method: type.shared.name, + }), + errorNode.nodeType === ParseNodeType.Call ? errorNode.d.leftExpr : errorNode + ); + } + } + + // The type annotation for the "self" parameter in an __init__ method to + // can influence the type being constructed. 
+ if ( + type.shared.name === '__init__' && + type.priv.strippedFirstParamType && + type.priv.boundToType && + isClassInstance(type.priv.strippedFirstParamType) && + isClassInstance(type.priv.boundToType) && + ClassType.isSameGenericClass(type.priv.strippedFirstParamType, type.priv.boundToType) && + type.priv.strippedFirstParamType.priv.typeArgs + ) { + const typeParams = type.priv.strippedFirstParamType.shared.typeParams; + specializedInitSelfType = type.priv.strippedFirstParamType; + type.priv.strippedFirstParamType.priv.typeArgs.forEach((typeArg, index) => { + if (index < typeParams.length) { + const typeParam = typeParams[index]; + if (!isTypeSame(typeParam, typeArg, { ignorePseudoGeneric: true })) { + constraints.setBounds(typeParams[index], typeArg); + } + } + }); + } + + // Special-case a few built-in calls that are often used for + // casting or checking for unknown types. + if ( + FunctionType.isBuiltIn(type, [ + 'typing.cast', + 'typing_extensions.cast', + 'builtins.isinstance', + 'builtins.issubclass', + ]) + ) { + skipUnknownArgCheck = true; + } + + // Run through all args and validate them against their matched parameter. + // We'll do two phases. The first one establishes constraints for type + // variables. The second perform type validation using the solved + // types. We can skip the first pass if there are no type vars to solve. + const typeVarCount = matchResults.argParams.filter((arg) => arg.requiresTypeVarMatching).length; + if (typeVarCount > 0) { + // Do up to two passes. 
+ let passCount = Math.min(typeVarCount, 2); + + for (let i = 0; i < passCount; i++) { + useSpeculativeMode(speculativeNode, () => { + matchResults.argParams.forEach((argParam) => { + if (!argParam.requiresTypeVarMatching) { + return; + } + + const argResult = validateArgType( + argParam, + constraints, + { type, isIncomplete: matchResults.isTypeIncomplete }, + { + skipUnknownArgCheck, + isArgFirstPass: passCount > 1 && i === 0, + conditionFilter: typeCondition, + skipReportError: true, + } + ); + + if (argResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + + // If we skipped a bare type var during the first pass, add + // another pass to ensure that we handle all of the type variables. + if (i === 0 && passCount < 2 && argResult.skippedBareTypeVarExpectedType) { + passCount++; + } + }); + }); + } + } + + let sawParamSpecArgs = false; + let sawParamSpecKwargs = false; + + let condition: TypeCondition[] = []; + const argResults: ArgResult[] = []; + + matchResults.argParams.forEach((argParam, argParamIndex) => { + const argResult = validateArgType( + argParam, + constraints, + { type, isIncomplete: matchResults.isTypeIncomplete }, + { + skipUnknownArgCheck, + conditionFilter: typeCondition, + } + ); + + argResults.push(argResult); + + if (!argResult.isCompatible) { + argumentErrors = true; + + // Add the inverse index so earlier parameters represent larger errors. + // This will help the heuristics in the overload error paths to pick the + // most likely intended overload if none of them match. + argumentMatchScore += 1 + (matchResults.argParams.length - argParamIndex); + } + + if (argResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + + if (argResult.condition) { + condition = TypeCondition.combine(condition, argResult.condition) ?? []; + } + + if (isAnyOrUnknown(argResult.argType)) { + anyOrUnknownArg = anyOrUnknownArg + ? 
preserveUnknown(argResult.argType, anyOrUnknownArg) + : argResult.argType; + } + + if (paramSpec) { + if (argParam.argument.argCategory === ArgCategory.UnpackedList) { + if (isParamSpecArgs(paramSpec, argResult.argType)) { + if (sawParamSpecArgs) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.paramSpecArgsKwargsDuplicate().format({ type: printType(paramSpec) }), + argParam.errorNode + ); + } + + sawParamSpecArgs = true; + } + } + + if (argParam.argument.argCategory === ArgCategory.UnpackedDictionary) { + if (isParamSpecKwargs(paramSpec, argResult.argType)) { + if (sawParamSpecKwargs) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.paramSpecArgsKwargsDuplicate().format({ type: printType(paramSpec) }), + argParam.errorNode + ); + } + + sawParamSpecKwargs = true; + } + } + } + }); + + let paramSpecConstraints: (ConstraintTracker | undefined)[] = []; + + // Handle the assignment of additional arguments that map to a param spec. + if (matchResults.paramSpecArgList && matchResults.paramSpecTarget) { + const paramSpecArgResult = validateArgTypesForParamSpec( + errorNode, + matchResults.paramSpecArgList, + matchResults.paramSpecTarget, + constraints + ); + + if (paramSpecArgResult.argumentErrors) { + argumentErrors = true; + argumentMatchScore += 1; + } + + paramSpecConstraints = paramSpecArgResult.constraintTrackers; + } else if (paramSpec) { + if (!sawParamSpecArgs || !sawParamSpecKwargs) { + if (!isTypeIncomplete) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.paramSpecArgsMissing().format({ type: printType(paramSpec) }), + errorNode + ); + } + argumentErrors = true; + argumentMatchScore += 1; + } + } + + // Calculate the return type. 
+ const returnTypeResult = getEffectiveReturnTypeResult(type, { + callSiteInfo: { args: matchResults.argParams, errorNode }, + }); + let returnType = returnTypeResult.type; + if (returnTypeResult.isIncomplete) { + isTypeIncomplete = true; + } + + if (condition.length > 0) { + returnType = TypeBase.cloneForCondition(returnType, condition); + } + + let eliminateUnsolvedInUnions = true; + + // If the function is returning a callable, don't eliminate unsolved + // type vars within a union. There are legit uses for unsolved type vars + // within a callable. + if (isFunctionOrOverloaded(returnType)) { + eliminateUnsolvedInUnions = false; + } + + let specializedReturnType = solveAndApplyConstraints(returnType, constraints, { + replaceUnsolved: { + scopeIds: getTypeVarScopeIds(type), + unsolvedExemptTypeVars: getUnknownExemptTypeVarsForReturnType(type, returnType), + tupleClassType: getTupleClassType(), + eliminateUnsolvedInUnions, + }, + }); + specializedReturnType = addConditionToType(specializedReturnType, typeCondition, { skipBoundTypeVars: true }); + + // If the function includes a ParamSpec and the captured signature(s) includes + // generic types, we may need to apply those solved TypeVars. + if (paramSpecConstraints.length > 0) { + paramSpecConstraints.forEach((paramSpecConstraints) => { + if (paramSpecConstraints) { + specializedReturnType = solveAndApplyConstraints(specializedReturnType, paramSpecConstraints); + + // It's possible that one or more of the TypeVars or ParamSpecs + // in the constraints refer to TypeVars that were solved in + // the paramSpecConstraints. Apply these solved TypeVars accordingly. + applySourceSolutionToConstraints( + constraints, + solveConstraints(evaluatorInterface, paramSpecConstraints) + ); + } + }); + } + + // If the final return type is an unpacked tuple, turn it into a normal (unpacked) tuple. 
+ if (isUnpackedClass(specializedReturnType)) { + specializedReturnType = ClassType.cloneForPacked(specializedReturnType); + } + + const liveTypeVarScopes = ParseTreeUtils.getTypeVarScopesForNode(errorNode); + specializedReturnType = adjustCallableReturnType(errorNode, specializedReturnType, liveTypeVarScopes); + + if (specializedInitSelfType) { + specializedInitSelfType = solveAndApplyConstraints(specializedInitSelfType, constraints); + } + + matchResults.argumentMatchScore = argumentMatchScore; + + return { + argumentErrors, + argResults, + anyOrUnknownArg, + returnType: specializedReturnType, + isTypeIncomplete, + activeParam: matchResults.activeParam, + specializedInitSelfType, + overloadsUsedForCall: argumentErrors ? [] : [type], + }; + } + + // In general, all in-scope type variables left in a return type should be + // replaced with Unknown. However, if the return type is a callable that uses + // type vars that are found nowhere within the function's input parameters, + // we'll treat these as though they're scoped to the callable and leave them + // unsolved. + function getUnknownExemptTypeVarsForReturnType(functionType: FunctionType, returnType: Type): TypeVarType[] { + if (isFunction(returnType) && !returnType.shared.name) { + const returnTypeScopeId = returnType.shared.typeVarScopeId; + + // If one or more type vars found within the return type are scoped to + // the functionType but don't appear anywhere else within the functionType's + // input parameters, rescope them to the return type callable so they are + // not replaced with Unknown. + if (returnTypeScopeId && functionType.shared.typeVarScopeId) { + let typeVarsInReturnType = getTypeVarArgsRecursive(returnType); + + // Remove any type variables that appear in the function's input parameters. 
+ functionType.shared.parameters.forEach((param, index) => { + if (FunctionParam.isTypeDeclared(param)) { + const typeVarsInInputParam = getTypeVarArgsRecursive( + FunctionType.getParamType(functionType, index) + ); + typeVarsInReturnType = typeVarsInReturnType.filter( + (returnTypeVar) => + !typeVarsInInputParam.some((inputTypeVar) => isTypeSame(returnTypeVar, inputTypeVar)) + ); + } + }); + + return typeVarsInReturnType; + } + } + + return []; + } + + // If the return type includes a generic Callable type, set the type var + // scope to the scope of the function it was originally associated with + // to allow these type vars to be solved. This won't work with overloads + // or unions of callables. It's intended for a specific use case. We may + // need to make this more sophisticated in the future. + function adjustCallableReturnType( + callNode: ExpressionNode, + returnType: Type, + liveTypeVarScopes: TypeVarScopeId[] + ): Type { + if (!isFunction(returnType)) { + return returnType; + } + + // What type variables are referenced in the callable return type? Do not include any live type variables. + const typeParams = getTypeVarArgsRecursive(returnType).filter( + (t) => !liveTypeVarScopes.some((scopeId) => t.priv.scopeId === scopeId) + ); + + // If there are no unsolved type variables, we're done. If there are + // unsolved type variables, rescope them to the callable. + if (typeParams.length === 0) { + return returnType; + } + + inferReturnTypeIfNecessary(returnType); + + // Create a new scope ID based on the caller's position. This + // will guarantee uniqueness. If another caller uses the same + // call and arguments, the type vars will not conflict. 
+ const newScopeId = ParseTreeUtils.getScopeIdForNode(callNode); + const solution = new ConstraintSolution(); + + const newTypeParams = typeParams.map((typeVar) => { + const newTypeParam = TypeVarType.cloneForScopeId( + typeVar, + newScopeId, + typeVar.priv.scopeName, + TypeVarScopeType.Function + ); + solution.setType(typeVar, newTypeParam); + return newTypeParam; + }); + + return applySolvedTypeVars( + FunctionType.cloneWithNewTypeVarScopeId( + returnType, + newScopeId, + /* constructorTypeVarScopeId */ undefined, + newTypeParams + ), + solution + ); + } + + // Tries to assign the call arguments to the function parameter + // list and reports any mismatches in types or counts. Returns the + // specialized return type of the call. + function validateArgs( + errorNode: ExpressionNode, + argList: Arg[], + typeResult: TypeResult, + constraints: ConstraintTracker | undefined, + skipUnknownArgCheck = false, + inferenceContext: InferenceContext | undefined + ): CallResult { + const matchResults = matchArgsToParams(errorNode, argList, typeResult, 0); + + if (matchResults.argumentErrors) { + // Evaluate types of all args. This will ensure that referenced symbols are + // not reported as unaccessed. Also pass the expected parameter type as + // inference context to enable proper completions even when there are errors. 
+ matchResults.argParams.forEach((argParam) => { + if (argParam.argument.valueExpression && !isSpeculativeModeInUse(argParam.argument.valueExpression)) { + getTypeOfExpression( + argParam.argument.valueExpression, + /* flags */ undefined, + makeInferenceContext(argParam.paramType) + ); + } + }); + + // Also evaluate any arguments that weren't matched to parameters + argList.forEach((arg) => { + if (arg.valueExpression && !isSpeculativeModeInUse(arg.valueExpression)) { + // Check if this argument was already evaluated above + const wasEvaluated = matchResults.argParams.some((argParam) => argParam.argument === arg); + if (!wasEvaluated) { + getTypeOfExpression(arg.valueExpression); + } + } + }); + // Use a return type of Unknown but attach a "possible type" to it + // so the completion provider can suggest better completions. + const possibleType = FunctionType.getEffectiveReturnType(typeResult.type); + return { + returnType: + possibleType && !isAnyOrUnknown(possibleType) + ? UnknownType.createPossibleType(possibleType, /* isIncomplete */ false) + : undefined, + argumentErrors: true, + activeParam: matchResults.activeParam, + overloadsUsedForCall: [], + }; + } + + return validateArgTypesWithContext( + errorNode, + matchResults, + constraints ?? new ConstraintTracker(), + skipUnknownArgCheck, + makeInferenceContext( + inferenceContext?.expectedType, + inferenceContext?.isTypeIncomplete, + inferenceContext?.returnTypeOverride + ) + ); + } + + // Determines whether the specified argument list satisfies the function + // signature bound to the specified ParamSpec. Return value indicates success. + function validateArgTypesForParamSpec( + errorNode: ExpressionNode, + argList: Arg[], + paramSpec: ParamSpecType, + destConstraints: ConstraintTracker + ): ParamSpecArgResult { + const sets = destConstraints.getConstraintSets(); + + // Handle the common case where there is only one signature context. 
+ if (sets.length === 1) { + return validateArgTypesForParamSpecSignature(errorNode, argList, paramSpec, sets[0]); + } + + const filteredSets: ConstraintSet[] = []; + const constraintTrackers: (ConstraintTracker | undefined)[] = []; + const speculativeNode = getSpeculativeNodeForCall(errorNode); + + sets.forEach((context) => { + // Use speculative mode to avoid emitting errors or caching types. + useSpeculativeMode(speculativeNode, () => { + const paramSpecArgResult = validateArgTypesForParamSpecSignature( + errorNode, + argList, + paramSpec, + context + ); + + if (!paramSpecArgResult.argumentErrors) { + filteredSets.push(context); + } + + appendArray(constraintTrackers, paramSpecArgResult.constraintTrackers); + }); + }); + + // Copy back any compatible signature contexts if any were compatible. + if (filteredSets.length > 0) { + destConstraints.addConstraintSets(filteredSets); + } + + // Evaluate non-speculatively to produce a final result and cache types. + const paramSpecArgResult = validateArgTypesForParamSpecSignature( + errorNode, + argList, + paramSpec, + filteredSets.length > 0 ? filteredSets[0] : sets[0] + ); + + return { argumentErrors: paramSpecArgResult.argumentErrors, constraintTrackers: constraintTrackers }; + } + + function validateArgTypesForParamSpecSignature( + errorNode: ExpressionNode, + argList: Arg[], + paramSpec: ParamSpecType, + constraintSet: ConstraintSet + ): ParamSpecArgResult { + const solutionSet = solveConstraintSet(evaluatorInterface, constraintSet); + let paramSpecType = solutionSet.getType(paramSpec); + paramSpecType = convertTypeToParamSpecValue(paramSpecType ?? paramSpec); + + const matchResults = matchArgsToParams(errorNode, argList, { type: paramSpecType }, 0); + const functionType = matchResults.overload; + const constraints = new ConstraintTracker(); + + if (matchResults.argumentErrors) { + // Evaluate types of all args. This will ensure that referenced symbols are + // not reported as unaccessed. 
+ argList.forEach((arg) => { + if (arg.valueExpression && !isSpeculativeModeInUse(arg.valueExpression)) { + getTypeOfExpression(arg.valueExpression); + } + }); + + return { argumentErrors: true, constraintTrackers: [constraints] }; + } + + const functionParamSpec = FunctionType.getParamSpecFromArgsKwargs(functionType); + const functionWithoutParamSpec = FunctionType.cloneRemoveParamSpecArgsKwargs(functionType); + + // Handle the recursive case where we're passing (*args: P.args, **kwargs: P.args) + // a remaining function of type (*P). + if ( + functionParamSpec && + functionWithoutParamSpec.shared.parameters.length === 0 && + isTypeSame(functionParamSpec, paramSpec) + ) { + // If there are any arguments other than *args: P.args or **kwargs: P.kwargs, + // report an error. + let argsCount = 0; + let kwargsCount = 0; + let argumentErrors = false; + let argErrorNode: ExpressionNode | undefined; + + for (const arg of argList) { + const argType = getTypeOfArg(arg, /* inferenceContext */ undefined)?.type; + + if (arg.argCategory === ArgCategory.UnpackedList) { + if (isParamSpecArgs(paramSpec, argType)) { + argsCount++; + } + } else if (arg.argCategory === ArgCategory.UnpackedDictionary) { + if (isParamSpecKwargs(paramSpec, argType)) { + kwargsCount++; + } + } else { + argErrorNode = argErrorNode ?? arg.valueExpression; + argumentErrors = true; + } + } + + if (argsCount !== 1 || kwargsCount !== 1) { + argumentErrors = true; + } + + if (argumentErrors) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.paramSpecArgsMissing().format({ + type: printType(functionParamSpec), + }), + argErrorNode ?? 
errorNode + ); + } + + return { argumentErrors, constraintTrackers: [constraints] }; + } + + const result = validateArgTypes(errorNode, matchResults, constraints, /* skipUnknownArgCheck */ undefined); + return { argumentErrors: !!result.argumentErrors, constraintTrackers: [constraints] }; + } + + function validateArgType( + argParam: ValidateArgTypeParams, + constraints: ConstraintTracker, + typeResult: TypeResult | undefined, + options: ValidateArgTypeOptions + ): ArgResult { + let argType: Type | undefined; + let expectedTypeDiag: DiagnosticAddendum | undefined; + let isTypeIncomplete = !!typeResult?.isIncomplete; + let isCompatible = true; + const functionName = typeResult?.type.shared.name; + let skippedBareTypeVarExpectedType = false; + + if (argParam.argument.valueExpression) { + let expectedType: Type | undefined; + + // Is the expected type a "bare" in-scope TypeVar or a union of bare in-scope TypeVars? + let isExpectedTypeBareTypeVar = true; + doForEachSubtype(argParam.paramType, (subtype) => { + if (!isTypeVar(subtype) || subtype.priv.scopeId !== typeResult?.type.shared.typeVarScopeId) { + isExpectedTypeBareTypeVar = false; + } + }); + + if (!options.isArgFirstPass || !isExpectedTypeBareTypeVar) { + expectedType = argParam.paramType; + + // If the parameter type is a function with a ParamSpec, don't apply + // the solved TypeVars if the constraint tracker has more than one signature. + // This will expand the ParamSpec into an overload, which will cause problems. + const skipApplySolvedTypeVars = + isFunction(argParam.paramType) && + FunctionType.getParamSpecFromArgsKwargs(argParam.paramType) && + constraints.getConstraintSets().length > 1; + + if (!skipApplySolvedTypeVars) { + expectedType = solveAndApplyConstraints(expectedType, constraints, /* applyOptions */ undefined, { + useLowerBoundOnly: !!options.isArgFirstPass, + }); + } + } else { + skippedBareTypeVarExpectedType = true; + } + + // If the expected type is unknown, don't use an expected type. 
Instead, + // use default rules for evaluating the expression type. + if (expectedType && isUnknown(expectedType)) { + expectedType = undefined; + } + + // Was the argument's type precomputed by the caller? + if (argParam.argType) { + argType = argParam.argType; + } else { + const flags = argParam.isinstanceParam + ? EvalFlags.IsInstanceArgDefaults + : EvalFlags.NoFinal | EvalFlags.NoSpecialize; + const exprTypeResult = getTypeOfExpression( + argParam.argument.valueExpression, + flags, + makeInferenceContext(expectedType, !!typeResult?.isIncomplete) + ); + + argType = exprTypeResult.type; + + // If the argument is unpacked and we are supposed to enforce + // that it's an iterator, do so now. + if (argParam.argument.argCategory === ArgCategory.UnpackedList && argParam.argument.enforceIterable) { + const iteratorType = getTypeOfIterator( + exprTypeResult, + /* isAsync */ false, + argParam.argument.valueExpression + ); + // Try to prevent cascading errors if it was not iterable. + argType = iteratorType?.type ?? UnknownType.create(); + } + + if (exprTypeResult.isIncomplete) { + isTypeIncomplete = true; + } + + if (expectedType && requiresSpecialization(expectedType)) { + // Assign the argument type back to the expected type to assign + // values to any unification variables. + const clonedConstraints = constraints.clone(); + if ( + assignType( + expectedType, + argType, + /* diag */ undefined, + clonedConstraints, + options?.isArgFirstPass ? AssignTypeFlags.ArgAssignmentFirstPass : AssignTypeFlags.Default + ) + ) { + constraints.copyFromClone(clonedConstraints); + } else { + isCompatible = false; + } + } + + expectedTypeDiag = exprTypeResult.expectedTypeDiagAddendum; + } + + if (argParam.argument && argParam.argument.name && !isSpeculativeModeInUse(argParam.errorNode)) { + writeTypeCache( + argParam.argument.name, + { type: expectedType ?? 
argType, isIncomplete: isTypeIncomplete }, + EvalFlags.None + ); + } + } else { + // Was the argument's type precomputed by the caller? + if (argParam.argType) { + argType = argParam.argType; + } else { + const argTypeResult = getTypeOfArg( + argParam.argument, + makeInferenceContext(argParam.paramType, isTypeIncomplete) + ); + argType = argTypeResult.type; + if (argTypeResult.isIncomplete) { + isTypeIncomplete = true; + } + } + + // If the argument came from a parameter's default argument value, + // we may need to specialize the type. + if (argParam.isDefaultArg) { + argType = solveAndApplyConstraints(argType, constraints); + } + } + + // If we're assigning to a var arg dictionary with a TypeVar type, + // strip literals before performing the assignment. This is used in + // places like a dict constructor. + if (argParam.paramCategory === ParamCategory.KwargsDict && isTypeVar(argParam.paramType)) { + argType = stripLiteralValue(argType); + } + + // If there's a constraint filter, apply it to top-level type variables + // if appropriate. This doesn't properly handle non-top-level constrained + // type variables. + if (options.conditionFilter) { + argType = mapSubtypesExpandTypeVars( + argType, + { conditionFilter: options.conditionFilter }, + (expandedSubtype) => { + return expandedSubtype; + } + ); + } + + const condition = argType.props?.condition; + + let diag = options?.skipReportError ? undefined : new DiagnosticAddendum(); + + if (isParamSpec(argParam.paramType)) { + // Handle the case where we're assigning a *args or **kwargs argument + // to a *P.args or **P.kwargs parameter. + if (argParam.paramType.priv.paramSpecAccess !== undefined) { + return { isCompatible, argType, isTypeIncomplete, condition }; + } + + // Handle the case where we're assigning a *P.args or **P.kwargs argument + // to a *P.args or **P.kwargs parameter. 
+ if (isParamSpec(argType) && argType.priv.paramSpecAccess !== undefined) { + return { isCompatible, argType, isTypeIncomplete, condition }; + } + } + + let assignTypeFlags = AssignTypeFlags.Default; + + if (argParam.isinstanceParam) { + assignTypeFlags |= AssignTypeFlags.AllowIsinstanceSpecialForms; + } + + if (options?.isArgFirstPass) { + assignTypeFlags |= AssignTypeFlags.ArgAssignmentFirstPass; + } + + if (!assignType(argParam.paramType, argType, diag?.createAddendum(), constraints, assignTypeFlags)) { + if (!options?.skipReportError) { + // Mismatching parameter types are common in untyped code; don't bother spending time + // printing types if the diagnostic is disabled. + const fileInfo = AnalyzerNodeInfo.getFileInfo(argParam.errorNode); + if ( + fileInfo.diagnosticRuleSet.reportArgumentType !== 'none' && + !canSkipDiagnosticForNode(argParam.errorNode) && + !isTypeIncomplete + ) { + const argTypeText = printType(argType); + const paramTypeText = printType(argParam.paramType); + + let message: string; + if (argParam.paramName && !argParam.isParamNameSynthesized) { + if (functionName) { + message = LocMessage.argAssignmentParamFunction().format({ + argType: argTypeText, + paramType: paramTypeText, + functionName, + paramName: argParam.paramName, + }); + } else { + message = LocMessage.argAssignmentParam().format({ + argType: argTypeText, + paramType: paramTypeText, + paramName: argParam.paramName, + }); + } + } else { + if (functionName) { + message = LocMessage.argAssignmentFunction().format({ + argType: argTypeText, + paramType: paramTypeText, + functionName, + }); + } else { + message = LocMessage.argAssignment().format({ + argType: argTypeText, + paramType: paramTypeText, + }); + } + } + + // If we have an expected type diagnostic addendum, use that + // instead of the local diagnostic addendum because it will + // be more informative. 
+ if (expectedTypeDiag) { + diag = expectedTypeDiag; + } + + addDiagnostic( + DiagnosticRule.reportArgumentType, + message + diag?.getString(), + argParam.errorNode, + diag?.getEffectiveTextRange() ?? argParam.errorNode + ); + } + } + + return { isCompatible: false, argType, isTypeIncomplete, skippedBareTypeVarExpectedType, condition }; + } + + if (!options.skipUnknownArgCheck) { + const simplifiedType = makeTopLevelTypeVarsConcrete(removeUnbound(argType)); + const fileInfo = AnalyzerNodeInfo.getFileInfo(argParam.errorNode); + + function getDiagAddendum() { + const diagAddendum = new DiagnosticAddendum(); + if (argParam.paramName) { + diagAddendum.addMessage( + (functionName + ? LocAddendum.argParamFunction().format({ + paramName: argParam.paramName, + functionName, + }) + : LocAddendum.argParam().format({ paramName: argParam.paramName })) + + diagAddendum.getString() + ); + } + return diagAddendum; + } + + // Do not check for unknown types if the expected type is "Any". + // Don't print types if reportUnknownArgumentType is disabled for performance. + if ( + fileInfo.diagnosticRuleSet.reportUnknownArgumentType !== 'none' && + !isAny(argParam.paramType) && + !isTypeIncomplete + ) { + if (isUnknown(simplifiedType)) { + const diagAddendum = getDiagAddendum(); + addDiagnostic( + DiagnosticRule.reportUnknownArgumentType, + LocMessage.argTypeUnknown() + diagAddendum.getString(), + argParam.errorNode + ); + } else if (isPartlyUnknown(simplifiedType)) { + // If the parameter type is also partially unknown, don't report + // the error because it's likely that the partially-unknown type + // arose due to bidirectional type matching. 
+ if (!isPartlyUnknown(argParam.paramType)) { + const diagAddendum = getDiagAddendum(); + diagAddendum.addMessage( + LocAddendum.argumentType().format({ + type: printType(simplifiedType, { expandTypeAlias: true }), + }) + ); + addDiagnostic( + DiagnosticRule.reportUnknownArgumentType, + LocMessage.argTypePartiallyUnknown() + diagAddendum.getString(), + argParam.errorNode + ); + } + } + } + } + + return { isCompatible, argType, isTypeIncomplete, skippedBareTypeVarExpectedType, condition }; + } + + function createTypeVarType(errorNode: ExpressionNode, classType: ClassType, argList: Arg[]): Type | undefined { + let typeVarName = ''; + let firstConstraintArg: Arg | undefined; + let defaultValueNode: ExpressionNode | undefined; + + if (argList.length === 0) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.typeVarFirstArg(), errorNode); + return undefined; + } + + const firstArg = argList[0]; + if (firstArg.valueExpression && firstArg.valueExpression.nodeType === ParseNodeType.StringList) { + typeVarName = firstArg.valueExpression.d.strings.map((s) => s.d.value).join(''); + } else { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarFirstArg(), + firstArg.valueExpression || errorNode + ); + } + + const typeVar = TypeBase.cloneAsSpecialForm( + TypeVarType.createInstantiable(typeVarName), + ClassType.cloneAsInstance(classType) + ); + + // Parse the remaining parameters. + const paramNameMap = new Map(); + for (let i = 1; i < argList.length; i++) { + const paramNameNode = argList[i].name; + const paramName = paramNameNode ? 
paramNameNode.d.value : undefined; + + if (paramName) { + if (paramNameMap.get(paramName)) { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.duplicateParam().format({ name: paramName }), + argList[i].valueExpression || errorNode + ); + } + + if (paramName === 'bound') { + if (TypeVarType.hasConstraints(typeVar)) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarBoundAndConstrained(), + argList[i].valueExpression || errorNode + ); + } else { + const argType = + argList[i].typeResult?.type ?? + getTypeOfExpressionExpectingType(argList[i].valueExpression!, { + noNonTypeSpecialForms: true, + typeExpression: true, + parsesStringLiteral: true, + }).type; + if ( + requiresSpecialization(argType, { ignorePseudoGeneric: true, ignoreImplicitTypeArgs: true }) + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarBoundGeneric(), + argList[i].valueExpression || errorNode + ); + } + typeVar.shared.boundType = convertToInstance(argType); + } + } else if (paramName === 'covariant') { + if (argList[i].valueExpression && getBooleanValue(argList[i].valueExpression!)) { + if ( + typeVar.shared.declaredVariance === Variance.Contravariant || + typeVar.shared.declaredVariance === Variance.Auto + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarVariance(), + argList[i].valueExpression! + ); + } else { + typeVar.shared.declaredVariance = Variance.Covariant; + } + } + } else if (paramName === 'contravariant') { + if (argList[i].valueExpression && getBooleanValue(argList[i].valueExpression!)) { + if ( + typeVar.shared.declaredVariance === Variance.Covariant || + typeVar.shared.declaredVariance === Variance.Auto + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarVariance(), + argList[i].valueExpression! 
+ ); + } else { + typeVar.shared.declaredVariance = Variance.Contravariant; + } + } + } else if (paramName === 'infer_variance') { + if (argList[i].valueExpression && getBooleanValue(argList[i].valueExpression!)) { + if ( + typeVar.shared.declaredVariance === Variance.Covariant || + typeVar.shared.declaredVariance === Variance.Contravariant + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarVariance(), + argList[i].valueExpression! + ); + } else { + typeVar.shared.declaredVariance = Variance.Auto; + } + } + } else if (paramName === 'default') { + defaultValueNode = argList[i].valueExpression; + const argType = + argList[i].typeResult?.type ?? + getTypeOfExpressionExpectingType(defaultValueNode!, { + allowTypeVarsWithoutScopeId: true, + typeExpression: true, + }).type; + typeVar.shared.defaultType = convertToInstance(argType); + typeVar.shared.isDefaultExplicit = true; + + const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); + if ( + !fileInfo.isStubFile && + PythonVersion.isLessThan(fileInfo.executionEnvironment.pythonVersion, pythonVersion3_13) && + classType.shared.moduleName !== 'typing_extensions' + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarDefaultIllegal(), + defaultValueNode! + ); + } + } else { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.typeVarUnknownParam().format({ name: paramName }), + argList[i].node?.d.name || argList[i].valueExpression || errorNode + ); + } + + paramNameMap.set(paramName, paramName); + } else { + if (TypeVarType.hasBound(typeVar)) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarBoundAndConstrained(), + argList[i].valueExpression || errorNode + ); + } else { + const argType = + argList[i].typeResult?.type ?? 
+ getTypeOfExpressionExpectingType(argList[i].valueExpression!, { + typeExpression: true, + }).type; + + if (requiresSpecialization(argType, { ignorePseudoGeneric: true })) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarConstraintGeneric(), + argList[i].valueExpression || errorNode + ); + } + TypeVarType.addConstraint(typeVar, convertToInstance(argType)); + if (firstConstraintArg === undefined) { + firstConstraintArg = argList[i]; + } + } + } + } + + if (typeVar.shared.constraints.length === 1 && firstConstraintArg) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarSingleConstraint(), + firstConstraintArg.valueExpression || errorNode + ); + } + + // If a default is provided, make sure it is compatible with the bound + // or constraint. + if (typeVar.shared.isDefaultExplicit && defaultValueNode) { + verifyTypeVarDefaultIsCompatible(typeVar, defaultValueNode); + } + + return typeVar; + } + + function verifyTypeVarDefaultIsCompatible(typeVar: TypeVarType, defaultValueNode: ExpressionNode) { + assert(typeVar.shared.isDefaultExplicit); + + const constraints = new ConstraintTracker(); + const concreteDefaultType = makeTopLevelTypeVarsConcrete( + solveAndApplyConstraints(typeVar.shared.defaultType, constraints, { + replaceUnsolved: { + scopeIds: getTypeVarScopeIds(typeVar), + tupleClassType: getTupleClassType(), + }, + }) + ); + + if (typeVar.shared.boundType) { + if (!assignType(typeVar.shared.boundType, concreteDefaultType)) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarDefaultBoundMismatch(), + defaultValueNode + ); + } + } else if (TypeVarType.hasConstraints(typeVar)) { + let isConstraintCompatible = true; + + // If the default type is a constrained TypeVar, make sure all of its constraints + // are also constraints in typeVar. If the default type is not a constrained TypeVar, + // use its concrete type to compare against the constraints. 
+ if (isTypeVar(typeVar.shared.defaultType) && TypeVarType.hasConstraints(typeVar.shared.defaultType)) { + for (const constraint of typeVar.shared.defaultType.shared.constraints) { + if (!typeVar.shared.constraints.some((c) => isTypeSame(c, constraint))) { + isConstraintCompatible = false; + } + } + } else if ( + !typeVar.shared.constraints.some((constraint) => + isTypeSame(constraint, concreteDefaultType, { ignoreConditions: true }) + ) + ) { + isConstraintCompatible = false; + } + + if (!isConstraintCompatible) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarDefaultConstraintMismatch(), + defaultValueNode + ); + } + } + } + + function createTypeVarTupleType(errorNode: ExpressionNode, classType: ClassType, argList: Arg[]): Type | undefined { + let typeVarName = ''; + + if (argList.length === 0) { + addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.typeVarFirstArg(), errorNode); + return undefined; + } + + const firstArg = argList[0]; + if (firstArg.valueExpression && firstArg.valueExpression.nodeType === ParseNodeType.StringList) { + typeVarName = firstArg.valueExpression.d.strings.map((s) => s.d.value).join(''); + } else { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarFirstArg(), + firstArg.valueExpression || errorNode + ); + } + + const typeVar = TypeBase.cloneAsSpecialForm( + TypeVarType.createInstantiable(typeVarName, TypeVarKind.TypeVarTuple), + ClassType.cloneAsInstance(classType) + ); + typeVar.shared.defaultType = makeTupleObject(evaluatorInterface, [ + { type: UnknownType.create(), isUnbounded: true }, + ]); + + // Parse the remaining parameters. + for (let i = 1; i < argList.length; i++) { + const paramNameNode = argList[i].name; + const paramName = paramNameNode ? 
paramNameNode.d.value : undefined; + + if (paramName) { + if (paramName === 'default') { + const expr = argList[i].valueExpression; + if (expr) { + const defaultType = getTypeVarTupleDefaultType(expr, /* isPep695Syntax */ false); + if (defaultType) { + typeVar.shared.defaultType = defaultType; + typeVar.shared.isDefaultExplicit = true; + } + } + + const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); + if ( + !fileInfo.isStubFile && + PythonVersion.isLessThan(fileInfo.executionEnvironment.pythonVersion, pythonVersion3_13) && + classType.shared.moduleName !== 'typing_extensions' + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarDefaultIllegal(), + expr! + ); + } + } else { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarTupleUnknownParam().format({ name: argList[i].name?.d.value || '?' }), + argList[i].node?.d.name || argList[i].valueExpression || errorNode + ); + } + } else { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarTupleConstraints(), + argList[i].valueExpression || errorNode + ); + } + } + + return typeVar; + } + + function getTypeVarTupleDefaultType(node: ExpressionNode, isPep695Syntax: boolean): Type | undefined { + const argType = getTypeOfExpressionExpectingType(node, { + allowUnpackedTuple: true, + allowTypeVarsWithoutScopeId: true, + forwardRefs: isPep695Syntax, + typeExpression: true, + }).type; + const isUnpackedTuple = isClass(argType) && isTupleClass(argType) && argType.priv.isUnpacked; + const isUnpackedTypeVar = isUnpackedTypeVarTuple(argType); + + if (!isUnpackedTuple && !isUnpackedTypeVar) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.typeVarTupleDefaultNotUnpacked(), node); + return undefined; + } + + return convertToInstance(argType); + } + + function createParamSpecType(errorNode: ExpressionNode, classType: ClassType, argList: Arg[]): Type | undefined { + if (argList.length === 0) { + 
addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.paramSpecFirstArg(), errorNode); + return undefined; + } + + const firstArg = argList[0]; + let paramSpecName = ''; + if (firstArg.valueExpression && firstArg.valueExpression.nodeType === ParseNodeType.StringList) { + paramSpecName = firstArg.valueExpression.d.strings.map((s) => s.d.value).join(''); + } else { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.paramSpecFirstArg(), + firstArg.valueExpression || errorNode + ); + } + + const paramSpec = TypeBase.cloneAsSpecialForm( + TypeVarType.createInstantiable(paramSpecName, TypeVarKind.ParamSpec), + ClassType.cloneAsInstance(classType) + ); + + paramSpec.shared.defaultType = ParamSpecType.getUnknown(); + + // Parse the remaining parameters. + for (let i = 1; i < argList.length; i++) { + const paramNameNode = argList[i].name; + const paramName = paramNameNode ? paramNameNode.d.value : undefined; + + if (paramName) { + if (paramName === 'default') { + const expr = argList[i].valueExpression; + if (expr) { + const defaultType = getParamSpecDefaultType(expr, /* isPep695Syntax */ false); + if (defaultType) { + paramSpec.shared.defaultType = defaultType; + paramSpec.shared.isDefaultExplicit = true; + } + } + + const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); + if ( + !fileInfo.isStubFile && + PythonVersion.isLessThan(fileInfo.executionEnvironment.pythonVersion, pythonVersion3_13) && + classType.shared.moduleName !== 'typing_extensions' + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarDefaultIllegal(), + expr! 
+ ); + } + } else { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.paramSpecUnknownParam().format({ name: paramName }), + paramNameNode || argList[i].valueExpression || errorNode + ); + } + } else { + addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.paramSpecUnknownArg(), + argList[i].valueExpression || errorNode + ); + break; + } + } + + return paramSpec; + } + + function getParamSpecDefaultType(node: ExpressionNode, isPep695Syntax: boolean): Type | undefined { + const functionType = FunctionType.createSynthesizedInstance('', FunctionTypeFlags.ParamSpecValue); + + if (node.nodeType === ParseNodeType.Ellipsis) { + FunctionType.addDefaultParams(functionType); + functionType.shared.flags |= FunctionTypeFlags.GradualCallableForm; + return functionType; + } + + if (node.nodeType === ParseNodeType.List) { + node.d.items.forEach((paramExpr, index) => { + const typeResult = getTypeOfExpressionExpectingType(paramExpr, { + allowTypeVarsWithoutScopeId: true, + forwardRefs: isPep695Syntax, + typeExpression: true, + }); + + FunctionType.addParam( + functionType, + FunctionParam.create( + ParamCategory.Simple, + convertToInstance(typeResult.type), + FunctionParamFlags.NameSynthesized | FunctionParamFlags.TypeDeclared, + `__p${index}` + ) + ); + }); + + if (node.d.items.length > 0) { + FunctionType.addPositionOnlyParamSeparator(functionType); + } + + // Update the type cache so we don't attempt to re-evaluate this node. + // The type doesn't matter, so use Any. 
+ writeTypeCache(node, { type: AnyType.create() }, /* flags */ undefined); + return functionType; + } else { + const typeResult = getTypeOfExpressionExpectingType(node, { + allowParamSpec: true, + allowTypeVarsWithoutScopeId: true, + allowEllipsis: true, + typeExpression: true, + }); + + if (typeResult.typeErrors) { + return undefined; + } + + if (isParamSpec(typeResult.type)) { + FunctionType.addParamSpecVariadics(functionType, typeResult.type); + return functionType; + } + + if ( + isClassInstance(typeResult.type) && + ClassType.isBuiltIn(typeResult.type, ['EllipsisType', 'ellipsis']) + ) { + FunctionType.addDefaultParams(functionType); + return functionType; + } + } + + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.paramSpecDefaultNotTuple(), node); + + return undefined; + } + + // Handles a call to TypeAliasType(). This special form allows a caller to programmatically + // create a type alias as defined in PEP 695. If one or more of the arguments is incorrect, + // it returns undefined so the normal constructor evaluation can be performed (and type errors + // emitted). 
+ function createTypeAliasType(errorNode: ExpressionNode, argList: Arg[]): Type | undefined { + if (errorNode.nodeType !== ParseNodeType.Call || !errorNode.parent || argList.length < 2) { + return undefined; + } + + if ( + errorNode.parent.nodeType !== ParseNodeType.Assignment || + errorNode.parent.d.rightExpr !== errorNode || + errorNode.parent.d.leftExpr.nodeType !== ParseNodeType.Name + ) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.typeAliasTypeMustBeAssigned(), errorNode); + return undefined; + } + + const scope = ScopeUtils.getScopeForNode(errorNode); + if (scope) { + if (scope.type !== ScopeType.Class && scope.type !== ScopeType.Module && scope.type !== ScopeType.Builtin) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeAliasTypeBadScope(), + errorNode.parent.d.leftExpr + ); + } + } + + const nameNode = errorNode.parent.d.leftExpr; + + const firstArg = argList[0]; + if (firstArg.valueExpression && firstArg.valueExpression.nodeType === ParseNodeType.StringList) { + const typeAliasName = firstArg.valueExpression.d.strings.map((s) => s.d.value).join(''); + if (typeAliasName !== nameNode.d.value) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeAliasTypeNameMismatch(), + firstArg.valueExpression + ); + } + } else { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeAliasTypeNameArg(), + firstArg.valueExpression || errorNode + ); + return undefined; + } + + let valueExpr: ExpressionNode | undefined; + let typeParamsExpr: ExpressionNode | undefined; + + // Parse the remaining parameters. + for (let i = 1; i < argList.length; i++) { + const paramNameNode = argList[i].name; + const paramName = paramNameNode ? 
paramNameNode.d.value : undefined; + + if (paramName) { + if (paramName === 'type_params' && !typeParamsExpr) { + typeParamsExpr = argList[i].valueExpression; + } else if (paramName === 'value' && !valueExpr) { + valueExpr = argList[i].valueExpression; + } else { + return undefined; + } + } else if (i === 1) { + valueExpr = argList[i].valueExpression; + } else { + return undefined; + } + } + + // The value expression is not optional, so bail if it's not present. + if (!valueExpr) { + return undefined; + } + + let typeParams: TypeVarType[] | undefined; + if (typeParamsExpr) { + if (typeParamsExpr.nodeType !== ParseNodeType.Tuple) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeAliasTypeParamInvalid(), + typeParamsExpr + ); + return undefined; + } + + typeParams = []; + let isTypeParamListValid = true; + typeParamsExpr.d.items.map((expr) => { + let entryType = getTypeOfExpression( + expr, + EvalFlags.InstantiableType | EvalFlags.AllowTypeVarWithoutScopeId + ).type; + + if (isTypeVar(entryType)) { + if (entryType.priv.scopeId || (isTypeVarTuple(entryType) && entryType.priv.isUnpacked)) { + isTypeParamListValid = false; + } else { + entryType = TypeVarType.cloneForScopeId( + entryType, + ParseTreeUtils.getScopeIdForNode(nameNode), + nameNode.d.value, + TypeVarScopeType.TypeAlias + ); + } + + typeParams!.push(entryType); + } else { + isTypeParamListValid = false; + } + }); + + if (!isTypeParamListValid) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeAliasTypeParamInvalid(), + typeParamsExpr + ); + return undefined; + } + } + + return getTypeOfTypeAliasCommon( + nameNode, + nameNode, + valueExpr, + /* isPep695Syntax */ false, + /* typeParamNodes */ undefined, + () => typeParams + ); + } + + function getBooleanValue(node: ExpressionNode): boolean { + if (node.nodeType === ParseNodeType.Constant) { + if (node.d.constType === KeywordType.False) { + return false; + } else if (node.d.constType === KeywordType.True) 
{ + return true; + } + } + + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.expectedBoolLiteral(), node); + return false; + } + + function getFunctionFullName(functionNode: ParseNode, moduleName: string, functionName: string): string { + const nameParts: string[] = [functionName]; + + let curNode: ParseNode | undefined = functionNode; + + // Walk the parse tree looking for classes or functions. + while (curNode) { + curNode = ParseTreeUtils.getEnclosingClassOrFunction(curNode); + if (curNode) { + nameParts.push(curNode.d.name.d.value); + } + } + + nameParts.push(moduleName); + + return nameParts.reverse().join('.'); + } + + // Implements the semantics of the NewType call as documented + // in the Python specification: The static type checker will treat + // the new type as if it were a subclass of the original type. + function createNewType(errorNode: ExpressionNode, argList: Arg[]): ClassType | undefined { + const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); + let className = ''; + + if (argList.length !== 2) { + addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.newTypeParamCount(), errorNode); + return undefined; + } + + const nameArg = argList[0]; + if ( + nameArg.argCategory === ArgCategory.Simple && + nameArg.valueExpression && + nameArg.valueExpression.nodeType === ParseNodeType.StringList + ) { + className = nameArg.valueExpression.d.strings.map((s) => s.d.value).join(''); + } + + if (!className) { + addDiagnostic(DiagnosticRule.reportArgumentType, LocMessage.newTypeBadName(), argList[0].node ?? 
errorNode); + return undefined; + } + + if ( + errorNode.parent?.nodeType === ParseNodeType.Assignment && + errorNode.parent.d.leftExpr.nodeType === ParseNodeType.Name && + errorNode.parent.d.leftExpr.d.value !== className + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.newTypeNameMismatch(), + errorNode.parent.d.leftExpr + ); + return undefined; + } + + let baseClass = getTypeOfArgExpectingType(argList[1]).type; + let isBaseClassAny = false; + + if (isAnyOrUnknown(baseClass)) { + baseClass = prefetched?.objectClass ?? UnknownType.create(); + + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.newTypeAnyOrUnknown(), + argList[1].node ?? errorNode + ); + + isBaseClassAny = true; + } + + // Specifically disallow Annotated. + if ( + baseClass.props?.specialForm && + isClassInstance(baseClass.props.specialForm) && + ClassType.isBuiltIn(baseClass.props.specialForm, 'Annotated') + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.newTypeNotAClass(), + argList[1].node || errorNode + ); + return undefined; + } + + if (!isInstantiableClass(baseClass)) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.newTypeNotAClass(), + argList[1].node || errorNode + ); + return undefined; + } + + if (ClassType.isProtocolClass(baseClass) || ClassType.isTypedDictClass(baseClass)) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.newTypeProtocolClass(), + argList[1].node || errorNode + ); + } else if (baseClass.priv.literalValue !== undefined) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.newTypeLiteral(), + argList[1].node || errorNode + ); + } + + const classType = ClassType.createInstantiable( + className, + ParseTreeUtils.getClassFullName(errorNode, fileInfo.moduleName, className), + fileInfo.moduleName, + fileInfo.fileUri, + ClassTypeFlags.Final | ClassTypeFlags.NewTypeClass | ClassTypeFlags.ValidTypeAliasClass, + 
ParseTreeUtils.getTypeSourceId(errorNode), + /* declaredMetaclass */ undefined, + baseClass.shared.effectiveMetaclass + ); + classType.shared.baseClasses.push(isBaseClassAny ? AnyType.create() : baseClass); + computeMroLinearization(classType); + + if (!isBaseClassAny) { + // Synthesize an __init__ method that accepts only the specified type. + const initType = FunctionType.createSynthesizedInstance('__init__'); + FunctionType.addParam( + initType, + FunctionParam.create(ParamCategory.Simple, AnyType.create(), FunctionParamFlags.TypeDeclared, 'self') + ); + FunctionType.addParam( + initType, + FunctionParam.create( + ParamCategory.Simple, + ClassType.cloneAsInstance(baseClass), + FunctionParamFlags.TypeDeclared, + '_x' + ) + ); + initType.shared.declaredReturnType = getNoneType(); + ClassType.getSymbolTable(classType).set( + '__init__', + Symbol.createWithType(SymbolFlags.ClassMember, initType) + ); + + // Synthesize a trivial __new__ method. + const newType = FunctionType.createSynthesizedInstance('__new__', FunctionTypeFlags.ConstructorMethod); + FunctionType.addParam( + newType, + FunctionParam.create(ParamCategory.Simple, AnyType.create(), FunctionParamFlags.TypeDeclared, 'cls') + ); + FunctionType.addDefaultParams(newType); + newType.shared.declaredReturnType = ClassType.cloneAsInstance(classType); + newType.priv.constructorTypeVarScopeId = getTypeVarScopeId(classType); + ClassType.getSymbolTable(classType).set('__new__', Symbol.createWithType(SymbolFlags.ClassMember, newType)); + } + + return classType; + } + + // Implements the semantics of the multi-parameter variant of the "type" call. 
+ function createClassFromMetaclass( + errorNode: ExpressionNode, + argList: Arg[], + metaclass: ClassType + ): ClassType | undefined { + const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); + const arg0Type = getTypeOfArg(argList[0], /* inferenceContext */ undefined).type; + if (!isClassInstance(arg0Type) || !ClassType.isBuiltIn(arg0Type, 'str')) { + return undefined; + } + const className = (arg0Type.priv.literalValue as string) || '_'; + + const arg1Type = getTypeOfArg(argList[1], /* inferenceContext */ undefined).type; + + // TODO - properly handle case where tuple of base classes is provided. + if (!isClassInstance(arg1Type) || !isTupleClass(arg1Type) || arg1Type.priv.tupleTypeArgs === undefined) { + return undefined; + } + + const classType = ClassType.createInstantiable( + className, + ParseTreeUtils.getClassFullName(errorNode, fileInfo.moduleName, className), + fileInfo.moduleName, + fileInfo.fileUri, + ClassTypeFlags.ValidTypeAliasClass, + ParseTreeUtils.getTypeSourceId(errorNode), + metaclass, + arg1Type.shared.effectiveMetaclass + ); + arg1Type.priv.tupleTypeArgs.forEach((typeArg) => { + const specializedType = makeTopLevelTypeVarsConcrete(typeArg.type); + + if (isEffectivelyInstantiable(specializedType)) { + classType.shared.baseClasses.push(specializedType); + } else { + classType.shared.baseClasses.push(UnknownType.create()); + } + }); + + if (!computeMroLinearization(classType)) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.methodOrdering(), errorNode); + } + + return classType; + } + + function getTypeOfConstant(node: ConstantNode, flags: EvalFlags): TypeResult { + let type: Type | undefined; + + if (node.d.constType === KeywordType.None) { + if (prefetched?.noneTypeClass) { + type = + (flags & EvalFlags.InstantiableType) !== 0 + ? 
prefetched.noneTypeClass + : convertToInstance(prefetched.noneTypeClass); + + if (isTypeFormSupported(node)) { + type = TypeBase.cloneWithTypeForm(type, convertToInstance(type)); + } + } + } else if ( + node.d.constType === KeywordType.True || + node.d.constType === KeywordType.False || + node.d.constType === KeywordType.Debug + ) { + type = getBuiltInObject(node, 'bool'); + + // For True and False, we can create truthy and falsy + // versions of 'bool'. + if (type && isClassInstance(type)) { + if (node.d.constType === KeywordType.True) { + type = ClassType.cloneWithLiteral(type, /* value */ true); + } else if (node.d.constType === KeywordType.False) { + type = ClassType.cloneWithLiteral(type, /* value */ false); + } + } + } + + return { type: type ?? UnknownType.create() }; + } + + function getTypeOfMagicMethodCall( + objType: Type, + methodName: string, + argList: TypeResult[], + errorNode: ExpressionNode, + inferenceContext?: InferenceContext, + diag?: DiagnosticAddendum + ): TypeResult | undefined { + let magicMethodSupported = true; + let isIncomplete = false; + let deprecationInfo: MagicMethodDeprecationInfo | undefined; + const overloadsUsedForCall: FunctionType[] = []; + + // Create a helper lambda for object subtypes. 
+ const handleSubtype = (subtype: ClassType | TypeVarType) => { + let magicMethodType: Type | undefined; + const concreteSubtype = makeTopLevelTypeVarsConcrete(subtype); + + if (isClass(concreteSubtype)) { + magicMethodType = getBoundMagicMethod(concreteSubtype, methodName, subtype, errorNode, diag); + } + + if (magicMethodType) { + const functionArgs: Arg[] = argList.map((arg) => { + return { + argCategory: ArgCategory.Simple, + typeResult: arg, + }; + }); + + let callResult: CallResult | undefined; + + callResult = useSpeculativeMode(errorNode, () => { + assert(magicMethodType !== undefined); + return validateCallArgs( + errorNode, + functionArgs, + { type: magicMethodType }, + /* constraints */ undefined, + /* skipUnknownArgCheck */ true, + inferenceContext + ); + }); + + // If there were errors with the expected type, try + // to evaluate without the expected type. + if (callResult.argumentErrors && inferenceContext) { + callResult = useSpeculativeMode(errorNode, () => { + assert(magicMethodType !== undefined); + return validateCallArgs( + errorNode, + functionArgs, + { type: magicMethodType }, + /* constraints */ undefined, + /* skipUnknownArgCheck */ true, + /* inferenceContext */ undefined + ); + }); + } + + if (callResult.argumentErrors) { + magicMethodSupported = false; + } else if (callResult.overloadsUsedForCall) { + callResult.overloadsUsedForCall.forEach((overload) => { + overloadsUsedForCall.push(overload); + + // If one of the overloads is deprecated, note the message. 
+ if (overload.shared.deprecatedMessage && isClass(concreteSubtype)) { + deprecationInfo = { + deprecatedMessage: overload.shared.deprecatedMessage, + className: concreteSubtype.shared.name, + methodName, + }; + } + }); + } + + if (callResult.isTypeIncomplete) { + isIncomplete = true; + } + + return callResult.returnType; + } + + magicMethodSupported = false; + return undefined; + }; + + const returnType = mapSubtypes(objType, (subtype) => { + if (isAnyOrUnknown(subtype)) { + return subtype; + } + + if (isClassInstance(subtype) || isInstantiableClass(subtype) || isTypeVar(subtype)) { + return handleSubtype(subtype); + } + + if (isNoneInstance(subtype)) { + if (prefetched?.objectClass && isInstantiableClass(prefetched.objectClass)) { + // Use 'object' for 'None'. + return handleSubtype(ClassType.cloneAsInstance(prefetched.objectClass)); + } + } + + if (isNoneTypeClass(subtype)) { + if (prefetched?.typeClass && isInstantiableClass(prefetched.typeClass)) { + // Use 'type' for 'type[None]'. + return handleSubtype(ClassType.cloneAsInstance(prefetched.typeClass)); + } + } + + magicMethodSupported = false; + return undefined; + }); + + if (!magicMethodSupported) { + return undefined; + } + + return { type: returnType, isIncomplete, magicMethodDeprecationInfo: deprecationInfo, overloadsUsedForCall }; + } + + function getTypeOfDictionary( + node: DictionaryNode, + flags: EvalFlags, + inferenceContext: InferenceContext | undefined + ): TypeResult { + if ((flags & EvalFlags.TypeExpression) !== 0 && node.parent?.nodeType !== ParseNodeType.Argument) { + const diag = new DiagnosticAddendum(); + diag.addMessage(LocAddendum.useDictInstead()); + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.dictInAnnotation() + diag.getString(), node); + } + + // If the expected type is a union, analyze for each of the subtypes + // to find one that matches. 
+ let expectedType = inferenceContext?.expectedType; + + if (inferenceContext && isUnion(inferenceContext.expectedType)) { + let matchingSubtype: Type | undefined; + let matchingSubtypeResult: TypeResult | undefined; + + doForEachSubtype( + inferenceContext.expectedType, + (subtype) => { + // Use shortcut if we've already found a match. + if (matchingSubtypeResult && !matchingSubtypeResult.typeErrors) { + return; + } + + const subtypeResult = useSpeculativeMode(node, () => { + return getTypeOfDictionaryWithContext(node, flags, makeInferenceContext(subtype)); + }); + + if (subtypeResult && assignType(subtype, subtypeResult.type)) { + // If this is the first result we're seeing or it's the first result + // without errors, select it as the match. + if (!matchingSubtypeResult || (matchingSubtypeResult.typeErrors && !subtypeResult.typeErrors)) { + matchingSubtype = subtype; + matchingSubtypeResult = subtypeResult; + } + } + }, + /* sortSubtypes */ true + ); + + expectedType = matchingSubtype; + } + + let expectedTypeDiagAddendum = undefined; + if (expectedType) { + expectedTypeDiagAddendum = new DiagnosticAddendum(); + const result = getTypeOfDictionaryWithContext( + node, + flags, + makeInferenceContext(expectedType), + expectedTypeDiagAddendum + ); + if (result) { + return result; + } + } + + const result = getTypeOfDictionaryInferred(node, flags, /* hasExpectedType */ !!inferenceContext?.expectedType); + return { ...result, expectedTypeDiagAddendum }; + } + + function getTypeOfDictionaryWithContext( + node: DictionaryNode, + flags: EvalFlags, + inferenceContext: InferenceContext, + expectedDiagAddendum?: DiagnosticAddendum + ): TypeResult | undefined { + inferenceContext.expectedType = transformPossibleRecursiveTypeAlias(inferenceContext.expectedType); + let concreteExpectedType = makeTopLevelTypeVarsConcrete(inferenceContext.expectedType); + + if (!isClassInstance(concreteExpectedType)) { + return undefined; + } + + const keyTypes: TypeResultWithNode[] = []; + 
const valueTypes: TypeResultWithNode[] = []; + let isIncomplete = false; + let typeErrors = false; + + // Handle TypedDict's as a special case. + if (ClassType.isTypedDictClass(concreteExpectedType)) { + // Remove any conditions associated with the type so the resulting type isn't + // considered compatible with a bound TypeVar. + concreteExpectedType = TypeBase.cloneForCondition(concreteExpectedType, undefined); + + const expectedTypedDictEntries = getTypedDictMembersForClass(evaluatorInterface, concreteExpectedType); + + // Infer the key and value types if possible. + const keyValueTypeResult = getKeyAndValueTypesFromDictionary( + node, + flags, + keyTypes, + valueTypes, + /* forceStrictInference */ true, + /* isValueTypeInvariant */ true, + /* expectedKeyType */ undefined, + /* expectedValueType */ undefined, + expectedTypedDictEntries, + expectedDiagAddendum + ); + + if (keyValueTypeResult.isIncomplete) { + isIncomplete = true; + } + + if (keyValueTypeResult.typeErrors) { + typeErrors = true; + } + + const resultTypedDict = assignToTypedDict( + evaluatorInterface, + concreteExpectedType, + keyTypes, + valueTypes, + // Don't overwrite existing expectedDiagAddendum messages if they were + // already provided by getKeyValueTypesFromDictionary. + expectedDiagAddendum?.isEmpty() ? 
expectedDiagAddendum : undefined + ); + if (resultTypedDict) { + return { + type: resultTypedDict, + isIncomplete, + }; + } + + return undefined; + } + + let expectedKeyType: Type; + let expectedValueType: Type; + + if (isAnyOrUnknown(inferenceContext.expectedType)) { + expectedKeyType = inferenceContext.expectedType; + expectedValueType = inferenceContext.expectedType; + } else { + const builtInDict = getBuiltInObject(node, 'dict'); + if (!isClassInstance(builtInDict)) { + return undefined; + } + + const dictConstraints = new ConstraintTracker(); + if ( + !addConstraintsForExpectedType( + evaluatorInterface, + builtInDict, + inferenceContext.expectedType, + dictConstraints, + ParseTreeUtils.getTypeVarScopesForNode(node), + node.start + ) + ) { + return undefined; + } + + const specializedDict = solveAndApplyConstraints( + ClassType.cloneAsInstantiable(builtInDict), + dictConstraints + ) as ClassType; + if (!specializedDict.priv.typeArgs || specializedDict.priv.typeArgs.length !== 2) { + return undefined; + } + + expectedKeyType = specializedDict.priv.typeArgs[0]; + expectedValueType = specializedDict.priv.typeArgs[1]; + } + + // Dict and MutableMapping types have invariant value types, so they + // cannot be narrowed further. Other super-types like Mapping, Collection, + // and Iterable use covariant value types, so they can be narrowed. + let isValueTypeInvariant = false; + if (isClassInstance(inferenceContext.expectedType)) { + if (inferenceContext.expectedType.shared.typeParams.length >= 2) { + const valueTypeParam = inferenceContext.expectedType.shared.typeParams[1]; + if (TypeVarType.getVariance(valueTypeParam) === Variance.Invariant) { + isValueTypeInvariant = true; + } + } + } + + // Infer the key and value types if possible. 
+ const keyValueResult = getKeyAndValueTypesFromDictionary( + node, + flags, + keyTypes, + valueTypes, + /* forceStrictInference */ true, + isValueTypeInvariant, + expectedKeyType, + expectedValueType, + undefined, + expectedDiagAddendum + ); + + if (keyValueResult.isIncomplete) { + isIncomplete = true; + } + + if (keyValueResult.typeErrors) { + typeErrors = true; + } + + const specializedKeyType = inferTypeArgFromExpectedEntryType( + makeInferenceContext(expectedKeyType), + keyTypes.map((result) => result.type), + /* isNarrowable */ false + ); + const specializedValueType = inferTypeArgFromExpectedEntryType( + makeInferenceContext(expectedValueType), + valueTypes.map((result) => result.type), + !isValueTypeInvariant + ); + if (!specializedKeyType || !specializedValueType) { + return undefined; + } + + const type = getBuiltInObject(node, 'dict', [specializedKeyType, specializedValueType]); + return { type, isIncomplete, typeErrors }; + } + + // Attempts to infer the type of a dictionary statement. If hasExpectedType + // is true, strict inference is used for the subexpressions. + function getTypeOfDictionaryInferred(node: DictionaryNode, flags: EvalFlags, hasExpectedType: boolean): TypeResult { + const fallbackType = hasExpectedType ? AnyType.create() : UnknownType.create(); + let keyType: Type = fallbackType; + let valueType: Type = fallbackType; + + const keyTypeResults: TypeResultWithNode[] = []; + const valueTypeResults: TypeResultWithNode[] = []; + + let isEmptyContainer = false; + let isIncomplete = false; + let typeErrors = false; + + // Infer the key and value types if possible. + const keyValueResult = getKeyAndValueTypesFromDictionary( + node, + flags, + keyTypeResults, + valueTypeResults, + /* forceStrictInference */ hasExpectedType, + /* isValueTypeInvariant */ false + ); + + if (keyValueResult.isIncomplete) { + isIncomplete = true; + } + + if (keyValueResult.typeErrors) { + typeErrors = true; + } + + // Strip any literal values and TypeForm types. 
+ const keyTypes = keyTypeResults.map((t) => + stripTypeForm(convertSpecialFormToRuntimeValue(stripLiteralValue(t.type), flags, !hasExpectedType)) + ); + const valueTypes = valueTypeResults.map((t) => + stripTypeForm(convertSpecialFormToRuntimeValue(stripLiteralValue(t.type), flags, !hasExpectedType)) + ); + + if (keyTypes.length > 0) { + if (AnalyzerNodeInfo.getFileInfo(node).diagnosticRuleSet.strictDictionaryInference || hasExpectedType) { + keyType = combineTypes(keyTypes); + } else { + keyType = areTypesSame(keyTypes, { ignorePseudoGeneric: true }) ? keyTypes[0] : fallbackType; + } + } else { + keyType = fallbackType; + } + + // If the value type differs and we're not using "strict inference mode", + // we need to back off because we can't properly represent the mappings + // between different keys and associated value types. If all the values + // are the same type, we'll assume that all values in this dictionary should + // be the same. + if (valueTypes.length > 0) { + if (AnalyzerNodeInfo.getFileInfo(node).diagnosticRuleSet.strictDictionaryInference || hasExpectedType) { + valueType = combineTypes(valueTypes); + } else { + valueType = areTypesSame(valueTypes, { ignorePseudoGeneric: true }) ? valueTypes[0] : fallbackType; + } + } else { + valueType = fallbackType; + isEmptyContainer = true; + } + + const dictClass = getBuiltInType(node, 'dict'); + const type = isInstantiableClass(dictClass) + ? 
ClassType.cloneAsInstance( + ClassType.specialize( + dictClass, + [keyType, valueType], + /* isTypeArgExplicit */ true, + /* includeSubclasses */ undefined, + /* tupleTypeArgs */ undefined, + isEmptyContainer + ) + ) + : UnknownType.create(); + + if (isIncomplete) { + if (getContainerDepth(type) > maxInferredContainerDepth) { + return { type: UnknownType.create() }; + } + } + + return { type, isIncomplete, typeErrors }; + } + + function getKeyAndValueTypesFromDictionary( + node: DictionaryNode, + flags: EvalFlags, + keyTypes: TypeResultWithNode[], + valueTypes: TypeResultWithNode[], + forceStrictInference: boolean, + isValueTypeInvariant: boolean, + expectedKeyType?: Type, + expectedValueType?: Type, + expectedTypedDictEntries?: TypedDictEntries, + expectedDiagAddendum?: DiagnosticAddendum + ): TypeResult { + let isIncomplete = false; + let typeErrors = false; + + // Mask out some of the flags that are not applicable for a dictionary key + // even if it appears within an inlined TypedDict annotation. + const keyFlags = flags & ~(EvalFlags.TypeExpression | EvalFlags.StrLiteralAsType | EvalFlags.InstantiableType); + + // Infer the key and value types if possible. + node.d.items.forEach((entryNode, index) => { + let addUnknown = true; + + if (entryNode.nodeType === ParseNodeType.DictionaryKeyEntry) { + const keyTypeResult = getTypeOfExpression( + entryNode.d.keyExpr, + keyFlags | EvalFlags.StripTupleLiterals, + makeInferenceContext( + expectedKeyType ?? (forceStrictInference ? 
NeverType.createNever() : undefined) + ) + ); + + if (keyTypeResult.isIncomplete) { + isIncomplete = true; + } + + if (keyTypeResult.typeErrors) { + typeErrors = true; + } + + const keyType = keyTypeResult.type; + + if (!keyTypeResult.isIncomplete && !keyTypeResult.typeErrors) { + verifySetEntryOrDictKeyIsHashable(entryNode.d.keyExpr, keyType, /* isDictKey */ true); + } + + if (expectedDiagAddendum && keyTypeResult.expectedTypeDiagAddendum) { + expectedDiagAddendum.addAddendum(keyTypeResult.expectedTypeDiagAddendum); + } + + let valueTypeResult: TypeResult; + let entryInferenceContext: InferenceContext | undefined; + + if ( + expectedTypedDictEntries && + isClassInstance(keyType) && + ClassType.isBuiltIn(keyType, 'str') && + isLiteralType(keyType) && + (expectedTypedDictEntries.knownItems.has(keyType.priv.literalValue as string) || + expectedTypedDictEntries.extraItems) + ) { + let effectiveValueType = + expectedTypedDictEntries.knownItems.get(keyType.priv.literalValue as string)?.valueType ?? + expectedTypedDictEntries.extraItems?.valueType; + if (effectiveValueType) { + const liveTypeVarScopes = ParseTreeUtils.getTypeVarScopesForNode(node); + effectiveValueType = transformExpectedType(effectiveValueType, liveTypeVarScopes, node.start); + } + entryInferenceContext = makeInferenceContext(effectiveValueType); + valueTypeResult = getTypeOfExpression( + entryNode.d.valueExpr, + flags | EvalFlags.StripTupleLiterals, + entryInferenceContext + ); + } else { + let effectiveValueType = + expectedValueType ?? (forceStrictInference ? 
NeverType.createNever() : undefined); + if (effectiveValueType) { + const liveTypeVarScopes = ParseTreeUtils.getTypeVarScopesForNode(node); + effectiveValueType = transformExpectedType(effectiveValueType, liveTypeVarScopes, node.start); + } + entryInferenceContext = makeInferenceContext(effectiveValueType); + valueTypeResult = getTypeOfExpression( + entryNode.d.valueExpr, + flags | EvalFlags.StripTupleLiterals, + entryInferenceContext + ); + } + + if (entryInferenceContext && !valueTypeResult.typeErrors) { + const fromExpectedType = inferTypeArgFromExpectedEntryType( + entryInferenceContext, + [valueTypeResult.type], + !isValueTypeInvariant + ); + + if (fromExpectedType) { + valueTypeResult = { ...valueTypeResult, type: fromExpectedType }; + } + } + + if (expectedDiagAddendum && valueTypeResult.expectedTypeDiagAddendum) { + expectedDiagAddendum.addAddendum(valueTypeResult.expectedTypeDiagAddendum); + } + + const valueType = valueTypeResult.type; + if (valueTypeResult.isIncomplete) { + isIncomplete = true; + } + + if (valueTypeResult.typeErrors) { + typeErrors = true; + } + + if (forceStrictInference || index < maxEntriesToUseForInference) { + // If an existing key has the same literal type, delete the previous + // key since we're overwriting it here. 
+ if (isClass(keyType) && isLiteralType(keyType)) { + const existingIndex = keyTypes.findIndex((kt) => isTypeSame(keyType, kt.type)); + if (existingIndex >= 0) { + keyTypes.splice(existingIndex, 1); + valueTypes.splice(existingIndex, 1); + } + } + + keyTypes.push({ node: entryNode.d.keyExpr, type: keyType }); + valueTypes.push({ node: entryNode.d.valueExpr, type: valueType }); + } + + addUnknown = false; + } else if (entryNode.nodeType === ParseNodeType.DictionaryExpandEntry) { + let expectedType: Type | undefined; + if (expectedKeyType && expectedValueType) { + if ( + prefetched?.supportsKeysAndGetItemClass && + isInstantiableClass(prefetched.supportsKeysAndGetItemClass) + ) { + expectedType = ClassType.cloneAsInstance( + ClassType.specialize(prefetched.supportsKeysAndGetItemClass, [ + expectedKeyType, + expectedValueType, + ]) + ); + } + } + + const entryInferenceContext = makeInferenceContext(expectedType); + let unexpandedTypeResult = getTypeOfExpression( + entryNode.d.expr, + flags | EvalFlags.StripTupleLiterals, + entryInferenceContext + ); + + if (entryInferenceContext && !unexpandedTypeResult.typeErrors) { + const fromExpectedType = inferTypeArgFromExpectedEntryType( + entryInferenceContext, + [unexpandedTypeResult.type], + !isValueTypeInvariant + ); + + if (fromExpectedType) { + unexpandedTypeResult = { ...unexpandedTypeResult, type: fromExpectedType }; + } + } + + if (unexpandedTypeResult.isIncomplete) { + isIncomplete = true; + } + + if (unexpandedTypeResult.typeErrors) { + typeErrors = true; + } + + const unexpandedType = unexpandedTypeResult.type; + + if (isAnyOrUnknown(unexpandedType)) { + if (forceStrictInference || index < maxEntriesToUseForInference) { + keyTypes.push({ node: entryNode, type: unexpandedType }); + valueTypes.push({ node: entryNode, type: unexpandedType }); + } + addUnknown = false; + } else if (isClassInstance(unexpandedType) && ClassType.isTypedDictClass(unexpandedType)) { + // Handle dictionary expansion for a TypedDict. 
+ if (prefetched?.strClass && isInstantiableClass(prefetched.strClass)) { + const strObject = ClassType.cloneAsInstance(prefetched.strClass); + const tdEntries = getTypedDictMembersForClass( + evaluatorInterface, + unexpandedType, + /* allowNarrowed */ true + ); + + tdEntries.knownItems.forEach((entry, name) => { + if (entry.isRequired || entry.isProvided) { + keyTypes.push({ + node: entryNode, + type: ClassType.cloneWithLiteral(strObject, name), + }); + valueTypes.push({ node: entryNode, type: entry.valueType }); + } + }); + + if (!expectedTypedDictEntries) { + keyTypes.push({ node: entryNode, type: ClassType.cloneAsInstance(strObject) }); + valueTypes.push({ + node: entryNode, + type: tdEntries.extraItems?.valueType ?? getObjectType(), + }); + } + + addUnknown = false; + } + } else if ( + prefetched?.supportsKeysAndGetItemClass && + isInstantiableClass(prefetched.supportsKeysAndGetItemClass) + ) { + const mappingConstraints = new ConstraintTracker(); + + const supportsKeysAndGetItemClass = selfSpecializeClass(prefetched.supportsKeysAndGetItemClass); + + if ( + assignType( + ClassType.cloneAsInstance(supportsKeysAndGetItemClass), + unexpandedType, + /* diag */ undefined, + mappingConstraints, + AssignTypeFlags.RetainLiteralsForTypeVar + ) + ) { + const specializedMapping = solveAndApplyConstraints( + supportsKeysAndGetItemClass, + mappingConstraints + ) as ClassType; + const typeArgs = specializedMapping.priv.typeArgs; + if (typeArgs && typeArgs.length >= 2) { + if (forceStrictInference || index < maxEntriesToUseForInference) { + keyTypes.push({ node: entryNode, type: typeArgs[0] }); + valueTypes.push({ node: entryNode, type: typeArgs[1] }); + } + addUnknown = false; + } + } else { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.dictUnpackIsNotMapping(), + entryNode + ); + } + } + } else if (entryNode.nodeType === ParseNodeType.Comprehension) { + const dictEntryTypeResult = getElementTypeFromComprehension( + entryNode, + flags | 
EvalFlags.StripTupleLiterals, + expectedValueType, + expectedKeyType + ); + const dictEntryType = dictEntryTypeResult.type; + if (dictEntryTypeResult.isIncomplete) { + isIncomplete = true; + } + + if (dictEntryTypeResult.typeErrors) { + typeErrors = true; + } + + // The result should be a tuple. + if (isClassInstance(dictEntryType) && isTupleClass(dictEntryType)) { + const typeArgs = dictEntryType.priv.tupleTypeArgs?.map((t) => t.type); + if (typeArgs && typeArgs.length === 2) { + if (forceStrictInference || index < maxEntriesToUseForInference) { + keyTypes.push({ node: entryNode, type: typeArgs[0] }); + valueTypes.push({ node: entryNode, type: typeArgs[1] }); + } + addUnknown = false; + } + } + } + + if (addUnknown) { + if (forceStrictInference || index < maxEntriesToUseForInference) { + keyTypes.push({ node: entryNode, type: UnknownType.create() }); + valueTypes.push({ node: entryNode, type: UnknownType.create() }); + } + } + }); + + return { type: AnyType.create(), isIncomplete, typeErrors }; + } + + function getTypeOfListOrSet( + node: ListNode | SetNode, + flags: EvalFlags, + inferenceContext: InferenceContext | undefined + ): TypeResult { + if ( + (flags & EvalFlags.TypeExpression) !== 0 && + node.nodeType === ParseNodeType.List && + node.parent?.nodeType !== ParseNodeType.Argument + ) { + const diag = new DiagnosticAddendum(); + diag.addMessage(LocAddendum.useListInstead()); + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.listInAnnotation() + diag.getString(), node); + } + + flags &= ~(EvalFlags.TypeExpression | EvalFlags.StrLiteralAsType | EvalFlags.InstantiableType); + + // If the expected type is a union, recursively call for each of the subtypes + // to find one that matches. 
+ let expectedType = inferenceContext?.expectedType; + + if (inferenceContext && isUnion(inferenceContext.expectedType)) { + let matchingSubtype: Type | undefined; + let matchingSubtypeResult: TypeResult | undefined; + + doForEachSubtype( + inferenceContext.expectedType, + (subtype) => { + // Use shortcut if we've already found a match. + if (matchingSubtypeResult && !matchingSubtypeResult.typeErrors) { + return; + } + + const subtypeResult = useSpeculativeMode(node, () => { + return getTypeOfListOrSetWithContext(node, flags, makeInferenceContext(subtype)); + }); + + if (subtypeResult && assignType(subtype, subtypeResult.type)) { + // If this is the first result we're seeing or it's the first result + // without errors, select it as the match. + if (!matchingSubtypeResult || (matchingSubtypeResult.typeErrors && !subtypeResult.typeErrors)) { + matchingSubtype = subtype; + matchingSubtypeResult = subtypeResult; + } + } + }, + /* sortSubtypes */ true + ); + + expectedType = matchingSubtype; + } + + let expectedTypeDiagAddendum: DiagnosticAddendum | undefined; + if (expectedType) { + const result = getTypeOfListOrSetWithContext(node, flags, makeInferenceContext(expectedType)); + if (result && !result.typeErrors) { + return result; + } + + expectedTypeDiagAddendum = result?.expectedTypeDiagAddendum; + } + + const typeResult = getTypeOfListOrSetInferred( + node, + flags, + /* hasExpectedType */ !!inferenceContext?.expectedType + ); + return { ...typeResult, expectedTypeDiagAddendum }; + } + + // Attempts to determine the type of a list or set statement based on an expected type. + // Returns undefined if that type cannot be honored. + function getTypeOfListOrSetWithContext( + node: ListNode | SetNode, + flags: EvalFlags, + inferenceContext: InferenceContext + ): TypeResult | undefined { + const builtInClassName = node.nodeType === ParseNodeType.List ? 
'list' : 'set'; + inferenceContext.expectedType = transformPossibleRecursiveTypeAlias(inferenceContext.expectedType); + + let isIncomplete = false; + let typeErrors = false; + const verifyHashable = node.nodeType === ParseNodeType.Set; + + const expectedEntryType = getExpectedEntryTypeForIterable( + node, + getBuiltInType(node, builtInClassName), + inferenceContext + ); + if (!expectedEntryType) { + return undefined; + } + + const entryTypes: Type[] = []; + const expectedTypeDiagAddendum = new DiagnosticAddendum(); + node.d.items.forEach((entry) => { + let entryTypeResult: TypeResult; + + if (entry.nodeType === ParseNodeType.Comprehension) { + entryTypeResult = getElementTypeFromComprehension( + entry, + flags | EvalFlags.StripTupleLiterals, + expectedEntryType + ); + } else { + entryTypeResult = getTypeOfExpression( + entry, + flags | EvalFlags.StripTupleLiterals, + makeInferenceContext(expectedEntryType) + ); + } + + entryTypes.push(entryTypeResult.type); + + if (entryTypeResult.isIncomplete) { + isIncomplete = true; + } + + if (entryTypeResult.typeErrors) { + typeErrors = true; + } + + if (entryTypeResult.expectedTypeDiagAddendum) { + expectedTypeDiagAddendum.addAddendum(entryTypeResult.expectedTypeDiagAddendum); + } + + if (verifyHashable && !entryTypeResult.isIncomplete && !entryTypeResult.typeErrors) { + verifySetEntryOrDictKeyIsHashable(entry, entryTypeResult.type, /* isDictKey */ false); + } + }); + + let isTypeInvariant = false; + + if (isClassInstance(inferenceContext.expectedType)) { + inferVarianceForClass(inferenceContext.expectedType); + + if ( + inferenceContext.expectedType.shared.typeParams.some( + (t) => TypeVarType.getVariance(t) === Variance.Invariant + ) + ) { + isTypeInvariant = true; + } + } + + const specializedEntryType = inferTypeArgFromExpectedEntryType( + makeInferenceContext(expectedEntryType), + entryTypes, + !isTypeInvariant + ); + if (!specializedEntryType) { + return { type: UnknownType.create(), isIncomplete, typeErrors: true, 
expectedTypeDiagAddendum }; + } + + const type = getBuiltInObject(node, builtInClassName, [specializedEntryType]); + return { type, isIncomplete, typeErrors, expectedTypeDiagAddendum }; + } + + function getExpectedEntryTypeForIterable( + node: ListNode | SetNode | ComprehensionNode, + expectedClassType: Type | undefined, + inferenceContext?: InferenceContext + ): Type | undefined { + if (!inferenceContext) { + return undefined; + } + + if (!expectedClassType || !isInstantiableClass(expectedClassType)) { + return undefined; + } + + if (isAnyOrUnknown(inferenceContext.expectedType)) { + return inferenceContext.expectedType; + } + + if (!isClassInstance(inferenceContext.expectedType)) { + return undefined; + } + + const constraints = new ConstraintTracker(); + if ( + !addConstraintsForExpectedType( + evaluatorInterface, + ClassType.cloneAsInstance(expectedClassType), + inferenceContext.expectedType, + constraints, + ParseTreeUtils.getTypeVarScopesForNode(node), + node.start + ) + ) { + return undefined; + } + + const specializedListOrSet = solveAndApplyConstraints(expectedClassType, constraints) as ClassType; + if (!specializedListOrSet.priv.typeArgs) { + return undefined; + } + + return specializedListOrSet.priv.typeArgs[0]; + } + + // Attempts to infer the type of a list or set statement with no "expected type". + function getTypeOfListOrSetInferred( + node: ListNode | SetNode, + flags: EvalFlags, + hasExpectedType: boolean + ): TypeResult { + const builtInClassName = node.nodeType === ParseNodeType.List ? 
'list' : 'set'; + const verifyHashable = node.nodeType === ParseNodeType.Set; + let isEmptyContainer = false; + let isIncomplete = false; + let typeErrors = false; + + let entryTypes: Type[] = []; + node.d.items.forEach((entry, index) => { + let entryTypeResult: TypeResult; + + if (entry.nodeType === ParseNodeType.Comprehension && !entry.d.isGenerator) { + entryTypeResult = getElementTypeFromComprehension(entry, flags | EvalFlags.StripTupleLiterals); + } else { + entryTypeResult = getTypeOfExpression(entry, flags | EvalFlags.StripTupleLiterals); + } + + entryTypeResult.type = stripTypeForm( + convertSpecialFormToRuntimeValue(entryTypeResult.type, flags, !hasExpectedType) + ); + + if (entryTypeResult.isIncomplete) { + isIncomplete = true; + } + + if (entryTypeResult.typeErrors) { + typeErrors = true; + } + + if (hasExpectedType || index < maxEntriesToUseForInference) { + entryTypes.push(entryTypeResult.type); + } + + if (verifyHashable && !entryTypeResult.isIncomplete && !entryTypeResult.typeErrors) { + verifySetEntryOrDictKeyIsHashable(entry, entryTypeResult.type, /* isDictKey */ false); + } + }); + + entryTypes = entryTypes.map((t) => stripLiteralValue(t)); + + let inferredEntryType: Type = hasExpectedType ? AnyType.create() : UnknownType.create(); + if (entryTypes.length > 0) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + // If there was an expected type or we're using strict list inference, + // combine the types into a union. + if ( + (builtInClassName === 'list' && fileInfo.diagnosticRuleSet.strictListInference) || + (builtInClassName === 'set' && fileInfo.diagnosticRuleSet.strictSetInference) || + hasExpectedType + ) { + inferredEntryType = combineTypes(entryTypes, { maxSubtypeCount: maxSubtypesForInferredType }); + } else { + // Is the list or set homogeneous? If so, use stricter rules. Otherwise relax the rules. + inferredEntryType = areTypesSame(entryTypes, { ignorePseudoGeneric: true }) + ? 
entryTypes[0] + : inferredEntryType; + } + } else { + isEmptyContainer = true; + } + + const listOrSetClass = getBuiltInType(node, builtInClassName); + const type = isInstantiableClass(listOrSetClass) + ? ClassType.cloneAsInstance( + ClassType.specialize( + listOrSetClass, + [inferredEntryType], + /* isTypeArgExplicit */ true, + /* includeSubclasses */ undefined, + /* tupleTypeArgs */ undefined, + isEmptyContainer + ) + ) + : UnknownType.create(); + + if (isIncomplete) { + if (getContainerDepth(type) > maxInferredContainerDepth) { + return { type: UnknownType.create() }; + } + } + + return { type, isIncomplete, typeErrors }; + } + + function verifySetEntryOrDictKeyIsHashable(entry: ExpressionNode, type: Type, isDictKey: boolean) { + // Verify that the type is hashable. + if (!isTypeHashable(type)) { + const diag = new DiagnosticAddendum(); + diag.addMessage(LocAddendum.unhashableType().format({ type: printType(type) })); + + const message = isDictKey ? LocMessage.unhashableDictKey() : LocMessage.unhashableSetEntry(); + + addDiagnostic(DiagnosticRule.reportUnhashable, message + diag.getString(), entry); + } + } + + function inferTypeArgFromExpectedEntryType( + inferenceContext: InferenceContext, + entryTypes: Type[], + isNarrowable: boolean + ): Type | undefined { + // If the expected type is Any, the resulting type becomes Any. + if (isAny(inferenceContext.expectedType)) { + return inferenceContext.expectedType; + } + + const constraints = new ConstraintTracker(); + const expectedType = inferenceContext.expectedType; + let isCompatible = true; + + entryTypes.forEach((entryType) => { + if (isCompatible && !assignType(expectedType, entryType, /* diag */ undefined, constraints)) { + isCompatible = false; + } + }); + + if (!isCompatible) { + return undefined; + } + + if (isNarrowable && entryTypes.length > 0) { + const combinedTypes = combineTypes(entryTypes); + return containsLiteralType(inferenceContext.expectedType) + ? 
combinedTypes + : stripLiteralValue(combinedTypes); + } + + return mapSubtypes( + solveAndApplyConstraints(inferenceContext.expectedType, constraints, { + replaceUnsolved: { + scopeIds: [], + tupleClassType: getTupleClassType(), + }, + }), + (subtype) => { + if (entryTypes.length !== 1) { + return subtype; + } + const entryType = entryTypes[0]; + + // If the entry type is a TypedDict instance, clone it with additional information. + if ( + isTypeSame(subtype, entryType, { ignoreTypedDictNarrowEntries: true }) && + isClass(subtype) && + isClass(entryType) && + ClassType.isTypedDictClass(entryType) + ) { + return ClassType.cloneForNarrowedTypedDictEntries(subtype, entryType.priv.typedDictNarrowedEntries); + } + + return subtype; + } + ); + } + + function getTypeOfYield(node: YieldNode): TypeResult { + let expectedYieldType: Type | undefined; + let sentType: Type | undefined; + let isIncomplete = false; + + const enclosingFunction = ParseTreeUtils.getEnclosingFunction(node); + if (enclosingFunction) { + const functionTypeInfo = getTypeOfFunction(enclosingFunction); + if (functionTypeInfo) { + let returnType = FunctionType.getEffectiveReturnType(functionTypeInfo.functionType); + if (returnType) { + const liveScopeIds = ParseTreeUtils.getTypeVarScopesForNode(node); + returnType = makeTypeVarsBound(returnType, liveScopeIds); + + expectedYieldType = getGeneratorYieldType(returnType, !!enclosingFunction.d.isAsync); + + const generatorTypeArgs = getGeneratorTypeArgs(returnType); + if (generatorTypeArgs && generatorTypeArgs.length >= 2) { + sentType = makeTypeVarsBound(generatorTypeArgs[1], liveScopeIds); + } + } + } + } + + if (node.d.expr) { + const exprResult = getTypeOfExpression( + node.d.expr, + /* flags */ undefined, + makeInferenceContext(expectedYieldType) + ); + if (exprResult.isIncomplete) { + isIncomplete = true; + } + } + + return { type: sentType || UnknownType.create(), isIncomplete }; + } + + function getTypeOfYieldFrom(node: YieldFromNode): TypeResult { + 
const yieldFromTypeResult = getTypeOfExpression(node.d.expr); + const yieldFromType = yieldFromTypeResult.type; + + const returnedType = mapSubtypes(yieldFromType, (yieldFromSubtype) => { + // Is the expression a Generator type? + let generatorTypeArgs = getGeneratorTypeArgs(yieldFromSubtype); + if (generatorTypeArgs) { + return generatorTypeArgs.length >= 2 ? generatorTypeArgs[2] : UnknownType.create(); + } + + // Handle old-style (pre-await) Coroutines as a special case. + if ( + isClassInstance(yieldFromSubtype) && + ClassType.isBuiltIn(yieldFromSubtype, ['Coroutine', 'CoroutineType']) + ) { + return UnknownType.create(); + } + + // Handle simple iterables. + const iterableType = + getTypeOfIterable(yieldFromTypeResult, /* isAsync */ false, node)?.type ?? UnknownType.create(); + + // Does the iterable return a Generator? + generatorTypeArgs = getGeneratorTypeArgs(iterableType); + return generatorTypeArgs && generatorTypeArgs.length >= 2 ? generatorTypeArgs[2] : UnknownType.create(); + }); + + return { type: returnedType }; + } + + function getTypeOfLambda(node: LambdaNode, inferenceContext: InferenceContext | undefined): TypeResult { + let expectedFunctionTypes: FunctionType[] = []; + if (inferenceContext) { + mapSubtypes(inferenceContext.expectedType, (subtype) => { + if (isFunction(subtype)) { + expectedFunctionTypes.push(subtype); + } + + if (isClassInstance(subtype)) { + const boundMethod = getBoundMagicMethod(subtype, '__call__'); + if (boundMethod && isFunction(boundMethod)) { + expectedFunctionTypes.push(boundMethod as FunctionType); + } + } + + return undefined; + }); + } + + let expectedSubtype: FunctionType | undefined; + + // If there's more than one type, try each in turn until we find one that works. + if (expectedFunctionTypes.length > 1) { + // Sort the expected types for deterministic results. 
+ expectedFunctionTypes = sortTypes(expectedFunctionTypes) as FunctionType[]; + + for (const subtype of expectedFunctionTypes) { + const result = getTypeOfLambdaWithExpectedType( + node, + subtype, + inferenceContext, + /* forceSpeculative */ true + ); + + if (!result.typeErrors) { + expectedSubtype = subtype; + break; + } + } + } + + if (!expectedSubtype && expectedFunctionTypes.length > 0) { + expectedSubtype = expectedFunctionTypes[0]; + } + + return getTypeOfLambdaWithExpectedType(node, expectedSubtype, inferenceContext, /* forceSpeculative */ false); + } + + function getTypeOfLambdaWithExpectedType( + node: LambdaNode, + expectedType: FunctionType | undefined, + inferenceContext: InferenceContext | undefined, + forceSpeculative: boolean + ): TypeResult { + let isIncomplete = !!inferenceContext?.isTypeIncomplete; + let paramsArePositionOnly = true; + + let expectedReturnType: Type | undefined; + let expectedParamDetails: ParamListDetails | undefined; + + if (expectedType) { + const liveTypeVarScopes = ParseTreeUtils.getTypeVarScopesForNode(node); + expectedType = transformExpectedType(expectedType, liveTypeVarScopes, node.start) as FunctionType; + + expectedParamDetails = getParamListDetails(expectedType); + expectedReturnType = getEffectiveReturnType(expectedType); + } + + let functionType = FunctionType.createInstance('', '', '', FunctionTypeFlags.PartiallyEvaluated); + functionType.shared.typeVarScopeId = ParseTreeUtils.getScopeIdForNode(node); + + return invalidateTypeCacheIfCanceled(() => { + // Pre-cache the incomplete function type in case the evaluation of the + // lambda depends on itself. + writeTypeCache(node, { type: functionType, isIncomplete: true }, EvalFlags.None); + + // We assume for simplicity that the parameter signature of the lambda is + // the same as the expected type. If this isn't the case, we'll use + // object for any lambda parameters that don't match. 
We could make this + // more sophisticated in the future, but it becomes very complex to handle + // all of the permutations. + let sawParamMismatch = false; + + node.d.params.forEach((param, index) => { + let paramType: Type | undefined; + + if (expectedParamDetails && !sawParamMismatch) { + if (index < expectedParamDetails.params.length) { + const expectedParam = expectedParamDetails.params[index]; + + // If the parameter category matches and both of the parameters are + // either separators (/ or *) or not separators, copy the type + // from the expected parameter. + if ( + expectedParam.param.category === param.d.category && + !param.d.name === !expectedParam.param.name + ) { + paramType = expectedParam.type; + } else { + sawParamMismatch = true; + } + } else if (param.d.defaultValue) { + // If the lambda param has a default value but there is no associated + // parameter in the expected type, assume that the default value is + // being used to explicitly capture a value from an outer scope. Infer + // its type from the default value expression. + paramType = getTypeOfExpression(param.d.defaultValue, undefined, inferenceContext).type; + } + } else if (param.d.defaultValue) { + // If there is no inference context but we have a default value, + // use the default value to infer the parameter's type. + paramType = inferParamTypeFromDefaultValue(param.d.defaultValue); + } + + if (param.d.name) { + writeTypeCache( + param.d.name, + { + type: transformVariadicParamType(node, param.d.category, paramType ?? UnknownType.create()), + }, + EvalFlags.None + ); + } + + if (param.d.defaultValue) { + // Evaluate the default value if it's present. + getTypeOfExpression(param.d.defaultValue, EvalFlags.ConvertEllipsisToAny); + } + + // Determine whether we need to insert an implied position-only parameter. + // This is needed when a function's parameters are named using the old-style + // way of specifying position-only parameters. 
+ if (index >= 0) { + let isImplicitPositionOnlyParam = false; + + if (param.d.category === ParamCategory.Simple && param.d.name) { + if (isPrivateName(param.d.name.d.value)) { + isImplicitPositionOnlyParam = true; + } + } else { + paramsArePositionOnly = false; + } + + if ( + paramsArePositionOnly && + !isImplicitPositionOnlyParam && + functionType.shared.parameters.length > 0 + ) { + FunctionType.addPositionOnlyParamSeparator(functionType); + } + + if (!isImplicitPositionOnlyParam) { + paramsArePositionOnly = false; + } + } + + const functionParam = FunctionParam.create( + param.d.category, + paramType ?? UnknownType.create(), + FunctionParamFlags.TypeDeclared, + param.d.name ? param.d.name.d.value : undefined, + param.d.defaultValue ? AnyType.create(/* isEllipsis */ true) : undefined, + param.d.defaultValue + ); + + FunctionType.addParam(functionType, functionParam); + }); + + if (paramsArePositionOnly && functionType.shared.parameters.length > 0) { + FunctionType.addPositionOnlyParamSeparator(functionType); + } + + let typeErrors = false; + + // If we're speculatively evaluating the lambda, create another speculative + // evaluation scope for the return expression and do not allow retention + // of the cached types. + // We need to set allowCacheRetention to false because we don't want to + // cache the type of the lambda return expression because it depends on + // the parameter types that we set above, and the speculative type cache + // doesn't know about that context. + useSpeculativeMode( + forceSpeculative || isSpeculativeModeInUse(node) || inferenceContext?.isTypeIncomplete + ? 
node.d.expr + : undefined, + () => { + const returnTypeResult = getTypeOfExpression( + node.d.expr, + /* flags */ undefined, + makeInferenceContext(expectedReturnType) + ); + + functionType.shared.inferredReturnType = { + type: returnTypeResult.type, + }; + if (returnTypeResult.isIncomplete) { + isIncomplete = true; + } + + if (returnTypeResult.typeErrors) { + typeErrors = true; + } else if (expectedReturnType) { + // If the expectedReturnType is generic, see if the actual return type + // provides types for some or all type variables. + if (requiresSpecialization(expectedReturnType)) { + const constraints = new ConstraintTracker(); + if ( + assignType(expectedReturnType, returnTypeResult.type, /* diag */ undefined, constraints) + ) { + functionType = solveAndApplyConstraints(functionType, constraints, { + replaceUnsolved: { + scopeIds: [], + tupleClassType: getTupleClassType(), + }, + }) as FunctionType; + } + } + } + }, + { + dependentType: inferenceContext?.expectedType, + allowDiagnostics: + !forceSpeculative && !canSkipDiagnosticForNode(node) && !inferenceContext?.isTypeIncomplete, + } + ); + + // Mark the function type as no longer being evaluated. + functionType.shared.flags &= ~FunctionTypeFlags.PartiallyEvaluated; + + // Is the resulting function compatible with the expected type? + if (expectedType && !assignType(expectedType, functionType)) { + typeErrors = true; + } + + return { type: functionType, isIncomplete, typeErrors }; + }); + } + + function getTypeOfComprehension( + node: ComprehensionNode, + flags: EvalFlags, + inferenceContext?: InferenceContext + ): TypeResult { + let isIncomplete = false; + let typeErrors = false; + + // If any of the "for" clauses are marked async or any of the "if" clauses + // or any clause other than the leftmost "for" contain an "await" operator, + // it is treated as an async generator. 
+ let isAsync = node.d.forIfNodes.some((comp, index) => { + if (comp.nodeType === ParseNodeType.ComprehensionFor && comp.d.isAsync) { + return true; + } + return index > 0 && ParseTreeUtils.containsAwaitNode(comp); + }); + let type: Type = UnknownType.create(); + + if (ParseTreeUtils.containsAwaitNode(node.d.expr)) { + isAsync = true; + } + + const builtInIteratorType = getTypingType(node, isAsync ? 'AsyncGenerator' : 'Generator'); + + const expectedEntryType = getExpectedEntryTypeForIterable(node, builtInIteratorType, inferenceContext); + const elementTypeResult = getElementTypeFromComprehension( + node, + flags | EvalFlags.StripTupleLiterals, + expectedEntryType + ); + + if (elementTypeResult.isIncomplete) { + isIncomplete = true; + } + + if (elementTypeResult.typeErrors) { + typeErrors = true; + } + + let elementType = elementTypeResult.type; + if (!expectedEntryType || !containsLiteralType(expectedEntryType)) { + elementType = stripLiteralValue(elementType); + } + + if (builtInIteratorType && isInstantiableClass(builtInIteratorType)) { + type = ClassType.cloneAsInstance( + ClassType.specialize( + builtInIteratorType, + isAsync ? [elementType, getNoneType()] : [elementType, getNoneType(), getNoneType()] + ) + ); + } + + return { type, isIncomplete, typeErrors }; + } + + function reportPossibleUnknownAssignment( + diagLevel: DiagnosticLevel, + rule: DiagnosticRule, + target: NameNode, + type: Type, + errorNode: ExpressionNode, + ignoreEmptyContainers: boolean + ) { + // Don't bother if the feature is disabled. + if (diagLevel === 'none') { + return; + } + + const nameValue = target.d.value; + + // Sometimes variables contain an "unbound" type if they're + // assigned only within conditional statements. Remove this + // to avoid confusion. 
+ const simplifiedType = removeUnbound(type); + + if (isUnknown(simplifiedType)) { + addDiagnostic(rule, LocMessage.typeUnknown().format({ name: nameValue }), errorNode); + } else if (isPartlyUnknown(simplifiedType)) { + // If ignoreEmptyContainers is true, don't report the problem for + // empty containers (lists or dictionaries). We'll report the problem + // only if the assigned value is used later. + if (!ignoreEmptyContainers || !isClassInstance(type) || !type.priv.isEmptyContainer) { + const diagAddendum = new DiagnosticAddendum(); + diagAddendum.addMessage( + LocAddendum.typeOfSymbol().format({ + name: nameValue, + type: printType(simplifiedType, { expandTypeAlias: true }), + }) + ); + addDiagnostic( + rule, + LocMessage.typePartiallyUnknown().format({ name: nameValue }) + diagAddendum.getString(), + errorNode + ); + } + } + } + + function evaluateComprehensionForIf(node: ComprehensionForIfNode) { + let isIncomplete = false; + + if (node.nodeType === ParseNodeType.ComprehensionFor) { + const iterableTypeResult = getTypeOfExpression(node.d.iterableExpr); + if (iterableTypeResult.isIncomplete) { + isIncomplete = true; + } + const iterableType = stripLiteralValue(iterableTypeResult.type); + const itemTypeResult = getTypeOfIterator( + { type: iterableType, isIncomplete: iterableTypeResult.isIncomplete }, + !!node.d.isAsync, + node.d.iterableExpr + ) ?? { type: UnknownType.create(), isIncomplete: iterableTypeResult.isIncomplete }; + + const targetExpr = node.d.targetExpr; + assignTypeToExpression(targetExpr, itemTypeResult, node.d.iterableExpr); + } else { + assert(node.nodeType === ParseNodeType.ComprehensionIf); + + // Evaluate the test expression to validate it and mark symbols + // as referenced. This doesn't affect the type of the evaluated + // comprehension, but it is important for evaluating intermediate + // expressions such as assignment expressions that can affect other + // subexpressions. 
+ getTypeOfExpression(node.d.testExpr); + } + + return isIncomplete; + } + + // Returns the type of one entry returned by the comprehension. + function getElementTypeFromComprehension( + node: ComprehensionNode, + flags: EvalFlags, + expectedValueOrElementType?: Type, + expectedKeyType?: Type + ): TypeResult { + let isIncomplete = false; + let typeErrors = false; + + // "Execute" the list comprehensions from start to finish. + for (const forIfNode of node.d.forIfNodes) { + if (evaluateComprehensionForIf(forIfNode)) { + isIncomplete = true; + } + } + + let type: Type = UnknownType.create(); + if (node.d.expr.nodeType === ParseNodeType.DictionaryKeyEntry) { + // Create a tuple with the key/value types. + const keyTypeResult = getTypeOfExpression( + node.d.expr.d.keyExpr, + flags, + makeInferenceContext(expectedKeyType) + ); + if (keyTypeResult.isIncomplete) { + isIncomplete = true; + } + if (keyTypeResult.typeErrors) { + typeErrors = true; + } + let keyType = keyTypeResult.type; + if (!expectedKeyType || !containsLiteralType(expectedKeyType)) { + keyType = stripLiteralValue(keyType); + } + + const valueTypeResult = getTypeOfExpression( + node.d.expr.d.valueExpr, + flags, + makeInferenceContext(expectedValueOrElementType) + ); + if (valueTypeResult.isIncomplete) { + isIncomplete = true; + } + if (valueTypeResult.typeErrors) { + typeErrors = true; + } + let valueType = valueTypeResult.type; + if (!expectedValueOrElementType || !containsLiteralType(expectedValueOrElementType)) { + valueType = stripLiteralValue(valueType); + } + + type = makeTupleObject(evaluatorInterface, [ + { type: keyType, isUnbounded: false }, + { type: valueType, isUnbounded: false }, + ]); + } else if (node.d.expr.nodeType === ParseNodeType.DictionaryExpandEntry) { + // The parser should have reported an error in this case because it's not allowed. 
+ getTypeOfExpression(node.d.expr.d.expr, flags, makeInferenceContext(expectedValueOrElementType)); + } else if (isExpressionNode(node)) { + const exprTypeResult = getTypeOfExpression( + node.d.expr as ExpressionNode, + flags, + makeInferenceContext(expectedValueOrElementType) + ); + if (exprTypeResult.isIncomplete) { + isIncomplete = true; + } + if (exprTypeResult.typeErrors) { + typeErrors = true; + } + type = exprTypeResult.type; + } + + return { type, isIncomplete, typeErrors }; + } + + function getTypeOfSlice(node: SliceNode): TypeResult { + const noneType = getNoneType(); + let startType = noneType; + let endType = noneType; + let stepType = noneType; + let isIncomplete = false; + + // Evaluate the expressions to report errors and record symbol + // references. + if (node.d.startValue) { + const startTypeResult = getTypeOfExpression(node.d.startValue); + startType = startTypeResult.type; + if (startTypeResult.isIncomplete) { + isIncomplete = true; + } + } + + if (node.d.endValue) { + const endTypeResult = getTypeOfExpression(node.d.endValue); + endType = endTypeResult.type; + if (endTypeResult.isIncomplete) { + isIncomplete = true; + } + } + + if (node.d.stepValue) { + const stepTypeResult = getTypeOfExpression(node.d.stepValue); + stepType = stepTypeResult.type; + if (stepTypeResult.isIncomplete) { + isIncomplete = true; + } + } + + const sliceType = getBuiltInObject(node, 'slice'); + + if (!isClassInstance(sliceType)) { + return { type: sliceType }; + } + + return { type: ClassType.specialize(sliceType, [startType, endType, stepType]), isIncomplete }; + } + + // Verifies that a type argument's type is not disallowed. 
+ function validateTypeArg(argResult: TypeResultWithNode, options?: ValidateTypeArgsOptions): boolean { + if (argResult.typeList) { + if (!options?.allowTypeArgList) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.typeArgListNotAllowed(), argResult.node); + return false; + } else { + argResult.typeList.forEach((typeArg) => { + validateTypeArg(typeArg); + }); + } + } + + if (isEllipsisType(argResult.type)) { + if (!options?.allowTypeArgList) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.ellipsisContext(), argResult.node); + return false; + } + } + + if (isModule(argResult.type)) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.moduleAsType(), argResult.node); + return false; + } + + if (isParamSpec(argResult.type)) { + if (!options?.allowParamSpec) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.paramSpecContext(), argResult.node); + return false; + } + } + + if (isTypeVarTuple(argResult.type) && !argResult.type.priv.isInUnion) { + if (!options?.allowTypeVarTuple) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.typeVarTupleContext(), argResult.node); + return false; + } else { + validateTypeVarTupleIsUnpacked(argResult.type, argResult.node); + } + } + + if (!options?.allowEmptyTuple && argResult.isEmptyTupleShorthand) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.zeroLengthTupleNotAllowed(), argResult.node); + return false; + } + + if (isUnpackedClass(argResult.type)) { + if (!options?.allowUnpackedTuples) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.unpackedArgInTypeArgument(), + argResult.node + ); + return false; + } + } + + return true; + } + + // Evaluates the type arguments for a Callable type. It should have zero + // to two arguments.The first argument, if present, should be an ellipsis, + // a ParamSpec, a Concatenate, or a list of positional parameter types. 
+ // The second argument, if present, should specify the return type. + function createCallableType( + classType: ClassType, + typeArgs: TypeResultWithNode[] | undefined, + errorNode: ParseNode + ): FunctionType { + let functionType = FunctionType.createInstantiable(FunctionTypeFlags.None); + let paramSpec: ParamSpecType | undefined; + let isValidTypeForm = true; + + TypeBase.setSpecialForm(functionType, ClassType.cloneAsInstance(classType)); + functionType.shared.declaredReturnType = UnknownType.create(); + functionType.shared.typeVarScopeId = ParseTreeUtils.getScopeIdForNode(errorNode); + + if (typeArgs && typeArgs.length > 0) { + functionType.priv.isCallableWithTypeArgs = true; + + if (typeArgs[0].typeList) { + const typeList = typeArgs[0].typeList; + let sawUnpacked = false; + let reportedUnpackedError = false; + const noteSawUnpacked = (entry: TypeResultWithNode) => { + // Make sure we have at most one unpacked TypeVarTuple. + if (sawUnpacked) { + if (!reportedUnpackedError) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.variadicTypeArgsTooMany(), + entry.node + ); + reportedUnpackedError = true; + isValidTypeForm = false; + } + } + sawUnpacked = true; + }; + + typeList.forEach((entry, index) => { + let entryType = entry.type; + let paramCategory: ParamCategory = ParamCategory.Simple; + const paramName = `__p${index.toString()}`; + + if (isTypeVarTuple(entryType)) { + validateTypeVarTupleIsUnpacked(entryType, entry.node); + paramCategory = ParamCategory.ArgsList; + noteSawUnpacked(entry); + } else if (validateTypeArg(entry, { allowUnpackedTuples: true })) { + if (isUnpackedClass(entryType)) { + paramCategory = ParamCategory.ArgsList; + + if ( + entryType.priv.tupleTypeArgs?.some( + (typeArg) => isTypeVarTuple(typeArg.type) || typeArg.isUnbounded + ) + ) { + noteSawUnpacked(entry); + } + } + } else { + entryType = UnknownType.create(); + } + + FunctionType.addParam( + functionType, + FunctionParam.create( + paramCategory, + 
convertToInstance(entryType), + FunctionParamFlags.NameSynthesized | FunctionParamFlags.TypeDeclared, + paramName + ) + ); + }); + + if (typeList.length > 0) { + FunctionType.addPositionOnlyParamSeparator(functionType); + } + } else if (isEllipsisType(typeArgs[0].type)) { + FunctionType.addDefaultParams(functionType); + functionType.shared.flags |= FunctionTypeFlags.GradualCallableForm; + } else if (isParamSpec(typeArgs[0].type)) { + paramSpec = typeArgs[0].type; + } else { + if (isInstantiableClass(typeArgs[0].type) && ClassType.isBuiltIn(typeArgs[0].type, 'Concatenate')) { + const concatTypeArgs = typeArgs[0].type.priv.typeArgs; + if (concatTypeArgs && concatTypeArgs.length > 0) { + concatTypeArgs.forEach((typeArg, index) => { + if (index === concatTypeArgs.length - 1) { + FunctionType.addPositionOnlyParamSeparator(functionType); + + if (isParamSpec(typeArg)) { + paramSpec = typeArg; + } else if (isEllipsisType(typeArg)) { + FunctionType.addDefaultParams(functionType); + functionType.shared.flags |= FunctionTypeFlags.GradualCallableForm; + } + } else { + FunctionType.addParam( + functionType, + FunctionParam.create( + ParamCategory.Simple, + typeArg, + FunctionParamFlags.NameSynthesized | FunctionParamFlags.TypeDeclared, + `__p${index}` + ) + ); + } + }); + } + } else { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.callableFirstArg(), + typeArgs[0].node + ); + isValidTypeForm = false; + } + } + + if (typeArgs.length > 1) { + let typeArg1Type = typeArgs[1].type; + if (!validateTypeArg(typeArgs[1])) { + typeArg1Type = UnknownType.create(); + } + functionType.shared.declaredReturnType = convertToInstance(typeArg1Type); + } else { + addDiagnostic(DiagnosticRule.reportMissingTypeArgument, LocMessage.callableSecondArg(), errorNode); + + functionType.shared.declaredReturnType = UnknownType.create(); + isValidTypeForm = false; + } + + if (typeArgs.length > 2) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.callableExtraArgs(), 
typeArgs[2].node); + isValidTypeForm = false; + } + } else { + FunctionType.addDefaultParams(functionType, /* useUnknown */ true); + functionType.shared.flags |= FunctionTypeFlags.GradualCallableForm; + + if (typeArgs && typeArgs.length === 0) { + isValidTypeForm = false; + } + } + + if (paramSpec) { + FunctionType.addParamSpecVariadics(functionType, convertToInstance(paramSpec)); + } + + if (isTypeFormSupported(errorNode) && isValidTypeForm) { + functionType = TypeBase.cloneWithTypeForm(functionType, convertToInstance(functionType)); + } + + return functionType; + } + + // Creates an Optional[X] type. + function createOptionalType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResultWithNode[] | undefined, + flags: EvalFlags + ): Type { + if (!typeArgs) { + // If no type arguments are provided, the resulting type + // depends on whether we're evaluating a type annotation or + // we're in some other context. + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.optionalExtraArgs(), errorNode); + return UnknownType.create(); + } + + return classType; + } + + if (typeArgs.length !== 1) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.optionalExtraArgs(), errorNode); + return UnknownType.create(); + } + + let typeArg0Type = typeArgs[0].type; + if (!validateTypeArg(typeArgs[0])) { + typeArg0Type = UnknownType.create(); + } + + let optionalType = combineTypes([typeArg0Type, prefetched?.noneTypeClass ?? UnknownType.create()]); + if (prefetched?.unionTypeClass && isInstantiableClass(prefetched.unionTypeClass)) { + optionalType = TypeBase.cloneAsSpecialForm( + optionalType, + ClassType.cloneAsInstance(prefetched.unionTypeClass) + ); + } + + if (typeArg0Type.props?.typeForm) { + const typeFormType = combineTypes([ + typeArg0Type.props.typeForm, + convertToInstance(prefetched?.noneTypeClass ?? 
UnknownType.create()), + ]); + optionalType = TypeBase.cloneWithTypeForm(optionalType, typeFormType); + } + + return optionalType; + } + + function cloneBuiltinObjectWithLiteral(node: ParseNode, builtInName: string, value: LiteralValue): Type { + const type = getBuiltInObject(node, builtInName); + if (isClassInstance(type)) { + return ClassType.cloneWithLiteral(ClassType.cloneRemoveTypePromotions(type), value); + } + + return UnknownType.create(); + } + + function cloneBuiltinClassWithLiteral( + node: ParseNode, + literalClassType: ClassType, + builtInName: string, + value: LiteralValue + ): Type { + const type = getBuiltInType(node, builtInName); + if (isInstantiableClass(type)) { + const literalType = ClassType.cloneWithLiteral(type, value); + TypeBase.setSpecialForm(literalType, literalClassType); + return literalType; + } + + return UnknownType.create(); + } + + // Creates a type that represents a Literal. + function createLiteralType(classType: ClassType, node: IndexNode, flags: EvalFlags): Type { + if (node.d.items.length === 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.literalEmptyArgs(), node.d.leftExpr); + return UnknownType.create(); + } + + // As per the specification, we support None, int, bool, str, bytes literals + // plus enum values. 
+ const literalTypes: Type[] = []; + let isValidTypeForm = true; + + for (const item of node.d.items) { + let type: Type | undefined; + const itemExpr = item.d.valueExpr; + + if (item.d.argCategory !== ArgCategory.Simple) { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.unpackedArgInTypeArgument(), + itemExpr + ); + type = UnknownType.create(); + isValidTypeForm = false; + } + } else if (item.d.name) { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.keywordArgInTypeArgument(), + itemExpr + ); + type = UnknownType.create(); + isValidTypeForm = false; + } + } else if (itemExpr.nodeType === ParseNodeType.StringList) { + const isBytes = (itemExpr.d.strings[0].d.token.flags & StringTokenFlags.Bytes) !== 0; + const value = itemExpr.d.strings.map((s) => s.d.value).join(''); + if (isBytes) { + type = cloneBuiltinClassWithLiteral(node, classType, 'bytes', value); + } else { + type = cloneBuiltinClassWithLiteral(node, classType, 'str', value); + } + + if ((flags & EvalFlags.TypeExpression) !== 0) { + itemExpr.d.strings.forEach((stringNode) => { + if ((stringNode.d.token.flags & StringTokenFlags.NamedUnicodeEscape) !== 0) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.literalNamedUnicodeEscape(), + stringNode + ); + isValidTypeForm = false; + } + }); + } + } else if (itemExpr.nodeType === ParseNodeType.Number) { + if (!itemExpr.d.isImaginary && itemExpr.d.isInteger) { + type = cloneBuiltinClassWithLiteral(node, classType, 'int', itemExpr.d.value); + } + } else if (itemExpr.nodeType === ParseNodeType.Constant) { + if (itemExpr.d.constType === KeywordType.True) { + type = cloneBuiltinClassWithLiteral(node, classType, 'bool', true); + } else if (itemExpr.d.constType === KeywordType.False) { + type = cloneBuiltinClassWithLiteral(node, classType, 'bool', false); + } else if (itemExpr.d.constType === 
KeywordType.None) { + type = prefetched?.noneTypeClass ?? UnknownType.create(); + } + } else if (itemExpr.nodeType === ParseNodeType.UnaryOperation) { + if (itemExpr.d.operator === OperatorType.Subtract || itemExpr.d.operator === OperatorType.Add) { + if (itemExpr.d.expr.nodeType === ParseNodeType.Number) { + if (!itemExpr.d.expr.d.isImaginary && itemExpr.d.expr.d.isInteger) { + type = cloneBuiltinClassWithLiteral( + node, + classType, + 'int', + itemExpr.d.operator === OperatorType.Subtract + ? -itemExpr.d.expr.d.value + : itemExpr.d.expr.d.value + ); + } + } + } + } + + if (!type) { + const exprType = getTypeOfExpression( + itemExpr, + (flags & (EvalFlags.ForwardRefs | EvalFlags.TypeExpression)) | EvalFlags.NoConvertSpecialForm + ); + + // Is this an enum type? + if ( + isClassInstance(exprType.type) && + ClassType.isEnumClass(exprType.type) && + exprType.type.priv.literalValue !== undefined + ) { + type = ClassType.cloneAsInstantiable(exprType.type); + } else { + // Is this a type alias to an existing literal type? 
+ let isLiteralType = true; + + doForEachSubtype(exprType.type, (subtype) => { + if (!isInstantiableClass(subtype) || subtype.priv.literalValue === undefined) { + if (!isNoneTypeClass(subtype)) { + isLiteralType = false; + } + } + }); + + if (isLiteralType) { + type = exprType.type; + } + } + } + + if (!type) { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.literalUnsupportedType(), item); + type = UnknownType.create(); + isValidTypeForm = false; + } else { + return ClassType.cloneAsInstance(classType); + } + } + + literalTypes.push(type); + } + + let result = combineTypes(literalTypes, { skipElideRedundantLiterals: true }); + + if (isUnion(result) && prefetched?.unionTypeClass && isInstantiableClass(prefetched.unionTypeClass)) { + result = TypeBase.cloneAsSpecialForm(result, ClassType.cloneAsInstance(prefetched.unionTypeClass)); + } + + if (isTypeFormSupported(node) && isValidTypeForm) { + result = TypeBase.cloneWithTypeForm(result, convertToInstance(result)); + } + + return result; + } + + // Creates a ClassVar type. + function createClassVarType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResultWithNode[] | undefined, + flags: EvalFlags + ): Type { + if (flags & EvalFlags.NoClassVar) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.classVarNotAllowed(), errorNode); + return AnyType.create(); + } + + if (!typeArgs) { + return classType; + } else if (typeArgs.length === 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.classVarFirstArgMissing(), errorNode); + return UnknownType.create(); + } else if (typeArgs.length > 1) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.classVarTooManyArgs(), typeArgs[1].node); + return UnknownType.create(); + } + + const type = typeArgs[0].type; + + // A ClassVar should not allow TypeVars or generic types parameterized + // by TypeVars. 
+ if (requiresSpecialization(type, { ignorePseudoGeneric: true, ignoreSelf: true })) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.classVarWithTypeVar(), + typeArgs[0].node ?? errorNode + ); + } + + return type; + } + + function createTypeFormType( + classType: ClassType, + errorNode: ExpressionNode, + typeArgs: TypeResultWithNode[] | undefined + ): Type { + if (!typeArgs || typeArgs.length === 0) { + return ClassType.specialize(classType, [UnknownType.create()]); + } + + if (typeArgs.length > 1) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeArgsTooMany().format({ + name: classType.priv.aliasName || classType.shared.name, + expected: 1, + received: typeArgs.length, + }), + typeArgs[1].node + ); + return UnknownType.create(); + } + + const convertedTypeArgs = typeArgs.map((typeArg) => { + return convertToInstance(validateTypeArg(typeArg) ? typeArg.type : UnknownType.create()); + }); + let resultType = ClassType.specialize(classType, convertedTypeArgs); + + if (isTypeFormSupported(errorNode)) { + resultType = TypeBase.cloneWithTypeForm(resultType, convertToInstance(resultType)); + } + + return resultType; + } + + // Creates a "TypeGuard" and "TypeIs" type. + function createTypeGuardType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResultWithNode[] | undefined, + flags: EvalFlags + ): Type { + // If no type arguments are provided, the resulting type + // depends on whether we're evaluating a type annotation or + // we're in some other context. 
+ if (!typeArgs) { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.typeGuardArgCount(), errorNode); + } + + return classType; + } else if (typeArgs.length !== 1) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.typeGuardArgCount(), errorNode); + return UnknownType.create(); + } + + const convertedTypeArgs = typeArgs.map((typeArg) => { + return convertToInstance(validateTypeArg(typeArg) ? typeArg.type : UnknownType.create()); + }); + + let resultType = ClassType.specialize(classType, convertedTypeArgs); + + if (isTypeFormSupported(errorNode)) { + resultType = TypeBase.cloneWithTypeForm(resultType, convertToInstance(resultType)); + } + + return resultType; + } + + function createSelfType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResultWithNode[] | undefined, + flags: EvalFlags + ) { + // Self doesn't support any type arguments. + if (typeArgs && typeArgs.length > 0) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeArguments, + LocMessage.typeArgsExpectingNone().format({ + name: classType.shared.name, + }), + typeArgs[0].node ?? errorNode + ); + } + + let enclosingClass = ParseTreeUtils.getEnclosingClass(errorNode); + + // If `Self` appears anywhere outside of the class body (e.g. a decorator, + // base class list, metaclass argument, type parameter list), it is + // considered illegal. + if (enclosingClass && !ParseTreeUtils.isNodeContainedWithin(errorNode, enclosingClass.d.suite)) { + enclosingClass = undefined; + } + + const enclosingClassTypeResult = enclosingClass ? 
getTypeOfClass(enclosingClass) : undefined; + if (!enclosingClassTypeResult) { + if ((flags & (EvalFlags.TypeExpression | EvalFlags.InstantiableType)) !== 0) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.selfTypeContext(), errorNode); + } + + return UnknownType.create(); + } else if (isInstantiableMetaclass(enclosingClassTypeResult.classType)) { + // If `Self` appears within a metaclass, it is considered illegal. + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.selfTypeMetaclass(), errorNode); + + return UnknownType.create(); + } + + const enclosingFunction = ParseTreeUtils.getEnclosingFunction(errorNode); + if (enclosingFunction) { + const functionInfo = getFunctionInfoFromDecorators( + evaluatorInterface, + enclosingFunction, + /* isInClass */ true + ); + + const isInnerFunction = !!ParseTreeUtils.getEnclosingFunction(enclosingFunction); + if (!isInnerFunction) { + // Check for static methods. + if (functionInfo.flags & FunctionTypeFlags.StaticMethod) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.selfTypeContext(), errorNode); + + return UnknownType.create(); + } + + if (enclosingFunction.d.params.length > 0) { + const firstParamTypeAnnotation = ParseTreeUtils.getTypeAnnotationForParam(enclosingFunction, 0); + if ( + firstParamTypeAnnotation && + !ParseTreeUtils.isNodeContainedWithin(errorNode, firstParamTypeAnnotation) + ) { + const annotationType = getTypeOfAnnotation(firstParamTypeAnnotation, { + typeVarGetsCurScope: true, + }); + if (!isTypeVar(annotationType) || !TypeVarType.isSelf(annotationType)) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.selfTypeWithTypedSelfOrCls(), + errorNode + ); + } + } + } + } + } + + let result = synthesizeTypeVarForSelfCls(enclosingClassTypeResult.classType, /* isClsParam */ true); + + if (enclosingClass) { + // If "Self" is used as a type expression within a function suite, it needs + // to be marked as bound. 
+ const enclosingSuite = ParseTreeUtils.getEnclosingClassOrFunctionSuite(errorNode); + + if (enclosingSuite && ParseTreeUtils.isNodeContainedWithin(enclosingSuite, enclosingClass)) { + if (enclosingClass.d.suite !== enclosingSuite) { + result = TypeVarType.cloneAsBound(result); + } + } + } + + return result; + } + + function createRequiredOrReadOnlyType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResultWithNode[] | undefined, + flags: EvalFlags + ): TypeResult { + // If no type arguments are provided, the resulting type + // depends on whether we're evaluating a type annotation or + // we're in some other context. + if (!typeArgs && (flags & EvalFlags.TypeExpression) === 0) { + return { type: classType }; + } + + if (!typeArgs || typeArgs.length !== 1) { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + classType.shared.name === 'ReadOnly' + ? LocMessage.readOnlyArgCount() + : classType.shared.name === 'Required' + ? LocMessage.requiredArgCount() + : LocMessage.notRequiredArgCount(), + errorNode + ); + } + + return { type: classType }; + } + + const typeArgType = typeArgs[0].type; + + // Make sure this is used only in a dataclass. + const containingClassNode = ParseTreeUtils.getEnclosingClass(errorNode, /* stopAtFunction */ true); + const classTypeInfo = containingClassNode ? getTypeOfClass(containingClassNode) : undefined; + + let isUsageLegal = false; + + if ( + classTypeInfo && + isInstantiableClass(classTypeInfo.classType) && + ClassType.isTypedDictClass(classTypeInfo.classType) + ) { + // The only legal usage is when used in a type annotation statement. 
+ if (ParseTreeUtils.isNodeContainedWithinNodeType(errorNode, ParseNodeType.TypeAnnotation)) { + isUsageLegal = true; + } + } + + let isReadOnly = typeArgs[0].isReadOnly; + let isRequired = typeArgs[0].isRequired; + let isNotRequired = typeArgs[0].isNotRequired; + + if (classType.shared.name === 'ReadOnly') { + if ((flags & EvalFlags.AllowReadOnly) !== 0) { + isUsageLegal = true; + } + + // Nested ReadOnly are not allowed. + if (typeArgs[0].isReadOnly) { + isUsageLegal = false; + } + + isReadOnly = true; + } else { + if ((flags & EvalFlags.AllowRequired) !== 0) { + isUsageLegal = true; + } + + // Nested Required/NotRequired are not allowed. + if (typeArgs[0].isRequired || typeArgs[0].isNotRequired) { + isUsageLegal = false; + } + + isRequired = classType.shared.name === 'Required'; + isNotRequired = classType.shared.name === 'NotRequired'; + } + + if (!isUsageLegal) { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + classType.shared.name === 'ReadOnly' + ? LocMessage.readOnlyNotInTypedDict() + : classType.shared.name === 'Required' + ? 
LocMessage.requiredNotInTypedDict() + : LocMessage.notRequiredNotInTypedDict(), + errorNode + ); + } + + return { type: classType }; + } + + return { type: typeArgType, isReadOnly, isRequired, isNotRequired }; + } + + function createUnpackType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResultWithNode[] | undefined, + flags: EvalFlags + ): Type { + if (!typeArgs || typeArgs.length !== 1) { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.unpackArgCount(), errorNode); + } + return classType; + } + + const typeArgType = typeArgs[0].type; + + if ((flags & EvalFlags.AllowUnpackedTuple) !== 0) { + const unpackedType = applyUnpackToTupleLike(typeArgType); + if (unpackedType) { + return unpackedType; + } + + if ((flags & EvalFlags.TypeExpression) === 0) { + return classType; + } + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.unpackExpectedTypeVarTuple(), errorNode); + return UnknownType.create(); + } + + if ((flags & EvalFlags.AllowUnpackedTypedDict) !== 0) { + if (isInstantiableClass(typeArgType) && ClassType.isTypedDictClass(typeArgType)) { + return ClassType.cloneForUnpacked(typeArgType); + } + + if ((flags & EvalFlags.TypeExpression) === 0) { + return classType; + } + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.unpackExpectedTypedDict(), errorNode); + return UnknownType.create(); + } + + if ((flags & EvalFlags.TypeExpression) === 0) { + return classType; + } + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.unpackNotAllowed(), errorNode); + return UnknownType.create(); + } + + // Creates a "Final" type. 
+ function createFinalType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResultWithNode[] | undefined, + flags: EvalFlags + ): Type { + if (flags & EvalFlags.NoFinal) { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.finalContext(), errorNode); + } + return classType; + } + + if ((flags & EvalFlags.TypeExpression) === 0 || !typeArgs || typeArgs.length === 0) { + return classType; + } + + if (typeArgs.length > 1) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.finalTooManyArgs(), errorNode); + } + + return TypeBase.cloneAsSpecialForm(typeArgs[0].type, classType); + } + + function createConcatenateType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResultWithNode[] | undefined, + flags: EvalFlags + ): Type { + if ((flags & EvalFlags.AllowConcatenate) === 0) { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.concatenateContext(), errorNode); + } + return classType; + } + + if (!typeArgs || typeArgs.length === 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.concatenateTypeArgsMissing(), errorNode); + } else { + typeArgs.forEach((typeArg, index) => { + if (index === typeArgs.length - 1) { + if (!isParamSpec(typeArg.type) && !isEllipsisType(typeArg.type)) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.concatenateParamSpecMissing(), + typeArg.node + ); + } + } else { + if (isParamSpec(typeArg.type)) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.paramSpecContext(), + typeArg.node + ); + } else if (isUnpackedTypeVarTuple(typeArg.type)) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeVarTupleContext(), + typeArg.node + ); + } else if (isUnpackedClass(typeArg.type)) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.unpackedArgInTypeArgument(), + typeArg.node + ); + } + 
} + }); + } + + return createSpecialType(classType, typeArgs, /* paramLimit */ undefined, /* allowParamSpec */ true); + } + + function createAnnotatedType( + classType: ClassType, + errorNode: ExpressionNode, + typeArgs: TypeResultWithNode[] | undefined, + flags: EvalFlags + ): TypeResult { + let type: Type | undefined; + + const typeExprFlags = EvalFlags.TypeExpression | EvalFlags.NoConvertSpecialForm; + if ((flags & typeExprFlags) === 0) { + type = ClassType.cloneAsInstance(classType); + + if (typeArgs && typeArgs.length >= 1 && typeArgs[0].type.props?.typeForm) { + type = TypeBase.cloneWithTypeForm(type, typeArgs[0].type.props.typeForm); + } + + return { type }; + } + + if (typeArgs && typeArgs.length > 0) { + type = typeArgs[0].type; + + if (typeArgs.length < 2) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.annotatedTypeArgMissing(), errorNode); + } else { + type = validateAnnotatedMetadata(errorNode, typeArgs[0].type, typeArgs.slice(1)); + } + } + + if (!type || !typeArgs || typeArgs.length === 0) { + return { type: AnyType.create() }; + } + + if (typeArgs[0].typeList) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.typeArgListNotAllowed(), typeArgs[0].node); + } + + return { + type: TypeBase.cloneAsSpecialForm(type, ClassType.cloneAsInstance(classType)), + isReadOnly: typeArgs[0].isReadOnly, + isRequired: typeArgs[0].isRequired, + isNotRequired: typeArgs[0].isNotRequired, + }; + } + + // Enforces metadata consistency as specified in PEP 746. + function validateAnnotatedMetadata( + errorNode: ExpressionNode, + baseType: Type, + metaArgs: TypeResultWithNode[] + ): Type { + for (const metaArg of metaArgs) { + validateTypeMetadata(errorNode, baseType, metaArg); + } + + return baseType; + } + + // Determines whether the metadata object is compatible with the base type. 
+ function validateTypeMetadata(errorNode: ExpressionNode, baseType: Type, metaArg: TypeResultWithNode): boolean { + // This function was added for draft PEP 746, but the functionality + // has been removed for now while the PEP is being revised. + return true; + } + + // Creates one of several "special" types that are defined in typing.pyi + // but not declared in their entirety. This includes the likes of "Tuple", + // "Dict", etc. + function createSpecialType( + classType: ClassType, + typeArgs: TypeResultWithNode[] | undefined, + paramLimit?: number, + allowParamSpec = false, + isSpecialForm = true + ): Type { + const isTupleTypeParam = ClassType.isTupleClass(classType); + + if (typeArgs) { + if (isTupleTypeParam && typeArgs.length === 1 && typeArgs[0].isEmptyTupleShorthand) { + typeArgs = []; + } else { + let sawUnpacked = false; + const noteSawUnpacked = (typeArg: TypeResultWithNode) => { + if (sawUnpacked) { + if (!reportedUnpackedError) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.variadicTypeArgsTooMany(), + typeArg.node + ); + reportedUnpackedError = true; + } + } + sawUnpacked = true; + }; + let reportedUnpackedError = false; + + // Verify that we didn't receive any inappropriate types. 
+ typeArgs.forEach((typeArg, index) => { + assert(typeArgs !== undefined); + if (isEllipsisType(typeArg.type)) { + if (!isTupleTypeParam) { + if (!allowParamSpec) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.ellipsisContext(), + typeArg.node + ); + } + } else if (typeArgs.length !== 2 || index !== 1) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.ellipsisSecondArg(), + typeArg.node + ); + } else { + if (isTypeVarTuple(typeArgs[0].type) && !typeArgs[0].type.priv.isInUnion) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeVarTupleContext(), + typeArgs[0].node + ); + } else if (isUnpackedClass(typeArgs[0].type)) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.ellipsisAfterUnpacked(), + typeArg.node + ); + } + } + } else if (isParamSpec(typeArg.type) && allowParamSpec) { + // Nothing to do - this is allowed. + } else if (paramLimit === undefined && isTypeVarTuple(typeArg.type)) { + if (!typeArg.type.priv.isInUnion) { + noteSawUnpacked(typeArg); + } + validateTypeVarTupleIsUnpacked(typeArg.type, typeArg.node); + } else if (paramLimit === undefined && isUnpackedClass(typeArg.type)) { + if (isUnboundedTupleClass(typeArg.type)) { + noteSawUnpacked(typeArg); + } + validateTypeArg(typeArg, { allowUnpackedTuples: true }); + } else { + validateTypeArg(typeArg); + } + }); + } + } + + let typeArgTypes = typeArgs ? typeArgs.map((t) => convertToInstance(t.type)) : []; + + // Make sure the argument list count is correct. 
+ if (paramLimit !== undefined) { + if (typeArgs && typeArgTypes.length > paramLimit) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeArgsTooMany().format({ + name: classType.priv.aliasName || classType.shared.name, + expected: paramLimit, + received: typeArgTypes.length, + }), + typeArgs[paramLimit].node + ); + typeArgTypes = typeArgTypes.slice(0, paramLimit); + } else if (typeArgTypes.length < paramLimit) { + // Fill up the remainder of the slots with unknown types. + while (typeArgTypes.length < paramLimit) { + typeArgTypes.push(UnknownType.create()); + } + } + } + + // Handle tuple type params as a special case. + let returnType: Type; + if (isTupleTypeParam) { + const tupleTypeArgTypes: TupleTypeArg[] = []; + + // If no type args are provided and it's a tuple, default to [Unknown, ...]. + if (!typeArgs) { + tupleTypeArgTypes.push({ type: UnknownType.create(), isUnbounded: true }); + } else { + typeArgs.forEach((typeArg, index) => { + if (index === 1 && isEllipsisType(typeArgTypes[index])) { + if (tupleTypeArgTypes.length === 1 && !tupleTypeArgTypes[0].isUnbounded) { + tupleTypeArgTypes[0] = { type: tupleTypeArgTypes[0].type, isUnbounded: true }; + } + } else if (isUnpackedClass(typeArg.type) && typeArg.type.priv.tupleTypeArgs) { + appendArray(tupleTypeArgTypes, typeArg.type.priv.tupleTypeArgs); + } else { + tupleTypeArgTypes.push({ type: typeArgTypes[index], isUnbounded: false }); + } + }); + } + + returnType = specializeTupleClass(classType, tupleTypeArgTypes, typeArgs !== undefined); + } else { + returnType = ClassType.specialize(classType, typeArgTypes, typeArgs !== undefined); + } + + if (isSpecialForm) { + returnType = TypeBase.cloneAsSpecialForm(returnType, classType); + } + + return returnType; + } + + // Unpacks the index expression for a "Union[X, Y, Z]" type annotation. 
+ function createUnionType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResultWithNode[] | undefined, + flags: EvalFlags + ): Type { + const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); + const types: Type[] = []; + let allowSingleTypeArg = false; + let isValidTypeForm = true; + + if (!typeArgs) { + // If no type arguments are provided, the resulting type + // depends on whether we're evaluating a type annotation or + // we're in some other context. + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.unionTypeArgCount(), errorNode); + return NeverType.createNever(); + } + + return classType; + } + + for (const typeArg of typeArgs) { + let typeArgType = typeArg.type; + + // This is an experimental feature because Unions of unpacked TypeVarTuples are not officially supported. + if ( + !validateTypeArg(typeArg, { + allowTypeVarTuple: fileInfo.diagnosticRuleSet.enableExperimentalFeatures, + }) + ) { + typeArgType = UnknownType.create(); + } + + if (isTypeVar(typeArgType) && isUnpackedTypeVarTuple(typeArgType)) { + // This is an experimental feature because Unions of unpacked TypeVarTuples are not officially supported. + if (fileInfo.diagnosticRuleSet.enableExperimentalFeatures) { + // If this is an unpacked TypeVar, note that it is in a union so we can + // differentiate between Unpack[Vs] and Union[Unpack[Vs]]. + typeArgType = TypeVarType.cloneForUnpacked(typeArgType, /* isInUnion */ true); + allowSingleTypeArg = true; + } else { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.unionUnpackedTypeVarTuple(), + errorNode + ); + + typeArgType = UnknownType.create(); + isValidTypeForm = false; + } + } + + types.push(typeArgType); + } + + // Validate that we received at least two type arguments. One type argument + // is allowed if it's an unpacked TypeVarTuple or tuple. None is also allowed + // since it is used to define NoReturn in typeshed stubs). 
+ if (types.length === 1 && !allowSingleTypeArg && !isNoneInstance(types[0])) { + if ((flags & EvalFlags.TypeExpression) !== 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeArguments, LocMessage.unionTypeArgCount(), errorNode); + } + isValidTypeForm = false; + } + + let unionType = combineTypes(types, { skipElideRedundantLiterals: true }); + if (prefetched?.unionTypeClass && isInstantiableClass(prefetched.unionTypeClass)) { + unionType = TypeBase.cloneAsSpecialForm(unionType, ClassType.cloneAsInstance(prefetched.unionTypeClass)); + } + + if (!isValidTypeForm || types.some((t) => !t.props?.typeForm)) { + if (unionType.props?.typeForm) { + unionType = TypeBase.cloneWithTypeForm(unionType, undefined); + } + } else if (isTypeFormSupported(errorNode)) { + const typeFormType = combineTypes(types.map((t) => t.props!.typeForm!)); + unionType = TypeBase.cloneWithTypeForm(unionType, typeFormType); + } + + return unionType; + } + + // Creates a type that represents "Generic[T1, T2, ...]", used in the + // definition of a generic class. + function createGenericType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResultWithNode[] | undefined, + flags: EvalFlags + ): Type { + if (!typeArgs) { + // If no type arguments are provided, the resulting type + // depends on whether we're evaluating a type annotation or + // we're in some other context. + if ((flags & (EvalFlags.TypeExpression | EvalFlags.NoNakedGeneric)) !== 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.genericTypeArgMissing(), errorNode); + } + + return classType; + } + + const uniqueTypeVars: TypeVarType[] = []; + if (typeArgs) { + // Make sure there's at least one type arg. + if (typeArgs.length === 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.genericTypeArgMissing(), errorNode); + } + + // Make sure that all of the type args are typeVars and are unique. 
+ typeArgs.forEach((typeArg) => { + if (!isTypeVar(typeArg.type)) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.genericTypeArgTypeVar(), + typeArg.node + ); + } else { + if (uniqueTypeVars.some((t) => isTypeSame(t, typeArg.type))) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.genericTypeArgUnique(), + typeArg.node + ); + } + + uniqueTypeVars.push(typeArg.type); + } + }); + } + + return createSpecialType(classType, typeArgs, /* paramLimit */ undefined, /* allowParamSpec */ true); + } + + function transformTypeForTypeAlias( + type: Type, + errorNode: ExpressionNode, + typeAliasPlaceholder: TypeVarType, + isPep695TypeVarType: boolean, + typeParamNodes?: TypeParameterNode[] + ): Type { + // If this is a recursive type alias that hasn't yet been fully resolved + // (i.e. there is no boundType associated with it), don't apply the transform. + if (isTypeAliasPlaceholder(type)) { + return type; + } + + const sharedInfo = typeAliasPlaceholder.shared.recursiveAlias; + assert(sharedInfo !== undefined); + + let typeParams: TypeVarType[] | undefined = sharedInfo.typeParams; + if (!typeParams) { + // Determine if there are any generic type parameters associated + // with this type alias. + typeParams = []; + + addTypeVarsToListIfUnique(typeParams, getTypeVarArgsRecursive(type)); + + // Don't include any synthesized type variables. + typeParams = typeParams.filter((typeVar) => !typeVar.shared.isSynthesized); + } + + // Convert all type variables to instances. + typeParams = typeParams.map((typeVar) => { + if (TypeBase.isInstance(typeVar)) { + return typeVar; + } + return convertToInstance(typeVar); + }); + + // See if the type alias includes a TypeVarTuple followed by a TypeVar + // with a default value. This isn't allowed. 
+ const firstTypeVarTupleIndex = typeParams.findIndex((typeVar) => isTypeVarTuple(typeVar)); + if (firstTypeVarTupleIndex >= 0) { + const typeVarWithDefaultIndex = typeParams.findIndex( + (typeVar, index) => + index > firstTypeVarTupleIndex && !isParamSpec(typeVar) && typeVar.shared.isDefaultExplicit + ); + + if (typeVarWithDefaultIndex >= 0) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarWithDefaultFollowsVariadic().format({ + typeVarName: typeParams[typeVarWithDefaultIndex].shared.name, + variadicName: typeParams[firstTypeVarTupleIndex].shared.name, + }), + typeParamNodes ? typeParamNodes[typeVarWithDefaultIndex].d.name : errorNode + ); + } + } + + // Validate the default types for all type parameters. + typeParams.forEach((typeParam, index) => { + assert(typeParams !== undefined); + let bestErrorNode = errorNode; + if (typeParamNodes && index < typeParamNodes.length) { + bestErrorNode = typeParamNodes[index].d.defaultExpr ?? typeParamNodes[index].d.name; + } + validateTypeParamDefault(bestErrorNode, typeParam, typeParams.slice(0, index), sharedInfo.typeVarScopeId); + }); + + // Verify that we have at most one TypeVarTuple. 
+ const variadics = typeParams.filter((param) => isTypeVarTuple(param)); + if (variadics.length > 1) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.variadicTypeParamTooManyAlias().format({ + names: variadics.map((v) => `"${v.shared.name}"`).join(', '), + }), + errorNode + ); + } + + if (!sharedInfo.isTypeAliasType && !isPep695TypeVarType) { + const boundTypeVars = typeParams.filter( + (typeVar) => + typeVar.priv.scopeId !== sharedInfo.typeVarScopeId && + typeVar.priv.scopeType === TypeVarScopeType.Class + ); + + if (boundTypeVars.length > 0) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.genericTypeAliasBoundTypeVar().format({ + names: boundTypeVars.map((t) => `${t.shared.name}`).join(', '), + }), + errorNode + ); + } + } + + if (!TypeBase.isInstantiable(type)) { + return type; + } + + sharedInfo.typeParams = typeParams.length > 0 ? typeParams : undefined; + + let typeAlias = TypeBase.cloneForTypeAlias(type, { + shared: sharedInfo, + typeArgs: undefined, + }); + + // All PEP 695 type aliases are special forms because they are + // TypeAliasType objects at runtime. + if (sharedInfo.isTypeAliasType || isPep695TypeVarType) { + const typeAliasTypeClass = getTypingType(errorNode, 'TypeAliasType'); + if (typeAliasTypeClass && isInstantiableClass(typeAliasTypeClass)) { + typeAlias = TypeBase.cloneAsSpecialForm(typeAlias, ClassType.cloneAsInstance(typeAliasTypeClass)); + } + } + + // Delete the TypeForm info. The type alias serves as its own TypeForm info. 
+ if (typeAlias.props?.typeForm) { + typeAlias = TypeBase.cloneWithTypeForm(typeAlias, undefined); + } + + return typeAlias; + } + + function createSpecialBuiltInClass(node: ParseNode, assignedName: string, aliasMapEntry: AliasMapEntry): ClassType { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + let specialClassType = ClassType.createInstantiable( + assignedName, + ParseTreeUtils.getClassFullName(node, fileInfo.moduleName, assignedName), + fileInfo.moduleName, + fileInfo.fileUri, + ClassTypeFlags.BuiltIn | ClassTypeFlags.SpecialBuiltIn, + /* typeSourceId */ 0, + /* declaredMetaclass */ undefined, + /* effectiveMetaclass */ undefined + ); + + if (aliasMapEntry.isSpecialForm) { + specialClassType.shared.flags |= ClassTypeFlags.SpecialFormClass; + } + + if (aliasMapEntry.isIllegalInIsinstance) { + specialClassType.shared.flags |= ClassTypeFlags.IllegalIsinstanceClass; + } + + // Synthesize a single type parameter with the specified variance if + // specified in the alias map entry. + if (aliasMapEntry.typeParamVariance !== undefined) { + let typeParam = TypeVarType.createInstance('T'); + typeParam = TypeVarType.cloneForScopeId( + typeParam, + ParseTreeUtils.getScopeIdForNode(node), + assignedName, + TypeVarScopeType.Class + ); + typeParam.shared.declaredVariance = aliasMapEntry.typeParamVariance; + specialClassType.shared.typeParams.push(typeParam); + } + + const specialBuiltInClassDeclaration = (AnalyzerNodeInfo.getDeclaration(node) ?? + (node.parent ? 
AnalyzerNodeInfo.getDeclaration(node.parent) : undefined)) as + | SpecialBuiltInClassDeclaration + | undefined; + + specialClassType.shared.declaration = specialBuiltInClassDeclaration; + + if (fileInfo.isTypingExtensionsStubFile) { + specialClassType.shared.flags |= ClassTypeFlags.TypingExtensionClass; + } + + const baseClassName = aliasMapEntry.implicitBaseClass || aliasMapEntry.alias || 'object'; + + let baseClass: Type | undefined; + if (aliasMapEntry.module === 'builtins') { + baseClass = getBuiltInType(node, baseClassName); + } else if (aliasMapEntry.module === 'collections') { + // The typing.pyi file imports collections. + baseClass = getTypeOfModule(node, baseClassName, ['collections']); + } else if (aliasMapEntry.module === 'internals') { + // Handle TypedDict specially. + assert(baseClassName === 'TypedDictFallback'); + baseClass = prefetched?.typedDictPrivateClass; + if (baseClass) { + // The TypedDictFallback class is marked as abstract, but the + // methods that are abstract are overridden and shouldn't + // cause the TypedDict to be marked as abstract. 
+ if ( + isInstantiableClass(baseClass) && + ClassType.isBuiltIn(baseClass, ['_TypedDict', 'TypedDictFallback']) + ) { + baseClass = ClassType.cloneWithNewFlags( + baseClass, + baseClass.shared.flags & + ~(ClassTypeFlags.SupportsAbstractMethods | ClassTypeFlags.TypeCheckOnly) + ); + } + } + } + + if (baseClass && isInstantiableClass(baseClass)) { + if (aliasMapEntry.alias) { + specialClassType = ClassType.cloneForTypingAlias(baseClass, assignedName); + } else { + specialClassType.shared.baseClasses.push(baseClass); + specialClassType.shared.effectiveMetaclass = baseClass.shared.effectiveMetaclass; + computeMroLinearization(specialClassType); + } + } else { + specialClassType.shared.baseClasses.push(UnknownType.create()); + specialClassType.shared.effectiveMetaclass = UnknownType.create(); + computeMroLinearization(specialClassType); + } + + return specialClassType; + } + + // Handles some special-case type annotations that are found + // within the typings.pyi file. + function handleTypingStubTypeAnnotation(node: ExpressionNode): Type | undefined { + if (!node.parent || node.parent.nodeType !== ParseNodeType.TypeAnnotation) { + return undefined; + } + + if (node.parent.d.valueExpr.nodeType !== ParseNodeType.Name) { + return undefined; + } + + const nameNode = node.parent.d.valueExpr; + const assignedName = nameNode.d.value; + + const specialTypes: Map = new Map([ + ['Tuple', { alias: 'tuple', module: 'builtins' }], + ['Generic', { alias: '', module: 'builtins', isSpecialForm: true }], + ['Protocol', { alias: '', module: 'builtins', isSpecialForm: true }], + ['Callable', { alias: '', module: 'builtins', isSpecialForm: true }], + ['Type', { alias: 'type', module: 'builtins' }], + ['ClassVar', { alias: '', module: 'builtins', isSpecialForm: true }], + ['Final', { alias: '', module: 'builtins', isSpecialForm: true }], + ['Literal', { alias: '', module: 'builtins', isSpecialForm: true }], + ['TypedDict', { alias: 'TypedDictFallback', module: 'internals' }], + ['Union', 
{ alias: '', module: 'builtins', isSpecialForm: true }], + ['Optional', { alias: '', module: 'builtins', isSpecialForm: true }], + ['Annotated', { alias: '', module: 'builtins', isSpecialForm: true, isIllegalInIsinstance: true }], + ['TypeAlias', { alias: '', module: 'builtins', isSpecialForm: true }], + ['Concatenate', { alias: '', module: 'builtins', isSpecialForm: true }], + [ + 'TypeGuard', + { + alias: '', + module: 'builtins', + implicitBaseClass: 'bool', + isSpecialForm: true, + typeParamVariance: Variance.Covariant, + }, + ], + ['Unpack', { alias: '', module: 'builtins', isSpecialForm: true }], + ['Required', { alias: '', module: 'builtins', isSpecialForm: true }], + ['NotRequired', { alias: '', module: 'builtins', isSpecialForm: true }], + ['Self', { alias: '', module: 'builtins', isSpecialForm: true }], + ['NoReturn', { alias: '', module: 'builtins', isSpecialForm: true }], + ['Never', { alias: '', module: 'builtins', isSpecialForm: true }], + ['LiteralString', { alias: '', module: 'builtins', isSpecialForm: true }], + ['ReadOnly', { alias: '', module: 'builtins', isSpecialForm: true }], + [ + 'TypeIs', + { + alias: '', + module: 'builtins', + implicitBaseClass: 'bool', + isSpecialForm: true, + typeParamVariance: Variance.Invariant, + }, + ], + [ + 'TypeForm', + { + alias: '', + module: 'builtins', + isSpecialForm: true, + typeParamVariance: Variance.Covariant, + isIllegalInIsinstance: true, + }, + ], + ]); + + const aliasMapEntry = specialTypes.get(assignedName); + + if (aliasMapEntry) { + const cachedType = readTypeCache(node, EvalFlags.None); + if (cachedType) { + return cachedType; + } + + let specialType: Type = createSpecialBuiltInClass(node, assignedName, aliasMapEntry); + + // Handle 'LiteralString' specially because we want it to act as + // though it derives from 'str'. + if (assignedName === 'LiteralString') { + specialType.shared.baseClasses.push(prefetched?.strClass ?? 
AnyType.create()); + computeMroLinearization(specialType); + + if (isTypeFormSupported(node)) { + specialType = TypeBase.cloneWithTypeForm(specialType, convertToInstance(specialType)); + } + } + + // Handle 'Never' and 'NoReturn' specially. + if (assignedName === 'Never' || assignedName === 'NoReturn') { + specialType = TypeBase.cloneAsSpecialForm( + assignedName === 'Never' ? NeverType.createNever() : NeverType.createNoReturn(), + specialType + ); + + if (isTypeFormSupported(node)) { + specialType = TypeBase.cloneWithTypeForm(specialType, convertToInstance(specialType)); + } + } + + writeTypeCache(node, { type: specialType }, EvalFlags.None); + return specialType; + } + + return undefined; + } + + // Handles some special-case assignment statements that are found + // within the typings.pyi file. + function handleTypingStubAssignment(node: AssignmentNode): Type | undefined { + if (node.d.leftExpr.nodeType !== ParseNodeType.Name) { + return undefined; + } + + const nameNode = node.d.leftExpr; + const assignedName = nameNode.d.value; + + if (assignedName === 'Any') { + return AnyType.createSpecialForm(); + } + + const specialTypes: Map = new Map([ + ['overload', { alias: '', module: 'builtins' }], + ['TypeVar', { alias: '', module: 'builtins' }], + ['_promote', { alias: '', module: 'builtins' }], + ['no_type_check', { alias: '', module: 'builtins' }], + ['NoReturn', { alias: '', module: 'builtins' }], + ['Never', { alias: '', module: 'builtins' }], + ['Counter', { alias: 'Counter', module: 'collections' }], + ['List', { alias: 'list', module: 'builtins' }], + ['Dict', { alias: 'dict', module: 'builtins' }], + ['DefaultDict', { alias: 'defaultdict', module: 'collections' }], + ['Set', { alias: 'set', module: 'builtins' }], + ['FrozenSet', { alias: 'frozenset', module: 'builtins' }], + ['Deque', { alias: 'deque', module: 'collections' }], + ['ChainMap', { alias: 'ChainMap', module: 'collections' }], + ['OrderedDict', { alias: 'OrderedDict', module: 'collections' }], + 
]); + + const aliasMapEntry = specialTypes.get(assignedName); + if (aliasMapEntry) { + // Evaluate the expression so symbols are marked as accessed. + getTypeOfExpression(node.d.rightExpr); + return createSpecialBuiltInClass(node, assignedName, aliasMapEntry); + } + + return undefined; + } + + function evaluateTypesForAssignmentStatement(node: AssignmentNode): void { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + + // If the entire statement has already been evaluated, don't + // re-evaluate it. + if (isTypeCached(node)) { + return; + } + + let flags: EvalFlags = EvalFlags.None; + if (fileInfo.isStubFile) { + // An assignment of ellipsis means "Any" within a type stub file. + flags |= EvalFlags.ConvertEllipsisToAny; + } + + if ( + node.d.rightExpr.nodeType === ParseNodeType.Name || + node.d.rightExpr.nodeType === ParseNodeType.MemberAccess + ) { + // Don't specialize a generic class on assignment (e.g. "x = list" + // or "x = collections.OrderedDict") because we may want to later + // specialize it (e.g. "x[int]"). + flags |= EvalFlags.NoSpecialize; + } + + // Is this type already cached? + let rightHandType = readTypeCache(node.d.rightExpr, /* flags */ undefined); + let isIncomplete = false; + let expectedTypeDiagAddendum: DiagnosticAddendum | undefined; + + if (!rightHandType) { + // Special-case the typing.pyi file, which contains some special + // types that the type analyzer needs to interpret differently. 
+ if (fileInfo.isTypingStubFile || fileInfo.isTypingExtensionsStubFile) { + rightHandType = handleTypingStubAssignment(node); + if (rightHandType) { + writeTypeCache(node.d.rightExpr, { type: rightHandType }, EvalFlags.None); + } + } + } + + if (!rightHandType) { + let typeAliasNameNode: NameNode | undefined; + let typeAliasPlaceholder: TypeVarType | undefined; + let isSpeculativeTypeAlias = false; + + if (isDeclaredTypeAlias(node.d.leftExpr)) { + flags = + EvalFlags.InstantiableType | + EvalFlags.TypeExpression | + EvalFlags.StrLiteralAsType | + EvalFlags.NoParamSpec | + EvalFlags.NoTypeVarTuple | + EvalFlags.NoClassVar; + + typeAliasNameNode = (node.d.leftExpr as TypeAnnotationNode).d.valueExpr as NameNode; + + if (!isLegalTypeAliasExpressionForm(node.d.rightExpr, /* allowStrLiteral */ true)) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeAliasIllegalExpressionForm(), + node.d.rightExpr + ); + } + } else if (node.d.leftExpr.nodeType === ParseNodeType.Name) { + const symbolWithScope = lookUpSymbolRecursive( + node.d.leftExpr, + node.d.leftExpr.d.value, + /* honorCodeFlow */ false + ); + + if (symbolWithScope) { + const decls = symbolWithScope.symbol.getDeclarations(); + + if (decls.length === 1) { + if (isPossibleTypeAliasDeclaration(decls[0])) { + typeAliasNameNode = node.d.leftExpr; + isSpeculativeTypeAlias = true; + flags |= EvalFlags.NoConvertSpecialForm; + } else if (isPossibleTypeDictFactoryCall(decls[0])) { + // Handle calls to TypedDict factory functions like type + // aliases to support recursive field type definitions. 
+ typeAliasNameNode = node.d.leftExpr; + } + } + } + } + + if (typeAliasNameNode) { + typeAliasPlaceholder = synthesizeTypeAliasPlaceholder(typeAliasNameNode); + + writeTypeCache(node, { type: typeAliasPlaceholder }, /* flags */ undefined); + writeTypeCache(node.d.leftExpr, { type: typeAliasPlaceholder }, /* flags */ undefined); + + if (node.d.leftExpr.nodeType === ParseNodeType.TypeAnnotation) { + writeTypeCache(node.d.leftExpr.d.valueExpr, { type: typeAliasPlaceholder }, /* flags */ undefined); + } + } + + let declaredType = getDeclaredTypeForExpression(node.d.leftExpr, { method: 'set' }); + + if (declaredType) { + const liveTypeVarScopes = ParseTreeUtils.getTypeVarScopesForNode(node); + declaredType = makeTypeVarsBound(declaredType, liveTypeVarScopes); + } + + const srcTypeResult = getTypeOfExpression(node.d.rightExpr, flags, makeInferenceContext(declaredType)); + + rightHandType = srcTypeResult.type; + expectedTypeDiagAddendum = srcTypeResult.expectedTypeDiagAddendum; + if (srcTypeResult.isIncomplete) { + isIncomplete = true; + } + + // If this was a speculative type alias, it becomes a real type alias + // only if the evaluated type is an instantiable type. + if (isSpeculativeTypeAlias && !isLegalImplicitTypeAliasType(rightHandType)) { + typeAliasNameNode = undefined; + } + + if (typeAliasNameNode) { + assert(typeAliasPlaceholder !== undefined); + + // If this is a type alias, record its name based on the assignment target. 
+ rightHandType = transformTypeForTypeAlias( + rightHandType, + typeAliasNameNode, + typeAliasPlaceholder, + /* isPep695TypeVarType */ false + ); + + if (isTypeAliasRecursive(typeAliasPlaceholder, rightHandType)) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeAliasIsRecursiveDirect().format({ + name: typeAliasNameNode.d.value, + }), + node.d.rightExpr + ); + + rightHandType = UnknownType.create(); + } + + // Set the resulting type to the boundType of the original type alias + // to support recursive type aliases. + typeAliasPlaceholder.shared.boundType = rightHandType; + + // Record the type parameters within the recursive type alias so it + // can be specialized. + typeAliasPlaceholder.shared.recursiveAlias!.typeParams = + rightHandType.props?.typeAliasInfo?.shared.typeParams; + } else { + // If the RHS is a constant boolean expression, assign it a literal type. + const constExprValue = evaluateStaticBoolExpression( + node.d.rightExpr, + fileInfo.executionEnvironment, + fileInfo.definedConstants + ); + + if (constExprValue !== undefined) { + const boolType = getBuiltInObject(node, 'bool'); + if (isClassInstance(boolType)) { + rightHandType = ClassType.cloneWithLiteral(boolType, constExprValue); + } + } + } + } + + assignTypeToExpression( + node.d.leftExpr, + { type: rightHandType, isIncomplete }, + node.d.rightExpr, + /* ignoreEmptyContainers */ true, + /* allowAssignmentToFinalVar */ true, + expectedTypeDiagAddendum + ); + + writeTypeCache(node, { type: rightHandType, isIncomplete }, EvalFlags.None); + } + + // Synthesize a TypeVar that acts as a placeholder for a type alias. This allows + // the type alias definition to refer to itself. 
+ function synthesizeTypeAliasPlaceholder(nameNode: NameNode, isTypeAliasType: boolean = false): TypeVarType { + const placeholder = TypeVarType.createInstantiable(`__type_alias_${nameNode.d.value}`); + placeholder.shared.isSynthesized = true; + const typeVarScopeId = ParseTreeUtils.getScopeIdForNode(nameNode); + const fileInfo = AnalyzerNodeInfo.getFileInfo(nameNode); + + placeholder.shared.recursiveAlias = { + name: nameNode.d.value, + fullName: ParseTreeUtils.getClassFullName(nameNode, fileInfo.moduleName, nameNode.d.value), + moduleName: fileInfo.moduleName, + fileUri: fileInfo.fileUri, + typeVarScopeId, + isTypeAliasType, + typeParams: undefined, + computedVariance: undefined, + }; + placeholder.priv.scopeId = typeVarScopeId; + + return placeholder; + } + + // Evaluates the type of a type alias (i.e. "type") statement. This code + // path does not handle traditional type aliases, which are treated as + // variables since they use normal variable assignment syntax. + function getTypeOfTypeAlias(node: TypeAliasNode): Type { + return getTypeOfTypeAliasCommon( + node, + node.d.name, + node.d.expr, + /* isPep695Syntax */ true, + node.d.typeParams?.d.params, + () => { + if (node.d.typeParams) { + return evaluateTypeParamList(node.d.typeParams); + } + return undefined; + } + ); + } + + // This function is common to the handling of "type" statements and explicit + // calls to the TypeAliasType constructor. + function getTypeOfTypeAliasCommon( + declNode: ParseNode, + nameNode: NameNode, + valueNode: ExpressionNode, + isPep695Syntax: boolean, + typeParamNodes: TypeParameterNode[] | undefined, + getTypeParamCallback: () => TypeVarType[] | undefined + ) { + const cachedType = readTypeCache(nameNode, EvalFlags.None); + if (cachedType) { + return cachedType; + } + + // Synthesize a type variable that represents the type alias while we're + // evaluating it. This allows us to handle recursive definitions. 
+ const typeAliasTypeVar = synthesizeTypeAliasPlaceholder(nameNode, /* isTypeAliasType */ true); + + // Write the type to the type cache to support recursive type alias definitions. + writeTypeCache(nameNode, { type: typeAliasTypeVar }, /* flags */ undefined); + + // Set a partial type to handle recursive (self-referential) type aliases. + const scope = ScopeUtils.getScopeForNode(declNode); + const typeAliasSymbol = scope?.lookUpSymbolRecursive(nameNode.d.value); + const typeAliasDecl = AnalyzerNodeInfo.getDeclaration(declNode); + if (typeAliasDecl && typeAliasSymbol) { + setSymbolResolutionPartialType(typeAliasSymbol.symbol, typeAliasDecl, typeAliasTypeVar); + } + + const typeParams = getTypeParamCallback(); + if (typeAliasTypeVar.shared.recursiveAlias) { + typeAliasTypeVar.shared.recursiveAlias.typeParams = typeParams ?? []; + } + + let aliasTypeResult: TypeResult; + if (isPep695Syntax) { + aliasTypeResult = getTypeOfExpressionExpectingType(valueNode, { + forwardRefs: true, + typeExpression: true, + }); + } else { + const flags = + EvalFlags.InstantiableType | + EvalFlags.TypeExpression | + EvalFlags.StrLiteralAsType | + EvalFlags.NoParamSpec | + EvalFlags.NoTypeVarTuple | + EvalFlags.NoClassVar; + aliasTypeResult = getTypeOfExpression(valueNode, flags); + } + + let isIncomplete = false; + let aliasType = aliasTypeResult.type; + if (aliasTypeResult.isIncomplete) { + isIncomplete = true; + } + + aliasType = transformTypeForTypeAlias( + aliasType, + nameNode, + typeAliasTypeVar, + /* isPep695TypeVarType */ true, + typeParamNodes + ); + + // See if the type alias relies on itself in a way that cannot be resolved. 
+ if (isTypeAliasRecursive(typeAliasTypeVar, aliasType)) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeAliasIsRecursiveDirect().format({ + name: nameNode.d.value, + }), + valueNode + ); + + aliasType = UnknownType.create(); + } + + // Set the resulting type to the boundType of the original type alias + // to support recursive type aliases. + typeAliasTypeVar.shared.boundType = aliasType; + + writeTypeCache(nameNode, { type: aliasType, isIncomplete }, EvalFlags.None); + + return aliasType; + } + + function evaluateTypesForAugmentedAssignment(node: AugmentedAssignmentNode): void { + if (isTypeCached(node)) { + return; + } + + const destTypeResult = getTypeOfAugmentedAssignment(evaluatorInterface, node, /* inferenceContext */ undefined); + + writeTypeCache(node, destTypeResult, EvalFlags.None); + } + + function getPseudoGenericTypeVarName(paramName: string) { + return `__type_of_${paramName}`; + } + + // Creates a new class type that is a subclass of two other specified classes. + function createSubclass(errorNode: ExpressionNode, type1: ClassType, type2: ClassType): ClassType { + assert(isInstantiableClass(type1) && isInstantiableClass(type2)); + + // If both classes are class objects (type[A] and type[B]), create a new + // class object (type[A & B]) rather than "type[A] & type[B]". + let createClassObject = false; + if (TypeBase.getInstantiableDepth(type1) > 0 && TypeBase.getInstantiableDepth(type2) > 0) { + type1 = ClassType.cloneAsInstance(type1); + type2 = ClassType.cloneAsInstance(type2); + createClassObject = true; + } + + const className = ``; + const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); + + // The effective metaclass of the intersection is the narrower of the two metaclasses. 
+ let effectiveMetaclass = type1.shared.effectiveMetaclass; + if (type2.shared.effectiveMetaclass) { + if (!effectiveMetaclass || assignType(effectiveMetaclass, type2.shared.effectiveMetaclass)) { + effectiveMetaclass = type2.shared.effectiveMetaclass; + } + } + + let newClassType = ClassType.createInstantiable( + className, + ParseTreeUtils.getClassFullName(errorNode, fileInfo.moduleName, className), + fileInfo.moduleName, + fileInfo.fileUri, + ClassTypeFlags.None, + ParseTreeUtils.getTypeSourceId(errorNode), + /* declaredMetaclass */ undefined, + effectiveMetaclass, + type1.shared.docString + ); + + newClassType.shared.baseClasses = [type1, type2]; + computeMroLinearization(newClassType); + + newClassType = addConditionToType(newClassType, type1.props?.condition); + newClassType = addConditionToType(newClassType, type2.props?.condition); + + if (createClassObject) { + newClassType = ClassType.cloneAsInstantiable(newClassType); + } + + return newClassType; + } + + function getTypeOfClass(node: ClassNode): ClassTypeResult | undefined { + initializePrefetchedTypes(node); + + // Is this type already cached? + const cachedClassType = readTypeCache(node.d.name, EvalFlags.None); + + if (cachedClassType) { + if (!isInstantiableClass(cachedClassType)) { + // This can happen in rare circumstances where the class declaration + // is located in an unreachable code block. + return undefined; + } + return { + classType: cachedClassType, + decoratedType: readTypeCache(node, EvalFlags.None) || UnknownType.create(), + }; + } + + // The type wasn't cached, so we need to create a new one. 
+ const scope = ScopeUtils.getScopeForNode(node); + + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + let classFlags = ClassTypeFlags.None; + if ( + scope?.type === ScopeType.Builtin || + fileInfo.isTypingStubFile || + fileInfo.isTypingExtensionsStubFile || + fileInfo.isBuiltInStubFile || + fileInfo.isTypeshedStubFile + ) { + classFlags |= ClassTypeFlags.BuiltIn; + + if (fileInfo.isTypingExtensionsStubFile) { + classFlags |= ClassTypeFlags.TypingExtensionClass; + } + + if (node.d.name.d.value === 'property') { + classFlags |= ClassTypeFlags.PropertyClass; + } + + if (node.d.name.d.value === 'tuple') { + classFlags |= ClassTypeFlags.TupleClass; + } + } + + if (fileInfo.isStubFile) { + classFlags |= ClassTypeFlags.DefinedInStub; + } + + const classType = ClassType.createInstantiable( + node.d.name.d.value, + ParseTreeUtils.getClassFullName(node, fileInfo.moduleName, node.d.name.d.value), + fileInfo.moduleName, + fileInfo.fileUri, + classFlags, + ParseTreeUtils.getTypeSourceId(node), + /* declaredMetaclass */ undefined, + /* effectiveMetaclass */ undefined, + ParseTreeUtils.getDocString(node.d.suite.d.statements) + ); + + classType.shared.typeVarScopeId = ParseTreeUtils.getScopeIdForNode(node); + + // Is this a special type that supports type promotions according to PEP 484? + if (typePromotions.has(classType.shared.fullName)) { + classType.priv.includePromotions = true; + } + + // Some classes refer to themselves within type arguments used within + // base classes. We'll register the partially-constructed class type + // to allow these to be resolved. 
+ const classSymbol = scope?.lookUpSymbol(node.d.name.d.value); + let classDecl: ClassDeclaration | undefined; + const decl = AnalyzerNodeInfo.getDeclaration(node); + if (decl) { + classDecl = decl as ClassDeclaration; + } + if (classDecl && classSymbol) { + setSymbolResolutionPartialType(classSymbol, classDecl, classType); + } + classType.shared.flags |= ClassTypeFlags.PartiallyEvaluated; + classType.shared.declaration = classDecl; + + return invalidateTypeCacheIfCanceled(() => { + writeTypeCache(node, { type: classType }, /* flags */ undefined); + writeTypeCache(node.d.name, { type: classType }, /* flags */ undefined); + + // Keep a list of unique type parameters that are used in the + // base class arguments. + let typeParams: TypeVarType[] = []; + + if (node.d.typeParams) { + typeParams = evaluateTypeParamList(node.d.typeParams).map((t) => TypeVarType.cloneAsInstance(t)); + } + + // If the class derives from "Generic" directly, it will provide + // all of the type parameters in the specified order. + let genericTypeParams: TypeVarType[] | undefined; + let protocolTypeParams: TypeVarType[] | undefined; + let isNamedTupleSubclass = false; + + const initSubclassArgs: Arg[] = []; + let metaclassNode: ExpressionNode | undefined; + let exprFlags = + EvalFlags.InstantiableType | + EvalFlags.AllowGeneric | + EvalFlags.NoNakedGeneric | + EvalFlags.NoTypeVarWithScopeId | + EvalFlags.TypeVarGetsCurScope | + EvalFlags.EnforceVarianceConsistency; + if (fileInfo.isStubFile) { + exprFlags |= EvalFlags.ForwardRefs; + } + let sawClosedOrExtraItems = false; + + node.d.arguments.forEach((arg) => { + // Ignore unpacked arguments. + if (arg.d.argCategory === ArgCategory.UnpackedDictionary) { + // Evaluate the expression's type so symbols are marked accessed + // and errors are reported. 
+ getTypeOfExpression(arg.d.valueExpr); + return; + } + + if (!arg.d.name) { + let argType: Type; + + if (arg.d.argCategory === ArgCategory.UnpackedList) { + getTypeOfExpression(arg.d.valueExpr); + argType = UnknownType.create(); + } else { + argType = getTypeOfExpression(arg.d.valueExpr, exprFlags).type; + + if ( + isTypeVar(argType) && + argType.props?.specialForm && + TypeBase.isInstance(argType.props.specialForm) + ) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.baseClassInvalid(), arg); + argType = UnknownType.create(); + } + + argType = makeTopLevelTypeVarsConcrete(argType); + } + + // In some stub files, classes are conditionally defined (e.g. based + // on platform type). We'll assume that the conditional logic is correct + // and strip off the "unbound" union. + if (isUnion(argType)) { + argType = removeUnbound(argType); + } + + // Any is allowed as a base class. Remove its "special form" flag to avoid + // false positive errors. + if (isAny(argType) && argType.props?.specialForm) { + argType = AnyType.create(); + } + + argType = stripTypeFormRecursive(argType); + + if (!isAnyOrUnknown(argType) && !isUnbound(argType)) { + // If the specified base class is type(T), use the metaclass + // of T if it's known. + if ( + isClass(argType) && + TypeBase.getInstantiableDepth(argType) > 0 && + argType.shared.effectiveMetaclass && + isClass(argType.shared.effectiveMetaclass) + ) { + argType = argType.shared.effectiveMetaclass; + } + + if (isMetaclassInstance(argType)) { + assert(isClassInstance(argType)); + argType = + argType.priv.typeArgs && argType.priv.typeArgs.length > 0 + ? 
argType.priv.typeArgs[0] + : UnknownType.create(); + } else if (!isInstantiableClass(argType)) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.baseClassInvalid(), arg); + argType = UnknownType.create(); + } else { + if ( + ClassType.isPartiallyEvaluated(argType) || + argType.shared.mro.some((t) => isClass(t) && ClassType.isPartiallyEvaluated(t)) + ) { + // If the base class is partially evaluated, install a callback + // so we can fix up this class (e.g. compute the MRO) when the + // dependent class is completed. + registerDeferredClassCompletion(node, argType); + } + + if (ClassType.isBuiltIn(argType, 'Protocol')) { + if ( + !fileInfo.isStubFile && + !ClassType.isTypingExtensionClass(argType) && + PythonVersion.isLessThan( + fileInfo.executionEnvironment.pythonVersion, + pythonVersion3_7 + ) + ) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.protocolIllegal(), + arg.d.valueExpr + ); + } + classType.shared.flags |= ClassTypeFlags.ProtocolClass; + } + + if (ClassType.isBuiltIn(argType, 'property')) { + classType.shared.flags |= ClassTypeFlags.PropertyClass; + } + + // If the class directly derives from NamedTuple (in Python 3.6 or + // newer), it's considered a (read-only) dataclass. + if ( + PythonVersion.isGreaterOrEqualTo( + fileInfo.executionEnvironment.pythonVersion, + pythonVersion3_6 + ) + ) { + if (ClassType.isBuiltIn(argType, 'NamedTuple')) { + isNamedTupleSubclass = true; + } + } + + // If the class directly derives from TypedDict or from a class that is + // a TypedDict, it is considered a TypedDict. + if (ClassType.isBuiltIn(argType, 'TypedDict') || ClassType.isTypedDictClass(argType)) { + classType.shared.flags |= ClassTypeFlags.TypedDictClass; + + // Propagate the "effectively closed" flag from base classes. 
+ if (ClassType.isTypedDictEffectivelyClosed(argType)) { + classType.shared.flags |= ClassTypeFlags.TypedDictEffectivelyClosed; + } + } + + // Validate that the class isn't deriving from itself, creating a + // circular dependency. + if (derivesFromClassRecursive(argType, classType, /* ignoreUnknown */ true)) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.baseClassCircular(), + arg + ); + argType = UnknownType.create(); + } + + // If the class is attempting to derive from a TypeAliasType, + // generate an error. + if ( + argType.props?.specialForm && + ClassType.isBuiltIn(argType.props.specialForm, 'TypeAliasType') + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeAliasTypeBaseClass(), + arg + ); + argType = UnknownType.create(); + } + } + } + + if (isUnknown(argType)) { + addDiagnostic(DiagnosticRule.reportUntypedBaseClass, LocMessage.baseClassUnknown(), arg); + } + + // Check for a duplicate class. + if ( + classType.shared.baseClasses.some((prevBaseClass) => { + return ( + isInstantiableClass(prevBaseClass) && + isInstantiableClass(argType) && + ClassType.isSameGenericClass(argType, prevBaseClass) + ); + }) + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.duplicateBaseClass(), + arg.d.name || arg + ); + } + + classType.shared.baseClasses.push(argType); + if (isInstantiableClass(argType)) { + if (ClassType.isEnumClass(argType)) { + classType.shared.flags |= ClassTypeFlags.EnumClass; + } + + // Determine if the class is abstract. Protocol classes support abstract methods + // because they are constructed by the _ProtocolMeta metaclass, which derives + // from ABCMeta. 
+ if (ClassType.supportsAbstractMethods(argType) || ClassType.isProtocolClass(argType)) { + classType.shared.flags |= ClassTypeFlags.SupportsAbstractMethods; + } + + if (ClassType.isPropertyClass(argType)) { + classType.shared.flags |= ClassTypeFlags.PropertyClass; + } + + if (ClassType.isFinal(argType)) { + const className = printObjectTypeForClass(argType); + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.baseClassFinal().format({ type: className }), + arg.d.valueExpr + ); + } + } + + addTypeVarsToListIfUnique(typeParams, getTypeVarArgsRecursive(argType)); + if (isInstantiableClass(argType)) { + if (ClassType.isBuiltIn(argType, 'Generic')) { + // 'Generic' is implicitly added if type parameter syntax is used. + if (node.d.typeParams) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.genericBaseClassNotAllowed(), + arg.d.valueExpr + ); + } else { + if (!genericTypeParams) { + if (protocolTypeParams) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.duplicateGenericAndProtocolBase(), + arg.d.valueExpr + ); + } + genericTypeParams = buildTypeParamsFromTypeArgs(argType); + } + } + } else if ( + ClassType.isBuiltIn(argType, 'Protocol') && + argType.priv.typeArgs && + argType.priv.typeArgs.length > 0 + ) { + if (!protocolTypeParams) { + if (genericTypeParams) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.duplicateGenericAndProtocolBase(), + arg.d.valueExpr + ); + } + protocolTypeParams = buildTypeParamsFromTypeArgs(argType); + + if (node.d.typeParams && protocolTypeParams.length > 0) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.protocolBaseClassWithTypeArgs(), + arg.d.valueExpr + ); + protocolTypeParams = []; + } + } + } + } + } else if (ClassType.isTypedDictClass(classType)) { + if (arg.d.name.d.value === 'total' || arg.d.name.d.value === 'closed') { + // The "total" and "readonly" parameters apply only for TypedDict classes. 
+ // PEP 589 specifies that the parameter must be either True or False. + const constArgValue = evaluateStaticBoolExpression( + arg.d.valueExpr, + fileInfo.executionEnvironment, + fileInfo.definedConstants + ); + + if (constArgValue === undefined) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictBoolParam().format({ name: arg.d.name.d.value }), + arg.d.valueExpr + ); + } else if (arg.d.name.d.value === 'total' && !constArgValue) { + classType.shared.flags |= ClassTypeFlags.CanOmitDictValues; + } else if (arg.d.name.d.value === 'closed') { + if (constArgValue) { + classType.shared.flags |= + ClassTypeFlags.TypedDictMarkedClosed | ClassTypeFlags.TypedDictEffectivelyClosed; + + if (classType.shared.typedDictExtraItemsExpr) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictExtraItemsClosed(), + classType.shared.typedDictExtraItemsExpr + ); + } + } else { + // PEP 728: A class that subclasses from a non-open TypedDict + // cannot specify closed=False. + const nonOpenBase = classType.shared.baseClasses.find( + (base) => + isInstantiableClass(base) && + ClassType.isTypedDictClass(base) && + ClassType.isTypedDictEffectivelyClosed(base) + ); + if (nonOpenBase) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictClosedFalseNonOpenBase().format({ + name: (nonOpenBase as ClassType).shared.name, + }), + arg.d.valueExpr + ); + } + } + + if (sawClosedOrExtraItems) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictExtraItemsClosed(), + arg.d.valueExpr + ); + } + + sawClosedOrExtraItems = true; + } + } else if (arg.d.name.d.value === 'extra_items') { + // Record a reference to the expression but don't evaluate it yet. + // It may refer to the class itself. 
+ classType.shared.typedDictExtraItemsExpr = arg.d.valueExpr; + classType.shared.flags |= ClassTypeFlags.TypedDictEffectivelyClosed; + + if (ClassType.isTypedDictMarkedClosed(classType)) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictExtraItemsClosed(), + classType.shared.typedDictExtraItemsExpr + ); + } + + if (sawClosedOrExtraItems) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictExtraItemsClosed(), + arg.d.valueExpr + ); + } + + sawClosedOrExtraItems = true; + } else { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictInitsubclassParameter().format({ name: arg.d.name.d.value }), + arg + ); + } + } else if (arg.d.name.d.value === 'metaclass') { + if (metaclassNode) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.metaclassDuplicate(), arg); + } else { + metaclassNode = arg.d.valueExpr; + } + } else { + // Collect arguments that will be passed to the `__init_subclass__` + // method described in PEP 487. + initSubclassArgs.push({ + argCategory: ArgCategory.Simple, + node: arg, + name: arg.d.name, + valueExpression: arg.d.valueExpr, + }); + } + }); + + // Check for NamedTuple multiple inheritance. + if (classType.shared.baseClasses.length > 1) { + let derivesFromNamedTuple = false; + let foundIllegalBaseClass = false; + + classType.shared.baseClasses.forEach((baseClass) => { + if (isInstantiableClass(baseClass)) { + if (ClassType.isBuiltIn(baseClass, 'NamedTuple')) { + derivesFromNamedTuple = true; + } else if (!ClassType.isBuiltIn(baseClass, 'Generic')) { + foundIllegalBaseClass = true; + } + } + }); + + if (derivesFromNamedTuple && foundIllegalBaseClass) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.namedTupleMultipleInheritance(), + node.d.name + ); + } + } + + // Make sure we don't have 'object' derive from itself. Infinite + // recursion will result. 
+ if ( + !ClassType.isBuiltIn(classType, 'object') && + classType.shared.baseClasses.filter((baseClass) => isClass(baseClass)).length === 0 + ) { + // If there are no other (known) base classes, the class implicitly derives from object. + classType.shared.baseClasses.push(getBuiltInType(node, 'object')); + } + + // If genericTypeParams or protocolTypeParams are provided, + // make sure that typeParams is a proper subset. + genericTypeParams = genericTypeParams ?? protocolTypeParams; + if (genericTypeParams && !node.d.typeParams) { + verifyGenericTypeParams(node.d.name, typeParams, genericTypeParams); + } + classType.shared.typeParams = genericTypeParams ?? typeParams; + + // Determine if one or more type parameters is autovariance. + if ( + classType.shared.typeParams.some( + (param) => + param.shared.declaredVariance === Variance.Auto && param.priv.computedVariance === undefined + ) + ) { + classType.shared.requiresVarianceInference = true; + } + + // Make sure there's at most one TypeVarTuple. + const variadics = typeParams.filter((param) => isTypeVarTuple(param)); + if (variadics.length > 1) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.variadicTypeParamTooManyClass().format({ + names: variadics.map((v) => `"${v.shared.name}"`).join(', '), + }), + node.d.name, + TextRange.combine(node.d.arguments) || node.d.name + ); + } else if (variadics.length > 0) { + // Make sure a TypeVar with a default doesn't come after a TypeVarTuple. 
+ const firstVariadicIndex = typeParams.findIndex((param) => isTypeVarTuple(param)); + const typeVarWithDefaultIndex = typeParams.findIndex( + (param, index) => + index > firstVariadicIndex && !isParamSpec(param) && param.shared.isDefaultExplicit + ); + + if (typeVarWithDefaultIndex >= 0) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarWithDefaultFollowsVariadic().format({ + typeVarName: typeParams[typeVarWithDefaultIndex].shared.name, + variadicName: typeParams[firstVariadicIndex].shared.name, + }), + node.d.typeParams ? node.d.typeParams.d.params[typeVarWithDefaultIndex].d.name : node.d.name + ); + } + } + + // Validate the default types for all type parameters. + classType.shared.typeParams.forEach((typeParam, index) => { + let bestErrorNode: ExpressionNode = node.d.name; + if (node.d.typeParams && index < node.d.typeParams.d.params.length) { + const typeParamNode = node.d.typeParams.d.params[index]; + bestErrorNode = typeParamNode.d.defaultExpr ?? typeParamNode.d.name; + } + validateTypeParamDefault( + bestErrorNode, + typeParam, + classType.shared.typeParams.slice(0, index), + classType.shared.typeVarScopeId! + ); + }); + + if (!computeMroLinearization(classType)) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.methodOrdering(), node.d.name); + } + + // The scope for this class becomes the "fields" for the corresponding type. + const innerScope = ScopeUtils.getScopeForNode(node.d.suite); + classType.shared.fields = innerScope?.symbolTable + ? new Map(innerScope.symbolTable) + : new Map(); + + // Determine whether the class should inherit __hash__. If a class defines + // __eq__ but doesn't define __hash__ then __hash__ is set to None. 
+ if (classType.shared.fields.has('__eq__') && !classType.shared.fields.has('__hash__')) { + classType.shared.fields.set( + '__hash__', + Symbol.createWithType( + SymbolFlags.ClassMember | + SymbolFlags.ClassVar | + SymbolFlags.IgnoredForProtocolMatch | + SymbolFlags.IgnoredForOverrideChecks, + getNoneType() + ) + ); + } + + // Determine whether the class's instance variables are constrained + // to those defined by __slots__. We need to do this prior to dataclass + // processing because dataclasses can implicitly add to the slots + // list. + const slotsNames = innerScope?.getSlotsNames(); + if (slotsNames) { + classType.shared.localSlotsNames = slotsNames; + } + + // Determine if the class should be a "pseudo-generic" class, characterized + // by having an __init__ method with parameters that lack type annotations. + // For such classes, we'll treat them as generic, with the type arguments provided + // by the callers of the constructor. + if (!fileInfo.isStubFile && classType.shared.typeParams.length === 0) { + const initMethod = classType.shared.fields.get('__init__'); + if (initMethod) { + const initDecls = initMethod.getTypedDeclarations(); + if (initDecls.length === 1 && initDecls[0].type === DeclarationType.Function) { + const initDeclNode = initDecls[0].node; + const initParams = initDeclNode.d.params; + + if ( + initParams.length > 1 && + !initParams.some( + (param, index) => !!ParseTreeUtils.getTypeAnnotationForParam(initDeclNode, index) + ) + ) { + const genericParams = initParams.filter( + (param, index) => + index > 0 && + param.d.name && + param.d.category === ParamCategory.Simple && + !param.d.defaultValue + ); + + if (genericParams.length > 0) { + classType.shared.flags |= ClassTypeFlags.PseudoGenericClass; + + // Create a type parameter for each simple, named parameter + // in the __init__ method. 
+ classType.shared.typeParams = genericParams.map((param) => { + const typeVar = TypeVarType.createInstance( + getPseudoGenericTypeVarName(param.d.name!.d.value) + ); + typeVar.shared.isSynthesized = true; + typeVar.priv.scopeId = ParseTreeUtils.getScopeIdForNode(initDeclNode); + typeVar.shared.boundType = UnknownType.create(); + return TypeVarType.cloneForScopeId( + typeVar, + ParseTreeUtils.getScopeIdForNode(node), + node.d.name.d.value, + TypeVarScopeType.Class + ); + }); + } + } + } + } + } + + // Determine if the class has a custom __class_getitem__ method. This applies + // only to classes that have no type parameters, since those with type parameters + // are assumed to follow normal subscripting semantics for generic classes. + if (classType.shared.typeParams.length === 0 && !ClassType.isBuiltIn(classType, 'type')) { + if ( + classType.shared.baseClasses.some( + (baseClass) => isInstantiableClass(baseClass) && ClassType.hasCustomClassGetItem(baseClass) + ) || + classType.shared.fields.has('__class_getitem__') + ) { + classType.shared.flags |= ClassTypeFlags.HasCustomClassGetItem; + } + } + + // Determine the effective metaclass. + if (metaclassNode) { + let metaclassType = getTypeOfExpression(metaclassNode, exprFlags).type; + if (isInstantiableClass(metaclassType) || isUnknown(metaclassType)) { + if (requiresSpecialization(metaclassType, { ignorePseudoGeneric: true })) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.metaclassIsGeneric(), + metaclassNode + ); + } + + // If the specified metaclass is type(T), use the metaclass + // of T if it's known. 
+ if ( + TypeBase.getInstantiableDepth(metaclassType) > 0 && + isClass(metaclassType) && + metaclassType.shared.effectiveMetaclass && + isClass(metaclassType.shared.effectiveMetaclass) + ) { + metaclassType = metaclassType.shared.effectiveMetaclass; + } + + classType.shared.declaredMetaclass = metaclassType; + if (isInstantiableClass(metaclassType)) { + if (isEnumMetaclass(metaclassType)) { + classType.shared.flags |= ClassTypeFlags.EnumClass; + } + + if (derivesFromStdlibClass(metaclassType, 'ABCMeta')) { + classType.shared.flags |= ClassTypeFlags.SupportsAbstractMethods; + } + } + } + } + + const effectiveMetaclass = computeEffectiveMetaclass(classType, node.d.name); + + // Clear the "partially constructed" flag. + classType.shared.flags &= ~ClassTypeFlags.PartiallyEvaluated; + + // Now determine the decorated type of the class. + let decoratedType: Type = classType; + let foundUnknown = false; + + for (let i = node.d.decorators.length - 1; i >= 0; i--) { + const decorator = node.d.decorators[i]; + + const newDecoratedType = useSignatureTracker(node.parent ?? node, () => + applyClassDecorator(evaluatorInterface, decoratedType, classType, decorator) + ); + const unknownOrAny = containsAnyOrUnknown(newDecoratedType, /* recurse */ false); + + if (unknownOrAny && isUnknown(unknownOrAny)) { + // Report this error only on the first unknown type. + if (!foundUnknown) { + addDiagnostic( + DiagnosticRule.reportUntypedClassDecorator, + LocMessage.classDecoratorTypeUnknown(), + node.d.decorators[i].d.expr + ); + + foundUnknown = true; + } + } else { + // Apply the decorator only if the type is known. + decoratedType = newDecoratedType; + } + } + + // Determine whether this class derives from (or has a metaclass) that imbues + // it with dataclass-like behaviors. If so, we'll apply those here. 
+ let dataClassBehaviors: DataClassBehaviors | undefined; + if (isInstantiableClass(effectiveMetaclass) && effectiveMetaclass.shared.classDataClassTransform) { + dataClassBehaviors = effectiveMetaclass.shared.classDataClassTransform; + } else { + const baseClassDataTransform = classType.shared.mro.find((mroClass) => { + return ( + isClass(mroClass) && + mroClass.shared.classDataClassTransform !== undefined && + !ClassType.isSameGenericClass(mroClass, classType) + ); + }); + + if (baseClassDataTransform) { + dataClassBehaviors = (baseClassDataTransform as ClassType).shared.classDataClassTransform; + } + } + + if (dataClassBehaviors) { + applyDataClassClassBehaviorOverrides( + evaluatorInterface, + node.d.name, + classType, + initSubclassArgs, + dataClassBehaviors + ); + } + + // Run any deferred class completions that depend on this class. + runDeferredClassCompletions(classType); + + // If there are any outstanding deferred class completions registered that + // were not removed by the call to runDeferredClassCompletions, assume that + // the current class may depend on them and register for deferred completion. + registerDeferredClassCompletion(node, /* dependsUpon */ undefined); + + // Synthesize TypedDict methods. + if (ClassType.isTypedDictClass(classType)) { + // TypedDict classes must derive only from other TypedDict classes. 
+ let foundInvalidBaseClass = false; + const diag = new DiagnosticAddendum(); + + classType.shared.baseClasses.forEach((baseClass) => { + if ( + isClass(baseClass) && + !ClassType.isTypedDictClass(baseClass) && + !ClassType.isBuiltIn(baseClass, ['_TypedDict', 'TypedDictFallback', 'Generic']) + ) { + foundInvalidBaseClass = true; + diag.addMessage(LocAddendum.typedDictBaseClass().format({ type: baseClass.shared.name })); + } + }); + + if (foundInvalidBaseClass) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictBaseClass() + diag.getString(), + node.d.name + ); + } + + synthesizeTypedDictClassMethods(evaluatorInterface, node, classType); + } + + // Synthesize dataclass methods. + if (ClassType.isDataClass(classType) || isNamedTupleSubclass) { + const skipSynthesizedInit = ClassType.isDataClassSkipGenerateInit(classType); + let hasExistingInitMethod = skipSynthesizedInit; + + // See if there's already a non-synthesized __init__ method. + // We shouldn't override it. + if (!skipSynthesizedInit) { + const initSymbol = classType.shared.fields.get('__init__'); + if (initSymbol && initSymbol.isClassMember()) { + hasExistingInitMethod = true; + } + } + + let skipSynthesizeHash = false; + const hashSymbol = classType.shared.fields.get('__hash__'); + + // If there is a hash symbol defined in the class (i.e. one that we didn't + // synthesize above), then we shouldn't synthesize a new one for the dataclass. + if (hashSymbol && hashSymbol.isClassMember() && !hashSymbol.getSynthesizedType()) { + skipSynthesizeHash = true; + } + + const synthesizeMethods = () => + synthesizeDataClassMethods( + evaluatorInterface, + node, + classType, + isNamedTupleSubclass, + skipSynthesizedInit, + hasExistingInitMethod, + skipSynthesizeHash + ); + + // If this is a NamedTuple subclass, immediately synthesize dataclass methods + // because we also need to update the MRO classes in this case. 
For regular + // dataclasses, we'll defer the method synthesis to avoid circular dependencies. + if (isNamedTupleSubclass) { + synthesizeMethods(); + } else { + classType.shared.synthesizeMethodsDeferred = () => { + delete classType.shared.synthesizeMethodsDeferred; + synthesizeMethods(); + }; + } + } + + // Build a complete list of all slots names defined by the class hierarchy. + // This needs to be done after dataclass processing. + classType.shared.calculateInheritedSlotsNamesDeferred = () => { + delete classType.shared.calculateInheritedSlotsNamesDeferred; + + if (classType.shared.localSlotsNames) { + let isLimitedToSlots = true; + const extendedSlotsNames = Array.from(classType.shared.localSlotsNames); + + classType.shared.baseClasses.forEach((baseClass) => { + if (isInstantiableClass(baseClass)) { + if ( + !ClassType.isBuiltIn(baseClass, 'object') && + !ClassType.isBuiltIn(baseClass, 'type') && + !ClassType.isBuiltIn(baseClass, 'Generic') + ) { + const inheritedSlotsNames = ClassType.getInheritedSlotsNames(baseClass); + if (inheritedSlotsNames) { + appendArray(extendedSlotsNames, inheritedSlotsNames); + } else { + isLimitedToSlots = false; + } + } + } else { + isLimitedToSlots = false; + } + }); + + if (isLimitedToSlots) { + classType.shared.inheritedSlotsNamesCached = extendedSlotsNames; + } + } + }; + + // If Any is defined using a class statement, treat it as a special form. + if (node.d.name.d.value === 'Any' && fileInfo.isTypingStubFile) { + decoratedType = AnyType.createSpecialForm(); + } + + // Update the undecorated class type. + writeTypeCache(node.d.name, { type: classType }, EvalFlags.None); + + // Update the decorated class type. + writeTypeCache(node, { type: decoratedType }, EvalFlags.None); + + return { classType, decoratedType }; + }); + } + + function buildTypeParamsFromTypeArgs(classType: ClassType): TypeVarType[] { + const typeParams: TypeVarType[] = []; + const typeArgs = classType.priv.typeArgs ?? 
[]; + + typeArgs.forEach((typeArg, index) => { + if (isTypeVar(typeArg)) { + typeParams.push(typeArg); + return; + } + + // Synthesize a dummy type parameter. + const typeVar = TypeVarType.createInstance(`__P${index}`); + typeVar.shared.isSynthesized = true; + typeParams.push(typeVar); + }); + + return typeParams; + } + + // Determines whether the type parameters has a default that refers to another + // type parameter. If so, validates that it is in the list of "live" type + // parameters and updates the scope of the type parameter referred to in the + // default type expression. + function validateTypeParamDefault( + errorNode: ExpressionNode, + typeParam: TypeVarType, + otherLiveTypeParams: TypeVarType[], + scopeId: TypeVarScopeId + ) { + if (!typeParam.shared.isDefaultExplicit && !typeParam.shared.isSynthesized && !TypeVarType.isSelf(typeParam)) { + const typeVarWithDefault = otherLiveTypeParams.find( + (param) => param.shared.isDefaultExplicit && param.priv.scopeId === scopeId + ); + + if (typeVarWithDefault) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarWithoutDefault().format({ + name: typeParam.shared.name, + other: typeVarWithDefault.shared.name, + }), + errorNode + ); + } + return; + } + + const invalidTypeVars = new Set(); + validateTypeVarDefault(typeParam, otherLiveTypeParams, invalidTypeVars); + + // If we found one or more unapplied type variable, report an error. 
+ if (invalidTypeVars.size > 0) { + const diag = new DiagnosticAddendum(); + invalidTypeVars.forEach((name) => { + diag.addMessage(LocAddendum.typeVarDefaultOutOfScope().format({ name })); + }); + + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarDefaultInvalidTypeVar().format({ + name: typeParam.shared.name, + }) + diag.getString(), + errorNode + ); + } + } + + function inferVarianceForClass(classType: ClassType): void { + if (!classType.shared.requiresVarianceInference) { + return; + } + + // Presumptively mark the variance inference as complete. This + // prevents potential recursion. + classType.shared.requiresVarianceInference = false; + + // Presumptively mark the computed variance to "unknown". We'll + // replace this below once the variance has been inferred. + classType.shared.typeParams.forEach((param) => { + if (param.shared.declaredVariance === Variance.Auto) { + param.priv.computedVariance = Variance.Unknown; + } + }); + + const dummyTypeObject = ClassType.createInstantiable( + '__varianceDummy', + '', + '', + Uri.empty(), + 0, + 0, + undefined, + undefined + ); + + classType.shared.typeParams.forEach((param, paramIndex) => { + // Skip TypeVarTuples and ParamSpecs. + if (isTypeVarTuple(param) || isParamSpec(param)) { + return; + } + + // Skip type variables without auto-variance. + if (param.shared.declaredVariance !== Variance.Auto) { + return; + } + + // Replace all type arguments with a dummy type except for the + // TypeVar of interest, which is replaced with an object instance. + const srcTypeArgs = classType.shared.typeParams.map((p, i) => { + if (isTypeVarTuple(p)) { + return p; + } + return i === paramIndex ? getObjectType() : dummyTypeObject; + }); + + // Replace all type arguments with a dummy type except for the + // TypeVar of interest, which is replaced with itself. + const destTypeArgs = classType.shared.typeParams.map((p, i) => { + return i === paramIndex || isTypeVarTuple(p) ? 
p : dummyTypeObject; + }); + + const srcType = ClassType.specialize(classType, srcTypeArgs); + const destType = ClassType.specialize(classType, destTypeArgs); + + const isDestSubtypeOfSrc = assignClassToSelf( + srcType, + destType, + Variance.Covariant, + /* ignoreBaseClassVariance */ false + ); + + let inferredVariance: Variance; + if (isDestSubtypeOfSrc) { + inferredVariance = Variance.Covariant; + } else { + const isSrcSubtypeOfDest = assignClassToSelf( + destType, + srcType, + Variance.Contravariant, + /* ignoreBaseClassVariance */ false + ); + if (isSrcSubtypeOfDest) { + inferredVariance = Variance.Contravariant; + } else { + inferredVariance = Variance.Invariant; + } + } + + // We assume here that we don't need to clone the type var object + // because it was already cloned when it was associated with this + // class scope. + classType.shared.typeParams[paramIndex].priv.computedVariance = inferredVariance; + }); + } + + function evaluateTypeParamList(node: TypeParameterListNode): TypeVarType[] { + const paramTypes: TypeVarType[] = []; + const typeParamScope = AnalyzerNodeInfo.getScope(node); + + node.d.params.forEach((param) => { + const paramSymbol = typeParamScope?.symbolTable.get(param.d.name.d.value); + if (!paramSymbol) { + // This can happen if the code is unreachable. 
+ return; + } + + const typeOfParam = getDeclaredTypeOfSymbol(paramSymbol, param.d.name)?.type; + if (!typeOfParam || !isTypeVar(typeOfParam)) { + return; + } + + writeTypeCache(param.d.name, { type: typeOfParam }, EvalFlags.None); + paramTypes.push(typeOfParam); + }); + + return paramTypes; + } + + function computeEffectiveMetaclass(classType: ClassType, errorNode: ParseNode) { + let effectiveMetaclass = classType.shared.declaredMetaclass; + let reportedMetaclassConflict = false; + + if (!effectiveMetaclass || isInstantiableClass(effectiveMetaclass)) { + for (const baseClass of classType.shared.baseClasses) { + if (isInstantiableClass(baseClass)) { + const baseClassMeta = baseClass.shared.effectiveMetaclass ?? prefetched?.typeClass; + if (baseClassMeta && isInstantiableClass(baseClassMeta)) { + // Make sure there is no metaclass conflict. + if (!effectiveMetaclass) { + effectiveMetaclass = baseClassMeta; + } else if ( + derivesFromClassRecursive(baseClassMeta, effectiveMetaclass, /* ignoreUnknown */ false) + ) { + effectiveMetaclass = baseClassMeta; + } else if ( + !derivesFromClassRecursive(effectiveMetaclass, baseClassMeta, /* ignoreUnknown */ false) + ) { + if (!reportedMetaclassConflict) { + const diag = new DiagnosticAddendum(); + + diag.addMessage( + LocAddendum.metaclassConflict().format({ + metaclass1: printType(convertToInstance(effectiveMetaclass)), + metaclass2: printType(convertToInstance(baseClassMeta)), + }) + ); + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.metaclassConflict() + diag.getString(), + errorNode + ); + + // Don't report more than once. + reportedMetaclassConflict = true; + } + } + } else { + effectiveMetaclass = baseClassMeta ? UnknownType.create() : undefined; + break; + } + } else { + // If one of the base classes is unknown, then the effective + // metaclass is also unknowable. 
+ effectiveMetaclass = UnknownType.create(); + break; + } + } + } + + // If we haven't found an effective metaclass, assume "type", which + // is the metaclass for "object". + if (!effectiveMetaclass) { + const typeMetaclass = getBuiltInType(errorNode, 'type'); + effectiveMetaclass = + typeMetaclass && isInstantiableClass(typeMetaclass) ? typeMetaclass : UnknownType.create(); + } + + classType.shared.effectiveMetaclass = effectiveMetaclass; + + return effectiveMetaclass; + } + + // Verifies that the type variables provided outside of "Generic" + // or "Protocol" are also provided within the "Generic". For example: + // class Foo(Mapping[K, V], Generic[V]) + // is illegal because K is not included in Generic. + function verifyGenericTypeParams( + errorNode: ExpressionNode, + typeVars: TypeVarType[], + genericTypeVars: TypeVarType[] + ) { + const missingFromGeneric = typeVars.filter((typeVar) => { + return !genericTypeVars.some((genericTypeVar) => genericTypeVar.shared.name === typeVar.shared.name); + }); + + if (missingFromGeneric.length > 0) { + const diag = new DiagnosticAddendum(); + diag.addMessage( + LocAddendum.typeVarsMissing().format({ + names: missingFromGeneric.map((typeVar) => `"${typeVar.shared.name}"`).join(', '), + }) + ); + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarsNotInGenericOrProtocol() + diag.getString(), + errorNode + ); + } + } + + // Records the fact that the specified class requires "deferred completion" because + // one of its base classes has not yet been fully evaluated. If the caller passes + // undefined for "dependsUpon", then the class is added to all outstanding deferred + // completions. + function registerDeferredClassCompletion(classToComplete: ClassNode, dependsUpon: ClassType | undefined) { + if (dependsUpon) { + // See if there is an existing entry for this dependency. 
+ const entry = deferredClassCompletions.find((e) => + ClassType.isSameGenericClass(e.dependsUpon, dependsUpon) + ); + if (entry) { + entry.classesToComplete.push(classToComplete); + } else { + deferredClassCompletions.push({ dependsUpon, classesToComplete: [classToComplete] }); + } + } else { + deferredClassCompletions.forEach((e) => { + e.classesToComplete.push(classToComplete); + }); + } + } + + // Runs any registered "deferred class completions" that depend on the specified + // class type. This allows us to complete any work that requires dependent classes + // to be completed. + function runDeferredClassCompletions(type: ClassType) { + deferredClassCompletions.forEach((e) => { + if (ClassType.isSameGenericClass(e.dependsUpon, type)) { + e.classesToComplete.forEach((classNode) => { + const classType = readTypeCache(classNode.d.name, EvalFlags.None); + if (classType) { + completeClassTypeDeferred(classType as ClassType, classNode.d.name); + } + }); + } + }); + + // Remove any completions that depend on this type. + deferredClassCompletions = deferredClassCompletions.filter( + (e) => !ClassType.isSameGenericClass(e.dependsUpon, type) + ); + } + + // Recomputes the MRO and effective metaclass for the class after dependent + // classes have been fully constructed. + function completeClassTypeDeferred(type: ClassType, errorNode: ParseNode) { + // Recompute the MRO linearization. + if (!computeMroLinearization(type)) { + addDiagnostic(DiagnosticRule.reportGeneralTypeIssues, LocMessage.methodOrdering(), errorNode); + } + + // Recompute the effective metaclass. + computeEffectiveMetaclass(type, errorNode); + } + + function validateInitSubclassArgs(node: ClassNode, classType: ClassType) { + // Collect arguments that will be passed to the `__init_subclass__` + // method described in PEP 487 and validate it. 
+ const argList: Arg[] = []; + + node.d.arguments.forEach((arg) => { + if (arg.d.name && arg.d.name.d.value !== 'metaclass') { + argList.push({ + argCategory: ArgCategory.Simple, + node: arg, + name: arg.d.name, + valueExpression: arg.d.valueExpr, + }); + } + }); + + let newMethodMember: ClassMember | undefined; + + // See if the class has a metaclass that overrides `__new__`. If so, we + // will validate the signature of the `__new__` method. + if (classType.shared.effectiveMetaclass && isClass(classType.shared.effectiveMetaclass)) { + // If the metaclass is 'type' or 'ABCMeta', we'll assume it will call through to + // __init_subclass__, so we'll skip the `__new__` method check. We need to exclude + // TypedDict classes here because _TypedDict uses ABCMeta as its metaclass, but its + // typeshed definition doesn't override __init_subclass__. + const metaclassCallsInitSubclass = + ClassType.isBuiltIn(classType.shared.effectiveMetaclass, ['ABCMeta', 'type']) && + !ClassType.isTypedDictClass(classType); + + if (!metaclassCallsInitSubclass) { + // See if the metaclass has a `__new__` method that accepts keyword parameters. + newMethodMember = lookUpClassMember( + classType.shared.effectiveMetaclass, + '__new__', + MemberAccessFlags.SkipTypeBaseClass + ); + } + } + + if (newMethodMember) { + const newMethodType = getTypeOfMember(newMethodMember); + if (isFunction(newMethodType)) { + const paramListDetails = getParamListDetails(newMethodType); + + if (paramListDetails.firstKeywordOnlyIndex !== undefined) { + // Build a map of the keyword-only parameters. 
+ const paramMap = new Map(); + for (let i = paramListDetails.firstKeywordOnlyIndex; i < paramListDetails.params.length; i++) { + const paramInfo = paramListDetails.params[i]; + if ( + paramInfo.param.category === ParamCategory.Simple && + paramInfo.param.name && + paramInfo.kind !== ParamKind.Positional + ) { + paramMap.set(paramInfo.param.name, i); + } + } + + argList.forEach((arg) => { + if (arg.argCategory === ArgCategory.Simple && arg.name) { + const paramIndex = paramMap.get(arg.name.d.value) ?? paramListDetails.kwargsIndex; + + if (paramIndex !== undefined) { + const paramInfo = paramListDetails.params[paramIndex]; + const argParam: ValidateArgTypeParams = { + paramCategory: paramInfo.param.category, + paramType: paramInfo.type, + requiresTypeVarMatching: false, + argument: arg, + errorNode: arg.valueExpression ?? node.d.name, + }; + + validateArgType( + argParam, + new ConstraintTracker(), + { type: newMethodType }, + { skipUnknownArgCheck: true } + ); + paramMap.delete(arg.name.d.value); + } else { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.paramNameMissing().format({ name: arg.name.d.value }), + arg.name ?? node.d.name + ); + } + } + }); + + // See if we have any remaining unmatched parameters without + // default values. + const unassignedParams: string[] = []; + paramMap.forEach((index, paramName) => { + const paramInfo = paramListDetails.params[index]; + if (!paramInfo.defaultType) { + unassignedParams.push(paramName); + } + }); + + if (unassignedParams.length > 0) { + const missingParamNames = unassignedParams.map((p) => `"${p}"`).join(', '); + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + unassignedParams.length === 1 + ? 
LocMessage.argMissingForParam().format({ name: missingParamNames }) + : LocMessage.argMissingForParams().format({ names: missingParamNames }), + node.d.name + ); + } + } + } + } else { + // If there was no custom metaclass __new__ method, see if there is an __init_subclass__ + // method present somewhere in the class hierarchy. + const initSubclassMethodInfo = getTypeOfBoundMember( + node.d.name, + classType, + '__init_subclass__', + /* usage */ undefined, + /* diag */ undefined, + MemberAccessFlags.SkipClassMembers | + MemberAccessFlags.SkipOriginalClass | + MemberAccessFlags.SkipAttributeAccessOverride + ); + + if (initSubclassMethodInfo) { + const initSubclassMethodType = initSubclassMethodInfo.type; + + if (initSubclassMethodType && initSubclassMethodInfo.classType) { + const callResult = validateCallArgs( + node.d.name, + argList, + { type: initSubclassMethodType }, + /* constraints */ undefined, + /* skipUnknownArgCheck */ false, + makeInferenceContext(getNoneType()) + ); + + if (callResult.argumentErrors) { + const diag = addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.initSubclassCallFailed(), + node.d.name + ); + + const initSubclassFunction = isOverloaded(initSubclassMethodType) + ? OverloadedType.getOverloads(initSubclassMethodType)[0] + : initSubclassMethodType; + const initSubclassDecl = isFunction(initSubclassFunction) + ? initSubclassFunction.shared.declaration + : undefined; + + if (diag && initSubclassDecl) { + diag.addRelatedInfo( + LocAddendum.initSubclassLocation().format({ + name: printType(convertToInstance(initSubclassMethodInfo.classType)), + }), + initSubclassDecl.uri, + initSubclassDecl.range + ); + } + } + } + } + } + + // Evaluate all of the expressions so they are checked and marked referenced. 
+ argList.forEach((arg) => { + if (arg.valueExpression) { + getTypeOfExpression(arg.valueExpression); + } + }); + } + + function getTypeOfFunction(node: FunctionNode): FunctionTypeResult | undefined { + initializePrefetchedTypes(node); + + // Is this predecorated function type cached? + let functionType = readTypeCache(node.d.name, EvalFlags.None); + + if (functionType) { + if (!isFunction(functionType)) { + // This can happen in certain rare circumstances where the + // function declaration falls within an unreachable code block. + return undefined; + } + + if (FunctionType.isPartiallyEvaluated(functionType)) { + return { functionType, decoratedType: functionType }; + } + } else { + functionType = getTypeOfFunctionPredecorated(node); + } + + // Is the decorated function type cached? + let decoratedType = readTypeCache(node, EvalFlags.None); + if (decoratedType) { + return { functionType, decoratedType }; + } + + // Populate the cache with a temporary value to handle recursion. + writeTypeCache(node, { type: functionType }, /* flags */ undefined); + + // If it's an async function, wrap the return type in an Awaitable or Generator. + // Set the "partially evaluated" flag around this logic to detect recursion. + functionType.shared.flags |= FunctionTypeFlags.PartiallyEvaluated; + const preDecoratedType = node.d.isAsync ? createAsyncFunction(node, functionType) : functionType; + + // Apply all of the decorators in reverse order. + decoratedType = preDecoratedType; + let foundUnknown = false; + for (let i = node.d.decorators.length - 1; i >= 0; i--) { + const decorator = node.d.decorators[i]; + + const newDecoratedType = useSignatureTracker(node.parent ?? 
node, () => { + assert(decoratedType !== undefined); + return applyFunctionDecorator(evaluatorInterface, decoratedType, functionType, decorator, node); + }); + + const unknownOrAny = containsAnyOrUnknown(newDecoratedType, /* recurse */ false); + + if (unknownOrAny && isUnknown(unknownOrAny)) { + // Report this error only on the first unknown type. + if (!foundUnknown) { + addDiagnostic( + DiagnosticRule.reportUntypedFunctionDecorator, + LocMessage.functionDecoratorTypeUnknown(), + node.d.decorators[i].d.expr + ); + + foundUnknown = true; + } + } else { + // Apply the decorator only if the type is known. + decoratedType = newDecoratedType; + } + } + + // See if there are any overloads provided by previous function declarations. + if (isFunction(decoratedType)) { + decoratedType.shared.deprecatedMessage = functionType.shared.deprecatedMessage; + + if (FunctionType.isOverloaded(decoratedType)) { + // Mark all the parameters as accessed. + node.d.params.forEach((param) => { + markParamAccessed(param); + }); + } + } + + decoratedType = addOverloadsToFunctionType(evaluatorInterface, node, decoratedType); + + writeTypeCache(node, { type: decoratedType }, EvalFlags.None); + + // Now that the decorator has been applied, we can clear the + // "partially evaluated" flag. + functionType.shared.flags &= ~FunctionTypeFlags.PartiallyEvaluated; + + return { functionType, decoratedType }; + } + + // Evaluates the type of a "def" statement without applying an async + // modifier or any decorators. + function getTypeOfFunctionPredecorated(node: FunctionNode): FunctionType { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + + // Is this type already cached? 
+ const cachedFunctionType = readTypeCache(node.d.name, EvalFlags.None); + + if (cachedFunctionType && isFunction(cachedFunctionType)) { + return cachedFunctionType; + } + + let functionDecl: FunctionDeclaration | undefined; + const decl = AnalyzerNodeInfo.getDeclaration(node); + if (decl) { + functionDecl = decl as FunctionDeclaration; + } + + // There was no cached type, so create a new one. + // Retrieve the containing class node if the function is a method. + const containingClassNode = ParseTreeUtils.getEnclosingClass(node, /* stopAtFunction */ true); + let containingClassType: ClassType | undefined; + if (containingClassNode) { + containingClassType = getTypeOfClass(containingClassNode)?.classType; + } + + const functionInfo = getFunctionInfoFromDecorators(evaluatorInterface, node, !!containingClassNode); + let functionFlags = functionInfo.flags; + if (functionDecl?.isGenerator) { + functionFlags |= FunctionTypeFlags.Generator; + } + + if (fileInfo.isStubFile) { + functionFlags |= FunctionTypeFlags.StubDefinition; + } else if (fileInfo.isInPyTypedPackage) { + functionFlags |= FunctionTypeFlags.PyTypedDefinition; + } + + if (node.d.isAsync) { + functionFlags |= FunctionTypeFlags.Async; + } + + const functionType = FunctionType.createInstance( + node.d.name.d.value, + getFunctionFullName(node, fileInfo.moduleName, node.d.name.d.value), + fileInfo.moduleName, + functionFlags | FunctionTypeFlags.PartiallyEvaluated, + ParseTreeUtils.getDocString(node.d.suite.d.statements) + ); + + functionType.shared.typeVarScopeId = ParseTreeUtils.getScopeIdForNode(node); + functionType.shared.deprecatedMessage = functionInfo.deprecationMessage; + functionType.shared.methodClass = containingClassType; + + if (node.d.name.d.value === '__init__' || node.d.name.d.value === '__new__') { + if (containingClassNode) { + functionType.priv.constructorTypeVarScopeId = ParseTreeUtils.getScopeIdForNode(containingClassNode); + } + } + + if (fileInfo.isBuiltInStubFile || 
fileInfo.isTypingStubFile || fileInfo.isTypingExtensionsStubFile) { + // Mark the function as a built-in stdlib function. + functionType.shared.flags |= FunctionTypeFlags.BuiltIn; + } + + functionType.shared.declaration = functionDecl; + + // Allow recursion by caching and registering the partially-constructed function type. + const scope = ScopeUtils.getScopeForNode(node); + const functionSymbol = scope?.lookUpSymbolRecursive(node.d.name.d.value); + if (functionDecl && functionSymbol) { + setSymbolResolutionPartialType(functionSymbol.symbol, functionDecl, functionType); + } + + return invalidateTypeCacheIfCanceled(() => { + writeTypeCache(node.d.name, { type: functionType }, /* flags */ undefined); + + // Is this an "__init__" method within a pseudo-generic class? If so, + // we'll add generic types to the constructor's parameters. + const addGenericParamTypes = + containingClassType && + ClassType.isPseudoGenericClass(containingClassType) && + node.d.name.d.value === '__init__'; + + const paramTypes: Type[] = []; + + // Determine if the first parameter should be skipped for comment-based + // function annotations. + let firstCommentAnnotationIndex = 0; + if (containingClassType && (functionType.shared.flags & FunctionTypeFlags.StaticMethod) === 0) { + firstCommentAnnotationIndex = 1; + } + + // If there is a function annotation comment, validate that it has the correct + // number of parameter annotations. + if (node.d.funcAnnotationComment && !node.d.funcAnnotationComment.d.isEllipsis) { + const expected = node.d.params.length - firstCommentAnnotationIndex; + const received = node.d.funcAnnotationComment.d.paramAnnotations.length; + + // For methods with "self" or "cls" parameters, the annotation list + // can either include or exclude the annotation for the first parameter. 
+ if (firstCommentAnnotationIndex > 0 && received === node.d.params.length) { + firstCommentAnnotationIndex = 0; + } else if (received !== expected) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.annotatedParamCountMismatch().format({ + expected, + received, + }), + node.d.funcAnnotationComment + ); + } + } + + // If this function uses PEP 695 syntax for type parameters, + // accumulate the list of type parameters upfront. + const typeParamsSeen: TypeVarType[] = []; + if (node.d.typeParams) { + functionType.shared.typeParams = evaluateTypeParamList(node.d.typeParams).map((typeParam) => + convertToInstance(typeParam) + ); + } else { + functionType.shared.typeParams = typeParamsSeen; + } + + let paramsArePositionOnly = true; + const isFirstParamClsOrSelf = + containingClassType && + (FunctionType.isClassMethod(functionType) || + FunctionType.isInstanceMethod(functionType) || + FunctionType.isConstructorMethod(functionType)); + const firstNonClsSelfParamIndex = isFirstParamClsOrSelf ? 1 : 0; + + node.d.params.forEach((param, index) => { + let paramType: Type | undefined; + let annotatedType: Type | undefined; + let paramTypeNode: ExpressionNode | undefined; + + if (param.d.name) { + if (index === 0 && isFirstParamClsOrSelf) { + // Mark "self/cls" as accessed. + markParamAccessed(param); + } else if (FunctionType.isAbstractMethod(functionType)) { + // Mark all parameters in abstract methods as accessed. + markParamAccessed(param); + } else if (containingClassType && ClassType.isProtocolClass(containingClassType)) { + // Mark all parameters in protocol methods as accessed. 
+ markParamAccessed(param); + } + } + + if (param.d.annotation) { + paramTypeNode = param.d.annotation; + } else if (param.d.annotationComment) { + paramTypeNode = param.d.annotationComment; + } else if (node.d.funcAnnotationComment && !node.d.funcAnnotationComment.d.isEllipsis) { + const adjustedIndex = index - firstCommentAnnotationIndex; + if (adjustedIndex >= 0 && adjustedIndex < node.d.funcAnnotationComment.d.paramAnnotations.length) { + paramTypeNode = node.d.funcAnnotationComment.d.paramAnnotations[adjustedIndex]; + } + } + + if (paramTypeNode) { + if ((functionInfo.flags & FunctionTypeFlags.NoTypeCheck) !== 0) { + annotatedType = UnknownType.create(); + } else { + annotatedType = getTypeOfParamAnnotation(paramTypeNode, param.d.category); + } + + if (annotatedType) { + addTypeVarsToListIfUnique( + typeParamsSeen, + getTypeVarArgsRecursive(annotatedType), + functionType.shared.typeVarScopeId + ); + } + + if (isTypeVarTuple(annotatedType) && !annotatedType.priv.isUnpacked) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.unpackedTypeVarTupleExpected().format({ + name1: annotatedType.shared.name, + name2: annotatedType.shared.name, + }), + paramTypeNode + ); + annotatedType = UnknownType.create(); + } + } + + if (!annotatedType && addGenericParamTypes) { + if ( + index > 0 && + param.d.category === ParamCategory.Simple && + param.d.name && + !param.d.defaultValue + ) { + const typeParamName = getPseudoGenericTypeVarName(param.d.name.d.value); + annotatedType = containingClassType!.shared.typeParams.find( + (param) => param.shared.name === typeParamName + ); + } + } + + if (annotatedType) { + const adjustedAnnotatedType = adjustParamAnnotatedType(param, annotatedType); + if (adjustedAnnotatedType !== annotatedType) { + annotatedType = adjustedAnnotatedType; + } + } + + let defaultValueType: Type | undefined; + if (param.d.defaultValue) { + // If this is a stub file, a protocol, an overload, or a class + // whose body is a placeholder 
implementation, treat a "...", as + // an "Any" value. + let treatEllipsisAsAny = fileInfo.isStubFile || ParseTreeUtils.isSuiteEmpty(node.d.suite); + if (containingClassType && ClassType.isProtocolClass(containingClassType)) { + treatEllipsisAsAny = true; + } + if (FunctionType.isOverloaded(functionType) || FunctionType.isAbstractMethod(functionType)) { + treatEllipsisAsAny = true; + } + + defaultValueType = getTypeOfExpression( + param.d.defaultValue, + treatEllipsisAsAny ? EvalFlags.ConvertEllipsisToAny : EvalFlags.None, + makeInferenceContext(annotatedType) + ).type; + } + + if (annotatedType) { + // If there was both a type annotation and a default value, verify + // that the default value matches the annotation. + if (param.d.defaultValue && defaultValueType) { + const diagAddendum = new DiagnosticAddendum(); + + if (!assignType(annotatedType, defaultValueType, diagAddendum)) { + addDiagnostic( + DiagnosticRule.reportArgumentType, + LocMessage.paramAssignmentMismatch().format({ + sourceType: printType(defaultValueType), + paramType: printType(annotatedType), + }) + diagAddendum.getString(), + param.d.defaultValue + ); + } + } + + paramType = annotatedType; + } + + // Determine whether we need to insert an implied position-only parameter. + // This is needed when a function's parameters are named using the old-style + // way of specifying position-only parameters. + if (index >= firstNonClsSelfParamIndex) { + let isImplicitPositionOnlyParam = false; + + if (param.d.category === ParamCategory.Simple && param.d.name) { + if ( + isPrivateName(param.d.name.d.value) && + !node.d.params.some((p) => p.d.category === ParamCategory.Simple && !p.d.name) + ) { + isImplicitPositionOnlyParam = true; + + // If the parameter name indicates an implicit position-only parameter + // but we have already seen non-position-only parameters, report an error. 
+ if ( + !paramsArePositionOnly && + functionType.shared.parameters.every((p) => p.category === ParamCategory.Simple) + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.positionOnlyAfterNon(), + param.d.name + ); + } + } + } else { + paramsArePositionOnly = false; + } + + if ( + paramsArePositionOnly && + !isImplicitPositionOnlyParam && + functionType.shared.parameters.length > firstNonClsSelfParamIndex + ) { + FunctionType.addPositionOnlyParamSeparator(functionType); + } + + if (!isImplicitPositionOnlyParam) { + paramsArePositionOnly = false; + } + } + + // If there was no annotation for the parameter, infer its type if possible. + let isTypeInferred = false; + if (!paramTypeNode) { + isTypeInferred = true; + const inferredType = inferParamType(node, functionType.shared.flags, index, containingClassType); + if (inferredType) { + paramType = inferredType; + } + } + + paramType = paramType ?? UnknownType.create(); + + const functionParam = FunctionParam.create( + param.d.category, + paramType, + (isTypeInferred ? FunctionParamFlags.TypeInferred : FunctionParamFlags.None) | + (paramTypeNode ? FunctionParamFlags.TypeDeclared : FunctionParamFlags.None), + param.d.name ? param.d.name.d.value : undefined, + defaultValueType, + param.d.defaultValue + ); + + FunctionType.addParam(functionType, functionParam); + + if (FunctionParam.isTypeDeclared(functionParam)) { + addTypeVarsToListIfUnique( + typeParamsSeen, + getTypeVarArgsRecursive(paramType), + functionType.shared.typeVarScopeId + ); + } + + if (param.d.name) { + const variadicParamType = transformVariadicParamType(node, param.d.category, paramType); + paramTypes.push(variadicParamType); + } else { + paramTypes.push(paramType); + } + }); + + if (paramsArePositionOnly && functionType.shared.parameters.length > firstNonClsSelfParamIndex) { + FunctionType.addPositionOnlyParamSeparator(functionType); + } + + // Update the types for the nodes associated with the parameters. 
+ const scopeIds = ParseTreeUtils.getTypeVarScopesForNode(node); + paramTypes.forEach((paramType, index) => { + const paramNameNode = node.d.params[index].d.name; + if (paramNameNode) { + if (isUnknown(paramType)) { + functionType.shared.flags |= FunctionTypeFlags.UnannotatedParams; + } + + paramType = makeTypeVarsBound(paramType, scopeIds); + + writeTypeCache(paramNameNode, { type: paramType }, EvalFlags.None); + } + }); + + // If the function ends in P.args and P.kwargs parameters, make it exempt from + // args/kwargs compatibility checks. This is important for protocol comparisons. + if (paramTypes.length >= 2) { + const paramType1 = paramTypes[paramTypes.length - 2]; + const paramType2 = paramTypes[paramTypes.length - 1]; + if ( + isParamSpec(paramType1) && + paramType1.priv.paramSpecAccess === 'args' && + isParamSpec(paramType2) && + paramType2.priv.paramSpecAccess === 'kwargs' + ) { + functionType.shared.flags |= FunctionTypeFlags.GradualCallableForm; + } + } + + // If the function contains an *args and a **kwargs parameter and both + // are annotated as Any or are unannotated, make it exempt from + // args/kwargs compatibility checks. + const variadicsWithAnyType = functionType.shared.parameters.filter( + (param, index) => + param.category !== ParamCategory.Simple && + param.name && + isAnyOrUnknown(FunctionType.getParamType(functionType, index)) + ); + if (variadicsWithAnyType.length >= 2) { + functionType.shared.flags |= FunctionTypeFlags.GradualCallableForm; + } + + // If there was a defined return type, analyze that first so when we + // walk the contents of the function, return statements can be + // validated against this type. + const returnTypeAnnotationNode = + node.d.returnAnnotation ?? node.d.funcAnnotationComment?.d.returnAnnotation; + if (returnTypeAnnotationNode) { + // Temporarily set the return type to unknown in case of recursion. 
+ functionType.shared.declaredReturnType = UnknownType.create(); + + const returnType = getTypeOfAnnotation(returnTypeAnnotationNode, { + typeVarGetsCurScope: true, + }); + functionType.shared.declaredReturnType = returnType; + } else { + // If there was no return type annotation and this is a type stub, + // we have no opportunity to infer the return type, so we'll indicate + // that it's unknown. + if (fileInfo.isStubFile) { + // Special-case the __init__ method, which is commonly left without + // an annotated return type, but we can assume it returns None. + if (node.d.name.d.value === '__init__') { + functionType.shared.declaredReturnType = getNoneType(); + } else { + functionType.shared.declaredReturnType = UnknownType.create(); + } + } + } + + // Accumulate any type parameters used in the return type. + if (functionType.shared.declaredReturnType && returnTypeAnnotationNode) { + addTypeVarsToListIfUnique( + typeParamsSeen, + getTypeVarArgsRecursive(functionType.shared.declaredReturnType), + functionType.shared.typeVarScopeId + ); + } + + // Validate the default types for all type parameters. + functionType.shared.typeParams.forEach((typeParam, index) => { + let bestErrorNode: ExpressionNode = node.d.name; + if (node.d.typeParams && index < node.d.typeParams.d.params.length) { + const typeParamNode = node.d.typeParams.d.params[index]; + bestErrorNode = typeParamNode.d.defaultExpr ?? typeParamNode.d.name; + } + + validateTypeParamDefault( + bestErrorNode, + typeParam, + functionType.shared.typeParams.slice(0, index), + functionType.shared.typeVarScopeId! + ); + }); + + // Clear the "partially evaluated" flag to indicate that the functionType + // is fully evaluated. 
+ functionType.shared.flags &= ~FunctionTypeFlags.PartiallyEvaluated; + + writeTypeCache(node.d.name, { type: functionType }, EvalFlags.None); + + return functionType; + }); + } + + function markParamAccessed(param: ParameterNode) { + if (param.d.name) { + const symbolWithScope = lookUpSymbolRecursive( + param.d.name, + param.d.name.d.value, + /* honorCodeFlow */ false + ); + if (symbolWithScope) { + setSymbolAccessed(AnalyzerNodeInfo.getFileInfo(param), symbolWithScope.symbol, param.d.name); + } + } + } + + function adjustParamAnnotatedType(param: ParameterNode, type: Type): Type { + // PEP 484 indicates that if a parameter has a default value of 'None' + // the type checker should assume that the type is optional (i.e. a union + // of the specified type and 'None'). Skip this step if the type is already + // optional to avoid losing alias names when combining the types. + if ( + param.d.defaultValue?.nodeType === ParseNodeType.Constant && + param.d.defaultValue.d.constType === KeywordType.None && + !isOptionalType(type) && + !AnalyzerNodeInfo.getFileInfo(param).diagnosticRuleSet.strictParameterNoneValue + ) { + return combineTypes([type, getNoneType()]); + } + + return type; + } + + // Attempts to infer an unannotated parameter type from available context. + function inferParamType( + functionNode: FunctionNode, + functionFlags: FunctionTypeFlags, + paramIndex: number, + containingClassType: ClassType | undefined + ) { + // Is the function a method within a class? If so, see if a base class + // defines the same method and provides annotations. 
+ if (containingClassType) { + if (paramIndex === 0) { + if ((functionFlags & FunctionTypeFlags.StaticMethod) === 0) { + const hasClsParam = + (functionFlags & (FunctionTypeFlags.ClassMethod | FunctionTypeFlags.ConstructorMethod)) !== 0; + return synthesizeTypeVarForSelfCls(containingClassType, hasClsParam); + } + } + + const methodName = functionNode.d.name.d.value; + + const baseClassMemberInfo = lookUpClassMember( + containingClassType, + methodName, + MemberAccessFlags.SkipOriginalClass + ); + + if (baseClassMemberInfo) { + const memberDecls = baseClassMemberInfo.symbol.getDeclarations(); + if (memberDecls.length === 1 && memberDecls[0].type === DeclarationType.Function) { + const baseClassMethodNode = memberDecls[0].node; + + // Does the signature match exactly with the exception of annotations? + if ( + baseClassMethodNode.d.params.length === functionNode.d.params.length && + baseClassMethodNode.d.params.every((param, index) => { + const overrideParam = functionNode.d.params[index]; + return ( + overrideParam.d.name?.d.value === param.d.name?.d.value && + overrideParam.d.category === param.d.category + ); + }) + ) { + const baseClassParam = baseClassMethodNode.d.params[paramIndex]; + const baseClassParamAnnotation = + baseClassParam.d.annotation ?? baseClassParam.d.annotationComment; + if (baseClassParamAnnotation) { + let inferredParamType = getTypeOfParamAnnotation( + baseClassParamAnnotation, + functionNode.d.params[paramIndex].d.category + ); + + // If the parameter type is generic, specialize it in the context + // of the child class. 
+ if (requiresSpecialization(inferredParamType) && isClass(baseClassMemberInfo.classType)) { + const scopeIds: TypeVarScopeId[] = getTypeVarScopeIds(baseClassMemberInfo.classType); + const solution = buildSolutionFromSpecializedClass(baseClassMemberInfo.classType); + + scopeIds.push(ParseTreeUtils.getScopeIdForNode(baseClassMethodNode)); + + // Replace any unsolved TypeVars with Unknown (including all function-scoped TypeVars). + inferredParamType = applySolvedTypeVars(inferredParamType, solution, { + replaceUnsolved: { + scopeIds, + tupleClassType: getTupleClassType(), + }, + }); + } + + const fileInfo = AnalyzerNodeInfo.getFileInfo(functionNode); + if (fileInfo.isInPyTypedPackage && !fileInfo.isStubFile) { + inferredParamType = TypeBase.cloneForAmbiguousType(inferredParamType); + } + + return inferredParamType; + } + } + } + } + } + + // If the parameter has a default argument value, we may be able to infer its + // type from this information. + const paramValueExpr = functionNode.d.params[paramIndex].d.defaultValue; + if (paramValueExpr) { + return inferParamTypeFromDefaultValue(paramValueExpr); + } + + return undefined; + } + + function inferParamTypeFromDefaultValue(paramValueExpr: ExpressionNode) { + const defaultValueType = getTypeOfExpression(paramValueExpr, EvalFlags.ConvertEllipsisToAny).type; + + let inferredParamType: Type | undefined; + + // Is the default value a "None", a sentinel, or an instance of some private + // class (one whose name starts with an underscore)? If so, we will assume + // that the value is a singleton sentinel. The actual supported type is + // going to be a union of this type and Unknown. 
+ if ( + isNoneInstance(defaultValueType) || + isSentinelLiteral(defaultValueType) || + (isClassInstance(defaultValueType) && isPrivateOrProtectedName(defaultValueType.shared.name)) + ) { + inferredParamType = combineTypes([defaultValueType, UnknownType.create()]); + } else { + let skipInference = false; + + if (isFunctionOrOverloaded(defaultValueType)) { + // Do not infer parameter types that use a lambda or another function as a + // default value. We're likely to generate false positives in this case. + // It's not clear whether parameters should be positional-only or not. + skipInference = true; + } else if ( + isClassInstance(defaultValueType) && + ClassType.isBuiltIn(defaultValueType, ['tuple', 'list', 'set', 'dict']) + ) { + // Do not infer certain types like tuple because it's likely to be + // more restrictive (narrower) than intended. + skipInference = true; + } + + if (!skipInference) { + inferredParamType = convertSpecialFormToRuntimeValue( + defaultValueType, + EvalFlags.None, + /* convertModule */ true + ); + inferredParamType = stripTypeForm(inferredParamType); + inferredParamType = stripLiteralValue(inferredParamType); + } + } + + if (inferredParamType) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(paramValueExpr); + if (fileInfo.isInPyTypedPackage && !fileInfo.isStubFile) { + inferredParamType = TypeBase.cloneForAmbiguousType(inferredParamType); + } + } + + return inferredParamType; + } + + // Transforms the parameter type based on its category. If it's a simple parameter, + // no transform is applied. If it's a var-arg or keyword-arg parameter, the type + // is wrapped in a List or Dict. 
+ function transformVariadicParamType(node: ParseNode, paramCategory: ParamCategory, type: Type): Type { + switch (paramCategory) { + case ParamCategory.Simple: { + return type; + } + + case ParamCategory.ArgsList: { + if (isParamSpec(type) && type.priv.paramSpecAccess) { + return type; + } + + if (isUnpackedClass(type)) { + return ClassType.cloneForPacked(type); + } + + return makeTupleObject(evaluatorInterface, [{ type, isUnbounded: !isTypeVarTuple(type) }]); + } + + case ParamCategory.KwargsDict: { + // Leave a ParamSpec alone. + if (isParamSpec(type) && type.priv.paramSpecAccess) { + return type; + } + + // Is this an unpacked TypedDict? If so, return its packed version. + if (isClassInstance(type) && ClassType.isTypedDictClass(type) && type.priv.isUnpacked) { + return ClassType.cloneForPacked(type); + } + + // Wrap the type in a dict with str keys. + const dictType = getBuiltInType(node, 'dict'); + const strType = getBuiltInObject(node, 'str'); + + if (isInstantiableClass(dictType) && isClassInstance(strType)) { + return ClassType.cloneAsInstance(ClassType.specialize(dictType, [strType, type])); + } + + return UnknownType.create(); + } + } + } + + function createAsyncFunction(node: FunctionNode, functionType: FunctionType): FunctionType { + assert(FunctionType.isAsync(functionType)); + + // Clone the original function and replace its return type with an + // Awaitable[]. Mark the new function as no longer async. 
+ const awaitableFunctionType = FunctionType.cloneWithNewFlags( + functionType, + functionType.shared.flags & ~(FunctionTypeFlags.Async | FunctionTypeFlags.PartiallyEvaluated) + ); + + if (functionType.shared.declaredReturnType) { + awaitableFunctionType.shared.declaredReturnType = createAwaitableReturnType( + node, + functionType.shared.declaredReturnType, + FunctionType.isGenerator(functionType) + ); + } else { + awaitableFunctionType.shared.inferredReturnType = { + type: createAwaitableReturnType( + node, + getInferredReturnType(functionType), + FunctionType.isGenerator(functionType) + ), + }; + } + + return awaitableFunctionType; + } + + function createAwaitableReturnType( + node: ParseNode, + returnType: Type, + isGenerator: boolean, + useCoroutine = true + ): Type { + let awaitableReturnType: Type | undefined; + + if (isClassInstance(returnType)) { + if (ClassType.isBuiltIn(returnType)) { + if (returnType.shared.name === 'Generator') { + // If the return type is a Generator, change it to an AsyncGenerator. + const asyncGeneratorType = getTypingType(node, 'AsyncGenerator'); + if (asyncGeneratorType && isInstantiableClass(asyncGeneratorType)) { + const typeArgs: Type[] = []; + const generatorTypeArgs = returnType.priv.typeArgs; + if (generatorTypeArgs && generatorTypeArgs.length > 0) { + typeArgs.push(generatorTypeArgs[0]); + } + if (generatorTypeArgs && generatorTypeArgs.length > 1) { + typeArgs.push(generatorTypeArgs[1]); + } + awaitableReturnType = ClassType.cloneAsInstance( + ClassType.specialize(asyncGeneratorType, typeArgs) + ); + } + } else if (['AsyncIterator', 'AsyncIterable'].some((name) => name === returnType.shared.name)) { + // If it's already an AsyncIterator or AsyncIterable, leave it as is. + awaitableReturnType = returnType; + } else if (returnType.shared.name === 'AsyncGenerator') { + // If it's already an AsyncGenerator and the function is a generator, + // leave it as is. 
+ if (isGenerator) { + awaitableReturnType = returnType; + } + } + } + } + + if (!awaitableReturnType || !isGenerator) { + // Wrap in either an Awaitable or a CoroutineType, which is a subclass of Awaitable. + const awaitableType = useCoroutine ? getTypesType(node, 'CoroutineType') : getTypingType(node, 'Awaitable'); + if (awaitableType && isInstantiableClass(awaitableType)) { + awaitableReturnType = ClassType.cloneAsInstance( + ClassType.specialize( + awaitableType, + useCoroutine ? [AnyType.create(), AnyType.create(), returnType] : [returnType] + ) + ); + } else { + awaitableReturnType = UnknownType.create(); + } + } + + return awaitableReturnType; + } + + function inferFunctionReturnType( + node: FunctionNode, + isAbstract: boolean, + callerNode: ExpressionNode | undefined + ): TypeResult | undefined { + const returnAnnotation = node.d.returnAnnotation || node.d.funcAnnotationComment?.d.returnAnnotation; + + // This shouldn't be called if there is a declared return type, but it + // can happen if there are unexpected cycles between decorators and + // classes that they decorate. We'll just return an undefined type + // in this case. + if (returnAnnotation) { + return undefined; + } + + // Is this type already cached? + let inferredReturnType = readTypeCache(node.d.suite, EvalFlags.None); + let isIncomplete = false; + + if (inferredReturnType) { + return { type: inferredReturnType, isIncomplete }; + } + + const recursionEntry = functionRecursionMap.get(node.id) ?? 
[]; + + if (functionRecursionMap.size >= maxInferFunctionReturnRecursionCount) { + inferredReturnType = UnknownType.create(); + isIncomplete = true; + } else if (recursionEntry.some((entry) => entry.callerNode === callerNode)) { + inferredReturnType = UnknownType.create(); + isIncomplete = true; + } else { + recursionEntry.push({ callerNode }); + functionRecursionMap.set(node.id, recursionEntry); + + try { + let functionDecl: FunctionDeclaration | undefined; + const decl = AnalyzerNodeInfo.getDeclaration(node); + if (decl) { + functionDecl = decl as FunctionDeclaration; + } + + const functionNeverReturns = !isAfterNodeReachable(node); + const implicitlyReturnsNone = isAfterNodeReachable(node.d.suite); + + // Infer the return type based on all of the return statements in the function's body. + if (AnalyzerNodeInfo.getFileInfo(node).isStubFile) { + // If a return type annotation is missing in a stub file, assume + // it's an "unknown" type. In normal source files, we can infer the + // type from the implementation. + inferredReturnType = UnknownType.create(); + } else { + if (functionNeverReturns) { + // If the function always raises and never returns, assume a "NoReturn" type. + // Skip this for abstract methods which often are implemented with "raise + // NotImplementedError()". + if (isAbstract || methodAlwaysRaisesNotImplemented(functionDecl)) { + inferredReturnType = UnknownType.create(); + } else { + inferredReturnType = NeverType.createNoReturn(); + } + } else { + const inferredReturnTypes: Type[] = []; + if (functionDecl?.returnStatements) { + functionDecl.returnStatements.forEach((returnNode) => { + if (isNodeReachable(returnNode)) { + if (returnNode.d.expr) { + const returnTypeResult = getTypeOfExpression(returnNode.d.expr); + if (returnTypeResult.isIncomplete) { + isIncomplete = true; + } + + let returnType = returnTypeResult.type; + + // If the type is a special form, use the special form instead. 
+ if (returnType.props?.specialForm) { + returnType = returnType.props.specialForm; + } + + // If the return type includes an instance of a class with isEmptyContainer + // set, clear that because we don't want this flag to "leak" into the + // inferred return type. + returnType = mapSubtypes(returnType, (subtype) => { + if (isClassInstance(subtype) && subtype.priv.isEmptyContainer) { + return ClassType.specialize( + subtype, + subtype.priv.typeArgs, + !!subtype.priv.isTypeArgExplicit, + subtype.priv.includeSubclasses, + subtype.priv.tupleTypeArgs, + /* isEmptyContainer */ false + ); + } + return subtype; + }); + + // Do not retain TypeForm types in inferred return types. + returnType = stripTypeForm(returnType); + + inferredReturnTypes.push(returnType); + } else { + inferredReturnTypes.push(getNoneType()); + } + } + }); + } + + if (!functionNeverReturns && implicitlyReturnsNone) { + inferredReturnTypes.push(getNoneType()); + } + + inferredReturnType = combineTypes(inferredReturnTypes); + + // Remove any unbound values since those would generate an exception + // before being returned. + inferredReturnType = removeUnbound(inferredReturnType); + } + + // Is it a generator? + if (functionDecl?.isGenerator) { + const inferredYieldTypes: Type[] = []; + let useAwaitableGenerator = false; + let isYieldResultUsed = false; + + if (functionDecl.yieldStatements) { + functionDecl.yieldStatements.forEach((yieldNode) => { + if (isNodeReachable(yieldNode)) { + if (yieldNode.nodeType === ParseNodeType.YieldFrom) { + isYieldResultUsed = true; + const iteratorTypeResult = getTypeOfExpression(yieldNode.d.expr); + if ( + isClassInstance(iteratorTypeResult.type) && + ClassType.isBuiltIn(iteratorTypeResult.type, ['Coroutine', 'CoroutineType']) + ) { + const yieldType = + iteratorTypeResult.type.priv.typeArgs && + iteratorTypeResult.type.priv.typeArgs.length > 0 + ? iteratorTypeResult.type.priv.typeArgs[0] + : UnknownType.create(); + + // Handle old-style (pre-await) Coroutines. 
+ inferredYieldTypes.push(yieldType); + useAwaitableGenerator = true; + } else { + const yieldType = getTypeOfIterator( + iteratorTypeResult, + /* isAsync */ false, + yieldNode + )?.type; + + inferredYieldTypes.push(yieldType ?? UnknownType.create()); + } + } else { + // If the yield expression is not by itself in a statement list, + // assume that its result is consumed. + if (yieldNode?.parent?.nodeType !== ParseNodeType.StatementList) { + isYieldResultUsed = true; + } + + if (yieldNode.d.expr) { + const yieldType = getTypeOfExpression(yieldNode.d.expr).type; + inferredYieldTypes.push(yieldType ?? UnknownType.create()); + } else { + inferredYieldTypes.push(getNoneType()); + } + } + } + }); + } + + const inferredYieldType = combineTypes(inferredYieldTypes); + + // Inferred yield types need to be wrapped in a Generator or + // AwaitableGenerator to produce the final result. + const generatorType = useAwaitableGenerator + ? getTypeCheckerInternalsType(node, 'AwaitableGenerator') ?? + getTypingType(node, 'AwaitableGenerator') + : getTypingType(node, 'Generator'); + + if (generatorType && isInstantiableClass(generatorType)) { + const typeArgs: Type[] = []; + + // The "send type" for the generator (the second type argument) is + // not generally inferrable, but we can assume that it's Any + // if the function never uses the value and Unknown if it does. + // This eliminates any "partially unknown" errors in strict mode + // in the common case. + const sendType = isYieldResultUsed ? 
UnknownType.create() : AnyType.create(); + + typeArgs.push(inferredYieldType, sendType, inferredReturnType); + + if (useAwaitableGenerator) { + typeArgs.push(AnyType.create()); + } + + inferredReturnType = ClassType.cloneAsInstance( + ClassType.specialize(generatorType, typeArgs) + ); + } else { + inferredReturnType = UnknownType.create(); + } + } + } + + writeTypeCache(node.d.suite, { type: inferredReturnType, isIncomplete }, EvalFlags.None); + } catch (err) { + // Attempt to handle a stack overflow without crashing. In rare + // cases, we can get very deep stacks when inferring return types + // within untyped code. + if ((err as any)?.message === 'Maximum call stack size exceeded') { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + console.error( + `Overflowed stack when inferring return type for function: ${ + node.d.name.d.value + } in file ${fileInfo.fileUri.toUserVisibleString()}` + ); + return; + } + throw err; + } finally { + recursionEntry.pop(); + if (recursionEntry.length === 0) { + functionRecursionMap.delete(node.id); + } + } + } + + return inferredReturnType ? { type: inferredReturnType, isIncomplete } : undefined; + } + + // Determines whether the method consists only of a "raise" statement + // and the exception type raised is a NotImplementedError or a subclass + // thereof. This is commonly used for abstract methods. 
+ function methodAlwaysRaisesNotImplemented(functionDecl?: FunctionDeclaration): boolean { + if ( + !functionDecl || + !functionDecl.isMethod || + functionDecl.returnStatements || + functionDecl.yieldStatements || + !functionDecl.raiseStatements + ) { + return false; + } + + const statements = functionDecl.node.d.suite.d.statements; + if (statements.some((statement) => statement.nodeType !== ParseNodeType.StatementList)) { + return false; + } + + for (const raiseStatement of functionDecl.raiseStatements) { + if (!raiseStatement.d.expr || raiseStatement.d.fromExpr) { + return false; + } + const raiseType = getTypeOfExpression(raiseStatement.d.expr).type; + const classType = isInstantiableClass(raiseType) + ? raiseType + : isClassInstance(raiseType) + ? raiseType + : undefined; + if (!classType || !derivesFromStdlibClass(classType, 'NotImplementedError')) { + return false; + } + } + + return true; + } + + function evaluateTypesForForStatement(node: ForNode): void { + if (isTypeCached(node)) { + return; + } + + const iteratorTypeResult = getTypeOfExpression(node.d.iterableExpr); + const iteratedType = + getTypeOfIterator(iteratorTypeResult, !!node.d.isAsync, node.d.iterableExpr)?.type ?? UnknownType.create(); + + assignTypeToExpression( + node.d.targetExpr, + { type: iteratedType, isIncomplete: iteratorTypeResult.isIncomplete }, + node.d.targetExpr + ); + + writeTypeCache(node, { type: iteratedType, isIncomplete: !!iteratorTypeResult.isIncomplete }, EvalFlags.None); + } + + function evaluateTypesForExceptStatement(node: ExceptNode): void { + // This should be called only if the except node has a target exception. 
+ assert(node.d.typeExpr !== undefined); + + if (isTypeCached(node)) { + return; + } + + const exceptionTypeResult = getTypeOfExpression(node.d.typeExpr!); + const exceptionTypes = exceptionTypeResult.type; + let includesBaseException = false; + + function getExceptionType(exceptionType: Type, errorNode: ExpressionNode) { + exceptionType = makeTopLevelTypeVarsConcrete(exceptionType); + + if (isAnyOrUnknown(exceptionType)) { + return exceptionType; + } + + if (isInstantiableClass(exceptionType)) { + if (ClassType.isBuiltIn(exceptionType, 'BaseException')) { + includesBaseException = true; + } + return ClassType.cloneAsInstance(exceptionType); + } + + if (isClassInstance(exceptionType)) { + const iterableType = + getTypeOfIterator( + { type: exceptionType, isIncomplete: exceptionTypeResult.isIncomplete }, + /* isAsync */ false, + errorNode, + /* emitNotIterableError */ false + )?.type ?? UnknownType.create(); + + return mapSubtypes(iterableType, (subtype) => { + if (isAnyOrUnknown(subtype)) { + return subtype; + } + + return UnknownType.create(); + }); + } + + return UnknownType.create(); + } + + let targetType = mapSubtypes(exceptionTypes, (subType) => { + // If more than one type was specified for the exception, we'll receive + // a specialized tuple object here. + const tupleType = getSpecializedTupleType(subType); + if (tupleType && tupleType.priv.tupleTypeArgs) { + const entryTypes = tupleType.priv.tupleTypeArgs.map((t) => { + return getExceptionType(t.type, node.d.typeExpr!); + }); + return combineTypes(entryTypes); + } + + return getExceptionType(subType, node.d.typeExpr!); + }); + + // If this is an except group, wrap the exception type in an ExceptionGroup + // or BaseExceptionGroup depending on whether the target exception is + // a BaseException. + if (node.d.isExceptGroup) { + targetType = getBuiltInObject(node, includesBaseException ? 
'BaseExceptionGroup' : 'ExceptionGroup', [ + targetType, + ]); + } + + if (node.d.name) { + assignTypeToExpression(node.d.name, { type: targetType }, node.d.name); + } + + writeTypeCache(node, { type: targetType }, EvalFlags.None); + } + + function evaluateTypesForWithStatement(node: WithItemNode): void { + if (isTypeCached(node)) { + return; + } + + const exprTypeResult = getTypeOfExpression(node.d.expr); + let isIncomplete = exprTypeResult.isIncomplete; + let exprType = exprTypeResult.type; + const isAsync = node.parent && node.parent.nodeType === ParseNodeType.With && !!node.parent.d.isAsync; + + if (isOptionalType(exprType)) { + addDiagnostic( + DiagnosticRule.reportOptionalContextManager, + isAsync ? LocMessage.noneNotUsableWithAsync() : LocMessage.noneNotUsableWith(), + node.d.expr + ); + exprType = removeNoneFromUnion(exprType); + } + + // Verify that the target has an __enter__ or __aenter__ method defined. + const enterMethodName = isAsync ? '__aenter__' : '__enter__'; + const scopedType = mapSubtypes(exprType, (subtype) => { + subtype = makeTopLevelTypeVarsConcrete(subtype); + + if (isAnyOrUnknown(subtype)) { + return subtype; + } + + const enterDiag = new DiagnosticAddendum(); + + if (isClass(subtype)) { + const enterTypeResult = getTypeOfMagicMethodCall( + subtype, + enterMethodName, + [], + node.d.expr, + /* inferenceContext */ undefined, + enterDiag.createAddendum() + ); + + if (enterTypeResult) { + if (isAsync) { + if (enterTypeResult.isIncomplete) { + isIncomplete = true; + } + + const asyncResult = getTypeOfAwaitable({ type: enterTypeResult.type }, node.d.expr); + if (asyncResult.isIncomplete) { + isIncomplete = true; + } + + return asyncResult.type; + } + return enterTypeResult.type; + } + + if (!isAsync) { + if ( + getTypeOfMagicMethodCall( + subtype, + '__aenter__', + [], + node.d.expr, + /* inferenceContext */ undefined + )?.type + ) { + enterDiag.addMessage(LocAddendum.asyncHelp()); + } + } + } + + const message = isAsync ? 
LocMessage.typeNotUsableWithAsync() : LocMessage.typeNotUsableWith(); + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + message.format({ type: printType(subtype), method: enterMethodName }) + enterDiag.getString(), + node.d.expr + ); + return UnknownType.create(); + }); + + // Verify that the target has an __exit__ or __aexit__ method defined. + const exitMethodName = isAsync ? '__aexit__' : '__exit__'; + const exitDiag = new DiagnosticAddendum(); + + doForEachSubtype(exprType, (subtype) => { + subtype = makeTopLevelTypeVarsConcrete(subtype); + + if (isAnyOrUnknown(subtype)) { + return; + } + + if (isClass(subtype)) { + const anyArg: TypeResult = { type: AnyType.create() }; + const exitTypeResult = getTypeOfMagicMethodCall( + subtype, + exitMethodName, + [anyArg, anyArg, anyArg], + node.d.expr, + /* inferenceContext */ undefined, + exitDiag + ); + + if (exitTypeResult) { + if (exitTypeResult.isIncomplete) { + isIncomplete = true; + } + + if (isAsync) { + const asyncResult = getTypeOfAwaitable({ type: exitTypeResult.type }, node.d.expr); + if (asyncResult.isIncomplete) { + isIncomplete = true; + } + + return asyncResult.type; + } + + return exitTypeResult.type; + } + } + + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeNotUsableWith().format({ type: printType(subtype), method: exitMethodName }) + + exitDiag.getString(), + node.d.expr + ); + return UnknownType.create(); + }); + + if (node.d.target) { + assignTypeToExpression(node.d.target, { type: scopedType, isIncomplete }, node.d.target); + } + + writeTypeCache(node, { type: scopedType, isIncomplete }, EvalFlags.None); + } + + function evaluateTypesForImportAs(node: ImportAsNode): void { + if (isTypeCached(node)) { + return; + } + + let symbolNameNode: NameNode; + if (node.d.alias) { + // The symbol name is defined by the alias. + symbolNameNode = node.d.alias; + } else { + // There was no alias, so we need to use the first element of + // the name parts as the symbol. 
+ symbolNameNode = node.d.module.d.nameParts[0]; + } + + if (!symbolNameNode) { + // This can happen in certain cases where there are parse errors. + return; + } + + // Look up the symbol to find the alias declaration. + let symbolType = getAliasedSymbolTypeForName(node, symbolNameNode.d.value) ?? UnknownType.create(); + + // Is there a cached module type associated with this node? If so, use + // it instead of the type we just created. + const cachedModuleType = readTypeCache(node, EvalFlags.None) as ModuleType; + if (cachedModuleType && isModule(cachedModuleType) && symbolType) { + if (isTypeSame(symbolType, cachedModuleType)) { + symbolType = cachedModuleType; + } + } + + assignTypeToNameNode(symbolNameNode, { type: symbolType }, /* ignoreEmptyContainers */ false); + + writeTypeCache(node, { type: symbolType }, EvalFlags.None); + } + + function evaluateTypesForImportFromAs(node: ImportFromAsNode): void { + if (isTypeCached(node)) { + return; + } + + const aliasNode = node.d.alias || node.d.name; + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + + // If this is a redundant form of an import, assume it is an intentional + // export and mark the symbol as accessed. + if (node.d.alias?.d.value === node.d.name.d.value) { + const symbolInScope = lookUpSymbolRecursive(node, node.d.name.d.value, /* honorCodeFlow */ true); + if (symbolInScope) { + setSymbolAccessed(fileInfo, symbolInScope.symbol, node); + } + } + + // If this is an import into a class scope, mark the symbol as accessed. 
+ const classNode = ParseTreeUtils.getEnclosingClass(node, /* stopAtFunction */ true); + if (classNode) { + const symbolInScope = lookUpSymbolRecursive(node, aliasNode.d.value, /* honorCodeFlow */ true); + if (symbolInScope) { + setSymbolAccessed(fileInfo, symbolInScope.symbol, node); + } + } + + let symbolType = getAliasedSymbolTypeForName(node, aliasNode.d.value); + if (!symbolType) { + const parentNode = node.parent as ImportFromNode; + assert(parentNode && parentNode.nodeType === ParseNodeType.ImportFrom); + assert(!parentNode.d.isWildcardImport); + + const importInfo = AnalyzerNodeInfo.getImportInfo(parentNode.d.module); + if (importInfo && importInfo.isImportFound && !importInfo.isNativeLib) { + const resolvedPath = importInfo.resolvedUris[importInfo.resolvedUris.length - 1]; + + const importLookupInfo = importLookup(resolvedPath); + let reportError = false; + + // If we were able to resolve the import, report the error as + // an unresolved symbol. + if (importLookupInfo) { + reportError = true; + + // Handle PEP 562 support for module-level __getattr__ function, + // introduced in Python 3.7. + if ( + PythonVersion.isGreaterOrEqualTo( + fileInfo.executionEnvironment.pythonVersion, + pythonVersion3_7 + ) || + fileInfo.isStubFile + ) { + const getAttrSymbol = importLookupInfo.symbolTable.get('__getattr__'); + if (getAttrSymbol) { + const getAttrType = getEffectiveTypeOfSymbol(getAttrSymbol); + if (isFunction(getAttrType)) { + symbolType = getEffectiveReturnType(getAttrType); + reportError = false; + } + } + } + } else if (resolvedPath.isEmpty()) { + // This corresponds to the "from . import a" form. 
+ reportError = true; + } + + if (reportError) { + addDiagnostic( + DiagnosticRule.reportAttributeAccessIssue, + LocMessage.importSymbolUnknown().format({ name: node.d.name.d.value }), + node.d.name + ); + } + } + + if (!symbolType) { + symbolType = UnknownType.create(); + } + } + + assignTypeToNameNode(aliasNode, { type: symbolType }, /* ignoreEmptyContainers */ false); + writeTypeCache(node, { type: symbolType }, EvalFlags.None); + } + + function evaluateTypesForMatchStatement(node: MatchNode): void { + if (isTypeCached(node)) { + return; + } + + const subjectTypeResult = getTypeOfExpression(node.d.expr); + let subjectType = subjectTypeResult.type; + + // Apply negative narrowing for each of the cases that doesn't have a guard statement. + for (const caseStatement of node.d.cases) { + if (!caseStatement.d.guardExpr) { + subjectType = narrowTypeBasedOnPattern( + evaluatorInterface, + subjectType, + caseStatement.d.pattern, + /* isPositiveTest */ false + ); + } + } + + writeTypeCache(node, { type: subjectType, isIncomplete: !!subjectTypeResult.isIncomplete }, EvalFlags.None); + } + + function evaluateTypesForCaseStatement(node: CaseNode): void { + if (isTypeCached(node)) { + return; + } + + if (!node.parent || node.parent.nodeType !== ParseNodeType.Match) { + fail('Expected parent of case statement to be match statement'); + return; + } + + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + const subjectTypeResult = getTypeOfExpression(node.parent.d.expr); + let subjectType = subjectTypeResult.type; + + // Apply negative narrowing for each of the cases prior to the current one + // except for those that have a guard expression. 
+ for (const caseStatement of node.parent.d.cases) { + if (caseStatement === node) { + if (fileInfo.diagnosticRuleSet.reportUnnecessaryComparison !== 'none') { + if (!subjectTypeResult.isIncomplete) { + checkForUnusedPattern(evaluatorInterface, node.d.pattern, subjectType); + } + } + break; + } + + if (!caseStatement.d.guardExpr) { + subjectType = narrowTypeBasedOnPattern( + evaluatorInterface, + subjectType, + caseStatement.d.pattern, + /* isPositiveTest */ false + ); + } + } + + const narrowedSubjectType = assignTypeToPatternTargets( + evaluatorInterface, + subjectType, + !!subjectTypeResult.isIncomplete, + node.d.pattern + ); + + writeTypeCache( + node, + { type: narrowedSubjectType, isIncomplete: !!subjectTypeResult.isIncomplete }, + EvalFlags.None + ); + } + + function evaluateTypesForImportFrom(node: ImportFromNode): void { + if (isTypeCached(node)) { + return; + } + + if (node.d.isWildcardImport) { + // Write back a dummy type so we don't evaluate this node again. + writeTypeCache(node, { type: AnyType.create() }, EvalFlags.None); + + const flowNode = AnalyzerNodeInfo.getFlowNode(node); + if (flowNode && (flowNode.flags & FlowFlags.WildcardImport) !== 0) { + const wildcardFlowNode = flowNode as FlowWildcardImport; + wildcardFlowNode.names.forEach((name) => { + const importedSymbolType = getAliasedSymbolTypeForName(node, name); + + if (!importedSymbolType) { + return; + } + + const symbolWithScope = lookUpSymbolRecursive(node, name, /* honorCodeFlow */ false); + if (!symbolWithScope) { + return; + } + + const declaredType = getDeclaredTypeOfSymbol(symbolWithScope.symbol)?.type; + if (!declaredType) { + return; + } + + const diagAddendum = new DiagnosticAddendum(); + + if (!assignType(declaredType, importedSymbolType, diagAddendum)) { + addDiagnostic( + DiagnosticRule.reportAssignmentType, + LocMessage.typeAssignmentMismatchWildcard().format({ + ...printSrcDestTypes(importedSymbolType, declaredType), + name, + }) + diagAddendum.getString(), + node, + 
node.d.wildcardToken ?? node + ); + } + }); + } + } else { + // Use the first element of the name parts as the symbol. + const symbolNameNode = node.d.module.d.nameParts[0]; + + // Look up the symbol to find the alias declaration. + let symbolType = getAliasedSymbolTypeForName(node, symbolNameNode.d.value); + if (!symbolType) { + return; + } + + // Is there a cached module type associated with this node? If so, use + // it instead of the type we just created. + const cachedModuleType = readTypeCache(node, EvalFlags.None) as ModuleType; + if (cachedModuleType && isModule(cachedModuleType) && symbolType) { + if (isTypeSame(symbolType, cachedModuleType)) { + symbolType = cachedModuleType; + } + } + + assignTypeToNameNode(symbolNameNode, { type: symbolType }, /* ignoreEmptyContainers */ false); + + writeTypeCache(node, { type: symbolType }, EvalFlags.None); + } + } + + function evaluateTypesForTypeAnnotationNode(node: TypeAnnotationNode) { + // If this node is part of an assignment statement, use specialized + // logic that performs bidirectional inference and assignment + // type narrowing. + if (node.parent?.nodeType === ParseNodeType.Assignment) { + evaluateTypesForAssignmentStatement(node.parent); + } else { + const annotationType = getTypeOfAnnotation(node.d.annotation, { + varTypeAnnotation: true, + allowFinal: isFinalAllowedForAssignmentTarget(node.d.valueExpr), + allowClassVar: isClassVarAllowedForAssignmentTarget(node.d.valueExpr), + }); + + writeTypeCache(node.d.valueExpr, { type: annotationType }, EvalFlags.None); + } + } + + function getAliasedSymbolTypeForName( + node: ImportAsNode | ImportFromAsNode | ImportFromNode, + name: string + ): Type | undefined { + const symbolWithScope = lookUpSymbolRecursive(node, name, /* honorCodeFlow */ true); + if (!symbolWithScope) { + return undefined; + } + + // Normally there will be at most one decl associated with the import node, but + // there can be multiple in the case of the "from .X import X" statement. 
In such + // case, we want to choose the last declaration. + const filteredDecls = symbolWithScope.symbol + .getDeclarations() + .filter( + (decl) => ParseTreeUtils.isNodeContainedWithin(node, decl.node) && decl.type === DeclarationType.Alias + ); + let aliasDecl = filteredDecls.length > 0 ? filteredDecls[filteredDecls.length - 1] : undefined; + + // If we didn't find an exact match, look for any alias associated with + // this symbol. In cases where we have multiple ImportAs nodes that share + // the same first-part name (e.g. "import asyncio" and "import asyncio.tasks"), + // we may not find the declaration associated with this node. + if (!aliasDecl) { + aliasDecl = symbolWithScope.symbol.getDeclarations().find((decl) => decl.type === DeclarationType.Alias); + } + + if (!aliasDecl) { + return undefined; + } + + assert(aliasDecl.type === DeclarationType.Alias); + + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + + // Try to resolve the alias while honoring external visibility. + const resolvedAliasInfo = resolveAliasDeclarationWithInfo(aliasDecl, /* resolveLocalNames */ true, { + allowExternallyHiddenAccess: fileInfo.isStubFile, + }); + + if (!resolvedAliasInfo) { + return undefined; + } + + if (!resolvedAliasInfo.declaration) { + return evaluatorOptions.evaluateUnknownImportsAsAny ? 
AnyType.create() : UnknownType.create(); + } + + if (node.nodeType === ParseNodeType.ImportFromAs) { + if (resolvedAliasInfo.isPrivate) { + addDiagnostic( + DiagnosticRule.reportPrivateUsage, + LocMessage.privateUsedOutsideOfModule().format({ + name: node.d.name.d.value, + }), + node.d.name + ); + } + + if (resolvedAliasInfo.privatePyTypedImporter) { + const diag = new DiagnosticAddendum(); + if (resolvedAliasInfo.privatePyTypedImported) { + diag.addMessage( + LocAddendum.privateImportFromPyTypedSource().format({ + module: resolvedAliasInfo.privatePyTypedImported, + }) + ); + } + addDiagnostic( + DiagnosticRule.reportPrivateImportUsage, + LocMessage.privateImportFromPyTypedModule().format({ + name: node.d.name.d.value, + module: resolvedAliasInfo.privatePyTypedImporter, + }) + diag.getString(), + node.d.name + ); + } + } + + return getInferredTypeOfDeclaration(symbolWithScope.symbol, aliasDecl); + } + + // In some cases, an expression must be evaluated in the context of another + // expression or statement that contains it. This contextual evaluation + // allows for bidirectional type evaluation. + function evaluateTypesForExpressionInContext(node: ExpressionNode): void { + // Check for a couple of special cases where the node is a NameNode but + // is technically not part of an expression. We'll handle these here so + // callers don't need to include special-case logic. 
+ if (node.nodeType === ParseNodeType.Name && node.parent) { + if (node.parent.nodeType === ParseNodeType.Function && node.parent.d.name === node) { + getTypeOfFunction(node.parent); + return; + } + + if (node.parent.nodeType === ParseNodeType.Class && node.parent.d.name === node) { + getTypeOfClass(node.parent); + return; + } + + if (node.parent.nodeType === ParseNodeType.ImportFromAs) { + evaluateTypesForImportFromAs(node.parent); + return; + } + + if (node.parent.nodeType === ParseNodeType.ImportAs) { + evaluateTypesForImportAs(node.parent); + return; + } + + if (node.parent.nodeType === ParseNodeType.TypeAlias && node.parent.d.name === node) { + getTypeOfTypeAlias(node.parent); + return; + } + + if (node.parent.nodeType === ParseNodeType.Global || node.parent.nodeType === ParseNodeType.Nonlocal) { + // For global and nonlocal statements, allow forward references so + // we don't use code flow during symbol lookups. + getTypeOfExpression(node, EvalFlags.ForwardRefs); + return; + } + + if (node.parent.nodeType === ParseNodeType.ModuleName) { + // A name within a module name isn't an expression, + // so there's nothing we can evaluate here. + return; + } + } + + // If the expression is part of a type annotation, we need to evaluate + // it with special evaluation flags. + const annotationNode = ParseTreeUtils.getParentAnnotationNode(node); + if (annotationNode) { + // Annotations need to be evaluated with specialized evaluation flags. 
+ const annotationParent = annotationNode.parent; + assert(annotationParent !== undefined); + + if (annotationParent.nodeType === ParseNodeType.Assignment) { + if (annotationNode === annotationParent.d.annotationComment) { + getTypeOfAnnotation(annotationNode, { + varTypeAnnotation: true, + allowFinal: isFinalAllowedForAssignmentTarget(annotationParent.d.leftExpr), + allowClassVar: isClassVarAllowedForAssignmentTarget(annotationParent.d.leftExpr), + }); + } else { + evaluateTypesForAssignmentStatement(annotationParent); + } + return; + } + + if (annotationParent.nodeType === ParseNodeType.TypeAnnotation) { + evaluateTypesForTypeAnnotationNode(annotationParent); + return; + } + + if ( + annotationParent.nodeType === ParseNodeType.Function && + annotationNode === annotationParent.d.returnAnnotation + ) { + getTypeOfAnnotation(annotationNode, { + typeVarGetsCurScope: true, + }); + return; + } + + getTypeOfAnnotation(annotationNode, { + varTypeAnnotation: annotationNode.parent?.nodeType === ParseNodeType.TypeAnnotation, + allowUnpackedTuple: + annotationParent.nodeType === ParseNodeType.Parameter && + annotationParent.d.category === ParamCategory.ArgsList, + allowUnpackedTypedDict: + annotationParent.nodeType === ParseNodeType.Parameter && + annotationParent.d.category === ParamCategory.KwargsDict, + }); + return; + } + + // See if the expression is part of a pattern used in a case statement. + const possibleCaseNode = ParseTreeUtils.getParentNodeOfType(node, ParseNodeType.Case); + if (possibleCaseNode) { + if (ParseTreeUtils.isNodeContainedWithin(node, possibleCaseNode.d.pattern)) { + evaluateTypesForCaseStatement(possibleCaseNode); + return; + } + } + + // Scan up the parse tree until we find a node that doesn't + // require any context to be evaluated. 
+ let nodeToEvaluate: ExpressionNode = node; + let flags = EvalFlags.None; + + while (true) { + // If we're within an argument node in a call or index expression, skip + // all of the nodes between because the entire argument expression + // needs to be evaluated contextually. + const argumentNode = ParseTreeUtils.getParentNodeOfType(nodeToEvaluate, ParseNodeType.Argument); + if (argumentNode && argumentNode !== nodeToEvaluate) { + assert(argumentNode.parent !== undefined); + + if ( + argumentNode.parent.nodeType === ParseNodeType.Call || + argumentNode.parent.nodeType === ParseNodeType.Index + ) { + nodeToEvaluate = argumentNode.parent; + continue; + } + + if (argumentNode.parent.nodeType === ParseNodeType.Class) { + // If this is an argument node within a class declaration, + // evaluate the full class declaration node. + getTypeOfClass(argumentNode.parent); + return; + } + } + + let parent = nodeToEvaluate.parent; + if (!parent) { + break; + } + + // If this is the target of an assignment expression, evaluate the + // assignment expression node instead. + if (parent.nodeType === ParseNodeType.AssignmentExpression && nodeToEvaluate === parent.d.name) { + nodeToEvaluate = parent; + continue; + } + + // Forward-declared type annotation expressions need to be be evaluated + // in context so they have the appropriate flags set. Most of these cases + // will have been detected above when calling getParentAnnotationNode, + // but TypeAlias expressions are not handled there. + const stringEnclosure = ParseTreeUtils.getParentNodeOfType(parent, ParseNodeType.StringList); + if (stringEnclosure) { + nodeToEvaluate = stringEnclosure as StringListNode; + continue; + } + + // The left expression of a call or member access expression is not generally contextual. + if (parent.nodeType === ParseNodeType.Call || parent.nodeType === ParseNodeType.MemberAccess) { + if (nodeToEvaluate === parent.d.leftExpr) { + // Handle the special case where the LHS is a call to super(). 
+ if ( + nodeToEvaluate.nodeType === ParseNodeType.Call && + nodeToEvaluate.d.leftExpr.nodeType === ParseNodeType.Name && + nodeToEvaluate.d.leftExpr.d.value === 'super' + ) { + nodeToEvaluate = parent; + continue; + } + + // Handle the special case where the LHS is a call to a lambda. + if (parent.nodeType === ParseNodeType.Call && nodeToEvaluate.nodeType === ParseNodeType.Lambda) { + nodeToEvaluate = parent; + continue; + } + + flags = EvalFlags.CallBaseDefaults; + break; + } + } else if (parent.nodeType === ParseNodeType.Index) { + // The base expression of an index expression is not contextual. + if (nodeToEvaluate === parent.d.leftExpr) { + flags = EvalFlags.IndexBaseDefaults; + } + } + + if (!isExpressionNode(parent)) { + // If we've hit a non-expression node, we generally want to + // stop. However, there are a few special "pass through" + // node types that we can skip over to get to a known + // expression node. + if ( + parent.nodeType === ParseNodeType.DictionaryKeyEntry || + parent.nodeType === ParseNodeType.DictionaryExpandEntry || + parent.nodeType === ParseNodeType.ComprehensionFor || + parent.nodeType === ParseNodeType.ComprehensionIf + ) { + assert(parent.parent !== undefined && isExpressionNode(parent.parent)); + parent = parent.parent; + } else if (parent.nodeType === ParseNodeType.Parameter) { + assert(parent.parent !== undefined); + + // Parameters are contextual for lambdas. + if (parent.parent.nodeType === ParseNodeType.Lambda) { + parent = parent.parent; + } else { + break; + } + } else if (parent.nodeType === ParseNodeType.TypeParameter) { + // If this is a bound or default expression in a type parameter list, + // we need to evaluate it in the context of the type parameter. 
+ if (node === parent.d.boundExpr || node === parent.d.defaultExpr) { + getTypeOfTypeParam(parent); + return; + } + + break; + } else { + break; + } + } + + nodeToEvaluate = parent; + } + + const parent = nodeToEvaluate.parent!; + assert(parent !== undefined); + + switch (parent.nodeType) { + case ParseNodeType.Del: { + verifyDeleteExpression(nodeToEvaluate); + return; + } + + case ParseNodeType.TypeParameter: { + // If this is the name node within a type parameter list, see if it's a type alias + // definition. If so, we need to evaluate the type alias contextually. + if ( + nodeToEvaluate === parent.d.name && + parent.parent?.nodeType === ParseNodeType.TypeParameterList && + parent.parent.parent?.nodeType === ParseNodeType.TypeAlias + ) { + getTypeOfTypeAlias(parent.parent.parent); + return; + } + break; + } + + case ParseNodeType.TypeAlias: { + getTypeOfTypeAlias(parent); + return; + } + + case ParseNodeType.Decorator: { + if (parent.parent?.nodeType === ParseNodeType.Class) { + getTypeOfClass(parent.parent); + } else if (parent.parent?.nodeType === ParseNodeType.Function) { + getTypeOfFunction(parent.parent); + } + return; + } + + case ParseNodeType.Parameter: { + if (nodeToEvaluate !== parent.d.defaultValue) { + evaluateTypeOfParam(parent); + return; + } + break; + } + + case ParseNodeType.Argument: { + if (nodeToEvaluate === parent.d.name) { + // A name used to specify a named parameter in an argument isn't an + // expression, so there's nothing we can evaluate here. + return; + } + + if (parent.parent?.nodeType === ParseNodeType.Class) { + // A class argument must be evaluated in the context of the class declaration. + getTypeOfClass(parent.parent); + return; + } + break; + } + + case ParseNodeType.Return: { + // Return expressions must be evaluated in the context of the expected return type. + if (parent.d.expr) { + const enclosingFunctionNode = ParseTreeUtils.getEnclosingFunction(node); + let declaredReturnType = enclosingFunctionNode + ? 
getDeclaredReturnType(enclosingFunctionNode) + : undefined; + if (declaredReturnType) { + const liveScopeIds = ParseTreeUtils.getTypeVarScopesForNode(node); + declaredReturnType = makeTypeVarsBound(declaredReturnType, liveScopeIds); + } + getTypeOfExpression(parent.d.expr, EvalFlags.None, makeInferenceContext(declaredReturnType)); + return; + } + break; + } + + case ParseNodeType.TypeAnnotation: { + evaluateTypesForTypeAnnotationNode(parent); + return; + } + + case ParseNodeType.Assignment: { + evaluateTypesForAssignmentStatement(parent); + return; + } + } + + if (nodeToEvaluate.nodeType === ParseNodeType.TypeAnnotation) { + evaluateTypesForTypeAnnotationNode(nodeToEvaluate); + return; + } + + getTypeOfExpression(nodeToEvaluate, flags); + } + + function evaluateTypeOfParam(node: ParameterNode): void { + // If this parameter has no name, we have nothing to do. + if (!node.d.name) { + return; + } + + // We need to handle lambdas differently from functions because + // the former never have parameter type annotations but can + // be inferred, whereas the latter sometimes have type annotations + // but cannot be inferred. 
+ const parent = node.parent!; + if (parent.nodeType === ParseNodeType.Lambda) { + evaluateTypesForExpressionInContext(parent); + return; + } + + assert(parent.nodeType === ParseNodeType.Function); + const functionNode = parent as FunctionNode; + + const paramIndex = functionNode.d.params.findIndex((param) => param === node); + const typeAnnotation = ParseTreeUtils.getTypeAnnotationForParam(functionNode, paramIndex); + + if (typeAnnotation) { + const param = functionNode.d.params[paramIndex]; + let annotatedType = getTypeOfParamAnnotation(typeAnnotation, functionNode.d.params[paramIndex].d.category); + + const liveTypeVarScopes = ParseTreeUtils.getTypeVarScopesForNode(param); + annotatedType = makeTypeVarsBound(annotatedType, liveTypeVarScopes); + + const adjType = transformVariadicParamType( + node, + node.d.category, + adjustParamAnnotatedType(param, annotatedType) + ); + + writeTypeCache(node.d.name, { type: adjType }, EvalFlags.None); + return; + } + + const containingClassNode = ParseTreeUtils.getEnclosingClass(functionNode, /* stopAtFunction */ true); + const classInfo = containingClassNode ? getTypeOfClass(containingClassNode) : undefined; + + if ( + classInfo && + ClassType.isPseudoGenericClass(classInfo?.classType) && + functionNode.d.name.d.value === '__init__' + ) { + const typeParamName = getPseudoGenericTypeVarName(node.d.name.d.value); + const paramType = classInfo.classType.shared.typeParams.find( + (param) => param.shared.name === typeParamName + ); + + if (paramType) { + writeTypeCache(node.d.name, { type: TypeVarType.cloneAsBound(paramType) }, EvalFlags.None); + return; + } + } + + // See if the function is a method in a child class. We may be able to + // infer the type of the parameter from a method of the same name in + // a parent class if it has an annotated type. 
+ const functionFlags = getFunctionInfoFromDecorators( + evaluatorInterface, + functionNode, + /* isInClass */ true + ).flags; + + let inferredParamType = + inferParamType(functionNode, functionFlags, paramIndex, classInfo?.classType) ?? UnknownType.create(); + const liveTypeVarScopes = ParseTreeUtils.getTypeVarScopesForNode(node); + inferredParamType = makeTypeVarsBound(inferredParamType, liveTypeVarScopes); + + writeTypeCache( + node.d.name, + { type: transformVariadicParamType(node, node.d.category, inferredParamType) }, + EvalFlags.None + ); + } + + // Evaluates the types that are assigned within the statement that contains + // the specified parse node. In some cases, a broader statement may need to + // be evaluated to provide sufficient context for the type. Evaluated types + // are written back to the type cache for later retrieval. + function evaluateTypesForStatement(node: ParseNode): void { + initializePrefetchedTypes(node); + + let curNode: ParseNode | undefined = node; + + while (curNode) { + switch (curNode.nodeType) { + case ParseNodeType.Assignment: { + // See if the assignment is part of a chain of assignments. If so, + // evaluate the entire chain. 
+ const isInAssignmentChain = + curNode.parent && + (curNode.parent.nodeType === ParseNodeType.Assignment || + curNode.parent.nodeType === ParseNodeType.AssignmentExpression || + curNode.parent.nodeType === ParseNodeType.AugmentedAssignment) && + curNode.parent.d.rightExpr === curNode; + if (!isInAssignmentChain) { + evaluateTypesForAssignmentStatement(curNode); + return; + } + break; + } + + case ParseNodeType.TypeAlias: { + getTypeOfTypeAlias(curNode); + return; + } + + case ParseNodeType.AssignmentExpression: { + evaluateTypesForExpressionInContext(curNode); + return; + } + + case ParseNodeType.AugmentedAssignment: { + evaluateTypesForAugmentedAssignment(curNode); + return; + } + + case ParseNodeType.Class: { + getTypeOfClass(curNode); + return; + } + + case ParseNodeType.Parameter: { + evaluateTypeOfParam(curNode); + return; + } + + case ParseNodeType.Lambda: { + evaluateTypesForExpressionInContext(curNode); + return; + } + + case ParseNodeType.Function: { + getTypeOfFunction(curNode); + return; + } + + case ParseNodeType.For: { + evaluateTypesForForStatement(curNode); + return; + } + + case ParseNodeType.Except: { + evaluateTypesForExceptStatement(curNode); + return; + } + + case ParseNodeType.WithItem: { + evaluateTypesForWithStatement(curNode); + return; + } + + case ParseNodeType.ComprehensionFor: { + const comprehension = curNode.parent as ComprehensionNode; + assert(comprehension.nodeType === ParseNodeType.Comprehension); + if (curNode === comprehension.d.expr) { + evaluateTypesForExpressionInContext(comprehension); + } else { + // Evaluate the individual iterations starting with the first + // up to the curNode. 
+ for (const forIfNode of comprehension.d.forIfNodes) { + evaluateComprehensionForIf(forIfNode); + if (forIfNode === curNode) { + break; + } + } + } + return; + } + + case ParseNodeType.ImportAs: { + evaluateTypesForImportAs(curNode); + return; + } + + case ParseNodeType.ImportFromAs: { + evaluateTypesForImportFromAs(curNode); + return; + } + + case ParseNodeType.ImportFrom: { + evaluateTypesForImportFrom(curNode); + return; + } + + case ParseNodeType.Case: { + evaluateTypesForCaseStatement(curNode); + return; + } + } + + curNode = curNode.parent; + } + + fail('Unexpected statement'); + return undefined; + } + + // Helper function for cases where we need to evaluate the types + // for a subtree so we can determine the type of one of the subnodes + // within that tree. If the type cannot be determined (because it's part + // of a cyclical dependency), the function returns undefined. + function evaluateTypeForSubnode(subnode: ParseNode, callback: () => void): TypeResult | undefined { + // If the type cache is already populated with a complete type, + // don't bother doing additional work. + let cacheEntry = readTypeCacheEntry(subnode); + if (cacheEntry && !cacheEntry.typeResult.isIncomplete) { + const typeResult = cacheEntry.typeResult; + + // Handle the special case where a function or class is partially evaluated. + // Indicate that these are not complete types. 
+            if (isFunction(typeResult.type) && FunctionType.isPartiallyEvaluated(typeResult.type)) {
+                return { ...typeResult, isIncomplete: true };
+            }
+
+            if (isClass(typeResult.type) && ClassType.isPartiallyEvaluated(typeResult.type)) {
+                return { ...typeResult, isIncomplete: true };
+            }
+
+            return typeResult;
+        }
+
+        callback();
+        cacheEntry = readTypeCacheEntry(subnode);
+        if (cacheEntry) {
+            return cacheEntry.typeResult;
+        }
+
+        return undefined;
+    }
+
+    function getCodeFlowAnalyzerForNode(
+        node: ExecutionScopeNode,
+        typeAtStart: TypeResult | undefined
+    ): CodeFlowAnalyzer {
+        let entries = codeFlowAnalyzerCache.get(node.id);
+
+        if (entries) {
+            const cachedEntry = entries.find((entry) => {
+                if (!typeAtStart || !entry.typeAtStart) {
+                    return !typeAtStart && !entry.typeAtStart;
+                }
+
+                if (!typeAtStart.isIncomplete !== !entry.typeAtStart.isIncomplete) {
+                    return false;
+                }
+
+                return isTypeSame(typeAtStart.type, entry.typeAtStart.type);
+            });
+
+            if (cachedEntry) {
+                return cachedEntry.codeFlowAnalyzer;
+            }
+        }
+
+        // Allocate a new code flow analyzer.
+        const analyzer = codeFlowEngine.createCodeFlowAnalyzer();
+        if (entries) {
+            entries.push({ typeAtStart, codeFlowAnalyzer: analyzer });
+        } else {
+            entries = [{ typeAtStart, codeFlowAnalyzer: analyzer }];
+            codeFlowAnalyzerCache.set(node.id, entries);
+        }
+
+        return analyzer;
+    }
+
+    // Attempts to determine the type of the reference expression at the
+    // point in the code. If the code flow analysis has nothing to say
+    // about that expression, it returns an undefined type. Normally
+    // flow analysis starts from the reference node, but startNode can be
+    // specified to override this in a few special cases (functions and
+    // lambdas) to support analysis of captured variables.
+    function getFlowTypeOfReference(
+        reference: CodeFlowReferenceExpressionNode,
+        startNode?: ClassNode | FunctionNode | LambdaNode,
+        options?: FlowNodeTypeOptions
+    ): FlowNodeTypeResult {
+        // See if this execution scope requires code flow for this reference expression.
+        const referenceKey = createKeyForReference(reference);
+        const executionNode = ParseTreeUtils.getExecutionScopeNode(startNode?.parent ?? reference);
+        const codeFlowExpressions = AnalyzerNodeInfo.getCodeFlowExpressions(executionNode);
+
+        if (
+            !codeFlowExpressions ||
+            (!codeFlowExpressions.has(referenceKey) && !codeFlowExpressions.has(wildcardImportReferenceKey))
+        ) {
+            return FlowNodeTypeResult.create(/* type */ undefined, /* isIncomplete */ false);
+        }
+
+        if (checkCodeFlowTooComplex(reference)) {
+            return FlowNodeTypeResult.create(
+                /* type */ options?.typeAtStart && isUnbound(options.typeAtStart.type)
+                    ? UnknownType.create()
+                    : undefined,
+                /* isIncomplete */ true
+            );
+        }
+
+        // Is there a code flow analyzer cached for this execution scope?
+        let analyzer: CodeFlowAnalyzer | undefined;
+
+        if (isNodeInReturnTypeInferenceContext(executionNode)) {
+            // If we're performing the analysis within a temporary
+            // context of a function for purposes of inferring its
+            // return type for a specified set of arguments, use
+            // a temporary analyzer that we'll use only for this context.
+            analyzer = getCodeFlowAnalyzerForReturnTypeInferenceContext();
+        } else {
+            analyzer = getCodeFlowAnalyzerForNode(executionNode, options?.typeAtStart);
+        }
+
+        const flowNode = AnalyzerNodeInfo.getFlowNode(startNode ?? reference);
+        if (flowNode === undefined) {
+            return FlowNodeTypeResult.create(/* type */ undefined, /* isIncomplete */ false);
+        }
+
+        return analyzer.getTypeFromCodeFlow(flowNode!, reference, options);
+    }
+
+    // Specializes the specified (potentially generic) class type using
+    // the specified type arguments, reporting errors as appropriate.
+ // Returns the specialized type and a boolean indicating whether + // the type indicates a class type (true) or an object type (false). + function createSpecializedClassType( + classType: ClassType, + typeArgs: TypeResultWithNode[] | undefined, + flags: EvalFlags, + errorNode: ExpressionNode + ): TypeResult { + let isValidTypeForm = true; + + // Handle the special-case classes that are not defined + // in the type stubs. + if (ClassType.isSpecialBuiltIn(classType)) { + const aliasedName = classType.priv.aliasName || classType.shared.name; + switch (aliasedName) { + case 'Callable': { + return { type: createCallableType(classType, typeArgs, errorNode) }; + } + + case 'Never': + case 'NoReturn': { + if (typeArgs && typeArgs.length > 0) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeArgsExpectingNone().format({ name: aliasedName }), + typeArgs[0].node + ); + } + + let resultType = aliasedName === 'Never' ? NeverType.createNever() : NeverType.createNoReturn(); + resultType = TypeBase.cloneAsSpecialForm(resultType, classType); + if (isTypeFormSupported(errorNode)) { + resultType = TypeBase.cloneWithTypeForm(resultType, convertToInstance(resultType)); + } + + return { type: resultType }; + } + + case 'Optional': { + return { type: createOptionalType(classType, errorNode, typeArgs, flags) }; + } + + case 'Type': { + let typeType = createSpecialType( + classType, + typeArgs, + 1, + /* allowParamSpec */ undefined, + /* isSpecialForm */ false + ); + + if (isInstantiableClass(typeType)) { + typeType = explodeGenericClass(typeType); + } + + if (isTypeFormSupported(errorNode)) { + typeType = TypeBase.cloneWithTypeForm(typeType, convertToInstance(typeType)); + } + + return { type: typeType }; + } + + case 'ClassVar': { + return { type: createClassVarType(classType, errorNode, typeArgs, flags) }; + } + + case 'Protocol': { + if ((flags & (EvalFlags.NoNonTypeSpecialForms | EvalFlags.TypeExpression)) !== 0) { + 
addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.protocolNotAllowed(), errorNode); + } + + typeArgs?.forEach((typeArg) => { + if (typeArg.typeList || !isTypeVar(typeArg.type)) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.protocolTypeArgMustBeTypeParam(), + typeArg.node + ); + } + }); + + return { + type: createSpecialType( + classType, + typeArgs, + /* paramLimit */ undefined, + /* allowParamSpec */ true + ), + }; + } + + case 'TypedDict': { + if ((flags & (EvalFlags.NoNonTypeSpecialForms | EvalFlags.TypeExpression)) !== 0) { + const isInlinedTypedDict = + AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.enableExperimentalFeatures && + !!typeArgs; + + if (!isInlinedTypedDict) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typedDictNotAllowed(), + errorNode + ); + } + } + isValidTypeForm = false; + break; + } + + case 'Literal': { + if ((flags & (EvalFlags.NoNonTypeSpecialForms | EvalFlags.TypeExpression)) !== 0) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.literalNotAllowed(), errorNode); + } + isValidTypeForm = false; + break; + } + + case 'Tuple': { + return { + type: createSpecialType( + classType, + typeArgs, + /* paramLimit */ undefined, + /* allowParamSpec */ false, + /* isSpecialForm */ false + ), + }; + } + + case 'Union': { + return { type: createUnionType(classType, errorNode, typeArgs, flags) }; + } + + case 'Generic': { + return { type: createGenericType(classType, errorNode, typeArgs, flags) }; + } + + case 'Final': { + return { type: createFinalType(classType, errorNode, typeArgs, flags) }; + } + + case 'Annotated': { + return createAnnotatedType(classType, errorNode, typeArgs, flags); + } + + case 'Concatenate': { + return { type: createConcatenateType(classType, errorNode, typeArgs, flags) }; + } + + case 'TypeGuard': + case 'TypeIs': { + return { type: createTypeGuardType(classType, errorNode, typeArgs, flags) }; + } + + case 'Unpack': { + return { 
type: createUnpackType(classType, errorNode, typeArgs, flags) }; + } + + case 'Required': + case 'NotRequired': { + return createRequiredOrReadOnlyType(classType, errorNode, typeArgs, flags); + } + + case 'ReadOnly': { + return createRequiredOrReadOnlyType(classType, errorNode, typeArgs, flags); + } + + case 'Self': { + return { type: createSelfType(classType, errorNode, typeArgs, flags) }; + } + + case 'LiteralString': { + return { type: createSpecialType(classType, typeArgs, 0) }; + } + + case 'TypeForm': { + return { type: createTypeFormType(classType, errorNode, typeArgs) }; + } + } + } + + const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); + if ( + fileInfo.isStubFile || + PythonVersion.isGreaterOrEqualTo(fileInfo.executionEnvironment.pythonVersion, pythonVersion3_9) || + isAnnotationEvaluationPostponed(AnalyzerNodeInfo.getFileInfo(errorNode)) || + (flags & EvalFlags.ForwardRefs) !== 0 + ) { + // Handle "type" specially, since it needs to act like "Type" + // in Python 3.9 and newer. + if (ClassType.isBuiltIn(classType, 'type') && typeArgs) { + if (typeArgs.length >= 1) { + // Treat type[function] as illegal. + if (isFunctionOrOverloaded(typeArgs[0].type)) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeAnnotationWithCallable(), + typeArgs[0].node + ); + + return { type: UnknownType.create() }; + } + } + + if (prefetched?.typeClass && isInstantiableClass(prefetched.typeClass)) { + let typeType = createSpecialType( + prefetched.typeClass, + typeArgs, + 1, + /* allowParamSpec */ undefined, + /* isSpecialForm */ false + ); + + if (isInstantiableClass(typeType)) { + typeType = explodeGenericClass(typeType); + } + + if (isTypeFormSupported(errorNode)) { + typeType = TypeBase.cloneWithTypeForm(typeType, convertToInstance(typeType)); + } + + return { type: typeType }; + } + } + + // Handle "tuple" specially, since it needs to act like "Tuple" + // in Python 3.9 and newer. 
+ if (isTupleClass(classType)) { + let specializedClass = createSpecialType( + classType, + typeArgs, + /* paramLimit */ undefined, + /* allowParamSpec */ undefined, + /* isSpecialForm */ false + ); + + if (isTypeFormSupported(errorNode)) { + specializedClass = TypeBase.cloneWithTypeForm( + specializedClass, + convertToInstance(specializedClass) + ); + } + + return { type: specializedClass }; + } + } + + let typeArgCount = typeArgs ? typeArgs.length : 0; + + // Make sure the argument list count is correct. + const typeParams = ClassType.isPseudoGenericClass(classType) ? [] : ClassType.getTypeParams(classType); + + // If there are no type parameters or args, the class is already specialized. + // No need to do any more work. + if (typeParams.length === 0 && typeArgCount === 0) { + return { type: classType }; + } + + const variadicTypeParamIndex = typeParams.findIndex((param) => isTypeVarTuple(param)); + + if (typeArgs) { + let minTypeArgCount = typeParams.length; + const firstDefaultParamIndex = typeParams.findIndex((param) => !!param.shared.isDefaultExplicit); + + if (firstDefaultParamIndex >= 0) { + minTypeArgCount = firstDefaultParamIndex; + } + + // Classes that accept inlined type dict type args allow only one. 
+ if (typeArgs.length > 0 && typeArgs[0].inlinedTypeDict) { + if (typeArgs.length > 1) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeArguments, + LocMessage.typeArgsTooMany().format({ + name: classType.priv.aliasName || classType.shared.name, + expected: 1, + received: typeArgCount, + }), + typeArgs[1].node + ); + } + + return { type: typeArgs[0].inlinedTypeDict }; + } else if (typeArgCount > typeParams.length) { + if (!ClassType.isPartiallyEvaluated(classType) && !ClassType.isTupleClass(classType)) { + if (typeParams.length === 0) { + isValidTypeForm = false; + addDiagnostic( + DiagnosticRule.reportInvalidTypeArguments, + LocMessage.typeArgsExpectingNone().format({ + name: classType.priv.aliasName || classType.shared.name, + }), + typeArgs[typeParams.length].node + ); + } else if (typeParams.length !== 1 || !isParamSpec(typeParams[0])) { + isValidTypeForm = false; + addDiagnostic( + DiagnosticRule.reportInvalidTypeArguments, + LocMessage.typeArgsTooMany().format({ + name: classType.priv.aliasName || classType.shared.name, + expected: typeParams.length, + received: typeArgCount, + }), + typeArgs[typeParams.length].node + ); + } + + typeArgCount = typeParams.length; + } + } else if (typeArgCount < minTypeArgCount) { + isValidTypeForm = false; + addDiagnostic( + DiagnosticRule.reportInvalidTypeArguments, + LocMessage.typeArgsTooFew().format({ + name: classType.priv.aliasName || classType.shared.name, + expected: minTypeArgCount, + received: typeArgCount, + }), + typeArgs.length > 0 ? typeArgs[0].node.parent! : errorNode + ); + } + + typeArgs.forEach((typeArg, index) => { + if (!typeArg.type.props?.typeForm) { + isValidTypeForm = false; + } + + if (index === variadicTypeParamIndex) { + // The types that make up the tuple that maps to the + // TypeVarTuple have already been validated when the tuple + // object was created in adjustTypeArgsForTypeVarTuple. 
+ if (isClassInstance(typeArg.type) && isTupleClass(typeArg.type)) { + return; + } + + if (isTypeVarTuple(typeArg.type)) { + if (!validateTypeVarTupleIsUnpacked(typeArg.type, typeArg.node)) { + isValidTypeForm = false; + } + return; + } + } + + const typeParam = index < typeParams.length ? typeParams[index] : undefined; + const isParamSpecTarget = typeParam && isParamSpec(typeParam); + + if ( + !validateTypeArg(typeArg, { + allowParamSpec: true, + allowTypeArgList: isParamSpecTarget, + }) + ) { + isValidTypeForm = false; + } + }); + } + + // Handle ParamSpec arguments and fill in any missing type arguments with Unknown. + let typeArgTypes: Type[] = []; + const fullTypeParams = ClassType.getTypeParams(classType); + + typeArgs = transformTypeArgsForParamSpec(fullTypeParams, typeArgs, errorNode); + if (!typeArgs) { + isValidTypeForm = false; + } + + const constraints = new ConstraintTracker(); + + fullTypeParams.forEach((typeParam, index) => { + if (typeArgs && index < typeArgs.length) { + if (isParamSpec(typeParam)) { + const typeArg = typeArgs[index]; + const functionType = FunctionType.createSynthesizedInstance('', FunctionTypeFlags.ParamSpecValue); + + if (isEllipsisType(typeArg.type)) { + FunctionType.addDefaultParams(functionType); + functionType.shared.flags |= FunctionTypeFlags.GradualCallableForm; + typeArgTypes.push(functionType); + constraints.setBounds(typeParam, functionType); + return; + } + + if (typeArg.typeList) { + typeArg.typeList!.forEach((paramType, paramIndex) => { + FunctionType.addParam( + functionType, + FunctionParam.create( + ParamCategory.Simple, + convertToInstance(paramType.type), + FunctionParamFlags.NameSynthesized | FunctionParamFlags.TypeDeclared, + `__p${paramIndex}` + ) + ); + }); + + if (typeArg.typeList.length > 0) { + FunctionType.addPositionOnlyParamSeparator(functionType); + } + + typeArgTypes.push(functionType); + constraints.setBounds(typeParam, functionType); + return; + } + + if (isInstantiableClass(typeArg.type) && 
ClassType.isBuiltIn(typeArg.type, 'Concatenate')) { + const concatTypeArgs = typeArg.type.priv.typeArgs; + if (concatTypeArgs && concatTypeArgs.length > 0) { + concatTypeArgs.forEach((typeArg, index) => { + if (index === concatTypeArgs.length - 1) { + if (isParamSpec(typeArg)) { + FunctionType.addParamSpecVariadics(functionType, typeArg); + } else if (isEllipsisType(typeArg)) { + FunctionType.addDefaultParams(functionType); + functionType.shared.flags |= FunctionTypeFlags.GradualCallableForm; + } + } else { + FunctionType.addParam( + functionType, + FunctionParam.create( + ParamCategory.Simple, + typeArg, + FunctionParamFlags.NameSynthesized | FunctionParamFlags.TypeDeclared, + `__p${index}` + ) + ); + } + }); + } + + typeArgTypes.push(functionType); + return; + } + } + + const typeArgType = convertToInstance(typeArgs[index].type); + typeArgTypes.push(typeArgType); + constraints.setBounds(typeParam, typeArgType); + return; + } + + const solvedDefaultType = solveAndApplyConstraints(typeParam, constraints, { + replaceUnsolved: { + scopeIds: getTypeVarScopeIds(classType), + tupleClassType: getTupleClassType(), + }, + }); + typeArgTypes.push(solvedDefaultType); + constraints.setBounds(typeParam, solvedDefaultType); + }); + + typeArgTypes = typeArgTypes.map((typeArgType, index) => { + if (index < typeArgCount) { + const diag = new DiagnosticAddendum(); + let adjustedTypeArgType = applyTypeArgToTypeVar(typeParams[index], typeArgType, diag); + + // Determine if the variance must match. 
+ if (adjustedTypeArgType && (flags & EvalFlags.EnforceVarianceConsistency) !== 0) { + const destType = typeParams[index]; + const declaredVariance = destType.shared.declaredVariance; + + if (!isVarianceOfTypeArgCompatible(adjustedTypeArgType, declaredVariance)) { + diag.addMessage( + LocAddendum.varianceMismatchForClass().format({ + typeVarName: printType(adjustedTypeArgType), + className: classType.shared.name, + }) + ); + adjustedTypeArgType = undefined; + } + } + + if (adjustedTypeArgType) { + typeArgType = adjustedTypeArgType; + } else { + // Avoid emitting this error for a partially-constructed class. + if (!isClassInstance(typeArgType) || !ClassType.isPartiallyEvaluated(typeArgType)) { + assert(typeArgs !== undefined); + isValidTypeForm = false; + addDiagnostic( + DiagnosticRule.reportInvalidTypeArguments, + LocMessage.typeVarAssignmentMismatch().format({ + type: printType(typeArgType), + name: TypeVarType.getReadableName(typeParams[index]), + }) + diag.getString(), + typeArgs[index].node + ); + } + } + } + + return typeArgType; + }); + + // If the class is partially constructed and doesn't yet have + // type parameters, assume that the number and types of supplied type + // arguments are correct. + if (typeArgs && classType.shared.typeParams.length === 0 && ClassType.isPartiallyEvaluated(classType)) { + typeArgTypes = typeArgs.map((t) => convertToInstance(t.type)); + } + + let specializedClass = ClassType.specialize(classType, typeArgTypes, typeArgs !== undefined); + + if (isTypeFormSupported(errorNode)) { + specializedClass = TypeBase.cloneWithTypeForm( + specializedClass, + isValidTypeForm ? convertToInstance(specializedClass) : undefined + ); + } + + return { type: specializedClass }; + } + + // PEP 612 says that if the class has only one type parameter consisting + // of a ParamSpec, the list of arguments does not need to be enclosed in + // a list. We'll handle that case specially here. 
+ function transformTypeArgsForParamSpec( + typeParams: TypeVarType[], + typeArgs: TypeResultWithNode[] | undefined, + errorNode: ExpressionNode + ): TypeResultWithNode[] | undefined { + if (typeParams.length !== 1 || !isParamSpec(typeParams[0]) || !typeArgs) { + return typeArgs; + } + + if (typeArgs.length > 1) { + for (const typeArg of typeArgs) { + if (isParamSpec(typeArg.type)) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.paramSpecContext(), typeArg.node); + return undefined; + } + + if (isEllipsisType(typeArg.type)) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.ellipsisContext(), typeArg.node); + return undefined; + } + + if (isInstantiableClass(typeArg.type) && ClassType.isBuiltIn(typeArg.type, 'Concatenate')) { + addDiagnostic(DiagnosticRule.reportInvalidTypeForm, LocMessage.concatenateContext(), typeArg.node); + return undefined; + } + + if (typeArg.typeList) { + addDiagnostic( + DiagnosticRule.reportInvalidTypeForm, + LocMessage.typeArgListNotAllowed(), + typeArg.node + ); + return undefined; + } + } + } + + if (typeArgs.length === 1) { + // Don't transform a type list. + if (typeArgs[0].typeList) { + return typeArgs; + } + + const typeArgType = typeArgs[0].type; + + // Don't transform a single ParamSpec or ellipsis. + if (isParamSpec(typeArgType) || isEllipsisType(typeArgType)) { + return typeArgs; + } + + // Don't transform a Concatenate. + if (isInstantiableClass(typeArgType) && ClassType.isBuiltIn(typeArgType, 'Concatenate')) { + return typeArgs; + } + } + + // Package up the type arguments into a type list. + return [ + { + type: UnknownType.create(), + node: typeArgs.length > 0 ? typeArgs[0].node : errorNode, + typeList: typeArgs, + }, + ]; + } + + function getTypeOfArg(arg: Arg, inferenceContext: InferenceContext | undefined): TypeResult { + if (arg.typeResult) { + const type = arg.typeResult.type; + return { type: type?.props?.specialForm ?? 
type, isIncomplete: arg.typeResult.isIncomplete }; + } + + if (!arg.valueExpression) { + // We shouldn't ever get here, but just in case. + return { type: UnknownType.create() }; + } + + // If there was no defined type provided, there should always + // be a value expression from which we can retrieve the type. + return getTypeOfExpression(arg.valueExpression, /* flags */ undefined, inferenceContext); + } + + // This function is like getTypeOfArg except that it is + // used in cases where the argument is expected to be a type + // and therefore follows the normal rules of types (e.g. they + // can be forward-declared in stubs, etc.). + function getTypeOfArgExpectingType(arg: Arg, options?: ExpectedTypeOptions): TypeResult { + if (arg.typeResult) { + return { type: arg.typeResult.type, isIncomplete: arg.typeResult.isIncomplete }; + } + + // If there was no defined type provided, there should always + // be a value expression from which we can retrieve the type. + assert(arg.valueExpression !== undefined); + return getTypeOfExpressionExpectingType(arg.valueExpression, options); + } + + function getTypeOfExpressionExpectingType(node: ExpressionNode, options?: ExpectedTypeOptions): TypeResult { + let flags = EvalFlags.InstantiableType | EvalFlags.StrLiteralAsType; + + if (options?.allowTypeVarsWithoutScopeId) { + flags |= EvalFlags.AllowTypeVarWithoutScopeId; + } + + if (options?.typeVarGetsCurScope) { + flags |= EvalFlags.TypeVarGetsCurScope; + } + + if (options?.enforceClassTypeVarScope) { + flags |= EvalFlags.EnforceClassTypeVarScope; + } + + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + if ((isAnnotationEvaluationPostponed(fileInfo) || options?.forwardRefs) && !options?.runtimeTypeExpression) { + flags |= EvalFlags.ForwardRefs; + } else if (options?.parsesStringLiteral) { + flags |= EvalFlags.ParsesStringLiteral; + } + + if (!options?.allowFinal) { + flags |= EvalFlags.NoFinal; + } + + if (options?.allowRequired) { + flags |= EvalFlags.AllowRequired | 
EvalFlags.TypeExpression; + } + + if (options?.allowReadOnly) { + flags |= EvalFlags.AllowReadOnly | EvalFlags.TypeExpression; + } + + if (options?.allowUnpackedTuple) { + flags |= EvalFlags.AllowUnpackedTuple; + } else { + flags |= EvalFlags.NoTypeVarTuple; + } + + if (options?.allowUnpackedTypedDict) { + flags |= EvalFlags.AllowUnpackedTypedDict; + } + + if (!options?.allowParamSpec) { + flags |= EvalFlags.NoParamSpec; + } + + if (options?.typeExpression) { + flags |= EvalFlags.TypeExpression; + } + + if (options?.convertEllipsisToAny) { + flags |= EvalFlags.ConvertEllipsisToAny; + } + + if (options?.allowEllipsis) { + flags |= EvalFlags.AllowEllipsis; + } + + if (options?.noNonTypeSpecialForms) { + flags |= EvalFlags.NoNonTypeSpecialForms; + } + + if (!options?.allowClassVar) { + flags |= EvalFlags.NoClassVar; + } + + if (options?.varTypeAnnotation) { + flags |= EvalFlags.VarTypeAnnotation; + } + + if (options?.notParsed) { + flags |= EvalFlags.NotParsed; + } + + if (options?.typeFormArg) { + flags |= EvalFlags.TypeFormArg; + } + + return getTypeOfExpression(node, flags); + } + + function getBuiltInType(node: ParseNode, name: string): Type { + const scope = ScopeUtils.getScopeForNode(node); + if (scope) { + const builtInScope = ScopeUtils.getBuiltInScope(scope); + const nameType = builtInScope.lookUpSymbol(name); + if (nameType) { + return getEffectiveTypeOfSymbol(nameType); + } + } + + return UnknownType.create(); + } + + function getBuiltInObject(node: ParseNode, name: string, typeArgs?: Type[]) { + const nameType = getBuiltInType(node, name); + if (isInstantiableClass(nameType)) { + let classType = nameType; + if (typeArgs) { + classType = ClassType.specialize(classType, typeArgs); + } + + return ClassType.cloneAsInstance(classType); + } + + return nameType; + } + + function lookUpSymbolRecursive( + node: ParseNode, + name: string, + honorCodeFlow: boolean, + preferGlobalScope = false + ): SymbolWithScope | undefined { + const scopeNodeInfo = 
ParseTreeUtils.getEvaluationScopeNode(node); + const scope = AnalyzerNodeInfo.getScope(scopeNodeInfo.node); + + let symbolWithScope = scope?.lookUpSymbolRecursive(name, { useProxyScope: !!scopeNodeInfo.useProxyScope }); + const scopeType = scope?.type ?? ScopeType.Module; + + // Functions and list comprehensions don't allow access to implicitly + // aliased symbols in outer scopes if they haven't yet been assigned + // within the local scope. + let scopeTypeHonorsCodeFlow = scopeType !== ScopeType.Function && scopeType !== ScopeType.Comprehension; + + // Type parameter scopes don't honor code flow. + if (symbolWithScope?.scope.type === ScopeType.TypeParameter) { + scopeTypeHonorsCodeFlow = false; + } + + if (symbolWithScope && honorCodeFlow && scopeTypeHonorsCodeFlow) { + // Filter the declarations based on flow reachability. + const reachableDecl = symbolWithScope.symbol.getDeclarations().find((decl) => { + if (decl.type !== DeclarationType.Alias && decl.type !== DeclarationType.Intrinsic) { + // Determine if the declaration is in the same execution scope as the "usageNode" node. + let usageScopeNode = ParseTreeUtils.getExecutionScopeNode(node); + const declNode: ParseNode = + decl.type === DeclarationType.Class || + decl.type === DeclarationType.Function || + decl.type === DeclarationType.TypeAlias + ? decl.node.d.name + : decl.node; + const declScopeNode = ParseTreeUtils.getExecutionScopeNode(declNode); + + // If this is a type parameter scope, it will be a proxy for its + // containing scope, so we need to use that instead. 
+ const usageScope = AnalyzerNodeInfo.getScope(usageScopeNode); + if (usageScope?.proxy) { + const typeParamScope = AnalyzerNodeInfo.getScope(usageScopeNode); + if (!typeParamScope?.symbolTable.has(name) && usageScopeNode.parent) { + usageScopeNode = ParseTreeUtils.getExecutionScopeNode(usageScopeNode.parent); + } + } + + if (usageScopeNode === declScopeNode) { + if (!isFlowPathBetweenNodes(declNode, node)) { + // If there was no control flow path from the usage back + // to the source, see if the usage node is reachable by + // any path. + const flowNode = AnalyzerNodeInfo.getFlowNode(node); + const isReachable = + flowNode && + codeFlowEngine.getFlowNodeReachability( + flowNode, + /* sourceFlowNode */ undefined, + /* ignoreNoReturn */ true + ) === Reachability.Reachable; + return !isReachable; + } + } + } + return true; + }); + + // If none of the declarations are reachable from the current node, + // search for the symbol in outer scopes. + if (!reachableDecl) { + if (symbolWithScope.scope.type !== ScopeType.Function) { + let nextScopeToSearch = symbolWithScope.scope.parent; + const isOutsideCallerModule = + symbolWithScope.isOutsideCallerModule || symbolWithScope.scope.type === ScopeType.Module; + let isBeyondExecutionScope = + symbolWithScope.isBeyondExecutionScope || symbolWithScope.scope.isIndependentlyExecutable(); + + if (symbolWithScope.scope.type === ScopeType.Class) { + // There is an odd documented behavior for classes in that + // symbol resolution skips to the global scope rather than + // the next scope in the chain. 
+ const globalScopeResult = symbolWithScope.scope.getGlobalScope(); + nextScopeToSearch = globalScopeResult.scope; + if (globalScopeResult.isBeyondExecutionScope) { + isBeyondExecutionScope = true; + } + } + + if (nextScopeToSearch) { + symbolWithScope = nextScopeToSearch.lookUpSymbolRecursive(name, { + isOutsideCallerModule, + isBeyondExecutionScope, + }); + } else { + symbolWithScope = undefined; + } + } else { + symbolWithScope = undefined; + } + } + } + + // PEP 563 indicates that if a forward reference can be resolved in the module + // scope (or, by implication, in the builtins scope), it should prefer that + // resolution over local resolutions. + if (symbolWithScope && preferGlobalScope) { + let curSymbolWithScope: SymbolWithScope | undefined = symbolWithScope; + while ( + curSymbolWithScope.scope.type !== ScopeType.Module && + curSymbolWithScope.scope.type !== ScopeType.Builtin && + curSymbolWithScope.scope.type !== ScopeType.TypeParameter && + curSymbolWithScope.scope.parent + ) { + curSymbolWithScope = curSymbolWithScope.scope.parent.lookUpSymbolRecursive(name, { + isOutsideCallerModule: curSymbolWithScope.isOutsideCallerModule, + isBeyondExecutionScope: + curSymbolWithScope.isBeyondExecutionScope || + curSymbolWithScope.scope.isIndependentlyExecutable(), + }); + if (!curSymbolWithScope) { + break; + } + } + + if ( + curSymbolWithScope?.scope.type === ScopeType.Module || + curSymbolWithScope?.scope.type === ScopeType.Builtin + ) { + symbolWithScope = curSymbolWithScope; + } + } + + return symbolWithScope; + } + + // Disables recording of errors and warnings. + function suppressDiagnostics( + node: ParseNode, + callback: () => T, + diagCallback?: (suppressedDiags: string[]) => void + ) { + suppressedNodeStack.push({ node, suppressedDiags: diagCallback ? 
[] : undefined }); + + try { + const result = callback(); + const poppedNode = suppressedNodeStack.pop(); + if (diagCallback && poppedNode?.suppressedDiags) { + diagCallback(poppedNode.suppressedDiags); + } + return result; + } catch (e) { + // We don't use finally here because the TypeScript debugger doesn't + // handle finally well when single stepping. + suppressedNodeStack.pop(); + throw e; + } + } + + function getSignatureTrackerForNode(node: ParseNode): UniqueSignatureTracker | undefined { + for (let i = signatureTrackerStack.length - 1; i >= 0; i--) { + const rootNode = signatureTrackerStack[i].rootNode; + if (ParseTreeUtils.isNodeContainedWithin(node, rootNode)) { + return signatureTrackerStack[i].tracker; + } + } + + return undefined; + } + + function useSignatureTracker(node: ParseNode, callback: () => T): T { + const tracker = getSignatureTrackerForNode(node); + + try { + // If a signature tracker doesn't already exist, allocate one. + if (!tracker) { + signatureTrackerStack.push({ + tracker: new UniqueSignatureTracker(), + rootNode: node, + }); + } + + const result = callback(); + + if (!tracker) { + signatureTrackerStack.pop(); + } + + return result; + } catch (e) { + // We don't use finally here because the TypeScript debugger doesn't + // handle finally well when single stepping. + if (!tracker) { + signatureTrackerStack.pop(); + } + + throw e; + } + } + + function ensureSignatureIsUnique(type: T, node: ParseNode): T { + const tracker = getSignatureTrackerForNode(node); + if (!tracker) { + return type; + } + + if (isFunctionOrOverloaded(type)) { + return ensureSignaturesAreUnique(type, tracker, node.start); + } + + return type; + } + + // Disables recording of errors and warnings and disables any caching of + // types, under the assumption that we're performing speculative evaluations. + // If speculativeNode is undefined, speculative mode is not used. This is + // useful in cases where we conditionally want to use speculative mode. 
function useSpeculativeMode<T>(
+ function isSpeculativeModeInUse(node: ParseNode | undefined) { + return speculativeTypeTracker.isSpeculative(node); + } + + function getDeclarationFromKeywordParam(type: FunctionType, paramName: string): Declaration | undefined { + if (isFunction(type)) { + if (type.shared.declaration) { + const functionDecl = type.shared.declaration; + if (functionDecl.type === DeclarationType.Function) { + const functionNode = functionDecl.node; + const functionScope = AnalyzerNodeInfo.getScope(functionNode); + if (functionScope) { + const paramSymbol = functionScope.lookUpSymbol(paramName)!; + if (paramSymbol) { + return paramSymbol.getDeclarations().find((decl) => decl.type === DeclarationType.Param); + } + + const parameterDetails = getParamListDetails(type); + if (parameterDetails.unpackedKwargsTypedDictType) { + const lookupResults = lookUpClassMember( + parameterDetails.unpackedKwargsTypedDictType, + paramName + ); + if (lookupResults) { + return lookupResults.symbol + .getDeclarations() + .find((decl) => decl.type === DeclarationType.Variable); + } + } + } + } + } + } + + return undefined; + } + + // In general, string nodes don't have any declarations associated with them, but + // we need to handle the special case of string literals used as keys within a + // dictionary expression where those keys are associated with a known TypedDict. + function getDeclInfoForStringNode(node: StringNode): SymbolDeclInfo | undefined { + const decls: Declaration[] = []; + const synthesizedTypes: SynthesizedTypeInfo[] = []; + const expectedType = getExpectedType(node)?.type; + + if (expectedType) { + doForEachSubtype(expectedType, (subtype) => { + // If the expected type is a TypedDict then the node is either a key expression + // or a single entry in a set. We then need to check that the value of the node + // is a valid entry in the TypedDict to avoid resolving declarations for + // synthesized symbols such as 'get'. 
+ if (isClassInstance(subtype) && ClassType.isTypedDictClass(subtype)) { + const entry = subtype.shared.typedDictEntries?.knownItems.get(node.d.value); + if (entry) { + const symbol = lookUpObjectMember(subtype, node.d.value)?.symbol; + + if (symbol) { + appendArray(decls, symbol.getDeclarations()); + + const synthTypeInfo = symbol.getSynthesizedType(); + if (synthTypeInfo) { + synthesizedTypes.push(synthTypeInfo); + } + } + } + } + }); + } + + return decls.length === 0 ? undefined : { decls, synthesizedTypes }; + } + + function getAliasFromImport(node: NameNode): NameNode | undefined { + if ( + node.parent && + node.parent.nodeType === ParseNodeType.ImportFromAs && + node.parent.d.alias && + node === node.parent.d.name + ) { + return node.parent.d.alias; + } + return undefined; + } + + function getDeclInfoForNameNode(node: NameNode, skipUnreachableCode = true): SymbolDeclInfo | undefined { + if (skipUnreachableCode && AnalyzerNodeInfo.isCodeUnreachable(node)) { + return undefined; + } + + const decls: Declaration[] = []; + const synthesizedTypes: SynthesizedTypeInfo[] = []; + + // If the node is part of a "from X import Y as Z" statement and the node + // is the "Y" (non-aliased) name, we need to look up the alias symbol + // since the non-aliased name is not in the symbol table. + const alias = getAliasFromImport(node); + if (alias) { + const scope = ScopeUtils.getScopeForNode(node); + if (scope) { + // Look up the alias symbol. + const symbolInScope = scope.lookUpSymbolRecursive(alias.d.value); + if (symbolInScope) { + // The alias could have more decls that don't refer to this import. Filter + // out the one(s) that specifically associated with this import statement. 
+ const declsForThisImport = symbolInScope.symbol.getDeclarations().filter((decl) => { + return decl.type === DeclarationType.Alias && decl.node === node.parent; + }); + + appendArray(decls, getDeclarationsWithUsesLocalNameRemoved(declsForThisImport)); + } + } + } else if ( + node.parent && + node.parent.nodeType === ParseNodeType.MemberAccess && + node === node.parent.d.member + ) { + let baseType = getType(node.parent.d.leftExpr); + if (baseType) { + baseType = makeTopLevelTypeVarsConcrete(baseType); + const memberName = node.parent.d.member.d.value; + doForEachSubtype(baseType, (subtype) => { + let symbol: Symbol | undefined; + + subtype = makeTopLevelTypeVarsConcrete(subtype); + + if (isInstantiableClass(subtype)) { + // Try to find a member that has a declared type. If so, that + // overrides any inferred types. + let member = lookUpClassMember(subtype, memberName, MemberAccessFlags.DeclaredTypesOnly); + if (!member) { + member = lookUpClassMember(subtype, memberName); + } + + if (!member) { + const metaclass = subtype.shared.effectiveMetaclass; + if (metaclass && isInstantiableClass(metaclass)) { + member = lookUpClassMember(metaclass, memberName); + } + } + + if (member) { + symbol = member.symbol; + } + } else if (isClassInstance(subtype)) { + // Try to find a member that has a declared type. If so, that + // overrides any inferred types. + let member = lookUpObjectMember(subtype, memberName, MemberAccessFlags.DeclaredTypesOnly); + if (!member) { + member = lookUpObjectMember(subtype, memberName); + } + if (member) { + symbol = member.symbol; + } + } else if (isModule(subtype)) { + symbol = ModuleType.getField(subtype, memberName); + } + + if (symbol) { + // By default, report only the declarations that have type annotations. + // If there are none, then report all of the unannotated declarations, + // which includes every assignment of that symbol. 
+ const typedDecls = symbol.getTypedDeclarations(); + if (typedDecls.length > 0) { + appendArray(decls, typedDecls); + } else { + appendArray(decls, symbol.getDeclarations()); + } + + const synthTypeInfo = symbol.getSynthesizedType(); + if (synthTypeInfo) { + synthesizedTypes.push(synthTypeInfo); + } + } + }); + } + } else if (node.parent && node.parent.nodeType === ParseNodeType.ModuleName) { + const namePartIndex = node.parent.d.nameParts.findIndex((part) => part === node); + const importInfo = AnalyzerNodeInfo.getImportInfo(node.parent); + if ( + namePartIndex >= 0 && + importInfo && + !importInfo.isNativeLib && + namePartIndex < importInfo.resolvedUris.length + ) { + if (importInfo.resolvedUris[namePartIndex]) { + evaluateTypesForStatement(node); + + // Synthesize an alias declaration for this name part. The only + // time this case is used is for IDE services such as + // the find all references, hover provider and etc. + decls.push(synthesizeAliasDeclaration(importInfo.resolvedUris[namePartIndex])); + } + } + } else if (node.parent && node.parent.nodeType === ParseNodeType.Argument && node === node.parent.d.name) { + // The target node is the name in a keyword argument. We need to determine whether + // the corresponding keyword parameter can be determined from the context. 
+ const argNode = node.parent; + const paramName = node.d.value; + if (argNode.parent?.nodeType === ParseNodeType.Call) { + const baseType = getType(argNode.parent.d.leftExpr); + + if (baseType) { + if (isFunction(baseType) && baseType.shared.declaration) { + const paramDecl = getDeclarationFromKeywordParam(baseType, paramName); + if (paramDecl) { + decls.push(paramDecl); + } + } else if (isOverloaded(baseType)) { + OverloadedType.getOverloads(baseType).forEach((f) => { + const paramDecl = getDeclarationFromKeywordParam(f, paramName); + if (paramDecl) { + decls.push(paramDecl); + } + }); + } else if (isInstantiableClass(baseType)) { + const initMethodType = getBoundInitMethod( + evaluatorInterface, + argNode.parent.d.leftExpr, + ClassType.cloneAsInstance(baseType) + )?.type; + + if (initMethodType && isFunction(initMethodType)) { + const paramDecl = getDeclarationFromKeywordParam(initMethodType, paramName); + if (paramDecl) { + decls.push(paramDecl); + } else if ( + ClassType.isDataClass(baseType) || + ClassType.isTypedDictClass(baseType) || + ClassType.hasNamedTupleEntry(baseType, paramName) + ) { + const lookupResults = lookUpClassMember(baseType, paramName); + + if (lookupResults) { + appendArray(decls, lookupResults.symbol.getDeclarations()); + + const synthTypeInfo = lookupResults.symbol.getSynthesizedType(); + if (synthTypeInfo) { + synthesizedTypes.push(synthTypeInfo); + } + } + } + } else if ( + ClassType.isDataClass(baseType) || + ClassType.isTypedDictClass(baseType) || + ClassType.hasNamedTupleEntry(baseType, paramName) + ) { + // Some synthesized callables (notably TypedDict "constructors") don't have a + // meaningful __init__ signature we can map keyword arguments to. In these cases, + // treat the keyword as referring to the class entry so IDE features like + // go-to-definition and rename can bind to the field declaration. 
+ const lookupResults = lookUpClassMember(baseType, paramName); + + if (lookupResults) { + appendArray(decls, lookupResults.symbol.getDeclarations()); + + const synthTypeInfo = lookupResults.symbol.getSynthesizedType(); + if (synthTypeInfo) { + synthesizedTypes.push(synthTypeInfo); + } + } + } + } + } + } else if (argNode.parent?.nodeType === ParseNodeType.Class) { + const classTypeResult = getTypeOfClass(argNode.parent); + + // Validate the init subclass args for this class so we can properly + // evaluate its custom keyword parameters. + if (classTypeResult) { + validateInitSubclassArgs(argNode.parent, classTypeResult.classType); + } + } + } else { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + + // Determine if this node is within a quoted type annotation. + const isWithinTypeAnnotation = ParseTreeUtils.isWithinTypeAnnotation( + node, + !isAnnotationEvaluationPostponed(AnalyzerNodeInfo.getFileInfo(node)) + ); + + // Determine if this is part of a "type" statement. + const isWithinTypeAliasStatement = !!ParseTreeUtils.getParentNodeOfType(node, ParseNodeType.TypeAlias); + const allowForwardReferences = isWithinTypeAnnotation || isWithinTypeAliasStatement || fileInfo.isStubFile; + + const symbolWithScope = lookUpSymbolRecursive( + node, + node.d.value, + !allowForwardReferences, + isWithinTypeAnnotation + ); + + if (symbolWithScope) { + appendArray(decls, symbolWithScope.symbol.getDeclarations()); + + const synthTypeInfo = symbolWithScope.symbol.getSynthesizedType(); + if (synthTypeInfo) { + synthesizedTypes.push(synthTypeInfo); + } + } + } + + return { decls, synthesizedTypes }; + } + + function getTypeForDeclaration(declaration: Declaration): DeclaredSymbolTypeInfo { + switch (declaration.type) { + case DeclarationType.Intrinsic: { + if (declaration.intrinsicType === 'Any') { + return { type: AnyType.create() }; + } + + if (declaration.intrinsicType === '__class__') { + const classNode = ParseTreeUtils.getEnclosingClass(declaration.node) as ClassNode; + 
const classTypeInfo = getTypeOfClass(classNode); + return { + type: classTypeInfo + ? specializeWithUnknownTypeArgs(classTypeInfo.classType, getTupleClassType()) + : UnknownType.create(), + }; + } + + const strType = getBuiltInObject(declaration.node, 'str'); + const intType = getBuiltInObject(declaration.node, 'int'); + if (isClassInstance(intType) && isClassInstance(strType)) { + if (declaration.intrinsicType === 'str') { + return { type: strType }; + } + + if (declaration.intrinsicType === 'str | None') { + return { type: combineTypes([strType, getNoneType()]) }; + } + + if (declaration.intrinsicType === 'int') { + return { type: intType }; + } + + if (declaration.intrinsicType === 'MutableSequence[str]') { + const sequenceType = getBuiltInType(declaration.node, 'MutableSequence'); + if (isInstantiableClass(sequenceType)) { + return { + type: ClassType.cloneAsInstance(ClassType.specialize(sequenceType, [strType])), + }; + } + } + + if (declaration.intrinsicType === 'dict[str, Any]') { + const dictType = getBuiltInType(declaration.node, 'dict'); + if (isInstantiableClass(dictType)) { + return { + type: ClassType.cloneAsInstance( + ClassType.specialize(dictType, [strType, AnyType.create()]) + ), + }; + } + } + } + + return { type: UnknownType.create() }; + } + + case DeclarationType.Class: { + const classTypeInfo = getTypeOfClass(declaration.node); + return { type: classTypeInfo?.decoratedType }; + } + + case DeclarationType.SpecialBuiltInClass: { + return { type: getTypeOfAnnotation(declaration.node.d.annotation) }; + } + + case DeclarationType.Function: { + const functionTypeInfo = getTypeOfFunction(declaration.node); + return { type: functionTypeInfo?.decoratedType }; + } + + case DeclarationType.TypeAlias: { + return { type: getTypeOfTypeAlias(declaration.node) }; + } + + case DeclarationType.Param: { + let typeAnnotationNode = declaration.node.d.annotation ?? 
declaration.node.d.annotationComment; + + // If there wasn't an annotation, see if the parent function + // has a function-level annotation comment that provides + // this parameter's annotation type. + if (!typeAnnotationNode) { + if (declaration.node.parent?.nodeType === ParseNodeType.Function) { + const functionNode = declaration.node.parent; + if ( + functionNode.d.funcAnnotationComment && + !functionNode.d.funcAnnotationComment.d.isEllipsis + ) { + const paramIndex = functionNode.d.params.findIndex((param) => param === declaration.node); + typeAnnotationNode = ParseTreeUtils.getTypeAnnotationForParam(functionNode, paramIndex); + } + } + } + + if (typeAnnotationNode) { + let declaredType = getTypeOfParamAnnotation(typeAnnotationNode, declaration.node.d.category); + + const liveTypeVarScopes = ParseTreeUtils.getTypeVarScopesForNode(declaration.node); + declaredType = makeTypeVarsBound(declaredType, liveTypeVarScopes); + + return { + type: transformVariadicParamType( + declaration.node, + declaration.node.d.category, + adjustParamAnnotatedType(declaration.node, declaredType) + ), + }; + } + + return { type: undefined }; + } + + case DeclarationType.TypeParam: { + return { type: getTypeOfTypeParam(declaration.node) }; + } + + case DeclarationType.Variable: { + const typeAnnotationNode = declaration.typeAnnotationNode; + + if (typeAnnotationNode) { + let declaredType: Type | undefined; + + if (declaration.isRuntimeTypeExpression) { + declaredType = convertToInstance( + getTypeOfExpressionExpectingType(typeAnnotationNode, { + allowFinal: true, + allowRequired: true, + allowReadOnly: true, + runtimeTypeExpression: true, + }).type + ); + } else { + const declNode = + declaration.isDefinedByMemberAccess && + declaration.node.parent?.nodeType === ParseNodeType.MemberAccess + ? 
declaration.node.parent + : declaration.node; + const allowClassVar = isClassVarAllowedForAssignmentTarget(declNode); + const allowFinal = isFinalAllowedForAssignmentTarget(declNode); + const allowRequired = + ParseTreeUtils.isRequiredAllowedForAssignmentTarget(declNode) || + !!declaration.isInInlinedTypedDict; + + declaredType = getTypeOfAnnotation(typeAnnotationNode, { + varTypeAnnotation: true, + allowClassVar, + allowFinal, + allowRequired, + allowReadOnly: allowRequired, + enforceClassTypeVarScope: declaration.isDefinedByMemberAccess, + }); + } + + if (declaredType) { + // If this is a declaration for a member variable within a method, + // we need to convert any bound TypeVars associated with the + // class to their free counterparts. + if (declaration.isDefinedByMemberAccess) { + const enclosingClass = ParseTreeUtils.getEnclosingClass(declaration.node); + + if (enclosingClass) { + declaredType = makeTypeVarsFree(declaredType, [ + ParseTreeUtils.getScopeIdForNode(enclosingClass), + ]); + } + } + + if (isClassInstance(declaredType) && ClassType.isBuiltIn(declaredType, 'TypeAlias')) { + return { type: undefined, isTypeAlias: true }; + } + + return { type: declaredType }; + } + } + + return { type: undefined }; + } + + case DeclarationType.Alias: { + return { type: undefined }; + } + } + } + + function getTypeOfTypeParam(node: TypeParameterNode): TypeVarType { + // Is this type already cached? 
const cachedTypeVarType = readTypeCache(node.d.name, EvalFlags.None) as TypeVarType;
+ writeTypeCache(node, { type: typeVar }, /* flags */ undefined); + writeTypeCache(node.d.name, { type: typeVar }, /* flags */ undefined); + + if (node.d.boundExpr) { + if (node.d.boundExpr.nodeType === ParseNodeType.Tuple) { + const constraints = node.d.boundExpr.d.items.map((constraint) => { + const constraintType = getTypeOfExpressionExpectingType(constraint, { + noNonTypeSpecialForms: true, + forwardRefs: true, + typeExpression: true, + }).type; + + if ( + requiresSpecialization(constraintType, { + ignorePseudoGeneric: true, + ignoreImplicitTypeArgs: true, + }) + ) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarBoundGeneric(), + constraint + ); + } + + return convertToInstance(constraintType); + }); + + if (constraints.length < 2) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarSingleConstraint(), + node.d.boundExpr + ); + } else if (node.d.typeParamKind === TypeParamKind.TypeVar) { + typeVar.shared.constraints = constraints; + } + } else { + const boundType = getTypeOfExpressionExpectingType(node.d.boundExpr, { + noNonTypeSpecialForms: true, + forwardRefs: true, + typeExpression: true, + }).type; + + if (requiresSpecialization(boundType, { ignorePseudoGeneric: true })) { + addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeVarConstraintGeneric(), + node.d.boundExpr + ); + } + + if (node.d.typeParamKind === TypeParamKind.TypeVar) { + typeVar.shared.boundType = convertToInstance(boundType); + } + } + } + + if (node.d.typeParamKind === TypeParamKind.ParamSpec) { + const defaultType = node.d.defaultExpr + ? 
getParamSpecDefaultType(node.d.defaultExpr, /* isPep695Syntax */ true) + : undefined; + + if (defaultType) { + typeVar.shared.defaultType = defaultType; + typeVar.shared.isDefaultExplicit = true; + } else { + typeVar.shared.defaultType = ParamSpecType.getUnknown(); + } + } else if (node.d.typeParamKind === TypeParamKind.TypeVarTuple) { + const defaultType = node.d.defaultExpr + ? getTypeVarTupleDefaultType(node.d.defaultExpr, /* isPep695Syntax */ true) + : undefined; + + if (defaultType) { + typeVar.shared.defaultType = defaultType; + typeVar.shared.isDefaultExplicit = true; + } else { + typeVar.shared.defaultType = makeTupleObject(evaluatorInterface, [ + { type: UnknownType.create(), isUnbounded: true }, + ]); + } + } else { + const defaultType = node.d.defaultExpr + ? convertToInstance( + getTypeOfExpressionExpectingType(node.d.defaultExpr, { + forwardRefs: true, + typeExpression: true, + }).type + ) + : undefined; + + if (defaultType) { + typeVar.shared.defaultType = defaultType; + typeVar.shared.isDefaultExplicit = true; + } else { + typeVar.shared.defaultType = UnknownType.create(); + } + } + + // If a default is provided, make sure it is compatible with the bound + // or constraint. + if (typeVar.shared.isDefaultExplicit && node.d.defaultExpr) { + verifyTypeVarDefaultIsCompatible(typeVar, node.d.defaultExpr); + } + + // Associate the type variable with the owning scope. + const scopeNode = ParseTreeUtils.getTypeVarScopeNode(node); + if (scopeNode) { + let scopeType: TypeVarScopeType; + if (scopeNode.nodeType === ParseNodeType.Class) { + scopeType = TypeVarScopeType.Class; + + // Set the variance to "auto" for class-scoped TypeVars. + typeVar.shared.declaredVariance = + isParamSpec(typeVar) || isTypeVarTuple(typeVar) ? 
Variance.Invariant : Variance.Auto; + } else if (scopeNode.nodeType === ParseNodeType.Function) { + scopeType = TypeVarScopeType.Function; + } else { + assert(scopeNode.nodeType === ParseNodeType.TypeAlias); + scopeType = TypeVarScopeType.TypeAlias; + typeVar.shared.declaredVariance = + isParamSpec(typeVar) || isTypeVarTuple(typeVar) ? Variance.Invariant : Variance.Auto; + } + + typeVar = TypeVarType.cloneForScopeId( + typeVar, + ParseTreeUtils.getScopeIdForNode( + scopeNode.nodeType === ParseNodeType.TypeAlias ? scopeNode.d.name : scopeNode + ), + scopeNode.d.name.d.value, + scopeType + ); + } + + writeTypeCache(node, { type: typeVar }, /* flags */ undefined); + writeTypeCache(node.d.name, { type: typeVar }, /* flags */ undefined); + + return typeVar; + } + + function getInferredTypeOfDeclaration(symbol: Symbol, decl: Declaration): Type | undefined { + const resolvedDecl = resolveAliasDeclaration(decl, /* resolveLocalNames */ true, { + allowExternallyHiddenAccess: AnalyzerNodeInfo.getFileInfo(decl.node).isStubFile, + }); + + // We couldn't resolve the alias. Substitute an unknown + // type in this case. + if (!resolvedDecl) { + return evaluatorOptions.evaluateUnknownImportsAsAny ? AnyType.create() : UnknownType.create(); + } + + function applyLoaderActionsToModuleType( + moduleType: ModuleType, + loaderActions: ModuleLoaderActions, + importLookup: ImportLookup + ): Type { + if (!loaderActions.uri.isEmpty() && loaderActions.loadSymbolsFromPath) { + const lookupResults = importLookup(loaderActions.uri); + if (lookupResults) { + moduleType.priv.fields = lookupResults.symbolTable; + moduleType.priv.docString = lookupResults.docString; + } else { + // Note that all module attributes that are not found in the + // symbol table should be treated as Any or Unknown rather than + // as an error. + moduleType.priv.notPresentFieldType = evaluatorOptions.evaluateUnknownImportsAsAny + ? 
AnyType.create() + : UnknownType.create(); + } + } + + if (loaderActions.implicitImports) { + loaderActions.implicitImports.forEach((implicitImport, name) => { + const existingLoaderField = moduleType.priv.loaderFields.get(name); + + // Recursively apply loader actions. + let symbolType: Type; + + if (implicitImport.isUnresolved) { + symbolType = UnknownType.create(); + } else { + let importedModuleType: ModuleType; + + const existingType = existingLoaderField?.getSynthesizedType(); + if (existingType?.type && isModule(existingType.type)) { + importedModuleType = existingType.type; + } else { + const moduleName = moduleType.priv.moduleName + ? moduleType.priv.moduleName + '.' + name + : ''; + importedModuleType = ModuleType.create(moduleName, implicitImport.uri); + } + + symbolType = applyLoaderActionsToModuleType(importedModuleType, implicitImport, importLookup); + } + + if (!existingLoaderField) { + const importedModuleSymbol = Symbol.createWithType(SymbolFlags.None, symbolType); + moduleType.priv.loaderFields.set(name, importedModuleSymbol); + } + }); + } + + return moduleType; + } + + // If the resolved declaration is still an alias, the alias + // is pointing at a module, and we need to synthesize a + // module type. + if (resolvedDecl.type === DeclarationType.Alias) { + let moduleType: ModuleType | undefined; + + // See if this is an import that shares a ModuleType with another + // import statement. If so, used the cached type. This happens when + // multiple import statements start with the same module name, such + // as "import a.b" and "import a.c". + if (resolvedDecl.node.nodeType === ParseNodeType.ImportAs) { + const cachedType = readTypeCache(resolvedDecl.node.d.module, EvalFlags.None); + if (cachedType && isModule(cachedType)) { + moduleType = cachedType; + } + } + + if (!moduleType) { + // Build a module type that corresponds to the declaration and + // its associated loader actions. 
+ moduleType = ModuleType.create(resolvedDecl.moduleName, resolvedDecl.uri); + + if (resolvedDecl.node.nodeType === ParseNodeType.ImportAs) { + writeTypeCache(resolvedDecl.node.d.module, { type: moduleType }, EvalFlags.None); + } + } + + return applyLoaderActionsToModuleType( + moduleType, + resolvedDecl.symbolName && resolvedDecl.submoduleFallback + ? resolvedDecl.submoduleFallback + : resolvedDecl, + importLookup + ); + } + + const declaredType = getTypeForDeclaration(resolvedDecl); + if (declaredType.type) { + return declaredType.type; + } + + // If this is part of a "py.typed" package, don't fall back on type inference + // unless it's marked Final, is a constant, or is a declared type alias. + const fileInfo = AnalyzerNodeInfo.getFileInfo(resolvedDecl.node); + let isUnambiguousType = !fileInfo.isInPyTypedPackage || fileInfo.isStubFile; + + // If this is a py.typed package, determine if this is a case where an unannotated + // variable is considered "unambiguous" because all type checkers are almost + // guaranteed to infer its type the same. + if (!isUnambiguousType) { + if (resolvedDecl.type === DeclarationType.Variable) { + // Special-case variables within an enum class. These are effectively + // constants, so we'll treat them as unambiguous. + const enclosingClass = ParseTreeUtils.getEnclosingClass(resolvedDecl.node, /* stopAtFunction */ true); + if (enclosingClass) { + const classTypeInfo = getTypeOfClass(enclosingClass); + if (classTypeInfo && ClassType.isEnumClass(classTypeInfo.classType)) { + isUnambiguousType = true; + } + } + + // Special-case constants, which are treated as unambiguous. + if (isFinalVariableDeclaration(resolvedDecl) || resolvedDecl.isConstant) { + isUnambiguousType = true; + } + + // Special-case calls to certain built-in type functions. 
+ if (resolvedDecl.inferredTypeSource?.nodeType === ParseNodeType.Call) { + const baseTypeResult = getTypeOfExpression( + resolvedDecl.inferredTypeSource.d.leftExpr, + EvalFlags.CallBaseDefaults + ); + const callType = baseTypeResult.type; + + const exemptBuiltins = [ + 'TypeVar', + 'ParamSpec', + 'TypeVarTuple', + 'TypedDict', + 'NamedTuple', + 'NewType', + 'TypeAliasType', + ]; + + if (isInstantiableClass(callType) && ClassType.isBuiltIn(callType, exemptBuiltins)) { + isUnambiguousType = true; + } else if ( + isFunction(callType) && + exemptBuiltins.some((name) => FunctionType.isBuiltIn(callType, name)) + ) { + isUnambiguousType = true; + } + } + } + } + + // If the resolved declaration had no defined type, use the + // inferred type for this node. + if (resolvedDecl.type === DeclarationType.Param) { + assert(resolvedDecl.node.d.name !== undefined); + return evaluateTypeForSubnode(resolvedDecl.node.d.name, () => { + evaluateTypeOfParam(resolvedDecl.node); + })?.type; + } + + if (resolvedDecl.type === DeclarationType.Variable && resolvedDecl.inferredTypeSource) { + const isTypeAlias = + isExplicitTypeAliasDeclaration(resolvedDecl) || isPossibleTypeAliasOrTypedDict(resolvedDecl); + + // If this is a type alias, evaluate types for the entire assignment + // statement rather than just the RHS of the assignment. + const typeSource = + isTypeAlias && resolvedDecl.inferredTypeSource.parent + ? resolvedDecl.inferredTypeSource.parent + : resolvedDecl.inferredTypeSource; + let inferredType = evaluateTypeForSubnode(resolvedDecl.node, () => { + evaluateTypesForStatement(typeSource); + })?.type; + + if (inferredType && isTypeAlias && resolvedDecl.typeAliasName) { + // If this was a speculative type alias, it becomes a real type alias only + // in the event that its inferred type is instantiable or explicitly Any + // (but not an ellipsis). 
            // NOTE(review): this leading fragment is the tail of a function whose
            // start lies above this block (speculative type-alias handling for a
            // variable declaration) — confirm against the surrounding file.
            if (isLegalImplicitTypeAliasType(inferredType)) {
                const typeAliasTypeVar = synthesizeTypeAliasPlaceholder(resolvedDecl.typeAliasName);

                inferredType = transformTypeForTypeAlias(
                    inferredType,
                    resolvedDecl.node,
                    typeAliasTypeVar,
                    /* isPep695TypeVarType */ false
                );

                isUnambiguousType = true;
            }
        }

        // Determine whether we need to mark the annotation as ambiguous.
        if (inferredType && fileInfo.isInPyTypedPackage && !fileInfo.isStubFile) {
            if (!isUnambiguousType) {
                // See if this particular inference can be considered "unambiguous".
                // Any symbol that is assigned more than once is considered ambiguous.
                if (isUnambiguousInference(symbol, decl, inferredType)) {
                    isUnambiguousType = true;
                }
            }

            if (!isUnambiguousType) {
                inferredType = TypeBase.cloneForAmbiguousType(inferredType);
            }
        }

        return inferredType;
    }

    return undefined;
}

// Applies some heuristics to determine whether it's likely that all Python
// type checkers will infer the same type.
function isUnambiguousInference(symbol: Symbol, decl: Declaration, inferredType: Type): boolean {
    // Declarations that allow py.typed inference (e.g. slots-style variable
    // decls) are excluded from the "assigned more than once" count below.
    const nonSlotsDecls = symbol.getDeclarations().filter((decl) => {
        return decl.type !== DeclarationType.Variable || !decl.isInferenceAllowedInPyTyped;
    });

    // Any symbol with more than one assignment is considered ambiguous.
    if (nonSlotsDecls.length > 1) {
        return false;
    }

    if (decl.type !== DeclarationType.Variable) {
        return false;
    }

    // If there are no non-slots declarations, don't mark the inferred type as ambiguous.
    if (nonSlotsDecls.length === 0) {
        return true;
    }

    // TypeVar definitions don't require a declaration.
    if (isTypeVar(inferredType)) {
        return true;
    }

    let assignmentNode: AssignmentNode | undefined;

    const parentNode = decl.node.parent;
    if (parentNode) {
        // Is this a simple assignment (x = y) or an assignment of an instance variable (self.x = y)?
        if (parentNode.nodeType === ParseNodeType.Assignment) {
            assignmentNode = parentNode;
        } else if (
            parentNode.nodeType === ParseNodeType.MemberAccess &&
            parentNode.parent?.nodeType === ParseNodeType.Assignment
        ) {
            assignmentNode = parentNode.parent;
        }
    }

    if (!assignmentNode) {
        return false;
    }

    const assignedType = getTypeOfExpression(assignmentNode.d.rightExpr).type;

    // Assume that literal values will always result in the same inferred type.
    if (isClassInstance(assignedType) && isLiteralType(assignedType)) {
        return true;
    }

    // If the assignment is a simple name corresponding to an unambiguous
    // type, we'll assume the resulting variable will receive the same
    // unambiguous type.
    if (assignmentNode.d.rightExpr.nodeType === ParseNodeType.Name && !TypeBase.isAmbiguous(assignedType)) {
        return true;
    }

    return false;
}

// If the specified declaration is an alias declaration that points to a symbol,
// it resolves the alias and looks up the symbol, then returns the first declaration
// associated with that symbol. It does this recursively if necessary. If a symbol
// lookup fails, undefined is returned. If resolveLocalNames is true, the method
// resolves aliases through local renames ("as" clauses found in import statements).
function resolveAliasDeclaration(
    declaration: Declaration,
    resolveLocalNames: boolean,
    options?: ResolveAliasOptions
): Declaration | undefined {
    return resolveAliasDeclarationUtil(importLookup, declaration, {
        resolveLocalNames,
        allowExternallyHiddenAccess: options?.allowExternallyHiddenAccess ?? false,
        skipFileNeededCheck: options?.skipFileNeededCheck ?? false,
    })?.declaration;
}

// Same as resolveAliasDeclaration above, but returns the full resolution
// info (ResolvedAliasInfo) rather than just the resolved declaration.
function resolveAliasDeclarationWithInfo(
    declaration: Declaration,
    resolveLocalNames: boolean,
    options?: ResolveAliasOptions
): ResolvedAliasInfo | undefined {
    return resolveAliasDeclarationUtil(importLookup, declaration, {
        resolveLocalNames,
        allowExternallyHiddenAccess: options?.allowExternallyHiddenAccess ?? false,
        skipFileNeededCheck: options?.skipFileNeededCheck ?? false,
    });
}

// Returns the type of the symbol. If the type is explicitly declared, that type
// is returned. If not, the type is inferred from assignments to the symbol. All
// assigned types are evaluated and combined into a union.
function getEffectiveTypeOfSymbol(symbol: Symbol): Type {
    return getEffectiveTypeOfSymbolForUsage(symbol).type;
}

// If a "usageNode" node is specified, only declarations that are outside
// of the current execution scope or that are reachable (as determined by
// code flow analysis) are considered. This helps in cases where there
// are cyclical dependencies between symbols.
function getEffectiveTypeOfSymbolForUsage(
    symbol: Symbol,
    usageNode?: NameNode,
    useLastDecl = false
): EffectiveTypeResult {
    let declaredTypeInfo: DeclaredSymbolTypeInfo | undefined;

    // If there's a declared type, it takes precedence over inferred types.
    if (symbol.hasTypedDeclarations()) {
        declaredTypeInfo = getDeclaredTypeOfSymbol(symbol, usageNode);
        const declaredType = declaredTypeInfo?.type;

        let isIncomplete = false;
        if (declaredType) {
            if (isFunction(declaredType) && FunctionType.isPartiallyEvaluated(declaredType)) {
                isIncomplete = true;
            } else if (isClass(declaredType) && ClassType.isPartiallyEvaluated(declaredType)) {
                isIncomplete = true;
            }
        }

        // If the "declared" type uses a "TypeAlias" type annotation, then
        // we need to use the inferred type path to evaluate its type.
        // Note: declaredTypeInfo was assigned just above, so the non-optional
        // accesses below are safe within this branch.
        if (declaredType || !declaredTypeInfo.isTypeAlias) {
            const typedDecls = symbol.getTypedDeclarations();

            // If we received an undefined declared type, this can be caused by
            // exceeding the max number of type declarations, speculative
            // evaluation, or a recursive definition.
            const isRecursiveDefinition =
                !declaredType &&
                !declaredTypeInfo.exceedsMaxDecls &&
                !speculativeTypeTracker.isSpeculative(/* node */ undefined);

            const result: EffectiveTypeResult = {
                type: declaredType ?? UnknownType.create(),
                isIncomplete,
                includesVariableDecl: includesVariableTypeDecl(typedDecls),
                includesIllegalTypeAliasDecl: !typedDecls.every((decl) => isPossibleTypeAliasDeclaration(decl)),
                includesSpeculativeResult: false,
                isRecursiveDefinition,
            };

            return result;
        }
    }

    return inferTypeOfSymbolForUsage(symbol, usageNode, useLastDecl);
}

// Determines whether the set of declarations includes a variable declaration
// that is not part of a typing.pyi or typingExtensions.pyi file.
function includesVariableTypeDecl(decls: Declaration[]): boolean {
    return decls.some((decl) => {
        if (decl.type === DeclarationType.Variable) {
            // Exempt typing.pyi and typingExtensions.pyi, which use variables to
            // define some special forms.
            const fileInfo = AnalyzerNodeInfo.getFileInfo(decl.node);

            if (!fileInfo.isTypingStubFile && !fileInfo.isTypingExtensionsStubFile) {
                return true;
            }
        }

        if (decl.type === DeclarationType.Param) {
            return true;
        }

        return false;
    });
}

// Infers a symbol's type from its declarations when no (usable) declared type
// exists. Results are memoized in effectiveTypeCache, keyed by the usage node
// and the useLastDecl flag.
function inferTypeOfSymbolForUsage(symbol: Symbol, usageNode?: NameNode, useLastDecl = false): EffectiveTypeResult {
    // Look in the inferred type cache to see if we've computed this already.
    let cacheEntries = effectiveTypeCache.get(symbol.id);
    const usageNodeId = usageNode ? usageNode.id : undefined;
    const effectiveTypeCacheKey = `${usageNodeId === undefined ? '.' : usageNodeId.toString()}${
        useLastDecl ? '*' : ''
    }`;
    const cacheEntry = cacheEntries?.get(effectiveTypeCacheKey);

    if (cacheEntry && !cacheEntry.isIncomplete) {
        return cacheEntry;
    }

    // Infer the type.
    const decls = symbol.getDeclarations();

    let declIndexToConsider: number | undefined;

    // Limit the number of declarations to explore.
    if (decls.length > maxDeclarationsToUseForInference) {
        const result: EffectiveTypeResult = {
            type: UnknownType.create(),
            isIncomplete: false,
            includesVariableDecl: false,
            includesIllegalTypeAliasDecl: !decls.every((decl) => isPossibleTypeAliasDeclaration(decl)),
            includesSpeculativeResult: false,
            isRecursiveDefinition: false,
        };

        addToEffectiveTypeCache(result);
        return result;
    }

    // If the caller has requested that we use only the last decl, we
    // will use only the last one, but we'll ignore decls that are in
    // except clauses.
    if (useLastDecl) {
        decls.forEach((decl, index) => {
            if (!decl.isInExceptSuite) {
                declIndexToConsider = index;
            }
        });
    } else {
        // Handle the case where there are multiple imports — one of them in
        // a try block and one or more in except blocks. In this case, we'll
        // use the one in the try block rather than the excepts.
        if (decls.length > 1 && decls.every((decl) => decl.type === DeclarationType.Alias)) {
            const nonExceptDecls = decls.filter(
                (decl) => decl.type === DeclarationType.Alias && !decl.isInExceptSuite
            );
            if (nonExceptDecls.length === 1) {
                declIndexToConsider = decls.findIndex((decl) => decl === nonExceptDecls[0]);
            }
        }
    }

    // Determine which declarations to use for inference.
    const declsToConsider: Declaration[] = [];
    let includesVariableDecl = false;
    let includesIllegalTypeAliasDecl = false;

    let sawExplicitTypeAlias = false;
    decls.forEach((decl, index) => {
        const resolvedDecl =
            resolveAliasDeclaration(decl, /* resolveLocalNames */ true, {
                allowExternallyHiddenAccess: AnalyzerNodeInfo.getFileInfo(decl.node).isStubFile,
            }) ?? decl;

        // The two flags below are accumulated over ALL declarations, including
        // ones skipped by the declIndexToConsider filter further down.
        if (!isPossibleTypeAliasDeclaration(resolvedDecl) && !isExplicitTypeAliasDeclaration(resolvedDecl)) {
            includesIllegalTypeAliasDecl = true;
        }

        if (includesVariableTypeDecl([resolvedDecl])) {
            includesVariableDecl = true;
        }

        if (declIndexToConsider !== undefined && declIndexToConsider !== index) {
            return;
        }

        // If we have already seen an explicit type alias, do not consider
        // additional decls. This can happen if multiple TypeAlias declarations
        // are provided -- normally an error, but it can happen in stdlib stubs
        // if the user sets the pythonPlatform to "All".
        if (sawExplicitTypeAlias) {
            return;
        }

        // If the symbol is explicitly marked as a ClassVar, consider only the
        // declarations that assign to it from within the class body, not through
        // a member access expression.
        if (
            isEffectivelyClassVar(symbol, /* isDataclass */ false) &&
            decl.type === DeclarationType.Variable &&
            decl.isDefinedByMemberAccess
        ) {
            return;
        }

        if (usageNode !== undefined) {
            if (decl.type !== DeclarationType.Alias) {
                // Is the declaration in the same execution scope as the "usageNode" node?
                // If so, we can skip it because code flow analysis will allow us
                // to determine the type in this context.
                const usageScope = ParseTreeUtils.getExecutionScopeNode(usageNode);
                const declScope = ParseTreeUtils.getExecutionScopeNode(decl.node);
                if (usageScope === declScope) {
                    if (!isFlowPathBetweenNodes(decl.node, usageNode)) {
                        return;
                    }
                }
            }
        }

        const isExplicitTypeAlias = isExplicitTypeAliasDeclaration(resolvedDecl);
        const isTypeAlias = isExplicitTypeAlias || isPossibleTypeAliasOrTypedDict(resolvedDecl);

        if (isExplicitTypeAlias) {
            sawExplicitTypeAlias = true;
        }

        // If this is a type alias, evaluate it outside of the recursive symbol
        // resolution check so we can evaluate the full assignment statement.
        if (
            isTypeAlias &&
            resolvedDecl.type === DeclarationType.Variable &&
            resolvedDecl.inferredTypeSource?.parent?.nodeType === ParseNodeType.Assignment
        ) {
            evaluateTypesForAssignmentStatement(resolvedDecl.inferredTypeSource.parent);
        }

        declsToConsider.push(resolvedDecl);
    });

    // If all of the decls come from augmented assignments, we won't be able to
    // determine its type. At least one declaration must be a simple assignment.
    if (
        declsToConsider.every(
            (decl) =>
                isVariableDeclaration(decl) &&
                ParseTreeUtils.isNodeContainedWithinNodeType(decl.node, ParseNodeType.AugmentedAssignment)
        )
    ) {
        declsToConsider.splice(0);
    }

    const result = getTypeOfSymbolForDecls(symbol, declsToConsider, effectiveTypeCacheKey);
    result.includesVariableDecl = includesVariableDecl;
    result.includesIllegalTypeAliasDecl = includesIllegalTypeAliasDecl;

    // Add the result to the effective type cache if it doesn't include speculative results.
    if (!result.includesSpeculativeResult) {
        addToEffectiveTypeCache(result);
    }

    return result;

    function addToEffectiveTypeCache(result: EffectiveTypeResult) {
        // Add the entry to the cache so we don't need to compute it next time.
        if (!cacheEntries) {
            cacheEntries = new Map();
            effectiveTypeCache.set(symbol.id, cacheEntries);
        }

        cacheEntries.set(effectiveTypeCacheKey, result);
    }
}

// Returns the type of a symbol based on a subset of its declarations.
function getTypeOfSymbolForDecls(symbol: Symbol, decls: Declaration[], typeCacheKey: string): EffectiveTypeResult {
    const typesToCombine: Type[] = [];
    let isIncomplete = false;
    let sawPendingEvaluation = false;
    let includesSpeculativeResult = false;

    decls.forEach((decl) => {
        if (pushSymbolResolution(symbol, decl)) {
            try {
                let type = getInferredTypeOfDeclaration(symbol, decl);

                if (!popSymbolResolution(symbol)) {
                    isIncomplete = true;
                }

                if (type) {
                    if (decl.type === DeclarationType.Variable) {
                        let isConstant = false;
                        // NOTE(review): this inner check repeats the enclosing
                        // condition and is therefore always true here.
                        if (decl.type === DeclarationType.Variable) {
                            if (decl.isConstant || isFinalVariableDeclaration(decl)) {
                                isConstant = true;
                            }
                        }

                        // Treat enum values declared within an enum class as though they are const even
                        // though they may not be named as such.
                        if (
                            isClassInstance(type) &&
                            ClassType.isEnumClass(type) &&
                            isDeclInEnumClass(evaluatorInterface, decl)
                        ) {
                            isConstant = true;
                        }

                        // If the symbol is constant, we can retain the literal
                        // value and TypeForm types. Otherwise, strip literal values
                        // and TypeForm types to widen.
                        if (TypeBase.isInstance(type) && !isConstant && !isExplicitTypeAliasDeclaration(decl)) {
                            type = stripTypeForm(stripLiteralValue(type));
                        }
                    }

                    typesToCombine.push(type);

                    if (isSpeculativeModeInUse(decl.node)) {
                        includesSpeculativeResult = true;
                    }
                } else {
                    isIncomplete = true;
                }
            } catch (e: any) {
                // Clean up the stack before rethrowing.
                popSymbolResolution(symbol);
                throw e;
            }
        } else {
            if (decl.type === DeclarationType.Class) {
                const classTypeInfo = getTypeOfClass(decl.node);
                if (classTypeInfo?.decoratedType) {
                    typesToCombine.push(classTypeInfo.decoratedType);
                }
            }

            isIncomplete = true;

            // Note that at least one decl could not be evaluated because
            // it was already in the process of being evaluated.
            sawPendingEvaluation = true;
        }
    });

    // How many times have we already attempted to evaluate this declaration already?
    const cacheEntries = effectiveTypeCache.get(symbol.id);
    const evaluationAttempts = (cacheEntries?.get(typeCacheKey)?.evaluationAttempts ?? 0) + 1;

    let type: Type;

    if (typesToCombine.length > 0) {
        // Ignore the pending evaluation flag if we've already attempted the
        // type evaluation many times because this probably means there's a
        // cyclical dependency that cannot be broken.
        isIncomplete = sawPendingEvaluation && evaluationAttempts < maxEffectiveTypeEvaluationAttempts;

        type = combineTypes(typesToCombine);
    } else {
        // We can encounter this situation in the case of a bare ClassVar annotation.
        if (symbol.isClassVar()) {
            type = UnknownType.create();
            isIncomplete = false;
        } else {
            type = UnboundType.create();
        }
    }

    return { type, isIncomplete, includesSpeculativeResult, evaluationAttempts };
}

// If a declaration has an explicit type (e.g. a variable with an annotation),
// this function evaluates the type and returns it. If the symbol has no
// explicit declared type, its type will need to be inferred instead. In some
// cases, non-type information (such as Final or ClassVar attributes) may be
// provided, but type inference is still required. In such cases, the attributes
// are returned as flags.
function getDeclaredTypeOfSymbol(symbol: Symbol, usageNode?: NameNode): DeclaredSymbolTypeInfo {
    const synthesizedType = symbol.getSynthesizedType()?.type;
    if (synthesizedType) {
        return { type: synthesizedType };
    }

    let typedDecls = symbol.getTypedDeclarations();

    if (typedDecls.length === 0) {
        // If the symbol has no type declaration but is assigned many times,
        // treat it as though it has an explicit type annotation of "Unknown".
        // This will avoid a pathological performance condition for unannotated
        // code that reassigns the same variable hundreds of times. If the symbol
        // effectively has an "Any" annotation, it won't be narrowed.
        if (symbol.getDeclarations().length > maxDeclarationsToUseForInference) {
            return { type: UnknownType.create() };
        }

        // There was no declaration with a defined type.
        return { type: undefined };
    }

    // If there is more than one typed decl, filter out any that are not
    // reachable from the usage node (if specified). This can happen in
    // cases where a property symbol is redefined to add a setter, deleter,
    // etc.
    let exceedsMaxDecls = false;
    if (usageNode && typedDecls.length > 1) {
        if (typedDecls.length > maxTypedDeclsPerSymbol) {
            // If there are too many typed decls, don't bother filtering them
            // because this can be very expensive. Simply use the last one
            // in this case.
            typedDecls = [typedDecls[typedDecls.length - 1]];
            exceedsMaxDecls = true;
        } else {
            const filteredTypedDecls = typedDecls.filter((decl) => {
                if (decl.type !== DeclarationType.Alias) {
                    // Is the declaration in the same execution scope as the "usageNode" node?
                    const usageScope = ParseTreeUtils.getExecutionScopeNode(usageNode);
                    const declScope = ParseTreeUtils.getExecutionScopeNode(decl.node);

                    if (usageScope === declScope) {
                        if (!isFlowPathBetweenNodes(decl.node, usageNode, /* allowSelf */ false)) {
                            return false;
                        }
                    }
                }
                return true;
            });

            if (filteredTypedDecls.length === 0) {
                return { type: UnboundType.create() };
            }

            typedDecls = filteredTypedDecls;
        }
    }

    // Start with the last decl. If that's already being resolved,
    // use the next-to-last decl, etc. This can happen when resolving
    // property methods. Often the setter method is defined in reference to
    // the initial property, which defines the getter method with the same
    // symbol name.
    let declIndex = typedDecls.length - 1;
    while (declIndex >= 0) {
        const decl = typedDecls[declIndex];

        // If there's a partially-constructed type that is allowed
        // for recursive symbol resolution, return it as the resolved type.
        const partialType = getSymbolResolutionPartialType(symbol, decl);
        if (partialType) {
            return { type: partialType };
        }

        if (getIndexOfSymbolResolution(symbol, decl) < 0) {
            if (pushSymbolResolution(symbol, decl)) {
                try {
                    const declaredTypeInfo = getTypeForDeclaration(decl);

                    // If there was recursion detected, don't use this declaration.
                    // The exception is it's a class declaration because getTypeOfClass
                    // handles recursion by populating a partially-created class type
                    // in the type cache. This exception is required to handle the
                    // circular dependency between the "type" and "object" classes in
                    // builtins.pyi (since "object" is a "type" and "type" is an "object").
                    if (popSymbolResolution(symbol) || decl.type === DeclarationType.Class) {
                        return declaredTypeInfo;
                    }
                } catch (e: any) {
                    // Clean up the stack before rethrowing.
                    popSymbolResolution(symbol);
                    throw e;
                }
            }
        }

        declIndex--;
    }

    return { type: undefined, exceedsMaxDecls };
}

// Forces lazy return-type inference for a function or each overload of an
// overloaded function, so the result is cached before specialization.
function inferReturnTypeIfNecessary(type: Type) {
    if (isFunction(type)) {
        getEffectiveReturnType(type);
    } else if (isOverloaded(type)) {
        OverloadedType.getOverloads(type).forEach((overload) => {
            getEffectiveReturnType(overload);
        });

        const impl = OverloadedType.getImplementation(type);
        if (impl && isFunction(impl)) {
            getEffectiveReturnType(impl);
        }
    }
}

// Convenience wrapper that discards the isIncomplete flag.
function getEffectiveReturnType(type: FunctionType): Type {
    return getEffectiveReturnTypeResult(type).type;
}

// Convenience wrapper that discards the isIncomplete flag.
// NOTE(review): getInferredReturnTypeResult (without the leading underscore)
// is assumed to be a wrapper defined elsewhere in this file that delegates to
// _getInferredReturnTypeResult below — confirm.
function getInferredReturnType(type: FunctionType): Type {
    return getInferredReturnTypeResult(type).type;
}

// Returns the return type of the function. If the type is explicitly provided in
// a type annotation, that type is returned. If not, an attempt is made to infer
// the return type. If a list of args is provided, the inference logic may take
// into account argument types to infer the return type.
function getEffectiveReturnTypeResult(type: FunctionType, options?: EffectiveReturnTypeOptions): TypeResult {
    const specializedReturnType = FunctionType.getEffectiveReturnType(type, /* includeInferred */ false);
    if (specializedReturnType && !isUnknown(specializedReturnType)) {
        return { type: specializedReturnType };
    }

    return getInferredReturnTypeResult(type, options?.callSiteInfo);
}

// Core implementation for inferring an undeclared return type. Results are
// cached on type.shared.inferredReturnType; an evaluation-attempt counter
// guards against non-converging recursive inference.
function _getInferredReturnTypeResult(type: FunctionType, callSiteInfo?: CallSiteEvaluationInfo): TypeResult {
    let returnType: Type | undefined;
    let isIncomplete = false;
    const analyzeUnannotatedFunctions = true;

    // Don't attempt to infer the return type for a stub file.
    if (FunctionType.isStubDefinition(type)) {
        return { type: UnknownType.create() };
    }

    // Don't infer the return type for a ParamSpec value.
    if (FunctionType.isParamSpecValue(type)) {
        return { type: UnknownType.create() };
    }

    // Don't infer the return type for an overloaded function (unless it's synthesized,
    // which is needed for proper operation of the __get__ method in properties).
    if (FunctionType.isOverloaded(type) && !FunctionType.isSynthesizedMethod(type)) {
        return { type: UnknownType.create() };
    }

    const evalCount = type.shared.inferredReturnType?.evaluationCount ?? 0;

    // If the return type has already been lazily evaluated,
    // don't bother computing it again.
    if (type.shared.inferredReturnType && !type.shared.inferredReturnType.isIncomplete) {
        returnType = type.shared.inferredReturnType.type;
    } else if (evalCount > maxReturnTypeInferenceAttempts) {
        // Detect a case where a return type won't converge because of recursion.
        returnType = UnknownType.create();
    } else {
        // Don't bother inferring the return type of __init__ because it's
        // always None.
        if (FunctionType.isInstanceMethod(type) && type.shared.name === '__init__') {
            returnType = getNoneType();
        } else if (type.shared.declaration) {
            const functionNode = type.shared.declaration.node;
            const skipUnannotatedFunction =
                !AnalyzerNodeInfo.getFileInfo(functionNode).diagnosticRuleSet.analyzeUnannotatedFunctions &&
                ParseTreeUtils.isUnannotatedFunction(functionNode);

            // Skip return type inference if we are in "skip unannotated function" mode.
            if (!skipUnannotatedFunction && !checkCodeFlowTooComplex(functionNode.d.suite)) {
                const codeFlowComplexity = AnalyzerNodeInfo.getCodeFlowComplexity(functionNode);

                // For very complex functions that have no annotated parameter types,
                // don't attempt to infer the return type because it can be extremely
                // expensive.
                const parametersAreAnnotated =
                    type.shared.parameters.length <= 1 ||
                    type.shared.parameters.some((param) => FunctionParam.isTypeDeclared(param));

                if (parametersAreAnnotated || codeFlowComplexity < maxReturnTypeInferenceCodeFlowComplexity) {
                    // Temporarily disable speculative mode while we
                    // lazily evaluate the return type.
                    let returnTypeResult: TypeResult | undefined;
                    disableSpeculativeMode(() => {
                        returnTypeResult = inferFunctionReturnType(
                            functionNode,
                            FunctionType.isAbstractMethod(type),
                            callSiteInfo?.errorNode
                        );
                    });

                    returnType = returnTypeResult?.type;
                    if (returnTypeResult?.isIncomplete) {
                        isIncomplete = true;
                    }
                }
            }
        }

        if (!returnType) {
            returnType = UnknownType.create();
        }

        // Externalize any TypeVars that appear in the type.
        const typeVarScopes: TypeVarScopeId[] = [];
        if (type.shared.typeVarScopeId) {
            typeVarScopes.push(type.shared.typeVarScopeId);
        }
        if (type.shared.methodClass?.shared.typeVarScopeId) {
            typeVarScopes.push(type.shared.methodClass.shared.typeVarScopeId);
        }
        returnType = makeTypeVarsFree(returnType, typeVarScopes);

        // Cache the type for next time.
        type.shared.inferredReturnType = { type: returnType, isIncomplete, evaluationCount: evalCount + 1 };
    }

    // If the type is partially unknown and the function has one or more unannotated
    // params, try to analyze the function with the provided argument types and
    // attempt to do a better job at inference.
    if (
        !isIncomplete &&
        analyzeUnannotatedFunctions &&
        isPartlyUnknown(returnType) &&
        FunctionType.hasUnannotatedParams(type) &&
        !FunctionType.isStubDefinition(type) &&
        !FunctionType.isPyTypedDefinition(type) &&
        callSiteInfo
    ) {
        let hasDecorators = false;
        let isAsync = false;
        const declNode = type.shared.declaration?.node;
        if (declNode) {
            if (declNode.d.decorators.length > 0) {
                hasDecorators = true;
            }
            if (declNode.d.isAsync) {
                isAsync = true;
            }
        }

        // We can't use this technique if decorators or async are used because they
        // would need to be applied to the inferred return type.
        if (!hasDecorators && !isAsync) {
            const contextualReturnType = inferReturnTypeForCallSite(type, callSiteInfo);
            if (contextualReturnType) {
                returnType = contextualReturnType;

                if (type.shared.declaration?.node) {
                    // Externalize any TypeVars that appear in the type.
                    const liveScopeIds = ParseTreeUtils.getTypeVarScopesForNode(type.shared.declaration.node);
                    returnType = makeTypeVarsFree(returnType, liveScopeIds);
                }
            }
        }
    }

    return { type: returnType, isIncomplete };
}

// Re-infers a function's return type using the argument types from a specific
// call site. Returns undefined when any of several cost/safety heuristics
// (complexity, recursion, arg count, stack depth) rule the technique out.
// Results are cached per parameter-type tuple on the function's private data.
function inferReturnTypeForCallSite(type: FunctionType, callSiteInfo: CallSiteEvaluationInfo): Type | undefined {
    const args = callSiteInfo.args;
    let contextualReturnType: Type | undefined;

    if (!type.shared.declaration) {
        return undefined;
    }
    const functionNode = type.shared.declaration.node;
    const codeFlowComplexity = AnalyzerNodeInfo.getCodeFlowComplexity(functionNode);

    if (codeFlowComplexity >= maxReturnCallSiteTypeInferenceCodeFlowComplexity) {
        return undefined;
    }

    // If an arg hasn't been matched to a specific named parameter,
    // it's an unpacked value that corresponds to multiple parameters.
    // That's an edge case that we don't handle here.
    if (args.some((arg) => !arg.paramName)) {
        return undefined;
    }

    // Detect recurrence. If a function invokes itself either directly
    // or indirectly, we won't attempt to infer contextual return
    // types any further.
    if (returnTypeInferenceContextStack.some((context) => context.functionNode === functionNode)) {
        return undefined;
    }

    const functionTypeResult = getTypeOfFunction(functionNode);
    if (!functionTypeResult) {
        return undefined;
    }

    // Very complex functions with many arguments can take a long time to analyze,
    // so we'll use a heuristic and avoiding this inference technique for any
    // call site that involves too many arguments.
    if (args.length > maxReturnTypeInferenceArgCount) {
        return undefined;
    }

    // Don't explore arbitrarily deep in the call graph.
    if (returnTypeInferenceContextStack.length >= maxReturnTypeInferenceStackSize) {
        return undefined;
    }

    const paramTypes: Type[] = [];
    let isResultFromCache = false;

    // If the call is located in a loop, don't use literal argument types
    // for the same reason we don't do literal math in loops.
    const stripLiteralArgTypes = ParseTreeUtils.isWithinLoop(callSiteInfo.errorNode);

    // Suppress diagnostics because we don't want to generate errors.
    suppressDiagnostics(functionNode, () => {
        // Allocate a new temporary type cache for the context of just
        // this function so we can analyze it separately without polluting
        // the main type cache.
        const prevTypeCache = returnTypeInferenceTypeCache;
        returnTypeInferenceContextStack.push({
            functionNode,
            codeFlowAnalyzer: codeFlowEngine.createCodeFlowAnalyzer(),
        });

        try {
            returnTypeInferenceTypeCache = new Map();

            let allArgTypesAreUnknown = true;
            functionNode.d.params.forEach((param, index) => {
                if (param.d.name) {
                    let paramType: Type | undefined;
                    const arg = args.find((arg) => param.d.name!.d.value === arg.paramName);

                    if (arg && arg.argument.valueExpression) {
                        paramType = getTypeOfExpression(arg.argument.valueExpression).type;
                        if (!isUnknown(paramType)) {
                            allArgTypesAreUnknown = false;
                        }
                    } else if (param.d.defaultValue) {
                        paramType = getTypeOfExpression(param.d.defaultValue).type;
                        if (!isUnknown(paramType)) {
                            allArgTypesAreUnknown = false;
                        }
                    } else if (index === 0) {
                        // If this is an instance or class method, use the implied
                        // parameter type for the "self" or "cls" parameter.
                        if (
                            FunctionType.isInstanceMethod(functionTypeResult.functionType) ||
                            FunctionType.isClassMethod(functionTypeResult.functionType)
                        ) {
                            if (functionTypeResult.functionType.shared.parameters.length > 0) {
                                if (functionNode.d.params[0].d.name) {
                                    paramType = FunctionType.getParamType(functionTypeResult.functionType, 0);
                                }
                            }
                        }
                    }

                    if (!paramType) {
                        paramType = UnknownType.create();
                    }

                    if (stripLiteralArgTypes) {
                        paramType = stripTypeForm(
                            convertSpecialFormToRuntimeValue(
                                stripLiteralValue(paramType),
                                EvalFlags.None,
                                /* convertModule */ true
                            )
                        );
                    }

                    paramTypes.push(paramType);
                    writeTypeCache(param.d.name, { type: paramType }, EvalFlags.None);
                }
            });

            // Don't bother trying to determine the contextual return
            // type if none of the argument types are known.
            if (!allArgTypesAreUnknown) {
                // See if the return type is already cached. If so, skip the
                // inference step, which is potentially very expensive.
                const cacheEntry = functionTypeResult.functionType.priv.callSiteReturnTypeCache?.find((entry) => {
                    return (
                        entry.paramTypes.length === paramTypes.length &&
                        entry.paramTypes.every((t, i) => isTypeSame(t, paramTypes[i]))
                    );
                });

                if (cacheEntry) {
                    contextualReturnType = cacheEntry.returnType;
                    isResultFromCache = true;
                } else {
                    contextualReturnType = inferFunctionReturnType(
                        functionNode,
                        FunctionType.isAbstractMethod(type),
                        callSiteInfo?.errorNode
                    )?.type;
                }
            }
        } finally {
            returnTypeInferenceContextStack.pop();
            returnTypeInferenceTypeCache = prevTypeCache;
        }
    });

    if (contextualReturnType) {
        contextualReturnType = removeUnbound(contextualReturnType);

        if (!isResultFromCache) {
            // Cache the resulting type.
            if (!functionTypeResult.functionType.priv.callSiteReturnTypeCache) {
                functionTypeResult.functionType.priv.callSiteReturnTypeCache = [];
            }
            // Evict the oldest entry when the cache is full.
            if (
                functionTypeResult.functionType.priv.callSiteReturnTypeCache.length >=
                maxCallSiteReturnTypeCacheSize
            ) {
                functionTypeResult.functionType.priv.callSiteReturnTypeCache =
                    functionTypeResult.functionType.priv.callSiteReturnTypeCache.slice(1);
            }
            functionTypeResult.functionType.priv.callSiteReturnTypeCache.push({
                paramTypes,
                returnType: contextualReturnType,
            });
        }

        return contextualReturnType;
    }

    return undefined;
}

// If the function has an explicitly-declared return type, it is returned
// unaltered unless the function is a generator, in which case it is
// modified to return only the return type for the generator.
function getDeclaredReturnType(node: FunctionNode): Type | undefined {
    const functionTypeInfo = getTypeOfFunction(node);
    const returnType = functionTypeInfo?.functionType.shared.declaredReturnType;

    if (!returnType) {
        return undefined;
    }

    if (FunctionType.isGenerator(functionTypeInfo.functionType)) {
        return getDeclaredGeneratorReturnType(functionTypeInfo.functionType);
    }

    return returnType;
}

// Returns the effective type of a class member's symbol, partially specialized
// for the member's class; Unknown when the class isn't instantiable.
function getTypeOfMember(member: ClassMember): Type {
    if (isInstantiableClass(member.classType)) {
        return partiallySpecializeType(
            getEffectiveTypeOfSymbol(member.symbol),
            member.classType,
            getTypeClassType(),
            /* selfClass */ undefined
        );
    }
    return UnknownType.create();
}

// Like getTypeOfMember, but returns a full TypeResult, validates use in type
// expressions, forces return-type inference before specialization, and can
// report ambiguous accesses to generically-typed instance variables.
function getTypeOfMemberInternal(
    errorNode: ExpressionNode | undefined,
    member: ClassMember,
    selfClass: ClassType | TypeVarType | undefined,
    flags: MemberAccessFlags
): TypeResult | undefined {
    if (isAnyOrUnknown(member.classType)) {
        return {
            type: member.classType,
            isIncomplete: false,
        };
    }

    if (!isInstantiableClass(member.classType)) {
        return undefined;
    }

    const typeResult = getEffectiveTypeOfSymbolForUsage(member.symbol);

    // NOTE(review): getEffectiveTypeOfSymbolForUsage always returns a result,
    // so this guard appears unreachable — confirm before removing.
    if (!typeResult) {
        return undefined;
    }

    // Report inappropriate use of variables in type expressions.
    if ((flags & MemberAccessFlags.TypeExpression) !== 0 && errorNode) {
        typeResult.type = validateSymbolIsTypeExpression(
            errorNode,
            typeResult.type,
            !!typeResult.includesVariableDecl
        );
    }

    // If the type is a function or overloaded function, infer
    // and cache the return type if necessary. This needs to be done
    // prior to specializing.
    inferReturnTypeIfNecessary(typeResult.type);

    // Check for ambiguous accesses to attributes with generic types?
    if (
        errorNode &&
        selfClass &&
        isClass(selfClass) &&
        member.isInstanceMember &&
        isClass(member.unspecializedClassType) &&
        (flags & MemberAccessFlags.DisallowGenericInstanceVariableAccess) !== 0 &&
        requiresSpecialization(typeResult.type, { ignoreSelf: true, ignoreImplicitTypeArgs: true })
    ) {
        const specializedType = partiallySpecializeType(
            typeResult.type,
            member.unspecializedClassType,
            getTypeClassType(),
            selfSpecializeClass(selfClass, { overrideTypeArgs: true })
        );

        if (
            findSubtype(
                specializedType,
                (subtype) =>
                    !isFunctionOrOverloaded(subtype) &&
                    requiresSpecialization(subtype, { ignoreSelf: true, ignoreImplicitTypeArgs: true })
            )
        ) {
            addDiagnostic(
                DiagnosticRule.reportGeneralTypeIssues,
                LocMessage.genericInstanceVariableAccess(),
                errorNode
            );
        }
    }

    return {
        type: partiallySpecializeType(typeResult.type, member.classType, getTypeClassType(), selfClass),
        isIncomplete: !!typeResult.isIncomplete,
    };
}

// Determines whether srcType is assignable to destType.
// NOTE(review): this function continues beyond the visible portion of the file.
function assignClass(
    destType: ClassType,
    srcType: ClassType,
    diag: DiagnosticAddendum | undefined,
    constraints: ConstraintTracker | undefined,
    flags: AssignTypeFlags,
    recursionCount: number,
    reportErrorsUsingObjType: boolean
): boolean {
    // If the source or dest types are partially evaluated (i.e.
they are in the + // process of being constructed), assume they are assignable rather than risk + // emitting false positives. + if (ClassType.isHierarchyPartiallyEvaluated(destType) || ClassType.isHierarchyPartiallyEvaluated(srcType)) { + return true; + } + + // Handle typed dicts. They also use a form of structural typing for type + // checking, as defined in PEP 589. + if (ClassType.isTypedDictClass(srcType)) { + if (ClassType.isTypedDictClass(destType) && !ClassType.isSameGenericClass(destType, srcType)) { + if ( + !assignTypedDictToTypedDict( + evaluatorInterface, + destType, + srcType, + diag, + constraints, + flags, + recursionCount + ) + ) { + return false; + } + + // If invariance is being enforced, the two TypedDicts must be assignable to each other. + if ((flags & AssignTypeFlags.Invariant) !== 0) { + return assignTypedDictToTypedDict( + evaluatorInterface, + srcType, + destType, + /* diag */ undefined, + /* constraints */ undefined, + flags, + recursionCount + ); + } + + return true; + } + + // Handle some special cases where a TypedDict can act like + // a Mapping[str, T] or a dict[str, T]. 
+ if (ClassType.isBuiltIn(destType, 'Mapping')) { + const mappingValueType = getTypedDictMappingEquivalent(evaluatorInterface, srcType); + + if ( + mappingValueType && + prefetched?.mappingClass && + isInstantiableClass(prefetched.mappingClass) && + prefetched?.strClass && + isInstantiableClass(prefetched.strClass) + ) { + srcType = ClassType.specialize(prefetched.mappingClass, [ + ClassType.cloneAsInstance(prefetched.strClass), + mappingValueType, + ]); + } + } else if (ClassType.isBuiltIn(destType, ['dict', 'MutableMapping'])) { + const dictValueType = getTypedDictDictEquivalent(evaluatorInterface, srcType, recursionCount); + + if ( + dictValueType && + prefetched?.dictClass && + isInstantiableClass(prefetched.dictClass) && + prefetched.strClass && + isInstantiableClass(prefetched.strClass) + ) { + srcType = ClassType.specialize(prefetched.dictClass, [ + ClassType.cloneAsInstance(prefetched.strClass), + dictValueType, + ]); + } + } + } + + // Handle special-case type promotions. + if (destType.priv.includePromotions) { + const promotionList = typePromotions.get(destType.shared.fullName); + if ( + promotionList && + promotionList.some((srcName) => + srcType.shared.mro.some((mroClass) => isClass(mroClass) && srcName === mroClass.shared.fullName) + ) + ) { + if ((flags & AssignTypeFlags.Invariant) === 0) { + return true; + } + } + } + + // Is it a structural type (i.e. a protocol)? If so, we need to + // perform a member-by-member check. + const inheritanceChain: InheritanceChain = []; + const isDerivedFrom = ClassType.isDerivedFrom(srcType, destType, inheritanceChain); + + // Use the slow path for protocols if the dest doesn't explicitly + // derive from the source. We also need to use this path if we're + // testing to see if the metaclass matches the protocol. 
+ if (ClassType.isProtocolClass(destType) && !isDerivedFrom) { + if ( + !assignClassToProtocol( + evaluatorInterface, + destType, + ClassType.cloneAsInstance(srcType), + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + diag?.addMessage( + LocAddendum.protocolIncompatible().format({ + sourceType: printType(convertToInstance(srcType)), + destType: printType(convertToInstance(destType)), + }) + ); + return false; + } + + return true; + } + + if ((flags & AssignTypeFlags.Invariant) === 0 || ClassType.isSameGenericClass(srcType, destType)) { + if (isDerivedFrom) { + assert(inheritanceChain.length > 0); + + if ( + assignClassWithTypeArgs( + destType, + srcType, + inheritanceChain, + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + return true; + } + } + } + + // Everything is assignable to an object. + if (ClassType.isBuiltIn(destType, 'object')) { + if ((flags & AssignTypeFlags.Invariant) === 0) { + return true; + } + } + + if (diag) { + const destErrorType = reportErrorsUsingObjType ? ClassType.cloneAsInstance(destType) : destType; + const srcErrorType = reportErrorsUsingObjType ? ClassType.cloneAsInstance(srcType) : srcType; + + let destErrorTypeText = printType(destErrorType); + let srcErrorTypeText = printType(srcErrorType); + + // If the text is the same, use the fully-qualified name rather than the short name. + if (destErrorTypeText === srcErrorTypeText && destType.shared.fullName && srcType.shared.fullName) { + destErrorTypeText = destType.shared.fullName; + srcErrorTypeText = srcType.shared.fullName; + } + + diag?.addMessage( + LocAddendum.typeIncompatible().format({ + sourceType: srcErrorTypeText, + destType: destErrorTypeText, + }) + ); + + // Tell the user about the disableBytesTypePromotions if that is involved. 
+ if (ClassType.isBuiltIn(destType, 'bytes')) { + const promotions = typePromotions.get(destType.shared.fullName); + if (promotions && promotions.some((name) => name === srcType.shared.fullName)) { + diag?.addMessage(LocAddendum.bytesTypePromotions()); + } + } + } + + return false; + } + + // This function is used to validate or infer the variance of type + // parameters within a class. If ignoreBaseClassVariance is set to false, + // the type parameters for the base class are honored. This is useful for + // variance inference (PEP 695). For validation of protocol variance, we + // want to ignore the variance for all base classes in the class hierarchy. + function assignClassToSelf( + destType: ClassType, + srcType: ClassType, + assumedVariance: Variance, + ignoreBaseClassVariance = true, + recursionCount = 0 + ): boolean { + assert(ClassType.isSameGenericClass(destType, srcType)); + assert(destType.shared.typeParams.length > 0); + + srcType = makeTypeVarsBound(srcType, getTypeVarScopeIds(srcType)); + destType = makeTypeVarsBound(destType, getTypeVarScopeIds(destType)); + + let isAssignable = true; + + // Use a try/catch block here to make sure that we reset + // the assignClassToSelfClass to undefined if an exception occurs. + try { + // Stash the current class type so any references to it are treated + // as though all type parameters are invariant. + assignClassToSelfStack.push({ class: destType, assumedVariance }); + + ClassType.getSymbolTable(destType).forEach((symbol, name) => { + if (!isAssignable || symbol.isIgnoredForProtocolMatch()) { + return; + } + + // Constructor methods are exempt from variance calculations. 
+ if (name === '__new__' || name === '__init__') { + return; + } + + const memberInfo = lookUpClassMember(srcType, name); + assert(memberInfo !== undefined); + + let destMemberType = getEffectiveTypeOfSymbol(symbol); + const srcMemberType = getTypeOfMember(memberInfo); + destMemberType = partiallySpecializeType(destMemberType, destType, getTypeClassType()); + + // Properties require special processing. + if ( + isClassInstance(destMemberType) && + ClassType.isPropertyClass(destMemberType) && + isClassInstance(srcMemberType) && + ClassType.isPropertyClass(srcMemberType) + ) { + if ( + !assignProperty( + evaluatorInterface, + ClassType.cloneAsInstantiable(destMemberType), + ClassType.cloneAsInstantiable(srcMemberType), + destType, + srcType, + /* diag */ undefined, + /* constraints */ undefined, + /* selfConstraints */ undefined, + recursionCount + ) + ) { + isAssignable = false; + } + } else { + const primaryDecl = symbol.getDeclarations()[0]; + + let flags = AssignTypeFlags.Default; + if ( + primaryDecl?.type === DeclarationType.Variable && + !isFinalVariableDeclaration(primaryDecl) && + !isMemberReadOnly(destType, name) + ) { + // Class and instance variables that are mutable need to + // enforce invariance. We will exempt variables that are + // private or protected, since these are presumably + // not modifiable outside of the class. + if (!isPrivateOrProtectedName(name)) { + flags |= AssignTypeFlags.Invariant; + } + } + + if ( + !assignType( + destMemberType, + srcMemberType, + /* diag */ undefined, + /* constraints */ undefined, + flags | AssignTypeFlags.SkipSelfClsParamCheck, + recursionCount + ) + ) { + isAssignable = false; + } + } + }); + + if (!isAssignable) { + return false; + } + + // Now handle generic base classes. 
+ destType.shared.baseClasses.forEach((baseClass) => { + if ( + !isAssignable || + !isInstantiableClass(baseClass) || + ClassType.isBuiltIn(baseClass, ['object', 'Protocol', 'Generic']) || + baseClass.shared.typeParams.length === 0 + ) { + return; + } + + const specializedDestBaseClass = specializeForBaseClass(destType, baseClass); + const specializedSrcBaseClass = specializeForBaseClass(srcType, baseClass); + + if (!ignoreBaseClassVariance) { + specializedDestBaseClass.shared.typeParams.forEach((param, index) => { + if (isParamSpec(param) || isTypeVarTuple(param) || param.shared.isSynthesized) { + return; + } + + if ( + !specializedSrcBaseClass.priv.typeArgs || + index >= specializedSrcBaseClass.priv.typeArgs.length || + !specializedDestBaseClass.priv.typeArgs || + index >= specializedDestBaseClass.priv.typeArgs.length + ) { + return; + } + + const paramVariance = param.shared.declaredVariance; + if (isTypeVar(specializedSrcBaseClass.priv.typeArgs[index])) { + if (paramVariance === Variance.Invariant || paramVariance === Variance.Contravariant) { + isAssignable = false; + return; + } + } + + if (isTypeVar(specializedDestBaseClass.priv.typeArgs[index])) { + if (paramVariance === Variance.Invariant || paramVariance === Variance.Covariant) { + isAssignable = false; + return; + } + } + }); + } + + if (!isAssignable) { + return; + } + + // Handle tuples specially since their type arguments are variadic. + if (ClassType.isTupleClass(specializedDestBaseClass)) { + return; + } + + if ( + !assignClassToSelf( + specializedDestBaseClass, + specializedSrcBaseClass, + assumedVariance, + ignoreBaseClassVariance, + recursionCount + ) + ) { + isAssignable = false; + } + }); + + return isAssignable; + } finally { + assignClassToSelfStack.pop(); + } + } + + // Determines whether the specified type can be assigned to the + // specified inheritance chain, taking into account its type arguments. 
+ function assignClassWithTypeArgs( + destType: ClassType, + srcType: ClassType, + inheritanceChain: InheritanceChain, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + flags: AssignTypeFlags, + recursionCount: number + ): boolean { + let curSrcType = srcType; + let prevSrcType: ClassType | undefined; + + inferVarianceForClass(destType); + + // If we're enforcing invariance, literal types must match. + if ((flags & AssignTypeFlags.Invariant) !== 0) { + const srcIsLiteral = isLiteralLikeType(srcType); + const destIsLiteral = isLiteralLikeType(destType); + + if (srcIsLiteral !== destIsLiteral) { + return false; + } + } + + for (let ancestorIndex = inheritanceChain.length - 1; ancestorIndex >= 0; ancestorIndex--) { + const ancestorType = inheritanceChain[ancestorIndex]; + + // If we've hit an "unknown", all bets are off, and we need to assume + // that the type is assignable. If the destType is marked "@final", + // we should be able to assume that it's not assignable, but we can't do + // this in the general case because it breaks assumptions with the + // NotImplemented symbol exported by typeshed's builtins.pyi. Instead, + // we'll special-case only None. + if (isUnknown(ancestorType)) { + return !isNoneTypeClass(destType); + } + + // If this isn't the first time through the loop, specialize + // for the next ancestor in the chain. + if (ancestorIndex < inheritanceChain.length - 1) { + // If the curSrcType is a NamedTuple and the ancestorType is a tuple, + // we need to handle this as a special case because the NamedTuple may + // include typeParams from its parent class. + let effectiveCurSrcType = curSrcType; + if ( + ClassType.isBuiltIn(curSrcType, 'NamedTuple') && + ClassType.isBuiltIn(ancestorType, 'tuple') && + prevSrcType + ) { + effectiveCurSrcType = prevSrcType; + } + + curSrcType = specializeForBaseClass(effectiveCurSrcType, ancestorType); + } + + // If there are no type parameters on this class, we're done. 
+ const ancestorTypeParams = ClassType.getTypeParams(ancestorType); + if (ancestorTypeParams.length === 0) { + continue; + } + + // If the dest type isn't specialized, there are no type args to validate. + if (!ancestorType.priv.typeArgs) { + return true; + } + + prevSrcType = curSrcType; + } + + // Handle tuple, which supports a variable number of type arguments. + if (destType.priv.tupleTypeArgs && curSrcType.priv.tupleTypeArgs) { + return assignTupleTypeArgs( + evaluatorInterface, + destType, + curSrcType, + diag, + constraints, + flags, + recursionCount + ); + } + + if (destType.priv.typeArgs) { + // If the dest type is specialized, make sure the specialized source + // type arguments are assignable to the dest type arguments. + return assignTypeArgs( + destType, + curSrcType, + // Don't emit a diag addendum if we're in an invariant context. It's + // sufficient to simply indicate that the types are not the same + // in this case. Adding more information is unnecessary and confusing. + (flags & AssignTypeFlags.Invariant) === 0 ? diag : undefined, + constraints, + flags, + recursionCount + ); + } + + if (constraints && curSrcType.priv.typeArgs) { + // Populate the typeVar map with type arguments of the source. + const srcTypeArgs = curSrcType.priv.typeArgs; + for (let i = 0; i < destType.shared.typeParams.length; i++) { + let typeArgType: Type; + const typeParam = destType.shared.typeParams[i]; + const variance = TypeVarType.getVariance(typeParam); + + if (curSrcType.priv.tupleTypeArgs) { + typeArgType = convertToInstance( + makeTupleObject(evaluatorInterface, curSrcType.priv.tupleTypeArgs, /* isUnpacked */ true) + ); + } else { + typeArgType = i < srcTypeArgs.length ? srcTypeArgs[i] : UnknownType.create(); + } + + constraints.setBounds( + typeParam, + variance !== Variance.Contravariant ? typeArgType : undefined, + variance !== Variance.Covariant ? 
typeArgType : undefined, + /* retainLiterals */ true + ); + } + } + + return true; + } + + function getGetterTypeFromProperty(propertyClass: ClassType): Type | undefined { + if (!ClassType.isPropertyClass(propertyClass)) { + return undefined; + } + + if (propertyClass.priv.fgetInfo) { + return getEffectiveReturnType(propertyClass.priv.fgetInfo.methodType); + } + + return undefined; + } + + function assignTypeArgs( + destType: ClassType, + srcType: ClassType, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + flags: AssignTypeFlags, + recursionCount: number + ): boolean { + assert(ClassType.isSameGenericClass(destType, srcType)); + + inferVarianceForClass(destType); + + const destTypeParams = ClassType.getTypeParams(destType); + let destTypeArgs: Type[]; + let srcTypeArgs: Type[] | undefined; + + // Are we performing protocol variance validation for this class? If so, + // treat all of the type parameters as invariant even if they are declared + // otherwise. + const assignClassToSelfInfo = assignClassToSelfStack.find((info) => + ClassType.isSameGenericClass(info.class, destType) + ); + const assumedVariance = assignClassToSelfInfo?.assumedVariance; + + // If either source or dest type arguments are missing, they are + // treated as "Any", so they are assumed to be assignable. + if (!destType.priv.typeArgs || !srcType.priv.typeArgs) { + return true; + } + + if (ClassType.isTupleClass(destType)) { + destTypeArgs = destType.priv.tupleTypeArgs?.map((t) => t.type) ?? []; + srcTypeArgs = srcType.priv.tupleTypeArgs?.map((t) => t.type); + } else { + destTypeArgs = destType.priv.typeArgs; + srcTypeArgs = srcType.priv.typeArgs; + } + + let isCompatible = true; + + srcTypeArgs?.forEach((srcTypeArg, srcArgIndex) => { + // In most cases, the number of type args should match the number + // of type arguments, but there are a few special cases where this + // isn't true (e.g. assigning a Tuple[X, Y, Z] to a tuple[W]). 
+ const destArgIndex = srcArgIndex >= destTypeArgs.length ? destTypeArgs.length - 1 : srcArgIndex; + const destTypeArg = destArgIndex >= 0 ? destTypeArgs[destArgIndex] : UnknownType.create(); + const destTypeParam = destArgIndex < destTypeParams.length ? destTypeParams[destArgIndex] : undefined; + const assignmentDiag = new DiagnosticAddendum(); + const variance = + assumedVariance ?? (destTypeParam ? TypeVarType.getVariance(destTypeParam) : Variance.Covariant); + let effectiveFlags: AssignTypeFlags; + let errorSource: () => ParameterizedString<{ name: string; sourceType: string; destType: string }>; + let includeDiagAddendum = true; + + if (variance === Variance.Covariant) { + effectiveFlags = flags | AssignTypeFlags.RetainLiteralsForTypeVar; + errorSource = LocAddendum.typeVarIsCovariant; + } else if (variance === Variance.Contravariant) { + effectiveFlags = flags | AssignTypeFlags.Contravariant | AssignTypeFlags.RetainLiteralsForTypeVar; + errorSource = LocAddendum.typeVarIsContravariant; + } else { + effectiveFlags = flags | AssignTypeFlags.Invariant | AssignTypeFlags.RetainLiteralsForTypeVar; + errorSource = LocAddendum.typeVarIsInvariant; + + // Omit the diagnostic addendum for the invariant case because it's obvious + // why two types are not the same. + includeDiagAddendum = false; + } + + // Special-case TypeForm to retain literals when solving TypeVars. + if (ClassType.isBuiltIn(destType, 'TypeForm')) { + effectiveFlags |= AssignTypeFlags.RetainLiteralsForTypeVar; + } + + if ( + !assignType( + variance === Variance.Contravariant ? srcTypeArg : destTypeArg, + variance === Variance.Contravariant ? destTypeArg : srcTypeArg, + assignmentDiag, + constraints, + effectiveFlags, + recursionCount + ) + ) { + // Don't report errors with type variables in "pseudo-random" + // classes since these type variables are not real. 
+ if (!ClassType.isPseudoGenericClass(destType)) { + if (diag) { + if (destTypeParam) { + const childDiag = diag.createAddendum(); + + childDiag.addMessage( + errorSource().format({ + name: TypeVarType.getReadableName(destTypeParam), + ...printSrcDestTypes(srcTypeArg, destTypeArg), + }) + ); + + if (includeDiagAddendum) { + childDiag.addAddendum(assignmentDiag); + } + + if (isCompatible && ClassType.isSameGenericClass(destType, srcType)) { + // Add additional notes to help the user if this is a common type mismatch. + if (ClassType.isBuiltIn(destType, 'dict') && srcArgIndex === 1) { + childDiag.addMessage(LocAddendum.invariantSuggestionDict()); + } else if (ClassType.isBuiltIn(destType, 'list')) { + childDiag.addMessage(LocAddendum.invariantSuggestionList()); + } else if (ClassType.isBuiltIn(destType, 'set')) { + childDiag.addMessage(LocAddendum.invariantSuggestionSet()); + } + } + } else { + diag.addAddendum(assignmentDiag); + } + } + isCompatible = false; + } + } + }); + + return isCompatible; + } + + // Determines if the source type can be assigned to the dest type. + // If constraint are provided, type variables within the destType are + // matched against existing type variables in the map. If a type variable + // in the dest type is not in the type map already, it is assigned a type + // and added to the map. + function assignType( + destType: Type, + srcType: Type, + diag?: DiagnosticAddendum, + constraints?: ConstraintTracker, + flags = AssignTypeFlags.Default, + recursionCount = 0 + ): boolean { + // Handle the case where the dest and src types are the same object. + // We can normally shortcut this and say that they are compatible, + // but if the type includes TypeVars, we need to go through + // the rest of the logic. + if (destType === srcType && !requiresSpecialization(destType)) { + return true; + } + + // If the source type is a special form, use the literal special form + // class rather than the symbolic form. 
+ const specialForm = srcType.props?.specialForm; + if (specialForm) { + let isSpecialFormExempt = false; + + // A few special forms that are normally not compatible with type[T] + // are compatible specifically in the context of isinstance and issubclass. + if ((flags & AssignTypeFlags.AllowIsinstanceSpecialForms) !== 0) { + if (ClassType.isBuiltIn(specialForm, ['Callable', 'UnionType', 'Generic'])) { + isSpecialFormExempt = true; + } + } + + if (!isSpecialFormExempt) { + if (srcType.props?.typeForm && !specialForm.props?.typeForm) { + srcType = TypeBase.cloneWithTypeForm(specialForm, srcType.props.typeForm); + } else { + srcType = specialForm; + } + } + } + + // If the source is a class-like type created by a call to NewType, treat it + // as a FunctionClass instance rather than an instantiable class for + // purposes of assignability. This reflects its actual runtime type. + if (isInstantiableClass(srcType) && ClassType.isNewTypeClass(srcType) && !srcType.priv.includeSubclasses) { + if (prefetched?.functionClass && isInstantiableClass(prefetched?.functionClass)) { + srcType = ClassType.cloneAsInstance(prefetched.functionClass); + } + } + + if (recursionCount > maxTypeRecursionCount) { + return true; + } + recursionCount++; + + // If the source and dest refer to the recursive type aliases, handle + // the case specially to avoid recursing down both type aliases. + if ( + isTypeVar(destType) && + destType.shared.recursiveAlias && + isTypeVar(srcType) && + srcType.shared.recursiveAlias + ) { + const destAliasInfo = destType.props?.typeAliasInfo; + const srcAliasInfo = srcType.props?.typeAliasInfo; + + // Do the source and dest refer to the same recursive type alias? 
+ if ( + destAliasInfo?.typeArgs && + srcAliasInfo?.typeArgs && + destType.shared.recursiveAlias.typeVarScopeId === srcType.shared.recursiveAlias.typeVarScopeId + ) { + return assignRecursiveTypeAliasToSelf( + destAliasInfo, + srcAliasInfo, + diag, + constraints, + flags, + recursionCount + ); + } else { + // Have we already recursed once? + if ((flags & AssignTypeFlags.SkipRecursiveTypeCheck) !== 0) { + return true; + } + + // Note that we are comparing two recursive types and do + // not recursive more than once. + flags |= AssignTypeFlags.SkipRecursiveTypeCheck; + } + } + + // If one or both of the types has an instantiable depth greater than + // zero, convert both to instances first. + if (TypeBase.isInstantiable(destType) && TypeBase.isInstantiable(srcType)) { + if (TypeBase.getInstantiableDepth(destType) > 0 || TypeBase.getInstantiableDepth(srcType) > 0) { + return assignType( + convertToInstance(destType), + convertToInstance(srcType), + diag, + constraints, + flags, + recursionCount + ); + } + } + + // Transform recursive type aliases if necessary. + const transformedDestType = transformPossibleRecursiveTypeAlias(destType); + const transformedSrcType = transformPossibleRecursiveTypeAlias(srcType); + + // Did either the source or dest include recursive type aliases? + // If so, we could be dealing with different recursive type aliases + // or a recursive type alias and a recursive protocol definition. + if ( + (transformedDestType !== destType && isUnion(transformedDestType)) || + (transformedSrcType !== srcType && isUnion(transformedSrcType)) + ) { + // Use a smaller recursive limit in this case to prevent runaway recursion. + if (recursionCount > maxRecursiveTypeAliasRecursionCount) { + // Add a special case for when the source is a str, which is itself + // a recursive type (since it derives from Sequence[str]). 
+ if (isClassInstance(srcType) && ClassType.isBuiltIn(srcType, 'str') && isUnion(transformedDestType)) { + return transformedDestType.priv.subtypes.some( + (subtype) => isClassInstance(subtype) && ClassType.isBuiltIn(subtype, ['object', 'str']) + ); + } + return true; + } + } + + destType = transformedDestType; + srcType = transformedSrcType; + + // If the source or dest is unbound, allow the assignment. The + // error will be reported elsewhere. + if (isUnbound(destType) || isUnbound(srcType)) { + return true; + } + + if (isTypeVar(destType)) { + if (isTypeVarSame(destType, srcType)) { + return true; + } + + // If the dest is a constrained or bound type variable and all of the + // types in the source are conditioned on that same type variable + // and have compatible types, we'll consider it assignable. + if (assignConditionalTypeToTypeVar(destType, srcType, recursionCount)) { + return true; + } + + // If the source is a conditional type associated with a bound TypeVar + // and the bound TypeVar matches the condition, the types are compatible. + const destTypeVar = destType; + if ( + TypeBase.isInstantiable(destType) === TypeBase.isInstantiable(srcType) && + srcType.props?.condition && + srcType.props.condition.some((cond) => { + return ( + !TypeVarType.hasConstraints(cond.typeVar) && + cond.typeVar.priv.nameWithScope === destTypeVar.priv.nameWithScope + ); + }) + ) { + return true; + } + + if (isUnion(srcType)) { + const srcWithoutAny = removeFromUnion(srcType, (type) => isAnyOrUnknown(type)); + if (isTypeSame(destType, srcWithoutAny)) { + return true; + } + } + + // Handle the special case where both types are Self types. We'll allow + // them to be treated as equivalent to handle certain common idioms. 
+ if ( + isTypeVar(srcType) && + TypeVarType.isSelf(srcType) && + TypeVarType.hasBound(srcType) && + TypeVarType.isSelf(destType) && + TypeVarType.hasBound(destType) && + TypeVarType.isBound(destType) === TypeVarType.isBound(srcType) && + TypeBase.isInstance(srcType) === TypeBase.isInstance(destType) + ) { + if ((flags & AssignTypeFlags.Contravariant) === 0 && constraints) { + assignTypeVar(evaluatorInterface, destType, srcType, diag, constraints, flags, recursionCount); + } + return true; + } + + // If the dest is a TypeVarTuple, and the source is a tuple + // with a single entry that is the same TypeVarTuple, it's a match. + if ( + isTypeVarTuple(destType) && + isClassInstance(srcType) && + isTupleClass(srcType) && + srcType.priv.tupleTypeArgs && + srcType.priv.tupleTypeArgs.length === 1 + ) { + if (isTypeSame(destType, srcType.priv.tupleTypeArgs[0].type, {}, recursionCount)) { + return true; + } + } + + if ((flags & AssignTypeFlags.Contravariant) === 0 || !isTypeVar(srcType)) { + if (!assignTypeVar(evaluatorInterface, destType, srcType, diag, constraints, flags, recursionCount)) { + return false; + } + + if (isAnyOrUnknown(srcType) && (flags & AssignTypeFlags.OverloadOverlap) !== 0) { + return false; + } + + return true; + } + } + + if (isTypeVar(srcType)) { + if ((flags & AssignTypeFlags.Contravariant) !== 0) { + if (TypeVarType.isBound(srcType)) { + return assignType( + makeTopLevelTypeVarsConcrete(destType), + makeTopLevelTypeVarsConcrete(srcType), + diag, + /* constraints */ undefined, + flags, + recursionCount + ); + } + + if (assignTypeVar(evaluatorInterface, srcType, destType, diag, constraints, flags, recursionCount)) { + return true; + } + + // If the dest type is a union, only one of the subtypes needs to match. 
+ let isAssignable = false; + if (isUnion(destType)) { + doForEachSubtype(destType, (destSubtype) => { + if ( + assignTypeVar( + evaluatorInterface, + srcType as TypeVarType, + destSubtype, + diag, + constraints, + flags, + recursionCount + ) + ) { + isAssignable = true; + } + }); + } + return isAssignable; + } + + if ((flags & AssignTypeFlags.Invariant) !== 0) { + if (isAnyOrUnknown(destType)) { + return true; + } + + // If the source is a ParamSpec and the dest is a "...", this is + // effectively like an "Any" signature, so we'll treat it as though + // it's Any. + if ( + isParamSpec(srcType) && + isFunction(destType) && + FunctionType.isGradualCallableForm(destType) && + destType.shared.parameters.length <= 2 + ) { + return true; + } + + // If the source is an unpacked TypeVarTuple and the dest is a + // *tuple[Any, ...], we'll treat it as compatible. + if ( + isUnpackedTypeVarTuple(srcType) && + isClassInstance(destType) && + isUnpackedClass(destType) && + destType.priv.tupleTypeArgs && + destType.priv.tupleTypeArgs.length === 1 && + destType.priv.tupleTypeArgs[0].isUnbounded && + isAnyOrUnknown(destType.priv.tupleTypeArgs[0].type) + ) { + return true; + } + + if (!isUnion(destType)) { + diag?.addMessage(LocAddendum.typeAssignmentMismatch().format(printSrcDestTypes(srcType, destType))); + return false; + } + } + } + + if (isAnyOrUnknown(destType)) { + return true; + } + + if (isAnyOrUnknown(srcType) && !srcType.props?.specialForm) { + if (constraints) { + // If it's an ellipsis type, convert it to a regular "Any" + // type. These are functionally equivalent, but "Any" looks + // better in the text representation. + const typeVarSubstitution = isEllipsisType(srcType) ? 
AnyType.create() : srcType; + setConstraintsForFreeTypeVars(destType, typeVarSubstitution, constraints); + } + if ((flags & AssignTypeFlags.OverloadOverlap) === 0) { + return true; + } + } + + if (isNever(srcType)) { + if ((flags & AssignTypeFlags.Invariant) !== 0) { + if (isNever(destType)) { + return true; + } + + diag?.addMessage(LocAddendum.typeAssignmentMismatch().format(printSrcDestTypes(srcType, destType))); + return false; + } + + if (constraints) { + setConstraintsForFreeTypeVars(destType, UnknownType.create(), constraints); + } + return true; + } + + if (isUnion(destType)) { + // If both the source and dest are unions, use assignFromUnionType which has + // special-case logic to handle this case. + if (isUnion(srcType)) { + return assignFromUnionType(destType, srcType, diag, constraints, flags, recursionCount); + } + + const clonedConstraints = constraints?.clone(); + if (assignToUnionType(destType, srcType, /* diag */ undefined, clonedConstraints, flags, recursionCount)) { + if (constraints && clonedConstraints) { + constraints.copyFromClone(clonedConstraints); + } + return true; + } + } + + const expandedSrcType = makeTopLevelTypeVarsConcrete(srcType); + if (isUnion(expandedSrcType)) { + return assignFromUnionType(destType, expandedSrcType, diag, constraints, flags, recursionCount); + } + + if (isUnion(destType)) { + return assignToUnionType(destType, srcType, diag, constraints, flags, recursionCount); + } + + // Is the src a specialized "type" object? 
+ if (isClassInstance(expandedSrcType) && ClassType.isBuiltIn(expandedSrcType, 'type')) { + const srcTypeArgs = expandedSrcType.priv.typeArgs; + let typeTypeArg: Type; + + if (srcTypeArgs && srcTypeArgs.length >= 1) { + typeTypeArg = srcTypeArgs[0]; + } else { + typeTypeArg = UnknownType.create(); + } + + if (isAnyOrUnknown(typeTypeArg)) { + if (isEffectivelyInstantiable(destType)) { + return true; + } + } else if (isClassInstance(typeTypeArg) || isTypeVar(typeTypeArg)) { + if ( + assignType( + destType, + convertToInstantiable(typeTypeArg), + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + return true; + } + + diag?.addMessage(LocAddendum.typeAssignmentMismatch().format(printSrcDestTypes(srcType, destType))); + return false; + } + } + + if (isInstantiableClass(destType)) { + if (isInstantiableClass(expandedSrcType)) { + // PEP 544 says that if the dest type is a type[Proto] class, + // the source must be a "concrete" (non-protocol) class. + if (ClassType.isProtocolClass(destType)) { + if ( + (flags & AssignTypeFlags.AllowProtocolClassSource) === 0 && + ClassType.isProtocolClass(expandedSrcType) && + isInstantiableClass(srcType) && + !srcType.priv.includeSubclasses + ) { + diag?.addMessage( + LocAddendum.protocolSourceIsNotConcrete().format({ + sourceType: printType(convertToInstance(srcType)), + destType: printType(destType), + }) + ); + return false; + } + } + + if (ClassType.isBuiltIn(destType, 'type') && (srcType.props?.instantiableDepth ?? 0) > 0) { + return true; + } + + if (isSpecialFormClass(expandedSrcType, flags)) { + // Special form classes are compatible only with other special form + // classes, not with 'object' or 'type'. + const destSpecialForm = destType.props?.specialForm ?? 
destType; + if (isSpecialFormClass(destSpecialForm, flags)) { + return assignType(destSpecialForm, expandedSrcType, diag, constraints, flags, recursionCount); + } + } else if ( + assignClass( + destType, + expandedSrcType, + diag, + constraints, + flags, + recursionCount, + /* reportErrorsUsingObjType */ false + ) + ) { + return true; + } + + diag?.addMessage(LocAddendum.typeAssignmentMismatch().format(printSrcDestTypes(srcType, destType))); + return false; + } + } + + if (isClassInstance(destType)) { + if (ClassType.isBuiltIn(destType, 'type')) { + if ( + isInstantiableClass(srcType) && + isSpecialFormClass(srcType, flags) && + TypeBase.getInstantiableDepth(srcType) === 0 + ) { + return false; + } + + if (isAnyOrUnknown(srcType) && (flags & AssignTypeFlags.OverloadOverlap) !== 0) { + return false; + } + + const destTypeArgs = destType.priv.typeArgs; + if (destTypeArgs && destTypeArgs.length >= 1) { + if (TypeBase.isInstance(destTypeArgs[0]) && TypeBase.isInstantiable(srcType)) { + return assignType( + destTypeArgs[0], + convertToInstance(srcType), + diag, + constraints, + flags, + recursionCount + ); + } + } + + // Is the dest a "type" object? Assume that all instantiable + // types are assignable to "type". + if (TypeBase.isInstantiable(srcType)) { + const isLiteral = isClass(srcType) && srcType.priv.literalValue !== undefined; + return !isLiteral; + } + } + + let concreteSrcType = makeTopLevelTypeVarsConcrete(srcType); + + // Handle the TypeForm special form. Add a special case for + // type[T] to be assignable to TypeForm[T]. + if (ClassType.isBuiltIn(destType, 'TypeForm')) { + const destTypeArg = + destType.priv.typeArgs && destType.priv.typeArgs.length > 0 + ? 
destType.priv.typeArgs[0] + : UnknownType.create(); + + let srcTypeArg: Type | undefined; + if (isClassInstance(concreteSrcType) && ClassType.isBuiltIn(concreteSrcType, 'type')) { + srcTypeArg = concreteSrcType; + } else if (isInstantiableClass(concreteSrcType)) { + srcTypeArg = convertToInstance(concreteSrcType); + } + + if (srcTypeArg) { + return assignType(destTypeArg, srcTypeArg, diag, constraints, flags, recursionCount); + } + } + + if (isClass(concreteSrcType) && TypeBase.isInstance(concreteSrcType)) { + // Handle the case where the source is an unpacked tuple. + if ( + !destType.priv.isUnpacked && + concreteSrcType.priv.isUnpacked && + concreteSrcType.priv.tupleTypeArgs + ) { + return assignType( + destType, + combineTupleTypeArgs(concreteSrcType.priv.tupleTypeArgs), + diag, + constraints, + flags, + recursionCount + ); + } + + if ( + destType.priv.literalValue !== undefined && + ClassType.isSameGenericClass(destType, concreteSrcType) + ) { + const srcLiteral = concreteSrcType.priv.literalValue; + if (srcLiteral === undefined || !ClassType.isLiteralValueSame(concreteSrcType, destType)) { + diag?.addMessage( + LocAddendum.literalAssignmentMismatch().format({ + sourceType: printType(srcType), + destType: printType(destType), + }) + ); + + return false; + } + } + + // Handle LiteralString special form. 
+ if (ClassType.isBuiltIn(destType, 'LiteralString')) { + if ( + ClassType.isBuiltIn(concreteSrcType, 'str') && + concreteSrcType.priv.literalValue !== undefined + ) { + return (flags & AssignTypeFlags.Invariant) === 0; + } else if (ClassType.isBuiltIn(concreteSrcType, 'LiteralString')) { + return true; + } + } else if ( + ClassType.isBuiltIn(concreteSrcType, 'LiteralString') && + prefetched?.strClass && + isInstantiableClass(prefetched.strClass) && + (flags & AssignTypeFlags.Invariant) === 0 + ) { + concreteSrcType = ClassType.cloneAsInstance(prefetched.strClass); + } + + if ( + !assignClass( + ClassType.cloneAsInstantiable(destType), + ClassType.cloneAsInstantiable(concreteSrcType), + diag, + constraints, + flags, + recursionCount, + /* reportErrorsUsingObjType */ true + ) + ) { + return false; + } + + return true; + } else if (isFunctionOrOverloaded(concreteSrcType)) { + // Is the destination a callback protocol (defined in PEP 544)? + const destCallbackType = getCallbackProtocolType(destType, recursionCount); + if (destCallbackType) { + return assignType(destCallbackType, concreteSrcType, diag, constraints, flags, recursionCount); + } + + // All functions are considered instances of "types.FunctionType" or "types.MethodType". + const altClass = isMethodType(concreteSrcType) ? prefetched?.methodClass : prefetched?.functionClass; + if (altClass) { + return assignType(destType, convertToInstance(altClass), diag, constraints, flags, recursionCount); + } + } else if (isModule(concreteSrcType)) { + // Is the destination the built-in "ModuleType"? 
+ if (ClassType.isBuiltIn(destType, 'ModuleType')) { + return true; + } + + if (ClassType.isProtocolClass(destType)) { + return assignModuleToProtocol( + evaluatorInterface, + ClassType.cloneAsInstantiable(destType), + concreteSrcType, + diag, + constraints, + flags, + recursionCount + ); + } + } else if (isInstantiableClass(concreteSrcType)) { + // See if the destType is an instantiation of a Protocol + // class that is effectively a function. + const callbackType = getCallbackProtocolType(destType, recursionCount); + if (callbackType) { + return assignType(callbackType, concreteSrcType, diag, constraints, flags, recursionCount); + } + + // If the destType is an instantiation of a Protocol, + // see if the class type itself satisfies the protocol. + if (ClassType.isProtocolClass(destType)) { + return assignClassToProtocol( + evaluatorInterface, + ClassType.cloneAsInstantiable(destType), + concreteSrcType, + diag, + constraints, + flags, + recursionCount + ); + } + + // Determine if the metaclass can be assigned to the object. 
+ const metaclass = concreteSrcType.shared.effectiveMetaclass; + if (metaclass) { + if (!isAnyOrUnknown(metaclass)) { + if ( + assignClass( + ClassType.cloneAsInstantiable(destType), + metaclass, + /* diag */ undefined, + constraints, + flags, + recursionCount, + /* reportErrorsUsingObjType */ true + ) + ) { + return true; + } + } + } + } else if (isAnyOrUnknown(concreteSrcType) && !concreteSrcType.props?.specialForm) { + return (flags & AssignTypeFlags.OverloadOverlap) === 0; + } else if (isUnion(concreteSrcType)) { + return assignType(destType, concreteSrcType, diag, constraints, flags, recursionCount); + } + } + + if (isFunction(destType)) { + let concreteSrcType = makeTopLevelTypeVarsConcrete(srcType); + + if (isClassInstance(concreteSrcType)) { + const boundMethod = getBoundMagicMethod( + concreteSrcType, + '__call__', + /* selfType */ undefined, + /* errorNode */ undefined, + /* diag */ undefined, + recursionCount + ); + if (boundMethod) { + concreteSrcType = boundMethod; + } + } + + // If it's a class, use the constructor for type compatibility checking. + if (isInstantiableClass(concreteSrcType) && concreteSrcType.priv.literalValue === undefined) { + const constructor = createFunctionFromConstructor( + evaluatorInterface, + concreteSrcType, + isTypeVar(srcType) ? convertToInstance(srcType) : undefined, + recursionCount + ); + if (constructor) { + concreteSrcType = constructor; + + // The constructor conversion may result in a union of the + // __init__ and __new__ callables. 
+ if (isUnion(concreteSrcType)) { + return assignType(destType, concreteSrcType, diag, constraints, flags, recursionCount); + } + } + } + + if (isAnyOrUnknown(concreteSrcType)) { + return (flags & AssignTypeFlags.OverloadOverlap) === 0; + } + + if (isOverloaded(concreteSrcType)) { + // If this is the first pass of an argument assignment, skip + // all attempts to assign an overloaded function to a function + // because we probably don't have enough information to properly + // filter the overloads at this time. We will do this work on + // subsequent passes. + if ((flags & AssignTypeFlags.ArgAssignmentFirstPass) !== 0) { + return true; + } + + // Find all of the overloaded functions that match the parameters. + const overloads = OverloadedType.getOverloads(concreteSrcType); + const filteredOverloads: FunctionType[] = []; + const typeVarSignatures: ConstraintSet[] = []; + + overloads.forEach((overload) => { + const overloadScopeId = getTypeVarScopeId(overload) ?? ''; + const constraintsClone = constraints?.cloneWithSignature(overloadScopeId); + + if (assignType(destType, overload, /* diag */ undefined, constraintsClone, flags, recursionCount)) { + filteredOverloads.push(overload); + + if (constraintsClone) { + appendArray(typeVarSignatures, constraintsClone.getConstraintSets()); + } + } + }); + + if (filteredOverloads.length === 0) { + diag?.addMessage(LocAddendum.noOverloadAssignable().format({ type: printType(destType) })); + return false; + } + + if (filteredOverloads.length === 1 || (flags & AssignTypeFlags.ArgAssignmentFirstPass) === 0) { + if (constraints) { + constraints.addConstraintSets(typeVarSignatures); + } + } + + return true; + } + + if (isFunction(concreteSrcType)) { + if ( + assignFunction( + destType, + concreteSrcType, + diag?.createAddendum(), + constraints ?? 
new ConstraintTracker(), + flags, + recursionCount + ) + ) { + return true; + } + } + } + + if (isOverloaded(destType)) { + const overloadDiag = diag?.createAddendum(); + + // All overloads in the dest must be assignable. + const destOverloads = OverloadedType.getOverloads(destType); + + // If the source is also an overload with the same number of overloads, + // there's a good chance that there's a one-to-one mapping. Try this + // first before using an n^2 algorithm. + if (isOverloaded(srcType)) { + const srcOverloads = OverloadedType.getOverloads(srcType); + if (destOverloads.length === srcOverloads.length) { + if ( + destOverloads.every((destOverload, index) => { + const srcOverload = srcOverloads[index]; + return assignType( + destOverload, + srcOverload, + /* diag */ undefined, + constraints, + flags, + recursionCount + ); + }) + ) { + return true; + } + } + } + + const isAssignable = destOverloads.every((destOverload) => { + const result = assignType( + destOverload, + srcType, + overloadDiag?.createAddendum(), + constraints, + flags, + recursionCount + ); + return result; + }); + + if (!isAssignable) { + const overloads = OverloadedType.getOverloads(destType); + + if (overloadDiag && overloads.length > 0) { + overloadDiag.addMessage( + LocAddendum.overloadNotAssignable().format({ + name: overloads[0].shared.name, + }) + ); + } + return false; + } + + return true; + } + + if (isClass(destType) && ClassType.isBuiltIn(destType, 'object')) { + if ((isInstantiableClass(destType) && TypeBase.isInstantiable(srcType)) || isClassInstance(destType)) { + if ((flags & AssignTypeFlags.Invariant) === 0) { + // All types (including None, Module, Overloaded) derive from object. + return true; + } + } + } + + // Are we trying to assign None to a protocol? 
+ if (isNoneInstance(srcType) && isClassInstance(destType) && ClassType.isProtocolClass(destType)) { + if (prefetched?.noneTypeClass && isInstantiableClass(prefetched.noneTypeClass)) { + return assignClassToProtocol( + evaluatorInterface, + ClassType.cloneAsInstantiable(destType), + ClassType.cloneAsInstance(prefetched.noneTypeClass), + diag, + constraints, + flags, + recursionCount + ); + } + } + + if (isNoneInstance(destType)) { + diag?.addMessage(LocAddendum.assignToNone()); + return false; + } + + diag?.addMessage(LocAddendum.typeAssignmentMismatch().format(printSrcDestTypes(srcType, destType))); + + return false; + } + + // Determines whether a recursive type alias can be assigned to itself + // given the source and dest type args and computed variance for its + // type params. + function assignRecursiveTypeAliasToSelf( + destAliasInfo: TypeAliasInfo, + srcAliasInfo: TypeAliasInfo, + diag?: DiagnosticAddendum, + constraints?: ConstraintTracker, + flags = AssignTypeFlags.Default, + recursionCount = 0 + ) { + assert(destAliasInfo.typeArgs !== undefined); + assert(srcAliasInfo.typeArgs !== undefined); + + let isAssignable = true; + const srcTypeArgs = srcAliasInfo.typeArgs; + const variances = destAliasInfo.shared.computedVariance; + + destAliasInfo.typeArgs.forEach((destTypeArg, index) => { + const srcTypeArg = index < srcTypeArgs.length ? srcTypeArgs[index] : UnknownType.create(); + + let adjFlags = flags; + const variance = variances && index < variances.length ? 
variances[index] : Variance.Covariant; + + if (variance === Variance.Invariant) { + adjFlags |= AssignTypeFlags.Invariant; + } else if (variance === Variance.Contravariant) { + adjFlags ^= AssignTypeFlags.Contravariant; + } + + if (!assignType(destTypeArg, srcTypeArg, diag, constraints, adjFlags, recursionCount)) { + isAssignable = false; + } + }); + + return isAssignable; + } + + // If the expected type is an explicit TypeForm type, see if the source + // type has an implicit TypeForm type that can be assigned to it. If so, + // convert to an explicit TypeForm type. + function convertToTypeFormType(expectedType: Type, srcType: Type): Type { + // Is the source is a TypeForm type? + if (!srcType.props?.typeForm) { + return srcType; + } + + let srcTypeFormType: Type | undefined; + + // Is the source is a TypeForm type? + if (srcType.props?.typeForm) { + srcTypeFormType = srcType.props.typeForm; + } else if (isClass(srcType)) { + if (TypeBase.isInstantiable(srcType)) { + if (!ClassType.isSpecialBuiltIn(srcType)) { + srcTypeFormType = ClassType.cloneAsInstance(srcType); + } + } else if (ClassType.isBuiltIn(srcType, 'type')) { + srcTypeFormType = + srcType.priv.typeArgs?.length && srcType.priv.typeArgs.length > 0 + ? srcType.priv.typeArgs[0] + : UnknownType.create(); + } + } else if (isTypeVar(srcType) && TypeBase.isInstantiable(srcType)) { + if (!isTypeVarTuple(srcType) || !srcType.priv.isInUnion) { + srcTypeFormType = convertToInstance(srcType); + } + } + + if (!srcTypeFormType) { + return srcType; + } + + let resultType: Type | undefined; + + doForEachSubtype(expectedType, (subtype) => { + if (resultType || !isClassInstance(subtype) || !ClassType.isBuiltIn(subtype, 'TypeForm')) { + return; + } + + const destTypeFormType = + subtype.priv.typeArgs && subtype.priv.typeArgs.length > 0 + ? 
subtype.priv.typeArgs[0] + : UnknownType.create(); + + if (assignType(destTypeFormType, srcTypeFormType)) { + resultType = ClassType.specialize(subtype, [srcTypeFormType]); + } + }); + + return resultType ?? srcType; + } + + function assignFromUnionType( + destType: Type, + srcType: UnionType, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + flags: AssignTypeFlags, + recursionCount: number + ): boolean { + // Start by checking for an exact match. This is needed to handle unions + // that contain recursive type aliases. + if (isTypeSame(srcType, destType, {}, recursionCount)) { + return true; + } + + if ( + (flags & AssignTypeFlags.OverloadOverlap) !== 0 && + srcType.priv.subtypes.some((subtype) => isAnyOrUnknown(subtype)) + ) { + return false; + } + + // Sort the subtypes so we have a deterministic order for unions. + let sortedSrcTypes: Type[] = sortTypes(srcType.priv.subtypes); + let matchedSomeSubtypes = false; + + // Handle the case where the source and dest are both unions. Try + // to eliminate as many exact type matches between the src and dest. + if (isUnion(destType)) { + // Handle the special case where the dest is a union of Any and + // a type variable. This occurs, for example, with the return type of + // the getattr function. + const nonAnySubtypes = destType.priv.subtypes.filter((t) => !isAnyOrUnknown(t)); + if (nonAnySubtypes.length === 1 && isTypeVar(nonAnySubtypes[0])) { + assignType(nonAnySubtypes[0], srcType, /* diag */ undefined, constraints, flags, recursionCount); + + // This always succeeds because the destination contains Any. + return true; + } + + const remainingDestSubtypes: Type[] = []; + let remainingSrcSubtypes: Type[] = sortedSrcTypes; + let canUseFastPath = true; + + // First attempt to match all of the non-generic types in the dest + // to non-generic types in the source. 
+ sortTypes(destType.priv.subtypes).forEach((destSubtype) => { + if (requiresSpecialization(destSubtype)) { + remainingDestSubtypes.push(destSubtype); + } else { + const srcTypeIndex = remainingSrcSubtypes.findIndex((srcSubtype) => + isTypeSame(srcSubtype, destSubtype, {}, recursionCount) + ); + + if (srcTypeIndex >= 0) { + remainingSrcSubtypes.splice(srcTypeIndex, 1); + matchedSomeSubtypes = true; + } else { + remainingDestSubtypes.push(destSubtype); + } + } + }); + + // For all remaining source subtypes, attempt to find a dest subtype + // whose primary type matches. + remainingSrcSubtypes.forEach((srcSubtype) => { + const destTypeIndex = remainingDestSubtypes.findIndex((destSubtype) => { + if (isTypeSame(destSubtype, srcSubtype)) { + return true; + } + + if ( + isClass(srcSubtype) && + isClass(destSubtype) && + TypeBase.isInstance(srcSubtype) === TypeBase.isInstance(destSubtype) + ) { + if (ClassType.isSameGenericClass(srcSubtype, destSubtype)) { + return true; + } + + // Are they equivalent TypedDicts? + if (ClassType.isTypedDictClass(srcSubtype) && ClassType.isTypedDictClass(destSubtype)) { + if ( + assignType( + srcSubtype, + destSubtype, + /* diag */ undefined, + /* constraints */ undefined, + flags, + recursionCount + ) + ) { + return true; + } + } + } + + if (isFunctionOrOverloaded(srcSubtype) && isFunctionOrOverloaded(destSubtype)) { + return true; + } + + return false; + }); + + if (destTypeIndex >= 0) { + if ( + assignType( + remainingDestSubtypes[destTypeIndex], + srcSubtype, + /* diag */ undefined, + constraints, + flags, + recursionCount + ) + ) { + // Note that we have matched at least one subtype indicating + // there is at least some overlap. 
+ matchedSomeSubtypes = true; + } else { + canUseFastPath = false; + } + + remainingDestSubtypes.splice(destTypeIndex, 1); + remainingSrcSubtypes = remainingSrcSubtypes.filter((t) => t !== srcSubtype); + } + }); + + // If there is are remaining dest subtypes and they're all type variables, + // attempt to assign the remaining source subtypes to them. + if (canUseFastPath && (remainingDestSubtypes.length !== 0 || remainingSrcSubtypes.length !== 0)) { + if ((flags & AssignTypeFlags.Invariant) !== 0) { + // If we have no src subtypes remaining but not all dest types have been subsumed + // by other dest types, then the types are not compatible if we're enforcing invariance. + if (remainingSrcSubtypes.length === 0) { + return remainingDestSubtypes.every((destSubtype) => + isTypeSubsumedByOtherType( + destSubtype, + destType, + /* allowAnyToSubsume */ true, + recursionCount + ) + ); + } + } + + const isContra = (flags & AssignTypeFlags.Contravariant) !== 0; + const effectiveDestSubtypes = isContra ? remainingSrcSubtypes : remainingDestSubtypes; + + if (effectiveDestSubtypes.length === 0 || effectiveDestSubtypes.some((t) => !isTypeVar(t))) { + canUseFastPath = false; + + // We can avoid checking the source subtypes that have already been checked. + sortedSrcTypes = remainingSrcSubtypes; + } else if (remainingDestSubtypes.length === remainingSrcSubtypes.length) { + // If the number of remaining source subtypes is the same as the number + // of dest TypeVars, try to assign each source subtype to its own dest TypeVar. 
+ const reorderedDestSubtypes = [...remainingDestSubtypes]; + + for (let srcIndex = 0; srcIndex < remainingSrcSubtypes.length; srcIndex++) { + let foundMatchForSrc = false; + + for (let destIndex = 0; destIndex < reorderedDestSubtypes.length; destIndex++) { + if ( + assignType( + reorderedDestSubtypes[destIndex], + remainingSrcSubtypes[srcIndex], + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + foundMatchForSrc = true; + // Move the matched dest TypeVar to the end of the list so the other + // dest TypeVars have a better chance of being assigned to. + reorderedDestSubtypes.push(...reorderedDestSubtypes.splice(destIndex, 1)); + break; + } + } + + if (!foundMatchForSrc) { + canUseFastPath = false; + break; + } + } + + // We can avoid checking the source subtypes that have already been checked. + sortedSrcTypes = remainingSrcSubtypes; + } else if (remainingSrcSubtypes.length === 0) { + if ((flags & AssignTypeFlags.PopulateExpectedType) !== 0) { + // If we're populating an expected type, try not to leave + // any TypeVars unsolved. Assign the full type to the remaining + // dest TypeVars. + remainingDestSubtypes.forEach((destSubtype) => { + assignType(destSubtype, srcType, /* diag */ undefined, constraints, flags, recursionCount); + }); + } + + // If we've assigned all of the source subtypes but one or more dest + // TypeVars have gone unmatched, treat this as success. + } else { + // Try to assign a union of the remaining source types to + // the first destination TypeVar. If this is a contravariant + // context, use the full dest type rather than the remaining + // dest subtypes to keep the lower bound as wide as possible. + if ( + !assignType( + isContra ? destType : remainingDestSubtypes[0], + isContra ? 
remainingSrcSubtypes[0] : combineTypes(remainingSrcSubtypes), + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + canUseFastPath = false; + } + } + } + + if (canUseFastPath) { + return true; + } + + // If we're looking for type overlaps and at least one type was matched, + // consider it as assignable. + if ((flags & AssignTypeFlags.PartialOverloadOverlap) !== 0 && matchedSomeSubtypes) { + return true; + } + } + + let isIncompatible = false; + + sortedSrcTypes.forEach((subtype) => { + if (isIncompatible) { + return; + } + + if (!assignType(destType, subtype, /* diag */ undefined, constraints, flags, recursionCount)) { + // Determine if the current subtype is subsumed by another subtype + // in the same union. If so, we can ignore this. + const isSubtypeSubsumed = isTypeSubsumedByOtherType( + subtype, + srcType, + /* allowAnyToSubsume */ false, + recursionCount + ); + + // Try again with a concrete version of the subtype. + if ( + !isSubtypeSubsumed && + !assignType(destType, subtype, diag?.createAddendum(), constraints, flags, recursionCount) + ) { + isIncompatible = true; + } + } else { + matchedSomeSubtypes = true; + } + }, /* sortSubtypes */ true); + + if (isIncompatible) { + // If we're looking for type overlaps and at least one type was matched, + // consider it as assignable. + if ((flags & AssignTypeFlags.PartialOverloadOverlap) !== 0 && matchedSomeSubtypes) { + return true; + } + + diag?.addMessage(LocAddendum.typeAssignmentMismatch().format(printSrcDestTypes(srcType, destType))); + return false; + } + + return true; + } + + function isSpecialFormClass(classType: ClassType, flags: AssignTypeFlags): boolean { + if ((flags & AssignTypeFlags.AllowIsinstanceSpecialForms) !== 0) { + return false; + } + + return ClassType.isSpecialFormClass(classType); + } + + // Finds unsolved type variables in the destType and establishes constraints + // in the constraint tracker for them based on the srcType. 
+ function setConstraintsForFreeTypeVars( + destType: Type, + srcType: UnknownType | AnyType, + constraints: ConstraintTracker + ) { + const typeVars = getTypeVarArgsRecursive(destType); + typeVars.forEach((typeVar) => { + if (!TypeVarType.isBound(typeVar) && !constraints.getMainConstraintSet().getTypeVar(typeVar)) { + // Don't set ParamSpecs or TypeVarTuples. + if (!isParamSpec(srcType) && !isTypeVarTuple(srcType)) { + constraints.setBounds(typeVar, srcType); + } + } + }); + } + + // Determines whether a type is "subsumed by" (i.e. is a proper subtype of) another type. + function isTypeSubsumedByOtherType(type: Type, otherType: Type, allowAnyToSubsume: boolean, recursionCount = 0) { + const concreteType = makeTopLevelTypeVarsConcrete(type); + const otherSubtypes = isUnion(otherType) ? otherType.priv.subtypes : [otherType]; + + for (const otherSubtype of otherSubtypes) { + if (isTypeSame(otherSubtype, type)) { + continue; + } + + if (isAnyOrUnknown(otherSubtype)) { + if (allowAnyToSubsume) { + return true; + } + } else if (isProperSubtype(otherSubtype, concreteType, recursionCount)) { + return true; + } + } + + return false; + } + + // Determines whether the srcType is a subtype of destType but the converse + // is not true. It's important that we check both directions to avoid + // matches for types like `tuple[Any]` and `tuple[int]` from being considered + // proper subtypes of each other. + function isProperSubtype(destType: Type, srcType: Type, recursionCount: number) { + // If the destType has a condition, don't consider the srcType a proper subtype. + if (destType.props?.condition) { + return false; + } + + // Shortcut the check if either type is Any or Unknown. + if (isAnyOrUnknown(destType) || isAnyOrUnknown(srcType)) { + return true; + } + + // Shortcut the check if either type is a class whose hierarchy contains an unknown type. 
+ if (isClass(destType) && destType.shared.mro.some((mro) => isAnyOrUnknown(mro))) { + return true; + } + + if (isClass(srcType) && srcType.shared.mro.some((mro) => isAnyOrUnknown(mro))) { + return true; + } + + return ( + assignType( + destType, + srcType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ) && + !assignType( + srcType, + destType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ) + ); + } + + // Determines whether the two types are potentially comparable -- i.e. + // their types overlap in such a way that it makes sense for them to + // be compared with an == or != operator. The functional also supports + // a special variant that can be used for the "is" and "is not" operator. + // This variant can be less conservative in some cases. + function isTypeComparable(leftType: Type, rightType: Type, assumeIsOperator = false) { + if (isAnyOrUnknown(leftType) || isAnyOrUnknown(rightType)) { + return true; + } + + if (isNever(leftType) || isNever(rightType)) { + return false; + } + + if (isModule(leftType) || isModule(rightType)) { + return isTypeSame(leftType, rightType, { ignoreConditions: true }); + } + + const isLeftCallable = isFunctionOrOverloaded(leftType); + const isRightCallable = isFunctionOrOverloaded(rightType); + + // If either type is a function, assume that it may be comparable. The other + // operand might be a callable object, an 'object' instance, etc. We could + // make this more precise for specific cases (e.g. if the other operand is + // None or a literal or an instance of a nominal class that doesn't override + // __call__ and is marked final, etc.), but coming up with a comprehensive + // list is probably not feasible. 
+ if (isLeftCallable || isRightCallable) { + return true; + } + + if (isInstantiableClass(leftType) || (isClassInstance(leftType) && ClassType.isBuiltIn(leftType, 'type'))) { + if ( + isInstantiableClass(rightType) || + (isClassInstance(rightType) && ClassType.isBuiltIn(rightType, 'type')) + ) { + const genericLeftType = ClassType.specialize(leftType, /* typeArgs */ undefined); + const genericRightType = ClassType.specialize(rightType, /* typeArgs */ undefined); + + if (assignType(genericLeftType, genericRightType) || assignType(genericRightType, genericLeftType)) { + return true; + } + } + + // Does the class have an operator overload for eq? + const metaclass = leftType.shared.effectiveMetaclass; + if (metaclass && isClass(metaclass)) { + if (lookUpClassMember(metaclass, '__eq__', MemberAccessFlags.SkipObjectBaseClass)) { + return true; + } + } + + return false; + } + + if (isClassInstance(leftType)) { + if (isClass(rightType)) { + const genericLeftType = ClassType.specialize(leftType, /* typeArgs */ undefined); + const genericRightType = ClassType.specialize(rightType, /* typeArgs */ undefined); + + if (assignType(genericLeftType, genericRightType) || assignType(genericRightType, genericLeftType)) { + return true; + } + + // Check for the "is None" or "is not None" case. + if (assumeIsOperator && isNoneInstance(rightType)) { + if (isNoneInstance(leftType)) { + return true; + } + + // The LHS could be a protocol or 'object', in which case None is + // potentially comparable to it. In other cases, None is not comparable + // because the types are disjoint. + return assignType(leftType, rightType); + } + + // Assume that if the types are disjoint and built-in classes that they + // will never be comparable. + if (ClassType.isBuiltIn(leftType) && ClassType.isBuiltIn(rightType) && TypeBase.isInstance(rightType)) { + // We need to be careful with bool and int literals because + // they are comparable under certain circumstances. 
+ let boolType: ClassType | undefined; + let intType: ClassType | undefined; + if (ClassType.isBuiltIn(leftType, 'bool') && ClassType.isBuiltIn(rightType, 'int')) { + boolType = leftType; + intType = rightType; + } else if (ClassType.isBuiltIn(rightType, 'bool') && ClassType.isBuiltIn(leftType, 'int')) { + boolType = rightType; + intType = leftType; + } + + if (boolType && intType) { + const intVal = intType.priv?.literalValue as number | BigInt | undefined; + if (intVal === undefined) { + return true; + } + if (intVal !== 0 && intVal !== 1) { + return false; + } + + const boolVal = boolType.priv?.literalValue as boolean | undefined; + if (boolVal === undefined) { + return true; + } + + return boolVal === (intVal === 1); + } + + return false; + } + } + + // Does the class have an operator overload for eq? + const eqMethod = lookUpClassMember( + ClassType.cloneAsInstantiable(leftType), + '__eq__', + MemberAccessFlags.SkipObjectBaseClass + ); + + if (eqMethod) { + // If this is a synthesized method for a dataclass, we can assume + // that other dataclass types will not be comparable. + if (ClassType.isDataClass(leftType) && eqMethod.symbol.getSynthesizedType()) { + return false; + } + + return true; + } + + return false; + } + + return true; + } + + function assignToUnionType( + destType: UnionType, + srcType: Type, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + flags: AssignTypeFlags, + recursionCount: number + ): boolean { + // If we need to enforce invariance, the source needs to be compatible + // with all subtypes in the dest, unless those subtypes are subclasses + // of other subtypes. + if (flags & AssignTypeFlags.Invariant) { + let isIncompatible = false; + + doForEachSubtype(destType, (subtype, index) => { + if ( + !isIncompatible && + !assignType(subtype, srcType, diag?.createAddendum(), constraints, flags, recursionCount) + ) { + // Determine whether this subtype is subsumed by some other + // subtype in the union. 
If so, we can ignore the incompatibility. + let skipSubtype = false; + if (!isAnyOrUnknown(subtype)) { + const adjSubtype = makeTypeVarsBound(subtype, /* scopeIds */ undefined); + + doForEachSubtype(destType, (otherSubtype, otherIndex) => { + if (index !== otherIndex && !skipSubtype) { + const adjOtherSubtype = makeTypeVarsBound(otherSubtype, /* scopeIds */ undefined); + + if ( + assignType( + adjOtherSubtype, + adjSubtype, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ) + ) { + skipSubtype = true; + } + } + }); + } + if (!skipSubtype) { + isIncompatible = true; + } + } + }); + + if (isIncompatible) { + diag?.addMessage(LocAddendum.typeAssignmentMismatch().format(printSrcDestTypes(srcType, destType))); + return false; + } + + return true; + } + + // For union destinations, we just need to match one of the types. + const diagAddendum = diag ? new DiagnosticAddendum() : undefined; + + let foundMatch = false; + + // Does the union contain any type variables that need to be solved? + // If so, we need to use a slower path. + if (!requiresSpecialization(destType)) { + for (const subtype of destType.priv.subtypes) { + if (assignType(subtype, srcType, diagAddendum?.createAddendum(), constraints, flags, recursionCount)) { + foundMatch = true; + break; + } + } + } else { + // Run through all subtypes in the union. Don't stop at the first + // match we find because we may need to match TypeVars in other + // subtypes. We special-case "None" so we can handle Optional[T] + // without matching the None to the type var. + if (isNoneInstance(srcType) && isOptionalType(destType)) { + foundMatch = true; + } else { + let bestConstraints: ConstraintTracker | undefined; + let bestConstraintsScore: number | undefined; + let nakedTypeVarMatches = 0; + + // If the srcType is a literal, try to use the fast-path lookup + // in case the destType is a union with hundreds of literals. 
+ if ( + isClassInstance(srcType) && + isLiteralType(srcType) && + UnionType.containsType( + destType, + srcType, + /* options */ undefined, + /* exclusionSet */ undefined, + recursionCount + ) + ) { + return true; + } + + doForEachSubtype( + destType, + (subtype) => { + // Make a temporary clone of the constraints. We don't want to modify + // the original constraints until we find the "optimal" typeVar mapping. + const constraintsClone = constraints?.clone(); + if ( + assignType( + subtype, + srcType, + diagAddendum?.createAddendum(), + constraintsClone, + flags, + recursionCount + ) + ) { + foundMatch = true; + if (constraintsClone) { + // Ask the constraints to compute a "score" for the current + // contents of the table. + let constraintsScore = constraintsClone.getScore(); + + if (isTypeVar(subtype)) { + if (!constraints?.getMainConstraintSet().getTypeVar(subtype)) { + nakedTypeVarMatches++; + + // Handicap the solution slightly so another type var with + // existing constraints will be preferred. + constraintsScore += 0.001; + } + } + + // If the type matches exactly, prefer it over other types. + if (isTypeSame(subtype, stripLiteralValue(srcType))) { + constraintsScore = Number.POSITIVE_INFINITY; + } + + if (bestConstraintsScore === undefined || bestConstraintsScore <= constraintsScore) { + // We found a typeVar mapping with a higher score than before. + bestConstraintsScore = constraintsScore; + bestConstraints = constraintsClone; + } + } + } + }, + /* sortSubtypes */ true + ); + + // If we saw more than one "naked" type vars that have no + // previous constraints recorded, it's dangerous for us to + // assign a value to any of these type vars at this time. + // Typically, they will receive some constraints via some + // later argument assignment. + if (nakedTypeVarMatches > 1 && (flags & AssignTypeFlags.ArgAssignmentFirstPass) !== 0) { + bestConstraints = undefined; + } + + // If we found a winning type var mapping, copy it back to constraints. 
+ if (constraints && bestConstraints) { + constraints.copyFromClone(bestConstraints); + } + } + } + + // If the source is a constrained TypeVar, see if we can assign all of the + // constraints to the union. + if (!foundMatch) { + if (isTypeVar(srcType) && TypeVarType.hasConstraints(srcType)) { + foundMatch = assignType( + destType, + makeTopLevelTypeVarsConcrete(srcType), + diagAddendum?.createAddendum(), + constraints, + flags, + recursionCount + ); + } + } + + if (!foundMatch) { + if (diag && diagAddendum) { + diag.addMessage(LocAddendum.typeAssignmentMismatch().format(printSrcDestTypes(srcType, destType))); + diag.addAddendum(diagAddendum); + } + return false; + } + + return true; + } + + function assignConditionalTypeToTypeVar(destType: TypeVarType, srcType: Type, recursionCount: number): boolean { + // The srcType is assignable only if all of its subtypes are assignable. + return !findSubtype(srcType, (srcSubtype) => { + if (isTypeSame(destType, srcSubtype, { ignorePseudoGeneric: true }, recursionCount)) { + return false; + } + + if (isIncompleteUnknown(srcSubtype)) { + return false; + } + + const destTypeVarName = TypeVarType.getNameWithScope(destType); + + // Determine which conditions on this type apply to this type variable. + // There might be more than one of them. + const applicableConditions = (getTypeCondition(srcSubtype) ?? []).filter( + (constraint) => constraint.typeVar.priv.nameWithScope === destTypeVarName + ); + + // If there are no applicable conditions, it's not assignable. 
+ if (applicableConditions.length === 0) { + return true; + } + + return !applicableConditions.some((condition) => { + if (condition.typeVar.priv.nameWithScope === TypeVarType.getNameWithScope(destType)) { + if (destType.shared.boundType) { + assert( + condition.constraintIndex === 0, + 'Expected constraint for bound TypeVar to have index of 0' + ); + + return assignType( + destType.shared.boundType, + srcSubtype, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ); + } + + if (TypeVarType.hasConstraints(destType)) { + assert( + condition.constraintIndex < destType.shared.constraints.length, + 'Constraint for constrained TypeVar is out of bounds' + ); + + return assignType( + destType.shared.constraints[condition.constraintIndex], + srcSubtype, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default, + recursionCount + ); + } + + // This is a non-bound and non-constrained type variable with a matching condition. + return true; + } + + return false; + }); + }); + } + + // If the class is a protocol and it has a `__call__` method but no other methods + // or attributes that would be incompatible with a function, this method returns + // the signature of the call implied by the `__call__` method. Otherwise it returns + // undefined. + function getCallbackProtocolType( + objType: ClassType, + recursionCount = 0 + ): FunctionType | OverloadedType | undefined { + if (!isClassInstance(objType) || !ClassType.isProtocolClass(objType)) { + return undefined; + } + + // Make sure that the protocol class doesn't define any fields that + // a normal function wouldn't be compatible with. + for (const mroClass of objType.shared.mro) { + if (isClass(mroClass) && ClassType.isProtocolClass(mroClass)) { + for (const field of ClassType.getSymbolTable(mroClass)) { + const fieldName = field[0]; + const fieldSymbol = field[1]; + + // We're expecting a __call__ method. 
We will also ignore a + // __slots__ definition, which is (by convention) ignored for + // protocol matching. + if (fieldName === '__call__' || fieldName === '__slots__') { + continue; + } + + if (fieldSymbol.isIgnoredForProtocolMatch()) { + continue; + } + + let fieldIsPartOfFunction = false; + + if (prefetched?.functionClass && isClass(prefetched.functionClass)) { + if (ClassType.getSymbolTable(prefetched.functionClass).has(field[0])) { + fieldIsPartOfFunction = true; + } + } + + if (!fieldIsPartOfFunction) { + return undefined; + } + } + } + } + + const callType = getBoundMagicMethod( + objType, + '__call__', + /* selfType */ undefined, + /* errorNode */ undefined, + /* diag */ undefined, + recursionCount + ); + + if (!callType) { + return undefined; + } + + return makeFunctionTypeVarsBound(callType); + } + + function assignParam( + destType: Type, + srcType: Type, + paramIndex: number | undefined, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker, + flags: AssignTypeFlags, + recursionCount: number + ) { + if (isTypeVarTuple(destType) && !isUnpacked(srcType)) { + return false; + } + + let specializedSrcType = srcType; + let specializedDestType = destType; + let doSpecializationStep = false; + + if ((flags & AssignTypeFlags.OverloadOverlap) === 0) { + const isFirstPass = (flags & AssignTypeFlags.ArgAssignmentFirstPass) !== 0; + + if ((flags & AssignTypeFlags.Contravariant) === 0) { + if (!isFirstPass) { + specializedDestType = solveAndApplyConstraints( + destType, + constraints, + /* applyOptions */ undefined, + { useLowerBoundOnly: true } + ); + } + doSpecializationStep = requiresSpecialization(specializedDestType); + } else { + if (!isFirstPass) { + specializedSrcType = solveAndApplyConstraints(srcType, constraints, /* applyOptions */ undefined, { + useLowerBoundOnly: true, + }); + } + doSpecializationStep = requiresSpecialization(specializedSrcType); + } + } + + // Is an additional specialization step required? 
+ if (doSpecializationStep) { + if ( + assignType( + specializedSrcType, + specializedDestType, + /* diag */ undefined, + constraints, + (flags ^ AssignTypeFlags.Contravariant) | AssignTypeFlags.RetainLiteralsForTypeVar, + recursionCount + ) + ) { + specializedDestType = solveAndApplyConstraints(destType, constraints); + } + } + + if ( + !assignType( + specializedSrcType, + specializedDestType, + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + if (diag && paramIndex !== undefined) { + diag.addMessage( + LocAddendum.paramAssignment().format({ + index: paramIndex + 1, + sourceType: printType(destType), + destType: printType(srcType), + }) + ); + } + + return false; + } + + return true; + } + + // Determines whether we need to pack some of the source positionals + // into a tuple that matches a variadic *args parameter in the destination. + function adjustSourceParamDetailsForDestVariadic(srcDetails: ParamListDetails, destDetails: ParamListDetails) { + // If there is no *args parameter in the dest, we have nothing to do. + if (destDetails.argsIndex === undefined) { + return; + } + + // If the *args parameter isn't an unpacked TypeVarTuple or tuple, + // we have nothing to do. + if (!isUnpacked(destDetails.params[destDetails.argsIndex].type)) { + return; + } + + // If the source doesn't have enough positional parameters, we have nothing to do. + if (srcDetails.params.length < destDetails.argsIndex) { + return; + } + + let srcLastToPackIndex = srcDetails.params.findIndex((p, i) => { + assert(destDetails.argsIndex !== undefined); + return i >= destDetails.argsIndex && p.kind === ParamKind.Keyword; + }); + if (srcLastToPackIndex < 0) { + srcLastToPackIndex = srcDetails.params.length; + } + + // If both the source and dest have an *args parameter but the dest's is + // in a later position, then we can't assign the source's *args to the dest. + // Don't make any adjustment in this case. 
+ if (srcDetails.argsIndex !== undefined && destDetails.argsIndex > srcDetails.argsIndex) { + return; + } + + const destFirstNonPositional = destDetails.firstKeywordOnlyIndex ?? destDetails.params.length; + const suffixLength = destFirstNonPositional - destDetails.argsIndex - 1; + const srcPositionalsToPack = srcDetails.params.slice(destDetails.argsIndex, srcLastToPackIndex - suffixLength); + const srcTupleTypes: TupleTypeArg[] = []; + srcPositionalsToPack.forEach((entry) => { + if (entry.param.category === ParamCategory.ArgsList) { + if (isUnpackedTypeVarTuple(entry.type)) { + srcTupleTypes.push({ type: entry.type, isUnbounded: false }); + } else if (isUnpackedClass(entry.type) && entry.type.priv.tupleTypeArgs) { + appendArray(srcTupleTypes, entry.type.priv.tupleTypeArgs); + } else { + srcTupleTypes.push({ type: entry.type, isUnbounded: true }); + } + } else { + srcTupleTypes.push({ type: entry.type, isUnbounded: false, isOptional: !!entry.defaultType }); + } + }); + + if (srcTupleTypes.length !== 1 || !isTypeVarTuple(srcTupleTypes[0].type)) { + const srcPositionalsType = makeTupleObject(evaluatorInterface, srcTupleTypes, /* isUnpacked */ true); + + // Snip out the portion of the source positionals that map to the variadic + // dest parameter and replace it with a single parameter that is typed as a + // tuple containing the individual types of the replaced parameters. 
+ srcDetails.params = [ + ...srcDetails.params.slice(0, destDetails.argsIndex), + { + param: FunctionParam.create( + ParamCategory.ArgsList, + srcPositionalsType, + FunctionParamFlags.NameSynthesized | FunctionParamFlags.TypeDeclared, + '_arg_combined' + ), + type: srcPositionalsType, + declaredType: srcPositionalsType, + index: -1, + kind: ParamKind.Positional, + }, + ...srcDetails.params.slice( + destDetails.argsIndex + srcPositionalsToPack.length, + srcDetails.params.length + ), + ]; + + const argsIndex = srcDetails.params.findIndex((param) => param.param.category === ParamCategory.ArgsList); + srcDetails.argsIndex = argsIndex >= 0 ? argsIndex : undefined; + + const kwargsIndex = srcDetails.params.findIndex( + (param) => param.param.category === ParamCategory.KwargsDict + ); + srcDetails.kwargsIndex = kwargsIndex >= 0 ? kwargsIndex : undefined; + + const firstKeywordOnlyIndex = srcDetails.params.findIndex((param) => param.kind === ParamKind.Keyword); + srcDetails.firstKeywordOnlyIndex = firstKeywordOnlyIndex >= 0 ? 
firstKeywordOnlyIndex : undefined; + + srcDetails.positionOnlyParamCount = Math.max( + 0, + srcDetails.params.findIndex( + (p) => + p.kind !== ParamKind.Positional || p.param.category !== ParamCategory.Simple || !!p.defaultType + ) + ); + } + } + + function assignFunction( + destType: FunctionType, + srcType: FunctionType, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker, + flags: AssignTypeFlags, + recursionCount: number + ): boolean { + let canAssign = true; + const checkReturnType = (flags & AssignTypeFlags.SkipReturnTypeCheck) === 0; + const isContra = (flags & AssignTypeFlags.Contravariant) !== 0; + flags &= ~AssignTypeFlags.SkipReturnTypeCheck; + + const destParamSpec = FunctionType.getParamSpecFromArgsKwargs(destType); + if (destParamSpec) { + destType = FunctionType.cloneRemoveParamSpecArgsKwargs(destType); + } + + const srcParamSpec = FunctionType.getParamSpecFromArgsKwargs(srcType); + if (srcParamSpec) { + srcType = FunctionType.cloneRemoveParamSpecArgsKwargs(srcType); + } + + const destParamDetails = getParamListDetails(destType, { + disallowExtraKwargsForTd: (flags & AssignTypeFlags.DisallowExtraKwargsForTd) !== 0, + }); + const srcParamDetails = getParamListDetails(srcType, { + disallowExtraKwargsForTd: (flags & AssignTypeFlags.DisallowExtraKwargsForTd) !== 0, + }); + + adjustSourceParamDetailsForDestVariadic( + isContra ? destParamDetails : srcParamDetails, + isContra ? srcParamDetails : destParamDetails + ); + + const targetIncludesParamSpec = isContra ? !!srcParamSpec : !!destParamSpec; + + const destPositionalCount = destParamDetails.firstKeywordOnlyIndex ?? destParamDetails.params.length; + const srcPositionalCount = srcParamDetails.firstKeywordOnlyIndex ?? srcParamDetails.params.length; + const positionalsToMatch = Math.min(destPositionalCount, srcPositionalCount); + const skippedPosParamIndices: number[] = []; + + // Match positional parameters. 
+ for (let paramIndex = 0; paramIndex < positionalsToMatch; paramIndex++) { + if ( + paramIndex === 0 && + destType.shared.methodClass && + (flags & AssignTypeFlags.SkipSelfClsParamCheck) !== 0 + ) { + if (FunctionType.isInstanceMethod(destType) || FunctionType.isClassMethod(destType)) { + continue; + } + } + + // Skip over the *args parameter since it's handled separately below. + if (paramIndex === destParamDetails.argsIndex) { + if (!isUnpackedTypeVarTuple(destParamDetails.params[destParamDetails.argsIndex].type)) { + skippedPosParamIndices.push(paramIndex); + } + continue; + } + + const destParam = destParamDetails.params[paramIndex]; + const srcParam = srcParamDetails.params[paramIndex]; + + // Find the original index of this source param. If we synthesized it above (for + // a variadic parameter), it may not be found. + const srcParamType = srcParam.type; + const destParamType = destParam.type; + + const destParamName = destParam.param.name ?? ''; + const srcParamName = srcParam.param.name ?? 
''; + if (destParamName) { + const isDestPositionalOnly = + destParam.kind === ParamKind.Positional || destParam.kind === ParamKind.ExpandedArgs; + if ( + !isDestPositionalOnly && + destParam.param.category !== ParamCategory.ArgsList && + srcParam.param.category !== ParamCategory.ArgsList + ) { + if (srcParam.kind === ParamKind.Positional || srcParam.kind === ParamKind.ExpandedArgs) { + diag?.createAddendum().addMessage( + LocAddendum.functionParamPositionOnly().format({ + name: destParamName, + }) + ); + canAssign = false; + } else if (destParamName !== srcParamName) { + diag?.createAddendum().addMessage( + LocAddendum.functionParamName().format({ + srcName: srcParamName, + destName: destParamName, + }) + ); + canAssign = false; + } + } + } + + if (destParam.defaultType) { + if (!srcParam.defaultType && paramIndex !== srcParamDetails.argsIndex) { + diag?.createAddendum().addMessage( + LocAddendum.functionParamDefaultMissing().format({ + name: srcParamName, + }) + ); + canAssign = false; + } + + // If we're performing a partial overload match and both the source + // and dest parameters provide defaults, assume that there could + // be a match. + if ((flags & AssignTypeFlags.PartialOverloadOverlap) !== 0) { + if (srcParam.defaultType) { + continue; + } + } + } + + // Handle the special case of an overloaded __init__ method whose self + // parameter is annotated. 
+ if ( + paramIndex === 0 && + srcType.shared.name === '__init__' && + FunctionType.isInstanceMethod(srcType) && + destType.shared.name === '__init__' && + FunctionType.isInstanceMethod(destType) && + FunctionType.isOverloaded(destType) && + FunctionParam.isTypeDeclared(destParam.param) + ) { + continue; + } + + if (isUnpacked(srcParamType)) { + canAssign = false; + } else if ( + !assignParam( + destParamType, + srcParamType, + paramIndex, + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + // Handle the special case where the source parameter is a synthesized + // TypeVar for "self" or "cls". + if ( + (flags & AssignTypeFlags.SkipSelfClsTypeCheck) === 0 || + !isTypeVar(srcParamType) || + !srcParamType.shared.isSynthesized + ) { + canAssign = false; + } + } else if ( + destParam.kind !== ParamKind.Positional && + destParam.kind !== ParamKind.ExpandedArgs && + srcParam.kind === ParamKind.Positional && + srcParamDetails.kwargsIndex === undefined && + !srcParamDetails.params.some( + (p) => + p.kind === ParamKind.Keyword && + p.param.category === ParamCategory.Simple && + p.param.name === destParam.param.name + ) + ) { + diag?.addMessage( + LocAddendum.namedParamMissingInSource().format({ + name: destParam.param.name ?? '', + }) + ); + canAssign = false; + } + } + + if ( + !FunctionType.isGradualCallableForm(destType) && + destParamDetails.firstPositionOrKeywordIndex < srcParamDetails.positionOnlyParamCount && + !targetIncludesParamSpec + ) { + diag?.createAddendum().addMessage( + LocAddendum.argsPositionOnly().format({ + expected: srcParamDetails.positionOnlyParamCount, + received: destParamDetails.firstPositionOrKeywordIndex, + }) + ); + canAssign = false; + } + + if (destPositionalCount < srcPositionalCount && !targetIncludesParamSpec) { + // Add any remaining positional parameter indices to the list that + // need to be validated. 
+ for (let i = destPositionalCount; i < srcPositionalCount; i++) { + skippedPosParamIndices.push(i); + } + + for (const i of skippedPosParamIndices) { + // If the dest has an *args parameter, make sure it can accept the remaining + // positional arguments in the source. + if (destParamDetails.argsIndex !== undefined) { + const destArgsType = destParamDetails.params[destParamDetails.argsIndex].type; + const srcParamType = srcParamDetails.params[i].type; + if ( + !assignParam( + destArgsType, + srcParamType, + i, + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + canAssign = false; + } + + continue; + } + + // If The source parameter has a default value, it is OK for the + // corresponding dest parameter to be missing. + const srcParam = srcParamDetails.params[i]; + + if (srcParam.defaultType) { + // Assign default arg value in case it is needed for + // populating TypeVar constraints. + const paramInfo = srcParamDetails.params[i]; + const defaultArgType = paramInfo.defaultType ?? paramInfo.defaultType; + + // Enforce invariance below because the default arg value + // is constructed prior to the call, so its type is already + // fixed. + if ( + defaultArgType && + !assignType( + paramInfo.type, + defaultArgType, + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + if ((flags & AssignTypeFlags.PartialOverloadOverlap) === 0) { + canAssign = false; + } + } + + continue; + } + + // If the source parameter is also addressable by keyword, it is OK + // that there is no matching positional parameter in the dest. + if (srcParam.kind === ParamKind.Standard) { + continue; + } + + // If the source parameter is a variadic, it is OK that there is no + // matching positional parameter in the dest. 
+ if (srcParam.param.category === ParamCategory.ArgsList) { + continue; + } + + const nonDefaultSrcParamCount = srcParamDetails.params.filter( + (p) => !!p.param.name && !p.defaultType && p.param.category === ParamCategory.Simple + ).length; + + diag?.createAddendum().addMessage( + LocAddendum.functionTooFewParams().format({ + expected: nonDefaultSrcParamCount, + received: destPositionalCount, + }) + ); + canAssign = false; + break; + } + } else if (srcPositionalCount < destPositionalCount) { + if (srcParamDetails.argsIndex !== undefined) { + // Make sure the remaining dest parameters can be assigned to the source + // *args parameter type. + const srcArgsType = srcParamDetails.params[srcParamDetails.argsIndex].type; + for (let paramIndex = srcPositionalCount; paramIndex < destPositionalCount; paramIndex++) { + if (paramIndex === srcParamDetails.argsIndex) { + continue; + } + + const destParamType = destParamDetails.params[paramIndex].type; + if (isTypeVarTuple(destParamType) && !isTypeVarTuple(srcArgsType)) { + diag?.addMessage(LocAddendum.typeVarTupleRequiresKnownLength()); + canAssign = false; + } else { + if ( + !assignParam( + destParamType, + srcArgsType, + paramIndex, + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + canAssign = false; + } + + const destParamKind = destParamDetails.params[paramIndex].kind; + if ( + destParamKind !== ParamKind.Positional && + destParamKind !== ParamKind.ExpandedArgs && + srcParamDetails.kwargsIndex === undefined + ) { + diag?.addMessage( + LocAddendum.namedParamMissingInSource().format({ + name: destParamDetails.params[paramIndex].param.name ?? '', + }) + ); + canAssign = false; + } + } + } + } else if (!srcParamDetails.paramSpec) { + // If the dest contains a *args, remove it from the positional count + // because it's OK for zero source args to match it. 
+ let adjDestPositionalCount = destPositionalCount; + if (destParamDetails.argsIndex !== undefined && destParamDetails.argsIndex < destPositionalCount) { + adjDestPositionalCount--; + } + + // If we're doing a partial overload overlap check, ignore dest positional + // params with default values. + if ((flags & AssignTypeFlags.PartialOverloadOverlap) !== 0) { + while ( + adjDestPositionalCount > 0 && + destParamDetails.params[adjDestPositionalCount - 1].defaultType + ) { + adjDestPositionalCount--; + } + } + + if (srcPositionalCount < adjDestPositionalCount) { + diag?.addMessage( + LocAddendum.functionTooManyParams().format({ + expected: srcPositionalCount, + received: destPositionalCount, + }) + ); + canAssign = false; + } + } + } + + // If both src and dest have an "*args" parameter, make sure + // their types are compatible. + if ( + srcParamDetails.argsIndex !== undefined && + destParamDetails.argsIndex !== undefined && + !FunctionType.isGradualCallableForm(destType) + ) { + let destArgsType = destParamDetails.params[destParamDetails.argsIndex].type; + let srcArgsType = srcParamDetails.params[srcParamDetails.argsIndex].type; + + if (!isUnpacked(destArgsType)) { + destArgsType = makeTupleObject( + evaluatorInterface, + [{ type: destArgsType, isUnbounded: true }], + /* isUnpacked */ true + ); + } + + if (!isUnpacked(srcArgsType)) { + srcArgsType = makeTupleObject( + evaluatorInterface, + [{ type: srcArgsType, isUnbounded: true }], + /* isUnpacked */ true + ); + } + + if ( + !assignParam( + destArgsType, + srcArgsType, + destParamDetails.params[destParamDetails.argsIndex].index, + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + canAssign = false; + } + } + + // If the dest has an "*args" but the source doesn't, report the incompatibility. + // The converse situation is OK. 
+ if ( + !FunctionType.isGradualCallableForm(destType) && + srcParamDetails.argsIndex === undefined && + srcParamSpec === undefined && + destParamDetails.argsIndex !== undefined && + !destParamDetails.hasUnpackedTypeVarTuple + ) { + diag?.createAddendum().addMessage( + LocAddendum.argsParamMissing().format({ + paramName: destParamDetails.params[destParamDetails.argsIndex].param.name ?? '', + }) + ); + canAssign = false; + } + + // Handle matching of named (keyword) parameters. + if (!targetIncludesParamSpec) { + // Build a dictionary of named parameters in the dest. + const destParamMap = new Map(); + + if (destParamDetails.firstKeywordOnlyIndex !== undefined) { + destParamDetails.params.forEach((param, index) => { + if (index >= destParamDetails.firstKeywordOnlyIndex!) { + if ( + param.param.name && + param.param.category === ParamCategory.Simple && + param.kind !== ParamKind.Positional && + param.kind !== ParamKind.ExpandedArgs + ) { + destParamMap.set(param.param.name, param); + } + } + }); + } + + // If the dest has fewer positional arguments than the source, the remaining + // positional arguments in the source can be treated as named arguments. + let srcStartOfNamed = + srcParamDetails.firstKeywordOnlyIndex !== undefined + ? 
srcParamDetails.firstKeywordOnlyIndex + : srcParamDetails.params.length; + if (destPositionalCount < srcPositionalCount && destParamDetails.argsIndex === undefined) { + srcStartOfNamed = destPositionalCount; + } + + if (srcStartOfNamed >= 0) { + srcParamDetails.params.forEach((srcParamInfo, index) => { + if (index < srcStartOfNamed) { + return; + } + + if ( + !srcParamInfo.param.name || + srcParamInfo.param.category !== ParamCategory.Simple || + srcParamInfo.kind === ParamKind.Positional + ) { + return; + } + + const destParamInfo = destParamMap.get(srcParamInfo.param.name); + const paramDiag = diag?.createAddendum(); + const srcParamType = srcParamInfo.type; + + if (!destParamInfo) { + if (destParamDetails.kwargsIndex === undefined && !srcParamInfo.defaultType) { + if (paramDiag) { + paramDiag.addMessage( + LocAddendum.namedParamMissingInDest().format({ + name: srcParamInfo.param.name, + }) + ); + } + canAssign = false; + } else if (destParamDetails.kwargsIndex !== undefined) { + // Make sure we can assign the type to the Kwargs. + if ( + !assignParam( + destParamDetails.params[destParamDetails.kwargsIndex].type, + srcParamType, + destParamDetails.params[destParamDetails.kwargsIndex].index, + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + canAssign = false; + } + } else if (srcParamInfo.defaultType) { + // Assign default arg values in case they are needed for + // populating TypeVar constraints. + const defaultArgType = srcParamInfo.defaultType ?? srcParamInfo.defaultType; + + if ( + defaultArgType && + !assignType( + srcParamInfo.type, + defaultArgType, + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + if ((flags & AssignTypeFlags.PartialOverloadOverlap) === 0) { + canAssign = false; + } + } + } + return; + } + + // If we're performing a partial overload match and both the source + // and dest parameters provide defaults, assume that there could + // be a match. 
+ if (srcParamInfo.defaultType && destParamInfo.defaultType) { + if ((flags & AssignTypeFlags.PartialOverloadOverlap) !== 0) { + destParamMap.delete(srcParamInfo.param.name); + return; + } + } + + const destParamType = destParamInfo.type; + const specializedDestParamType = constraints + ? solveAndApplyConstraints(destParamType, constraints) + : destParamType; + + if ( + !assignParam( + destParamInfo.type, + srcParamType, + /* paramIndex */ undefined, + paramDiag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + if (paramDiag) { + paramDiag.addMessage( + LocAddendum.namedParamTypeMismatch().format({ + name: srcParamInfo.param.name, + sourceType: printType(specializedDestParamType), + destType: printType(srcParamType), + }) + ); + } + canAssign = false; + } + + if (destParamInfo.defaultType && !srcParamInfo.defaultType) { + diag?.createAddendum().addMessage( + LocAddendum.functionParamDefaultMissing().format({ + name: srcParamInfo.param.name, + }) + ); + canAssign = false; + } + + destParamMap.delete(srcParamInfo.param.name); + }); + } + + // See if there are any unmatched named parameters. + destParamMap.forEach((destParamInfo, paramName) => { + if (srcParamDetails.kwargsIndex !== undefined && destParamInfo.param.name) { + // Make sure the src kwargs type is compatible. + if ( + !assignParam( + destParamInfo.type, + srcParamDetails.params[srcParamDetails.kwargsIndex].type, + destParamInfo.index, + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + canAssign = false; + } + destParamMap.delete(paramName); + } else { + diag?.createAddendum().addMessage( + LocAddendum.namedParamMissingInSource().format({ name: paramName }) + ); + canAssign = false; + } + }); + + // If both src and dest have a "**kwargs" parameter, make sure their types are compatible. 
+ if (srcParamDetails.kwargsIndex !== undefined && destParamDetails.kwargsIndex !== undefined) { + if ( + !assignParam( + destParamDetails.params[destParamDetails.kwargsIndex].type, + srcParamDetails.params[srcParamDetails.kwargsIndex].type, + destParamDetails.params[destParamDetails.kwargsIndex].index, + diag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + canAssign = false; + } + } + + // If the dest has a "**kwargs" but the source doesn't, report the incompatibility. + // The converse situation is OK. + if ( + !FunctionType.isGradualCallableForm(destType) && + srcParamDetails.kwargsIndex === undefined && + srcParamSpec === undefined && + destParamDetails.kwargsIndex !== undefined + ) { + diag?.createAddendum().addMessage( + LocAddendum.kwargsParamMissing().format({ + paramName: destParamDetails.params[destParamDetails.kwargsIndex].param.name!, + }) + ); + canAssign = false; + } + } + + if ((flags & AssignTypeFlags.OverloadOverlap) !== 0) { + // If we're checking for full overlapping overloads and the source is + // a gradual form, the dest must also be a gradual form. + if (FunctionType.isGradualCallableForm(srcType) && !FunctionType.isGradualCallableForm(destType)) { + canAssign = false; + } + + // If the src contains a ParamSpec the dest must also. + if (srcParamSpec && !destParamSpec) { + canAssign = false; + } + } + + // If the source and the dest are using the same ParamSpec, any additional + // concatenated parameters must match. + if (targetIncludesParamSpec && srcParamSpec?.priv.nameWithScope === destParamSpec?.priv.nameWithScope) { + if (srcParamDetails.params.length !== destParamDetails.params.length) { + canAssign = false; + } + } + + // Are we assigning to a function with a ParamSpec? + if (targetIncludesParamSpec) { + const effectiveSrcType = isContra ? destType : srcType; + const effectiveDestType = isContra ? srcType : destType; + + const effectiveSrcParamSpec = isContra ? 
destParamSpec : srcParamSpec; + const effectiveDestParamSpec = isContra ? srcParamSpec : destParamSpec; + + if (effectiveDestParamSpec) { + const requiredMatchParamCount = effectiveDestType.shared.parameters.filter((p, i) => { + if (!p.name) { + return false; + } + + const paramType = FunctionType.getParamType(effectiveDestType, i); + if (p.category === ParamCategory.Simple && isParamSpec(paramType)) { + return false; + } + return true; + }).length; + let matchedParamCount = 0; + const remainingParams: FunctionParam[] = []; + + // If there are parameters in the source that are not matched + // to parameters in the dest, assume these are concatenated on + // to the ParamSpec. + effectiveSrcType.shared.parameters.forEach((p, index) => { + if (matchedParamCount < requiredMatchParamCount) { + if (p.name) { + matchedParamCount++; + } + + // If this is a *args parameter, assume that it provides + // the remaining positional parameters, but also assume + // that it is not exhausted and can provide additional + // parameters. + if (p.category !== ParamCategory.ArgsList) { + return; + } + } + + if (isPositionOnlySeparator(p) && remainingParams.length === 0) { + // Don't bother pushing a position-only separator if it + // is the first remaining param. + return; + } + + remainingParams.push( + FunctionParam.create( + p.category, + FunctionType.getParamType(effectiveSrcType, index), + p.flags, + p.name, + FunctionType.getParamDefaultType(effectiveSrcType, index), + p.defaultExpr + ) + ); + }); + + // If there are remaining parameters and the source and dest do not contain + // the same ParamSpec, synthesize a function for the remaining parameters. + if ( + remainingParams.length > 0 || + !effectiveSrcParamSpec || + !isTypeSame(effectiveSrcParamSpec, effectiveDestParamSpec, { ignoreTypeFlags: true }) + ) { + const effectiveSrcPosCount = isContra ? destPositionalCount : srcPositionalCount; + const effectiveDestPosCount = isContra ? 
srcPositionalCount : destPositionalCount; + + // If the src and dest both have ParamSpecs but the src has additional positional + // parameters that have not been matched to dest positional parameters (probably due + // to a Concatenate), don't attempt to assign the remaining parameters to the ParamSpec. + if (!effectiveSrcParamSpec || effectiveSrcPosCount >= effectiveDestPosCount) { + const remainingFunction = FunctionType.createInstance( + '', + '', + '', + effectiveSrcType.shared.flags | FunctionTypeFlags.SynthesizedMethod, + effectiveSrcType.shared.docString + ); + remainingFunction.shared.deprecatedMessage = effectiveSrcType.shared.deprecatedMessage; + remainingFunction.shared.typeVarScopeId = effectiveSrcType.shared.typeVarScopeId; + remainingFunction.priv.constructorTypeVarScopeId = + effectiveSrcType.priv.constructorTypeVarScopeId; + remainingFunction.shared.methodClass = effectiveSrcType.shared.methodClass; + remainingParams.forEach((param) => { + FunctionType.addParam(remainingFunction, param); + }); + if (effectiveSrcParamSpec) { + FunctionType.addParamSpecVariadics( + remainingFunction, + convertToInstance(effectiveSrcParamSpec) + ); + } + + if ( + !assignType( + effectiveDestParamSpec, + remainingFunction, + /* diag */ undefined, + constraints, + flags + ) + ) { + // If we couldn't assign the function to the ParamSpec, see if we can + // assign only the ParamSpec. This is possible if there were no + // remaining parameters. + if ( + remainingParams.length > 0 || + !effectiveSrcParamSpec || + !assignType( + convertToInstance(effectiveDestParamSpec), + convertToInstance(effectiveSrcParamSpec), + /* diag */ undefined, + constraints, + flags + ) + ) { + canAssign = false; + } + } + } + } + } + } + + // Match the return parameter. 
+ if (checkReturnType) { + const destReturnType = getEffectiveReturnType(destType); + if (!isAnyOrUnknown(destReturnType)) { + const srcReturnType = solveAndApplyConstraints(getEffectiveReturnType(srcType), constraints); + const returnDiag = diag?.createAddendum(); + + let isReturnTypeCompatible = false; + + let effectiveFlags = flags; + + // If the source has a declared return type that includes a literal + // in its annotation, assume that we will want the constraint + // solver to retain literals. + if ( + srcType.shared.declaredReturnType && + containsLiteralType(srcType.shared.declaredReturnType, /* includeTypeArgs */ true) + ) { + effectiveFlags |= AssignTypeFlags.RetainLiteralsForTypeVar; + } + + if ( + assignType( + destReturnType, + srcReturnType, + returnDiag?.createAddendum(), + constraints, + effectiveFlags, + recursionCount + ) + ) { + isReturnTypeCompatible = true; + } else { + // Handle the special case where the return type is a TypeGuard[T] + // or TypeIs[T]. This should also act as a bool, since that's its + // type at runtime. + if ( + isClassInstance(srcReturnType) && + ClassType.isBuiltIn(srcReturnType, ['TypeGuard', 'TypeIs']) && + prefetched?.boolClass && + isInstantiableClass(prefetched.boolClass) + ) { + if ( + assignType( + destReturnType, + ClassType.cloneAsInstance(prefetched.boolClass), + returnDiag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + isReturnTypeCompatible = true; + } + } + } + + if (!isReturnTypeCompatible) { + if (returnDiag) { + returnDiag.addMessage( + LocAddendum.functionReturnTypeMismatch().format({ + sourceType: printType(srcReturnType), + destType: printType(destReturnType), + }) + ); + } + canAssign = false; + } + } + } + + return canAssign; + } + + // When a value is assigned to a variable with a declared type, + // we may be able to narrow the type based on the assignment. 
    // When a value is assigned to a variable with a declared type,
    // we may be able to narrow the type based on the assignment.
    function narrowTypeBasedOnAssignment(declaredType: Type, assignedTypeResult: TypeResult): TypeResult {
        // TODO: The rules for narrowing types on assignment are not defined in
        // the typing spec. Pyright's current logic is currently not even internally
        // consistent and probably not sound from a type theory perspective. It
        // should be completely reworked once there has been a public discussion
        // about the correct behavior.

        const narrowedType = mapSubtypes(assignedTypeResult.type, (assignedSubtype) => {
            // Handle the special case where the assigned type is a literal type.
            // Some types include very large unions of literal types, and we don't
            // want to use an n^2 loop to compare them.
            if (isClass(assignedSubtype) && isLiteralType(assignedSubtype)) {
                if (isUnion(declaredType) && UnionType.containsType(declaredType, assignedSubtype)) {
                    return assignedSubtype;
                }
            }

            const narrowedSubtype = mapSubtypes(declaredType, (declaredSubtype) => {
                // Drop declared subtypes that cannot accept this assigned subtype.
                if (!assignType(declaredSubtype, assignedSubtype)) {
                    return undefined;
                }

                // Retain unknowns for code flow analysis convergence and for
                // unknown type reporting in strict mode.
                if (isUnknown(assignedSubtype)) {
                    return assignedSubtype;
                }

                // If the two types are bidirectionally assignable, they are
                // either equivalent (in which case it doesn't matter which
                // one we choose) or one or both include gradual types (Any, etc.),
                // in which case we'll want to stick with the declared subtype.
                if (assignType(assignedSubtype, declaredSubtype)) {
                    // We need to be careful with TypedDict types that have
                    // narrowed fields. In this case, we want to return the
                    // assigned type.
                    if (
                        isClass(assignedSubtype) &&
                        assignedSubtype.priv.typedDictNarrowedEntries &&
                        isTypeSame(assignedSubtype, declaredSubtype, { ignoreTypedDictNarrowEntries: true })
                    ) {
                        return assignedSubtype;
                    }

                    // We also need to be careful with callback protocols.
                    if (isClassInstance(declaredSubtype) && ClassType.isProtocolClass(declaredSubtype)) {
                        if (isFunctionOrOverloaded(assignedSubtype)) {
                            return assignedSubtype;
                        }
                    }

                    return declaredSubtype;
                }

                return assignedSubtype;
            });

            // If we couldn't assign the assigned subtype any of the declared
            // subtypes, the types are incompatible. Return the unnarrowed form.
            if (isNever(narrowedSubtype)) {
                return assignedSubtype;
            }

            return narrowedSubtype;
        });

        // If the result of narrowing is an Unknown that is incomplete, propagate the
        // incomplete type for the benefit of code flow analysis.
        // If the result of narrowing is a complete Unknown, combine the Unknown type
        // with the declared type. In strict mode, this will retain the "unknown type"
        // diagnostics while still providing reasonable completion suggestions.
        if (isIncompleteUnknown(narrowedType)) {
            return { type: narrowedType, isIncomplete: assignedTypeResult.isIncomplete };
        } else if (isUnknown(narrowedType)) {
            return { type: combineTypes([narrowedType, declaredType]), isIncomplete: assignedTypeResult.isIncomplete };
        }

        return { type: narrowedType, isIncomplete: assignedTypeResult.isIncomplete };
    }

    // Verifies that an override method is compatible with the base method it
    // overrides, dispatching on the four function/overload combinations.
    function validateOverrideMethod(
        baseMethod: Type,
        overrideMethod: FunctionType | OverloadedType,
        baseClass: ClassType | undefined,
        diag: DiagnosticAddendum,
        enforceParamNames = true
    ): boolean {
        // If we're overriding a non-method with a method, report it as an error.
        // This occurs when a non-property overrides a property.
        if (!isFunctionOrOverloaded(baseMethod)) {
            diag.addMessage(LocAddendum.overrideType().format({ type: printType(baseMethod) }));
            return false;
        }

        if (isFunction(baseMethod)) {
            // Handle the easy case - a simple function overriding another simple function.
            if (isFunction(overrideMethod)) {
                return validateOverrideMethodInternal(baseMethod, overrideMethod, diag, enforceParamNames);
            }

            const overloadsAndImpl = [...OverloadedType.getOverloads(overrideMethod)];
            const impl = OverloadedType.getImplementation(overrideMethod);
            if (impl && isFunction(impl)) {
                overloadsAndImpl.push(impl);
            }

            // For an overload overriding a base method, at least one overload
            // or the implementation must be compatible with the base method.
            if (
                overloadsAndImpl.some((overrideOverload) => {
                    return validateOverrideMethodInternal(
                        baseMethod,
                        overrideOverload,
                        /* diag */ undefined,
                        enforceParamNames
                    );
                })
            ) {
                return true;
            }

            diag.addMessage(LocAddendum.overrideNoOverloadMatches());
            return false;
        }

        // For a non-overloaded method overriding an overloaded method, the
        // override must match all of the overloads.
        if (isFunction(overrideMethod)) {
            return OverloadedType.getOverloads(baseMethod).every((overload) => {
                // If the override isn't applicable for this base class, skip the check.
                if (baseClass && !isOverrideMethodApplicable(overload, baseClass)) {
                    return true;
                }

                // NOTE(review): diag is non-optional in this function's signature,
                // so the optional chaining below is redundant but harmless.
                return validateOverrideMethodInternal(
                    overload,
                    overrideMethod,
                    diag?.createAddendum(),
                    enforceParamNames
                );
            });
        }

        // For an overloaded method overriding an overloaded method, the overrides
        // must all match and be in the correct order. It is OK if the base method
        // has additional overloads that are not present in the override.

        let previousMatchIndex = -1;
        const baseOverloads = OverloadedType.getOverloads(baseMethod);

        for (const overrideOverload of OverloadedType.getOverloads(overrideMethod)) {
            let possibleMatchIndex: number | undefined;

            let matchIndex = baseOverloads.findIndex((baseOverload, index) => {
                // If the override isn't applicable for this base class, skip the check.
                if (baseClass && !isOverrideMethodApplicable(baseOverload, baseClass)) {
                    return false;
                }

                const isCompatible = validateOverrideMethodInternal(
                    baseOverload,
                    overrideOverload,
                    /* diag */ undefined,
                    enforceParamNames
                );

                // If the override is compatible but the match is one that is below the previous
                // matched index, keep looking for additional matches. Record the fact that
                // we found at least one match.
                if (isCompatible && index <= previousMatchIndex && possibleMatchIndex === undefined) {
                    possibleMatchIndex = index;
                    return false;
                }

                return isCompatible;
            });

            // Fall back to the recorded out-of-order match if no in-order match was found.
            if (matchIndex < 0 && possibleMatchIndex !== undefined) {
                matchIndex = possibleMatchIndex;
            }

            if (matchIndex < 0) {
                break;
            }

            if (matchIndex < previousMatchIndex) {
                diag.addMessage(LocAddendum.overrideOverloadOrder());
                return false;
            }

            previousMatchIndex = matchIndex;
        }

        if (previousMatchIndex < baseOverloads.length - 1) {
            const unmatchedOverloads = baseOverloads.slice(previousMatchIndex + 1);

            // See if all of the remaining overrides are nonapplicable.
            if (
                !baseClass ||
                unmatchedOverloads.some((overload) => {
                    return isOverrideMethodApplicable(overload, baseClass);
                })
            ) {
                // We didn't find matches for all of the base overloads.
                diag.addMessage(LocAddendum.overrideOverloadNoMatch());
                return false;
            }
        }

        return true;
    }

    // Determines whether a child class override is applicable to a parent
    // class method signature. This is important in cases where the parent
    // class defines an overload where some of the overload signatures supply
    // explicit type annotations for the "self" or "cls" parameter and some
    // of these do not apply to the child class.
+ function isOverrideMethodApplicable(baseMethod: FunctionType, childClass: ClassType): boolean { + if ( + !FunctionType.isInstanceMethod(baseMethod) && + !FunctionType.isClassMethod(baseMethod) && + !FunctionType.isConstructorMethod(baseMethod) + ) { + return true; + } + + const baseParamDetails = getParamListDetails(baseMethod); + if (baseParamDetails.params.length === 0) { + return true; + } + + const baseParamType = baseParamDetails.params[0].param; + + if (baseParamType.category !== ParamCategory.Simple || !FunctionParam.isTypeDeclared(baseParamType)) { + return true; + } + + // If this is a self or cls parameter, determine whether the override + // class can be assigned to the base parameter type. If not, then this + // override doesn't apply. This is important for overloads where the + // base class contains some overload signatures that are not applicable + // to the child class. + const childSelfOrClsType = FunctionType.isInstanceMethod(baseMethod) + ? ClassType.cloneAsInstance(childClass) + : childClass; + + return assignType( + baseParamDetails.params[0].type, + childSelfOrClsType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Default + ); + } + + // Determines whether the override method is compatible with the overridden method. + // This is used both for parent/child overrides and implicit overrides for peer + // classes in a multi-inheritance case. If enforceParamNames is true, the parameter + // names of non-positional-only parameters are enforced. 
    function validateOverrideMethodInternal(
        baseMethod: FunctionType,
        overrideMethod: FunctionType,
        diag: DiagnosticAddendum | undefined,
        enforceParamNames: boolean
    ): boolean {
        const baseParamDetails = getParamListDetails(baseMethod);
        const overrideParamDetails = getParamListDetails(overrideMethod);
        const constraints = new ConstraintTracker();

        // Accumulates the verdict; checks below flip it to false and keep going
        // so that multiple incompatibilities can be reported on one diagnostic.
        let canOverride = true;

        if (!FunctionType.isGradualCallableForm(baseMethod) && !FunctionType.isGradualCallableForm(overrideMethod)) {
            // Verify that we're not overriding a static, class or instance method with
            // an incompatible type.
            if (FunctionType.isStaticMethod(baseMethod)) {
                if (!FunctionType.isStaticMethod(overrideMethod)) {
                    diag?.addMessage(LocAddendum.overrideNotStaticMethod());
                    canOverride = false;
                }
            } else if (FunctionType.isClassMethod(baseMethod)) {
                if (!FunctionType.isClassMethod(overrideMethod)) {
                    diag?.addMessage(LocAddendum.overrideNotClassMethod());
                    canOverride = false;
                }
            } else if (FunctionType.isInstanceMethod(baseMethod)) {
                if (!FunctionType.isInstanceMethod(overrideMethod)) {
                    diag?.addMessage(LocAddendum.overrideNotInstanceMethod());
                    canOverride = false;
                }
            }

            // Verify that the positional param count matches exactly or that the override
            // adds only params that preserve the original signature.
+ let foundParamCountMismatch = false; + if (overrideParamDetails.positionParamCount < baseParamDetails.positionParamCount) { + if (overrideParamDetails.argsIndex === undefined) { + foundParamCountMismatch = true; + } else { + const overrideArgsType = overrideParamDetails.params[overrideParamDetails.argsIndex].type; + for ( + let i = overrideParamDetails.positionParamCount; + i < baseParamDetails.positionParamCount; + i++ + ) { + if ( + !assignType( + overrideArgsType, + baseParamDetails.params[i].type, + diag?.createAddendum(), + constraints, + AssignTypeFlags.Default + ) + ) { + LocAddendum.overrideParamType().format({ + index: i + 1, + baseType: printType(baseParamDetails.params[i].type), + overrideType: printType(overrideArgsType), + }); + canOverride = false; + } + } + } + } else if (overrideParamDetails.positionParamCount > baseParamDetails.positionParamCount) { + // Verify that all of the override parameters that extend the + // signature are either *args, **kwargs or parameters with + // default values. + + for (let i = baseParamDetails.positionParamCount; i < overrideParamDetails.positionParamCount; i++) { + const overrideParam = overrideParamDetails.params[i].param; + + if ( + overrideParam.category === ParamCategory.Simple && + overrideParam.name && + !overrideParamDetails.params[i].defaultType + ) { + foundParamCountMismatch = true; + } + } + } + + if (foundParamCountMismatch) { + diag?.addMessage( + LocAddendum.overridePositionalParamCount().format({ + baseCount: baseParamDetails.params.length, + overrideCount: overrideParamDetails.params.length, + }) + ); + canOverride = false; + } + + const positionalParamCount = Math.min( + baseParamDetails.positionParamCount, + overrideParamDetails.positionParamCount + ); + + for (let i = 0; i < positionalParamCount; i++) { + // If the first parameter is a "self" or "cls" parameter, skip the + // test because these are allowed to violate the Liskov substitution + // principle. 
                if (i === 0) {
                    if (
                        FunctionType.isInstanceMethod(overrideMethod) ||
                        FunctionType.isClassMethod(overrideMethod) ||
                        FunctionType.isConstructorMethod(overrideMethod)
                    ) {
                        continue;
                    }
                }

                const baseParam = baseParamDetails.params[i].param;
                const overrideParam = overrideParamDetails.params[i].param;

                // Enforce matching parameter names for params that are not
                // position-only in the base (callers may pass them by keyword).
                if (
                    i >= baseParamDetails.positionOnlyParamCount &&
                    !isPrivateOrProtectedName(baseParam.name || '') &&
                    baseParamDetails.params[i].kind !== ParamKind.Positional &&
                    baseParam.category === ParamCategory.Simple &&
                    enforceParamNames &&
                    baseParam.name !== overrideParam.name
                ) {
                    if (overrideParam.category === ParamCategory.Simple) {
                        if (!FunctionParam.isNameSynthesized(baseParam)) {
                            if (overrideParamDetails.params[i].kind === ParamKind.Positional) {
                                diag?.addMessage(
                                    LocAddendum.overrideParamNamePositionOnly().format({
                                        index: i + 1,
                                        baseName: baseParam.name || '*',
                                    })
                                );
                            } else {
                                diag?.addMessage(
                                    LocAddendum.overrideParamName().format({
                                        index: i + 1,
                                        baseName: baseParam.name || '*',
                                        overrideName: overrideParam.name || '*',
                                    })
                                );
                            }
                            canOverride = false;
                        }
                    }
                } else if (
                    i < overrideParamDetails.positionOnlyParamCount &&
                    i >= baseParamDetails.positionOnlyParamCount
                ) {
                    // The override makes a param position-only that the base allows
                    // by keyword.
                    if (
                        !FunctionParam.isNameSynthesized(baseParam) &&
                        baseParamDetails.params[i].kind !== ParamKind.Positional &&
                        baseParamDetails.params[i].kind !== ParamKind.ExpandedArgs
                    ) {
                        diag?.addMessage(
                            LocAddendum.overrideParamNamePositionOnly().format({
                                index: i + 1,
                                baseName: baseParam.name || '*',
                            })
                        );
                        canOverride = false;
                    }
                } else {
                    const baseParamType = baseParamDetails.params[i].type;
                    const overrideParamType = overrideParamDetails.params[i].type;

                    const baseIsSynthesizedTypeVar = isTypeVar(baseParamType) && baseParamType.shared.isSynthesized;
                    const overrideIsSynthesizedTypeVar =
                        isTypeVar(overrideParamType) && overrideParamType.shared.isSynthesized;

                    if (!baseIsSynthesizedTypeVar && !overrideIsSynthesizedTypeVar) {
                        // Parameter types are contravariant: the override's param type
                        // must accept the base's param type.
                        if (
                            baseParam.category !== overrideParam.category ||
                            !assignType(
                                overrideParamType,
                                baseParamType,
                                diag?.createAddendum(),
                                constraints,
                                AssignTypeFlags.Default
                            )
                        ) {
                            diag?.addMessage(
                                LocAddendum.overrideParamType().format({
                                    index: i + 1,
                                    baseType: printType(baseParamType),
                                    overrideType: printType(overrideParamType),
                                })
                            );
                            canOverride = false;
                        }
                    }

                    // An override may not remove a default value the base provides.
                    if (baseParamDetails.params[i].defaultType && !overrideParamDetails.params[i].defaultType) {
                        diag?.addMessage(
                            LocAddendum.overrideParamNoDefault().format({
                                index: i + 1,
                            })
                        );
                        canOverride = false;
                    }
                }
            }

            // Check for positional (named) parameters in the base method that
            // do not exist in the override.
            if (enforceParamNames && overrideParamDetails.kwargsIndex === undefined) {
                for (let i = positionalParamCount; i < baseParamDetails.positionParamCount; i++) {
                    const baseParam = baseParamDetails.params[i];

                    if (baseParam.kind === ParamKind.Standard && baseParam.param.category === ParamCategory.Simple) {
                        diag?.addMessage(
                            LocAddendum.overrideParamNamePositionOnly().format({
                                index: i + 1,
                                baseName: baseParam.param.name || '*',
                            })
                        );
                        canOverride = false;
                    }
                }
            }

            // Check for a *args match.
            if (baseParamDetails.argsIndex !== undefined) {
                if (overrideParamDetails.argsIndex === undefined) {
                    diag?.addMessage(
                        LocAddendum.overrideParamNameMissing().format({
                            name: baseParamDetails.params[baseParamDetails.argsIndex].param.name ?? '?',
                        })
                    );
                    canOverride = false;
                } else {
                    const overrideParamType = overrideParamDetails.params[overrideParamDetails.argsIndex].type;
                    const baseParamType = baseParamDetails.params[baseParamDetails.argsIndex].type;

                    if (
                        !assignType(
                            overrideParamType,
                            baseParamType,
                            diag?.createAddendum(),
                            constraints,
                            AssignTypeFlags.Default
                        )
                    ) {
                        diag?.addMessage(
                            LocAddendum.overrideParamKeywordType().format({
                                name: overrideParamDetails.params[overrideParamDetails.argsIndex].param.name ?? '?',
                                baseType: printType(baseParamType),
                                overrideType: printType(overrideParamType),
                            })
                        );
                        canOverride = false;
                    }
                }
            }

            // Now check any keyword-only parameters.
            const baseKwOnlyParams = baseParamDetails.params.filter(
                (paramInfo) => paramInfo.kind === ParamKind.Keyword && paramInfo.param.category === ParamCategory.Simple
            );
            const overrideKwOnlyParams = overrideParamDetails.params.filter(
                (paramInfo) => paramInfo.kind === ParamKind.Keyword && paramInfo.param.category === ParamCategory.Simple
            );

            baseKwOnlyParams.forEach((paramInfo) => {
                const overrideParamInfo = overrideKwOnlyParams.find((pi) => paramInfo.param.name === pi.param.name);

                if (!overrideParamInfo && overrideParamDetails.kwargsIndex === undefined) {
                    diag?.addMessage(
                        LocAddendum.overrideParamNameMissing().format({
                            name: paramInfo.param.name ?? '?',
                        })
                    );
                    canOverride = false;
                } else {
                    let targetParamType = overrideParamInfo?.type;
                    if (!targetParamType) {
                        // Safe non-null assertion: if overrideParamInfo is undefined,
                        // the branch condition above guarantees kwargsIndex is defined.
                        targetParamType = overrideParamDetails.params[overrideParamDetails.kwargsIndex!].type;
                    }

                    if (
                        !assignType(
                            targetParamType,
                            paramInfo.type,
                            diag?.createAddendum(),
                            constraints,
                            AssignTypeFlags.Default
                        )
                    ) {
                        diag?.addMessage(
                            LocAddendum.overrideParamKeywordType().format({
                                name: paramInfo.param.name ?? '?',
                                baseType: printType(paramInfo.type),
                                overrideType: printType(targetParamType),
                            })
                        );
                        canOverride = false;
                    }

                    if (overrideParamInfo) {
                        if (paramInfo.defaultType && !overrideParamInfo.defaultType) {
                            diag?.addMessage(
                                LocAddendum.overrideParamKeywordNoDefault().format({
                                    name: overrideParamInfo.param.name ?? '?',
                                })
                            );
                            canOverride = false;
                        }
                    }
                }
            });

            // Verify that any keyword-only parameters added by the overload are compatible
            // with the **kwargs in the base.
            overrideKwOnlyParams.forEach((paramInfo) => {
                const baseParamInfo = baseKwOnlyParams.find((pi) => paramInfo.param.name === pi.param.name);

                if (!baseParamInfo) {
                    if (baseParamDetails.kwargsIndex === undefined) {
                        if (!paramInfo.defaultType) {
                            diag?.addMessage(
                                LocAddendum.overrideParamNameExtra().format({
                                    name: paramInfo.param.name ?? '?',
                                })
                            );
                            canOverride = false;
                        }
                    } else {
                        // Base has a **kwargs; ensure the added keyword-only parameter's
                        // type is compatible with the base's **kwargs value type.
                        const baseKwargsType = baseParamDetails.params[baseParamDetails.kwargsIndex].type;
                        if (
                            !assignType(
                                paramInfo.type,
                                baseKwargsType,
                                diag?.createAddendum(),
                                constraints,
                                AssignTypeFlags.Default
                            )
                        ) {
                            diag?.addMessage(
                                LocAddendum.overrideParamKeywordType().format({
                                    name: paramInfo.param.name ?? '?',
                                    baseType: printType(baseKwargsType),
                                    overrideType: printType(paramInfo.type),
                                })
                            );
                            canOverride = false;
                        }
                    }
                }
            });

            // Verify that if the base method has a **kwargs parameter, the override does too.
            if (baseParamDetails.kwargsIndex !== undefined && overrideParamDetails.kwargsIndex === undefined) {
                diag?.addMessage(
                    LocAddendum.kwargsParamMissing().format({
                        paramName: baseParamDetails.params[baseParamDetails.kwargsIndex].param.name!,
                    })
                );
                canOverride = false;
            }
        }

        // Verify that one or the other method doesn't contain a ParamSpec.
        if (baseParamDetails.paramSpec && !overrideParamDetails.paramSpec) {
            // If the override uses an `*args: Any, **kwargs: Any` signature, we
            // will allow this as an acceptable overload for a `*args: P.args, **kwargs: P.kwargs`.
            const overrideHasArgsKwargs =
                overrideParamDetails.argsIndex !== undefined &&
                isAnyOrUnknown(overrideParamDetails.params[overrideParamDetails.argsIndex].type) &&
                overrideParamDetails.kwargsIndex !== undefined &&
                isAnyOrUnknown(overrideParamDetails.params[overrideParamDetails.kwargsIndex].type);

            if (!overrideHasArgsKwargs) {
                diag?.addMessage(LocAddendum.paramSpecMissingInOverride());
                canOverride = false;
            }
        }

        // Now check the return type. Return types are covariant: the override's
        // return type must be assignable to the base's return type.
        const baseReturnType = getEffectiveReturnType(baseMethod);
        const overrideReturnType = solveAndApplyConstraints(getEffectiveReturnType(overrideMethod), constraints);

        if (
            !assignType(
                baseReturnType,
                overrideReturnType,
                diag?.createAddendum(),
                constraints,
                AssignTypeFlags.Default
            )
        ) {
            diag?.addMessage(
                LocAddendum.overrideReturnType().format({
                    baseType: printType(baseReturnType),
                    overrideType: printType(overrideReturnType),
                })
            );

            canOverride = false;
        }

        return canOverride;
    }

    // Validates that the specified source type matches the constraints
    // of the type variable. If successful, it returns the constraint
    // type that applies. If unsuccessful, it returns undefined.
    function applyTypeArgToTypeVar(destType: TypeVarType, srcType: Type, diag: DiagnosticAddendum): Type | undefined {
        if (isAnyOrUnknown(srcType)) {
            return srcType;
        }

        let effectiveSrcType: Type = transformPossibleRecursiveTypeAlias(srcType);

        if (isTypeVar(srcType)) {
            if (isTypeSame(srcType, destType)) {
                return srcType;
            }

            effectiveSrcType = makeTopLevelTypeVarsConcrete(srcType);
        }

        // If this is a partially-evaluated class, don't perform any further
        // checks. Assume in this case that the type is compatible with the
        // bound or constraint.
        if (isClass(effectiveSrcType) && ClassType.isPartiallyEvaluated(effectiveSrcType)) {
            return srcType;
        }

        // If there's a bound type, make sure the source is derived from it.
        if (destType.shared.boundType && !isTypeAliasPlaceholder(effectiveSrcType)) {
            if (
                !assignType(
                    destType.shared.boundType,
                    effectiveSrcType,
                    diag.createAddendum(),
                    /* constraints */ undefined
                )
            ) {
                // Avoid adding a message that will confuse users if the TypeVar was
                // synthesized for internal purposes.
                if (!destType.shared.isSynthesized) {
                    diag.addMessage(
                        LocAddendum.typeBound().format({
                            sourceType: printType(effectiveSrcType),
                            destType: printType(destType.shared.boundType),
                            name: TypeVarType.getReadableName(destType),
                        })
                    );
                }
                return undefined;
            }
        }

        if (isParamSpec(destType)) {
            // A ParamSpec accepts another ParamSpec, a ParamSpec-value function,
            // or a Concatenate form; anything else is an error.
            if (isParamSpec(srcType)) {
                return srcType;
            }

            if (isFunction(srcType) && FunctionType.isParamSpecValue(srcType)) {
                return srcType;
            }

            if (isClassInstance(srcType) && ClassType.isBuiltIn(srcType, 'Concatenate')) {
                return srcType;
            }

            diag.addMessage(
                LocAddendum.typeParamSpec().format({
                    type: printType(srcType),
                    name: TypeVarType.getReadableName(destType),
                })
            );

            return undefined;
        }

        if (isParamSpec(srcType)) {
            diag.addMessage(LocMessage.paramSpecContext());
            return undefined;
        }

        // If there are no constraints, we're done.
        const constraints = destType.shared.constraints;
        if (constraints.length === 0) {
            return srcType;
        }

        if (isTypeAliasPlaceholder(srcType)) {
            return srcType;
        }

        if (isTypeVar(srcType) && TypeVarType.hasConstraints(srcType)) {
            // Make sure all the source constraint types map to constraint types in the dest.
            if (
                srcType.shared.constraints.every((sourceConstraint) => {
                    return constraints.some((destConstraint) => assignType(destConstraint, sourceConstraint));
                })
            ) {
                return srcType;
            }
        } else {
            let bestConstraintSoFar: Type | undefined;

            // Try to find the best (narrowest) match among the constraints.
            for (const constraint of constraints) {
                if (assignType(constraint, effectiveSrcType)) {
                    // Don't allow Never to match unless the constraint is also explicitly Never.
                    if (!isNever(effectiveSrcType) || isNever(constraint)) {
                        if (!bestConstraintSoFar || assignType(bestConstraintSoFar, constraint)) {
                            bestConstraintSoFar = constraint;
                        }
                    }
                }
            }

            if (bestConstraintSoFar) {
                return bestConstraintSoFar;
            }
        }

        diag.addMessage(
            LocAddendum.typeConstrainedTypeVar().format({
                type: printType(srcType),
                name: TypeVarType.getReadableName(destType),
            })
        );

        return undefined;
    }

    // Returns a list of unimplemented abstract symbols (methods or variables) for
    // the specified class.
    function getAbstractSymbols(classType: ClassType): AbstractSymbol[] {
        const symbolTable = new Map();

        // Walk the MRO from most-base to most-derived so later (more derived)
        // classes can override earlier abstract entries.
        ClassType.getReverseMro(classType).forEach((mroClass) => {
            if (isInstantiableClass(mroClass)) {
                // See if this class is introducing a new abstract symbol that has not been
                // introduced previously or if it is overriding an abstract symbol with
                // a non-abstract one.
                ClassType.getSymbolTable(mroClass).forEach((symbol, symbolName) => {
                    const abstractSymbolInfo = getAbstractSymbolInfo(mroClass, symbolName);

                    if (abstractSymbolInfo) {
                        symbolTable.set(symbolName, abstractSymbolInfo);
                    } else {
                        symbolTable.delete(symbolName);
                    }
                });
            }
        });

        // Create a final list of symbols that are abstract.
        const symbolList: AbstractSymbol[] = [];
        symbolTable.forEach((method) => {
            symbolList.push(method);
        });

        return symbolList;
    }

    // If the memberType is an instance or class method, creates a new
    // version of the function that has the "self" or "cls" parameter bound
    // to it. If treatConstructorAsClassMethod is true, the function is
    // treated like a class method even if it's not marked as such. That's
    // needed to special-case the __new__ magic method when it's invoked as
    // a constructor (as opposed to by name).
    function bindFunctionToClassOrObject(
        baseType: ClassType | undefined,
        memberType: FunctionType | OverloadedType,
        memberClass?: ClassType,
        treatConstructorAsClassMethod = false,
        selfType?: ClassType | TypeVarType,
        diag?: DiagnosticAddendum,
        recursionCount = 0
    ): FunctionType | OverloadedType | undefined {
        // Bind each signature of an overloaded type individually.
        return mapSignatures(memberType, (functionType) => {
            // If the caller specified no base type, always strip the
            // first parameter. This is used in cases like constructors.
            if (!baseType) {
                return FunctionType.clone(functionType, /* stripFirstParam */ true);
            }

            // If the first parameter was already stripped, it has already been
            // bound. Don't attempt to rebind.
            if (functionType.priv.strippedFirstParamType) {
                return functionType;
            }

            if (FunctionType.isInstanceMethod(functionType)) {
                // If the baseType is a metaclass, don't specialize the function.
                if (isInstantiableMetaclass(baseType)) {
                    return functionType;
                }

                const baseObj: ClassType = isClassInstance(baseType)
                    ? baseType
                    : ClassType.cloneAsInstance(specializeWithDefaultTypeArgs(baseType));

                // Strip "self" when binding through an instance; also when
                // accessing an instance method through a metaclass member.
                let stripFirstParam = false;
                if (isClassInstance(baseType)) {
                    stripFirstParam = true;
                } else if (memberClass && isInstantiableMetaclass(memberClass)) {
                    stripFirstParam = true;
                }

                return partiallySpecializeBoundMethod(
                    baseType,
                    functionType,
                    diag,
                    recursionCount,
                    selfType ?? baseObj,
                    stripFirstParam
                );
            }

            if (
                FunctionType.isClassMethod(functionType) ||
                (treatConstructorAsClassMethod && FunctionType.isConstructorMethod(functionType))
            ) {
                const baseClass = isInstantiableClass(baseType) ? baseType : ClassType.cloneAsInstantiable(baseType);
                const clsType = selfType ? (convertToInstantiable(selfType) as ClassType | TypeVarType) : undefined;

                return partiallySpecializeBoundMethod(
                    baseClass,
                    functionType,
                    diag,
                    recursionCount,
                    clsType ?? baseClass,
                    /* stripFirstParam */ true
                );
            }

            if (FunctionType.isStaticMethod(functionType)) {
                const baseClass = isInstantiableClass(baseType) ? baseType : ClassType.cloneAsInstantiable(baseType);

                return partiallySpecializeBoundMethod(
                    baseClass,
                    functionType,
                    diag,
                    recursionCount,
                    /* firstParamType */ undefined,
                    /* stripFirstParam */ false
                );
            }

            return functionType;
        });
    }

    // Specializes the specified function for the specified class,
    // optionally stripping the first first parameter (the "self" or "cls")
    // off of the specialized function in the process. The baseType
    // is the type used to reference the member.
    function partiallySpecializeBoundMethod(
        baseType: ClassType,
        memberType: FunctionType,
        diag: DiagnosticAddendum | undefined,
        recursionCount: number,
        firstParamType: ClassType | TypeVarType | undefined,
        stripFirstParam = true
    ): FunctionType | undefined {
        const constraints = new ConstraintTracker();

        if (firstParamType) {
            if (memberType.shared.parameters.length > 0) {
                const memberTypeFirstParam = memberType.shared.parameters[0];
                const memberTypeFirstParamType = FunctionType.getParamType(memberType, 0);

                if (
                    isTypeVar(memberTypeFirstParamType) &&
                    memberTypeFirstParamType.shared.boundType &&
                    isClassInstance(memberTypeFirstParamType.shared.boundType) &&
                    ClassType.isProtocolClass(memberTypeFirstParamType.shared.boundType)
                ) {
                    // Handle the protocol class specially. Some protocol classes
                    // contain references to themselves or their subclasses, so if
                    // we attempt to call assignType, we'll risk infinite recursion.
                    // Instead, we'll assume it's assignable.
                    constraints.setBounds(
                        memberTypeFirstParamType,
                        TypeBase.isInstantiable(memberTypeFirstParamType)
                            ? convertToInstance(firstParamType)
                            : firstParamType
                    );
                } else {
                    const subDiag = diag?.createAddendum();

                    // Protect against the case where a callback protocol is being
                    // bound to its own __call__ method but the first parameter
                    // is annotated with its own callable type. This can lead to
                    // infinite recursion.
                    if (isFunctionOrOverloaded(memberTypeFirstParamType)) {
                        if (isClassInstance(firstParamType) && ClassType.isProtocolClass(firstParamType)) {
                            if (subDiag) {
                                subDiag.addMessage(
                                    LocMessage.bindTypeMismatch().format({
                                        type: printType(firstParamType),
                                        methodName: memberType.shared.name || '',
                                        paramName: memberTypeFirstParam.name || '__p0',
                                    })
                                );
                            }
                            return undefined;
                        }
                    }

                    if (
                        !assignType(
                            memberTypeFirstParamType,
                            firstParamType,
                            subDiag?.createAddendum(),
                            constraints,
                            AssignTypeFlags.AllowUnspecifiedTypeArgs,
                            recursionCount
                        )
                    ) {
                        // Report the bind failure only for explicitly-named,
                        // explicitly-typed first parameters; synthesized or
                        // untyped ones bind permissively.
                        if (
                            memberTypeFirstParam.name &&
                            !FunctionParam.isNameSynthesized(memberTypeFirstParam) &&
                            FunctionParam.isTypeDeclared(memberTypeFirstParam)
                        ) {
                            if (subDiag) {
                                subDiag.addMessage(
                                    LocMessage.bindTypeMismatch().format({
                                        type: printType(firstParamType),
                                        methodName: memberType.shared.name || '',
                                        paramName: memberTypeFirstParam.name,
                                    })
                                );
                            }
                            return undefined;
                        }
                    }
                }
            } else {
                // The method has no parameters at all, so there is nothing to
                // bind "self"/"cls" to.
                const subDiag = diag?.createAddendum();
                if (subDiag) {
                    subDiag.addMessage(
                        LocMessage.bindParamMissing().format({
                            methodName: memberType.shared.name || '',
                        })
                    );
                }
                return undefined;
            }
        }

        // Get the effective return type, which will have the side effect of lazily
        // evaluating (and caching) the inferred return type if there is no defined
        // return type.
        getEffectiveReturnType(memberType);

        const specializedFunction = solveAndApplyConstraints(memberType, constraints);
        if (isFunction(specializedFunction)) {
            return FunctionType.clone(specializedFunction, stripFirstParam, baseType);
        }

        if (isOverloaded(specializedFunction)) {
            // For overloaded functions, use the first overload. This isn't
            // strictly correct, but this is an extreme edge case.
            return FunctionType.clone(OverloadedType.getOverloads(specializedFunction)[0], stripFirstParam, baseType);
        }

        return undefined;
    }

    // Returns true if any declaration of the symbol is a Final variable declaration.
    function isFinalVariable(symbol: Symbol): boolean {
        return symbol.getDeclarations().some((decl) => isFinalVariableDeclaration(decl));
    }

    // Returns true if the declaration is a variable declaration marked Final.
    function isFinalVariableDeclaration(decl: Declaration): boolean {
        return decl.type === DeclarationType.Variable && !!decl.isFinal;
    }

    // Returns true if the declaration is a variable explicitly annotated
    // with `TypeAlias`.
    function isExplicitTypeAliasDeclaration(decl: Declaration): boolean {
        if (decl.type !== DeclarationType.Variable || !decl.typeAnnotationNode) {
            return false;
        }

        if (
            decl.typeAnnotationNode.nodeType !== ParseNodeType.Name &&
            decl.typeAnnotationNode.nodeType !== ParseNodeType.MemberAccess &&
            decl.typeAnnotationNode.nodeType !== ParseNodeType.StringList
        ) {
            return false;
        }

        const type = getTypeOfAnnotation(decl.typeAnnotationNode, { varTypeAnnotation: true, allowClassVar: true });
        return isClassInstance(type) && ClassType.isBuiltIn(type, 'TypeAlias');
    }

    // Returns true if the declaration looks like it could be an implicit
    // (unannotated) type alias assignment.
    function isPossibleTypeAliasDeclaration(decl: Declaration): boolean {
        if (decl.type !== DeclarationType.Variable || !decl.typeAliasName || decl.typeAnnotationNode) {
            return false;
        }

        if (decl.node.parent?.nodeType !== ParseNodeType.Assignment) {
            return false;
        }

        // Perform a sanity check on the RHS expression. Some expression
        // forms should never be considered legitimate for type aliases.
+ return isLegalTypeAliasExpressionForm(decl.node.parent.d.rightExpr, /* allowStrLiteral */ false); + } + + function isLegalTypeAliasExpressionForm(node: ExpressionNode, allowStrLiteral: boolean): boolean { + switch (node.nodeType) { + case ParseNodeType.Error: + case ParseNodeType.UnaryOperation: + case ParseNodeType.AssignmentExpression: + case ParseNodeType.TypeAnnotation: + case ParseNodeType.Await: + case ParseNodeType.Ternary: + case ParseNodeType.Unpack: + case ParseNodeType.Tuple: + case ParseNodeType.Call: + case ParseNodeType.Comprehension: + case ParseNodeType.Slice: + case ParseNodeType.Yield: + case ParseNodeType.YieldFrom: + case ParseNodeType.Lambda: + case ParseNodeType.Number: + case ParseNodeType.Dictionary: + case ParseNodeType.List: + case ParseNodeType.Set: + return false; + + case ParseNodeType.StringList: + case ParseNodeType.String: + return allowStrLiteral; + + case ParseNodeType.Constant: + return node.d.constType === KeywordType.None; + + case ParseNodeType.BinaryOperation: + return ( + node.d.operator === OperatorType.BitwiseOr && + isLegalTypeAliasExpressionForm(node.d.leftExpr, /* allowStrLiteral */ true) && + isLegalTypeAliasExpressionForm(node.d.rightExpr, /* allowStrLiteral */ true) + ); + + case ParseNodeType.Index: + return isLegalTypeAliasExpressionForm(node.d.leftExpr, allowStrLiteral); + + case ParseNodeType.MemberAccess: + return isLegalTypeAliasExpressionForm(node.d.leftExpr, allowStrLiteral); + } + + return true; + } + + function isLegalImplicitTypeAliasType(type: Type) { + // We explicitly exclude "..." and "Unknown". + if (isEllipsisType(type)) { + return false; + } + + if (isUnknown(type)) { + // If this is a union type, we'll assume that it was meant as a type + // alias even though all of the union subtypes are Unknown. + if (type.props?.specialForm && ClassType.isBuiltIn(type.props.specialForm, 'UnionType')) { + return true; + } + return false; + } + + // Look at the subtypes within the union. 
If any of them are not + // instantiable (other than "None" which is special-cased), it is + // not a legal type alias type. + let isLegal = true; + doForEachSubtype(type, (subtype) => { + if (!TypeBase.isInstantiable(subtype) && !isNoneInstance(subtype)) { + isLegal = false; + } + }); + + return isLegal; + } + + function isPossibleTypeAliasOrTypedDict(decl: Declaration) { + return isPossibleTypeAliasDeclaration(decl) || isPossibleTypeDictFactoryCall(decl); + } + + function isPossibleTypeDictFactoryCall(decl: Declaration) { + if ( + decl.type !== DeclarationType.Variable || + !decl.node.parent || + decl.node.parent.nodeType !== ParseNodeType.Assignment || + decl.node.parent.d.rightExpr?.nodeType !== ParseNodeType.Call + ) { + return false; + } + + const callLeftNode = decl.node.parent.d.rightExpr.d.leftExpr; + + // Use a simple heuristic to determine whether this is potentially + // a call to the TypedDict call. This avoids the expensive (and potentially + // recursive) call to getTypeOfExpression in cases where it's not needed. + if ( + (callLeftNode.nodeType === ParseNodeType.Name && callLeftNode.d.value) === 'TypedDict' || + (callLeftNode.nodeType === ParseNodeType.MemberAccess && + callLeftNode.d.member.d.value === 'TypedDict' && + callLeftNode.d.leftExpr.nodeType === ParseNodeType.Name) + ) { + // See if this is a call to TypedDict. We want to support + // recursive type references in a TypedDict call. + const callType = getTypeOfExpression(callLeftNode, EvalFlags.CallBaseDefaults).type; + + if (isInstantiableClass(callType) && ClassType.isBuiltIn(callType, 'TypedDict')) { + return true; + } + } + + return false; + } + + function printObjectTypeForClass(type: ClassType): string { + return TypePrinter.printObjectTypeForClass(type, evaluatorOptions.printTypeFlags, getEffectiveReturnType); + } + + function printFunctionParts(type: FunctionType, extraFlags?: TypePrinter.PrintTypeFlags): [string[], string] { + const flags = extraFlags ? 
evaluatorOptions.printTypeFlags | extraFlags : evaluatorOptions.printTypeFlags; + return TypePrinter.printFunctionParts(type, flags, getEffectiveReturnType); + } + + // Prints two types and determines whether they need to be output in + // fully-qualified form for disambiguation. + function printSrcDestTypes( + srcType: Type, + destType: Type, + options?: PrintTypeOptions + ): { sourceType: string; destType: string } { + const simpleSrcType = printType(srcType, options); + const simpleDestType = printType(destType, options); + + if (simpleSrcType !== simpleDestType) { + return { sourceType: simpleSrcType, destType: simpleDestType }; + } + + const fullSrcType = printType(srcType, { ...(options ?? {}), useFullyQualifiedNames: true }); + const fullDestType = printType(destType, { ...(options ?? {}), useFullyQualifiedNames: true }); + + if (fullSrcType !== fullDestType) { + return { sourceType: fullSrcType, destType: fullDestType }; + } + + return { sourceType: simpleSrcType, destType: simpleDestType }; + } + + function isTypeFormSupported(node: ParseNode) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + + // For now, enable only if enableExperimentalFeatures is true. 
+ return fileInfo.diagnosticRuleSet.enableExperimentalFeatures; + } + + function printType(type: Type, options?: PrintTypeOptions): string { + let flags = evaluatorOptions.printTypeFlags; + + if (options?.expandTypeAlias) { + flags |= TypePrinter.PrintTypeFlags.ExpandTypeAlias; + } + if (options?.enforcePythonSyntax) { + flags |= TypePrinter.PrintTypeFlags.PythonSyntax; + } + if (options?.useTypingUnpack) { + flags |= TypePrinter.PrintTypeFlags.UseTypingUnpack; + } + if (options?.printUnknownWithAny) { + flags |= TypePrinter.PrintTypeFlags.PrintUnknownWithAny; + } + if (options?.printTypeVarVariance) { + flags |= TypePrinter.PrintTypeFlags.PrintTypeVarVariance; + } + if (options?.omitTypeArgsIfUnknown) { + flags |= TypePrinter.PrintTypeFlags.OmitTypeArgsIfUnknown; + } + if (options?.useFullyQualifiedNames) { + flags |= TypePrinter.PrintTypeFlags.UseFullyQualifiedNames; + } + + return TypePrinter.printType(type, flags, getEffectiveReturnType); + } + + // Calls back into the parser to parse the contents of a string literal. + // This is unfortunately needed in some cases — specifically where the + // parser couldn't determine that the string literal would be used in + // a context where it should be treated as a forward-declared type. This + // call produces an expression tree that is not attached to the main parse + // expression tree because we don't want to mutate the latter; the + // expression tree created by this function is therefore used only temporarily. + function parseStringAsTypeAnnotation(node: StringListNode, reportErrors: boolean): ExpressionNode | undefined { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + const parser = new Parser(); + const textValue = node.d.strings[0].d.value; + + // Determine the offset within the file where the string + // literal's contents begin. 
+ let valueOffset = node.d.strings[0].start; + if (node.d.strings[0].nodeType === ParseNodeType.String) { + valueOffset += node.d.strings[0].d.token.prefixLength + node.d.strings[0].d.token.quoteMarkLength; + } + + // Construct a temporary dummy string with the text value at the appropriate + // offset so as to mimic the original file. This will keep all of the token + // and diagnostic offsets correct. + const dummyFileContents = ' '.repeat(valueOffset) + textValue; + + const parseOptions = new ParseOptions(); + parseOptions.isStubFile = fileInfo.isStubFile; + parseOptions.pythonVersion = fileInfo.executionEnvironment.pythonVersion; + parseOptions.reportErrorsForParsedStringContents = true; + + const parseResults = parser.parseTextExpression( + dummyFileContents, + valueOffset, + textValue.length, + parseOptions, + ParseTextMode.Expression, + /* initialParenDepth */ undefined, + fileInfo.typingSymbolAliases + ); + + if (parseResults.parseTree) { + // If there are errors but we are not reporting them, return + // undefined to indicate that the parse failed. + if (!reportErrors && parseResults.diagnostics.length > 0) { + return undefined; + } + + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + parseResults.diagnostics.forEach((diag) => { + fileInfo.diagnosticSink.addDiagnosticWithTextRange('error', diag.message, node); + }); + + parseResults.parseTree.parent = node; + + // Optionally add the new subtree to the parse tree so it can + // participate in language server operations like find and replace. + if (reportErrors) { + node.d.annotation = parseResults.parseTree; + } + + return parseResults.parseTree; + } + + return undefined; + } + + // Given a code flow node and a constrained TypeVar, determines whether that type + // var can be "narrowed" to a single one of its constraints based on isinstance + // checks within the code flow. 
+ function narrowConstrainedTypeVar(node: ParseNode, typeVar: TypeVarType): Type | undefined { + const flowNode = AnalyzerNodeInfo.getFlowNode(node); + + if (!flowNode) { + return undefined; + } + + return codeFlowEngine.narrowConstrainedTypeVar(flowNode, typeVar); + } + + function getPrintExpressionTypesSpaces() { + return ' '.repeat(printExpressionSpaceCount); + } + + function getLineNum(node: ParseNode) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + const range = convertOffsetsToRange(node.start, node.start + node.length, fileInfo.lines); + return (range.start.line + 1).toString(); + } + + function printControlFlowGraph( + flowNode: FlowNode, + reference: CodeFlowReferenceExpressionNode | undefined, + callName: string, + logger: ConsoleInterface + ) { + return codeFlowEngine.printControlFlowGraph(flowNode, reference, callName, logger); + } + + // Track these apis internal usages when logging is on. otherwise, it should be noop. + const getInferredReturnTypeResult = wrapWithLogger(_getInferredReturnTypeResult); + + const evaluatorInterface: TypeEvaluator = { + runWithCancellationToken, + getType, + getTypeResult, + getTypeResultForDecorator, + getCachedType, + getTypeOfExpression, + getTypeOfAnnotation, + getTypeOfClass, + createSubclass, + getTypeOfFunction, + getTypeOfExpressionExpectingType, + getExpectedType, + evaluateTypeForSubnode, + evaluateTypesForStatement, + evaluateTypesForMatchStatement, + evaluateTypesForCaseStatement, + evaluateTypeOfParam, + canBeTruthy, + canBeFalsy, + stripLiteralValue, + removeTruthinessFromType, + removeFalsinessFromType, + stripTypeGuard, + solveAndApplyConstraints, + verifyRaiseExceptionType, + verifyDeleteExpression, + validateOverloadedArgTypes, + validateInitSubclassArgs, + isNodeReachable, + isAfterNodeReachable, + getNodeReachability, + getAfterNodeReachability, + isAsymmetricAccessorAssignment, + suppressDiagnostics, + isSpecialFormClass, + getDeclInfoForStringNode, + getDeclInfoForNameNode, + 
getTypeForDeclaration, + resolveAliasDeclaration, + resolveAliasDeclarationWithInfo, + getTypeOfIterable, + getTypeOfIterator, + getGetterTypeFromProperty, + getTypeOfArg, + convertNodeToArg, + buildTupleTypesList, + markNamesAccessed, + expandPromotionTypes, + makeTopLevelTypeVarsConcrete, + mapSubtypesExpandTypeVars, + isTypeSubsumedByOtherType, + lookUpSymbolRecursive, + getDeclaredTypeOfSymbol, + getEffectiveTypeOfSymbol, + getEffectiveTypeOfSymbolForUsage, + getInferredTypeOfDeclaration, + getDeclaredTypeForExpression, + getDeclaredReturnType, + getInferredReturnType, + getBestOverloadForArgs, + getBuiltInType, + getTypeOfMember, + getTypeOfBoundMember, + getBoundMagicMethod, + getTypeOfMagicMethodCall, + bindFunctionToClassOrObject, + getCallbackProtocolType, + getCallSignatureInfo, + getAbstractSymbols, + narrowConstrainedTypeVar, + isTypeComparable, + assignType, + validateOverrideMethod, + validateCallArgs, + validateTypeArg, + assignTypeToExpression, + assignClassToSelf, + getTypedDictClassType, + getTupleClassType, + getDictClassType, + getStrClassType, + getObjectType, + getNoneType, + getUnionClassType, + getTypeClassType, + getBuiltInObject, + getTypingType, + getTypeCheckerInternalsType, + assignTypeArgs, + reportMissingTypeArgs, + inferReturnTypeIfNecessary, + inferVarianceForClass, + isFinalVariable, + isFinalVariableDeclaration, + isExplicitTypeAliasDeclaration, + addInformation, + addUnreachableCode, + addDeprecated, + addDiagnostic, + addDiagnosticForTextRange, + printType, + printSrcDestTypes, + printFunctionParts, + getTypeCacheEntryCount, + disposeEvaluator, + useSpeculativeMode, + isSpeculativeModeInUse, + setTypeResultForNode, + checkForCancellation, + printControlFlowGraph, + }; + + const codeFlowEngine = getCodeFlowEngine(evaluatorInterface, speculativeTypeTracker); + + return evaluatorInterface; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/typeEvaluatorTypes.ts 
b/python-parser/packages/pyright-internal/src/analyzer/typeEvaluatorTypes.ts new file mode 100644 index 00000000..4cd5751f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/typeEvaluatorTypes.ts @@ -0,0 +1,894 @@ +/* + * typeEvaluatorTypes.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Abstract interface and other helper types for type evaluator module. + */ + +import { CancellationToken } from 'vscode-languageserver-protocol'; + +import { ConsoleInterface } from '../common/console'; +import { Diagnostic, DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { TextRange } from '../common/textRange'; +import { + ArgCategory, + ArgumentNode, + CallNode, + CaseNode, + ClassNode, + DecoratorNode, + ExpressionNode, + FunctionNode, + MatchNode, + NameNode, + ParamCategory, + ParameterNode, + ParseNode, + StringNode, +} from '../parser/parseNodes'; +import { AnalyzerFileInfo } from './analyzerFileInfo'; +import { CodeFlowReferenceExpressionNode, FlowNode } from './codeFlowTypes'; +import { ConstraintTracker } from './constraintTracker'; +import { Declaration } from './declaration'; +import { ResolvedAliasInfo } from './declarationUtils'; +import { SymbolWithScope } from './scope'; +import { Symbol, SynthesizedTypeInfo } from './symbol'; +import { SpeculativeModeOptions } from './typeCacheUtils'; +import { PrintTypeFlags } from './typePrinter'; +import { + AnyType, + ClassType, + FunctionParam, + FunctionType, + OverloadedType, + TupleTypeArg, + Type, + TypeCondition, + TypeVarType, + UnknownType, + Variance, +} from './types'; +import { ApplyTypeVarOptions, ClassMember, InferenceContext, MemberAccessFlags } from './typeUtils'; + +// Maximum number of unioned subtypes for an inferred type (e.g. +// a list) before the type is considered an "Any". 
+export const maxSubtypesForInferredType = 64; + +// In certain loops, it's possible to construct arbitrarily-deep containers +// (tuples, lists, sets, or dicts) which can lead to infinite type analysis. +// This limits the depth. +export const maxInferredContainerDepth = 8; + +export const enum EvalFlags { + None = 0, + + // Interpret an ellipsis type annotation to mean "Any". + ConvertEllipsisToAny = 1 << 0, + + // Normally a generic named type is specialized with "Any" + // types. This flag indicates that specialization shouldn't take + // place. + NoSpecialize = 1 << 1, + + // Allow forward references. Don't report unbound errors. + ForwardRefs = 1 << 2, + + // Treat string literal as a type. + StrLiteralAsType = 1 << 3, + + // 'Final' is not allowed in this context. + NoFinal = 1 << 4, + + // A ParamSpec isn't allowed in this context. + NoParamSpec = 1 << 5, + + // A TypeVarTuple isn't allowed in this context. + NoTypeVarTuple = 1 << 6, + + // Expression is expected to be an instantiable type rather + // than an instance (object) + InstantiableType = 1 << 7, + + // A type expression imposes grammatical and semantic limits on an + // expression. If this flag is set, illegal type expressions are + // flagged as errors. + TypeExpression = 1 << 8, + + // Suppress the reportMissingTypeArgument diagnostic in this context. + AllowMissingTypeArgs = 1 << 9, + + // The Generic class type is allowed in this context. It is + // normally not allowed if ExpectingType is set. + AllowGeneric = 1 << 10, + + // TypeVars within this expression must not refer to type vars + // used in an outer scope. + NoTypeVarWithScopeId = 1 << 11, + + // TypeVars within this expression do not need to refer to type vars + // used in an outer scope. + AllowTypeVarWithoutScopeId = 1 << 12, + + // TypeVars within this expression that are otherwise not + // associated with an outer scope should be associated with + // the containing function's scope. 
+ TypeVarGetsCurScope = 1 << 13, + + // When a new class-scoped TypeVar is used within a class + // declaration, make sure that it is not used to parameterize + // a base class whose TypeVar variance is inconsistent. + EnforceVarianceConsistency = 1 << 14, + + // Used for PEP 526-style variable type annotations. + VarTypeAnnotation = 1 << 15, + + // An ellipsis is allowed even if TypeExpression is set. + AllowEllipsis = 1 << 16, + + // 'ClassVar' is not allowed in this context. + NoClassVar = 1 << 17, + + // 'Generic' cannot be used without type arguments in this context. + NoNakedGeneric = 1 << 18, + + // The node is not parsed by the interpreter because it is within + // a comment or a string literal. + NotParsed = 1 << 19, + + // Required and NotRequired are allowed in this context. + AllowRequired = 1 << 20, + + // ReadOnly is allowed in this context. + AllowReadOnly = 1 << 21, + + // Allow Unpack annotation for a tuple or TypeVarTuple. + AllowUnpackedTuple = 1 << 22, + + // Allow Unpack annotation for TypedDict. + AllowUnpackedTypedDict = 1 << 23, + + // Even though an expression is enclosed in a string literal, + // the interpreter (within a source file, not a stub) still + // parses the expression and generates parse errors. + ParsesStringLiteral = 1 << 24, + + // Do not convert special forms to their corresponding runtime + // objects even when expecting a type expression. + NoConvertSpecialForm = 1 << 25, + + // Certain special forms (Protocol, TypedDict, etc.) are not allowed + // in this context. + NoNonTypeSpecialForms = 1 << 26, + + // Allow use of the Concatenate special form. + AllowConcatenate = 1 << 27, + + // Do not infer literal types within a tuple (used for tuples nested within + // other container classes). + StripTupleLiterals = 1 << 28, + + // Interpret the expression using the specialized behaviors associated + // with the second argument to isinstance and issubclass calls. 
+ IsinstanceArg = 1 << 29, + + // Interpret the expression using the behaviors associated with the first + // argument to a TypeForm call. + TypeFormArg = 1 << 30, + + // Enforce that any type variables referenced in this type are associated + // with the enclosing class or an outer scope. + EnforceClassTypeVarScope = 1 << 31, + + // Defaults used for evaluating the LHS of a call expression. + CallBaseDefaults = NoSpecialize, + + // Defaults used for evaluating the LHS of a member access expression. + IndexBaseDefaults = NoSpecialize, + + // Defaults used for evaluating the LHS of a member access expression. + MemberAccessBaseDefaults = NoSpecialize, + + // Defaults used for evaluating the second argument of an 'isinstance' + // or 'issubclass' call. + IsInstanceArgDefaults = AllowMissingTypeArgs | + StrLiteralAsType | + NoParamSpec | + NoTypeVarTuple | + NoFinal | + NoSpecialize | + IsinstanceArg, +} + +// Types whose definitions are prefetched and cached by the type evaluator +export interface PrefetchedTypes { + noneTypeClass: Type; + objectClass: Type; + typeClass: Type; + unionTypeClass: Type; + awaitableClass: Type; + functionClass: Type; + methodClass: Type; + tupleClass: Type; + boolClass: Type; + intClass: Type; + strClass: Type; + dictClass: Type; + moduleTypeClass: Type; + typedDictClass: Type; + typedDictPrivateClass: Type; + supportsKeysAndGetItemClass: Type; + mappingClass: Type; + templateClass: Type; +} + +export interface TypeResult { + type: T; + + // Is the type incomplete (i.e. not fully evaluated) because + // some of the paths involve cyclical dependencies? + isIncomplete?: boolean | undefined; + + // Used for the output of "super" calls used on the LHS of + // a member access. Normally the type of the LHS is the same + // as the class or object used to bind the member, but the + // "super" call can specify a different class or object to + // bind. 
+ bindToSelfType?: ClassType | TypeVarType | undefined; + + unpackedType?: Type | undefined; + typeList?: TypeResultWithNode[] | undefined; + + // Type consistency errors detected when evaluating this type. + typeErrors?: boolean | undefined; + + // For inlined TypedDict definitions. + inlinedTypeDict?: ClassType; + + // Used for getTypeOfBoundMember to indicate that class + // that declares the member. + classType?: ClassType | UnknownType | AnyType; + + // Tuple type arguments allow the shorthand "()" to + // represent an empty tuple (i.e. Tuple[()]). + isEmptyTupleShorthand?: boolean | undefined; + + // Additional diagnostic information that explains why the expression + // type is incompatible with the expected type. + expectedTypeDiagAddendum?: DiagnosticAddendum | undefined; + + // Is member a descriptor object that is asymmetric with respect + // to __get__ and __set__ types? Or is the member accessed through + // a __setattr__ method that is asymmetric with respect to the + // corresponding __getattr__? + isAsymmetricAccessor?: boolean; + + // For member access operations that are 'set', this is the narrowed + // type when considering the declared type of the member. + narrowedTypeForSet?: Type | undefined; + + // Is the type wrapped in a "Required", "NotRequired" or "ReadOnly" class? + isRequired?: boolean; + isNotRequired?: boolean; + isReadOnly?: boolean; + + // If a call expression, which overloads were used to satisfy it? + overloadsUsedForCall?: FunctionType[]; + + // For member access expressions, deprecation messages related to + // magic methods invoked via the member access + memberAccessDeprecationInfo?: MemberAccessDeprecationInfo; + + // Deprecation messages related to magic methods. 
+ magicMethodDeprecationInfo?: MagicMethodDeprecationInfo; +} + +export interface TypeResultWithNode extends TypeResult { + node: ParseNode; +} + +// Describes deprecation details about a symbol accessed via a member +// access expression, perhaps through a property or descriptor accessor +// method. +export interface MemberAccessDeprecationInfo { + accessType: 'property' | 'descriptor'; + accessMethod: 'get' | 'set' | 'del'; + deprecatedMessage: string; +} + +export interface MagicMethodDeprecationInfo { + className: string; + methodName: string; + deprecatedMessage: string; +} + +export interface EvaluatorUsage { + method: 'get' | 'set' | 'del'; + + // Used only for set methods + setType?: TypeResult | undefined; + setErrorNode?: ExpressionNode | undefined; + setExpectedTypeDiag?: DiagnosticAddendum | undefined; +} + +export interface ClassTypeResult { + classType: ClassType; + decoratedType: Type; +} + +export interface FunctionTypeResult { + functionType: FunctionType; + decoratedType: Type; +} + +export interface CallSignature { + type: FunctionType; + activeParam?: FunctionParam | undefined; +} + +export interface CallSignatureInfo { + signatures: CallSignature[]; + callNode: CallNode; +} + +// Used to determine whether an abstract method has been +// overridden by a non-abstract method. 
+export interface AbstractSymbol { + symbol: Symbol; + symbolName: string; + classType: Type; + hasImplementation: boolean; +} + +export interface ArgBase { + argCategory: ArgCategory; + node?: ArgumentNode | undefined; + name?: NameNode | undefined; + typeResult?: TypeResult | undefined; + valueExpression?: ExpressionNode | undefined; + active?: boolean | undefined; + enforceIterable?: boolean | undefined; +} + +export interface ArgWithType extends ArgBase { + typeResult: TypeResult; +} + +export interface ArgWithExpression extends ArgBase { + valueExpression: ExpressionNode; +} + +export type Arg = ArgWithType | ArgWithExpression; + +export interface EffectiveTypeResult { + type: Type; + isIncomplete: boolean; + includesVariableDecl?: boolean; + includesIllegalTypeAliasDecl?: boolean; + includesSpeculativeResult?: boolean; + isRecursiveDefinition?: boolean; + evaluationAttempts?: number; +} + +export interface ValidateArgTypeParams { + paramCategory: ParamCategory; + paramType: Type; + requiresTypeVarMatching: boolean; + argument: Arg; + isDefaultArg?: boolean; + argType?: Type | undefined; + errorNode: ExpressionNode; + paramName?: string | undefined; + isParamNameSynthesized?: boolean; + mapsToVarArgList?: boolean | undefined; + isinstanceParam?: boolean; +} + +export interface ExpectedTypeOptions { + allowFinal?: boolean; + allowRequired?: boolean; + allowReadOnly?: boolean; + allowUnpackedTuple?: boolean; + allowUnpackedTypedDict?: boolean; + allowParamSpec?: boolean; + allowClassVar?: boolean; + varTypeAnnotation?: boolean; + typeVarGetsCurScope?: boolean; + allowTypeVarsWithoutScopeId?: boolean; + enforceClassTypeVarScope?: boolean; + parsesStringLiteral?: boolean; + notParsed?: boolean; + noNonTypeSpecialForms?: boolean; + typeFormArg?: boolean; + forwardRefs?: boolean; + typeExpression?: boolean; + runtimeTypeExpression?: boolean; + convertEllipsisToAny?: boolean; + allowEllipsis?: boolean; +} + +export interface ExpectedTypeResult { + type: Type; + node: 
ParseNode; +} + +export interface FunctionResult { + returnType: Type; + argumentErrors: boolean; + isTypeIncomplete: boolean; +} + +export interface ArgResult { + isCompatible: boolean; + argType: Type; + isTypeIncomplete?: boolean | undefined; + condition?: TypeCondition[]; + skippedBareTypeVarExpectedType?: boolean; +} + +export interface CallResult { + // Specialized return type of call + returnType?: Type | undefined; + + // Is return type incomplete? + isTypeIncomplete?: boolean | undefined; + + // Were any errors discovered when evaluating argument types? + argumentErrors?: boolean; + + // Did one or more arguments evaluated to Any or Unknown? + anyOrUnknownArg?: UnknownType | AnyType; + + // Was one or more of the arguments an unpacked iterable or + // mapping whose length is unknown? + unpackedArgOfUnknownLength?: boolean; + + // The parameter associated with the "active" argument (used + // for signature help provider) + activeParam?: FunctionParam | undefined; + + // If the call is to an __init__ with an annotated self parameter, + // this field indicates the specialized type of that self type; this + // is used for overloaded constructors where the arguments to the + // constructor influence the specialized type of the constructed object. + specializedInitSelfType?: Type | undefined; + + // The overload or overloads used to satisfy the call. There can + // be multiple overloads in the case where the call type is a union + // or we have used union expansion for arguments. + overloadsUsedForCall?: FunctionType[]; + + // Types of individual arguments. + argResults?: ArgResult[]; +} + +export interface ClassMemberLookup { + symbol: Symbol | undefined; + + // Type of symbol. + type: Type; + isTypeIncomplete: boolean; + + // True if binding or descriptor access failed. + isDescriptorError: boolean; + + // True if class member, false otherwise. + isClassMember: boolean; + + // The class that declares the accessed member. 
+ classType?: ClassType | UnknownType | AnyType; + + // True if the member is explicitly declared as ClassVar + // within a Protocol. + isClassVar: boolean; + + // Is member a descriptor object that is asymmetric with respect + // to __get__ and __set__ types? + isAsymmetricAccessor: boolean; + + // For member access operations that are 'set', this is the narrowed + // type when considering the declared type of the member. + narrowedTypeForSet?: Type; + + // Deprecation messages related to magic methods invoked via the member access. + memberAccessDeprecationInfo?: MemberAccessDeprecationInfo; +} + +export interface SolveConstraintsOptions { + useLowerBoundOnly?: boolean; +} + +export enum Reachability { + Reachable, + + // The node is unreachable in the code flow graph and + // should be reported as an error. This includes situations + // like code after return statements. + UnreachableStructural, + + // The node is unreachable in the code flow graph due to + // a statically-evaluated condition such as a TYPE_CHECKER + // or Python version check. + UnreachableStaticCondition, + + // The node is unreachable according to code flow analysis. + // The type of one or more expressions has been narrowed to + // never. 
+ UnreachableByAnalysis, +} + +export interface PrintTypeOptions { + expandTypeAlias?: boolean; + enforcePythonSyntax?: boolean; + useFullyQualifiedNames?: boolean; + useTypingUnpack?: boolean; + printUnknownWithAny?: boolean; + printTypeVarVariance?: boolean; + omitTypeArgsIfUnknown?: boolean; +} + +export interface DeclaredSymbolTypeInfo { + type: Type | undefined; + isTypeAlias?: boolean; + exceedsMaxDecls?: boolean; +} + +export interface ResolveAliasOptions { + allowExternallyHiddenAccess?: boolean; + skipFileNeededCheck?: boolean; +} + +export interface ValidateTypeArgsOptions { + allowEmptyTuple?: boolean; + allowTypeVarTuple?: boolean; + allowParamSpec?: boolean; + allowTypeArgList?: boolean; + allowUnpackedTuples?: boolean; +} + +export interface MapSubtypesOptions { + conditionFilter?: TypeCondition[] | undefined; + sortSubtypes?: boolean; + expandCallback?: (type: Type) => Type; +} + +export interface CallSiteEvaluationInfo { + errorNode: ExpressionNode; + args: ValidateArgTypeParams[]; +} + +export interface SymbolDeclInfo { + decls: Declaration[]; + synthesizedTypes: SynthesizedTypeInfo[]; +} + +export const enum AssignTypeFlags { + Default = 0, + + // Require invariance with respect to class matching? Normally + // subclasses are allowed. + Invariant = 1 << 0, + + // The caller has swapped the source and dest types because + // the types are contravariant. Perform type var matching + // on dest type vars rather than source type var. + Contravariant = 1 << 1, + + // We're comparing type compatibility of two distinct recursive types. + // This has the potential of recursing infinitely. This flag allows us + // to detect the recursion after the first level of checking. + SkipRecursiveTypeCheck = 1 << 2, + + // During TypeVar solving for a function call, this flag is set if + // this is the first of multiple passes. It adjusts certain heuristics + // for constraint solving. 
+ ArgAssignmentFirstPass = 1 << 3, + + // If the dest is not Any but the src is Any, treat it + // as incompatible. Also, treat all source TypeVars as their + // concrete counterparts. This option is used for validating + // whether overload signatures overlap. + OverloadOverlap = 1 << 4, + + // When used in conjunction with OverloadOverlapCheck, look + // for partial overlaps. For example, `int | list` overlaps + // partially with `int | str`. + PartialOverloadOverlap = 1 << 5, + + // For function types, skip the return type check. + SkipReturnTypeCheck = 1 << 6, + + // In most cases, literals are stripped when assigning to a + // type variable. This overrides the standard behavior. + RetainLiteralsForTypeVar = 1 << 8, + + // When validating the type of a self or cls parameter, allow + // a type mismatch. This is used in overload consistency validation + // because overloads can provide explicit type annotations for self + // or cls. + SkipSelfClsTypeCheck = 1 << 9, + + // We're initially populating the constraints with an expected type, + // so TypeVars should match the specified type exactly rather than + // employing narrowing or widening. The variance context determines + // whether the upper bound, lower bound, or both are established. + PopulateExpectedType = 1 << 11, + + // Used with PopulatingExpectedType, this flag indicates that a TypeVar + // constraint that is Unknown should be ignored. + SkipPopulateUnknownExpectedType = 1 << 12, + + // Normally, when a class type is assigned to a TypeVar and that class + // hasn't previously been specialized, it will be specialized with + // default type arguments (typically "Unknown"). This flag skips + // this step. + AllowUnspecifiedTypeArgs = 1 << 13, + + // Normally all special form classes are incompatible with type[T], + // but a few of them are allowed in the context of an isinstance + // or issubclass call. 
+ AllowIsinstanceSpecialForms = 1 << 14, + + // When comparing two methods, skip the type check for the "self" or "cls" + // parameters. This is used for variance inference and validation. + SkipSelfClsParamCheck = 1 << 15, + + // Normally a protocol class object cannot be used as a source type. This + // option overrides this behavior. + AllowProtocolClassSource = 1 << 16, + + // When assigning callables, should a kwargs with an unpacked TypedDict + // disallow additional named arguments if it does not have extraItems? + DisallowExtraKwargsForTd = 1 << 17, +} + +export interface TypeEvaluator { + runWithCancellationToken(token: CancellationToken, callback: () => T): T; + runWithCancellationToken(token: CancellationToken, callback: () => Promise): Promise; + + getType: (node: ExpressionNode) => Type | undefined; + getTypeResult: (node: ExpressionNode) => TypeResult | undefined; + getTypeResultForDecorator: (node: DecoratorNode) => TypeResult | undefined; + getCachedType: (node: ExpressionNode) => Type | undefined; + getTypeOfExpression: (node: ExpressionNode, flags?: EvalFlags, context?: InferenceContext) => TypeResult; + getTypeOfAnnotation: (node: ExpressionNode, options?: ExpectedTypeOptions) => Type; + getTypeOfClass: (node: ClassNode) => ClassTypeResult | undefined; + createSubclass: (errorNode: ExpressionNode, type1: ClassType, type2: ClassType) => ClassType; + getTypeOfFunction: (node: FunctionNode) => FunctionTypeResult | undefined; + getTypeOfExpressionExpectingType: (node: ExpressionNode, options?: ExpectedTypeOptions) => TypeResult; + evaluateTypeForSubnode: (subnode: ParseNode, callback: () => void) => TypeResult | undefined; + evaluateTypesForStatement: (node: ParseNode) => void; + evaluateTypesForMatchStatement: (node: MatchNode) => void; + evaluateTypesForCaseStatement: (node: CaseNode) => void; + evaluateTypeOfParam: (node: ParameterNode) => void; + + canBeTruthy: (type: Type) => boolean; + canBeFalsy: (type: Type) => boolean; + stripLiteralValue: 
(type: Type) => Type; + removeTruthinessFromType: (type: Type) => Type; + removeFalsinessFromType: (type: Type) => Type; + stripTypeGuard: (type: Type) => Type; + + solveAndApplyConstraints: ( + type: Type, + constraints: ConstraintTracker, + applyOptions?: ApplyTypeVarOptions, + solveOptions?: SolveConstraintsOptions + ) => Type; + + getExpectedType: (node: ExpressionNode) => ExpectedTypeResult | undefined; + verifyRaiseExceptionType: (node: ExpressionNode, allowNone: boolean) => void; + verifyDeleteExpression: (node: ExpressionNode) => void; + validateOverloadedArgTypes: ( + errorNode: ExpressionNode, + argList: Arg[], + typeResult: TypeResult, + constraints: ConstraintTracker | undefined, + skipUnknownArgCheck: boolean, + inferenceContext: InferenceContext | undefined + ) => CallResult; + validateInitSubclassArgs: (node: ClassNode, classType: ClassType) => void; + + isNodeReachable: (node: ParseNode, sourceNode?: ParseNode | undefined) => boolean; + isAfterNodeReachable: (node: ParseNode) => boolean; + getNodeReachability: (node: ParseNode, sourceNode?: ParseNode | undefined) => Reachability; + getAfterNodeReachability: (node: ParseNode) => Reachability; + + isAsymmetricAccessorAssignment: (node: ParseNode) => boolean; + suppressDiagnostics: (node: ParseNode, callback: () => void) => void; + isSpecialFormClass: (classType: ClassType, flags: AssignTypeFlags) => boolean; + + getDeclInfoForStringNode: (node: StringNode) => SymbolDeclInfo | undefined; + getDeclInfoForNameNode: (node: NameNode, skipUnreachableCode?: boolean) => SymbolDeclInfo | undefined; + getTypeForDeclaration: (declaration: Declaration) => DeclaredSymbolTypeInfo; + resolveAliasDeclaration: ( + declaration: Declaration, + resolveLocalNames: boolean, + options?: ResolveAliasOptions + ) => Declaration | undefined; + resolveAliasDeclarationWithInfo: ( + declaration: Declaration, + resolveLocalNames: boolean, + options?: ResolveAliasOptions + ) => ResolvedAliasInfo | undefined; + getTypeOfIterable: ( + 
typeResult: TypeResult, + isAsync: boolean, + errorNode: ExpressionNode, + emitNotIterableError?: boolean + ) => TypeResult | undefined; + getTypeOfIterator: ( + typeResult: TypeResult, + isAsync: boolean, + errorNode: ExpressionNode, + emitNotIterableError?: boolean + ) => TypeResult | undefined; + getGetterTypeFromProperty: (propertyClass: ClassType) => Type | undefined; + getTypeOfArg: (arg: Arg, inferenceContext: InferenceContext | undefined) => TypeResult; + convertNodeToArg: (node: ArgumentNode) => ArgWithExpression; + buildTupleTypesList: ( + entryTypeResults: TypeResult[], + stripLiterals: boolean, + convertModules: boolean + ) => TupleTypeArg[]; + markNamesAccessed: (node: ParseNode, names: string[]) => void; + expandPromotionTypes: (node: ParseNode, type: Type) => Type; + makeTopLevelTypeVarsConcrete: (type: Type, makeParamSpecsConcrete?: boolean) => Type; + mapSubtypesExpandTypeVars: ( + type: Type, + options: MapSubtypesOptions | undefined, + callback: (expandedSubtype: Type, unexpandedSubtype: Type) => Type | undefined + ) => Type; + isTypeSubsumedByOtherType: (type: Type, otherType: Type, allowAnyToSubsume: boolean) => boolean; + lookUpSymbolRecursive: (node: ParseNode, name: string, honorCodeFlow: boolean) => SymbolWithScope | undefined; + getDeclaredTypeOfSymbol: (symbol: Symbol) => DeclaredSymbolTypeInfo; + getEffectiveTypeOfSymbol: (symbol: Symbol) => Type; + getEffectiveTypeOfSymbolForUsage: ( + symbol: Symbol, + usageNode?: NameNode, + useLastDecl?: boolean + ) => EffectiveTypeResult; + getInferredTypeOfDeclaration: (symbol: Symbol, decl: Declaration) => Type | undefined; + getDeclaredTypeForExpression: (expression: ExpressionNode, usage?: EvaluatorUsage) => Type | undefined; + getDeclaredReturnType: (node: FunctionNode) => Type | undefined; + getInferredReturnType: (type: FunctionType, callSiteInfo?: CallSiteEvaluationInfo) => Type; + getBestOverloadForArgs: ( + errorNode: ExpressionNode, + typeResult: TypeResult, + argList: Arg[] + ) => 
FunctionType | undefined; + getBuiltInType: (node: ParseNode, name: string) => Type; + getTypeOfMember: (member: ClassMember) => Type; + getTypeOfBoundMember( + errorNode: ExpressionNode, + objectType: ClassType, + memberName: string, + usage?: EvaluatorUsage, + diag?: DiagnosticAddendum | undefined, + flags?: MemberAccessFlags, + selfType?: ClassType | TypeVarType + ): TypeResult | undefined; + getBoundMagicMethod: ( + classType: ClassType, + memberName: string, + selfType?: ClassType | TypeVarType | undefined, + errorNode?: ExpressionNode | undefined, + diag?: DiagnosticAddendum, + recursionCount?: number + ) => FunctionType | OverloadedType | undefined; + getTypeOfMagicMethodCall: ( + objType: Type, + methodName: string, + argList: TypeResult[], + errorNode: ExpressionNode, + inferenceContext: InferenceContext | undefined + ) => TypeResult | undefined; + bindFunctionToClassOrObject: ( + baseType: ClassType | undefined, + memberType: FunctionType | OverloadedType, + memberClass?: ClassType, + treatConstructorAsClassMethod?: boolean, + selfType?: ClassType | TypeVarType, + diag?: DiagnosticAddendum, + recursionCount?: number + ) => FunctionType | OverloadedType | undefined; + getCallbackProtocolType: (objType: ClassType, recursionCount?: number) => FunctionType | OverloadedType | undefined; + getCallSignatureInfo: (node: CallNode, activeIndex: number, activeOrFake: boolean) => CallSignatureInfo | undefined; + getAbstractSymbols: (classType: ClassType) => AbstractSymbol[]; + narrowConstrainedTypeVar: (node: ParseNode, typeVar: TypeVarType) => Type | undefined; + isTypeComparable: (leftType: Type, rightType: Type, assumeIsOperator?: boolean) => boolean; + + assignType: ( + destType: Type, + srcType: Type, + diag?: DiagnosticAddendum, + constraints?: ConstraintTracker, + flags?: AssignTypeFlags, + recursionCount?: number + ) => boolean; + validateOverrideMethod: ( + baseMethod: Type, + overrideMethod: FunctionType | OverloadedType, + baseClass: ClassType | undefined, 
+ diag: DiagnosticAddendum, + enforceParamNames?: boolean + ) => boolean; + validateCallArgs: ( + errorNode: ExpressionNode, + argList: Arg[], + callTypeResult: TypeResult, + constraints: ConstraintTracker | undefined, + skipUnknownArgCheck: boolean | undefined, + inferenceContext: InferenceContext | undefined + ) => CallResult; + validateTypeArg: (argResult: TypeResultWithNode, options?: ValidateTypeArgsOptions) => boolean; + assignTypeToExpression: (target: ExpressionNode, typeResult: TypeResult, srcExpr: ExpressionNode) => void; + assignClassToSelf: (destType: ClassType, srcType: ClassType, assumedVariance: Variance) => boolean; + getBuiltInObject: (node: ParseNode, name: string, typeArgs?: Type[]) => Type; + getTypedDictClassType: () => ClassType | undefined; + getTupleClassType: () => ClassType | undefined; + getDictClassType: () => ClassType | undefined; + getStrClassType: () => ClassType | undefined; + getObjectType: () => Type; + getNoneType: () => Type; + getUnionClassType(): Type; + getTypeClassType(): ClassType | undefined; + getTypingType: (node: ParseNode, symbolName: string) => Type | undefined; + getTypeCheckerInternalsType: (node: ParseNode, symbolName: string) => Type | undefined; + inferReturnTypeIfNecessary: (type: Type) => void; + inferVarianceForClass: (type: ClassType) => void; + assignTypeArgs: ( + destType: ClassType, + srcType: ClassType, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + flags: AssignTypeFlags, + recursionCount: number + ) => boolean; + reportMissingTypeArgs: (node: ExpressionNode, type: Type, flags: EvalFlags) => Type; + + isFinalVariable: (symbol: Symbol) => boolean; + isFinalVariableDeclaration: (decl: Declaration) => boolean; + isExplicitTypeAliasDeclaration: (decl: Declaration) => boolean; + + addInformation: (message: string, node: ParseNode, range?: TextRange) => Diagnostic | undefined; + addUnreachableCode: (node: ParseNode, reachability: Reachability, textRange: TextRange) => 
void; + addDeprecated: (message: string, node: ParseNode) => void; + + addDiagnostic: ( + rule: DiagnosticRule, + message: string, + node: ParseNode, + range?: TextRange + ) => Diagnostic | undefined; + addDiagnosticForTextRange: ( + fileInfo: AnalyzerFileInfo, + rule: DiagnosticRule, + message: string, + range: TextRange + ) => Diagnostic | undefined; + + printType: (type: Type, options?: PrintTypeOptions) => string; + printSrcDestTypes: (srcType: Type, destType: Type) => { sourceType: string; destType: string }; + printFunctionParts: (type: FunctionType, extraFlags?: PrintTypeFlags) => [string[], string]; + + getTypeCacheEntryCount: () => number; + disposeEvaluator: () => void; + useSpeculativeMode: ( + speculativeNode: ParseNode | undefined, + callback: () => T, + options?: SpeculativeModeOptions + ) => T; + isSpeculativeModeInUse: (node: ParseNode | undefined) => boolean; + setTypeResultForNode: (node: ParseNode, typeResult: TypeResult, flags?: EvalFlags) => void; + + checkForCancellation: () => void; + printControlFlowGraph: ( + flowNode: FlowNode, + reference: CodeFlowReferenceExpressionNode | undefined, + callName: string, + logger: ConsoleInterface + ) => void; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/typeEvaluatorWithTracker.ts b/python-parser/packages/pyright-internal/src/analyzer/typeEvaluatorWithTracker.ts new file mode 100644 index 00000000..6f6d2d73 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/typeEvaluatorWithTracker.ts @@ -0,0 +1,71 @@ +/* + * typeEvaluatorWithTracker.ts + * + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Wraps type evaluator to track performance of internal calls. 
+ */ + +import { LogLevel } from '../common/console'; +import { isDebugMode } from '../common/core'; +import { LogTracker } from '../common/logTracker'; +import { timingStats } from '../common/timing'; +import { ImportLookup } from './analyzerFileInfo'; +import { TracePrinter } from './tracePrinter'; +import { createTypeEvaluator, EvaluatorOptions } from './typeEvaluator'; + +// We don't want to track calls from the type evaluator itself, but only entry points. +export function createTypeEvaluatorWithTracker( + importLookup: ImportLookup, + evaluatorOptions: EvaluatorOptions, + logger: LogTracker, + printer?: TracePrinter +) { + function wrapWithLogger any>(func: T): (...args: Parameters) => ReturnType { + // Wrap the function only if told to do so and the log level is high + // enough for it to log something. + if (evaluatorOptions.logCalls && logger.logLevel === LogLevel.Log) { + return (...args: Parameters): ReturnType => { + return logger.log( + func.name, + (s) => { + if (func.name === 'importLookup' && args.length > 0) { + // This is actually a filename, so special case it. + s.add(printer?.printFileOrModuleName(args[0])); + } else { + // Print all parameters. + args.forEach((a) => { + s.add(printer?.print(a)); + }); + } + return timingStats.typeEvaluationTime.timeOperation(func, ...args); + }, + evaluatorOptions.minimumLoggingThreshold, + /* logParsingPerf */ true + ); + }; + } else if (!isDebugMode()) { + return timingStats.typeEvaluationTime.timeOperation.bind(timingStats.typeEvaluationTime, func); + } else { + return func; + } + } + + // Wrap all functions with either a logger or a timer. + importLookup = wrapWithLogger(importLookup); + const evaluator = createTypeEvaluator(importLookup, evaluatorOptions, wrapWithLogger); + + // Track these apis external usages when logging is on. otherwise, it should be noop. 
+ const keys = Object.keys(evaluator); + keys.forEach((k) => { + const entry = (evaluator as any)[k]; + if (typeof entry === 'function' && entry.name) { + // Only wrap functions that aren't wrapped already. + (evaluator as any)[k] = wrapWithLogger(entry); + } + }); + + return evaluator; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/typeGuards.ts b/python-parser/packages/pyright-internal/src/analyzer/typeGuards.ts new file mode 100644 index 00000000..3393d009 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/typeGuards.ts @@ -0,0 +1,2784 @@ +/* + * typeGuards.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides logic for narrowing types based on conditional + * expressions. The logic handles both positive ("if") and + * negative ("else") narrowing cases. + */ + +import { assert } from '../common/debug'; +import { + ArgCategory, + AssignmentExpressionNode, + ExpressionNode, + isExpressionNode, + NameNode, + ParamCategory, + ParseNode, + ParseNodeType, +} from '../parser/parseNodes'; +import { KeywordType, OperatorType } from '../parser/tokenizerTypes'; +import { getFileInfo } from './analyzerNodeInfo'; +import { addConstraintsForExpectedType } from './constraintSolver'; +import { ConstraintTracker } from './constraintTracker'; +import { Declaration, DeclarationType } from './declaration'; +import { transformTypeForEnumMember } from './enums'; +import * as ParseTreeUtils from './parseTreeUtils'; +import { ScopeType } from './scope'; +import { getScopeForNode, isScopeContainedWithin } from './scopeUtils'; +import { Symbol, SymbolFlags } from './symbol'; +import { getTypedDictMembersForClass } from './typedDicts'; +import { AssignTypeFlags, EvalFlags, TypeEvaluator } from './typeEvaluatorTypes'; +import { + AnyType, + ClassType, + ClassTypeFlags, + combineTypes, + EnumLiteral, + FunctionParam, + FunctionParamFlags, + FunctionType, + FunctionTypeFlags, + 
isAnyOrUnknown, + isClass, + isClassInstance, + isFunction, + isFunctionOrOverloaded, + isInstantiableClass, + isModule, + isNever, + isOverloaded, + isParamSpec, + isTypeSame, + isTypeVar, + isUnpackedTypeVarTuple, + maxTypeRecursionCount, + OverloadedType, + TupleTypeArg, + Type, + TypeBase, + TypeCondition, + TypedDictEntry, + TypeVarType, + UnknownType, +} from './types'; +import { + addConditionToType, + ClassMember, + computeMroLinearization, + convertToInstance, + convertToInstantiable, + derivesFromAnyOrUnknown, + doForEachSubtype, + getSpecializedTupleType, + getTypeCondition, + getTypeVarScopeIds, + getUnknownTypeForCallable, + isInstantiableMetaclass, + isLiteralLikeType, + isLiteralType, + isLiteralTypeOrUnion, + isMaybeDescriptorInstance, + isMetaclassInstance, + isNoneInstance, + isNoneTypeClass, + isProperty, + isSentinelLiteral, + isTupleClass, + isTupleGradualForm, + isUnboundedTupleClass, + lookUpClassMember, + lookUpObjectMember, + makeTypeVarsFree, + mapSubtypes, + MemberAccessFlags, + specializeTupleClass, + specializeWithUnknownTypeArgs, + stripTypeForm, + transformPossibleRecursiveTypeAlias, +} from './typeUtils'; + +export interface TypeNarrowingResult { + type: Type; + isIncomplete: boolean; +} + +export type TypeNarrowingCallback = (type: Type) => TypeNarrowingResult | undefined; + +// Given a reference expression and a test expression, returns a callback that +// can be used to narrow the type described by the reference expression. +// If the specified flow node is not associated with the test expression, +// it returns undefined. 
+export function getTypeNarrowingCallback( + evaluator: TypeEvaluator, + reference: ExpressionNode, + testExpression: ExpressionNode, + isPositiveTest: boolean, + recursionCount = 0 +): TypeNarrowingCallback | undefined { + if (recursionCount > maxTypeRecursionCount) { + return undefined; + } + + recursionCount++; + + if (testExpression.nodeType === ParseNodeType.AssignmentExpression) { + return getTypeNarrowingCallbackForAssignmentExpression( + evaluator, + reference, + testExpression, + isPositiveTest, + recursionCount + ); + } + + if (testExpression.nodeType === ParseNodeType.BinaryOperation) { + const isOrIsNotOperator = + testExpression.d.operator === OperatorType.Is || testExpression.d.operator === OperatorType.IsNot; + const equalsOrNotEqualsOperator = + testExpression.d.operator === OperatorType.Equals || testExpression.d.operator === OperatorType.NotEquals; + const comparisonOperator = + equalsOrNotEqualsOperator || + testExpression.d.operator === OperatorType.LessThan || + testExpression.d.operator === OperatorType.LessThanOrEqual || + testExpression.d.operator === OperatorType.GreaterThan || + testExpression.d.operator === OperatorType.GreaterThanOrEqual; + + if (isOrIsNotOperator || equalsOrNotEqualsOperator) { + // Invert the "isPositiveTest" value if this is an "is not" operation. + const adjIsPositiveTest = + testExpression.d.operator === OperatorType.Is || testExpression.d.operator === OperatorType.Equals + ? isPositiveTest + : !isPositiveTest; + + // Look for "X is None", "X is not None", "X == None", and "X != None". + // These are commonly-used patterns used in control flow. + if ( + testExpression.d.rightExpr.nodeType === ParseNodeType.Constant && + testExpression.d.rightExpr.d.constType === KeywordType.None + ) { + // Allow the LHS to be either a simple expression or an assignment + // expression that assigns to a simple name. 
+ let leftExpression = testExpression.d.leftExpr; + if (leftExpression.nodeType === ParseNodeType.AssignmentExpression) { + leftExpression = leftExpression.d.name; + } + + if ( + ParseTreeUtils.isMatchingExpression(reference, leftExpression, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + return (type: Type) => { + return { type: narrowTypeForIsNone(evaluator, type, adjIsPositiveTest), isIncomplete: false }; + }; + } + + if ( + leftExpression.nodeType === ParseNodeType.Index && + ParseTreeUtils.isMatchingExpression(reference, leftExpression.d.leftExpr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) && + leftExpression.d.items.length === 1 && + !leftExpression.d.trailingComma && + leftExpression.d.items[0].d.argCategory === ArgCategory.Simple && + !leftExpression.d.items[0].d.name && + leftExpression.d.items[0].d.valueExpr.nodeType === ParseNodeType.Number && + leftExpression.d.items[0].d.valueExpr.d.isInteger && + !leftExpression.d.items[0].d.valueExpr.d.isImaginary + ) { + const indexValue = leftExpression.d.items[0].d.valueExpr.d.value; + if (typeof indexValue === 'number') { + return (type: Type) => { + return { + type: narrowTupleTypeForIsNone(evaluator, type, adjIsPositiveTest, indexValue), + isIncomplete: false, + }; + }; + } + } + } + + // Look for "X is ...", "X is not ...", "X == ...", and "X != ...". + if (testExpression.d.rightExpr.nodeType === ParseNodeType.Ellipsis) { + // Allow the LHS to be either a simple expression or an assignment + // expression that assigns to a simple name. 
+ let leftExpression = testExpression.d.leftExpr; + if (leftExpression.nodeType === ParseNodeType.AssignmentExpression) { + leftExpression = leftExpression.d.name; + } + + if ( + ParseTreeUtils.isMatchingExpression(reference, leftExpression, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + return (type: Type) => { + return { + type: narrowTypeForIsEllipsis(evaluator, testExpression, type, adjIsPositiveTest), + isIncomplete: false, + }; + }; + } + } + + // Look for "type(X) is Y", "type(X) is not Y", "type(X) == Y" or "type(X) != Y". + if (testExpression.d.leftExpr.nodeType === ParseNodeType.Call) { + if ( + testExpression.d.leftExpr.d.args.length === 1 && + testExpression.d.leftExpr.d.args[0].d.argCategory === ArgCategory.Simple + ) { + const arg0Expr = testExpression.d.leftExpr.d.args[0].d.valueExpr; + if ( + ParseTreeUtils.isMatchingExpression(reference, arg0Expr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + const callType = evaluator.getTypeOfExpression( + testExpression.d.leftExpr.d.leftExpr, + EvalFlags.CallBaseDefaults + ).type; + + if (isInstantiableClass(callType) && ClassType.isBuiltIn(callType, 'type')) { + const rhsResult = evaluator.getTypeOfExpression(testExpression.d.rightExpr); + const classTypes: ClassType[] = []; + let isClassType = true; + + evaluator.mapSubtypesExpandTypeVars( + rhsResult.type, + /* options */ undefined, + (expandedSubtype) => { + if (isInstantiableClass(expandedSubtype)) { + classTypes.push(expandedSubtype); + } else { + isClassType = false; + } + return undefined; + } + ); + + if (isClassType && classTypes.length > 0) { + return (type: Type) => { + return { + type: narrowTypeForTypeIs(evaluator, type, classTypes, adjIsPositiveTest), + isIncomplete: !!rhsResult.isIncomplete, + }; + }; + } + } + } + } + } + + if (isOrIsNotOperator) { + if ( + ParseTreeUtils.isMatchingExpression(reference, testExpression.d.leftExpr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + const 
rightTypeResult = evaluator.getTypeOfExpression(testExpression.d.rightExpr); + const rightType = rightTypeResult.type; + + // Look for "X is Y" or "X is not Y" where Y is a literal. + if (isClassInstance(rightType) && rightType.priv.literalValue !== undefined) { + return (type: Type) => { + return { + type: narrowTypeForLiteralComparison( + evaluator, + type, + rightType, + adjIsPositiveTest, + /* isIsOperator */ true + ), + isIncomplete: !!rightTypeResult.isIncomplete, + }; + }; + } + + // Look for X is or X is not . + if (isInstantiableClass(rightType)) { + return (type: Type) => { + return { + type: narrowTypeForClassComparison(evaluator, type, rightType, adjIsPositiveTest), + isIncomplete: !!rightTypeResult.isIncomplete, + }; + }; + } + } + + // Look for X[] is or X[] is not . + if ( + testExpression.d.leftExpr.nodeType === ParseNodeType.Index && + testExpression.d.leftExpr.d.items.length === 1 && + !testExpression.d.leftExpr.d.trailingComma && + testExpression.d.leftExpr.d.items[0].d.argCategory === ArgCategory.Simple && + ParseTreeUtils.isMatchingExpression(reference, testExpression.d.leftExpr.d.leftExpr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + const indexTypeResult = evaluator.getTypeOfExpression( + testExpression.d.leftExpr.d.items[0].d.valueExpr + ); + const indexType = indexTypeResult.type; + + if (isClassInstance(indexType) && isLiteralType(indexType)) { + if (ClassType.isBuiltIn(indexType, 'str')) { + const rightType = evaluator.getTypeOfExpression(testExpression.d.rightExpr).type; + if (isClassInstance(rightType) && rightType.priv.literalValue !== undefined) { + return (type: Type) => { + return { + type: narrowTypeForDiscriminatedDictEntryComparison( + evaluator, + type, + indexType, + rightType, + adjIsPositiveTest + ), + isIncomplete: !!indexTypeResult.isIncomplete, + }; + }; + } + } else if (ClassType.isBuiltIn(indexType, 'int')) { + const rightTypeResult = evaluator.getTypeOfExpression(testExpression.d.rightExpr); + 
const rightType = rightTypeResult.type; + + if (isClassInstance(rightType) && rightType.priv.literalValue !== undefined) { + let canNarrow = false; + // Narrowing can be applied only for bool or enum literals. + if (ClassType.isBuiltIn(rightType, 'bool')) { + canNarrow = true; + } else if (rightType.priv.literalValue instanceof EnumLiteral) { + canNarrow = true; + } + + if (canNarrow) { + return (type: Type) => { + return { + type: narrowTypeForDiscriminatedTupleComparison( + evaluator, + type, + indexType, + rightType, + adjIsPositiveTest + ), + isIncomplete: !!rightTypeResult.isIncomplete, + }; + }; + } + } + } + } + } + } + + if (equalsOrNotEqualsOperator) { + // Look for X == or X != + const adjIsPositiveTest = + testExpression.d.operator === OperatorType.Equals ? isPositiveTest : !isPositiveTest; + + if ( + ParseTreeUtils.isMatchingExpression(reference, testExpression.d.leftExpr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + const rightTypeResult = evaluator.getTypeOfExpression(testExpression.d.rightExpr); + const rightType = rightTypeResult.type; + + if (isClassInstance(rightType) && rightType.priv.literalValue !== undefined) { + return (type: Type) => { + return { + type: narrowTypeForLiteralComparison( + evaluator, + type, + rightType, + adjIsPositiveTest, + /* isIsOperator */ false + ), + isIncomplete: !!rightTypeResult.isIncomplete, + }; + }; + } + } + + // Look for X[] == or X[] != + if ( + testExpression.d.leftExpr.nodeType === ParseNodeType.Index && + testExpression.d.leftExpr.d.items.length === 1 && + !testExpression.d.leftExpr.d.trailingComma && + testExpression.d.leftExpr.d.items[0].d.argCategory === ArgCategory.Simple && + ParseTreeUtils.isMatchingExpression(reference, testExpression.d.leftExpr.d.leftExpr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + const indexTypeResult = evaluator.getTypeOfExpression( + testExpression.d.leftExpr.d.items[0].d.valueExpr + ); + const indexType = indexTypeResult.type; + + 
if (isClassInstance(indexType) && isLiteralType(indexType)) { + if (ClassType.isBuiltIn(indexType, ['str', 'int'])) { + const rightTypeResult = evaluator.getTypeOfExpression(testExpression.d.rightExpr); + const rightType = rightTypeResult.type; + + if (isLiteralTypeOrUnion(rightType)) { + return (type: Type) => { + let narrowedType: Type; + + if (ClassType.isBuiltIn(indexType, 'str')) { + narrowedType = narrowTypeForDiscriminatedDictEntryComparison( + evaluator, + type, + indexType, + rightType, + adjIsPositiveTest + ); + } else { + narrowedType = narrowTypeForDiscriminatedTupleComparison( + evaluator, + type, + indexType, + rightType, + adjIsPositiveTest + ); + } + + return { + type: narrowedType, + isIncomplete: !!indexTypeResult.isIncomplete || !!rightTypeResult.isIncomplete, + }; + }; + } + } + } + } + } + + // Look for X.Y == or X.Y != + if ( + equalsOrNotEqualsOperator && + testExpression.d.leftExpr.nodeType === ParseNodeType.MemberAccess && + ParseTreeUtils.isMatchingExpression(reference, testExpression.d.leftExpr.d.leftExpr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + const rightTypeResult = evaluator.getTypeOfExpression(testExpression.d.rightExpr); + const rightType = rightTypeResult.type; + const memberName = testExpression.d.leftExpr.d.member; + + if (isClassInstance(rightType)) { + if (rightType.priv.literalValue !== undefined || isNoneInstance(rightType)) { + return (type: Type) => { + return { + type: narrowTypeForDiscriminatedLiteralFieldComparison( + evaluator, + type, + memberName.d.value, + rightType, + adjIsPositiveTest + ), + isIncomplete: !!rightTypeResult.isIncomplete, + }; + }; + } + } + } + + // Look for X.Y is or X.Y is not where is + // an enum or bool literal + if ( + testExpression.d.leftExpr.nodeType === ParseNodeType.MemberAccess && + ParseTreeUtils.isMatchingExpression(reference, testExpression.d.leftExpr.d.leftExpr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + const rightTypeResult = 
evaluator.getTypeOfExpression(testExpression.d.rightExpr); + const rightType = rightTypeResult.type; + const memberName = testExpression.d.leftExpr.d.member; + + if ( + isClassInstance(rightType) && + (ClassType.isEnumClass(rightType) || ClassType.isBuiltIn(rightType, 'bool')) && + rightType.priv.literalValue !== undefined + ) { + return (type: Type) => { + return { + type: narrowTypeForDiscriminatedLiteralFieldComparison( + evaluator, + type, + memberName.d.value, + rightType, + adjIsPositiveTest + ), + isIncomplete: !!rightTypeResult.isIncomplete, + }; + }; + } + } + + // Look for X.Y is None or X.Y is not None + // These are commonly-used patterns used in control flow. + if ( + testExpression.d.leftExpr.nodeType === ParseNodeType.MemberAccess && + ParseTreeUtils.isMatchingExpression(reference, testExpression.d.leftExpr.d.leftExpr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) && + testExpression.d.rightExpr.nodeType === ParseNodeType.Constant && + testExpression.d.rightExpr.d.constType === KeywordType.None + ) { + const memberName = testExpression.d.leftExpr.d.member; + return (type: Type) => { + return { + type: narrowTypeForDiscriminatedFieldNoneComparison( + evaluator, + type, + memberName.d.value, + adjIsPositiveTest + ), + isIncomplete: false, + }; + }; + } + } + + // Look for len(x) == , len(x) != , len(x) < , etc. 
+ if ( + comparisonOperator && + testExpression.d.leftExpr.nodeType === ParseNodeType.Call && + testExpression.d.leftExpr.d.args.length === 1 + ) { + const arg0Expr = testExpression.d.leftExpr.d.args[0].d.valueExpr; + + if ( + ParseTreeUtils.isMatchingExpression(reference, arg0Expr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + const callTypeResult = evaluator.getTypeOfExpression( + testExpression.d.leftExpr.d.leftExpr, + EvalFlags.CallBaseDefaults + ); + const callType = callTypeResult.type; + + if (isFunction(callType) && callType.shared.fullName === 'builtins.len') { + const rightTypeResult = evaluator.getTypeOfExpression(testExpression.d.rightExpr); + const rightType = rightTypeResult.type; + + if ( + isClassInstance(rightType) && + typeof rightType.priv.literalValue === 'number' && + rightType.priv.literalValue >= 0 + ) { + let tupleLength = rightType.priv.literalValue; + + // We'll treat <, <= and == as positive tests with >=, > and != as + // their negative counterparts. + const isLessOrEqual = + testExpression.d.operator === OperatorType.Equals || + testExpression.d.operator === OperatorType.LessThan || + testExpression.d.operator === OperatorType.LessThanOrEqual; + + const adjIsPositiveTest = isLessOrEqual ? isPositiveTest : !isPositiveTest; + + // For <= (or its negative counterpart >), adjust the tuple length by 1. 
+ if ( + testExpression.d.operator === OperatorType.LessThanOrEqual || + testExpression.d.operator === OperatorType.GreaterThan + ) { + tupleLength++; + } + + const isEqualityCheck = + testExpression.d.operator === OperatorType.Equals || + testExpression.d.operator === OperatorType.NotEquals; + + return (type: Type) => { + return { + type: narrowTypeForTupleLength( + evaluator, + type, + tupleLength, + adjIsPositiveTest, + !isEqualityCheck + ), + isIncomplete: !!callTypeResult.isIncomplete || !!rightTypeResult.isIncomplete, + }; + }; + } + } + } + } + + if (testExpression.d.operator === OperatorType.In || testExpression.d.operator === OperatorType.NotIn) { + // Look for "x in y" or "x not in y" where y is one of several built-in types. + if ( + ParseTreeUtils.isMatchingExpression(reference, testExpression.d.leftExpr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + const rightTypeResult = evaluator.getTypeOfExpression(testExpression.d.rightExpr); + const rightType = rightTypeResult.type; + const adjIsPositiveTest = + testExpression.d.operator === OperatorType.In ? isPositiveTest : !isPositiveTest; + + return (type: Type) => { + return { + type: narrowTypeForContainerType(evaluator, type, rightType, adjIsPositiveTest), + isIncomplete: !!rightTypeResult.isIncomplete, + }; + }; + } + + if ( + ParseTreeUtils.isMatchingExpression(reference, testExpression.d.rightExpr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + // Look for in y where y is a union that contains + // one or more TypedDicts. + const leftTypeResult = evaluator.getTypeOfExpression(testExpression.d.leftExpr); + const leftType = leftTypeResult.type; + + if (isClassInstance(leftType) && ClassType.isBuiltIn(leftType, 'str') && isLiteralType(leftType)) { + const adjIsPositiveTest = + testExpression.d.operator === OperatorType.In ? 
isPositiveTest : !isPositiveTest; + return (type: Type) => { + return { + type: narrowTypeForTypedDictKey( + evaluator, + type, + ClassType.cloneAsInstantiable(leftType), + adjIsPositiveTest + ), + isIncomplete: !!leftTypeResult.isIncomplete, + }; + }; + } + } + } + } + + if (testExpression.nodeType === ParseNodeType.Call) { + // Look for "isinstance(X, Y)" or "issubclass(X, Y)". + if (testExpression.d.args.length === 2) { + // Make sure the first parameter is a supported expression type + // and the second parameter is a valid class type or a tuple + // of valid class types. + const arg0Expr = testExpression.d.args[0].d.valueExpr; + const arg1Expr = testExpression.d.args[1].d.valueExpr; + + if ( + ParseTreeUtils.isMatchingExpression(reference, arg0Expr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + const callTypeResult = evaluator.getTypeOfExpression( + testExpression.d.leftExpr, + EvalFlags.CallBaseDefaults + ); + const callType = callTypeResult.type; + + if (isFunction(callType) && FunctionType.isBuiltIn(callType, ['isinstance', 'issubclass'])) { + const isInstanceCheck = FunctionType.isBuiltIn(callType, 'isinstance'); + const arg1TypeResult = evaluator.getTypeOfExpression(arg1Expr, EvalFlags.IsInstanceArgDefaults); + const arg1Type = arg1TypeResult.type; + + const classTypeList = getIsInstanceClassTypes(evaluator, arg1Type); + const isIncomplete = !!callTypeResult.isIncomplete || !!arg1TypeResult.isIncomplete; + + if (classTypeList) { + return (type: Type) => { + return { + type: narrowTypeForInstanceOrSubclass( + evaluator, + type, + classTypeList, + isInstanceCheck, + /* isTypeIsCheck */ false, + isPositiveTest, + testExpression + ), + isIncomplete, + }; + }; + } else if (isIncomplete) { + // If the type is incomplete, it may include unknowns, which will result + // in classTypeList being undefined. 
+ return (type: Type) => { + return { + type, + isIncomplete: true, + }; + }; + } + } + } + } + + // Look for "bool(X)" + if (testExpression.d.args.length === 1 && !testExpression.d.args[0].d.name) { + if ( + ParseTreeUtils.isMatchingExpression(reference, testExpression.d.args[0].d.valueExpr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + const callTypeResult = evaluator.getTypeOfExpression( + testExpression.d.leftExpr, + EvalFlags.CallBaseDefaults + ); + const callType = callTypeResult.type; + + if (isInstantiableClass(callType) && ClassType.isBuiltIn(callType, 'bool')) { + return (type: Type) => { + return { + type: narrowTypeForTruthiness(evaluator, type, isPositiveTest), + isIncomplete: !!callTypeResult.isIncomplete, + }; + }; + } + } + } + + // Look for a TypeGuard function. + if (testExpression.d.args.length >= 1) { + const arg0Expr = testExpression.d.args[0].d.valueExpr; + if ( + ParseTreeUtils.isMatchingExpression(reference, arg0Expr, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + // Does this look like it's a custom type guard function? + let isPossiblyTypeGuard = false; + + const isFunctionReturnTypeGuard = (type: FunctionType) => { + return ( + type.shared.declaredReturnType && + isClassInstance(type.shared.declaredReturnType) && + ClassType.isBuiltIn(type.shared.declaredReturnType, ['TypeGuard', 'TypeIs']) + ); + }; + + const callTypeResult = evaluator.getTypeOfExpression( + testExpression.d.leftExpr, + EvalFlags.CallBaseDefaults + ); + const callType = callTypeResult.type; + + if (isFunction(callType) && isFunctionReturnTypeGuard(callType)) { + isPossiblyTypeGuard = true; + } else if ( + isOverloaded(callType) && + OverloadedType.getOverloads(callType).some((o) => isFunctionReturnTypeGuard(o)) + ) { + isPossiblyTypeGuard = true; + } else if (isClassInstance(callType)) { + isPossiblyTypeGuard = true; + } + + if (isPossiblyTypeGuard) { + // Evaluate the type guard call expression. 
+ const functionReturnTypeResult = evaluator.getTypeOfExpression(testExpression); + const functionReturnType = functionReturnTypeResult.type; + + if ( + isClassInstance(functionReturnType) && + ClassType.isBuiltIn(functionReturnType, ['TypeGuard', 'TypeIs']) && + functionReturnType.priv.typeArgs && + functionReturnType.priv.typeArgs.length > 0 + ) { + const isStrictTypeGuard = ClassType.isBuiltIn(functionReturnType, 'TypeIs'); + const typeGuardType = functionReturnType.priv.typeArgs[0]; + const isIncomplete = !!callTypeResult.isIncomplete || !!functionReturnTypeResult.isIncomplete; + + return (type: Type) => { + return { + type: narrowTypeForUserDefinedTypeGuard( + evaluator, + type, + typeGuardType, + isPositiveTest, + isStrictTypeGuard, + testExpression + ), + isIncomplete, + }; + }; + } + } + } + } + } + + if ( + ParseTreeUtils.isMatchingExpression(reference, testExpression, (ref, expr) => + isNameSameScope(evaluator, ref, expr) + ) + ) { + return (type: Type) => { + return { + type: narrowTypeForTruthiness(evaluator, type, isPositiveTest), + isIncomplete: false, + }; + }; + } + + // Is this a reference to an aliased conditional expression (a local variable + // that was assigned a value that can inform type narrowing of the reference expression)? + const narrowingCallback = getTypeNarrowingCallbackForAliasedCondition( + evaluator, + reference, + testExpression, + isPositiveTest, + recursionCount + ); + if (narrowingCallback) { + return narrowingCallback; + } + + // We normally won't find a "not" operator here because they are stripped out + // by the binder when it creates condition flow nodes, but we can find this + // in the case of local variables type narrowing. 
+ if (reference.nodeType === ParseNodeType.Name) { + if ( + testExpression.nodeType === ParseNodeType.UnaryOperation && + testExpression.d.operator === OperatorType.Not + ) { + return getTypeNarrowingCallback( + evaluator, + reference, + testExpression.d.expr, + !isPositiveTest, + recursionCount + ); + } + } + + return undefined; +} + +function getTypeNarrowingCallbackForAliasedCondition( + evaluator: TypeEvaluator, + reference: ExpressionNode, + testExpression: ExpressionNode, + isPositiveTest: boolean, + recursionCount: number +) { + if ( + testExpression.nodeType !== ParseNodeType.Name || + reference.nodeType !== ParseNodeType.Name || + testExpression === reference + ) { + return undefined; + } + + // Make sure the reference expression is a constant parameter or variable. + // If the reference expression is modified within the scope multiple times, + // we need to validate that it is not modified between the test expression + // evaluation and the conditional check. + const testExprDecl = getDeclsForLocalVar(evaluator, testExpression, testExpression, /* requireUnique */ true); + if (!testExprDecl || testExprDecl.length !== 1 || testExprDecl[0].type !== DeclarationType.Variable) { + return undefined; + } + + const referenceDecls = getDeclsForLocalVar(evaluator, reference, testExpression, /* requireUnique */ false); + if (!referenceDecls) { + return undefined; + } + + let modifyingDecls: Declaration[] = []; + if (referenceDecls.length > 1) { + // If there is more than one assignment to the reference variable within + // the local scope, make sure that none of these assignments are done + // after the test expression but before the condition check. + // + // This is OK: + // val = None + // is_none = val is None + // if is_none: ... + // + // This is not OK: + // val = None + // is_none = val is None + // val = 1 + // if is_none: ... 
+ modifyingDecls = referenceDecls.filter((decl) => { + return ( + evaluator.isNodeReachable(testExpression, decl.node) && + evaluator.isNodeReachable(decl.node, testExprDecl[0].node) + ); + }); + } + + if (modifyingDecls.length !== 0) { + return undefined; + } + + const initNode = testExprDecl[0].inferredTypeSource; + + if (!initNode || ParseTreeUtils.isNodeContainedWithin(testExpression, initNode) || !isExpressionNode(initNode)) { + return undefined; + } + + return getTypeNarrowingCallback(evaluator, reference, initNode, isPositiveTest, recursionCount); +} + +// Determines whether the symbol is a local variable or parameter within +// the current scope. If requireUnique is true, there can be only one +// declaration (assignment) of the symbol, otherwise it is rejected. +function getDeclsForLocalVar( + evaluator: TypeEvaluator, + name: NameNode, + reachableFrom: ParseNode, + requireUnique: boolean +): Declaration[] | undefined { + const scope = getScopeForNode(name); + if (scope?.type !== ScopeType.Function && scope?.type !== ScopeType.Module) { + return undefined; + } + + const symbol = scope.lookUpSymbol(name.d.value); + if (!symbol) { + return undefined; + } + + const decls = symbol.getDeclarations(); + if (requireUnique && decls.length > 1) { + return undefined; + } + + if ( + decls.length === 0 || + decls.some((decl) => decl.type !== DeclarationType.Variable && decl.type !== DeclarationType.Param) + ) { + return undefined; + } + + // If there are any assignments within different scopes (e.g. via a "global" or + // "nonlocal" reference), don't consider it a local variable. + let prevDeclScope: ParseNode | undefined; + if ( + decls.some((decl) => { + const nodeToConsider = decl.type === DeclarationType.Param ? decl.node.d.name! 
: decl.node; + const declScopeNode = ParseTreeUtils.getExecutionScopeNode(nodeToConsider); + if (prevDeclScope && declScopeNode !== prevDeclScope) { + return true; + } + prevDeclScope = declScopeNode; + return false; + }) + ) { + return undefined; + } + + const reachableDecls = decls.filter((decl) => evaluator.isNodeReachable(reachableFrom, decl.node)); + + return reachableDecls.length > 0 ? reachableDecls : undefined; +} + +function getTypeNarrowingCallbackForAssignmentExpression( + evaluator: TypeEvaluator, + reference: ExpressionNode, + testExpression: AssignmentExpressionNode, + isPositiveTest: boolean, + recursionCount: number +) { + return ( + getTypeNarrowingCallback(evaluator, reference, testExpression.d.rightExpr, isPositiveTest, recursionCount) ?? + getTypeNarrowingCallback(evaluator, reference, testExpression.d.name, isPositiveTest, recursionCount) + ); +} + +function narrowTypeForUserDefinedTypeGuard( + evaluator: TypeEvaluator, + type: Type, + typeGuardType: Type, + isPositiveTest: boolean, + isStrictTypeGuard: boolean, + errorNode: ExpressionNode +): Type { + // For non-strict type guards, always narrow to the typeGuardType + // in the positive case and don't narrow in the negative case. + if (!isStrictTypeGuard) { + let result = type; + + if (isPositiveTest) { + result = typeGuardType; + + // If the type guard is a non-constrained TypeVar, add a + // condition to the resulting type. 
+ if (isTypeVar(type) && !isParamSpec(type) && !TypeVarType.hasConstraints(type)) { + result = addConditionToType(result, [{ typeVar: type, constraintIndex: 0 }]); + } + return result; + } + + return result; + } + + const filterTypes: Type[] = []; + doForEachSubtype(typeGuardType, (typeGuardSubtype) => { + filterTypes.push(convertToInstantiable(typeGuardSubtype)); + }); + + return narrowTypeForInstanceOrSubclass( + evaluator, + type, + filterTypes, + /* isInstanceCheck */ true, + /* isTypeIsCheck */ true, + isPositiveTest, + errorNode + ); +} + +// Narrow the type based on whether the subtype can be true or false. +function narrowTypeForTruthiness(evaluator: TypeEvaluator, type: Type, isPositiveTest: boolean) { + return mapSubtypes(type, (subtype) => { + if (isPositiveTest) { + if (evaluator.canBeTruthy(subtype)) { + return evaluator.removeFalsinessFromType(subtype); + } + } else { + if (evaluator.canBeFalsy(subtype)) { + return evaluator.removeTruthinessFromType(subtype); + } + } + return undefined; + }); +} + +// Handle type narrowing for expressions of the form "a[I] is None" and "a[I] is not None" where +// I is an integer and a is a union of Tuples (or subtypes thereof) with known lengths and entry types. 
+function narrowTupleTypeForIsNone(evaluator: TypeEvaluator, type: Type, isPositiveTest: boolean, indexValue: number) { + return evaluator.mapSubtypesExpandTypeVars(type, /* options */ undefined, (subtype) => { + const tupleType = getSpecializedTupleType(subtype); + if (!tupleType || isUnboundedTupleClass(tupleType) || !tupleType.priv.tupleTypeArgs) { + return subtype; + } + + const tupleLength = tupleType.priv.tupleTypeArgs.length; + if (indexValue < 0 || indexValue >= tupleLength) { + return subtype; + } + + const typeOfEntry = evaluator.makeTopLevelTypeVarsConcrete(tupleType.priv.tupleTypeArgs[indexValue].type); + + if (isPositiveTest) { + if (!evaluator.assignType(typeOfEntry, evaluator.getNoneType())) { + return undefined; + } + } else { + if (isNoneInstance(typeOfEntry)) { + return undefined; + } + } + + return subtype; + }); +} + +// Handle type narrowing for expressions of the form "x is None" and "x is not None". +function narrowTypeForIsNone(evaluator: TypeEvaluator, type: Type, isPositiveTest: boolean) { + const expandedType = mapSubtypes(type, (subtype) => { + return transformPossibleRecursiveTypeAlias(subtype); + }); + + let resultIncludesNoneSubtype = false; + + const result = evaluator.mapSubtypesExpandTypeVars( + expandedType, + /* options */ undefined, + (subtype, unexpandedSubtype) => { + if (isAnyOrUnknown(subtype)) { + // Assume that "Any" is always both None and not None, so it matches + // regardless of whether the test is positive or negative. + return subtype; + } + + let useExpandedSubtype = false; + if (isTypeVar(unexpandedSubtype) && !TypeVarType.isSelf(unexpandedSubtype)) { + // If the TypeVar has value constraints and one or more of them + // are possibly compatible with None, use the expanded subtypes. 
+ if ( + unexpandedSubtype.shared.constraints.some((constraint) => { + return evaluator.assignType(constraint, evaluator.getNoneType()); + }) + ) { + useExpandedSubtype = true; + } + + // If the TypeVar han an explicit bound that is possibly compatible + // with None (e.g. "T: int | None"), use the expanded subtypes. + if ( + unexpandedSubtype.shared.boundType && + evaluator.assignType(unexpandedSubtype.shared.boundType, evaluator.getNoneType()) + ) { + useExpandedSubtype = true; + } + } + + const adjustedSubtype = useExpandedSubtype ? subtype : unexpandedSubtype; + + // Is it an exact match for None? + if (isNoneInstance(subtype)) { + resultIncludesNoneSubtype = true; + return isPositiveTest ? adjustedSubtype : undefined; + } + + // Is it potentially None? + if (evaluator.assignType(subtype, evaluator.getNoneType())) { + resultIncludesNoneSubtype = true; + return isPositiveTest + ? addConditionToType(evaluator.getNoneType(), subtype.props?.condition) + : adjustedSubtype; + } + + return isPositiveTest ? undefined : adjustedSubtype; + } + ); + + // If this is a positive test and the result is a union that includes None, + // we can eliminate all the non-None subtypes include Any or Unknown. If some + // of the subtypes are None types with conditions, retain those. + if (isPositiveTest && resultIncludesNoneSubtype) { + return mapSubtypes(result, (subtype) => { + return isNoneInstance(subtype) ? subtype : undefined; + }); + } + + return result; +} + +// Handle type narrowing for expressions of the form "x is ..." and "x is not ...". +function narrowTypeForIsEllipsis(evaluator: TypeEvaluator, node: ExpressionNode, type: Type, isPositiveTest: boolean) { + const expandedType = mapSubtypes(type, (subtype) => { + return transformPossibleRecursiveTypeAlias(subtype); + }); + + let resultIncludesEllipsisSubtype = false; + + const ellipsisType = + evaluator.getBuiltInObject(node, 'EllipsisType') ?? + evaluator.getBuiltInObject(node, 'ellipsis') ?? 
+ AnyType.create(); + + const isEllipsisInstance = (subtype: Type) => { + return isClassInstance(subtype) && ClassType.isBuiltIn(subtype, ['EllipsisType', 'ellipsis']); + }; + + const result = evaluator.mapSubtypesExpandTypeVars( + expandedType, + /* options */ undefined, + (subtype, unexpandedSubtype) => { + if (isAnyOrUnknown(subtype)) { + // We need to assume that "Any" is always both ellipsis and not ellipsis, + // so it matches regardless of whether the test is positive or negative. + return subtype; + } + + // If this is a TypeVar that isn't constrained, use the unexpanded + // TypeVar. For all other cases (including constrained TypeVars), + // use the expanded subtype. + const adjustedSubtype = + isTypeVar(unexpandedSubtype) && !TypeVarType.hasConstraints(unexpandedSubtype) + ? unexpandedSubtype + : subtype; + + // Is it an exact match for ellipsis? + if (isEllipsisInstance(subtype)) { + resultIncludesEllipsisSubtype = true; + return isPositiveTest ? adjustedSubtype : undefined; + } + + // Is it potentially ellipsis? + if (evaluator.assignType(subtype, ellipsisType)) { + resultIncludesEllipsisSubtype = true; + return isPositiveTest ? addConditionToType(ellipsisType, subtype.props?.condition) : adjustedSubtype; + } + + return isPositiveTest ? undefined : adjustedSubtype; + } + ); + + // If this is a positive test and the result is a union that includes ellipsis, + // we can eliminate all the non-ellipsis subtypes include Any or Unknown. If some + // of the subtypes are ellipsis types with conditions, retain those. + if (isPositiveTest && resultIncludesEllipsisSubtype) { + return mapSubtypes(result, (subtype) => { + return isEllipsisInstance(subtype) ? subtype : undefined; + }); + } + + return result; +} + +// The "isinstance" and "issubclass" calls support two forms - a simple form +// that accepts a single class, and a more complex form that accepts a tuple +// of classes (including arbitrarily-nested tuples). 
This method determines +// which form and returns a list of classes or undefined. +export function getIsInstanceClassTypes( + evaluator: TypeEvaluator, + argType: Type +): (ClassType | TypeVarType | FunctionType)[] | undefined { + let foundNonClassType = false; + const classTypeList: (ClassType | TypeVarType | FunctionType)[] = []; + + // Create a helper function that returns a list of class types or + // undefined if any of the types are not valid. + const addClassTypesToList = (types: Type[]) => { + types.forEach((subtype) => { + if (isClass(subtype)) { + subtype = specializeWithUnknownTypeArgs(subtype, evaluator.getTupleClassType()); + + if (isInstantiableClass(subtype) && ClassType.isBuiltIn(subtype, 'Callable')) { + subtype = convertToInstantiable(getUnknownTypeForCallable()); + } + } + + if (isInstantiableClass(subtype)) { + // If this is a reference to a class that has type promotions (e.g. + // float or complex), remove the promotions for purposes of the + // isinstance check). + if (!subtype.priv.includeSubclasses && subtype.priv.includePromotions) { + subtype = ClassType.cloneRemoveTypePromotions(subtype); + } + classTypeList.push(subtype); + } else if (isTypeVar(subtype) && TypeBase.isInstantiable(subtype)) { + classTypeList.push(subtype); + } else if (isNoneTypeClass(subtype)) { + assert(isInstantiableClass(subtype)); + classTypeList.push(subtype); + } else if ( + isFunction(subtype) && + subtype.shared.parameters.length === 2 && + subtype.shared.parameters[0].category === ParamCategory.ArgsList && + subtype.shared.parameters[1].category === ParamCategory.KwargsDict + ) { + classTypeList.push(subtype); + } else { + foundNonClassType = true; + } + }); + }; + + const addClassTypesRecursive = (type: Type, recursionCount = 0) => { + if (recursionCount > maxTypeRecursionCount) { + return; + } + + if (isClass(type) && TypeBase.isInstance(type) && isTupleClass(type)) { + if (type.priv.tupleTypeArgs) { + type.priv.tupleTypeArgs.forEach((tupleEntry) => { + 
addClassTypesRecursive(tupleEntry.type, recursionCount + 1); + }); + } + } else { + doForEachSubtype(type, (subtype) => { + addClassTypesToList([subtype]); + }); + } + }; + + doForEachSubtype(argType, (subtype) => { + addClassTypesRecursive(subtype); + }); + + return foundNonClassType ? undefined : classTypeList; +} + +export function narrowTypeForInstanceOrSubclass( + evaluator: TypeEvaluator, + type: Type, + filterTypes: Type[], + isInstanceCheck: boolean, + isTypeIsCheck: boolean, + isPositiveTest: boolean, + errorNode: ExpressionNode +) { + // First try with intersection types disallowed. + const narrowedType = narrowTypeForInstanceOrSubclassInternal( + evaluator, + type, + filterTypes, + isInstanceCheck, + isTypeIsCheck, + isPositiveTest, + /* allowIntersections */ false, + errorNode + ); + + if (!isNever(narrowedType)) { + return narrowedType; + } + + // Try again with intersection types allowed. + return narrowTypeForInstanceOrSubclassInternal( + evaluator, + type, + filterTypes, + isInstanceCheck, + isTypeIsCheck, + isPositiveTest, + /* allowIntersections */ true, + errorNode + ); +} + +function narrowTypeForInstanceOrSubclassInternal( + evaluator: TypeEvaluator, + type: Type, + filterTypes: Type[], + isInstanceCheck: boolean, + isTypeIsCheck: boolean, + isPositiveTest: boolean, + allowIntersections: boolean, + errorNode: ExpressionNode +): Type { + const result = mapSubtypes(type, (subtype) => { + let adjSubtype = subtype; + let resultRequiresAdj = false; + let adjFilterTypes = filterTypes; + + if (!isInstanceCheck) { + const isTypeInstance = isClassInstance(subtype) && ClassType.isBuiltIn(subtype, 'type'); + + // Handle metaclass instances specially. 
+ if (isMetaclassInstance(subtype) && !isTypeInstance) { + adjFilterTypes = filterTypes.map((filterType) => convertToInstantiable(filterType)); + } else { + adjSubtype = convertToInstance(subtype); + + if (!isAnyOrUnknown(subtype) || isPositiveTest) { + resultRequiresAdj = true; + } + } + } + + const narrowedResult = narrowTypeForInstance( + evaluator, + adjSubtype, + adjFilterTypes, + isTypeIsCheck, + isPositiveTest, + allowIntersections, + errorNode + ); + + if (!resultRequiresAdj) { + return narrowedResult; + } + + if (isAnyOrUnknown(narrowedResult)) { + const typeClass = evaluator.getTypeClassType(); + if (typeClass) { + return ClassType.specialize(ClassType.cloneAsInstance(typeClass), [narrowedResult]); + } + } + + return convertToInstantiable(narrowedResult); + }); + + return result; +} + +// Narrows a type based on a call to isinstance. For example, if the original +// type of expression "x" is "Mammal" and the test expression is +// "isinstance(x, Cow)", (assuming "Cow" is a subclass of "Mammal"), we can +// narrow x to "Cow". +function narrowTypeForInstance( + evaluator: TypeEvaluator, + type: Type, + filterTypes: Type[], + isTypeIsCheck: boolean, + isPositiveTest: boolean, + allowIntersections: boolean, + errorNode: ExpressionNode +): Type { + let expandedTypes = mapSubtypes(type, (subtype) => { + return transformPossibleRecursiveTypeAlias(subtype); + }); + + expandedTypes = evaluator.expandPromotionTypes(errorNode, expandedTypes); + + const convertVarTypeToFree = (varType: Type): Type => { + // If this is a TypeIs check, type variables should remain bound. + if (isTypeIsCheck) { + return varType; + } + + // If this is an isinstance or issubclass check, the type variables + // should be converted to "free" type variables. 
+ return makeTypeVarsFree(varType, ParseTreeUtils.getTypeVarScopesForNode(errorNode)); + }; + + // Filters the varType by the parameters of the isinstance + // and returns the list of types the varType could be after + // applying the filter. + const filterClassType = ( + varType: Type, + concreteVarType: ClassType, + conditions: TypeCondition[] | undefined, + negativeFallbackType: Type + ): Type[] => { + const filteredTypes: Type[] = []; + + let foundSuperclass = false; + let isClassRelationshipIndeterminate = false; + + for (const filterType of filterTypes) { + const concreteFilterType = evaluator.makeTopLevelTypeVarsConcrete(filterType); + + if (isInstantiableClass(concreteFilterType)) { + const filterMetaclass = concreteFilterType.shared.effectiveMetaclass; + if ( + isInstantiableMetaclass(concreteVarType) && + TypeBase.getInstantiableDepth(concreteFilterType) > 0 && + filterMetaclass && + isInstantiableClass(filterMetaclass) + ) { + const metaclassType = convertToInstance(concreteVarType); + let isMetaclassOverlap = evaluator.assignType( + convertVarTypeToFree(metaclassType), + ClassType.cloneAsInstance(filterMetaclass) + ); + + // Handle the special case where the metaclass for the filter is type. + // This will normally be treated as type[Any], which is compatible with + // any metaclass, but we specifically want to treat type as the class + // type[object] in this case. 
+ if (ClassType.isBuiltIn(filterMetaclass, 'type') && !filterMetaclass.priv.isTypeArgExplicit) { + if (!isClass(metaclassType) || !ClassType.isBuiltIn(metaclassType, 'type')) { + isMetaclassOverlap = false; + } + } + + if (isMetaclassOverlap) { + if (isPositiveTest) { + filteredTypes.push(filterType); + foundSuperclass = true; + } else if ( + !isTypeSame(metaclassType, filterMetaclass) || + filterMetaclass.priv.includeSubclasses + ) { + filteredTypes.push(metaclassType); + isClassRelationshipIndeterminate = true; + } + continue; + } + } + + let runtimeVarType = concreteVarType; + + // Type variables are erased for runtime types, so switch from + // bound to free type variables. We'll retain the bound type + // variables for TypeIs checks. + if (!isTypeIsCheck) { + runtimeVarType = makeTypeVarsFree( + runtimeVarType, + ParseTreeUtils.getTypeVarScopesForNode(errorNode) + ); + } + + // If the value is a TypedDict, convert it into its runtime form, + // which is a dict[str, Any]. + if (isInstantiableClass(runtimeVarType) && ClassType.isTypedDictClass(runtimeVarType)) { + const dictClass = evaluator.getDictClassType(); + const strType = evaluator.getStrClassType(); + + if (dictClass && strType) { + runtimeVarType = ClassType.specialize(dictClass, [ + ClassType.cloneAsInstance(strType), + UnknownType.create(), + ]); + } + } + + const filterIsSuperclass = evaluator.assignType( + filterType, + runtimeVarType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.AllowIsinstanceSpecialForms | AssignTypeFlags.AllowProtocolClassSource + ); + + let filterIsSubclass = evaluator.assignType( + runtimeVarType, + filterType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.AllowIsinstanceSpecialForms | AssignTypeFlags.AllowProtocolClassSource + ); + + if (filterIsSuperclass) { + foundSuperclass = true; + } + + // Special-case the TypeForm special form. 
This represents a variety + // of runtime classes that will not appear to overlap with TypeForm. + if (ClassType.isBuiltIn(runtimeVarType, 'TypeForm')) { + isClassRelationshipIndeterminate = true; + filterIsSubclass = true; + } + + // Normally, a type should never be both a subclass and a superclass. + // This can happen if either of the class types derives from a + // class whose type is unknown (e.g. an import failed). We'll + // note this case specially so we don't do any narrowing, which + // will generate false positives. + if (filterIsSuperclass) { + if (!isTypeIsCheck && concreteFilterType.priv.includeSubclasses) { + // If the filter type includes subclasses, we can't eliminate + // this type in the negative direction. We'll relax this for + // TypeIs checks. + isClassRelationshipIndeterminate = true; + } + + if (filterIsSubclass && !ClassType.isSameGenericClass(runtimeVarType, concreteFilterType)) { + // If the runtime variable type is a type[T], handle a filter + // of 'type' as a special case. + if ( + !ClassType.isBuiltIn(concreteFilterType, 'type') || + TypeBase.getInstantiableDepth(runtimeVarType) === 0 + ) { + isClassRelationshipIndeterminate = true; + } + } + } + + // If both the variable type and the filter type ar generics, we can't + // determine the relationship between the two. + if (isTypeVar(varType) && isTypeVar(filterType)) { + isClassRelationshipIndeterminate = true; + } + + if (isPositiveTest) { + if (filterIsSuperclass) { + // If the variable type is a subclass of the isinstance filter, + // we haven't learned anything new about the variable type. + + // If the varType is a Self or type[Self], retain the unnarrowedType. 
+ if (isTypeVar(varType) && TypeVarType.isSelf(varType)) { + filteredTypes.push(addConditionToType(varType, conditions)); + } else { + filteredTypes.push(addConditionToType(concreteVarType, conditions)); + } + } else if (filterIsSubclass) { + // If the variable type is a superclass of the isinstance + // filter, we can narrow the type to the subclass. + let specializedFilterType = filterType; + + // Try to retain the type arguments for the filter type. This is + // important because a specialized version of the filter cannot + // be passed to isinstance or issubclass. + if (isClass(filterType)) { + if (ClassType.isSpecialBuiltIn(filterType) || filterType.shared.typeParams.length > 0) { + if ( + !filterType.priv.isTypeArgExplicit && + !ClassType.isSameGenericClass(concreteVarType, filterType) + ) { + const constraints = new ConstraintTracker(); + const unspecializedFilterType = ClassType.specialize( + filterType, + /* typeArg */ undefined + ); + + if ( + addConstraintsForExpectedType( + evaluator, + ClassType.cloneAsInstance(unspecializedFilterType), + ClassType.cloneAsInstance(concreteVarType), + constraints, + /* liveTypeVarScopes */ undefined, + errorNode.start + ) + ) { + specializedFilterType = evaluator.solveAndApplyConstraints( + unspecializedFilterType, + constraints, + { + replaceUnsolved: { + scopeIds: getTypeVarScopeIds(filterType), + useUnknown: true, + tupleClassType: evaluator.getTupleClassType(), + }, + } + ) as ClassType; + } + } + } + } + + filteredTypes.push(addConditionToType(specializedFilterType, conditions)); + } else if ( + ClassType.isSameGenericClass( + ClassType.cloneAsInstance(concreteVarType), + ClassType.cloneAsInstance(concreteFilterType) + ) + ) { + if (!isTypeIsCheck) { + // Don't attempt to narrow in this case. 
+ if ( + concreteVarType.priv?.literalValue === undefined && + concreteFilterType.priv?.literalValue === undefined + ) { + const intersection = intersectSameClassType( + evaluator, + concreteVarType, + concreteFilterType + ); + filteredTypes.push(intersection ?? varType); + } + } + } else if ( + allowIntersections && + !ClassType.isFinal(concreteVarType) && + !ClassType.isFinal(concreteFilterType) + ) { + // The two types appear to have no relation. It's possible that the + // two types are protocols or the program is expecting one type to + // be a mix-in class used with the other. In this case, we'll + // synthesize a new class type that represents an intersection of + // the two types. + let newClassType = evaluator.createSubclass(errorNode, concreteVarType, concreteFilterType); + if (isTypeVar(varType) && !isParamSpec(varType) && !TypeVarType.hasConstraints(varType)) { + newClassType = addConditionToType(newClassType, [{ typeVar: varType, constraintIndex: 0 }]); + } + + filteredTypes.push(addConditionToType(newClassType, concreteVarType.props?.condition)); + } + } else { + if (isAnyOrUnknown(varType)) { + filteredTypes.push(addConditionToType(varType, conditions)); + } else if (derivesFromAnyOrUnknown(varType) && !isTypeSame(concreteVarType, concreteFilterType)) { + filteredTypes.push(addConditionToType(varType, conditions)); + } + } + } else if (isTypeVar(filterType) && TypeBase.isInstantiable(filterType)) { + // Handle the case where the filter type is Type[T] and the unexpanded + // subtype is some instance type, possibly T. + if (TypeBase.isInstance(varType)) { + if (isTypeVar(varType) && isTypeSame(convertToInstance(filterType), varType)) { + // If the unexpanded subtype is T, we can definitively filter + // in both the positive and negative cases. 
+ if (isPositiveTest) { + filteredTypes.push(varType); + } else { + foundSuperclass = true; + } + } else { + if (isPositiveTest) { + filteredTypes.push(convertToInstance(filterType)); + } else { + // If the unexpanded subtype is some other instance, we can't + // filter anything because it might be an instance. + filteredTypes.push(varType); + isClassRelationshipIndeterminate = true; + } + } + } + } else if (isFunction(filterType)) { + // Handle an isinstance check against Callable. + let isCallable = false; + + if (isClass(concreteVarType)) { + if (TypeBase.isInstantiable(varType)) { + isCallable = true; + } else { + isCallable = !!lookUpClassMember( + concreteVarType, + '__call__', + MemberAccessFlags.SkipInstanceMembers + ); + } + } + + if (isCallable) { + if (isPositiveTest) { + filteredTypes.push(convertToInstantiable(varType)); + } else { + foundSuperclass = true; + } + } else if ( + evaluator.assignType( + convertVarTypeToFree(concreteVarType), + filterType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.AllowIsinstanceSpecialForms + ) + ) { + if (isPositiveTest) { + filteredTypes.push(addConditionToType(filterType, concreteVarType.props?.condition)); + } + } else if (allowIntersections && isPositiveTest) { + // The type appears to not be callable. It's possible that the + // two type is a subclass that is callable. We'll synthesize a + // new intersection type. 
+ const className = `<callable subtype of ${concreteVarType.shared.name}>`;
+ const fileInfo = getFileInfo(errorNode);
+ let newClassType = ClassType.createInstantiable(
+ className,
+ ParseTreeUtils.getClassFullName(errorNode, fileInfo.moduleName, className),
+ fileInfo.moduleName,
+ fileInfo.fileUri,
+ ClassTypeFlags.None,
+ ParseTreeUtils.getTypeSourceId(errorNode),
+ /* declaredMetaclass */ undefined,
+ concreteVarType.shared.effectiveMetaclass,
+ concreteVarType.shared.docString
+ );
+ newClassType.shared.baseClasses = [concreteVarType];
+ computeMroLinearization(newClassType);
+
+ newClassType = addConditionToType(newClassType, concreteVarType.props?.condition);
+
+ // Add a __call__ method to the new class.
+ const callMethod = FunctionType.createSynthesizedInstance('__call__');
+ const selfParam = FunctionParam.create(
+ ParamCategory.Simple,
+ ClassType.cloneAsInstance(newClassType),
+ FunctionParamFlags.TypeDeclared,
+ 'self'
+ );
+ FunctionType.addParam(callMethod, selfParam);
+ FunctionType.addDefaultParams(callMethod);
+ callMethod.shared.declaredReturnType = UnknownType.create();
+ ClassType.getSymbolTable(newClassType).set(
+ '__call__',
+ Symbol.createWithType(SymbolFlags.ClassMember, callMethod)
+ );
+
+ filteredTypes.push(ClassType.cloneAsInstance(newClassType));
+ }
+ }
+ }
+
+ // In the negative case, if one or more of the filters
+ // always match the type (i.e. they are an exact match or
+ // a superclass of the type), then there's nothing left after
+ // the filter is applied. If we didn't find any superclass
+ // match, then the original variable type survives the filter.
+ if (!isPositiveTest) { + if (!foundSuperclass || isClassRelationshipIndeterminate) { + filteredTypes.push(convertToInstantiable(negativeFallbackType)); + } + } + + return filteredTypes.map((t) => convertToInstance(t)); + }; + + const isFilterTypeCallbackProtocol = (filterType: Type) => { + return ( + isInstantiableClass(filterType) && + evaluator.getCallbackProtocolType(ClassType.cloneAsInstance(filterType)) !== undefined + ); + }; + + const filterFunctionType = (varType: FunctionType | OverloadedType, unexpandedType: Type): Type[] => { + const filteredTypes: Type[] = []; + + if (isPositiveTest) { + for (const filterType of filterTypes) { + const concreteFilterType = evaluator.makeTopLevelTypeVarsConcrete(filterType); + + if (!isTypeIsCheck && isFilterTypeCallbackProtocol(concreteFilterType)) { + filteredTypes.push(convertToInstance(varType)); + } else if (evaluator.assignType(convertVarTypeToFree(varType), convertToInstance(concreteFilterType))) { + // If the filter type is a Callable, use the original type. If the + // filter type is a callback protocol, use the filter type. + if (isFunction(filterType)) { + filteredTypes.push(convertToInstance(unexpandedType)); + } else { + filteredTypes.push(convertToInstance(filterType)); + } + } else { + const filterTypeInstance = convertToInstance(convertVarTypeToFree(concreteFilterType)); + if (evaluator.assignType(filterTypeInstance, varType)) { + filteredTypes.push(convertToInstance(varType)); + } else { + // If this is a class instance that's not callable and it's not @final, + // a subclass could be compatible with the filter type. + if (isClassInstance(filterTypeInstance) && !ClassType.isFinal(filterTypeInstance)) { + const gradualFunc = FunctionType.createSynthesizedInstance( + '', + FunctionTypeFlags.GradualCallableForm + ); + FunctionType.addDefaultParams(gradualFunc); + + // If the class is callable (i.e. 
can be assigned to the generic gradual + // function signature), then the assignment check above didn't fail because + // of a signature mismatch. It failed because the class is not callable. + // We assume therefore that a subclass might be. + if (!evaluator.assignType(gradualFunc, filterTypeInstance)) { + // The resulting type should be an intersection of the filter type and + // the subtype, but we don't have a way to encode that yet. For now, + // we'll use the filter type. + filteredTypes.push(convertToInstance(filterType)); + } + } + } + } + } + } else { + // If one or more filters does not always filter the type, + // we can't eliminate the type in the negative case. + if ( + filterTypes.every((filterType) => { + const concreteFilterType = evaluator.makeTopLevelTypeVarsConcrete(filterType); + + // If the filter type is a callback protocol, the runtime + // isinstance check will filter all objects that have a __call__ + // method regardless of their signature types. + if (!isTypeIsCheck && isFilterTypeCallbackProtocol(concreteFilterType)) { + return false; + } + + if (isFunction(concreteFilterType) && FunctionType.isGradualCallableForm(concreteFilterType)) { + return false; + } + + const isSubtype = evaluator.assignType( + convertToInstance(convertVarTypeToFree(concreteFilterType)), + varType + ); + const isSupertype = evaluator.assignType( + convertVarTypeToFree(varType), + convertToInstance(concreteFilterType) + ); + + return !isSubtype || isSupertype; + }) + ) { + filteredTypes.push(convertToInstance(varType)); + } + } + + return filteredTypes; + }; + + const classListContainsNoneType = () => + filterTypes.some((t) => { + if (isNoneTypeClass(t)) { + return true; + } + return isInstantiableClass(t) && ClassType.isBuiltIn(t, 'NoneType'); + }); + + const anyOrUnknownSubstitutions: Type[] = []; + const anyOrUnknown: Type[] = []; + + const filteredType = evaluator.mapSubtypesExpandTypeVars( + expandedTypes, + { + expandCallback: (type) => { + return 
evaluator.expandPromotionTypes(errorNode, type); + }, + }, + (subtype, unexpandedSubtype) => { + // If we fail to filter anything in the negative case, we need to decide + // whether to retain the original TypeVar or replace it with its specialized + // type(s). We'll assume that if someone is using isinstance or issubclass + // on a constrained TypeVar that they want to filter based on its constrained + // parts. + const negativeFallback = getTypeCondition(subtype) ? subtype : unexpandedSubtype; + + if (isPositiveTest && isAnyOrUnknown(subtype)) { + // If this is a positive test and the effective type is Any or + // Unknown, we can assume that the type matches one of the + // specified types. + anyOrUnknownSubstitutions.push( + combineTypes(filterTypes.map((classType) => convertToInstance(classType))) + ); + + anyOrUnknown.push(subtype); + return undefined; + } + + if (isNoneInstance(subtype)) { + return classListContainsNoneType() === isPositiveTest ? subtype : undefined; + } + + if (isModule(subtype) || (isClassInstance(subtype) && ClassType.isBuiltIn(subtype, 'ModuleType'))) { + // Handle type narrowing for runtime-checkable protocols + // when applied to modules. + if (isPositiveTest) { + const filteredTypes = filterTypes.filter((classType) => { + const concreteClassType = evaluator.makeTopLevelTypeVarsConcrete(classType); + return isInstantiableClass(concreteClassType) && ClassType.isProtocolClass(concreteClassType); + }); + + if (filteredTypes.length > 0) { + return convertToInstance(combineTypes(filteredTypes)); + } + } + } + + if (isClass(subtype)) { + return combineTypes( + filterClassType( + unexpandedSubtype, + ClassType.cloneAsInstantiable(subtype), + getTypeCondition(subtype), + negativeFallback + ) + ); + } + + if (isFunctionOrOverloaded(subtype)) { + return combineTypes(filterFunctionType(subtype, unexpandedSubtype)); + } + + return isPositiveTest ? 
undefined : negativeFallback;
+ }
+ );
+
+ // If the result is Any/Unknown and contains no other subtypes and
+ // we have substitutions for Any/Unknown, use those instead. We don't
+ // want to apply this if the filtering produced something other than
+ // Any/Unknown. For example, if the statement is "isinstance(x, list)"
+ // and the type of x is "List[str] | int | Any", the result should be
+ // "List[str]", not "List[str] | List[Unknown]".
+ if (isNever(filteredType) && anyOrUnknownSubstitutions.length > 0) {
+ return combineTypes(anyOrUnknownSubstitutions);
+ }
+
+ if (isNever(filteredType) && anyOrUnknown.length > 0) {
+ return combineTypes(anyOrUnknown);
+ }
+
+ return filteredType;
+}
+
+// This function assumes that the caller has already verified that the two
+// types are the same class and are not literals. It also assumes that the
+// caller has verified that type1 is not assignable to type2 or vice versa.
+// Returns undefined if there is no intersection between the two types.
+function intersectSameClassType(evaluator: TypeEvaluator, type1: ClassType, type2: ClassType): ClassType | undefined {
+ assert(isInstantiableClass(type1) && isInstantiableClass(type2));
+ assert(ClassType.isSameGenericClass(type1, type2));
+ assert(type1.priv?.literalValue === undefined);
+ assert(type2.priv?.literalValue === undefined);
+
+ // Handle tuples specially.
+ if (ClassType.isBuiltIn(type1, 'tuple')) {
+ return intersectTupleTypes(type1, type2);
+ }
+
+ // Indicate that there is no intersection.
+ return undefined;
+}
+
+function intersectTupleTypes(type1: ClassType, type2: ClassType) {
+ if (!type2.priv.tupleTypeArgs || isTupleGradualForm(type2)) {
+ return addConditionToType(type1, type2.props?.condition);
+ }
+
+ if (!type1.priv.tupleTypeArgs || isTupleGradualForm(type1)) {
+ return addConditionToType(type2, type1.props?.condition);
+ }
+
+ // For now, don't attempt to narrow in this case.
+ // TODO - add more sophisticated logic here.
+ return undefined; +} + +// Attempts to narrow a union of tuples based on their known length. +function narrowTypeForTupleLength( + evaluator: TypeEvaluator, + referenceType: Type, + lengthValue: number, + isPositiveTest: boolean, + isLessThanCheck: boolean +) { + return mapSubtypes(referenceType, (subtype) => { + const concreteSubtype = evaluator.makeTopLevelTypeVarsConcrete(subtype); + + // If it's not a tuple, we can't narrow it. + if ( + !isClassInstance(concreteSubtype) || + !isTupleClass(concreteSubtype) || + !concreteSubtype.priv.tupleTypeArgs + ) { + return subtype; + } + + // If the tuple contains a TypeVarTuple, we can't narrow it. + if (concreteSubtype.priv.tupleTypeArgs.some((typeArg) => isUnpackedTypeVarTuple(typeArg.type))) { + return subtype; + } + + // If the tuple contains no unbounded elements, then we know its length exactly. + if (!concreteSubtype.priv.tupleTypeArgs.some((typeArg) => typeArg.isUnbounded)) { + const tupleLengthMatches = isLessThanCheck + ? concreteSubtype.priv.tupleTypeArgs.length < lengthValue + : concreteSubtype.priv.tupleTypeArgs.length === lengthValue; + + return tupleLengthMatches === isPositiveTest ? subtype : undefined; + } + + // The tuple contains a "...". We'll expand this into as many elements as + // necessary to match the lengthValue. + const elementsToAdd = lengthValue - concreteSubtype.priv.tupleTypeArgs.length + 1; + + if (!isLessThanCheck) { + // If the specified length is smaller than the minimum length of this tuple, + // we can rule it out for a positive test and rule it in for a negative test. + if (elementsToAdd < 0) { + return isPositiveTest ? undefined : subtype; + } + + if (!isPositiveTest) { + // If this is an equality check for the minimum length (e.g. + // "len(x) == 0"), we can expand the minimum length by one). 
+ const minLen = concreteSubtype.priv.tupleTypeArgs.length - 1; + if (lengthValue === minLen) { + return expandUnboundedTupleElement(concreteSubtype, 1, /* keepUnbounded */ true); + } + return subtype; + } + + return expandUnboundedTupleElement(concreteSubtype, elementsToAdd, /* keepUnbounded */ false); + } + + // If this is a tuple related to an "*args: P.args" parameter, don't expand it. + if (isParamSpec(subtype) && subtype.priv.paramSpecAccess) { + return subtype; + } + + // Place an upper limit on the number of union subtypes we + // will expand the tuple to. + const maxTupleUnionExpansion = 32; + if (elementsToAdd > maxTupleUnionExpansion) { + return subtype; + } + + if (isPositiveTest) { + if (elementsToAdd < 1) { + return undefined; + } + + const typesToCombine: Type[] = []; + + for (let i = 0; i < elementsToAdd; i++) { + typesToCombine.push(expandUnboundedTupleElement(concreteSubtype, i, /* keepUnbounded */ false)); + } + + return combineTypes(typesToCombine); + } + + return expandUnboundedTupleElement(concreteSubtype, elementsToAdd, /* keepUnbounded */ true); + }); +} + +// Expands a tuple type that contains an unbounded element to include +// multiple bounded elements of that same type in place of (or in addition +// to) the unbounded element. +function expandUnboundedTupleElement(tupleType: ClassType, elementsToAdd: number, keepUnbounded: boolean) { + const tupleTypeArgs: TupleTypeArg[] = []; + + tupleType.priv.tupleTypeArgs!.forEach((typeArg) => { + if (!typeArg.isUnbounded) { + tupleTypeArgs.push(typeArg); + } else { + for (let i = 0; i < elementsToAdd; i++) { + tupleTypeArgs.push({ isUnbounded: false, type: typeArg.type }); + } + + if (keepUnbounded) { + tupleTypeArgs.push(typeArg); + } + } + }); + + return specializeTupleClass(tupleType, tupleTypeArgs); +} + +// Attempts to narrow a type (make it more constrained) based on an "in" binary operator. 
+function narrowTypeForContainerType( + evaluator: TypeEvaluator, + referenceType: Type, + containerType: Type, + isPositiveTest: boolean +) { + if (isPositiveTest) { + const elementType = getElementTypeForContainerNarrowing(containerType); + if (!elementType) { + return referenceType; + } + + return narrowTypeForContainerElementType( + evaluator, + referenceType, + evaluator.makeTopLevelTypeVarsConcrete(elementType) + ); + } + + // Narrowing in the negative case is possible only with tuples + // with a known length. + if ( + !isClassInstance(containerType) || + !ClassType.isBuiltIn(containerType, 'tuple') || + !containerType.priv.tupleTypeArgs + ) { + return referenceType; + } + + // Determine which tuple types can be eliminated. Only "None" and + // literal types can be handled here. + const typesToEliminate: Type[] = []; + containerType.priv.tupleTypeArgs.forEach((tupleEntry) => { + if (!tupleEntry.isUnbounded) { + if (isNoneInstance(tupleEntry.type)) { + typesToEliminate.push(tupleEntry.type); + } else if (isClassInstance(tupleEntry.type) && isLiteralType(tupleEntry.type)) { + typesToEliminate.push(tupleEntry.type); + } + } + }); + + if (typesToEliminate.length === 0) { + return referenceType; + } + + return mapSubtypes(referenceType, (referenceSubtype) => { + referenceSubtype = evaluator.makeTopLevelTypeVarsConcrete(referenceSubtype); + if (isClassInstance(referenceSubtype) && referenceSubtype.priv.literalValue === undefined) { + // If we're able to enumerate all possible literal values + // (for bool or enum), we can eliminate all others in a negative test. 
+ const allLiteralTypes = enumerateLiteralsForType(evaluator, referenceSubtype); + if (allLiteralTypes && allLiteralTypes.length > 0) { + return combineTypes( + allLiteralTypes.filter((type) => !typesToEliminate.some((t) => isTypeSame(t, type))) + ); + } + } + + if (typesToEliminate.some((t) => isTypeSame(t, referenceSubtype))) { + return undefined; + } + + return referenceSubtype; + }); +} + +export function getElementTypeForContainerNarrowing(containerType: Type) { + // We support contains narrowing only for certain built-in types that have been specialized. + const supportedContainers = ['list', 'set', 'frozenset', 'deque', 'tuple', 'dict', 'defaultdict', 'OrderedDict']; + if (!isClassInstance(containerType) || !ClassType.isBuiltIn(containerType, supportedContainers)) { + return undefined; + } + + if (!containerType.priv.typeArgs || containerType.priv.typeArgs.length < 1) { + return undefined; + } + + let elementType = containerType.priv.typeArgs[0]; + if (isTupleClass(containerType) && containerType.priv.tupleTypeArgs) { + elementType = combineTypes(containerType.priv.tupleTypeArgs.map((t) => t.type)); + } + + return elementType; +} + +export function narrowTypeForContainerElementType(evaluator: TypeEvaluator, referenceType: Type, elementType: Type) { + return evaluator.mapSubtypesExpandTypeVars(referenceType, /* options */ undefined, (referenceSubtype) => { + return mapSubtypes(elementType, (elementSubtype) => { + if (isAnyOrUnknown(elementSubtype)) { + return referenceSubtype; + } + + // If the two types are disjoint (i.e. are not comparable), eliminate this subtype. + if (!evaluator.isTypeComparable(elementSubtype, referenceSubtype)) { + return undefined; + } + + // If one of the two types is a literal, we can narrow to that type. 
+ if ( + isClassInstance(elementSubtype) && + (isLiteralLikeType(elementSubtype) || isNoneInstance(elementSubtype)) && + evaluator.assignType(referenceSubtype, elementSubtype) + ) { + return stripTypeForm(addConditionToType(elementSubtype, referenceSubtype.props?.condition)); + } + + if ( + isClassInstance(referenceSubtype) && + (isLiteralLikeType(referenceSubtype) || isNoneInstance(referenceSubtype)) && + evaluator.assignType(elementSubtype, referenceSubtype) + ) { + return stripTypeForm(addConditionToType(referenceSubtype, elementSubtype.props?.condition)); + } + + // If the element type is a known class object that is assignable to + // the reference type, we can narrow to that class object. + if ( + isInstantiableClass(elementSubtype) && + !elementSubtype.priv.includeSubclasses && + evaluator.assignType(referenceSubtype, elementSubtype) + ) { + return stripTypeForm(addConditionToType(elementSubtype, referenceSubtype.props?.condition)); + } + + // It's not safe to narrow. + return referenceSubtype; + }); + }); +} + +// Attempts to narrow a type based on whether it is a TypedDict with +// a literal key value. +function narrowTypeForTypedDictKey( + evaluator: TypeEvaluator, + referenceType: Type, + literalKey: ClassType, + isPositiveTest: boolean +): Type { + const narrowedType = evaluator.mapSubtypesExpandTypeVars( + referenceType, + /* options */ undefined, + (subtype, unexpandedSubtype) => { + if (isParamSpec(unexpandedSubtype)) { + return unexpandedSubtype; + } + + if (isClassInstance(subtype) && ClassType.isTypedDictClass(subtype)) { + const entries = getTypedDictMembersForClass(evaluator, subtype, /* allowNarrowed */ true); + const tdEntry = entries.knownItems.get(literalKey.priv.literalValue as string) ?? 
entries.extraItems;
+
+ if (isPositiveTest) {
+ // The code that is commented out below implements the behavior that is technically
+ // correct, but until PEP 728 is ratified and we have a way to express "extra items"
+ // and closed TypedDicts, we'll preserve the older (less correct) behavior to enable
+ // narrowing of TypedDicts based on checks for specific keys.
+ // TODO - remove this behavior once PEP 728 is accepted and the feature is no
+ // longer experimental.
+ if (!tdEntry) {
+ return undefined;
+ }
+
+ // if (!tdEntry) {
+ // // If there is no TD entry for this key and no "extra items" defined,
+ // // we have to assume that the TypedDict may contain extra items, so
+ // // narrowing it isn't possible in this case.
+ // return subtype;
+ // }
+
+ // if (isNever(tdEntry.valueType)) {
+ // // If the entry is typed as Never or the "extra items" is typed as Never,
+ // // then this key cannot be present in the TypedDict, and we can eliminate it.
+ // return undefined;
+ // }
+
+ // If the entry is currently not required and not marked provided, we can mark
+ // it as provided after this guard expression confirms it is.
+ if (tdEntry.isRequired || tdEntry.isProvided) {
+ return subtype;
+ }
+
+ const newNarrowedEntriesMap = new Map(
+ subtype.priv.typedDictNarrowedEntries ?? []
+ );
+
+ // Add the new entry.
+ newNarrowedEntriesMap.set(literalKey.priv.literalValue as string, {
+ valueType: tdEntry.valueType,
+ isReadOnly: tdEntry.isReadOnly,
+ isRequired: false,
+ isProvided: true,
+ });
+
+ // Clone the TypedDict object with the new entries.
+ return ClassType.cloneAsInstance(
+ ClassType.cloneForNarrowedTypedDictEntries(
+ ClassType.cloneAsInstantiable(subtype),
+ newNarrowedEntriesMap
+ )
+ );
+ } else {
+ return tdEntry !== undefined && (tdEntry.isRequired || tdEntry.isProvided) ?
undefined : subtype; + } + } + + return subtype; + } + ); + + return narrowedType; +} + +// Attempts to narrow a TypedDict type based on a comparison (equal or not +// equal) between a discriminating entry type that has a declared literal +// type to a literal value. +export function narrowTypeForDiscriminatedDictEntryComparison( + evaluator: TypeEvaluator, + referenceType: Type, + indexLiteralType: ClassType, + literalType: Type, + isPositiveTest: boolean +): Type { + let canNarrow = true; + + const narrowedType = mapSubtypes(referenceType, (subtype) => { + if (isClassInstance(subtype) && ClassType.isTypedDictClass(subtype)) { + const symbolMap = getTypedDictMembersForClass(evaluator, subtype); + const tdEntry = symbolMap.knownItems.get(indexLiteralType.priv.literalValue as string); + + if (tdEntry && isLiteralTypeOrUnion(tdEntry.valueType)) { + if (isPositiveTest) { + let foundMatch = false; + + doForEachSubtype(literalType, (literalSubtype) => { + if (evaluator.assignType(tdEntry.valueType, literalSubtype)) { + foundMatch = true; + } + }); + + return foundMatch ? subtype : undefined; + } else { + let foundNonMatch = false; + + doForEachSubtype(literalType, (literalSubtype) => { + if (!evaluator.assignType(literalSubtype, tdEntry.valueType)) { + foundNonMatch = true; + } + }); + + return foundNonMatch ? subtype : undefined; + } + } + } + + canNarrow = false; + return subtype; + }); + + return canNarrow ? 
narrowedType : referenceType; +} + +export function narrowTypeForDiscriminatedTupleComparison( + evaluator: TypeEvaluator, + referenceType: Type, + indexLiteralType: ClassType, + literalType: Type, + isPositiveTest: boolean +): Type { + let canNarrow = true; + + const narrowedType = mapSubtypes(referenceType, (subtype) => { + if ( + isClassInstance(subtype) && + ClassType.isTupleClass(subtype) && + !isUnboundedTupleClass(subtype) && + typeof indexLiteralType.priv.literalValue === 'number' && + isClassInstance(literalType) + ) { + const indexValue = indexLiteralType.priv.literalValue; + if (subtype.priv.tupleTypeArgs && indexValue >= 0 && indexValue < subtype.priv.tupleTypeArgs.length) { + const tupleEntryType = subtype.priv.tupleTypeArgs[indexValue]?.type; + if (tupleEntryType && isLiteralTypeOrUnion(tupleEntryType)) { + if (isPositiveTest) { + return evaluator.assignType(tupleEntryType, literalType) ? subtype : undefined; + } else { + return evaluator.assignType(literalType, tupleEntryType) ? undefined : subtype; + } + } + } + } + + canNarrow = false; + return subtype; + }); + + return canNarrow ? narrowedType : referenceType; +} + +// Attempts to narrow a type based on a comparison (equal or not equal) +// between a discriminating field that has a declared literal type to a +// literal value. 
+export function narrowTypeForDiscriminatedLiteralFieldComparison( + evaluator: TypeEvaluator, + referenceType: Type, + memberName: string, + literalType: ClassType, + isPositiveTest: boolean +): Type { + const narrowedType = mapSubtypes(referenceType, (subtype) => { + let memberInfo: ClassMember | undefined; + + if (isClassInstance(subtype)) { + memberInfo = lookUpObjectMember(subtype, memberName); + } else if (isInstantiableClass(subtype)) { + memberInfo = lookUpClassMember(subtype, memberName); + } + + if (memberInfo && memberInfo.isTypeDeclared) { + let memberType = evaluator.getTypeOfMember(memberInfo); + + // Handle the case where the field is a property + // that has a declared literal return type for its getter. + if (isClassInstance(subtype) && isClassInstance(memberType) && isProperty(memberType)) { + const getterType = memberType.priv.fgetInfo?.methodType; + if (getterType && getterType.shared.declaredReturnType) { + const getterReturnType = FunctionType.getEffectiveReturnType(getterType); + if (getterReturnType) { + memberType = getterReturnType; + } + } + } + + if (isLiteralTypeOrUnion(memberType, /* allowNone */ true)) { + if (isPositiveTest) { + return evaluator.assignType(memberType, literalType) ? subtype : undefined; + } else { + return evaluator.assignType(literalType, memberType) ? undefined : subtype; + } + } + } + + return subtype; + }); + + return narrowedType; +} + +// Attempts to narrow a type based on a comparison (equal or not equal) +// between a discriminating field that has a declared None type to a +// None. 
+function narrowTypeForDiscriminatedFieldNoneComparison( + evaluator: TypeEvaluator, + referenceType: Type, + memberName: string, + isPositiveTest: boolean +): Type { + return mapSubtypes(referenceType, (subtype) => { + let memberInfo: ClassMember | undefined; + if (isClassInstance(subtype)) { + memberInfo = lookUpObjectMember(subtype, memberName); + } else if (isInstantiableClass(subtype)) { + memberInfo = lookUpClassMember(subtype, memberName); + } + + if (memberInfo && memberInfo.isTypeDeclared) { + const memberType = evaluator.makeTopLevelTypeVarsConcrete(evaluator.getTypeOfMember(memberInfo)); + let canNarrow = true; + + if (isPositiveTest) { + doForEachSubtype(memberType, (memberSubtype) => { + memberSubtype = evaluator.makeTopLevelTypeVarsConcrete(memberSubtype); + + // Don't attempt to narrow if the member is a descriptor or property. + if (isProperty(memberSubtype) || isMaybeDescriptorInstance(memberSubtype)) { + canNarrow = false; + } + + if (isAnyOrUnknown(memberSubtype) || isNoneInstance(memberSubtype) || isNever(memberSubtype)) { + canNarrow = false; + } + }); + } else { + canNarrow = isNoneInstance(memberType); + } + + if (canNarrow) { + return undefined; + } + } + + return subtype; + }); +} + +// Attempts to narrow a type based on a "type(x) is y" or "type(x) is not y" check. +function narrowTypeForTypeIs(evaluator: TypeEvaluator, type: Type, classTypes: ClassType[], isPositiveTest: boolean) { + // We currently don't support narrowing in the negative direction + // when there are more than one class types. 
+ if (!isPositiveTest && classTypes.length > 1) { + return type; + } + + const typesToCombine = classTypes.map((classType) => { + return evaluator.mapSubtypesExpandTypeVars( + type, + /* options */ undefined, + (subtype: Type, unexpandedSubtype: Type) => { + if (isClassInstance(subtype)) { + const matches = ClassType.isDerivedFrom(classType, ClassType.cloneAsInstantiable(subtype)); + if (isPositiveTest) { + if (matches) { + if (ClassType.isSameGenericClass(ClassType.cloneAsInstantiable(subtype), classType)) { + return addConditionToType(subtype, getTypeCondition(classType)); + } + + return addConditionToType(ClassType.cloneAsInstance(classType), subtype.props?.condition); + } + + if (!classType.priv.includeSubclasses) { + return undefined; + } + + if (!isTypeVar(unexpandedSubtype) || !TypeVarType.isSelf(unexpandedSubtype)) { + return addConditionToType(subtype, classType.props?.condition); + } + } + + if (!classType.priv.includeSubclasses) { + // If the class if marked final and it matches, then + // we can eliminate it in the negative case. + if (matches && ClassType.isFinal(subtype)) { + return undefined; + } + + // We can't eliminate the subtype in the negative + // case because it could be a subclass of the type, + // in which case `type(x) is y` would fail. + return subtype; + } + } + + if (isAnyOrUnknown(subtype)) { + return isPositiveTest + ? ClassType.cloneAsInstance(addConditionToType(classType, getTypeCondition(subtype))) + : subtype; + } + + return unexpandedSubtype; + } + ); + }); + + return combineTypes(typesToCombine); +} + +// Attempts to narrow a type based on a comparison with a class using "is" or +// "is not". This pattern is sometimes used for sentinels. 
+function narrowTypeForClassComparison( + evaluator: TypeEvaluator, + referenceType: Type, + classType: ClassType, + isPositiveTest: boolean +): Type { + return mapSubtypes(referenceType, (subtype) => { + let concreteSubtype = evaluator.makeTopLevelTypeVarsConcrete(subtype); + + if (isPositiveTest) { + if ( + isClassInstance(concreteSubtype) && + TypeBase.isInstance(subtype) && + ClassType.isBuiltIn(concreteSubtype, 'type') + ) { + concreteSubtype = + concreteSubtype.priv.typeArgs && concreteSubtype.priv.typeArgs.length > 0 + ? convertToInstantiable(concreteSubtype.priv.typeArgs[0]) + : UnknownType.create(); + } + + if (isAnyOrUnknown(concreteSubtype)) { + return addConditionToType(classType, getTypeCondition(concreteSubtype)); + } + + if (isClass(concreteSubtype)) { + if (TypeBase.isInstance(concreteSubtype)) { + return ClassType.isBuiltIn(concreteSubtype, 'object') ? classType : undefined; + } + + const isSuperType = isFilterSuperclass(subtype, concreteSubtype, classType, classType); + + if (!classType.priv.includeSubclasses) { + // Handle the case where the LHS and RHS operands are specific + // classes, as opposed to types that represent classes and their + // subclasses. + if (!concreteSubtype.priv.includeSubclasses) { + return ClassType.isSameGenericClass(concreteSubtype, classType) ? 
classType : undefined; + } + + if (isSuperType) { + return addConditionToType(classType, getTypeCondition(concreteSubtype)); + } + + const isSubType = ClassType.isDerivedFrom(classType, concreteSubtype); + if (isSubType) { + return addConditionToType(classType, getTypeCondition(concreteSubtype)); + } + + return undefined; + } + + if (ClassType.isFinal(concreteSubtype) && !isSuperType) { + return undefined; + } + } + } else { + if ( + isInstantiableClass(concreteSubtype) && + ClassType.isSameGenericClass(classType, concreteSubtype) && + ClassType.isFinal(classType) + ) { + return undefined; + } + } + + return subtype; + }); +} + +function isFilterSuperclass( + varType: Type, + concreteVarType: ClassType, + filterType: Type, + concreteFilterType: ClassType +) { + if (isTypeVar(filterType) || concreteFilterType.priv.literalValue !== undefined) { + return isTypeSame(convertToInstance(filterType), varType); + } + + // If the filter type represents all possible subclasses + // of a type, we can't make any statements about its superclass + // relationship with concreteVarType. + if (concreteFilterType.priv.includeSubclasses) { + return false; + } + + if (ClassType.isDerivedFrom(concreteVarType, concreteFilterType)) { + return true; + } + + // Handle the special case where the variable type is a TypedDict and + // we're filtering against 'dict'. TypedDict isn't derived from dict, + // but at runtime, isinstance returns True. + if (ClassType.isBuiltIn(concreteFilterType, 'dict') && ClassType.isTypedDictClass(concreteVarType)) { + return true; + } + + return false; +} + +// Attempts to narrow a type (make it more constrained) based on a comparison +// (equal or not equal) to a literal value. It also handles "is" or "is not" +// operators if isIsOperator is true. 
+function narrowTypeForLiteralComparison( + evaluator: TypeEvaluator, + referenceType: Type, + literalType: ClassType, + isPositiveTest: boolean, + isIsOperator: boolean +): Type { + return evaluator.mapSubtypesExpandTypeVars(referenceType, /* options */ undefined, (subtype) => { + subtype = evaluator.makeTopLevelTypeVarsConcrete(subtype); + + if (isAnyOrUnknown(subtype)) { + if (isPositiveTest) { + return literalType; + } + + return subtype; + } + + if (isClassInstance(subtype) && ClassType.isSameGenericClass(literalType, subtype)) { + if (subtype.priv.literalValue !== undefined) { + const literalValueMatches = ClassType.isLiteralValueSame(subtype, literalType); + if (isPositiveTest) { + return literalValueMatches ? subtype : undefined; + } + + const isSingleton = + ClassType.isEnumClass(literalType) || + isSentinelLiteral(subtype) || + ClassType.isBuiltIn(literalType, 'bool'); + + // For negative tests, we can eliminate the literal value if it doesn't match, + // but only for equality tests or for 'is' tests that involve enums, bools, or sentinels. + return literalValueMatches && (isSingleton || !isIsOperator) ? undefined : subtype; + } + + if (isPositiveTest) { + return literalType; + } + + // If we're able to enumerate all possible literal values + // (for bool or enum), we can eliminate all others in a negative test. + const allLiteralTypes = enumerateLiteralsForType(evaluator, subtype); + if (allLiteralTypes && allLiteralTypes.length > 0) { + return combineTypes(allLiteralTypes.filter((type) => !ClassType.isLiteralValueSame(type, literalType))); + } + + return subtype; + } + + if (isPositiveTest) { + if (isClassInstance(subtype) && ClassType.isBuiltIn(subtype, 'LiteralString')) { + return literalType; + } + + if (isIsOperator || isNoneInstance(subtype)) { + const isSubtype = evaluator.assignType(subtype, literalType); + return isSubtype ? 
literalType : undefined; + } + } + + return subtype; + }); +} + +export function enumerateLiteralsForType(evaluator: TypeEvaluator, type: ClassType): ClassType[] | undefined { + if (ClassType.isBuiltIn(type, 'bool')) { + // Booleans have only two types: True and False. + return [ + ClassType.cloneWithLiteral(type, /* value */ true), + ClassType.cloneWithLiteral(type, /* value */ false), + ]; + } + + if (ClassType.isEnumClass(type)) { + // Enum expansion doesn't apply to enum classes that derive + // from enum.Flag. + if (type.shared.baseClasses.some((baseClass) => isClass(baseClass) && ClassType.isBuiltIn(baseClass, 'Flag'))) { + return undefined; + } + + // Enumerate all of the values in this enumeration. + const enumList: ClassType[] = []; + const fields = ClassType.getSymbolTable(type); + fields.forEach((symbol, name) => { + if (!symbol.isIgnoredForProtocolMatch()) { + let symbolType = evaluator.getEffectiveTypeOfSymbol(symbol); + symbolType = transformTypeForEnumMember(evaluator, type, name) ?? symbolType; + + if ( + isClassInstance(symbolType) && + ClassType.isSameGenericClass(type, symbolType) && + symbolType.priv.literalValue !== undefined + ) { + enumList.push(symbolType); + } + } + }); + + return enumList; + } + + return undefined; +} + +// Determines whether the expression name node is in the same scope or +// an outer scope from the reference name node. This allows isMatchingExpression +// to determine whether two name nodes are referring to the same symbol. +function isNameSameScope(evaluator: TypeEvaluator, reference: NameNode, expression: NameNode): boolean { + const refSymbol = evaluator.lookUpSymbolRecursive(reference, reference.d.value, /* honorCodeFlow */ false); + const exprSymbol = evaluator.lookUpSymbolRecursive(expression, expression.d.value, /* honorCodeFlow */ false); + + if (!refSymbol || !exprSymbol) { + // This shouldn't happen, but just to be safe... 
+ return true; + } + + const refScope = refSymbol.scope; + const exprScope = exprSymbol.scope; + + if (refScope === exprScope) { + return true; + } + + return isScopeContainedWithin(refScope, exprScope); +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/typePrinter.ts b/python-parser/packages/pyright-internal/src/analyzer/typePrinter.ts new file mode 100644 index 00000000..42531758 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/typePrinter.ts @@ -0,0 +1,1539 @@ +/* + * typePrinter.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Converts a type into a string representation. + */ + +import { appendArray } from '../common/collectionUtils'; +import { ConfigOptions } from '../common/configOptions'; +import { assert } from '../common/debug'; +import { ParamCategory } from '../parser/parseNodes'; +import { isTypedKwargs } from './parameterUtils'; +import * as ParseTreeUtils from './parseTreeUtils'; +import { printBytesLiteral, printStringLiteral } from './typePrinterUtils'; +import { + ClassType, + EnumLiteral, + FunctionParam, + FunctionType, + isAnyOrUnknown, + isClass, + isClassInstance, + isInstantiableClass, + isNever, + isParamSpec, + isTypeSame, + isTypeVar, + isTypeVarTuple, + isUnknown, + isUnpacked, + maxTypeRecursionCount, + OverloadedType, + SentinelLiteral, + TupleTypeArg, + Type, + TypeBase, + TypeCategory, + TypeVarType, + UnionType, + Variance, +} from './types'; +import { + convertToInstance, + doForEachSubtype, + isNoneInstance, + isSentinelLiteral, + isTupleClass, + removeNoneFromUnion, +} from './typeUtils'; + +export const enum PrintTypeFlags { + None = 0, + + // Avoid printing "Unknown" and always use "Any" instead. + PrintUnknownWithAny = 1 << 0, + + // Omit type arguments for generic classes if they are "Unknown". + OmitTypeArgsIfUnknown = 1 << 1, + + // Omit printing type for param if type is not specified. 
+ OmitUnannotatedParamType = 1 << 2, + + // Print Union and Optional in PEP 604 format. + PEP604 = 1 << 3, + + // Include a parentheses around a union if there's more than + // one subtype. + ParenthesizeUnion = 1 << 4, + + // Expand type aliases to display their individual parts? + ExpandTypeAlias = 1 << 5, + + // Omit "*" for types that are conditionally constrained when + // used with constrained TypeVars. + OmitConditionalConstraint = 1 << 6, + + // Include a parentheses around a callable. + ParenthesizeCallable = 1 << 7, + + // Limit output to legal Python syntax. + PythonSyntax = 1 << 8, + + // Use Unpack instead of "*" for unpacked tuples and TypeVarTuples. + // Requires Python 3.11 or newer. + UseTypingUnpack = 1 << 9, + + // Expand TypedDict kwargs to show the keys from the TypedDict instead of **kwargs. + ExpandTypedDictArgs = 1 << 10, + + // Print the variance of a type parameter. + PrintTypeVarVariance = 1 << 11, + + // Use the fully-qualified name of classes, type aliases, modules, + // and functions rather than short names. + UseFullyQualifiedNames = 1 << 12, + + // Omit TypeVar scopes. 
+ OmitTypeVarScope = 1 << 13, +} + +export type FunctionReturnTypeCallback = (type: FunctionType) => Type; + +export function printType( + type: Type, + printTypeFlags: PrintTypeFlags, + returnTypeCallback: FunctionReturnTypeCallback +): string { + const uniqueNameMap = new UniqueNameMap(printTypeFlags, returnTypeCallback); + uniqueNameMap.build(type); + + return printTypeInternal(type, printTypeFlags, returnTypeCallback, uniqueNameMap, [], 0); +} + +export function printFunctionParts( + type: FunctionType, + printTypeFlags: PrintTypeFlags, + returnTypeCallback: FunctionReturnTypeCallback +): [string[], string] { + const uniqueNameMap = new UniqueNameMap(printTypeFlags, returnTypeCallback); + uniqueNameMap.build(type); + + return printFunctionPartsInternal(type, printTypeFlags, returnTypeCallback, uniqueNameMap, [], 0); +} + +export function printObjectTypeForClass( + type: ClassType, + printTypeFlags: PrintTypeFlags, + returnTypeCallback: FunctionReturnTypeCallback +): string { + const uniqueNameMap = new UniqueNameMap(printTypeFlags, returnTypeCallback); + uniqueNameMap.build(type); + + return printObjectTypeForClassInternal(type, printTypeFlags, returnTypeCallback, uniqueNameMap, [], 0); +} + +const maxLiteralStringLength = 50; + +export function isLiteralValueTruncated(type: ClassType): boolean { + if (typeof type.priv.literalValue === 'string') { + if (type.priv.literalValue.length > maxLiteralStringLength) { + return true; + } + } + + return false; +} + +export function printLiteralValueTruncated(type: ClassType): string { + if (type.shared.name === 'bytes') { + return 'bytes'; + } + + assert(type.shared.name === 'str'); + return 'LiteralString'; +} + +export function printLiteralValue(type: ClassType, quotation = "'"): string { + const literalValue = type.priv.literalValue; + if (literalValue === undefined) { + return ''; + } + + let literalStr: string; + if (typeof literalValue === 'string') { + let effectiveLiteralValue = literalValue; + + // Limit the 
length of the string literal. + if (literalValue.length > maxLiteralStringLength) { + effectiveLiteralValue = literalValue.substring(0, maxLiteralStringLength) + '…'; + } + + if (type.shared.name === 'bytes') { + literalStr = printBytesLiteral(effectiveLiteralValue); + } else { + literalStr = printStringLiteral(effectiveLiteralValue, quotation); + } + } else if (typeof literalValue === 'boolean') { + literalStr = literalValue ? 'True' : 'False'; + } else if (literalValue instanceof EnumLiteral) { + literalStr = `${literalValue.className}.${literalValue.itemName}`; + } else if (typeof literalValue === 'bigint') { + literalStr = literalValue.toString(); + if (literalStr.endsWith('n')) { + literalStr = literalStr.substring(0, literalStr.length - 1); + } + } else { + literalStr = literalValue.toString(); + } + + return literalStr; +} + +function printTypeInternal( + type: Type, + printTypeFlags: PrintTypeFlags, + returnTypeCallback: FunctionReturnTypeCallback, + uniqueNameMap: UniqueNameMap, + recursionTypes: Type[], + recursionCount: number +): string { + if (recursionCount > maxTypeRecursionCount) { + if (printTypeFlags & PrintTypeFlags.PythonSyntax) { + return 'Any'; + } + return ''; + } + recursionCount++; + + const originalPrintTypeFlags = printTypeFlags; + const parenthesizeUnion = (printTypeFlags & PrintTypeFlags.ParenthesizeUnion) !== 0; + printTypeFlags &= ~(PrintTypeFlags.ParenthesizeUnion | PrintTypeFlags.ParenthesizeCallable); + + // If this is a type alias, see if we should use its name rather than + // the type it represents. + const aliasInfo = type.props?.typeAliasInfo; + if (aliasInfo) { + let expandTypeAlias = true; + if ((printTypeFlags & PrintTypeFlags.ExpandTypeAlias) === 0) { + expandTypeAlias = false; + } else { + if (recursionTypes.find((t) => t === type)) { + expandTypeAlias = false; + } + } + + if (!expandTypeAlias) { + try { + recursionTypes.push(type); + let aliasName = + (printTypeFlags & PrintTypeFlags.UseFullyQualifiedNames) !== 0 + ? 
aliasInfo.shared.fullName + : aliasInfo.shared.name; + + // Use the fully-qualified name if the name isn't unique. + if (!uniqueNameMap.isUnique(aliasName)) { + aliasName = aliasInfo.shared.fullName; + } + + const typeParams = aliasInfo.shared.typeParams; + + if (typeParams && typeParams.length > 0) { + let argumentStrings: string[] | undefined; + + // If there is a type arguments array, it's a specialized type alias. + if (aliasInfo.typeArgs) { + if ( + (printTypeFlags & PrintTypeFlags.OmitTypeArgsIfUnknown) === 0 || + aliasInfo.typeArgs.some((typeArg) => !isUnknown(typeArg)) + ) { + argumentStrings = []; + aliasInfo.typeArgs.forEach((typeArg, index) => { + // Which type parameter does this map to? + const typeParam = + index < typeParams.length ? typeParams[index] : typeParams[typeParams.length - 1]; + + // If this type argument maps to a TypeVarTuple, unpack it. + if ( + isTypeVarTuple(typeParam) && + isClassInstance(typeArg) && + isTupleClass(typeArg) && + typeArg.priv.tupleTypeArgs && + typeArg.priv.tupleTypeArgs.every((typeArg) => !typeArg.isUnbounded) + ) { + typeArg.priv.tupleTypeArgs.forEach((tupleTypeArg) => { + argumentStrings!.push( + printTypeInternal( + tupleTypeArg.type, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ) + ); + }); + } else { + argumentStrings!.push( + printTypeInternal( + typeArg, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ) + ); + } + }); + } + } else { + if ( + (printTypeFlags & PrintTypeFlags.OmitTypeArgsIfUnknown) === 0 || + typeParams.some((typeParam) => !isUnknown(typeParam)) + ) { + argumentStrings = []; + typeParams.forEach((typeParam) => { + argumentStrings!.push( + printTypeInternal( + typeParam, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ) + ); + }); + } + } + + if (argumentStrings) { + if (argumentStrings.length === 0) { + aliasName += `[()]`; + } else { + aliasName += 
`[${argumentStrings.join(', ')}]`; + } + } + } + + // If it's a TypeVar, don't use the alias name. Instead, use the full + // name, which may have a scope associated with it. + if (type.category !== TypeCategory.TypeVar) { + return aliasName; + } + } finally { + recursionTypes.pop(); + } + } + } + + if ( + recursionTypes.find( + (t) => + t === type || + (!!t.props?.typeAliasInfo && t.props.typeAliasInfo.shared.fullName === aliasInfo?.shared.fullName) + ) || + recursionTypes.length > maxTypeRecursionCount + ) { + // If this is a recursive TypeVar, we've already expanded it once, so + // just print its name at this point. + if (isTypeVar(type) && type.shared.isSynthesized && type.shared.recursiveAlias) { + return type.shared.recursiveAlias.name; + } + + if (aliasInfo) { + if (!aliasInfo.shared.typeParams) { + let name = + (printTypeFlags & PrintTypeFlags.UseFullyQualifiedNames) !== 0 + ? aliasInfo.shared.fullName + : aliasInfo.shared.name; + if (!uniqueNameMap.isUnique(name)) { + name = aliasInfo.shared.fullName; + } + return name; + } + + try { + recursionTypes.push(type); + + return printTypeInternal( + type, + printTypeFlags & ~PrintTypeFlags.ExpandTypeAlias, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + } finally { + recursionTypes.pop(); + } + } + + return '...'; + } + + try { + recursionTypes.push(type); + + const includeConditionalIndicator = + (printTypeFlags & (PrintTypeFlags.OmitConditionalConstraint | PrintTypeFlags.PythonSyntax)) === 0; + const getConditionalIndicator = (subtype: Type) => { + return !!subtype.props?.condition && includeConditionalIndicator ? 
'*' : ''; + }; + const printWrappedType = (type: Type, typeToWrap: string) => { + return `${_printNestedInstantiable(type, typeToWrap)}${getConditionalIndicator(type)}`; + }; + + switch (type.category) { + case TypeCategory.Unbound: { + if (printTypeFlags & PrintTypeFlags.PythonSyntax) { + return 'Any'; + } + return 'Unbound'; + } + + case TypeCategory.Unknown: { + if (printTypeFlags & (PrintTypeFlags.PythonSyntax | PrintTypeFlags.PrintUnknownWithAny)) { + return 'Any'; + } + return 'Unknown'; + } + + case TypeCategory.Module: { + if (printTypeFlags & PrintTypeFlags.PythonSyntax) { + return 'Any'; + } + return `Module("${type.priv.moduleName}")`; + } + + case TypeCategory.Class: { + if (TypeBase.isInstance(type)) { + if (type.priv.literalValue !== undefined) { + if (isLiteralValueTruncated(type) && (printTypeFlags & PrintTypeFlags.PythonSyntax) !== 0) { + return printLiteralValueTruncated(type); + } else if (type.priv.literalValue instanceof SentinelLiteral) { + return type.priv.literalValue.className; + } else { + return `Literal[${printLiteralValue(type)}]`; + } + } + + return `${printObjectTypeForClassInternal( + type, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + )}${getConditionalIndicator(type)}`; + } else { + let typeToWrap: string; + + if (type.priv.literalValue !== undefined) { + if (isLiteralValueTruncated(type) && (printTypeFlags & PrintTypeFlags.PythonSyntax) !== 0) { + typeToWrap = printLiteralValueTruncated(type); + } else if (type.priv.literalValue instanceof SentinelLiteral) { + return type.priv.literalValue.className; + } else { + typeToWrap = `Literal[${printLiteralValue(type)}]`; + } + + return printWrappedType(type, typeToWrap); + } + + if (type.props?.specialForm) { + const specialFormText = printTypeInternal( + type.props.specialForm, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + + return specialFormText; + } + + typeToWrap = 
printObjectTypeForClassInternal( + type, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + + return printWrappedType(type, typeToWrap); + } + } + + case TypeCategory.Function: { + if (TypeBase.isInstantiable(type)) { + const typeString = printFunctionType( + FunctionType.cloneAsInstance(type), + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + return `type[${typeString}]`; + } + + return printFunctionType( + type, + originalPrintTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + } + + case TypeCategory.Overloaded: { + const overloads = OverloadedType.getOverloads(type).map((overload) => + printTypeInternal( + overload, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ) + ); + + if ((printTypeFlags & PrintTypeFlags.PythonSyntax) !== 0) { + return 'Callable[..., Any]'; + } + + if (overloads.length === 1) { + return overloads[0]; + } + + return `Overload[${overloads.join(', ')}]`; + } + + case TypeCategory.Union: { + // If this is a value expression that evaluates to a union type but is + // not a type alias, simply print the special form ("UnionType"). + if (TypeBase.isInstantiable(type) && type.props?.specialForm && !type.props?.typeAliasInfo) { + const specialFormText = printTypeInternal( + type.props.specialForm, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + + return specialFormText; + } + + // If we're using "|" notation, enclose callable subtypes in parens. + const updatedPrintTypeFlags = + printTypeFlags & PrintTypeFlags.PEP604 + ? 
printTypeFlags | PrintTypeFlags.ParenthesizeCallable + : printTypeFlags; + + return printUnionType( + type, + updatedPrintTypeFlags, + parenthesizeUnion, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + } + + case TypeCategory.TypeVar: { + // If it's synthesized, don't expose the internal name we generated. + // This will confuse users. The exception is if it's a bound synthesized + // type, in which case we'll print the bound type. This is used for + // "self" and "cls" parameters. + if (type.shared.isSynthesized) { + // If it's a synthesized type var used to implement recursive type + // aliases, return the type alias name. + if (type.shared.recursiveAlias) { + if ((printTypeFlags & PrintTypeFlags.ExpandTypeAlias) !== 0 && type.shared.boundType) { + return printTypeInternal( + TypeBase.isInstance(type) + ? convertToInstance(type.shared.boundType) + : type.shared.boundType, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + } + return type.shared.recursiveAlias.name; + } + + // If it's a synthesized type var used to implement `self` or `cls` types, + // print the type with a special character that indicates that the type + // is internally represented as a TypeVar. 
+ if (TypeVarType.isSelf(type) && type.shared.boundType) { + let boundTypeString = printTypeInternal( + type.shared.boundType, + printTypeFlags & ~PrintTypeFlags.ExpandTypeAlias, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + + if (!isAnyOrUnknown(type.shared.boundType)) { + if ( + (printTypeFlags & PrintTypeFlags.PythonSyntax) === 0 && + (printTypeFlags & PrintTypeFlags.OmitTypeVarScope) === 0 + ) { + boundTypeString = `Self@${boundTypeString}`; + } else { + boundTypeString = `Self`; + } + } + + if (TypeBase.isInstantiable(type)) { + return `${_printNestedInstantiable(type, boundTypeString)}`; + } + + return boundTypeString; + } + + return (printTypeFlags & (PrintTypeFlags.PrintUnknownWithAny | PrintTypeFlags.PythonSyntax)) !== 0 + ? 'Any' + : 'Unknown'; + } + + if (isParamSpec(type)) { + const paramSpecText = getReadableTypeVarName( + type, + (printTypeFlags & PrintTypeFlags.PythonSyntax) === 0 && + (printTypeFlags & PrintTypeFlags.OmitTypeVarScope) === 0 + ); + + if (type.priv.paramSpecAccess) { + return `${paramSpecText}.${type.priv.paramSpecAccess}`; + } + return paramSpecText; + } + + let typeVarName = getReadableTypeVarName( + type, + (printTypeFlags & PrintTypeFlags.PythonSyntax) === 0 && + (printTypeFlags & PrintTypeFlags.OmitTypeVarScope) === 0 + ); + + if (type.priv.isUnpacked) { + typeVarName = printUnpack(typeVarName, printTypeFlags); + } + + if (isTypeVarTuple(type) && type.priv.isInUnion) { + typeVarName = `Union[${typeVarName}]`; + } + + if (TypeBase.isInstantiable(type)) { + typeVarName = `${_printNestedInstantiable(type, typeVarName)}`; + } + + if (!isTypeVarTuple(type) && (printTypeFlags & PrintTypeFlags.PrintTypeVarVariance) !== 0) { + const varianceText = getTypeVarVarianceText(type); + if (varianceText) { + typeVarName = `${typeVarName} (${varianceText})`; + } + } + + return typeVarName; + } + + case TypeCategory.Never: { + return type.priv.isNoReturn ? 
'NoReturn' : 'Never'; + } + + case TypeCategory.Any: { + const anyType = type; + return anyType.priv.isEllipsis ? '...' : 'Any'; + } + } + + return ''; + } finally { + recursionTypes.pop(); + } +} + +function printUnionType( + type: UnionType, + printTypeFlags: PrintTypeFlags, + parenthesizeUnion: boolean, + returnTypeCallback: FunctionReturnTypeCallback, + uniqueNameMap: UniqueNameMap, + recursionTypes: Type[], + recursionCount: number +) { + // Allocate a set that refers to subtypes in the union by + // their indices. If the index is within the set, it is already + // accounted for in the output. + const subtypeHandledSet = new Set(); + + // Allocate another set that represents the textual representations + // of the subtypes in the union. + const subtypeStrings = new Set(); + + // Start by matching possible type aliases to the subtypes. + if ((printTypeFlags & PrintTypeFlags.ExpandTypeAlias) === 0 && type.priv.typeAliasSources) { + for (const typeAliasSource of type.priv.typeAliasSources) { + let matchedAllSubtypes = true; + let allSubtypesPreviouslyHandled = true; + const indicesCoveredByTypeAlias = new Set(); + + for (const sourceSubtype of typeAliasSource.priv.subtypes) { + let unionSubtypeIndex = 0; + let foundMatch = false; + const sourceSubtypeInstance = convertToInstance(sourceSubtype); + + for (const unionSubtype of type.priv.subtypes) { + if (isTypeSame(sourceSubtypeInstance, unionSubtype)) { + if (!subtypeHandledSet.has(unionSubtypeIndex)) { + allSubtypesPreviouslyHandled = false; + } + indicesCoveredByTypeAlias.add(unionSubtypeIndex); + foundMatch = true; + break; + } + + unionSubtypeIndex++; + } + + if (!foundMatch) { + matchedAllSubtypes = false; + break; + } + } + + if (matchedAllSubtypes && !allSubtypesPreviouslyHandled) { + subtypeStrings.add( + printTypeInternal( + typeAliasSource, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ) + ); + indicesCoveredByTypeAlias.forEach((index) => 
subtypeHandledSet.add(index)); + } + } + } + + const noneIndex = type.priv.subtypes.findIndex((subtype) => isNoneInstance(subtype)); + if (noneIndex >= 0 && !subtypeHandledSet.has(noneIndex)) { + const typeWithoutNone = removeNoneFromUnion(type); + if (isNever(typeWithoutNone)) { + return 'None'; + } + + const optionalType = printTypeInternal( + typeWithoutNone, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + + if (printTypeFlags & PrintTypeFlags.PEP604) { + const unionString = optionalType + ' | None'; + if (parenthesizeUnion) { + return `(${unionString})`; + } + return unionString; + } + + return 'Optional[' + optionalType + ']'; + } + + const literalObjectStrings = new Set(); + const literalClassStrings = new Set(); + doForEachSubtype(type, (subtype, index) => { + if (!subtypeHandledSet.has(index)) { + if (isClassInstance(subtype) && subtype.priv.literalValue !== undefined && !isSentinelLiteral(subtype)) { + if (isLiteralValueTruncated(subtype) && (printTypeFlags & PrintTypeFlags.PythonSyntax) !== 0) { + subtypeStrings.add(printLiteralValueTruncated(subtype)); + } else { + literalObjectStrings.add(printLiteralValue(subtype)); + } + } else if ( + isInstantiableClass(subtype) && + subtype.priv.literalValue !== undefined && + !isSentinelLiteral(subtype) + ) { + if (isLiteralValueTruncated(subtype) && (printTypeFlags & PrintTypeFlags.PythonSyntax) !== 0) { + subtypeStrings.add(`type[${printLiteralValueTruncated(subtype)}]`); + } else { + literalClassStrings.add(printLiteralValue(subtype)); + } + } else { + subtypeStrings.add( + printTypeInternal( + subtype, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ) + ); + } + } + }); + + const dedupedSubtypeStrings: string[] = []; + subtypeStrings.forEach((s) => dedupedSubtypeStrings.push(s)); + + if (literalObjectStrings.size > 0) { + const literalStrings: string[] = []; + literalObjectStrings.forEach((s) => 
literalStrings.push(s)); + dedupedSubtypeStrings.push(`Literal[${literalStrings.join(', ')}]`); + } + + if (literalClassStrings.size > 0) { + const literalStrings: string[] = []; + literalClassStrings.forEach((s) => literalStrings.push(s)); + dedupedSubtypeStrings.push(`type[Literal[${literalStrings.join(', ')}]]`); + } + + if (dedupedSubtypeStrings.length === 1) { + return dedupedSubtypeStrings[0]; + } + + if (printTypeFlags & PrintTypeFlags.PEP604) { + const unionString = dedupedSubtypeStrings.join(' | '); + if (parenthesizeUnion) { + return `(${unionString})`; + } + return unionString; + } + + return `Union[${dedupedSubtypeStrings.join(', ')}]`; +} + +function printFunctionType( + type: FunctionType, + printTypeFlags: PrintTypeFlags, + returnTypeCallback: FunctionReturnTypeCallback, + uniqueNameMap: UniqueNameMap, + recursionTypes: Type[], + recursionCount: number +) { + if (printTypeFlags & PrintTypeFlags.PythonSyntax) { + const paramSpec = FunctionType.getParamSpecFromArgsKwargs(type); + const typeWithoutParamSpec = paramSpec ? FunctionType.cloneRemoveParamSpecArgsKwargs(type) : type; + + // Callable works only in cases where all parameters are positional-only. 
+ let isPositionalParamsOnly = false; + if (typeWithoutParamSpec.shared.parameters.length === 0) { + isPositionalParamsOnly = true; + } else { + if (typeWithoutParamSpec.shared.parameters.every((param) => param.category === ParamCategory.Simple)) { + const lastParam = + typeWithoutParamSpec.shared.parameters[typeWithoutParamSpec.shared.parameters.length - 1]; + if (!lastParam.name) { + isPositionalParamsOnly = true; + } + } + } + + const returnType = returnTypeCallback(typeWithoutParamSpec); + let returnTypeString = 'Any'; + if (returnType) { + returnTypeString = printTypeInternal( + returnType, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + } + + if (isPositionalParamsOnly) { + const paramTypes: string[] = []; + + typeWithoutParamSpec.shared.parameters.forEach((param, index) => { + if (param.name) { + const paramType = FunctionType.getParamType(typeWithoutParamSpec, index); + if (recursionTypes.length < maxTypeRecursionCount) { + paramTypes.push( + printTypeInternal( + paramType, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ) + ); + } else { + paramTypes.push('Any'); + } + } + }); + + if (paramSpec) { + if (paramTypes.length > 0) { + return `Callable[Concatenate[${paramTypes.join(', ')}, ${ + paramSpec.shared.name + }], ${returnTypeString}]`; + } + + return `Callable[${paramSpec.shared.name}, ${returnTypeString}]`; + } + + return `Callable[[${paramTypes.join(', ')}], ${returnTypeString}]`; + } else { + // We can't represent this type using a Callable so default to + // a "catch all" Callable. 
+ return `Callable[..., ${returnTypeString}]`; + } + } else { + const parts = printFunctionPartsInternal( + type, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + const paramSignature = `(${parts[0].join(', ')})`; + + if (FunctionType.isParamSpecValue(type)) { + if (parts[0].length === 1 && parts[0][0] === '...') { + return parts[0][0]; + } + + return paramSignature; + } + + const fullSignature = `${paramSignature} -> ${parts[1]}`; + const parenthesizeCallable = (printTypeFlags & PrintTypeFlags.ParenthesizeCallable) !== 0; + if (parenthesizeCallable) { + return `(${fullSignature})`; + } + + return fullSignature; + } +} + +function printObjectTypeForClassInternal( + type: ClassType, + printTypeFlags: PrintTypeFlags, + returnTypeCallback: FunctionReturnTypeCallback, + uniqueNameMap: UniqueNameMap, + recursionTypes: Type[], + recursionCount: number +): string { + let objName = type.priv.aliasName; + if (!objName) { + objName = + (printTypeFlags & PrintTypeFlags.UseFullyQualifiedNames) !== 0 ? type.shared.fullName : type.shared.name; + } + + // Special-case NoneType to convert it to None. + if (ClassType.isBuiltIn(type, 'NoneType')) { + objName = 'None'; + } + + // Use the fully-qualified name if the name isn't unique. + if (!uniqueNameMap.isUnique(objName)) { + objName = type.shared.fullName; + } + + // If this is a pseudo-generic class, don't display the type arguments + // or type parameters because it will confuse users. + if (!ClassType.isPseudoGenericClass(type)) { + const typeParams = ClassType.getTypeParams(type); + const lastTypeParam = typeParams.length > 0 ? typeParams[typeParams.length - 1] : undefined; + const isVariadic = lastTypeParam ? isTypeVarTuple(lastTypeParam) : false; + + // If there is a type arguments array, it's a specialized class. + const typeArgs: TupleTypeArg[] | undefined = + type.priv.tupleTypeArgs ?? 
+ type.priv.typeArgs?.map((t) => { + return { type: t, isUnbounded: false }; + }); + if (typeArgs) { + // Handle Tuple[()] as a special case. + if (typeArgs.length > 0) { + const typeArgStrings: string[] = []; + let isAllUnknown = true; + + typeArgs.forEach((typeArg, index) => { + const typeParam = index < typeParams.length ? typeParams[index] : undefined; + if ( + typeParam && + isTypeVarTuple(typeParam) && + isClassInstance(typeArg.type) && + ClassType.isBuiltIn(typeArg.type, 'tuple') && + typeArg.type.priv.tupleTypeArgs + ) { + // Expand the tuple type that maps to the TypeVarTuple. + if (typeArg.type.priv.tupleTypeArgs.length === 0) { + if (!isUnknown(typeArg.type)) { + isAllUnknown = false; + } + + if (index === 0) { + typeArgStrings.push(printUnpack('tuple[()]', printTypeFlags)); + } + } else { + appendArray( + typeArgStrings, + typeArg.type.priv.tupleTypeArgs.map((typeArg) => { + if (!isUnknown(typeArg.type)) { + isAllUnknown = false; + } + + const typeArgText = printTypeInternal( + typeArg.type, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + + if (typeArg.isUnbounded) { + return printUnpack(`tuple[${typeArgText}, ...]`, printTypeFlags); + } + + return typeArgText; + }) + ); + } + } else { + if (!isUnknown(typeArg.type)) { + isAllUnknown = false; + } + + const typeArgTypeText = printTypeInternal( + typeArg.type, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + + if (typeArg.isUnbounded) { + if (typeArgs.length === 1) { + typeArgStrings.push(typeArgTypeText, '...'); + } else { + typeArgStrings.push(printUnpack(`tuple[${typeArgTypeText}, ...]`, printTypeFlags)); + } + } else { + typeArgStrings.push(typeArgTypeText); + } + } + }); + + if (type.priv.isUnpacked) { + objName = printUnpack(objName, printTypeFlags); + } + + if ((printTypeFlags & PrintTypeFlags.OmitTypeArgsIfUnknown) === 0 || !isAllUnknown) { + objName += '[' + typeArgStrings.join(', ') + ']'; + } + 
} else { + if (type.priv.isUnpacked) { + objName = printUnpack(objName, printTypeFlags); + } + + if (ClassType.isTupleClass(type) || isVariadic) { + objName += '[()]'; + } + } + } else { + if (type.priv.isUnpacked) { + objName = printUnpack(objName, printTypeFlags); + } + + if (typeParams.length > 0) { + if ( + (printTypeFlags & PrintTypeFlags.OmitTypeArgsIfUnknown) === 0 || + typeParams.some((typeParam) => !isUnknown(typeParam)) + ) { + objName += + '[' + + typeParams + .map((typeParam) => { + return printTypeInternal( + typeParam, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + }) + .join(', ') + + ']'; + } + } + } + } + + // Wrap in a "Partial" for TypedDict that has been synthesized as partial. + if (type.priv.isTypedDictPartial) { + if ((printTypeFlags & PrintTypeFlags.PythonSyntax) === 0) { + objName = `Partial[${objName}]`; + } + } + + return objName; +} + +function printFunctionPartsInternal( + type: FunctionType, + printTypeFlags: PrintTypeFlags, + returnTypeCallback: FunctionReturnTypeCallback, + uniqueNameMap: UniqueNameMap, + recursionTypes: Type[], + recursionCount: number +): [string[], string] { + const paramTypeStrings: string[] = []; + let sawDefinedName = false; + + // Remove the (*args: P.args, **kwargs: P.kwargs) from the end of the parameter list. + const paramSpec = FunctionType.getParamSpecFromArgsKwargs(type); + if (paramSpec) { + type = FunctionType.cloneRemoveParamSpecArgsKwargs(type); + } + + type.shared.parameters.forEach((param, index) => { + const paramType = FunctionType.getParamType(type, index); + const defaultType = FunctionType.getParamDefaultType(type, index); + + // Handle specialized TypeVarTuples specially. 
+ if ( + index === type.shared.parameters.length - 1 && + param.category === ParamCategory.ArgsList && + isTypeVarTuple(paramType) + ) { + const specializedParamType = FunctionType.getParamType(type, index); + if ( + isClassInstance(specializedParamType) && + ClassType.isBuiltIn(specializedParamType, 'tuple') && + specializedParamType.priv.tupleTypeArgs + ) { + specializedParamType.priv.tupleTypeArgs.forEach((paramType) => { + const paramString = printTypeInternal( + paramType.type, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + paramTypeStrings.push(paramString); + }); + return; + } + } + + // Handle expanding TypedDict kwargs specially. + if ( + isTypedKwargs(param, paramType) && + printTypeFlags & PrintTypeFlags.ExpandTypedDictArgs && + paramType.category === TypeCategory.Class + ) { + paramType.shared.typedDictEntries!.knownItems.forEach((v, k) => { + const valueTypeString = printTypeInternal( + v.valueType, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + paramTypeStrings.push(`${k}: ${valueTypeString}`); + }); + + const extraItemsType = paramType.shared.typedDictEntries?.extraItems?.valueType; + if (extraItemsType && !isNever(extraItemsType)) { + const valueTypeString = printTypeInternal( + extraItemsType, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ); + paramTypeStrings.push(`**kwargs: ${valueTypeString}`); + } + + return; + } + + let paramString = ''; + if (param.category === ParamCategory.ArgsList) { + if (!param.name || !FunctionParam.isNameSynthesized(param)) { + paramString += '*'; + } + } else if (param.category === ParamCategory.KwargsDict) { + paramString += '**'; + } + + let emittedParamName = false; + if (param.name && !FunctionParam.isNameSynthesized(param)) { + paramString += param.name; + sawDefinedName = true; + emittedParamName = true; + } else if (printTypeFlags & PrintTypeFlags.PythonSyntax) 
{ + paramString += `__p${index}`; + sawDefinedName = true; + emittedParamName = true; + } + + let defaultValueAssignment = '='; + let isParamSpecArgsKwargsParam = false; + + if (param.name) { + // Avoid printing type types if parameter have unknown type. + if (FunctionParam.isTypeDeclared(param) || FunctionParam.isTypeInferred(param)) { + const paramType = FunctionType.getParamType(type, index); + let paramTypeString = + recursionTypes.length < maxTypeRecursionCount + ? printTypeInternal( + paramType, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ) + : ''; + + if (emittedParamName) { + paramString += ': '; + } else if (param.category === ParamCategory.ArgsList && !isUnpacked(paramType)) { + paramString += '*'; + } + + if (param.category === ParamCategory.KwargsDict && isUnpacked(paramType)) { + if (printTypeFlags & PrintTypeFlags.PythonSyntax) { + // Use "Unpack" because ** isn't legal syntax prior to Python 3.12. + paramTypeString = `Unpack[${paramTypeString.substring(1)}]`; + } else { + // If this is an unpacked TypeDict for a **kwargs parameter, add another star. + paramTypeString = '*' + paramTypeString; + } + } + + paramString += paramTypeString; + + if (isParamSpec(paramType)) { + if (param.category === ParamCategory.ArgsList || param.category === ParamCategory.KwargsDict) { + isParamSpecArgsKwargsParam = true; + } + } + + // PEP8 indicates that the "=" for the default value should have surrounding + // spaces when used with a type annotation. 
+ defaultValueAssignment = ' = '; + } else if ((printTypeFlags & PrintTypeFlags.OmitTypeArgsIfUnknown) === 0) { + if (!FunctionParam.isNameSynthesized(param)) { + paramString += ': '; + } + if (printTypeFlags & (PrintTypeFlags.PrintUnknownWithAny | PrintTypeFlags.PythonSyntax)) { + paramString += 'Any'; + } else { + paramString += 'Unknown'; + } + defaultValueAssignment = ' = '; + } + } else if (param.category === ParamCategory.Simple) { + if (sawDefinedName) { + paramString += '/'; + } else { + return; + } + } + + if (defaultType) { + if (param.defaultExpr) { + paramString += defaultValueAssignment + ParseTreeUtils.printExpression(param.defaultExpr); + } else { + // If the function doesn't originate from a function declaration (e.g. it is + // synthesized), we can't get to the default declaration, but we can still indicate + // that there is a default value provided. + paramString += defaultValueAssignment + '...'; + } + } + + // If this is a (...) signature, replace the *args, **kwargs with "...". + if (FunctionType.isGradualCallableForm(type) && !isParamSpecArgsKwargsParam) { + if (param.category === ParamCategory.ArgsList) { + paramString = '...'; + } else if (param.category === ParamCategory.KwargsDict) { + return; + } + } + + paramTypeStrings.push(paramString); + }); + + if (paramSpec) { + if (printTypeFlags & PrintTypeFlags.PythonSyntax) { + paramTypeStrings.push(`*args: ${paramSpec}.args`); + paramTypeStrings.push(`**kwargs: ${paramSpec}.kwargs`); + } else { + paramTypeStrings.push( + `**${printTypeInternal( + paramSpec, + printTypeFlags, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + )}` + ); + } + } + + const returnType = returnTypeCallback(type); + const returnTypeString = + recursionTypes.length < maxTypeRecursionCount + ? 
printTypeInternal( + returnType, + printTypeFlags | PrintTypeFlags.ParenthesizeUnion | PrintTypeFlags.ParenthesizeCallable, + returnTypeCallback, + uniqueNameMap, + recursionTypes, + recursionCount + ) + : ''; + + return [paramTypeStrings, returnTypeString]; +} + +function printUnpack(textToWrap: string, flags: PrintTypeFlags) { + return flags & PrintTypeFlags.UseTypingUnpack ? `Unpack[${textToWrap}]` : `*${textToWrap}`; +} + +// Surrounds a printed type with Type[...] as many times as needed +// for the nested instantiable count. +function _printNestedInstantiable(type: Type, textToWrap: string) { + const nestedTypes = (type.props?.instantiableDepth ?? 0) + 1; + + for (let nestLevel = 0; nestLevel < nestedTypes; nestLevel++) { + textToWrap = `type[${textToWrap}]`; + } + + return textToWrap; +} + +function getReadableTypeVarName(type: TypeVarType, includeScope: boolean) { + return TypeVarType.getReadableName(type, includeScope); +} + +function getTypeVarVarianceText(type: TypeVarType) { + const computedVariance = type.priv.computedVariance ?? type.shared.declaredVariance; + if (computedVariance === Variance.Invariant) { + return 'invariant'; + } + + if (computedVariance === Variance.Covariant) { + return 'covariant'; + } + + if (computedVariance === Variance.Contravariant) { + return 'contravariant'; + } + + return ''; +} + +// Represents a map of named types (classes and type aliases) that appear within +// a specified type to determine whether any of the names require disambiguation +// (i.e. their fully-qualified name is required). 
+class UniqueNameMap { + private _map = new Map(); + + constructor(private _printTypeFlags: PrintTypeFlags, private _returnTypeCallback: FunctionReturnTypeCallback) {} + + build(type: Type, recursionTypes: Type[] = [], recursionCount = 0) { + if (recursionCount > maxTypeRecursionCount) { + return; + } + recursionCount++; + + const aliasInfo = type.props?.typeAliasInfo; + if (aliasInfo) { + let expandTypeAlias = true; + if ((this._printTypeFlags & PrintTypeFlags.ExpandTypeAlias) === 0) { + expandTypeAlias = false; + } else { + if (recursionTypes.find((t) => t === type)) { + expandTypeAlias = false; + } + } + + if (!expandTypeAlias) { + const typeAliasName = + (this._printTypeFlags & PrintTypeFlags.UseFullyQualifiedNames) !== 0 + ? aliasInfo.shared.fullName + : aliasInfo.shared.name; + this._addIfUnique(typeAliasName, type, /* useTypeAliasName */ true); + + // Recursively add the type arguments if present. + if (aliasInfo.typeArgs) { + recursionTypes.push(type); + + try { + aliasInfo.typeArgs.forEach((typeArg) => { + this.build(typeArg, recursionTypes, recursionCount); + }); + } finally { + recursionTypes.pop(); + } + } + + return; + } + } + + try { + recursionTypes.push(type); + + switch (type.category) { + case TypeCategory.Function: { + type.shared.parameters.forEach((_, index) => { + const paramType = FunctionType.getParamType(type, index); + this.build(paramType, recursionTypes, recursionCount); + }); + + const returnType = this._returnTypeCallback(type); + this.build(returnType, recursionTypes, recursionCount); + break; + } + + case TypeCategory.Overloaded: { + OverloadedType.getOverloads(type).forEach((overload) => { + this.build(overload, recursionTypes, recursionCount); + }); + break; + } + + case TypeCategory.Class: { + if (type.priv.literalValue !== undefined) { + break; + } + + let className = type.priv.aliasName; + if (!className) { + className = + (this._printTypeFlags & PrintTypeFlags.UseFullyQualifiedNames) !== 0 + ? 
type.shared.fullName + : type.shared.name; + } + + this._addIfUnique(className, type); + + if (!ClassType.isPseudoGenericClass(type)) { + if (type.priv.tupleTypeArgs) { + type.priv.tupleTypeArgs.forEach((typeArg) => { + this.build(typeArg.type, recursionTypes, recursionCount); + }); + } else if (type.priv.typeArgs) { + type.priv.typeArgs.forEach((typeArg) => { + this.build(typeArg, recursionTypes, recursionCount); + }); + } + } + break; + } + + case TypeCategory.Union: { + doForEachSubtype(type, (subtype) => { + this.build(subtype, recursionTypes, recursionCount); + }); + + type.priv.typeAliasSources?.forEach((typeAliasSource) => { + this.build(typeAliasSource, recursionTypes, recursionCount); + }); + break; + } + } + } finally { + recursionTypes.pop(); + } + } + + isUnique(name: string) { + const entry = this._map.get(name); + return !entry || entry.length === 1; + } + + private _addIfUnique(name: string, type: Type, useTypeAliasName = false) { + const existingEntry = this._map.get(name); + if (!existingEntry) { + this._map.set(name, [type]); + } else { + if (!existingEntry.some((t) => this._isSameTypeName(t, type, useTypeAliasName))) { + existingEntry.push(type); + } + } + } + + private _isSameTypeName(type1: Type, type2: Type, useTypeAliasName: boolean): boolean { + if (useTypeAliasName) { + return type1.props?.typeAliasInfo?.shared.fullName === type2.props?.typeAliasInfo?.shared.fullName; + } + + if (isClass(type1) && isClass(type2)) { + while (TypeBase.isInstantiable(type1)) { + type1 = ClassType.cloneAsInstance(type1); + } + + while (TypeBase.isInstantiable(type2)) { + type2 = ClassType.cloneAsInstance(type2); + } + + return ClassType.isSameGenericClass(type1, type2); + } + + return false; + } +} + +export function getPrintTypeFlags(configOptions: ConfigOptions): PrintTypeFlags { + let flags = PrintTypeFlags.None; + + if (configOptions.diagnosticRuleSet.printUnknownAsAny) { + flags |= PrintTypeFlags.PrintUnknownWithAny; + } + + if 
(configOptions.diagnosticRuleSet.omitConditionalConstraint) { + flags |= PrintTypeFlags.OmitConditionalConstraint; + } + + if (configOptions.diagnosticRuleSet.omitTypeArgsIfUnknown) { + flags |= PrintTypeFlags.OmitTypeArgsIfUnknown; + } + + if (configOptions.diagnosticRuleSet.omitUnannotatedParamType) { + flags |= PrintTypeFlags.OmitUnannotatedParamType; + } + + if (configOptions.diagnosticRuleSet.pep604Printing) { + flags |= PrintTypeFlags.PEP604; + } + + return flags; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/typePrinterUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/typePrinterUtils.ts new file mode 100644 index 00000000..d4e4c490 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/typePrinterUtils.ts @@ -0,0 +1,49 @@ +/* + * typePrinterUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Simple utility functions used by the type printer. + */ + +const singleTickRegEx = /'/g; +const escapedDoubleQuoteRegEx = /\\"/g; + +export function printStringLiteral(value: string, quotation = '"'): string { + // JSON.stringify will perform proper escaping for " case. + // So, we only need to do our own escaping for ' case. + let literalStr = JSON.stringify(value).toString(); + if (quotation !== '"') { + literalStr = `'${literalStr + .substring(1, literalStr.length - 1) + .replace(escapedDoubleQuoteRegEx, '"') + .replace(singleTickRegEx, "\\'")}'`; // CodeQL [SM02383] Code ql is just wrong here. We don't need to replace backslashes. + } + + return literalStr; +} + +export function printBytesLiteral(value: string) { + let bytesString = ''; + + // There's no good built-in conversion routine in javascript to convert + // bytes strings. Determine on a character-by-character basis whether + // it can be rendered into an ASCII character. If not, use an escape. 
+ for (let i = 0; i < value.length; i++) { + const char = value.substring(i, i + 1); + const charCode = char.charCodeAt(0); + + if (charCode >= 20 && charCode <= 126) { + if (charCode === 34) { + bytesString += '\\' + char; + } else { + bytesString += char; + } + } else { + bytesString += `\\x${((charCode >> 4) & 0xf).toString(16)}${(charCode & 0xf).toString(16)}`; + } + } + + return `b"${bytesString}"`; +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/typeStubWriter.ts b/python-parser/packages/pyright-internal/src/analyzer/typeStubWriter.ts new file mode 100644 index 00000000..8387eb01 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/typeStubWriter.ts @@ -0,0 +1,808 @@ +/* + * typeStubWriter.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Logic to emit a type stub file for a corresponding parsed + * and analyzed python source file. + */ + +import { Uri } from '../common/uri/uri'; +import { + ArgCategory, + AssignmentNode, + AugmentedAssignmentNode, + ClassNode, + DecoratorNode, + ExpressionNode, + ForNode, + FunctionNode, + IfNode, + ImportFromNode, + ImportNode, + MemberAccessNode, + ModuleNameNode, + NameNode, + ParamCategory, + ParameterNode, + ParseNode, + ParseNodeType, + StatementListNode, + StringNode, + TryNode, + TypeAliasNode, + TypeAnnotationNode, + TypeParamKind, + TypeParameterListNode, + TypeParameterNode, + WhileNode, + WithNode, +} from '../parser/parseNodes'; +import { OperatorType } from '../parser/tokenizerTypes'; +import * as AnalyzerNodeInfo from './analyzerNodeInfo'; +import * as ParseTreeUtils from './parseTreeUtils'; +import { ParseTreeWalker } from './parseTreeWalker'; +import { getScopeForNode } from './scopeUtils'; +import { SourceFile } from './sourceFile'; +import { Symbol } from './symbol'; +import * as SymbolNameUtils from './symbolNameUtils'; +import { TypeEvaluator } from './typeEvaluatorTypes'; +import { + ClassType, + 
isClassInstance, + isFunction, + isInstantiableClass, + isNever, + isUnknown, + removeUnknownFromUnion, +} from './types'; + +class TrackedImport { + isAccessed = false; + + constructor(public importName: string) {} +} + +class TrackedImportAs extends TrackedImport { + constructor(importName: string, public alias: string | undefined, public symbol: Symbol) { + super(importName); + } +} + +interface TrackedImportSymbol { + symbol?: Symbol | undefined; + name: string; + alias?: string | undefined; + isAccessed: boolean; +} + +class TrackedImportFrom extends TrackedImport { + symbols: TrackedImportSymbol[] = []; + + constructor(importName: string, public isWildcardImport: boolean, public node?: ImportFromNode) { + super(importName); + } + + addSymbol(symbol: Symbol | undefined, name: string, alias: string | undefined, isAccessed = false) { + if (!this.symbols.find((s) => s.name === name)) { + this.symbols.push({ + symbol, + name, + alias, + isAccessed, + }); + } + } +} + +class ImportSymbolWalker extends ParseTreeWalker { + constructor(private _accessedImportedSymbols: Set, private _treatStringsAsSymbols: boolean) { + super(); + } + + analyze(node: ExpressionNode) { + this.walk(node); + } + + override walk(node: ParseNode) { + if (!AnalyzerNodeInfo.isCodeUnreachable(node)) { + super.walk(node); + } + } + + override visitName(node: NameNode) { + this._accessedImportedSymbols.add(node.d.value); + return true; + } + + override visitMemberAccess(node: MemberAccessNode): boolean { + const baseExpression = this._getRecursiveModuleAccessExpression(node.d.leftExpr); + + if (baseExpression) { + this._accessedImportedSymbols.add(`${baseExpression}.${node.d.member.d.value}`); + } + + return true; + } + + override visitString(node: StringNode) { + if (this._treatStringsAsSymbols) { + this._accessedImportedSymbols.add(node.d.value); + } + + return true; + } + + private _getRecursiveModuleAccessExpression(node: ExpressionNode): string | undefined { + if (node.nodeType === 
ParseNodeType.Name) { + return node.d.value; + } + + if (node.nodeType === ParseNodeType.MemberAccess) { + const baseExpression = this._getRecursiveModuleAccessExpression(node.d.leftExpr); + if (!baseExpression) { + return undefined; + } + + return `${baseExpression}.${node.d.member.d.value}`; + } + + return undefined; + } +} + +export class TypeStubWriter extends ParseTreeWalker { + private _indentAmount = 0; + private _includeAllImports = false; + private _typeStubText = ''; + private _lineEnd = '\n'; + private _tab = ' '; + private _classNestCount = 0; + private _functionNestCount = 0; + private _ifNestCount = 0; + private _emittedSuite = false; + private _emitDocString = true; + private _trackedImportAs = new Map(); + private _trackedImportFrom = new Map(); + private _accessedImportedSymbols = new Set(); + + constructor(private _stubPath: Uri, private _sourceFile: SourceFile, private _evaluator: TypeEvaluator) { + super(); + + // As a heuristic, we'll include all of the import statements + // in "__init__.pyi" files even if they're not locally referenced + // because these are often used as ways to re-export symbols. 
+ if (this._stubPath.fileName === '__init__.pyi') { + this._includeAllImports = true; + } + } + + write() { + const parseResults = this._sourceFile.getParseResults()!; + this._lineEnd = parseResults.tokenizerOutput.predominantEndOfLineSequence; + this._tab = parseResults.tokenizerOutput.predominantTabSequence; + + this.walk(parseResults.parserOutput.parseTree); + + this._writeFile(); + } + + override walk(node: ParseNode) { + if (!AnalyzerNodeInfo.isCodeUnreachable(node)) { + super.walk(node); + } + } + + override visitClass(node: ClassNode) { + const className = node.d.name.d.value; + + this._emittedSuite = true; + this._emitDocString = true; + this._emitDecorators(node.d.decorators); + let line = `class ${className}`; + + if (node.d.typeParams) { + line += this._printTypeParams(node.d.typeParams); + } + + // Remove "object" from the list, since it's implied + const args = node.d.arguments.filter( + (arg) => + arg.d.name !== undefined || + arg.d.argCategory !== ArgCategory.Simple || + arg.d.valueExpr.nodeType !== ParseNodeType.Name || + arg.d.valueExpr.d.value !== 'object' + ); + + if (args.length > 0) { + line += `(${args + .map((arg) => { + let argString = ''; + if (arg.d.name) { + argString = arg.d.name.d.value + '='; + } + argString += this._printExpression(arg.d.valueExpr); + return argString; + }) + .join(', ')})`; + } + line += ':'; + this._emitLine(line); + + this._emitSuite(() => { + this._classNestCount++; + this.walk(node.d.suite); + this._classNestCount--; + }); + + this._emitLine(''); + this._emitLine(''); + + return false; + } + + override visitFunction(node: FunctionNode) { + const functionName = node.d.name.d.value; + + // Skip if we're already within a function or if the name is private/protected. + if (this._functionNestCount === 0 && !SymbolNameUtils.isPrivateOrProtectedName(functionName)) { + this._emittedSuite = true; + this._emitDocString = true; + this._emitDecorators(node.d.decorators); + let line = node.d.isAsync ? 
'async ' : ''; + line += `def ${functionName}`; + + if (node.d.typeParams) { + line += this._printTypeParams(node.d.typeParams); + } + + line += `(${node.d.params.map((param, index) => this._printParam(param, node, index)).join(', ')})`; + + let returnAnnotation: string | undefined; + if (node.d.returnAnnotation) { + returnAnnotation = this._printExpression(node.d.returnAnnotation, /* treatStringsAsSymbols */ true); + } else if (node.d.funcAnnotationComment) { + returnAnnotation = this._printExpression( + node.d.funcAnnotationComment.d.returnAnnotation, + /* treatStringsAsSymbols */ true + ); + } else { + // Handle a few common cases where we always know the answer. + if (node.d.name.d.value === '__init__') { + returnAnnotation = 'None'; + } else if (node.d.name.d.value === '__str__') { + returnAnnotation = 'str'; + } else if (['__int__', '__hash__'].some((name) => name === node.d.name.d.value)) { + returnAnnotation = 'int'; + } else if ( + ['__eq__', '__ne__', '__gt__', '__lt__', '__ge__', '__le__'].some( + (name) => name === node.d.name.d.value + ) + ) { + returnAnnotation = 'bool'; + } + } + + if (returnAnnotation) { + line += ' -> ' + returnAnnotation; + } + + line += ':'; + + // If there was not return type annotation, see if we can infer + // a type that is not unknown and add it as a comment. + if (!returnAnnotation) { + const functionType = this._evaluator.getTypeOfFunction(node); + if (functionType && isFunction(functionType.functionType)) { + let returnType = this._evaluator.getInferredReturnType(functionType.functionType); + returnType = removeUnknownFromUnion(returnType); + if (!isNever(returnType) && !isUnknown(returnType)) { + line += ` # -> ${this._evaluator.printType(returnType, { enforcePythonSyntax: true })}:`; + } + } + } + + this._emitLine(line); + + this._emitSuite(() => { + // Don't emit any nested functions. 
+ this._functionNestCount++; + this.walk(node.d.suite); + this._functionNestCount--; + }); + + this._emitLine(''); + } + + return false; + } + + override visitWhile(node: WhileNode) { + // Don't emit a doc string after the first statement. + this._emitDocString = false; + return false; + } + + override visitFor(node: ForNode) { + // Don't emit a doc string after the first statement. + this._emitDocString = false; + return false; + } + + override visitTry(node: TryNode) { + // Don't emit a doc string after the first statement. + this._emitDocString = false; + + // Only walk a single branch of the try/catch to for imports. + this.walk(node.d.trySuite); + return false; + } + + override visitWith(node: WithNode) { + // Don't emit a doc string after the first statement. + this._emitDocString = false; + return false; + } + + override visitIf(node: IfNode) { + // Don't emit a doc string after the first statement. + this._emitDocString = false; + + // Include if statements if they are located + // at the global scope. 
+ if (this._functionNestCount === 0 && this._ifNestCount === 0) { + this._ifNestCount++; + this._emittedSuite = true; + this._emitLine('if ' + this._printExpression(node.d.testExpr) + ':'); + this._emitSuite(() => { + this.walkMultiple(node.d.ifSuite.d.statements); + }); + + const elseSuite = node.d.elseSuite; + if (elseSuite) { + this._emitLine('else:'); + this._emitSuite(() => { + if (elseSuite.nodeType === ParseNodeType.If) { + this.walkMultiple([elseSuite.d.testExpr, elseSuite.d.ifSuite, elseSuite.d.elseSuite]); + } else { + this.walkMultiple(elseSuite.d.statements); + } + }); + } + this._ifNestCount--; + } + + return false; + } + + override visitTypeAlias(node: TypeAliasNode): boolean { + let line = ''; + line = this._printExpression(node.d.name); + + if (node.d.typeParams) { + line += this._printTypeParams(node.d.typeParams); + } + + line += ' = '; + line += this._printExpression(node.d.expr); + this._emitLine(line); + + return false; + } + + override visitAssignment(node: AssignmentNode) { + let isTypeAlias = false; + let line = ''; + + if (node.d.leftExpr.nodeType === ParseNodeType.Name) { + // Handle "__all__" as a special case. + if (node.d.leftExpr.d.value === '__all__') { + if (this._functionNestCount === 0 && this._ifNestCount === 0) { + this._emittedSuite = true; + + line = this._printExpression(node.d.leftExpr); + line += ' = '; + line += this._printExpression(node.d.rightExpr); + this._emitLine(line); + } + + return false; + } + + if (this._functionNestCount === 0) { + line = this._printExpression(node.d.leftExpr); + if (node.d.annotationComment) { + line += ': ' + this._printExpression(node.d.annotationComment, /* treatStringsAsSymbols */ true); + } + + const valueType = this._evaluator.getType(node.d.leftExpr); + if (valueType?.props?.typeAliasInfo) { + isTypeAlias = true; + } else if (node.d.rightExpr.nodeType === ParseNodeType.Call) { + // Special-case TypeVar, TypeVarTuple, ParamSpec and NewType calls. Treat + // them like type aliases. 
+ const callBaseType = this._evaluator.getType(node.d.rightExpr.d.leftExpr); + if ( + callBaseType && + isInstantiableClass(callBaseType) && + ClassType.isBuiltIn(callBaseType, ['TypeVar', 'TypeVarTuple', 'ParamSpec', 'NewType']) + ) { + isTypeAlias = true; + } + } + } + } else if (node.d.leftExpr.nodeType === ParseNodeType.TypeAnnotation) { + const valueExpr = node.d.leftExpr.d.valueExpr; + + const declaredType = this._evaluator.getTypeOfAnnotation(node.d.leftExpr.d.annotation, { + varTypeAnnotation: true, + allowClassVar: true, + }); + + // Is this an explicit TypeAlias declaration? + if (isClassInstance(declaredType) && ClassType.isBuiltIn(declaredType, 'TypeAlias')) { + isTypeAlias = true; + } + + if (valueExpr.nodeType === ParseNodeType.Name) { + if (this._functionNestCount === 0) { + line = `${this._printExpression(valueExpr)}: ${this._printExpression( + node.d.leftExpr.d.annotation, + /* treatStringsAsSymbols */ true + )}`; + } + } + } + + if (line) { + this._emittedSuite = true; + + line += ' = '; + + if (isTypeAlias) { + line += this._printExpression(node.d.rightExpr); + } else { + line += '...'; + } + this._emitLine(line); + } + + return false; + } + + override visitAugmentedAssignment(node: AugmentedAssignmentNode) { + if (node.d.leftExpr.nodeType === ParseNodeType.Name) { + // Handle "__all__ +=" as a special case. 
+ if (node.d.leftExpr.d.value === '__all__' && node.d.operator === OperatorType.AddEqual) { + if (this._functionNestCount === 0 && this._ifNestCount === 0) { + let line = this._printExpression(node.d.leftExpr); + line += ' += '; + line += this._printExpression(node.d.rightExpr); + this._emitLine(line); + } + } + } + + return false; + } + + override visitTypeAnnotation(node: TypeAnnotationNode) { + if (this._functionNestCount === 0) { + let line = ''; + if (node.d.valueExpr.nodeType === ParseNodeType.Name) { + line = this._printExpression(node.d.valueExpr); + } else if (node.d.valueExpr.nodeType === ParseNodeType.MemberAccess) { + const baseExpression = node.d.valueExpr.d.leftExpr; + if (baseExpression.nodeType === ParseNodeType.Name) { + if (baseExpression.d.value === 'self') { + const memberName = node.d.valueExpr.d.member.d.value; + if (!SymbolNameUtils.isPrivateOrProtectedName(memberName)) { + line = this._printExpression(node.d.valueExpr); + } + } + } + } + + if (line) { + line += ': ' + this._printExpression(node.d.annotation, /* treatStringsAsSymbols */ true); + this._emitLine(line); + } + } + + return false; + } + + override visitImport(node: ImportNode) { + if (this._functionNestCount > 0 || this._classNestCount > 0) { + return false; + } + + const currentScope = getScopeForNode(node); + if (currentScope) { + // Record the input for later. + node.d.list.forEach((imp) => { + const moduleName = this._printModuleName(imp.d.module); + if (!this._trackedImportAs.has(moduleName)) { + const symbolName = imp.d.alias + ? imp.d.alias.d.value + : imp.d.module.d.nameParts.length > 0 + ? imp.d.module.d.nameParts[0].d.value + : ''; + const symbolInfo = currentScope.lookUpSymbolRecursive(symbolName); + if (symbolInfo) { + const trackedImportAs = new TrackedImportAs( + moduleName, + imp.d.alias ? 
imp.d.alias.d.value : undefined, + symbolInfo.symbol + ); + this._trackedImportAs.set(moduleName, trackedImportAs); + } + } + }); + } + + return false; + } + + override visitImportFrom(node: ImportFromNode) { + if (this._functionNestCount > 0 || this._classNestCount > 0) { + return false; + } + + const currentScope = getScopeForNode(node); + if (currentScope) { + // Record the input for later. + const moduleName = this._printModuleName(node.d.module); + let trackedImportFrom = this._trackedImportFrom.get(moduleName); + if (!trackedImportFrom) { + trackedImportFrom = new TrackedImportFrom(moduleName, node.d.isWildcardImport, node); + this._trackedImportFrom.set(moduleName, trackedImportFrom); + } + + node.d.imports.forEach((imp) => { + const symbolName = imp.d.alias ? imp.d.alias.d.value : imp.d.name.d.value; + const symbolInfo = currentScope.lookUpSymbolRecursive(symbolName); + if (symbolInfo) { + trackedImportFrom!.addSymbol( + symbolInfo.symbol, + imp.d.name.d.value, + imp.d.alias ? imp.d.alias.d.value : undefined, + false + ); + } + }); + } + + return false; + } + + override visitStatementList(node: StatementListNode) { + if (node.d.statements.length > 0 && node.d.statements[0].nodeType === ParseNodeType.StringList) { + // Is this the first statement in a suite? If it's a string + // literal, assume it's a doc string and emit it. + if (!this._emittedSuite && this._emitDocString) { + this._emitLine(this._printExpression(node.d.statements[0])); + } + } + + // Don't emit a doc string after the first statement. 
+ this._emitDocString = false; + + this.walkMultiple(node.d.statements); + return false; + } + + private _emitSuite(callback: () => void) { + this._increaseIndent(() => { + const prevEmittedSuite = this._emittedSuite; + this._emittedSuite = false; + + callback(); + + if (!this._emittedSuite) { + this._emitLine('...'); + } + + this._emittedSuite = prevEmittedSuite; + }); + } + + private _increaseIndent(callback: () => void) { + this._indentAmount++; + callback(); + this._indentAmount--; + } + + private _emitDecorators(decorators: DecoratorNode[]) { + decorators.forEach((decorator) => { + this._emitLine('@' + this._printExpression(decorator.d.expr)); + }); + } + + private _printHeaderDocString() { + return ( + '"""' + + this._lineEnd + + 'This type stub file was generated by pyright.' + + this._lineEnd + + '"""' + + this._lineEnd + + this._lineEnd + ); + } + + private _emitLine(line: string) { + for (let i = 0; i < this._indentAmount; i++) { + this._typeStubText += this._tab; + } + + this._typeStubText += line + this._lineEnd; + } + + private _printTypeParams(node: TypeParameterListNode): string { + return `[${node.d.params.map((typeParam) => this._printTypeParam(typeParam)).join(',')}]`; + } + + private _printTypeParam(node: TypeParameterNode): string { + let line = ''; + + if (node.d.typeParamKind === TypeParamKind.TypeVarTuple) { + line += '*'; + } else if (node.d.typeParamKind === TypeParamKind.ParamSpec) { + line += '**'; + } + + line += node.d.name.d.value; + + if (node.d.boundExpr) { + line += ': '; + line += this._printExpression(node.d.boundExpr); + } + + if (node.d.defaultExpr) { + line += ' = '; + line += this._printExpression(node.d.defaultExpr); + } + + return line; + } + + private _printModuleName(node: ModuleNameNode): string { + let line = ''; + for (let i = 0; i < node.d.leadingDots; i++) { + line += '.'; + } + line += node.d.nameParts.map((part) => part.d.value).join('.'); + return line; + } + + private _printParam(paramNode: ParameterNode, 
functionNode: FunctionNode, paramIndex: number): string { + let line = ''; + if (paramNode.d.category === ParamCategory.ArgsList) { + line += '*'; + } else if (paramNode.d.category === ParamCategory.KwargsDict) { + line += '**'; + } + + if (paramNode.d.name) { + line += paramNode.d.name.d.value; + } else if (paramNode.d.category === ParamCategory.Simple) { + line += '/'; + } + + const paramTypeAnnotation = ParseTreeUtils.getTypeAnnotationForParam(functionNode, paramIndex); + let paramType = ''; + if (paramTypeAnnotation) { + paramType = this._printExpression(paramTypeAnnotation, /* treatStringsAsSymbols */ true); + } + + if (paramType) { + line += ': ' + paramType; + } + + if (paramNode.d.defaultValue) { + // Follow PEP8 spacing rules. Include spaces if type + // annotation is present, no space otherwise. + if (paramType) { + line += ' = ...'; + } else { + line += '=...'; + } + } + + return line; + } + + private _printExpression(node: ExpressionNode, isType = false, treatStringsAsSymbols = false): string { + const importSymbolWalker = new ImportSymbolWalker(this._accessedImportedSymbols, treatStringsAsSymbols); + importSymbolWalker.analyze(node); + + let expressionFlags = isType + ? ParseTreeUtils.PrintExpressionFlags.ForwardDeclarations + : ParseTreeUtils.PrintExpressionFlags.None; + expressionFlags |= ParseTreeUtils.PrintExpressionFlags.DoNotLimitStringLength; + + return ParseTreeUtils.printExpression(node, expressionFlags); + } + + private _printTrackedImports() { + let importStr = ''; + let lineEmitted = false; + + // Emit the "import" statements. + this._trackedImportAs.forEach((imp) => { + if (this._accessedImportedSymbols.has(imp.alias || imp.importName)) { + imp.isAccessed = true; + } + + if (imp.isAccessed || this._includeAllImports) { + importStr += `import ${imp.importName}`; + if (imp.alias) { + importStr += ` as ${imp.alias}`; + } + importStr += this._lineEnd; + lineEmitted = true; + } + }); + + // Emit the "import from" statements. 
+ this._trackedImportFrom.forEach((imp) => { + imp.symbols.forEach((s) => { + if (this._accessedImportedSymbols.has(s.alias || s.name)) { + s.isAccessed = true; + } + }); + + if (imp.isWildcardImport) { + importStr += `from ${imp.importName} import *` + this._lineEnd; + lineEmitted = true; + } + + const sortedSymbols = imp.symbols + .filter((s) => s.isAccessed || this._includeAllImports) + .sort((a, b) => { + if (a.name < b.name) { + return -1; + } else if (a.name > b.name) { + return 1; + } + return 0; + }); + + // Don't emit a "from __future__" import. Just ignore these + // because they have no meaning in stubs, and they must appear + // at the top of a file. + if (sortedSymbols.length > 0 && imp.importName !== '__future__') { + importStr += `from ${imp.importName} import `; + + importStr += sortedSymbols + .map((symbol) => { + let symStr = symbol.name; + if (symbol.alias) { + symStr += ' as ' + symbol.alias; + } + return symStr; + }) + .join(', '); + + importStr += this._lineEnd; + lineEmitted = true; + } + }); + + if (lineEmitted) { + importStr += this._lineEnd; + } + + return importStr; + } + + private _writeFile() { + let finalText = this._printHeaderDocString(); + finalText += this._printTrackedImports(); + finalText += this._typeStubText; + + this._sourceFile.fileSystem.writeFileSync(this._stubPath, finalText, 'utf8'); + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/typeUtils.ts b/python-parser/packages/pyright-internal/src/analyzer/typeUtils.ts new file mode 100644 index 00000000..0d50ad5d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/typeUtils.ts @@ -0,0 +1,4485 @@ +/* + * typeUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Functions that operate on Type objects. 
+ */ + +import { appendArray } from '../common/collectionUtils'; +import { assert } from '../common/debug'; +import { ParamCategory } from '../parser/parseNodes'; +import { ConstraintSolution, ConstraintSolutionSet } from './constraintSolution'; +import { DeclarationType } from './declaration'; +import { Symbol, SymbolFlags, SymbolTable } from './symbol'; +import { isEffectivelyClassVar, isTypedDictMemberAccessedThroughIndex } from './symbolUtils'; +import { + AnyType, + ClassType, + ClassTypeFlags, + combineTypes, + findSubtype, + FunctionParam, + FunctionParamFlags, + FunctionType, + FunctionTypeFlags, + isAny, + isAnyOrUnknown, + isClass, + isClassInstance, + isFunction, + isFunctionOrOverloaded, + isInstantiableClass, + isKeywordOnlySeparator, + isNever, + isOverloaded, + isParamSpec, + isPositionOnlySeparator, + isTypeSame, + isTypeVar, + isTypeVarTuple, + isUnbound, + isUnion, + isUnknown, + isUnpackedClass, + isUnpackedTypeVar, + isUnpackedTypeVarTuple, + maxTypeRecursionCount, + ModuleType, + NeverType, + OverloadedType, + ParamSpecAccess, + ParamSpecType, + PropertyMethodInfo, + removeFromUnion, + SentinelLiteral, + SignatureWithOffsets, + SpecializedFunctionTypes, + TupleTypeArg, + Type, + TypeBase, + TypeCategory, + TypeCondition, + TypeFlags, + TypeSameOptions, + TypeVarScopeId, + TypeVarScopeType, + TypeVarTupleType, + TypeVarType, + UnboundType, + UnionType, + UnknownType, + Variance, +} from './types'; +import { TypeWalker } from './typeWalker'; + +export interface ClassMember { + // Symbol + symbol: Symbol; + + // Partially-specialized class that contains the class member + classType: ClassType | UnknownType | AnyType; + + // Unspecialized class that contains the class member + unspecializedClassType: ClassType | UnknownType | AnyType; + + // True if it is an instance or class member; it can be both a class and + // an instance member in cases where a class variable is overridden + // by an instance variable + isInstanceMember: boolean; + 
isClassMember: boolean; + + // Is the member in __slots__? + isSlotsMember: boolean; + + // True if explicitly declared as "ClassVar" and therefore is + // a type violation if it is overwritten by an instance variable + isClassVar: boolean; + + // True if the member is read-only, such as with named tuples + // or frozen dataclasses. + isReadOnly: boolean; + + // True if member has declared type, false if inferred + isTypeDeclared: boolean; + + // True if member lookup skipped an undeclared (inferred) type + // in a subclass before finding a declared type in a base class + skippedUndeclaredType: boolean; +} + +export const enum MemberAccessFlags { + Default = 0, + + // By default, the original (derived) class is searched along + // with its base classes. If this flag is set, the original + // class is skipped and only the base classes are searched. + SkipOriginalClass = 1 << 0, + + // By default, base classes are searched as well as the + // original (derived) class. If this flag is set, no recursion + // is performed. + SkipBaseClasses = 1 << 1, + + // Skip the 'object' base class in particular. + SkipObjectBaseClass = 1 << 2, + + // Skip the 'type' base class in particular. + SkipTypeBaseClass = 1 << 3, + + // By default, both class and instance variables are searched. + // If this flag is set, the instance variables are skipped. + SkipInstanceMembers = 1 << 4, + + // By default, both class and instance variables are searched. + // If this flag is set, the class variables are skipped. + SkipClassMembers = 1 << 5, + + // By default, the first symbol is returned even if it has only + // an inferred type associated with it. If this flag is set, + // the search looks only for symbols with declared types. + DeclaredTypesOnly = 1 << 6, + + // Consider writes to symbols flagged as ClassVars as an error. 
+ DisallowClassVarWrites = 1 << 7, + + // Normally __new__ is treated as a static method, but when + // it is invoked implicitly through a constructor call, it + // acts like a class method instead. + TreatConstructorAsClassMethod = 1 << 8, + + // If an attribute cannot be found when looking for instance + // members, normally an attribute access override method + // (__getattr__, etc.) may provide the missing attribute type. + // This disables this check. + SkipAttributeAccessOverride = 1 << 9, + + // Report an error if a symbol is an instance variable whose + // type is parameterized by a class TypeVar. + DisallowGenericInstanceVariableAccess = 1 << 10, + + // The member access should be treated as if it's within a type + // expression, and errors should be reported if it doesn't conform + // with type expression rules. + TypeExpression = 1 << 11, + + // Skip symbol table entries in the class that correspond to + // TypedDict entries. These are not considered attributes of + // the class and cannot be accessed using a member access expression. + SkipTypedDictEntries = 1 << 12, +} + +export const enum ClassIteratorFlags { + Default = 0, + + // By default, base classes are searched as well as the + // original (derived) class. If this flag is set, no recursion + // is performed. + SkipBaseClasses = 1 << 0, + + // Skip the 'object' base class in particular. + SkipObjectBaseClass = 1 << 1, + + // Skip the 'type' base class in particular. + SkipTypeBaseClass = 1 << 2, +} + +export interface InferenceContext { + expectedType: Type; + isTypeIncomplete?: boolean; + returnTypeOverride?: Type; +} + +export interface RequiresSpecializationOptions { + // Ignore pseudo-generic classes (those with PseudoGenericClass flag set) + // when determining whether the type requires specialization? + ignorePseudoGeneric?: boolean; + + // Ignore Self type? + ignoreSelf?: boolean; + + // Ignore classes whose isTypeArgExplicit flag is false? 
+ ignoreImplicitTypeArgs?: boolean; +} + +export interface IsInstantiableOptions { + honorTypeVarBounds?: boolean; +} + +export interface SelfSpecializeOptions { + // Override any existing type arguments? By default, + // existing type arguments are left as is. + overrideTypeArgs?: boolean; + + // Specialize with "bound" versions of the type parameters? + useBoundTypeVars?: boolean; +} + +export interface ApplyTypeVarOptions { + typeClassType?: ClassType; + replaceUnsolved?: { + scopeIds: TypeVarScopeId[]; + tupleClassType: ClassType | undefined; + unsolvedExemptTypeVars?: TypeVarType[]; + useUnknown?: boolean; + eliminateUnsolvedInUnions?: boolean; + }; +} + +export interface AddConditionOptions { + skipSelfCondition?: boolean; + skipBoundTypeVars?: boolean; +} + +// There are cases where tuple types can be infinitely nested. The +// recursion count limit will eventually be hit, but this will create +// deep types that will effectively hang the analyzer. To prevent this, +// we'll limit the depth of the tuple type arguments. This value is +// large enough that we should never hit it in legitimate circumstances. +const maxTupleTypeArgRecursionDepth = 10; + +// Tracks whether a function signature has been seen before within +// an expression. For example, in the expression "foo(foo, foo)", the +// signature for "foo" will be seen three times at three different +// file offsets. If the signature is generic, we need to create unique +// type variables for each instance because they are independent of +// each other. 
+export class UniqueSignatureTracker { + private _trackedSignatures: SignatureWithOffsets[]; + + constructor() { + this._trackedSignatures = []; + } + + getTrackedSignatures() { + return this._trackedSignatures; + } + + addTrackedSignatures(signatures: SignatureWithOffsets[]) { + signatures.forEach((s) => { + s.expressionOffsets.forEach((offset) => { + this.addSignature(s.type, offset); + }); + }); + } + + findSignature(signature: FunctionType | OverloadedType): SignatureWithOffsets | undefined { + // Use the associated overload type if this is a function associated with an overload. + let effectiveSignature = signature; + if (isFunction(signature) && signature.priv.overloaded) { + effectiveSignature = signature.priv.overloaded; + } + + return this._trackedSignatures.find((s) => { + return isTypeSame(effectiveSignature, s.type); + }); + } + + addSignature(signature: FunctionType | OverloadedType, offset: number) { + // If this function is part of a broader overload, use the overload instead. + const effectiveSignature = isFunction(signature) ? signature.priv.overloaded ?? 
signature : signature; + + const existingSignature = this.findSignature(effectiveSignature); + if (existingSignature) { + if (!existingSignature.expressionOffsets.some((o) => o === offset)) { + existingSignature.expressionOffsets.push(offset); + } + } else { + this._trackedSignatures.push({ type: effectiveSignature, expressionOffsets: [offset] }); + } + } +} + +export function isOptionalType(type: Type): boolean { + if (isUnion(type)) { + return findSubtype(type, (subtype) => isNoneInstance(subtype)) !== undefined; + } + + return false; +} + +export function isNoneInstance(type: Type): boolean { + return isClassInstance(type) && ClassType.isBuiltIn(type, 'NoneType'); +} + +export function isNoneTypeClass(type: Type): boolean { + return isInstantiableClass(type) && ClassType.isBuiltIn(type, 'NoneType'); +} + +// If the type is a union, remove an "None" type from the union, +// returning only the known types. +export function removeNoneFromUnion(type: Type): Type { + return removeFromUnion(type, (t: Type) => isNoneInstance(t)); +} + +export function isIncompleteUnknown(type: Type): boolean { + return isUnknown(type) && type.priv.isIncomplete; +} + +// Similar to isTypeSame except that type1 is a TypeVar and type2 +// can be either a TypeVar of the same type or a union that includes +// conditional types associated with that bound TypeVar. +export function isTypeVarSame(type1: TypeVarType, type2: Type) { + if (isTypeSame(type1, type2)) { + return true; + } + + // If this isn't a bound TypeVar, return false. + if (isParamSpec(type1) || isTypeVarTuple(type1) || !TypeVarType.hasBound(type1)) { + return false; + } + + // If the second type isn't a union, return false. 
+ if (!isUnion(type2)) { + return false; + } + + let isCompatible = true; + doForEachSubtype(type2, (subtype) => { + if (!isCompatible) { + return; + } + + if (!isTypeSame(type1, subtype)) { + const conditions = getTypeCondition(subtype); + + if ( + !conditions || + !conditions.some((condition) => condition.typeVar.priv.nameWithScope === type1.priv.nameWithScope) + ) { + isCompatible = false; + } + } + }); + + return isCompatible; +} + +export function makeInferenceContext( + expectedType: undefined, + isTypeIncomplete?: boolean, + returnTypeOverride?: Type | undefined +): undefined; +export function makeInferenceContext( + expectedType: Type, + isTypeIncomplete?: boolean, + returnTypeOverride?: Type | undefined +): InferenceContext; +export function makeInferenceContext( + expectedType?: Type, + isTypeIncomplete?: boolean, + returnTypeOverride?: Type | undefined +): InferenceContext | undefined; + +export function makeInferenceContext( + expectedType: Type | undefined, + isTypeIncomplete?: boolean, + returnTypeOverride?: Type | undefined +): InferenceContext | undefined { + if (!expectedType) { + return undefined; + } + + return { expectedType, isTypeIncomplete, returnTypeOverride }; +} + +export interface MapSubtypesOptions { + // Should subtypes in a union be sorted before iteration? + sortSubtypes?: boolean; + + // Should unions retain redundant literal types if they + // are present in the original type? + skipElideRedundantLiterals?: boolean; + + // Should the type alias be retained as is? This is safe only + // if the caller has already transformed the associated type + // alias in a way that is compatible with transforms applied + // to the type. + retainTypeAlias?: boolean; +} + +// Calls a callback for each subtype and combines the results +// into a final type. It performs no memory allocations if the +// transformed type is the same as the original. 
+export function mapSubtypes( + type: Type, + callback: (type: Type) => Type | undefined, + options?: MapSubtypesOptions +): Type { + if (isUnion(type)) { + const subtypes = options?.sortSubtypes ? sortTypes(type.priv.subtypes) : type.priv.subtypes; + + for (let i = 0; i < subtypes.length; i++) { + const subtype = subtypes[i]; + const transformedType = callback(subtype); + + // Avoid doing any memory allocations until a change is detected. + if (subtype !== transformedType) { + const typesToCombine: Type[] = subtypes.slice(0, i); + + // Create a helper lambda that accumulates transformed subtypes. + const accumulateSubtype = (newSubtype: Type | undefined) => { + if (newSubtype) { + typesToCombine.push(addConditionToType(newSubtype, getTypeCondition(type))); + } + }; + + accumulateSubtype(transformedType); + + for (i++; i < subtypes.length; i++) { + accumulateSubtype(callback(subtypes[i])); + } + + let newType = combineTypes(typesToCombine, { + skipElideRedundantLiterals: options?.skipElideRedundantLiterals, + }); + + if (options?.retainTypeAlias) { + if (type.props?.typeAliasInfo) { + newType = TypeBase.cloneForTypeAlias(newType, type.props.typeAliasInfo); + } + } else { + // Do our best to retain type aliases. + if (isUnion(newType)) { + UnionType.addTypeAliasSource(newType, type); + } + } + + return newType; + } + } + + return type; + } + + const transformedSubtype = callback(type); + if (!transformedSubtype) { + return NeverType.createNever(); + } + return transformedSubtype; +} + +// Iterates over each signature in a function or overload, allowing the +// caller to replace one or more signatures with new ones. 
+export function mapSignatures( + type: FunctionType | OverloadedType, + callback: (type: FunctionType) => FunctionType | undefined +): OverloadedType | FunctionType | undefined { + if (isFunction(type)) { + return callback(type); + } + + const newSignatures: FunctionType[] = []; + let changeMade = false; + + OverloadedType.getOverloads(type).forEach((overload, index) => { + const newOverload = callback(overload); + if (newOverload !== overload) { + changeMade = true; + } + + if (newOverload) { + newSignatures.push(newOverload); + } + }); + + if (newSignatures.length === 0) { + return undefined; + } + + // Add the unmodified implementation if it's present. + const implementation = OverloadedType.getImplementation(type); + let newImplementation: Type | undefined = implementation; + + if (implementation && isFunction(implementation)) { + newImplementation = callback(implementation); + + if (newImplementation) { + changeMade = true; + } + } + + if (!changeMade) { + return type; + } + + if (newSignatures.length === 1) { + return newSignatures[0]; + } + + return OverloadedType.create(newSignatures, newImplementation); +} + +// The code flow engine uses a special form of the UnknownType (with the +// isIncomplete flag set) to distinguish between an unknown that was generated +// in a loop because it was temporarily incomplete versus an unknown that is +// permanently incomplete. Once an unknown appears within a loop, it is often +// propagated to other types during code flow analysis. We want to remove these +// incomplete unknowns if we find that they are union'ed with other types. +export function cleanIncompleteUnknown(type: Type, recursionCount = 0): Type { + if (recursionCount >= maxTypeRecursionCount) { + return type; + } + recursionCount++; + + const result = mapSubtypes(type, (subtype) => { + // If it's an incomplete unknown, eliminate it. 
+ if (isUnknown(subtype) && subtype.priv.isIncomplete) { + return undefined; + } + + if (isClass(subtype) && subtype.priv.typeArgs) { + let typeChanged = false; + + if (subtype.priv.tupleTypeArgs) { + const updatedTupleTypeArgs: TupleTypeArg[] = subtype.priv.tupleTypeArgs.map((tupleTypeArg) => { + const newTypeArg = cleanIncompleteUnknown(tupleTypeArg.type, recursionCount); + if (newTypeArg !== tupleTypeArg.type) { + typeChanged = true; + } + return { + type: newTypeArg, + isUnbounded: tupleTypeArg.isUnbounded, + isOptional: tupleTypeArg.isOptional, + }; + }); + + if (typeChanged) { + return specializeTupleClass( + subtype, + updatedTupleTypeArgs, + !!subtype.priv.isTypeArgExplicit, + !!subtype.priv.isUnpacked + ); + } + } else { + const updatedTypeArgs = subtype.priv.typeArgs.map((typeArg) => { + const newTypeArg = cleanIncompleteUnknown(typeArg, recursionCount); + if (newTypeArg !== typeArg) { + typeChanged = true; + } + return newTypeArg; + }); + + if (typeChanged) { + return ClassType.specialize(subtype, updatedTypeArgs, !!subtype.priv.isTypeArgExplicit); + } + } + } + + // TODO - this doesn't currently handle function types. + + return subtype; + }); + + // If we eliminated everything, don't return a Never. + return isNever(result) ? type : result; +} + +// Sorts types into a deterministic order. 
+export function sortTypes(types: Type[]): Type[] { + return types.slice(0).sort((a, b) => { + return compareTypes(a, b); + }); +} + +function compareTypes(a: Type, b: Type, recursionCount = 0): number { + if (recursionCount > maxTypeRecursionCount) { + return 0; + } + recursionCount++; + + if (a.category !== b.category) { + return b.category - a.category; + } + + switch (a.category) { + case TypeCategory.Unbound: + case TypeCategory.Unknown: + case TypeCategory.Any: + case TypeCategory.Never: + case TypeCategory.Union: { + return 0; + } + + case TypeCategory.Function: { + const bFunc = b as FunctionType; + + const aParamCount = a.shared.parameters.length; + const bParamCount = bFunc.shared.parameters.length; + if (aParamCount !== bParamCount) { + return bParamCount - aParamCount; + } + + for (let i = 0; i < aParamCount; i++) { + const aParam = a.shared.parameters[i]; + const bParam = bFunc.shared.parameters[i]; + if (aParam.category !== bParam.category) { + return bParam.category - aParam.category; + } + + const typeComparison = compareTypes( + FunctionType.getParamType(a, i), + FunctionType.getParamType(bFunc, i) + ); + + if (typeComparison !== 0) { + return typeComparison; + } + } + + const returnTypeComparison = compareTypes( + FunctionType.getEffectiveReturnType(a) ?? UnknownType.create(), + FunctionType.getEffectiveReturnType(bFunc) ?? 
UnknownType.create() + ); + + if (returnTypeComparison !== 0) { + return returnTypeComparison; + } + + const aName = a.shared.name; + const bName = bFunc.shared.name; + + if (aName < bName) { + return -1; + } else if (aName > bName) { + return 1; + } + + return 0; + } + + case TypeCategory.Overloaded: { + const bOver = b as OverloadedType; + + const aOverloads = OverloadedType.getOverloads(a); + const bOverloads = OverloadedType.getOverloads(bOver); + const aOverloadCount = aOverloads.length; + const bOverloadCount = bOverloads.length; + if (aOverloadCount !== bOverloadCount) { + return bOverloadCount - aOverloadCount; + } + + for (let i = 0; i < aOverloadCount; i++) { + const typeComparison = compareTypes(aOverloads[i], bOverloads[i]); + if (typeComparison !== 0) { + return typeComparison; + } + } + + return 0; + } + + case TypeCategory.Class: { + const bClass = b as ClassType; + + // Sort instances before instantiables. + if (isClassInstance(a) && isInstantiableClass(bClass)) { + return -1; + } else if (isInstantiableClass(a) && isClassInstance(bClass)) { + return 1; + } + + // Sort literals before non-literals. + if (isLiteralType(a)) { + if (!isLiteralType(bClass)) { + return -1; + } else if (ClassType.isSameGenericClass(a, bClass)) { + // Sort by literal value. + const aLiteralValue = a.priv.literalValue; + const bLiteralValue = bClass.priv.literalValue; + + if ( + (typeof aLiteralValue === 'string' && typeof bLiteralValue === 'string') || + (typeof aLiteralValue === 'number' && typeof bLiteralValue === 'number') + ) { + if (aLiteralValue < bLiteralValue) { + return -1; + } else if (aLiteralValue > bLiteralValue) { + return 1; + } + } + } + } else if (isLiteralType(bClass)) { + return 1; + } + + // Always sort NoneType at the end. + if (ClassType.isBuiltIn(a, 'NoneType')) { + return 1; + } else if (ClassType.isBuiltIn(bClass, 'NoneType')) { + return -1; + } + + // Sort non-generics before generics. 
+ if (a.shared.typeParams.length > 0 || isTupleClass(a)) { + if (bClass.shared.typeParams.length === 0) { + return 1; + } + } else if (bClass.shared.typeParams.length > 0 || isTupleClass(bClass)) { + return -1; + } + + // Sort by class name. + const aName = a.shared.name; + const bName = (b as ClassType).shared.name; + + if (aName < bName) { + return -1; + } else if (aName > bName) { + return 1; + } + + // Sort by type argument count. + const aTypeArgCount = a.priv.typeArgs ? a.priv.typeArgs.length : 0; + const bTypeArgCount = bClass.priv.typeArgs ? bClass.priv.typeArgs.length : 0; + + if (aTypeArgCount < bTypeArgCount) { + return -1; + } else if (aTypeArgCount > bTypeArgCount) { + return 1; + } + + // Sort by type argument. + for (let i = 0; i < aTypeArgCount; i++) { + const typeComparison = compareTypes(a.priv.typeArgs![i], bClass.priv.typeArgs![i], recursionCount); + if (typeComparison !== 0) { + return typeComparison; + } + } + + return 0; + } + + case TypeCategory.Module: { + const aName = a.priv.moduleName; + const bName = (b as ModuleType).priv.moduleName; + return aName < bName ? -1 : aName === bName ? 0 : 1; + } + + case TypeCategory.TypeVar: { + const aName = a.shared.name; + const bName = (b as TypeVarType).shared.name; + return aName < bName ? -1 : aName === bName ? 0 : 1; + } + } + + return 1; +} + +export function doForEachSubtype( + type: Type, + callback: (type: Type, index: number, allSubtypes: Type[]) => void, + sortSubtypes = false +): void { + if (isUnion(type)) { + const subtypes = sortSubtypes ? 
sortTypes(type.priv.subtypes) : type.priv.subtypes; + subtypes.forEach((subtype, index) => { + callback(subtype, index, subtypes); + }); + } else { + callback(type, 0, [type]); + } +} + +export function someSubtypes(type: Type, callback: (type: Type) => boolean): boolean { + if (isUnion(type)) { + return type.priv.subtypes.some((subtype) => { + return callback(subtype); + }); + } else { + return callback(type); + } +} + +export function allSubtypes(type: Type, callback: (type: Type) => boolean): boolean { + if (isUnion(type)) { + return type.priv.subtypes.every((subtype) => { + callback(subtype); + }); + } else { + return callback(type); + } +} + +export function doForEachSignature( + type: FunctionType | OverloadedType, + callback: (type: FunctionType, index: number) => void +) { + if (isFunction(type)) { + callback(type, 0); + } else { + OverloadedType.getOverloads(type).forEach((overload, index) => { + callback(overload, index); + }); + } +} + +// Determines if all of the types in the array are the same. +export function areTypesSame(types: Type[], options: TypeSameOptions): boolean { + if (types.length < 2) { + return true; + } + + for (let i = 1; i < types.length; i++) { + if (!isTypeSame(types[0], types[i], options)) { + return false; + } + } + + return true; +} + +// If either type is "Unknown" (versus Any), propagate the Unknown. Preserve +// the incomplete flag on the unknown if present. The caller should verify that +// one or the other type is Unknown or Any. +export function preserveUnknown(type1: Type, type2: Type): AnyType | UnknownType { + if (isUnknown(type1) && type1.priv.isIncomplete) { + return type1; + } else if (isUnknown(type2) && type2.priv.isIncomplete) { + return type2; + } else if (isUnknown(type1) || isUnknown(type2)) { + return UnknownType.create(); + } else { + return AnyType.create(); + } +} + +// Determines whether the specified type is a type that can be +// combined with other types for a union. 
+export function isUnionableType(subtypes: Type[]): boolean {
+    // If all of the subtypes are TypeForm types, we know that they
+    // are unionable.
+    if (subtypes.every((t) => t.props?.typeForm !== undefined)) {
+        return true;
+    }
+
+    let typeFlags = TypeFlags.Instance | TypeFlags.Instantiable;
+
+    for (const subtype of subtypes) {
+        typeFlags &= subtype.flags;
+    }
+
+    // All subtypes need to be instantiable. Some types (like Any
+    // and None) are both instances and instantiable. It's OK to
+    // include some of these, but at least one subtype needs to
+    // be definitively instantiable (not an instance).
+    return (typeFlags & TypeFlags.Instantiable) !== 0 && (typeFlags & TypeFlags.Instance) === 0;
+}
+
+export function derivesFromAnyOrUnknown(type: Type): boolean {
+    let anyOrUnknown = false;
+
+    doForEachSubtype(type, (subtype) => {
+        // Bug fix: test the per-iteration `subtype`, not the loop-invariant
+        // whole `type` — for a union, isAnyOrUnknown(type) is always false,
+        // so Any/Unknown members were never detected here. The other branches
+        // of this callback already operate on `subtype`.
+        if (isAnyOrUnknown(subtype)) {
+            anyOrUnknown = true;
+        } else if (isInstantiableClass(subtype)) {
+            if (ClassType.derivesFromAnyOrUnknown(subtype)) {
+                anyOrUnknown = true;
+            }
+        } else if (isClassInstance(subtype)) {
+            if (ClassType.derivesFromAnyOrUnknown(subtype)) {
+                anyOrUnknown = true;
+            }
+        }
+    });
+
+    return anyOrUnknown;
+}
+
+export function getFullNameOfType(type: Type): string | undefined {
+    if (type.props?.typeAliasInfo?.shared.fullName) {
+        return type.props.typeAliasInfo.shared.fullName;
+    }
+
+    switch (type.category) {
+        case TypeCategory.Any:
+        case TypeCategory.Unknown:
+            return 'typing.Any';
+
+        case TypeCategory.Class:
+            return type.shared.fullName;
+
+        case TypeCategory.Function:
+            return type.shared.fullName;
+
+        case TypeCategory.Module:
+            return type.priv.moduleName;
+
+        case TypeCategory.Overloaded: {
+            const overloads = OverloadedType.getOverloads(type);
+            if (overloads.length > 0) {
+                return overloads[0].shared.fullName;
+            }
+
+            const impl = OverloadedType.getImplementation(type);
+            if (impl && isFunction(impl)) {
+                return impl.shared.fullName;
+            }
+        }
+    }
+
+    return undefined;
+}
+
+export function
addConditionToType( + type: T, + condition: TypeCondition[] | undefined, + options?: AddConditionOptions +): T { + if (!condition) { + return type; + } + + if (options?.skipSelfCondition) { + condition = condition.filter((c) => !TypeVarType.isSelf(c.typeVar)); + if (condition.length === 0) { + return type; + } + } + + if (options?.skipBoundTypeVars) { + condition = condition.filter((c) => c.typeVar.shared.constraints.length > 0); + if (condition.length === 0) { + return type; + } + } + + switch (type.category) { + case TypeCategory.Unbound: + case TypeCategory.Unknown: + case TypeCategory.Any: + case TypeCategory.Never: + case TypeCategory.Module: + case TypeCategory.TypeVar: + return type; + + case TypeCategory.Function: + return TypeBase.cloneForCondition(type, TypeCondition.combine(type.props?.condition, condition)); + + case TypeCategory.Overloaded: + return OverloadedType.create( + OverloadedType.getOverloads(type).map((t) => addConditionToType(t, condition)) + ) as T; + + case TypeCategory.Class: + return TypeBase.cloneForCondition(type, TypeCondition.combine(type.props?.condition, condition)); + + case TypeCategory.Union: + return combineTypes(type.priv.subtypes.map((t) => addConditionToType(t, condition))) as T; + } +} + +export function getTypeCondition(type: Type): TypeCondition[] | undefined { + switch (type.category) { + case TypeCategory.Unbound: + case TypeCategory.Unknown: + case TypeCategory.Any: + case TypeCategory.Never: + case TypeCategory.Module: + case TypeCategory.TypeVar: + case TypeCategory.Overloaded: + case TypeCategory.Union: + return undefined; + + case TypeCategory.Class: + case TypeCategory.Function: + return type.props?.condition; + } +} + +// Indicates whether the specified type is a recursive type alias +// placeholder that has not yet been resolved. 
+export function isTypeAliasPlaceholder(type: Type): boolean { + return isTypeVar(type) && TypeVarType.isTypeAliasPlaceholder(type); +} + +// Determines whether the type alias placeholder is used directly +// within the specified type. It's OK if it's used indirectly as +// a type argument. +export function isTypeAliasRecursive(typeAliasPlaceholder: TypeVarType, type: Type) { + if (type.category !== TypeCategory.Union) { + if (type === typeAliasPlaceholder) { + return true; + } + + if (!isUnbound(type) && !isTypeAliasPlaceholder(type)) { + return false; + } + + // Handle the specific case where the type alias directly refers to itself. + return ( + type.props?.typeAliasInfo && + type.props.typeAliasInfo.shared.name === typeAliasPlaceholder.shared.recursiveAlias?.name + ); + } + + return ( + findSubtype(type, (subtype) => isTypeVar(subtype) && subtype.shared === typeAliasPlaceholder.shared) !== + undefined + ); +} + +// Recursively transforms all top-level TypeVars that represent recursive +// type aliases into their actual types. +export function transformPossibleRecursiveTypeAlias(type: Type, recursionCount?: number): Type; +export function transformPossibleRecursiveTypeAlias(type: Type | undefined, recursionCount?: number): Type | undefined; +export function transformPossibleRecursiveTypeAlias(type: Type | undefined, recursionCount = 0): Type | undefined { + if (recursionCount >= maxTypeRecursionCount) { + return type; + } + recursionCount++; + + if (type) { + const aliasInfo = type.props?.typeAliasInfo; + + if (isTypeVar(type) && type.shared.recursiveAlias?.name && type.shared.boundType) { + const unspecializedType = TypeBase.isInstance(type) + ? 
convertToInstance(type.shared.boundType) + : type.shared.boundType; + + if (!aliasInfo?.typeArgs || !type.shared.recursiveAlias.typeParams) { + return transformPossibleRecursiveTypeAlias(unspecializedType, recursionCount); + } + + const solution = buildSolution(type.shared.recursiveAlias.typeParams, aliasInfo.typeArgs); + return transformPossibleRecursiveTypeAlias( + applySolvedTypeVars(unspecializedType, solution), + recursionCount + ); + } + + if (isUnion(type) && type.priv.includesRecursiveTypeAlias) { + let newType = mapSubtypes(type, (subtype) => transformPossibleRecursiveTypeAlias(subtype, recursionCount)); + + if (newType !== type && aliasInfo) { + // Copy the type alias information if present. + newType = TypeBase.cloneForTypeAlias(newType, aliasInfo); + } + + return newType; + } + } + + return type; +} + +export function getTypeVarScopeId(type: Type): TypeVarScopeId | undefined { + if (isClass(type)) { + return type.shared.typeVarScopeId; + } + + if (isFunction(type)) { + return type.shared.typeVarScopeId; + } + + if (isTypeVar(type)) { + return type.priv.scopeId; + } + + return undefined; +} + +// This is similar to getTypeVarScopeId except that it includes +// the secondary scope IDs for functions. +export function getTypeVarScopeIds(type: Type): TypeVarScopeId[] { + const scopeIds: TypeVarScopeId[] = []; + + const scopeId = getTypeVarScopeId(type); + if (scopeId) { + scopeIds.push(scopeId); + } + + if (isFunction(type)) { + if (type.priv.constructorTypeVarScopeId) { + scopeIds.push(type.priv.constructorTypeVarScopeId); + } + } + + return scopeIds; +} + +// Specializes the class with "Unknown" type args (or the equivalent for ParamSpecs +// or TypeVarTuples). 
+export function specializeWithUnknownTypeArgs(type: ClassType, tupleClassType?: ClassType): ClassType { + if (type.shared.typeParams.length === 0) { + return type; + } + + if (isTupleClass(type)) { + return ClassType.cloneIncludeSubclasses( + specializeTupleClass( + type, + [{ type: UnknownType.create(), isUnbounded: true }], + /* isTypeArgExplicit */ false + ), + !!type.priv.includeSubclasses + ); + } + + return ClassType.specialize( + type, + type.shared.typeParams.map((param) => getUnknownForTypeVar(param, tupleClassType)), + /* isTypeArgExplicit */ false, + /* includeSubclasses */ type.priv.includeSubclasses + ); +} + +// Returns "Unknown" for simple TypeVars or the equivalent for a ParamSpec. +export function getUnknownForTypeVar(typeVar: TypeVarType, tupleClassType?: ClassType): Type { + if (isParamSpec(typeVar)) { + return ParamSpecType.getUnknown(); + } + + if (isTypeVarTuple(typeVar) && tupleClassType) { + return getUnknownForTypeVarTuple(tupleClassType); + } + + return UnknownType.create(); +} + +export function getUnknownForTypeVarTuple(tupleClassType: ClassType): Type { + assert(isInstantiableClass(tupleClassType) && ClassType.isBuiltIn(tupleClassType, 'tuple')); + + return ClassType.cloneAsInstance( + specializeTupleClass( + tupleClassType, + [{ type: UnknownType.create(), isUnbounded: true }], + /* isTypeArgExplicit */ true, + /* isUnpacked */ true + ) + ); +} + +// Returns the equivalent of "Callable[..., Unknown]". +export function getUnknownTypeForCallable(): FunctionType { + const newFunction = FunctionType.createSynthesizedInstance('', FunctionTypeFlags.GradualCallableForm); + FunctionType.addDefaultParams(newFunction); + newFunction.shared.declaredReturnType = UnknownType.create(); + return newFunction; +} + +// If the class is generic and not already specialized, this function +// "self specializes" the class, filling in its own type parameters +// as type arguments. 
+export function selfSpecializeClass(type: ClassType, options?: SelfSpecializeOptions): ClassType { + // We can't use requiresTypeArgs(type) here because it returns false + // if the type parameters have default values. + if (type.shared.typeParams.length === 0) { + return type; + } + + if (type.priv.typeArgs && !options?.overrideTypeArgs) { + return type; + } + + const typeParams = type.shared.typeParams.map((typeParam) => { + if (isTypeVarTuple(typeParam)) { + typeParam = TypeVarType.cloneForUnpacked(typeParam); + } + + return options?.useBoundTypeVars ? TypeVarType.cloneAsBound(typeParam) : typeParam; + }); + return ClassType.specialize(type, typeParams); +} + +// Determines whether the type derives from tuple. If so, it returns +// the specialized tuple type. +export function getSpecializedTupleType(type: Type): ClassType | undefined { + let classType: ClassType | undefined; + + if (isInstantiableClass(type)) { + classType = type; + } else if (isClassInstance(type)) { + classType = ClassType.cloneAsInstantiable(type); + } + + if (!classType) { + return undefined; + } + + // See if this class derives from Tuple or tuple. If it does, we'll assume that it + // hasn't been overridden in a way that changes the behavior of the tuple class. 
+ const tupleClass = classType.shared.mro.find((mroClass) => isInstantiableClass(mroClass) && isTupleClass(mroClass)); + if (!tupleClass || !isInstantiableClass(tupleClass)) { + return undefined; + } + + if (ClassType.isSameGenericClass(classType, tupleClass)) { + return classType; + } + + const solution = buildSolutionFromSpecializedClass(classType); + return applySolvedTypeVars(tupleClass, solution) as ClassType; +} + +export function isLiteralType(type: ClassType): boolean { + return TypeBase.isInstance(type) && type.priv.literalValue !== undefined; +} + +export function isLiteralTypeOrUnion(type: Type, allowNone = false): boolean { + if (isClassInstance(type)) { + if (allowNone && isNoneInstance(type)) { + return true; + } + + return type.priv.literalValue !== undefined; + } + + if (isUnion(type)) { + return !findSubtype(type, (subtype) => { + if (!isClassInstance(subtype)) { + return true; + } + + if (isNoneInstance(subtype)) { + return !allowNone; + } + + return subtype.priv.literalValue === undefined; + }); + } + + return false; +} + +export function isLiteralLikeType(type: ClassType): boolean { + if (type.priv.literalValue !== undefined) { + return true; + } + + if (ClassType.isBuiltIn(type, 'LiteralString')) { + return true; + } + + return false; +} + +export function isSentinelLiteral(type: Type): boolean { + return isClassInstance(type) && type.priv.literalValue instanceof SentinelLiteral; +} + +export function containsLiteralType(type: Type, includeTypeArgs = false): boolean { + class ContainsLiteralTypeWalker extends TypeWalker { + foundLiteral = false; + + constructor(private _includeTypeArgs: boolean) { + super(); + } + + override visitClass(classType: ClassType): void { + if (isClassInstance(classType)) { + if (isLiteralLikeType(classType)) { + this.foundLiteral = true; + this.cancelWalk(); + } + } + + if (this._includeTypeArgs) { + super.visitClass(classType); + } + } + } + + const walker = new ContainsLiteralTypeWalker(includeTypeArgs); + 
walker.walk(type); + return walker.foundLiteral; +} + +// If all of the subtypes are literals with the same built-in class (e.g. +// all 'int' or all 'str'), this function returns the name of that type. If +// some of the subtypes are not literals or the literal classes don't match, +// it returns undefined. +export function getLiteralTypeClassName(type: Type): string | undefined { + if (isClassInstance(type)) { + if (type.priv.literalValue !== undefined && ClassType.isBuiltIn(type)) { + return type.shared.name; + } + return undefined; + } + + if (isUnion(type)) { + let className: string | undefined; + let foundMismatch = false; + + doForEachSubtype(type, (subtype) => { + const subtypeLiteralTypeName = getLiteralTypeClassName(subtype); + if (!subtypeLiteralTypeName) { + foundMismatch = true; + } else if (!className) { + className = subtypeLiteralTypeName; + } + }); + + return foundMismatch ? undefined : className; + } + + return undefined; +} + +export function stripTypeForm(type: Type): Type { + if (type.props?.typeForm) { + return TypeBase.cloneWithTypeForm(type, undefined); + } + + return type; +} + +export function stripTypeFormRecursive(type: Type, recursionCount = 0): Type { + if (recursionCount > maxTypeRecursionCount) { + return type; + } + recursionCount++; + + if (type.props?.typeForm) { + type = TypeBase.cloneWithTypeForm(type, undefined); + } + + return mapSubtypes(type, (subtype) => stripTypeFormRecursive(subtype, recursionCount)); +} + +export function getUnionSubtypeCount(type: Type): number { + if (isUnion(type)) { + return type.priv.subtypes.length; + } + + return 1; +} + +export function isEllipsisType(type: Type): boolean { + return isAny(type) && type.priv.isEllipsis; +} + +export function isProperty(type: Type) { + return isClassInstance(type) && ClassType.isPropertyClass(type); +} + +export function isCallableType(type: Type): boolean { + if (isFunctionOrOverloaded(type) || isAnyOrUnknown(type)) { + return true; + } + + if 
(isEffectivelyInstantiable(type)) { + return true; + } + + if (isClass(type)) { + if (TypeBase.isInstantiable(type)) { + return true; + } + + const callMember = lookUpObjectMember(type, '__call__', MemberAccessFlags.SkipInstanceMembers); + return !!callMember; + } + + if (isUnion(type)) { + return type.priv.subtypes.every((subtype) => isCallableType(subtype)); + } + + return false; +} + +export function isDescriptorInstance(type: Type, requireSetter = false): boolean { + if (isUnion(type)) { + return type.priv.subtypes.every((subtype) => isMaybeDescriptorInstance(subtype, requireSetter)); + } + + return isMaybeDescriptorInstance(type, requireSetter); +} + +export function isMaybeDescriptorInstance(type: Type, requireSetter = false): boolean { + if (isUnion(type)) { + return type.priv.subtypes.some((subtype) => isMaybeDescriptorInstance(subtype, requireSetter)); + } + + if (!isClassInstance(type)) { + return false; + } + + if (!ClassType.getSymbolTable(type).has('__get__')) { + return false; + } + + if (requireSetter && !ClassType.getSymbolTable(type).has('__set__')) { + return false; + } + + return true; +} + +export function isTupleGradualForm(type: Type) { + return ( + isClassInstance(type) && + isTupleClass(type) && + type.priv.tupleTypeArgs && + type.priv.tupleTypeArgs.length === 1 && + isAnyOrUnknown(type.priv.tupleTypeArgs[0].type) && + type.priv.tupleTypeArgs[0].isUnbounded + ); +} + +export function isTupleClass(type: ClassType) { + return ClassType.isBuiltIn(type, 'tuple'); +} + +// Indicates whether the type is a tuple class of +// the form tuple[x, ...] where the number of elements +// in the tuple is unknown. 
+export function isUnboundedTupleClass(type: ClassType) { + return type.priv.tupleTypeArgs?.some( + (t) => t.isUnbounded || isUnpackedTypeVarTuple(t.type) || isUnpackedTypeVar(t.type) + ); +} + +// Indicates whether the specified index is within range and its type is unambiguous +// in that it doesn't involve any element ranges that are of indeterminate length. +export function isTupleIndexUnambiguous(type: ClassType, index: number) { + if (!type.priv.tupleTypeArgs) { + return false; + } + + const unboundedIndex = type.priv.tupleTypeArgs.findIndex( + (t) => t.isUnbounded || isUnpackedTypeVarTuple(t.type) || isUnpackedTypeVar(t.type) + ); + + if (index < 0) { + const lowerIndexLimit = unboundedIndex < 0 ? 0 : unboundedIndex; + index += type.priv.tupleTypeArgs.length; + return index >= lowerIndexLimit; + } + + const upperIndexLimit = unboundedIndex < 0 ? type.priv.tupleTypeArgs.length : unboundedIndex; + return index < upperIndexLimit; +} + +// Partially specializes a type within the context of a specified +// (presumably specialized) class. Optionally specializes the `Self` +// type variables, replacing them with selfClass. +export function partiallySpecializeType( + type: Type, + contextClassType: ClassType, + typeClassType: ClassType | undefined, + selfClass?: ClassType | TypeVarType +): Type { + // If the context class is not specialized (or doesn't need specialization), + // then there's no need to do any more work. + if (ClassType.isUnspecialized(contextClassType) && !selfClass) { + return type; + } + + // Partially specialize the type using the specialized class type vars. + const solution = buildSolutionFromSpecializedClass(contextClassType); + + if (selfClass) { + addSolutionForSelfType(solution, contextClassType, selfClass); + } + + let result = applySolvedTypeVars(type, solution, { typeClassType }); + + // If this is a property, we may need to partially specialize the + // access methods associated with it. 
+ if (isClass(result)) { + if (result.priv.fgetInfo || result.priv.fsetInfo || result.priv.fdelInfo) { + function updatePropertyMethodInfo(methodInfo?: PropertyMethodInfo): PropertyMethodInfo | undefined { + if (!methodInfo) { + return undefined; + } + + return { + methodType: partiallySpecializeType( + methodInfo.methodType, + contextClassType, + typeClassType, + selfClass + ) as FunctionType, + classType: methodInfo.classType, + }; + } + + result = TypeBase.cloneType(result); + result.priv.fgetInfo = updatePropertyMethodInfo(result.priv.fgetInfo); + result.priv.fsetInfo = updatePropertyMethodInfo(result.priv.fsetInfo); + result.priv.fdelInfo = updatePropertyMethodInfo(result.priv.fdelInfo); + } + } + + return result; +} + +export function addSolutionForSelfType( + solution: ConstraintSolution, + contextClassType: ClassType, + selfClass: ClassType | TypeVarType +) { + const synthesizedSelfTypeVar = synthesizeTypeVarForSelfCls(contextClassType, /* isClsParam */ false); + const selfInstance = convertToInstance(selfClass); + + // We can't call stripLiteralValue here because that method requires the type evaluator. + // Instead, we'll do a simplified version of it here. + const selfWithoutLiteral = mapSubtypes(selfInstance, (subtype) => { + if (isClass(subtype)) { + if (subtype.priv.literalValue !== undefined) { + return ClassType.cloneWithLiteral(subtype, /* value */ undefined); + } + } + + return subtype; + }); + + solution.setType(synthesizedSelfTypeVar, selfWithoutLiteral); +} + +// Looks for duplicate function types within the type and ensures that +// if they are generic, they have unique type variables. 
+export function ensureSignaturesAreUnique( + type: T, + signatureTracker: UniqueSignatureTracker, + expressionOffset: number +): T { + const transformer = new UniqueFunctionSignatureTransformer(signatureTracker, expressionOffset); + return transformer.apply(type, 0) as T; +} + +export function makeFunctionTypeVarsBound(type: FunctionType | OverloadedType): FunctionType | OverloadedType { + const scopeIds: TypeVarScopeId[] = []; + doForEachSignature(type, (signature) => { + const localScopeId = getTypeVarScopeId(signature); + if (localScopeId) { + scopeIds.push(localScopeId); + } + }); + + return makeTypeVarsBound(type, scopeIds); +} + +export function makeTypeVarsBound>(type: T, scopeIds: TypeVarScopeId[] | undefined): T; +export function makeTypeVarsBound(type: Type, scopeIds: TypeVarScopeId[] | undefined): Type { + if (scopeIds && scopeIds.length === 0) { + return type; + } + + const transformer = new BoundTypeVarTransform(scopeIds); + return transformer.apply(type, 0); +} + +export function makeTypeVarsFree>(type: T, scopeIds: TypeVarScopeId[]): T; +export function makeTypeVarsFree(type: Type, scopeIds: TypeVarScopeId[]): Type { + if (scopeIds.length === 0) { + return type; + } + + const transformer = new FreeTypeVarTransform(scopeIds); + return transformer.apply(type, 0); +} + +// Specializes a (potentially generic) type by substituting +// type variables from a type var map. +export function applySolvedTypeVars(type: Type, solution: ConstraintSolution, options: ApplyTypeVarOptions = {}): Type { + // Use a shortcut if constraints is empty and no transform is necessary. + if (solution.isEmpty() && !options.replaceUnsolved) { + return type; + } + + const transformer = new ApplySolvedTypeVarsTransformer(solution, options); + return transformer.apply(type, 0); +} + +// Validates that a default type associated with a TypeVar does not refer to +// other TypeVars or ParamSpecs that are out of scope. 
+export function validateTypeVarDefault( + typeVar: TypeVarType, + liveTypeParams: TypeVarType[], + invalidTypeVars: Set +) { + // If there is no default type or the default type is concrete, there's + // no need to do any more work here. + if (typeVar.shared.isDefaultExplicit && requiresSpecialization(typeVar.shared.defaultType)) { + const validator = new TypeVarDefaultValidator(liveTypeParams, invalidTypeVars); + validator.apply(typeVar.shared.defaultType, 0); + } +} + +// During bidirectional type inference for constructors, an "expected type" +// is used to prepopulate the type var map. This is problematic when the +// expected type uses TypeVars that are not part of the context of the +// class we are constructing. We'll replace these type variables with +// so-called "unification" type variables. +export function transformExpectedType( + expectedType: Type, + liveTypeVarScopes: TypeVarScopeId[], + usageOffset: number | undefined +): Type { + const transformer = new UnificationTypeTransformer(liveTypeVarScopes, usageOffset); + return transformer.apply(expectedType, 0); +} + +// Given a protocol class (or abstract class), this function returns +// a set of all the symbols (indexed by symbol name) that are part of +// that protocol and its protocol parent classes. If a same-named symbol +// appears in a parent and a child, the child overrides the parent. +export function getProtocolSymbols(classType: ClassType) { + const symbolMap = new Map(); + + if ((classType.shared.flags & ClassTypeFlags.ProtocolClass) !== 0) { + getProtocolSymbolsRecursive(classType, symbolMap, ClassTypeFlags.ProtocolClass); + } + + return symbolMap; +} + +export function getProtocolSymbolsRecursive( + classType: ClassType, + symbolMap: Map, + classFlags = ClassTypeFlags.ProtocolClass, + recursionCount = 0 +) { + // Special-case the NamedTuple class because it's not really + // a separate class at runtime. 
The typeshed stubs model it + // this way, and we don't want it to be treated as a protocol + // or abstract class. + if (ClassType.isBuiltIn(classType, 'NamedTuple')) { + return; + } + + if (recursionCount > maxTypeRecursionCount) { + return; + } + + classType.shared.baseClasses.forEach((baseClass) => { + if (isClass(baseClass) && (baseClass.shared.flags & classFlags) !== 0) { + getProtocolSymbolsRecursive(baseClass, symbolMap, classFlags, recursionCount + 1); + } + }); + + if ((classType.shared.flags & classFlags) !== 0) { + ClassType.getSymbolTable(classType).forEach((symbol, name) => { + if (!symbol.isIgnoredForProtocolMatch()) { + symbolMap.set(name, { + symbol, + classType, + unspecializedClassType: classType, + isInstanceMember: symbol.isInstanceMember(), + isClassMember: symbol.isClassMember(), + isSlotsMember: symbol.isSlotsMember(), + isClassVar: isEffectivelyClassVar(symbol, /* isDataclass */ false), + isReadOnly: false, + isTypeDeclared: symbol.hasTypedDeclarations(), + skippedUndeclaredType: false, + }); + } + }); + } +} + +// Determines the maximum depth of a tuple, list, set or dictionary. +// For example, if the type is tuple[tuple[tuple[int]]], its depth would be 3. 
+export function getContainerDepth(type: Type, recursionCount = 0) { + if (recursionCount > maxTypeRecursionCount) { + return 1; + } + + recursionCount++; + + if (!isClassInstance(type)) { + return 0; + } + + let maxChildDepth = 0; + + if (type.priv.tupleTypeArgs) { + type.priv.tupleTypeArgs.forEach((typeArgInfo) => { + doForEachSubtype(typeArgInfo.type, (subtype) => { + const childDepth = getContainerDepth(subtype, recursionCount); + maxChildDepth = Math.max(childDepth, maxChildDepth); + }); + }); + } else if (type.priv.typeArgs) { + type.priv.typeArgs.forEach((typeArg) => { + doForEachSubtype(typeArg, (subtype) => { + const childDepth = getContainerDepth(subtype, recursionCount); + maxChildDepth = Math.max(childDepth, maxChildDepth); + }); + }); + } else { + return 0; + } + + return 1 + maxChildDepth; +} + +export function lookUpObjectMember( + objectType: ClassType, + memberName: string, + flags = MemberAccessFlags.Default, + skipMroClass?: ClassType | undefined +): ClassMember | undefined { + if (isClassInstance(objectType)) { + return lookUpClassMember(objectType, memberName, flags, skipMroClass); + } + + return undefined; +} + +// Looks up a member in a class using the multiple-inheritance rules +// defined by Python. +export function lookUpClassMember( + classType: ClassType, + memberName: string, + flags = MemberAccessFlags.Default, + skipMroClass?: ClassType | undefined +): ClassMember | undefined { + // Look in the metaclass first. + const metaclass = classType.shared.effectiveMetaclass; + + // Skip the "type" class as an optimization because it is known to not + // define any instance variables, and it's by far the most common metaclass. 
+ if (metaclass && isClass(metaclass) && !ClassType.isBuiltIn(metaclass, 'type')) { + const metaMemberItr = getClassMemberIterator(metaclass, memberName, MemberAccessFlags.SkipClassMembers); + const metaMember = metaMemberItr.next()?.value; + + // If the metaclass defines the member and we didn't hit an Unknown + // class in the metaclass MRO, use the metaclass member. + if (metaMember && !isAnyOrUnknown(metaMember.classType)) { + // Set the isClassMember to true because it's a class member from the + // perspective of the classType. + metaMember.isClassMember = true; + return metaMember; + } + } + + const memberItr = getClassMemberIterator(classType, memberName, flags, skipMroClass); + + return memberItr.next()?.value; +} + +// Iterates members in a class matching memberName using the multiple-inheritance rules. +// For more details, see this note on method resolution +// order: https://www.python.org/download/releases/2.3/mro/. +// As it traverses the inheritance tree, it applies partial specialization +// to the the base class and member. For example, if ClassA inherits from +// ClassB[str] which inherits from Dict[_T1, int], a search for '__iter__' +// would return a class type of Dict[str, int] and a symbolType of +// (self) -> Iterator[str]. +// If skipMroClass is defined, all MRO classes up to and including that class +// are skipped. +export function* getClassMemberIterator( + classType: ClassType | AnyType | UnknownType, + memberName: string, + flags = MemberAccessFlags.Default, + skipMroClass?: ClassType | undefined +) { + const declaredTypesOnly = (flags & MemberAccessFlags.DeclaredTypesOnly) !== 0; + let skippedUndeclaredType = false; + + if (isClass(classType)) { + let classFlags = ClassIteratorFlags.Default; + if (flags & MemberAccessFlags.SkipOriginalClass) { + if (isClass(classType)) { + skipMroClass = isClassInstance(classType) ? 
ClassType.cloneAsInstantiable(classType) : classType; + } + } + if (flags & MemberAccessFlags.SkipBaseClasses) { + classFlags = classFlags | ClassIteratorFlags.SkipBaseClasses; + } + if (flags & MemberAccessFlags.SkipObjectBaseClass) { + classFlags = classFlags | ClassIteratorFlags.SkipObjectBaseClass; + } + if (flags & MemberAccessFlags.SkipTypeBaseClass) { + classFlags = classFlags | ClassIteratorFlags.SkipTypeBaseClass; + } + + const classItr = getClassIterator(classType, classFlags, skipMroClass); + + for (const [mroClass, specializedMroClass] of classItr) { + if (!isInstantiableClass(mroClass)) { + if (!declaredTypesOnly) { + const classType = isAnyOrUnknown(mroClass) ? mroClass : UnknownType.create(); + + // The class derives from an unknown type, so all bets are off + // when trying to find a member. Return an unknown symbol. + const cm: ClassMember = { + symbol: Symbol.createWithType(SymbolFlags.None, mroClass), + isInstanceMember: false, + isClassMember: true, + isClassVar: false, + isSlotsMember: false, + classType, + unspecializedClassType: classType, + isReadOnly: false, + isTypeDeclared: false, + skippedUndeclaredType: false, + }; + yield cm; + } + continue; + } + + if (!isInstantiableClass(specializedMroClass)) { + continue; + } + + const memberFields = ClassType.getSymbolTable(specializedMroClass); + const skipTdEntry = + (flags & MemberAccessFlags.SkipTypedDictEntries) !== 0 && + specializedMroClass.shared.typedDictEntries?.knownItems.has(memberName); + + // Look at instance members first if requested. 
+ if ((flags & MemberAccessFlags.SkipInstanceMembers) === 0) { + const symbol = memberFields.get(memberName); + + if (symbol && symbol.isInstanceMember() && !skipTdEntry) { + const hasDeclaredType = symbol.hasTypedDeclarations(); + if (!declaredTypesOnly || hasDeclaredType) { + const cm: ClassMember = { + symbol, + isInstanceMember: true, + isClassMember: symbol.isClassMember(), + isSlotsMember: symbol.isSlotsMember(), + isClassVar: isEffectivelyClassVar(symbol, ClassType.isDataClass(specializedMroClass)), + classType: specializedMroClass, + unspecializedClassType: mroClass, + isReadOnly: isMemberReadOnly(specializedMroClass, memberName), + isTypeDeclared: hasDeclaredType, + skippedUndeclaredType, + }; + yield cm; + } else { + skippedUndeclaredType = true; + } + } + } + + // Next look at class members. + if ((flags & MemberAccessFlags.SkipClassMembers) === 0) { + let symbol = memberFields.get(memberName); + + if (symbol && symbol.isClassMember() && !skipTdEntry) { + const hasDeclaredType = symbol.hasTypedDeclarations(); + if (!declaredTypesOnly || hasDeclaredType) { + let isInstanceMember = symbol.isInstanceMember(); + let isClassMember = true; + + // For data classes and typed dicts, variables that are declared + // within the class are treated as instance variables. This distinction + // is important in cases where a variable is a callable type because + // we don't want to bind it to the instance like we would for a + // class member. + const isDataclass = ClassType.isDataClass(specializedMroClass); + const isTypedDict = ClassType.isTypedDictClass(specializedMroClass); + if (hasDeclaredType && (isDataclass || isTypedDict)) { + const decls = symbol.getDeclarations(); + if (decls.length > 0 && decls[0].type === DeclarationType.Variable) { + isInstanceMember = true; + isClassMember = isDataclass; + } + } + + // Handle the special case of a __call__ class member in a partial class. 
+ if ( + memberName === '__call__' && + classType.priv.partialCallType && + ClassType.isSameGenericClass( + TypeBase.isInstance(classType) ? ClassType.cloneAsInstantiable(classType) : classType, + specializedMroClass + ) + ) { + symbol = Symbol.createWithType(SymbolFlags.ClassMember, classType.priv.partialCallType); + } + + const cm: ClassMember = { + symbol, + isInstanceMember, + isClassMember, + isSlotsMember: symbol.isSlotsMember(), + isClassVar: isEffectivelyClassVar(symbol, isDataclass), + classType: specializedMroClass, + unspecializedClassType: mroClass, + isReadOnly: false, + isTypeDeclared: hasDeclaredType, + skippedUndeclaredType, + }; + yield cm; + } else { + skippedUndeclaredType = true; + } + } + } + } + } else if (isAnyOrUnknown(classType)) { + // The class derives from an unknown type, so all bets are off + // when trying to find a member. Return an Any or Unknown symbol. + const cm: ClassMember = { + symbol: Symbol.createWithType(SymbolFlags.None, classType), + isInstanceMember: false, + isClassMember: true, + isSlotsMember: false, + isClassVar: false, + classType, + unspecializedClassType: classType, + isReadOnly: false, + isTypeDeclared: false, + skippedUndeclaredType: false, + }; + yield cm; + } + + return undefined; +} + +// Checks for whether the member is effectively read only because it +// belongs to a frozen dataclass or a named tuple. 
+export function isMemberReadOnly(classType: ClassType, name: string): boolean { + if (ClassType.hasNamedTupleEntry(classType, name)) { + return true; + } + + if (ClassType.isDataClassFrozen(classType)) { + const dcEntries = classType.shared?.dataClassEntries; + if (dcEntries?.some((entry) => entry.name === name)) { + return true; + } + } + + return false; +} + +export function* getClassIterator(classType: Type, flags = ClassIteratorFlags.Default, skipMroClass?: ClassType) { + if (isClass(classType)) { + let foundSkipMroClass = skipMroClass === undefined; + + for (const mroClass of classType.shared.mro) { + // Are we still searching for the skipMroClass? + if (!foundSkipMroClass && skipMroClass) { + if (!isClass(mroClass)) { + foundSkipMroClass = true; + } else if (ClassType.isSameGenericClass(mroClass, skipMroClass)) { + foundSkipMroClass = true; + continue; + } else { + continue; + } + } + + // If mroClass is an ancestor of classType, partially specialize + // it in the context of classType. + const specializedMroClass = partiallySpecializeType(mroClass, classType, /* typeClassType */ undefined); + + // Should we ignore members on the 'object' base class? + if (flags & ClassIteratorFlags.SkipObjectBaseClass) { + if (isInstantiableClass(specializedMroClass)) { + if (ClassType.isBuiltIn(specializedMroClass, 'object')) { + break; + } + } + } + + // Should we ignore members on the 'type' base class? + if (flags & ClassIteratorFlags.SkipTypeBaseClass) { + if (isInstantiableClass(specializedMroClass)) { + if (ClassType.isBuiltIn(specializedMroClass, 'type')) { + break; + } + } + } + + yield [mroClass, specializedMroClass]; + + if ((flags & ClassIteratorFlags.SkipBaseClasses) !== 0) { + break; + } + } + } + + return undefined; +} + +export function getClassFieldsRecursive(classType: ClassType): Map { + const memberMap = new Map(); + + // Evaluate the types of members from the end of the MRO to the beginning. 
+ ClassType.getReverseMro(classType).forEach((mroClass) => { + const specializedMroClass = partiallySpecializeType(mroClass, classType, /* typeClassType */ undefined); + + if (isClass(specializedMroClass)) { + ClassType.getSymbolTable(specializedMroClass).forEach((symbol, name) => { + if (!symbol.isIgnoredForProtocolMatch() && symbol.hasTypedDeclarations()) { + memberMap.set(name, { + classType: specializedMroClass, + unspecializedClassType: mroClass, + symbol, + isInstanceMember: symbol.isInstanceMember(), + isClassMember: symbol.isClassMember(), + isSlotsMember: symbol.isSlotsMember(), + isClassVar: isEffectivelyClassVar(symbol, ClassType.isDataClass(specializedMroClass)), + isReadOnly: isMemberReadOnly(specializedMroClass, name), + isTypeDeclared: true, + skippedUndeclaredType: false, + }); + } + }); + } else { + // If this ancestor class is unknown, throw away all symbols + // found so far because they could be overridden by the unknown class. + memberMap.clear(); + } + }); + + return memberMap; +} + +// Combines two lists of type var types, maintaining the combined order +// but removing any duplicates. +export function addTypeVarsToListIfUnique(list1: TypeVarType[], list2: TypeVarType[], typeVarScopeId?: TypeVarScopeId) { + for (const type2 of list2) { + if (typeVarScopeId && type2.priv.scopeId !== typeVarScopeId) { + continue; + } + + if (!list1.find((type1) => isTypeSame(type1, type2))) { + list1.push(type2); + } + } +} + +// Walks the type recursively (in a depth-first manner), finds all +// type variables that are referenced, and returns an ordered list +// of unique type variables. For example, if the type is +// Union[List[Dict[_T1, _T2]], _T1, _T3], the result would be +// [_T1, _T2, _T3]. 
+export function getTypeVarArgsRecursive(type: Type, recursionCount = 0): TypeVarType[] { + if (recursionCount > maxTypeRecursionCount) { + return []; + } + recursionCount++; + + const aliasInfo = type.props?.typeAliasInfo; + if (aliasInfo) { + const combinedList: TypeVarType[] = []; + + if (aliasInfo.typeArgs) { + aliasInfo?.typeArgs.forEach((typeArg) => { + addTypeVarsToListIfUnique(combinedList, getTypeVarArgsRecursive(typeArg, recursionCount)); + }); + + return combinedList; + } + + if (aliasInfo.shared.typeParams) { + aliasInfo.shared.typeParams.forEach((typeParam) => { + addTypeVarsToListIfUnique(combinedList, [typeParam]); + }); + + return combinedList; + } + } + + if (isTypeVar(type)) { + // Don't return any recursive type alias placeholders. + if (type.shared.recursiveAlias) { + return []; + } + + // Don't return any bound type variables. + if (TypeVarType.isBound(type)) { + return []; + } + + // Don't return any P.args or P.kwargs types. + if (isParamSpec(type) && type.priv.paramSpecAccess) { + return [TypeVarType.cloneForParamSpecAccess(type, /* access */ undefined)]; + } + + return [TypeBase.isInstantiable(type) ? TypeVarType.cloneAsInstance(type) : type]; + } + + if (isClass(type)) { + const combinedList: TypeVarType[] = []; + const typeArgs = type.priv.tupleTypeArgs ? 
type.priv.tupleTypeArgs.map((e) => e.type) : type.priv.typeArgs; + if (typeArgs) { + typeArgs.forEach((typeArg) => { + addTypeVarsToListIfUnique(combinedList, getTypeVarArgsRecursive(typeArg, recursionCount)); + }); + } + + return combinedList; + } + + if (isUnion(type)) { + const combinedList: TypeVarType[] = []; + doForEachSubtype(type, (subtype) => { + addTypeVarsToListIfUnique(combinedList, getTypeVarArgsRecursive(subtype, recursionCount)); + }); + return combinedList; + } + + if (isFunction(type)) { + const combinedList: TypeVarType[] = []; + + for (let i = 0; i < type.shared.parameters.length; i++) { + addTypeVarsToListIfUnique( + combinedList, + getTypeVarArgsRecursive(FunctionType.getParamType(type, i), recursionCount) + ); + } + + const returnType = FunctionType.getEffectiveReturnType(type); + if (returnType) { + addTypeVarsToListIfUnique(combinedList, getTypeVarArgsRecursive(returnType, recursionCount)); + } + + return combinedList; + } + + return []; +} + +// Creates a specialized version of the class, filling in any unspecified +// type arguments with Unknown or default value. +export function specializeWithDefaultTypeArgs(type: ClassType): ClassType { + if (type.shared.typeParams.length === 0 || type.priv.typeArgs || !type.shared.typeVarScopeId) { + return type; + } + + const solution = new ConstraintSolution(); + + return applySolvedTypeVars(type, solution, { + replaceUnsolved: { scopeIds: [type.shared.typeVarScopeId], tupleClassType: undefined }, + }) as ClassType; +} + +// Builds a mapping between type parameters and their specialized +// types. For example, if the generic type is Dict[_T1, _T2] and the +// specialized type is Dict[str, int], it returns a map that associates +// _T1 with str and _T2 with int. 
+export function buildSolutionFromSpecializedClass(classType: ClassType): ConstraintSolution { + const typeParams = ClassType.getTypeParams(classType); + let typeArgs: Type[] | undefined; + + if (classType.priv.tupleTypeArgs) { + typeArgs = [ + convertToInstance( + specializeTupleClass( + classType, + classType.priv.tupleTypeArgs, + classType.priv.isTypeArgExplicit, + /* isUnpacked */ true + ) + ), + ]; + } else { + typeArgs = classType.priv.typeArgs; + } + + return buildSolution(typeParams, typeArgs); +} + +export function buildSolution(typeParams: TypeVarType[], typeArgs: Type[] | undefined): ConstraintSolution { + const solution = new ConstraintSolution(); + + if (!typeArgs) { + return solution; + } + + typeParams.forEach((typeParam, index) => { + if (index < typeArgs.length) { + solution.setType(typeParam, typeArgs[index]); + } + }); + + return solution; +} + +// Determines the specialized base class type that srcType derives from. +export function specializeForBaseClass(srcType: ClassType, baseClass: ClassType): ClassType { + const typeParams = ClassType.getTypeParams(baseClass); + + // If there are no type parameters for the specified base class, + // no specialization is required. + if (typeParams.length === 0) { + return baseClass; + } + + const solution = buildSolutionFromSpecializedClass(srcType); + const specializedType = applySolvedTypeVars(baseClass, solution); + assert(isInstantiableClass(specializedType)); + return specializedType as ClassType; +} + +export function derivesFromStdlibClass(classType: ClassType, className: string) { + return classType.shared.mro.some((mroClass) => isClass(mroClass) && ClassType.isBuiltIn(mroClass, className)); +} + +// If ignoreUnknown is true, an unknown base class is ignored when +// checking for derivation. If ignoreUnknown is false, a return value +// of true is assumed. 
+export function derivesFromClassRecursive(classType: ClassType, baseClassToFind: ClassType, ignoreUnknown: boolean) { + if (ClassType.isSameGenericClass(classType, baseClassToFind)) { + return true; + } + + for (const baseClass of classType.shared.baseClasses) { + if (isInstantiableClass(baseClass)) { + if (derivesFromClassRecursive(baseClass, baseClassToFind, ignoreUnknown)) { + return true; + } + } else if (!ignoreUnknown && isAnyOrUnknown(baseClass)) { + // If the base class is unknown, we have to make a conservative assumption. + return true; + } + } + + return false; +} + +export function synthesizeTypeVarForSelfCls(classType: ClassType, isClsParam: boolean): TypeVarType { + const selfType = TypeVarType.createInstance(`__type_of_self__`); + const scopeId = getTypeVarScopeId(classType) ?? ''; + selfType.shared.isSynthesized = true; + selfType.shared.isSynthesizedSelf = true; + selfType.priv.scopeId = scopeId; + selfType.priv.scopeName = ''; + selfType.priv.nameWithScope = TypeVarType.makeNameWithScope(selfType.shared.name, scopeId, selfType.priv.scopeName); + + const boundType = ClassType.specialize( + classType, + /* typeArgs */ undefined, + /* isTypeArgExplicit */ false, + /* includeSubclasses */ !!classType.priv.includeSubclasses + ); + + selfType.shared.boundType = ClassType.cloneAsInstance(boundType); + + return isClsParam ? TypeVarType.cloneAsInstantiable(selfType) : selfType; +} + +// Returns the declared "return" type (the type returned from a return statement) +// if it was declared, or undefined otherwise. +export function getDeclaredGeneratorReturnType(functionType: FunctionType): Type | undefined { + const returnType = FunctionType.getEffectiveReturnType(functionType); + if (returnType) { + const generatorTypeArgs = getGeneratorTypeArgs(returnType); + + if (generatorTypeArgs) { + // The send type is the third type arg. + return generatorTypeArgs.length >= 3 ? 
generatorTypeArgs[2] : UnknownType.create(); + } + } + + return undefined; +} + +// If the declared return type is a Generator, Iterable, Iterator or the async +// counterparts, returns the yield type. If the type is invalid for a generator, +// returns undefined. +export function getGeneratorYieldType(declaredReturnType: Type, isAsync: boolean): Type | undefined { + let isLegalGeneratorType = true; + + const yieldType = mapSubtypes(declaredReturnType, (subtype) => { + if (isAnyOrUnknown(subtype)) { + return subtype; + } + + if (isClassInstance(subtype)) { + const expectedClasses = [ + ['AsyncIterable', 'Iterable'], + ['AsyncIterator', 'Iterator'], + ['AsyncGenerator', 'Generator'], + ['', 'AwaitableGenerator'], + ]; + + if (expectedClasses.some((classes) => ClassType.isBuiltIn(subtype, isAsync ? classes[0] : classes[1]))) { + return subtype.priv.typeArgs && subtype.priv.typeArgs.length >= 1 + ? subtype.priv.typeArgs[0] + : UnknownType.create(); + } + } + + isLegalGeneratorType = false; + return undefined; + }); + + return isLegalGeneratorType ? 
yieldType : undefined; +} + +export function isInstantiableMetaclass(type: Type): boolean { + return ( + isInstantiableClass(type) && + type.shared.mro.some((mroClass) => isClass(mroClass) && ClassType.isBuiltIn(mroClass, 'type')) + ); +} + +export function isMetaclassInstance(type: Type): boolean { + return ( + isClassInstance(type) && + type.shared.mro.some((mroClass) => isClass(mroClass) && ClassType.isBuiltIn(mroClass, 'type')) + ); +} + +export function isEffectivelyInstantiable(type: Type, options?: IsInstantiableOptions, recursionCount = 0): boolean { + if (recursionCount > maxTypeRecursionCount) { + return false; + } + + recursionCount++; + + if (TypeBase.isInstantiable(type)) { + return true; + } + + if (options?.honorTypeVarBounds && isTypeVar(type) && type.shared.boundType) { + if (isEffectivelyInstantiable(type.shared.boundType, options, recursionCount)) { + return true; + } + } + + // Handle the special case of 'type' (or subclasses thereof), + // which are instantiable. + if (isMetaclassInstance(type)) { + return true; + } + + if (isUnion(type)) { + return type.priv.subtypes.every((subtype) => isEffectivelyInstantiable(subtype, options, recursionCount)); + } + + return false; +} + +export function convertToInstance(type: ParamSpecType, includeSubclasses?: boolean): ParamSpecType; +export function convertToInstance(type: TypeVarTupleType, includeSubclasses?: boolean): TypeVarTupleType; +export function convertToInstance(type: TypeVarType, includeSubclasses?: boolean): TypeVarType; +export function convertToInstance(type: Type, includeSubclasses?: boolean): Type; +export function convertToInstance(type: Type, includeSubclasses = true): Type { + // See if we've already performed this conversion and cached it. + if (type.cached?.instanceType && includeSubclasses) { + return type.cached.instanceType; + } + + let result = mapSubtypes( + type, + (subtype) => { + switch (subtype.category) { + case TypeCategory.Class: { + // Handle type[x] as a special case. 
+ if (ClassType.isBuiltIn(subtype, 'type')) { + if (TypeBase.isInstance(subtype)) { + if (!subtype.priv.typeArgs || subtype.priv.typeArgs.length < 1) { + return UnknownType.create(); + } else { + return subtype.priv.typeArgs[0]; + } + } else { + if (subtype.priv.typeArgs && subtype.priv.typeArgs.length > 0) { + if (!isAnyOrUnknown(subtype.priv.typeArgs[0])) { + return convertToInstantiable(subtype.priv.typeArgs[0]); + } + } + } + } + + return ClassType.cloneAsInstance(subtype, includeSubclasses); + } + + case TypeCategory.Function: { + if (TypeBase.isInstantiable(subtype)) { + return FunctionType.cloneAsInstance(subtype); + } + break; + } + + case TypeCategory.TypeVar: { + if (TypeBase.isInstantiable(subtype)) { + return TypeVarType.cloneAsInstance(subtype); + } + break; + } + + case TypeCategory.Any: { + return AnyType.convertToInstance(subtype); + } + + case TypeCategory.Unknown: { + return UnknownType.convertToInstance(subtype); + } + + case TypeCategory.Never: { + return NeverType.convertToInstance(subtype); + } + + case TypeCategory.Unbound: { + return UnboundType.convertToInstance(subtype); + } + } + + return subtype; + }, + { + skipElideRedundantLiterals: true, + } + ); + + // Copy over any type alias information. + const aliasInfo = type.props?.typeAliasInfo; + if (aliasInfo && type !== result) { + result = TypeBase.cloneForTypeAlias(result, aliasInfo); + } + + if (type !== result && includeSubclasses) { + // Cache the converted value for next time. + if (!type.cached) { + type.cached = {}; + } + type.cached.instanceType = result; + } + + return result; +} + +export function convertToInstantiable(type: Type, includeSubclasses = true): Type { + // See if we've already performed this conversion and cached it. 
+ if (type.cached?.instantiableType) { + return type.cached.instantiableType; + } + + const result = mapSubtypes(type, (subtype) => { + switch (subtype.category) { + case TypeCategory.Class: { + return ClassType.cloneAsInstantiable(subtype, includeSubclasses); + } + + case TypeCategory.Function: { + return FunctionType.cloneAsInstantiable(subtype); + } + + case TypeCategory.TypeVar: { + return TypeVarType.cloneAsInstantiable(subtype); + } + } + + return subtype; + }); + + if (type !== result) { + // Cache the converted value for next time. + if (!type.cached) { + type.cached = {}; + } + type.cached.instantiableType = result; + } + + return result; +} + +export function getMembersForClass(classType: ClassType, symbolTable: SymbolTable, includeInstanceVars: boolean) { + classType.shared.mro.forEach((mroClass) => { + if (isInstantiableClass(mroClass)) { + // Add any new member variables from this class. + const isClassTypedDict = ClassType.isTypedDictClass(mroClass); + ClassType.getSymbolTable(mroClass).forEach((symbol, name) => { + if (symbol.isClassMember() || (includeInstanceVars && symbol.isInstanceMember())) { + if (!isClassTypedDict || !isTypedDictMemberAccessedThroughIndex(symbol)) { + if (!symbol.isInitVar()) { + const existingSymbol = symbolTable.get(name); + + if (!existingSymbol) { + symbolTable.set(name, symbol); + } else if (!existingSymbol.hasTypedDeclarations() && symbol.hasTypedDeclarations()) { + // If the existing symbol is unannotated but a parent class + // has an annotation for the symbol, use the parent type instead. + symbolTable.set(name, symbol); + } + } + } + } + }); + } + }); + + // Add members of the metaclass as well. 
+ if (!includeInstanceVars) { + const metaclass = classType.shared.effectiveMetaclass; + if (metaclass && isInstantiableClass(metaclass)) { + for (const mroClass of metaclass.shared.mro) { + if (isInstantiableClass(mroClass)) { + ClassType.getSymbolTable(mroClass).forEach((symbol, name) => { + const existingSymbol = symbolTable.get(name); + + if (!existingSymbol) { + symbolTable.set(name, symbol); + } else if (!existingSymbol.hasTypedDeclarations() && symbol.hasTypedDeclarations()) { + // If the existing symbol is unannotated but a parent class + // has an annotation for the symbol, use the parent type instead. + symbolTable.set(name, symbol); + } + }); + } else { + break; + } + } + } + } +} + +export function getMembersForModule(moduleType: ModuleType, symbolTable: SymbolTable) { + // Start with the loader fields. If there are any symbols of the + // same name defined within the module, they will overwrite the + // loader fields. + if (moduleType.priv.loaderFields) { + moduleType.priv.loaderFields.forEach((symbol, name) => { + symbolTable.set(name, symbol); + }); + } + + moduleType.priv.fields.forEach((symbol, name) => { + symbolTable.set(name, symbol); + }); +} + +// Determines if the type contains an Any recursively. +export function containsAnyRecursive(type: Type, includeUnknown = true): boolean { + class AnyWalker extends TypeWalker { + foundAny = false; + + constructor(private _includeUnknown: boolean) { + super(); + } + + override visitAny(type: AnyType) { + this.foundAny = true; + this.cancelWalk(); + } + + override visitUnknown(type: UnknownType): void { + if (this._includeUnknown) { + this.foundAny = true; + this.cancelWalk(); + } + } + } + + const walker = new AnyWalker(includeUnknown); + walker.walk(type); + return walker.foundAny; +} + +// Determines if the type contains an Any or Unknown type. If so, +// it returns the Any or Unknown type. Unknowns are preferred over +// Any if both are present. 
If recurse is true, it will recurse +// through type arguments and parameters. +export function containsAnyOrUnknown(type: Type, recurse: boolean): AnyType | UnknownType | undefined { + class AnyOrUnknownWalker extends TypeWalker { + anyOrUnknownType: AnyType | UnknownType | undefined; + + constructor(private _recurse: boolean) { + super(); + } + + override visitTypeAlias(type: Type) { + // Don't explore type aliases. + } + + override visitUnknown(type: UnknownType) { + this.anyOrUnknownType = this.anyOrUnknownType ? preserveUnknown(this.anyOrUnknownType, type) : type; + } + + override visitAny(type: AnyType) { + this.anyOrUnknownType = this.anyOrUnknownType ? preserveUnknown(this.anyOrUnknownType, type) : type; + } + + override visitClass(type: ClassType) { + if (this._recurse) { + super.visitClass(type); + } + } + + override visitFunction(type: FunctionType) { + if (this._recurse) { + // A function with a "..." type is effectively an "Any". + if (FunctionType.isGradualCallableForm(type)) { + this.anyOrUnknownType = this.anyOrUnknownType + ? preserveUnknown(this.anyOrUnknownType, AnyType.create()) + : AnyType.create(); + } + + super.visitFunction(type); + } + } + } + + const walker = new AnyOrUnknownWalker(recurse); + walker.walk(type); + return walker.anyOrUnknownType; +} + +// Determines if any part of the type contains "Unknown", including any type arguments. +// This function does not use the TypeWalker because it is called very frequently, +// and allocating a memory walker object for every call significantly increases +// peak memory usage. +export function isPartlyUnknown(type: Type, recursionCount = 0): boolean { + if (recursionCount > maxTypeRecursionCount) { + return false; + } + recursionCount++; + + if (isUnknown(type)) { + return true; + } + + // If this is a generic type alias, see if any of its type arguments + // are either unspecified or are partially known. 
+ const aliasInfo = type.props?.typeAliasInfo; + if (aliasInfo?.typeArgs) { + if (aliasInfo.typeArgs.some((typeArg) => isPartlyUnknown(typeArg, recursionCount))) { + return true; + } + } + + // See if a union contains an unknown type. + if (isUnion(type)) { + return findSubtype(type, (subtype) => isPartlyUnknown(subtype, recursionCount)) !== undefined; + } + + // See if an object or class has an unknown type argument. + if (isClass(type)) { + // If this is a reference to the class itself, as opposed to a reference + // to a type that represents the class and its subclasses, don't flag + // the type as partially unknown. + if (!type.priv.includeSubclasses) { + return false; + } + + if (!ClassType.isPseudoGenericClass(type)) { + const typeArgs = type.priv.tupleTypeArgs?.map((t) => t.type) || type.priv.typeArgs; + if (typeArgs) { + for (const argType of typeArgs) { + if (isPartlyUnknown(argType, recursionCount)) { + return true; + } + } + } + } + + return false; + } + + // See if a function has an unknown type. + if (isOverloaded(type)) { + return OverloadedType.getOverloads(type).some((overload) => { + return isPartlyUnknown(overload, recursionCount); + }); + } + + if (isFunction(type)) { + for (let i = 0; i < type.shared.parameters.length; i++) { + // Ignore parameters such as "*" that have no name. + if (type.shared.parameters[i].name) { + const paramType = FunctionType.getParamType(type, i); + if (isPartlyUnknown(paramType, recursionCount)) { + return true; + } + } + } + + if ( + type.shared.declaredReturnType && + !FunctionType.isParamSpecValue(type) && + isPartlyUnknown(type.shared.declaredReturnType, recursionCount) + ) { + return true; + } + + return false; + } + + return false; +} + +// If the specified type is a generic class with a single type argument +// that is a union, it "explodes" the class into a union of classes with +// each element of the union - e.g. Foo[A | B] becomes Foo[A] | Foo[B]. 
+export function explodeGenericClass(classType: ClassType) { + if (!classType.priv.typeArgs || classType.priv.typeArgs.length !== 1 || !isUnion(classType.priv.typeArgs[0])) { + return classType; + } + + return combineTypes( + classType.priv.typeArgs[0].priv.subtypes.map((subtype) => { + return ClassType.specialize(classType, [subtype]); + }) + ); +} + +// If the type is a union of same-sized tuples, these are combined into +// a single tuple with that size. Otherwise, returns undefined. +export function combineSameSizedTuples(type: Type, tupleType: Type | undefined): Type { + if (!tupleType || !isInstantiableClass(tupleType) || isUnboundedTupleClass(tupleType)) { + return type; + } + + let tupleEntries: Type[][] | undefined; + let isValid = true; + + doForEachSubtype(type, (subtype) => { + if (isClassInstance(subtype)) { + let tupleClass: ClassType | undefined; + if (isClass(subtype) && isTupleClass(subtype) && !isUnboundedTupleClass(subtype)) { + tupleClass = subtype; + } + + if (!tupleClass) { + // Look in the mro list to see if this subtype derives from a + // tuple with a known size. This includes named tuples. 
+ tupleClass = subtype.shared.mro.find( + (mroClass) => isClass(mroClass) && isTupleClass(mroClass) && !isUnboundedTupleClass(mroClass) + ) as ClassType | undefined; + } + + if (tupleClass && isClass(tupleClass) && tupleClass.priv.tupleTypeArgs) { + if (tupleEntries) { + if (tupleEntries.length === tupleClass.priv.tupleTypeArgs.length) { + tupleClass.priv.tupleTypeArgs.forEach((entry, index) => { + tupleEntries![index].push(entry.type); + }); + } else { + isValid = false; + } + } else { + tupleEntries = tupleClass.priv.tupleTypeArgs.map((entry) => [entry.type]); + } + } else { + isValid = false; + } + } else { + isValid = false; + } + }); + + if (!isValid || !tupleEntries) { + return type; + } + + return convertToInstance( + specializeTupleClass( + tupleType, + tupleEntries.map((entry) => { + return { type: combineTypes(entry), isUnbounded: false }; + }) + ) + ); +} + +export function combineTupleTypeArgs(typeArgs: TupleTypeArg[]): Type { + const typesToCombine: Type[] = []; + + typeArgs.forEach((t) => { + if (isTypeVar(t.type)) { + if (isUnpackedTypeVarTuple(t.type)) { + // Treat the unpacked TypeVarTuple as a union. + typesToCombine.push(TypeVarType.cloneForUnpacked(t.type, /* isInUnion */ true)); + return; + } + + if (isUnpackedTypeVar(t.type)) { + if ( + t.type.shared.boundType && + isClassInstance(t.type.shared.boundType) && + isTupleClass(t.type.shared.boundType) && + t.type.shared.boundType.priv.tupleTypeArgs + ) { + typesToCombine.push(combineTupleTypeArgs(t.type.shared.boundType.priv.tupleTypeArgs)); + } + return; + } + } + + typesToCombine.push(t.type); + }); + + return combineTypes(typesToCombine); +} + +// Tuples require special handling for specialization. This method computes +// the "effective" type argument, which is a union of the variadic type +// arguments. 
+export function specializeTupleClass( + classType: ClassType, + typeArgs: TupleTypeArg[], + isTypeArgExplicit = true, + isUnpacked = false +): ClassType { + const clonedClassType = ClassType.specialize( + classType, + [combineTupleTypeArgs(typeArgs)], + isTypeArgExplicit, + /* includeSubclasses */ undefined, + typeArgs + ); + + if (isUnpacked) { + clonedClassType.priv.isUnpacked = true; + } + + return clonedClassType; +} + +function _expandUnpackedTypeVarTupleUnion(type: Type) { + if (isClassInstance(type) && isTupleClass(type) && type.priv.tupleTypeArgs && type.priv.isUnpacked) { + return combineTypes(type.priv.tupleTypeArgs.map((t) => t.type)); + } + + return type; +} + +// If this is an unpacked type, returns the type as no longer unpacked. +export function makePacked(type: Type): Type { + if (isUnpackedClass(type)) { + return ClassType.cloneForPacked(type); + } + + if (isUnpackedTypeVarTuple(type) && !type.priv.isInUnion) { + return TypeVarType.cloneForPacked(type); + } + + if (isUnpackedTypeVar(type)) { + return TypeVarType.cloneForPacked(type); + } + + return type; +} + +export function makeUnpacked(type: Type): Type { + if (isClass(type)) { + return ClassType.cloneForUnpacked(type); + } + + if (isTypeVarTuple(type) && !type.priv.isInUnion) { + return TypeVarType.cloneForUnpacked(type); + } + + if (isTypeVar(type)) { + return TypeVarType.cloneForUnpacked(type); + } + + return type; +} + +// If the declared return type for the function is a Generator or AsyncGenerator, +// returns the type arguments for the type. +export function getGeneratorTypeArgs(returnType: Type): Type[] | undefined { + if (isClassInstance(returnType)) { + if (ClassType.isBuiltIn(returnType, ['Generator', 'AsyncGenerator'])) { + return returnType.priv.typeArgs; + } else if (ClassType.isBuiltIn(returnType, 'AwaitableGenerator')) { + // AwaitableGenerator has four type arguments, and the first 3 + // correspond to the generator. 
+ return returnType.priv.typeArgs?.slice(0, 3); + } + } + + return undefined; +} + +export function requiresTypeArgs(classType: ClassType) { + if (classType.shared.typeParams.length > 0) { + const firstTypeParam = classType.shared.typeParams[0]; + + // If there are type parameters, type arguments are needed. + // The exception is if type parameters have been synthesized + // for classes that have untyped constructors. + if (firstTypeParam.shared.isSynthesized) { + return false; + } + + // If the first type parameter has a default type, then no + // type arguments are needed. + if (firstTypeParam.shared.isDefaultExplicit) { + return false; + } + + return true; + } + + // There are a few built-in special classes that require + // type arguments even though typeParams is empty. + if (ClassType.isSpecialBuiltIn(classType)) { + const specialClasses = [ + 'Tuple', + 'Callable', + 'Generic', + 'Type', + 'Optional', + 'Union', + 'Literal', + 'Annotated', + 'TypeGuard', + 'TypeIs', + ]; + + if (specialClasses.some((t) => t === (classType.priv.aliasName || classType.shared.name))) { + return true; + } + } + + return false; +} + +export function requiresSpecialization( + type: Type, + options?: RequiresSpecializationOptions, + recursionCount = 0 +): boolean { + if (recursionCount > maxTypeRecursionCount) { + return false; + } + recursionCount++; + + // Is the answer cached? 
+ const canUseCache = !options?.ignorePseudoGeneric && !options?.ignoreSelf; + if (canUseCache && type.cached?.requiresSpecialization !== undefined) { + return type.cached.requiresSpecialization; + } + + const result = _requiresSpecialization(type, options, recursionCount); + + if (canUseCache) { + if (type.cached === undefined) { + type.cached = {}; + } + type.cached.requiresSpecialization = result; + } + + return result; +} + +function _requiresSpecialization(type: Type, options?: RequiresSpecializationOptions, recursionCount = 0): boolean { + // If the type is conditioned on a TypeVar, it may need to be specialized. + if (type.props?.condition) { + return true; + } + + switch (type.category) { + case TypeCategory.Class: { + if (ClassType.isPseudoGenericClass(type) && options?.ignorePseudoGeneric) { + return false; + } + + if (!type.priv.isTypeArgExplicit && options?.ignoreImplicitTypeArgs) { + return false; + } + + if (type.priv.tupleTypeArgs) { + if ( + type.priv.tupleTypeArgs.some((typeArg) => + requiresSpecialization(typeArg.type, options, recursionCount) + ) + ) { + return true; + } + } + + if (type.priv.typeArgs) { + return type.priv.typeArgs.some((typeArg) => requiresSpecialization(typeArg, options, recursionCount)); + } + + return ClassType.getTypeParams(type).length > 0; + } + + case TypeCategory.Function: { + for (let i = 0; i < type.shared.parameters.length; i++) { + if (requiresSpecialization(FunctionType.getParamType(type, i), options, recursionCount)) { + return true; + } + } + + const declaredReturnType = + type.priv.specializedTypes && type.priv.specializedTypes.returnType + ? 
type.priv.specializedTypes.returnType + : type.shared.declaredReturnType; + if (declaredReturnType) { + if (requiresSpecialization(declaredReturnType, options, recursionCount)) { + return true; + } + } else if (type.shared.inferredReturnType) { + if (requiresSpecialization(type.shared.inferredReturnType?.type, options, recursionCount)) { + return true; + } + } + + return false; + } + + case TypeCategory.Overloaded: { + const overloads = OverloadedType.getOverloads(type); + if (overloads.some((overload) => requiresSpecialization(overload, options, recursionCount))) { + return true; + } + + const impl = OverloadedType.getImplementation(type); + if (impl) { + return requiresSpecialization(impl, options, recursionCount); + } + + return false; + } + + case TypeCategory.Union: { + return type.priv.subtypes.some((subtype) => requiresSpecialization(subtype, options, recursionCount)); + } + + case TypeCategory.TypeVar: { + // Most TypeVar types need to be specialized. + if (!type.shared.recursiveAlias) { + if (TypeVarType.isSelf(type) && options?.ignoreSelf) { + return false; + } + + return true; + } + + // If this is a recursive type alias, it may need to be specialized + // if it has generic type arguments. + const aliasInfo = type.props?.typeAliasInfo; + if (aliasInfo?.typeArgs) { + return aliasInfo.typeArgs.some((typeArg) => requiresSpecialization(typeArg, options, recursionCount)); + } + } + } + + return false; +} + +// Converts contravariant to a covariant or vice versa. Leaves +// other variances unchanged. +export function invertVariance(variance: Variance) { + if (variance === Variance.Contravariant) { + return Variance.Covariant; + } + + if (variance === Variance.Covariant) { + return Variance.Contravariant; + } + + return variance; +} + +// Combines two variances to produce a resulting variance. 
+export function combineVariances(variance1: Variance, variance2: Variance) { + if (variance1 === Variance.Unknown) { + return variance2; + } + + if ( + variance2 === Variance.Invariant || + (variance2 === Variance.Covariant && variance1 === Variance.Contravariant) || + (variance2 === Variance.Contravariant && variance1 === Variance.Covariant) + ) { + return Variance.Invariant; + } + + return variance1; +} + +// Determines if the variance of the type argument for a generic class is compatible +// With the declared variance of the corresponding type parameter. +export function isVarianceOfTypeArgCompatible(type: Type, typeParamVariance: Variance): boolean { + if (typeParamVariance === Variance.Unknown || typeParamVariance === Variance.Auto) { + return true; + } + + if (isTypeVar(type) && !isParamSpec(type) && !isTypeVarTuple(type)) { + const typeArgVariance = type.shared.declaredVariance; + + if (typeArgVariance === Variance.Contravariant || typeArgVariance === Variance.Covariant) { + return typeArgVariance === typeParamVariance; + } + } else if (isClassInstance(type)) { + if (type.shared.typeParams && type.shared.typeParams.length > 0) { + return type.shared.typeParams.every((typeParam, index) => { + let typeArgType: Type | undefined; + + if (isParamSpec(typeParam) || isTypeVarTuple(typeParam)) { + return true; + } + + if (type.priv.typeArgs && index < type.priv.typeArgs.length) { + typeArgType = type.priv.typeArgs[index]; + } + + const declaredVariance = typeParam.shared.declaredVariance; + if (declaredVariance === Variance.Auto) { + return true; + } + + let effectiveVariance = Variance.Invariant; + if (declaredVariance === Variance.Covariant) { + // If the declared variance is covariant, the effective variance + // is simply copied from the type param variance. 
+ effectiveVariance = typeParamVariance; + } else if (declaredVariance === Variance.Contravariant) { + // If the declared variance is contravariant, it flips the + // effective variance from contravariant to covariant or vice versa. + if (typeParamVariance === Variance.Covariant) { + effectiveVariance = Variance.Contravariant; + } else if (typeParamVariance === Variance.Contravariant) { + effectiveVariance = Variance.Covariant; + } + } + + return isVarianceOfTypeArgCompatible(typeArgType ?? UnknownType.create(), effectiveVariance); + }); + } + } + + return true; +} + +// Computes the method resolution ordering for a class whose base classes +// have already been filled in. The algorithm for computing MRO is described +// here: https://www.python.org/download/releases/2.3/mro/. It returns true +// if an MRO was possible, false otherwise. +export function computeMroLinearization(classType: ClassType): boolean { + let isMroFound = true; + + // Clear out any existing MRO information. + classType.shared.mro = []; + + const filteredBaseClasses = classType.shared.baseClasses.filter((baseClass, index) => { + if (isInstantiableClass(baseClass)) { + // Generic has some special-case logic (see description of __mro_entries__ + // in PEP 560) that we need to account for here. + if (ClassType.isBuiltIn(baseClass, 'Generic')) { + // If the class is a Protocol or TypedDict, the generic is ignored for + // the purposes of computing the MRO. + if (ClassType.isProtocolClass(classType) || ClassType.isTypedDictClass(classType)) { + return false; + } + + // If the class contains any specialized generic classes after + // the Generic base, the Generic base is ignored for purposes + // of computing the MRO. 
+ if ( + classType.shared.baseClasses.some((innerBaseClass, innerIndex) => { + return ( + innerIndex > index && + isInstantiableClass(innerBaseClass) && + innerBaseClass.priv.typeArgs && + innerBaseClass.priv.isTypeArgExplicit + ); + }) + ) { + return false; + } + } + } + + return true; + }); + + // Construct the list of class lists that need to be merged. + const classListsToMerge: Type[][] = []; + + filteredBaseClasses.forEach((baseClass) => { + if (isInstantiableClass(baseClass)) { + const solution = buildSolutionFromSpecializedClass(baseClass); + classListsToMerge.push( + baseClass.shared.mro.map((mroClass) => { + return applySolvedTypeVars(mroClass, solution); + }) + ); + } else { + classListsToMerge.push([baseClass]); + } + }); + + classListsToMerge.push( + filteredBaseClasses.map((baseClass) => { + const solution = buildSolutionFromSpecializedClass(classType); + return applySolvedTypeVars(baseClass, solution); + }) + ); + + // The first class in the MRO is the class itself. + const solution = buildSolutionFromSpecializedClass(classType); + let specializedClassType = applySolvedTypeVars(classType, solution); + if (!isClass(specializedClassType) && !isAnyOrUnknown(specializedClassType)) { + specializedClassType = UnknownType.create(); + } + + classType.shared.mro.push(specializedClassType); + + // Helper function that returns true if the specified searchClass + // is found in the "tail" (i.e. in elements 1 through n) of any + // of the class lists. + function isInTail(searchClass: ClassType, classLists: Type[][]) { + return classLists.some((classList) => { + return ( + classList.findIndex( + (value) => isInstantiableClass(value) && ClassType.isSameGenericClass(value, searchClass) + ) > 0 + ); + }); + } + + // Helper function that filters the class lists to remove any duplicate + // entries of the specified class. This is used once the class has been + // added to the MRO. 
+ function filterClass(classToFilter: ClassType, classLists: Type[][]) { + for (let i = 0; i < classLists.length; i++) { + classLists[i] = classLists[i].filter((value) => { + return !isInstantiableClass(value) || !ClassType.isSameGenericClass(value, classToFilter); + }); + } + } + + while (true) { + let foundValidHead = false; + let nonEmptyList: Type[] | undefined = undefined; + + for (let i = 0; i < classListsToMerge.length; i++) { + const classList = classListsToMerge[i]; + + if (classList.length > 0) { + if (nonEmptyList === undefined) { + nonEmptyList = classList; + } + + if (!isInstantiableClass(classList[0])) { + foundValidHead = true; + let head = classList[0]; + if (!isClass(head) && !isAnyOrUnknown(head)) { + head = UnknownType.create(); + } + classType.shared.mro.push(head); + classList.shift(); + break; + } + + if (!isInTail(classList[0], classListsToMerge)) { + foundValidHead = true; + classType.shared.mro.push(classList[0]); + filterClass(classList[0], classListsToMerge); + break; + } + } + } + + // If all lists are empty, we are done. + if (!nonEmptyList) { + break; + } + + // We made it all the way through the list of class lists without + // finding a valid head, but there is at least one list that's not + // yet empty. This means there's no valid MRO order. + if (!foundValidHead) { + isMroFound = false; + + // Handle the situation by pull the head off the first empty list. + // This allows us to make forward progress. + if (!isInstantiableClass(nonEmptyList[0])) { + let head = nonEmptyList[0]; + if (!isClass(head) && !isAnyOrUnknown(head)) { + head = UnknownType.create(); + } + classType.shared.mro.push(head); + nonEmptyList.shift(); + } else { + classType.shared.mro.push(nonEmptyList[0]); + filterClass(nonEmptyList[0], classListsToMerge); + } + } + } + + return isMroFound; +} + +// Returns zero or more unique module names that point to the place(s) +// where the type is declared. Unions, for example, can result in more +// than one result. 
Type arguments are not included. +export function getDeclaringModulesForType(type: Type): string[] { + const moduleList: string[] = []; + addDeclaringModuleNamesForType(type, moduleList); + return moduleList; +} + +function addDeclaringModuleNamesForType(type: Type, moduleList: string[], recursionCount = 0) { + if (recursionCount > maxTypeRecursionCount) { + return; + } + recursionCount++; + + const addIfUnique = (moduleName: string) => { + if (moduleName && !moduleList.some((n) => n === moduleName)) { + moduleList.push(moduleName); + } + }; + + switch (type.category) { + case TypeCategory.Class: { + addIfUnique(type.shared.moduleName); + break; + } + + case TypeCategory.Function: { + addIfUnique(type.shared.moduleName); + break; + } + + case TypeCategory.Overloaded: { + const overloads = OverloadedType.getOverloads(type); + overloads.forEach((overload) => { + addDeclaringModuleNamesForType(overload, moduleList, recursionCount); + }); + const impl = OverloadedType.getImplementation(type); + if (impl) { + addDeclaringModuleNamesForType(impl, moduleList, recursionCount); + } + break; + } + + case TypeCategory.Union: { + doForEachSubtype(type, (subtype) => { + addDeclaringModuleNamesForType(subtype, moduleList, recursionCount); + }); + break; + } + + case TypeCategory.Module: { + addIfUnique(type.priv.moduleName); + break; + } + } +} + +// Converts a function into a FunctionType that represents the function's +// input signature and converts a ParamSpec into a FunctionType with the input +// signature (*args: P.args, **kwargs: P.kwargs). +export function convertTypeToParamSpecValue(type: Type): FunctionType { + if (isParamSpec(type)) { + const newFunction = FunctionType.createInstance('', '', '', FunctionTypeFlags.ParamSpecValue); + FunctionType.addParamSpecVariadics(newFunction, type); + newFunction.shared.typeVarScopeId = getTypeVarScopeId(type); + return newFunction; + } + + if (isFunction(type)) { + // If it's already a ParamSpecValue, return it as is. 
+ if (FunctionType.isParamSpecValue(type)) { + return type; + } + + const newFunction = FunctionType.createInstance( + '', + '', + '', + type.shared.flags | FunctionTypeFlags.ParamSpecValue, + type.shared.docString + ); + + newFunction.shared.deprecatedMessage = type.shared.deprecatedMessage; + + type.shared.parameters.forEach((param, index) => { + FunctionType.addParam( + newFunction, + FunctionParam.create( + param.category, + FunctionType.getParamType(type, index), + param.flags, + param.name, + FunctionType.getParamDefaultType(type, index), + param.defaultExpr + ) + ); + }); + + newFunction.shared.typeVarScopeId = type.shared.typeVarScopeId; + newFunction.priv.constructorTypeVarScopeId = type.priv.constructorTypeVarScopeId; + + return newFunction; + } + + return ParamSpecType.getUnknown(); +} + +// Converts a FunctionType into a ParamSpec if it consists only of +// (* args: P.args, ** kwargs: P.kwargs). Otherwise returns the original type. +export function simplifyFunctionToParamSpec(type: FunctionType): FunctionType | ParamSpecType { + const paramSpec = FunctionType.getParamSpecFromArgsKwargs(type); + const withoutParamSpec = FunctionType.cloneRemoveParamSpecArgsKwargs(type); + + let hasParams = withoutParamSpec.shared.parameters.length > 0; + + if (withoutParamSpec.shared.parameters.length === 1) { + // If the ParamSpec has a position-only separator as its only parameter, + // treat it as though there are no parameters. + const onlyParam = withoutParamSpec.shared.parameters[0]; + if (isPositionOnlySeparator(onlyParam)) { + hasParams = false; + } + } + + // Can we simplify it to just a paramSpec? + if (!hasParams && paramSpec) { + return paramSpec; + } + + return type; +} + +// Recursively walks a type and calls a callback for each TypeVar, allowing +// it to be replaced with something else. 
+export class TypeVarTransformer { + private _pendingTypeVarTransformations = new Set(); + private _pendingFunctionTransformations: (FunctionType | OverloadedType)[] = []; + + get pendingTypeVarTransformations() { + return this._pendingTypeVarTransformations; + } + + apply(type: Type, recursionCount: number): Type { + if (recursionCount > maxTypeRecursionCount) { + return type; + } + recursionCount++; + + type = this.transformGenericTypeAlias(type, recursionCount); + + // If the type is conditioned on a type variable, see if the condition + // still applies. + if (type.props?.condition) { + type = this.transformConditionalType(type, recursionCount); + } + + // Shortcut the operation if possible. + if (this.canSkipTransform(type)) { + return type; + } + + if (isAnyOrUnknown(type)) { + return type; + } + + if (isNoneInstance(type)) { + return type; + } + + if (isTypeVar(type)) { + // Handle recursive type aliases specially. In particular, + // we need to specialize type arguments for generic recursive + // type aliases. + const aliasInfo = type.props?.typeAliasInfo; + if (type.shared.recursiveAlias) { + if (!aliasInfo?.typeArgs) { + return type; + } + + let requiresUpdate = false; + const typeArgs = aliasInfo.typeArgs.map((typeArg) => { + const replacementType = this.apply(typeArg, recursionCount); + if (replacementType !== typeArg) { + requiresUpdate = true; + } + return replacementType; + }); + + if (requiresUpdate) { + return TypeBase.cloneForTypeAlias(type, { ...aliasInfo, typeArgs }); + } + + return type; + } + + let replacementType: Type = type; + + // Recursively transform the results, but ensure that we don't replace any + // type variables in the same scope recursively by setting it the scope in the + // _pendingTypeVarTransformations set. + if (!this._isTypeVarScopePending(type.priv.scopeId)) { + let paramSpecAccess: ParamSpecAccess | undefined; + + // If this is a ParamSpec with a ".args" or ".kwargs" access, strip + // it off for now. 
We'll add it back later if appropriate. + if (isParamSpec(type) && type.priv.paramSpecAccess) { + paramSpecAccess = type.priv.paramSpecAccess; + type = TypeVarType.cloneForParamSpecAccess(type, /* access */ undefined); + } + + replacementType = this.transformTypeVar(type, recursionCount) ?? type; + + if (isParamSpec(type) && replacementType !== type) { + replacementType = simplifyFunctionToParamSpec(convertTypeToParamSpecValue(replacementType)); + } + + // If the original type was a ParamSpec with a ".args" or ".kwargs" access, + // preserve that information in the transformed type. + if (paramSpecAccess) { + if (isParamSpec(replacementType)) { + replacementType = TypeVarType.cloneForParamSpecAccess(replacementType, paramSpecAccess); + } else { + replacementType = UnknownType.create(); + } + } + + // If we're transforming a TypeVarTuple that was in a union, + // expand the union types. + if (isTypeVarTuple(type) && type.priv.isInUnion) { + replacementType = _expandUnpackedTypeVarTupleUnion(replacementType); + } + + if (type.priv.scopeId) { + this._pendingTypeVarTransformations.add(type.priv.scopeId); + replacementType = this.apply(replacementType, recursionCount); + this._pendingTypeVarTransformations.delete(type.priv.scopeId); + } + } + + return replacementType; + } + + if (isUnion(type)) { + const newUnionType = mapSubtypes( + type, + (subtype) => { + let transformedType: Type = this.apply(subtype, recursionCount); + + // If we're transforming a TypeVarTuple within a union, + // combine the individual types within the TypeVarTuple. 
+ if (isTypeVarTuple(subtype) && !isTypeVarTuple(transformedType)) { + const subtypesToCombine: Type[] = []; + doForEachSubtype(transformedType, (transformedSubtype) => { + subtypesToCombine.push(_expandUnpackedTypeVarTupleUnion(transformedSubtype)); + }); + + transformedType = combineTypes(subtypesToCombine); + } + + if (this.transformUnionSubtype) { + return this.transformUnionSubtype(subtype, transformedType, recursionCount); + } + + return transformedType; + }, + { retainTypeAlias: true } + ); + + return !isNever(newUnionType) ? newUnionType : UnknownType.create(); + } + + if (isClass(type)) { + return this.transformTypeVarsInClassType(type, recursionCount); + } + + if (isFunction(type)) { + // Prevent recursion. + if (this._pendingFunctionTransformations.some((t) => t === type)) { + return type; + } + + this._pendingFunctionTransformations.push(type); + const result = this.transformTypeVarsInFunctionType(type, recursionCount); + this._pendingFunctionTransformations.pop(); + + return result; + } + + if (isOverloaded(type)) { + // Prevent recursion. + if (this._pendingFunctionTransformations.some((t) => t === type)) { + return type; + } + + this._pendingFunctionTransformations.push(type); + + let requiresUpdate = false; + + // Specialize each of the functions in the overload. 
+ const overloads = OverloadedType.getOverloads(type); + const newOverloads: FunctionType[] = []; + + overloads.forEach((entry) => { + const replacementType = this.transformTypeVarsInFunctionType(entry, recursionCount); + + if (isFunction(replacementType)) { + newOverloads.push(replacementType); + } else { + appendArray(newOverloads, OverloadedType.getOverloads(replacementType)); + } + + if (replacementType !== entry) { + requiresUpdate = true; + } + }); + + const impl = OverloadedType.getImplementation(type); + let newImpl: Type | undefined = impl; + + if (impl) { + newImpl = this.apply(impl, recursionCount); + + if (newImpl !== impl) { + requiresUpdate = true; + } + } + + this._pendingFunctionTransformations.pop(); + + // Construct a new overload with the specialized function types. + return requiresUpdate ? OverloadedType.create(newOverloads, newImpl) : type; + } + + return type; + } + + canSkipTransform(type: Type): boolean { + return !requiresSpecialization(type); + } + + transformTypeVar(typeVar: TypeVarType, recursionCount: number): Type | undefined { + return undefined; + } + + transformTupleTypeVar(paramSpec: TypeVarType, recursionCount: number): TupleTypeArg[] | undefined { + return undefined; + } + + transformUnionSubtype(preTransform: Type, postTransform: Type, recursionCount: number): Type | undefined { + return postTransform; + } + + doForEachConstraintSet(callback: () => FunctionType): FunctionType | OverloadedType { + // By default, simply return the result of the callback. Subclasses + // can override this method as they see fit. 
+ return callback(); + } + + transformGenericTypeAlias(type: Type, recursionCount: number) { + const aliasInfo = type.props?.typeAliasInfo; + if (!aliasInfo || !aliasInfo.shared.typeParams || !aliasInfo.typeArgs) { + return type; + } + + let requiresUpdate = false; + const newTypeArgs = aliasInfo.typeArgs.map((typeArg) => { + const updatedType = this.apply(typeArg, recursionCount); + if (type !== updatedType) { + requiresUpdate = true; + } + return updatedType; + }); + + return requiresUpdate ? TypeBase.cloneForTypeAlias(type, { ...aliasInfo, typeArgs: newTypeArgs }) : type; + } + + transformConditionalType(type: Type, recursionCount: number): Type { + // By default, do not perform any transform. + return type; + } + + transformTypeVarsInClassType(classType: ClassType, recursionCount: number): Type { + const typeParams = ClassType.getTypeParams(classType); + + // Handle the common case where the class has no type parameters. + if ( + typeParams.length === 0 && + !ClassType.isSpecialBuiltIn(classType) && + !ClassType.isBuiltIn(classType, 'type') + ) { + return classType; + } + + let newTypeArgs: Type[] | undefined; + let newTupleTypeArgs: TupleTypeArg[] | undefined; + let specializationNeeded = false; + let isTypeArgExplicit = true; + + // If type args were previously provided, specialize them. + + // Handle tuples specially. 
+ if (ClassType.isTupleClass(classType)) { + if (getContainerDepth(classType) > maxTupleTypeArgRecursionDepth) { + return classType; + } + + if (classType.priv.tupleTypeArgs) { + newTupleTypeArgs = []; + + classType.priv.tupleTypeArgs.forEach((oldTypeArgType) => { + const newTypeArgType = this.apply(oldTypeArgType.type, recursionCount); + + if (newTypeArgType !== oldTypeArgType.type) { + specializationNeeded = true; + } + + if ( + isUnpackedTypeVarTuple(oldTypeArgType.type) && + isClassInstance(newTypeArgType) && + isTupleClass(newTypeArgType) && + newTypeArgType.priv.tupleTypeArgs + ) { + appendArray(newTupleTypeArgs!, newTypeArgType.priv.tupleTypeArgs); + } else if (isUnpackedClass(newTypeArgType) && newTypeArgType.priv.tupleTypeArgs) { + appendArray(newTupleTypeArgs!, newTypeArgType.priv.tupleTypeArgs); + } else { + // Handle the special case where tuple[T, ...] is being specialized + // to tuple[Never, ...]. This is equivalent to tuple[()]. + const isEmptyTuple = + oldTypeArgType.isUnbounded && + isTypeVar(oldTypeArgType.type) && + isNever(newTypeArgType) && + classType.priv.tupleTypeArgs!.length === 1; + + if (!isEmptyTuple) { + newTupleTypeArgs!.push({ + type: newTypeArgType, + isUnbounded: oldTypeArgType.isUnbounded, + isOptional: oldTypeArgType.isOptional, + }); + } + } + }); + } else if (typeParams.length > 0) { + newTupleTypeArgs = this.transformTupleTypeVar(typeParams[0], recursionCount); + if (newTupleTypeArgs) { + specializationNeeded = true; + } else { + const newTypeArgType = this.apply(typeParams[0], recursionCount); + newTupleTypeArgs = [{ type: newTypeArgType, isUnbounded: true }]; + + // If this is the literal "tuple" class (as opposed to a type that + // represents all subtypes of tuple), don't specialize + // if the type arg is the same as the type param. This is the same + // thing we do with non-tuple classes below. 
+ if (newTypeArgType !== typeParams[0] || classType.priv.includeSubclasses) { + specializationNeeded = true; + } + isTypeArgExplicit = false; + } + } + + // If this is an empty tuple, don't recompute the non-tuple type argument. + if (newTupleTypeArgs && newTupleTypeArgs.length > 0) { + // Combine the tuple type args into a single non-tuple type argument. + newTypeArgs = [combineTupleTypeArgs(newTupleTypeArgs)]; + } + } + + if (!newTypeArgs) { + const typeArgs = classType.priv.typeArgs ?? typeParams; + + if (!classType.priv.typeArgs) { + isTypeArgExplicit = false; + } + + newTypeArgs = typeArgs.map((oldTypeArgType) => { + let newTypeArgType = this.apply(oldTypeArgType, recursionCount); + if (newTypeArgType !== oldTypeArgType) { + specializationNeeded = true; + + // If this was a TypeVarTuple that was part of a union + // (e.g. Union[Unpack[Vs]]), expand the subtypes into a union here. + if (isTypeVar(oldTypeArgType) && isTypeVarTuple(oldTypeArgType) && oldTypeArgType.priv.isInUnion) { + newTypeArgType = _expandUnpackedTypeVarTupleUnion(newTypeArgType); + } + } + return newTypeArgType; + }); + } + + // If specialization wasn't needed, don't allocate a new class. + if (!specializationNeeded) { + return classType; + } + + return ClassType.specialize( + classType, + newTypeArgs, + isTypeArgExplicit, + /* includeSubclasses */ undefined, + newTupleTypeArgs + ); + } + + transformTypeVarsInFunctionType(sourceType: FunctionType, recursionCount: number): FunctionType | OverloadedType { + return this.doForEachConstraintSet(() => { + let functionType = sourceType; + + const declaredReturnType = FunctionType.getEffectiveReturnType(functionType); + const specializedReturnType = declaredReturnType + ? 
this.apply(declaredReturnType, recursionCount) + : undefined; + let typesRequiredSpecialization = declaredReturnType !== specializedReturnType; + + const specializedParams: SpecializedFunctionTypes = { + parameterTypes: [], + parameterDefaultTypes: undefined, + returnType: specializedReturnType, + }; + + const paramSpec = FunctionType.getParamSpecFromArgsKwargs(functionType); + + if (paramSpec) { + const paramSpecType = this.transformTypeVar(paramSpec, recursionCount); + if (paramSpecType) { + const paramSpecValue = convertTypeToParamSpecValue(paramSpecType); + const transformedParamSpec = FunctionType.getParamSpecFromArgsKwargs(paramSpecValue); + + if ( + paramSpecValue.shared.parameters.length > 0 || + !transformedParamSpec || + !isTypeSame(paramSpec, transformedParamSpec) + ) { + functionType = FunctionType.applyParamSpecValue(functionType, paramSpecValue); + } + } + } + + let variadicParamIndex: number | undefined; + let variadicTypesToUnpack: TupleTypeArg[] | undefined; + const specializedDefaultArgs: (Type | undefined)[] = []; + + for (let i = 0; i < functionType.shared.parameters.length; i++) { + const paramType = FunctionType.getParamType(functionType, i); + const specializedType = this.apply(paramType, recursionCount); + specializedParams.parameterTypes.push(specializedType); + + // Do we need to specialize the default argument type for this parameter? 
+ let defaultArgType = FunctionType.getParamDefaultType(functionType, i); + if (defaultArgType) { + const specializedArgType = this.apply(defaultArgType, recursionCount); + if (specializedArgType !== defaultArgType) { + defaultArgType = specializedArgType; + typesRequiredSpecialization = true; + } + } + specializedDefaultArgs.push(defaultArgType); + + if ( + variadicParamIndex === undefined && + isTypeVarTuple(paramType) && + functionType.shared.parameters[i].category === ParamCategory.ArgsList + ) { + variadicParamIndex = i; + + if ( + isClassInstance(specializedType) && + isTupleClass(specializedType) && + specializedType.priv.isUnpacked + ) { + variadicTypesToUnpack = specializedType.priv.tupleTypeArgs; + } + } + + if (paramType !== specializedType) { + typesRequiredSpecialization = true; + } + } + + let specializedInferredReturnType: Type | undefined; + if (functionType.shared.inferredReturnType) { + specializedInferredReturnType = this.apply( + functionType.shared.inferredReturnType?.type, + recursionCount + ); + if (specializedInferredReturnType !== functionType.shared.inferredReturnType?.type) { + specializedParams.returnType = specializedInferredReturnType; + typesRequiredSpecialization = true; + } + } + + // Do we need to update the boundToType? + if (functionType.priv.boundToType) { + const newBoundToType = this.apply(functionType.priv.boundToType, recursionCount); + if (newBoundToType !== functionType.priv.boundToType && isClass(newBoundToType)) { + functionType = FunctionType.clone(functionType, /* stripFirstParam */ false, newBoundToType); + } + } + + // Do we need to update the strippedFirstParamType? 
+ if (functionType.priv.strippedFirstParamType && !isAnyOrUnknown(functionType.priv.strippedFirstParamType)) { + const newStrippedType = this.apply(functionType.priv.strippedFirstParamType, recursionCount); + if (newStrippedType !== functionType.priv.strippedFirstParamType) { + functionType = TypeBase.cloneType(functionType); + functionType.priv.strippedFirstParamType = newStrippedType; + } + } + + if (!typesRequiredSpecialization) { + return functionType; + } + + if (specializedDefaultArgs.some((t) => t !== undefined)) { + specializedParams.parameterDefaultTypes = specializedDefaultArgs; + } + + // If there was no unpacked variadic type variable, we're done. + if (!variadicTypesToUnpack) { + return FunctionType.specialize(functionType, specializedParams); + } + + // Unpack the tuple and synthesize a new function in the process. + const newFunctionType = TypeBase.isInstantiable(functionType) + ? FunctionType.createInstantiable(functionType.shared.flags | FunctionTypeFlags.SynthesizedMethod) + : FunctionType.createSynthesizedInstance('', functionType.shared.flags); + let insertKeywordOnlySeparator = false; + let swallowPositionOnlySeparator = false; + + specializedParams.parameterTypes.forEach((paramType, index) => { + if (index === variadicParamIndex) { + let sawUnboundedEntry = false; + + // Unpack the tuple into individual parameters. + variadicTypesToUnpack!.forEach((unpackedType) => { + FunctionType.addParam( + newFunctionType, + FunctionParam.create( + unpackedType.isUnbounded || isTypeVarTuple(unpackedType.type) + ? 
ParamCategory.ArgsList + : ParamCategory.Simple, + unpackedType.type, + FunctionParamFlags.NameSynthesized | FunctionParamFlags.TypeDeclared, + `__p${newFunctionType.shared.parameters.length}` + ) + ); + + if (unpackedType.isUnbounded) { + sawUnboundedEntry = true; + } + }); + + if (sawUnboundedEntry) { + swallowPositionOnlySeparator = true; + } else { + insertKeywordOnlySeparator = true; + } + } else { + const param = functionType.shared.parameters[index]; + + if (isKeywordOnlySeparator(param)) { + insertKeywordOnlySeparator = false; + } else if (param.category === ParamCategory.KwargsDict) { + insertKeywordOnlySeparator = false; + } + + // Insert a keyword-only separator parameter if we previously + // unpacked a TypeVarTuple. + if (param.category === ParamCategory.Simple && param.name && insertKeywordOnlySeparator) { + FunctionType.addKeywordOnlyParamSeparator(newFunctionType); + insertKeywordOnlySeparator = false; + } + + if (param.category !== ParamCategory.Simple || param.name || !swallowPositionOnlySeparator) { + FunctionType.addParam( + newFunctionType, + FunctionParam.create( + param.category, + paramType, + param.flags, + param.name && FunctionParam.isNameSynthesized(param) + ? `__p${newFunctionType.shared.parameters.length}` + : param.name, + FunctionType.getParamDefaultType(functionType, index), + param.defaultExpr + ) + ); + } + } + }); + + newFunctionType.shared.declaredReturnType = specializedParams.returnType; + + return newFunctionType; + }); + } + + private _isTypeVarScopePending(typeVarScopeId: TypeVarScopeId | undefined) { + return !!typeVarScopeId && this._pendingTypeVarTransformations.has(typeVarScopeId); + } +} + +// For a TypeVar with a default type, validates whether the default type is using +// any other TypeVars that are not currently in scope. 
+class TypeVarDefaultValidator extends TypeVarTransformer { + constructor(private _liveTypeParams: TypeVarType[], private _invalidTypeVars: Set) { + super(); + } + + override transformTypeVar(typeVar: TypeVarType) { + const replacementType = this._liveTypeParams.find((param) => param.shared.name === typeVar.shared.name); + if (!replacementType || isParamSpec(replacementType) !== isParamSpec(typeVar)) { + this._invalidTypeVars.add(typeVar.shared.name); + } + + return UnknownType.create(); + } +} + +class UniqueFunctionSignatureTransformer extends TypeVarTransformer { + constructor(private _signatureTracker: UniqueSignatureTracker, private _expressionOffset: number) { + super(); + } + + override transformGenericTypeAlias(type: Type, recursionCount: number): Type { + // Don't transform type aliases. + return type; + } + + override transformTypeVarsInClassType(classType: ClassType, recursionCount: number): Type { + // Don't transform classes. + return classType; + } + + override transformTypeVarsInFunctionType( + sourceType: FunctionType, + recursionCount: number + ): FunctionType | OverloadedType { + // If this function is not generic, there's no need to check for uniqueness. + if (sourceType.shared.typeParams.length === 0) { + return super.transformTypeVarsInFunctionType(sourceType, recursionCount); + } + + let updatedSourceType: Type = sourceType; + const existingSignature = this._signatureTracker.findSignature(sourceType); + if (existingSignature) { + let offsetIndex = existingSignature.expressionOffsets.findIndex( + (offset) => offset === this._expressionOffset + ); + if (offsetIndex < 0) { + offsetIndex = existingSignature.expressionOffsets.length; + } + + if (offsetIndex > 0) { + const solution = new ConstraintSolution(); + + // Create new type variables with the same scope but with + // different (unique) names. 
+ sourceType.shared.typeParams.forEach((typeParam) => { + if (typeParam.priv.scopeType === TypeVarScopeType.Function) { + const replacement: Type = TypeVarType.cloneForNewName( + typeParam, + `${typeParam.shared.name}(${offsetIndex})` + ); + + solution.setType(typeParam, replacement); + } + }); + + updatedSourceType = applySolvedTypeVars(sourceType, solution); + assert(isFunctionOrOverloaded(updatedSourceType)); + } + } + + this._signatureTracker.addSignature(sourceType, this._expressionOffset); + + return updatedSourceType; + } +} + +// Replaces the free type vars within a type with their corresponding bound +// type vars if they are in one of the specified scopes. If undefined is +// passed for the scopeIds list, all free type vars are replaced. +class BoundTypeVarTransform extends TypeVarTransformer { + constructor(private _scopeIds: TypeVarScopeId[] | undefined) { + super(); + } + + override transformTypeVar(typeVar: TypeVarType): Type | undefined { + if (this._isTypeVarInScope(typeVar)) { + return this._replaceTypeVar(typeVar); + } + + return undefined; + } + + private _isTypeVarInScope(typeVar: TypeVarType) { + if (!typeVar.priv.scopeId) { + return false; + } + + // If no scopeIds were specified, transform all Type Vars. + if (!this._scopeIds) { + return true; + } + + return this._scopeIds.includes(typeVar.priv.scopeId); + } + + private _replaceTypeVar(typeVar: TypeVarType): TypeVarType { + return TypeVarType.cloneAsBound(typeVar); + } +} + +// Replaces the bound type vars within a type with their corresponding +// free type vars. 
+class FreeTypeVarTransform extends TypeVarTransformer { + constructor(private _scopeIds: TypeVarScopeId[]) { + super(); + } + + override transformTypeVar(typeVar: TypeVarType): Type | undefined { + if (typeVar.priv.freeTypeVar && this._isTypeVarInScope(typeVar.priv.freeTypeVar)) { + return typeVar.priv.freeTypeVar; + } + + return undefined; + } + + private _isTypeVarInScope(typeVar: TypeVarType) { + if (!typeVar.priv.scopeId) { + return false; + } + + return this._scopeIds.includes(typeVar.priv.scopeId); + } +} + +// Specializes a (potentially generic) type by substituting +// type variables from a type var map. +class ApplySolvedTypeVarsTransformer extends TypeVarTransformer { + private _isSolvingDefaultType = false; + private _activeConstraintSetIndex: number | undefined; + + constructor(private _solution: ConstraintSolution, private _options: ApplyTypeVarOptions) { + super(); + } + + override transformTypeVar(typeVar: TypeVarType, recursionCount: number) { + const solutionSet = this._solution.getSolutionSet(this._activeConstraintSetIndex ?? 0); + + // If we're solving a default type, handle type variables with no scope ID. + if (this._isSolvingDefaultType && !typeVar.priv.scopeId) { + const replacement = this._getReplacementForDefaultByName(typeVar, solutionSet); + if (replacement) { + return replacement; + } + + if (typeVar.shared.isDefaultExplicit) { + return this.apply(typeVar.shared.defaultType, recursionCount); + } + + return UnknownType.create(); + } + + if (!this._shouldReplaceTypeVar(typeVar)) { + return undefined; + } + + let replacement = solutionSet.getType(typeVar); + + if (replacement) { + // No more processing is needed for ParamSpecs. 
+ if (isParamSpec(typeVar)) { + return replacement; + } + + if (TypeBase.isInstantiable(typeVar)) { + if ( + isAnyOrUnknown(replacement) && + this._options.typeClassType && + isInstantiableClass(this._options.typeClassType) + ) { + replacement = ClassType.specialize(ClassType.cloneAsInstance(this._options.typeClassType), [ + replacement, + ]); + } else { + replacement = convertToInstantiable(replacement, /* includeSubclasses */ false); + } + } else { + // If the TypeVar is not instantiable (i.e. not a type[T]), then + // it represents an instance of a type. If the replacement includes + // a generic class that has not been specialized, specialize it + // now with default type arguments. + replacement = mapSubtypes(replacement, (subtype) => { + if (isClassInstance(subtype)) { + // If the includeSubclasses wasn't set, force it to be set by + // converting to/from an instantiable. + if (!subtype.priv.includeSubclasses) { + subtype = ClassType.cloneAsInstance(ClassType.cloneAsInstantiable(subtype)); + } + + if (subtype.shared.typeParams && !subtype.priv.typeArgs) { + if (this._options.replaceUnsolved) { + return this._options.replaceUnsolved.useUnknown + ? specializeWithUnknownTypeArgs( + subtype, + this._options.replaceUnsolved.tupleClassType + ) + : specializeWithDefaultTypeArgs(subtype); + } + } + } + + return subtype; + }); + } + + if (isTypeVarTuple(replacement) && isTypeVarTuple(typeVar) && typeVar.priv.isUnpacked) { + return TypeVarType.cloneForUnpacked(replacement, typeVar.priv.isInUnion); + } + + if ( + !isTypeVarTuple(replacement) && + isTypeVar(replacement) && + isTypeVar(typeVar) && + typeVar.priv.isUnpacked + ) { + return TypeVarType.cloneForUnpacked(replacement); + } + + // If this isn't a TypeVarTuple, combine all of the tuple + // type args into a common type. 
+ if ( + !isTypeVarTuple(typeVar) && + isClassInstance(replacement) && + replacement.priv.tupleTypeArgs && + replacement.priv.isUnpacked + ) { + replacement = combineTupleTypeArgs(replacement.priv.tupleTypeArgs); + } + + if (isUnpackedTypeVar(typeVar) && isClass(replacement)) { + replacement = ClassType.cloneForUnpacked(replacement); + } + + if (!isTypeVar(replacement) || !TypeVarType.isUnification(replacement) || !this._options.replaceUnsolved) { + return replacement; + } + } + + if (!this._shouldReplaceUnsolvedTypeVar(typeVar)) { + return undefined; + } + + // Use the default value if there is one. + if (typeVar.shared.isDefaultExplicit && !this._options.replaceUnsolved?.useUnknown) { + return this._solveDefaultType(typeVar, recursionCount); + } + + return getUnknownForTypeVar(typeVar, this._options.replaceUnsolved?.tupleClassType); + } + + override transformUnionSubtype(preTransform: Type, postTransform: Type): Type | undefined { + // If a union contains unsolved TypeVars within scope, eliminate them + // unless this results in an empty union. This elimination is needed + // in cases where TypeVars can go unsolved due to unions in parameter + // annotations, like this: + // def test(x: Union[str, T]) -> Union[str, T] + if (!this._options.replaceUnsolved?.eliminateUnsolvedInUnions) { + return postTransform; + } + + const solutionSet = this._solution.getSolutionSet(this._activeConstraintSetIndex ?? 0); + + if (isTypeVar(preTransform)) { + if (!this._shouldReplaceTypeVar(preTransform) || !this._shouldReplaceUnsolvedTypeVar(preTransform)) { + return postTransform; + } + + const typeVarType = solutionSet.getType(preTransform); + + // Did the TypeVar remain unsolved? + if (typeVarType) { + if (!isTypeVar(typeVarType) || !TypeVarType.isUnification(typeVarType)) { + return postTransform; + } + } + + // If the TypeVar was not transformed, then it was unsolved, + // and we'll eliminate it. 
+ if (preTransform === postTransform) { + return undefined; + } + + // If useDefaultForUnsolved or useUnknownForUnsolved is true, the postTransform type will + // be Unknown, which we want to eliminate. + if (this._options.replaceUnsolved && isUnknown(postTransform)) { + return undefined; + } + } else if (preTransform.props?.condition) { + // If this is a type that is conditioned on a unification TypeVar, + // see if TypeVar was solved. If not, eliminate the type. + for (const condition of preTransform.props.condition) { + if (TypeVarType.isUnification(condition.typeVar) && !solutionSet.getType(condition.typeVar)) { + return undefined; + } + } + } + + return postTransform; + } + + override transformTupleTypeVar(typeVar: TypeVarType): TupleTypeArg[] | undefined { + if (!this._shouldReplaceTypeVar(typeVar)) { + const defaultType = typeVar.shared.defaultType; + + if (typeVar.shared.isDefaultExplicit && isClassInstance(defaultType) && defaultType.priv.tupleTypeArgs) { + return defaultType.priv.tupleTypeArgs; + } + + return undefined; + } + + const solutionSet = this._solution.getSolutionSet(this._activeConstraintSetIndex ?? 0); + const value = solutionSet.getType(typeVar); + if (value && isClassInstance(value) && value.priv.tupleTypeArgs && isUnpackedClass(value)) { + return value.priv.tupleTypeArgs; + } + return undefined; + } + + override transformConditionalType(type: Type, recursionCount: number): Type { + if (!type.props?.condition) { + return type; + } + + const solutionSet = this._solution.getSolutionSet(this._activeConstraintSetIndex ?? 0); + + for (const condition of type.props.condition) { + // This doesn't apply to bound type variables. + if (!TypeVarType.hasConstraints(condition.typeVar)) { + continue; + } + + const conditionTypeVar = condition.typeVar.priv?.freeTypeVar ?? 
condition.typeVar; + const replacement = solutionSet.getType(conditionTypeVar); + if (!replacement || condition.constraintIndex >= conditionTypeVar.shared.constraints.length) { + continue; + } + + const value = solutionSet.getType(conditionTypeVar); + if (!value) { + continue; + } + + const constraintType = conditionTypeVar.shared.constraints[condition.constraintIndex]; + + // If this violates the constraint, substitute a Never type. + if (!isTypeSame(constraintType, value)) { + return NeverType.createNever(); + } + } + return type; + } + + override doForEachConstraintSet(callback: () => FunctionType): FunctionType | OverloadedType { + const solutionSets = this._solution.getSolutionSets(); + + // Handle the common case where there are not multiple signature contexts. + if (solutionSets.length <= 1) { + return callback(); + } + + // Handle the case where we're already processing one of the signature contexts + // and are called recursively. Don't loop over all the signature contexts again. + if (this._activeConstraintSetIndex !== undefined) { + return callback(); + } + + // Loop through all of the signature contexts in the type var context + // to create an overload type. + const overloadTypes = solutionSets.map((_, index) => { + this._activeConstraintSetIndex = index; + return callback(); + }); + this._activeConstraintSetIndex = undefined; + + const filteredOverloads: FunctionType[] = []; + doForEachSubtype(combineTypes(overloadTypes), (subtype) => { + assert(isFunction(subtype)); + subtype = FunctionType.cloneWithNewFlags(subtype, subtype.shared.flags | FunctionTypeFlags.Overloaded); + filteredOverloads.push(subtype); + }); + + if (filteredOverloads.length === 1) { + return filteredOverloads[0]; + } + + return OverloadedType.create(filteredOverloads); + } + + // Handle the case where we need the default replacement value for a typeVar + // that has no scope and therefore doesn't have an assigned scopeID. We'll + // look it up by name in the solution set. 
This is a bit hacky because there + // could be multiple typeVars with the same name, but we'll assume that this + // won't happen. + private _getReplacementForDefaultByName( + typeVar: TypeVarType, + solutionSet: ConstraintSolutionSet + ): Type | undefined { + let replacementValue: Type | undefined; + const partialScopeId = `${typeVar.shared.name}.`; + + solutionSet.doForEachTypeVar((value, typeVarId) => { + if (typeVarId.startsWith(partialScopeId)) { + replacementValue = value; + } + }); + + return replacementValue; + } + + private _shouldReplaceTypeVar(typeVar: TypeVarType): boolean { + if (!typeVar.priv.scopeId || TypeVarType.isBound(typeVar)) { + return false; + } + + return true; + } + + private _shouldReplaceUnsolvedTypeVar(typeVar: TypeVarType): boolean { + // Never replace nested TypeVars with unknown. + if (this.pendingTypeVarTransformations.size > 0) { + return false; + } + + if (!typeVar.priv.scopeId) { + return false; + } + + if (!this._options.replaceUnsolved) { + return false; + } + + if (!this._options.replaceUnsolved.scopeIds.includes(typeVar.priv.scopeId)) { + return false; + } + + const exemptTypeVars = this._options.replaceUnsolved?.unsolvedExemptTypeVars; + if (exemptTypeVars) { + if (exemptTypeVars.some((t) => isTypeSame(t, typeVar, { ignoreTypeFlags: true }))) { + return false; + } + } + + return true; + } + + private _solveDefaultType(typeVar: TypeVarType, recursionCount: number) { + const defaultType = typeVar.shared.defaultType; + const wasSolvingDefaultType = this._isSolvingDefaultType; + this._isSolvingDefaultType = true; + const result = this.apply(defaultType, recursionCount); + this._isSolvingDefaultType = wasSolvingDefaultType; + return result; + } +} + +class UnificationTypeTransformer extends TypeVarTransformer { + constructor(private _liveTypeVarScopes: TypeVarScopeId[], private _usageOffset: number | undefined) { + super(); + } + + override transformTypeVar(typeVar: TypeVarType) { + if (!this._isTypeVarLive(typeVar)) { + return 
TypeVarType.cloneAsUnificationVar(typeVar, this._usageOffset); + } + + return undefined; + } + + private _isTypeVarLive(typeVar: TypeVarType) { + return this._liveTypeVarScopes.some( + (scopeId) => typeVar.priv.scopeId === scopeId || typeVar.priv.freeTypeVar?.priv.scopeId === scopeId + ); + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/typeWalker.ts b/python-parser/packages/pyright-internal/src/analyzer/typeWalker.ts new file mode 100644 index 00000000..27964729 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/typeWalker.ts @@ -0,0 +1,204 @@ +/* + * typeWalker.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * A class that walks the parts of a type (e.g. the parameters of a function + * or the type arguments of a class). It detects and prevents infinite recursion. + */ + +import { assert, assertNever } from '../common/debug'; +import { + AnyType, + ClassType, + FunctionType, + ModuleType, + NeverType, + OverloadedType, + Type, + TypeCategory, + TypeVarType, + UnboundType, + UnionType, + UnknownType, + maxTypeRecursionCount, +} from './types'; + +export class TypeWalker { + private _recursionCount = 0; + private _isWalkCanceled = false; + private _hitRecursionLimit = false; + + get isRecursionLimitHit() { + return this._hitRecursionLimit; + } + + get isWalkCanceled() { + return this._isWalkCanceled; + } + + walk(type: Type): void { + if (this._recursionCount > maxTypeRecursionCount) { + this._hitRecursionLimit = true; + return; + } + + if (this._isWalkCanceled) { + return; + } + + this._recursionCount++; + + if (type.props?.typeAliasInfo) { + this.visitTypeAlias(type); + } + + switch (type.category) { + case TypeCategory.Unbound: + this.visitUnbound(type); + break; + + case TypeCategory.Any: + this.visitAny(type); + break; + + case TypeCategory.Unknown: + this.visitUnknown(type); + break; + + case TypeCategory.Never: + this.visitNever(type); + break; + + case 
TypeCategory.Function: + this.visitFunction(type); + break; + + case TypeCategory.Overloaded: + this.visitOverloaded(type); + break; + + case TypeCategory.Class: + this.visitClass(type); + break; + + case TypeCategory.Module: + this.visitModule(type); + break; + + case TypeCategory.Union: + this.visitUnion(type); + break; + + case TypeCategory.TypeVar: + this.visitTypeVar(type); + break; + + default: + assertNever(type); + } + + this._recursionCount--; + } + + cancelWalk() { + this._isWalkCanceled = true; + } + + visitTypeAlias(type: Type) { + const aliasInfo = type.props?.typeAliasInfo; + assert(aliasInfo !== undefined); + + if (aliasInfo.typeArgs) { + for (const typeArg of aliasInfo.typeArgs) { + this.walk(typeArg); + if (this._isWalkCanceled) { + break; + } + } + } + } + + visitUnbound(type: UnboundType): void { + // Nothing to do. + } + + visitAny(type: AnyType): void { + // Nothing to do. + } + + visitUnknown(type: UnknownType): void { + // Nothing to do. + } + + visitNever(type: NeverType): void { + // Nothing to do. + } + + visitFunction(type: FunctionType): void { + for (let i = 0; i < type.shared.parameters.length; i++) { + // Ignore parameters such as "*" that have no name. + if (type.shared.parameters[i].name) { + const paramType = FunctionType.getParamType(type, i); + this.walk(paramType); + if (this._isWalkCanceled) { + break; + } + } + } + + if (!this._isWalkCanceled && !FunctionType.isParamSpecValue(type) && !FunctionType.isParamSpecValue(type)) { + const returnType = type.shared.declaredReturnType ?? 
type.shared.inferredReturnType?.type; + if (returnType) { + this.walk(returnType); + } + } + } + + visitOverloaded(type: OverloadedType): void { + const overloads = OverloadedType.getOverloads(type); + for (const overload of overloads) { + this.walk(overload); + if (this._isWalkCanceled) { + break; + } + } + + const impl = OverloadedType.getImplementation(type); + if (impl) { + this.walk(impl); + } + } + + visitClass(type: ClassType): void { + if (!ClassType.isPseudoGenericClass(type)) { + const typeArgs = type.priv.tupleTypeArgs?.map((t) => t.type) || type.priv.typeArgs; + if (typeArgs) { + for (const argType of typeArgs) { + this.walk(argType); + if (this._isWalkCanceled) { + break; + } + } + } + } + } + + visitModule(type: ModuleType): void { + // Nothing to do. + } + + visitUnion(type: UnionType): void { + for (const subtype of type.priv.subtypes) { + this.walk(subtype); + if (this._isWalkCanceled) { + break; + } + } + } + + visitTypeVar(type: TypeVarType): void { + // Nothing to do. + } +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/typedDicts.ts b/python-parser/packages/pyright-internal/src/analyzer/typedDicts.ts new file mode 100644 index 00000000..be0cf4cb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/typedDicts.ts @@ -0,0 +1,1693 @@ +/* + * typedDicts.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides special-case logic for the construction of TypedDict + * classes. 
+ */ + +import { appendArray } from '../common/collectionUtils'; +import { assert } from '../common/debug'; +import { DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { convertOffsetsToRange } from '../common/positionUtils'; +import { TextRange } from '../common/textRange'; +import { LocAddendum, LocMessage } from '../localization/localize'; +import { + ArgCategory, + ClassNode, + DictionaryNode, + ExpressionNode, + IndexNode, + ParamCategory, + ParseNodeType, +} from '../parser/parseNodes'; +import { KeywordType } from '../parser/tokenizerTypes'; +import * as AnalyzerNodeInfo from './analyzerNodeInfo'; +import { ConstraintTracker } from './constraintTracker'; +import { DeclarationType, VariableDeclaration } from './declaration'; +import * as ParseTreeUtils from './parseTreeUtils'; +import { Symbol, SymbolFlags, SymbolTable } from './symbol'; +import { getLastTypedDeclarationForSymbol } from './symbolUtils'; +import { + Arg, + AssignTypeFlags, + EvaluatorUsage, + TypeEvaluator, + TypeResult, + TypeResultWithNode, +} from './typeEvaluatorTypes'; +import { + AnyType, + ClassType, + ClassTypeFlags, + combineTypes, + FunctionParam, + FunctionParamFlags, + FunctionType, + FunctionTypeFlags, + isAnyOrUnknown, + isClass, + isClassInstance, + isInstantiableClass, + isNever, + maxTypeRecursionCount, + NeverType, + OverloadedType, + Type, + TypedDictEntries, + TypedDictEntry, + TypeVarScopeType, + TypeVarType, + UnknownType, +} from './types'; +import { + applySolvedTypeVars, + buildSolutionFromSpecializedClass, + computeMroLinearization, + convertToInstance, + getTypeVarScopeId, + isLiteralType, + mapSubtypes, + partiallySpecializeType, + specializeTupleClass, +} from './typeUtils'; + +// Creates a new custom TypedDict "alternate syntax" factory class. 
+export function createTypedDictType( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + typedDictClass: ClassType, + argList: Arg[] +): ClassType { + const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); + + // TypedDict supports two different syntaxes: + // Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) + // Point2D = TypedDict('Point2D', x=int, y=int, label=str) + let className: string | undefined; + if (argList.length === 0) { + evaluator.addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.typedDictFirstArg(), errorNode); + } else { + const nameArg = argList[0]; + if ( + nameArg.argCategory !== ArgCategory.Simple || + !nameArg.valueExpression || + nameArg.valueExpression.nodeType !== ParseNodeType.StringList + ) { + evaluator.addDiagnostic( + DiagnosticRule.reportArgumentType, + LocMessage.typedDictFirstArg(), + argList[0].valueExpression || errorNode + ); + } else { + className = nameArg.valueExpression.d.strings.map((s) => s.d.value).join(''); + } + } + + const effectiveClassName = className || 'TypedDict'; + const classType = ClassType.createInstantiable( + effectiveClassName, + ParseTreeUtils.getClassFullName(errorNode, fileInfo.moduleName, effectiveClassName), + fileInfo.moduleName, + fileInfo.fileUri, + ClassTypeFlags.TypedDictClass | ClassTypeFlags.ValidTypeAliasClass, + ParseTreeUtils.getTypeSourceId(errorNode), + /* declaredMetaclass */ undefined, + typedDictClass.shared.effectiveMetaclass + ); + classType.shared.baseClasses.push(typedDictClass); + computeMroLinearization(classType); + + const classFields = ClassType.getSymbolTable(classType); + classFields.set( + '__class__', + Symbol.createWithType(SymbolFlags.ClassMember | SymbolFlags.IgnoredForProtocolMatch, classType) + ); + + let usingDictSyntax = false; + if (argList.length < 2) { + evaluator.addDiagnostic(DiagnosticRule.reportCallIssue, LocMessage.typedDictSecondArgDict(), errorNode); + } else { + const entriesArg = argList[1]; + + if ( + 
entriesArg.argCategory === ArgCategory.Simple && + entriesArg.valueExpression && + entriesArg.valueExpression.nodeType === ParseNodeType.Dictionary + ) { + usingDictSyntax = true; + + getTypedDictFieldsFromDictSyntax(evaluator, entriesArg.valueExpression, classFields, /* isInline */ false); + } else if (entriesArg.name) { + const entrySet = new Set(); + for (let i = 1; i < argList.length; i++) { + const entry = argList[i]; + if (!entry.name || !entry.valueExpression) { + continue; + } + + if (entrySet.has(entry.name.d.value)) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictEntryUnique(), + entry.valueExpression + ); + continue; + } + + // Record names in a map to detect duplicates. + entrySet.add(entry.name.d.value); + + const newSymbol = new Symbol(SymbolFlags.InstanceMember); + const declaration: VariableDeclaration = { + type: DeclarationType.Variable, + node: entry.name, + uri: fileInfo.fileUri, + typeAnnotationNode: entry.valueExpression, + isRuntimeTypeExpression: true, + range: convertOffsetsToRange( + entry.name.start, + TextRange.getEnd(entry.valueExpression), + fileInfo.lines + ), + moduleName: fileInfo.moduleName, + isInExceptSuite: false, + }; + newSymbol.addDeclaration(declaration); + + classFields.set(entry.name.d.value, newSymbol); + } + } else { + evaluator.addDiagnostic(DiagnosticRule.reportArgumentType, LocMessage.typedDictSecondArgDict(), errorNode); + } + } + + if (usingDictSyntax) { + const argsToConsider = argList.slice(2); + let sawClosedOrExtraItems = false; + + for (const arg of argsToConsider) { + if (arg.name?.d.value === 'total' || arg.name?.d.value === 'closed') { + if ( + !arg.valueExpression || + arg.valueExpression.nodeType !== ParseNodeType.Constant || + !( + arg.valueExpression.d.constType === KeywordType.False || + arg.valueExpression.d.constType === KeywordType.True + ) + ) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictBoolParam().format({ 
name: arg.name.d.value }), + arg.valueExpression || errorNode + ); + } else if (arg.name.d.value === 'total' && arg.valueExpression.d.constType === KeywordType.False) { + classType.shared.flags |= ClassTypeFlags.CanOmitDictValues; + } else if (arg.name.d.value === 'closed') { + if (arg.valueExpression.d.constType === KeywordType.True) { + classType.shared.flags |= + ClassTypeFlags.TypedDictMarkedClosed | ClassTypeFlags.TypedDictEffectivelyClosed; + } + + if (sawClosedOrExtraItems) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictExtraItemsClosed(), + arg.valueExpression || errorNode + ); + } + + sawClosedOrExtraItems = true; + } + } else if (arg.name?.d.value === 'extra_items') { + classType.shared.typedDictExtraItemsExpr = arg.valueExpression; + classType.shared.flags |= ClassTypeFlags.TypedDictEffectivelyClosed; + + if (sawClosedOrExtraItems) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictExtraItemsClosed(), + arg.valueExpression || errorNode + ); + } + + sawClosedOrExtraItems = true; + } else { + evaluator.addDiagnostic( + DiagnosticRule.reportCallIssue, + LocMessage.typedDictExtraArgs(), + arg.valueExpression || errorNode + ); + } + } + } + + synthesizeTypedDictClassMethods(evaluator, errorNode, classType); + + // Validate that the assigned variable name is consistent with the provided name. + if (errorNode.parent?.nodeType === ParseNodeType.Assignment && className) { + const target = errorNode.parent.d.leftExpr; + const typedDictTarget = target.nodeType === ParseNodeType.TypeAnnotation ? 
target.d.valueExpr : target; + + if (typedDictTarget.nodeType === ParseNodeType.Name) { + if (typedDictTarget.d.value !== className) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictAssignedName().format({ + name: className, + }), + typedDictTarget + ); + } + } + } + + return classType; +} + +// Creates a new anonymous TypedDict class from an inlined dict[{}] type annotation. +export function createTypedDictTypeInlined( + evaluator: TypeEvaluator, + dictNode: DictionaryNode, + typedDictClass: ClassType +): ClassType { + const fileInfo = AnalyzerNodeInfo.getFileInfo(dictNode); + const className = ''; + + const classType = ClassType.createInstantiable( + className, + ParseTreeUtils.getClassFullName(dictNode, fileInfo.moduleName, className), + fileInfo.moduleName, + fileInfo.fileUri, + ClassTypeFlags.TypedDictClass, + ParseTreeUtils.getTypeSourceId(dictNode), + /* declaredMetaclass */ undefined, + typedDictClass.shared.effectiveMetaclass + ); + classType.shared.baseClasses.push(typedDictClass); + computeMroLinearization(classType); + + getTypedDictFieldsFromDictSyntax(evaluator, dictNode, ClassType.getSymbolTable(classType), /* isInline */ true); + synthesizeTypedDictClassMethods(evaluator, dictNode, classType); + + return classType; +} + +export function synthesizeTypedDictClassMethods( + evaluator: TypeEvaluator, + node: ClassNode | ExpressionNode, + classType: ClassType +) { + assert(ClassType.isTypedDictClass(classType)); + + // Synthesize a __new__ method. 
+ const newType = FunctionType.createSynthesizedInstance('__new__', FunctionTypeFlags.ConstructorMethod); + FunctionType.addParam( + newType, + FunctionParam.create(ParamCategory.Simple, classType, FunctionParamFlags.TypeDeclared, 'cls') + ); + FunctionType.addDefaultParams(newType); + newType.shared.declaredReturnType = ClassType.cloneAsInstance(classType); + newType.priv.constructorTypeVarScopeId = getTypeVarScopeId(classType); + + // Synthesize an __init__ method with two overrides. + const initOverride1 = FunctionType.createSynthesizedInstance('__init__', FunctionTypeFlags.Overloaded); + FunctionType.addParam( + initOverride1, + FunctionParam.create( + ParamCategory.Simple, + ClassType.cloneAsInstance(classType), + FunctionParamFlags.TypeDeclared, + 'self' + ) + ); + initOverride1.shared.declaredReturnType = evaluator.getNoneType(); + initOverride1.priv.constructorTypeVarScopeId = getTypeVarScopeId(classType); + + // The first parameter must be positional-only. + FunctionType.addParam( + initOverride1, + FunctionParam.create( + ParamCategory.Simple, + ClassType.cloneAsInstance(classType), + FunctionParamFlags.TypeDeclared, + '__map' + ) + ); + + const entries = getTypedDictMembersForClass(evaluator, classType); + const extraEntriesInfo = entries.extraItems ?? getEffectiveExtraItemsEntryType(evaluator, classType); + let allEntriesAreReadOnly = entries.knownItems.size > 0; + + if (entries.knownItems.size > 0) { + FunctionType.addPositionOnlyParamSeparator(initOverride1); + + // All subsequent parameters must be named, so insert an empty "*". 
+ FunctionType.addKeywordOnlyParamSeparator(initOverride1); + } + + const initOverride2 = FunctionType.createSynthesizedInstance('__init__', FunctionTypeFlags.Overloaded); + FunctionType.addParam( + initOverride2, + FunctionParam.create( + ParamCategory.Simple, + ClassType.cloneAsInstance(classType), + FunctionParamFlags.TypeDeclared, + 'self' + ) + ); + initOverride2.shared.declaredReturnType = evaluator.getNoneType(); + initOverride2.priv.constructorTypeVarScopeId = getTypeVarScopeId(classType); + + if (entries.knownItems.size > 0) { + // All parameters must be named, so insert an empty "*". + FunctionType.addKeywordOnlyParamSeparator(initOverride2); + } + + entries.knownItems.forEach((entry, name) => { + FunctionType.addParam( + initOverride1, + FunctionParam.create( + ParamCategory.Simple, + entry.valueType, + FunctionParamFlags.TypeDeclared, + name, + entry.valueType + ) + ); + + FunctionType.addParam( + initOverride2, + FunctionParam.create( + ParamCategory.Simple, + entry.valueType, + FunctionParamFlags.TypeDeclared, + name, + entry.isRequired ? 
undefined : entry.valueType + ) + ); + + if (!entry.isReadOnly) { + allEntriesAreReadOnly = false; + } + }); + + if (entries.extraItems && !isNever(entries.extraItems.valueType)) { + FunctionType.addParam( + initOverride1, + FunctionParam.create( + ParamCategory.KwargsDict, + entries.extraItems.valueType, + FunctionParamFlags.TypeDeclared, + 'kwargs' + ) + ); + + FunctionType.addParam( + initOverride2, + FunctionParam.create( + ParamCategory.KwargsDict, + entries.extraItems.valueType, + FunctionParamFlags.TypeDeclared, + 'kwargs' + ) + ); + } + + const symbolTable = ClassType.getSymbolTable(classType); + const initType = OverloadedType.create([initOverride1, initOverride2]); + symbolTable.set('__init__', Symbol.createWithType(SymbolFlags.ClassMember, initType)); + symbolTable.set('__new__', Symbol.createWithType(SymbolFlags.ClassMember, newType)); + + const strClass = evaluator.getBuiltInType(node, 'str'); + + // Synthesize a "get", pop, and setdefault method for each named entry. + if (isInstantiableClass(strClass)) { + const selfParam = FunctionParam.create( + ParamCategory.Simple, + ClassType.cloneAsInstance(classType), + FunctionParamFlags.TypeDeclared, + 'self' + ); + + function createDefaultTypeVar(func: FunctionType) { + let defaultTypeVar = TypeVarType.createInstance(`__TDefault`); + defaultTypeVar = TypeVarType.cloneForScopeId( + defaultTypeVar, + func.shared.typeVarScopeId!, + classType.shared.name, + TypeVarScopeType.Function + ); + return defaultTypeVar; + } + + function createGetMethod( + keyType: Type, + valueType: Type, + includeDefault: boolean, + isEntryRequired = false, + defaultTypeMatchesField = false + ) { + const getOverload = FunctionType.createSynthesizedInstance('get', FunctionTypeFlags.Overloaded); + FunctionType.addParam(getOverload, selfParam); + getOverload.shared.typeVarScopeId = ParseTreeUtils.getScopeIdForNode(node); + FunctionType.addParam( + getOverload, + FunctionParam.create(ParamCategory.Simple, keyType, 
FunctionParamFlags.TypeDeclared, 'k') + ); + + if (includeDefault) { + const defaultTypeVar = createDefaultTypeVar(getOverload); + let defaultParamType: Type; + let returnType: Type; + + if (isEntryRequired) { + // If the entry is required, the type of the default param doesn't matter + // because the type will always come from the value. + defaultParamType = AnyType.create(); + returnType = valueType; + } else { + if (defaultTypeMatchesField) { + defaultParamType = valueType; + } else { + defaultParamType = combineTypes([valueType, defaultTypeVar]); + } + + returnType = defaultParamType; + } + + FunctionType.addParam( + getOverload, + FunctionParam.create( + ParamCategory.Simple, + defaultParamType, + FunctionParamFlags.TypeDeclared, + 'default' + ) + ); + getOverload.shared.declaredReturnType = returnType; + } else { + getOverload.shared.declaredReturnType = isEntryRequired + ? valueType + : combineTypes([valueType, evaluator.getNoneType()]); + } + return getOverload; + } + + function createPopMethods(keyType: Type, valueType: Type, isEntryRequired: boolean) { + const keyParam = FunctionParam.create(ParamCategory.Simple, keyType, FunctionParamFlags.TypeDeclared, 'k'); + + const popOverload1 = FunctionType.createSynthesizedInstance('pop', FunctionTypeFlags.Overloaded); + FunctionType.addParam(popOverload1, selfParam); + FunctionType.addParam(popOverload1, keyParam); + popOverload1.shared.declaredReturnType = valueType; + + const popOverload2 = FunctionType.createSynthesizedInstance('pop', FunctionTypeFlags.Overloaded); + FunctionType.addParam(popOverload2, selfParam); + FunctionType.addParam(popOverload2, keyParam); + popOverload2.shared.typeVarScopeId = ParseTreeUtils.getScopeIdForNode(node); + const defaultTypeVar = createDefaultTypeVar(popOverload2); + + let defaultParamType: Type; + let returnType: Type; + + if (isEntryRequired) { + // If the entry is required, the type of the default param doesn't matter + // because the type will always come from the value. 
+ defaultParamType = AnyType.create(); + returnType = valueType; + } else { + defaultParamType = combineTypes([valueType, defaultTypeVar]); + returnType = defaultParamType; + } + + FunctionType.addParam( + popOverload2, + FunctionParam.create( + ParamCategory.Simple, + defaultParamType, + FunctionParamFlags.TypeDeclared, + 'default', + defaultParamType + ) + ); + popOverload2.shared.declaredReturnType = returnType; + return [popOverload1, popOverload2]; + } + + function createSetDefaultMethod(keyType: Type, valueType: Type) { + const setDefaultOverload = FunctionType.createSynthesizedInstance( + 'setdefault', + FunctionTypeFlags.Overloaded + ); + FunctionType.addParam(setDefaultOverload, selfParam); + FunctionType.addParam( + setDefaultOverload, + FunctionParam.create(ParamCategory.Simple, keyType, FunctionParamFlags.TypeDeclared, 'k') + ); + FunctionType.addParam( + setDefaultOverload, + FunctionParam.create(ParamCategory.Simple, valueType, FunctionParamFlags.TypeDeclared, 'default') + ); + setDefaultOverload.shared.declaredReturnType = valueType; + return setDefaultOverload; + } + + function createDelItemMethod(keyType: Type) { + const delItemOverload = FunctionType.createSynthesizedInstance('delitem', FunctionTypeFlags.Overloaded); + FunctionType.addParam(delItemOverload, selfParam); + FunctionType.addParam( + delItemOverload, + FunctionParam.create(ParamCategory.Simple, keyType, FunctionParamFlags.TypeDeclared, 'k') + ); + delItemOverload.shared.declaredReturnType = evaluator.getNoneType(); + return delItemOverload; + } + + function createUpdateMethod() { + // Overload 1: update(__m: Partial[], /) + const updateMethod1 = FunctionType.createSynthesizedInstance('update', FunctionTypeFlags.Overloaded); + FunctionType.addParam(updateMethod1, selfParam); + + // Overload 2: update(__m: Iterable[tuple[, ]], /) + const updateMethod2 = FunctionType.createSynthesizedInstance('update', FunctionTypeFlags.Overloaded); + FunctionType.addParam(updateMethod2, selfParam); + + 
// Overload 3: update(*, : , ...) + const updateMethod3 = FunctionType.createSynthesizedInstance('update', FunctionTypeFlags.Overloaded); + FunctionType.addParam(updateMethod3, selfParam); + + // If all entries are read-only, don't allow updates. + FunctionType.addParam( + updateMethod1, + FunctionParam.create( + ParamCategory.Simple, + allEntriesAreReadOnly + ? NeverType.createNever() + : ClassType.cloneAsInstance(ClassType.cloneForPartialTypedDict(classType)), + FunctionParamFlags.TypeDeclared, + '__m' + ) + ); + + if (entries.knownItems.size > 0) { + FunctionType.addPositionOnlyParamSeparator(updateMethod1); + FunctionType.addKeywordOnlyParamSeparator(updateMethod3); + } + + updateMethod1.shared.declaredReturnType = evaluator.getNoneType(); + updateMethod2.shared.declaredReturnType = evaluator.getNoneType(); + updateMethod3.shared.declaredReturnType = evaluator.getNoneType(); + + const tuplesToCombine: Type[] = []; + const tupleClass = evaluator.getBuiltInType(node, 'tuple'); + + entries.knownItems.forEach((entry, name) => { + if (!entry.isReadOnly) { + // For writable entries, add a tuple entry. + if (tupleClass && isInstantiableClass(tupleClass) && strClass && isInstantiableClass(strClass)) { + const tupleType = specializeTupleClass(ClassType.cloneAsInstance(tupleClass), [ + { + type: ClassType.cloneWithLiteral(ClassType.cloneAsInstance(strClass), name), + isUnbounded: false, + }, + { type: entry.valueType, isUnbounded: false }, + ]); + + tuplesToCombine.push(tupleType); + } + + // For writable entries, add a keyword argument. 
+ FunctionType.addParam( + updateMethod3, + FunctionParam.create( + ParamCategory.Simple, + entry.valueType, + FunctionParamFlags.TypeDeclared, + name, + AnyType.create(/* isEllipsis */ true) + ) + ); + } + }); + + const iterableClass = evaluator.getTypingType(node, 'Iterable'); + if (iterableClass && isInstantiableClass(iterableClass)) { + const iterableType = ClassType.cloneAsInstance(iterableClass); + + FunctionType.addParam( + updateMethod2, + FunctionParam.create( + ParamCategory.Simple, + ClassType.specialize(iterableType, [combineTypes(tuplesToCombine)]), + FunctionParamFlags.TypeDeclared, + '__m' + ) + ); + } + + if (entries.knownItems.size > 0) { + FunctionType.addPositionOnlyParamSeparator(updateMethod2); + } + + // Note that the order of method1 and method2 is swapped. This is done so + // the method1 signature is used in the error message when neither method2 + // or method1 match. + return OverloadedType.create([updateMethod2, updateMethod1, updateMethod3]); + } + + const getOverloads: FunctionType[] = []; + const popOverloads: FunctionType[] = []; + const setDefaultOverloads: FunctionType[] = []; + + entries.knownItems.forEach((entry, name) => { + const nameLiteralType = ClassType.cloneAsInstance(ClassType.cloneWithLiteral(strClass, name)); + + getOverloads.push( + createGetMethod(nameLiteralType, entry.valueType, /* includeDefault */ false, entry.isRequired) + ); + + getOverloads.push( + createGetMethod( + nameLiteralType, + entry.valueType, + /* includeDefault */ true, + /* isEntryRequired */ entry.isRequired, + /* defaultTypeMatchesField */ entry.isRequired + ) + ); + + // Add a pop method if the entry is not required. 
+ if (!entry.isRequired && !entry.isReadOnly) { + appendArray(popOverloads, createPopMethods(nameLiteralType, entry.valueType, entry.isRequired)); + } + + if (!entry.isReadOnly) { + setDefaultOverloads.push(createSetDefaultMethod(nameLiteralType, entry.valueType)); + } + }); + + const strType = ClassType.cloneAsInstance(strClass); + + // If the class is closed, we can assume that any other keys that + // are present will return the default parameter value or the extra + // entries value type. + if (ClassType.isTypedDictEffectivelyClosed(classType)) { + getOverloads.push( + createGetMethod( + strType, + combineTypes([extraEntriesInfo.valueType, evaluator.getNoneType()]), + /* includeDefault */ false, + /* isEntryRequired */ true + ) + ); + getOverloads.push(createGetMethod(strType, extraEntriesInfo.valueType, /* includeDefault */ true)); + } else { + // Provide a final `get` overload that handles the general case where + // the key is a str but the literal value isn't known. + getOverloads.push(createGetMethod(strType, AnyType.create(), /* includeDefault */ false)); + getOverloads.push(createGetMethod(strType, AnyType.create(), /* includeDefault */ true)); + } + + // Add a catch-all pop method. 
+ if (ClassType.isTypedDictEffectivelyClosed(classType)) { + if (!isNever(extraEntriesInfo.valueType)) { + popOverloads.push( + ...createPopMethods(strType, extraEntriesInfo.valueType, /* isEntryRequired */ false) + ); + } + } else { + popOverloads.push(...createPopMethods(strType, evaluator.getObjectType(), /* isEntryRequired */ false)); + } + + symbolTable.set('get', Symbol.createWithType(SymbolFlags.ClassMember, OverloadedType.create(getOverloads))); + + if (popOverloads.length > 0) { + symbolTable.set('pop', Symbol.createWithType(SymbolFlags.ClassMember, OverloadedType.create(popOverloads))); + } + + if (setDefaultOverloads.length > 0) { + symbolTable.set( + 'setdefault', + Symbol.createWithType(SymbolFlags.ClassMember, OverloadedType.create(setDefaultOverloads)) + ); + } + + if (!allEntriesAreReadOnly) { + symbolTable.set( + '__delitem__', + Symbol.createWithType(SymbolFlags.ClassMember, createDelItemMethod(strType)) + ); + } + + symbolTable.set('update', Symbol.createWithType(SymbolFlags.ClassMember, createUpdateMethod())); + + // If the TypedDict is closed and all of its entries are NotRequired and + // not ReadOnly, add a "clear" and "popitem" method. 
+ const dictValueType = getTypedDictDictEquivalent(evaluator, classType); + + if (dictValueType) { + const clearMethod = FunctionType.createSynthesizedInstance('clear'); + FunctionType.addParam(clearMethod, selfParam); + clearMethod.shared.declaredReturnType = evaluator.getNoneType(); + symbolTable.set('clear', Symbol.createWithType(SymbolFlags.ClassMember, clearMethod)); + + const popItemMethod = FunctionType.createSynthesizedInstance('popitem'); + FunctionType.addParam(popItemMethod, selfParam); + let tupleType: Type | undefined = evaluator.getTupleClassType(); + + if (tupleType && isInstantiableClass(tupleType)) { + tupleType = specializeTupleClass( + ClassType.cloneAsInstance(tupleType), + [ + { type: strType, isUnbounded: false }, + { type: dictValueType, isUnbounded: false }, + ], + /* isTypeArgExplicit */ true + ); + } else { + tupleType = UnknownType.create(); + } + + popItemMethod.shared.declaredReturnType = tupleType; + symbolTable.set('popitem', Symbol.createWithType(SymbolFlags.ClassMember, popItemMethod)); + } + + // If the TypedDict is closed, we can provide a more accurate value type + // for the "items", "keys" and "values" methods. + const mappingValueType = getTypedDictMappingEquivalent(evaluator, classType); + + if (mappingValueType) { + let keyValueType: Type = strType; + + // If we know that there can be no more items, we can provide + // a more accurate key type consisting of all known keys. 
+ if (entries.extraItems && isNever(entries.extraItems.valueType)) { + keyValueType = combineTypes( + Array.from(entries.knownItems.keys()).map((key) => ClassType.cloneWithLiteral(strType, key)) + ); + } + + ['items', 'keys', 'values'].forEach((methodName) => { + const method = FunctionType.createSynthesizedInstance(methodName); + FunctionType.addParam(method, selfParam); + + const returnTypeClass = evaluator.getTypingType(node, `dict_${methodName}`); + if ( + returnTypeClass && + isInstantiableClass(returnTypeClass) && + returnTypeClass.shared.typeParams.length === 2 + ) { + method.shared.declaredReturnType = ClassType.specialize( + ClassType.cloneAsInstance(returnTypeClass), + [keyValueType, mappingValueType] + ); + + symbolTable.set(methodName, Symbol.createWithType(SymbolFlags.ClassMember, method)); + } + }); + } + } +} + +export function getTypedDictMembersForClass( + evaluator: TypeEvaluator, + classType: ClassType, + allowNarrowed = false +): TypedDictEntries { + // Were the entries already calculated and cached? + if (!classType.shared.typedDictEntries) { + const entries: TypedDictEntries = { + knownItems: new Map(), + extraItems: undefined, + }; + getTypedDictMembersForClassRecursive(evaluator, classType, entries); + + if (ClassType.isTypedDictMarkedClosed(classType) && !entries.extraItems) { + entries.extraItems = { + valueType: NeverType.createNever(), + isReadOnly: false, + isRequired: false, + isProvided: false, + }; + } + + // Cache the entries for next time. + classType.shared.typedDictEntries = entries; + } + + const solution = buildSolutionFromSpecializedClass(classType); + + // Create a specialized copy of the entries so the caller can mutate them. 
+ const entries = new Map(); + classType.shared.typedDictEntries!.knownItems.forEach((value, key) => { + const tdEntry = { ...value }; + tdEntry.valueType = applySolvedTypeVars(tdEntry.valueType, solution); + + // If the class is "Partial", make all entries optional and convert all + // read-only entries to Never. + if (classType.priv.isTypedDictPartial) { + tdEntry.isRequired = false; + + if (tdEntry.isReadOnly) { + tdEntry.valueType = NeverType.createNever(); + } else { + tdEntry.isReadOnly = true; + } + } + + entries.set(key, tdEntry); + }); + + // Apply narrowed types on top of existing entries if present. + if (allowNarrowed && classType.priv.typedDictNarrowedEntries) { + classType.priv.typedDictNarrowedEntries.forEach((value, key) => { + const tdEntry = { ...value }; + tdEntry.valueType = applySolvedTypeVars(tdEntry.valueType, solution); + entries.set(key, tdEntry); + }); + } + + let extraItems = classType.shared.typedDictEntries?.extraItems; + if (extraItems) { + extraItems = { + ...extraItems, + valueType: applySolvedTypeVars(extraItems.valueType, solution), + }; + } + + return { knownItems: entries, extraItems }; +} + +// If the TypedDict class is consistent with Mapping[str, T] where T +// is some type other than object, it returns T. Otherwise it returns undefined. +export function getTypedDictMappingEquivalent(evaluator: TypeEvaluator, classType: ClassType): Type | undefined { + assert(isInstantiableClass(classType)); + assert(ClassType.isTypedDictClass(classType)); + + // If the TypedDict class isn't closed, it's just a normal Mapping[str, object]. + if (!ClassType.isTypedDictEffectivelyClosed(classType)) { + return undefined; + } + + const entries = getTypedDictMembersForClass(evaluator, classType); + const typesToCombine: Type[] = []; + + entries.knownItems.forEach((entry) => { + typesToCombine.push(entry.valueType); + }); + + if (entries.extraItems) { + typesToCombine.push(entries.extraItems.valueType); + } + + // Is the final value type 'object'? 
+ const valueType = combineTypes(typesToCombine); + if (isClassInstance(valueType) && ClassType.isBuiltIn(valueType, 'object')) { + return undefined; + } + + return valueType; +} + +// If the TypedDict class is consistent with dict[str, T], it returns T. +// Otherwise it returns undefined. +export function getTypedDictDictEquivalent( + evaluator: TypeEvaluator, + classType: ClassType, + recursionCount = 0 +): Type | undefined { + assert(isInstantiableClass(classType)); + assert(ClassType.isTypedDictClass(classType)); + + // If the TypedDict class isn't closed, it's not equivalent to a dict. + if (!ClassType.isTypedDictEffectivelyClosed(classType)) { + return undefined; + } + + const entries = getTypedDictMembersForClass(evaluator, classType); + + // If there is no "extraItems" defined or it is read-only, it's not + // equivalent to a dict. + if (!entries.extraItems || entries.extraItems.isReadOnly) { + return undefined; + } + + let dictValueType = entries.extraItems.valueType; + + let isEquivalentToDict = true; + entries.knownItems.forEach((entry) => { + if (entry.isReadOnly || entry.isRequired) { + isEquivalentToDict = false; + } + + dictValueType = combineTypes([dictValueType, entry.valueType]); + + if ( + !evaluator.assignType( + dictValueType, + entry.valueType, + /* diag */ undefined, + /* constraints */ undefined, + AssignTypeFlags.Invariant, + recursionCount + 1 + ) + ) { + isEquivalentToDict = false; + } + }); + + if (!isEquivalentToDict) { + return undefined; + } + + return dictValueType; +} + +function getTypedDictFieldsFromDictSyntax( + evaluator: TypeEvaluator, + entryDict: DictionaryNode, + classFields: SymbolTable, + isInline: boolean +) { + const entrySet = new Set(); + const fileInfo = AnalyzerNodeInfo.getFileInfo(entryDict); + + entryDict.d.items.forEach((entry) => { + if (entry.nodeType !== ParseNodeType.DictionaryKeyEntry) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictSecondArgDictEntry(), + entry + 
); + return; + } + + if (entry.d.keyExpr.nodeType !== ParseNodeType.StringList) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictEntryName(), + entry.d.keyExpr + ); + return; + } + + const entryName = entry.d.keyExpr.d.strings.map((s) => s.d.value).join(''); + if (!entryName) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictEmptyName(), + entry.d.keyExpr + ); + return; + } + + if (entrySet.has(entryName)) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typedDictEntryUnique(), + entry.d.keyExpr + ); + return; + } + + // Record names in a set to detect duplicates. + entrySet.add(entryName); + + const newSymbol = new Symbol(SymbolFlags.InstanceMember); + const declaration: VariableDeclaration = { + type: DeclarationType.Variable, + node: entry.d.keyExpr, + uri: fileInfo.fileUri, + typeAnnotationNode: entry.d.valueExpr, + isRuntimeTypeExpression: !isInline, + range: convertOffsetsToRange(entry.d.keyExpr.start, TextRange.getEnd(entry.d.keyExpr), fileInfo.lines), + moduleName: fileInfo.moduleName, + isInExceptSuite: false, + isInInlinedTypedDict: true, + }; + newSymbol.addDeclaration(declaration); + + classFields.set(entryName, newSymbol); + }); + + // Set the type in the type cache for the dict node so it doesn't + // get evaluated again. 
+ evaluator.setTypeResultForNode(entryDict, { type: UnknownType.create() }); +} + +function getTypedDictMembersForClassRecursive( + evaluator: TypeEvaluator, + classType: ClassType, + entries: TypedDictEntries, + recursionCount = 0 +) { + assert(ClassType.isTypedDictClass(classType)); + if (recursionCount > maxTypeRecursionCount) { + return; + } + recursionCount++; + + classType.shared.baseClasses.forEach((baseClassType) => { + if (isInstantiableClass(baseClassType) && ClassType.isTypedDictClass(baseClassType)) { + const specializedBaseClassType = partiallySpecializeType( + baseClassType, + classType, + evaluator.getTypeClassType() + ); + assert(isClass(specializedBaseClassType)); + + // Recursively gather keys from parent classes. Don't report any errors + // in these cases because they will be reported within that class. + getTypedDictMembersForClassRecursive(evaluator, specializedBaseClassType, entries, recursionCount); + } + }); + + const solution = buildSolutionFromSpecializedClass(classType); + + if (ClassType.isTypedDictMarkedClosed(classType)) { + entries.extraItems = { + valueType: NeverType.createNever(), + isReadOnly: false, + isRequired: false, + isProvided: false, + }; + } else if (classType.shared.typedDictExtraItemsExpr) { + const extraItemsTypeResult = evaluator.getTypeOfExpressionExpectingType( + classType.shared.typedDictExtraItemsExpr, + { allowReadOnly: true } + ); + + entries.extraItems = { + valueType: convertToInstance(extraItemsTypeResult.type), + isReadOnly: !!extraItemsTypeResult.isReadOnly, + isRequired: false, + isProvided: true, + }; + } + + // Add any new typed dict entries from this class. + ClassType.getSymbolTable(classType).forEach((symbol, name) => { + if (!symbol.isIgnoredForProtocolMatch()) { + // Only variables (not functions, classes, etc.) are considered. 
+ const lastDecl = getLastTypedDeclarationForSymbol(symbol); + + if (lastDecl && lastDecl.type === DeclarationType.Variable) { + let valueType = evaluator.getEffectiveTypeOfSymbol(symbol); + valueType = applySolvedTypeVars(valueType, solution); + + let isRequired = !ClassType.isCanOmitDictValues(classType); + let isReadOnly = false; + + if (isRequiredTypedDictVariable(evaluator, symbol)) { + isRequired = true; + } else if (isNotRequiredTypedDictVariable(evaluator, symbol)) { + isRequired = false; + } + + if (isReadOnlyTypedDictVariable(evaluator, symbol)) { + isReadOnly = true; + } + + const tdEntry: TypedDictEntry = { + valueType, + isReadOnly, + isRequired, + isProvided: false, + }; + + entries.knownItems.set(name, tdEntry); + } + } + }); +} + +export function getEffectiveExtraItemsEntryType(evaluator: TypeEvaluator, classType: ClassType): TypedDictEntry { + assert(ClassType.isTypedDictClass(classType)); + + // Missing entries in a non-closed TypedDict class are implicitly typed as + // ReadOnly[NotRequired[object]]. + if (!ClassType.isTypedDictMarkedClosed(classType)) { + return { + valueType: evaluator.getObjectType(), + isReadOnly: true, + isRequired: false, + isProvided: false, + }; + } + + if (classType.shared.typedDictEntries?.extraItems) { + return classType.shared.typedDictEntries.extraItems; + } + + return { + valueType: NeverType.createNever(), + isReadOnly: true, + isRequired: false, + isProvided: false, + }; +} + +export function assignTypedDictToTypedDict( + evaluator: TypeEvaluator, + destType: ClassType, + srcType: ClassType, + diag: DiagnosticAddendum | undefined, + constraints: ConstraintTracker | undefined, + flags: AssignTypeFlags, + recursionCount = 0 +) { + let typesAreConsistent = true; + const destEntries = getTypedDictMembersForClass(evaluator, destType); + const srcEntries = getTypedDictMembersForClass(evaluator, srcType, /* allowNarrowed */ true); + const extraSrcEntries = srcEntries.extraItems ?? 
getEffectiveExtraItemsEntryType(evaluator, srcType); + + destEntries.knownItems.forEach((destEntry, name) => { + // If we've already determined that the types are inconsistent and + // the caller isn't interested in detailed diagnostics, skip the remainder. + if (!typesAreConsistent && !diag) { + return; + } + + const srcEntry = srcEntries.knownItems.get(name); + if (!srcEntry) { + if (destEntry.isRequired || !destEntry.isReadOnly) { + diag?.createAddendum().addMessage( + LocAddendum.typedDictFieldMissing().format({ + name, + type: evaluator.printType(ClassType.cloneAsInstance(srcType)), + }) + ); + typesAreConsistent = false; + } else { + if (isClassInstance(extraSrcEntries.valueType)) { + const subDiag = diag?.createAddendum(); + if ( + !evaluator.assignType( + destEntry.valueType, + extraSrcEntries.valueType, + subDiag?.createAddendum(), + constraints, + flags, + recursionCount + ) + ) { + subDiag?.addMessage(LocAddendum.memberTypeMismatch().format({ name })); + typesAreConsistent = false; + } + } + } + } else { + if (destEntry.isRequired !== srcEntry.isRequired && !destEntry.isReadOnly) { + const message = destEntry.isRequired + ? LocAddendum.typedDictFieldRequired() + : LocAddendum.typedDictFieldNotRequired(); + diag?.createAddendum().addMessage( + message.format({ + name, + type: evaluator.printType(ClassType.cloneAsInstance(destType)), + }) + ); + typesAreConsistent = false; + } + + if (!destEntry.isReadOnly && srcEntry.isReadOnly) { + diag?.createAddendum().addMessage( + LocAddendum.typedDictFieldNotReadOnly().format({ + name, + type: evaluator.printType(ClassType.cloneAsInstance(destType)), + }) + ); + typesAreConsistent = false; + } + + const subDiag = diag?.createAddendum(); + + if ( + !evaluator.assignType( + destEntry.valueType, + srcEntry.valueType, + subDiag?.createAddendum(), + constraints, + destEntry.isReadOnly ? 
flags : flags | AssignTypeFlags.Invariant, + recursionCount + ) + ) { + subDiag?.addMessage(LocAddendum.memberTypeMismatch().format({ name })); + typesAreConsistent = false; + } + } + }); + + // If the types are not consistent and the caller isn't interested + // in detailed diagnostics, don't do additional work. + if (!typesAreConsistent && !diag) { + return false; + } + + // If the destination TypedDict is closed, check any extra entries in the source + // TypedDict to ensure that they don't violate the "extra items" type. + if (ClassType.isTypedDictEffectivelyClosed(destType)) { + const extraDestEntries = destEntries.extraItems ?? getEffectiveExtraItemsEntryType(evaluator, destType); + + srcEntries.knownItems.forEach((srcEntry, name) => { + // Have we already checked this item in the loop above? + if (destEntries.knownItems.has(name)) { + return; + } + + if (!destEntries.extraItems) { + const subDiag = diag?.createAddendum(); + subDiag?.addMessage( + LocAddendum.typedDictExtraFieldNotAllowed().format({ + name, + type: evaluator.printType(ClassType.cloneAsInstance(destType)), + }) + ); + typesAreConsistent = false; + } else { + if (srcEntry.isRequired && !destEntries.extraItems.isReadOnly) { + diag?.createAddendum().addMessage( + LocAddendum.typedDictFieldNotRequired().format({ + name, + type: evaluator.printType(ClassType.cloneAsInstance(destType)), + }) + ); + typesAreConsistent = false; + } + + const subDiag = diag?.createAddendum(); + + if ( + !evaluator.assignType( + destEntries.extraItems.valueType, + srcEntry.valueType, + subDiag?.createAddendum(), + constraints, + destEntries.extraItems.isReadOnly ? 
flags : flags | AssignTypeFlags.Invariant, + recursionCount + ) + ) { + subDiag?.addMessage( + LocAddendum.typedDictExtraFieldTypeMismatch().format({ + name, + type: evaluator.printType(ClassType.cloneAsInstance(destType)), + }) + ); + typesAreConsistent = false; + } else if (!destEntries.extraItems.isReadOnly && srcEntry.isReadOnly) { + diag?.createAddendum().addMessage( + LocAddendum.typedDictFieldNotReadOnly().format({ + name, + type: evaluator.printType(ClassType.cloneAsInstance(srcType)), + }) + ); + typesAreConsistent = false; + } + } + }); + + const subDiag = diag?.createAddendum(); + if ( + !evaluator.assignType( + extraDestEntries.valueType, + extraSrcEntries.valueType, + subDiag?.createAddendum(), + constraints, + extraDestEntries.isReadOnly ? flags : flags | AssignTypeFlags.Invariant, + recursionCount + ) + ) { + subDiag?.addMessage( + LocAddendum.typedDictExtraFieldTypeMismatch().format({ + name: 'extra_items', + type: evaluator.printType(ClassType.cloneAsInstance(srcType)), + }) + ); + typesAreConsistent = false; + } else if (!extraDestEntries.isReadOnly && extraSrcEntries.isReadOnly) { + diag?.createAddendum().addMessage( + LocAddendum.typedDictFieldNotReadOnly().format({ + name: 'extra_items', + type: evaluator.printType(ClassType.cloneAsInstance(destType)), + }) + ); + typesAreConsistent = false; + } + } + + return typesAreConsistent; +} + +// Determines whether the specified keys and values can be assigned to +// a typed dictionary class. The caller should have already validated +// that the class is indeed a typed dict. If the types are compatible, +// the typed dict class or a narrowed form of the class is returned. +// Narrowing is possible when not-required keys are provided. If the +// types are not compatible, the function returns undefined. 
+export function assignToTypedDict( + evaluator: TypeEvaluator, + classType: ClassType, + keyTypes: TypeResultWithNode[], + valueTypes: TypeResultWithNode[], + diagAddendum?: DiagnosticAddendum +): ClassType | undefined { + assert(isClassInstance(classType)); + assert(ClassType.isTypedDictClass(classType)); + assert(keyTypes.length === valueTypes.length); + + let isMatch = true; + const narrowedEntries = new Map(); + + let constraints: ConstraintTracker | undefined; + let genericClassType = classType; + + if (classType.shared.typeParams.length > 0) { + constraints = new ConstraintTracker(); + + // Create a generic (nonspecialized version) of the class. + if (classType.priv.typeArgs) { + genericClassType = ClassType.specialize(classType, /* typeArgs */ undefined); + } + } + + const tdEntries = getTypedDictMembersForClass(evaluator, genericClassType); + + keyTypes.forEach((keyTypeResult, index) => { + const keyType = keyTypeResult.type; + if (!isClassInstance(keyType) || !ClassType.isBuiltIn(keyType, 'str') || !isLiteralType(keyType)) { + isMatch = false; + } else { + const keyValue = keyType.priv.literalValue as string; + const symbolEntry = tdEntries.knownItems.get(keyValue); + + if (!symbolEntry) { + if (tdEntries.extraItems) { + const subDiag = diagAddendum?.createAddendum(); + if ( + !evaluator.assignType( + tdEntries.extraItems.valueType, + valueTypes[index].type, + subDiag?.createAddendum(), + constraints, + AssignTypeFlags.RetainLiteralsForTypeVar + ) + ) { + if (subDiag) { + subDiag.addMessage( + LocAddendum.typedDictFieldTypeMismatch().format({ + name: 'extra_items', + type: evaluator.printType(valueTypes[index].type), + }) + ); + + subDiag.addTextRange(keyTypeResult.node); + } + isMatch = false; + } + } else { + // The provided key name doesn't exist. 
+ isMatch = false; + if (diagAddendum) { + const subDiag = diagAddendum?.createAddendum(); + subDiag.addMessage( + LocAddendum.typedDictFieldUndefined().format({ + name: keyType.priv.literalValue as string, + type: evaluator.printType(ClassType.cloneAsInstance(classType)), + }) + ); + + subDiag.addTextRange(keyTypeResult.node); + } + } + } else { + // Can we assign the value to the declared type? + const subDiag = diagAddendum?.createAddendum(); + if ( + !evaluator.assignType( + symbolEntry.valueType, + valueTypes[index].type, + subDiag?.createAddendum(), + constraints, + AssignTypeFlags.RetainLiteralsForTypeVar + ) + ) { + if (subDiag) { + subDiag.addMessage( + LocAddendum.typedDictFieldTypeMismatch().format({ + name: keyType.priv.literalValue as string, + type: evaluator.printType(valueTypes[index].type), + }) + ); + + subDiag.addTextRange(keyTypeResult.node); + } + isMatch = false; + } + + if (!symbolEntry.isRequired) { + narrowedEntries.set(keyValue, { + valueType: valueTypes[index].type, + isReadOnly: !!valueTypes[index].isReadOnly, + isRequired: false, + isProvided: true, + }); + } + + symbolEntry.isProvided = true; + } + } + }); + + if (!isMatch) { + return undefined; + } + + // See if any required keys are missing. + tdEntries.knownItems.forEach((entry, name) => { + if (entry.isRequired && !entry.isProvided) { + if (diagAddendum) { + diagAddendum.addMessage( + LocAddendum.typedDictFieldRequired().format({ + name, + type: evaluator.printType(classType), + }) + ); + } + isMatch = false; + } + }); + + if (!isMatch) { + return undefined; + } + + const specializedClassType = constraints + ? (evaluator.solveAndApplyConstraints(genericClassType, constraints) as ClassType) + : classType; + + return narrowedEntries.size === 0 + ? 
specializedClassType + : ClassType.cloneForNarrowedTypedDictEntries(specializedClassType, narrowedEntries); +} + +export function getTypeOfIndexedTypedDict( + evaluator: TypeEvaluator, + node: IndexNode, + baseType: ClassType, + usage: EvaluatorUsage +): TypeResult | undefined { + if (node.d.items.length !== 1) { + evaluator.addDiagnostic( + DiagnosticRule.reportGeneralTypeIssues, + LocMessage.typeArgsMismatchOne().format({ received: node.d.items.length }), + node + ); + return { type: UnknownType.create() }; + } + + // Look for subscript types that are not supported by TypedDict. + if (node.d.trailingComma || node.d.items[0].d.name || node.d.items[0].d.argCategory !== ArgCategory.Simple) { + return undefined; + } + + const entries = getTypedDictMembersForClass(evaluator, baseType, /* allowNarrowed */ usage.method === 'get'); + + const indexTypeResult = evaluator.getTypeOfExpression(node.d.items[0].d.valueExpr); + const indexType = indexTypeResult.type; + let diag = new DiagnosticAddendum(); + let allDiagsInvolveNotRequiredKeys = true; + + const resultingType = mapSubtypes(indexType, (subtype) => { + if (isAnyOrUnknown(subtype)) { + return subtype; + } + + if (isClassInstance(subtype) && ClassType.isBuiltIn(subtype, 'str')) { + if (subtype.priv.literalValue === undefined) { + // If it's a plain str with no literal value, we can't + // make any determination about the resulting type. + return UnknownType.create(); + } + + // Look up the entry in the typed dict to get its type. + const entryName = subtype.priv.literalValue as string; + const entry = entries.knownItems.get(entryName) ?? 
entries.extraItems; + if (!entry || isNever(entry.valueType)) { + diag.addMessage( + LocAddendum.keyUndefined().format({ + name: entryName, + type: evaluator.printType(baseType), + }) + ); + allDiagsInvolveNotRequiredKeys = false; + return UnknownType.create(); + } else if (!(entry.isRequired || entry.isProvided) && usage.method === 'get') { + diag.addMessage( + LocAddendum.keyNotRequired().format({ + name: entryName, + type: evaluator.printType(baseType), + }) + ); + } else if (entry.isReadOnly && usage.method !== 'get') { + diag.addMessage( + LocAddendum.keyReadOnly().format({ + name: entryName, + type: evaluator.printType(baseType), + }) + ); + } + + if (usage.method === 'set') { + if (!evaluator.assignType(entry.valueType, usage.setType?.type ?? AnyType.create(), diag)) { + allDiagsInvolveNotRequiredKeys = false; + } + } else if (usage.method === 'del' && entry.isRequired) { + diag.addMessage( + LocAddendum.keyRequiredDeleted().format({ + name: entryName, + }) + ); + allDiagsInvolveNotRequiredKeys = false; + } + + return entry.valueType; + } + + diag.addMessage(LocAddendum.typeNotStringLiteral().format({ type: evaluator.printType(subtype) })); + allDiagsInvolveNotRequiredKeys = false; + return UnknownType.create(); + }); + + // If we have an "expected type" diagnostic addendum (used for assignments), + // use that rather than the local diagnostic information because it will + // be more informative. + if (usage.setExpectedTypeDiag && !diag.isEmpty() && !usage.setExpectedTypeDiag.isEmpty()) { + diag = usage.setExpectedTypeDiag; + } + + if (!diag.isEmpty()) { + let typedDictDiag: string; + if (usage.method === 'set') { + typedDictDiag = LocMessage.typedDictSet(); + } else if (usage.method === 'del') { + typedDictDiag = LocMessage.typedDictDelete(); + } else { + typedDictDiag = LocMessage.typedDictAccess(); + } + + evaluator.addDiagnostic( + allDiagsInvolveNotRequiredKeys + ? 
DiagnosticRule.reportTypedDictNotRequiredAccess + : DiagnosticRule.reportGeneralTypeIssues, + typedDictDiag + diag.getString(), + node + ); + } + + return { type: resultingType, isIncomplete: !!indexTypeResult.isIncomplete }; +} + +// If the specified type has a non-required key, this method marks the +// key as present. +export function narrowForKeyAssignment(classType: ClassType, key: string) { + // We should never be called if the classType is not a TypedDict or if typedDictEntries + // is empty, but this can theoretically happen in the presence of certain circular + // dependencies. + if (!ClassType.isTypedDictClass(classType) || !classType.shared.typedDictEntries) { + return classType; + } + + const tdEntry = classType.shared.typedDictEntries.knownItems.get(key); + if (!tdEntry || tdEntry.isRequired) { + return classType; + } + + const narrowedTdEntry = classType.priv.typedDictNarrowedEntries?.get(key); + if (narrowedTdEntry?.isProvided) { + return classType; + } + + const narrowedEntries = classType.priv.typedDictNarrowedEntries + ? 
new Map(classType.priv.typedDictNarrowedEntries) + : new Map(); + narrowedEntries.set(key, { + isProvided: true, + isRequired: false, + isReadOnly: tdEntry.isReadOnly, + valueType: tdEntry.valueType, + }); + + return ClassType.cloneForNarrowedTypedDictEntries(classType, narrowedEntries); +} + +function isRequiredTypedDictVariable(evaluator: TypeEvaluator, symbol: Symbol) { + return symbol.getDeclarations().some((decl) => { + if (decl.type !== DeclarationType.Variable || !decl.typeAnnotationNode) { + return false; + } + + const annotatedType = evaluator.getTypeOfExpressionExpectingType(decl.typeAnnotationNode, { + allowFinal: true, + allowRequired: true, + allowReadOnly: true, + }); + + return !!annotatedType.isRequired; + }); +} + +function isNotRequiredTypedDictVariable(evaluator: TypeEvaluator, symbol: Symbol) { + return symbol.getDeclarations().some((decl) => { + if (decl.type !== DeclarationType.Variable || !decl.typeAnnotationNode) { + return false; + } + + const annotatedType = evaluator.getTypeOfExpressionExpectingType(decl.typeAnnotationNode, { + allowFinal: true, + allowRequired: true, + allowReadOnly: true, + }); + + return !!annotatedType.isNotRequired; + }); +} + +function isReadOnlyTypedDictVariable(evaluator: TypeEvaluator, symbol: Symbol) { + return symbol.getDeclarations().some((decl) => { + if (decl.type !== DeclarationType.Variable || !decl.typeAnnotationNode) { + return false; + } + + const annotatedType = evaluator.getTypeOfExpressionExpectingType(decl.typeAnnotationNode, { + allowFinal: true, + allowRequired: true, + allowReadOnly: true, + }); + + return !!annotatedType.isReadOnly; + }); +} diff --git a/python-parser/packages/pyright-internal/src/analyzer/types.ts b/python-parser/packages/pyright-internal/src/analyzer/types.ts new file mode 100644 index 00000000..d3e1bdbe --- /dev/null +++ b/python-parser/packages/pyright-internal/src/analyzer/types.ts @@ -0,0 +1,3999 @@ +/* + * types.ts + * Copyright (c) Microsoft Corporation. 
+ * Licensed under the MIT license. + * Author: Eric Traut + * + * Representation of types used during type analysis within Python. + */ + +import { partition } from '../common/collectionUtils'; +import { assert } from '../common/debug'; +import { Uri } from '../common/uri/uri'; +import { ArgumentNode, ExpressionNode, NameNode, ParamCategory, TypeAnnotationNode } from '../parser/parseNodes'; +import { ClassDeclaration, FunctionDeclaration, SpecialBuiltInClassDeclaration } from './declaration'; +import { Symbol, SymbolTable } from './symbol'; + +export const enum TypeCategory { + // Name is not bound to a value of any type + Unbound, + + // Implicit Any type + Unknown, + + // Type can be anything + Any, + + // The bottom type, equivalent to an empty union + Never, + + // Callable type + Function, + + // Functions defined with @overload decorator + Overloaded, + + // Class definition + Class, + + // Module instance + Module, + + // Union of two or more other types + Union, + + // Type variable + TypeVar, +} + +export const enum TypeFlags { + None = 0, + + // This type refers to something that can be instantiated. + Instantiable = 1 << 0, + + // This type refers to something that has been instantiated. + Instance = 1 << 1, + + // This type is inferred within a py.typed source file and could be + // inferred differently by other type checkers. + Ambiguous = 1 << 2, + + // This mask indicates which flags should be considered significant + // when comparing two types for equivalence. + TypeCompatibilityMask = Instantiable | Instance, +} + +export type UnionableType = + | UnboundType + | UnknownType + | AnyType + | FunctionType + | OverloadedType + | ClassType + | ModuleType + | TypeVarType; + +export type Type = UnionableType | NeverType | UnionType; + +// A string that uniquely identifies a TypeVar that is bound to a scope +// (a generic class, function, or type alias). 
+export type TypeVarScopeId = string; +export const UnificationScopeId: TypeVarScopeId = '-'; + +// Information about an enum member that can be used within a Literal +// type annotation. +export class EnumLiteral { + constructor( + public classFullName: string, + public className: string, + public itemName: string, + public itemType: Type, + public isReprEnum: boolean + ) {} + + getName() { + return `${this.classFullName}.${this.itemName}`; + } +} + +export class SentinelLiteral { + constructor(public classFullName: string, public className: string) {} + + getName() { + return this.className; + } +} + +export type LiteralValue = number | bigint | boolean | string | EnumLiteral | SentinelLiteral; + +export type TypeSourceId = number; + +// This constant controls the maximum number of nested types (i.e. types +// used as type arguments or parameter types in other types) before we +// give up. This constant was previously set to 32, but there were certain +// pathological recursive types where this resulted in a hang. It was also +// previously lowered to 10, but this caused some legitimate failures in +// code that used numpy. Even at 16, there are some legitimate failures in +// numpy. +export const maxTypeRecursionCount = 20; + +export type InheritanceChain = (ClassType | UnknownType)[]; + +// Options used with the isTypeSame function +export interface TypeSameOptions { + ignorePseudoGeneric?: boolean; + ignoreTypeFlags?: boolean; + ignoreConditions?: boolean; + ignoreTypedDictNarrowEntries?: boolean; + honorTypeForm?: boolean; + honorIsTypeArgExplicit?: boolean; + treatAnySameAsUnknown?: boolean; +} + +export interface TypeAliasSharedInfo { + name: string; + fullName: string; + moduleName: string; + fileUri: Uri; + + typeVarScopeId: TypeVarScopeId; + + // Is the type alias a PEP 695 TypeAliasType instance? 
+ isTypeAliasType: boolean; + + // Type parameters, if type alias is generic + typeParams: TypeVarType[] | undefined; + + // Lazily-evaluated variance of type parameters based on how + // they are used in the type alias + computedVariance: Variance[] | undefined; +} + +export interface TypeAliasInfo { + shared: TypeAliasSharedInfo; + + // Type argument, if type alias is specialized + typeArgs: Type[] | undefined; +} + +interface CachedTypeInfo { + // Type converted to instantiable and instance by convertToInstance + // and convertToInstantiable (cached) + instantiableType?: Type; + instanceType?: Type; + + // Type converted to instantiable and instance by TypeBase methods (cached) + typeBaseInstantiableType?: Type; + typeBaseInstanceType?: Type; + + // Requires specialization flag (cached) + requiresSpecialization?: boolean; +} + +export interface TypeBaseProps { + // Used to handle nested references to instantiable classes + // (e.g. type[type[type[T]]]). If the field isn't present, + // it is assumed to be zero + instantiableDepth: number | undefined; + + // Used in cases where the type is a special form when used in a + // value expression such as UnionType, Literal, or Required + specialForm: ClassType | undefined; + + // Used for "type form" objects, the evaluated form + // of a type expression in a value expression context + typeForm: Type | undefined; + + // Used only for type aliases + typeAliasInfo: TypeAliasInfo | undefined; + + // Used only for types that are conditioned on a TypeVar + condition: TypeCondition[] | undefined; +} + +export interface TypeBase { + category: T; + flags: TypeFlags; + + // Optional properties common to all types. + props: TypeBaseProps | undefined; + + // Optional cached values are not cloned. + cached: CachedTypeInfo | undefined; + + // Fields that are specific to a particular type category. + // These are shared between type instances and are not + // cloned. 
+ shared: object | undefined; + + // Fields that are specific to a particular type category. + // These are private to each type instances and are + // cloned. + priv: object | undefined; +} + +export namespace TypeBase { + export function isInstantiable(type: TypeBase) { + return (type.flags & TypeFlags.Instantiable) !== 0; + } + + export function isInstance(type: TypeBase) { + return (type.flags & TypeFlags.Instance) !== 0; + } + + export function isAmbiguous(type: TypeBase) { + return (type.flags & TypeFlags.Ambiguous) !== 0; + } + + export function addProps(type: TypeBase): TypeBaseProps { + if (!type.props) { + type.props = { + instantiableDepth: undefined, + specialForm: undefined, + typeForm: undefined, + typeAliasInfo: undefined, + condition: undefined, + }; + } + return type.props; + } + + export function getInstantiableDepth(type: TypeBase) { + return type.props?.instantiableDepth ?? 0; + } + + export function setSpecialForm(type: TypeBase, specialForm: ClassType | undefined) { + TypeBase.addProps(type).specialForm = specialForm; + } + + export function setInstantiableDepth(type: TypeBase, depth: number | undefined) { + TypeBase.addProps(type).instantiableDepth = depth; + } + + export function setTypeAliasInfo(type: TypeBase, typeAliasInfo: TypeAliasInfo | undefined) { + TypeBase.addProps(type).typeAliasInfo = typeAliasInfo; + } + + export function setTypeForm(type: TypeBase, typeForm: Type | undefined) { + TypeBase.addProps(type).typeForm = typeForm; + } + + export function setCondition(type: TypeBase, condition: TypeCondition[] | undefined) { + TypeBase.addProps(type).condition = condition; + } + + export function cloneType>(type: T): T { + const clone = { ...type }; + if (type.props) { + clone.props = { ...type.props }; + } + if (type.priv) { + clone.priv = { ...type.priv }; + } + clone.cached = undefined; + return clone; + } + + export function cloneAsSpecialForm>(type: T, specialForm: ClassType | undefined): T { + const clone = 
TypeBase.cloneType(type); + TypeBase.setSpecialForm(clone, specialForm); + return clone; + } + + export function cloneTypeAsInstance(type: T, cache: boolean): T { + assert(TypeBase.isInstantiable(type)); + + const newInstance = TypeBase.cloneType(type); + + // Remove type form information from the type. + if (newInstance.props?.typeForm) { + TypeBase.setTypeForm(newInstance, undefined); + } + + const depth = newInstance.props?.instantiableDepth; + if (depth === undefined) { + newInstance.flags &= ~TypeFlags.Instantiable; + newInstance.flags |= TypeFlags.Instance; + } else if (depth <= 1) { + TypeBase.setInstantiableDepth(newInstance, undefined); + } else { + TypeBase.setInstantiableDepth(newInstance, depth - 1); + } + + // Should we cache it for next time? + if (cache) { + if (!type.cached) { + type.cached = {}; + } + + type.cached.typeBaseInstanceType = newInstance; + } + + return newInstance; + } + + export function cloneTypeAsInstantiable(type: T, cache: boolean): T { + const newInstance: T = TypeBase.cloneType(type); + + if (TypeBase.isInstance(type)) { + newInstance.flags &= ~TypeFlags.Instance; + newInstance.flags |= TypeFlags.Instantiable; + } else { + const oldDepth = type.props?.instantiableDepth; + TypeBase.setInstantiableDepth(newInstance, oldDepth === undefined ? 1 : oldDepth + 1); + } + + // Remove type alias information because the type will no longer match + // that of the type alias definition. + if (newInstance.props?.typeAliasInfo) { + TypeBase.setTypeAliasInfo(newInstance, undefined); + } + + // Remove type form information from the type. + if (newInstance.props?.typeForm) { + TypeBase.setTypeForm(newInstance, undefined); + } + + // Should we cache it for next time? 
+ if (cache) { + if (!type.cached) { + type.cached = {}; + } + + type.cached.typeBaseInstantiableType = newInstance; + } + + return newInstance; + } + + export function cloneForTypeAlias(type: T, aliasInfo: TypeAliasInfo): T { + const typeClone = cloneType(type); + + TypeBase.setTypeAliasInfo(typeClone, aliasInfo); + + return typeClone; + } + + export function cloneWithTypeForm(type: T, typeForm: Type | undefined): T { + const typeClone = cloneType(type); + + TypeBase.setTypeForm(typeClone, typeForm); + + return typeClone; + } + + export function cloneForCondition(type: T, condition: TypeCondition[] | undefined): T { + // Handle the common case where there are no conditions. In this case, + // cloning isn't necessary. + if (type.props?.condition === undefined && condition === undefined) { + return type; + } + + const typeClone = cloneType(type); + TypeBase.setCondition(typeClone, condition); + return typeClone; + } + + export function cloneForAmbiguousType(type: Type) { + if (TypeBase.isAmbiguous(type)) { + return type; + } + + const typeClone = cloneType(type); + typeClone.flags |= TypeFlags.Ambiguous; + return typeClone; + } +} + +export interface UnboundType extends TypeBase {} + +export namespace UnboundType { + const _instance: UnboundType = { + category: TypeCategory.Unbound, + flags: TypeFlags.Instantiable | TypeFlags.Instance, + props: undefined, + cached: undefined, + shared: undefined, + priv: undefined, + }; + + export function create() { + // All Unbound objects are the same, so use a shared instance. + return _instance; + } + + export function convertToInstance(type: UnboundType): UnboundType { + // Remove the "special form" if present. Otherwise return the existing type. + return type.props?.specialForm ? UnboundType.create() : type; + } +} + +export interface UnknownDetailsPriv { + // Flag that indicates whether the type is a placeholder for an incomplete + // type during code flow analysis. 
+ isIncomplete: boolean; + + // A "possible type" is a form of a "weak union" where the actual + // type is unknown, but it could be one of the subtypes in the union. + // This is used for overload matching in cases where more than one + // overload matches due to an argument that evaluates to Any or Unknown. + possibleType: Type | undefined; +} + +export interface UnknownType extends TypeBase { + priv: UnknownDetailsPriv; +} + +export namespace UnknownType { + const _instance: UnknownType = { + category: TypeCategory.Unknown, + flags: TypeFlags.Instantiable | TypeFlags.Instance, + props: undefined, + cached: undefined, + shared: undefined, + priv: { + isIncomplete: false, + possibleType: undefined, + }, + }; + const _incompleteInstance: UnknownType = { + category: TypeCategory.Unknown, + flags: TypeFlags.Instantiable | TypeFlags.Instance, + props: undefined, + cached: undefined, + shared: undefined, + priv: { + isIncomplete: true, + possibleType: undefined, + }, + }; + + export function create(isIncomplete = false) { + return isIncomplete ? _incompleteInstance : _instance; + } + + export function createPossibleType(possibleType: Type, isIncomplete: boolean) { + const unknownWithPossibleType: UnknownType = { + category: TypeCategory.Unknown, + flags: TypeFlags.Instantiable | TypeFlags.Instance, + props: undefined, + cached: undefined, + shared: undefined, + priv: { + isIncomplete, + possibleType, + }, + }; + + return unknownWithPossibleType; + } + + export function convertToInstance(type: UnknownType): UnknownType { + // Remove the "special form" if present. Otherwise return the existing type. + return type.props?.specialForm ? UnknownType.create(type.priv.isIncomplete) : type; + } +} + +export interface ModuleDetailsPriv { + fields: SymbolTable; + docString: string | undefined; + + // If a field lookup isn't found, should the type of the + // resulting field be Any/Unknown or treated as an error? 
+ notPresentFieldType: AnyType | UnknownType | undefined; + + // A "loader" module includes symbols that were injected by + // the module loader. We keep these separate so we don't + // pollute the symbols exported by the module itself. + loaderFields: SymbolTable; + + // The period-delimited import name of this module. + moduleName: string; + + fileUri: Uri; +} + +export interface ModuleType extends TypeBase { + priv: ModuleDetailsPriv; +} + +export namespace ModuleType { + export function create(moduleName: string, fileUri: Uri, symbolTable?: SymbolTable) { + const newModuleType: ModuleType = { + category: TypeCategory.Module, + flags: TypeFlags.Instantiable | TypeFlags.Instantiable, + props: undefined, + cached: undefined, + shared: undefined, + priv: { + fields: symbolTable || new Map(), + docString: undefined, + notPresentFieldType: undefined, + loaderFields: new Map(), + moduleName, + fileUri, + }, + }; + return newModuleType; + } + + export function getField(moduleType: ModuleType, name: string): Symbol | undefined { + // Always look for the symbol in the module's fields before consulting + // the loader fields. The loader runs before the module, so its values + // will be overwritten by the module. + let symbol = moduleType.priv.fields.get(name); + + if (moduleType.priv.loaderFields) { + if (!symbol) { + symbol = moduleType.priv.loaderFields.get(name); + } else if (symbol.getDeclarations().length === 1) { + // If the symbol is hidden when accessed via the module but is + // also accessible through a loader field, use the latter so it + // isn't flagged as an error. 
+ const loaderSymbol = moduleType.priv.loaderFields.get(name); + if (loaderSymbol && !loaderSymbol.isExternallyHidden()) { + symbol = loaderSymbol; + } + } + } + return symbol; + } +} + +export interface DataClassEntry { + name: string; + classType: ClassType; + mroClass?: ClassType; + isClassVar: boolean; + isKeywordOnly: boolean; + alias?: string | undefined; + hasDefault?: boolean | undefined; + isDefaultFactory?: boolean | undefined; + nameNode: NameNode | undefined; + typeAnnotationNode: TypeAnnotationNode | undefined; + defaultExpr?: ExpressionNode | undefined; + includeInInit: boolean; + type: Type; + converter?: ArgumentNode | undefined; +} + +export interface TypedDictEntry { + valueType: Type; + isRequired: boolean; + isReadOnly: boolean; + isProvided: boolean; +} + +export interface TypedDictEntries { + knownItems: Map; + extraItems?: TypedDictEntry | undefined; +} + +export const enum ClassTypeFlags { + None = 0, + + // Class is defined in the "builtins" or "typing" file. + BuiltIn = 1 << 0, + + // Class requires special-case handling because it + // exhibits non-standard behavior or is not defined + // formally as a class. Examples include 'Optional' + // and 'Union'. + SpecialBuiltIn = 1 << 1, + + // Introduced in PEP 589, TypedDict classes provide a way + // to specify type hints for dictionaries with different + // value types and a limited set of static keys. + TypedDictClass = 1 << 2, + + // Used in conjunction with TypedDictClass, indicates that + // the TypedDict class is marked "closed". + TypedDictMarkedClosed = 1 << 3, + + // Used in conjunction with TypedDictClass, indicates that + // the TypedDict class is marked "closed" or one or more of + // its superclasses is marked "closed". + TypedDictEffectivelyClosed = 1 << 4, + + // Used in conjunction with TypedDictClass, indicates that + // the dictionary values can be omitted. + CanOmitDictValues = 1 << 5, + + // The class derives from a class that has the ABCMeta + // metaclass. 
Such classes are allowed to contain + // @abstractmethod decorators. + SupportsAbstractMethods = 1 << 6, + + // Derives from property class and has the semantics of + // a property (with optional setter, deleter). + PropertyClass = 1 << 7, + + // The class is decorated with a "@final" decorator + // indicating that it cannot be subclassed. + Final = 1 << 8, + + // The class derives directly from "Protocol". + ProtocolClass = 1 << 9, + + // A class whose constructor (__init__ method) does not have + // annotated types and is treated as though each parameter + // is a generic type for purposes of type inference. + PseudoGenericClass = 1 << 10, + + // A protocol class that is "runtime checkable" can be used + // in an isinstance call. + RuntimeCheckable = 1 << 11, + + // The type is defined in the typing_extensions.pyi file. + TypingExtensionClass = 1 << 12, + + // The class type is in the process of being evaluated and + // is not yet complete. This allows us to detect cases where + // the class refers to itself (e.g. uses itself as a type + // argument to one of its generic base classes). + PartiallyEvaluated = 1 << 13, + + // The class or one of its ancestors defines a __class_getitem__ + // method that is used for subscripting. This is not set if the + // class is generic, and therefore supports standard subscripting + // semantics. + HasCustomClassGetItem = 1 << 14, + + // The tuple class requires special-case handling for its type arguments. + TupleClass = 1 << 15, + + // The class has a metaclass of EnumMeta or derives from + // a class that has this metaclass. + EnumClass = 1 << 16, + + // Properties that are defined using the @classmethod decorator. + ClassProperty = 1 << 17, + + // Class is declared within a type stub file. + DefinedInStub = 1 << 18, + + // Decorated with @type_check_only. + TypeCheckOnly = 1 << 20, + + // Created with the NewType call. 
+ NewTypeClass = 1 << 21, + + // Class is allowed to be used as an implicit type alias even + // though it is not defined using a `class` statement. + ValidTypeAliasClass = 1 << 22, + + // A special form is not compatible with type[T] and cannot + // be directly instantiated. + SpecialFormClass = 1 << 23, + + // This class is rejected when used as the second argument to + // an isinstance or issubclass call. + IllegalIsinstanceClass = 1 << 24, +} + +export interface DataClassBehaviors { + skipGenerateInit?: boolean; + skipGenerateEq?: boolean; + generateOrder?: boolean; + generateSlots?: boolean; + generateHash?: boolean; + matchArgs?: boolean; + keywordOnly?: boolean; + frozen?: boolean; + frozenDefault?: boolean; + fieldDescriptorNames: string[]; +} + +export interface ClassDetailsShared { + name: string; + fullName: string; + moduleName: string; + fileUri: Uri; + flags: ClassTypeFlags; + typeSourceId: TypeSourceId; + baseClasses: Type[]; + mro: (ClassType | AnyType | UnknownType)[]; + declaration?: ClassDeclaration | SpecialBuiltInClassDeclaration | undefined; + declaredMetaclass?: ClassType | UnknownType | undefined; + effectiveMetaclass?: ClassType | UnknownType | undefined; + fields: SymbolTable; + typeParams: TypeVarType[]; + typeVarScopeId?: TypeVarScopeId | undefined; + docString?: string | undefined; + dataClassEntries?: DataClassEntry[] | undefined; + dataClassBehaviors?: DataClassBehaviors | undefined; + namedTupleEntries?: Set | undefined; + typedDictEntries?: TypedDictEntries | undefined; + typedDictExtraItemsExpr?: ExpressionNode | undefined; + localSlotsNames?: string[]; + + // If the class is decorated with a @deprecated decorator, this + // string provides the message to be displayed when the class + // is used. + deprecatedMessage?: string | undefined; + + // A cache of protocol classes (indexed by the class full name) + // that have been determined to be compatible or incompatible + // with this class. 
We use "object" here to avoid a circular dependency. + // It's actually a map of ProtocolCompatibility objects. + protocolCompatibility?: object; + + // Transforms to apply if this class is used as a metaclass + // or a base class. + classDataClassTransform?: DataClassBehaviors | undefined; + + // Indicates that one or more type parameters has an + // autovariance, so variance must be inferred. + requiresVarianceInference?: boolean; + + // A cached value that indicates whether an instance of this class + // is hashable (i.e. does not override "__hash__" with None). + isInstanceHashable?: boolean; + + // Callback for deferred synthesis of methods in symbol table. + synthesizeMethodsDeferred?: () => void; + + // Callback for calculating inherited slots names. + calculateInheritedSlotsNamesDeferred?: () => void; + inheritedSlotsNamesCached?: string[]; +} + +export interface TupleTypeArg { + type: Type; + + // Does the type argument represent a single value or + // an "unbounded" (zero or more) arguments? + isUnbounded: boolean; + + // For tuples captured from a callable, this indicates + // the corresponding positional parameter has a default + // argument and can therefore be omitted. + isOptional?: boolean; +} + +export interface PropertyMethodInfo { + // The decorated function (fget, fset, fdel) for a property + methodType: FunctionType; + + // The class that declared this function + classType: ClassType | undefined; +} + +export interface ClassDetailsPriv { + // A generic class that has been completely or partially + // specialized will have type arguments that correspond to + // some or all of the type parameters. + typeArgs?: Type[] | undefined; + + // If a generic container class (like a list or dict) is known + // to contain no elements, its type arguments may be "Unknown". + // This value allows us to elide the Unknown when it's safe to + // do so. 
+ isEmptyContainer?: boolean | undefined; + + // For tuples, the class definition calls for a single type parameter but + // the spec allows the programmer to provide an arbitrary number of + // type arguments. This field holds the individual type arguments + // while the "typeArgs" field holds the derived non-variadic + // type argument, which is the union of the tuple type arguments. + tupleTypeArgs?: TupleTypeArg[] | undefined; + + // We sometimes package multiple types into a tuple internally + // for matching against a variadic type variable or another unpacked + // tuple. We need to be able to distinguish this case from normal tuples. + isUnpacked?: boolean | undefined; + + // If type arguments are present, were they explicit (i.e. + // provided explicitly in the code)? + isTypeArgExplicit?: boolean | undefined; + + // This class type represents the class and any classes that + // derive from it, as opposed to the original class only. This + // distinction is important in certain scenarios like instantiation + // of abstract or protocol classes. + includeSubclasses?: boolean; + + // This class type represents the class and any auto-promotion + // types that PEP 484 indicates should be treated as subclasses + // when the type appears within a type annotation. + includePromotions?: boolean; + + // Some types can be further constrained to have + // literal types (e.g. true or 'string' or 3). + literalValue?: LiteralValue | undefined; + + // The typing module defines aliases for builtin types + // (e.g. Tuple, List, Dict). This field holds the alias + // name. + aliasName?: string | undefined; + + // Used for "narrowing" of typed dicts where some entries + // that are not required have been confirmed to be present + // through the use of a guard expression. + typedDictNarrowedEntries?: Map | undefined; + + // Indicates that the typed dict class should be considered "partial", + // i.e. 
all of its entries are effectively NotRequired and only + // writable entries are considered present, and they are marked read-only. + // This is used for the TypedDict "update" method. + isTypedDictPartial?: boolean; + + // Indicates whether the class is an asymmetric descriptor + // or property - one where the __get__ and __set__ types differ. + // If undefined, it hasn't been tested yet for asymmetry. + isAsymmetricDescriptor?: boolean; + + // Indicates whether the class has an asymmetric __getattr__ and + // __setattr__ signature. + isAsymmetricAttributeAccessor?: boolean; + + // Special-case fields for property classes. + fgetInfo?: PropertyMethodInfo | undefined; + fsetInfo?: PropertyMethodInfo | undefined; + fdelInfo?: PropertyMethodInfo | undefined; + + // Provides the deprecated message specifically for instances of + // the "deprecated" class. This allows these instances to be used + // as decorators for other classes or functions. + deprecatedInstanceMessage?: string | undefined; + + // Special-case fields for partial class. 
+ partialCallType?: Type | undefined; +} + +export interface ClassType extends TypeBase { + shared: ClassDetailsShared; + priv: ClassDetailsPriv; +} + +export namespace ClassType { + export function createInstantiable( + name: string, + fullName: string, + moduleName: string, + fileUri: Uri, + flags: ClassTypeFlags, + typeSourceId: TypeSourceId, + declaredMetaclass: ClassType | UnknownType | undefined, + effectiveMetaclass: ClassType | UnknownType | undefined, + docString?: string + ) { + const newClass: ClassType = { + category: TypeCategory.Class, + flags: TypeFlags.Instantiable, + props: undefined, + cached: undefined, + shared: { + name, + fullName, + moduleName, + fileUri, + flags, + typeSourceId, + baseClasses: [], + declaredMetaclass, + effectiveMetaclass, + mro: [], + fields: new Map(), + typeParams: [], + docString, + }, + priv: {}, + }; + + return newClass; + } + + export function cloneAsInstance(type: ClassType, includeSubclasses = true): ClassType { + if (TypeBase.isInstance(type)) { + return type; + } + + if (includeSubclasses && type.cached?.typeBaseInstanceType) { + return type.cached.typeBaseInstanceType as ClassType; + } + + const newInstance = TypeBase.cloneTypeAsInstance(type, /* cache */ includeSubclasses); + if (newInstance.props?.specialForm) { + TypeBase.setSpecialForm(newInstance, undefined); + } + + if (includeSubclasses) { + newInstance.priv.includeSubclasses = true; + } + + return newInstance; + } + + export function cloneAsInstantiable(type: ClassType, includeSubclasses = true): ClassType { + if (includeSubclasses && type.cached?.typeBaseInstantiableType) { + return type.cached.typeBaseInstantiableType as ClassType; + } + + const newInstance = TypeBase.cloneTypeAsInstantiable(type, includeSubclasses); + if (includeSubclasses) { + newInstance.priv.includeSubclasses = true; + } + + return newInstance; + } + + export function specialize( + classType: ClassType, + typeArgs: Type[] | undefined, + isTypeArgExplicit?: boolean, + 
includeSubclasses = false, + tupleTypeArgs?: TupleTypeArg[], + isEmptyContainer?: boolean + ): ClassType { + const newClassType = TypeBase.cloneType(classType); + + newClassType.priv.typeArgs = typeArgs?.length === 0 ? undefined : typeArgs; + + // If the user passed undefined for this argument, infer it + // based on whether typeArgs was provided. + if (isTypeArgExplicit === undefined) { + isTypeArgExplicit = !!typeArgs; + } + + newClassType.priv.isTypeArgExplicit = isTypeArgExplicit; + + if (includeSubclasses) { + newClassType.priv.includeSubclasses = true; + } + + newClassType.priv.tupleTypeArgs = tupleTypeArgs ? [...tupleTypeArgs] : undefined; + + if (isEmptyContainer !== undefined) { + newClassType.priv.isEmptyContainer = isEmptyContainer; + } + + return newClassType; + } + + export function cloneIncludeSubclasses(classType: ClassType, includeSubclasses = true) { + if (!!classType.priv.includeSubclasses === includeSubclasses) { + return classType; + } + + const newClassType = TypeBase.cloneType(classType); + newClassType.priv.includeSubclasses = includeSubclasses; + return newClassType; + } + + export function cloneWithLiteral(classType: ClassType, value: LiteralValue | undefined): ClassType { + const newClassType = TypeBase.cloneType(classType); + newClassType.priv.literalValue = value; + + // Remove type alias information because the type will no longer match + // that of the type alias definition if we change the literal type. 
+ if (newClassType.props?.typeAliasInfo) { + TypeBase.setTypeAliasInfo(newClassType, undefined); + } + + return newClassType; + } + + export function cloneForDeprecatedInstance(type: ClassType, deprecatedMessage?: string): ClassType { + const newClassType = TypeBase.cloneType(type); + newClassType.priv.deprecatedInstanceMessage = deprecatedMessage; + return newClassType; + } + + export function cloneForTypingAlias(classType: ClassType, aliasName: string): ClassType { + const newClassType = TypeBase.cloneType(classType); + newClassType.priv.aliasName = aliasName; + return newClassType; + } + + export function cloneForNarrowedTypedDictEntries( + classType: ClassType, + narrowedEntries?: Map + ): ClassType { + const newClassType = TypeBase.cloneType(classType); + newClassType.priv.typedDictNarrowedEntries = narrowedEntries; + return newClassType; + } + + export function cloneForPartialTypedDict(classType: ClassType): ClassType { + const newClassType = TypeBase.cloneType(classType); + newClassType.priv.isTypedDictPartial = true; + return newClassType; + } + + export function cloneRemoveTypePromotions(classType: ClassType): ClassType { + if (!classType.priv.includePromotions) { + return classType; + } + + const newClassType = TypeBase.cloneType(classType); + if (newClassType.priv.includePromotions !== undefined) { + newClassType.priv.includePromotions = undefined; + } + return newClassType; + } + + export function cloneForPartial(classType: ClassType, partialCallType: Type): ClassType { + const newClassType = TypeBase.cloneType(classType); + newClassType.priv.partialCallType = partialCallType; + return newClassType; + } + + export function cloneForUnpacked(classType: ClassType): ClassType { + if (classType.priv.isUnpacked) { + return classType; + } + + const newClassType = TypeBase.cloneType(classType); + newClassType.priv.isUnpacked = true; + return newClassType; + } + + export function cloneForPacked(classType: ClassType): ClassType { + if (!classType.priv.isUnpacked) 
{ + return classType; + } + + const newClassType = TypeBase.cloneType(classType); + newClassType.priv.isUnpacked = false; + return newClassType; + } + + export function cloneWithNewFlags(classType: ClassType, newFlags: ClassTypeFlags): ClassType { + const newClassType = TypeBase.cloneType(classType); + newClassType.shared = { ...newClassType.shared }; + newClassType.shared.flags = newFlags; + return newClassType; + } + + export function isLiteralValueSame(type1: ClassType, type2: ClassType): boolean { + if (type1.priv.literalValue === undefined) { + return type2.priv.literalValue === undefined; + } else if (type2.priv.literalValue === undefined) { + return false; + } + + if (type1.priv.literalValue instanceof EnumLiteral) { + if (type2.priv.literalValue instanceof EnumLiteral) { + return type1.priv.literalValue.itemName === type2.priv.literalValue.itemName; + } + return false; + } + + if (type1.priv.literalValue instanceof SentinelLiteral) { + if (type2.priv.literalValue instanceof SentinelLiteral) { + return type1.priv.literalValue.classFullName === type2.priv.literalValue.classFullName; + } + return false; + } + + return type1.priv.literalValue === type2.priv.literalValue; + } + + // Determines whether two typed dict classes are equivalent given + // that one or both have narrowed entries (i.e. entries that are + // guaranteed to be present). 
+ export function isTypedDictNarrowedEntriesSame(type1: ClassType, type2: ClassType): boolean { + if (type1.priv.typedDictNarrowedEntries) { + if (!type2.priv.typedDictNarrowedEntries) { + return false; + } + + const tdEntries1 = type1.priv.typedDictNarrowedEntries; + const tdEntries2 = type2.priv.typedDictNarrowedEntries; + + if (tdEntries1.size !== tdEntries2.size) { + return false; + } + + let key: string; + let entry1: TypedDictEntry; + for ([key, entry1] of tdEntries1.entries()) { + const entry2 = tdEntries2.get(key); + if (!entry2) { + return false; + } + if (entry1.isProvided !== entry2.isProvided) { + return false; + } + } + } else if (type2.priv.typedDictNarrowedEntries) { + return false; + } + + return true; + } + + // Determines whether typed dict class type1 is a narrower form of type2, + // i.e. all of the "narrowed entries" found within type2 are also found + // within type1. + export function isTypedDictNarrower(type1: ClassType, type2: ClassType): boolean { + const tdEntries2 = type2.priv.typedDictNarrowedEntries; + if (!tdEntries2) { + return true; + } + + const tdEntries1 = type1.priv.typedDictNarrowedEntries ?? new Map(); + + let key: string; + let entry2: TypedDictEntry; + for ([key, entry2] of tdEntries2.entries()) { + if (entry2.isProvided) { + const entry1 = tdEntries1.get(key); + if (!entry1?.isProvided) { + return false; + } + } + } + + return true; + } + + // Is the class generic but not specialized? 
+ export function isUnspecialized(classType: ClassType) { + return classType.shared.typeParams.length > 0 && classType.priv.typeArgs === undefined; + } + + export function isSpecialBuiltIn(classType: ClassType, className?: string) { + if (!(classType.shared.flags & ClassTypeFlags.SpecialBuiltIn) && !classType.priv.aliasName) { + return false; + } + + if (className !== undefined) { + return classType.shared.name === className; + } + + return true; + } + + export function isBuiltIn(classType: ClassType, className?: string | string[]) { + if (!(classType.shared.flags & ClassTypeFlags.BuiltIn)) { + return false; + } + + if (className !== undefined) { + const classArray = Array.isArray(className) ? className : [className]; + return classArray.some( + (name) => + name === classType.shared.name || + name === classType.shared.fullName || + name === classType.priv.aliasName + ); + } + + return true; + } + + export function supportsAbstractMethods(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.SupportsAbstractMethods); + } + + export function isDataClass(classType: ClassType) { + return !!classType.shared.dataClassBehaviors; + } + + export function isDataClassSkipGenerateInit(classType: ClassType) { + return !!classType.shared.dataClassBehaviors?.skipGenerateInit; + } + + export function isDataClassSkipGenerateEq(classType: ClassType) { + return !!classType.shared.dataClassBehaviors?.skipGenerateEq; + } + + export function isDataClassFrozen(classType: ClassType) { + return !!classType.shared.dataClassBehaviors?.frozen; + } + + export function isDataClassGenerateOrder(classType: ClassType) { + return !!classType.shared.dataClassBehaviors?.generateOrder; + } + + export function isDataClassKeywordOnly(classType: ClassType) { + return !!classType.shared.dataClassBehaviors?.keywordOnly; + } + + export function isDataClassGenerateSlots(classType: ClassType) { + return !!classType.shared.dataClassBehaviors?.generateSlots; + } + + export function 
isDataClassGenerateHash(classType: ClassType) { + return !!classType.shared.dataClassBehaviors?.generateHash; + } + + export function isTypeCheckOnly(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.TypeCheckOnly); + } + + export function isNewTypeClass(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.NewTypeClass); + } + + export function isValidTypeAliasClass(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.ValidTypeAliasClass); + } + + export function isSpecialFormClass(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.SpecialFormClass); + } + + export function isIllegalIsinstanceClass(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.IllegalIsinstanceClass); + } + + export function isTypedDictClass(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.TypedDictClass); + } + + export function isCanOmitDictValues(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.CanOmitDictValues); + } + + export function isTypedDictMarkedClosed(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.TypedDictMarkedClosed); + } + + export function isTypedDictEffectivelyClosed(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.TypedDictEffectivelyClosed); + } + + export function isEnumClass(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.EnumClass); + } + + export function isPropertyClass(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.PropertyClass); + } + + export function isClassProperty(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.ClassProperty); + } + + export function isFinal(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.Final); + } + + export function isProtocolClass(classType: ClassType) { + return !!(classType.shared.flags & 
ClassTypeFlags.ProtocolClass); + } + + export function isDefinedInStub(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.DefinedInStub); + } + + export function isPseudoGenericClass(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.PseudoGenericClass); + } + + export function getDataClassEntries(classType: ClassType): DataClassEntry[] { + classType.shared.synthesizeMethodsDeferred?.(); + + return classType.shared.dataClassEntries || []; + } + + export function isRuntimeCheckable(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.RuntimeCheckable); + } + + export function isTypingExtensionClass(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.TypingExtensionClass); + } + + export function isPartiallyEvaluated(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.PartiallyEvaluated); + } + + export function hasCustomClassGetItem(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.HasCustomClassGetItem); + } + + export function isTupleClass(classType: ClassType) { + return !!(classType.shared.flags & ClassTypeFlags.TupleClass); + } + + export function getTypeParams(classType: ClassType) { + return classType.shared.typeParams; + } + + export function derivesFromAnyOrUnknown(classType: ClassType) { + return classType.shared.mro.some((baseClass) => isAnyOrUnknown(baseClass)); + } + + export function getSymbolTable(classType: ClassType) { + classType.shared.synthesizeMethodsDeferred?.(); + + return classType.shared.fields; + } + + export function getInheritedSlotsNames(classType: ClassType) { + // First synthesize methods if needed. The slots entries + // can depend on synthesized methods. 
+ classType.shared.synthesizeMethodsDeferred?.(); + + classType.shared.calculateInheritedSlotsNamesDeferred?.(); + + return classType.shared.inheritedSlotsNamesCached; + } + + // Similar to isPartiallyEvaluated except that it also looks at all of the + // classes in the MRO list for this class to see if any of them are still + // partially evaluated. + export function isHierarchyPartiallyEvaluated(classType: ClassType) { + return ( + ClassType.isPartiallyEvaluated(classType) || + classType.shared.mro.some((mroClass) => isClass(mroClass) && ClassType.isPartiallyEvaluated(mroClass)) + ); + } + + export function hasNamedTupleEntry(classType: ClassType, name: string): boolean { + if (!classType.shared.namedTupleEntries) { + return false; + } + + return classType.shared.namedTupleEntries.has(name); + } + + // Same as isTypeSame except that it doesn't compare type arguments. + export function isSameGenericClass(classType: ClassType, type2: ClassType, recursionCount = 0) { + if (!classType.priv.isTypedDictPartial !== !type2.priv.isTypedDictPartial) { + return false; + } + + if (TypeBase.isInstance(classType) !== TypeBase.isInstance(type2)) { + return false; + } + + if (TypeBase.getInstantiableDepth(classType) !== TypeBase.getInstantiableDepth(type2)) { + return false; + } + + const class1Details = classType.shared; + const class2Details = type2.shared; + + if (class1Details === class2Details) { + return true; + } + + // Compare most of the details fields. We intentionally skip the isAbstractClass + // flag because it gets set dynamically. 
+ if ( + class1Details.fullName !== class2Details.fullName || + class1Details.flags !== class2Details.flags || + class1Details.typeSourceId !== class2Details.typeSourceId || + class1Details.baseClasses.length !== class2Details.baseClasses.length || + class1Details.typeParams.length !== class2Details.typeParams.length + ) { + return false; + } + + if (recursionCount > maxTypeRecursionCount) { + return true; + } + recursionCount++; + + // Special-case NamedTuple and Tuple classes because we rewrite the base classes + // in these cases. + if (ClassType.isBuiltIn(classType, 'NamedTuple') && ClassType.isBuiltIn(type2, 'NamedTuple')) { + return true; + } + if (ClassType.isBuiltIn(classType, 'tuple') && ClassType.isBuiltIn(type2, 'tuple')) { + return true; + } + + // Make sure the base classes match. + for (let i = 0; i < class1Details.baseClasses.length; i++) { + if ( + !isTypeSame( + class1Details.baseClasses[i], + class2Details.baseClasses[i], + { ignorePseudoGeneric: true }, + recursionCount + ) + ) { + return false; + } + } + + if (class1Details.declaredMetaclass || class2Details.declaredMetaclass) { + if ( + !class1Details.declaredMetaclass || + !class2Details.declaredMetaclass || + !isTypeSame( + class1Details.declaredMetaclass, + class2Details.declaredMetaclass, + { ignorePseudoGeneric: true }, + recursionCount + ) + ) { + return false; + } + } + + for (let i = 0; i < class1Details.typeParams.length; i++) { + if ( + !isTypeSame( + class1Details.typeParams[i], + class2Details.typeParams[i], + { ignorePseudoGeneric: true }, + recursionCount + ) + ) { + return false; + } + } + + return true; + } + + // Determines whether this is a subclass (derived class) + // of the specified class. If the caller passes an empty + // array to inheritanceChain, it will be filled in by + // the call to include the chain of inherited classes starting + // with type2 and ending with this type. 
+ export function isDerivedFrom( + subclassType: ClassType, + parentClassType: ClassType, + inheritanceChain?: InheritanceChain + ): boolean { + // Is it the exact same class? + if (isSameGenericClass(subclassType, parentClassType)) { + // Handle literal types. + if (parentClassType.priv.literalValue !== undefined) { + if ( + subclassType.priv.literalValue === undefined || + !ClassType.isLiteralValueSame(parentClassType, subclassType) + ) { + return false; + } + } + + if (inheritanceChain) { + inheritanceChain.push(subclassType); + } + return true; + } + + // Handle built-in types like 'dict' and 'list', which are all + // subclasses of object even though they are not explicitly declared + // that way. + if (isBuiltIn(subclassType) && isBuiltIn(parentClassType, 'object')) { + if (inheritanceChain) { + inheritanceChain.push(parentClassType); + } + return true; + } + + // Handle the case where the subclass is a type[type[T]] and the parent + // class is type. + const subclassDepth = TypeBase.getInstantiableDepth(subclassType); + if (subclassDepth > 0) { + if (isBuiltIn(parentClassType, 'type') && TypeBase.getInstantiableDepth(parentClassType) < subclassDepth) { + if (inheritanceChain) { + inheritanceChain.push(parentClassType); + } + return true; + } + } + + // Handle the case where both source and dest are property objects. This + // special case is needed because we synthesize a new class for each + // property declaration. 
+ if (ClassType.isBuiltIn(subclassType, 'property') && ClassType.isBuiltIn(parentClassType, 'property')) { + if (inheritanceChain) { + inheritanceChain.push(subclassType); + } + return true; + } + + for (const baseClass of subclassType.shared.baseClasses) { + if (isInstantiableClass(baseClass)) { + if (isDerivedFrom(baseClass, parentClassType, inheritanceChain)) { + if (inheritanceChain) { + inheritanceChain.push(subclassType); + } + return true; + } + } else if (isAnyOrUnknown(baseClass)) { + if (inheritanceChain) { + inheritanceChain.push(UnknownType.create()); + } + return true; + } + } + + return false; + } + + export function getReverseMro(classType: ClassType): (ClassType | UnknownType | AnyType)[] { + return classType.shared.mro.slice(0).reverse(); + } +} + +export enum FunctionParamFlags { + None = 0, + + // Is the name of the parameter synthesize internally? + NameSynthesized = 1 << 0, + + // Does the parameter have an explicitly-declared type? + TypeDeclared = 1 << 1, + + // Is the type of the parameter inferred? + TypeInferred = 1 << 2, +} + +export interface FunctionParam { + category: ParamCategory; + flags: FunctionParamFlags; + name: string | undefined; + + // Use getParamType to access this field. + // eslint-disable-next-line @typescript-eslint/naming-convention + _type: Type; + + // Use getParamDefaultType to access this field. 
+ // eslint-disable-next-line @typescript-eslint/naming-convention + _defaultType: Type | undefined; + + defaultExpr: ExpressionNode | undefined; +} + +export namespace FunctionParam { + export function create( + category: ParamCategory, + type: Type, + flags = FunctionParamFlags.None, + name?: string, + defaultType?: Type, + defaultExpr?: ExpressionNode + ): FunctionParam { + return { category, flags, name, _type: type, _defaultType: defaultType, defaultExpr }; + } + + export function isNameSynthesized(param: FunctionParam) { + return !!(param.flags & FunctionParamFlags.NameSynthesized); + } + + export function isTypeDeclared(param: FunctionParam) { + return !!(param.flags & FunctionParamFlags.TypeDeclared); + } + + export function isTypeInferred(param: FunctionParam) { + return !!(param.flags & FunctionParamFlags.TypeInferred); + } +} + +export function isPositionOnlySeparator(param: FunctionParam) { + // A simple parameter with no name is treated as a "/" separator. + return param.category === ParamCategory.Simple && !param.name; +} + +export function isKeywordOnlySeparator(param: FunctionParam) { + // An *args parameter with no name is treated as a "*" separator. 
+ return param.category === ParamCategory.ArgsList && !param.name; +} + +export const enum FunctionTypeFlags { + None = 0, + + // Function is a __new__ method; first parameter is "cls" + ConstructorMethod = 1 << 0, + + // Function is decorated with @classmethod; first parameter is "cls"; + // can be bound to associated class + ClassMethod = 1 << 1, + + // Function is decorated with @staticmethod; cannot be bound to class + StaticMethod = 1 << 2, + + // Function is decorated with @abstractmethod + AbstractMethod = 1 << 3, + + // Function contains "yield" or "yield from" statements + Generator = 1 << 4, + + // Skip check that validates that all parameters without default + // value expressions have corresponding arguments; used for + // named tuples in some cases + DisableDefaultChecks = 1 << 5, + + // Method has no declaration in user code, it's synthesized; used + // for implied methods such as those used in namedtuple, dataclass, etc. + SynthesizedMethod = 1 << 6, + + // Decorated with @type_check_only. + TypeCheckOnly = 1 << 7, + + // Function is decorated with @overload + Overloaded = 1 << 8, + + // Function is declared with async keyword + Async = 1 << 9, + + // Function is declared within a type stub fille + StubDefinition = 1 << 11, + + // Function is declared within a module that claims to be fully typed + // (i.e. a "py.typed" file is present). + PyTypedDefinition = 1 << 12, + + // Function is decorated with @final + Final = 1 << 13, + + // Function has one or more parameters that are missing type annotations + UnannotatedParams = 1 << 14, + + // The *args and **kwargs parameters do not need to be present for this + // function to be compatible. This is used for Callable[..., x] and + // ... type arguments to ParamSpec and Concatenate. + GradualCallableForm = 1 << 15, + + // This function represents the value bound to a ParamSpec, so its return + // type is not meaningful. 
+ ParamSpecValue = 1 << 16, + + // The function type is in the process of being evaluated and + // is not yet complete. This allows us to detect cases where + // the function refers to itself (e.g. uses a type annotation + // that contains a forward reference that requires the function + // type itself to be evaluated first). + PartiallyEvaluated = 1 << 17, + + // Decorated with @override as defined in PEP 698. + Overridden = 1 << 18, + + // Decorated with @no_type_check. + NoTypeCheck = 1 << 19, + + // Function defined in one of the core stdlib modules. + BuiltIn = 1 << 20, +} + +interface FunctionDetailsShared { + name: string; + fullName: string; + moduleName: string; + flags: FunctionTypeFlags; + typeParams: TypeVarType[]; + parameters: FunctionParam[]; + declaredReturnType: Type | undefined; + declaration: FunctionDeclaration | undefined; + typeVarScopeId: TypeVarScopeId | undefined; + docString: string | undefined; + deprecatedMessage: string | undefined; + + // If this is a method, this refers to the class that contains it. + methodClass: ClassType | undefined; + + // Transforms to apply if this function is used + // as a decorator. + decoratorDataClassBehaviors: DataClassBehaviors | undefined; + + // Inferred return type. Filled in lazily. + inferredReturnType?: { + type: Type; + isIncomplete?: boolean; + evaluationCount?: number; + }; +} + +export interface SpecializedFunctionTypes { + // Specialized types for each of the parameters in the "parameters" array. + parameterTypes: Type[]; + + // Specialized types of default arguments for each parameter in + // the "parameters" array. If an entry is undefined or the entire array + // is missing, there is no specialized type, and the original "defaultType" + // should be used. + parameterDefaultTypes: (Type | undefined)[] | undefined; + + // Specialized type of the declared return type. Undefined if there is + // no declared return type. 
+ returnType: Type | undefined; +} + +export interface CallSiteInferenceTypeCacheEntry { + paramTypes: Type[]; + returnType: Type; +} + +export interface SignatureWithOffsets { + type: FunctionType | OverloadedType; + expressionOffsets: number[]; +} + +export interface FunctionDetailsPriv { + // For __new__ and __init__ methods, the TypeVar scope ID of the + // associated class. + constructorTypeVarScopeId?: TypeVarScopeId | undefined; + + // A function type can be specialized (i.e. generic type + // variables replaced by a concrete type). + specializedTypes?: SpecializedFunctionTypes | undefined; + + // Call-site return type inference cache. + callSiteReturnTypeCache?: CallSiteInferenceTypeCacheEntry[]; + + // If this is a bound function where the first parameter + // was stripped from the original unbound function, the + // (specialized) type of that stripped parameter. + strippedFirstParamType?: Type | undefined; + + // If this is a bound function where the first parameter + // was stripped from the original unbound function, + // the class or object to which the function was bound. + boundToType?: ClassType | undefined; + + // The flags for the function prior to binding + preBoundFlags?: FunctionTypeFlags; + + // If this function is part of an overloaded function, this + // refers back to the overloaded function type. + overloaded?: OverloadedType; + + // If this function is created with a "Callable" annotation with + // type arguments? This allows us to detect and report an error + // when this is used in an isinstance call. 
+ isCallableWithTypeArgs?: boolean; +} + +export interface FunctionType extends TypeBase { + shared: FunctionDetailsShared; + priv: FunctionDetailsPriv; +} + +export namespace FunctionType { + export function createInstance( + name: string, + fullName: string, + moduleName: string, + functionFlags: FunctionTypeFlags, + docString?: string + ) { + return create(name, fullName, moduleName, functionFlags, TypeFlags.Instance, docString); + } + + export function createInstantiable(functionFlags: FunctionTypeFlags, docString?: string) { + return create('', '', '', functionFlags, TypeFlags.Instantiable, docString); + } + + export function createSynthesizedInstance(name: string, additionalFlags = FunctionTypeFlags.None) { + return create(name, name, '', additionalFlags | FunctionTypeFlags.SynthesizedMethod, TypeFlags.Instance); + } + + function create( + name: string, + fullName: string, + moduleName: string, + functionFlags: FunctionTypeFlags, + typeFlags: TypeFlags, + docString?: string + ) { + const newFunctionType: FunctionType = { + category: TypeCategory.Function, + flags: typeFlags, + props: undefined, + cached: undefined, + shared: { + name, + fullName, + moduleName, + flags: functionFlags, + typeParams: [], + parameters: [], + declaredReturnType: undefined, + declaration: undefined, + typeVarScopeId: undefined, + docString, + deprecatedMessage: undefined, + methodClass: undefined, + decoratorDataClassBehaviors: undefined, + }, + priv: {}, + }; + return newFunctionType; + } + + // Creates a deep copy of the function type, including a fresh + // version of _functionDetails. 
+ export function clone(type: FunctionType, stripFirstParam = false, boundToType?: ClassType): FunctionType { + const newFunction = TypeBase.cloneType(type); + + newFunction.shared = { ...type.shared }; + newFunction.priv.preBoundFlags = newFunction.shared.flags; + newFunction.priv.boundToType = boundToType; + + if (boundToType) { + if (type.shared.name === '__new__' || type.shared.name === '__init__') { + newFunction.priv.constructorTypeVarScopeId = boundToType.shared.typeVarScopeId; + } + } + + if (stripFirstParam) { + if (type.shared.parameters.length > 0) { + if (type.shared.parameters[0].category === ParamCategory.Simple) { + if (type.shared.parameters.length > 0) { + // Stash away the effective type of the first parameter or + // Any if it was inferred. + newFunction.priv.strippedFirstParamType = FunctionParam.isTypeInferred( + type.shared.parameters[0] + ) + ? AnyType.create() + : getParamType(type, 0); + } + newFunction.shared.parameters = type.shared.parameters.slice(1); + } + } else { + stripFirstParam = false; + } + } + + if (type.props?.typeAliasInfo) { + TypeBase.setTypeAliasInfo(newFunction, type.props.typeAliasInfo); + } + + if (type.priv.specializedTypes) { + newFunction.priv.specializedTypes = { + parameterTypes: stripFirstParam + ? type.priv.specializedTypes.parameterTypes.slice(1) + : type.priv.specializedTypes.parameterTypes, + parameterDefaultTypes: stripFirstParam + ? 
type.priv.specializedTypes.parameterDefaultTypes?.slice(1) + : type.priv.specializedTypes.parameterDefaultTypes, + returnType: type.priv.specializedTypes.returnType, + }; + } + + newFunction.shared.inferredReturnType = type.shared.inferredReturnType; + + return newFunction; + } + + export function cloneAsInstance(type: FunctionType): FunctionType { + if (type.cached?.typeBaseInstanceType) { + return type.cached.typeBaseInstanceType as FunctionType; + } + + const newInstance = TypeBase.cloneTypeAsInstance(type, /* cache */ true); + if (newInstance.props?.specialForm) { + TypeBase.setSpecialForm(newInstance, undefined); + } + return newInstance; + } + + export function cloneAsInstantiable(type: FunctionType): FunctionType { + if (type.cached?.typeBaseInstantiableType) { + return type.cached.typeBaseInstantiableType as FunctionType; + } + + const newInstance = TypeBase.cloneTypeAsInstantiable(type, /* cache */ true); + return newInstance; + } + + // Creates a shallow copy of the function type with new + // specialized types. The clone shares the _functionDetails + // with the object being cloned. + export function specialize(type: FunctionType, specializedTypes: SpecializedFunctionTypes): FunctionType { + const newFunction = TypeBase.cloneType(type); + + assert(specializedTypes.parameterTypes.length === type.shared.parameters.length); + if (specializedTypes.parameterDefaultTypes) { + assert(specializedTypes.parameterDefaultTypes.length === type.shared.parameters.length); + } + + newFunction.priv.specializedTypes = specializedTypes; + return newFunction; + } + + // Creates a new function based on the parameters of another function. 
+ export function applyParamSpecValue(type: FunctionType, paramSpecValue: FunctionType): FunctionType { + const hasPositionalOnly = paramSpecValue.shared.parameters.some((param) => isPositionOnlySeparator(param)); + const newFunction = FunctionType.cloneRemoveParamSpecArgsKwargs(TypeBase.cloneType(type), hasPositionalOnly); + const paramSpec = FunctionType.getParamSpecFromArgsKwargs(type); + assert(paramSpec !== undefined); + + // Make a shallow clone of the details. + newFunction.shared = { ...newFunction.shared }; + + newFunction.shared.typeParams = newFunction.shared.typeParams.filter((t) => !isTypeSame(t, paramSpec)); + + const prevParams = Array.from(newFunction.shared.parameters); + + newFunction.shared.parameters = [ + ...prevParams, + ...paramSpecValue.shared.parameters.map((param, index) => { + return FunctionParam.create( + param.category, + FunctionType.getParamType(paramSpecValue, index), + (param.flags & FunctionParamFlags.NameSynthesized) | FunctionParamFlags.TypeDeclared, + param.name, + FunctionType.getParamDefaultType(paramSpecValue, index), + param.defaultExpr + ); + }), + ]; + + if (newFunction.shared.docString === undefined) { + newFunction.shared.docString = paramSpecValue.shared.docString; + } + + if (newFunction.shared.deprecatedMessage === undefined) { + newFunction.shared.deprecatedMessage = paramSpecValue.shared.deprecatedMessage; + } + + const origFlagsMask = FunctionTypeFlags.Overloaded | FunctionTypeFlags.ParamSpecValue; + newFunction.shared.flags = type.shared.flags & origFlagsMask; + + const methodFlagsMask = + FunctionTypeFlags.ClassMethod | FunctionTypeFlags.StaticMethod | FunctionTypeFlags.ConstructorMethod; + + // If the original function was a method, use its method type. Otherwise + // use the method type of the param spec. 
+ if (type.shared.methodClass) { + newFunction.shared.flags |= type.shared.flags & methodFlagsMask; + } else { + newFunction.shared.flags |= paramSpecValue.shared.flags & methodFlagsMask; + } + + // Use the "..." flag from the param spec. + newFunction.shared.flags |= paramSpecValue.shared.flags & FunctionTypeFlags.GradualCallableForm; + + // Mark the function as synthesized since there is no user-defined declaration for it. + newFunction.shared.flags |= FunctionTypeFlags.SynthesizedMethod; + if (newFunction.shared.declaration) { + newFunction.shared.declaration = undefined; + } + + // Update the specialized parameter types as well. + const specializedTypes = newFunction.priv.specializedTypes; + if (specializedTypes) { + paramSpecValue.shared.parameters.forEach((_, index) => { + specializedTypes.parameterTypes.push(FunctionType.getParamType(paramSpecValue, index)); + + if (specializedTypes.parameterDefaultTypes) { + specializedTypes.parameterDefaultTypes?.push( + FunctionType.getParamDefaultType(paramSpecValue, index) + ); + } + }); + } + + newFunction.priv.constructorTypeVarScopeId = paramSpecValue.priv.constructorTypeVarScopeId; + + if (!newFunction.shared.methodClass && paramSpecValue.shared.methodClass) { + newFunction.shared.methodClass = paramSpecValue.shared.methodClass; + } + + return newFunction; + } + + export function cloneWithNewFlags(type: FunctionType, flags: FunctionTypeFlags): FunctionType { + const newFunction = TypeBase.cloneType(type); + + // Make a shallow clone of the details. + newFunction.shared = { ...type.shared }; + newFunction.shared.flags = flags; + + return newFunction; + } + + export function cloneWithNewTypeVarScopeId( + type: FunctionType, + newScopeId: TypeVarScopeId | undefined, + newConstructorScopeId: TypeVarScopeId | undefined, + typeParams: TypeVarType[] + ): FunctionType { + const newFunction = TypeBase.cloneType(type); + + // Make a shallow clone of the details. 
+ newFunction.shared = { ...type.shared }; + newFunction.shared.typeVarScopeId = newScopeId; + newFunction.priv.constructorTypeVarScopeId = newConstructorScopeId; + newFunction.shared.typeParams = typeParams; + + return newFunction; + } + + export function cloneWithDocString(type: FunctionType, docString?: string): FunctionType { + const newFunction = TypeBase.cloneType(type); + + // Make a shallow clone of the details. + newFunction.shared = { ...type.shared }; + + newFunction.shared.docString = docString; + + return newFunction; + } + + export function cloneWithDeprecatedMessage(type: FunctionType, deprecatedMessage?: string): FunctionType { + const newFunction = TypeBase.cloneType(type); + + // Make a shallow clone of the details. + newFunction.shared = { ...type.shared }; + + newFunction.shared.deprecatedMessage = deprecatedMessage; + + return newFunction; + } + + // If the function ends with "*args: P.args, **kwargs: P.kwargs", this function + // returns a new function that is a clone of the input function with the + // *args and **kwargs parameters removed. If stripPositionOnlySeparator is true, + // a trailing positional-only separator will be removed. 
+ export function cloneRemoveParamSpecArgsKwargs( + type: FunctionType, + stripPositionOnlySeparator = false + ): FunctionType { + const paramCount = type.shared.parameters.length; + if (paramCount < 2) { + return type; + } + + const argsParam = type.shared.parameters[paramCount - 2]; + const kwargsParam = type.shared.parameters[paramCount - 1]; + + if (argsParam.category !== ParamCategory.ArgsList || kwargsParam.category !== ParamCategory.KwargsDict) { + return type; + } + + const argsType = FunctionType.getParamType(type, paramCount - 2); + const kwargsType = FunctionType.getParamType(type, paramCount - 1); + if (!isParamSpec(argsType) || !isParamSpec(kwargsType) || !isTypeSame(argsType, kwargsType)) { + return type; + } + + const newFunction = TypeBase.cloneType(type); + + // Make a shallow clone of the details. + newFunction.shared = { ...type.shared }; + const details = newFunction.shared; + + let paramsToDrop = 2; + + // If the last remaining parameter is a position-only separator, remove it as well. + // Always remove it if it's the only remaining parameter. + if (paramCount >= 3 && isPositionOnlySeparator(details.parameters[paramCount - 3])) { + if (paramCount === 3 || stripPositionOnlySeparator) { + paramsToDrop = 3; + } + } + + // Remove the last parameters, which are the *args and **kwargs. 
+ details.parameters = details.parameters.slice(0, details.parameters.length - paramsToDrop); + + if (type.priv.specializedTypes) { + newFunction.priv.specializedTypes = { ...type.priv.specializedTypes }; + newFunction.priv.specializedTypes.parameterTypes = newFunction.priv.specializedTypes.parameterTypes.slice( + 0, + newFunction.priv.specializedTypes.parameterTypes.length - paramsToDrop + ); + if (newFunction.priv.specializedTypes.parameterDefaultTypes) { + newFunction.priv.specializedTypes.parameterDefaultTypes = + newFunction.priv.specializedTypes.parameterDefaultTypes.slice( + 0, + newFunction.priv.specializedTypes.parameterDefaultTypes.length - paramsToDrop + ); + } + } + + if (type.shared.inferredReturnType) { + newFunction.shared.inferredReturnType = type.shared.inferredReturnType; + } + + return newFunction; + } + + // If the function ends with "*args: P.args, **kwargs: P.kwargs", this function + // returns P. Otherwise, it returns undefined. + export function getParamSpecFromArgsKwargs(type: FunctionType): ParamSpecType | undefined { + const params = type.shared.parameters; + if (params.length < 2) { + return undefined; + } + + const secondLastParam = params[params.length - 2]; + const secondLastParamType = FunctionType.getParamType(type, params.length - 2); + const lastParam = params[params.length - 1]; + const lastParamType = FunctionType.getParamType(type, params.length - 1); + + if ( + secondLastParam.category === ParamCategory.ArgsList && + isParamSpec(secondLastParamType) && + secondLastParamType.priv.paramSpecAccess === 'args' && + lastParam.category === ParamCategory.KwargsDict && + isParamSpec(lastParamType) && + lastParamType.priv.paramSpecAccess === 'kwargs' + ) { + return TypeVarType.cloneForParamSpecAccess(secondLastParamType, /* access */ undefined); + } + + return undefined; + } + + export function addParamSpecVariadics(type: FunctionType, paramSpec: ParamSpecType) { + FunctionType.addParam( + type, + FunctionParam.create( + 
ParamCategory.ArgsList, + TypeVarType.cloneForParamSpecAccess(paramSpec, 'args'), + FunctionParamFlags.TypeDeclared, + 'args' + ) + ); + + FunctionType.addParam( + type, + FunctionParam.create( + ParamCategory.KwargsDict, + TypeVarType.cloneForParamSpecAccess(paramSpec, 'kwargs'), + FunctionParamFlags.TypeDeclared, + 'kwargs' + ) + ); + } + + export function addDefaultParams(type: FunctionType, useUnknown = false) { + getDefaultParams(useUnknown).forEach((param) => { + FunctionType.addParam(type, param); + }); + } + + export function getDefaultParams(useUnknown = false): FunctionParam[] { + return [ + FunctionParam.create( + ParamCategory.ArgsList, + useUnknown ? UnknownType.create() : AnyType.create(), + useUnknown ? FunctionParamFlags.None : FunctionParamFlags.TypeDeclared, + 'args' + ), + FunctionParam.create( + ParamCategory.KwargsDict, + useUnknown ? UnknownType.create() : AnyType.create(), + useUnknown ? FunctionParamFlags.None : FunctionParamFlags.TypeDeclared, + 'kwargs' + ), + ]; + } + + // Indicates whether the input signature consists of (*args: Any, **kwargs: Any). + export function hasDefaultParams(functionType: FunctionType): boolean { + let sawArgs = false; + let sawKwargs = false; + + for (let i = 0; i < functionType.shared.parameters.length; i++) { + const param = functionType.shared.parameters[i]; + + // Ignore nameless separator parameters. 
+ if (!param.name) { + continue; + } + + if (param.category === ParamCategory.Simple) { + return false; + } else if (param.category === ParamCategory.ArgsList) { + sawArgs = true; + } else if (param.category === ParamCategory.KwargsDict) { + sawKwargs = true; + } + + if (!isAnyOrUnknown(FunctionType.getParamType(functionType, i))) { + return false; + } + } + + return sawArgs && sawKwargs; + } + + export function isInstanceMethod(type: FunctionType): boolean { + return ( + (type.shared.flags & + (FunctionTypeFlags.ConstructorMethod | + FunctionTypeFlags.StaticMethod | + FunctionTypeFlags.ClassMethod)) === + 0 + ); + } + + export function isConstructorMethod(type: FunctionType): boolean { + return (type.shared.flags & FunctionTypeFlags.ConstructorMethod) !== 0; + } + + export function isStaticMethod(type: FunctionType): boolean { + return (type.shared.flags & FunctionTypeFlags.StaticMethod) !== 0; + } + + export function isClassMethod(type: FunctionType): boolean { + return (type.shared.flags & FunctionTypeFlags.ClassMethod) !== 0; + } + + export function isAbstractMethod(type: FunctionType): boolean { + return (type.shared.flags & FunctionTypeFlags.AbstractMethod) !== 0; + } + + export function isGenerator(type: FunctionType): boolean { + return (type.shared.flags & FunctionTypeFlags.Generator) !== 0; + } + + export function isSynthesizedMethod(type: FunctionType): boolean { + return (type.shared.flags & FunctionTypeFlags.SynthesizedMethod) !== 0; + } + + export function isTypeCheckOnly(type: FunctionType): boolean { + return (type.shared.flags & FunctionTypeFlags.TypeCheckOnly) !== 0; + } + + export function isOverloaded(type: FunctionType): boolean { + return (type.shared.flags & FunctionTypeFlags.Overloaded) !== 0; + } + + export function isDefaultParamCheckDisabled(type: FunctionType) { + return (type.shared.flags & FunctionTypeFlags.DisableDefaultChecks) !== 0; + } + + export function isAsync(type: FunctionType) { + return (type.shared.flags & 
FunctionTypeFlags.Async) !== 0; + } + + export function isStubDefinition(type: FunctionType) { + return (type.shared.flags & FunctionTypeFlags.StubDefinition) !== 0; + } + + export function isPyTypedDefinition(type: FunctionType) { + return (type.shared.flags & FunctionTypeFlags.PyTypedDefinition) !== 0; + } + + export function isFinal(type: FunctionType) { + return (type.shared.flags & FunctionTypeFlags.Final) !== 0; + } + + export function hasUnannotatedParams(type: FunctionType) { + return (type.shared.flags & FunctionTypeFlags.UnannotatedParams) !== 0; + } + + export function isGradualCallableForm(type: FunctionType) { + return (type.shared.flags & FunctionTypeFlags.GradualCallableForm) !== 0; + } + + export function isParamSpecValue(type: FunctionType) { + return (type.shared.flags & FunctionTypeFlags.ParamSpecValue) !== 0; + } + + export function isPartiallyEvaluated(type: FunctionType) { + return !!(type.shared.flags & FunctionTypeFlags.PartiallyEvaluated); + } + + export function isOverridden(type: FunctionType) { + return !!(type.shared.flags & FunctionTypeFlags.Overridden); + } + + export function isBuiltIn(type: FunctionType, name?: string | string[]) { + if (!(type.shared.flags & FunctionTypeFlags.BuiltIn)) { + return false; + } + + if (name !== undefined) { + const functionArray = Array.isArray(name) ? 
name : [name]; + return functionArray.some((name) => name === type.shared.name || name === type.shared.fullName); + } + + return true; + } + + export function getDeclaredParamType(type: FunctionType, index: number): Type { + return type.shared.parameters[index]._type; + } + + export function getParamType(type: FunctionType, index: number): Type { + assert(index < type.shared.parameters.length, 'Parameter types array overflow'); + + if (type.priv.specializedTypes && index < type.priv.specializedTypes.parameterTypes.length) { + return type.priv.specializedTypes.parameterTypes[index]; + } + + return type.shared.parameters[index]._type; + } + + export function getParamDefaultType(type: FunctionType, index: number): Type | undefined { + assert(index < type.shared.parameters.length, 'Parameter types array overflow'); + + if ( + type.priv.specializedTypes?.parameterDefaultTypes && + index < type.priv.specializedTypes.parameterDefaultTypes.length + ) { + const defaultArgType = type.priv.specializedTypes.parameterDefaultTypes[index]; + if (defaultArgType) { + return defaultArgType; + } + } + + return type.shared.parameters[index]._defaultType; + } + + export function addParam(type: FunctionType, param: FunctionParam) { + type.shared.parameters.push(param); + + if (type.priv.specializedTypes) { + type.priv.specializedTypes.parameterTypes.push(param._type); + } + } + + export function addPositionOnlyParamSeparator(type: FunctionType) { + addParam(type, FunctionParam.create(ParamCategory.Simple, AnyType.create())); + } + + export function addKeywordOnlyParamSeparator(type: FunctionType) { + addParam(type, FunctionParam.create(ParamCategory.ArgsList, AnyType.create())); + } + + export function getEffectiveReturnType(type: FunctionType, includeInferred = true): Type | undefined { + if (type.priv.specializedTypes?.returnType) { + return type.priv.specializedTypes.returnType; + } + + if (type.shared.declaredReturnType) { + return type.shared.declaredReturnType; + } + + if 
(includeInferred) { + return type.shared.inferredReturnType?.type; + } + + return undefined; + } +} + +export interface OverloadedDetailsPriv { + // eslint-disable-next-line @typescript-eslint/naming-convention + _overloads: FunctionType[]; + + // eslint-disable-next-line @typescript-eslint/naming-convention + _implementation: Type | undefined; +} + +export interface OverloadedType extends TypeBase { + priv: OverloadedDetailsPriv; +} + +export namespace OverloadedType { + export function create(overloads: FunctionType[], implementation?: Type): OverloadedType { + const newType: OverloadedType = { + category: TypeCategory.Overloaded, + flags: TypeFlags.Instance, + props: undefined, + cached: undefined, + shared: undefined, + priv: { + _overloads: [], + _implementation: implementation, + }, + }; + + overloads.forEach((overload) => { + OverloadedType.addOverload(newType, overload); + }); + + if (implementation && isFunction(implementation)) { + implementation.priv.overloaded = newType; + } + + return newType; + } + + // Adds a new overload or an implementation. 
+ export function addOverload(type: OverloadedType, functionType: FunctionType) { + functionType.priv.overloaded = type; + type.priv._overloads.push(functionType); + } + + export function getOverloads(type: OverloadedType): FunctionType[] { + return type.priv._overloads; + } + + export function getImplementation(type: OverloadedType): Type | undefined { + return type.priv._implementation; + } +} + +export interface NeverDetailsPriv { + isNoReturn: boolean; +} + +export interface NeverType extends TypeBase { + priv: NeverDetailsPriv; +} + +export namespace NeverType { + const _neverInstance: NeverType = { + category: TypeCategory.Never, + flags: TypeFlags.Instance | TypeFlags.Instantiable, + props: undefined, + cached: undefined, + shared: undefined, + priv: { isNoReturn: false }, + }; + + const _noReturnInstance: NeverType = { + category: TypeCategory.Never, + flags: TypeFlags.Instance | TypeFlags.Instantiable, + props: undefined, + cached: undefined, + shared: undefined, + priv: { isNoReturn: true }, + }; + + export function createNever() { + return _neverInstance; + } + + export function createNoReturn() { + return _noReturnInstance; + } + + export function convertToInstance(type: NeverType): NeverType { + // Remove the specialForm or typeForm if present. Otherwise return the existing type. + if (!type.props?.specialForm && !type.props?.typeForm) { + return type; + } + + return type.priv.isNoReturn ? 
NeverType.createNoReturn() : NeverType.createNever(); + } +} + +export interface AnyDetailsPriv { + isEllipsis: boolean; +} + +export interface AnyType extends TypeBase { + priv: AnyDetailsPriv; +} + +export namespace AnyType { + const _anyInstanceSpecialForm: AnyType = { + category: TypeCategory.Any, + flags: TypeFlags.Instance | TypeFlags.Instantiable, + props: undefined, + cached: undefined, + shared: undefined, + priv: { isEllipsis: false }, + }; + + const _anyInstance: AnyType = { + category: TypeCategory.Any, + flags: TypeFlags.Instance | TypeFlags.Instantiable, + props: undefined, + cached: undefined, + shared: undefined, + priv: { isEllipsis: false }, + }; + + const _ellipsisInstance: AnyType = { + category: TypeCategory.Any, + flags: TypeFlags.Instance | TypeFlags.Instantiable, + props: undefined, + cached: undefined, + shared: undefined, + priv: { isEllipsis: true }, + }; + + export function create(isEllipsis = false) { + return isEllipsis ? _ellipsisInstance : _anyInstance; + } + + export function createSpecialForm() { + return _anyInstanceSpecialForm; + } +} + +export namespace AnyType { + export function convertToInstance(type: AnyType): AnyType { + // Remove the "special form" if present. Otherwise return the existing type. + return type.props?.specialForm ? AnyType.create() : type; + } +} + +// References a single condition associated with a constrained TypeVar. +export interface TypeCondition { + typeVar: TypeVarType; + constraintIndex: number; +} + +export namespace TypeCondition { + export function combine( + conditions1: TypeCondition[] | undefined, + conditions2: TypeCondition[] | undefined + ): TypeCondition[] | undefined { + if (!conditions1) { + return conditions2; + } + + if (!conditions2) { + return conditions1; + } + + // Deduplicate the lists. 
+ const combined = Array.from(conditions1); + conditions2.forEach((c1) => { + if (!combined.some((c2) => _compare(c1, c2) === 0)) { + combined.push(c1); + } + }); + + // Always keep the conditions sorted for easier comparison. + return combined.sort(_compare); + } + + function _compare(c1: TypeCondition, c2: TypeCondition) { + if (c1.typeVar.shared.name < c2.typeVar.shared.name) { + return -1; + } else if (c1.typeVar.shared.name > c2.typeVar.shared.name) { + return 1; + } + if (c1.constraintIndex < c2.constraintIndex) { + return -1; + } else if (c1.constraintIndex > c2.constraintIndex) { + return 1; + } + return 0; + } + + export function isSame( + conditions1: TypeCondition[] | undefined, + conditions2: TypeCondition[] | undefined + ): boolean { + if (!conditions1) { + return !conditions2; + } + + if (!conditions2 || conditions1.length !== conditions2.length) { + return false; + } + + return ( + conditions1.find( + (c1, index) => + c1.typeVar.priv.nameWithScope !== conditions2[index].typeVar.priv.nameWithScope || + c1.constraintIndex !== conditions2[index].constraintIndex + ) === undefined + ); + } + + // Determines if the two conditions can be used at the same time. If + // one constraint list contains a constraint for a type variable, and the + // same constraint is not in the other constraint list, the two are considered + // incompatible. 
+ export function isCompatible( + conditions1: TypeCondition[] | undefined, + conditions2: TypeCondition[] | undefined + ): boolean { + if (!conditions1 || !conditions2) { + return true; + } + + for (const c1 of conditions1) { + let foundTypeVarMatch = false; + const exactMatch = conditions2.find((c2) => { + if (c1.typeVar.priv.nameWithScope === c2.typeVar.priv.nameWithScope) { + foundTypeVarMatch = true; + return c1.constraintIndex === c2.constraintIndex; + } + return false; + }); + + if (foundTypeVarMatch && !exactMatch) { + return false; + } + } + + return true; + } +} + +export interface LiteralTypes { + literalStrMap: Map | undefined; + literalIntMap: Map | undefined; + literalEnumMap: Map | undefined; +} + +export interface UnionDetailsPriv { + subtypes: UnionableType[]; + literalInstances: LiteralTypes; + literalClasses: LiteralTypes; + typeAliasSources: Set | undefined; + includesRecursiveTypeAlias: boolean; +} + +export interface UnionType extends TypeBase { + priv: UnionDetailsPriv; +} + +export namespace UnionType { + export function create() { + const newUnionType: UnionType = { + category: TypeCategory.Union, + flags: TypeFlags.Instance | TypeFlags.Instantiable, + props: undefined, + cached: undefined, + shared: undefined, + priv: { + subtypes: [], + literalInstances: { + literalStrMap: undefined, + literalIntMap: undefined, + literalEnumMap: undefined, + }, + literalClasses: { + literalStrMap: undefined, + literalIntMap: undefined, + literalEnumMap: undefined, + }, + typeAliasSources: undefined, + includesRecursiveTypeAlias: false, + }, + }; + + return newUnionType; + } + + export function addType(unionType: UnionType, newType: UnionableType) { + // If we're adding a string, integer or enum literal, add it to the + // corresponding literal map to speed up some operations. It's not + // uncommon for unions to contain hundreds of literals. 
+ if (isClass(newType) && newType.priv.literalValue !== undefined && !newType.props?.condition) { + const literalMaps = isClassInstance(newType) + ? unionType.priv.literalInstances + : unionType.priv.literalClasses; + + if (ClassType.isBuiltIn(newType, 'str')) { + if (literalMaps.literalStrMap === undefined) { + literalMaps.literalStrMap = new Map(); + } + literalMaps.literalStrMap.set(newType.priv.literalValue as string, newType); + } else if (ClassType.isBuiltIn(newType, 'int')) { + if (literalMaps.literalIntMap === undefined) { + literalMaps.literalIntMap = new Map(); + } + literalMaps.literalIntMap.set(newType.priv.literalValue as number | bigint, newType); + } else if (ClassType.isEnumClass(newType)) { + if (literalMaps.literalEnumMap === undefined) { + literalMaps.literalEnumMap = new Map(); + } + const enumLiteral = newType.priv.literalValue as EnumLiteral; + literalMaps.literalEnumMap.set(enumLiteral.getName(), newType); + } + } + + unionType.flags &= newType.flags; + unionType.priv.subtypes.push(newType); + + if (isTypeVar(newType) && newType.shared.recursiveAlias?.name) { + // Note that at least one recursive type alias was included in + // this union. We'll need to expand it before the union is used. + unionType.priv.includesRecursiveTypeAlias = true; + } + } + + // Determines whether the union contains a specified subtype. If exclusionSet is passed, + // the method skips any subtype indexes that are in the set and adds a found index to + // the exclusion set. This speeds up union type comparisons. + export function containsType( + unionType: UnionType, + subtype: Type, + options: TypeSameOptions = {}, + exclusionSet?: Set, + recursionCount = 0 + ): boolean { + // Handle string literals as a special case because unions can sometimes + // contain hundreds of string literal types. + if (isClass(subtype) && subtype.props?.condition === undefined && subtype.priv.literalValue !== undefined) { + const literalMaps = isClassInstance(subtype) + ? 
unionType.priv.literalInstances + : unionType.priv.literalClasses; + + if (ClassType.isBuiltIn(subtype, 'str') && literalMaps.literalStrMap !== undefined) { + return literalMaps.literalStrMap.has(subtype.priv.literalValue as string); + } else if (ClassType.isBuiltIn(subtype, 'int') && literalMaps.literalIntMap !== undefined) { + return literalMaps.literalIntMap.has(subtype.priv.literalValue as number | bigint); + } else if (ClassType.isEnumClass(subtype) && literalMaps.literalEnumMap !== undefined) { + const enumLiteral = subtype.priv.literalValue as EnumLiteral; + return literalMaps.literalEnumMap.has(enumLiteral.getName()); + } + } + + const foundIndex = unionType.priv.subtypes.findIndex((t, i) => { + if (exclusionSet?.has(i)) { + return false; + } + + return isTypeSame(t, subtype, options, recursionCount); + }); + + if (foundIndex < 0) { + return false; + } + + exclusionSet?.add(foundIndex); + return true; + } + + export function addTypeAliasSource(unionType: UnionType, typeAliasSource: Type) { + if (typeAliasSource.category === TypeCategory.Union) { + const sourcesToAdd = typeAliasSource.props?.typeAliasInfo + ? [typeAliasSource] + : typeAliasSource.priv.typeAliasSources; + + if (sourcesToAdd) { + if (!unionType.priv.typeAliasSources) { + unionType.priv.typeAliasSources = new Set(); + } + + sourcesToAdd.forEach((source) => { + unionType.priv.typeAliasSources!.add(source); + }); + } + } + } +} + +export const enum Variance { + Auto, + Unknown, + Invariant, + Covariant, + Contravariant, +} + +export interface RecursiveAliasInfo { + // Used for recursive type aliases. + name: string; + scopeId: TypeVarScopeId; + isPep695Syntax: boolean; + + // Type parameters for a recursive type alias. 
+ typeParams: TypeVarType[] | undefined; +} + +export enum TypeVarKind { + TypeVar, + TypeVarTuple, + ParamSpec, +} + +export interface TypeVarDetailsShared { + kind: TypeVarKind; + name: string; + constraints: Type[]; + boundType: Type | undefined; + isDefaultExplicit: boolean; + defaultType: Type; + + declaredVariance: Variance; + + // Internally created (e.g. for pseudo-generic classes) + isSynthesized: boolean; + isSynthesizedSelf: boolean; + synthesizedIndex: number | undefined; + isExemptFromBoundCheck: boolean; + + // Does this type variable originate from PEP 695 type parameter syntax? + isTypeParamSyntax: boolean; + + // Information about recursive type aliases. + recursiveAlias: TypeAliasSharedInfo | undefined; +} + +export type ParamSpecAccess = 'args' | 'kwargs'; + +export const enum TypeVarScopeType { + Class, + Function, + TypeAlias, +} + +export interface TypeVarDetailsPriv { + // An ID that uniquely identifies the scope to which this TypeVar is bound + scopeId?: TypeVarScopeId | undefined; + + // A human-readable name of the function, class, or type alias that + // provides the scope to which this type variable is bound. Unlike the + // scopeId, this might not be unique, so it should be used only for error + // messages. + scopeName?: string | undefined; + + // If the TypeVar is bound to a scope, this is the scope type + scopeType?: TypeVarScopeType; + + // String formatted as . + nameWithScope?: string | undefined; + + // May be different from declaredVariance if declared as Auto + computedVariance?: Variance; + + // When a TypeVar appears within an expected type during bidirectional + // type inference, it needs to be solved along with the in-scope TypeVars. + // This is done by cloning the TypeVar and making it a "unification" + // variable. + isUnificationVar?: boolean; + + // If the TypeVar is bound form of a TypeVar, this refers to + // the corresponding free TypeVar. 
+ freeTypeVar?: TypeVarType | undefined; + + // Is this TypeVar or TypeVarTuple unpacked (i.e. Unpack or * operator applied)? + isUnpacked?: boolean | undefined; +} + +export interface TypeVarType extends TypeBase { + shared: TypeVarDetailsShared; + priv: TypeVarDetailsPriv; +} + +export interface ParamSpecDetailsPriv extends TypeVarDetailsPriv { + // Represents access to "args" or "kwargs" of a ParamSpec + paramSpecAccess?: ParamSpecAccess; + + freeTypeVar?: ParamSpecType | undefined; +} + +export interface ParamSpecType extends TypeVarType { + shared: TypeVarDetailsShared & { kind: TypeVarKind.ParamSpec }; + priv: ParamSpecDetailsPriv; +} + +export namespace ParamSpecType { + // Returns the "Unknown" equivalent for a ParamSpec. + export function getUnknown(): FunctionType { + const newFunction = FunctionType.createInstance( + '', + '', + '', + FunctionTypeFlags.ParamSpecValue | FunctionTypeFlags.GradualCallableForm + ); + FunctionType.addDefaultParams(newFunction); + return newFunction; + } +} + +export interface TypeVarTupleDetailsPriv extends TypeVarDetailsPriv { + // Is this TypeVarTuple included in a Union[]? This allows us to + // differentiate between Unpack[Vs] and Union[Unpack[Vs]]. 
+ isInUnion?: boolean | undefined; + + freeTypeVar?: TypeVarTupleType | undefined; +} + +export interface TypeVarTupleType extends TypeVarType { + shared: TypeVarDetailsShared & { kind: TypeVarKind.TypeVarTuple }; + priv: TypeVarTupleDetailsPriv; +} + +export namespace TypeVarType { + export function createInstance(name: string, kind: TypeVarKind = TypeVarKind.TypeVar) { + return create(name, kind, TypeFlags.Instance); + } + + export function createInstantiable(name: string, kind: TypeVarKind = TypeVarKind.TypeVar) { + return create(name, kind, TypeFlags.Instantiable); + } + + export function cloneAsInstance(type: TypeVarType): TypeVarType { + assert(TypeBase.isInstantiable(type)); + + if (type.cached?.typeBaseInstanceType) { + return type.cached.typeBaseInstanceType as TypeVarType; + } + + const newInstance = TypeBase.cloneTypeAsInstance(type, /* cache */ true); + if (newInstance.props?.specialForm) { + TypeBase.setSpecialForm(newInstance, undefined); + } + + if (newInstance.priv.freeTypeVar) { + newInstance.priv.freeTypeVar = TypeVarType.cloneAsInstance(newInstance.priv.freeTypeVar); + } + + return newInstance; + } + + export function cloneAsInstantiable(type: TypeVarType): TypeVarType { + if (type.cached?.typeBaseInstantiableType) { + return type.cached.typeBaseInstantiableType as TypeVarType; + } + + const newInstance = TypeBase.cloneTypeAsInstantiable(type, /* cache */ true); + + if (newInstance.priv.freeTypeVar) { + newInstance.priv.freeTypeVar = TypeVarType.cloneAsInstantiable(newInstance.priv.freeTypeVar); + } + + return newInstance; + } + + export function cloneForNewName(type: TypeVarType, name: string): TypeVarType { + const newInstance = TypeBase.cloneType(type); + newInstance.shared = { ...type.shared }; + newInstance.shared.name = name; + + if (newInstance.priv.scopeId) { + newInstance.priv.nameWithScope = makeNameWithScope( + name, + newInstance.priv.scopeId, + newInstance.priv.scopeName ?? 
'' + ); + } + + return newInstance; + } + + export function cloneForScopeId( + type: TypeVarType, + scopeId: string, + scopeName: string | undefined, + scopeType: TypeVarScopeType | undefined + ): TypeVarType { + const newInstance = TypeBase.cloneType(type); + newInstance.priv.nameWithScope = makeNameWithScope(type.shared.name, scopeId, scopeName ?? ''); + newInstance.priv.scopeId = scopeId; + newInstance.priv.scopeName = scopeName; + newInstance.priv.scopeType = scopeType; + return newInstance; + } + + export function cloneForUnpacked(type: TypeVarType, isInUnion = false) { + const newInstance = TypeBase.cloneType(type); + newInstance.priv.isUnpacked = true; + + if (isTypeVarTuple(newInstance) && isInUnion) { + newInstance.priv.isInUnion = isInUnion; + } + + if (newInstance.priv.freeTypeVar) { + newInstance.priv.freeTypeVar = TypeVarType.cloneForUnpacked(newInstance.priv.freeTypeVar, isInUnion); + } + return newInstance; + } + + export function cloneForPacked(type: TypeVarType) { + const newInstance = TypeBase.cloneType(type); + newInstance.priv.isUnpacked = false; + + if (isTypeVarTuple(newInstance)) { + newInstance.priv.isInUnion = false; + } + + if (newInstance.priv.freeTypeVar) { + newInstance.priv.freeTypeVar = TypeVarType.cloneForPacked(newInstance.priv.freeTypeVar); + } + return newInstance; + } + + // Creates a "simplified" version of the TypeVar with invariance + // and no bound or constraints. ParamSpecs and TypeVarTuples are left + // unmodified. So are auto-variant type variables. 
+ export function cloneAsInvariant(type: TypeVarType): TypeVarType { + if (isParamSpec(type) || isTypeVarTuple(type)) { + return type; + } + + if (type.shared.declaredVariance === Variance.Auto) { + return type; + } + + if (type.shared.declaredVariance === Variance.Invariant) { + if (!TypeVarType.hasBound(type) && !TypeVarType.hasConstraints(type)) { + return type; + } + } + + const newInstance = TypeBase.cloneType(type); + newInstance.shared = { ...newInstance.shared }; + newInstance.shared.declaredVariance = Variance.Invariant; + newInstance.shared.boundType = undefined; + newInstance.shared.constraints = []; + return newInstance; + } + + export function cloneForParamSpecAccess(type: ParamSpecType, access: ParamSpecAccess | undefined): ParamSpecType { + const newInstance = TypeBase.cloneType(type); + newInstance.priv.paramSpecAccess = access; + return newInstance; + } + + export function cloneAsSpecializedSelf(type: TypeVarType, specializedBoundType: Type): TypeVarType { + assert(TypeVarType.isSelf(type)); + const newInstance = TypeBase.cloneType(type); + newInstance.shared = { ...newInstance.shared }; + newInstance.shared.boundType = specializedBoundType; + return newInstance; + } + + export function cloneAsUnificationVar(type: TypeVarType, usageOffset?: number): TypeVarType { + if (TypeVarType.isUnification(type)) { + return type; + } + + // If the caller specified a usage offset, append it to the TypeVar + // internal name. This allows us to distinguish it from other uses + // of the same TypeVar. For example nested calls to a generic + // function like `foo(foo(1))`. 
+ let newNameWithScope = type.priv.nameWithScope; + if (usageOffset) { + newNameWithScope = `${type.priv.nameWithScope}-${usageOffset}`; + } + + const newInstance = TypeBase.cloneType(type); + newInstance.priv.isUnificationVar = true; + newInstance.priv.scopeId = UnificationScopeId; + newInstance.priv.nameWithScope = newNameWithScope; + return newInstance; + } + + export function cloneWithComputedVariance(type: TypeVarType, computedVariance: Variance): TypeVarType { + const newInstance = TypeBase.cloneType(type); + newInstance.priv.computedVariance = computedVariance; + return newInstance; + } + + export function makeNameWithScope(name: string, scopeId: string, scopeName: string) { + // We include the scopeName here even though it's normally already part + // of the scopeId. There are cases where it can diverge, specifically + // in scenarios involving higher-order functions that return generic + // callable types. See adjustCallableReturnType for details. + return `${name}.${scopeId}.${scopeName}`; + } + + // When solving the TypeVars for a callable, we need to distinguish between + // the externally-visible "free" type vars and the internal "bound" type vars. + // The distinction is important for recursive calls (e.g. calling a constructor + // for a generic class within the class implementation). + export function makeBoundScopeId(scopeId: TypeVarScopeId): TypeVarScopeId; + export function makeBoundScopeId(scopeId: TypeVarScopeId | undefined): TypeVarScopeId | undefined; + export function makeBoundScopeId(scopeId: TypeVarScopeId | undefined): TypeVarScopeId | undefined { + if (!scopeId) { + return undefined; + } + + // Append an asterisk to denote a bound scope. 
+ return `${scopeId}*`; + } + + export function cloneAsBound(type: TypeVarType): TypeVarType { + if (type.priv.scopeId === undefined || type.priv.freeTypeVar) { + return type; + } + + const clone = TypeVarType.cloneForScopeId( + type, + TypeVarType.makeBoundScopeId(type.priv.scopeId), + type.priv.scopeName, + type.priv.scopeType + ); + + clone.priv.freeTypeVar = type; + + return clone; + } + + // Indicates that the type var is a "free" or unbound type var. Free + // type variables can be solved whereas bound type vars are already bound + // to a value. + export function isBound(type: TypeVarType) { + // If the type var has an associated free type var, then it's + // considered bound. If it has no associated free var, then it's + // considered free. + return !!type.priv.freeTypeVar; + } + + export function isUnification(type: TypeVarType) { + return type.priv.isUnificationVar; + } + + function create(name: string, kind: TypeVarKind, typeFlags: TypeFlags): TypeVarType { + const newTypeVarType: TypeVarType = { + category: TypeCategory.TypeVar, + flags: typeFlags, + props: undefined, + cached: undefined, + shared: { + kind, + name, + constraints: [], + boundType: undefined, + isDefaultExplicit: false, + defaultType: UnknownType.create(), + declaredVariance: Variance.Invariant, + isSynthesized: false, + isSynthesizedSelf: false, + synthesizedIndex: undefined, + isExemptFromBoundCheck: false, + isTypeParamSyntax: false, + recursiveAlias: undefined, + }, + priv: {}, + }; + return newTypeVarType; + } + + export function addConstraint(type: TypeVarType, constraintType: Type) { + type.shared.constraints.push(constraintType); + } + + export function getNameWithScope(typeVarType: TypeVarType) { + // If there is no name with scope, fall back on the (unscoped) name. 
+ return typeVarType.priv.nameWithScope || typeVarType.shared.name; + } + + export function getReadableName(type: TypeVarType, includeScope = true) { + if (type.priv.scopeName && includeScope) { + return `${type.shared.name}@${type.priv.scopeName}`; + } + + return type.shared.name; + } + + export function getVariance(type: TypeVarType) { + const variance = type.priv.computedVariance ?? type.shared.declaredVariance; + + // By this point, the variance should have been inferred. + assert(variance !== Variance.Auto, 'Expected variance to be inferred'); + + // If we're in the process of computing variance, it will still be + // unknown. Default to covariant in this case. + if (variance === Variance.Unknown) { + return Variance.Covariant; + } + + return variance; + } + + // Indicates whether the specified type is a recursive type alias + // placeholder that has not yet been resolved. + export function isTypeAliasPlaceholder(type: TypeVarType) { + return !!type.shared.recursiveAlias && !type.shared.boundType; + } + + export function isSelf(type: TypeVarType) { + return !!type.shared.isSynthesizedSelf; + } + + export function hasConstraints(type: TypeVarType) { + return type.shared.constraints.length > 0; + } + + export function hasBound(type: TypeVarType) { + return !!type.shared.boundType; + } +} + +export function isNever(type: Type): type is NeverType { + return type.category === TypeCategory.Never; +} + +export function isAny(type: Type): type is AnyType { + return type.category === TypeCategory.Any; +} + +export function isUnknown(type: Type): type is UnknownType { + return type.category === TypeCategory.Unknown; +} + +export function isAnyOrUnknown(type: Type): type is AnyType | UnknownType { + if (type.category === TypeCategory.Any || type.category === TypeCategory.Unknown) { + return true; + } + + if (isUnion(type)) { + return type.priv.subtypes.find((subtype) => !isAnyOrUnknown(subtype)) === undefined; + } + + return false; +} + +export function isUnbound(type: 
Type): type is UnboundType { + return type.category === TypeCategory.Unbound; +} + +export function isUnion(type: Type): type is UnionType { + return type.category === TypeCategory.Union; +} + +export function isPossiblyUnbound(type: Type): boolean { + if (isUnbound(type)) { + return true; + } + + if (isUnion(type)) { + return type.priv.subtypes.find((subtype) => isPossiblyUnbound(subtype)) !== undefined; + } + + return false; +} + +export function isClass(type: Type): type is ClassType { + return type.category === TypeCategory.Class; +} + +export function isInstantiableClass(type: Type): type is ClassType { + return type.category === TypeCategory.Class && TypeBase.isInstantiable(type); +} + +export function isClassInstance(type: Type): type is ClassType { + return type.category === TypeCategory.Class && TypeBase.isInstance(type); +} + +export function isModule(type: Type): type is ModuleType { + return type.category === TypeCategory.Module; +} + +export function isTypeVar(type: Type): type is TypeVarType { + return type.category === TypeCategory.TypeVar; +} + +export function isParamSpec(type: Type): type is ParamSpecType { + return type.category === TypeCategory.TypeVar && type.shared.kind === TypeVarKind.ParamSpec; +} + +export function isTypeVarTuple(type: Type): type is TypeVarTupleType { + return type.category === TypeCategory.TypeVar && type.shared.kind === TypeVarKind.TypeVarTuple; +} + +export function isUnpackedTypeVarTuple(type: Type): type is TypeVarTupleType { + return isTypeVarTuple(type) && !!type.priv.isUnpacked && !type.priv.isInUnion; +} + +export function isUnpackedTypeVar(type: Type): type is TypeVarTupleType { + return isTypeVar(type) && !isTypeVarTuple(type) && !!type.priv.isUnpacked; +} + +export function isUnpackedClass(type: Type): type is ClassType { + if (!isClass(type) || !type.priv.isUnpacked) { + return false; + } + + return true; +} + +export function isUnpacked(type: Type): boolean { + return isUnpackedTypeVarTuple(type) || 
isUnpackedTypeVar(type) || isUnpackedClass(type); +} + +export function isFunction(type: Type): type is FunctionType { + return type.category === TypeCategory.Function; +} + +export function isOverloaded(type: Type): type is OverloadedType { + return type.category === TypeCategory.Overloaded; +} + +export function isFunctionOrOverloaded(type: Type): type is FunctionType | OverloadedType { + return type.category === TypeCategory.Function || type.category === TypeCategory.Overloaded; +} + +export function isMethodType(type: FunctionType | OverloadedType): boolean { + let funcType: FunctionType | undefined; + + if (isFunction(type)) { + funcType = type; + } else { + if (type.priv._overloads.length === 0) { + return false; + } + funcType = type.priv._overloads[0]; + } + + // __new__ methods are never really bound at runtime. + if ( + funcType.priv.preBoundFlags !== undefined && + (funcType.priv.preBoundFlags & FunctionTypeFlags.ConstructorMethod) !== 0 + ) { + return false; + } + + // If the function type has a stripped first parameter type, it was + // bound to class or object and is therefore a MethodType rather + // a FunctionType. + return !!funcType.priv.strippedFirstParamType; +} + +export function getTypeAliasInfo(type: Type) { + if (type.props?.typeAliasInfo) { + return type.props.typeAliasInfo; + } + + if ( + isTypeVar(type) && + type.shared.recursiveAlias && + type.shared.boundType && + type.shared.boundType.props?.typeAliasInfo + ) { + return type.shared.boundType.props.typeAliasInfo; + } + + return undefined; +} + +// Determines whether two types are the same. If ignorePseudoGeneric is true, +// type arguments for "pseudo-generic" classes (non-generic classes whose init +// methods are not annotated and are therefore treated as generic) are ignored. 
+export function isTypeSame(type1: Type, type2: Type, options: TypeSameOptions = {}, recursionCount = 0): boolean { + if (type1 === type2) { + return true; + } + + if (type1.category !== type2.category) { + if (options.treatAnySameAsUnknown) { + if (type1.category === TypeCategory.Any && type2.category === TypeCategory.Unknown) { + return true; + } + if (type1.category === TypeCategory.Unknown && type2.category === TypeCategory.Any) { + return true; + } + } + + return false; + } + + if (!options.ignoreTypeFlags) { + if ((type1.flags & TypeFlags.TypeCompatibilityMask) !== (type2.flags & TypeFlags.TypeCompatibilityMask)) { + return false; + } + } + + if (recursionCount > maxTypeRecursionCount) { + return true; + } + recursionCount++; + + if (options.honorTypeForm) { + const typeForm1 = type1.props?.typeForm; + const typeForm2 = type2.props?.typeForm; + + if (typeForm1) { + if (!typeForm2) { + return false; + } + + if (!isTypeSame(typeForm1, typeForm2, options, recursionCount)) { + return false; + } + } else if (typeForm2) { + return false; + } + } + + switch (type1.category) { + case TypeCategory.Class: { + const classType2 = type2 as ClassType; + + // If the details are not the same it's not the same class. + if (!ClassType.isSameGenericClass(type1, classType2, recursionCount)) { + return false; + } + + if (!options.ignoreConditions && !TypeCondition.isSame(type1.props?.condition, type2.props?.condition)) { + return false; + } + + if (!options.ignorePseudoGeneric || !ClassType.isPseudoGenericClass(type1)) { + // Make sure the type args match. 
+ if (type1.priv.tupleTypeArgs && classType2.priv.tupleTypeArgs) { + const type1TupleTypeArgs = type1.priv.tupleTypeArgs || []; + const type2TupleTypeArgs = classType2.priv.tupleTypeArgs || []; + if (type1TupleTypeArgs.length !== type2TupleTypeArgs.length) { + return false; + } + + for (let i = 0; i < type1TupleTypeArgs.length; i++) { + if ( + !isTypeSame( + type1TupleTypeArgs[i].type, + type2TupleTypeArgs[i].type, + { ...options, ignoreTypeFlags: false }, + recursionCount + ) + ) { + return false; + } + + if (type1TupleTypeArgs[i].isUnbounded !== type2TupleTypeArgs[i].isUnbounded) { + return false; + } + } + } else { + const type1TypeArgs = type1.priv.typeArgs || []; + const type2TypeArgs = classType2.priv.typeArgs || []; + const typeArgCount = Math.max(type1TypeArgs.length, type2TypeArgs.length); + + for (let i = 0; i < typeArgCount; i++) { + // Assume that missing type args are "Unknown". + const typeArg1 = i < type1TypeArgs.length ? type1TypeArgs[i] : UnknownType.create(); + const typeArg2 = i < type2TypeArgs.length ? type2TypeArgs[i] : UnknownType.create(); + + if (!isTypeSame(typeArg1, typeArg2, { ...options, ignoreTypeFlags: false }, recursionCount)) { + return false; + } + } + } + } + + if (!ClassType.isLiteralValueSame(type1, classType2)) { + return false; + } + + if (!type1.priv.isUnpacked !== !classType2.priv.isUnpacked) { + return false; + } + + if (!type1.priv.isTypedDictPartial !== !classType2.priv.isTypedDictPartial) { + return false; + } + + if (options.honorIsTypeArgExplicit) { + if (!!type1.priv.isTypeArgExplicit !== !!classType2.priv.isTypeArgExplicit) { + return false; + } + } + + if (!options.ignoreTypedDictNarrowEntries && !ClassType.isTypedDictNarrowedEntriesSame(type1, classType2)) { + return false; + } + + return true; + } + + case TypeCategory.Function: { + // Make sure the parameter counts match. 
+ const functionType2 = type2 as FunctionType; + const params1 = type1.shared.parameters; + const params2 = functionType2.shared.parameters; + + if (params1.length !== params2.length) { + return false; + } + + // If one function is ... and the other is not, they are not the same. + if (FunctionType.isGradualCallableForm(type1) !== FunctionType.isGradualCallableForm(functionType2)) { + return false; + } + + const positionOnlyIndex1 = params1.findIndex((param) => isPositionOnlySeparator(param)); + const positionOnlyIndex2 = params2.findIndex((param) => isPositionOnlySeparator(param)); + + // Make sure the parameter details match. + for (let i = 0; i < params1.length; i++) { + const param1 = params1[i]; + const param2 = params2[i]; + + if (param1.category !== param2.category) { + return false; + } + + const isName1Relevant = positionOnlyIndex1 !== undefined && i > positionOnlyIndex1; + const isName2Relevant = positionOnlyIndex2 !== undefined && i > positionOnlyIndex2; + + if (isName1Relevant !== isName2Relevant) { + return false; + } + + if (isName1Relevant) { + if (param1.name !== param2.name) { + return false; + } + } else if (isPositionOnlySeparator(param1) && isPositionOnlySeparator(param2)) { + continue; + } else if (isKeywordOnlySeparator(param1) && isKeywordOnlySeparator(param2)) { + continue; + } + + const param1Type = FunctionType.getParamType(type1, i); + const param2Type = FunctionType.getParamType(functionType2, i); + if (!isTypeSame(param1Type, param2Type, { ...options, ignoreTypeFlags: false }, recursionCount)) { + return false; + } + } + + // Make sure the return types match. 
+ let return1Type = type1.shared.declaredReturnType; + if (type1.priv.specializedTypes && type1.priv.specializedTypes.returnType) { + return1Type = type1.priv.specializedTypes.returnType; + } + if (!return1Type && type1.shared.inferredReturnType) { + return1Type = type1.shared.inferredReturnType?.type; + } + + let return2Type = functionType2.shared.declaredReturnType; + if (functionType2.priv.specializedTypes && functionType2.priv.specializedTypes.returnType) { + return2Type = functionType2.priv.specializedTypes.returnType; + } + if (!return2Type && functionType2.shared.inferredReturnType) { + return2Type = functionType2.shared.inferredReturnType?.type; + } + + if (return1Type || return2Type) { + if ( + !return1Type || + !return2Type || + !isTypeSame(return1Type, return2Type, { ...options, ignoreTypeFlags: false }, recursionCount) + ) { + return false; + } + } + + return true; + } + + case TypeCategory.Overloaded: { + // Make sure the overload counts match. + const functionType2 = type2 as OverloadedType; + if (type1.priv._overloads.length !== functionType2.priv._overloads.length) { + return false; + } + + // We assume here that overloaded functions always appear + // in the same order from one analysis pass to another. + for (let i = 0; i < type1.priv._overloads.length; i++) { + if (!isTypeSame(type1.priv._overloads[i], functionType2.priv._overloads[i], options, recursionCount)) { + return false; + } + } + + return true; + } + + case TypeCategory.Union: { + const unionType2 = type2 as UnionType; + const subtypes1 = type1.priv.subtypes; + const subtypes2 = unionType2.priv.subtypes; + + if (subtypes1.length !== subtypes2.length) { + return false; + } + + // The types do not have a particular order, so we need to + // do the comparison in an order-independent manner. 
+ const exclusionSet = new Set(); + return ( + findSubtype( + type1, + (subtype) => !UnionType.containsType(unionType2, subtype, options, exclusionSet, recursionCount) + ) === undefined + ); + } + + case TypeCategory.TypeVar: { + const type2TypeVar = type2 as TypeVarType; + + if (type1.priv.scopeId !== type2TypeVar.priv.scopeId) { + return false; + } + + if (type1.priv.nameWithScope !== type2TypeVar.priv.nameWithScope) { + return false; + } + + // Handle the case where this is a generic recursive type alias. Make + // sure that the type argument types match. + if (type1.shared.recursiveAlias && type2TypeVar.shared.recursiveAlias) { + const type1TypeArgs = type1?.props?.typeAliasInfo?.typeArgs || []; + const type2TypeArgs = type2?.props?.typeAliasInfo?.typeArgs || []; + const typeArgCount = Math.max(type1TypeArgs.length, type2TypeArgs.length); + + for (let i = 0; i < typeArgCount; i++) { + // Assume that missing type args are "Any". + const typeArg1 = i < type1TypeArgs.length ? type1TypeArgs[i] : AnyType.create(); + const typeArg2 = i < type2TypeArgs.length ? 
type2TypeArgs[i] : AnyType.create(); + + if (!isTypeSame(typeArg1, typeArg2, { ...options, ignoreTypeFlags: false }, recursionCount)) { + return false; + } + } + } + + if (isTypeVarTuple(type1) && isTypeVarTuple(type2TypeVar)) { + if (!type1.priv.isInUnion !== !type2TypeVar.priv.isInUnion) { + return false; + } + } + + if (type1.shared === type2TypeVar.shared) { + return true; + } + + if (isParamSpec(type1) !== isParamSpec(type2TypeVar)) { + return false; + } + + if (isTypeVarTuple(type1) !== isTypeVarTuple(type2TypeVar)) { + return false; + } + + if ( + type1.shared.name !== type2TypeVar.shared.name || + type1.shared.isSynthesized !== type2TypeVar.shared.isSynthesized || + type1.shared.declaredVariance !== type2TypeVar.shared.declaredVariance || + type1.priv.scopeId !== type2TypeVar.priv.scopeId + ) { + return false; + } + + const boundType1 = type1.shared.boundType; + const boundType2 = type2TypeVar.shared.boundType; + if (boundType1) { + if ( + !boundType2 || + !isTypeSame(boundType1, boundType2, { ...options, ignoreTypeFlags: false }, recursionCount) + ) { + return false; + } + } else { + if (boundType2) { + return false; + } + } + + const constraints1 = type1.shared.constraints; + const constraints2 = type2TypeVar.shared.constraints; + if (constraints1.length !== constraints2.length) { + return false; + } + + for (let i = 0; i < constraints1.length; i++) { + if ( + !isTypeSame( + constraints1[i], + constraints2[i], + { ...options, ignoreTypeFlags: false }, + recursionCount + ) + ) { + return false; + } + } + + return true; + } + + case TypeCategory.Module: { + const type2Module = type2 as ModuleType; + + // Module types are the same if they share the same + // module symbol table. + if (type1.priv.fields === type2Module.priv.fields) { + return true; + } + + // If both symbol tables are empty, we can also assume + // they're equal. 
+ if (type1.priv.fields.size === 0 && type2Module.priv.fields.size === 0) { + return true; + } + + return false; + } + + case TypeCategory.Unknown: { + const type2Unknown = type2 as UnknownType; + + return type1.priv.isIncomplete === type2Unknown.priv.isIncomplete; + } + } + + return true; +} + +// If the type is a union, remove an "unknown" type from the union, +// returning only the known types. +export function removeUnknownFromUnion(type: Type): Type { + return removeFromUnion(type, (t: Type) => isUnknown(t)); +} + +// If the type is a union, remove an "unbound" type from the union, +// returning only the known types. +export function removeUnbound(type: Type): Type { + if (isUnion(type)) { + return removeFromUnion(type, (t: Type) => isUnbound(t)); + } + + if (isUnbound(type)) { + return UnknownType.create(); + } + + return type; +} + +export function removeFromUnion(type: Type, removeFilter: (type: Type) => boolean) { + if (isUnion(type)) { + const remainingTypes = type.priv.subtypes.filter((t) => !removeFilter(t)); + if (remainingTypes.length < type.priv.subtypes.length) { + const newType = combineTypes(remainingTypes); + + if (isUnion(newType)) { + UnionType.addTypeAliasSource(newType, type); + } + + return newType; + } + } + + return type; +} + +export function findSubtype(type: Type, filter: (type: UnionableType | NeverType) => boolean) { + if (isUnion(type)) { + return type.priv.subtypes.find((subtype) => { + return filter(subtype); + }); + } + + return filter(type) ? type : undefined; +} + +export interface CombineTypesOptions { + // By default, literals are elided (removed) from a union if the non-literal + // subtype is present. Should this be skipped? + skipElideRedundantLiterals?: boolean; + + // If specified, the maximum number of subtypes that should be allowed + // in the union before it is converted to an "Any" type. + maxSubtypeCount?: number; +} + +// Combines multiple types into a single type. 
If the types are +// the same, only one is returned. If they differ, they +// are combined into a UnionType. NeverTypes are filtered out. +// If no types remain in the end, a NeverType is returned. +export function combineTypes(subtypes: Type[], options?: CombineTypesOptions): Type { + let neverTypes: NeverType[]; + + // Filter out any Never or NoReturn types. + [neverTypes, subtypes] = partition(subtypes, isNever); + + if (subtypes.length === 0) { + if (neverTypes.length > 0) { + // Prefer NoReturn over Never. This approach preserves type alias + // information if present. + return neverTypes.find((t) => t.priv.isNoReturn) ?? neverTypes[0]; + } + + return NeverType.createNever(); + } + + // Handle the common case where there is only one type. + // Also handle the common case where there are multiple copies of the same type. + let allSubtypesAreSame = true; + if (subtypes.length > 1) { + for (let index = 1; index < subtypes.length; index++) { + if (subtypes[index] !== subtypes[0]) { + allSubtypesAreSame = false; + break; + } + } + } + + if (allSubtypesAreSame) { + return subtypes[0]; + } + + // Expand all union types. + let expandedTypes: Type[] | undefined; + const typeAliasSources = new Set(); + + for (let i = 0; i < subtypes.length; i++) { + const subtype = subtypes[i]; + if (isUnion(subtype)) { + if (!expandedTypes) { + expandedTypes = subtypes.slice(0, i); + } + expandedTypes = expandedTypes.concat(subtype.priv.subtypes); + + if (subtype.props?.typeAliasInfo) { + typeAliasSources.add(subtype); + } else if (subtype.priv.typeAliasSources) { + subtype.priv.typeAliasSources.forEach((subtype) => { + typeAliasSources.add(subtype); + }); + } + } else if (expandedTypes) { + expandedTypes.push(subtype); + } + } + + expandedTypes = expandedTypes ?? subtypes; + + // Sort all of the literal and empty types to the end. 
+ expandedTypes = expandedTypes.sort((type1, type2) => { + if (isClass(type1) && type1.priv.literalValue !== undefined) { + return 1; + } + + if (isClass(type2) && type2.priv.literalValue !== undefined) { + return -1; + } + + if (isClassInstance(type1) && type1.priv.isEmptyContainer) { + return 1; + } else if (isClassInstance(type2) && type2.priv.isEmptyContainer) { + return -1; + } + + return 0; + }); + + // If removing all NoReturn types results in no remaining types, + // convert it to an unknown. + if (expandedTypes.length === 0) { + return UnknownType.create(); + } + + const newUnionType = UnionType.create(); + if (typeAliasSources.size > 0) { + newUnionType.priv.typeAliasSources = typeAliasSources; + } + + let hitMaxSubtypeCount = false; + + expandedTypes.forEach((subtype, index) => { + if (index === 0) { + UnionType.addType(newUnionType, subtype as UnionableType); + } else { + if (options?.maxSubtypeCount === undefined || newUnionType.priv.subtypes.length < options.maxSubtypeCount) { + _addTypeIfUnique(newUnionType, subtype as UnionableType, !options?.skipElideRedundantLiterals); + } else { + hitMaxSubtypeCount = true; + } + } + }); + + if (hitMaxSubtypeCount) { + return AnyType.create(); + } + + // If only one type remains, convert it from a union to a simple type. + if (newUnionType.priv.subtypes.length === 1) { + return newUnionType.priv.subtypes[0]; + } + + return newUnionType; +} + +// Determines whether the dest type is the same as the source type with +// the possible exception that the source type has a literal value when +// the dest does not. +export function isSameWithoutLiteralValue(destType: Type, srcType: Type): boolean { + // If it's the same with literals, great. + if (isTypeSame(destType, srcType)) { + return true; + } + + if (isInstantiableClass(srcType) && srcType.priv.literalValue !== undefined) { + // Strip the literal. 
+ srcType = ClassType.cloneWithLiteral(srcType, /* value */ undefined); + return isTypeSame(destType, srcType); + } + + if (isClassInstance(srcType) && srcType.priv.literalValue !== undefined) { + // Strip the literal. + srcType = ClassType.cloneWithLiteral(srcType, /* value */ undefined); + return isTypeSame(destType, srcType, { ignoreConditions: true }); + } + + return false; +} + +function _addTypeIfUnique(unionType: UnionType, typeToAdd: UnionableType, elideRedundantLiterals: boolean) { + // Handle the addition of a string literal in a special manner to + // avoid n^2 behavior in unions that contain hundreds of string + // literal types. Skip this for constrained types. + if (isClass(typeToAdd) && !typeToAdd.props?.condition) { + const literalMaps = isClassInstance(typeToAdd) + ? unionType.priv.literalInstances + : unionType.priv.literalClasses; + + if ( + ClassType.isBuiltIn(typeToAdd, 'str') && + typeToAdd.priv.literalValue !== undefined && + literalMaps.literalStrMap !== undefined + ) { + if (!literalMaps.literalStrMap.has(typeToAdd.priv.literalValue as string)) { + UnionType.addType(unionType, typeToAdd); + } + return; + } else if ( + ClassType.isBuiltIn(typeToAdd, 'int') && + typeToAdd.priv.literalValue !== undefined && + literalMaps.literalIntMap !== undefined + ) { + if (!literalMaps.literalIntMap.has(typeToAdd.priv.literalValue as number | bigint)) { + UnionType.addType(unionType, typeToAdd); + } + return; + } else if ( + ClassType.isEnumClass(typeToAdd) && + typeToAdd.priv.literalValue !== undefined && + literalMaps.literalEnumMap !== undefined + ) { + const enumLiteral = typeToAdd.priv.literalValue as EnumLiteral; + if (!literalMaps.literalEnumMap.has(enumLiteral.getName())) { + UnionType.addType(unionType, typeToAdd); + } + return; + } + } + + const isPseudoGeneric = isClass(typeToAdd) && ClassType.isPseudoGenericClass(typeToAdd); + + for (let i = 0; i < unionType.priv.subtypes.length; i++) { + const type = unionType.priv.subtypes[i]; + + // Does 
this type already exist in the types array? + if (isTypeSame(type, typeToAdd, { honorTypeForm: true })) { + return; + } + + // Handle the case where pseudo-generic classes with different + // type arguments are being combined. Rather than add multiple + // specialized types, we will replace them with a single specialized + // type that is specialized with Unknowns. This is important because + // we can hit recursive cases (where a pseudo-generic class is + // parameterized with its own class) ad infinitum. + if (isPseudoGeneric) { + if (isTypeSame(type, typeToAdd, { ignorePseudoGeneric: true, honorTypeForm: true })) { + unionType.priv.subtypes[i] = ClassType.specialize( + typeToAdd, + typeToAdd.shared.typeParams.map(() => UnknownType.create()) + ); + return; + } + } + + if (isClassInstance(type) && isClassInstance(typeToAdd)) { + // If the typeToAdd is a literal value and there's already + // a non-literal type that matches, don't add the literal value. + if (elideRedundantLiterals && isSameWithoutLiteralValue(type, typeToAdd)) { + if (type.priv.literalValue === undefined) { + return; + } + } + + // If we're adding Literal[False] or Literal[True] to its + // opposite, combine them into a non-literal 'bool' type. + if ( + ClassType.isBuiltIn(type, 'bool') && + !type.props?.condition && + ClassType.isBuiltIn(typeToAdd, 'bool') && + !typeToAdd.props?.condition + ) { + if ( + typeToAdd.priv.literalValue !== undefined && + !typeToAdd.priv.literalValue === type.priv.literalValue + ) { + unionType.priv.subtypes[i] = ClassType.cloneWithLiteral(type, /* value */ undefined); + return; + } + } + + // If the typeToAdd is a TypedDict that is the same class as the + // existing type, see if one of them is a proper subset of the other. + if (ClassType.isTypedDictClass(type) && ClassType.isSameGenericClass(type, typeToAdd)) { + // Do not proceed if the TypedDicts are generic and have different type arguments. 
+ if (!type.priv.typeArgs && !typeToAdd.priv.typeArgs) { + if (ClassType.isTypedDictNarrower(typeToAdd, type)) { + return; + } else if (ClassType.isTypedDictNarrower(type, typeToAdd)) { + unionType.priv.subtypes[i] = typeToAdd; + return; + } + } + } + } + + // If the typeToAdd is an empty container and there's already + // non-empty container of the same type, don't add the empty container. + if (isClassInstance(typeToAdd) && typeToAdd.priv.isEmptyContainer) { + if (isClassInstance(type) && ClassType.isSameGenericClass(type, typeToAdd)) { + return; + } + } + } + + UnionType.addType(unionType, typeToAdd); +} diff --git a/python-parser/packages/pyright-internal/src/backgroundAnalysis.ts b/python-parser/packages/pyright-internal/src/backgroundAnalysis.ts new file mode 100644 index 00000000..d0823014 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/backgroundAnalysis.ts @@ -0,0 +1,65 @@ +/* + * backgroundAnalysis.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * run analyzer from background thread + */ + +import { Worker } from 'worker_threads'; + +import { ImportResolver } from './analyzer/importResolver'; +import { BackgroundAnalysisBase, BackgroundAnalysisRunnerBase } from './backgroundAnalysisBase'; +import { InitializationData } from './backgroundThreadBase'; +import { getCancellationFolderName } from './common/cancellationUtils'; +import { ConfigOptions } from './common/configOptions'; +import { FullAccessHost } from './common/fullAccessHost'; +import { Host } from './common/host'; +import { ServiceProvider } from './common/serviceProvider'; +import { getRootUri } from './common/uri/uriUtils'; +import { ServiceKeys } from './common/serviceKeys'; +import { Uri } from './common/uri/uri'; + +export class BackgroundAnalysis extends BackgroundAnalysisBase { + private static _workerIndex = 0; + + constructor(workspaceRoot: Uri, serviceProvider: ServiceProvider) { + super(serviceProvider.console()); + + const index = ++BackgroundAnalysis._workerIndex; + const initialData: InitializationData = { + rootUri: getRootUri(serviceProvider)?.toString() ?? '', + workspaceRootUri: workspaceRoot.toString(), + tempFileName: serviceProvider.get(ServiceKeys.tempFile).tmpdir().getFilePath(), + serviceId: index.toString(), + cancellationFolderName: getCancellationFolderName(), + runner: undefined, + workerIndex: index, + }; + + // this will load this same file in BG thread and start listener + const worker = new Worker(__filename, { workerData: initialData }); + this.setup(worker); + + // Tell the cacheManager we have a worker that needs to share data. 
+ serviceProvider.cacheManager()?.addWorker(initialData.workerIndex, worker); + } +} + +export class BackgroundAnalysisRunner extends BackgroundAnalysisRunnerBase { + constructor(serviceProvider: ServiceProvider) { + super(serviceProvider); + } + + protected override createHost(): Host { + return new FullAccessHost(this.getServiceProvider()); + } + + protected override createImportResolver( + serviceProvider: ServiceProvider, + options: ConfigOptions, + host: Host + ): ImportResolver { + return new ImportResolver(serviceProvider, options, host); + } +} diff --git a/python-parser/packages/pyright-internal/src/backgroundAnalysisBase.ts b/python-parser/packages/pyright-internal/src/backgroundAnalysisBase.ts new file mode 100644 index 00000000..2773104a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/backgroundAnalysisBase.ts @@ -0,0 +1,898 @@ +/* + * backgroundAnalysisBase.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * run analyzer from background thread + */ + +import { CancellationToken, Disposable } from 'vscode-languageserver'; +import { MessageChannel, MessagePort, Worker, parentPort, threadId, workerData } from 'worker_threads'; + +import { + AnalysisCompleteCallback, + AnalysisResults, + RequiringAnalysisCount, + analyzeProgram, + nullCallback, +} from './analyzer/analysis'; +import { InvalidatedReason } from './analyzer/backgroundAnalysisProgram'; +import { ImportResolver } from './analyzer/importResolver'; +import { OpenFileOptions, Program } from './analyzer/program'; +import { + BackgroundThreadBase, + InitializationData, + LogData, + deserialize, + getBackgroundWaiter, + run, + serialize, +} from './backgroundThreadBase'; +import { + OperationCanceledException, + getCancellationTokenId, + throwIfCancellationRequested, +} from './common/cancellationUtils'; +import { ConfigOptions } from './common/configOptions'; +import { ConsoleInterface, LogLevel, log } from './common/console'; +import * as debug 
from './common/debug'; +import { Diagnostic } from './common/diagnostic'; +import { FileDiagnostics } from './common/diagnosticSink'; +import { disposeCancellationToken, getCancellationTokenFromId } from './common/fileBasedCancellationUtils'; +import { Host, HostKind } from './common/host'; +import { LogTracker } from './common/logTracker'; +import { ServiceProvider } from './common/serviceProvider'; +import { Range } from './common/textRange'; +import { Uri } from './common/uri/uri'; +import { UriMap } from './common/uri/uriMap'; +import { ProgramView } from './common/extensibility'; + +export interface IBackgroundAnalysis extends Disposable { + setProgramView(program: Program): void; + setCompletionCallback(callback?: AnalysisCompleteCallback): void; + setImportResolver(importResolver: ImportResolver): void; + setConfigOptions(configOptions: ConfigOptions): void; + setTrackedFiles(fileUris: Uri[]): void; + setAllowedThirdPartyImports(importNames: string[]): void; + ensurePartialStubPackages(executionRoot: string | undefined): void; + setFileOpened(fileUri: Uri, version: number | null, contents: string, options: OpenFileOptions): void; + updateChainedUri(fileUri: Uri, chainedUri: Uri | undefined): void; + setFileClosed(fileUri: Uri, isTracked?: boolean): void; + addInterimFile(fileUri: Uri): void; + markAllFilesDirty(evenIfContentsAreSame: boolean): void; + markFilesDirty(fileUris: Uri[], evenIfContentsAreSame: boolean): void; + startAnalysis(token: CancellationToken): void; + analyzeFile(fileUri: Uri, token: CancellationToken): Promise; + analyzeFileAndGetDiagnostics(fileUri: Uri, token: CancellationToken): Promise; + getDiagnosticsForRange(fileUri: Uri, range: Range, token: CancellationToken): Promise; + writeTypeStub( + targetImportPath: Uri, + targetIsSingleFile: boolean, + stubPath: Uri, + token: CancellationToken + ): Promise; + invalidateAndForceReanalysis(reason: InvalidatedReason): void; + restart(): void; + shutdown(): void; +} + +export class 
BackgroundAnalysisBase implements IBackgroundAnalysis { + // This map tracks pending analysis requests and their associated cancellation tokens. + // When analysis is completed or cancelled, the token will be disposed. + private readonly _analysisCancellationMap = new Map(); + + private _worker: Worker | undefined; + private _onAnalysisCompletion: AnalysisCompleteCallback = nullCallback; + private _messageChannel: MessageChannel; + + protected program: ProgramView | undefined; + + protected constructor(protected console: ConsoleInterface) { + // Don't allow instantiation of this type directly. + + // Create a message channel for handling 'analysis' or 'background' type results. + // The other side of this channel will be sent to the BG thread for sending responses. + this._messageChannel = new MessageChannel(); + this._messageChannel.port1.on('message', (msg: BackgroundResponse) => this.handleBackgroundResponse(msg)); + } + + dispose() { + if (this._messageChannel) { + this._messageChannel.port1.close(); + this._messageChannel.port2.close(); + } + if (this._worker) { + this._worker.terminate(); + } + } + + setProgramView(programView: Program) { + this.program = programView; + } + + setCompletionCallback(callback?: AnalysisCompleteCallback) { + this._onAnalysisCompletion = callback ?? 
nullCallback; + } + + setImportResolver(importResolver: ImportResolver) { + this.enqueueRequest({ requestType: 'setImportResolver', data: serialize(importResolver.host.kind) }); + } + + setConfigOptions(configOptions: ConfigOptions) { + this.enqueueRequest({ requestType: 'setConfigOptions', data: serialize(configOptions) }); + } + + setTrackedFiles(fileUris: Uri[]) { + this.enqueueRequest({ requestType: 'setTrackedFiles', data: serialize(fileUris) }); + } + + setAllowedThirdPartyImports(importNames: string[]) { + this.enqueueRequest({ requestType: 'setAllowedThirdPartyImports', data: serialize(importNames) }); + } + + ensurePartialStubPackages(executionRoot: string | undefined) { + this.enqueueRequest({ requestType: 'ensurePartialStubPackages', data: serialize({ executionRoot }) }); + } + + setFileOpened(fileUri: Uri, version: number | null, contents: string, options: OpenFileOptions) { + this.enqueueRequest({ + requestType: 'setFileOpened', + data: serialize({ fileUri, version, contents, options }), + }); + } + + updateChainedUri(fileUri: Uri, chainedUri: Uri | undefined) { + this.enqueueRequest({ + requestType: 'updateChainedFileUri', + data: serialize({ fileUri, chainedUri }), + }); + } + + setFileClosed(fileUri: Uri, isTracked?: boolean) { + this.enqueueRequest({ requestType: 'setFileClosed', data: serialize({ fileUri, isTracked }) }); + } + + addInterimFile(fileUri: Uri) { + this.enqueueRequest({ requestType: 'addInterimFile', data: serialize({ fileUri }) }); + } + + markAllFilesDirty(evenIfContentsAreSame: boolean) { + this.enqueueRequest({ requestType: 'markAllFilesDirty', data: serialize({ evenIfContentsAreSame }) }); + } + + markFilesDirty(fileUris: Uri[], evenIfContentsAreSame: boolean) { + this.enqueueRequest({ + requestType: 'markFilesDirty', + data: serialize({ fileUris, evenIfContentsAreSame }), + }); + } + + startAnalysis(token: CancellationToken) { + const tokenId = getCancellationTokenId(token); + if (tokenId) { + 
this._analysisCancellationMap.set(tokenId, token); + } + + this.enqueueRequest({ + requestType: 'analyze', + data: serialize(token), + }); + } + + async analyzeFile(fileUri: Uri, token: CancellationToken): Promise { + throwIfCancellationRequested(token); + + const { port1, port2 } = new MessageChannel(); + const waiter = getBackgroundWaiter(port1); + + const cancellationId = getCancellationTokenId(token); + this.enqueueRequest({ + requestType: 'analyzeFile', + data: serialize({ fileUri, cancellationId }), + port: port2, + }); + + const result = await waiter; + + port2.close(); + port1.close(); + + return result; + } + + async analyzeFileAndGetDiagnostics(fileUri: Uri, token: CancellationToken): Promise { + throwIfCancellationRequested(token); + + const { port1, port2 } = new MessageChannel(); + const waiter = getBackgroundWaiter(port1); + + const cancellationId = getCancellationTokenId(token); + this.enqueueRequest({ + requestType: 'analyzeFileAndGetDiagnostics', + data: serialize({ fileUri, cancellationId }), + port: port2, + }); + + const result = await waiter; + + port2.close(); + port1.close(); + + return convertDiagnostics(result); + } + + async getDiagnosticsForRange(fileUri: Uri, range: Range, token: CancellationToken): Promise { + throwIfCancellationRequested(token); + + const { port1, port2 } = new MessageChannel(); + const waiter = getBackgroundWaiter(port1); + + const cancellationId = getCancellationTokenId(token); + this.enqueueRequest({ + requestType: 'getDiagnosticsForRange', + data: serialize({ fileUri, range, cancellationId }), + port: port2, + }); + + const result = await waiter; + + port2.close(); + port1.close(); + + return convertDiagnostics(result); + } + + async writeTypeStub( + targetImportPath: Uri, + targetIsSingleFile: boolean, + stubPath: Uri, + token: CancellationToken + ): Promise { + throwIfCancellationRequested(token); + + const { port1, port2 } = new MessageChannel(); + const waiter = getBackgroundWaiter(port1); + + const 
cancellationId = getCancellationTokenId(token); + this.enqueueRequest({ + requestType: 'writeTypeStub', + data: serialize({ + targetImportPath, + targetIsSingleFile, + stubPath, + cancellationId, + }), + port: port2, + }); + + await waiter; + + port2.close(); + port1.close(); + } + + invalidateAndForceReanalysis(reason: InvalidatedReason) { + this.enqueueRequest({ requestType: 'invalidateAndForceReanalysis', data: serialize({ reason }) }); + } + + restart() { + this.enqueueRequest({ requestType: 'restart', data: null }); + } + + shutdown(): void { + if (this._worker) { + this.enqueueRequest({ requestType: 'shutdown', data: null }); + } + } + + protected setup(worker: Worker) { + this._worker = worker; + + // global channel to communicate from BG channel to main thread. + worker.on('message', (msg: BackgroundResponse) => this.onMessage(msg)); + + // this will catch any exception thrown from background thread, + // print log and ignore exception + worker.on('error', (msg) => { + this.log(LogLevel.Error, `Error occurred on background thread: ${JSON.stringify(msg)}`); + }); + + worker.on('exit', (code) => { + this.log(LogLevel.Log, `Background thread exited with code: ${code}`); + }); + + // Send the port to the other side for use in sending responses. It can only be sent once cause after it's transferred + // it's not usable anymore. + this.enqueueRequest({ requestType: 'start', data: '', port: this._messageChannel.port2 }); + } + + protected onMessage(msg: BackgroundResponse) { + switch (msg.requestType) { + case 'log': { + const logData = deserialize(msg.data); + this.log(logData.level, logData.message); + break; + } + + case 'analysisResult': { + // Change in diagnostics due to host such as file closed rather than + // analyzing files. + this._onAnalysisCompletion(convertAnalysisResults(deserialize(msg.data))); + break; + } + + default: + debug.fail(`${msg.requestType} is not expected. 
Message structure: ${JSON.stringify(msg)}`); + } + } + + protected enqueueRequest(request: BackgroundRequest) { + if (this._worker) { + this._worker.postMessage(request, request.port ? [request.port] : undefined); + } + } + + protected log(level: LogLevel, msg: string) { + log(this.console, level, msg); + } + + protected handleBackgroundResponse(msg: BackgroundResponse) { + switch (msg.requestType) { + case 'analysisResult': { + this._onAnalysisCompletion(convertAnalysisResults(deserialize(msg.data))); + break; + } + + case 'analysisPaused': { + // Analysis request has completed, but there is more to + // analyze, so queue another message to resume later. + this.enqueueRequest({ + requestType: 'resumeAnalysis', + data: serialize(msg.data), + }); + break; + } + + case 'analysisDone': { + if (!msg.data) { + break; + } + + const token = this._analysisCancellationMap.get(msg.data); + this._analysisCancellationMap.delete(msg.data); + + if (!token) { + break; + } + + disposeCancellationToken(token); + break; + } + + default: + debug.fail(`${msg.requestType} is not expected. Message structure: ${JSON.stringify(msg)}`); + } + } +} + +export abstract class BackgroundAnalysisRunnerBase extends BackgroundThreadBase { + private _configOptions: ConfigOptions; + private _program: Program; + private _responsePort: MessagePort | undefined; + protected importResolver: ImportResolver; + protected logTracker: LogTracker; + protected isCaseSensitive = true; + + protected constructor(protected serviceProvider: ServiceProvider) { + super(workerData as InitializationData, serviceProvider); + + // Stash the base directory into a global variable. 
+ const data = workerData as InitializationData; + this.log(LogLevel.Info, `Background analysis(${threadId}) root directory: ${data.rootUri}`); + this._configOptions = new ConfigOptions(Uri.parse(data.rootUri, serviceProvider)); + this.importResolver = this.createImportResolver(serviceProvider, this._configOptions, this.createHost()); + + const console = this.getConsole(); + this.logTracker = new LogTracker(console, `BG(${threadId})`); + + this._program = new Program( + this.importResolver, + this._configOptions, + serviceProvider, + this.logTracker, + undefined, + data.serviceId + ); + } + + get program(): Program { + return this._program; + } + + get responsePort(): MessagePort { + debug.assert(this._responsePort !== undefined, 'BG thread was not started properly. No response port'); + return this._responsePort!; + } + + start() { + this.log(LogLevel.Info, `Background analysis(${threadId}) started`); + + // Get requests from main thread. + parentPort?.on('message', this._onMessageWrapper.bind(this)); + parentPort?.on('error', (msg) => debug.fail(`failed ${msg}`)); + parentPort?.on('exit', (c) => { + if (c !== 0) { + debug.fail(`worker stopped with exit code ${c}`); + } + }); + } + + protected onMessage(msg: BackgroundRequest) { + switch (msg.requestType) { + case 'start': { + // Take ownership of the port for sending responses. This should + // have been provided in the 'start' message. 
+ this._responsePort = msg.port!; + break; + } + case 'cacheUsageBuffer': { + this.serviceProvider.cacheManager()?.handleCachedUsageBufferMessage(msg); + break; + } + + case 'analyze': { + const token = deserialize(msg.data); + this.handleAnalyze(this.responsePort, token); + break; + } + + case 'resumeAnalysis': { + const token = getCancellationTokenFromId(deserialize(msg.data)); + this.handleResumeAnalysis(this.responsePort, token); + break; + } + + case 'analyzeFile': { + run(() => { + const { fileUri, cancellationId } = deserialize(msg.data); + const token = getCancellationTokenFromId(cancellationId); + + return this.handleAnalyzeFile(fileUri, token); + }, msg.port!); + break; + } + + case 'analyzeFileAndGetDiagnostics': { + run(() => { + const { fileUri, cancellationId } = deserialize(msg.data); + const token = getCancellationTokenFromId(cancellationId); + + return this.handleAnalyzeFileAndGetDiagnostics(fileUri, token); + }, msg.port!); + break; + } + + case 'getDiagnosticsForRange': { + run(() => { + const { fileUri, range, cancellationId } = deserialize(msg.data); + const token = getCancellationTokenFromId(cancellationId); + + return this.handleGetDiagnosticsForRange(fileUri, range, token); + }, msg.port!); + break; + } + + case 'writeTypeStub': { + run(() => { + const { targetImportPath, targetIsSingleFile, stubPath, cancellationId } = deserialize(msg.data); + const token = getCancellationTokenFromId(cancellationId); + + this.handleWriteTypeStub(targetImportPath, targetIsSingleFile, stubPath, token); + }, msg.port!); + break; + } + + case 'setImportResolver': { + this.handleSetImportResolver(deserialize(msg.data)); + break; + } + + case 'setConfigOptions': { + this.handleSetConfigOptions(deserialize(msg.data)); + break; + } + + case 'setTrackedFiles': { + this.handleSetTrackedFiles(deserialize(msg.data)); + break; + } + + case 'setAllowedThirdPartyImports': { + this.handleSetAllowedThirdPartyImports(deserialize(msg.data)); + break; + } + + case 
'ensurePartialStubPackages': { + const { executionRoot } = deserialize(msg.data); + this.handleEnsurePartialStubPackages(executionRoot); + break; + } + + case 'setFileOpened': { + const { fileUri, version, contents, options } = deserialize(msg.data); + this.handleSetFileOpened(fileUri, version, contents, options); + break; + } + + case 'updateChainedFileUri': { + const { fileUri, chainedUri } = deserialize(msg.data); + this.handleUpdateChainedFileUri(fileUri, chainedUri); + break; + } + + case 'setFileClosed': { + const { fileUri, isTracked } = deserialize(msg.data); + this.handleSetFileClosed(fileUri, isTracked); + break; + } + + case 'addInterimFile': { + const { fileUri } = deserialize(msg.data); + this.handleAddInterimFile(fileUri); + break; + } + + case 'markAllFilesDirty': { + const { evenIfContentsAreSame } = deserialize(msg.data); + this.handleMarkAllFilesDirty(evenIfContentsAreSame); + break; + } + + case 'markFilesDirty': { + const { fileUris, evenIfContentsAreSame } = deserialize(msg.data); + this.handleMarkFilesDirty(fileUris, evenIfContentsAreSame); + break; + } + + case 'invalidateAndForceReanalysis': { + const { reason } = deserialize(msg.data); + this.handleInvalidateAndForceReanalysis(reason); + break; + } + + case 'restart': { + // recycle import resolver + this.handleRestart(); + break; + } + + case 'shutdown': { + this.handleShutdown(); + break; + } + + default: { + debug.fail(`${msg.requestType} is not expected. Message structure: ${JSON.stringify(msg)}`); + } + } + } + + protected abstract createHost(): Host; + + protected abstract createImportResolver( + serviceProvider: ServiceProvider, + options: ConfigOptions, + host: Host + ): ImportResolver; + + protected handleAnalyze(port: MessagePort, token: CancellationToken) { + // Report files to analyze first. 
+ const requiringAnalysisCount = this.program.getFilesToAnalyzeCount(); + + this.onAnalysisCompletion(port, { + diagnostics: [], + filesInProgram: this.program.getFileCount(), + requiringAnalysisCount: requiringAnalysisCount, + checkingOnlyOpenFiles: this.program.isCheckingOnlyOpenFiles(), + fatalErrorOccurred: false, + configParseErrorOccurred: false, + elapsedTime: 0, + reason: 'analysis', + }); + + this.handleResumeAnalysis(port, token); + } + + protected handleResumeAnalysis(port: MessagePort, token: CancellationToken) { + // Report results at the interval of the max analysis time. + const maxTime = { openFilesTimeInMs: 50, noOpenFilesTimeInMs: 200 }; + const moreToAnalyze = analyzeProgram( + this.program, + maxTime, + this._configOptions, + (result) => this.onAnalysisCompletion(port, result), + this.getConsole(), + token + ); + + if (moreToAnalyze) { + // There's more to analyze after we exceeded max time, + // so report that we are paused. The foreground thread will + // then queue up a message to resume the analysis. 
+ this._analysisPaused(port, token); + } else { + this.analysisDone(port, token); + } + } + + protected handleAnalyzeFile(fileUri: Uri, token: CancellationToken) { + throwIfCancellationRequested(token); + return this.program.analyzeFile(fileUri, token); + } + + protected handleAnalyzeFileAndGetDiagnostics(fileUri: Uri, token: CancellationToken) { + return this.program.analyzeFileAndGetDiagnostics(fileUri, token); + } + + protected handleGetDiagnosticsForRange(fileUri: Uri, range: Range, token: CancellationToken) { + throwIfCancellationRequested(token); + return this.program.getDiagnosticsForRange(fileUri, range); + } + + protected handleWriteTypeStub( + targetImportPath: Uri, + targetIsSingleFile: boolean, + stubPath: Uri, + token: CancellationToken + ) { + analyzeProgram( + this.program, + /* maxTime */ undefined, + this._configOptions, + nullCallback, + this.getConsole(), + token + ); + + this.program.writeTypeStub(targetImportPath, targetIsSingleFile, stubPath, token); + } + + protected handleSetImportResolver(hostKind: HostKind) { + this.importResolver = this.createImportResolver( + this.getServiceProvider(), + this._configOptions, + this.createHost() + ); + this.program.setImportResolver(this.importResolver); + } + + protected handleSetConfigOptions(configOptions: ConfigOptions) { + this._configOptions = configOptions; + + this.importResolver = this.createImportResolver( + this.getServiceProvider(), + this._configOptions, + this.importResolver.host + ); + this.program.setConfigOptions(this._configOptions); + this.program.setImportResolver(this.importResolver); + } + + protected handleSetTrackedFiles(fileUris: Uri[]) { + const diagnostics = this.program.setTrackedFiles(fileUris); + this._reportDiagnostics(diagnostics, this.program.getFilesToAnalyzeCount(), 0); + } + + protected handleSetAllowedThirdPartyImports(importNames: string[]) { + this.program.setAllowedThirdPartyImports(importNames); + } + + protected handleEnsurePartialStubPackages(executionRoot: 
string | undefined) { + const execEnv = this._configOptions + .getExecutionEnvironments() + .find((e) => e.root?.toString() === executionRoot); + if (execEnv) { + this.importResolver.ensurePartialStubPackages(execEnv); + } + } + + protected handleSetFileOpened( + fileUri: Uri, + version: number | null, + contents: string, + options: OpenFileOptions | undefined + ) { + this.program.setFileOpened( + fileUri, + version, + contents, + options + ? { + ...options, + chainedFileUri: Uri.fromJsonObj(options?.chainedFileUri), + } + : undefined + ); + } + + protected handleUpdateChainedFileUri(fileUri: Uri, chainedFileUri: Uri | undefined) { + this.program.updateChainedUri(fileUri, chainedFileUri); + } + + protected handleSetFileClosed(fileUri: Uri, isTracked: boolean | undefined) { + const diagnostics = this.program.setFileClosed(fileUri, isTracked); + this._reportDiagnostics(diagnostics, this.program.getFilesToAnalyzeCount(), 0); + } + + protected handleAddInterimFile(fileUri: Uri) { + this.program.addInterimFile(fileUri); + } + + protected handleMarkFilesDirty(fileUris: Uri[], evenIfContentsAreSame: boolean) { + this.program.markFilesDirty(fileUris, evenIfContentsAreSame); + } + + protected handleMarkAllFilesDirty(evenIfContentsAreSame: boolean) { + this.program.markAllFilesDirty(evenIfContentsAreSame); + } + + protected handleInvalidateAndForceReanalysis(reason: InvalidatedReason) { + // Make sure the import resolver doesn't have invalid + // cached entries. + this.importResolver.invalidateCache(); + + // Mark all files with one or more errors dirty. 
+ this.program.markAllFilesDirty(/* evenIfContentsAreSame */ true); + } + + protected handleRestart() { + this.importResolver = this.createImportResolver( + this.getServiceProvider(), + this._configOptions, + this.importResolver.host + ); + this.program.setImportResolver(this.importResolver); + } + + protected override handleShutdown() { + this._program.dispose(); + super.handleShutdown(); + } + + protected analysisDone(port: MessagePort, token: CancellationToken) { + port.postMessage({ requestType: 'analysisDone', data: getCancellationTokenId(token) }); + } + + protected onAnalysisCompletion(port: MessagePort, result: AnalysisResults) { + // Result URIs can't be sent in current form as they contain methods on + // them. This causes a DataCloneError when posting. + // See https://stackoverflow.com/questions/68467946/datacloneerror-the-object-could-not-be-cloned-firefox-browser + // We turn them back into JSON so we can use Uri.fromJsonObj on the other side. + port.postMessage({ requestType: 'analysisResult', data: serialize(result) }); + } + + private _onMessageWrapper(msg: BackgroundRequest) { + try { + return this.onMessage(msg); + } catch (e: any) { + // Don't crash the worker, just send an exception or cancel message + this.log(LogLevel.Log, `Background analysis exception leak: ${e}`); + + if (OperationCanceledException.is(e)) { + parentPort?.postMessage({ kind: 'cancelled', data: e.message }); + return; + } + + parentPort?.postMessage({ + kind: 'failed', + data: `Exception: for msg ${msg.requestType}: ${e.message} in ${e.stack}`, + }); + } + } + + private _reportDiagnostics( + diagnostics: FileDiagnostics[], + requiringAnalysisCount: RequiringAnalysisCount, + elapsedTime: number + ) { + if (parentPort) { + this.onAnalysisCompletion(parentPort, { + diagnostics, + filesInProgram: this.program.getFileCount(), + requiringAnalysisCount: requiringAnalysisCount, + checkingOnlyOpenFiles: this.program.isCheckingOnlyOpenFiles(), + fatalErrorOccurred: false, + 
configParseErrorOccurred: false, + elapsedTime, + reason: 'tracking', + }); + } + } + + private _analysisPaused(port: MessagePort, token: CancellationToken) { + port.postMessage({ requestType: 'analysisPaused', data: getCancellationTokenId(token) }); + } +} + +function convertAnalysisResults(result: AnalysisResults): AnalysisResults { + result.diagnostics = result.diagnostics.map((f: FileDiagnostics) => { + return { + fileUri: Uri.fromJsonObj(f.fileUri), + version: f.version, + diagnostics: convertDiagnostics(f.diagnostics), + }; + }); + + return result; +} + +function convertDiagnostics(diagnostics: Diagnostic[]) { + // Elements are typed as "any" since data crossing the process + // boundary loses type info. + return diagnostics.map((d: any) => { + const diag = new Diagnostic(d.category, d.message, d.range, d.priority); + if (d._actions) { + for (const action of d._actions) { + diag.addAction(action); + } + } + + if (d._rule) { + diag.setRule(d._rule); + } + + if (d._relatedInfo) { + for (const info of d._relatedInfo) { + diag.addRelatedInfo(info.message, info.uri, info.range); + } + } + + return diag; + }); +} + +export type BackgroundRequestKind = + | 'start' + | 'analyze' + | 'resumeAnalysis' + | 'setConfigOptions' + | 'setTrackedFiles' + | 'setAllowedThirdPartyImports' + | 'ensurePartialStubPackages' + | 'setFileOpened' + | 'updateChainedFileUri' + | 'setFileClosed' + | 'markAllFilesDirty' + | 'markFilesDirty' + | 'invalidateAndForceReanalysis' + | 'restart' + | 'getDiagnosticsForRange' + | 'writeTypeStub' + | 'setImportResolver' + | 'shutdown' + | 'addInterimFile' + | 'analyzeFile' + | 'analyzeFileAndGetDiagnostics' + | 'cacheUsageBuffer'; + +export interface BackgroundRequest { + requestType: BackgroundRequestKind; + data: string | null; + port?: MessagePort | undefined; + sharedUsageBuffer?: SharedArrayBuffer; +} + +export type BackgroundResponseKind = 'log' | 'analysisResult' | 'analysisPaused' | 'analysisDone'; + +export interface BackgroundResponse { + 
requestType: BackgroundResponseKind; + data: string | null; +} + +export interface RefreshOptions { + // No files/folders are added or removed. only changes. + changesOnly: boolean; + // Specific files that changed (if known). When provided, only these files should be marked dirty. + // Using UriMap for O(1) lookup instead of O(n) with array. + changedFileUris?: UriMap; +} diff --git a/python-parser/packages/pyright-internal/src/backgroundThreadBase.ts b/python-parser/packages/pyright-internal/src/backgroundThreadBase.ts new file mode 100644 index 00000000..6c839637 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/backgroundThreadBase.ts @@ -0,0 +1,286 @@ +/* + * backgroundThreadBase.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * base class for background worker thread. + */ + +import { MessagePort, parentPort, TransferListItem } from 'worker_threads'; + +import { CacheManager } from './analyzer/cacheManager'; +import { + getCancellationTokenId, + OperationCanceledException, + setCancellationFolderName, +} from './common/cancellationUtils'; +import { ConfigOptions } from './common/configOptions'; +import { ConsoleInterface, LogLevel } from './common/console'; +import { isThenable } from './common/core'; +import * as debug from './common/debug'; +import { createFromRealFileSystem, RealTempFile } from './common/realFileSystem'; +import { ServiceKeys } from './common/serviceKeys'; +import { ServiceProvider } from './common/serviceProvider'; +import './common/serviceProviderExtensions'; +import { Uri } from './common/uri/uri'; +import { CancellationToken } from 'vscode-jsonrpc'; +import { getCancellationTokenFromId } from './common/fileBasedCancellationUtils'; + +export class BackgroundConsole implements ConsoleInterface { + private _level = LogLevel.Log; + + constructor(private readonly _parentPort: MessagePort) {} + + get level() { + return this._level; + } + + set level(value: LogLevel) { + this._level = value; 
+ } + + log(msg: string) { + this.post(LogLevel.Log, msg); + } + + info(msg: string) { + this.post(LogLevel.Info, msg); + } + + warn(msg: string) { + this.post(LogLevel.Warn, msg); + } + + error(msg: string) { + this.post(LogLevel.Error, msg); + } + + protected post(level: LogLevel, msg: string) { + this._parentPort.postMessage({ requestType: 'log', data: serialize({ level: level, message: `BG: ${msg}` }) }); + } +} + +export class BackgroundThreadBase { + private readonly _serviceProvider: ServiceProvider; + + protected constructor(data: InitializationData, serviceProvider?: ServiceProvider) { + setCancellationFolderName(data.cancellationFolderName); + + // Make sure there's a file system and a console interface. + this._serviceProvider = serviceProvider ?? new ServiceProvider(); + if (!this._serviceProvider.tryGet(ServiceKeys.console)) { + this._serviceProvider.add(ServiceKeys.console, new BackgroundConsole(parentPort!)); + } + + let tempFile = this._serviceProvider.tryGet(ServiceKeys.tempFile); + if (!tempFile) { + tempFile = new RealTempFile(data.tempFileName); + this._serviceProvider.add(ServiceKeys.tempFile, tempFile); + } + + if (!this._serviceProvider.tryGet(ServiceKeys.caseSensitivityDetector)) { + this._serviceProvider.add(ServiceKeys.caseSensitivityDetector, tempFile as RealTempFile); + } + + if (!this._serviceProvider.tryGet(ServiceKeys.fs)) { + this._serviceProvider.add( + ServiceKeys.fs, + createFromRealFileSystem( + this._serviceProvider.get(ServiceKeys.caseSensitivityDetector), + this.getConsole() + ) + ); + } + if (!this._serviceProvider.tryGet(ServiceKeys.cacheManager)) { + this._serviceProvider.add(ServiceKeys.cacheManager, new CacheManager()); + } + + // Stash the base directory into a global variable. 
+ (global as any).__rootDirectory = Uri.parse(data.rootUri, this._serviceProvider).getFilePath(); + } + + protected get fs() { + return this._serviceProvider.fs(); + } + + protected log(level: LogLevel, msg: string) { + parentPort?.postMessage({ requestType: 'log', data: serialize({ level: level, message: `BG: ${msg}` }) }); + } + + protected getConsole() { + return this._serviceProvider.console(); + } + + protected getServiceProvider() { + return this._serviceProvider; + } + + protected handleShutdown() { + this._serviceProvider.dispose(); + parentPort?.close(); + } +} + +// Function used to serialize specific types that can't automatically be serialized. +// Exposed here so it can be reused by a caller that wants to add more cases. +export function serializeReplacer(value: any) { + if (Uri.is(value) && value.toJsonObj !== undefined) { + return { __serialized_uri_val: value.toJsonObj() }; + } + if (value instanceof Map) { + return { __serialized_map_val: [...value] }; + } + if (value instanceof Set) { + return { __serialized_set_val: [...value] }; + } + if (value instanceof RegExp) { + return { __serialized_regexp_val: { source: value.source, flags: value.flags } }; + } + if (value instanceof ConfigOptions) { + const entries = Object.entries(value); + return { __serialized_config_options: entries.reduce((obj, e, i) => ({ ...obj, [e[0]]: e[1] }), {}) }; + } + if (CancellationToken.is(value)) { + return { cancellation_token_val: getCancellationTokenId(value) ?? null }; + } + + return value; +} + +export function serialize(obj: any): string { + // Convert the object to a string so it can be sent across a message port. 
+ return JSON.stringify(obj, (k, v) => serializeReplacer(v)); +} + +export function deserializeReviver(value: any) { + if (value && typeof value === 'object') { + if (value.__serialized_uri_val !== undefined) { + return Uri.fromJsonObj(value.__serialized_uri_val); + } + if (value.__serialized_map_val) { + return new Map(value.__serialized_map_val); + } + if (value.__serialized_set_val) { + return new Set(value.__serialized_set_val); + } + if (value.__serialized_regexp_val) { + return new RegExp(value.__serialized_regexp_val.source, value.__serialized_regexp_val.flags); + } + if (value.__serialized_config_options) { + const configOptions = new ConfigOptions(value.__serialized_config_options.projectRoot); + Object.assign(configOptions, value.__serialized_config_options); + return configOptions; + } + if (Object.keys(value).includes('cancellation_token_val')) { + return getCancellationTokenFromId(value.cancellation_token_val); + } + } + return value; +} + +export function deserialize(json: string | null): T { + if (!json) { + return undefined as any; + } + // Convert the string back to an object. 
+ return JSON.parse(json, (k, v) => deserializeReviver(v)); +} + +export interface MessagePoster { + postMessage(value: any, transferList?: ReadonlyArray): void; +} + +export function run(code: () => Promise, port: MessagePoster): Promise; +export function run(code: () => Promise, port: MessagePoster, serializer: (obj: any) => any): Promise; +export function run(code: () => T, port: MessagePoster): void; +export function run(code: () => T, port: MessagePoster, serializer: (obj: any) => any): void; +export function run( + code: () => T | Promise, + port: MessagePoster, + serializer = serialize +): void | Promise { + try { + const result = code(); + if (!isThenable(result)) { + port.postMessage({ kind: 'ok', data: serializer(result) }); + return; + } + + return result.then( + (r) => { + port.postMessage({ kind: 'ok', data: serializer(r) }); + }, + (e) => { + if (OperationCanceledException.is(e)) { + port.postMessage({ kind: 'cancelled', data: e.message }); + return; + } + + port.postMessage({ kind: 'failed', data: `Exception: ${e.message} in ${e.stack}` }); + } + ); + } catch (e: any) { + if (OperationCanceledException.is(e)) { + port.postMessage({ kind: 'cancelled', data: e.message }); + return; + } + + port.postMessage({ kind: 'failed', data: `Exception: ${e.message} in ${e.stack}` }); + } +} + +export type BackgroundDataHandler = (data: any, port: MessagePort) => void; + +export function getBackgroundWaiter( + port: MessagePort, + options?: { deserializer?: (v: any) => T; dataHandler?: BackgroundDataHandler } +): Promise { + const deserializer = options?.deserializer ?? deserialize; + const dataHandler = options?.dataHandler ?? 
(() => {}); + + return new Promise((resolve, reject) => { + port.on('message', (m: RequestResponse) => { + switch (m.kind) { + case 'ok': + resolve(deserializer(m.data)); + break; + + case 'cancelled': + reject(new OperationCanceledException()); + break; + + case 'failed': + reject(m.data); + break; + + case 'data': + // Handle streaming data from the background thread + dataHandler(m.data, port); + break; + + default: + debug.fail(`unknown kind ${m.kind} ${JSON.stringify(m)}`); + } + }); + }); +} + +export interface InitializationData { + rootUri: string; + workspaceRootUri: string; + tempFileName: string; + serviceId: string; + workerIndex: number; + cancellationFolderName: string | undefined; + runner: string | undefined; +} + +export interface RequestResponse { + kind: 'ok' | 'failed' | 'cancelled' | 'data'; + data: any; +} + +export interface LogData { + level: LogLevel; + message: string; +} diff --git a/python-parser/packages/pyright-internal/src/commands/commandController.ts b/python-parser/packages/pyright-internal/src/commands/commandController.ts new file mode 100644 index 00000000..390ab03d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/commands/commandController.ts @@ -0,0 +1,73 @@ +/* + * commandController.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Implements language server commands execution functionality. 
+ */ + +import { CancellationToken, ExecuteCommandParams, ResponseError } from 'vscode-languageserver'; + +import { LanguageServerInterface } from '../common/languageServerInterface'; +import { Commands } from './commands'; +import { CreateTypeStubCommand } from './createTypeStub'; +import { DumpFileDebugInfoCommand } from './dumpFileDebugInfoCommand'; +import { QuickActionCommand } from './quickActionCommand'; +import { RestartServerCommand } from './restartServer'; + +export interface ServerCommand { + execute(cmdParams: ExecuteCommandParams, token: CancellationToken): Promise; +} + +export class CommandController implements ServerCommand { + private _createStub: CreateTypeStubCommand; + private _restartServer: RestartServerCommand; + private _quickAction: QuickActionCommand; + private _dumpFileDebugInfo: DumpFileDebugInfoCommand; + + constructor(ls: LanguageServerInterface) { + this._createStub = new CreateTypeStubCommand(ls); + this._restartServer = new RestartServerCommand(ls); + this._quickAction = new QuickActionCommand(ls); + this._dumpFileDebugInfo = new DumpFileDebugInfoCommand(ls); + } + + async execute(cmdParams: ExecuteCommandParams, token: CancellationToken): Promise { + switch (cmdParams.command) { + case Commands.orderImports: { + return this._quickAction.execute(cmdParams, token); + } + + case Commands.createTypeStub: { + return this._createStub.execute(cmdParams, token); + } + + case Commands.restartServer: { + return this._restartServer.execute(cmdParams); + } + + case Commands.dumpFileDebugInfo: { + return this._dumpFileDebugInfo.execute(cmdParams, token); + } + + default: { + return new ResponseError(1, 'Unsupported command'); + } + } + } + + isLongRunningCommand(command: string): boolean { + switch (command) { + case Commands.createTypeStub: + case Commands.restartServer: + return true; + + default: + return false; + } + } + + isRefactoringCommand(command: string): boolean { + return false; + } +} diff --git 
a/python-parser/packages/pyright-internal/src/commands/commandResult.ts b/python-parser/packages/pyright-internal/src/commands/commandResult.ts new file mode 100644 index 00000000..6df995fb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/commands/commandResult.ts @@ -0,0 +1,21 @@ +/* + * commandResult.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * wrapper for returning custom command data + */ + +import { WorkspaceEdit } from 'vscode-languageserver-types'; + +export interface CommandResult { + data?: any; + label: string; + edits: WorkspaceEdit; +} + +export namespace CommandResult { + export function is(value: any): value is CommandResult { + return value && value.label !== undefined && value.edits && WorkspaceEdit.is(value.edits); + } +} diff --git a/python-parser/packages/pyright-internal/src/commands/commands.ts b/python-parser/packages/pyright-internal/src/commands/commands.ts new file mode 100644 index 00000000..7424025b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/commands/commands.ts @@ -0,0 +1,21 @@ +/* + * commands.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Command identifier strings. 
+ */ + +export const enum Commands { + createTypeStub = 'pyright.createtypestub', + restartServer = 'pyright.restartserver', + orderImports = 'pyright.organizeimports', + unusedImport = 'pyright.unusedImport', + dumpFileDebugInfo = 'pyright.dumpFileDebugInfo', + dumpTokens = 'pyright.dumpTokens', + dumpNodes = 'pyright.dumpNodes', + dumpTypes = 'pyright.dumpTypes', + dumpCachedTypes = 'pyright.dumpCachedTypes', + dumpCodeFlowGraph = 'pyright.dumpCodeFlowGraph', +} diff --git a/python-parser/packages/pyright-internal/src/commands/createTypeStub.ts b/python-parser/packages/pyright-internal/src/commands/createTypeStub.ts new file mode 100644 index 00000000..91f93435 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/commands/createTypeStub.ts @@ -0,0 +1,73 @@ +/* + * createTypeStub.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Implements 'create stub' command functionality. + */ + +import { CancellationToken, ExecuteCommandParams } from 'vscode-languageserver'; + +import { OperationCanceledException } from '../common/cancellationUtils'; +import { LanguageServerBaseInterface, LanguageServerInterface } from '../common/languageServerInterface'; +import { AnalyzerServiceExecutor } from '../languageService/analyzerServiceExecutor'; +import { ServerCommand } from './commandController'; +import { Uri } from '../common/uri/uri'; +import { Workspace } from '../workspaceFactory'; + +export class CreateTypeStubCommand implements ServerCommand { + constructor(private _ls: LanguageServerInterface) { + // Empty + } + + async execute(cmdParams: ExecuteCommandParams, token: CancellationToken): Promise { + if (!cmdParams.arguments || cmdParams.arguments.length < 2) { + return undefined; + } + + const workspaceRoot = Uri.parse(cmdParams.arguments[0] as string, this._ls.serviceProvider); + const importName = cmdParams.arguments[1] as string; + const callingFile = Uri.parse(cmdParams.arguments[2] as string, this._ls.serviceProvider); 
+ + const workspace = await this._ls.getWorkspaceForFile(callingFile ?? workspaceRoot); + return await new TypeStubCreator(this._ls).create(workspace, importName, token); + } +} + +export class TypeStubCreator { + constructor(private _ls: LanguageServerBaseInterface) {} + + async create(workspace: Workspace, importName: string, token: CancellationToken): Promise { + const service = await AnalyzerServiceExecutor.cloneService(this._ls, workspace, { + typeStubTargetImportName: importName, + useBackgroundAnalysis: true, + }); + + try { + await service.writeTypeStubInBackground(token); + service.dispose(); + + const infoMessage = `Type stub was successfully created for '${importName}'.`; + this._ls.window.showInformationMessage(infoMessage); + + // This is called after a new type stub has been created. It allows + // us to invalidate caches and force reanalysis of files that potentially + // are affected by the appearance of a new type stub. + this._ls.reanalyze(); + } catch (err) { + const isCancellation = OperationCanceledException.is(err); + if (isCancellation) { + const errMessage = `Type stub creation for '${importName}' was canceled`; + this._ls.console.error(errMessage); + } else { + let errMessage = ''; + if (err instanceof Error) { + errMessage = ': ' + err.message; + } + errMessage = `An error occurred when creating type stub for '${importName}'` + errMessage; + this._ls.console.error(errMessage); + this._ls.window.showErrorMessage(errMessage); + } + } + } +} diff --git a/python-parser/packages/pyright-internal/src/commands/dumpFileDebugInfoCommand.ts b/python-parser/packages/pyright-internal/src/commands/dumpFileDebugInfoCommand.ts new file mode 100644 index 00000000..3810bcc6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/commands/dumpFileDebugInfoCommand.ts @@ -0,0 +1,120 @@ +/* + * dumpFileDebugInfoCommand.ts + * Copyright (c) Microsoft Corporation. 
+ * + * Dump various token/node/type info + */ + +import { CancellationToken, ExecuteCommandParams } from 'vscode-languageserver'; + +import { getFlowNode } from '../analyzer/analyzerNodeInfo'; +import { findNodeByOffset } from '../analyzer/parseTreeUtils'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { dumpSyntaxInfo, dumpTokenInfo, dumpTypeInfo } from '../common/languageInfoUtils'; +import { LanguageServerInterface } from '../common/languageServerInterface'; +import { Uri } from '../common/uri/uri'; +import { Workspace } from '../workspaceFactory'; +import { ServerCommand } from './commandController'; + +export class DumpFileDebugInfoCommand implements ServerCommand { + constructor(private _ls: LanguageServerInterface) {} + + async execute(params: ExecuteCommandParams, token: CancellationToken): Promise { + throwIfCancellationRequested(token); + + if (!params.arguments || params.arguments.length < 2) { + return []; + } + + const fileUri = Uri.parse(params.arguments[0] as string, this._ls.serviceProvider); + const workspace = await this._ls.getWorkspaceForFile(fileUri); + + return new DumpFileDebugInfo().dump(workspace, fileUri, params.arguments, token); + } +} + +export class DumpFileDebugInfo { + dump(workspace: Workspace, fileUri: Uri, args: any[], token: CancellationToken) { + return workspace.service.run((p) => { + const kind = args[1]; + + const parseResults = workspace.service.getParseResults(workspace.service.fs.realCasePath(fileUri)); + if (!parseResults) { + return []; + } + + const output: string[] = []; + const collectingConsole = { + info: (m: string) => { + output.push(m); + }, + log: (m: string) => { + output.push(m); + }, + error: (m: string) => { + output.push(m); + }, + warn: (m: string) => { + output.push(m); + }, + }; + + collectingConsole.info(`* Dump debug info for '${fileUri.toUserVisibleString()}'`); + + switch (kind) { + case 'tokens': { + collectingConsole.info(dumpTokenInfo(fileUri, parseResults)); 
+ break; + } + case 'nodes': { + collectingConsole.info(dumpSyntaxInfo(fileUri, parseResults)); + break; + } + case 'types': { + const evaluator = p.evaluator; + const start = args[2] as number; + const end = args[3] as number; + if (!evaluator || !start || !end) { + return []; + } + + collectingConsole.info(dumpTypeInfo(fileUri, evaluator, parseResults, start, end)); + break; + } + case 'cachedtypes': { + const evaluator = p.evaluator; + const start = args[2] as number; + const end = args[3] as number; + if (!evaluator || !start || !end) { + return []; + } + + collectingConsole.info(dumpTypeInfo(fileUri, evaluator, parseResults, start, end, true)); + break; + } + + case 'codeflowgraph': { + const evaluator = p.evaluator; + const offset = args[2] as number; + if (!evaluator || offset === undefined) { + return []; + } + const node = findNodeByOffset(parseResults.parserOutput.parseTree, offset); + if (!node) { + return []; + } + const flowNode = getFlowNode(node); + if (!flowNode) { + return []; + } + collectingConsole.info(`* CodeFlow Graph`); + evaluator.printControlFlowGraph(flowNode, undefined, 'Dump CodeFlowGraph', collectingConsole); + } + } + + // Print all of the output in one message so the trace log is smaller. + workspace.service.serviceProvider.console().info(output.join('\n')); + return []; + }, token); + } +} diff --git a/python-parser/packages/pyright-internal/src/commands/quickActionCommand.ts b/python-parser/packages/pyright-internal/src/commands/quickActionCommand.ts new file mode 100644 index 00000000..c4086cd3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/commands/quickActionCommand.ts @@ -0,0 +1,38 @@ +/* + * quickActionCommand.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Implements command that maps to a quick action. 
+ */ + +import { CancellationToken, ExecuteCommandParams } from 'vscode-languageserver'; + +import { convertToFileTextEdits, convertToWorkspaceEdit } from '../common/workspaceEditUtils'; +import { LanguageServerInterface } from '../common/languageServerInterface'; +import { performQuickAction } from '../languageService/quickActions'; +import { ServerCommand } from './commandController'; +import { Commands } from './commands'; +import { Uri } from '../common/uri/uri'; + +export class QuickActionCommand implements ServerCommand { + constructor(private _ls: LanguageServerInterface) {} + + async execute(params: ExecuteCommandParams, token: CancellationToken): Promise { + if (params.arguments && params.arguments.length >= 1) { + const docUri = Uri.parse(params.arguments[0] as string, this._ls.serviceProvider); + const otherArgs = params.arguments.slice(1); + const workspace = await this._ls.getWorkspaceForFile(docUri); + + if (params.command === Commands.orderImports && workspace.disableOrganizeImports) { + return []; + } + + const editActions = workspace.service.run((p) => { + return performQuickAction(p, docUri, params.command, otherArgs, token); + }, token); + + return convertToWorkspaceEdit(workspace.service.fs, convertToFileTextEdits(docUri, editActions ?? [])); + } + } +} diff --git a/python-parser/packages/pyright-internal/src/commands/restartServer.ts b/python-parser/packages/pyright-internal/src/commands/restartServer.ts new file mode 100644 index 00000000..0c1a3f4a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/commands/restartServer.ts @@ -0,0 +1,20 @@ +/* + * restartServer.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Implements 'restart server' command functionality. 
+ */ + +import { ExecuteCommandParams } from 'vscode-languageserver'; + +import { LanguageServerInterface } from '../common/languageServerInterface'; +import { ServerCommand } from './commandController'; + +export class RestartServerCommand implements ServerCommand { + constructor(private _ls: LanguageServerInterface) {} + + async execute(cmdParams: ExecuteCommandParams): Promise { + this._ls.restart(); + } +} diff --git a/python-parser/packages/pyright-internal/src/common/asyncInitialization.ts b/python-parser/packages/pyright-internal/src/common/asyncInitialization.ts new file mode 100644 index 00000000..d5086b43 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/asyncInitialization.ts @@ -0,0 +1,19 @@ +/* + * asyncInitialization.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * helpers shared between multiple packages such as pyright-internal and pyright + */ + +import { ensureTomlModuleLoaded } from './tomlUtils'; + +export async function initializeDependencies() { + // Ensure dynamic imports are loaded. + await ensureTomlModuleLoaded(); + + if (process.env.NODE_ENV === 'production') { + // eslint-disable-next-line @typescript-eslint/no-var-requires + require('source-map-support').install(); + } +} diff --git a/python-parser/packages/pyright-internal/src/common/cancellationUtils.ts b/python-parser/packages/pyright-internal/src/common/cancellationUtils.ts new file mode 100644 index 00000000..5924a2e7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/cancellationUtils.ts @@ -0,0 +1,253 @@ +/* + * cancellationUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Helper methods relating to cancellation. 
+ */ + +import { AbstractCancellationTokenSource, CancellationTokenSource, Emitter, Event } from 'vscode-jsonrpc'; +import { CancellationToken, Disposable, LSPErrorCodes, ResponseError } from 'vscode-languageserver'; + +import { isDebugMode } from './core'; +import { Uri } from './uri/uri'; +import { UriEx } from './uri/uriUtils'; + +export interface CancellationProvider { + createCancellationTokenSource(): AbstractCancellationTokenSource; +} + +export namespace CancellationProvider { + export function is(value: any): value is CancellationProvider { + return value && !!value.createCancellationTokenSource; + } +} + +let cancellationFolderName: string | undefined; + +export function getCancellationFolderName() { + return cancellationFolderName; +} + +export function setCancellationFolderName(folderName?: string) { + cancellationFolderName = folderName; +} + +export function invalidateTypeCacheIfCanceled(cb: () => T): T { + try { + return cb(); + } catch (e: any) { + if (OperationCanceledException.is(e)) { + // If the work was canceled before the function type was updated, the + // function type in the type cache is in an invalid, partially-constructed state. + e.isTypeCacheInvalid = true; + } + + throw e; + } +} + +export class OperationCanceledException extends ResponseError { + // If true, indicates that the cancellation may have left the type cache + // in an invalid state. + isTypeCacheInvalid = false; + + constructor(message?: string | undefined) { + super(LSPErrorCodes.RequestCancelled, message || 'request cancelled'); + } + + static is(e: any): e is OperationCanceledException { + return e.code === LSPErrorCodes.RequestCancelled; + } +} + +export function throwIfCancellationRequested(token: CancellationToken) { + // Don't use cancellation in debug mode because it interferes with + // debugging if requests are cancelled. 
+ if (!isDebugMode() && token.isCancellationRequested) { + throw new OperationCanceledException(); + } +} + +const nullDisposable = Disposable.create(() => {}); + +export function onCancellationRequested(token: CancellationToken, func: (i: any) => void): Disposable { + try { + return token.onCancellationRequested(func); + } catch { + // Certain cancellation token implementations, like SharedArrayCancellation + // (https://github.com/microsoft/vscode-languageserver-node/blob/main/jsonrpc/src/common/sharedArrayCancellation.ts#L70), + // do not support the `onCancellationRequested` method. In such cases, proceed to the next token. + return nullDisposable; + } +} + +export function CancelAfter(provider: CancellationProvider, ...tokens: CancellationToken[]) { + const source = provider.createCancellationTokenSource(); + setupCombinedTokensFor(source, ...tokens); + return source; +} + +export function createCombinedToken(...tokens: CancellationToken[]): CancellationToken { + const source = new CancellationTokenSource(); + setupCombinedTokensFor(source, ...tokens); + return source.token; +} + +export function setupCombinedTokensFor(source: AbstractCancellationTokenSource, ...tokens: CancellationToken[]) { + // If any token is already cancelled, cancel immediately. 
+ for (const token of tokens) { + if (!token.isCancellationRequested) { + continue; + } + + source.cancel(); + return; + } + + const disposables: Disposable[] = []; + for (const token of tokens) { + disposables.push( + onCancellationRequested(token, () => { + source.cancel(); + }) + ); + } + + disposables.push( + onCancellationRequested(source.token, () => { + disposables.forEach((d) => d.dispose()); + }) + ); +} + +export class DefaultCancellationProvider implements CancellationProvider { + createCancellationTokenSource(): AbstractCancellationTokenSource { + return new CancellationTokenSource(); + } +} + +export const CancelledTokenId = 'cancelled'; + +export function getCancellationTokenId(token: CancellationToken): string | undefined { + if (token === CancellationToken.Cancelled) { + // Ensure the token is recognized as already cancelled. Returning `undefined` would be interpreted as CancellationToken.None. + return CancelledTokenId; + } + + return token instanceof FileBasedToken ? token.id : undefined; +} + +export class FileBasedToken implements CancellationToken { + protected readonly cancellationFilePath: Uri; + + protected isCancelled = false; + private _emitter: Emitter | undefined; + + constructor(cancellationId: string, private _fs: { statSync(fileUri: Uri): void }) { + // Normally, `UriEx` is intended for use in tests only. However, this is a special case + // because we construct the cancellationId and control the file casing. + this.cancellationFilePath = UriEx.file(cancellationId); + } + + get id(): string { + return this.cancellationFilePath.toString(); + } + + get isCancellationRequested(): boolean { + if (this.isCancelled) { + return true; + } + + if (CancellationThrottle.shouldCheck() && this._pipeExists()) { + // The first time it encounters the cancellation file, it will + // cancel itself and raise a cancellation event. + // In this mode, cancel() might not be called explicitly by + // jsonrpc layer. 
+ this.cancel(); + } + + return this.isCancelled; + } + + get onCancellationRequested(): Event { + if (!this._emitter) { + this._emitter = new Emitter(); + } + return this._emitter.event; + } + + cancel() { + if (!this.isCancelled) { + this.isCancelled = true; + if (this._emitter) { + this._emitter.fire(undefined); + this._disposeEmitter(); + } + } + } + + dispose(): void { + this._disposeEmitter(); + } + + private _disposeEmitter() { + if (this._emitter) { + this._emitter.dispose(); + this._emitter = undefined; + } + } + + private _pipeExists(): boolean { + try { + this._fs.statSync(this.cancellationFilePath); + return true; + } catch (e: any) { + return false; + } + } +} + +export class CancellationThrottle { + private static _lastCheckTimestamp = 0; + + static shouldCheck() { + // Throttle cancellation checks to one every 5ms. This value + // was selected through empirical testing. If we call the + // file system more often than this, type analysis performance + // is affected. If we call it less often, performance doesn't + // improve much, but responsiveness suffers. 
+ const minTimeBetweenChecksInMs = 5; + const curTimestamp = Date.now().valueOf(); + const timeSinceLastCheck = curTimestamp - this._lastCheckTimestamp; + + if (timeSinceLastCheck >= minTimeBetweenChecksInMs) { + this._lastCheckTimestamp = curTimestamp; + return true; + } + + return false; + } +} + +export async function raceCancellation(token?: CancellationToken, ...promises: Promise[]): Promise { + if (!token) { + return Promise.race(promises); + } + if (token.isCancellationRequested) { + throw new OperationCanceledException(); + } + + return new Promise((resolve, reject) => { + if (token.isCancellationRequested) { + return reject(new OperationCanceledException()); + } + const disposable = onCancellationRequested(token, () => { + disposable.dispose(); + reject(new OperationCanceledException()); + }); + Promise.race(promises) + .then(resolve, reject) + .finally(() => disposable.dispose()); + }); +} diff --git a/python-parser/packages/pyright-internal/src/common/caseSensitivityDetector.ts b/python-parser/packages/pyright-internal/src/common/caseSensitivityDetector.ts new file mode 100644 index 00000000..c3cf8c53 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/caseSensitivityDetector.ts @@ -0,0 +1,17 @@ +/* + * caseSensitivityDetector.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * interface to determine whether the given uri string should be case sensitive or not. 
+ */ + +export interface CaseSensitivityDetector { + isCaseSensitive(uri: string): boolean; +} + +export namespace CaseSensitivityDetector { + export function is(value: any): value is CaseSensitivityDetector { + return !!value.isCaseSensitive; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/charCodes.ts b/python-parser/packages/pyright-internal/src/common/charCodes.ts new file mode 100644 index 00000000..99698ffa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/charCodes.ts @@ -0,0 +1,162 @@ +/* + * charCodes.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Based on code from typescript-char: + * https://github.com/mason-lang/typescript-char + * + * Character code definitions. + */ + +export const enum Char { + Null = 0, + StartOfHeading = 1, + StartOfText = 2, + EndOfText = 3, + EndOfTransmission = 4, + Enquiry = 5, + Acknowledge = 6, + Bell = 7, + Backspace = 8, + Tab = 9, + LineFeed = 0xa, + VerticalTab = 0xb, + FormFeed = 0xc, + CarriageReturn = 0xd, + ShiftOut = 0xe, + ShirtIn = 0xf, + DataLineEscape = 0x10, + DeviceControl1 = 0x11, + DeviceControl2 = 0x12, + DeviceControl3 = 0x13, + DeviceControl4 = 0x14, + NegativeAcknowledgement = 0x15, + SynchronousIdle = 0x16, + EndOfTransmitBlock = 0x17, + Cancel = 0x18, + EndOfMedium = 0x19, + Substitute = 0x1a, + Escape = 0x1b, + FileSeparator = 0x1c, + GroupSeparator = 0x1d, + RecordSeparator = 0x1e, + UnitSeparator = 0x1f, + + // Printable characters + Space = 0x20, + ExclamationMark = 0x21, + DoubleQuote = 0x22, + Hash = 0x23, + Dollar = 0x24, + Percent = 0x25, + Ampersand = 0x26, + SingleQuote = 0x27, + OpenParenthesis = 0x28, + CloseParenthesis = 0x29, + Asterisk = 0x2a, + Plus = 0x2b, + Comma = 0x2c, + Hyphen = 0x2d, + Period = 0x2e, + Slash = 0x2f, + _0 = 0x30, + _1 = 0x31, + _2 = 0x32, + _3 = 0x33, + _4 = 0x34, + _5 = 0x35, + _6 = 0x36, + _7 = 0x37, + _8 = 0x38, + _9 = 0x39, + Colon = 0x3a, + Semicolon = 0x3b, + Less = 0x3c, + 
Equal = 0x3d, + Greater = 0x3e, + QuestionMark = 0x3f, + At = 0x40, + A = 0x41, + B = 0x42, + C = 0x43, + D = 0x44, + E = 0x45, + F = 0x46, + G = 0x47, + H = 0x48, + I = 0x49, + J = 0x4a, + K = 0x4b, + L = 0x4c, + M = 0x4d, + N = 0x4e, + O = 0x4f, + P = 0x50, + Q = 0x51, + R = 0x52, + S = 0x53, + T = 0x54, + U = 0x55, + V = 0x56, + W = 0x57, + X = 0x58, + Y = 0x59, + Z = 0x5a, + OpenBracket = 0x5b, + Backslash = 0x5c, + CloseBracket = 0x5d, + Caret = 0x5e, + Underscore = 0x5f, + Backtick = 0x60, + a = 0x61, + b = 0x62, + c = 0x63, + d = 0x64, + e = 0x65, + f = 0x66, + g = 0x67, + h = 0x68, + i = 0x69, + j = 0x6a, + k = 0x6b, + l = 0x6c, + m = 0x6d, + n = 0x6e, + o = 0x6f, + p = 0x70, + q = 0x71, + r = 0x72, + s = 0x73, + t = 0x74, + u = 0x75, + v = 0x76, + w = 0x77, + x = 0x78, + y = 0x79, + z = 0x7a, + OpenBrace = 0x7b, + Bar = 0x7c, + CloseBrace = 0x7d, + Tilde = 0x7e, + Delete = 0x7f, + + // Other space characters + NonBreakingSpace = 0xa0, + EnQuad = 0x2000, + EmQuad = 0x2001, + EnSpace = 0x2002, + EmSpace = 0x2003, + ThreePerEmSpace = 0x2004, + FourPerEmSpace = 0x2005, + SixPerEmSpace = 0x2006, + FigureSpace = 0x2007, + PunctuationSpace = 0x2008, + ThinSpace = 0x2009, + HairSpace = 0x200a, + ZeroWidthSpace = 0x200b, + NarrowNoBreakSpace = 0x202f, + IdeographicSpace = 0x3000, + MathematicalSpace = 0x205f, + Ogham = 0x1680, +} diff --git a/python-parser/packages/pyright-internal/src/common/chokidarFileWatcherProvider.ts b/python-parser/packages/pyright-internal/src/common/chokidarFileWatcherProvider.ts new file mode 100644 index 00000000..1f952f0b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/chokidarFileWatcherProvider.ts @@ -0,0 +1,70 @@ +/* + * chokidarFileWatcherProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Implements a FileWatcherProvider using chokidar. 
+ */ + +import * as chokidar from 'chokidar'; + +import { ConsoleInterface } from './console'; +import { FileWatcher, FileWatcherEventHandler, FileWatcherProvider } from './fileWatcher'; + +const _isMacintosh = process.platform === 'darwin'; +const _isLinux = process.platform === 'linux'; + +export class ChokidarFileWatcherProvider implements FileWatcherProvider { + constructor(private _console?: ConsoleInterface) {} + + createFileWatcher(paths: string[], listener: FileWatcherEventHandler): FileWatcher { + return this._createFileSystemWatcher(paths).on('all', listener); + } + + private _createFileSystemWatcher(paths: string[]): chokidar.FSWatcher { + // The following options are copied from VS Code source base. It also + // uses chokidar for its file watching. + const watcherOptions: chokidar.WatchOptions = { + ignoreInitial: true, + ignorePermissionErrors: true, + followSymlinks: true, // this is the default of chokidar and supports file events through symlinks + interval: 1000, // while not used in normal cases, if any error causes chokidar to fallback to polling, increase its intervals + binaryInterval: 1000, + disableGlobbing: true, // fix https://github.com/Microsoft/vscode/issues/4586 + awaitWriteFinish: { + // this will make sure we re-scan files once file changes are written to disk + stabilityThreshold: 1000, + pollInterval: 1000, + }, + }; + + if (_isMacintosh) { + // Explicitly disable on MacOS because it uses up large amounts of memory + // and CPU for large file hierarchies, resulting in instability and crashes. 
+ watcherOptions.usePolling = false; + } + + const excludes: string[] = ['**/__pycache__/**']; + if (_isMacintosh || _isLinux) { + if (paths.some((path) => path === '' || path === '/')) { + excludes.push('/dev/**'); + if (_isLinux) { + excludes.push('/proc/**', '/sys/**'); + } + } + } + watcherOptions.ignored = excludes; + + const watcher = chokidar.watch(paths, watcherOptions); + watcher.on('error', (_) => { + this._console?.error('Error returned from file system watcher.'); + }); + + // Detect if for some reason the native watcher library fails to load + if (_isMacintosh && !watcher.options.useFsEvents) { + this._console?.info('Watcher could not use native fsevents library. File system watcher disabled.'); + } + + return watcher; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/collectionUtils.ts b/python-parser/packages/pyright-internal/src/common/collectionUtils.ts new file mode 100644 index 00000000..0135e0e4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/collectionUtils.ts @@ -0,0 +1,412 @@ +/* + * collectionUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Helper functions relating to collections and arrays. + */ + +import { compareValues, Comparison, equateValues, isArray, MapLike } from './core'; + +export const emptyArray: never[] = [] as never[]; +export type EqualityComparer = (a: T, b: T) => boolean; + +export function contains( + array: readonly T[] | undefined, + value: T, + equalityComparer: EqualityComparer = equateValues +): boolean { + if (array) { + for (const v of array) { + if (equalityComparer(v, value)) { + return true; + } + } + } + return false; +} + +/** Array that is only intended to be pushed to, never read. */ +export interface Push { + push(...values: T[]): void; +} + +/** + * Appends a value to an array, returning the array. + * + * @param to The array to which `value` is to be appended. 
If `to` is `undefined`, a new array + * is created if `value` was appended. + * @param value The value to append to the array. If `value` is `undefined`, nothing is + * appended. + */ +export function append[number] | undefined>( + to: TArray, + value: TValue +): [undefined, undefined] extends [TArray, TValue] ? TArray : NonNullable[number][]; +export function append(to: T[], value: T | undefined): T[]; +export function append(to: T[] | undefined, value: T): T[]; +export function append(to: T[] | undefined, value: T | undefined): T[] | undefined; +export function append(to: T[] | undefined, value: T | undefined): T[] | undefined { + if (value === undefined) { + return to; + } + if (to === undefined) { + return [value]; + } + to.push(value); + return to; +} + +/** + * Safely pushes the values of one array onto another array. This is the + * same as receiver.push(...elementsToPush) except that it doesn't risk overflowing + * the stack if elementsToPush is very large. + */ +export function appendArray(to: T[], elementsToPush: T[]) { + if (elementsToPush.length < 256) { + to.push(...elementsToPush); + return; + } + + for (const elem of elementsToPush) { + to.push(elem); + } +} + +/** Works like Array.filter except that it returns a second array with the filtered elements. **/ +export function partition(array: readonly T[], cb: (value: T) => boolean): [S[], T[]] { + const trueItems: S[] = []; + const falseItems: T[] = []; + + for (const item of array) { + if (cb(item)) { + trueItems.push(item as S); + } else { + falseItems.push(item); + } + } + + return [trueItems, falseItems]; +} + +/** Works like Array.prototype.find, returning `undefined` if no element satisfying the predicate is found. 
*/ +export function find( + array: readonly T[], + predicate: (element: T, index: number) => element is U +): U | undefined; +export function find(array: readonly T[], predicate: (element: T, index: number) => boolean): T | undefined; +export function find(array: readonly T[], predicate: (element: T, index: number) => boolean): T | undefined { + for (let i = 0; i < array.length; i++) { + const value = array[i]; + if (predicate(value, i)) { + return value; + } + } + return undefined; +} + +/** + * Gets the actual offset into an array for a relative offset. Negative offsets indicate a + * position offset from the end of the array. + */ +function toOffset(array: readonly any[], offset: number) { + return offset < 0 ? array.length + offset : offset; +} + +/** + * Appends a range of value to an array, returning the array. + * + * @param to The array to which `value` is to be appended. If `to` is `undefined`, a new array + * is created if `value` was appended. + * @param from The values to append to the array. If `from` is `undefined`, nothing is + * appended. If an element of `from` is `undefined`, that element is not appended. + * @param start The offset in `from` at which to start copying values. + * @param end The offset in `from` at which to stop copying values (non-inclusive). + */ +export function addRange(to: T[], from: readonly T[] | undefined, start?: number, end?: number): T[]; +export function addRange( + to: T[] | undefined, + from: readonly T[] | undefined, + start?: number, + end?: number +): T[] | undefined; +export function addRange( + to: T[] | undefined, + from: readonly T[] | undefined, + start?: number, + end?: number +): T[] | undefined { + if (from === undefined || from.length === 0) { + return to; + } + if (to === undefined) { + return from.slice(start, end); + } + start = start === undefined ? 0 : toOffset(from, start); + end = end === undefined ? 
from.length : toOffset(from, end); + for (let i = start; i < end && i < from.length; i++) { + if (from[i] !== undefined) { + to.push(from[i]); + } + } + return to; +} + +export function insertAt(array: T[], index: number, value: T) { + if (index === 0) { + array.unshift(value); + } else if (index === array.length) { + array.push(value); + } else { + for (let i = array.length; i > index; i--) { + array[i] = array[i - 1]; + } + array[index] = value; + } + return array; +} + +export type Comparer = (a: T, b: T) => Comparison; + +export interface SortedReadonlyArray extends ReadonlyArray { + ' __sortedArrayBrand': any; +} + +export interface SortedArray extends Array { + ' __sortedArrayBrand': any; +} + +/** + * Returns a new sorted array. + */ +export function cloneAndSort(array: readonly T[], comparer?: Comparer): SortedReadonlyArray { + return (array.length === 0 ? array : array.slice().sort(comparer)) as SortedReadonlyArray; +} + +function selectIndex(_: unknown, i: number) { + return i; +} + +function indicesOf(array: readonly unknown[]): number[] { + return array.map(selectIndex); +} + +/** + * Stable sort of an array. Elements equal to each other maintain their relative position in the array. 
+ */ +export function stableSort(array: readonly T[], comparer: Comparer): SortedReadonlyArray { + const indices = indicesOf(array); + stableSortIndices(array, indices, comparer); + return indices.map((i) => array[i]) as SortedArray as SortedReadonlyArray; +} + +function stableSortIndices(array: readonly T[], indices: number[], comparer: Comparer) { + // sort indices by value then position + indices.sort((x, y) => comparer(array[x], array[y]) || compareValues(x, y)); +} + +export function map(array: readonly T[], f: (x: T, i: number) => U): U[]; +export function map(array: readonly T[] | undefined, f: (x: T, i: number) => U): U[] | undefined; +export function map(array: readonly T[] | undefined, f: (x: T, i: number) => U): U[] | undefined { + if (array) { + return array.map(f); + } + return undefined; +} + +export function some(array: readonly T[] | undefined): array is readonly T[]; +export function some(array: readonly T[] | undefined, predicate: (value: T) => boolean): boolean; +export function some(array: readonly T[] | undefined, predicate?: (value: T) => boolean): boolean { + if (array) { + if (predicate) { + return array.some(predicate); + } else { + return array.length > 0; + } + } + return false; +} + +/** + * Iterates through `array` by index and performs the callback on each element of array until the callback + * returns a falsey value, then returns false. + * If no such value is found, the callback is applied to each element of array and `true` is returned. + */ +export function every(array: readonly T[], callback: (element: T, index: number) => boolean): boolean { + if (array) { + return array.every(callback); + } + + return true; +} + +/** + * Performs a binary search, finding the index at which `value` occurs in `array`. + * If no such index is found, returns the 2's-complement of first index at which + * `array[index]` exceeds `value`. 
+ * @param array A sorted array whose first element must be no larger than number + * @param value The value to be searched for in the array. + * @param keySelector A callback used to select the search key from `value` and each element of + * `array`. + * @param keyComparer A callback used to compare two keys in a sorted array. + * @param offset An offset into `array` at which to start the search. + */ +export function binarySearch( + array: readonly T[], + value: T, + keySelector: (v: T) => U, + keyComparer: Comparer, + offset?: number +): number { + return binarySearchKey(array, keySelector(value), keySelector, keyComparer, offset); +} + +/** + * Performs a binary search, finding the index at which an object with `key` occurs in `array`. + * If no such index is found, returns the 2's-complement of first index at which + * `array[index]` exceeds `key`. + * @param array A sorted array whose first element must be no larger than number + * @param key The key to be searched for in the array. + * @param keySelector A callback used to select the search key from each element of `array`. + * @param keyComparer A callback used to compare two keys in a sorted array. + * @param offset An offset into `array` at which to start the search. + */ +export function binarySearchKey( + array: readonly T[], + key: U, + keySelector: (v: T) => U, + keyComparer: Comparer, + offset?: number +): number { + if (!some(array)) { + return -1; + } + + let low = offset || 0; + let high = array.length - 1; + while (low <= high) { + const middle = low + ((high - low) >> 1); + const midKey = keySelector(array[middle]); + switch (keyComparer(midKey, key)) { + case Comparison.LessThan: + low = middle + 1; + break; + case Comparison.EqualTo: + return middle; + case Comparison.GreaterThan: + high = middle - 1; + break; + } + } + + return ~low; +} + +/** + * Flattens an array containing a mix of array or non-array elements. + * + * @param array The array to flatten. 
+ */ +export function flatten(array: (NonNullable[] | NonNullable)[]): T[] { + const result: T[] = []; + for (const v of array) { + if (v) { + if (isArray(v)) { + addRange(result, v); + } else { + result.push(v); + } + } + } + return result; +} + +/** + * Retrieves nested objects by parsing chained properties. ie. "a.b.c" + * Returns undefined if not found + * @param object The object to query + * @param property The property to be searched for in the object ie. "a.b.c" + */ +export function getNestedProperty(object: any, property: string) { + const value = property.split('.').reduce((obj, prop) => { + return obj && obj[prop]; + }, object); + return value; +} + +export function getOrAdd(map: MapLike, key: K, newValueFactory: () => V): V { + const value = map.get(key); + if (value !== undefined) { + return value; + } + + const newValue = newValueFactory(); + map.set(key, newValue); + + return newValue; +} + +/** + * Remove matching item from the array in place. + * Returns the given array itself. + * @param array The array to operate on. + * @param predicate Return true for an item to delete. + */ +export function removeArrayElements(array: T[], predicate: (item: T) => boolean): T[] { + for (let i = 0; i < array.length; i++) { + if (predicate(array[i])) { + array.splice(i, 1); + + // Array is modified in place, we need to look at the same index again. 
+ i--; + } + } + + return array; +} + +export function createMapFromItems(items: T[], keyGetter: (t: T) => string) { + return items + .map((t) => keyGetter(t)) + .reduce((map, key, i) => { + map.set(key, (map.get(key) || []).concat(items[i])); + return map; + }, new Map()); +} + +export function addIfUnique(arr: T[], t: T, equalityComparer: EqualityComparer = equateValues): T[] { + if (contains(arr, t, equalityComparer)) { + return arr; + } + + arr.push(t); + return arr; +} + +export function getMapValues(m: Map, predicate: (k: K, v: V) => boolean): V[] { + const values: V[] = []; + m.forEach((v, k) => { + if (predicate(k, v)) { + values.push(v); + } + }); + + return values; +} + +export function addIfNotNull(arr: T[], t: T): T[] { + if (t === undefined) { + return arr; + } + + arr.push(t); + return arr; +} + +export function arrayEquals(c1: T[], c2: T[], predicate: (e1: T, e2: T) => boolean) { + if (c1.length !== c2.length) { + return false; + } + + return c1.every((v, i) => predicate(v, c2[i])); +} diff --git a/python-parser/packages/pyright-internal/src/common/commandLineOptions.ts b/python-parser/packages/pyright-internal/src/common/commandLineOptions.ts new file mode 100644 index 00000000..c8de1333 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/commandLineOptions.ts @@ -0,0 +1,181 @@ +/* + * commandLineOptions.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Class that holds the command-line options (those that can be + * passed into the main entry point of the command-line version + * of the analyzer). 
+ */ + +import { TaskListToken } from './diagnostic'; +import { PythonVersion } from './pythonVersion'; +import { Uri } from './uri/uri'; + +export const enum DiagnosticSeverityOverrides { + Error = 'error', + Warning = 'warning', + Information = 'information', + None = 'none', +} + +export function getDiagnosticSeverityOverrides() { + return [ + DiagnosticSeverityOverrides.Error, + DiagnosticSeverityOverrides.Warning, + DiagnosticSeverityOverrides.Information, + DiagnosticSeverityOverrides.None, + ]; +} + +export type DiagnosticSeverityOverridesMap = { [ruleName: string]: DiagnosticSeverityOverrides }; +export type DiagnosticBooleanOverridesMap = { [ruleName: string]: boolean }; + +// Options that can be specified in a JSON config file. This list should match what is +// defined in the pyrightconfig.schema.json file. +export class CommandLineConfigOptions { + // A list of file specs to include in the analysis. Can contain + // directories, in which case all "*.py" files within those directories + // are included. + includeFileSpecs: string[] = []; + + // If specified, this list of file specs overrides the includeFileSpecs + // above, rendering it as ignored. This is used + // for the CLI "--files" option, which should always override the "include" + // and "exclude" config file settings. + includeFileSpecsOverride?: string[]; + + // A list of file specs to exclude in the analysis. Can contain + // directories, in which case all "*.py" files within those directories + // are excluded. + excludeFileSpecs: string[] = []; + + // A list of file specs whose errors and warnings should be ignored even + // if they are included in the transitive closure of included files. + ignoreFileSpecs: string[] = []; + + // Virtual environments directory. + venvPath?: string | undefined; + + // Path to python interpreter. + pythonPath?: string | undefined; + + // Name for the virtual environment. 
+ pythonEnvironmentName?: string | undefined; + + // Python platform indicator (darwin, linux, win32, ios, android) + pythonPlatform?: 'Darwin' | 'Linux' | 'Windows' | 'iOS' | 'Android' | undefined; + + // Python version string (3.3, 3.4, etc.) + pythonVersion?: PythonVersion | undefined; + + // Path of typeshed stubs. + typeshedPath?: string | undefined; + + // Path of typing folder + stubPath?: string | undefined; + // In the absence of type stubs, use library implementations + // to extract type information? + useLibraryCodeForTypes?: boolean | undefined; + + // Look for a common root folders such as 'src' and automatically + // add them as extra paths if the user has not explicitly defined + // execution environments. + autoSearchPaths?: boolean | undefined; + + // Extra paths to add to the default execution environment + // when user has not explicitly defined execution environments. + extraPaths?: string[] | undefined; + + // Default type-checking rule set. Should be one of 'off', + // 'basic', 'standard', or 'strict'. + typeCheckingMode?: string | undefined; + + // Indicates diagnostic severity overrides + diagnosticSeverityOverrides?: DiagnosticSeverityOverridesMap | undefined; + + // Indicates diagnostic boolean overrides + diagnosticBooleanOverrides?: DiagnosticBooleanOverridesMap | undefined; + + // Analyze functions and methods that have no type annotations? + analyzeUnannotatedFunctions?: boolean; + + // Emit verbose information to console? + verboseOutput?: boolean | undefined; +} + +// Options that are not specified in a JSON config file but apply to a language server. +export class CommandLineLanguageServerOptions { + // Watch for changes in workspace source files. + watchForSourceChanges?: boolean | undefined; + + // Watch for changes in environment library/search paths. + watchForLibraryChanges?: boolean | undefined; + + // Watch for changes in config files. 
+ watchForConfigChanges?: boolean | undefined; + + // Type stub import target (for creation of type stubs). + typeStubTargetImportName?: string | undefined; + + // Indicates that only open files should be checked. + checkOnlyOpenFiles?: boolean | undefined; + + // Offer auto-import completions. + autoImportCompletions?: boolean | undefined; + + // Use indexing. + indexing?: boolean | undefined; + + // Task list tokens, used for VS task list population + taskListTokens?: TaskListToken[] | undefined; + + // Use type evaluator call tracking. + logTypeEvaluationTime = false; + + // Minimum threshold for type eval logging. + typeEvaluationTimeThreshold = 50; + + // Run ambient analysis. + enableAmbientAnalysis = true; + + // Disable reporting of hint diagnostics with tags? + disableTaggedHints?: boolean; + + // Path to python interpreter. This is used when the language server + // gets the python path from the client. + pythonPath?: string | undefined; + + // Virtual environments directory. + venvPath?: string | undefined; +} + +// Some options can be specified from a source other than the pyright config file. +// This can be from command-line parameters or some other settings mechanism, like +// that provided through a language client like the VS Code editor. These options +// are later combined with those from the config file to produce the final configuration. +export class CommandLineOptions { + // Settings that are possible to set in a config.json file. + configSettings: CommandLineConfigOptions = new CommandLineConfigOptions(); + + // Settings that are not possible to set in a config.json file. + languageServerSettings: CommandLineLanguageServerOptions = new CommandLineLanguageServerOptions(); + + // Path of config file. This option cannot be combined with + // file specs. + configFilePath?: string | undefined; + + // Absolute execution root (current working directory). 
+ executionRoot: string | Uri | undefined; + + // Indicates that the settings came from a language server rather than + // from the command-line. Useful for providing clearer error + // messages. + fromLanguageServer: boolean; + + constructor(executionRoot: string | Uri | undefined, fromLanguageServer: boolean) { + this.executionRoot = executionRoot; + this.fromLanguageServer = fromLanguageServer; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/commandUtils.ts b/python-parser/packages/pyright-internal/src/common/commandUtils.ts new file mode 100644 index 00000000..81793147 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/commandUtils.ts @@ -0,0 +1,21 @@ +/* + * commandUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Utilities for working with LSP commands. + */ + +import { Command } from 'vscode-languageserver-types'; +import { Uri } from './uri/uri'; + +export function createCommand(title: string, command: string, ...args: any[]): Command { + // Make sure if any of the args are URIs, we convert them to strings. + const convertedArgs = args.map((arg) => { + if (Uri.is(arg)) { + return arg.toString(); + } + return arg; + }); + return Command.create(title, command, ...convertedArgs); +} diff --git a/python-parser/packages/pyright-internal/src/common/configOptions.ts b/python-parser/packages/pyright-internal/src/common/configOptions.ts new file mode 100644 index 00000000..484469ee --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/configOptions.ts @@ -0,0 +1,1798 @@ +/* + * configOptions.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Class that holds the configuration options for the analyzer. 
+ */ + +import { isAbsolute } from 'path'; + +import { ImportLogger } from '../analyzer/importLogger'; +import { getPathsFromPthFiles } from '../analyzer/pythonPathUtils'; +import * as pathConsts from '../common/pathConsts'; +import { appendArray } from './collectionUtils'; +import { + DiagnosticBooleanOverridesMap, + DiagnosticSeverityOverrides, + DiagnosticSeverityOverridesMap, + getDiagnosticSeverityOverrides, +} from './commandLineOptions'; +import { ConsoleInterface, NullConsole } from './console'; +import { isBoolean } from './core'; +import { TaskListToken } from './diagnostic'; +import { DiagnosticRule } from './diagnosticRules'; +import { FileSystem } from './fileSystem'; +import { Host } from './host'; +import { PythonVersion, latestStablePythonVersion } from './pythonVersion'; +import { ServiceKeys } from './serviceKeys'; +import { ServiceProvider } from './serviceProvider'; +import { Uri } from './uri/uri'; +import { FileSpec, getFileSpec, isDirectory } from './uri/uriUtils'; + +export enum PythonPlatform { + Darwin = 'Darwin', + Windows = 'Windows', + Linux = 'Linux', + iOS = 'iOS', + Android = 'Android', +} + +export class ExecutionEnvironment { + // Root directory for execution. + // Undefined if this is a rootless environment (e.g., open file mode). + root?: Uri; + + // Name of a virtual environment if there is one, otherwise + // just the path to the python executable. + name: string; + + // Always default to the latest stable version of the language. + pythonVersion: PythonVersion; + + // Default to no platform. + pythonPlatform?: string | undefined; + + // Default to no extra paths. + extraPaths: Uri[] = []; + + // Diagnostic rules with overrides. + diagnosticRuleSet: DiagnosticRuleSet; + + // Skip import resolution attempts for native libraries. These can + // be expensive and are not needed for some use cases (e.g. web-based + // tools or playgrounds). + skipNativeLibraries: boolean; + + // Default to "." 
which indicates every file in the project. + constructor( + name: string, + root: Uri, + defaultDiagRuleSet: DiagnosticRuleSet, + defaultPythonVersion: PythonVersion | undefined, + defaultPythonPlatform: string | undefined, + defaultExtraPaths: Uri[] | undefined, + skipNativeLibraries = false + ) { + this.name = name; + this.root = root; + this.pythonVersion = defaultPythonVersion ?? latestStablePythonVersion; + this.pythonPlatform = defaultPythonPlatform; + this.extraPaths = Array.from(defaultExtraPaths ?? []); + this.diagnosticRuleSet = { ...defaultDiagRuleSet }; + this.skipNativeLibraries = skipNativeLibraries; + } +} + +export type DiagnosticLevel = 'none' | 'information' | 'warning' | 'error'; + +export enum SignatureDisplayType { + compact = 'compact', + formatted = 'formatted', +} + +export interface DiagnosticRuleSet { + // Should "Unknown" types be reported as "Any"? + printUnknownAsAny: boolean; + + // Should type arguments to a generic class be omitted + // when printed if all arguments are Unknown? + omitTypeArgsIfUnknown: boolean; + + // Should parameter type be omitted if it is not annotated? + omitUnannotatedParamType: boolean; + + // Indicate when a type is conditional based on a constrained + // type variable type? + omitConditionalConstraint: boolean; + + // Should Union and Optional types be printed in PEP 604 format? + pep604Printing: boolean; + + // Use strict inference rules for list expressions? + strictListInference: boolean; + + // Use strict inference rules for set expressions? + strictSetInference: boolean; + + // Use strict inference rules for dictionary expressions? + strictDictionaryInference: boolean; + + // Analyze functions and methods that have no annotations? + analyzeUnannotatedFunctions: boolean; + + // Use strict type rules for parameters assigned default of None? + strictParameterNoneValue: boolean; + + // Enable experimental features that are not yet part of the + // official Python typing spec? 
+ enableExperimentalFeatures: boolean; + + // Enable support for type: ignore comments? + enableTypeIgnoreComments: boolean; + + // Use tagged hints to identify unreachable code via type analysis? + enableReachabilityAnalysis: boolean; + + // Treat old typing aliases as deprecated if pythonVersion >= 3.9? + deprecateTypingAliases: boolean; + + // No longer treat bytearray and memoryview as subclasses of bytes? + disableBytesTypePromotions: boolean; + + // Report general type issues? + reportGeneralTypeIssues: DiagnosticLevel; + + // Report mismatch in types between property getter and setter? + reportPropertyTypeMismatch: DiagnosticLevel; + + // Report the use of unknown member accesses on function objects? + reportFunctionMemberAccess: DiagnosticLevel; + + // Report missing imports? + reportMissingImports: DiagnosticLevel; + + // Report missing imported module source files? + reportMissingModuleSource: DiagnosticLevel; + + // Report invalid type annotation forms? + reportInvalidTypeForm: DiagnosticLevel; + + // Report missing type stub files? + reportMissingTypeStubs: DiagnosticLevel; + + // Report cycles in import graph? + reportImportCycles: DiagnosticLevel; + + // Report imported symbol that is not accessed? + reportUnusedImport: DiagnosticLevel; + + // Report private class that is not accessed? + reportUnusedClass: DiagnosticLevel; + + // Report private function or method that is not accessed? + reportUnusedFunction: DiagnosticLevel; + + // Report variable that is not accessed? + reportUnusedVariable: DiagnosticLevel; + + // Report symbol or module that is imported more than once? + reportDuplicateImport: DiagnosticLevel; + + // Report use of wildcard import for non-local imports? + reportWildcardImportFromLibrary: DiagnosticLevel; + + // Report use of abstract method or variable? + reportAbstractUsage: DiagnosticLevel; + + // Report argument type incompatibilities? + reportArgumentType: DiagnosticLevel; + + // Report failure of assert_type call? 
+ reportAssertTypeFailure: DiagnosticLevel; + + // Report type incompatibility for assignments? + reportAssignmentType: DiagnosticLevel; + + // Report issues related to attribute access expressions? + reportAttributeAccessIssue: DiagnosticLevel; + + // Report issues related to call expressions? + reportCallIssue: DiagnosticLevel; + + // Report inconsistencies with function overload signatures? + reportInconsistentOverload: DiagnosticLevel; + + // Report issues with index operations and expressions? + reportIndexIssue: DiagnosticLevel; + + // Report invalid type argument usage? + reportInvalidTypeArguments: DiagnosticLevel; + + // Report missing overloaded function implementation? + reportNoOverloadImplementation: DiagnosticLevel; + + // Report issues related to the use of unary or binary operators? + reportOperatorIssue: DiagnosticLevel; + + // Report attempts to subscript (index) an Optional type? + reportOptionalSubscript: DiagnosticLevel; + + // Report attempts to access members on a Optional type? + reportOptionalMemberAccess: DiagnosticLevel; + + // Report attempts to call a Optional type? + reportOptionalCall: DiagnosticLevel; + + // Report attempts to use an Optional type as an iterable? + reportOptionalIterable: DiagnosticLevel; + + // Report attempts to use an Optional type in a "with" statement? + reportOptionalContextManager: DiagnosticLevel; + + // Report attempts to use an Optional type in a binary or unary operation? + reportOptionalOperand: DiagnosticLevel; + + // Report attempts to redeclare the type of a symbol? + reportRedeclaration: DiagnosticLevel; + + // Report return type mismatches? + reportReturnType: DiagnosticLevel; + + // Report accesses to non-required TypedDict fields? + reportTypedDictNotRequiredAccess: DiagnosticLevel; + + // Report untyped function decorators that obscure the function type? + reportUntypedFunctionDecorator: DiagnosticLevel; + + // Report untyped class decorators that obscure the class type? 
+ reportUntypedClassDecorator: DiagnosticLevel; + + // Report untyped base class that obscure the class type? + reportUntypedBaseClass: DiagnosticLevel; + + // Report use of untyped namedtuple factory method? + reportUntypedNamedTuple: DiagnosticLevel; + + // Report usage of private variables and functions outside of + // the owning class or module? + reportPrivateUsage: DiagnosticLevel; + + // Report usage of deprecated type comments. + reportTypeCommentUsage: DiagnosticLevel; + + // Report usage of an import from a py.typed module that is + // not meant to be re-exported from that module. + reportPrivateImportUsage: DiagnosticLevel; + + // Report attempts to redefine variables that are in all-caps. + reportConstantRedefinition: DiagnosticLevel; + + // Report use of deprecated classes or functions. + reportDeprecated: DiagnosticLevel; + + // Report usage of method override that is incompatible with + // the base class method of the same name? + reportIncompatibleMethodOverride: DiagnosticLevel; + + // Report usage of variable override that is incompatible with + // the base class symbol of the same name? + reportIncompatibleVariableOverride: DiagnosticLevel; + + // Report inconsistencies between __init__ and __new__ signatures. + reportInconsistentConstructor: DiagnosticLevel; + + // Report function overloads that overlap in signature but have + // incompatible return types. + reportOverlappingOverload: DiagnosticLevel; + + // Report usage of possibly unbound variables. + reportPossiblyUnboundVariable: DiagnosticLevel; + + // Report failure to call super().__init__() in __init__ method. + reportMissingSuperCall: DiagnosticLevel; + + // Report instance variables that are not initialized within + // the constructor. + reportUninitializedInstanceVariable: DiagnosticLevel; + + // Report usage of invalid escape sequences in string literals? + reportInvalidStringEscapeSequence: DiagnosticLevel; + + // Report usage of unknown input or return parameters for functions? 
+ reportUnknownParameterType: DiagnosticLevel; + + // Report usage of unknown arguments for function calls? + reportUnknownArgumentType: DiagnosticLevel; + + // Report usage of unknown input or return parameters for lambdas? + reportUnknownLambdaType: DiagnosticLevel; + + // Report usage of unknown input or return parameters? + reportUnknownVariableType: DiagnosticLevel; + + // Report usage of unknown input or return parameters? + reportUnknownMemberType: DiagnosticLevel; + + // Report input parameters that are missing type annotations? + reportMissingParameterType: DiagnosticLevel; + + // Report usage of generic class without explicit type arguments? + reportMissingTypeArgument: DiagnosticLevel; + + // Report improper usage of type variables within function signatures? + reportInvalidTypeVarUse: DiagnosticLevel; + + // Report usage of function call within default value + // initialization expression? + reportCallInDefaultInitializer: DiagnosticLevel; + + // Report calls to isinstance or issubclass that are statically determined + // to always be true. + reportUnnecessaryIsInstance: DiagnosticLevel; + + // Report calls to cast that are statically determined + // to always unnecessary. + reportUnnecessaryCast: DiagnosticLevel; + + // Report == or != operators that always evaluate to True or False. + reportUnnecessaryComparison: DiagnosticLevel; + + // Report 'in' operations that always evaluate to True or False. + reportUnnecessaryContains: DiagnosticLevel; + + // Report assert expressions that will always evaluate to true. + reportAssertAlwaysTrue: DiagnosticLevel; + + // Report when "self" or "cls" parameter is missing or is misnamed. + reportSelfClsParameterName: DiagnosticLevel; + + // Report implicit concatenation of string literals. + reportImplicitStringConcatenation: DiagnosticLevel; + + // Report usage of undefined variables. + reportUndefinedVariable: DiagnosticLevel; + + // Report usage of unbound variables. 
+ reportUnboundVariable: DiagnosticLevel; + + // Report use of unhashable type in a dictionary. + reportUnhashable: DiagnosticLevel; + + // Report statements that are syntactically correct but + // have no semantic meaning within a type stub file. + reportInvalidStubStatement: DiagnosticLevel; + + // Report usage of __getattr__ at the module level in a stub. + reportIncompleteStub: DiagnosticLevel; + + // Report operations on __all__ symbol that are not supported + // by a static type checker. + reportUnsupportedDunderAll: DiagnosticLevel; + + // Report cases where a call expression's return result is not + // None and is not used in any way. + reportUnusedCallResult: DiagnosticLevel; + + // Report cases where a call expression's return result is Coroutine + // and is not used in any way. + reportUnusedCoroutine: DiagnosticLevel; + + // Report except clause that is unreachable. + reportUnusedExcept: DiagnosticLevel; + + // Report cases where a simple expression result is not used in any way. + reportUnusedExpression: DiagnosticLevel; + + // Report cases where the removal of a "# type: ignore" or "# pyright: ignore" + // comment would have no effect. + reportUnnecessaryTypeIgnoreComment: DiagnosticLevel; + + // Report cases where the a "match" statement is not exhaustive in + // covering all possible cases. + reportMatchNotExhaustive: DiagnosticLevel; + + // Report code that is determined to be unreachable via type analysis. + reportUnreachable: DiagnosticLevel; + + // Report missing @override decorator. + reportImplicitOverride: DiagnosticLevel; +} + +export function cloneDiagnosticRuleSet(diagSettings: DiagnosticRuleSet): DiagnosticRuleSet { + // Create a shallow copy of the existing object. + return Object.assign({}, diagSettings); +} + +// Returns a list of the diagnostic rules that are configured with +// a true or false value. 
+export function getBooleanDiagnosticRules(includeNonOverridable = false) { + const boolRules = [ + DiagnosticRule.strictListInference, + DiagnosticRule.strictSetInference, + DiagnosticRule.strictDictionaryInference, + DiagnosticRule.analyzeUnannotatedFunctions, + DiagnosticRule.strictParameterNoneValue, + DiagnosticRule.enableExperimentalFeatures, + DiagnosticRule.deprecateTypingAliases, + DiagnosticRule.disableBytesTypePromotions, + ]; + + if (includeNonOverridable) { + // Do not include these because we don't + // want to override it in strict mode or support + // it within pyright comments. + boolRules.push(DiagnosticRule.enableTypeIgnoreComments); + boolRules.push(DiagnosticRule.enableReachabilityAnalysis); + } + + return boolRules; +} + +// Returns a list of the diagnostic rules that are configured with +// a diagnostic level ('none', 'error', etc.). +export function getDiagLevelDiagnosticRules() { + return [ + DiagnosticRule.reportGeneralTypeIssues, + DiagnosticRule.reportPropertyTypeMismatch, + DiagnosticRule.reportFunctionMemberAccess, + DiagnosticRule.reportMissingImports, + DiagnosticRule.reportMissingModuleSource, + DiagnosticRule.reportInvalidTypeForm, + DiagnosticRule.reportMissingTypeStubs, + DiagnosticRule.reportImportCycles, + DiagnosticRule.reportUnusedImport, + DiagnosticRule.reportUnusedClass, + DiagnosticRule.reportUnusedFunction, + DiagnosticRule.reportUnusedVariable, + DiagnosticRule.reportDuplicateImport, + DiagnosticRule.reportWildcardImportFromLibrary, + DiagnosticRule.reportAbstractUsage, + DiagnosticRule.reportArgumentType, + DiagnosticRule.reportAssertTypeFailure, + DiagnosticRule.reportAssignmentType, + DiagnosticRule.reportAttributeAccessIssue, + DiagnosticRule.reportCallIssue, + DiagnosticRule.reportInconsistentOverload, + DiagnosticRule.reportIndexIssue, + DiagnosticRule.reportInvalidTypeArguments, + DiagnosticRule.reportNoOverloadImplementation, + DiagnosticRule.reportOperatorIssue, + DiagnosticRule.reportOptionalSubscript, + 
DiagnosticRule.reportOptionalMemberAccess, + DiagnosticRule.reportOptionalCall, + DiagnosticRule.reportOptionalIterable, + DiagnosticRule.reportOptionalContextManager, + DiagnosticRule.reportOptionalOperand, + DiagnosticRule.reportRedeclaration, + DiagnosticRule.reportReturnType, + DiagnosticRule.reportTypedDictNotRequiredAccess, + DiagnosticRule.reportUntypedFunctionDecorator, + DiagnosticRule.reportUntypedClassDecorator, + DiagnosticRule.reportUntypedBaseClass, + DiagnosticRule.reportUntypedNamedTuple, + DiagnosticRule.reportPrivateUsage, + DiagnosticRule.reportTypeCommentUsage, + DiagnosticRule.reportPrivateImportUsage, + DiagnosticRule.reportConstantRedefinition, + DiagnosticRule.reportDeprecated, + DiagnosticRule.reportIncompatibleMethodOverride, + DiagnosticRule.reportIncompatibleVariableOverride, + DiagnosticRule.reportInconsistentConstructor, + DiagnosticRule.reportOverlappingOverload, + DiagnosticRule.reportPossiblyUnboundVariable, + DiagnosticRule.reportMissingSuperCall, + DiagnosticRule.reportUninitializedInstanceVariable, + DiagnosticRule.reportInvalidStringEscapeSequence, + DiagnosticRule.reportUnknownParameterType, + DiagnosticRule.reportUnknownArgumentType, + DiagnosticRule.reportUnknownLambdaType, + DiagnosticRule.reportUnknownVariableType, + DiagnosticRule.reportUnknownMemberType, + DiagnosticRule.reportMissingParameterType, + DiagnosticRule.reportMissingTypeArgument, + DiagnosticRule.reportInvalidTypeVarUse, + DiagnosticRule.reportCallInDefaultInitializer, + DiagnosticRule.reportUnnecessaryIsInstance, + DiagnosticRule.reportUnnecessaryCast, + DiagnosticRule.reportUnnecessaryComparison, + DiagnosticRule.reportUnnecessaryContains, + DiagnosticRule.reportAssertAlwaysTrue, + DiagnosticRule.reportSelfClsParameterName, + DiagnosticRule.reportImplicitStringConcatenation, + DiagnosticRule.reportUndefinedVariable, + DiagnosticRule.reportUnhashable, + DiagnosticRule.reportUnboundVariable, + DiagnosticRule.reportInvalidStubStatement, + 
DiagnosticRule.reportIncompleteStub, + DiagnosticRule.reportUnsupportedDunderAll, + DiagnosticRule.reportUnusedCallResult, + DiagnosticRule.reportUnusedCoroutine, + DiagnosticRule.reportUnusedExcept, + DiagnosticRule.reportUnusedExpression, + DiagnosticRule.reportUnnecessaryTypeIgnoreComment, + DiagnosticRule.reportMatchNotExhaustive, + DiagnosticRule.reportUnreachable, + DiagnosticRule.reportImplicitOverride, + ]; +} + +export function getStrictModeNotOverriddenRules() { + // In strict mode, the value in the user config file should be honored and + // not overwritten by the value from the strict rule set. + return [DiagnosticRule.reportMissingModuleSource]; +} + +export function getOffDiagnosticRuleSet(): DiagnosticRuleSet { + const diagSettings: DiagnosticRuleSet = { + printUnknownAsAny: true, + omitTypeArgsIfUnknown: true, + omitUnannotatedParamType: true, + omitConditionalConstraint: true, + pep604Printing: true, + strictListInference: false, + strictSetInference: false, + strictDictionaryInference: false, + analyzeUnannotatedFunctions: true, + strictParameterNoneValue: true, + enableExperimentalFeatures: false, + enableTypeIgnoreComments: true, + enableReachabilityAnalysis: false, + deprecateTypingAliases: false, + disableBytesTypePromotions: true, + reportGeneralTypeIssues: 'none', + reportPropertyTypeMismatch: 'none', + reportFunctionMemberAccess: 'none', + reportMissingImports: 'warning', + reportMissingModuleSource: 'warning', + reportInvalidTypeForm: 'warning', + reportMissingTypeStubs: 'none', + reportImportCycles: 'none', + reportUnusedImport: 'none', + reportUnusedClass: 'none', + reportUnusedFunction: 'none', + reportUnusedVariable: 'none', + reportDuplicateImport: 'none', + reportWildcardImportFromLibrary: 'none', + reportAbstractUsage: 'none', + reportArgumentType: 'none', + reportAssertTypeFailure: 'none', + reportAssignmentType: 'none', + reportAttributeAccessIssue: 'none', + reportCallIssue: 'none', + reportInconsistentOverload: 'none', + 
reportIndexIssue: 'none', + reportInvalidTypeArguments: 'none', + reportNoOverloadImplementation: 'none', + reportOperatorIssue: 'none', + reportOptionalSubscript: 'none', + reportOptionalMemberAccess: 'none', + reportOptionalCall: 'none', + reportOptionalIterable: 'none', + reportOptionalContextManager: 'none', + reportOptionalOperand: 'none', + reportRedeclaration: 'none', + reportReturnType: 'none', + reportTypedDictNotRequiredAccess: 'none', + reportUntypedFunctionDecorator: 'none', + reportUntypedClassDecorator: 'none', + reportUntypedBaseClass: 'none', + reportUntypedNamedTuple: 'none', + reportPrivateUsage: 'none', + reportTypeCommentUsage: 'none', + reportPrivateImportUsage: 'none', + reportConstantRedefinition: 'none', + reportDeprecated: 'none', + reportIncompatibleMethodOverride: 'none', + reportIncompatibleVariableOverride: 'none', + reportInconsistentConstructor: 'none', + reportOverlappingOverload: 'none', + reportPossiblyUnboundVariable: 'none', + reportMissingSuperCall: 'none', + reportUninitializedInstanceVariable: 'none', + reportInvalidStringEscapeSequence: 'none', + reportUnknownParameterType: 'none', + reportUnknownArgumentType: 'none', + reportUnknownLambdaType: 'none', + reportUnknownVariableType: 'none', + reportUnknownMemberType: 'none', + reportMissingParameterType: 'none', + reportMissingTypeArgument: 'none', + reportInvalidTypeVarUse: 'none', + reportCallInDefaultInitializer: 'none', + reportUnnecessaryIsInstance: 'none', + reportUnnecessaryCast: 'none', + reportUnnecessaryComparison: 'none', + reportUnnecessaryContains: 'none', + reportAssertAlwaysTrue: 'none', + reportSelfClsParameterName: 'none', + reportImplicitStringConcatenation: 'none', + reportUnboundVariable: 'none', + reportUnhashable: 'none', + reportUndefinedVariable: 'warning', + reportInvalidStubStatement: 'none', + reportIncompleteStub: 'none', + reportUnsupportedDunderAll: 'none', + reportUnusedCallResult: 'none', + reportUnusedCoroutine: 'none', + reportUnusedExcept: 
'none', + reportUnusedExpression: 'none', + reportUnnecessaryTypeIgnoreComment: 'none', + reportMatchNotExhaustive: 'none', + reportUnreachable: 'none', + reportImplicitOverride: 'none', + }; + + return diagSettings; +} + +export function getBasicDiagnosticRuleSet(): DiagnosticRuleSet { + const diagSettings: DiagnosticRuleSet = { + printUnknownAsAny: false, + omitTypeArgsIfUnknown: false, + omitUnannotatedParamType: true, + omitConditionalConstraint: false, + pep604Printing: true, + strictListInference: false, + strictSetInference: false, + strictDictionaryInference: false, + analyzeUnannotatedFunctions: true, + strictParameterNoneValue: true, + enableExperimentalFeatures: false, + enableTypeIgnoreComments: true, + enableReachabilityAnalysis: true, + deprecateTypingAliases: false, + disableBytesTypePromotions: true, + reportGeneralTypeIssues: 'error', + reportPropertyTypeMismatch: 'none', + reportFunctionMemberAccess: 'none', + reportMissingImports: 'error', + reportMissingModuleSource: 'warning', + reportInvalidTypeForm: 'error', + reportMissingTypeStubs: 'none', + reportImportCycles: 'none', + reportUnusedImport: 'none', + reportUnusedClass: 'none', + reportUnusedFunction: 'none', + reportUnusedVariable: 'none', + reportDuplicateImport: 'none', + reportWildcardImportFromLibrary: 'warning', + reportAbstractUsage: 'error', + reportArgumentType: 'error', + reportAssertTypeFailure: 'error', + reportAssignmentType: 'error', + reportAttributeAccessIssue: 'error', + reportCallIssue: 'error', + reportInconsistentOverload: 'error', + reportIndexIssue: 'error', + reportInvalidTypeArguments: 'error', + reportNoOverloadImplementation: 'error', + reportOperatorIssue: 'error', + reportOptionalSubscript: 'error', + reportOptionalMemberAccess: 'error', + reportOptionalCall: 'error', + reportOptionalIterable: 'error', + reportOptionalContextManager: 'error', + reportOptionalOperand: 'error', + reportRedeclaration: 'error', + reportReturnType: 'error', + 
reportTypedDictNotRequiredAccess: 'error', + reportUntypedFunctionDecorator: 'none', + reportUntypedClassDecorator: 'none', + reportUntypedBaseClass: 'none', + reportUntypedNamedTuple: 'none', + reportPrivateUsage: 'none', + reportTypeCommentUsage: 'none', + reportPrivateImportUsage: 'error', + reportConstantRedefinition: 'none', + reportDeprecated: 'none', + reportIncompatibleMethodOverride: 'none', + reportIncompatibleVariableOverride: 'none', + reportInconsistentConstructor: 'none', + reportOverlappingOverload: 'none', + reportPossiblyUnboundVariable: 'none', + reportMissingSuperCall: 'none', + reportUninitializedInstanceVariable: 'none', + reportInvalidStringEscapeSequence: 'warning', + reportUnknownParameterType: 'none', + reportUnknownArgumentType: 'none', + reportUnknownLambdaType: 'none', + reportUnknownVariableType: 'none', + reportUnknownMemberType: 'none', + reportMissingParameterType: 'none', + reportMissingTypeArgument: 'none', + reportInvalidTypeVarUse: 'warning', + reportCallInDefaultInitializer: 'none', + reportUnnecessaryIsInstance: 'none', + reportUnnecessaryCast: 'none', + reportUnnecessaryComparison: 'none', + reportUnnecessaryContains: 'none', + reportAssertAlwaysTrue: 'warning', + reportSelfClsParameterName: 'warning', + reportImplicitStringConcatenation: 'none', + reportUnboundVariable: 'error', + reportUnhashable: 'error', + reportUndefinedVariable: 'error', + reportInvalidStubStatement: 'none', + reportIncompleteStub: 'none', + reportUnsupportedDunderAll: 'warning', + reportUnusedCallResult: 'none', + reportUnusedCoroutine: 'error', + reportUnusedExcept: 'error', + reportUnusedExpression: 'warning', + reportUnnecessaryTypeIgnoreComment: 'none', + reportMatchNotExhaustive: 'none', + reportUnreachable: 'none', + reportImplicitOverride: 'none', + }; + + return diagSettings; +} + +export function getStandardDiagnosticRuleSet(): DiagnosticRuleSet { + const diagSettings: DiagnosticRuleSet = { + printUnknownAsAny: false, + omitTypeArgsIfUnknown: 
false, + omitUnannotatedParamType: true, + omitConditionalConstraint: false, + pep604Printing: true, + strictListInference: false, + strictSetInference: false, + strictDictionaryInference: false, + analyzeUnannotatedFunctions: true, + strictParameterNoneValue: true, + enableExperimentalFeatures: false, + enableTypeIgnoreComments: true, + enableReachabilityAnalysis: true, + deprecateTypingAliases: false, + disableBytesTypePromotions: true, + reportGeneralTypeIssues: 'error', + reportPropertyTypeMismatch: 'none', + reportFunctionMemberAccess: 'error', + reportMissingImports: 'error', + reportMissingModuleSource: 'warning', + reportInvalidTypeForm: 'error', + reportMissingTypeStubs: 'none', + reportImportCycles: 'none', + reportUnusedImport: 'none', + reportUnusedClass: 'none', + reportUnusedFunction: 'none', + reportUnusedVariable: 'none', + reportDuplicateImport: 'none', + reportWildcardImportFromLibrary: 'warning', + reportAbstractUsage: 'error', + reportArgumentType: 'error', + reportAssertTypeFailure: 'error', + reportAssignmentType: 'error', + reportAttributeAccessIssue: 'error', + reportCallIssue: 'error', + reportInconsistentOverload: 'error', + reportIndexIssue: 'error', + reportInvalidTypeArguments: 'error', + reportNoOverloadImplementation: 'error', + reportOperatorIssue: 'error', + reportOptionalSubscript: 'error', + reportOptionalMemberAccess: 'error', + reportOptionalCall: 'error', + reportOptionalIterable: 'error', + reportOptionalContextManager: 'error', + reportOptionalOperand: 'error', + reportRedeclaration: 'error', + reportReturnType: 'error', + reportTypedDictNotRequiredAccess: 'error', + reportUntypedFunctionDecorator: 'none', + reportUntypedClassDecorator: 'none', + reportUntypedBaseClass: 'none', + reportUntypedNamedTuple: 'none', + reportPrivateUsage: 'none', + reportTypeCommentUsage: 'none', + reportPrivateImportUsage: 'error', + reportConstantRedefinition: 'none', + reportDeprecated: 'none', + reportIncompatibleMethodOverride: 'error', + 
reportIncompatibleVariableOverride: 'error', + reportInconsistentConstructor: 'none', + reportOverlappingOverload: 'error', + reportPossiblyUnboundVariable: 'error', + reportMissingSuperCall: 'none', + reportUninitializedInstanceVariable: 'none', + reportInvalidStringEscapeSequence: 'warning', + reportUnknownParameterType: 'none', + reportUnknownArgumentType: 'none', + reportUnknownLambdaType: 'none', + reportUnknownVariableType: 'none', + reportUnknownMemberType: 'none', + reportMissingParameterType: 'none', + reportMissingTypeArgument: 'none', + reportInvalidTypeVarUse: 'warning', + reportCallInDefaultInitializer: 'none', + reportUnnecessaryIsInstance: 'none', + reportUnnecessaryCast: 'none', + reportUnnecessaryComparison: 'none', + reportUnnecessaryContains: 'none', + reportAssertAlwaysTrue: 'warning', + reportSelfClsParameterName: 'warning', + reportImplicitStringConcatenation: 'none', + reportUnboundVariable: 'error', + reportUnhashable: 'error', + reportUndefinedVariable: 'error', + reportInvalidStubStatement: 'none', + reportIncompleteStub: 'none', + reportUnsupportedDunderAll: 'warning', + reportUnusedCallResult: 'none', + reportUnusedCoroutine: 'error', + reportUnusedExcept: 'error', + reportUnusedExpression: 'warning', + reportUnnecessaryTypeIgnoreComment: 'none', + reportMatchNotExhaustive: 'none', + reportUnreachable: 'none', + reportImplicitOverride: 'none', + }; + + return diagSettings; +} + +export function getStrictDiagnosticRuleSet(): DiagnosticRuleSet { + const diagSettings: DiagnosticRuleSet = { + printUnknownAsAny: false, + omitTypeArgsIfUnknown: false, + omitUnannotatedParamType: false, + omitConditionalConstraint: false, + pep604Printing: true, + strictListInference: true, + strictSetInference: true, + strictDictionaryInference: true, + analyzeUnannotatedFunctions: true, + strictParameterNoneValue: true, + enableExperimentalFeatures: false, + enableTypeIgnoreComments: true, // Not overridden by strict mode + enableReachabilityAnalysis: true, 
// Not overridden by strict mode + deprecateTypingAliases: false, + disableBytesTypePromotions: true, + reportGeneralTypeIssues: 'error', + reportPropertyTypeMismatch: 'none', + reportFunctionMemberAccess: 'error', + reportMissingImports: 'error', + reportMissingModuleSource: 'warning', // Not overridden by strict mode + reportInvalidTypeForm: 'error', + reportMissingTypeStubs: 'error', + reportImportCycles: 'none', + reportUnusedImport: 'error', + reportUnusedClass: 'error', + reportUnusedFunction: 'error', + reportUnusedVariable: 'error', + reportDuplicateImport: 'error', + reportWildcardImportFromLibrary: 'error', + reportAbstractUsage: 'error', + reportArgumentType: 'error', + reportAssertTypeFailure: 'error', + reportAssignmentType: 'error', + reportAttributeAccessIssue: 'error', + reportCallIssue: 'error', + reportInconsistentOverload: 'error', + reportIndexIssue: 'error', + reportInvalidTypeArguments: 'error', + reportNoOverloadImplementation: 'error', + reportOperatorIssue: 'error', + reportOptionalSubscript: 'error', + reportOptionalMemberAccess: 'error', + reportOptionalCall: 'error', + reportOptionalIterable: 'error', + reportOptionalContextManager: 'error', + reportOptionalOperand: 'error', + reportRedeclaration: 'error', + reportReturnType: 'error', + reportTypedDictNotRequiredAccess: 'error', + reportUntypedFunctionDecorator: 'error', + reportUntypedClassDecorator: 'error', + reportUntypedBaseClass: 'error', + reportUntypedNamedTuple: 'error', + reportPrivateUsage: 'error', + reportTypeCommentUsage: 'error', + reportPrivateImportUsage: 'error', + reportConstantRedefinition: 'error', + reportDeprecated: 'error', + reportIncompatibleMethodOverride: 'error', + reportIncompatibleVariableOverride: 'error', + reportInconsistentConstructor: 'error', + reportOverlappingOverload: 'error', + reportPossiblyUnboundVariable: 'error', + reportMissingSuperCall: 'none', + reportUninitializedInstanceVariable: 'none', + reportInvalidStringEscapeSequence: 'error', + 
reportUnknownParameterType: 'error', + reportUnknownArgumentType: 'error', + reportUnknownLambdaType: 'error', + reportUnknownVariableType: 'error', + reportUnknownMemberType: 'error', + reportMissingParameterType: 'error', + reportMissingTypeArgument: 'error', + reportInvalidTypeVarUse: 'error', + reportCallInDefaultInitializer: 'none', + reportUnnecessaryIsInstance: 'error', + reportUnnecessaryCast: 'error', + reportUnnecessaryComparison: 'error', + reportUnnecessaryContains: 'error', + reportAssertAlwaysTrue: 'error', + reportSelfClsParameterName: 'error', + reportImplicitStringConcatenation: 'none', + reportUnboundVariable: 'error', + reportUnhashable: 'error', + reportUndefinedVariable: 'error', + reportInvalidStubStatement: 'error', + reportIncompleteStub: 'error', + reportUnsupportedDunderAll: 'error', + reportUnusedCallResult: 'none', + reportUnusedCoroutine: 'error', + reportUnusedExcept: 'error', + reportUnusedExpression: 'error', + reportUnnecessaryTypeIgnoreComment: 'none', + reportMatchNotExhaustive: 'error', + reportUnreachable: 'none', + reportImplicitOverride: 'none', + }; + + return diagSettings; +} + +export function matchFileSpecs(configOptions: ConfigOptions, uri: Uri, isFile = true) { + for (const includeSpec of configOptions.include) { + if (FileSpec.matchIncludeFileSpec(includeSpec.regExp, configOptions.exclude, uri, isFile)) { + return true; + } + } + + return false; +} + +// Internal configuration options. These are derived from a combination +// of the command line and from a JSON-based config file. +export class ConfigOptions { + // Absolute directory of project. All relative paths in the config + // are based on this path. + projectRoot: Uri; + + // Path to python interpreter. + pythonPath?: Uri | undefined; + + // Name of the python environment. + pythonEnvironmentName?: string | undefined; + + // Path to use for typeshed definitions. + typeshedPath?: Uri | undefined; + + // Path to custom typings (stub) modules. 
+ stubPath?: Uri | undefined; + + // A list of file specs to include in the analysis. Can contain + // directories, in which case all "*.py" files within those directories + // are included. + include: FileSpec[] = []; + + // A list of file specs to exclude from the analysis (overriding include + // if necessary). Can contain directories, in which case all "*.py" files + // within those directories are included. + exclude: FileSpec[] = []; + + // Automatically detect virtual environment folders and exclude them. + // This property is for internal use and not exposed externally + // as a config setting. + // It is used to store whether the user has specified directories in + // the exclude setting, which is later modified to include a default set. + // This setting is true when user has not specified any exclude. + autoExcludeVenv?: boolean | undefined; + + // A list of file specs whose errors and warnings should be ignored even + // if they are included in the transitive closure of included files. + ignore: FileSpec[] = []; + + // A list of file specs that should be analyzed using "strict" mode. + strict: FileSpec[] = []; + + // A set of defined constants that are used by the binder to determine + // whether runtime conditions should evaluate to True or False. + defineConstant = new Map(); + + // Emit verbose information to console? + verboseOutput?: boolean | undefined; + + // Perform type checking and report diagnostics only for open files? + checkOnlyOpenFiles?: boolean | undefined; + + // In the absence of type stubs, use library implementations to extract + // type information? + useLibraryCodeForTypes?: boolean | undefined; + + // Offer auto-import completions. + autoImportCompletions = true; + + // Use indexing. + indexing = false; + + // Use type evaluator call tracking + logTypeEvaluationTime = false; + + // Minimum threshold for type eval logging + typeEvaluationTimeThreshold = 50; + + // Was this config initialized from JSON (pyrightconfig/pyproject)? 
+ initializedFromJson = false; + + // Filter out any hint diagnostics with tags? + disableTaggedHints = false; + + //--------------------------------------------------------------- + // Diagnostics Rule Set + + diagnosticRuleSet: DiagnosticRuleSet; + + //--------------------------------------------------------------- + // TaskList tokens used by diagnostics + + taskListTokens?: TaskListToken[] | undefined; + + //--------------------------------------------------------------- + // Parsing and Import Resolution Settings + + // Parameters that specify the execution environment for + // the files being analyzed. + executionEnvironments: ExecutionEnvironment[] = []; + + // Path to a directory containing one or more virtual environment + // directories. This is used in conjunction with the "venv" name in + // the config file to identify the python environment used for resolving + // third-party modules. + venvPath?: Uri | undefined; + + // Default venv environment. + venv?: string | undefined; + + // Default pythonVersion. Can be overridden by executionEnvironment. + defaultPythonVersion?: PythonVersion | undefined; + + // Default pythonPlatform. Can be overridden by executionEnvironment. + defaultPythonPlatform?: string | undefined; + + // Default extraPaths. Can be overridden by executionEnvironment. + defaultExtraPaths?: Uri[] | undefined; + + // Should native library import resolutions be skipped? + skipNativeLibraries?: boolean; + + //--------------------------------------------------------------- + // Internal-only switches + + // Run additional analysis as part of test cases? + internalTestMode?: boolean | undefined; + + // Run program in index generation mode. + indexGenerationMode?: boolean | undefined; + + // When a symbol cannot be resolved from an import, should it be + // treated as Any rather than Unknown? + evaluateUnknownImportsAsAny?: boolean; + + // Controls how hover and completion function signatures are displayed. 
+ functionSignatureDisplay: SignatureDisplayType; + + // Determines if has a config file (pyrightconfig.json or pyproject.toml) or not. + configFileSource?: Uri | undefined; + + // Determines the effective default type checking mode. + effectiveTypeCheckingMode: 'strict' | 'basic' | 'off' | 'standard' = 'standard'; + + constructor(projectRoot: Uri) { + this.projectRoot = projectRoot; + this.diagnosticRuleSet = ConfigOptions.getDiagnosticRuleSet(); + this.functionSignatureDisplay = SignatureDisplayType.formatted; + } + + static getDiagnosticRuleSet(typeCheckingMode?: string): DiagnosticRuleSet { + if (typeCheckingMode === 'strict') { + return getStrictDiagnosticRuleSet(); + } + + if (typeCheckingMode === 'basic') { + return getBasicDiagnosticRuleSet(); + } + + if (typeCheckingMode === 'off') { + return getOffDiagnosticRuleSet(); + } + + return getStandardDiagnosticRuleSet(); + } + + getDefaultExecEnvironment(): ExecutionEnvironment { + return new ExecutionEnvironment( + this._getEnvironmentName(), + this.projectRoot, + this.diagnosticRuleSet, + this.defaultPythonVersion, + this.defaultPythonPlatform, + this.defaultExtraPaths, + this.skipNativeLibraries + ); + } + + // Finds the best execution environment for a given file uri. The + // specified file path should be absolute. + // If no matching execution environment can be found, a default + // execution environment is used. + findExecEnvironment(file: Uri): ExecutionEnvironment { + return ( + this.executionEnvironments.find((env) => { + const envRoot = Uri.is(env.root) ? env.root : this.projectRoot.resolvePaths(env.root || ''); + return file.startsWith(envRoot); + }) ?? 
this.getDefaultExecEnvironment() + ); + } + + getExecutionEnvironments(): ExecutionEnvironment[] { + if (this.executionEnvironments.length > 0) { + return this.executionEnvironments; + } + + return [this.getDefaultExecEnvironment()]; + } + + initializeTypeCheckingMode( + typeCheckingMode: string | undefined, + severityOverrides?: DiagnosticSeverityOverridesMap + ) { + this.diagnosticRuleSet = ConfigOptions.getDiagnosticRuleSet(typeCheckingMode); + this.effectiveTypeCheckingMode = typeCheckingMode as 'strict' | 'basic' | 'off' | 'standard'; + + if (severityOverrides) { + this.applyDiagnosticOverrides(severityOverrides); + } + } + + // Initialize the structure from a JSON object. + initializeFromJson(configObj: any, configDirUri: Uri, serviceProvider: ServiceProvider, host: Host) { + this.initializedFromJson = true; + const console = serviceProvider.tryGet(ServiceKeys.console) ?? new NullConsole(); + const configObjKeys = configObj && typeof configObj === 'object' ? Object.getOwnPropertyNames(configObj) : []; + const unusedConfigKeys = new Set(configObjKeys); + + // Read the "include" entry. + if (configObj.include !== undefined) { + unusedConfigKeys.delete('include'); + if (!Array.isArray(configObj.include)) { + console.error(`Config "include" entry must contain an array.`); + } else { + this.include = []; + const filesList = configObj.include as string[]; + filesList.forEach((fileSpec, index) => { + if (typeof fileSpec !== 'string') { + console.error(`Index ${index} of "include" array should be a string.`); + } else if (isAbsolute(fileSpec)) { + console.error(`Ignoring path "${fileSpec}" in "include" array because it is not relative.`); + } else { + this.include.push(getFileSpec(configDirUri, fileSpec)); + } + }); + } + } + + // Read the "exclude" entry. 
+ if (configObj.exclude !== undefined) { + unusedConfigKeys.delete('exclude'); + if (!Array.isArray(configObj.exclude)) { + console.error(`Config "exclude" entry must contain an array.`); + } else { + this.exclude = []; + const filesList = configObj.exclude as string[]; + filesList.forEach((fileSpec, index) => { + if (typeof fileSpec !== 'string') { + console.error(`Index ${index} of "exclude" array should be a string.`); + } else if (isAbsolute(fileSpec)) { + console.error(`Ignoring path "${fileSpec}" in "exclude" array because it is not relative.`); + } else { + this.exclude.push(getFileSpec(configDirUri, fileSpec)); + } + }); + } + } + + // Read the "ignore" entry. + if (configObj.ignore !== undefined) { + unusedConfigKeys.delete('ignore'); + if (!Array.isArray(configObj.ignore)) { + console.error(`Config "ignore" entry must contain an array.`); + } else { + this.ignore = []; + const filesList = configObj.ignore as string[]; + filesList.forEach((fileSpec, index) => { + if (typeof fileSpec !== 'string') { + console.error(`Index ${index} of "ignore" array should be a string.`); + } else { + // We'll allow absolute paths in the ignore list. While it + // is not recommended to use absolute paths anywhere in + // the config file, there are a few legit use cases for ignore + // paths when the conf file is used with a language server. + this.ignore.push(getFileSpec(configDirUri, fileSpec)); + } + }); + } + } + + // Read the "strict" entry. 
+ if (configObj.strict !== undefined) { + unusedConfigKeys.delete('strict'); + if (!Array.isArray(configObj.strict)) { + console.error(`Config "strict" entry must contain an array.`); + } else { + this.strict = []; + const filesList = configObj.strict as string[]; + filesList.forEach((fileSpec, index) => { + if (typeof fileSpec !== 'string') { + console.error(`Index ${index} of "strict" array should be a string.`); + } else if (isAbsolute(fileSpec)) { + console.error(`Ignoring path "${fileSpec}" in "strict" array because it is not relative.`); + } else { + this.strict.push(getFileSpec(configDirUri, fileSpec)); + } + }); + } + } + + // If there is a "typeCheckingMode", it can override the provided setting. + if (configObj.typeCheckingMode !== undefined) { + unusedConfigKeys.delete('typeCheckingMode'); + if ( + configObj.typeCheckingMode === 'off' || + configObj.typeCheckingMode === 'basic' || + configObj.typeCheckingMode === 'standard' || + configObj.typeCheckingMode === 'strict' + ) { + this.initializeTypeCheckingMode(configObj.typeCheckingMode); + } else { + console.error(`Config "typeCheckingMode" entry must contain "off", "basic", "standard", or "strict".`); + } + } + + if (configObj.useLibraryCodeForTypes !== undefined) { + unusedConfigKeys.delete('useLibraryCodeForTypes'); + if (typeof configObj.useLibraryCodeForTypes === 'boolean') { + this.useLibraryCodeForTypes = configObj.useLibraryCodeForTypes; + } else { + console.error(`Config "useLibraryCodeForTypes" entry must be true or false.`); + } + } + + // Apply overrides from the config file for the boolean rules. + const configRuleSet = { ...this.diagnosticRuleSet }; + getBooleanDiagnosticRules(/* includeNonOverridable */ true).forEach((ruleName) => { + unusedConfigKeys.delete(ruleName); + (configRuleSet as any)[ruleName] = this._convertBoolean( + configObj[ruleName], + ruleName, + configRuleSet[ruleName] as boolean + ); + }); + + // Apply overrides from the config file for the diagnostic level rules. 
+ getDiagLevelDiagnosticRules().forEach((ruleName) => { + unusedConfigKeys.delete(ruleName); + (configRuleSet as any)[ruleName] = this._convertDiagnosticLevel( + configObj[ruleName], + ruleName, + configRuleSet[ruleName] as DiagnosticLevel + ); + }); + this.diagnosticRuleSet = { ...configRuleSet }; + + // Read the "venvPath". + if (configObj.venvPath !== undefined) { + unusedConfigKeys.delete('venvPath'); + if (typeof configObj.venvPath !== 'string') { + console.error(`Config "venvPath" field must contain a string.`); + } else { + this.venvPath = configDirUri.resolvePaths(configObj.venvPath); + } + } + + // Read the "venv" name. + if (configObj.venv !== undefined) { + unusedConfigKeys.delete('venv'); + if (typeof configObj.venv !== 'string') { + console.error(`Config "venv" field must contain a string.`); + } else { + this.venv = configObj.venv; + } + } + + // Read the config "extraPaths". + const configExtraPaths: Uri[] = []; + if (configObj.extraPaths !== undefined) { + unusedConfigKeys.delete('extraPaths'); + if (!Array.isArray(configObj.extraPaths)) { + console.error(`Config "extraPaths" field must contain an array.`); + } else { + const pathList = configObj.extraPaths as string[]; + pathList.forEach((path, pathIndex) => { + if (typeof path !== 'string') { + console.error(`Config "extraPaths" field ${pathIndex} must be a string.`); + } else { + configExtraPaths!.push(configDirUri.resolvePaths(path)); + } + }); + this.defaultExtraPaths = [...configExtraPaths]; + } + } + + // Read the default "pythonVersion". 
+ if (configObj.pythonVersion !== undefined) { + unusedConfigKeys.delete('pythonVersion'); + if (typeof configObj.pythonVersion === 'string') { + const version = PythonVersion.fromString(configObj.pythonVersion); + if (version) { + this.defaultPythonVersion = version; + } else { + console.error(`Config "pythonVersion" field contains unsupported version.`); + } + } else { + console.error(`Config "pythonVersion" field must contain a string.`); + } + } + + // Read the default "pythonPlatform". + if (configObj.pythonPlatform !== undefined) { + unusedConfigKeys.delete('pythonPlatform'); + if (typeof configObj.pythonPlatform !== 'string') { + console.error(`Config "pythonPlatform" field must contain a string.`); + } else { + this.defaultPythonPlatform = configObj.pythonPlatform; + } + } + + // Read the skipNativeLibraries flag. This isn't officially documented + // or supported. It was added specifically to improve initialization + // performance for playgrounds or web-based environments where native + // libraries will not be present. + if (configObj.skipNativeLibraries !== undefined) { + unusedConfigKeys.delete('skipNativeLibraries'); + if (typeof configObj.skipNativeLibraries === 'boolean') { + this.skipNativeLibraries = configObj.skipNativeLibraries; + } else { + console.error(`Config "skipNativeLibraries" field must contain a boolean.`); + } + } + + // Read the "typeshedPath" setting. + if (configObj.typeshedPath !== undefined) { + unusedConfigKeys.delete('typeshedPath'); + if (typeof configObj.typeshedPath !== 'string') { + console.error(`Config "typeshedPath" field must contain a string.`); + } else { + this.typeshedPath = configObj.typeshedPath + ? configDirUri.resolvePaths(configObj.typeshedPath) + : undefined; + } + } + + // Read the "stubPath" setting. 
+ + // Keep this for backward compatibility + if (configObj.typingsPath !== undefined) { + unusedConfigKeys.delete('typingsPath'); + if (typeof configObj.typingsPath !== 'string') { + console.error(`Config "typingsPath" field must contain a string.`); + } else { + console.error(`Config "typingsPath" is now deprecated. Please, use stubPath instead.`); + this.stubPath = configDirUri.resolvePaths(configObj.typingsPath); + } + } + + if (configObj.stubPath !== undefined) { + unusedConfigKeys.delete('stubPath'); + if (typeof configObj.stubPath !== 'string') { + console.error(`Config "stubPath" field must contain a string.`); + } else { + this.stubPath = configDirUri.resolvePaths(configObj.stubPath); + } + } + + // Read the "verboseOutput" setting. + // Don't initialize to a default value because we want the command-line "verbose" + // switch to apply if this setting isn't specified in the config file. + if (configObj.verboseOutput !== undefined) { + unusedConfigKeys.delete('verboseOutput'); + if (typeof configObj.verboseOutput !== 'boolean') { + console.error(`Config "verboseOutput" field must be true or false.`); + } else { + this.verboseOutput = configObj.verboseOutput; + } + } + + // Read the "defineConstant" setting. + if (configObj.defineConstant !== undefined) { + unusedConfigKeys.delete('defineConstant'); + if (typeof configObj.defineConstant !== 'object' || Array.isArray(configObj.defineConstant)) { + console.error(`Config "defineConstant" field must contain a map indexed by constant names.`); + } else { + const keys = Object.getOwnPropertyNames(configObj.defineConstant); + keys.forEach((key) => { + const value = configObj.defineConstant[key]; + const valueType = typeof value; + if (valueType !== 'boolean' && valueType !== 'string') { + console.error(`Defined constant "${key}" must be associated with a boolean or string value.`); + } else { + this.defineConstant.set(key, value); + } + }); + } + } + + // Read the "useLibraryCodeForTypes" setting. 
+ if (configObj.useLibraryCodeForTypes !== undefined) { + unusedConfigKeys.delete('useLibraryCodeForTypes'); + if (typeof configObj.useLibraryCodeForTypes !== 'boolean') { + console.error(`Config "useLibraryCodeForTypes" field must be true or false.`); + } else { + this.useLibraryCodeForTypes = configObj.useLibraryCodeForTypes; + } + } + + // Read the "autoImportCompletions" setting. + if (configObj.autoImportCompletions !== undefined) { + unusedConfigKeys.delete('autoImportCompletions'); + if (typeof configObj.autoImportCompletions !== 'boolean') { + console.error(`Config "autoImportCompletions" field must be true or false.`); + } else { + this.autoImportCompletions = configObj.autoImportCompletions; + } + } + + // Read the "indexing" setting. + if (configObj.indexing !== undefined) { + unusedConfigKeys.delete('indexing'); + if (typeof configObj.indexing !== 'boolean') { + console.error(`Config "indexing" field must be true or false.`); + } else { + this.indexing = configObj.indexing; + } + } + + // Read the "logTypeEvaluationTime" setting. + if (configObj.logTypeEvaluationTime !== undefined) { + unusedConfigKeys.delete('logTypeEvaluationTime'); + if (typeof configObj.logTypeEvaluationTime !== 'boolean') { + console.error(`Config "logTypeEvaluationTime" field must be true or false.`); + } else { + this.logTypeEvaluationTime = configObj.logTypeEvaluationTime; + } + } + + // Read the "typeEvaluationTimeThreshold" setting. + if (configObj.typeEvaluationTimeThreshold !== undefined) { + unusedConfigKeys.delete('typeEvaluationTimeThreshold'); + if (typeof configObj.typeEvaluationTimeThreshold !== 'number') { + console.error(`Config "typeEvaluationTimeThreshold" field must be a number.`); + } else { + this.typeEvaluationTimeThreshold = configObj.typeEvaluationTimeThreshold; + } + } + + // Read the "functionSignatureDisplay" setting. 
+ if (configObj.functionSignatureDisplay !== undefined) { + unusedConfigKeys.delete('functionSignatureDisplay'); + if (typeof configObj.functionSignatureDisplay !== 'string') { + console.error(`Config "functionSignatureDisplay" field must be true or false.`); + } else { + if ( + configObj.functionSignatureDisplay === 'compact' || + configObj.functionSignatureDisplay === 'formatted' + ) { + this.functionSignatureDisplay = configObj.functionSignatureDisplay as SignatureDisplayType; + } + } + } + + unusedConfigKeys.delete('executionEnvironments'); + unusedConfigKeys.delete('extends'); + + Array.from(unusedConfigKeys).forEach((unknownKey) => { + console.error(`Config contains unrecognized setting "${unknownKey}".`); + }); + } + + static resolveExtends(configObj: any, configDirUri: Uri): Uri | undefined { + if (configObj.extends !== undefined) { + if (typeof configObj.extends !== 'string') { + console.error(`Config "extends" field must contain a string.`); + } else { + return configDirUri.resolvePaths(configObj.extends); + } + } + + return undefined; + } + + ensureDefaultPythonPlatform(host: Host, console: ConsoleInterface) { + // If no default python platform was specified, assume that the + // user wants to use the current platform. + if (this.defaultPythonPlatform !== undefined) { + return; + } + + this.defaultPythonPlatform = host.getPythonPlatform(); + if (this.defaultPythonPlatform !== undefined) { + console.log(`Assuming Python platform ${this.defaultPythonPlatform}`); + } + } + + ensureDefaultPythonVersion(host: Host, console: ConsoleInterface) { + // If no default python version was specified, retrieve the version + // from the currently-selected python interpreter. 
+ if (this.defaultPythonVersion !== undefined) { + return; + } + + const importLogger = new ImportLogger(); + this.defaultPythonVersion = host.getPythonVersion(this.pythonPath, importLogger); + if (this.defaultPythonVersion !== undefined) { + console.info(`Assuming Python version ${PythonVersion.toString(this.defaultPythonVersion)}`); + } + + for (const log of importLogger.getLogs()) { + console.info(log); + } + } + + ensureDefaultExtraPaths(fs: FileSystem, autoSearchPaths: boolean, extraPaths: string[] | undefined) { + const paths: Uri[] = []; + + if (autoSearchPaths) { + // Auto-detect the common scenario where the sources are under the src folder + const srcPath = this.projectRoot.resolvePaths(pathConsts.src); + if (fs.existsSync(srcPath) && !fs.existsSync(srcPath.resolvePaths('__init__.py'))) { + paths.push(fs.realCasePath(srcPath)); + } + } + + if (extraPaths && extraPaths.length > 0) { + for (const p of extraPaths) { + const path = this.projectRoot.resolvePaths(p); + paths.push(fs.realCasePath(path)); + if (isDirectory(fs, path)) { + appendArray(paths, getPathsFromPthFiles(fs, path)); + } + } + } + + if (paths.length > 0) { + this.defaultExtraPaths = paths; + } + } + + applyDiagnosticOverrides( + diagnosticOverrides: DiagnosticSeverityOverridesMap | DiagnosticBooleanOverridesMap | undefined + ) { + if (!diagnosticOverrides) { + return; + } + + for (const ruleName of getDiagLevelDiagnosticRules()) { + const severity = diagnosticOverrides[ruleName]; + if (severity !== undefined && !isBoolean(severity) && getDiagnosticSeverityOverrides().includes(severity)) { + (this.diagnosticRuleSet as any)[ruleName] = severity; + } + } + + for (const ruleName of getBooleanDiagnosticRules(/* includeNonOverridable */ true)) { + const value = diagnosticOverrides[ruleName]; + if (value !== undefined && isBoolean(value)) { + (this.diagnosticRuleSet as any)[ruleName] = value; + } + } + } + + setupExecutionEnvironments(configObj: any, configDirUri: Uri, console: ConsoleInterface) { 
+ // Read the "executionEnvironments" array. This should be done at the end + // after we've established default values. + if (configObj.executionEnvironments !== undefined) { + if (!Array.isArray(configObj.executionEnvironments)) { + console.error(`Config "executionEnvironments" field must contain an array.`); + } else { + this.executionEnvironments = []; + + const execEnvironments = configObj.executionEnvironments as ExecutionEnvironment[]; + + execEnvironments.forEach((env, index) => { + const execEnv = this._initExecutionEnvironmentFromJson( + env, + configDirUri, + index, + console, + this.diagnosticRuleSet, + this.defaultPythonVersion, + this.defaultPythonPlatform, + this.defaultExtraPaths || [] + ); + + if (execEnv) { + this.executionEnvironments.push(execEnv); + } + }); + } + } + } + + private _getEnvironmentName(): string { + return this.pythonEnvironmentName || this.pythonPath?.toString() || 'python'; + } + + private _convertBoolean(value: any, fieldName: string, defaultValue: boolean): boolean { + if (value === undefined) { + return defaultValue; + } else if (typeof value === 'boolean') { + return value ? true : false; + } + + console.log(`Config "${fieldName}" entry must be true or false.`); + return defaultValue; + } + + private _convertDiagnosticLevel(value: any, fieldName: string, defaultValue: DiagnosticLevel): DiagnosticLevel { + if (value === undefined) { + return defaultValue; + } else if (typeof value === 'boolean') { + return value ? 
'error' : 'none'; + } else if (typeof value === 'string') { + if (value === 'error' || value === 'warning' || value === 'information' || value === 'none') { + return value; + } + } + + console.log(`Config "${fieldName}" entry must be true, false, "error", "warning", "information" or "none".`); + return defaultValue; + } + + private _initExecutionEnvironmentFromJson( + envObj: any, + configDirUri: Uri, + index: number, + console: ConsoleInterface, + configDiagnosticRuleSet: DiagnosticRuleSet, + configPythonVersion: PythonVersion | undefined, + configPythonPlatform: string | undefined, + configExtraPaths: Uri[] + ): ExecutionEnvironment | undefined { + try { + const envObjKeys = envObj && typeof envObj === 'object' ? Object.getOwnPropertyNames(envObj) : []; + const unusedEnvKeys = new Set(envObjKeys); + + const newExecEnv = new ExecutionEnvironment( + this._getEnvironmentName(), + configDirUri, + configDiagnosticRuleSet, + configPythonVersion, + configPythonPlatform, + configExtraPaths + ); + + // Validate the root. + unusedEnvKeys.delete('root'); + if (envObj.root && typeof envObj.root === 'string') { + newExecEnv.root = configDirUri.resolvePaths(envObj.root); + } else { + console.error(`Config executionEnvironments index ${index}: missing root value.`); + } + + // Validate the extraPaths. + unusedEnvKeys.delete('extraPaths'); + if (envObj.extraPaths) { + if (!Array.isArray(envObj.extraPaths)) { + console.error( + `Config executionEnvironments index ${index}: extraPaths field must contain an array.` + ); + } else { + // If specified, this overrides the default extra paths inherited + // from the top-level config. 
+ newExecEnv.extraPaths = []; + + const pathList = envObj.extraPaths as string[]; + pathList.forEach((path, pathIndex) => { + if (typeof path !== 'string') { + console.error( + `Config executionEnvironments index ${index}:` + + ` extraPaths field ${pathIndex} must be a string.` + ); + } else { + newExecEnv.extraPaths.push(configDirUri.resolvePaths(path)); + } + }); + } + } + + // Validate the pythonVersion. + unusedEnvKeys.delete('pythonVersion'); + if (envObj.pythonVersion) { + if (typeof envObj.pythonVersion === 'string') { + const version = PythonVersion.fromString(envObj.pythonVersion); + if (version) { + newExecEnv.pythonVersion = version; + } else { + console.warn(`Config executionEnvironments index ${index} contains unsupported pythonVersion.`); + } + } else { + console.error(`Config executionEnvironments index ${index} pythonVersion must be a string.`); + } + } + + // Validate the pythonPlatform. + unusedEnvKeys.delete('pythonPlatform'); + if (envObj.pythonPlatform) { + if (typeof envObj.pythonPlatform === 'string') { + newExecEnv.pythonPlatform = envObj.pythonPlatform; + } else { + console.error(`Config executionEnvironments index ${index} pythonPlatform must be a string.`); + } + } + + // Validate the name. + unusedEnvKeys.delete('name'); + if (envObj.name) { + if (typeof envObj.name === 'string') { + newExecEnv.name = envObj.name; + } else { + console.error(`Config executionEnvironments index ${index} name must be a string.`); + } + } + + // Apply overrides from the config file for the boolean overrides. + getBooleanDiagnosticRules(/* includeNonOverridable */ true).forEach((ruleName) => { + unusedEnvKeys.delete(ruleName); + (newExecEnv.diagnosticRuleSet as any)[ruleName] = this._convertBoolean( + envObj[ruleName], + ruleName, + newExecEnv.diagnosticRuleSet[ruleName] as boolean + ); + }); + + // Apply overrides from the config file for the diagnostic level overrides. 
+ getDiagLevelDiagnosticRules().forEach((ruleName) => { + unusedEnvKeys.delete(ruleName); + (newExecEnv.diagnosticRuleSet as any)[ruleName] = this._convertDiagnosticLevel( + envObj[ruleName], + ruleName, + newExecEnv.diagnosticRuleSet[ruleName] as DiagnosticLevel + ); + }); + + Array.from(unusedEnvKeys).forEach((unknownKey) => { + console.error(`Config executionEnvironments index ${index}: unrecognized setting "${unknownKey}".`); + }); + + return newExecEnv; + } catch { + console.error(`Config executionEnvironments index ${index} is not accessible.`); + } + + return undefined; + } +} + +export function parseDiagLevel(value: string | boolean): DiagnosticSeverityOverrides | undefined { + switch (value) { + case false: + case 'none': + return DiagnosticSeverityOverrides.None; + + case true: + case 'error': + return DiagnosticSeverityOverrides.Error; + + case 'warning': + return DiagnosticSeverityOverrides.Warning; + + case 'information': + return DiagnosticSeverityOverrides.Information; + + default: + return undefined; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/console.ts b/python-parser/packages/pyright-internal/src/common/console.ts new file mode 100644 index 00000000..56fb97f0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/console.ts @@ -0,0 +1,311 @@ +/* + * console.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides an abstraction for console logging and error-reporting + * methods. 
+ */ + +import { Disposable } from 'vscode-jsonrpc'; +import * as debug from './debug'; +import { addIfUnique, removeArrayElements } from './collectionUtils'; +import { isString } from './core'; + +export enum LogLevel { + Error = 'error', + Warn = 'warn', + Info = 'info', + Log = 'log', +} +export interface ConsoleInterface { + error: (message: string) => void; + warn: (message: string) => void; + info: (message: string) => void; + log: (message: string) => void; +} + +export namespace ConsoleInterface { + export function is(obj: any): obj is ConsoleInterface { + return obj.error !== undefined && obj.warn !== undefined && obj.info !== undefined && obj.log !== undefined; + } + + export function hasLevel(console: any): console is ConsoleInterface & { level: LogLevel } { + return is(console) && 'level' in console; + } +} + +const levelMap = new Map([ + [LogLevel.Error, 0], + [LogLevel.Warn, 1], + [LogLevel.Info, 2], + [LogLevel.Log, 3], +]); + +export function getLevelNumber(level: LogLevel): number { + return levelMap.get(level) ?? 3; +} + +// Avoids outputting errors to the console but counts +// the number of logs and errors, which can be useful +// for unit tests. 
+export class NullConsole implements ConsoleInterface { + logCount = 0; + infoCount = 0; + warnCount = 0; + errorCount = 0; + + log(message: string) { + this.logCount++; + } + + info(message: string) { + this.infoCount++; + } + + warn(message: string) { + this.warnCount++; + } + + error(message: string) { + this.errorCount++; + } +} + +export class StandardConsole implements ConsoleInterface { + constructor(private _maxLevel: LogLevel = LogLevel.Log) {} + + get level(): LogLevel { + return this._maxLevel; + } + + log(message: string) { + if (getLevelNumber(this._maxLevel) >= getLevelNumber(LogLevel.Log)) { + console.log(message); + } + } + + info(message: string) { + if (getLevelNumber(this._maxLevel) >= getLevelNumber(LogLevel.Info)) { + console.info(message); + } + } + + warn(message: string) { + if (getLevelNumber(this._maxLevel) >= getLevelNumber(LogLevel.Warn)) { + console.warn(message); + } + } + + error(message: string) { + if (getLevelNumber(this._maxLevel) >= getLevelNumber(LogLevel.Error)) { + console.error(message); + } + } +} + +export class StderrConsole implements ConsoleInterface { + constructor(private _maxLevel: LogLevel = LogLevel.Log) {} + + get level(): LogLevel { + return this._maxLevel; + } + + log(message: string) { + if (getLevelNumber(this._maxLevel) >= getLevelNumber(LogLevel.Log)) { + console.error(message); + } + } + + info(message: string) { + if (getLevelNumber(this._maxLevel) >= getLevelNumber(LogLevel.Info)) { + console.error(message); + } + } + + warn(message: string) { + if (getLevelNumber(this._maxLevel) >= getLevelNumber(LogLevel.Warn)) { + console.error(message); + } + } + + error(message: string) { + if (getLevelNumber(this._maxLevel) >= getLevelNumber(LogLevel.Error)) { + console.error(message); + } + } +} + +export interface Chainable { + addChain(console: ConsoleInterface): void; + removeChain(console: ConsoleInterface): void; +} + +export namespace Chainable { + export function is(value: any): value is Chainable { + return 
value && value.addChain && value.removeChain; + } +} + +export interface Clonable { + clone(name: string): ConsoleInterface; +} + +export namespace Clonable { + export function is(value: any): value is Clonable { + return value && value.clone; + } +} + +export interface SupportName { + readonly name: string; +} + +export namespace SupportName { + export function is(value: any): value is SupportName { + return value && isString(value.name); + } +} + +export class ConsoleWithLogLevel implements ConsoleInterface, Chainable, Clonable, SupportName, Disposable { + private readonly _chains: ConsoleInterface[] = []; + + private _maxLevel = 2; + private _disposed = false; + + constructor(private _console: ConsoleInterface, readonly name = '') {} + + get level(): LogLevel { + switch (this._maxLevel) { + case 0: + return LogLevel.Error; + + case 1: + return LogLevel.Warn; + + case 2: + return LogLevel.Info; + } + + return LogLevel.Log; + } + + set level(value: LogLevel) { + let maxLevel = getLevelNumber(value); + if (maxLevel === undefined) { + maxLevel = getLevelNumber(LogLevel.Info)!; + } + + this._maxLevel = maxLevel; + } + + dispose() { + this._disposed = true; + } + + clone(name: string): ConsoleWithLogLevel { + // For now, we won't support cloning chains. 
+ const newConsole = new ConsoleWithLogLevel(this._console, name); + newConsole._maxLevel = this._maxLevel; + return newConsole; + } + + error(message: string) { + this._log(LogLevel.Error, `${this._prefix}${message}`); + } + + warn(message: string) { + this._log(LogLevel.Warn, `${this._prefix}${message}`); + } + + info(message: string) { + this._log(LogLevel.Info, `${this._prefix}${message}`); + } + + log(message: string) { + this._log(LogLevel.Log, `${this._prefix}${message}`); + } + + addChain(console: ConsoleInterface): void { + addIfUnique(this._chains, console); + } + + removeChain(console: ConsoleInterface): void { + removeArrayElements(this._chains, (i) => i === console); + } + + private get _prefix() { + return this.name ? `${this.name}: ` : ''; + } + + private _log(level: LogLevel, message: string): void { + if (this._disposed) { + return; + } + + this._processChains(level, message); + + if (this._getNumericalLevel(level) > this._maxLevel) { + return; + } + + log(this._console, level, message); + } + + private _getNumericalLevel(level: LogLevel): number { + const numericLevel = getLevelNumber(level); + debug.assert(numericLevel !== undefined, 'Logger: unknown log level.'); + return numericLevel !== undefined ? 
numericLevel : 2; + } + + private _processChains(level: LogLevel, message: string) { + this._chains.forEach((c) => log(c, level, message)); + } +} + +export function log(console: ConsoleInterface, logType: LogLevel, msg: string) { + switch (logType) { + case LogLevel.Log: + console.log(msg); + break; + + case LogLevel.Info: + console.info(msg); + break; + + case LogLevel.Warn: + console.warn(msg); + break; + + case LogLevel.Error: + console.error(msg); + break; + + default: + debug.fail(`${logType} is not expected`); + } +} + +export function convertLogLevel(logLevelValue?: string): LogLevel { + if (!logLevelValue) { + return LogLevel.Info; + } + + switch (logLevelValue.toLowerCase()) { + case 'error': + return LogLevel.Error; + + case 'warning': + return LogLevel.Warn; + + case 'information': + return LogLevel.Info; + + case 'trace': + return LogLevel.Log; + + default: + return LogLevel.Info; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/core.ts b/python-parser/packages/pyright-internal/src/common/core.ts new file mode 100644 index 00000000..24b70171 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/core.ts @@ -0,0 +1,221 @@ +/* + * core.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Various helpers that don't have a dependency on other code files. + */ + +export const enum Comparison { + LessThan = -1, + EqualTo = 0, + GreaterThan = 1, +} + +/** + * Safer version of `Function` which should not be called. + * Every function should be assignable to this, but this should not be assignable to every function. 
+ */ +export type AnyFunction = (...args: never[]) => void; + +/** Do nothing and return false */ +export function returnFalse(): false { + return false; +} + +/** Do nothing and return true */ +export function returnTrue(): true { + return true; +} + +/** Do nothing and return undefined */ +export function returnUndefined(): undefined { + return undefined; +} + +/** Returns its argument. */ +export function identity(x: T) { + return x; +} + +/** Returns lower case string */ +export function toLowerCase(x: string) { + return x.toLowerCase(); +} + +export function equateValues(a: T, b: T) { + return a === b; +} + +export function compareComparableValues(a: string | undefined, b: string | undefined): Comparison; +export function compareComparableValues(a: number | undefined, b: number | undefined): Comparison; +export function compareComparableValues(a: string | number | undefined, b: string | number | undefined) { + return a === b + ? Comparison.EqualTo + : a === undefined + ? Comparison.LessThan + : b === undefined + ? Comparison.GreaterThan + : a < b + ? Comparison.LessThan + : Comparison.GreaterThan; +} + +/** + * Compare two numeric values for their order relative to each other. + * To compare strings, use any of the `compareStrings` functions. + */ +export function compareValues(a: number | undefined, b: number | undefined): Comparison { + return compareComparableValues(a, b); +} + +/** + * Tests whether a value is an array. + */ +export function isArray(value: any): value is T { + return Array.isArray ? 
Array.isArray(value) : value instanceof Array; +} + +/** + * Tests whether a value is string + */ +export function isString(text: unknown): text is string { + return typeof text === 'string'; +} + +export function isNumber(x: unknown): x is number { + return typeof x === 'number'; +} + +export function isBoolean(x: unknown): x is boolean { + return typeof x === 'boolean'; +} + +const hasOwnProperty = Object.prototype.hasOwnProperty; + +/** + * Type of objects whose values are all of the same type. + * The `in` and `for-in` operators can *not* be safely used, + * since `Object.prototype` may be modified by outside code. + */ +export interface MapLike { + readonly [Symbol.toStringTag]: string; + get(key: K): V | undefined; + has(key: K): boolean; + set(key: K, value: V): this; +} + +/** + * Indicates whether a map-like contains an own property with the specified key. + * + * @param map A map-like. + * @param key A property key. + */ +export function hasProperty(map: { [index: string]: any }, key: string): boolean { + return hasOwnProperty.call(map, key); +} + +/** + * Convert the given value to boolean + * @param trueOrFalse string value 'true' or 'false' + */ +export function toBoolean(trueOrFalse: string): boolean { + const normalized = trueOrFalse?.trim().toUpperCase(); + if (normalized === 'TRUE') { + return true; + } + + return false; +} + +let _debugMode: boolean | undefined = undefined; +export function test_setDebugMode(debugMode: boolean | undefined) { + const oldValue = _debugMode; + _debugMode = debugMode; + return oldValue; +} + +export function isDebugMode() { + if (_debugMode === undefined) { + // Cache debugging mode since it can't be changed while process is running. 
+ const argv = process.execArgv.join(); + _debugMode = argv.includes('inspect') || argv.includes('debug'); + } + + return _debugMode; +} + +interface Thenable { + then( + onfulfilled?: (value: T) => TResult | Thenable, + onrejected?: (reason: any) => TResult | Thenable + ): Thenable; + then( + onfulfilled?: (value: T) => TResult | Thenable, + onrejected?: (reason: any) => void + ): Thenable; +} + +export function isThenable(v: any): v is Thenable { + return typeof v?.then === 'function'; +} + +export function isDefined(element: T | undefined): element is T { + return element !== undefined; +} + +export function getEnumNames(enumType: T) { + const result: string[] = []; + for (const value in enumType) { + if (isNaN(Number(value))) { + result.push(value); + } + } + + return result; +} + +export function containsOnlyWhitespace(text: string, start?: number, end?: number) { + if (start !== undefined) { + text = text.substring(start, end); + } + + return /^\s*$/.test(text); +} + +export function cloneStr(str: string): string { + // Ensure we get a copy of the string that is not shared with the original string. + // Node.js has an internal optimization where it uses sliced strings for `substring`, `slice`, `substr` + // when it deems appropriate. Most of the time, this optimization is beneficial, but in this case, we want + // to ensure we get a copy of the string to prevent the original string from being retained in memory. + // For example, the import resolution cache in importResolver might hold onto the full original file content + // because seemingly innocent the import name (e.g., `foo` in `import foo`) is in the cache. + + // V8 uses a SlicedString representation for substrings only above a small length threshold (currently 13), + // so short strings can be returned as-is without retaining the original text in memory. 
+ // https://github.com/v8/v8/blob/02558d5a88c8f06ff064e3b6b332f342e1ab6143/src/objects/string.h#L1054 + if (str.length < 13) { + return str; + } + + return Buffer.from(str, 'utf8').toString('utf8'); +} + +export namespace Disposable { + export function is(value: any): value is { dispose(): void } { + return value && typeof value.dispose === 'function'; + } +} + +export function isMap(obj: unknown): obj is Map { + return typeof obj === 'object' && obj !== null && obj.constructor === Map; +} + +export function isPromise(obj: unknown): obj is Promise { + return ( + typeof obj === 'object' && + obj !== null && + typeof (obj as any).then === 'function' && + typeof (obj as any).catch === 'function' + ); +} diff --git a/python-parser/packages/pyright-internal/src/common/crypto.ts b/python-parser/packages/pyright-internal/src/common/crypto.ts new file mode 100644 index 00000000..ff200a94 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/crypto.ts @@ -0,0 +1,60 @@ +/* + * crypto.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Platform-independent helper functions for crypto. + */ + +import { fail } from './debug'; + +let nodeCrypto: typeof import('crypto') | undefined; + +try { + // eslint-disable-next-line @typescript-eslint/no-var-requires + nodeCrypto = require('crypto'); + if (!nodeCrypto?.randomBytes) { + nodeCrypto = undefined; + } +} catch { + // Not running in node. +} + +// See lib.dom.d.ts. 
+interface Crypto { + getRandomValues< + T extends + | Int8Array + | Int16Array + | Int32Array + | Uint8Array + | Uint16Array + | Uint32Array + | Uint8ClampedArray + | Float32Array + | Float64Array + | DataView + | null + >( + array: T + ): T; +} + +declare const crypto: Crypto | undefined; + +function arrayToHex(arr: Uint8Array): string { + return [...arr].map((x) => x.toString(16).padStart(2, '0')).join(''); +} + +export function randomBytesHex(size: number): string { + if (nodeCrypto) { + return nodeCrypto.randomBytes(size).toString('hex'); + } + + if (crypto) { + const buf = crypto.getRandomValues(new Uint8Array(size)); + return arrayToHex(buf); + } + + fail('crypto library not found'); +} diff --git a/python-parser/packages/pyright-internal/src/common/debug.ts b/python-parser/packages/pyright-internal/src/common/debug.ts new file mode 100644 index 00000000..6f35dc33 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/debug.ts @@ -0,0 +1,151 @@ +/* + * debug.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Helper functions that display user friendly debugging info. + */ + +import { stableSort } from './collectionUtils'; +import { AnyFunction, compareValues, hasProperty, isString } from './core'; + +export function assert( + expression: any, + message?: string, + verboseDebugInfo?: string | (() => string), + stackCrawlMark?: AnyFunction +): asserts expression { + if (!expression) { + if (verboseDebugInfo) { + message += + '\r\nVerbose Debug Information: ' + + (typeof verboseDebugInfo === 'string' ? verboseDebugInfo : verboseDebugInfo()); + } + fail(message ? 'False expression: ' + message : 'False expression.', stackCrawlMark || assert); + } +} + +export function fail(message?: string, stackCrawlMark?: AnyFunction): never { + // debugger; + const e = new Error(message ? `Debug Failure. 
${message}` : 'Debug Failure.'); + if (Error.captureStackTrace) { + Error.captureStackTrace(e, stackCrawlMark || fail); + } + throw e; +} + +export function assertDefined( + value: T, + message?: string, + stackCrawlMark?: AnyFunction +): asserts value is NonNullable { + if (value === undefined || value === null) { + fail(message, stackCrawlMark || assertDefined); + } +} + +export function assertEachDefined( + value: T[], + message?: string, + stackCrawlMark?: AnyFunction +): asserts value is NonNullable[] { + for (const v of value) { + assertDefined(v, message, stackCrawlMark || assertEachDefined); + } +} + +export function assertNever(member: never, message = 'Illegal value:', stackCrawlMark?: AnyFunction): never { + let detail = ''; + + try { + detail = JSON.stringify(member); + } catch { + // Do nothing. + } + + fail(`${message} ${detail}`, stackCrawlMark || assertNever); +} + +export function getFunctionName(func: AnyFunction) { + if (typeof func !== 'function') { + return ''; + } else if (hasProperty(func, 'name')) { + return (func as any).name; + } else { + const text = Function.prototype.toString.call(func); + const match = /^function\s+([\w$]+)\s*\(/.exec(text); + return match ? match[1] : ''; + } +} + +/** + * Formats an enum value as a string for debugging and debug assertions. + */ +export function formatEnum(value = 0, enumObject: any, isFlags?: boolean) { + const members = getEnumMembers(enumObject); + if (value === 0) { + return members.length > 0 && members[0][0] === 0 ? members[0][1] : '0'; + } + if (isFlags) { + let result = ''; + let remainingFlags = value; + for (const [enumValue, enumName] of members) { + if (enumValue > value) { + break; + } + if (enumValue !== 0 && enumValue & value) { + result = `${result}${result ? 
'|' : ''}${enumName}`; + remainingFlags &= ~enumValue; + } + } + if (remainingFlags === 0) { + return result; + } + } else { + for (const [enumValue, enumName] of members) { + if (enumValue === value) { + return enumName; + } + } + } + return value.toString(); +} + +export function getErrorString(error: any): string { + return ( + (error.stack ? error.stack.toString() : undefined) || + (typeof error.message === 'string' ? error.message : undefined) || + JSON.stringify(error) + ); +} + +export function getSerializableError(error: any): Error | undefined { + if (!error) { + return undefined; + } + + const exception = JSON.stringify(error); + if (exception.length > 2) { + // Given error object is JSON.stringify serializable. Use it as it is + // to preserve properties. + return error; + } + + // Convert error to JSON.stringify serializable Error shape. + const name = error.name ? (isString(error.name) ? error.name : 'noname') : 'noname'; + const message = error.message ? (isString(error.message) ? error.message : 'nomessage') : 'nomessage'; + const stack = error.stack ? (isString(error.stack) ? error.stack : undefined) : undefined; + return { name, message, stack }; +} + +function getEnumMembers(enumObject: any) { + const result: [number, string][] = []; + for (const name of Object.keys(enumObject)) { + const value = enumObject[name]; + if (typeof value === 'number') { + result.push([value, name]); + } + } + + return stableSort<[number, string]>(result, (x, y) => compareValues(x[0], y[0])); +} diff --git a/python-parser/packages/pyright-internal/src/common/deferred.ts b/python-parser/packages/pyright-internal/src/common/deferred.ts new file mode 100644 index 00000000..3d652b06 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/deferred.ts @@ -0,0 +1,78 @@ +/* + * deferred.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Promise utilities for async operations. 
+ */ + +export interface Deferred { + readonly promise: Promise; + readonly resolved: boolean; + readonly rejected: boolean; + readonly completed: boolean; + resolve(value?: T | PromiseLike): void; + reject(reason?: any): void; +} + +class DeferredImpl implements Deferred { + private _resolve!: (value: T | PromiseLike) => void; + private _reject!: (reason?: any) => void; + private _resolved = false; + private _rejected = false; + private _promise: Promise; + + constructor(private _scope: any = null) { + this._promise = new Promise((res, rej) => { + this._resolve = res; + this._reject = rej; + }); + } + + get promise(): Promise { + return this._promise; + } + + get resolved(): boolean { + return this._resolved; + } + + get rejected(): boolean { + return this._rejected; + } + + get completed(): boolean { + return this._rejected || this._resolved; + } + + resolve(_value?: T | PromiseLike) { + // eslint-disable-next-line prefer-rest-params + this._resolve.apply(this._scope ? this._scope : this, arguments as any); + this._resolved = true; + } + + reject(_reason?: any) { + // eslint-disable-next-line prefer-rest-params + this._reject.apply(this._scope ? 
this._scope : this, arguments as any); + this._rejected = true; + } +} + +export function createDeferred(scope: any = null): Deferred { + return new DeferredImpl(scope); +} + +export function createDeferredFrom(...promises: Promise[]): Deferred { + const deferred = createDeferred(); + Promise.all(promises) + .then(deferred.resolve.bind(deferred) as any) + .catch(deferred.reject.bind(deferred) as any); + + return deferred; +} + +export function createDeferredFromPromise(promise: Promise): Deferred { + const deferred = createDeferred(); + promise.then(deferred.resolve.bind(deferred)).catch(deferred.reject.bind(deferred)); + return deferred; +} diff --git a/python-parser/packages/pyright-internal/src/common/diagnostic.ts b/python-parser/packages/pyright-internal/src/common/diagnostic.ts new file mode 100644 index 00000000..aab22a33 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/diagnostic.ts @@ -0,0 +1,340 @@ +/* + * diagnostics.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Class that represents errors and warnings. 
+ */ + +import { Commands } from '../commands/commands'; +import { appendArray } from './collectionUtils'; +import { DiagnosticLevel } from './configOptions'; +import { Range, TextRange } from './textRange'; +import { Uri } from './uri/uri'; + +export const defaultMaxDiagnosticDepth = 5; +export const defaultMaxDiagnosticLineCount = 8; +const maxRecursionCount = 64; + +// Corresponds to the CommentTaskPriority enum at https://devdiv.visualstudio.com/DefaultCollection/DevDiv/_git/VS?path=src/env/shell/PackageFramework/Framework/CommentTaskPriority.cs +export enum TaskListPriority { + High = 'High', + Normal = 'Normal', + Low = 'Low', +} + +export interface TaskListToken { + text: string; + priority: TaskListPriority; +} + +export const enum DiagnosticCategory { + Error, + Warning, + Information, + UnusedCode, + UnreachableCode, + Deprecated, + TaskItem, +} + +export function convertLevelToCategory(level: DiagnosticLevel) { + switch (level) { + case 'error': + return DiagnosticCategory.Error; + + case 'warning': + return DiagnosticCategory.Warning; + + case 'information': + return DiagnosticCategory.Information; + + default: + throw new Error(`${level} is not expected`); + } +} + +export interface DiagnosticAction { + action: string; +} + +export interface DiagnosticWithinFile { + uri: Uri; + diagnostic: Diagnostic; +} + +export interface CreateTypeStubFileAction extends DiagnosticAction { + action: Commands.createTypeStub; + moduleName: string; +} + +export interface DiagnosticRelatedInfo { + message: string; + uri: Uri; + range: Range; + priority: TaskListPriority; +} + +export namespace DiagnosticRelatedInfo { + export function toJsonObj(info: DiagnosticRelatedInfo): any { + return { + message: info.message, + uri: info.uri.toJsonObj(), + range: info.range, + priority: info.priority, + }; + } + + export function fromJsonObj(obj: any): DiagnosticRelatedInfo { + return { + message: obj.message, + uri: Uri.fromJsonObj(obj.uri), + range: obj.range, + priority: 
obj.priority, + }; + } +} + +// Represents a single error or warning. +export class Diagnostic { + private _actions: DiagnosticAction[] | undefined; + private _rule: string | undefined; + private _relatedInfo: DiagnosticRelatedInfo[] = []; + private _data: any | null = null; + + constructor( + readonly category: DiagnosticCategory, + readonly message: string, + readonly range: Range, + readonly priority: TaskListPriority = TaskListPriority.Normal + ) {} + + toJsonObj() { + return { + category: this.category, + message: this.message, + range: this.range, + priority: this.priority, + actions: this._actions, + rule: this._rule, + data: this._data, + relatedInfo: this._relatedInfo.map((info) => DiagnosticRelatedInfo.toJsonObj(info)), + }; + } + + static fromJsonObj(obj: any) { + const diag = new Diagnostic(obj.category, obj.message, obj.range, obj.priority); + diag._actions = obj.actions; + diag._rule = obj.rule; + diag._relatedInfo = obj.relatedInfo.map((info: any) => DiagnosticRelatedInfo.fromJsonObj(info)); + diag._data = obj.data; + return diag; + } + + addAction(action: DiagnosticAction) { + if (this._actions === undefined) { + this._actions = [action]; + } else { + this._actions.push(action); + } + } + + setData(data: any) { + this._data = data; + } + + getData(): any | null { + return this._data; + } + + getActions() { + return this._actions; + } + + setRule(rule: string) { + this._rule = rule; + } + + getRule() { + return this._rule; + } + + addRelatedInfo(message: string, fileUri: Uri, range: Range, priority: TaskListPriority = TaskListPriority.Normal) { + this._relatedInfo.push({ uri: fileUri, message, range, priority }); + } + + getRelatedInfo() { + return this._relatedInfo; + } +} + +// Compares two diagnostics by location for sorting. 
+export function compareDiagnostics(d1: Diagnostic, d2: Diagnostic) { + if (d1.range.start.line < d2.range.start.line) { + return -1; + } else if (d1.range.start.line > d2.range.start.line) { + return 1; + } + + if (d1.range.start.character < d2.range.start.character) { + return -1; + } else if (d1.range.start.character > d2.range.start.character) { + return 1; + } + + return 0; +} + +// Helps to build additional information that can be appended to a diagnostic +// message. It supports hierarchical information and flexible formatting. +export class DiagnosticAddendum { + private _messages: string[] = []; + private _childAddenda: DiagnosticAddendum[] = []; + + // The nest level is accurate only for the common case where all + // addendum are created using createAddendum. This is an upper bound. + // The actual nest level may be smaller. + private _nestLevel: number | undefined; + + // Addenda normally don't have their own ranges, but there are cases + // where we want to track ranges that can influence the range of the + // diagnostic. + private _range: TextRange | undefined; + + addMessage(message: string) { + this._messages.push(message); + } + + addMessageMultiline(message: string) { + message.split('\n').forEach((line) => { + this._messages.push(line); + }); + } + + addTextRange(range: TextRange) { + this._range = range; + } + + // Create a new (nested) addendum to which messages can be added. + createAddendum() { + const newAddendum = new DiagnosticAddendum(); + newAddendum._nestLevel = (this._nestLevel ?? 
0) + 1; + this.addAddendum(newAddendum); + return newAddendum; + } + + getString(maxDepth = defaultMaxDiagnosticDepth, maxLineCount = defaultMaxDiagnosticLineCount): string { + let lines = this._getLinesRecursive(maxDepth, maxLineCount); + + if (lines.length > maxLineCount) { + lines = lines.slice(0, maxLineCount); + lines.push(' ...'); + } + + const text = lines.join('\n'); + if (text.length > 0) { + return '\n' + text; + } + + return ''; + } + + isEmpty() { + return this._getMessageCount() === 0; + } + + addAddendum(addendum: DiagnosticAddendum) { + this._childAddenda.push(addendum); + } + + getChildren() { + return this._childAddenda; + } + + getMessages() { + return this._messages; + } + + getNestLevel() { + return this._nestLevel ?? 0; + } + + // Returns undefined if no range is associated with this addendum + // or its children. Returns a non-empty range if there is a single range + // associated. + getEffectiveTextRange(): TextRange | undefined { + const range = this._getTextRangeRecursive(); + + // If we received an empty range, it means that there were multiple + // non-overlapping ranges associated with this addendum. + if (range?.length === 0) { + return undefined; + } + + return range; + } + + private _getTextRangeRecursive(recursionCount = 0): TextRange | undefined { + if (recursionCount > maxRecursionCount) { + return undefined; + } + recursionCount++; + + const childRanges = this._childAddenda + .map((child) => child._getTextRangeRecursive(recursionCount)) + .filter((r) => !!r); + + if (childRanges.length > 1) { + return { start: 0, length: 0 }; + } + + if (childRanges.length === 1) { + return childRanges[0]; + } + + if (this._range) { + return this._range; + } + + return undefined; + } + + private _getMessageCount(recursionCount = 0) { + if (recursionCount > maxRecursionCount) { + return 0; + } + + // Get the nested message count. 
+ let messageCount = this._messages.length; + + for (const diag of this._childAddenda) { + messageCount += diag._getMessageCount(recursionCount + 1); + } + + return messageCount; + } + + private _getLinesRecursive(maxDepth: number, maxLineCount: number, recursionCount = 0): string[] { + if (maxDepth <= 0 || recursionCount > maxRecursionCount) { + return []; + } + + let childLines: string[] = []; + for (const addendum of this._childAddenda) { + const maxDepthRemaining = this._messages.length > 0 ? maxDepth - 1 : maxDepth; + appendArray(childLines, addendum._getLinesRecursive(maxDepthRemaining, maxLineCount, recursionCount + 1)); + + // If the number of lines exceeds our max line count, don't bother adding more. + if (childLines.length >= maxLineCount) { + childLines = childLines.slice(0, maxLineCount); + break; + } + } + + // Prepend indentation for readability. Skip if there are no + // messages at this level. + const extraSpace = this._messages.length > 0 ? '  ' : ''; + return this._messages.concat(childLines).map((line) => extraSpace + line); + } +} diff --git a/python-parser/packages/pyright-internal/src/common/diagnosticRules.ts b/python-parser/packages/pyright-internal/src/common/diagnosticRules.ts new file mode 100644 index 00000000..45cb51f6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/diagnosticRules.ts @@ -0,0 +1,106 @@ +/* + * diagnosticRules.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Strings that represent each of the diagnostic rules + * that can be enabled or disabled in the configuration. 
+ */ + +// Not const enum since keys need to be inspected in tests +// to match declaration of user-visible settings in package.json +export enum DiagnosticRule { + strictListInference = 'strictListInference', + strictSetInference = 'strictSetInference', + strictDictionaryInference = 'strictDictionaryInference', + analyzeUnannotatedFunctions = 'analyzeUnannotatedFunctions', + strictParameterNoneValue = 'strictParameterNoneValue', + enableExperimentalFeatures = 'enableExperimentalFeatures', + enableTypeIgnoreComments = 'enableTypeIgnoreComments', + enableReachabilityAnalysis = 'enableReachabilityAnalysis', + deprecateTypingAliases = 'deprecateTypingAliases', + disableBytesTypePromotions = 'disableBytesTypePromotions', + + reportGeneralTypeIssues = 'reportGeneralTypeIssues', + reportPropertyTypeMismatch = 'reportPropertyTypeMismatch', + reportFunctionMemberAccess = 'reportFunctionMemberAccess', + reportMissingImports = 'reportMissingImports', + reportMissingModuleSource = 'reportMissingModuleSource', + reportInvalidTypeForm = 'reportInvalidTypeForm', + reportMissingTypeStubs = 'reportMissingTypeStubs', + reportImportCycles = 'reportImportCycles', + reportUnusedImport = 'reportUnusedImport', + reportUnusedClass = 'reportUnusedClass', + reportUnusedFunction = 'reportUnusedFunction', + reportUnusedVariable = 'reportUnusedVariable', + reportDuplicateImport = 'reportDuplicateImport', + reportWildcardImportFromLibrary = 'reportWildcardImportFromLibrary', + reportAbstractUsage = 'reportAbstractUsage', + reportArgumentType = 'reportArgumentType', + reportAssertTypeFailure = 'reportAssertTypeFailure', + reportAssignmentType = 'reportAssignmentType', + reportAttributeAccessIssue = 'reportAttributeAccessIssue', + reportCallIssue = 'reportCallIssue', + reportInconsistentOverload = 'reportInconsistentOverload', + reportIndexIssue = 'reportIndexIssue', + reportInvalidTypeArguments = 'reportInvalidTypeArguments', + reportNoOverloadImplementation = 'reportNoOverloadImplementation', 
+ reportOperatorIssue = 'reportOperatorIssue', + reportOptionalSubscript = 'reportOptionalSubscript', + reportOptionalMemberAccess = 'reportOptionalMemberAccess', + reportOptionalCall = 'reportOptionalCall', + reportOptionalIterable = 'reportOptionalIterable', + reportOptionalContextManager = 'reportOptionalContextManager', + reportOptionalOperand = 'reportOptionalOperand', + reportRedeclaration = 'reportRedeclaration', + reportReturnType = 'reportReturnType', + reportTypedDictNotRequiredAccess = 'reportTypedDictNotRequiredAccess', + reportUntypedFunctionDecorator = 'reportUntypedFunctionDecorator', + reportUntypedClassDecorator = 'reportUntypedClassDecorator', + reportUntypedBaseClass = 'reportUntypedBaseClass', + reportUntypedNamedTuple = 'reportUntypedNamedTuple', + reportPrivateUsage = 'reportPrivateUsage', + reportTypeCommentUsage = 'reportTypeCommentUsage', + reportPrivateImportUsage = 'reportPrivateImportUsage', + reportConstantRedefinition = 'reportConstantRedefinition', + reportDeprecated = 'reportDeprecated', + reportIncompatibleMethodOverride = 'reportIncompatibleMethodOverride', + reportIncompatibleVariableOverride = 'reportIncompatibleVariableOverride', + reportInconsistentConstructor = 'reportInconsistentConstructor', + reportOverlappingOverload = 'reportOverlappingOverload', + reportPossiblyUnboundVariable = 'reportPossiblyUnboundVariable', + reportMissingSuperCall = 'reportMissingSuperCall', + reportUninitializedInstanceVariable = 'reportUninitializedInstanceVariable', + reportInvalidStringEscapeSequence = 'reportInvalidStringEscapeSequence', + reportUnknownParameterType = 'reportUnknownParameterType', + reportUnknownArgumentType = 'reportUnknownArgumentType', + reportUnknownLambdaType = 'reportUnknownLambdaType', + reportUnknownVariableType = 'reportUnknownVariableType', + reportUnknownMemberType = 'reportUnknownMemberType', + reportMissingParameterType = 'reportMissingParameterType', + reportMissingTypeArgument = 'reportMissingTypeArgument', + 
reportInvalidTypeVarUse = 'reportInvalidTypeVarUse', + reportCallInDefaultInitializer = 'reportCallInDefaultInitializer', + reportUnnecessaryIsInstance = 'reportUnnecessaryIsInstance', + reportUnnecessaryCast = 'reportUnnecessaryCast', + reportUnnecessaryComparison = 'reportUnnecessaryComparison', + reportUnnecessaryContains = 'reportUnnecessaryContains', + reportAssertAlwaysTrue = 'reportAssertAlwaysTrue', + reportSelfClsParameterName = 'reportSelfClsParameterName', + reportImplicitStringConcatenation = 'reportImplicitStringConcatenation', + reportUndefinedVariable = 'reportUndefinedVariable', + reportUnboundVariable = 'reportUnboundVariable', + reportUnhashable = 'reportUnhashable', + reportInvalidStubStatement = 'reportInvalidStubStatement', + reportIncompleteStub = 'reportIncompleteStub', + reportUnsupportedDunderAll = 'reportUnsupportedDunderAll', + reportUnusedCallResult = 'reportUnusedCallResult', + reportUnusedCoroutine = 'reportUnusedCoroutine', + reportUnusedExcept = 'reportUnusedExcept', + reportUnusedExpression = 'reportUnusedExpression', + reportUnnecessaryTypeIgnoreComment = 'reportUnnecessaryTypeIgnoreComment', + reportMatchNotExhaustive = 'reportMatchNotExhaustive', + reportUnreachable = 'reportUnreachable', + reportImplicitOverride = 'reportImplicitOverride', +} diff --git a/python-parser/packages/pyright-internal/src/common/diagnosticSink.ts b/python-parser/packages/pyright-internal/src/common/diagnosticSink.ts new file mode 100644 index 00000000..e17d85d7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/diagnosticSink.ts @@ -0,0 +1,189 @@ +/* + * diagnostics.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Class that collects and deduplicates diagnostics. 
+ */ + +import { appendArray } from './collectionUtils'; +import { DiagnosticLevel } from './configOptions'; +import { Diagnostic, DiagnosticAction, DiagnosticCategory } from './diagnostic'; +import { convertOffsetsToRange } from './positionUtils'; +import { hashString } from './stringUtils'; +import { Range, TextRange } from './textRange'; +import { TextRangeCollection } from './textRangeCollection'; +import { Uri } from './uri/uri'; + +// Represents a collection of diagnostics within a file. +export interface FileDiagnostics { + fileUri: Uri; + version: number | undefined; + diagnostics: Diagnostic[]; +} + +export namespace FileDiagnostics { + export function toJsonObj(fileDiag: FileDiagnostics): any { + return { + fileUri: fileDiag.fileUri.toJsonObj(), + version: fileDiag.version, + diagnostics: fileDiag.diagnostics.map((d) => d.toJsonObj()), + }; + } + + export function fromJsonObj(fileDiagObj: any): FileDiagnostics { + return { + fileUri: Uri.fromJsonObj(fileDiagObj.fileUri), + version: fileDiagObj.version, + diagnostics: fileDiagObj.diagnostics.map((d: any) => Diagnostic.fromJsonObj(d)), + }; + } +} + +// Creates and tracks a list of diagnostics. 
+export class DiagnosticSink { + private _diagnosticList: Diagnostic[]; + private _diagnosticMap: Map; + + constructor(diagnostics?: Diagnostic[]) { + this._diagnosticList = diagnostics || []; + this._diagnosticMap = new Map(); + } + + fetchAndClear() { + const prevDiagnostics = this._diagnosticList; + this._diagnosticList = []; + this._diagnosticMap.clear(); + return prevDiagnostics; + } + + addError(message: string, range: Range) { + return this.addDiagnostic(new Diagnostic(DiagnosticCategory.Error, message, range)); + } + + addWarning(message: string, range: Range) { + return this.addDiagnostic(new Diagnostic(DiagnosticCategory.Warning, message, range)); + } + + addInformation(message: string, range: Range) { + return this.addDiagnostic(new Diagnostic(DiagnosticCategory.Information, message, range)); + } + + addUnusedCode(message: string, range: Range, action?: DiagnosticAction) { + const diag = new Diagnostic(DiagnosticCategory.UnusedCode, message, range); + if (action) { + diag.addAction(action); + } + return this.addDiagnostic(diag); + } + + addUnreachableCode(message: string, range: Range, action?: DiagnosticAction) { + const diag = new Diagnostic(DiagnosticCategory.UnreachableCode, message, range); + if (action) { + diag.addAction(action); + } + return this.addDiagnostic(diag); + } + + addDeprecated(message: string, range: Range, action?: DiagnosticAction) { + const diag = new Diagnostic(DiagnosticCategory.Deprecated, message, range); + if (action) { + diag.addAction(action); + } + return this.addDiagnostic(diag); + } + + addDiagnostic(diag: Diagnostic) { + // Create a unique key for the diagnostic to prevent + // adding duplicates. 
+ const key = + `${diag.range.start.line},${diag.range.start.character}-` + + `${diag.range.end.line}-${diag.range.end.character}:${hashString(diag.message)}}`; + if (!this._diagnosticMap.has(key)) { + this._diagnosticList.push(diag); + this._diagnosticMap.set(key, diag); + } + return diag; + } + + addDiagnostics(diagsToAdd: Diagnostic[]) { + appendArray(this._diagnosticList, diagsToAdd); + } + + getErrors() { + return this._diagnosticList.filter((diag) => diag.category === DiagnosticCategory.Error); + } + + getWarnings() { + return this._diagnosticList.filter((diag) => diag.category === DiagnosticCategory.Warning); + } + + getInformation() { + return this._diagnosticList.filter((diag) => diag.category === DiagnosticCategory.Information); + } + + getUnusedCode() { + return this._diagnosticList.filter((diag) => diag.category === DiagnosticCategory.UnusedCode); + } + + getUnreachableCode() { + return this._diagnosticList.filter((diag) => diag.category === DiagnosticCategory.UnreachableCode); + } + + getDeprecated() { + return this._diagnosticList.filter((diag) => diag.category === DiagnosticCategory.Deprecated); + } +} + +// Specialized version of DiagnosticSink that works with TextRange objects +// and converts text ranges to line and column numbers. 
+export class TextRangeDiagnosticSink extends DiagnosticSink { + private _lines: TextRangeCollection; + + constructor(lines: TextRangeCollection, diagnostics?: Diagnostic[]) { + super(diagnostics); + this._lines = lines; + } + + addDiagnosticWithTextRange(level: DiagnosticLevel, message: string, range: TextRange) { + const positionRange = convertOffsetsToRange(range.start, range.start + range.length, this._lines); + switch (level) { + case 'error': + return this.addError(message, positionRange); + + case 'warning': + return this.addWarning(message, positionRange); + + case 'information': + return this.addInformation(message, positionRange); + + default: + throw new Error(`${level} is not expected value`); + } + } + + addUnusedCodeWithTextRange(message: string, range: TextRange, action?: DiagnosticAction) { + return this.addUnusedCode( + message, + convertOffsetsToRange(range.start, range.start + range.length, this._lines), + action + ); + } + + addUnreachableCodeWithTextRange(message: string, range: TextRange, action?: DiagnosticAction) { + return this.addUnreachableCode( + message, + convertOffsetsToRange(range.start, range.start + range.length, this._lines), + action + ); + } + + addDeprecatedWithTextRange(message: string, range: TextRange, action?: DiagnosticAction) { + return this.addDeprecated( + message, + convertOffsetsToRange(range.start, range.start + range.length, this._lines), + action + ); + } +} diff --git a/python-parser/packages/pyright-internal/src/common/docRange.ts b/python-parser/packages/pyright-internal/src/common/docRange.ts new file mode 100644 index 00000000..740c05e4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/docRange.ts @@ -0,0 +1,15 @@ +/* + * docRange.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Specifies the range of text within a document. 
+ */ + +import { Range } from './textRange'; +import { Uri } from './uri/uri'; + +export interface DocumentRange { + uri: Uri; + range: Range; +} diff --git a/python-parser/packages/pyright-internal/src/common/docStringService.ts b/python-parser/packages/pyright-internal/src/common/docStringService.ts new file mode 100644 index 00000000..2c15c46b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/docStringService.ts @@ -0,0 +1,64 @@ +/* + * docStringService.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Interface for service that parses docstrings and converts them to other formats. + */ + +import { MarkupKind } from 'vscode-languageserver-types'; +import { convertDocStringToMarkdown, convertDocStringToPlainText } from '../analyzer/docStringConversion'; +import { extractAttributeDocumentation, extractParameterDocumentation } from '../analyzer/docStringUtils'; +import { Uri } from './uri/uri'; + +export interface DocStringService { + convertDocStringToPlainText(docString: string): string; + convertDocStringToMarkdown(docString: string, forceLiteral?: boolean, sourceFileUri?: Uri): string; + extractParameterDocumentation( + functionDocString: string, + paramName: string, + format?: MarkupKind, + forceLiteral?: boolean + ): string | undefined; + extractAttributeDocumentation( + classDocString: string, + attrName: string, + format?: MarkupKind, + forceLiteral?: boolean + ): string | undefined; + clone(): DocStringService; +} + +export namespace DocStringService { + export function is(value: any): value is DocStringService { + return ( + !!value.convertDocStringToMarkdown && + !!value.convertDocStringToPlainText && + !!value.extractParameterDocumentation && + !!value.extractAttributeDocumentation + ); + } +} + +export class PyrightDocStringService implements DocStringService { + convertDocStringToPlainText(docString: string): string { + return convertDocStringToPlainText(docString); + } + + 
convertDocStringToMarkdown(docString: string, _forceLiteral?: boolean, _sourceFileUri?: Uri): string { + return convertDocStringToMarkdown(docString); + } + + extractParameterDocumentation(functionDocString: string, paramName: string): string | undefined { + return extractParameterDocumentation(functionDocString, paramName); + } + + extractAttributeDocumentation(classDocString: string, attrName: string): string | undefined { + return extractAttributeDocumentation(classDocString, attrName); + } + + clone() { + // No need to clone, no internal state + return this; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/editAction.ts b/python-parser/packages/pyright-internal/src/common/editAction.ts new file mode 100644 index 00000000..0736fea6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/editAction.ts @@ -0,0 +1,68 @@ +/* + * editAction.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Represents a single edit within a file. 
+ */ + +import { Range, rangesAreEqual } from './textRange'; +import { Uri } from './uri/uri'; + +export interface TextEditAction { + range: Range; + replacementText: string; +} + +export interface FileEditAction extends TextEditAction { + fileUri: Uri; +} + +export interface FileEditActions { + edits: FileEditAction[]; + fileOperations: FileOperations[]; +} + +export type FileOperations = RenameFileOperation | CreateFileOperation | DeleteFileOperation; + +export interface FileOperation { + kind: 'create' | 'delete' | 'rename'; +} + +export interface RenameFileOperation extends FileOperation { + kind: 'rename'; + oldFileUri: Uri; + newFileUri: Uri; +} + +export interface CreateFileOperation extends FileOperation { + kind: 'create'; + fileUri: Uri; +} + +export interface DeleteFileOperation extends FileOperation { + kind: 'delete'; + fileUri: Uri; +} + +export namespace TextEditAction { + export function is(value: any): value is TextEditAction { + return !!value.range && value.replacementText !== undefined; + } +} + +export namespace FileEditAction { + export function is(value: any): value is FileEditAction { + return value.fileUri !== undefined && TextEditAction.is(value); + } + + export function areEqual(e1: FileEditAction, e2: FileEditAction) { + return ( + e1 === e2 || + (e1.fileUri.equals(e2.fileUri) && + rangesAreEqual(e1.range, e2.range) && + e1.replacementText === e2.replacementText) + ); + } +} diff --git a/python-parser/packages/pyright-internal/src/common/envVarUtils.ts b/python-parser/packages/pyright-internal/src/common/envVarUtils.ts new file mode 100644 index 00000000..a043d60c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/envVarUtils.ts @@ -0,0 +1,93 @@ +/* + * envVarUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Utils functions that handles environment variables. 
+ */ + +import * as os from 'os'; + +import { Workspace, WorkspaceFolder } from '../workspaceFactory'; +import { Uri } from './uri/uri'; +import { isRootedDiskPath, normalizeSlashes } from './pathUtils'; +import { ServiceKeys } from './serviceKeys'; +import { escapeRegExp } from './stringUtils'; + +export function resolvePathWithEnvVariables( + workspace: Workspace, + path: string, + workspaces: Workspace[] +): Uri | undefined { + const rootUri = workspace.rootUri; + + const expanded = expandPathVariables(path, rootUri ?? Uri.empty(), workspaces); + const caseDetector = workspace.service.serviceProvider.get(ServiceKeys.caseSensitivityDetector); + if (Uri.maybeUri(expanded)) { + // If path is expanded to uri, no need to resolve it against the workspace root. + return Uri.parse(normalizeSlashes(expanded, '/'), caseDetector); + } + + // Expansion may have failed. + if (expanded.includes('${')) { + return undefined; + } + + if (rootUri) { + // normal case, resolve the path against workspace root. + return rootUri.resolvePaths(normalizeSlashes(expanded, '/')); + } + + // We don't have workspace root. but path contains something that require `workspace root` + if (path.includes('${workspaceFolder')) { + return undefined; + } + + // Without workspace root, we can't handle any `relative path`. + if (!isRootedDiskPath(normalizeSlashes(expanded))) { + return undefined; + } + + // We have absolute file path. + return Uri.file(expanded, caseDetector); +} + +// Expands certain predefined variables supported within VS Code settings. +// Ideally, VS Code would provide an API for doing this expansion, but +// it doesn't. We'll handle the most common variables here as a convenience. +export function expandPathVariables(path: string, rootPath: Uri, workspaces: WorkspaceFolder[]): string { + // Make sure all replacements look like URI paths too. 
+ const replace = (match: RegExp, replaceValue: string) => { + path = path.replace(match, replaceValue); + }; + + // Replace everything inline. + path = path.replace(/\$\{workspaceFolder\}/g, rootPath.getPath()); + + // this is for vscode multiroot workspace supports. + // https://code.visualstudio.com/docs/editor/variables-reference#_variables-scoped-per-workspace-folder + for (const workspace of workspaces) { + if (!workspace.rootUri) { + continue; + } + + const escapedWorkspaceName = escapeRegExp(workspace.workspaceName); + const ws_regexp = RegExp(`\\$\\{workspaceFolder:${escapedWorkspaceName}\\}`, 'g'); + path = path.replace(ws_regexp, workspace.rootUri.getPath()); + } + + if (process.env.HOME !== undefined) { + replace(/\$\{env:HOME\}/g, process.env.HOME || ''); + } + if (process.env.USERNAME !== undefined) { + replace(/\$\{env:USERNAME\}/g, process.env.USERNAME || ''); + } + if (process.env.VIRTUAL_ENV !== undefined) { + replace(/\$\{env:VIRTUAL_ENV\}/g, process.env.VIRTUAL_ENV || ''); + } + if (os.homedir) { + replace(/(?:^|\/)~(?=\/)/g, os.homedir() || process.env.HOME || process.env.USERPROFILE || '~'); + } + + return path; +} diff --git a/python-parser/packages/pyright-internal/src/common/extensibility.ts b/python-parser/packages/pyright-internal/src/common/extensibility.ts new file mode 100644 index 00000000..bd807b5a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/extensibility.ts @@ -0,0 +1,166 @@ +/* +* extensibility.ts +* Copyright (c) Microsoft Corporation. +* Licensed under the MIT license. + +* Language service extensibility. 
+*/ + +import { CancellationToken } from 'vscode-languageserver'; + +import { Declaration } from '../analyzer/declaration'; +import { ImportResolver } from '../analyzer/importResolver'; +import * as prog from '../analyzer/program'; +import { IPythonMode } from '../analyzer/sourceFile'; +import { SourceMapper } from '../analyzer/sourceMapper'; +import { SymbolTable } from '../analyzer/symbol'; +import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { Diagnostic } from '../common/diagnostic'; +import { ServerSettings } from '../common/languageServerInterface'; +import { ParseNode } from '../parser/parseNodes'; +import { ParseFileResults, ParserOutput } from '../parser/parser'; +import { ConfigOptions } from './configOptions'; +import { ConsoleInterface } from './console'; +import { ReadOnlyFileSystem } from './fileSystem'; +import { ServiceProvider } from './serviceProvider'; +import { Range } from './textRange'; +import { Uri } from './uri/uri'; + +export interface SourceFile { + // See whether we can convert these to regular properties. + isStubFile(): boolean; + isTypingStubFile(): boolean; + + isThirdPartyPyTypedPresent(): boolean; + + getIPythonMode(): IPythonMode; + getUri(): Uri; + getFileContent(): string | undefined; + getClientVersion(): number | undefined; + getOpenFileContents(): string | undefined; + getModuleSymbolTable(): SymbolTable | undefined; + getDiagnostics(options: ConfigOptions): Diagnostic[] | undefined; + getParserOutput(): ParserOutput | undefined; +} + +export interface SourceFileInfo { + // We don't want to expose the real SourceFile since + // one can mess up program state by calling some methods on it directly. + // For example, calling sourceFile.parse() directly will mess up + // dependency graph maintained by the program. 
+ readonly uri: Uri; + readonly contents: string; + readonly ipythonMode: IPythonMode; + + // Information about the source file + readonly isTypeshedFile: boolean; + readonly isThirdPartyImport: boolean; + readonly isThirdPartyPyTypedPresent: boolean; + readonly isTypingStubFile: boolean; + readonly hasTypeAnnotations: boolean; + readonly diagnosticsVersion: number | undefined; + readonly semanticVersion: number; + readonly clientVersion: number | undefined; + + readonly chainedSourceFile?: SourceFileInfo | undefined; + + readonly isTracked: boolean; + readonly isOpenByClient: boolean; + + readonly imports: readonly SourceFileInfo[]; + readonly importedBy: readonly SourceFileInfo[]; + readonly shadows: readonly SourceFileInfo[]; + readonly shadowedBy: readonly SourceFileInfo[]; +} + +// Readonly wrapper around a Program. Makes sure it doesn't mutate the program. +export interface ProgramView { + readonly id: string; + readonly rootPath: Uri; + readonly console: ConsoleInterface; + readonly evaluator: TypeEvaluator | undefined; + readonly configOptions: ConfigOptions; + readonly importResolver: ImportResolver; + readonly fileSystem: ReadOnlyFileSystem; + readonly serviceProvider: ServiceProvider; + + owns(uri: Uri): boolean; + getSourceFileInfoList(): readonly SourceFileInfo[]; + getParserOutput(fileUri: Uri): ParserOutput | undefined; + getParseResults(fileUri: Uri): ParseFileResults | undefined; + getSourceFileInfo(fileUri: Uri): SourceFileInfo | undefined; + getModuleSymbolTable(fileUri: Uri): SymbolTable | undefined; + getChainedUri(fileUri: Uri): Uri | undefined; + getSourceMapper(fileUri: Uri, token: CancellationToken, mapCompiled?: boolean, preferStubs?: boolean): SourceMapper; + + // Consider getDiagnosticsForRange to call `analyzeFile` automatically if the file is not analyzed. 
+ analyzeFile(fileUri: Uri, token: CancellationToken): boolean; + getDiagnosticsForRange(fileUri: Uri, range: Range): readonly Diagnostic[]; + getParseDiagnostics(fileUri: Uri): readonly Diagnostic[] | undefined; + + // See whether we can get rid of these methods + handleMemoryHighUsage(): void; + clone(): prog.Program; +} + +// This exposes some APIs to mutate program. Unlike ProgramMutator, this will only mutate this program +// and doesn't forward the request to the BG thread. +// One can use this when edits are temporary such as `runEditMode` or `test` +export interface EditableProgram extends ProgramView { + addInterimFile(uri: Uri): void; + setFileOpened(fileUri: Uri, version: number | null, contents: string, options?: prog.OpenFileOptions): void; + updateChainedUri(fileUri: Uri, chainedUri: Uri | undefined): void; +} + +// Mutable wrapper around a program. Allows the FG thread to forward this request to the BG thread +// Any edits made to this program will persist and mutate the program's state permanently. +export interface ProgramMutator { + addInterimFile(fileUri: Uri): void; + setFileOpened( + fileUri: Uri, + version: number | null, + contents: string, + ipythonMode: IPythonMode, + chainedFilePath?: Uri + ): void; + updateOpenFileContents(path: Uri, version: number | null, contents: string, ipythonMode: IPythonMode): void; +} + +export enum ReferenceUseCase { + Rename, + References, +} + +export interface SymbolDefinitionProvider { + tryGetDeclarations(node: ParseNode, offset: number, token: CancellationToken): Declaration[]; +} + +export interface SymbolUsageProviderFactory { + tryCreateProvider( + useCase: ReferenceUseCase, + declarations: readonly Declaration[], + token: CancellationToken + ): SymbolUsageProvider | undefined; +} + +/** + * All Apis are supposed to be `idempotent` and `deterministic` + * + * All Apis should return the same results regardless how often there are called + * in whatever orders for the same inputs. 
+ */ +export interface SymbolUsageProvider { + appendSymbolNamesTo(symbolNames: Set): void; + appendDeclarationsTo(to: Declaration[]): void; + appendDeclarationsAt(context: ParseNode, from: readonly Declaration[], to: Declaration[]): void; +} + +export interface StatusMutationListener { + onFileDirty?: (fileUri: Uri) => void; + onClearCache?: () => void; + onUpdateSettings?: (settings: T) => void; +} + +export interface DebugInfoInspector { + getCycleDetail(program: ProgramView, fileInfo: SourceFileInfo): string; +} diff --git a/python-parser/packages/pyright-internal/src/common/extensions.ts b/python-parser/packages/pyright-internal/src/common/extensions.ts new file mode 100644 index 00000000..a8c8b36f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/extensions.ts @@ -0,0 +1,16 @@ +/* + * extensions.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Extension methods to various types. + */ + +/* eslint-disable @typescript-eslint/no-empty-function */ +// Explicitly tells that promise should be run asynchronously. +Promise.prototype.ignoreErrors = function (this: Promise) { + this.catch((e) => { + console.log(e); + }); +}; diff --git a/python-parser/packages/pyright-internal/src/common/fileBasedCancellationUtils.ts b/python-parser/packages/pyright-internal/src/common/fileBasedCancellationUtils.ts new file mode 100644 index 00000000..466e142b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/fileBasedCancellationUtils.ts @@ -0,0 +1,213 @@ +/* + * fileBasedCancellationUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Helper methods relating to file-based cancellation. 
+ */ + +import * as fs from 'fs'; +import * as os from 'os'; +import * as path from 'path'; +import { CancellationId, CancellationTokenSource } from 'vscode-jsonrpc'; +import { + AbstractCancellationTokenSource, + CancellationReceiverStrategy, + CancellationSenderStrategy, + CancellationStrategy, + CancellationToken, +} from 'vscode-languageserver'; + +import { + CancellationProvider, + CancelledTokenId, + FileBasedToken, + getCancellationFolderName, + setCancellationFolderName, +} from './cancellationUtils'; +import { Uri } from './uri/uri'; +import { UriEx } from './uri/uriUtils'; + +class StatSyncFromFs { + statSync(uri: Uri) { + return fs.statSync(uri.getFilePath()); + } +} + +class OwningFileToken extends FileBasedToken { + private _disposed = false; + + constructor(cancellationId: string) { + super(cancellationId, new StatSyncFromFs()); + } + + override get isCancellationRequested(): boolean { + // Since this object owns the file and it gets created when the + // token is cancelled, there's no point in checking the pipe. + return this.isCancelled; + } + + override cancel() { + if (!this._disposed && !this.isCancelled) { + this._createPipe(); + super.cancel(); + } + } + + override dispose(): void { + this._disposed = true; + + super.dispose(); + this._removePipe(); + } + + private _createPipe() { + try { + fs.writeFileSync(this.cancellationFilePath.getFilePath(), '', { flag: 'w' }); + } catch { + // Ignore the exception. + } + } + + private _removePipe() { + try { + fs.unlinkSync(this.cancellationFilePath.getFilePath()); + } catch { + // Ignore the exception. + } + } +} + +class FileBasedCancellationTokenSource implements AbstractCancellationTokenSource { + private _token: CancellationToken | undefined; + + constructor(private _cancellationId: string, private _ownFile: boolean = false) { + // Empty + } + + get token(): CancellationToken { + if (!this._token) { + // Be lazy and create the token only when actually needed. + this._token = this._ownFile + ? 
new OwningFileToken(this._cancellationId) + : new FileBasedToken(this._cancellationId, new StatSyncFromFs()); + } + return this._token; + } + + cancel(): void { + if (!this._token) { + // Save an object by returning the default + // cancelled token when cancellation happens + // before someone asks for the token. + this._token = CancellationToken.Cancelled; + } else if (this._token.isCancellationRequested) { + // Already cancelled. + return; + } else { + (this._token as FileBasedToken).cancel(); + } + } + + dispose(): void { + if (!this._token) { + // Make sure to initialize with an empty token if we had none. + this._token = CancellationToken.None; + } else if (this._token instanceof FileBasedToken) { + // Actually dispose. + this._token.dispose(); + } + } +} + +export function getCancellationFolderPath(folderName: string) { + return path.join(os.tmpdir(), 'python-languageserver-cancellation', folderName); +} + +function getCancellationFileUri(folderName: string, id: CancellationId): string { + return UriEx.file(path.join(getCancellationFolderPath(folderName), `cancellation-${String(id)}.tmp`)).toString(); +} + +// See this issue for why the implements is commented out: +// https://github.com/microsoft/vscode-languageserver-node/issues/1425 +class FileCancellationReceiverStrategy { + // implements IdCancellationReceiverStrategy { + constructor(readonly folderName: string) {} + + createCancellationTokenSource(id: CancellationId): AbstractCancellationTokenSource { + return new FileBasedCancellationTokenSource(getCancellationFileUri(this.folderName, id)); + } +} + +export function getCancellationStrategyFromArgv(argv: string[]): CancellationStrategy { + let receiver: CancellationReceiverStrategy | undefined; + + for (let i = 0; i < argv.length; i++) { + const arg = argv[i]; + if (arg === '--cancellationReceive') { + receiver = createReceiverStrategyFromArgv(argv[i + 1]); + } else { + const args = arg.split('='); + if (args[0] === '--cancellationReceive') { + receiver 
= createReceiverStrategyFromArgv(args[1]); + } + } + } + + if (receiver && !getCancellationFolderName()) { + setCancellationFolderName((receiver as FileCancellationReceiverStrategy).folderName); + } + + receiver = receiver ? receiver : CancellationReceiverStrategy.Message; + return { receiver, sender: CancellationSenderStrategy.Message }; + + function createReceiverStrategyFromArgv(arg: string): CancellationReceiverStrategy | undefined { + const folderName = extractCancellationFolderName(arg); + return folderName ? new FileCancellationReceiverStrategy(folderName) : undefined; + } + + function extractCancellationFolderName(arg: string): string | undefined { + const fileRegex = /^file:(.+)$/; + const folderName = arg.match(fileRegex); + return folderName ? folderName[1] : undefined; + } +} + +export function disposeCancellationToken(token: CancellationToken) { + if (token instanceof FileBasedToken) { + token.dispose(); + } +} + +export function getCancellationTokenFromId(cancellationId: string) { + if (!cancellationId) { + return CancellationToken.None; + } + + if (cancellationId === CancelledTokenId) { + return CancellationToken.Cancelled; + } + + return new FileBasedToken(cancellationId, new StatSyncFromFs()); +} + +let cancellationSourceId = 0; +export class FileBasedCancellationProvider implements CancellationProvider { + constructor(private _prefix: string) { + // empty + } + + createCancellationTokenSource(): AbstractCancellationTokenSource { + const folderName = getCancellationFolderName(); + if (!folderName) { + // File-based cancellation is not used. + // Return regular cancellation token source. 
+ return new CancellationTokenSource(); + } + + return new FileBasedCancellationTokenSource( + getCancellationFileUri(folderName, `${this._prefix}-${String(cancellationSourceId++)}`), + /* ownFile */ true + ); + } +} diff --git a/python-parser/packages/pyright-internal/src/common/fileSystem.ts b/python-parser/packages/pyright-internal/src/common/fileSystem.ts new file mode 100644 index 00000000..1028a8d8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/fileSystem.ts @@ -0,0 +1,149 @@ +/* + * fileSystem.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * A "file system provider" abstraction that allows us to swap out a + * real file system implementation for a virtual (mocked) implementation + * for testing. + */ + +// * NOTE * except tests, this should be only file that import "fs" +import type * as fs from 'fs'; +import { FileWatcher, FileWatcherEventHandler } from './fileWatcher'; +import { Uri } from './uri/uri'; +import { Disposable } from 'vscode-jsonrpc'; + +export interface Stats { + size: number; + mtimeMs: number; + ctimeMs: number; + + isFile(): boolean; + isDirectory(): boolean; + isBlockDevice(): boolean; + isCharacterDevice(): boolean; + isSymbolicLink(): boolean; + isFIFO(): boolean; + isSocket(): boolean; + isZipDirectory?: () => boolean; +} + +export interface MkDirOptions { + recursive: boolean; + // Not supported on Windows so commented out. 
+ // mode: string | number; +} + +export interface ReadOnlyFileSystem { + existsSync(uri: Uri): boolean; + chdir(uri: Uri): void; + readdirEntriesSync(uri: Uri): fs.Dirent[]; + readdirSync(uri: Uri): string[]; + readFileSync(uri: Uri, encoding?: null): Buffer; + readFileSync(uri: Uri, encoding: BufferEncoding): string; + readFileSync(uri: Uri, encoding?: BufferEncoding | null): string | Buffer; + + statSync(uri: Uri): Stats; + realpathSync(uri: Uri): Uri; + getModulePath(): Uri; + // Async I/O + readFile(uri: Uri): Promise; + readFileText(uri: Uri, encoding?: BufferEncoding): Promise; + // Return path in casing on OS. + realCasePath(uri: Uri): Uri; + + // See whether the file is mapped to another location. + isMappedUri(uri: Uri): boolean; + + // Get original uri if the given uri is mapped. + getOriginalUri(mappedUri: Uri): Uri; + + // Get mapped uri if the given uri is mapped. + getMappedUri(originalUri: Uri): Uri; + + isInZip(uri: Uri): boolean; +} + +export interface FileSystem extends ReadOnlyFileSystem { + mkdirSync(uri: Uri, options?: MkDirOptions): void; + writeFileSync(uri: Uri, data: string | Buffer, encoding: BufferEncoding | null): void; + + unlinkSync(uri: Uri): void; + rmdirSync(uri: Uri): void; + + createFileSystemWatcher(uris: Uri[], listener: FileWatcherEventHandler): FileWatcher; + createReadStream(uri: Uri): fs.ReadStream; + createWriteStream(uri: Uri): fs.WriteStream; + copyFileSync(uri: Uri, dst: Uri): void; + + mapDirectory(mappedUri: Uri, originalUri: Uri, filter?: (originalUri: Uri, fs: FileSystem) => boolean): Disposable; +} + +export interface TmpfileOptions { + postfix?: string; + prefix?: string; +} + +export interface TempFile { + // The directory returned by tmpdir must exist and be the same each time tmpdir is called. 
+ tmpdir(): Uri; + tmpfile(options?: TmpfileOptions): Uri; +} + +export namespace FileSystem { + export function is(value: any): value is FileSystem { + return value.createFileSystemWatcher && value.createReadStream && value.createWriteStream && value.copyFileSync; + } +} + +export namespace TempFile { + export function is(value: any): value is TempFile { + return value.tmpdir && value.tmpfile; + } +} + +export class VirtualDirent implements fs.Dirent { + parentPath: string; + + constructor(public name: string, private _file: boolean, parentPath: string) { + this.parentPath = parentPath; + } + + /** + * Alias for `dirent.parentPath`. + * @since v20.1.0 + * @deprecated Since v20.12.0 + */ + get path(): string { + return this.parentPath; + } + + isFile(): boolean { + return this._file; + } + + isDirectory(): boolean { + return !this._file; + } + + isBlockDevice(): boolean { + return false; + } + + isCharacterDevice(): boolean { + return false; + } + + isSymbolicLink(): boolean { + return false; + } + + isFIFO(): boolean { + return false; + } + + isSocket(): boolean { + return false; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/fileWatcher.ts b/python-parser/packages/pyright-internal/src/common/fileWatcher.ts new file mode 100644 index 00000000..ca35c614 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/fileWatcher.ts @@ -0,0 +1,57 @@ +/* + * fileWatcher.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * file watcher related functionality. 
+ */ +import { Stats } from './fileSystem'; +import { Uri } from './uri/uri'; + +export type FileWatcherEventType = 'add' | 'addDir' | 'change' | 'unlink' | 'unlinkDir'; +export type FileWatcherEventHandler = (eventName: FileWatcherEventType, path: string, stats?: Stats) => void; + +export interface FileWatcher { + close(): void; +} + +export interface FileWatcherHandler { + onFileChange(eventType: FileWatcherEventType, uri: Uri): void; +} + +export interface FileWatcherProvider { + createFileWatcher(paths: string[], listener: FileWatcherEventHandler): FileWatcher; +} + +export const nullFileWatcherHandler: FileWatcherHandler = { + onFileChange(_1: FileWatcherEventType, _2: Uri): void { + // do nothing + }, +}; + +export const nullFileWatcherProvider: FileWatcherProvider = { + createFileWatcher(_1: string[], _2: FileWatcherEventHandler): FileWatcher { + return nullFileWatcher; + }, +}; + +// File watchers can give "changed" event even for a file open. but for those cases, +// it will give relative path rather than absolute path. To get rid of such cases, +// we will drop any event with relative paths. 
this trick is copied from VS Code +// (https://github.com/microsoft/vscode/blob/main/src/vs/platform/files/node/watcher/unix/chokidarWatcherService.ts) +export function ignoredWatchEventFunction(paths: string[]) { + const normalizedPaths = paths.map((p) => p.toLowerCase()); + return (path: string): boolean => { + if (!path || path.indexOf('__pycache__') >= 0) { + return true; + } + const normalizedPath = path.toLowerCase(); + return normalizedPaths.every((p) => normalizedPath.indexOf(p) < 0); + }; +} + +const nullFileWatcher: FileWatcher = { + close() { + // empty; + }, +}; diff --git a/python-parser/packages/pyright-internal/src/common/fullAccessHost.ts b/python-parser/packages/pyright-internal/src/common/fullAccessHost.ts new file mode 100644 index 00000000..f881b25b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/fullAccessHost.ts @@ -0,0 +1,368 @@ +/* + * fullAccessHost.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Implementation of host where it is allowed to run external executables. 
+ */ + +import * as child_process from 'child_process'; +import { CancellationToken } from 'vscode-languageserver'; + +import { ImportLogger } from '../analyzer/importLogger'; +import { PythonPathResult } from '../analyzer/pythonPathUtils'; +import { OperationCanceledException, onCancellationRequested, throwIfCancellationRequested } from './cancellationUtils'; +import { PythonPlatform } from './configOptions'; +import { assertNever } from './debug'; +import { HostKind, NoAccessHost, ScriptOutput } from './host'; +import { getAnyExtensionFromPath, normalizePath } from './pathUtils'; +import { terminateChild } from './processUtils'; +import { PythonVersion } from './pythonVersion'; +import { ServiceKeys } from './serviceKeys'; +import { ServiceProvider } from './serviceProvider'; +import { Uri } from './uri/uri'; +import { isDirectory } from './uri/uriUtils'; + +// preventLocalImports removes the working directory from sys.path. +// The -c flag adds it automatically, which can allow some stdlib +// modules (like json) to be overridden by other files (like json.py). 
+const removeCwdFromSysPath = [ + 'import os, os.path, sys', + 'normalize = lambda p: os.path.normcase(os.path.normpath(p))', + 'cwd = normalize(os.getcwd())', + 'orig_sys_path = [p for p in sys.path if p != ""]', + 'sys.path[:] = [p for p in sys.path if p != "" and normalize(p) != cwd]', +]; + +const extractSys = [ + ...removeCwdFromSysPath, + 'import sys, json', + 'json.dump(dict(path=orig_sys_path, prefix=sys.prefix), sys.stdout)', +].join('; '); + +const extractVersion = [ + ...removeCwdFromSysPath, + 'import sys, json', + 'json.dump(tuple(sys.version_info), sys.stdout)', +].join('; '); + +export class LimitedAccessHost extends NoAccessHost { + override get kind(): HostKind { + return HostKind.LimitedAccess; + } + + override getPythonPlatform(importLogger?: ImportLogger): PythonPlatform | undefined { + if (process.platform === 'darwin') { + return PythonPlatform.Darwin; + } else if (process.platform === 'linux') { + return PythonPlatform.Linux; + } else if (process.platform === 'win32') { + return PythonPlatform.Windows; + } else if (process.platform === 'android') { + return PythonPlatform.Android; + } + + return undefined; + } +} + +export class FullAccessHost extends LimitedAccessHost { + constructor(protected serviceProvider: ServiceProvider) { + super(); + } + + override get kind(): HostKind { + return HostKind.FullAccess; + } + + static createHost(kind: HostKind, serviceProvider: ServiceProvider) { + switch (kind) { + case HostKind.NoAccess: + return new NoAccessHost(); + case HostKind.LimitedAccess: + return new LimitedAccessHost(); + case HostKind.FullAccess: + return new FullAccessHost(serviceProvider); + default: + assertNever(kind); + } + } + + override getPythonSearchPaths(pythonPath?: Uri, importLogger?: ImportLogger): PythonPathResult { + let result = this._executePythonInterpreter(pythonPath?.getFilePath(), (p) => + this._getSearchPathResultFromInterpreter(p, importLogger) + ); + + if (!result) { + result = { + paths: [], + prefix: undefined, + 
}; + } + + importLogger?.log(`Received ${result.paths.length} paths from interpreter`); + result.paths.forEach((path) => { + importLogger?.log(` ${path}`); + }); + + return result; + } + + override getPythonVersion(pythonPath?: Uri, importLogger?: ImportLogger): PythonVersion | undefined { + try { + const execOutput = this._executePythonInterpreter(pythonPath?.getFilePath(), (p) => + this._executeCodeInInterpreter(p, ['-I'], extractVersion) + ); + + const versionJson: any[] = JSON.parse(execOutput!); + + if (!Array.isArray(versionJson) || versionJson.length < 5) { + importLogger?.log(`Python version ${execOutput} from interpreter is unexpected format`); + return undefined; + } + + const version = PythonVersion.create( + versionJson[0], + versionJson[1], + versionJson[2], + versionJson[3], + versionJson[4] + ); + + if (version === undefined) { + importLogger?.log(`Python version ${execOutput} from interpreter is unsupported`); + return undefined; + } + + return version; + } catch { + importLogger?.log('Unable to get Python version from interpreter'); + return undefined; + } + } + + override runScript( + pythonPath: Uri | undefined, + script: Uri, + args: string[], + cwd: Uri, + token: CancellationToken + ): Promise { + // If it is already cancelled, don't bother to run script. + throwIfCancellationRequested(token); + + // What to do about conda here? 
+ return new Promise((resolve, reject) => { + let stdout = ''; + let stderr = ''; + const commandLineArgs = ['-I', script.getFilePath(), ...args]; + + const child = this._executePythonInterpreter(pythonPath?.getFilePath(), (p) => + child_process.spawn(p, commandLineArgs, { + cwd: cwd.getFilePath(), + shell: this.shouldUseShellToRunInterpreter(p), + }) + ); + const tokenWatch = onCancellationRequested(token, () => { + if (child) { + terminateChild(child); + } + reject(new OperationCanceledException()); + }); + if (child) { + child.stdout.on('data', (d) => (stdout = stdout.concat(d))); + child.stderr.on('data', (d) => (stderr = stderr.concat(d))); + child.on('error', (e) => { + tokenWatch.dispose(); + reject(e); + }); + child.on('close', (code) => { + tokenWatch.dispose(); + resolve({ stdout, stderr, exitCode: code ?? undefined }); + }); + } else { + tokenWatch.dispose(); + reject(new Error(`Cannot start python interpreter with script ${script}`)); + } + }); + } + + override runSnippet( + pythonPath: Uri | undefined, + code: string, + args: string[], + cwd: Uri, + token: CancellationToken, + forceIsolated: boolean = false + ): Promise { + // If it is already cancelled, don't bother to run snippet. + throwIfCancellationRequested(token); + + // What to do about conda here? + return new Promise((resolve, reject) => { + const commandLineArgs = forceIsolated ? 
['-I', '-c', code, ...args] : ['-c', code, ...args]; + + const child = this._executePythonInterpreter(pythonPath?.getFilePath(), (p) => + child_process.spawn(p, commandLineArgs, { + cwd: cwd.getFilePath(), + stdio: ['pipe', 'pipe', 'pipe'], + shell: this.shouldUseShellToRunInterpreter(p), + }) + ); + const tokenWatch = onCancellationRequested(token, () => { + if (child) { + terminateChild(child); + } + reject(new OperationCanceledException()); + }); + if (child) { + let stdout = ''; + let stderr = ''; + let output = ''; + + // Interleave stdout and stderr by capturing them with timestamps + const outputLines: Array<{ timestamp: number; type: 'stdout' | 'stderr'; data: string }> = []; + + child.stdout?.on('data', (data) => { + const text = data.toString(); + stdout += text; + outputLines.push({ timestamp: Date.now(), type: 'stdout', data: text }); + }); + + child.stderr?.on('data', (data) => { + const text = data.toString(); + stderr += text; + outputLines.push({ timestamp: Date.now(), type: 'stderr', data: text }); + }); + + child.on('error', (error) => { + reject(new Error(`Failed to start Python process: ${error.message}`)); + }); + + child.on('close', (code) => { + tokenWatch.dispose(); + + // Sort by timestamp to get proper interleaving + outputLines.sort((a, b) => a.timestamp - b.timestamp); + + // Combine output in chronological order + output = outputLines.map((line) => line.data).join(''); + + resolve({ stdout, stderr, output, exitCode: code ?? 
undefined }); + }); + } else { + tokenWatch.dispose(); + reject(new Error(`Cannot start python interpreter with the given code snippet.`)); + } + }); + } + + protected shouldUseShellToRunInterpreter(interpreterPath: string): boolean { + // Windows bat/cmd files must me executed with the shell due to the following breaking change: + // https://nodejs.org/en/blog/vulnerability/april-2024-security-releases-2#command-injection-via-args-parameter-of-child_processspawn-without-shell-option-enabled-on-windows-cve-2024-27980---high + return ( + process.platform === 'win32' && + !!getAnyExtensionFromPath(interpreterPath, ['.bat', '.cmd'], /* ignoreCase */ true) + ); + } + + private _executePythonInterpreter( + pythonPath: string | undefined, + execute: (path: string) => T | undefined + ): T | undefined { + if (pythonPath) { + return execute(pythonPath); + } else { + let result: T | undefined; + try { + // On non-Windows platforms, always default to python3 first. We want to + // avoid this on Windows because it might invoke a script that displays + // a dialog box indicating that python can be downloaded from the app store. + if (process.platform !== 'win32') { + result = execute('python3'); + } + } catch { + // Ignore failure on python3 + } + + if (result !== undefined) { + return result; + } + + // On some platforms, 'python3' might not exist. Try 'python' instead. + return execute('python'); + } + } + + /** + * Executes a chunk of Python code via the provided interpreter and returns the output. + * @param interpreterPath Path to interpreter. + * @param commandLineArgs Command line args for interpreter other than the code to execute. + * @param code Code to execute. 
+ */ + private _executeCodeInInterpreter(interpreterPath: string, commandLineArgs: string[], code: string): string { + const useShell = this.shouldUseShellToRunInterpreter(interpreterPath); + if (useShell) { + code = '"' + code + '"'; + } + + commandLineArgs.push('-c', code); + + const execOutput = child_process.execFileSync(interpreterPath, commandLineArgs, { + encoding: 'utf8', + shell: useShell, + }); + + return execOutput; + } + + private _getSearchPathResultFromInterpreter( + interpreterPath: string, + importLogger?: ImportLogger + ): PythonPathResult | undefined { + const result: PythonPathResult = { + paths: [], + prefix: undefined, + }; + + try { + importLogger?.log(`Executing interpreter: '${interpreterPath}'`); + const execOutput = this._executeCodeInInterpreter(interpreterPath, [], extractSys); + const caseDetector = this.serviceProvider.get(ServiceKeys.caseSensitivityDetector); + + // Parse the execOutput. It should be a JSON-encoded array of paths. + try { + const execSplit = JSON.parse(execOutput); + for (let execSplitEntry of execSplit.path) { + execSplitEntry = execSplitEntry.trim(); + if (execSplitEntry) { + const normalizedPath = normalizePath(execSplitEntry); + const normalizedUri = Uri.file(normalizedPath, caseDetector); + // Skip non-existent paths and broken zips/eggs. 
+ if ( + this.serviceProvider.fs().existsSync(normalizedUri) && + isDirectory(this.serviceProvider.fs(), normalizedUri) + ) { + result.paths.push(normalizedUri); + } else { + importLogger?.log(`Skipping '${normalizedPath}' because it is not a valid directory`); + } + } + } + + result.prefix = Uri.file(execSplit.prefix, caseDetector); + + if (result.paths.length === 0) { + importLogger?.log(`Found no valid directories`); + } + } catch (err) { + importLogger?.log(`Could not parse output: '${execOutput}'`); + throw err; + } + } catch { + return undefined; + } + + return result; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/host.ts b/python-parser/packages/pyright-internal/src/common/host.ts new file mode 100644 index 00000000..86490d9b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/host.ts @@ -0,0 +1,97 @@ +/* + * host.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Provides access to the host environment the language service is running on. + */ + +import { CancellationToken } from 'vscode-languageserver'; + +import { ImportLogger } from '../analyzer/importLogger'; +import { PythonPathResult } from '../analyzer/pythonPathUtils'; +import { PythonPlatform } from './configOptions'; +import { PythonVersion } from './pythonVersion'; +import { Uri } from './uri/uri'; + +export const enum HostKind { + FullAccess, + LimitedAccess, + NoAccess, +} + +export interface ScriptOutput { + stdout: string; + stderr: string; + + // Optional output that contains both stdout and stderr interleaved in choronological order. 
+ output?: string; + exitCode?: number; +} + +export interface Host { + readonly kind: HostKind; + getPythonSearchPaths(pythonPath?: Uri, failureLogger?: ImportLogger): PythonPathResult; + getPythonVersion(pythonPath?: Uri, failureLogger?: ImportLogger): PythonVersion | undefined; + getPythonPlatform(failureLogger?: ImportLogger): PythonPlatform | undefined; + runScript( + pythonPath: Uri | undefined, + script: Uri, + args: string[], + cwd: Uri, + token: CancellationToken + ): Promise; + runSnippet( + pythonPath: Uri | undefined, + code: string, + args: string[], + cwd: Uri, + token: CancellationToken, + forceIsolated?: boolean + ): Promise; +} + +export class NoAccessHost implements Host { + get kind(): HostKind { + return HostKind.NoAccess; + } + + getPythonSearchPaths(pythonPath?: Uri, failureLogger?: ImportLogger): PythonPathResult { + failureLogger?.log('No access to python executable.'); + + return { + paths: [], + prefix: undefined, + }; + } + + getPythonVersion(pythonPath?: Uri, failureLogger?: ImportLogger): PythonVersion | undefined { + return undefined; + } + + getPythonPlatform(failureLogger?: ImportLogger): PythonPlatform | undefined { + return undefined; + } + + async runScript( + pythonPath: Uri | undefined, + scriptPath: Uri, + args: string[], + cwd: Uri, + token: CancellationToken + ): Promise { + return { stdout: '', stderr: '' }; + } + + async runSnippet( + pythonPath: Uri | undefined, + code: string, + args: string[], + cwd: Uri, + token: CancellationToken + ): Promise { + return { stdout: '', stderr: '' }; + } +} + +export type HostFactory = () => Host; diff --git a/python-parser/packages/pyright-internal/src/common/languageInfoUtils.ts b/python-parser/packages/pyright-internal/src/common/languageInfoUtils.ts new file mode 100644 index 00000000..c538ae13 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/languageInfoUtils.ts @@ -0,0 +1,1303 @@ +/* + * languageInfoUtils.ts + * Copyright (c) Microsoft Corporation. 
+ * + * Utilities for dumping token, syntax, and type information. + * Shared between DumpFileDebugInfo command and MCP tools. + */ + +import { findNodeByOffset, printParseNodeType } from '../analyzer/parseTreeUtils'; +import { ParseTreeWalker } from '../analyzer/parseTreeWalker'; +import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { + ClassType, + ClassTypeFlags, + FunctionParam, + FunctionType, + FunctionTypeFlags, + TypeBase, + TypeCategory, + TypeFlags, + TypeVarDetailsShared, + TypeVarType, + Variance, +} from '../analyzer/types'; +import { isNumber, isString } from './core'; +import { convertOffsetToPosition, convertOffsetsToRange } from './positionUtils'; +import { TextRange } from './textRange'; +import { TextRangeCollection } from './textRangeCollection'; +import { Uri } from './uri/uri'; +import { + ArgCategory, + ArgumentNode, + AssertNode, + AssignmentExpressionNode, + AssignmentNode, + AugmentedAssignmentNode, + AwaitNode, + BinaryOperationNode, + BreakNode, + CallNode, + CaseNode, + ClassNode, + ComprehensionForNode, + ComprehensionIfNode, + ComprehensionNode, + ConstantNode, + ContinueNode, + DecoratorNode, + DelNode, + DictionaryExpandEntryNode, + DictionaryKeyEntryNode, + DictionaryNode, + EllipsisNode, + ErrorExpressionCategory, + ErrorNode, + ExceptNode, + ExpressionNode, + ForNode, + FormatStringNode, + FunctionAnnotationNode, + FunctionNode, + GlobalNode, + IfNode, + ImportAsNode, + ImportFromAsNode, + ImportFromNode, + ImportNode, + IndexNode, + LambdaNode, + ListNode, + MatchNode, + MemberAccessNode, + ModuleNameNode, + ModuleNode, + NameNode, + NonlocalNode, + NumberNode, + ParamCategory, + ParameterNode, + ParseNode, + ParseNodeType, + PassNode, + PatternAsNode, + PatternCaptureNode, + PatternClassArgumentNode, + PatternClassNode, + PatternLiteralNode, + PatternMappingExpandEntryNode, + PatternMappingKeyEntryNode, + PatternMappingNode, + PatternSequenceNode, + PatternValueNode, + RaiseNode, + ReturnNode, + SetNode, + 
SliceNode, + StatementListNode, + StringListNode, + StringNode, + SuiteNode, + TernaryNode, + TryNode, + TupleNode, + TypeAliasNode, + TypeAnnotationNode, + TypeParamKind, + TypeParameterListNode, + TypeParameterNode, + UnaryOperationNode, + UnpackNode, + WhileNode, + WithItemNode, + WithNode, + YieldFromNode, + YieldNode, + isExpressionNode, +} from '../parser/parseNodes'; +import { ParseFileResults } from '../parser/parser'; +import { KeywordType, NewLineType, OperatorType, StringTokenFlags, Token, TokenType } from '../parser/tokenizerTypes'; + +/** + * Dumps token information for a parsed file. + */ +export function dumpTokenInfo(uri: Uri, parseResults: ParseFileResults): string { + const output: string[] = []; + const tokens = parseResults.tokenizerOutput.tokens; + const lines = parseResults.tokenizerOutput.lines; + + output.push(`* Token info (${tokens.count} tokens)`); + + for (let i = 0; i < tokens.count; i++) { + const token = tokens.getItemAt(i); + output.push(`[${i}] ${getTokenString(uri, token, lines)}`); + } + + return output.join('\n'); +} + +/** + * Dumps syntax tree (AST) information for a parsed file. + */ +export function dumpSyntaxInfo(uri: Uri, parseResults: ParseFileResults, range?: TextRange): string { + const output: string[] = []; + output.push(`* Node info`); + + const dumper = new TreeDumper(uri, parseResults.tokenizerOutput.lines, range); + dumper.walk(parseResults.parserOutput.parseTree); + + output.push(dumper.output); + + return output.join('\n'); +} + +/** + * Dumps type information at a specific location. + */ +export function dumpTypeInfo( + uri: Uri, + evaluator: TypeEvaluator, + parseResults: ParseFileResults, + start: number, + end: number, + cacheOnly?: boolean +): string { + const output: string[] = []; + output.push(cacheOnly ? 
`* Cached Type info` : `* Type info`); + output.push(getTypeEvaluatorString(uri, evaluator, parseResults, start, end, cacheOnly)); + + return output.join('\n'); +} + +function stringify(value: any, replacer: (this: any, key: string, value: any) => any): string { + const json = JSON.stringify(value, replacer, 2); + + // Unescape any paths so VS code shows them as clickable. + return json.replace(/\\\\/g, '\\'); +} + +function getTypeEvaluatorString( + uri: Uri, + evaluator: TypeEvaluator, + results: ParseFileResults, + start: number, + end: number, + cacheOnly?: boolean +) { + const dumper = new TreeDumper(uri, results.tokenizerOutput.lines); + const node = + findNodeByOffset(results.parserOutput.parseTree, start) ?? + findNodeByOffset(results.parserOutput.parseTree, end); + if (!node) { + return 'N/A'; + } + + const set = new Set(); + + if (node.nodeType === ParseNodeType.Name) { + switch (node.parent?.nodeType) { + case ParseNodeType.Class: { + const result = cacheOnly + ? evaluator.getCachedType(node.parent.d.name) + : evaluator.getTypeOfClass(node.parent as ClassNode); + if (!result) { + return 'N/A'; + } + + return stringify(result, replacer); + } + case ParseNodeType.Function: { + const result = cacheOnly + ? evaluator.getCachedType(node.parent.d.name) + : evaluator.getTypeOfFunction(node.parent as FunctionNode); + if (!result) { + return 'N/A'; + } + + return stringify(result, replacer); + } + } + } + + const range = TextRange.fromBounds(start, end); + const expr = getExpressionNodeWithRange(node, range); + if (!expr) { + return 'N/A'; + } + + const sb = `Expression node found at ${getTextSpanString( + expr, + results.tokenizerOutput.lines + )} from the given span ${getTextSpanString(range, results.tokenizerOutput.lines)}\r\n`; + + const result = cacheOnly ? 
evaluator.getCachedType(expr) : evaluator.getType(expr); + if (!result) { + return sb + 'No result'; + } + + return sb + stringify(result, replacer); + + function getExpressionNodeWithRange(node: ParseNode, range: TextRange): ExpressionNode | undefined { + // find best expression node that contains both start and end. + let current: ParseNode | undefined = node; + while (current && !TextRange.containsRange(current, range)) { + current = current.parent; + } + + if (!current) { + return undefined; + } + + while (!isExpressionNode(current!)) { + current = current!.parent; + } + + return current; + } + + function replacer(this: any, key: string, value: any) { + if (value === undefined) { + return undefined; + } + + if (!isNumber(value) && !isString(value)) { + if (set.has(value)) { + if (isClassType(value)) { + return ` class '${value.shared.fullName}' typeSourceId:${value.shared.typeSourceId}`; + } + + if (isFunctionType(value)) { + return ` function '${value.shared.fullName}' parameter count:${value.shared.parameters.length}`; + } + + if (isTypeVarType(value)) { + return ` function '${value.shared.name}' scope id:${value.priv.nameWithScope}`; + } + + return undefined; + } else { + set.add(value); + } + } + + if (isTypeBase(this) && key === 'category') { + return getTypeCategoryString(value, this); + } + + if (isTypeBase(this) && key === 'flags') { + return getTypeFlagsString(value); + } + + if (isClassDetail(this) && key === 'flags') { + return getClassTypeFlagsString(value); + } + + if (isFunctionDetail(this) && key === 'flags') { + return getFunctionTypeFlagsString(value); + } + + if (isTypeVarDetails(this) && key === 'variance') { + return getVarianceString(value); + } + + if (isParameter(this) && key === 'category') { + return getParameterCategoryString(value); + } + + if (value.nodeType && value.id) { + dumper.visitNode(value as ParseNode); + + const output = dumper.output; + dumper.reset(); + return output; + } + + return value; + } + + function 
isTypeBase(type: any): boolean { + return type.category && type.flags; + } + + function isClassType(type: any): type is ClassType { + return isTypeBase(type) && type.details && isClassDetail(type.details); + } + + function isClassDetail(type: any): boolean { + return ( + type.name !== undefined && type.fullName !== undefined && type.moduleName !== undefined && type.baseClasses + ); + } + + function isFunctionType(type: any): type is FunctionType { + return isTypeBase(type) && type.details && isFunctionDetail(type.details); + } + + function isFunctionDetail(type: any): boolean { + return ( + type.name !== undefined && type.fullName !== undefined && type.moduleName !== undefined && type.parameters + ); + } + + function isTypeVarType(type: any): type is TypeVarType { + return isTypeBase(type) && type.details && isTypeVarDetails(type.details); + } + + function isTypeVarDetails(type: any): type is TypeVarDetailsShared { + return type.name !== undefined && type.constraints && type.variance !== undefined; + } + + function isParameter(type: any): type is FunctionParam { + return type.category && type.type; + } +} + +function getVarianceString(type: Variance) { + switch (type) { + case Variance.Invariant: + return 'Invariant'; + case Variance.Covariant: + return 'Covariant'; + case Variance.Contravariant: + return 'Contravariant'; + default: + return `Unknown Value!! 
(${type})`; + } +} + +function getFlagEnumString(enumMap: [E, string][], enumValue: E): string { + const str: string[] = []; + enumMap.forEach((e) => { + if (enumValue & e[0]) { + str.push(e[1]); + } + }); + if (str.length === 0) { + if (enumValue === 0) { + return 'None'; + } + return ''; + } + + return str.join(','); +} + +const FunctionTypeFlagsToString: [FunctionTypeFlags, string][] = [ + [FunctionTypeFlags.AbstractMethod, 'AbstractMethod'], + [FunctionTypeFlags.Async, 'Async'], + [FunctionTypeFlags.ClassMethod, 'ClassMethod'], + [FunctionTypeFlags.ConstructorMethod, 'ConstructorMethod'], + [FunctionTypeFlags.DisableDefaultChecks, 'DisableDefaultChecks'], + [FunctionTypeFlags.Final, 'Final'], + [FunctionTypeFlags.Generator, 'Generator'], + [FunctionTypeFlags.Overloaded, 'Overloaded'], + [FunctionTypeFlags.ParamSpecValue, 'ParamSpecValue'], + [FunctionTypeFlags.PartiallyEvaluated, 'PartiallyEvaluated'], + [FunctionTypeFlags.PyTypedDefinition, 'PyTypedDefinition'], + [FunctionTypeFlags.GradualCallableForm, 'SkipArgsKwargsCompatibilityCheck'], + [FunctionTypeFlags.StaticMethod, 'StaticMethod'], + [FunctionTypeFlags.StubDefinition, 'StubDefinition'], + [FunctionTypeFlags.SynthesizedMethod, 'SynthesizedMethod'], + [FunctionTypeFlags.UnannotatedParams, 'UnannotatedParams'], +]; + +function getFunctionTypeFlagsString(flags: FunctionTypeFlags) { + return getFlagEnumString(FunctionTypeFlagsToString, flags); +} + +const ClassTypeFlagsToString: [ClassTypeFlags, string][] = [ + [ClassTypeFlags.BuiltIn, 'BuiltIn'], + [ClassTypeFlags.CanOmitDictValues, 'CanOmitDictValues'], + [ClassTypeFlags.ClassProperty, 'ClassProperty'], + [ClassTypeFlags.DefinedInStub, 'DefinedInStub'], + [ClassTypeFlags.EnumClass, 'EnumClass'], + [ClassTypeFlags.Final, 'Final'], + [ClassTypeFlags.HasCustomClassGetItem, 'HasCustomClassGetItem'], + [ClassTypeFlags.PartiallyEvaluated, 'PartiallyEvaluated'], + [ClassTypeFlags.PropertyClass, 'PropertyClass'], + [ClassTypeFlags.ProtocolClass, 
'ProtocolClass'], + [ClassTypeFlags.PseudoGenericClass, 'PseudoGenericClass'], + [ClassTypeFlags.RuntimeCheckable, 'RuntimeCheckable'], + [ClassTypeFlags.SpecialBuiltIn, 'SpecialBuiltIn'], + [ClassTypeFlags.SupportsAbstractMethods, 'SupportsAbstractMethods'], + [ClassTypeFlags.TupleClass, 'TupleClass'], + [ClassTypeFlags.TypedDictClass, 'TypedDictClass'], + [ClassTypeFlags.TypingExtensionClass, 'TypingExtensionClass'], +]; + +function getClassTypeFlagsString(flags: ClassTypeFlags) { + return getFlagEnumString(ClassTypeFlagsToString, flags); +} + +function getTypeFlagsString(flags: TypeFlags) { + const str = []; + + if (flags & TypeFlags.Instantiable) { + str.push('Instantiable'); + } + + if (flags & TypeFlags.Instance) { + str.push('Instance'); + } + + if (str.length === 0) return 'None'; + + return str.join(','); +} + +function getTypeCategoryString(typeCategory: TypeCategory, type: any) { + switch (typeCategory) { + case TypeCategory.Unbound: + return 'Unbound'; + case TypeCategory.Unknown: + return 'Unknown'; + case TypeCategory.Any: + return 'Any'; + case TypeCategory.Never: + return 'Never'; + case TypeCategory.Function: + return 'Function'; + case TypeCategory.Overloaded: + return 'Overloaded'; + case TypeCategory.Class: + if (TypeBase.isInstantiable(type)) { + return 'Class'; + } else { + return 'Object'; + } + case TypeCategory.Module: + return 'Module'; + case TypeCategory.Union: + return 'Union'; + case TypeCategory.TypeVar: + return 'TypeVar'; + default: + return `Unknown Value!! 
(${typeCategory})`; + } +} + +export class TreeDumper extends ParseTreeWalker { + private _indentation = ''; + private _output = ''; + + constructor(private _uri: Uri, private _lines: TextRangeCollection, private _range?: TextRange) { + super(); + } + + get output(): string { + return this._output; + } + + override walk(node: ParseNode): void { + if (!this._isNodeInRange(node)) { + return; + } + const childrenToWalk = this.visitNode(node); + if (childrenToWalk.length > 0) { + this._indentation += ' '; + this.walkMultiple(childrenToWalk); + this._indentation = this._indentation.substr(0, this._indentation.length - 2); + } + } + + reset() { + this._indentation = ''; + this._output = ''; + } + + override visitArgument(node: ArgumentNode) { + this._log(`${this._getPrefix(node)} ${getArgCategoryString(node.d.argCategory)}`); + return true; + } + + override visitAssert(node: AssertNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitAssignment(node: AssignmentNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitAssignmentExpression(node: AssignmentExpressionNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitAugmentedAssignment(node: AugmentedAssignmentNode) { + this._log(`${this._getPrefix(node)} ${getOperatorTypeString(node.d.operator)}`); + return true; + } + + override visitAwait(node: AwaitNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitBinaryOperation(node: BinaryOperationNode) { + this._log( + `${this._getPrefix(node)} ${getTokenString( + this._uri, + node.d.operatorToken, + this._lines + )} ${getOperatorTypeString(node.d.operator)}} parenthesized:(${node.d.hasParens})` + ); + return true; + } + + override visitBreak(node: BreakNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitCall(node: CallNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitClass(node: 
ClassNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitComprehension(node: ComprehensionNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitComprehensionFor(node: ComprehensionForNode) { + this._log(`${this._getPrefix(node)} async:(${node.d.isAsync})`); + return true; + } + + override visitComprehensionIf(node: ComprehensionIfNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitContinue(node: ContinueNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitConstant(node: ConstantNode) { + this._log(`${this._getPrefix(node)} ${getKeywordTypeString(node.d.constType)}`); + return true; + } + + override visitDecorator(node: DecoratorNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitDel(node: DelNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitDictionary(node: DictionaryNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitDictionaryKeyEntry(node: DictionaryKeyEntryNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitDictionaryExpandEntry(node: DictionaryExpandEntryNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitError(node: ErrorNode) { + this._log(`${this._getPrefix(node)} ${getErrorExpressionCategoryString(node.d.category)}`); + return true; + } + + override visitEllipsis(node: EllipsisNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitIf(node: IfNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitImport(node: ImportNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitImportAs(node: ImportAsNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitImportFrom(node: ImportFromNode) { + this._log( + `${this._getPrefix(node)} wildcard 
import:(${node.d.isWildcardImport}) paren:(${ + node.d.usesParens + }) wildcard token:(${ + node.d.wildcardToken ? getTokenString(this._uri, node.d.wildcardToken, this._lines) : 'N/A' + }) missing import keyword:(${node.d.missingImport})` + ); + return true; + } + + override visitImportFromAs(node: ImportFromAsNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitIndex(node: IndexNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitExcept(node: ExceptNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitFor(node: ForNode) { + this._log(`${this._getPrefix(node)} async:(${node.d.isAsync})`); + return true; + } + + override visitFormatString(node: FormatStringNode) { + this._log(`${this._getPrefix(node)} f-string`); + return true; + } + + override visitFunction(node: FunctionNode) { + this._log(`${this._getPrefix(node)} async:(${node.d.isAsync})`); + return true; + } + + override visitFunctionAnnotation(node: FunctionAnnotationNode) { + this._log(`${this._getPrefix(node)} ellipsis:(${node.d.isEllipsis})`); + return true; + } + + override visitGlobal(node: GlobalNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitLambda(node: LambdaNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitList(node: ListNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitMemberAccess(node: MemberAccessNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitModule(node: ModuleNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitModuleName(node: ModuleNameNode) { + this._log( + `${this._getPrefix(node)} leading dots:(${node.d.leadingDots}) trailing dot:(${node.d.hasTrailingDot})` + ); + return true; + } + + override visitName(node: NameNode) { + this._log(`${this._getPrefix(node)} ${getTokenString(this._uri, node.d.token, this._lines)} 
${node.d.value}`); + return true; + } + + override visitNonlocal(node: NonlocalNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitNumber(node: NumberNode) { + this._log( + `${this._getPrefix(node)} ${node.d.value} int:(${node.d.isInteger}) imaginary:(${node.d.isImaginary})` + ); + return true; + } + + override visitParameter(node: ParameterNode) { + this._log(`${this._getPrefix(node)} ${getParameterCategoryString(node.d.category)}`); + return true; + } + + override visitPass(node: PassNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitRaise(node: RaiseNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitReturn(node: ReturnNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitSet(node: SetNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitSlice(node: SliceNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitStatementList(node: StatementListNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitString(node: StringNode) { + this._log(`${this._getPrefix(node)} ${getTokenString(this._uri, node.d.token, this._lines)} ${node.d.value}`); + return true; + } + + override visitStringList(node: StringListNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitSuite(node: SuiteNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitTernary(node: TernaryNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitTuple(node: TupleNode) { + this._log(`${this._getPrefix(node)} paren:(${node.d.hasParens})`); + return true; + } + + override visitTry(node: TryNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitTypeAnnotation(node: TypeAnnotationNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override 
visitUnaryOperation(node: UnaryOperationNode) { + this._log( + `${this._getPrefix(node)} ${getTokenString( + this._uri, + node.d.operatorToken, + this._lines + )} ${getOperatorTypeString(node.d.operator)}` + ); + return true; + } + + override visitUnpack(node: UnpackNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitWhile(node: WhileNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitWith(node: WithNode) { + this._log(`${this._getPrefix(node)} async:(${node.d.isAsync})`); + return true; + } + + override visitWithItem(node: WithItemNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitYield(node: YieldNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitYieldFrom(node: YieldFromNode) { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitCase(node: CaseNode): boolean { + this._log(`${this._getPrefix(node)} isIrrefutable: ${node.d.isIrrefutable}`); + return true; + } + + override visitMatch(node: MatchNode): boolean { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitPatternAs(node: PatternAsNode): boolean { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitPatternCapture(node: PatternCaptureNode): boolean { + this._log(`${this._getPrefix(node)} isStar:${node.d.isStar} isWildcard:${node.d.isWildcard}`); + return true; + } + + override visitPatternClass(node: PatternClassNode): boolean { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitPatternClassArgument(node: PatternClassArgumentNode): boolean { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitPatternLiteral(node: PatternLiteralNode): boolean { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitPatternMapping(node: PatternMappingNode): boolean { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override 
visitPatternMappingExpandEntry(node: PatternMappingExpandEntryNode): boolean { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitPatternMappingKeyEntry(node: PatternMappingKeyEntryNode): boolean { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitPatternSequence(node: PatternSequenceNode): boolean { + this._log(`${this._getPrefix(node)} starEntryIndex: ${node.d.starEntryIndex}`); + return true; + } + + override visitPatternValue(node: PatternValueNode): boolean { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitTypeAlias(node: TypeAliasNode): boolean { + this._log(`${this._getPrefix(node)}`); + return true; + } + + override visitTypeParameter(node: TypeParameterNode): boolean { + this._log(`${this._getPrefix(node)} typeParamCategory:${getTypeParameterCategoryString(node.d.typeParamKind)}`); + return true; + } + + override visitTypeParameterList(node: TypeParameterListNode): boolean { + this._log(`${this._getPrefix(node)}`); + return true; + } + + private _log(value: string) { + this._output += `${this._indentation}${value}\r\n`; + } + + private _getPrefix(node: ParseNode) { + const pos = convertOffsetToPosition(node.start, this._lines); + // VS code's output window expects 1 based values, print the line/char with 1 based. 
+ return `[${node.id}] '${this._uri.toString()}:${pos.line + 1}:${pos.character + 1}' => ${printParseNodeType( + node.nodeType + )} ${getTextSpanString(node, this._lines)} =>`; + } + + private _isNodeInRange(node: ParseNode) { + if (this._range === undefined) { + return true; + } + + return TextRange.overlapsRange(this._range, node); + } +} + +function getTypeParameterCategoryString(type: TypeParamKind) { + switch (type) { + case TypeParamKind.TypeVar: + return 'TypeVar'; + case TypeParamKind.TypeVarTuple: + return 'TypeVarTuple'; + case TypeParamKind.ParamSpec: + return 'ParamSpec'; + } +} + +function getParameterCategoryString(type: ParamCategory) { + switch (type) { + case ParamCategory.Simple: + return 'Simple'; + case ParamCategory.ArgsList: + return 'ArgsList'; + case ParamCategory.KwargsDict: + return 'KwargsDict'; + } +} + +function getArgCategoryString(type: ArgCategory) { + switch (type) { + case ArgCategory.Simple: + return 'Simple'; + case ArgCategory.UnpackedList: + return 'UnpackedList'; + case ArgCategory.UnpackedDictionary: + return 'UnpackedDictionary'; + default: + return `Unknown Value!! 
(${type})`; + } +} + +function getErrorExpressionCategoryString(type: ErrorExpressionCategory) { + switch (type) { + case ErrorExpressionCategory.MissingIn: + return 'MissingIn'; + case ErrorExpressionCategory.MissingElse: + return 'MissingElse'; + case ErrorExpressionCategory.MissingExpression: + return 'MissingExpression'; + case ErrorExpressionCategory.MissingIndexOrSlice: + return 'MissingIndexOrSlice'; + case ErrorExpressionCategory.MissingDecoratorCallName: + return 'MissingDecoratorCallName'; + case ErrorExpressionCategory.MissingCallCloseParen: + return 'MissingCallCloseParen'; + case ErrorExpressionCategory.MissingIndexCloseBracket: + return 'MissingIndexCloseBracket'; + case ErrorExpressionCategory.MissingMemberAccessName: + return 'MissingMemberAccessName'; + case ErrorExpressionCategory.MissingTupleCloseParen: + return 'MissingTupleCloseParen'; + case ErrorExpressionCategory.MissingListCloseBracket: + return 'MissingListCloseBracket'; + case ErrorExpressionCategory.MissingFunctionParameterList: + return 'MissingFunctionParameterList'; + case ErrorExpressionCategory.MissingPattern: + return 'MissingPattern'; + case ErrorExpressionCategory.MissingPatternSubject: + return 'MissingPatternSubject'; + case ErrorExpressionCategory.MissingDictValue: + return 'MissingDictValue'; + case ErrorExpressionCategory.MaxDepthExceeded: + return 'MaxDepthExceeded'; + default: + return `Unknown Value!! 
(${type})`; + } +} + +export function getTokenString(uri: Uri, token: Token, lines: TextRangeCollection) { + const pos = convertOffsetToPosition(token.start, lines); + let str = `'${uri.toUserVisibleString()}:${pos.line + 1}:${pos.character + 1}' (`; + str += getTokenTypeString(token.type); + str += getNewLineInfo(token); + str += getOperatorInfo(token); + str += getKeywordInfo(token); + str += getStringTokenFlags(token); + str += `, ${getTextSpanString(token, lines)}`; + str += ') '; + str += JSON.stringify(token); + + return str; + + function getNewLineInfo(t: any) { + return t.newLineType ? `, ${getNewLineTypeString(t.newLineType)}` : ''; + } + + function getOperatorInfo(t: any) { + return t.operatorType ? `, ${getOperatorTypeString(t.operatorType)}` : ''; + } + + function getKeywordInfo(t: any) { + return t.keywordType ? `, ${getKeywordTypeString(t.keywordType)}` : ''; + } + + function getStringTokenFlags(t: any) { + return t.flags ? `, [${getStringTokenFlagsString(t.flags)}]` : ''; + } +} + +export function getTextSpanString(span: TextRange, lines: TextRangeCollection) { + const range = convertOffsetsToRange(span.start, TextRange.getEnd(span), lines); + return `(${range.start.line},${range.start.character})-(${range.end.line},${range.end.character})`; +} + +function getTokenTypeString(type: TokenType) { + switch (type) { + case TokenType.Invalid: + return 'Invalid'; + case TokenType.EndOfStream: + return 'EndOfStream'; + case TokenType.NewLine: + return 'NewLine'; + case TokenType.Indent: + return 'Indent'; + case TokenType.Dedent: + return 'Dedent'; + case TokenType.String: + return 'String'; + case TokenType.Number: + return 'Number'; + case TokenType.Identifier: + return 'Identifier'; + case TokenType.Keyword: + return 'Keyword'; + case TokenType.Operator: + return 'Operator'; + case TokenType.Colon: + return 'Colon'; + case TokenType.Semicolon: + return 'Semicolon'; + case TokenType.Comma: + return 'Comma'; + case TokenType.OpenParenthesis: + return 
'OpenParenthesis'; + case TokenType.CloseParenthesis: + return 'CloseParenthesis'; + case TokenType.OpenBracket: + return 'OpenBracket'; + case TokenType.CloseBracket: + return 'CloseBracket'; + case TokenType.OpenCurlyBrace: + return 'OpenCurlyBrace'; + case TokenType.CloseCurlyBrace: + return 'CloseCurlyBrace'; + case TokenType.Ellipsis: + return 'Ellipsis'; + case TokenType.Dot: + return 'Dot'; + case TokenType.Arrow: + return 'Arrow'; + case TokenType.Backtick: + return 'Backtick'; + default: + return `Unknown Value!! (${type})`; + } +} + +function getNewLineTypeString(type: NewLineType) { + switch (type) { + case NewLineType.CarriageReturn: + return 'CarriageReturn'; + case NewLineType.LineFeed: + return 'LineFeed'; + case NewLineType.CarriageReturnLineFeed: + return 'CarriageReturnLineFeed'; + case NewLineType.Implied: + return 'Implied'; + default: + return `Unknown Value!! (${type})`; + } +} + +function getOperatorTypeString(type: OperatorType) { + switch (type) { + case OperatorType.Add: + return 'Add'; + case OperatorType.AddEqual: + return 'AddEqual'; + case OperatorType.Assign: + return 'Assign'; + case OperatorType.BitwiseAnd: + return 'BitwiseAnd'; + case OperatorType.BitwiseAndEqual: + return 'BitwiseAndEqual'; + case OperatorType.BitwiseInvert: + return 'BitwiseInvert'; + case OperatorType.BitwiseOr: + return 'BitwiseOr'; + case OperatorType.BitwiseOrEqual: + return 'BitwiseOrEqual'; + case OperatorType.BitwiseXor: + return 'BitwiseXor'; + case OperatorType.BitwiseXorEqual: + return 'BitwiseXorEqual'; + case OperatorType.Divide: + return 'Divide'; + case OperatorType.DivideEqual: + return 'DivideEqual'; + case OperatorType.Equals: + return 'Equals'; + case OperatorType.FloorDivide: + return 'FloorDivide'; + case OperatorType.FloorDivideEqual: + return 'FloorDivideEqual'; + case OperatorType.GreaterThan: + return 'GreaterThan'; + case OperatorType.GreaterThanOrEqual: + return 'GreaterThanOrEqual'; + case OperatorType.LeftShift: + return 'LeftShift'; 
+ case OperatorType.LeftShiftEqual: + return 'LeftShiftEqual'; + case OperatorType.LessOrGreaterThan: + return 'LessOrGreaterThan'; + case OperatorType.LessThan: + return 'LessThan'; + case OperatorType.LessThanOrEqual: + return 'LessThanOrEqual'; + case OperatorType.MatrixMultiply: + return 'MatrixMultiply'; + case OperatorType.MatrixMultiplyEqual: + return 'MatrixMultiplyEqual'; + case OperatorType.Mod: + return 'Mod'; + case OperatorType.ModEqual: + return 'ModEqual'; + case OperatorType.Multiply: + return 'Multiply'; + case OperatorType.MultiplyEqual: + return 'MultiplyEqual'; + case OperatorType.NotEquals: + return 'NotEquals'; + case OperatorType.Power: + return 'Power'; + case OperatorType.PowerEqual: + return 'PowerEqual'; + case OperatorType.RightShift: + return 'RightShift'; + case OperatorType.RightShiftEqual: + return 'RightShiftEqual'; + case OperatorType.Subtract: + return 'Subtract'; + case OperatorType.SubtractEqual: + return 'SubtractEqual'; + case OperatorType.Walrus: + return 'Walrus'; + case OperatorType.And: + return 'And'; + case OperatorType.Or: + return 'Or'; + case OperatorType.Not: + return 'Not'; + case OperatorType.Is: + return 'Is'; + case OperatorType.IsNot: + return 'IsNot'; + case OperatorType.In: + return 'In'; + case OperatorType.NotIn: + return 'NotIn'; + default: + return `Unknown Value!! 
(${type})`; + } +} + +function getKeywordTypeString(type: KeywordType) { + switch (type) { + case KeywordType.And: + return 'And'; + case KeywordType.As: + return 'As'; + case KeywordType.Assert: + return 'Assert'; + case KeywordType.Async: + return 'Async'; + case KeywordType.Await: + return 'Await'; + case KeywordType.Break: + return 'Break'; + case KeywordType.Class: + return 'Class'; + case KeywordType.Continue: + return 'Continue'; + case KeywordType.Debug: + return 'Debug'; + case KeywordType.Def: + return 'Def'; + case KeywordType.Del: + return 'Del'; + case KeywordType.Elif: + return 'Elif'; + case KeywordType.Else: + return 'Else'; + case KeywordType.Except: + return 'Except'; + case KeywordType.False: + return 'False'; + case KeywordType.Finally: + return 'Finally'; + case KeywordType.For: + return 'For'; + case KeywordType.From: + return 'From'; + case KeywordType.Global: + return 'Global'; + case KeywordType.If: + return 'If'; + case KeywordType.Import: + return 'Import'; + case KeywordType.In: + return 'In'; + case KeywordType.Is: + return 'Is'; + case KeywordType.Lambda: + return 'Lambda'; + case KeywordType.None: + return 'None'; + case KeywordType.Nonlocal: + return 'Nonlocal'; + case KeywordType.Not: + return 'Not'; + case KeywordType.Or: + return 'Or'; + case KeywordType.Pass: + return 'Pass'; + case KeywordType.Raise: + return 'Raise'; + case KeywordType.Return: + return 'Return'; + case KeywordType.True: + return 'True'; + case KeywordType.Try: + return 'Try'; + case KeywordType.While: + return 'While'; + case KeywordType.With: + return 'With'; + case KeywordType.Yield: + return 'Yield'; + default: + return `Unknown Value!! 
(${type})`; + } +} + +const StringTokenFlagsStrings: [StringTokenFlags, string][] = [ + [StringTokenFlags.Bytes, 'Bytes'], + [StringTokenFlags.DoubleQuote, 'DoubleQuote'], + [StringTokenFlags.Format, 'Format'], + [StringTokenFlags.Template, 'Template'], + [StringTokenFlags.Raw, 'Raw'], + [StringTokenFlags.SingleQuote, 'SingleQuote'], + [StringTokenFlags.Triplicate, 'Triplicate'], + [StringTokenFlags.Unicode, 'Unicode'], + [StringTokenFlags.Unterminated, 'Unterminated'], +]; + +function getStringTokenFlagsString(flags: StringTokenFlags) { + return getFlagEnumString(StringTokenFlagsStrings, flags); +} diff --git a/python-parser/packages/pyright-internal/src/common/languageServerInterface.ts b/python-parser/packages/pyright-internal/src/common/languageServerInterface.ts new file mode 100644 index 00000000..46019507 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/languageServerInterface.ts @@ -0,0 +1,133 @@ +/* + * languageServerInterface.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Interface for language server + */ + +import { MaxAnalysisTime } from '../analyzer/program'; +import { IBackgroundAnalysis } from '../backgroundAnalysisBase'; +import { Workspace } from '../workspaceFactory'; +import { DiagnosticBooleanOverridesMap, DiagnosticSeverityOverridesMap } from './commandLineOptions'; +import { SignatureDisplayType } from './configOptions'; +import { ConsoleInterface, LogLevel } from './console'; +import { TaskListToken } from './diagnostic'; +import { FileSystem } from './fileSystem'; +import { FileWatcherHandler } from './fileWatcher'; +import { ServiceProvider } from './serviceProvider'; +import { Uri } from './uri/uri'; + +export interface ServerSettings { + venvPath?: Uri | undefined; + pythonPath?: Uri | undefined; + typeshedPath?: Uri | undefined; + stubPath?: Uri | undefined; + openFilesOnly?: boolean | undefined; + typeCheckingMode?: string | undefined; + useLibraryCodeForTypes?: boolean | undefined; + disableLanguageServices?: boolean | undefined; + disableTaggedHints?: boolean | undefined; + disableOrganizeImports?: boolean | undefined; + autoSearchPaths?: boolean | undefined; + extraPaths?: Uri[] | undefined; + watchForSourceChanges?: boolean | undefined; + watchForLibraryChanges?: boolean | undefined; + watchForConfigChanges?: boolean | undefined; + diagnosticSeverityOverrides?: DiagnosticSeverityOverridesMap | undefined; + diagnosticBooleanOverrides?: DiagnosticBooleanOverridesMap | undefined; + logLevel?: LogLevel | undefined; + autoImportCompletions?: boolean | undefined; + indexing?: boolean | undefined; + logTypeEvaluationTime?: boolean | undefined; + typeEvaluationTimeThreshold?: number | undefined; + includeFileSpecs?: string[]; + excludeFileSpecs?: string[]; + ignoreFileSpecs?: string[]; + taskListTokens?: TaskListToken[]; + functionSignatureDisplay?: SignatureDisplayType | undefined; +} + +export interface MessageAction { + title: string; + [key: string]: string | boolean | number | object; +} + +export 
interface WindowInterface { + showErrorMessage(message: string): void; + showErrorMessage(message: string, ...actions: MessageAction[]): Promise; + + showWarningMessage(message: string): void; + showWarningMessage(message: string, ...actions: MessageAction[]): Promise; + + showInformationMessage(message: string): void; + showInformationMessage(message: string, ...actions: MessageAction[]): Promise; +} + +export namespace WindowInterface { + export function is(obj: any): obj is WindowInterface { + return ( + !!obj && + obj.showErrorMessage !== undefined && + obj.showWarningMessage !== undefined && + obj.showInformationMessage !== undefined + ); + } +} + +export interface WorkspaceServices { + fs: FileSystem | undefined; + backgroundAnalysis: IBackgroundAnalysis | undefined; +} + +export interface ServerOptions { + productName: string; + rootDirectory: Uri; + version: string; + serviceProvider: ServiceProvider; + fileWatcherHandler: FileWatcherHandler; + maxAnalysisTimeInForeground?: MaxAnalysisTime; + disableChecker?: boolean; + supportedCommands?: string[]; + supportedCodeActions?: string[]; + supportsTelemetry?: boolean; +} + +export interface LanguageServerBaseInterface { + readonly console: ConsoleInterface; + readonly window: WindowInterface; + readonly supportAdvancedEdits: boolean; + readonly serviceProvider: ServiceProvider; + + createBackgroundAnalysis(serviceId: string, workspaceRoot: Uri): IBackgroundAnalysis | undefined; + reanalyze(): void; + restart(): void; + + getWorkspaces(): Promise; + getSettings(workspace: Workspace): Promise; +} + +export interface LanguageServerInterface extends LanguageServerBaseInterface { + getWorkspaceForFile(fileUri: Uri, pythonPath?: Uri): Promise; +} + +export interface WindowService extends WindowInterface { + createGoToOutputAction(): MessageAction; + createOpenUriAction(title: string, uri: string): MessageAction; +} + +export namespace WindowService { + export function is(obj: any): obj is WindowService { + return 
obj.createGoToOutputAction !== undefined && WindowInterface.is(obj); + } +} + +export interface CommandService { + sendCommand(id: string, ...args: string[]): void; +} + +export namespace CommandService { + export function is(obj: any): obj is CommandService { + return !!obj && obj.sendCommand !== undefined; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/logTracker.ts b/python-parser/packages/pyright-internal/src/common/logTracker.ts new file mode 100644 index 00000000..fb4ecb96 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/logTracker.ts @@ -0,0 +1,212 @@ +/* + * logTracker.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * A simple logging class that can be used to track nested loggings. + */ + +import { ConsoleInterface, LogLevel, SupportName } from './console'; +import { ReadOnlyFileSystem } from './fileSystem'; +import { Duration, timingStats } from './timing'; +import { Uri } from './uri/uri'; + +// Consider an operation "long running" if it goes longer than this. +const durationThresholdForInfoInMs = 2000; + +export function getPathForLogging(fs: ReadOnlyFileSystem, fileUri: Uri) { + if (fs.isMappedUri(fileUri)) { + return fs.getOriginalUri(fileUri); + } + + return fileUri; +} + +export class LogTracker implements SupportName { + private readonly _dummyState = new State(); + private readonly _previousTitles: string[] = []; + private readonly _header: string; + + private _indentation = ''; + + constructor(private readonly _console: ConsoleInterface | undefined, private readonly _name: string) { + this._header = SupportName.is(_console) && _console.name ? '' : `[${_name}] `; + } + + get name() { + return SupportName.is(this._console) && this._console.name ? this._console.name : this._name; + } + + get logLevel() { + const level = (this._console as any).level; + return level ?? 
LogLevel.Error; + } + + log(title: string, callback: (state: LogState) => T): T; + log(title: string, callback: (state: LogState) => Promise): Promise; + log(title: string, callback: (state: LogState) => T, minimalDuration: number, logParsingPerf: boolean): T; + log( + title: string, + callback: (state: LogState) => Promise, + minimalDuration: number, + logParsingPerf: boolean + ): Promise; + log( + title: string, + callback: (state: LogState) => T | Promise, + minimalDuration = -1, + logParsingPerf = false + ): T | Promise { + // If no console is given, don't do anything. + if (this._console === undefined) { + return callback(this._dummyState); + } + + // This is enabled only when level is LogLevel.Log or does not exist. + const level = (this._console as any).level; + if (level === undefined || (level !== LogLevel.Log && level !== LogLevel.Info)) { + return callback(this._dummyState); + } + + // Since this is only used when LogLevel.Log or LogLevel.Info is set or BG, + // we don't care much about extra logging cost. 
+ const current = this._indentation; + this._previousTitles.push(`${current}${title} ...`); + + this._indentation += ' '; + const state = new State(); + + try { + const maybePromise = callback(state); + if (maybePromise instanceof Promise) { + return maybePromise + .then((result) => { + this._onComplete(state, current, title, minimalDuration, logParsingPerf); + return result; + }) + .catch((err) => { + this._onComplete(state, current, title, minimalDuration, logParsingPerf); + throw err; + }); + } + this._onComplete(state, current, title, minimalDuration, logParsingPerf); + return maybePromise; + } catch (err) { + this._onComplete(state, current, title, minimalDuration, logParsingPerf); + throw err; + } + } + + private _onComplete(state: State, current: string, title: string, minimalDuration = -1, logParsingPerf = false) { + const msDuration = state.duration; + this._indentation = current; + + // if we already printed our header (by nested calls), then it can't be skipped. + if (this._previousTitles.length > 0 && (state.isSuppressed() || msDuration <= minimalDuration)) { + // Get rid of myself so we don't even show header. + this._previousTitles.pop(); + } else { + this._printPreviousTitles(); + + let output = `${this._header}${this._indentation}${title}${state.get()} (${msDuration}ms)`; + + // Report parsing related perf info only if they occurred. + if ( + logParsingPerf && + state.fileReadTotal + + state.tokenizeTotal + + state.parsingTotal + + state.resolveImportsTotal + + state.bindingTotal > + 0 + ) { + output += ` [f:${state.fileReadTotal}, t:${state.tokenizeTotal}, p:${state.parsingTotal}, i:${state.resolveImportsTotal}, b:${state.bindingTotal}]`; + } + + this._console?.log(output); + + // If the operation took really long, log it as "info" so it is more visible. 
+ if (msDuration >= durationThresholdForInfoInMs) { + this._console?.info(`${this._header}Long operation: ${title} (${msDuration}ms)`); + } + } + } + + private _printPreviousTitles() { + // Get rid of myself + this._previousTitles.pop(); + + if (this._previousTitles.length <= 0) { + return; + } + + for (const previousTitle of this._previousTitles) { + this._console!.log(`${this._header}${previousTitle}`); + } + + this._previousTitles.length = 0; + } +} + +export interface LogState { + add(addendum: string | undefined): void; + suppress(): void; +} + +class State { + private _addendum: string | undefined; + private _suppress: boolean | undefined; + + private _start = new Duration(); + private _startFile = timingStats.readFileTime.totalTime; + private _startToken = timingStats.tokenizeFileTime.totalTime; + private _startParse = timingStats.parseFileTime.totalTime; + private _startImport = timingStats.resolveImportsTime.totalTime; + private _startBind = timingStats.bindTime.totalTime; + + get duration() { + return this._start.getDurationInMilliseconds(); + } + + get fileReadTotal() { + return timingStats.readFileTime.totalTime - this._startFile; + } + + get tokenizeTotal() { + return timingStats.tokenizeFileTime.totalTime - this._startToken; + } + + get parsingTotal() { + return timingStats.parseFileTime.totalTime - this._startParse; + } + + get resolveImportsTotal() { + return timingStats.resolveImportsTime.totalTime - this._startImport; + } + + get bindingTotal() { + return timingStats.bindTime.totalTime - this._startBind; + } + + add(addendum: string | undefined) { + if (addendum) { + this._addendum = addendum; + } + } + + get() { + if (this._addendum) { + return ` [${this._addendum}]`; + } + + return ''; + } + + suppress() { + this._suppress = true; + } + + isSuppressed() { + return !!this._suppress; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/lspUtils.ts b/python-parser/packages/pyright-internal/src/common/lspUtils.ts new file mode 
100644 index 00000000..249a3acd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/lspUtils.ts @@ -0,0 +1,73 @@ +/* + * lspUtils.ts + * Copyright (c) Microsoft Corporation. + * + * Helper functions related to the Language Server Protocol (LSP). + */ + +import { LSPAny, SymbolKind, WorkDoneProgressReporter } from 'vscode-languageserver'; +import { Declaration, DeclarationType } from '../analyzer/declaration'; +import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { isMaybeDescriptorInstance } from '../analyzer/typeUtils'; + +// Converts an internal object to LSPAny to be sent out via LSP +export function toLSPAny(obj: any) { + return obj as any as LSPAny; +} + +// Converts an LSPAny object received via LSP to our internal representation. +export function fromLSPAny(lspAny: LSPAny | undefined) { + return lspAny as any as T; +} + +export function getSymbolKind(declaration: Declaration, evaluator?: TypeEvaluator, name = ''): SymbolKind | undefined { + switch (declaration.type) { + case DeclarationType.Class: + case DeclarationType.SpecialBuiltInClass: + return SymbolKind.Class; + + case DeclarationType.Function: { + if (!declaration.isMethod) { + return SymbolKind.Function; + } + + const declType = evaluator?.getTypeForDeclaration(declaration)?.type; + if (declType && isMaybeDescriptorInstance(declType, /* requireSetter */ false)) { + return SymbolKind.Property; + } + + return SymbolKind.Method; + } + + case DeclarationType.Alias: + return SymbolKind.Module; + + case DeclarationType.Param: + if (name === 'self' || name === 'cls' || name === '_') { + return undefined; + } + + return SymbolKind.Variable; + + case DeclarationType.TypeParam: + return SymbolKind.TypeParameter; + + case DeclarationType.Variable: + if (name === '_') { + return undefined; + } + + return declaration.isConstant || declaration.isFinal ? 
SymbolKind.Constant : SymbolKind.Variable; + + default: + return SymbolKind.Variable; + } +} + +export function isNullProgressReporter(reporter: WorkDoneProgressReporter) { + // We can't tell if this is a NullProgressReporter (well because this type isn't exposed from vscode-languageserver), + // but we're going to assume if the toString for the begin method is empty, then it's a NullProgressReporter. + const beginStr = reporter.begin.toString(); + const contents = beginStr.substring(beginStr.indexOf('{') + 1, beginStr.lastIndexOf('}')); + return contents.trim() === ''; +} diff --git a/python-parser/packages/pyright-internal/src/common/memUtils.ts b/python-parser/packages/pyright-internal/src/common/memUtils.ts new file mode 100644 index 00000000..29bdec3e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/memUtils.ts @@ -0,0 +1,47 @@ +/* + * memUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Platform-independent helper functions for memory. 
+ */ +import * as os from 'os'; +type HeapInfo = ReturnType; + +function getHeapStatisticsFunc(): () => HeapInfo { + try { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const getHeapStatistics = require('v8').getHeapStatistics; + if (getHeapStatistics) { + return getHeapStatistics; + } + } catch { + // empty on purpose + } + + return () => ({ + total_heap_size: 0, + total_heap_size_executable: 0, + total_physical_size: 0, + total_available_size: 0, + used_heap_size: 0, + heap_size_limit: 0, + malloced_memory: 0, + peak_malloced_memory: 0, + does_zap_garbage: 0, + number_of_native_contexts: 0, + number_of_detached_contexts: 0, + total_global_handles_size: 0, + used_global_handles_size: 0, + external_memory: 0, + }); +} +export const getHeapStatistics = getHeapStatisticsFunc(); + +export function getSystemMemoryInfo() { + if (!os.totalmem || !os.freemem) { + return { total: 0, free: 0 }; + } + + return { total: os.totalmem(), free: os.freemem() }; +} diff --git a/python-parser/packages/pyright-internal/src/common/pathConsts.ts b/python-parser/packages/pyright-internal/src/common/pathConsts.ts new file mode 100644 index 00000000..870ff0ca --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/pathConsts.ts @@ -0,0 +1,21 @@ +/* + * pathConsts.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Path-related constants. 
+ */ + +export const typeshedFallback = 'typeshed-fallback'; +export const lib = 'lib'; +export const libAlternate = 'Lib'; +export const lib64 = 'lib64'; +export const sitePackages = 'site-packages'; +export const distPackages = 'dist-packages'; +export const src = 'src'; +export const stubsSuffix = '-stubs'; +export const defaultStubsDirectory = 'typings'; +export const requirementsFileName = 'requirements.txt'; +export const pyprojectTomlName = 'pyproject.toml'; +export const dotPythonVersionName = '.python-version'; +export const configFileName = 'pyrightconfig.json'; diff --git a/python-parser/packages/pyright-internal/src/common/pathUtils.ts b/python-parser/packages/pyright-internal/src/common/pathUtils.ts new file mode 100644 index 00000000..b0b39dc3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/pathUtils.ts @@ -0,0 +1,694 @@ +/* + * pathUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Pathname utility functions. + */ + +import * as path from 'path'; + +import { Char } from './charCodes'; +import { some } from './collectionUtils'; +import { identity } from './core'; +import * as debug from './debug'; +import { equateStringsCaseInsensitive, equateStringsCaseSensitive } from './stringUtils'; + +export type GetCanonicalFileName = (fileName: string) => string; + +export interface FileSpec { + // File specs can contain wildcard characters (**, *, ?). This + // specifies the first portion of the file spec that contains + // no wildcards. + wildcardRoot: string; + + // Regular expression that can be used to match against this + // file spec. + regExp: RegExp; + + // Indicates whether the file spec has a directory wildcard (**). + // When present, the search cannot terminate without exploring to + // an arbitrary depth. 
+ hasDirectoryWildcard: boolean; +} + +const _includeFileRegex = /\.pyi?$/; +const _wildcardRootRegex = /[*?]/; + +export namespace FileSpec { + export function is(value: any): value is FileSpec { + const candidate: FileSpec = value as FileSpec; + return candidate && !!candidate.wildcardRoot && !!candidate.regExp; + } + export function isInPath(path: string, paths: FileSpec[]) { + return !!paths.find((p) => p.regExp.test(path)); + } + + export function matchesIncludeFileRegex(filePath: string, isFile = true) { + return isFile ? _includeFileRegex.test(filePath) : true; + } + + export function matchIncludeFileSpec(includeRegExp: RegExp, exclude: FileSpec[], filePath: string, isFile = true) { + if (includeRegExp.test(filePath)) { + if (!FileSpec.isInPath(filePath, exclude) && FileSpec.matchesIncludeFileRegex(filePath, isFile)) { + return true; + } + } + + return false; + } +} + +export interface FileSystemEntries { + files: string[]; + directories: string[]; +} + +export function getDirectoryPath(pathString: string): string { + return pathString.substr(0, Math.max(getRootLength(pathString), pathString.lastIndexOf(path.sep))); +} + +/** + * Returns length of the root part of a path or URL (i.e. length of "/", "x:/", "//server/"). 
+ */ +export function getRootLength(pathString: string, sep = path.sep): number { + if (pathString.charAt(0) === sep) { + if (pathString.charAt(1) !== sep) { + return 1; // POSIX: "/" (or non-normalized "\") + } + const p1 = pathString.indexOf(sep, 2); + if (p1 < 0) { + return pathString.length; // UNC: "//server" or "\\server" + } + return p1 + 1; // UNC: "//server/" or "\\server\" + } + if (pathString.charAt(1) === ':') { + if (pathString.charAt(2) === sep) { + return 3; // DOS: "c:/" or "c:\" + } + if (pathString.length === 2) { + return 2; // DOS: "c:" (but not "c:d") + } + } + + return 0; +} + +export function getPathSeparator(pathString: string) { + return path.sep; +} + +export function getPathComponents(pathString: string) { + const normalizedPath = normalizeSlashes(pathString); + const rootLength = getRootLength(normalizedPath); + const root = normalizedPath.substring(0, rootLength); + const sep = getPathSeparator(pathString); + const rest = normalizedPath.substring(rootLength).split(sep); + if (rest.length > 0 && !rest[rest.length - 1]) { + rest.pop(); + } + + return reducePathComponents([root, ...rest]); +} + +export function reducePathComponents(components: readonly string[]) { + if (!some(components)) { + return []; + } + + // Reduce the path components by eliminating + // any '.' or '..'. 
+ const reduced = [components[0]]; + for (let i = 1; i < components.length; i++) { + const component = components[i]; + if (!component || component === '.') { + continue; + } + + if (component === '..') { + if (reduced.length > 1) { + if (reduced[reduced.length - 1] !== '..') { + reduced.pop(); + continue; + } + } else if (reduced[0]) { + continue; + } + } + reduced.push(component); + } + + return reduced; +} + +export function combinePathComponents(components: string[]): string { + if (components.length === 0) { + return ''; + } + + const root = components[0] && ensureTrailingDirectorySeparator(components[0]); + const sep = getPathSeparator(root); + return normalizeSlashes(root + components.slice(1).join(sep)); +} + +export function getRelativePath(dirPath: string, relativeTo: string) { + if (!dirPath.startsWith(ensureTrailingDirectorySeparator(relativeTo))) { + return undefined; + } + + const pathComponents = getPathComponents(dirPath); + const relativeToComponents = getPathComponents(relativeTo); + const sep = getPathSeparator(dirPath); + + let relativePath = '.'; + for (let i = relativeToComponents.length; i < pathComponents.length; i++) { + relativePath += sep + pathComponents[i]; + } + + return relativePath; +} + +const separatorRegExp = /[\\/]/g; +const getInvalidSeparator = (sep: string) => (sep === '/' ? '\\' : '/'); +export function normalizeSlashes(pathString: string, sep = path.sep): string { + if (pathString.includes(getInvalidSeparator(sep))) { + return pathString.replace(separatorRegExp, sep); + } + + return pathString; +} + +/** + * Combines and resolves paths. If a path is absolute, it replaces any previous path. Any + * `.` and `..` path components are resolved. Trailing directory separators are preserved. 
+ * + * ```ts + * resolvePath("/path", "to", "file.ext") === "path/to/file.ext" + * resolvePath("/path", "to", "file.ext/") === "path/to/file.ext/" + * resolvePath("/path", "dir", "..", "to", "file.ext") === "path/to/file.ext" + * ``` + */ +export function resolvePaths(path: string, ...paths: (string | undefined)[]): string { + return normalizePath(some(paths) ? combinePaths(path, ...paths) : normalizeSlashes(path)); +} + +export function combinePaths(pathString: string, ...paths: (string | undefined)[]): string { + if (pathString) { + pathString = normalizeSlashes(pathString); + } + + for (let relativePath of paths) { + if (!relativePath) { + continue; + } + + relativePath = normalizeSlashes(relativePath); + + if (!pathString || getRootLength(relativePath) !== 0) { + pathString = relativePath; + } else { + pathString = ensureTrailingDirectorySeparator(pathString) + relativePath; + } + } + + return pathString; +} + +/** + * Determines whether a `parent` path contains a `child` path using the provide case sensitivity. + */ +export function containsPath(parent: string, child: string, ignoreCase?: boolean): boolean; +export function containsPath(parent: string, child: string, currentDirectory: string, ignoreCase?: boolean): boolean; +export function containsPath(parent: string, child: string, currentDirectory?: string | boolean, ignoreCase?: boolean) { + if (typeof currentDirectory === 'string') { + parent = combinePaths(currentDirectory, parent); + child = combinePaths(currentDirectory, child); + } else if (typeof currentDirectory === 'boolean') { + ignoreCase = currentDirectory; + } + + if (parent === undefined || child === undefined) { + return false; + } + if (parent === child) { + return true; + } + + const parentComponents = getPathComponents(parent); + const childComponents = getPathComponents(child); + + if (childComponents.length < parentComponents.length) { + return false; + } + + const componentEqualityComparer = ignoreCase ? 
equateStringsCaseInsensitive : equateStringsCaseSensitive; + for (let i = 0; i < parentComponents.length; i++) { + const equalityComparer = i === 0 ? equateStringsCaseInsensitive : componentEqualityComparer; + if (!equalityComparer(parentComponents[i], childComponents[i])) { + return false; + } + } + + return true; +} + +/** + * Changes the extension of a path to the provided extension. + * + * ```ts + * changeAnyExtension("/path/to/file.ext", ".js") === "/path/to/file.js" + * ``` + */ +export function changeAnyExtension(path: string, ext: string): string; + +/** + * Changes the extension of a path to the provided extension if it has one of the provided extensions. + * + * ```ts + * changeAnyExtension("/path/to/file.ext", ".js", ".ext") === "/path/to/file.js" + * changeAnyExtension("/path/to/file.ext", ".js", ".ts") === "/path/to/file.ext" + * changeAnyExtension("/path/to/file.ext", ".js", [".ext", ".ts"]) === "/path/to/file.js" + * ``` + */ +export function changeAnyExtension( + path: string, + ext: string, + extensions: string | readonly string[], + ignoreCase: boolean +): string; +export function changeAnyExtension( + path: string, + ext: string, + extensions?: string | readonly string[], + ignoreCase?: boolean +): string { + const pathExt = + extensions !== undefined && ignoreCase !== undefined + ? getAnyExtensionFromPath(path, extensions, ignoreCase) + : getAnyExtensionFromPath(path); + + return pathExt ? path.slice(0, path.length - pathExt.length) + (ext.startsWith('.') ? ext : '.' + ext) : path; +} + +/** + * Gets the file extension for a path. + * + * ```ts + * getAnyExtensionFromPath("/path/to/file.ext") === ".ext" + * getAnyExtensionFromPath("/path/to/file.ext/") === ".ext" + * getAnyExtensionFromPath("/path/to/file") === "" + * getAnyExtensionFromPath("/path/to.ext/file") === "" + * ``` + */ +export function getAnyExtensionFromPath(path: string): string; +/** + * Gets the file extension for a path, provided it is one of the provided extensions. 
+ * + * ```ts + * getAnyExtensionFromPath("/path/to/file.ext", ".ext", true) === ".ext" + * getAnyExtensionFromPath("/path/to/file.js", ".ext", true) === "" + * getAnyExtensionFromPath("/path/to/file.js", [".ext", ".js"], true) === ".js" + * getAnyExtensionFromPath("/path/to/file.ext", ".EXT", false) === "" + */ +export function getAnyExtensionFromPath( + path: string, + extensions: string | readonly string[], + ignoreCase: boolean +): string; +export function getAnyExtensionFromPath( + path: string, + extensions?: string | readonly string[], + ignoreCase?: boolean +): string { + // Retrieves any string from the final "." onwards from a base file name. + // Unlike extensionFromPath, which throws an exception on unrecognized extensions. + if (extensions) { + return getAnyExtensionFromPathWorker( + stripTrailingDirectorySeparator(path), + extensions, + ignoreCase ? equateStringsCaseInsensitive : equateStringsCaseSensitive + ); + } + const baseFileName = getBaseFileName(path); + const extensionIndex = baseFileName.lastIndexOf('.'); + if (extensionIndex >= 0) { + return baseFileName.substring(extensionIndex); + } + return ''; +} + +/** + * Returns the path except for its containing directory name. + * Semantics align with NodeJS's `path.basename` except that we support URLs as well. + * + * ```ts + * // POSIX + * getBaseFileName("/path/to/file.ext") === "file.ext" + * getBaseFileName("/path/to/") === "to" + * getBaseFileName("/") === "" + * // DOS + * getBaseFileName("c:/path/to/file.ext") === "file.ext" + * getBaseFileName("c:/path/to/") === "to" + * getBaseFileName("c:/") === "" + * getBaseFileName("c:") === "" + * ``` + */ +export function getBaseFileName(pathString: string): string; +/** + * Gets the portion of a path following the last (non-terminal) separator (`/`). + * Semantics align with NodeJS's `path.basename` except that we support URLs as well. + * If the base name has any one of the provided extensions, it is removed. 
+ * + * ```ts + * getBaseFileName("/path/to/file.ext", ".ext", true) === "file" + * getBaseFileName("/path/to/file.js", ".ext", true) === "file.js" + * getBaseFileName("/path/to/file.js", [".ext", ".js"], true) === "file" + * getBaseFileName("/path/to/file.ext", ".EXT", false) === "file.ext" + * ``` + */ +export function getBaseFileName( + pathString: string, + extensions: string | readonly string[], + ignoreCase: boolean +): string; +export function getBaseFileName(pathString: string, extensions?: string | readonly string[], ignoreCase?: boolean) { + pathString = normalizeSlashes(pathString); + + // if the path provided is itself the root, then it has not file name. + const rootLength = getRootLength(pathString); + if (rootLength === pathString.length) { + return ''; + } + + // return the trailing portion of the path starting after the last (non-terminal) directory + // separator but not including any trailing directory separator. + pathString = stripTrailingDirectorySeparator(pathString); + const name = pathString.slice(Math.max(getRootLength(pathString), pathString.lastIndexOf(path.sep) + 1)); + const extension = + extensions !== undefined && ignoreCase !== undefined + ? getAnyExtensionFromPath(name, extensions, ignoreCase) + : undefined; + + return extension ? name.slice(0, name.length - extension.length) : name; +} + +/** + * Gets a relative path that can be used to traverse between `from` and `to`. + */ +export function getRelativePathFromDirectory(from: string, to: string, ignoreCase: boolean): string; +/** + * Gets a relative path that can be used to traverse between `from` and `to`. 
+ */ +export function getRelativePathFromDirectory( + fromDirectory: string, + to: string, + getCanonicalFileName: GetCanonicalFileName +): string; +export function getRelativePathFromDirectory( + fromDirectory: string, + to: string, + getCanonicalFileNameOrIgnoreCase: GetCanonicalFileName | boolean +) { + const pathComponents = getRelativePathComponentsFromDirectory(fromDirectory, to, getCanonicalFileNameOrIgnoreCase); + return combinePathComponents(pathComponents); +} + +export function getRelativePathComponentsFromDirectory( + fromDirectory: string, + to: string, + getCanonicalFileNameOrIgnoreCase: GetCanonicalFileName | boolean +) { + debug.assert( + getRootLength(fromDirectory) > 0 === getRootLength(to) > 0, + 'Paths must either both be absolute or both be relative' + ); + const getCanonicalFileName = + typeof getCanonicalFileNameOrIgnoreCase === 'function' ? getCanonicalFileNameOrIgnoreCase : identity; + const ignoreCase = typeof getCanonicalFileNameOrIgnoreCase === 'boolean' ? getCanonicalFileNameOrIgnoreCase : false; + const pathComponents = getPathComponentsRelativeTo( + fromDirectory, + to, + ignoreCase ? 
equateStringsCaseInsensitive : equateStringsCaseSensitive, + getCanonicalFileName + ); + + return pathComponents; +} + +export function ensureTrailingDirectorySeparator(pathString: string): string { + const sep = getPathSeparator(pathString); + if (!hasTrailingDirectorySeparator(pathString)) { + return pathString + sep; + } + + return pathString; +} + +export function hasTrailingDirectorySeparator(pathString: string) { + if (pathString.length === 0) { + return false; + } + + const ch = pathString.charCodeAt(pathString.length - 1); + return ch === Char.Slash || ch === Char.Backslash; +} + +export function stripTrailingDirectorySeparator(pathString: string) { + if (!hasTrailingDirectorySeparator(pathString)) { + return pathString; + } + return pathString.slice(0, pathString.length - 1); +} + +export function getFileExtension(fileName: string, multiDotExtension = false) { + if (!multiDotExtension) { + return path.extname(fileName); + } + + fileName = getFileName(fileName); + const firstDotIndex = fileName.indexOf('.'); + return fileName.slice(firstDotIndex); +} + +export function getFileName(pathString: string) { + return path.basename(pathString); +} + +export function getShortenedFileName(pathString: string, maxDirLength = 15) { + const fileName = getFileName(pathString); + const dirName = getDirectoryPath(pathString); + if (dirName.length > maxDirLength) { + return `...${dirName.slice(dirName.length - maxDirLength)}${path.sep}${fileName}`; + } + return pathString; +} + +export function stripFileExtension(fileName: string, multiDotExtension = false) { + const ext = getFileExtension(fileName, multiDotExtension); + return fileName.substr(0, fileName.length - ext.length); +} + +export function normalizePath(pathString: string): string { + return normalizeSlashes(path.normalize(pathString)); +} + +// Transforms a relative file spec (one that potentially contains +// escape characters **, * or ?) and returns a regular expression +// that can be used for matching against. 
+export function getWildcardRegexPattern(rootPath: string, fileSpec: string): string { + let absolutePath = normalizePath(combinePaths(rootPath, fileSpec)); + if (!hasPythonExtension(absolutePath)) { + absolutePath = ensureTrailingDirectorySeparator(absolutePath); + } + + const pathComponents = getPathComponents(absolutePath); + + const escapedSeparator = getRegexEscapedSeparator(getPathSeparator(rootPath)); + const doubleAsteriskRegexFragment = `(${escapedSeparator}[^${escapedSeparator}][^${escapedSeparator}]*)*?`; + const reservedCharacterPattern = new RegExp(`[^\\w\\s${escapedSeparator}]`, 'g'); + + // Strip the directory separator from the root component. + if (pathComponents.length > 0) { + pathComponents[0] = stripTrailingDirectorySeparator(pathComponents[0]); + + if (pathComponents[0].startsWith('\\\\')) { + pathComponents[0] = '\\\\' + pathComponents[0]; + } + } + + let regExPattern = ''; + let firstComponent = true; + + for (let component of pathComponents) { + if (component === '**') { + regExPattern += doubleAsteriskRegexFragment; + } else { + if (!firstComponent) { + component = escapedSeparator + component; + } + + regExPattern += component.replace(reservedCharacterPattern, (match) => { + if (match === '*') { + return `[^${escapedSeparator}]*`; + } else if (match === '?') { + return `[^${escapedSeparator}]`; + } else { + // escaping anything that is not reserved characters - word/space/separator + return '\\' + match; + } + }); + + firstComponent = false; + } + } + + return regExPattern; +} + +// Determines whether the file spec contains a directory wildcard pattern ("**"). +export function isDirectoryWildcardPatternPresent(fileSpec: string): boolean { + const path = normalizePath(fileSpec); + const pathComponents = getPathComponents(path); + + for (const component of pathComponents) { + if (component === '**') { + return true; + } + } + + return false; +} + +// Returns the topmost path that contains no wildcard characters. 
+export function getWildcardRoot(rootPath: string, fileSpec: string): string { + let absolutePath = normalizePath(combinePaths(rootPath, fileSpec)); + if (!hasPythonExtension(absolutePath)) { + absolutePath = ensureTrailingDirectorySeparator(absolutePath); + } + + const pathComponents = getPathComponents(absolutePath); + const sep = getPathSeparator(absolutePath); + + // Strip the directory separator from the root component. + if (pathComponents.length > 0) { + pathComponents[0] = stripTrailingDirectorySeparator(pathComponents[0]); + } + + if (pathComponents.length === 1 && !pathComponents[0]) { + return sep; + } + + let wildcardRoot = ''; + let firstComponent = true; + + for (let component of pathComponents) { + if (component === '**') { + break; + } else { + if (component.match(_wildcardRootRegex)) { + break; + } + + if (!firstComponent) { + component = sep + component; + } + + wildcardRoot += component; + firstComponent = false; + } + } + + return wildcardRoot; +} + +export function hasPythonExtension(path: string) { + return path.endsWith('.py') || path.endsWith('.pyi'); +} + +export function getRegexEscapedSeparator(pathSep: string = path.sep) { + // we don't need to escape "/" in typescript regular expression + return pathSep === '/' ? '/' : '\\\\'; +} + +/** + * Determines whether a path is an absolute disk path (e.g. starts with `/`, or a dos path + * like `c:`, `c:\` or `c:/`). + */ +export function isRootedDiskPath(path: string) { + return getRootLength(path) > 0; +} + +/** + * Determines whether a path consists only of a path root. 
+ */ +export function isDiskPathRoot(path: string) { + const rootLength = getRootLength(path); + return rootLength > 0 && rootLength === path.length; +} + +function getAnyExtensionFromPathWorker( + path: string, + extensions: string | readonly string[], + stringEqualityComparer: (a: string, b: string) => boolean +) { + if (typeof extensions === 'string') { + return tryGetExtensionFromPath(path, extensions, stringEqualityComparer) || ''; + } + for (const extension of extensions) { + const result = tryGetExtensionFromPath(path, extension, stringEqualityComparer); + if (result) { + return result; + } + } + return ''; +} + +function tryGetExtensionFromPath( + path: string, + extension: string, + stringEqualityComparer: (a: string, b: string) => boolean +) { + if (!extension.startsWith('.')) { + extension = '.' + extension; + } + if (path.length >= extension.length && path.charCodeAt(path.length - extension.length) === Char.Period) { + const pathExtension = path.slice(path.length - extension.length); + if (stringEqualityComparer(pathExtension, extension)) { + return pathExtension; + } + } + + return undefined; +} + +function getPathComponentsRelativeTo( + from: string, + to: string, + stringEqualityComparer: (a: string, b: string) => boolean, + getCanonicalFileName: GetCanonicalFileName +) { + const fromComponents = getPathComponents(from); + const toComponents = getPathComponents(to); + + let start: number; + for (start = 0; start < fromComponents.length && start < toComponents.length; start++) { + const fromComponent = getCanonicalFileName(fromComponents[start]); + const toComponent = getCanonicalFileName(toComponents[start]); + const comparer = start === 0 ? 
equateStringsCaseInsensitive : stringEqualityComparer; + if (!comparer(fromComponent, toComponent)) { + break; + } + } + + if (start === 0) { + return toComponents; + } + + const components = toComponents.slice(start); + const relative: string[] = []; + for (; start < fromComponents.length; start++) { + relative.push('..'); + } + return ['', ...relative, ...components]; +} diff --git a/python-parser/packages/pyright-internal/src/common/positionUtils.ts b/python-parser/packages/pyright-internal/src/common/positionUtils.ts new file mode 100644 index 00000000..37ca376b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/positionUtils.ts @@ -0,0 +1,95 @@ +/* + * positionUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Utility routines for converting between file offsets and + * line/column positions. + */ + +import { TokenizerOutput } from '../parser/tokenizer'; +import { assert } from './debug'; +import { Position, Range, TextRange } from './textRange'; +import { TextRangeCollection } from './textRangeCollection'; + +// Translates a file offset into a line/column pair. +export function convertOffsetToPosition(offset: number, lines: TextRangeCollection): Position { + // Handle the case where the file is empty. + if (lines.end === 0) { + return { + line: 0, + character: 0, + }; + } + + const itemIndex = offset >= lines.end ? lines.count - 1 : lines.getItemContaining(offset); + assert(itemIndex >= 0 && itemIndex <= lines.count); + const lineRange = lines.getItemAt(itemIndex); + assert(lineRange !== undefined); + return { + line: itemIndex, + character: Math.max(0, Math.min(lineRange.length, offset - lineRange.start)), + }; +} + +// Translates a start/end file offset into a pair of line/column positions. 
+export function convertOffsetsToRange( + startOffset: number, + endOffset: number, + lines: TextRangeCollection +): Range { + const start = convertOffsetToPosition(startOffset, lines); + const end = convertOffsetToPosition(endOffset, lines); + return { start, end }; +} + +// Translates a position (line and col) into a file offset. +export function convertPositionToOffset(position: Position, lines: TextRangeCollection): number | undefined { + if (position.line >= lines.count) { + return undefined; + } + + return lines.getItemAt(position.line).start + position.character; +} + +export function convertRangeToTextRange(range: Range, lines: TextRangeCollection): TextRange | undefined { + const start = convertPositionToOffset(range.start, lines); + if (start === undefined) { + return undefined; + } + + const end = convertPositionToOffset(range.end, lines); + if (end === undefined) { + return undefined; + } + + return TextRange.fromBounds(start, end); +} + +export function convertTextRangeToRange(range: TextRange, lines: TextRangeCollection): Range { + return convertOffsetsToRange(range.start, TextRange.getEnd(range), lines); +} + +// Returns the position of the last character in a line (before the newline). +export function getLineEndPosition(tokenizerOutput: TokenizerOutput, text: string, line: number): Position { + return convertOffsetToPosition(getLineEndOffset(tokenizerOutput, text, line), tokenizerOutput.lines); +} + +export function getLineEndOffset(tokenizerOutput: TokenizerOutput, text: string, line: number): number { + const lineRange = tokenizerOutput.lines.getItemAt(line); + + const lineEndOffset = TextRange.getEnd(lineRange); + let newLineLength = 0; + for (let i = lineEndOffset - 1; i >= lineRange.start; i--) { + const char = text[i]; + if (char !== '\r' && char !== '\n') { + break; + } + + newLineLength++; + } + + // Character should be at the end of the line but before the newline. 
+ return lineEndOffset - newLineLength; +} diff --git a/python-parser/packages/pyright-internal/src/common/processUtils.ts b/python-parser/packages/pyright-internal/src/common/processUtils.ts new file mode 100644 index 00000000..6799a334 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/processUtils.ts @@ -0,0 +1,32 @@ +/* + * processUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Utility routines for dealing with node processes. + */ +import * as child_process from 'child_process'; + +export function terminateProcessTree(pid: number) { + try { + if (process.platform === 'win32') { + // Windows doesn't support SIGTERM, so execute taskkill to kill the process + child_process.execSync(`taskkill /pid ${pid} /T /F > NUL 2>&1`); + } else { + // Send SIGTERM to the process and all its children + process.kill(pid, 'SIGTERM'); + } + } catch { + // Ignore. + } +} + +export function terminateChild(child: child_process.ChildProcess) { + try { + if (child.pid && child.exitCode === null) { + terminateProcessTree(child.pid); + } + } catch { + // Ignore. + } +} diff --git a/python-parser/packages/pyright-internal/src/common/progressReporter.ts b/python-parser/packages/pyright-internal/src/common/progressReporter.ts new file mode 100644 index 00000000..47c14570 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/progressReporter.ts @@ -0,0 +1,61 @@ +/* + * progressReporter.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Implements a mechanism for reporting progress in a language server client. + */ + +export interface ProgressReporter { + isEnabled(data: any): boolean; + isDisplayingProgress(): boolean; + begin(): void; + report(message: string): void; + end(): void; +} + +export class ProgressReportTracker implements ProgressReporter { + // Tracks whether we're currently displaying progress. 
+ private _isDisplayingProgress = false; + + constructor(private _reporter: ProgressReporter) {} + + isDisplayingProgress(): boolean { + return this._isDisplayingProgress; + } + + isEnabled(data: any): boolean { + if (this._isDisplayingProgress) { + return true; + } + + return this._reporter.isEnabled(data) ?? false; + } + + begin(): void { + if (this._isDisplayingProgress) { + return; + } + + this._isDisplayingProgress = true; + this._reporter.begin(); + } + + report(message: string): void { + if (!this._isDisplayingProgress) { + return; + } + + this._reporter.report(message); + } + + end(): void { + if (!this._isDisplayingProgress) { + return; + } + + this._isDisplayingProgress = false; + this._reporter.end(); + } +} diff --git a/python-parser/packages/pyright-internal/src/common/pythonVersion.ts b/python-parser/packages/pyright-internal/src/common/pythonVersion.ts new file mode 100644 index 00000000..c10b0486 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/pythonVersion.ts @@ -0,0 +1,219 @@ +/* + * pythonLanguageVersion.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Types and functions that relate to the Python language version. 
+ */ + +export type PythonReleaseLevel = 'alpha' | 'beta' | 'candidate' | 'final'; + +export interface PythonVersion { + major: number; + minor: number; + micro?: number; + releaseLevel?: PythonReleaseLevel; + serial?: number; +} + +export namespace PythonVersion { + export function is(value: any): value is PythonVersion { + if (value && typeof value.major === 'number' && typeof value.minor === 'number') { + if (value.micro !== undefined && typeof value.micro !== 'number') { + return false; + } + if (value.releaseLevel !== undefined && typeof value.releaseLevel !== 'string') { + return false; + } + if (value.serial !== undefined && typeof value.serial !== 'number') { + return false; + } + return true; + } + + return false; + } + export function create( + major: number, + minor: number, + micro?: number, + releaseLevel?: PythonReleaseLevel, + serial?: number + ): PythonVersion { + return { + major, + minor, + micro, + releaseLevel, + serial, + }; + } + + export function isEqualTo(version: PythonVersion, other: PythonVersion) { + if (version.major !== other.major || version.minor !== other.minor) { + return false; + } + + if (version.micro === undefined || other.micro === undefined) { + return true; + } else if (version.micro !== other.micro) { + return false; + } + + if (version.releaseLevel === undefined || other.releaseLevel === undefined) { + return true; + } else if (version.releaseLevel !== other.releaseLevel) { + return false; + } + + if (version.serial === undefined || other.serial === undefined) { + return true; + } else if (version.serial !== other.serial) { + return false; + } + + return true; + } + + export function isGreaterThan(version: PythonVersion, other: PythonVersion) { + if (version.major > other.major) { + return true; + } else if (version.major < other.major) { + return false; + } + + if (version.minor > other.minor) { + return true; + } else if (version.minor < other.minor) { + return false; + } + + if (version.micro === undefined || 
other.micro === undefined || version.micro < other.micro) { + return false; + } else if (version.micro > other.micro) { + return true; + } + + // We leverage the fact that the alphabetical ordering + // of the release level designators are ordered by increasing + // release level. + if ( + version.releaseLevel === undefined || + other.releaseLevel === undefined || + version.releaseLevel < other.releaseLevel + ) { + return false; + } else if (version.releaseLevel > other.releaseLevel) { + return true; + } + + if (version.serial === undefined || other.serial === undefined || version.serial < other.serial) { + return false; + } else if (version.serial > other.serial) { + return true; + } + + // They are exactly equal! + return false; + } + + export function isGreaterOrEqualTo(version: PythonVersion, other: PythonVersion) { + return isEqualTo(version, other) || isGreaterThan(version, other); + } + + export function isLessThan(version: PythonVersion, other: PythonVersion) { + return !isGreaterOrEqualTo(version, other); + } + + export function isLessOrEqualTo(version: PythonVersion, other: PythonVersion) { + return !isGreaterThan(version, other); + } + + export function toMajorMinorString(version: PythonVersion): string { + return `${version.major}.${version.minor}`; + } + + export function toString(version: PythonVersion): string { + let versString = toMajorMinorString(version); + + if (version.micro === undefined) { + return versString; + } + + versString += `.${version.micro}`; + + if (version.releaseLevel === undefined) { + return versString; + } + + versString += `.${version.releaseLevel}`; + + if (version.serial === undefined) { + return versString; + } + + versString += `.${version.serial}`; + return versString; + } + + export function fromString(val: string): PythonVersion | undefined { + const split = val.split('.'); + + if (split.length < 2) { + return undefined; + } + + const major = parseInt(split[0], 10); + const minor = parseInt(split[1], 10); + + if 
(isNaN(major) || isNaN(minor)) { + return undefined; + } + + let micro: number | undefined; + if (split.length >= 3) { + micro = parseInt(split[2], 10); + if (isNaN(micro)) { + micro = undefined; + } + } + + let releaseLevel: PythonReleaseLevel | undefined; + if (split.length >= 4) { + const releaseLevels: PythonReleaseLevel[] = ['alpha', 'beta', 'candidate', 'final']; + if (releaseLevels.some((level) => level === split[3])) { + releaseLevel = split[3] as PythonReleaseLevel; + } + } + + let serial: number | undefined; + if (split.length >= 5) { + serial = parseInt(split[4], 10); + if (isNaN(serial)) { + serial = undefined; + } + } + + return create(major, minor, micro, releaseLevel, serial); + } +} + +// Predefine some versions. +export const pythonVersion3_0 = PythonVersion.create(3, 0); +export const pythonVersion3_1 = PythonVersion.create(3, 1); +export const pythonVersion3_2 = PythonVersion.create(3, 2); +export const pythonVersion3_3 = PythonVersion.create(3, 3); +export const pythonVersion3_4 = PythonVersion.create(3, 4); +export const pythonVersion3_5 = PythonVersion.create(3, 5); +export const pythonVersion3_6 = PythonVersion.create(3, 6); +export const pythonVersion3_7 = PythonVersion.create(3, 7); +export const pythonVersion3_8 = PythonVersion.create(3, 8); +export const pythonVersion3_9 = PythonVersion.create(3, 9); +export const pythonVersion3_10 = PythonVersion.create(3, 10); +export const pythonVersion3_11 = PythonVersion.create(3, 11); +export const pythonVersion3_12 = PythonVersion.create(3, 12); +export const pythonVersion3_13 = PythonVersion.create(3, 13); +export const pythonVersion3_14 = PythonVersion.create(3, 14); + +export const latestStablePythonVersion = pythonVersion3_14; diff --git a/python-parser/packages/pyright-internal/src/common/realFileSystem.ts b/python-parser/packages/pyright-internal/src/common/realFileSystem.ts new file mode 100644 index 00000000..c1eeb850 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/common/realFileSystem.ts @@ -0,0 +1,608 @@ +/* + * realFileSystem.ts + * + * Helper functions that require real filesystem access. + */ + +import { FakeFS, NativePath, PortablePath, PosixFS, ppath, VirtualFS, ZipFS, ZipOpenFS } from '@yarnpkg/fslib'; +import { getLibzipSync } from '@yarnpkg/libzip'; +import * as fs from 'fs'; +import * as tmp from 'tmp'; +import { isMainThread } from 'worker_threads'; + +import { Disposable } from 'vscode-jsonrpc'; +import { CaseSensitivityDetector } from './caseSensitivityDetector'; +import { ConsoleInterface, NullConsole } from './console'; +import { randomBytesHex } from './crypto'; +import { FileSystem, MkDirOptions, TempFile, TmpfileOptions } from './fileSystem'; +import { + FileWatcher, + FileWatcherEventHandler, + FileWatcherEventType, + FileWatcherHandler, + FileWatcherProvider, + nullFileWatcherProvider, +} from './fileWatcher'; +import { combinePaths, getRootLength } from './pathUtils'; +import { FileUri, FileUriSchema } from './uri/fileUri'; +import { Uri } from './uri/uri'; +import { getRootUri, UriEx } from './uri/uriUtils'; + +// Automatically remove files created by tmp at process exit. +tmp.setGracefulCleanup(); + +// Callers can specify a different file watcher provider if desired. +// By default, we'll use the file watcher based on chokidar. +export function createFromRealFileSystem( + caseSensitiveDetector: CaseSensitivityDetector, + console?: ConsoleInterface, + fileWatcherProvider?: FileWatcherProvider +): FileSystem { + return new RealFileSystem( + caseSensitiveDetector, + console ?? new NullConsole(), + fileWatcherProvider ?? nullFileWatcherProvider + ); +} + +const DOT_ZIP = `.zip`; +const DOT_EGG = `.egg`; +const DOT_JAR = `.jar`; + +const zipPathRegEx = /[^\\/]\.(?:egg|zip|jar)[\\/]/; + +// Exactly the same as ZipOpenFS's getArchivePart, but supporting .egg files. 
+// https://github.com/yarnpkg/berry/blob/64a16b3603ef2ccb741d3c44f109c9cfc14ba8dd/packages/yarnpkg-fslib/sources/ZipOpenFS.ts#L23 +function getArchivePart(path: string) { + let idx = path.indexOf(DOT_ZIP); + if (idx <= 0) { + idx = path.indexOf(DOT_EGG); + if (idx <= 0) { + idx = path.indexOf(DOT_JAR); + if (idx <= 0) { + return null; + } + } + } + + // Disallow files named ".zip" + if (path[idx - 1] === ppath.sep) return null; + + const nextCharIdx = idx + DOT_ZIP.length; // DOT_ZIP and DOT_EGG are the same length. + + // The path either has to end in ".zip" or contain an archive subpath (".zip/...") + if (path.length > nextCharIdx && path[nextCharIdx] !== ppath.sep) return null; + + return path.slice(0, nextCharIdx) as PortablePath; +} + +function hasZipExtension(p: string): boolean { + return p.endsWith(DOT_ZIP) || p.endsWith(DOT_EGG) || p.endsWith(DOT_JAR); +} + +// "Magic" values for the zip file type. https://en.wikipedia.org/wiki/List_of_file_signatures +const zipMagic = [ + Buffer.from([0x50, 0x4b, 0x03, 0x04]), + Buffer.from([0x50, 0x4b, 0x05, 0x06]), + Buffer.from([0x50, 0x4b, 0x07, 0x08]), +]; + +function hasZipMagic(fs: FakeFS, p: PortablePath): boolean { + let fd: number | undefined; + try { + fd = fs.openSync(p, 'r'); + const buffer = Buffer.alloc(4); + const bytesRead = fs.readSync(fd, buffer, 0, 4, 0); + if (bytesRead < 4) { + return false; + } + + for (const magic of zipMagic) { + if (buffer.compare(magic) === 0) { + return true; + } + } + + return false; + } catch { + return false; + } finally { + if (fd !== undefined) { + fs.closeSync(fd); + } + } +} + +/* eslint-disable @typescript-eslint/naming-convention */ + +// Patch fslib's ZipOpenFS to also consider .egg files to be .zip files. 
+// +// For now, override findZip (even though it's private), with the intent +// to upstream a change to allow overriding getArchivePart or add some +// other mechanism to support more extensions as zips (or, to remove this +// hack in favor of a full ZipOpenFS fork). +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +//@ts-expect-error +class EggZipOpenFS extends ZipOpenFS { + override findZip(p: PortablePath) { + // Access private fields from parent via any cast since they're not accessible in subclass + const baseFs = (this as any).baseFs as FakeFS; + const filter = (this as any).filter as RegExp | null; + const isZip = (this as any).isZip as Set; + const notZip = (this as any).notZip as Set; + + if (filter && !filter.test(p)) return null; + + let filePath = `` as PortablePath; + + while (true) { + const archivePart = getArchivePart(p.substr(filePath.length)); + if (!archivePart) return null; + + filePath = this.pathUtils.join(filePath, archivePart); + + if (isZip.has(filePath) === false) { + if (notZip.has(filePath)) continue; + + try { + if (!baseFs.lstatSync(filePath).isFile()) { + notZip.add(filePath); + continue; + } + + if (!hasZipMagic(baseFs, filePath)) { + notZip.add(filePath); + continue; + } + + try { + // We're pretty sure that it's a zip at this point (it has the magic), but + // try accessing the zipfile anyway; if it's corrupt in some way, this will throw. + // We don't need to do anything with the ZipFS instance given to the callback + // below; ZipOpenFS already manages their lifetimes and we're very likely to + // immediately call back into the FS to obtain info from the zip anyway. 
+ // eslint-disable-next-line @typescript-eslint/no-empty-function + this.getZipSync(filePath, () => {}); + } catch { + notZip.add(filePath); + continue; + } + } catch { + return null; + } + + isZip.add(filePath); + } + + return { + archivePath: filePath, + subPath: this.pathUtils.join(PortablePath.root, p.substr(filePath.length) as PortablePath), + }; + } + } + + // Hack to provide typed access to this private method. + private override getZipSync(p: PortablePath, accept: (zipFs: ZipFS) => T): T { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + //@ts-expect-error + return super.getZipSync(p, accept); + } +} + +/* eslint-enable @typescript-eslint/naming-convention */ + +class YarnFS extends PosixFS { + private readonly _eggZipOpenFS: EggZipOpenFS; + + constructor() { + const eggZipOpenFS = new EggZipOpenFS({ + libzip: () => getLibzipSync(), + useCache: true, + maxOpenFiles: 80, + readOnlyArchives: true, + }); + + super( + new VirtualFS({ + baseFs: eggZipOpenFS, + }) + ); + + this._eggZipOpenFS = eggZipOpenFS; + } + + isZip(p: NativePath): boolean { + return !!this._eggZipOpenFS.findZip(this.mapToBase(p)); + } +} + +const yarnFS = new YarnFS(); + +// Use `createFromRealFileSystem` instead of `new RealFileSystem` +// unless you are creating a new file system that inherits from `RealFileSystem` +export class RealFileSystem implements FileSystem { + constructor( + private readonly _caseSensitiveDetector: CaseSensitivityDetector, + private readonly _console: ConsoleInterface, + private readonly _fileWatcherProvider: FileWatcherProvider + ) { + // Empty + } + + existsSync(uri: Uri) { + if (uri.isEmpty() || !FileUri.isFileUri(uri)) { + return false; + } + const path = uri.getFilePath(); + try { + // Catch zip open errors. existsSync is assumed to never throw by callers. 
+ return yarnFS.existsSync(path); + } catch { + return false; + } + } + + mkdirSync(uri: Uri, options?: MkDirOptions) { + const path = uri.getFilePath(); + yarnFS.mkdirSync(path, options); + } + + chdir(uri: Uri) { + const path = uri.getFilePath(); + // If this file system happens to be running in a worker thread, + // then we can't call 'chdir'. + if (isMainThread) { + process.chdir(path); + } + } + + readdirSync(uri: Uri): string[] { + const path = uri.getFilePath(); + return yarnFS.readdirSync(path); + } + + readdirEntriesSync(uri: Uri): fs.Dirent[] { + const path = uri.getFilePath(); + return yarnFS.readdirSync(path, { withFileTypes: true }).map((entry): fs.Dirent => { + // Treat zip/egg files as directories. + // See: https://github.com/yarnpkg/berry/blob/master/packages/vscode-zipfs/sources/ZipFSProvider.ts + if (hasZipExtension(entry.name)) { + if (entry.isFile() && yarnFS.isZip(path)) { + return { + name: entry.name, + parentPath: path, + path: path, + isFile: () => false, + isDirectory: () => true, + isBlockDevice: () => false, + isCharacterDevice: () => false, + isSymbolicLink: () => false, + isFIFO: () => false, + isSocket: () => false, + }; + } + } + return entry; + }); + } + + readFileSync(uri: Uri, encoding?: null): Buffer; + readFileSync(uri: Uri, encoding: BufferEncoding): string; + readFileSync(uri: Uri, encoding?: BufferEncoding | null): Buffer | string; + readFileSync(uri: Uri, encoding: BufferEncoding | null = null) { + const path = uri.getFilePath(); + if (encoding === 'utf8' || encoding === 'utf-8') { + return yarnFS.readFileSync(path, 'utf8'); + } + return yarnFS.readFileSync(path); + } + + writeFileSync(uri: Uri, data: string | Buffer, encoding: BufferEncoding | null) { + const path = uri.getFilePath(); + yarnFS.writeFileSync(path, data, encoding || undefined); + } + + statSync(uri: Uri): fs.Stats { + if (FileUri.isFileUri(uri)) { + const path = uri.getFilePath(); + const stat = yarnFS.statSync(path); + // Treat zip/egg files as directories. 
+ // See: https://github.com/yarnpkg/berry/blob/master/packages/vscode-zipfs/sources/ZipFSProvider.ts + if (hasZipExtension(path)) { + if (stat.isFile() && yarnFS.isZip(path)) { + stat.isFile = () => false; + stat.isDirectory = () => true; + (stat as any).isZipDirectory = () => true; + return stat; + } + } + return stat; + } else { + return { + isFile: () => false, + isDirectory: () => false, + isBlockDevice: () => false, + isCharacterDevice: () => false, + isSymbolicLink: () => false, + isFIFO: () => false, + isSocket: () => false, + dev: 0, + atimeMs: 0, + mtimeMs: 0, + ctimeMs: 0, + birthtimeMs: 0, + size: 0, + blksize: 0, + blocks: 0, + ino: 0, + mode: 0, + nlink: 0, + uid: 0, + gid: 0, + rdev: 0, + atime: new Date(), + mtime: new Date(), + ctime: new Date(), + birthtime: new Date(), + }; + } + } + + rmdirSync(uri: Uri): void { + const path = uri.getFilePath(); + yarnFS.rmdirSync(path); + } + + unlinkSync(uri: Uri) { + const path = uri.getFilePath(); + yarnFS.unlinkSync(path); + } + + realpathSync(uri: Uri) { + try { + const path = uri.getFilePath(); + return Uri.file(yarnFS.realpathSync(path), this._caseSensitiveDetector); + } catch (e: any) { + return uri; + } + } + + getModulePath(): Uri { + // The entry point to the tool should have set the __rootDirectory + // global variable to point to the directory that contains the + // typeshed-fallback directory. 
+ return getRootUri(this._caseSensitiveDetector) || Uri.empty(); + } + + createFileSystemWatcher(paths: Uri[], listener: FileWatcherEventHandler): FileWatcher { + return this._fileWatcherProvider.createFileWatcher( + paths.map((p) => p.getFilePath()), + listener + ); + } + + createReadStream(uri: Uri): fs.ReadStream { + const path = uri.getFilePath(); + return yarnFS.createReadStream(path); + } + + createWriteStream(uri: Uri): fs.WriteStream { + const path = uri.getFilePath(); + return yarnFS.createWriteStream(path); + } + + copyFileSync(src: Uri, dst: Uri): void { + const srcPath = src.getFilePath(); + const destPath = dst.getFilePath(); + yarnFS.copyFileSync(srcPath, destPath); + } + + readFile(uri: Uri): Promise { + const path = uri.getFilePath(); + return yarnFS.readFilePromise(path); + } + + async readFileText(uri: Uri, encoding: BufferEncoding): Promise { + const path = uri.getFilePath(); + if (encoding === 'utf8' || encoding === 'utf-8') { + return yarnFS.readFilePromise(path, 'utf8'); + } + const buffer = await yarnFS.readFilePromise(path); + return buffer.toString(encoding); + } + + realCasePath(uri: Uri): Uri { + try { + // If it doesn't exist in the real FS, then just use this path. + if (!this.existsSync(uri)) { + return uri; + } + + // realpathSync.native will return casing as in OS rather than + // trying to preserve casing given. + const realCase = fs.realpathSync.native(uri.getFilePath()); + + // If the original and real case paths differ by anything other than case, + // then there's a symbolic link or something of that sort involved. Return + // the original path instead. + if (uri.getFilePath().toLowerCase() !== realCase.toLowerCase()) { + return uri; + } + + // On UNC mapped drives we want to keep the original drive letter. + if (getRootLength(realCase) !== getRootLength(uri.getFilePath())) { + return uri; + } + + return Uri.file(realCase, this._caseSensitiveDetector); + } catch (e: any) { + // Return as it is, if anything failed. 
+ this._console.log(`Failed to get real file system casing for ${uri}: ${e}`); + + return uri; + } + } + + isMappedUri(uri: Uri): boolean { + return false; + } + + getOriginalUri(mappedUri: Uri) { + return mappedUri; + } + + getMappedUri(originalUri: Uri) { + return originalUri; + } + + mapDirectory(mappedUri: Uri, originalUri: Uri, filter?: (originalUri: Uri, fs: FileSystem) => boolean): Disposable { + // Not supported at this level. + return { + dispose: () => { + // Do nothing. + }, + }; + } + + isInZip(uri: Uri): boolean { + const path = uri.getFilePath(); + return zipPathRegEx.test(path) && yarnFS.isZip(path); + } +} + +interface WorkspaceFileWatcher extends FileWatcher { + // Paths that are being watched within the workspace + workspacePaths: string[]; + + // Event handler to call + eventHandler: FileWatcherEventHandler; +} + +export class WorkspaceFileWatcherProvider implements FileWatcherProvider, FileWatcherHandler { + private _fileWatchers: WorkspaceFileWatcher[] = []; + + createFileWatcher(workspacePaths: string[], listener: FileWatcherEventHandler): FileWatcher { + const self = this; + const fileWatcher: WorkspaceFileWatcher = { + close() { + // Stop listening for workspace paths. + self._fileWatchers = self._fileWatchers.filter((watcher) => watcher !== fileWatcher); + }, + workspacePaths, + eventHandler: listener, + }; + + // Record the file watcher. + self._fileWatchers.push(fileWatcher); + + return fileWatcher; + } + + onFileChange(eventType: FileWatcherEventType, fileUri: Uri): void { + // Since file watcher is a server wide service, we don't know which watcher is + // for which workspace (for multi workspace case), also, we don't know which watcher + // is for source or library. so we need to solely rely on paths that can cause us + // to raise events both for source and library if .venv is inside of workspace root + // for a file change. It is event handler's job to filter those out. 
+ this._fileWatchers.forEach((watcher) => { + const dirUris = watcher.workspacePaths.map((d) => UriEx.file(d, fileUri.isCaseSensitive)); + if (dirUris.some((dir) => fileUri.startsWith(dir))) { + watcher.eventHandler(eventType, fileUri.getFilePath()); + } + }); + } +} + +export class RealTempFile implements TempFile, CaseSensitivityDetector { + private _caseSensitivity?: boolean; + private _tmpdir?: tmp.DirResult; + + constructor(name?: string) { + if (name) { + this._tmpdir = { + name, + removeCallback: () => { + // If a name is provided, the temp folder is not managed by this instance. + // Do nothing. + }, + }; + } + } + + tmpdir(): Uri { + return Uri.file(this._getTmpDir().name, this); + } + + tmpfile(options?: TmpfileOptions): Uri { + const f = tmp.fileSync({ dir: this._getTmpDir().name, discardDescriptor: true, ...options }); + return Uri.file(f.name, this); + } + + mktmpdir(): Uri { + const d = tmp.dirSync(); + return Uri.file(d.name, this); + } + + dispose(): void { + try { + this._tmpdir?.removeCallback(); + this._tmpdir = undefined; + } catch { + // ignore + } + } + + isCaseSensitive(uri: string): boolean { + if (uri.startsWith(FileUriSchema)) { + return this._isLocalFileSystemCaseSensitive(); + } + + return true; + } + + private _isLocalFileSystemCaseSensitive() { + if (this._caseSensitivity === undefined) { + this._caseSensitivity = this._isFileSystemCaseSensitiveInternal(); + } + + return this._caseSensitivity; + } + + private _getTmpDir(): tmp.DirResult { + if (!this._tmpdir) { + this._tmpdir = tmp.dirSync({ prefix: 'pyright' }); + } + + return this._tmpdir; + } + + private _isFileSystemCaseSensitiveInternal() { + let filePath: string | undefined = undefined; + try { + // Make unique file name. 
+ let name: string; + let mangledFilePath: string; + do { + name = `${randomBytesHex(21)}-a`; + filePath = combinePaths(this._getTmpDir().name, name); + mangledFilePath = combinePaths(this._getTmpDir().name, name.toUpperCase()); + } while (fs.existsSync(filePath) || fs.existsSync(mangledFilePath)); + + fs.writeFileSync(filePath, '', 'utf8'); + + // If file exists, then it is insensitive. + return !fs.existsSync(mangledFilePath); + } catch (e: any) { + return false; + } finally { + if (filePath) { + // remove temp file created + try { + fs.unlinkSync(filePath); + } catch (e: any) { + /* ignored */ + } + } + } + } +} diff --git a/python-parser/packages/pyright-internal/src/common/serviceKeys.ts b/python-parser/packages/pyright-internal/src/common/serviceKeys.ts new file mode 100644 index 00000000..3617bc2b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/serviceKeys.ts @@ -0,0 +1,44 @@ +/* + * serviceKeys.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Define service keys. 
+ */ + +import { CacheManager } from '../analyzer/cacheManager'; +import { ISourceFileFactory } from '../analyzer/programTypes'; +import { SupportPartialStubs } from '../partialStubService'; +import { CancellationProvider } from './cancellationUtils'; +import { CaseSensitivityDetector } from './caseSensitivityDetector'; +import { ConsoleInterface } from './console'; +import { DocStringService } from './docStringService'; +import { + DebugInfoInspector, + StatusMutationListener, + SymbolDefinitionProvider, + SymbolUsageProviderFactory, +} from './extensibility'; +import { FileSystem, TempFile } from './fileSystem'; +import { CommandService, WindowService } from './languageServerInterface'; +import { GroupServiceKey, ServiceKey } from './serviceProvider'; + +export namespace ServiceKeys { + export const fs = new ServiceKey('fs'); + export const console = new ServiceKey('ConsoleInterface'); + export const sourceFileFactory = new ServiceKey('ISourceFileFactory'); + export const partialStubs = new ServiceKey('SupportPartialStubs'); + export const symbolDefinitionProvider = new GroupServiceKey('SymbolDefinitionProvider'); + export const symbolUsageProviderFactory = new GroupServiceKey( + 'SymbolUsageProviderFactory' + ); + export const stateMutationListeners = new GroupServiceKey('StatusMutationListener'); + export const tempFile = new ServiceKey('TempFile'); + export const cacheManager = new ServiceKey('CacheManager'); + export const debugInfoInspector = new ServiceKey('DebugInfoInspector'); + export const caseSensitivityDetector = new ServiceKey('CaseSensitivityDetector'); + export const docStringService = new ServiceKey('DocStringService'); + export const windowService = new ServiceKey('WindowService'); + export const commandService = new ServiceKey('CommandService'); + export const cancellationProvider = new ServiceKey('CancellationProvider'); +} diff --git a/python-parser/packages/pyright-internal/src/common/serviceProvider.ts 
b/python-parser/packages/pyright-internal/src/common/serviceProvider.ts new file mode 100644 index 00000000..182f38b6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/serviceProvider.ts @@ -0,0 +1,165 @@ +/* + * serviceProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Container for different services used within the application. + */ + +import { addIfUnique, removeArrayElements } from './collectionUtils'; +import { Disposable } from './core'; +import * as debug from './debug'; + +abstract class InternalKey { + abstract readonly kind: 'singleton' | 'group'; + abstract readonly id: string; +} + +/** + * Key for singleton service T. + */ +// eslint-disable-next-line @typescript-eslint/no-unused-vars +export class ServiceKey extends InternalKey { + readonly kind = 'singleton'; + constructor(readonly id: string) { + super(); + } +} + +/** + * Key for group of service T. + */ +// eslint-disable-next-line @typescript-eslint/no-unused-vars +export class GroupServiceKey extends InternalKey { + readonly kind = 'group'; + constructor(readonly id: string) { + super(); + } +} + +export type AllServiceKeys = ServiceKey | GroupServiceKey; + +export class ServiceProvider { + private _container = new Map(); + private _disposed = false; + + get disposed() { + return this._disposed; + } + add(key: ServiceKey, value: T | undefined): void; + add(key: GroupServiceKey, value: T): void; + add(key: AllServiceKeys, value: T | undefined): void { + if (key.kind === 'group') { + this._addGroupService(key, value); + return; + } + + if (key.kind === 'singleton') { + if (value !== undefined) { + this._container.set(key.id, value); + } else { + this.remove(key); + } + return; + } + + debug.assertNever(key, `Unknown key type ${typeof key}`); + } + + remove(key: ServiceKey): void; + remove(key: GroupServiceKey, value: T): void; + remove(key: AllServiceKeys, value?: T): void { + if (key.kind === 'group') { + 
this._removeGroupService(key, value); + return; + } + + if (key.kind === 'singleton') { + this._container.delete(key.id); + return; + } + + debug.assertNever(key, `Unknown key type ${typeof key}`); + } + + tryGet(key: ServiceKey): T | undefined; + tryGet(key: GroupServiceKey): readonly T[] | undefined; + tryGet(key: AllServiceKeys): T | readonly T[] | undefined { + return this._container.get(key.id); + } + + get(key: ServiceKey): T; + get(key: GroupServiceKey): readonly T[]; + get(key: AllServiceKeys): T | readonly T[] { + const value = key.kind === 'group' ? this.tryGet(key) : this.tryGet(key); + if (value === undefined) { + throw new Error(`Global service provider not initialized for ${key.id}.`); + } + + return value; + } + + clone() { + const serviceProvider = new ServiceProvider(); + this._container.forEach((value, key) => { + if (Array.isArray(value)) { + serviceProvider._container.set(key, [...(value ?? [])]); + } else if (value.clone !== undefined) { + serviceProvider._container.set(key, value.clone()); + } else { + serviceProvider._container.set(key, value); + } + }); + + return serviceProvider; + } + + dispose() { + if (this._disposed) { + return; + } + this._disposed = true; + for (const service of this._container.values()) { + if (Disposable.is(service)) { + service.dispose(); + } + } + + // Clear out everything except for some essential services, + // as those may be needed during shutdown. 
+ const essentialList = ['fs', 'CaseSensitivityDetector', 'TempFile']; + const essentials = new Map(); + for (const key of essentialList) { + const service = this._container.get(key); + if (service) { + essentials.set(key, service); + } + } + this._container.clear(); + essentials.forEach((value, key) => { + this._container.set(key, value); + }); + } + + private _addGroupService(key: GroupServiceKey, newValue: T | undefined) { + // Explicitly cast to remove `readonly` + const services = this.tryGet(key) as T[] | undefined; + if (services === undefined) { + this._container.set(key.id, [newValue]); + return; + } + + if (newValue !== undefined) { + addIfUnique(services, newValue); + } + } + + private _removeGroupService(key: GroupServiceKey, oldValue: T) { + const services = this.tryGet(key) as T[]; + if (services === undefined) { + return; + } + + removeArrayElements(services, (s) => s === oldValue); + } +} diff --git a/python-parser/packages/pyright-internal/src/common/serviceProviderExtensions.ts b/python-parser/packages/pyright-internal/src/common/serviceProviderExtensions.ts new file mode 100644 index 00000000..477f1756 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/serviceProviderExtensions.ts @@ -0,0 +1,143 @@ +/* + * serviceProviderExtensions.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Shortcuts to common services. 
+ */ +import { CacheManager } from '../analyzer/cacheManager'; +import { ISourceFileFactory } from '../analyzer/programTypes'; +import { IPythonMode, SourceFile, SourceFileEditMode } from '../analyzer/sourceFile'; +import { PartialStubService, SupportPartialStubs } from '../partialStubService'; +import { CancellationProvider, DefaultCancellationProvider } from './cancellationUtils'; +import { CaseSensitivityDetector } from './caseSensitivityDetector'; +import { ConsoleInterface, NullConsole } from './console'; +import { DocStringService, PyrightDocStringService } from './docStringService'; +import { FileSystem, TempFile } from './fileSystem'; +import { CommandService, WindowService } from './languageServerInterface'; +import { LogTracker } from './logTracker'; +import { ServiceKeys } from './serviceKeys'; +import { ServiceProvider } from './serviceProvider'; +import { Uri } from './uri/uri'; + +declare module './serviceProvider' { + interface ServiceProvider { + fs(): FileSystem; + console(): ConsoleInterface; + cancellationProvider(): CancellationProvider; + tmp(): TempFile | undefined; + sourceFileFactory(): ISourceFileFactory; + partialStubs(): SupportPartialStubs; + cacheManager(): CacheManager | undefined; + docStringService(): DocStringService; + } +} + +export function createServiceProvider(...services: any): ServiceProvider { + const sp = new ServiceProvider(); + + // For known interfaces, register the service. 
+ services.forEach((service: any) => { + if (FileSystem.is(service)) { + sp.add(ServiceKeys.fs, service); + } + if (ConsoleInterface.is(service)) { + sp.add(ServiceKeys.console, service); + } + if (ISourceFileFactory.is(service)) { + sp.add(ServiceKeys.sourceFileFactory, service); + } + if (SupportPartialStubs.is(service)) { + sp.add(ServiceKeys.partialStubs, service); + } + if (TempFile.is(service)) { + sp.add(ServiceKeys.tempFile, service); + } + if (CaseSensitivityDetector.is(service)) { + sp.add(ServiceKeys.caseSensitivityDetector, service); + } + if (CacheManager.is(service)) { + sp.add(ServiceKeys.cacheManager, service); + } + if (DocStringService.is(service)) { + sp.add(ServiceKeys.docStringService, service); + } + if (WindowService.is(service)) { + sp.add(ServiceKeys.windowService, service); + } + if (CommandService.is(service)) { + sp.add(ServiceKeys.commandService, service); + } + if (CancellationProvider.is(service)) { + sp.add(ServiceKeys.cancellationProvider, service); + } + }); + return sp; +} + +ServiceProvider.prototype.fs = function () { + return this.get(ServiceKeys.fs); +}; +ServiceProvider.prototype.console = function () { + const cons = this.tryGet(ServiceKeys.console); + if (!cons && this.disposed) { + // During shutdown this can be undefined, so create a default console. + this.add(ServiceKeys.console, new NullConsole()); + } + return this.get(ServiceKeys.console); +}; +ServiceProvider.prototype.partialStubs = function () { + const result = this.tryGet(ServiceKeys.partialStubs); + if (!result) { + this.add(ServiceKeys.partialStubs, new PartialStubService(this.fs())); + } + return this.get(ServiceKeys.partialStubs); +}; +ServiceProvider.prototype.tmp = function () { + return this.tryGet(ServiceKeys.tempFile); +}; + +ServiceProvider.prototype.cancellationProvider = function () { + return this.tryGet(ServiceKeys.cancellationProvider) ?? 
new DefaultCancellationProvider(); +}; + +ServiceProvider.prototype.sourceFileFactory = function () { + const result = this.tryGet(ServiceKeys.sourceFileFactory); + return result || DefaultSourceFileFactory; +}; + +ServiceProvider.prototype.docStringService = function () { + const result = this.tryGet(ServiceKeys.docStringService); + return result || new PyrightDocStringService(); +}; + +ServiceProvider.prototype.cacheManager = function () { + const result = this.tryGet(ServiceKeys.cacheManager); + return result; +}; + +const DefaultSourceFileFactory: ISourceFileFactory = { + createSourceFile( + serviceProvider: ServiceProvider, + fileUri: Uri, + moduleNameGetter: (file: Uri) => string, + isThirdPartyImport: boolean, + isThirdPartyPyTypedPresent: boolean, + editMode: SourceFileEditMode, + console?: ConsoleInterface, + logTracker?: LogTracker, + ipythonMode?: IPythonMode + ) { + return new SourceFile( + serviceProvider, + fileUri, + moduleNameGetter, + isThirdPartyImport, + isThirdPartyPyTypedPresent, + editMode, + console, + logTracker, + ipythonMode + ); + }, +}; diff --git a/python-parser/packages/pyright-internal/src/common/streamUtils.ts b/python-parser/packages/pyright-internal/src/common/streamUtils.ts new file mode 100644 index 00000000..34d6d5ff --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/streamUtils.ts @@ -0,0 +1,30 @@ +/* + * streamUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Utility functions for dealing with standard IO streams in node. 
+ */ + +import { stdin } from 'process'; + +export async function getStdinBuffer() { + if (stdin.isTTY) { + return Buffer.alloc(0); + } + + const result = []; + let length = 0; + + for await (const chunk of stdin) { + result.push(chunk); + length += chunk.length; + } + + return Buffer.concat(result, length); +} + +export async function getStdin() { + const buffer = await getStdinBuffer(); + return buffer.toString(); +} diff --git a/python-parser/packages/pyright-internal/src/common/stringUtils.ts b/python-parser/packages/pyright-internal/src/common/stringUtils.ts new file mode 100644 index 00000000..a341383a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/stringUtils.ts @@ -0,0 +1,127 @@ +/* + * stringUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Utility methods for manipulating and comparing strings. + */ + +import { compareComparableValues, Comparison } from './core'; + +// Determines if typed string matches a symbol +// name. Characters must appear in order. +// Return true if all typed characters are in symbol +export function isPatternInSymbol(typedValue: string, symbolName: string): boolean { + const typedLower = typedValue.toLocaleLowerCase(); + const symbolLower = symbolName.toLocaleLowerCase(); + const typedLength = typedLower.length; + const symbolLength = symbolLower.length; + let typedPos = 0; + let symbolPos = 0; + while (typedPos < typedLength && symbolPos < symbolLength) { + if (typedLower[typedPos] === symbolLower[symbolPos]) { + typedPos += 1; + } + symbolPos += 1; + } + return typedPos === typedLength; +} + +// This is a simple, non-cryptographic hash function for text. +export function hashString(contents: string) { + let hash = 0; + + for (let i = 0; i < contents.length; i++) { + hash = ((hash << 5) - hash + contents.charCodeAt(i)) | 0; + } + return hash; +} + +/** + * Compare two strings using a case-insensitive ordinal comparison. 
+ * + * Ordinal comparisons are based on the difference between the unicode code points of both + * strings. Characters with multiple unicode representations are considered unequal. Ordinal + * comparisons provide predictable ordering, but place "a" after "B". + * + * Case-insensitive comparisons compare both strings one code-point at a time using the integer + * value of each code-point after applying `toUpperCase` to each string. We always map both + * strings to their upper-case form as some unicode characters do not properly round-trip to + * lowercase (such as `ẞ` (German sharp capital s)). + */ +export function compareStringsCaseInsensitive(a: string | undefined, b: string | undefined): Comparison { + return a === b + ? Comparison.EqualTo + : a === undefined + ? Comparison.LessThan + : b === undefined + ? Comparison.GreaterThan + : compareComparableValues(a.toUpperCase(), b.toUpperCase()); +} + +/** + * Compare two strings using a case-sensitive ordinal comparison. + * + * Ordinal comparisons are based on the difference between the unicode code points of both + * strings. Characters with multiple unicode representations are considered unequal. Ordinal + * comparisons provide predictable ordering, but place "a" after "B". + * + * Case-sensitive comparisons compare both strings one code-point at a time using the integer + * value of each code-point. + */ +export function compareStringsCaseSensitive(a: string | undefined, b: string | undefined): Comparison { + return compareComparableValues(a, b); +} + +export function getStringComparer(ignoreCase?: boolean) { + return ignoreCase ? compareStringsCaseInsensitive : compareStringsCaseSensitive; +} + +/** + * Compare the equality of two strings using a case-insensitive ordinal comparison. + * + * Case-insensitive comparisons compare both strings one code-point at a time using the integer + * value of each code-point after applying `toUpperCase` to each string. 
We always map both + * strings to their upper-case form as some unicode characters do not properly round-trip to + * lowercase (such as `ẞ` (German sharp capital s)). + */ +export function equateStringsCaseInsensitive(a: string, b: string) { + return compareStringsCaseInsensitive(a, b) === Comparison.EqualTo; +} + +/** + * Compare the equality of two strings using a case-sensitive ordinal comparison. + * + * Case-sensitive comparisons compare both strings one code-point at a time using the + * integer value of each code-point. + */ +export function equateStringsCaseSensitive(a: string, b: string) { + return compareStringsCaseSensitive(a, b) === Comparison.EqualTo; +} + +export function getCharacterCount(value: string, ch: string) { + let result = 0; + for (let i = 0; i < value.length; i++) { + if (value[i] === ch) { + result++; + } + } + return result; +} + +export function getLastDottedString(text: string) { + const index = text.lastIndexOf('.'); + return index > 0 ? text.substring(index + 1) : text; +} + +export function truncate(text: string, maxLength: number) { + if (text.length > maxLength) { + return text.substring(0, maxLength - '...'.length) + '...'; + } + return text; +} + +export function escapeRegExp(text: string) { + return text.replace(/[\\^$.*+?()[\]{}|]/g, '\\$&'); +} diff --git a/python-parser/packages/pyright-internal/src/common/textEditTracker.ts b/python-parser/packages/pyright-internal/src/common/textEditTracker.ts new file mode 100644 index 00000000..e9875f7f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/textEditTracker.ts @@ -0,0 +1,439 @@ +/* + * textEditTracker.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Tracks text edits on a per-file basis. 
+ */ + +import { CancellationToken } from 'vscode-languageserver'; + +import { getFileInfo } from '../analyzer/analyzerNodeInfo'; +import { + getAllImportNames, + getContainingImportStatement, + getTextEditsForAutoImportInsertion, + getTextEditsForAutoImportSymbolAddition, + getTextRangeForImportNameDeletion, + haveSameParentModule, + ImportGroup, + ImportNameInfo, + ImportStatements, + ModuleNameInfo, +} from '../analyzer/importStatementUtils'; +import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; +import { + ImportAsNode, + ImportFromAsNode, + ImportFromNode, + ImportNode, + ParseNode, + ParseNodeType, +} from '../parser/parseNodes'; +import { ParseFileResults } from '../parser/parser'; +import { appendArray, getOrAdd, removeArrayElements } from './collectionUtils'; +import * as debug from './debug'; +import { FileEditAction } from './editAction'; +import { convertOffsetToPosition, convertTextRangeToRange } from './positionUtils'; +import { doRangesIntersect, extendRange, isRangeInRange, Range, TextRange } from './textRange'; +import { Uri } from './uri/uri'; + +export class TextEditTracker { + private readonly _nodesRemoved: Map = new Map(); + private readonly _results = new Map(); + + private readonly _pendingNodeToRemove: NodeToRemove[] = []; + + constructor(private _mergeOnlyDuplications = true) { + // Empty + } + + addEdits(...edits: FileEditAction[]) { + edits.forEach((e) => this.addEdit(e.fileUri, e.range, e.replacementText)); + } + + addEdit(fileUri: Uri, range: Range, replacementText: string) { + const edits = getOrAdd(this._results, fileUri.key, () => []); + + // If there is any overlapping edit, see whether we can merge edits. + // We can merge edits, if one of them is 'deletion' or 2 edits has the same + // replacement text with containing range. 
+ const overlappingEdits = this._getEditsToMerge(edits, range, replacementText); + if (overlappingEdits.length > 0) { + // Merge the given edit with the existing edits by + // first deleting existing edits and expanding the current edit's range + // to cover all existing edits. + this._removeEdits(edits, overlappingEdits); + overlappingEdits.forEach((e) => { + extendRange(range, e.range); + }); + } + + edits.push({ fileUri: fileUri, range, replacementText }); + } + + addEditWithTextRange(parseFileResults: ParseFileResults, range: TextRange, replacementText: string) { + const filePath = getFileInfo(parseFileResults.parserOutput.parseTree).fileUri; + + const existing = parseFileResults.text.substr(range.start, range.length); + if (existing === replacementText) { + // No change. Return as it is. + return; + } + + this.addEdit(filePath, convertTextRangeToRange(range, parseFileResults.tokenizerOutput.lines), replacementText); + } + + deleteImportName(parseFileResults: ParseFileResults, importToDelete: ImportFromAsNode | ImportAsNode) { + // TODO: remove all these manual text handling and merge it to _processNodeRemoved that is + // used by remove unused imports. + const imports: ImportFromAsNode[] | ImportAsNode[] = + importToDelete.nodeType === ParseNodeType.ImportAs + ? (importToDelete.parent as ImportNode).d.list + : (importToDelete.parent as ImportFromNode).d.imports; + + const filePath = getFileInfo(parseFileResults.parserOutput.parseTree).fileUri; + const ranges = getTextRangeForImportNameDeletion( + parseFileResults, + imports, + imports.findIndex((v) => v === importToDelete) + ); + + ranges.forEach((r) => this.addEditWithTextRange(parseFileResults, r, '')); + + this._markNodeRemoved(importToDelete, parseFileResults); + + // Check whether we have deleted all trailing import names. + // If either no trailing import is deleted or handled properly + // then, there is nothing to do. otherwise, either delete the whole statement + // or remove trailing comma. 
+ // ex) from x import [y], z or from x import y[, z] + let lastImportIndexNotDeleted = 0; + for ( + lastImportIndexNotDeleted = imports.length - 1; + lastImportIndexNotDeleted >= 0; + lastImportIndexNotDeleted-- + ) { + if (!this._nodesRemoved.has(imports[lastImportIndexNotDeleted])) { + break; + } + } + + if (lastImportIndexNotDeleted === -1) { + // Whole statement is deleted. Remove the statement itself. + // ex) [from x import a, b, c] or [import a] + const importStatement = importToDelete.parent; + if (importStatement) { + this.addEdit(filePath, ParseTreeUtils.getFullStatementRange(importStatement, parseFileResults), ''); + } + } else if (lastImportIndexNotDeleted >= 0 && lastImportIndexNotDeleted < imports.length - 2) { + // We need to delete trailing comma + // ex) from x import a, [b, c] + const start = TextRange.getEnd(imports[lastImportIndexNotDeleted]); + const length = TextRange.getEnd(imports[lastImportIndexNotDeleted + 1]) - start; + this.addEditWithTextRange(parseFileResults, { start, length }, ''); + } + } + + addOrUpdateImport( + parseFileResults: ParseFileResults, + importStatements: ImportStatements, + moduleNameInfo: ModuleNameInfo, + importGroup: ImportGroup, + importNameInfo?: ImportNameInfo[], + updateOptions?: { + currentFromImport: ImportFromNode; + originalModuleName: string; + } + ): void { + // TODO: remove all these manual text handling and merge it to _processNodeRemoved that is + // used by remove unused imports. 
+ if ( + importNameInfo && + this._tryUpdateImport(parseFileResults, importStatements, moduleNameInfo, importNameInfo, updateOptions) + ) { + return; + } + + this._addImport(parseFileResults, importStatements, moduleNameInfo, importGroup, importNameInfo); + } + + removeNodes(...nodes: { node: ParseNode; parseFileResults: ParseFileResults }[]) { + this._pendingNodeToRemove.push(...nodes); + } + + isNodeRemoved(node: ParseNode) { + return this._nodesRemoved.has(node); + } + + getEdits(token: CancellationToken) { + this._processNodeRemoved(token); + + const edits: FileEditAction[] = []; + this._results.forEach((v) => appendArray(edits, v)); + + return edits; + } + + private _addImport( + parseFileResults: ParseFileResults, + importStatements: ImportStatements, + moduleNameInfo: ModuleNameInfo, + importGroup: ImportGroup, + importNameInfo?: ImportNameInfo[] + ) { + const fileUri = getFileInfo(parseFileResults.parserOutput.parseTree).fileUri; + + this.addEdits( + ...getTextEditsForAutoImportInsertion( + importNameInfo ?? 
[], + moduleNameInfo, + importStatements, + importGroup, + parseFileResults, + convertOffsetToPosition( + parseFileResults.parserOutput.parseTree.length, + parseFileResults.tokenizerOutput.lines + ) + ).map((e) => ({ fileUri, range: e.range, replacementText: e.replacementText })) + ); + } + + private _tryUpdateImport( + parseFileResults: ParseFileResults, + importStatements: ImportStatements, + moduleNameInfo: ModuleNameInfo, + importNameInfo: ImportNameInfo[], + updateOptions?: UpdateOption + ): boolean { + if (!updateOptions) { + return false; + } + + // See whether we have existing from import statement for the same module + // ex) from [|moduleName|] import subModule + const imported = importStatements.orderedImports.find( + (i) => + i.node.nodeType === ParseNodeType.ImportFrom && + (i.moduleName === moduleNameInfo.nameForImportFrom || i.moduleName === moduleNameInfo.name) + ); + + if (!imported || imported.node.nodeType !== ParseNodeType.ImportFrom || imported.node.d.isWildcardImport) { + return false; + } + + const fileUri = getFileInfo(parseFileResults.parserOutput.parseTree).fileUri; + + const edits = getTextEditsForAutoImportSymbolAddition(importNameInfo, imported, parseFileResults); + if (imported.node !== updateOptions.currentFromImport) { + // Add what we want to the existing "import from" statement as long as it is not the same import + // node we are working on. + // ex) from xxx import yyy <= we are working on here. + // from xxx import zzz <= but we found this. + this.addEdits(...edits.map((e) => ({ fileUri, range: e.range, replacementText: e.replacementText }))); + return true; + } + + const moduleNames = updateOptions.originalModuleName.split('.'); + const newModuleNames = moduleNameInfo.name.split('.'); + + if (!haveSameParentModule(moduleNames, newModuleNames)) { + // Module has moved. + return false; + } + + // Check whether we can avoid creating a new statement. 
We can't just merge with existing one since + // we could create invalid text edits (2 edits that change the same span, or invalid replacement text since + // texts on the node has changed) + if (importNameInfo.length !== 1 || edits.length !== 1) { + return false; + } + + const deletions = this._getDeletionsForSpan(fileUri, edits[0].range); + if (deletions.length === 0) { + this.addEdit(fileUri, edits[0].range, edits[0].replacementText); + return true; + } + + const lastModuleName = moduleNames[moduleNames.length - 1]; + const newLastModuleName = newModuleNames[newModuleNames.length - 1]; + + const alias = importNameInfo[0].alias === newLastModuleName ? lastModuleName : importNameInfo[0].alias; + const importName = updateOptions.currentFromImport.d.imports.find( + (i) => i.d.name.d.value === lastModuleName && i.d.alias?.d.value === alias + ); + + if (!importName) { + return false; + } + + this._removeEdits(fileUri, deletions); + if (importName.d.alias) { + this._nodesRemoved.delete(importName.d.alias); + } + + this.addEdit( + fileUri, + convertTextRangeToRange(importName.d.name, parseFileResults.tokenizerOutput.lines), + newLastModuleName + ); + + return true; + } + + private _getDeletionsForSpan(fileUriOrEdit: Uri | FileEditAction[], range: Range) { + const edits = this._getOverlappingForSpan(fileUriOrEdit, range); + return edits.filter((e) => e.replacementText === ''); + } + + private _removeEdits(fileUriOrEdit: Uri | FileEditAction[], edits: FileEditAction[]) { + if (Uri.is(fileUriOrEdit)) { + fileUriOrEdit = this._results.get(fileUriOrEdit.key) ?? []; + } + + removeArrayElements(fileUriOrEdit, (f) => edits.some((e) => FileEditAction.areEqual(f, e))); + } + + private _getEditsToMerge(edits: FileEditAction[], range: Range, replacementText: string) { + const overlappingEdits = this._getOverlappingForSpan(edits, range); + if (this._mergeOnlyDuplications && overlappingEdits.length > 0) { + // Merge duplicated deletion. 
For deletion, we can even merge edits + // intersecting each other. + if (replacementText === '') { + return overlappingEdits.filter((e) => e.replacementText === ''); + } + + // Merge duplicated edits as long as one of them contains the other. + return overlappingEdits.filter( + (e) => + e.replacementText === replacementText && + (isRangeInRange(range, e.range) || isRangeInRange(e.range, range)) + ); + } + + // We are allowed to merge more than exact duplication. If the existing edit + // is deletion or duplicated text with containing ranges, merge them to 1. + return overlappingEdits.filter( + (e) => + e.replacementText === '' || + (e.replacementText === replacementText && + (isRangeInRange(range, e.range) || isRangeInRange(e.range, range))) + ); + } + + private _getOverlappingForSpan(fileUriOrEdit: Uri | FileEditAction[], range: Range) { + if (Uri.is(fileUriOrEdit)) { + fileUriOrEdit = this._results.get(fileUriOrEdit.key) ?? []; + } + + return fileUriOrEdit.filter((e) => doRangesIntersect(e.range, range)); + } + + private _processNodeRemoved(token: CancellationToken) { + while (this._pendingNodeToRemove.length > 0) { + const numberOfNodesBeforeProcessing = this._pendingNodeToRemove.length; + + const peekNodeToRemove = this._pendingNodeToRemove[this._pendingNodeToRemove.length - 1]; + this._handleImportNameNode(peekNodeToRemove, token); + + if (this._pendingNodeToRemove.length === numberOfNodesBeforeProcessing) { + // It looks like we don't know how to handle the node, + // Please add code to handle the case. 
+ debug.assert(`please add handler for ${peekNodeToRemove.node.nodeType}`); + + // As a default behavior, we will just remove the node + this._pendingNodeToRemove.pop(); + + const info = getFileInfo(peekNodeToRemove.parseFileResults.parserOutput.parseTree); + this.addEdit(info.fileUri, convertTextRangeToRange(peekNodeToRemove.node, info.lines), ''); + } + } + } + + private _handleImportNameNode(nodeToRemove: NodeToRemove, token: CancellationToken) { + const node = nodeToRemove.node; + if (node.nodeType !== ParseNodeType.Name) { + return false; + } + + const module = nodeToRemove.parseFileResults.parserOutput.parseTree; + const info = getFileInfo(module); + const importNode = getContainingImportStatement(ParseTreeUtils.findNodeByOffset(module, node.start), token); + if (!importNode) { + return false; + } + + const nameNodes = getAllImportNames(importNode); + + // check various different cases + // 1. check whether all imported names in the import statement is not used. + const nodesRemoved = this._pendingNodeToRemove.filter((nodeToRemove) => + nameNodes.some((n) => TextRange.overlapsRange(nodeToRemove.node, n)) + ); + + if (nameNodes.length === nodesRemoved.length) { + this.addEdit( + info.fileUri, + ParseTreeUtils.getFullStatementRange(importNode, nodeToRemove.parseFileResults), + '' + ); + + // Remove nodes that are handled from queue. + this._removeNodesHandled(nodesRemoved); + return true; + } + + // 2. some of modules in the import statement is used. 
+ const indices: number[] = []; + for (let i = 0; i < nameNodes.length; i++) { + const nameNode = nameNodes[i]; + + if (nodesRemoved.some((r) => TextRange.overlapsRange(r.node, nameNode))) { + indices.push(i); + } + } + + if (indices.length === 0) { + // can't find module user wants to remove + return false; + } + + const editSpans = getTextRangeForImportNameDeletion(nodeToRemove.parseFileResults, nameNodes, ...indices); + editSpans.forEach((e) => this.addEdit(info.fileUri, convertTextRangeToRange(e, info.lines), '')); + + this._removeNodesHandled(nodesRemoved); + return true; + } + + private _removeNodesHandled(nodesRemoved: NodeToRemove[]) { + nodesRemoved.forEach((n) => this._markNodeRemoved(n.node, n.parseFileResults)); + removeArrayElements(this._pendingNodeToRemove, (n) => this._nodesRemoved.has(n.node)); + } + + private _markNodeRemoved(nodeToDelete: ParseNode, parseFileResults: ParseFileResults) { + // Mark that we don't need to process these node again later. + this._nodesRemoved.set(nodeToDelete, parseFileResults); + if (nodeToDelete.nodeType === ParseNodeType.ImportAs) { + this._nodesRemoved.set(nodeToDelete.d.module, parseFileResults); + nodeToDelete.d.module.d.nameParts.forEach((n) => this._nodesRemoved.set(n, parseFileResults)); + if (nodeToDelete.d.alias) { + this._nodesRemoved.set(nodeToDelete.d.alias, parseFileResults); + } + } else if (nodeToDelete.nodeType === ParseNodeType.ImportFromAs) { + this._nodesRemoved.set(nodeToDelete.d.name, parseFileResults); + if (nodeToDelete.d.alias) { + this._nodesRemoved.set(nodeToDelete.d.alias, parseFileResults); + } + } + } +} + +interface UpdateOption { + currentFromImport: ImportFromNode; + originalModuleName: string; +} + +interface NodeToRemove { + node: ParseNode; + parseFileResults: ParseFileResults; +} diff --git a/python-parser/packages/pyright-internal/src/common/textRange.ts b/python-parser/packages/pyright-internal/src/common/textRange.ts new file mode 100644 index 00000000..082d33ae --- /dev/null 
+++ b/python-parser/packages/pyright-internal/src/common/textRange.ts @@ -0,0 +1,207 @@ +/* + * textRange.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Specifies the range of text within a larger string. + */ + +import { fail } from './debug'; + +export interface TextRange { + readonly start: number; + readonly length: number; +} + +export namespace TextRange { + export function create(start: number, length: number): TextRange { + if (start < 0) { + fail('start must be non-negative'); + } + if (length < 0) { + fail('length must be non-negative'); + } + return { start, length }; + } + + export function fromBounds(start: number, end: number): TextRange { + if (start < 0) { + fail('start must be non-negative'); + } + if (start > end) { + fail('end must be greater than or equal to start'); + } + return create(start, end - start); + } + + export function getEnd(range: TextRange): number { + return range.start + range.length; + } + + export function contains(range: TextRange, position: number): boolean { + return position >= range.start && position < getEnd(range); + } + + export function containsRange(range: TextRange, span: TextRange): boolean { + return span.start >= range.start && getEnd(span) <= getEnd(range); + } + + export function overlaps(range: TextRange, position: number): boolean { + return position >= range.start && position <= getEnd(range); + } + + export function overlapsRange(range: TextRange, other: TextRange): boolean { + return overlaps(range, other.start) || overlaps(other, range.start); + } + + export function extend(range: TextRange, extension: TextRange): TextRange { + let result = range; + + if (extension.start < result.start) { + result = { + start: extension.start, + length: result.length + result.start - extension.start, + }; + } + + const extensionEnd = getEnd(extension); + const resultEnd = getEnd(result); + if (extensionEnd > resultEnd) { + result = { + start: result.start, + 
length: result.length + extensionEnd - resultEnd, + }; + } + + return result; + } + + export function combine(ranges: TextRange[]): TextRange | undefined { + if (ranges.length === 0) { + return undefined; + } + + let combinedRange: TextRange = { start: ranges[0].start, length: ranges[0].length }; + for (let i = 1; i < ranges.length; i++) { + combinedRange = extend(combinedRange, ranges[i]); + } + return combinedRange; + } +} + +export interface Position { + // Both line and column are zero-based + line: number; + character: number; +} + +export namespace Position { + export function print(value: Position): string { + return `(${value.line}:${value.character})`; + } +} + +export interface Range { + start: Position; + end: Position; +} + +export namespace Range { + export function print(value: Range): string { + return `${Position.print(value.start)}-${Position.print(value.end)}`; + } +} + +// Represents a range within a particular document. +export function comparePositions(a: Position, b: Position) { + if (a.line < b.line) { + return -1; + } else if (a.line > b.line) { + return 1; + } else if (a.character < b.character) { + return -1; + } else if (a.character > b.character) { + return 1; + } + return 0; +} + +export function getEmptyPosition(): Position { + return { + line: 0, + character: 0, + }; +} + +export function doRangesOverlap(a: Range, b: Range) { + if (comparePositions(b.start, a.end) >= 0) { + return false; + } else if (comparePositions(a.start, b.end) >= 0) { + return false; + } + return true; +} + +export function doRangesIntersect(a: Range, b: Range) { + if (comparePositions(b.start, a.end) > 0) { + return false; + } else if (comparePositions(a.start, b.end) > 0) { + return false; + } + return true; +} + +export function isPositionInRange(range: Range, position: Position): boolean { + return comparePositions(range.start, position) <= 0 && comparePositions(range.end, position) >= 0; +} + +export function isRangeInRange(range: Range, containedRange: 
Range): boolean { + return isPositionInRange(range, containedRange.start) && isPositionInRange(range, containedRange.end); +} + +export function positionsAreEqual(a: Position, b: Position) { + return comparePositions(a, b) === 0; +} + +export function rangesAreEqual(a: Range, b: Range) { + return positionsAreEqual(a.start, b.start) && positionsAreEqual(a.end, b.end); +} + +export function getEmptyRange(): Range { + return { + start: getEmptyPosition(), + end: getEmptyPosition(), + }; +} + +export function isEmptyPosition(pos: Position) { + return pos.character === 0 && pos.line === 0; +} + +export function isEmptyRange(range: Range) { + return isEmptyPosition(range.start) && isEmptyPosition(range.end); +} + +export function extendRange(range: Range, extension: Range) { + if (comparePositions(extension.start, range.start) < 0) { + range.start = extension.start; + } + + if (comparePositions(extension.end, range.end) > 0) { + range.end = extension.end; + } +} + +export function combineRange(ranges: Range[]): Range | undefined { + if (ranges.length === 0) { + return undefined; + } + + const combinedRange = ranges[0]; + for (let i = 1; i < ranges.length; i++) { + extendRange(combinedRange, ranges[i]); + } + + return combinedRange; +} diff --git a/python-parser/packages/pyright-internal/src/common/textRangeCollection.ts b/python-parser/packages/pyright-internal/src/common/textRangeCollection.ts new file mode 100644 index 00000000..7b0d256d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/textRangeCollection.ts @@ -0,0 +1,172 @@ +/* + * textRangeCollection.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Based on code from vscode-python repository: + * https://github.com/Microsoft/vscode-python + * + * Class that maintains an ordered list of text ranges and allows + * for indexing and fast lookups within this list. 
+ */ + +import { fail } from './debug'; +import { TextRange } from './textRange'; + +export class TextRangeCollection { + private _items: T[]; + + constructor(items: T[]) { + this._items = items; + } + + get start(): number { + return this._items.length > 0 ? this._items[0].start : 0; + } + + get end(): number { + const lastItem = this._items[this._items.length - 1]; + return this._items.length > 0 ? lastItem.start + lastItem.length : 0; + } + + get length(): number { + return this.end - this.start; + } + + get count(): number { + return this._items.length; + } + + contains(position: number) { + return position >= this.start && position < this.end; + } + + getItemAt(index: number): T { + if (index < 0 || index >= this._items.length) { + fail('index is out of range'); + } + return this._items[index]; + } + + // Returns the nearest item prior to the position. + // The position may not be contained within the item. + getItemAtPosition(position: number): number { + if (this.count === 0) { + return -1; + } + if (position < this.start) { + return -1; + } + if (position > this.end) { + return -1; + } + + let min = 0; + let max = this.count - 1; + + while (min < max) { + const mid = min + ((max - min) >> 1); + const item = this._items[mid]; + + // Is the position past the start of this item but before + // the start of the next item? If so, we found our item. 
+ if (position >= item.start) { + if (mid >= this.count - 1 || position < this._items[mid + 1].start) { + return mid; + } + } + + if (position < item.start) { + max = mid - 1; + } else { + min = mid + 1; + } + } + return min; + } + + getItemContaining(position: number): number { + if (this.count === 0) { + return -1; + } + if (position < this.start) { + return -1; + } + if (position > this.end) { + return -1; + } + + return getIndexContaining(this._items, position); + } +} + +export function getIndexContaining( + arr: (T | undefined)[], + position: number, + inRange: (item: T, position: number) => boolean = TextRange.contains +) { + if (arr.length === 0) { + return -1; + } + + let min = 0; + let max = arr.length - 1; + while (min <= max) { + const mid = Math.floor(min + (max - min) / 2); + const element = findNonNullElement(arr, mid, min, max); + if (element === undefined) { + return -1; + } + + if (inRange(element.item, position)) { + return element.index; + } + + const nextElement = findNonNullElement(arr, mid + 1, mid + 1, max); + if (nextElement === undefined) { + return -1; + } + + if (mid < arr.length - 1 && TextRange.getEnd(element.item) <= position && position < nextElement.item.start) { + return -1; + } + + if (position < element.item.start) { + max = mid - 1; + } else { + min = mid + 1; + } + } + + return -1; +} + +function findNonNullElement( + arr: (T | undefined)[], + position: number, + min: number, + max: number +): { index: number; item: T } | undefined { + const item = arr[position]; + if (item) { + return { index: position, item }; + } + + // Search forward and backward until it finds non-null value. 
+ for (let i = position + 1; i <= max; i++) { + const item = arr[i]; + if (item) { + return { index: i, item }; + } + } + + for (let i = position - 1; i >= min; i--) { + const item = arr[i]; + if (item) { + return { index: i, item }; + } + } + + return undefined; +} diff --git a/python-parser/packages/pyright-internal/src/common/timing.ts b/python-parser/packages/pyright-internal/src/common/timing.ts new file mode 100644 index 00000000..29b41ad3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/timing.ts @@ -0,0 +1,105 @@ +/* + * timing.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * A simple duration class that can be used to record and report + * durations at the millisecond level of resolution. + */ + +import { ConsoleInterface } from './console'; + +export class Duration { + private _startTime: number; + + constructor() { + this._startTime = Date.now(); + } + + getDurationInMilliseconds() { + const curTime = Date.now(); + return curTime - this._startTime; + } + + getDurationInSeconds() { + return this.getDurationInMilliseconds() / 1000; + } +} + +export class TimingStat { + totalTime = 0; + callCount = 0; + isTiming = false; + + timeOperation any>(callback: T, ...args: any[]): ReturnType { + this.callCount++; + + // Handle reentrancy. 
+ if (this.isTiming) { + return callback(...args); + } else { + this.isTiming = true; + const duration = new Duration(); + const result = callback(...args); + this.totalTime += duration.getDurationInMilliseconds(); + this.isTiming = false; + + return result; + } + } + + subtractFromTime(callback: () => void) { + if (this.isTiming) { + this.isTiming = false; + const duration = new Duration(); + callback(); + this.totalTime -= duration.getDurationInMilliseconds(); + this.isTiming = true; + } else { + callback(); + } + } + + printTime(): string { + const totalTimeInSec = this.totalTime / 1000; + const roundedTime = Math.round(totalTimeInSec * 100) / 100; + return roundedTime.toString() + 'sec'; + } +} + +export class TimingStats { + totalDuration = new Duration(); + findFilesTime = new TimingStat(); + readFileTime = new TimingStat(); + tokenizeFileTime = new TimingStat(); + parseFileTime = new TimingStat(); + resolveImportsTime = new TimingStat(); + cycleDetectionTime = new TimingStat(); + bindTime = new TimingStat(); + typeCheckerTime = new TimingStat(); + typeEvaluationTime = new TimingStat(); + + printSummary(console: ConsoleInterface) { + console.info(`Completed in ${this.totalDuration.getDurationInSeconds()}sec`); + } + + printDetails(console: ConsoleInterface) { + console.info(''); + console.info('Timing stats'); + console.info('Find Source Files: ' + this.findFilesTime.printTime()); + console.info('Read Source Files: ' + this.readFileTime.printTime()); + console.info('Tokenize: ' + this.tokenizeFileTime.printTime()); + console.info('Parse: ' + this.parseFileTime.printTime()); + console.info('Resolve Imports: ' + this.resolveImportsTime.printTime()); + console.info('Bind: ' + this.bindTime.printTime()); + console.info('Check: ' + this.typeCheckerTime.printTime()); + console.info('Detect Cycles: ' + this.cycleDetectionTime.printTime()); + } + + getTotalDuration() { + return this.totalDuration.getDurationInSeconds(); + } +} + +export const timingStats = new 
TimingStats(); diff --git a/python-parser/packages/pyright-internal/src/common/tomlUtils.ts b/python-parser/packages/pyright-internal/src/common/tomlUtils.ts new file mode 100644 index 00000000..418c162f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/tomlUtils.ts @@ -0,0 +1,36 @@ +/* + * tomlUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * helpers related to TOML + */ + +type TomlPrimitive = + | string + | number + | boolean + | { + [key: string]: TomlPrimitive; + } + | TomlPrimitive[]; + +// Dynamically load `smol-toml` to address module loading issues and +// maintain existing module resolution to support multiple environments. +let TOML: any; +const loadTomlModule = (async () => { + // Use a magic comment to prevent webpack from creating an extra chunk for the dynamic import by default. + // An extra chunk will still be created if explicitly configured in the webpack config. + TOML = await import(/* webpackMode: "eager" */ 'smol-toml'); +})(); + +export async function ensureTomlModuleLoaded() { + await loadTomlModule; +} + +export const parse = (toml: string): Record => { + if (!TOML) { + throw new Error('TOML module not loaded'); + } + return TOML.parse(toml); +}; diff --git a/python-parser/packages/pyright-internal/src/common/uri/baseUri.ts b/python-parser/packages/pyright-internal/src/common/uri/baseUri.ts new file mode 100644 index 00000000..5a7a1090 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/uri/baseUri.ts @@ -0,0 +1,306 @@ +/* + * baseUri.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Base URI class for storing and manipulating URIs. 
+ */ + +import { some } from '../collectionUtils'; +import { getRootLength, getShortenedFileName } from '../pathUtils'; +import { cacheProperty } from './memoization'; +import type { Uri } from './uriInterface'; + +export type JsonObjType = any; + +const backslashRegEx = /\\/g; + +export abstract class BaseUri implements Uri { + protected constructor(private readonly _key: string) {} + + // Unique key for storing in maps. + get key() { + return this._key; + } + + // Returns the scheme of the URI. + abstract get scheme(): string; + + // Returns whether the underlying file system is case sensitive or not. + abstract get isCaseSensitive(): boolean; + + // Returns the last segment of the URI, similar to the UNIX basename command. + abstract get fileName(): string; + + // Returns just the fileName without any extensions + get fileNameWithoutExtensions(): string { + const fileName = this.fileName; + const index = fileName.lastIndexOf('.'); + if (index > 0) { + return fileName.slice(0, index); + } else { + return fileName; + } + } + + // Returns the extension of the URI, similar to the UNIX extname command. + abstract get lastExtension(): string; + + // Returns a URI where the path just contains the root folder. + abstract get root(): Uri; + + // Returns a URI where the path contains the path with .py appended. + @cacheProperty() + get packageUri(): Uri { + // This is assuming that the current path is a file already. + return this.addExtension('.py'); + } + + // Returns a URI where the path contains the path with .pyi appended. + @cacheProperty() + get packageStubUri(): Uri { + // This is assuming that the current path is a file already. + return this.addExtension('.pyi'); + } + + // Returns a URI where the path has __init__.py appended. + @cacheProperty() + get initPyUri(): Uri { + // This is assuming that the current path is a directory already. + return this.combinePathsUnsafe('__init__.py'); + } + + // Returns a URI where the path has __init__.pyi appended. 
+ @cacheProperty() + get initPyiUri(): Uri { + // This is assuming that the current path is a directory already. + return this.combinePathsUnsafe('__init__.pyi'); + } + + // Returns a URI where the path has py.typed appended. + @cacheProperty() + get pytypedUri(): Uri { + // This is assuming that the current path is a directory already. + return this.combinePathsUnsafe('py.typed'); + } + + abstract get fragment(): string; + abstract get query(): string; + + isEmpty(): boolean { + return false; + } + + abstract toString(): string; + + abstract toUserVisibleString(): string; + + abstract toJsonObj(): JsonObjType; + + abstract matchesRegex(regex: RegExp): boolean; + + replaceExtension(ext: string): Uri { + const dir = this.getDirectory(); + const base = this.fileName; + const newBase = base.slice(0, base.length - this.lastExtension.length) + ext; + return dir.combinePathsUnsafe(newBase); + } + + addExtension(ext: string): Uri { + return this.addPath(ext); + } + + hasExtension(ext: string): boolean { + return this.isCaseSensitive + ? this.lastExtension === ext + : this.lastExtension.toLowerCase() === ext.toLowerCase(); + } + + containsExtension(ext: string): boolean { + const fileName = this.fileName; + // Use a regex so we keep the . on the front of the extension. + const extensions = fileName.split(/(?=\.)/g); + return extensions.some((e) => (this.isCaseSensitive ? e === ext : e.toLowerCase() === ext.toLowerCase())); + } + + abstract withFragment(fragment: string): Uri; + abstract withQuery(query: string): Uri; + + abstract addPath(extra: string): Uri; + + // Returns a URI where the path is the directory name of the original URI, similar to the UNIX dirname command. + abstract getDirectory(): Uri; + + getRootPathLength(): number { + return this.getRootPath().length; + } + + // Determines whether a path consists only of a path root. + abstract isRoot(): boolean; + + // Determines whether a Uri is a child of some parent Uri. 
+ abstract isChild(parent: Uri, ignoreCase?: boolean): boolean; + + abstract isLocal(): boolean; + + isUntitled(): boolean { + return this.scheme === 'untitled'; + } + + equals(other: Uri | undefined): boolean { + return this.key === other?.key; + } + + abstract startsWith(other: Uri | undefined, ignoreCase?: boolean): boolean; + + pathStartsWith(name: string): boolean { + // We're making an assumption here that the name is already normalized. + return this.getComparablePath().startsWith(name); + } + + pathEndsWith(name: string): boolean { + // We're making an assumption here that the name is already normalized. + return this.getComparablePath().endsWith(name); + } + + pathIncludes(include: string): boolean { + // We're making an assumption here that the name is already normalized. + return this.getComparablePath().includes(include); + } + + // How long the path for this Uri is. + abstract getPathLength(): number; + + // Resolves paths to create a new Uri. Any '..' or '.' path components will be normalized. + abstract resolvePaths(...paths: string[]): Uri; + + // Combines paths to create a new Uri. Any '..' or '.' path components will be normalized. + abstract combinePaths(...paths: string[]): Uri; + + // Combines paths to create a new Uri. Any '..' or '.' path components will NOT be normalized. + abstract combinePathsUnsafe(...paths: string[]): Uri; + + getRelativePath(child: Uri): string | undefined { + if (this.scheme !== child.scheme) { + return undefined; + } + + // Unlike getRelativePathComponents, this function should not return relative path + // markers for non children. + if (child.isChild(this)) { + const relativeToComponents = this.getRelativePathComponents(child); + if (relativeToComponents.length > 0) { + return ['.', ...relativeToComponents].join('/'); + } + } + return undefined; + } + + getPathComponents(): readonly string[] { + // Make sure to freeze the result so that it can't be modified. 
+ return Object.freeze(this.getPathComponentsImpl()); + } + + abstract getPath(): string; + + abstract getFilePath(): string; + + getRelativePathComponents(to: Uri): readonly string[] { + const fromComponents = this.getPathComponents(); + const toComponents = to.getPathComponents(); + + let start: number; + for (start = 0; start < fromComponents.length && start < toComponents.length; start++) { + const fromComponent = fromComponents[start]; + const toComponent = toComponents[start]; + + const match = this.isCaseSensitive + ? fromComponent === toComponent + : fromComponent.toLowerCase() === toComponent.toLowerCase(); + + if (!match) { + break; + } + } + + if (start === 0) { + return toComponents; + } + + const components = toComponents.slice(start); + const relative: string[] = []; + for (; start < fromComponents.length; start++) { + relative.push('..'); + } + return [...relative, ...components]; + } + + getShortenedFileName(maxDirLength: number = 15): string { + return getShortenedFileName(this.getPath(), maxDirLength); + } + + abstract stripExtension(): Uri; + + abstract stripAllExtensions(): Uri; + + protected abstract getRootPath(): string; + + protected normalizeSlashes(path: string): string { + if (path.includes('\\')) { + return path.replace(backslashRegEx, '/'); + } + return path; + } + + protected static combinePathElements(pathString: string, separator: string, ...paths: (string | undefined)[]) { + // Borrowed this algorithm from the pathUtils combinePaths function. This is + // a quicker implementation that's possible because we assume all paths are normalized already. 
+ for (const relativePath of paths) { + if (!relativePath) { + continue; + } + if (!pathString || getRootLength(relativePath) !== 0) { + pathString = relativePath; + } else if (pathString.endsWith(separator)) { + pathString += relativePath; + } else { + pathString += separator + relativePath; + } + } + + return pathString; + } + protected reducePathComponents(components: string[]): string[] { + if (!some(components)) { + return []; + } + + // Reduce the path components by eliminating + // any '.' or '..'. We start at 1 because the first component is + // always the root. + const reduced = [components[0]]; + for (let i = 1; i < components.length; i++) { + const component = components[i]; + if (!component || component === '.') { + continue; + } + + if (component === '..') { + if (reduced.length > 1) { + if (reduced[reduced.length - 1] !== '..') { + reduced.pop(); + continue; + } + } else if (reduced[0]) { + continue; + } + } + reduced.push(component); + } + + return reduced; + } + + protected abstract getComparablePath(): string; + protected abstract getPathComponentsImpl(): string[]; +} diff --git a/python-parser/packages/pyright-internal/src/common/uri/constantUri.ts b/python-parser/packages/pyright-internal/src/common/uri/constantUri.ts new file mode 100644 index 00000000..3955aade --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/uri/constantUri.ts @@ -0,0 +1,141 @@ +/* + * constantUri.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * URI class that represents a constant/marker URI. 
+ */ + +import { BaseUri } from './baseUri'; +import { Uri } from './uri'; + +export class ConstantUri extends BaseUri { + constructor(name: string) { + super(name); + } + + override get scheme(): string { + return ''; + } + + override get isCaseSensitive(): boolean { + return true; + } + + override get fileName(): string { + return ''; + } + + override get lastExtension(): string { + return ''; + } + + override get root(): Uri { + return this; + } + + override get fragment(): string { + return ''; + } + + override get query(): string { + return ''; + } + + override equals(other: Uri | undefined): boolean { + // For constant Uri, reference equality must be used instead of value equality. + return this === other; + } + + override toJsonObj() { + throw new Error(`constant uri can't be serialized`); + } + + override toString(): string { + return this.key; + } + + override toUserVisibleString(): string { + return ''; + } + + override matchesRegex(regex: RegExp): boolean { + return false; + } + + override withFragment(fragment: string): Uri { + return this; + } + + override withQuery(query: string): Uri { + return this; + } + + override addPath(extra: string): Uri { + return this; + } + + override getDirectory(): Uri { + return this; + } + + override isRoot(): boolean { + return false; + } + + override isChild(parent: Uri, ignoreCase?: boolean | undefined): boolean { + return false; + } + + override isLocal(): boolean { + return false; + } + + override startsWith(other: Uri | undefined, ignoreCase?: boolean | undefined): boolean { + return false; + } + + override getPathLength(): number { + return 0; + } + + override resolvePaths(...paths: string[]): Uri { + return this; + } + + override combinePaths(...paths: string[]): Uri { + return this; + } + + override combinePathsUnsafe(...paths: string[]): Uri { + return this; + } + + override getPath(): string { + return ''; + } + + override getFilePath(): string { + return ''; + } + + override stripExtension(): Uri { + return 
this; + } + + override stripAllExtensions(): Uri { + return this; + } + + protected override getRootPath(): string { + return ''; + } + + protected override getComparablePath(): string { + return ''; + } + + protected override getPathComponentsImpl(): string[] { + return []; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/uri/emptyUri.ts b/python-parser/packages/pyright-internal/src/common/uri/emptyUri.ts new file mode 100644 index 00000000..f3f86ff2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/uri/emptyUri.ts @@ -0,0 +1,42 @@ +/* + * emptyUri.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * URI class that represents an empty URI. + */ + +import { JsonObjType } from './baseUri'; +import { ConstantUri } from './constantUri'; + +const EmptyKey = ''; + +export class EmptyUri extends ConstantUri { + private static _instance = new EmptyUri(); + + private constructor() { + super(EmptyKey); + } + + static get instance() { + return EmptyUri._instance; + } + + override toJsonObj(): JsonObjType { + return { + _key: EmptyKey, + }; + } + + static isEmptyUri(uri: any): uri is EmptyUri { + return uri?._key === EmptyKey; + } + + override isEmpty(): boolean { + return true; + } + + override toString(): string { + return ''; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/uri/fileUri.ts b/python-parser/packages/pyright-internal/src/common/uri/fileUri.ts new file mode 100644 index 00000000..2ef91967 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/uri/fileUri.ts @@ -0,0 +1,301 @@ +/* + * fileUri.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * URI class that represents a file path. These URIs are always 'file' schemed. 
+ */ + +import { URI } from 'vscode-uri'; +import { isArray } from '../core'; +import { + ensureTrailingDirectorySeparator, + getDirectoryPath, + getFileExtension, + getFileName, + getPathComponents, + getPathSeparator, + getRootLength, + hasTrailingDirectorySeparator, + isDiskPathRoot, + resolvePaths, + stripFileExtension, +} from '../pathUtils'; +import { BaseUri, JsonObjType } from './baseUri'; +import { cacheMethodWithNoArgs, cacheProperty, cacheStaticFunc } from './memoization'; +import { SerializedType, Uri, UriKinds } from './uri'; + +type FileUriSerializedType = [0, string, string, string, string | undefined, 1 | 0]; + +export const FileUriSchema = 'file'; + +export class FileUri extends BaseUri { + private _formattedString: string | undefined; + private _normalizedPath: string | undefined; + private static _separator = getPathSeparator(''); + protected constructor( + key: string, + private readonly _filePath: string, + private readonly _query: string, + private readonly _fragment: string, + private readonly _originalString: string | undefined, + private readonly _isCaseSensitive: boolean + ) { + super(_isCaseSensitive ? 
key : key.toLowerCase()); + } + + override get scheme(): string { + return FileUriSchema; + } + + get fragment(): string { + return this._fragment; + } + + get query(): string { + return this._query; + } + + @cacheProperty() + override get fileName(): string { + return getFileName(this._filePath); + } + + @cacheProperty() + override get lastExtension(): string { + return getFileExtension(this._filePath); + } + + @cacheProperty() + override get root(): Uri { + const rootPath = this.getRootPath(); + if (rootPath !== this._filePath) { + return FileUri.createFileUri(rootPath, '', '', undefined, this._isCaseSensitive); + } + return this; + } + + get isCaseSensitive(): boolean { + return this._isCaseSensitive; + } + + @cacheStaticFunc() + static createFileUri( + filePath: string, + query: string, + fragment: string, + originalString: string | undefined, + isCaseSensitive: boolean + ): FileUri { + filePath = isDiskPathRoot(filePath) ? ensureTrailingDirectorySeparator(filePath) : filePath; + + const key = FileUri._createKey(filePath, query, fragment); + return new FileUri(key, filePath, query, fragment, originalString, isCaseSensitive); + } + + static isFileUri(uri: any): uri is FileUri { + return uri?._filePath !== undefined && uri?._key !== undefined; + } + + static fromJsonObj(obj: FileUri | SerializedType) { + if (isArray(obj)) { + const so = obj as FileUriSerializedType; + return FileUri.createFileUri(so[1], so[2], so[3], so[4], so[5] === 1 ? true : false); + } + + return FileUri.createFileUri( + obj._filePath, + obj._query, + obj._fragment, + obj._originalString, + obj._isCaseSensitive + ); + } + + toJsonObj(): JsonObjType { + const jsonObj: SerializedType = [ + UriKinds.file, + this._filePath, + this._query, + this._fragment, + this._originalString, + this._isCaseSensitive ? 1 : 0, + ]; + return jsonObj; + } + + override matchesRegex(regex: RegExp): boolean { + // Compare the regex to our path but normalize it for comparison. 
+ // The regex assumes it's comparing itself to a URI path. + return regex.test(this._getNormalizedPath()); + } + + override toString(): string { + if (!this._formattedString) { + this._formattedString = + this._originalString || + URI.file(this._filePath).with({ query: this._query, fragment: this._fragment }).toString(); + } + return this._formattedString; + } + + override toUserVisibleString(): string { + return this._filePath; + } + + override addPath(extra: string): Uri { + return FileUri.createFileUri(this._filePath + extra, '', '', undefined, this._isCaseSensitive); + } + + override isRoot(): boolean { + return isDiskPathRoot(this._filePath); + } + + override isChild(parent: Uri): boolean { + if (!FileUri.isFileUri(parent)) { + return false; + } + + return parent._filePath.length < this._filePath.length && this.startsWith(parent); + } + + override isLocal(): boolean { + return true; + } + + override startsWith(other: Uri | undefined): boolean { + if (other?.scheme !== this.scheme) { + return false; + } + const otherFileUri = other as FileUri; + if (this._filePath.length >= otherFileUri._filePath.length) { + // Make sure the other ends with a / when comparing longer paths, otherwise we might + // say that /a/food is a child of /a/foo. + const otherPath = + this._filePath.length > otherFileUri._filePath.length && + !hasTrailingDirectorySeparator(otherFileUri._filePath) + ? ensureTrailingDirectorySeparator(otherFileUri._filePath) + : otherFileUri._filePath; + + if (!this.isCaseSensitive) { + return this._filePath.toLowerCase().startsWith(otherPath.toLowerCase()); + } + return this._filePath.startsWith(otherPath); + } + return false; + } + + override getPathLength(): number { + return this._filePath.length; + } + + override getPath(): string { + return this._getNormalizedPath(); + } + + override getFilePath(): string { + return this._filePath; + } + + override resolvePaths(...paths: string[]): Uri { + // Resolve and combine paths, never want URIs with '..' 
in the middle. + let combined = resolvePaths(this._filePath, ...paths); + + // Make sure to remove any trailing directory chars. + if (hasTrailingDirectorySeparator(combined) && combined.length > 1) { + combined = combined.slice(0, combined.length - 1); + } + if (combined !== this._filePath) { + return FileUri.createFileUri(combined, '', '', undefined, this._isCaseSensitive); + } + return this; + } + + override combinePaths(...paths: string[]): Uri { + if (paths.some((p) => p.includes('..') || p.includes(FileUri._separator) || p.includes('/') || p === '.')) { + // This is a slow path that handles paths that contain '..' or '.'. + return this.resolvePaths(...paths); + } + + // Paths don't have any thing special that needs to be combined differently, so just + // use the quick method. + return this.combinePathsUnsafe(...paths); + } + + override combinePathsUnsafe(...paths: string[]): Uri { + // Combine paths using the quicker path implementation as we + // assume all data is already normalized. 
+ const combined = BaseUri.combinePathElements(this._filePath, FileUri._separator, ...paths); + if (combined !== this._filePath) { + return FileUri.createFileUri(combined, '', '', undefined, this._isCaseSensitive); + } + return this; + } + + @cacheMethodWithNoArgs() + override getDirectory(): Uri { + const filePath = this._filePath; + let dir = getDirectoryPath(filePath); + if (hasTrailingDirectorySeparator(dir) && dir.length > 1) { + dir = dir.slice(0, -1); + } + if (dir !== filePath) { + return FileUri.createFileUri(dir, '', '', undefined, this._isCaseSensitive); + } else { + return this; + } + } + + withFragment(fragment: string): Uri { + return FileUri.createFileUri(this._filePath, this._query, fragment, undefined, this._isCaseSensitive); + } + + withQuery(query: string): Uri { + return FileUri.createFileUri(this._filePath, query, this._fragment, undefined, this._isCaseSensitive); + } + + override stripExtension(): Uri { + const stripped = stripFileExtension(this._filePath); + if (stripped !== this._filePath) { + return FileUri.createFileUri(stripped, this._query, this._fragment, undefined, this._isCaseSensitive); + } + return this; + } + + override stripAllExtensions(): Uri { + const stripped = stripFileExtension(this._filePath, /* multiDotExtension */ true); + if (stripped !== this._filePath) { + return FileUri.createFileUri(stripped, this._query, this._fragment, undefined, this._isCaseSensitive); + } + return this; + } + + protected override getPathComponentsImpl(): string[] { + const components = getPathComponents(this._filePath); + // Remove the first one if it's empty. The new algorithm doesn't + // expect this to be there. 
+ if (components.length > 0 && components[0] === '') { + components.shift(); + } + return components.map((component) => this.normalizeSlashes(component)); + } + + protected override getRootPath(): string { + return this._filePath.slice(0, getRootLength(this._filePath)); + } + + protected override getComparablePath(): string { + return this._getNormalizedPath(); + } + + private static _createKey(filePath: string, query: string, fragment: string) { + return `${filePath}${query ? '?' + query : ''}${fragment ? '#' + fragment : ''}`; + } + + private _getNormalizedPath(): string { + if (this._normalizedPath === undefined) { + this._normalizedPath = this.normalizeSlashes(this._filePath); + } + return this._normalizedPath; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/uri/memoization.ts b/python-parser/packages/pyright-internal/src/common/uri/memoization.ts new file mode 100644 index 00000000..eba915d4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/uri/memoization.ts @@ -0,0 +1,85 @@ +/* + * memoization.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Decorators used to memoize the result of a function call. + */ + +// Cache for static method results with LRU eviction. +const staticCache = new Map(); + +// Max number of static method values that are cached. +const maxStaticCacheEntries = 256; + +// Caches the results of a getter property. +export function cacheProperty() { + return function (target: any, functionName: string, descriptor: PropertyDescriptor) { + const originalMethod = descriptor.get; + descriptor.get = function (this: any, ...args: any) { + // Call the function once to get the result. + const result = originalMethod!.apply(this, args); + + // Then we replace the original function with one that just returns the result. 
+ Object.defineProperty(this, functionName, { + get() { + return result; + }, + }); + return result; + }; + return descriptor; + }; +} + +// Caches the results of method that takes no args. +// This situation can be optimized because the parameters are always the same. +export function cacheMethodWithNoArgs() { + return function (target: any, functionName: string, descriptor: PropertyDescriptor) { + const originalMethod = descriptor.value; + descriptor.value = function (this: any, ...args: any) { + // Call the function once to get the result. + const result = originalMethod.apply(this, args); + + // Then we replace the original function with one that just returns the result. + this[functionName] = () => { + // Note that this poses a risk. The result is passed by reference, so if the caller + // modifies the result, it will modify the cached result. + return result; + }; + return result; + }; + return descriptor; + }; +} + +// Create a decorator to cache the results of a static method. +export function cacheStaticFunc() { + return function cacheStaticFunc_Fast(target: any, functionName: string, descriptor: PropertyDescriptor) { + const originalMethod = descriptor.value; + descriptor.value = function (...args: any) { + const key = `${functionName}+${args?.map((a: any) => a?.toString()).join(',')}`; + if (staticCache.has(key)) { + // Promote to most-recently used by re-inserting. + const value = staticCache.get(key); + staticCache.delete(key); + staticCache.set(key, value); + return value; + } + + // Miss: compute and insert, evict LRU if over capacity. + const result = originalMethod.apply(this, args); + + if (staticCache.size >= maxStaticCacheEntries) { + // Remove least-recently used (the first key in insertion order). 
+ const lruKey = staticCache.keys().next().value as string | undefined; + if (lruKey !== undefined) { + staticCache.delete(lruKey); + } + } + staticCache.set(key, result); + return result; + }; + return descriptor; + }; +} diff --git a/python-parser/packages/pyright-internal/src/common/uri/uri.ts b/python-parser/packages/pyright-internal/src/common/uri/uri.ts new file mode 100644 index 00000000..6efbf2be --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/uri/uri.ts @@ -0,0 +1,226 @@ +/* + * uri.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * URI namespace for storing and manipulating URIs. + */ + +import { URI, Utils } from 'vscode-uri'; +import { CaseSensitivityDetector } from '../caseSensitivityDetector'; +import { isArray } from '../core'; +import { combinePaths, isRootedDiskPath, normalizeSlashes } from '../pathUtils'; +import { ServiceKeys } from '../serviceKeys'; +import { ServiceKey } from '../serviceProvider'; +import { JsonObjType } from './baseUri'; +import { ConstantUri } from './constantUri'; +import { EmptyUri } from './emptyUri'; +import { FileUri, FileUriSchema } from './fileUri'; +import { WebUri } from './webUri'; + +export const enum UriKinds { + file, + web, + empty, +} + +export type SerializedType = [UriKinds, ...any[]]; + +// Re-export Uri interface from uriInterface.ts to maintain backward compatibility +// Use interface merging instead of direct export to avoid conflict with Uri namespace +import type { Uri as UriInterface } from './uriInterface'; +export interface Uri extends UriInterface {} + +const _dosPathRegex = /^\/[a-zA-Z]:\//; +const _win32NormalizationRegex = /\//g; + +// Returns just the fsPath path portion of a vscode URI. +function getFilePath(uri: URI): string { + let filePath: string | undefined; + + // Compute the file path ourselves. The vscode.URI class doesn't + // treat UNC shares with a single slash as UNC paths. 
+ // https://github.com/microsoft/vscode-uri/blob/53e4ca6263f2e4ddc35f5360c62bc1b1d30f27dd/src/uri.ts#L567 + if (uri.authority && uri.path[0] === '/' && uri.path.length === 1) { + filePath = `//${uri.authority}${uri.path}`; + } else { + // Otherwise use the vscode.URI version + filePath = uri.fsPath; + } + + // If this is a DOS-style path with a drive letter, remove + // the leading slash. + if (filePath.match(_dosPathRegex)) { + filePath = filePath.slice(1); + } + + // vscode.URI normalizes the path to use the correct path separators. + // We need to do the same. + if (process?.platform === 'win32') { + filePath = filePath.replace(_win32NormalizationRegex, '\\'); + } + + return filePath; +} + +// Function called to normalize input URIs. This gets rid of '..' and '.' in the path. +// It also removes any '/' on the end of the path. +// This is slow but should only be called when the URI is first created. +function normalizeUri(uri: string | URI): { uri: URI; str: string } { + // Make sure the drive letter is lower case. This + // is consistent with what VS code does for URIs. + const parsed = URI.isUri(uri) ? uri : URI.parse(uri); + + // Original URI may not have resolved all the `..` in the path, so remove them. + // Note: this also has the effect of removing any trailing slashes. + const finalURI = parsed.path.length > 0 ? 
Utils.resolvePath(parsed) : parsed; + const finalString = finalURI.toString(); + return { uri: finalURI, str: finalString }; +} + +const windowsUriRegEx = /^[a-zA-Z]:\\?/; +const uriRegEx = /^[a-zA-Z][a-zA-Z0-9+.-]*:\/?\/?/; + +export namespace Uri { + export interface IServiceProvider { + get(key: ServiceKey): T; + } + + export function maybeUri(value: string) { + return uriRegEx.test(value) && !windowsUriRegEx.test(value); + } + + export function create(value: string, serviceProvider: IServiceProvider, checkRelative?: boolean): Uri; + export function create( + value: string, + caseSensitivityDetector: CaseSensitivityDetector, + checkRelative?: boolean + ): Uri; + export function create(value: string, arg: IServiceProvider | CaseSensitivityDetector, checkRelative = false): Uri { + arg = CaseSensitivityDetector.is(arg) ? arg : arg.get(ServiceKeys.caseSensitivityDetector); + + if (maybeUri(value)) { + return parse(value, arg); + } + + return file(value, arg, checkRelative); + } + + export function file(path: string, serviceProvider: IServiceProvider, checkRelative?: boolean): Uri; + export function file(path: string, caseSensitivityDetector: CaseSensitivityDetector, checkRelative?: boolean): Uri; + export function file(path: string, arg: IServiceProvider | CaseSensitivityDetector, checkRelative = false): Uri { + arg = CaseSensitivityDetector.is(arg) ? arg : arg.get(ServiceKeys.caseSensitivityDetector); + + // Fix path if we're checking for relative paths and this is not a rooted path. + path = checkRelative && !isRootedDiskPath(path) ? combinePaths(process.cwd(), path) : path; + + // If this already starts with 'file:', then we can + // parse it normally. It's actually a uri string. Otherwise parse it as a file path. + const normalized = path.startsWith('file:') + ? normalizeUri(path) + : normalizeUri(URI.file(normalizeSlashes(path))); + + // Turn the path into a file URI. 
+ return FileUri.createFileUri( + getFilePath(normalized.uri), + normalized.uri.query, + normalized.uri.fragment, + normalized.str, + arg.isCaseSensitive(normalized.str) + ); + } + + export function parse(uriStr: string | undefined, serviceProvider: IServiceProvider): Uri; + export function parse(uriStr: string | undefined, caseSensitivityDetector: CaseSensitivityDetector): Uri; + export function parse(uriStr: string | undefined, arg: IServiceProvider | CaseSensitivityDetector): Uri { + if (!uriStr) { + return Uri.empty(); + } + + arg = CaseSensitivityDetector.is(arg) ? arg : arg.get(ServiceKeys.caseSensitivityDetector); + + // Normalize the value here. This gets rid of '..' and '.' in the path. It also removes any + // '/' on the end of the path. + const normalized = normalizeUri(uriStr); + if (normalized.uri.scheme === FileUriSchema) { + return FileUri.createFileUri( + getFilePath(normalized.uri), + normalized.uri.query, + normalized.uri.fragment, + normalized.str, + arg.isCaseSensitive(normalized.str) + ); + } + + // Web URIs are always case sensitive. + return WebUri.createWebUri( + normalized.uri.scheme, + normalized.uri.authority, + normalized.uri.path, + normalized.uri.query, + normalized.uri.fragment, + normalized.str + ); + } + + export function constant(markerName: string): Uri { + return new ConstantUri(markerName); + } + + export function empty(): Uri { + return EmptyUri.instance; + } + + // Excel's copy of tests\harness\vfs\pathValidation.ts knows about this constant. + // If the value is changed, the Excel team should be told. + export const DefaultWorkspaceRootComponent = ''; + export const DefaultWorkspaceRootPath = `/${DefaultWorkspaceRootComponent}`; + + export function defaultWorkspace(serviceProvider: IServiceProvider): Uri; + export function defaultWorkspace(caseSensitivityDetector: CaseSensitivityDetector): Uri; + export function defaultWorkspace(arg: IServiceProvider | CaseSensitivityDetector): Uri { + arg = CaseSensitivityDetector.is(arg) ? 
arg : arg.get(ServiceKeys.caseSensitivityDetector); + return Uri.file(DefaultWorkspaceRootPath, arg); + } + + export function fromJsonObj(jsonObj: JsonObjType) { + if (isArray(jsonObj)) { + // Currently only file uri supports SerializedType. + switch (jsonObj[0]) { + case UriKinds.file: + return FileUri.fromJsonObj(jsonObj); + } + } + + if (FileUri.isFileUri(jsonObj)) { + return FileUri.fromJsonObj(jsonObj); + } + if (WebUri.isWebUri(jsonObj)) { + return WebUri.fromJsonObj(jsonObj); + } + if (EmptyUri.isEmptyUri(jsonObj)) { + return EmptyUri.instance; + } + return jsonObj; + } + + export function is(thing: any): thing is Uri { + return !!thing && typeof thing._key === 'string'; + } + + export function isEmpty(uri: Uri | undefined): boolean { + return !uri || uri.isEmpty(); + } + + export function equals(a: Uri | undefined, b: Uri | undefined): boolean { + if (a === b) { + return true; + } + + return a?.equals(b) ?? false; + } + + export function isDefaultWorkspace(uri: Uri) { + return uri.fileName.includes(DefaultWorkspaceRootComponent); + } +} diff --git a/python-parser/packages/pyright-internal/src/common/uri/uriInterface.ts b/python-parser/packages/pyright-internal/src/common/uri/uriInterface.ts new file mode 100644 index 00000000..bafaf553 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/uri/uriInterface.ts @@ -0,0 +1,101 @@ +/* + * uriInterface.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * URI interface definition (extracted to break circular dependency). + */ + +export interface Uri { + // Unique key for storing in maps. + readonly key: string; + + // Returns the scheme of the URI. + readonly scheme: string; + + // Returns the last segment of the URI, similar to the UNIX basename command. + readonly fileName: string; + + // Returns the extension of the URI, similar to the UNIX extname command. This includes '.' on the extension. 
+ readonly lastExtension: string; + + // Returns a URI where the path just contains the root folder. + readonly root: Uri; + + // Returns a URI where the path contains the directory name with .py appended. + readonly packageUri: Uri; + + // Returns a URI where the path contains the directory name with .pyi appended. + readonly packageStubUri: Uri; + + // Returns a URI where the path has __init__.py appended. + readonly initPyUri: Uri; + + // Returns a URI where the path has __init__.pyi appended. + readonly initPyiUri: Uri; + + // Returns a URI where the path has py.typed appended. + readonly pytypedUri: Uri; + + // Returns the filename without any extensions + readonly fileNameWithoutExtensions: string; + + // Indicates if the underlying file system for this URI is case sensitive or not. + // This should never be used to create another Uri. + // Use `CaseSensitivityDetector` when creating new Uri using `Uri.parse/file` + readonly isCaseSensitive: boolean; + + // Returns the fragment part of a URI. + readonly fragment: string; + + // Returns the query part of a URI. + readonly query: string; + + isEmpty(): boolean; + toString(): string; + toUserVisibleString(): string; + // Determines whether a path consists only of a path root. + isRoot(): boolean; + // Determines whether a Uri is a child of some parent Uri. Meaning the parent Uri is a prefix of this Uri. + isChild(parent: Uri): boolean; + isLocal(): boolean; + isUntitled(): boolean; + equals(other: Uri | undefined): boolean; + // Returns true if the `other` is the parent of `this`. Meaning `other` is a prefix of `this`. + startsWith(other: Uri | undefined): boolean; + pathStartsWith(name: string): boolean; + pathEndsWith(name: string): boolean; + pathIncludes(include: string): boolean; + matchesRegex(regex: RegExp): boolean; + addPath(extra: string): Uri; + // Returns a URI where the path is the directory name of the original URI, similar to the UNIX dirname command. 
+ getDirectory(): Uri; + getRootPathLength(): number; + // How long the path for this Uri is. + getPathLength(): number; + // Combines paths with the URI and resolves any relative paths. This should be used for combining paths with user input. + // Input can be of the form `.` or `./` or `../` or `../foo` or `foo/bar` or `/foo/bar` or `c:\foo\bar` or `file:///foo/bar` + // Meaning relative or rooted paths are allowed. + resolvePaths(...paths: string[]): Uri; + // Combines paths with the URI and resolves any relative paths. When the paths contain separators or '..', this will + // use resolvePaths to combine the paths. Otherwise it calls the quicker version. + combinePaths(...paths: string[]): Uri; + // Combines paths with the URI and DOES NOT resolve any '..' or '.' in the path. + // This should only be used when the input is known to be relative and contains no separators (as separators are not normalized) + combinePathsUnsafe(...paths: string[]): Uri; + getRelativePath(child: Uri): string | undefined; + getPathComponents(): readonly string[]; + getPath(): string; + getFilePath(): string; + getRelativePathComponents(to: Uri): readonly string[]; + getShortenedFileName(maxDirLength?: number): string; + stripExtension(): Uri; + stripAllExtensions(): Uri; + replaceExtension(ext: string): Uri; + addExtension(ext: string): Uri; + hasExtension(ext: string): boolean; + containsExtension(ext: string): boolean; + withFragment(fragment: string): Uri; + withQuery(query: string): Uri; + toJsonObj(): any; +} diff --git a/python-parser/packages/pyright-internal/src/common/uri/uriMap.ts b/python-parser/packages/pyright-internal/src/common/uri/uriMap.ts new file mode 100644 index 00000000..1ab0580d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/uri/uriMap.ts @@ -0,0 +1,79 @@ +/* + * uriMap.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Map specifically made to support a URI as a key. 
+ */ + +import { Uri } from './uri'; + +export class UriMap implements Map { + private _keys = new Map(); + private _values = new Map(); + + get size(): number { + return this._values.size; + } + get [Symbol.toStringTag](): string { + return ''; + } + clear(): void { + this._keys.clear(); + this._values.clear(); + } + forEach(callbackfn: (value: T, key: Uri, map: Map) => void, thisArg?: any): void { + this._keys.forEach((v, k) => { + callbackfn(this._values.get(k)!, v, this); + }); + } + values(): IterableIterator { + return this._values.values(); + } + [Symbol.iterator](): IterableIterator<[Uri, T]> { + return this.entries(); + } + get(key: Uri | undefined): T | undefined { + return key ? this._values.get(key.key) : undefined; + } + + set(key: Uri | undefined, value: T): this { + if (key) { + this._keys.set(key.key, key); + this._values.set(key.key, value); + } + return this; + } + + has(key: Uri): boolean { + return this._values.has(key.key); + } + + delete(key: Uri): boolean { + this._keys.delete(key.key); + return this._values.delete(key.key); + } + + entries(): IterableIterator<[Uri, T]> { + const keys = this._keys.entries(); + const values = this._values.entries(); + + return new (class implements IterableIterator<[Uri, T]> { + [Symbol.iterator](): IterableIterator<[Uri, T]> { + return this; + } + next(...args: [] | [undefined]): IteratorResult<[Uri, T], any> { + const key = keys.next(); + const value = values.next(); + if (key.done || value.done) { + return { done: true, value: undefined }; + } + return { done: false, value: [key.value[1], value.value[1]] }; + } + })(); + } + + keys(): IterableIterator { + return this._keys.values(); + } +} diff --git a/python-parser/packages/pyright-internal/src/common/uri/uriUtils.ts b/python-parser/packages/pyright-internal/src/common/uri/uriUtils.ts new file mode 100644 index 00000000..b421a4ed --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/uri/uriUtils.ts @@ -0,0 +1,426 @@ +/* + * uriUtils.ts + * 
Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Utility functions for manipulating URIs. + */ + +import type { Dirent } from 'fs'; + +import { CaseSensitivityDetector } from '../caseSensitivityDetector'; +import { FileSystem, ReadOnlyFileSystem, Stats } from '../fileSystem'; +import { + getRegexEscapedSeparator, + isDirectoryWildcardPatternPresent, + stripTrailingDirectorySeparator, +} from '../pathUtils'; +import { ServiceKeys } from '../serviceKeys'; +import { ServiceProvider } from '../serviceProvider'; +import { Uri } from './uri'; + +export interface FileSpec { + // File specs can contain wildcard characters (**, *, ?). This + // specifies the first portion of the file spec that contains + // no wildcards. + wildcardRoot: Uri; + + // Regular expression that can be used to match against this + // file spec. + regExp: RegExp; + + // Indicates whether the file spec has a directory wildcard (**). + // When present, the search cannot terminate without exploring to + // an arbitrary depth. + hasDirectoryWildcard: boolean; +} + +const _includeFileRegex = /\.pyi?$/; +const _wildcardRegex = /[*?]/; + +export namespace FileSpec { + export function is(value: any): value is FileSpec { + const candidate: FileSpec = value as FileSpec; + return candidate && !!candidate.wildcardRoot && !!candidate.regExp; + } + export function isInPath(uri: Uri, paths: FileSpec[]) { + return !!paths.find((p) => uri.matchesRegex(p.regExp)); + } + + export function matchesIncludeFileRegex(uri: Uri, isFile = true) { + return isFile ? 
uri.matchesRegex(_includeFileRegex) : true; + } + + export function matchIncludeFileSpec(includeRegExp: RegExp, exclude: FileSpec[], uri: Uri, isFile = true) { + if (uri.matchesRegex(includeRegExp)) { + if (!FileSpec.isInPath(uri, exclude) && FileSpec.matchesIncludeFileRegex(uri, isFile)) { + return true; + } + } + + return false; + } +} + +export interface FileSystemEntries { + files: Uri[]; + directories: Uri[]; +} + +export function forEachAncestorDirectory( + directory: Uri, + callback: (directory: Uri) => Uri | undefined +): Uri | undefined { + while (true) { + const result = callback(directory); + if (result !== undefined) { + return result; + } + + const parentPath = directory.getDirectory(); + if (parentPath.equals(directory)) { + return undefined; + } + + directory = parentPath; + } +} + +// Creates a directory hierarchy for a path, starting from some ancestor path. +export function makeDirectories(fs: FileSystem, dir: Uri, startingFrom: Uri) { + if (!dir.startsWith(startingFrom)) { + return; + } + + const pathComponents = dir.getPathComponents(); + const relativeToComponents = startingFrom.getPathComponents(); + let curPath = startingFrom; + + for (let i = relativeToComponents.length; i < pathComponents.length; i++) { + curPath = curPath.combinePaths(pathComponents[i]); + if (!fs.existsSync(curPath)) { + fs.mkdirSync(curPath); + } + } +} + +export function getFileSize(fs: ReadOnlyFileSystem, uri: Uri) { + const stat = tryStat(fs, uri); + if (stat?.isFile()) { + return stat.size; + } + return 0; +} + +export function fileExists(fs: ReadOnlyFileSystem, uri: Uri): boolean { + return fileSystemEntryExists(fs, uri, FileSystemEntryKind.File); +} + +export function directoryExists(fs: ReadOnlyFileSystem, uri: Uri): boolean { + return fileSystemEntryExists(fs, uri, FileSystemEntryKind.Directory); +} + +export function isDirectory(fs: ReadOnlyFileSystem, uri: Uri): boolean { + return tryStat(fs, uri)?.isDirectory() ?? 
false; +} + +export function isFile(fs: ReadOnlyFileSystem, uri: Uri, treatZipDirectoryAsFile = false): boolean { + const stats = tryStat(fs, uri); + if (stats?.isFile()) { + return true; + } + + if (!treatZipDirectoryAsFile) { + return false; + } + + return stats?.isZipDirectory?.() ?? false; +} + +export function tryStat(fs: ReadOnlyFileSystem, uri: Uri): Stats | undefined { + try { + if (fs.existsSync(uri)) { + return fs.statSync(uri); + } + } catch (e: any) { + return undefined; + } + return undefined; +} + +export function tryRealpath(fs: ReadOnlyFileSystem, uri: Uri): Uri | undefined { + try { + return fs.realpathSync(uri); + } catch (e: any) { + return undefined; + } +} + +export function getFileSystemEntries(fs: ReadOnlyFileSystem, uri: Uri): FileSystemEntries { + try { + return getFileSystemEntriesFromDirEntries(fs.readdirEntriesSync(uri), fs, uri); + } catch (e: any) { + return { files: [], directories: [] }; + } +} + +// Sorts the entires into files and directories, including any symbolic links. +export function getFileSystemEntriesFromDirEntries( + dirEntries: Iterable, + fs: ReadOnlyFileSystem, + uri: Uri +): FileSystemEntries { + const entries = Array.isArray(dirEntries) ? dirEntries.slice() : Array.from(dirEntries); + entries.sort((a, b) => { + if (a.name < b.name) { + return -1; + } else if (a.name > b.name) { + return 1; + } else { + return 0; + } + }); + const files: Uri[] = []; + const directories: Uri[] = []; + for (const entry of entries) { + // This is necessary because on some file system node fails to exclude + // "." and "..". See https://github.com/nodejs/node/issues/4002 + if (entry.name === '.' 
|| entry.name === '..') { + continue; + } + + const entryUri = uri.combinePaths(entry.name); + if (entry.isFile()) { + files.push(entryUri); + } else if (entry.isDirectory()) { + directories.push(entryUri); + } else if (entry.isSymbolicLink()) { + const stat = tryStat(fs, entryUri); + if (stat?.isFile()) { + files.push(entryUri); + } else if (stat?.isDirectory()) { + directories.push(entryUri); + } + } + } + return { files, directories }; +} + +// Transforms a relative file spec (one that potentially contains +// escape characters **, * or ?) and returns a regular expression +// that can be used for matching against. +export function getWildcardRegexPattern(root: Uri, fileSpec: string): string { + const absolutePath = root.resolvePaths(fileSpec); + const pathComponents = Array.from(absolutePath.getPathComponents()); + const escapedSeparator = getRegexEscapedSeparator('/'); + const doubleAsteriskRegexFragment = `(${escapedSeparator}[^${escapedSeparator}][^${escapedSeparator}]*)*?`; + const reservedCharacterPattern = new RegExp(`[^\\w\\s${escapedSeparator}]`, 'g'); + + // Strip the directory separator from the root component. + if (pathComponents.length > 0) { + pathComponents[0] = stripTrailingDirectorySeparator(pathComponents[0]); + } + + let regExPattern = ''; + let firstComponent = true; + + for (let component of pathComponents) { + if (component === '**') { + regExPattern += doubleAsteriskRegexFragment; + } else { + if (!firstComponent) { + component = escapedSeparator + component; + } + + regExPattern += component.replace(reservedCharacterPattern, (match) => { + if (match === '*') { + return `[^${escapedSeparator}]*`; + } else if (match === '?') { + return `[^${escapedSeparator}]`; + } else { + // escaping anything that is not reserved characters - word/space/separator + return '\\' + match; + } + }); + + firstComponent = false; + } + } + + return regExPattern; +} + +// Returns the topmost path that contains no wildcard characters. 
+export function getWildcardRoot(root: Uri, fileSpec: string): Uri { + const absolutePath = root.resolvePaths(fileSpec); + // make a copy of the path components so we can modify them. + const pathComponents = Array.from(absolutePath.getPathComponents()); + let wildcardRoot = absolutePath.root; + + // Remove the root component. + if (pathComponents.length > 0) { + pathComponents.shift(); + } + + for (const component of pathComponents) { + if (component === '**') { + break; + } else { + if (_wildcardRegex.test(component)) { + break; + } + + wildcardRoot = wildcardRoot.resolvePaths(component); + } + } + + return wildcardRoot; +} + +export function hasPythonExtension(uri: Uri) { + return uri.hasExtension('.py') || uri.hasExtension('.pyi'); +} + +export function getFileSpec(root: Uri, fileSpec: string): FileSpec { + let regExPattern = getWildcardRegexPattern(root, fileSpec); + const escapedSeparator = getRegexEscapedSeparator('/'); + regExPattern = `^(${regExPattern})($|${escapedSeparator})`; + + const regExp = new RegExp(regExPattern, root.isCaseSensitive ? 
undefined : 'i'); + const wildcardRoot = getWildcardRoot(root, fileSpec); + const hasDirectoryWildcard = isDirectoryWildcardPatternPresent(fileSpec); + + return { + wildcardRoot, + regExp, + hasDirectoryWildcard, + }; +} + +const enum FileSystemEntryKind { + File, + Directory, +} + +function fileSystemEntryExists(fs: ReadOnlyFileSystem, uri: Uri, entryKind: FileSystemEntryKind): boolean { + try { + const stat = fs.statSync(uri); + switch (entryKind) { + case FileSystemEntryKind.File: + return stat.isFile(); + case FileSystemEntryKind.Directory: + return stat.isDirectory(); + default: + return false; + } + } catch (e: any) { + return false; + } +} + +export function getDirectoryChangeKind( + fs: ReadOnlyFileSystem, + oldDirectory: Uri, + newDirectory: Uri +): 'Same' | 'Renamed' | 'Moved' { + if (oldDirectory.equals(newDirectory)) { + return 'Same'; + } + + const relativePaths = oldDirectory.getRelativePathComponents(newDirectory); + + // 2 means only last folder name has changed. + if (relativePaths.length === 2 && relativePaths[0] === '..' && relativePaths[1] !== '..') { + return 'Renamed'; + } + + return 'Moved'; +} + +export function deduplicateFolders(listOfFolders: Uri[][], excludes: Uri[] = []): Uri[] { + const foldersToWatch = new Map(); + + listOfFolders.forEach((folders) => { + folders.forEach((p) => { + if (foldersToWatch.has(p.key)) { + // Bail out on exact match. + return; + } + + for (const exclude of excludes) { + if (p.startsWith(exclude)) { + return; + } + } + + for (const existing of foldersToWatch) { + // ex) p: "/user/test" existing: "/user" + if (p.startsWith(existing[1])) { + // We already have the parent folder in the watch list + return; + } + + // ex) p: "/user" folderToWatch: "/user/test" + if (existing[1].startsWith(p)) { + // We found better one to watch. replace. 
+ foldersToWatch.delete(existing[0]); + foldersToWatch.set(p.key, p); + return; + } + } + + foldersToWatch.set(p.key, p); + }); + }); + + return [...foldersToWatch.values()]; +} + +export function getRootUri(serviceProvider: ServiceProvider): Uri | undefined; +export function getRootUri(caseDetector: CaseSensitivityDetector): Uri | undefined; +export function getRootUri(csdOrSp: CaseSensitivityDetector | ServiceProvider): Uri | undefined { + csdOrSp = CaseSensitivityDetector.is(csdOrSp) ? csdOrSp : csdOrSp.get(ServiceKeys.caseSensitivityDetector); + + if ((global as any).__rootDirectory) { + return Uri.file((global as any).__rootDirectory, csdOrSp); + } + + return undefined; +} + +export function convertUriToLspUriString(fs: ReadOnlyFileSystem, uri: Uri): string { + // Convert to a URI string that the LSP client understands (mapped files are only local to the server). + return fs.getOriginalUri(uri).toString(); +} + +export namespace UriEx { + export function file(path: string): Uri; + export function file(path: string, isCaseSensitive: boolean, checkRelative?: boolean): Uri; + export function file(path: string, arg?: boolean, checkRelative?: boolean): Uri { + const caseDetector = _getCaseSensitivityDetector(arg); + return Uri.file(path, caseDetector, checkRelative); + } + + export function parse(path: string | undefined): Uri; + export function parse(path: string | undefined, isCaseSensitive: boolean): Uri; + export function parse(value: string | undefined, arg?: boolean): Uri { + const caseDetector = _getCaseSensitivityDetector(arg); + return Uri.parse(value, caseDetector); + } + + const caseSensitivityDetector: CaseSensitivityDetector = { + isCaseSensitive: () => true, + }; + + const caseInsensitivityDetector: CaseSensitivityDetector = { + isCaseSensitive: () => false, + }; + + function _getCaseSensitivityDetector(arg: boolean | undefined) { + if (arg === undefined) { + return caseSensitivityDetector; + } + + return arg ? 
caseSensitivityDetector : caseInsensitivityDetector; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/uri/webUri.ts b/python-parser/packages/pyright-internal/src/common/uri/webUri.ts new file mode 100644 index 00000000..f00386b5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/uri/webUri.ts @@ -0,0 +1,294 @@ +/* + * webUri.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * URI class that represents a URI that isn't 'file' schemed. + * This can be URIs like: + * - http://www.microsoft.com/file.txt + * - untitled:Untitled-1 + * - vscode:extension/ms-python.python + * - vscode-vfs://github.com/microsoft/debugpy/debugpy/launcher/debugAdapter.py + */ + +import { URI } from 'vscode-uri'; +import { getRootLength, hasTrailingDirectorySeparator, resolvePaths } from '../pathUtils'; +import { BaseUri, JsonObjType } from './baseUri'; +import { cacheMethodWithNoArgs, cacheProperty, cacheStaticFunc } from './memoization'; +import { Uri } from './uri'; + +export class WebUri extends BaseUri { + private constructor( + key: string, + private readonly _scheme: string, + private readonly _authority: string, + private readonly _path: string, + private readonly _query: string, + private readonly _fragment: string, + private _originalString: string | undefined + ) { + super(key); + } + + override get scheme(): string { + return this._scheme; + } + + get isCaseSensitive(): boolean { + // Web URIs are always case sensitive + return true; + } + + get fragment(): string { + return this._fragment; + } + + get query(): string { + return this._query; + } + + @cacheProperty() + override get root(): Uri { + const rootPath = this.getRootPath(); + if (rootPath !== this._path) { + return WebUri.createWebUri(this._scheme, this._authority, rootPath, '', '', undefined); + } + return this; + } + + @cacheProperty() + override get fileName(): string { + // Path should already be normalized, just get the last on a split of 
'/'. + const components = this._path.split('/'); + return components[components.length - 1]; + } + + @cacheProperty() + override get lastExtension(): string { + const basename = this.fileName; + const index = basename.lastIndexOf('.'); + if (index >= 0) { + return basename.slice(index); + } + return ''; + } + + @cacheStaticFunc() + static createWebUri( + scheme: string, + authority: string, + path: string, + query: string, + fragment: string, + originalString: string | undefined + ): WebUri { + const key = WebUri._createKey(scheme, authority, path, query, fragment); + return new WebUri(key, scheme, authority, path, query, fragment, originalString); + } + + override toString(): string { + if (!this._originalString) { + const vscodeUri = URI.revive({ + scheme: this._scheme, + authority: this._authority, + path: this._path, + query: this._query, + fragment: this._fragment, + }); + this._originalString = vscodeUri.toString(); + } + return this._originalString; + } + override toUserVisibleString(): string { + return this.toString(); + } + + static isWebUri(uri: any): uri is WebUri { + return uri?._scheme !== undefined && uri?._key !== undefined; + } + + static fromJsonObj(obj: WebUri) { + return WebUri.createWebUri( + obj._scheme, + obj._authority, + obj._path, + obj._query, + obj._fragment, + obj._originalString + ); + } + + toJsonObj(): JsonObjType { + return { + _scheme: this._scheme, + _authority: this._authority, + _path: this._path, + _query: this._query, + _fragment: this._fragment, + _originalString: this._originalString, + _key: this.key, + }; + } + + override matchesRegex(regex: RegExp): boolean { + return regex.test(this._path); + } + + override addPath(extra: string): Uri { + const newPath = this._path + extra; + return WebUri.createWebUri(this._scheme, this._authority, newPath, this._query, this._fragment, undefined); + } + + override isRoot(): boolean { + return this._path === this.getRootPath() && this._path.length > 0; + } + + override isChild(parent: 
Uri): boolean { + if (!WebUri.isWebUri(parent)) { + return false; + } + + return parent._path.length < this._path.length && this.startsWith(parent); + } + + override isLocal(): boolean { + return false; + } + + override startsWith(other: Uri | undefined): boolean { + if (other?.scheme !== this.scheme) { + return false; + } + const otherWebUri = other as WebUri; + if (this._path.length >= otherWebUri._path.length) { + // Make sure the other ends with a / when comparing longer paths, otherwise we might + // say that /a/food is a child of /a/foo. + const otherPath = + this._path.length > otherWebUri._path.length && !hasTrailingDirectorySeparator(otherWebUri._path) + ? `${otherWebUri._path}/` + : otherWebUri._path; + + return this._path.startsWith(otherPath); + } + return false; + } + override getPathLength(): number { + return this._path.length; + } + + override getPath(): string { + return this._path; + } + + override getFilePath(): string { + return ''; // Web URIs don't have file paths so this is always empty. + } + + override resolvePaths(...paths: string[]): Uri { + // Resolve and combine paths, never want URIs with '..' in the middle. + let combined = this.normalizeSlashes(resolvePaths(this._path, ...paths)); + + // Make sure to remove any trailing directory chars. + if (hasTrailingDirectorySeparator(combined) && combined.length > 1) { + combined = combined.slice(0, combined.length - 1); + } + if (combined !== this._path) { + return WebUri.createWebUri(this._scheme, this._authority, combined, '', '', undefined); + } + return this; + } + override combinePaths(...paths: string[]): Uri { + if (paths.some((p) => p.includes('..') || p.includes('/') || p === '.')) { + // This is a slow path that handles paths that contain '..' or '.'. + return this.resolvePaths(...paths); + } + + // Paths don't have any thing special that needs to be combined differently, so just + // use the quick method. 
+ return this.combinePathsUnsafe(...paths); + } + + override combinePathsUnsafe(...paths: string[]): Uri { + // Combine paths using the quick path implementation. + const combined = BaseUri.combinePathElements(this._path, '/', ...paths); + if (combined !== this._path) { + return WebUri.createWebUri(this._scheme, this._authority, combined, '', '', undefined); + } + return this; + } + + @cacheMethodWithNoArgs() + override getDirectory(): Uri { + if (this._path.length === 0) { + return this; + } + + const index = this._path.lastIndexOf('/'); + const newPath = index > 0 ? this._path.slice(0, index) : index === 0 ? '/' : ''; + + return WebUri.createWebUri(this._scheme, this._authority, newPath, this._query, this._fragment, undefined); + } + + withFragment(fragment: string): Uri { + return WebUri.createWebUri(this._scheme, this._authority, this._path, this._query, fragment, undefined); + } + + withQuery(query: string): Uri { + return WebUri.createWebUri(this._scheme, this._authority, this._path, query, this._fragment, undefined); + } + + override stripExtension(): Uri { + const path = this._path; + const index = path.lastIndexOf('.'); + if (index > 0) { + return WebUri.createWebUri( + this._scheme, + this._authority, + path.slice(0, index), + this._query, + this._fragment, + undefined + ); + } + return this; + } + + override stripAllExtensions(): Uri { + const path = this._path; + const sepIndex = path.lastIndexOf('/'); + const index = path.indexOf('.', sepIndex > 0 ? sepIndex : 0); + if (index > 0) { + return WebUri.createWebUri( + this._scheme, + this._authority, + path.slice(0, index), + this._query, + this._fragment, + undefined + ); + } + return this; + } + + protected override getPathComponentsImpl(): string[] { + // Get the root path and the rest of the path components. 
+ const rootPath = this.getRootPath(); + const otherPaths = this._path.slice(rootPath.length).split('/'); + return this.reducePathComponents([rootPath, ...otherPaths]).map((component) => + this.normalizeSlashes(component) + ); + } + + protected override getRootPath(): string { + const rootLength = getRootLength(this._path, '/'); + return this._path.slice(0, rootLength); + } + + protected override getComparablePath(): string { + return this._path; // Should already have the correct '/' + } + + private static _createKey(scheme: string, authority: string, path: string, query: string, fragment: string) { + return `${scheme}:${authority}${path}${query ? '?' + query : ''}${fragment ? '#' + fragment : ''}`; + } +} diff --git a/python-parser/packages/pyright-internal/src/common/workspaceEditUtils.ts b/python-parser/packages/pyright-internal/src/common/workspaceEditUtils.ts new file mode 100644 index 00000000..4b8bdfa3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/common/workspaceEditUtils.ts @@ -0,0 +1,288 @@ +/* + * workspaceEditUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Convert pyright's FileEditActions to LanguageServer's WorkspaceEdits. 
+ */ + +import { + ChangeAnnotation, + CreateFile, + DeleteFile, + RenameFile, + TextDocumentEdit, + TextEdit, + WorkspaceEdit, +} from 'vscode-languageserver'; + +import { TextDocument } from 'vscode-languageserver-textdocument'; +import { AnalyzerService } from '../analyzer/service'; +import { FileEditAction, FileEditActions, TextEditAction } from '../common/editAction'; +import { createMapFromItems } from './collectionUtils'; +import { isArray } from './core'; +import { assertNever } from './debug'; +import { EditableProgram, SourceFileInfo } from './extensibility'; +import { ReadOnlyFileSystem } from './fileSystem'; +import { convertRangeToTextRange, convertTextRangeToRange } from './positionUtils'; +import { TextRange } from './textRange'; +import { TextRangeCollection } from './textRangeCollection'; +import { Uri } from './uri/uri'; +import { convertUriToLspUriString } from './uri/uriUtils'; + +export function convertToTextEdits(editActions: TextEditAction[]): TextEdit[] { + return editActions.map((editAction) => ({ + range: editAction.range, + newText: editAction.replacementText, + })); +} + +export function convertToFileTextEdits(fileUri: Uri, editActions: TextEditAction[]): FileEditAction[] { + return editActions.map((a) => ({ fileUri, ...a })); +} + +export function convertToWorkspaceEdit(fs: ReadOnlyFileSystem, edits: FileEditAction[]): WorkspaceEdit; +export function convertToWorkspaceEdit(fs: ReadOnlyFileSystem, edits: FileEditActions): WorkspaceEdit; +export function convertToWorkspaceEdit( + fs: ReadOnlyFileSystem, + edits: FileEditActions, + changeAnnotations: { + [id: string]: ChangeAnnotation; + }, + defaultAnnotationId: string +): WorkspaceEdit; +export function convertToWorkspaceEdit( + fs: ReadOnlyFileSystem, + edits: FileEditActions | FileEditAction[], + changeAnnotations?: { + [id: string]: ChangeAnnotation; + }, + defaultAnnotationId = 'default' +): WorkspaceEdit { + if (isArray(edits)) { + return _convertToWorkspaceEditWithChanges(fs, 
edits); + } + + return _convertToWorkspaceEditWithDocumentChanges(fs, edits, changeAnnotations, defaultAnnotationId); +} + +export function appendToWorkspaceEdit(fs: ReadOnlyFileSystem, edits: FileEditAction[], workspaceEdit: WorkspaceEdit) { + edits.forEach((edit) => { + const uri = convertUriToLspUriString(fs, edit.fileUri); + workspaceEdit.changes![uri] = workspaceEdit.changes![uri] || []; + workspaceEdit.changes![uri].push({ range: edit.range, newText: edit.replacementText }); + }); +} + +export function applyTextEditsToString( + edits: TextEditAction[], + lines: TextRangeCollection, + originalText: string +) { + const editsWithOffset = edits + .map((e) => ({ + range: convertRangeToTextRange(e.range, lines) ?? { start: originalText.length, length: 0 }, + text: e.replacementText, + })) + .sort((e1, e2) => { + const result = e2.range.start - e1.range.start; + if (result !== 0) { + return result; + } + + return TextRange.getEnd(e2.range) - TextRange.getEnd(e1.range); + }); + + // Apply change in reverse order. + let current = originalText; + for (const change of editsWithOffset) { + current = current.substr(0, change.range.start) + change.text + current.substr(TextRange.getEnd(change.range)); + } + + return current; +} + +export function applyWorkspaceEdit(program: EditableProgram, edits: WorkspaceEdit, filesChanged: Map) { + if (edits.changes) { + for (const kv of Object.entries(edits.changes)) { + const fileUri = Uri.parse(kv[0], program.serviceProvider); + const fileInfo = program.getSourceFileInfo(fileUri); + if (!fileInfo || !fileInfo.isTracked) { + // We don't allow non user file being modified. + continue; + } + + applyDocumentChanges(program, fileInfo, kv[1]); + filesChanged.set(fileUri.key, fileUri); + } + } + + // For now, we don't support annotations. 
+ if (edits.documentChanges) { + for (const change of edits.documentChanges) { + if (TextDocumentEdit.is(change)) { + const fileUri = Uri.parse(change.textDocument.uri, program.serviceProvider); + const fileInfo = program.getSourceFileInfo(fileUri); + if (!fileInfo || !fileInfo.isTracked) { + // We don't allow non user file being modified. + continue; + } + + applyDocumentChanges(program, fileInfo, change.edits.filter((e) => TextEdit.is(e)) as TextEdit[]); + filesChanged.set(fileUri.key, fileUri); + } + + // For now, we don't support other kinds of text changes. + // But if we want to add support for those in future, we should add them here. + } + } +} + +export function applyDocumentChanges(program: EditableProgram, fileInfo: SourceFileInfo, edits: TextEdit[]) { + if (!fileInfo.isOpenByClient) { + const fileContent = fileInfo.contents; + program.setFileOpened(fileInfo.uri, 0, fileContent ?? '', { + isTracked: fileInfo.isTracked, + ipythonMode: fileInfo.ipythonMode, + chainedFileUri: fileInfo.chainedSourceFile?.uri, + }); + } + + const version = fileInfo.clientVersion ?? 0; + const fileUri = fileInfo.uri; + const filePath = fileUri.getFilePath(); + const sourceDoc = TextDocument.create(filePath, 'python', version, fileInfo.contents ?? ''); + + program.setFileOpened(fileUri, version + 1, TextDocument.applyEdits(sourceDoc, edits), { + isTracked: fileInfo.isTracked, + ipythonMode: fileInfo.ipythonMode, + chainedFileUri: fileInfo.chainedSourceFile?.uri, + }); +} + +export function generateWorkspaceEdit( + fs: ReadOnlyFileSystem, + originalService: AnalyzerService, + clonedService: AnalyzerService, + filesChanged: Map +) { + // For now, we won't do text diff to find out minimal text changes. instead, we will + // consider whole text of the files are changed. In future, we could consider + // doing minimal changes using vscode's differ (https://github.com/microsoft/vscode/blob/main/src/vs/base/common/diff/diff.ts) + // to support annotation. 
+ const edits: WorkspaceEdit = { changes: {} }; + + for (const uri of filesChanged.values()) { + const original = originalService.backgroundAnalysisProgram.program.getBoundSourceFile(uri); + const final = clonedService.backgroundAnalysisProgram.program.getBoundSourceFile(uri); + if (!original || !final) { + // Both must exist. + continue; + } + + const parseResults = original.getParseResults(); + if (!parseResults) { + continue; + } + + edits.changes![convertUriToLspUriString(fs, uri)] = [ + { + range: convertTextRangeToRange(parseResults.parserOutput.parseTree, parseResults.tokenizerOutput.lines), + newText: final.getFileContent() ?? '', + }, + ]; + } + + return edits; +} + +function _convertToWorkspaceEditWithChanges(fs: ReadOnlyFileSystem, edits: FileEditAction[]) { + const workspaceEdit: WorkspaceEdit = { + changes: {}, + }; + + appendToWorkspaceEdit(fs, edits, workspaceEdit); + return workspaceEdit; +} + +function _convertToWorkspaceEditWithDocumentChanges( + fs: ReadOnlyFileSystem, + editActions: FileEditActions, + changeAnnotations?: { + [id: string]: ChangeAnnotation; + }, + defaultAnnotationId = 'default' +) { + const workspaceEdit: WorkspaceEdit = { + documentChanges: [], + changeAnnotations: changeAnnotations, + }; + + // Ordering of documentChanges are important. + // Make sure create operation happens before edits. + for (const operation of editActions.fileOperations) { + switch (operation.kind) { + case 'create': + workspaceEdit.documentChanges!.push( + CreateFile.create( + convertUriToLspUriString(fs, operation.fileUri), + /* options */ undefined, + changeAnnotations !== undefined ? defaultAnnotationId : undefined + ) + ); + break; + case 'rename': + case 'delete': + break; + default: + assertNever(operation); + } + } + + // Text edit's file path must refer to original file paths unless it is a new file just created. 
+ const mapPerFile = createMapFromItems(editActions.edits, (e) => convertUriToLspUriString(fs, e.fileUri)); + for (const [uri, value] of mapPerFile) { + workspaceEdit.documentChanges!.push( + TextDocumentEdit.create( + { uri: uri, version: null }, + Array.from( + value.map((v) => ({ + range: v.range, + newText: v.replacementText, + annotationId: changeAnnotations !== undefined ? defaultAnnotationId : undefined, + })) + ) + ) + ); + } + + for (const operation of editActions.fileOperations) { + switch (operation.kind) { + case 'create': + break; + case 'rename': + workspaceEdit.documentChanges!.push( + RenameFile.create( + convertUriToLspUriString(fs, operation.oldFileUri), + convertUriToLspUriString(fs, operation.newFileUri), + /* options */ undefined, + changeAnnotations !== undefined ? defaultAnnotationId : undefined + ) + ); + break; + case 'delete': + workspaceEdit.documentChanges!.push( + DeleteFile.create( + convertUriToLspUriString(fs, operation.fileUri), + /* options */ undefined, + changeAnnotations !== undefined ? defaultAnnotationId : undefined + ) + ); + break; + default: + assertNever(operation); + } + } + + return workspaceEdit; +} diff --git a/python-parser/packages/pyright-internal/src/languageServerBase.ts b/python-parser/packages/pyright-internal/src/languageServerBase.ts new file mode 100644 index 00000000..6addb98c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageServerBase.ts @@ -0,0 +1,1612 @@ +/* + * languageServerBase.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Implements common language server functionality. + * This is split out as a base class to allow for + * different language server variants to be created + * from the same core functionality. 
+ */ + +import './common/extensions'; + +import { + AbstractCancellationTokenSource, + CallHierarchyIncomingCallsParams, + CallHierarchyItem, + CallHierarchyOutgoingCall, + CallHierarchyOutgoingCallsParams, + CallHierarchyPrepareParams, + CancellationToken, + CodeAction, + CodeActionParams, + Command, + CompletionItem, + CompletionList, + CompletionParams, + CompletionTriggerKind, + ConfigurationItem, + Connection, + Declaration, + DeclarationLink, + Definition, + DefinitionLink, + Diagnostic, + DiagnosticRefreshRequest, + DiagnosticRelatedInformation, + DiagnosticSeverity, + DiagnosticTag, + DidChangeConfigurationParams, + DidChangeTextDocumentParams, + DidChangeWatchedFilesParams, + DidCloseTextDocumentParams, + DidOpenTextDocumentParams, + Disposable, + DocumentDiagnosticParams, + DocumentDiagnosticReport, + DocumentHighlight, + DocumentHighlightParams, + DocumentSymbol, + DocumentSymbolParams, + ExecuteCommandParams, + HoverParams, + InitializeParams, + InitializeResult, + LSPObject, + Location, + MarkupKind, + PrepareRenameParams, + PublishDiagnosticsParams, + ReferenceParams, + RemoteWindow, + RenameParams, + ResultProgressReporter, + SignatureHelp, + SignatureHelpParams, + SymbolInformation, + TextDocumentPositionParams, + TextDocumentSyncKind, + WorkDoneProgressReporter, + WorkspaceDiagnosticParams, + WorkspaceDiagnosticReport, + WorkspaceDiagnosticReportPartialResult, + WorkspaceEdit, + WorkspaceFoldersChangeEvent, + WorkspaceSymbol, + WorkspaceSymbolParams, +} from 'vscode-languageserver'; + +import { TextDocument } from 'vscode-languageserver-textdocument'; +import { AnalysisResults } from './analyzer/analysis'; +import { BackgroundAnalysisProgram, InvalidatedReason } from './analyzer/backgroundAnalysisProgram'; +import { ImportResolver } from './analyzer/importResolver'; +import { MaxAnalysisTime } from './analyzer/program'; +import { AnalyzerService, LibraryReanalysisTimeProvider, getNextServiceId } from './analyzer/service'; +import { IPythonMode } 
from './analyzer/sourceFile'; +import type { IBackgroundAnalysis } from './backgroundAnalysisBase'; +import { CommandResult } from './commands/commandResult'; +import { CancelAfter } from './common/cancellationUtils'; +import { CaseSensitivityDetector } from './common/caseSensitivityDetector'; +import { getNestedProperty } from './common/collectionUtils'; +import { DiagnosticSeverityOverrides, getDiagnosticSeverityOverrides } from './common/commandLineOptions'; +import { ConfigOptions, getDiagLevelDiagnosticRules, parseDiagLevel } from './common/configOptions'; +import { ConsoleInterface, ConsoleWithLogLevel, LogLevel } from './common/console'; +import { Diagnostic as AnalyzerDiagnostic, DiagnosticCategory } from './common/diagnostic'; +import { DiagnosticRule } from './common/diagnosticRules'; +import { FileDiagnostics } from './common/diagnosticSink'; +import { DocumentRange } from './common/docRange'; +import { FileSystem, ReadOnlyFileSystem } from './common/fileSystem'; +import { FileWatcherEventType } from './common/fileWatcher'; +import { Host } from './common/host'; +import { + LanguageServerInterface, + ServerOptions, + ServerSettings, + WorkspaceServices, +} from './common/languageServerInterface'; +import { fromLSPAny, isNullProgressReporter } from './common/lspUtils'; +import { ProgressReportTracker, ProgressReporter } from './common/progressReporter'; +import { ServiceKeys } from './common/serviceKeys'; +import { ServiceProvider } from './common/serviceProvider'; +import { Position, Range } from './common/textRange'; +import { Uri } from './common/uri/uri'; +import { convertUriToLspUriString } from './common/uri/uriUtils'; +import { AnalyzerServiceExecutor } from './languageService/analyzerServiceExecutor'; +import { CallHierarchyProvider } from './languageService/callHierarchyProvider'; +import { CompletionItemData, CompletionProvider } from './languageService/completionProvider'; +import { DefinitionFilter, DefinitionProvider, TypeDefinitionProvider } 
from './languageService/definitionProvider'; +import { DocumentHighlightProvider } from './languageService/documentHighlightProvider'; +import { CollectionResult } from './languageService/documentSymbolCollector'; +import { DocumentSymbolProvider } from './languageService/documentSymbolProvider'; +import { DynamicFeature, DynamicFeatures } from './languageService/dynamicFeature'; +import { FileWatcherDynamicFeature } from './languageService/fileWatcherDynamicFeature'; +import { HoverProvider } from './languageService/hoverProvider'; +import { canNavigateToFile } from './languageService/navigationUtils'; +import { ReferencesProvider } from './languageService/referencesProvider'; +import { RenameProvider } from './languageService/renameProvider'; +import { SignatureHelpProvider } from './languageService/signatureHelpProvider'; +import { WorkspaceSymbolProvider } from './languageService/workspaceSymbolProvider'; +import { Localizer, setLocaleOverride } from './localization/localize'; +import { ParseFileResults } from './parser/parser'; +import { ClientCapabilities, InitializationOptions } from './types'; +import { + InitStatus, + IWorkspaceFactory, + WellKnownWorkspaceKinds, + Workspace, + WorkspaceFactory, +} from './workspaceFactory'; +import { PullDiagnosticsDynamicFeature } from './languageService/pullDiagnosticsDynamicFeature'; + +const UncomputedDiagnosticsVersion = -1; + +export function wrapProgressReporter(reporter: WorkDoneProgressReporter): ProgressReporter { + let isDisplayingProgress = false; + return { + isDisplayingProgress: () => { + return isDisplayingProgress; + }, + isEnabled: () => { + return true; + }, + begin: () => { + isDisplayingProgress = true; + reporter.begin('', /* percentage */ undefined, /* message */ undefined, /* cancellable */ false); + }, + report: (message) => { + reporter.report(message); + }, + end: () => { + isDisplayingProgress = false; + reporter.done(); + }, + }; +} + +export abstract class LanguageServerBase implements 
LanguageServerInterface, Disposable { + // We support running only one "find all reference" at a time. + private _pendingFindAllRefsCancellationSource: AbstractCancellationTokenSource | undefined; + + // We support running only one command at a time. + private _pendingCommandCancellationSource: AbstractCancellationTokenSource | undefined; + + private _progressReporter: ProgressReporter; + private _progressReportCounter = 0; + + private _lastTriggerKind: CompletionTriggerKind | undefined = CompletionTriggerKind.Invoked; + + private _initialized = false; + private _workspaceFoldersChangedDisposable: Disposable | undefined; + private _workspaceDiagnosticsReporter: ResultProgressReporter | undefined; + private _workspaceDiagnosticsProgressReporter: ProgressReporter | undefined; + private _workspaceDiagnosticsResolve: ((value: WorkspaceDiagnosticReport) => void) | undefined; + + protected client: ClientCapabilities = { + hasConfigurationCapability: false, + hasVisualStudioExtensionsCapability: false, + hasWorkspaceFoldersCapability: false, + hasWatchFileCapability: false, + hasWatchFileRelativePathCapability: false, + hasActiveParameterCapability: false, + hasSignatureLabelOffsetCapability: false, + hasHierarchicalDocumentSymbolCapability: false, + hasWindowProgressCapability: false, + hasGoToDeclarationCapability: false, + hasDocumentChangeCapability: false, + hasDocumentAnnotationCapability: false, + hasCompletionCommitCharCapability: false, + hoverContentFormat: MarkupKind.PlainText, + completionDocFormat: MarkupKind.PlainText, + completionSupportsSnippet: false, + signatureDocFormat: MarkupKind.PlainText, + supportsDeprecatedDiagnosticTag: false, + supportsUnnecessaryDiagnosticTag: false, + supportsTaskItemDiagnosticTag: false, + completionItemResolveSupportsAdditionalTextEdits: false, + supportsPullDiagnostics: false, + requiresPullRelatedInformationCapability: false, + }; + + protected defaultClientConfig: any; + + protected readonly workspaceFactory: 
IWorkspaceFactory; + protected readonly openFileMap = new Map(); + protected readonly fs: FileSystem; + protected readonly caseSensitiveDetector: CaseSensitivityDetector; + + // The URIs for which diagnostics are reported + protected readonly documentsWithDiagnostics = new Set(); + + protected readonly dynamicFeatures = new DynamicFeatures(); + + constructor(protected serverOptions: ServerOptions, protected connection: Connection) { + // Stash the base directory into a global variable. + // This must happen before fs.getModulePath(). + (global as any).__rootDirectory = serverOptions.rootDirectory.getFilePath(); + + this.console.info( + `${serverOptions.productName} language server ${ + serverOptions.version && serverOptions.version + ' ' + }starting` + ); + + this.console.info(`Server root directory: ${serverOptions.rootDirectory}`); + + this.fs = this.serverOptions.serviceProvider.fs(); + this.caseSensitiveDetector = this.serverOptions.serviceProvider.get(ServiceKeys.caseSensitivityDetector); + + this.workspaceFactory = this.createWorkspaceFactory(); + + // Set the working directory to a known location within + // the extension directory. Otherwise the execution of + // python can have unintended and surprising results. + const moduleDirectory = this.fs.getModulePath(); + if (moduleDirectory && this.fs.existsSync(moduleDirectory)) { + this.fs.chdir(moduleDirectory); + } + + // Set up callbacks. + this.setupConnection(serverOptions.supportedCommands ?? [], serverOptions.supportedCodeActions ?? []); + + this._progressReporter = new ProgressReportTracker(this.createProgressReporter()); + + // Listen on the connection. + this.connection.listen(); + } + + get console(): ConsoleInterface { + return this.serverOptions.serviceProvider.console(); + } + + // Provides access to the client's window. 
+ get window(): RemoteWindow { + return this.connection.window; + } + + get supportAdvancedEdits(): boolean { + return this.client.hasDocumentChangeCapability && this.client.hasDocumentAnnotationCapability; + } + + get serviceProvider() { + return this.serverOptions.serviceProvider; + } + + dispose() { + this.workspaceFactory.clear(); + this.openFileMap.clear(); + this.dynamicFeatures.unregister(); + this._workspaceFoldersChangedDisposable?.dispose(); + } + + abstract createBackgroundAnalysis(serviceId: string, workspaceRoot: Uri): IBackgroundAnalysis | undefined; + + abstract getSettings(workspace: Workspace): Promise; + + // Creates a service instance that's used for analyzing a + // program within a workspace. + createAnalyzerService( + name: string, + workspaceRoot: Uri, + services?: WorkspaceServices, + libraryReanalysisTimeProvider?: LibraryReanalysisTimeProvider + ): AnalyzerService { + this.console.info(`Starting service instance "${name}"`); + + const serviceId = getNextServiceId(name); + const service = new AnalyzerService(name, this.serverOptions.serviceProvider, { + console: this.console, + hostFactory: this.createHost.bind(this), + importResolverFactory: this.createImportResolver.bind(this), + backgroundAnalysis: services + ? services.backgroundAnalysis + : this.createBackgroundAnalysis(serviceId, workspaceRoot), + maxAnalysisTime: this.serverOptions.maxAnalysisTimeInForeground, + backgroundAnalysisProgramFactory: this.createBackgroundAnalysisProgram.bind(this), + libraryReanalysisTimeProvider, + serviceId, + fileSystem: services?.fs ?? this.serverOptions.serviceProvider.fs(), + onInvalidated: (reason) => { + // If we're in openFilesOnly mode and the client supports pull diagnostics, request a refresh. 
In + // workspace mode we just use the 'push' notification to respond to the workspace diagnostics + if (this.client.supportsPullDiagnostics && service.checkOnlyOpenFiles) { + void this.connection.sendRequest(DiagnosticRefreshRequest.type); + } + }, + shouldRunAnalysis: () => { + // We should run analysis if: + // The client doesn't support pull diagnostics (meaning we have to run analysis ourselves) + // or + // We have a workspace partial result callback (meaning in pull mode, we're waiting for workspace results) + return !this.client.supportsPullDiagnostics || this._workspaceDiagnosticsReporter !== undefined; + }, + }); + + service.setCompletionCallback((results) => this.onAnalysisCompletedHandler(service.fs, results)); + return service; + } + + async getWorkspaces(): Promise { + const workspaces = this.workspaceFactory.items(); + for (const workspace of workspaces) { + await workspace.isInitialized.promise; + } + + return workspaces; + } + + async getWorkspaceForFile(fileUri: Uri, pythonPath?: Uri): Promise { + return this.workspaceFactory.getWorkspaceForFile(fileUri, pythonPath); + } + + async getContainingWorkspacesForFile(fileUri: Uri): Promise { + return this.workspaceFactory.getContainingWorkspacesForFile(fileUri); + } + + reanalyze() { + this.workspaceFactory.items().forEach((workspace) => { + workspace.service.invalidateAndForceReanalysis(InvalidatedReason.Reanalyzed); + }); + } + + restart() { + this.workspaceFactory.items().forEach((workspace) => { + workspace.service.restart(); + }); + } + + updateSettingsForAllWorkspaces(): void { + const tasks: Promise[] = []; + this.workspaceFactory.items().forEach((workspace) => { + // Updating settings can change workspace's file ownership. Make workspace uninitialized so that + // features can wait until workspace gets new settings. + // the file's ownership can also changed by `pyrightconfig.json` changes, but those are synchronous + // operation, so it won't affect this. 
+ workspace.isInitialized = workspace.isInitialized.reset(); + tasks.push(this.updateSettingsForWorkspace(workspace, workspace.isInitialized)); + }); + + Promise.all(tasks).then(() => { + this.dynamicFeatures.register(); + }); + } + + async updateSettingsForWorkspace( + workspace: Workspace, + status: InitStatus | undefined, + serverSettings?: ServerSettings + ): Promise { + try { + status?.markCalled(); + + serverSettings = serverSettings ?? (await this.getSettings(workspace)); + + // Set logging level first. + (this.console as ConsoleWithLogLevel).level = serverSettings.logLevel ?? LogLevel.Info; + + this.dynamicFeatures.update(serverSettings); + + // If the workspace mode has changed, we may need to resolve the workspace diagnostics promise. + if (serverSettings.openFilesOnly && this._workspaceDiagnosticsResolve) { + this._workspaceDiagnosticsResolve({ items: [] }); + this._workspaceDiagnosticsResolve = undefined; + } + + // Then use the updated settings to restart the service. + this.updateOptionsAndRestartService(workspace, serverSettings); + + workspace.disableLanguageServices = !!serverSettings.disableLanguageServices; + workspace.disableTaggedHints = !!serverSettings.disableTaggedHints; + workspace.disableOrganizeImports = !!serverSettings.disableOrganizeImports; + } finally { + // Don't use workspace.isInitialized directly since it might have been + // reset due to pending config change event. + // The workspace is now open for business. + status?.resolve(); + } + } + + updateOptionsAndRestartService( + workspace: Workspace, + serverSettings: ServerSettings, + typeStubTargetImportName?: string + ) { + AnalyzerServiceExecutor.runWithOptions(workspace, serverSettings, { typeStubTargetImportName }); + workspace.searchPathsToWatch = workspace.service.librarySearchUrisToWatch ?? 
[]; + } + protected get workspaceDiagnosticsReporter() { + return this._workspaceDiagnosticsReporter; + } + + protected abstract executeCommand(params: ExecuteCommandParams, token: CancellationToken): Promise; + + protected abstract isLongRunningCommand(command: string): boolean; + protected abstract isRefactoringCommand(command: string): boolean; + + protected abstract executeCodeAction( + params: CodeActionParams, + token: CancellationToken + ): Promise<(Command | CodeAction)[] | undefined | null>; + + protected async getConfiguration(scopeUri: Uri | undefined, section: string) { + if (this.client.hasConfigurationCapability) { + const item: ConfigurationItem = {}; + if (scopeUri !== undefined) { + item.scopeUri = scopeUri.toString(); + } + if (section !== undefined) { + item.section = section; + } + return this.connection.workspace.getConfiguration(item); + } + + if (this.defaultClientConfig) { + return getNestedProperty(this.defaultClientConfig, section); + } + + return undefined; + } + + protected isOpenFilesOnly(diagnosticMode: string): boolean { + return diagnosticMode !== 'workspace'; + } + + protected getSeverityOverrides(value: string | boolean): DiagnosticSeverityOverrides | undefined { + const enumValue = parseDiagLevel(value); + if (!enumValue) { + return undefined; + } + if (getDiagnosticSeverityOverrides().includes(enumValue)) { + return enumValue; + } + + return undefined; + } + + protected getDiagnosticRuleName(value: string): DiagnosticRule | undefined { + const enumValue = value as DiagnosticRule; + if (getDiagLevelDiagnosticRules().includes(enumValue)) { + return enumValue; + } + + return undefined; + } + + protected abstract createHost(): Host; + protected abstract createImportResolver( + serviceProvider: ServiceProvider, + options: ConfigOptions, + host: Host + ): ImportResolver; + + protected createBackgroundAnalysisProgram( + serviceId: string, + serviceProvider: ServiceProvider, + configOptions: ConfigOptions, + importResolver: 
ImportResolver, + backgroundAnalysis?: IBackgroundAnalysis, + maxAnalysisTime?: MaxAnalysisTime + ): BackgroundAnalysisProgram { + return new BackgroundAnalysisProgram( + serviceId, + serviceProvider, + configOptions, + importResolver, + backgroundAnalysis, + maxAnalysisTime, + /* disableChecker */ undefined + ); + } + + protected createWorkspaceFactory(): IWorkspaceFactory { + return new WorkspaceFactory( + this.console, + this.createAnalyzerServiceForWorkspace.bind(this), + this.onWorkspaceCreated.bind(this), + this.onWorkspaceRemoved.bind(this), + this.serviceProvider + ); + } + + protected setupConnection(supportedCommands: string[], supportedCodeActions: string[]): void { + // After the server has started the client sends an initialize request. The server receives + // in the passed params the rootPath of the workspace plus the client capabilities. + this.connection.onInitialize((params) => this.initialize(params, supportedCommands, supportedCodeActions)); + + this.connection.onInitialized(() => this.onInitialized()); + + this.connection.onDidChangeConfiguration((params) => this.onDidChangeConfiguration(params)); + + this.connection.onCodeAction((params, token) => this.executeCodeAction(params, token)); + + this.connection.onDefinition(async (params, token) => this.onDefinition(params, token)); + this.connection.onDeclaration(async (params, token) => this.onDeclaration(params, token)); + this.connection.onTypeDefinition(async (params, token) => this.onTypeDefinition(params, token)); + + this.connection.onReferences(async (params, token, workDoneReporter, resultReporter) => + this.onReferences(params, token, workDoneReporter, resultReporter) + ); + + this.connection.onDocumentSymbol(async (params, token) => this.onDocumentSymbol(params, token)); + this.connection.onWorkspaceSymbol(async (params, token, _, resultReporter) => + this.onWorkspaceSymbol(params, token, resultReporter) + ); + + this.connection.onHover(async (params, token) => this.onHover(params, 
token)); + + this.connection.onDocumentHighlight(async (params, token) => this.onDocumentHighlight(params, token)); + + this.connection.onSignatureHelp(async (params, token) => this.onSignatureHelp(params, token)); + + this.connection.onCompletion((params, token) => this.onCompletion(params, token)); + this.connection.onCompletionResolve(async (params, token) => this.onCompletionResolve(params, token)); + + this.connection.onPrepareRename(async (params, token) => this.onPrepareRenameRequest(params, token)); + this.connection.onRenameRequest(async (params, token) => this.onRenameRequest(params, token)); + + const callHierarchy = this.connection.languages.callHierarchy; + callHierarchy.onPrepare(async (params, token) => this.onCallHierarchyPrepare(params, token)); + callHierarchy.onIncomingCalls(async (params, token) => this.onCallHierarchyIncomingCalls(params, token)); + callHierarchy.onOutgoingCalls(async (params, token) => this.onCallHierarchyOutgoingCalls(params, token)); + + this.connection.onDidOpenTextDocument(async (params) => this.onDidOpenTextDocument(params)); + this.connection.onDidChangeTextDocument(async (params) => this.onDidChangeTextDocument(params)); + this.connection.onDidCloseTextDocument(async (params) => this.onDidCloseTextDocument(params)); + this.connection.onDidChangeWatchedFiles((params) => this.onDidChangeWatchedFiles(params)); + + this.connection.languages.diagnostics.on(async (params, token) => this.onDiagnostics(params, token)); + this.connection.languages.diagnostics.onWorkspace(async (params, token, progress, reporter) => + this.onWorkspaceDiagnostics(params, token, progress, reporter) + ); + this.connection.onExecuteCommand(async (params, token, reporter) => + this.onExecuteCommand(params, token, reporter) + ); + this.connection.onShutdown(async (token) => this.onShutdown(token)); + } + + protected async initialize( + params: InitializeParams, + supportedCommands: string[], + supportedCodeActions: string[] + ): Promise { + if 
(params.locale) { + setLocaleOverride(params.locale); + } + + const initializationOptions = (params.initializationOptions ?? {}) as LSPObject & InitializationOptions; + const capabilities = params.capabilities; + this.client.hasConfigurationCapability = !!capabilities.workspace?.configuration; + this.client.hasWatchFileCapability = !!capabilities.workspace?.didChangeWatchedFiles?.dynamicRegistration; + this.client.hasWatchFileRelativePathCapability = + !!capabilities.workspace?.didChangeWatchedFiles?.relativePatternSupport; + this.client.hasWorkspaceFoldersCapability = !!capabilities.workspace?.workspaceFolders; + this.client.hasVisualStudioExtensionsCapability = !!(capabilities as any)._vs_supportsVisualStudioExtensions; + this.client.hasActiveParameterCapability = + !!capabilities.textDocument?.signatureHelp?.signatureInformation?.activeParameterSupport; + this.client.hasSignatureLabelOffsetCapability = + !!capabilities.textDocument?.signatureHelp?.signatureInformation?.parameterInformation?.labelOffsetSupport; + this.client.hasHierarchicalDocumentSymbolCapability = + !!capabilities.textDocument?.documentSymbol?.hierarchicalDocumentSymbolSupport; + this.client.hasDocumentChangeCapability = + !!capabilities.workspace?.workspaceEdit?.documentChanges && + !!capabilities.workspace.workspaceEdit?.resourceOperations; + this.client.hasDocumentAnnotationCapability = !!capabilities.workspace?.workspaceEdit?.changeAnnotationSupport; + this.client.hasCompletionCommitCharCapability = + !!capabilities.textDocument?.completion?.completionList?.itemDefaults && + !!capabilities.textDocument.completion.completionItem?.commitCharactersSupport; + + this.client.hoverContentFormat = this._getCompatibleMarkupKind(capabilities.textDocument?.hover?.contentFormat); + this.client.completionDocFormat = this._getCompatibleMarkupKind( + capabilities.textDocument?.completion?.completionItem?.documentationFormat + ); + this.client.completionSupportsSnippet = 
!!capabilities.textDocument?.completion?.completionItem?.snippetSupport; + this.client.signatureDocFormat = this._getCompatibleMarkupKind( + capabilities.textDocument?.signatureHelp?.signatureInformation?.documentationFormat + ); + const supportedDiagnosticTags = capabilities.textDocument?.publishDiagnostics?.tagSupport?.valueSet || []; + this.client.supportsUnnecessaryDiagnosticTag = supportedDiagnosticTags.some( + (tag) => tag === DiagnosticTag.Unnecessary + ); + this.client.supportsDeprecatedDiagnosticTag = supportedDiagnosticTags.some( + (tag) => tag === DiagnosticTag.Deprecated + ); + // if the client is running in VS, it always supports task item diagnostics + this.client.supportsTaskItemDiagnosticTag = this.client.hasVisualStudioExtensionsCapability; + this.client.hasWindowProgressCapability = !!capabilities.window?.workDoneProgress; + this.client.hasGoToDeclarationCapability = !!capabilities.textDocument?.declaration; + this.client.completionItemResolveSupportsAdditionalTextEdits = + !!capabilities.textDocument?.completion?.completionItem?.resolveSupport?.properties.some( + (p) => p === 'additionalTextEdits' + ); + this.client.supportsPullDiagnostics = + !!capabilities.textDocument?.diagnostic?.dynamicRegistration && + initializationOptions?.disablePullDiagnostics !== true; + this.client.requiresPullRelatedInformationCapability = + !!capabilities.textDocument?.diagnostic?.relatedInformation && + initializationOptions?.disablePullDiagnostics !== true; + + // Create a service instance for each of the workspace folders. 
+ this.workspaceFactory.handleInitialize(params); + + if (this.client.hasWatchFileCapability) { + this.addDynamicFeature( + new FileWatcherDynamicFeature( + this.connection, + this.client.hasWatchFileRelativePathCapability, + this.fs, + this.workspaceFactory + ) + ); + } + + const result: InitializeResult = { + capabilities: { + textDocumentSync: TextDocumentSyncKind.Incremental, + definitionProvider: { workDoneProgress: true }, + declarationProvider: { workDoneProgress: true }, + typeDefinitionProvider: { workDoneProgress: true }, + referencesProvider: { workDoneProgress: true }, + documentSymbolProvider: { workDoneProgress: true }, + workspaceSymbolProvider: { workDoneProgress: true }, + hoverProvider: { workDoneProgress: true }, + documentHighlightProvider: { workDoneProgress: true }, + renameProvider: { prepareProvider: true, workDoneProgress: true }, + completionProvider: { + triggerCharacters: this.client.hasVisualStudioExtensionsCapability + ? ['.', '[', '@', '"', "'"] + : ['.', '[', '"', "'"], + resolveProvider: true, + workDoneProgress: true, + completionItem: { + labelDetailsSupport: true, + }, + }, + signatureHelpProvider: { + triggerCharacters: ['(', ',', ')'], + workDoneProgress: true, + }, + codeActionProvider: { + codeActionKinds: supportedCodeActions, + workDoneProgress: true, + }, + executeCommandProvider: { + commands: supportedCommands, + workDoneProgress: true, + }, + callHierarchyProvider: true, + workspace: { + workspaceFolders: { + supported: true, + changeNotifications: true, + }, + }, + }, + }; + + if (this.client.supportsPullDiagnostics) { + this.addDynamicFeature(new PullDiagnosticsDynamicFeature(this.connection, this.serverOptions.productName)); + } + + return result; + } + + protected onInitialized() { + this.handleInitialized((event) => { + this.workspaceFactory.handleWorkspaceFoldersChanged(event, null); + this.dynamicFeatures.register(); + }); + } + + protected handleInitialized(changeWorkspaceFolderHandler: (e: 
WorkspaceFoldersChangeEvent) => any) { + // Mark as initialized. We need this to make sure to + // not send config updates before this point. + this._initialized = true; + + if (this.client.hasWorkspaceFoldersCapability) { + this._workspaceFoldersChangedDisposable = + this.connection.workspace.onDidChangeWorkspaceFolders(changeWorkspaceFolderHandler); + } + + this.dynamicFeatures.register(); + this.updateSettingsForAllWorkspaces(); + } + + protected onDidChangeConfiguration(params: DidChangeConfigurationParams) { + this.console.log(`Received updated settings`); + if (params?.settings) { + this.defaultClientConfig = params?.settings; + } + this.updateSettingsForAllWorkspaces(); + } + + protected async onDefinition( + params: TextDocumentPositionParams, + token: CancellationToken + ): Promise { + return this.getDefinitions( + params, + token, + this.client.hasGoToDeclarationCapability ? DefinitionFilter.PreferSource : DefinitionFilter.All, + (workspace, filePath, position, filter, token) => + workspace.service.run((program) => { + return new DefinitionProvider(program, filePath, position, filter, token).getDefinitions(); + }, token) + ); + } + + protected async onDeclaration( + params: TextDocumentPositionParams, + token: CancellationToken + ): Promise { + return this.getDefinitions( + params, + token, + this.client.hasGoToDeclarationCapability ? 
DefinitionFilter.PreferStubs : DefinitionFilter.All, + (workspace, filePath, position, filter, token) => + workspace.service.run((program) => { + return new DefinitionProvider(program, filePath, position, filter, token).getDefinitions(); + }, token) + ); + } + + protected async onTypeDefinition( + params: TextDocumentPositionParams, + token: CancellationToken + ): Promise { + return this.getDefinitions(params, token, DefinitionFilter.All, (workspace, filePath, position, _, token) => + workspace.service.run((program) => { + return new TypeDefinitionProvider(program, filePath, position, token).getDefinitions(); + }, token) + ); + } + + protected async getDefinitions( + params: TextDocumentPositionParams, + token: CancellationToken, + filter: DefinitionFilter, + getDefinitionsFunc: ( + workspace: Workspace, + fileUri: Uri, + position: Position, + filter: DefinitionFilter, + token: CancellationToken + ) => DocumentRange[] | undefined + ) { + this.recordUserInteractionTime(); + + const uri = this.convertLspUriStringToUri(params.textDocument.uri); + + const workspace = await this.getWorkspaceForFile(uri); + if (workspace.disableLanguageServices) { + return undefined; + } + + const locations = getDefinitionsFunc(workspace, uri, params.position, filter, token); + if (!locations) { + return undefined; + } + return locations + .filter((loc) => this.canNavigateToFile(loc.uri, workspace.service.fs)) + .map((loc) => Location.create(convertUriToLspUriString(workspace.service.fs, loc.uri), loc.range)); + } + + protected async onReferences( + params: ReferenceParams, + token: CancellationToken, + workDoneReporter: WorkDoneProgressReporter, + resultReporter: ResultProgressReporter | undefined, + createDocumentRange?: (uri: Uri, result: CollectionResult, parseResults: ParseFileResults) => DocumentRange, + convertToLocation?: (fs: ReadOnlyFileSystem, ranges: DocumentRange) => Location | undefined + ): Promise { + if (this._pendingFindAllRefsCancellationSource) { + 
this._pendingFindAllRefsCancellationSource.cancel(); + this._pendingFindAllRefsCancellationSource = undefined; + } + + // VS Code doesn't support cancellation of "find all references". + // We provide a progress bar a cancellation button so the user can cancel + // any long-running actions. + const progress = await this.getProgressReporter( + workDoneReporter, + Localizer.CodeAction.findingReferences(), + token + ); + + const source = progress.source; + this._pendingFindAllRefsCancellationSource = source; + + try { + const uri = this.convertLspUriStringToUri(params.textDocument.uri); + + const workspace = await this.getWorkspaceForFile(uri); + if (workspace.disableLanguageServices) { + return; + } + + return workspace.service.run((program) => { + return new ReferencesProvider( + program, + source.token, + createDocumentRange, + convertToLocation + ).reportReferences(uri, params.position, params.context.includeDeclaration, resultReporter); + }, token); + } finally { + progress.reporter.done(); + source.dispose(); + } + } + + protected async onDocumentSymbol( + params: DocumentSymbolParams, + token: CancellationToken + ): Promise { + this.recordUserInteractionTime(); + + const uri = this.convertLspUriStringToUri(params.textDocument.uri); + const workspace = await this.getWorkspaceForFile(uri); + if (workspace.disableLanguageServices) { + return undefined; + } + + return workspace.service.run((program) => { + return new DocumentSymbolProvider( + program, + uri, + this.client.hasHierarchicalDocumentSymbolCapability, + { includeAliases: false }, + token + ).getSymbols(); + }, token); + } + + protected onWorkspaceSymbol( + params: WorkspaceSymbolParams, + token: CancellationToken, + resultReporter: ResultProgressReporter | undefined + ): Promise { + const result = new WorkspaceSymbolProvider( + this.workspaceFactory.items(), + resultReporter, + params.query, + token + ).reportSymbols(); + + return Promise.resolve(result); + } + + protected async onHover(params: 
HoverParams, token: CancellationToken) { + const uri = this.convertLspUriStringToUri(params.textDocument.uri); + const workspace = await this.getWorkspaceForFile(uri); + if (workspace.disableLanguageServices) { + return undefined; + } + + return workspace.service.run((program) => { + return new HoverProvider(program, uri, params.position, this.client.hoverContentFormat, token).getHover(); + }, token); + } + + protected async onDocumentHighlight( + params: DocumentHighlightParams, + token: CancellationToken + ): Promise { + const uri = this.convertLspUriStringToUri(params.textDocument.uri); + const workspace = await this.getWorkspaceForFile(uri); + + return workspace.service.run((program) => { + return new DocumentHighlightProvider(program, uri, params.position, token).getDocumentHighlight(); + }, token); + } + + protected async onSignatureHelp( + params: SignatureHelpParams, + token: CancellationToken + ): Promise { + const uri = this.convertLspUriStringToUri(params.textDocument.uri); + + const workspace = await this.getWorkspaceForFile(uri); + if (workspace.disableLanguageServices) { + return; + } + + return workspace.service.run((program) => { + return new SignatureHelpProvider( + program, + uri, + params.position, + this.client.signatureDocFormat, + this.client.hasSignatureLabelOffsetCapability, + this.client.hasActiveParameterCapability, + params.context, + program.serviceProvider.docStringService(), + token + ).getSignatureHelp(); + }, token); + } + + protected setCompletionIncomplete(params: CompletionParams, completions: CompletionList | null) { + // We set completion incomplete for the first invocation and next consecutive call, + // but after that we mark it as completed so the client doesn't repeatedly call back. + // We mark the first one as incomplete because completion could be invoked without + // any meaningful character provided, such as an explicit completion invocation (ctrl+space) + // or a period. 
That might cause us to not include some items (e.g., auto-imports). + // The next consecutive call provides some characters to help us to pick + // better completion items. After that, we are not going to introduce new items, + // so we can let the client to do the filtering and caching. + const completionIncomplete = + this._lastTriggerKind !== CompletionTriggerKind.TriggerForIncompleteCompletions || + params.context?.triggerKind !== CompletionTriggerKind.TriggerForIncompleteCompletions; + + this._lastTriggerKind = params.context?.triggerKind; + + if (completions) { + completions.isIncomplete = completionIncomplete; + } + } + + protected async onCompletion(params: CompletionParams, token: CancellationToken): Promise { + const uri = this.convertLspUriStringToUri(params.textDocument.uri); + const workspace = await this.getWorkspaceForFile(uri); + if (workspace.disableLanguageServices) { + return null; + } + + return await workspace.service.run(async (program) => { + const completions = await new CompletionProvider( + program, + uri, + params.position, + { + format: this.client.completionDocFormat, + snippet: this.client.completionSupportsSnippet, + lazyEdit: false, + triggerCharacter: params?.context?.triggerCharacter, + }, + token + ).getCompletions(); + + this.setCompletionIncomplete(params, completions); + return completions; + }, token); + } + + // Cancellation bugs in vscode and LSP: + // https://github.com/microsoft/vscode-languageserver-node/issues/615 + // https://github.com/microsoft/vscode/issues/95485 + // + // If resolver throws cancellation exception, LSP and VSCode + // cache that result and never call us back. 
+ protected async onCompletionResolve(params: CompletionItem, token: CancellationToken): Promise { + const completionItemData = fromLSPAny(params.data); + if (completionItemData && completionItemData.uri) { + const uri = Uri.parse(completionItemData.uri, this.caseSensitiveDetector); + const workspace = await this.getWorkspaceForFile(uri); + workspace.service.run((program) => { + return new CompletionProvider( + program, + uri, + completionItemData.position, + { + format: this.client.completionDocFormat, + snippet: this.client.completionSupportsSnippet, + lazyEdit: false, + }, + token + ).resolveCompletionItem(params); + }, token); + } + return params; + } + + protected async onPrepareRenameRequest( + params: PrepareRenameParams, + token: CancellationToken + ): Promise { + const uri = this.convertLspUriStringToUri(params.textDocument.uri); + const isUntitled = uri.isUntitled(); + + const workspace = await this.getWorkspaceForFile(uri); + if (workspace.disableLanguageServices) { + return null; + } + + return workspace.service.run((program) => { + return new RenameProvider(program, uri, params.position, token).canRenameSymbol( + workspace.kinds.includes(WellKnownWorkspaceKinds.Default), + isUntitled + ); + }, token); + } + + protected async onRenameRequest( + params: RenameParams, + token: CancellationToken + ): Promise { + const uri = this.convertLspUriStringToUri(params.textDocument.uri); + const isUntitled = uri.isUntitled(); + + const workspace = await this.getWorkspaceForFile(uri); + if (workspace.disableLanguageServices) { + return; + } + + return workspace.service.run((program) => { + return new RenameProvider(program, uri, params.position, token).renameSymbol( + params.newName, + workspace.kinds.includes(WellKnownWorkspaceKinds.Default), + isUntitled + ); + }, token); + } + + protected async onCallHierarchyPrepare( + params: CallHierarchyPrepareParams, + token: CancellationToken + ): Promise { + const uri = 
this.convertLspUriStringToUri(params.textDocument.uri); + + const workspace = await this.getWorkspaceForFile(uri); + if (workspace.disableLanguageServices) { + return null; + } + + return workspace.service.run((program) => { + return new CallHierarchyProvider(program, uri, params.position, token).onPrepare(); + }, token); + } + + protected async onCallHierarchyIncomingCalls(params: CallHierarchyIncomingCallsParams, token: CancellationToken) { + const uri = this.convertLspUriStringToUri(params.item.uri); + + const workspace = await this.getWorkspaceForFile(uri); + if (workspace.disableLanguageServices) { + return null; + } + + return workspace.service.run((program) => { + return new CallHierarchyProvider(program, uri, params.item.range.start, token).getIncomingCalls(); + }, token); + } + + protected async onCallHierarchyOutgoingCalls( + params: CallHierarchyOutgoingCallsParams, + token: CancellationToken + ): Promise { + const uri = this.convertLspUriStringToUri(params.item.uri); + + const workspace = await this.getWorkspaceForFile(uri); + if (workspace.disableLanguageServices) { + return null; + } + + return workspace.service.run((program) => { + return new CallHierarchyProvider(program, uri, params.item.range.start, token).getOutgoingCalls(); + }, token); + } + + protected async onDidOpenTextDocument(params: DidOpenTextDocumentParams, ipythonMode = IPythonMode.None) { + const uri = this.convertLspUriStringToUri(params.textDocument.uri); + + let doc = this.openFileMap.get(uri.key); + if (doc) { + // We shouldn't get an open text document request for an already-opened doc. 
+ this.console.error(`Received redundant open text document command for ${uri}`); + TextDocument.update(doc, [{ text: params.textDocument.text }], params.textDocument.version); + } else { + doc = TextDocument.create( + params.textDocument.uri, + 'python', + params.textDocument.version, + params.textDocument.text + ); + } + this.openFileMap.set(uri.key, doc); + + // Send this open to all the workspaces that might contain this file. + const workspaces = await this.getContainingWorkspacesForFile(uri); + workspaces.forEach((w) => { + w.service.setFileOpened(uri, params.textDocument.version, params.textDocument.text, ipythonMode); + }); + } + + protected async onDidChangeTextDocument(params: DidChangeTextDocumentParams, ipythonMode = IPythonMode.None) { + this.recordUserInteractionTime(); + + const uri = this.convertLspUriStringToUri(params.textDocument.uri); + const doc = this.openFileMap.get(uri.key); + if (!doc) { + // We shouldn't get a change text request for a closed doc. + this.console.error(`Received change text document command for closed file ${uri}`); + return; + } + + TextDocument.update(doc, params.contentChanges, params.textDocument.version); + const newContents = doc.getText(); + + // Send this change to all the workspaces that might contain this file. + const workspaces = await this.getContainingWorkspacesForFile(uri); + workspaces.forEach((w) => { + w.service.updateOpenFileContents(uri, params.textDocument.version, newContents, ipythonMode); + }); + } + + protected async onDidCloseTextDocument(params: DidCloseTextDocumentParams) { + const uri = this.convertLspUriStringToUri(params.textDocument.uri); + + // Send this close to all the workspaces that might contain this file. 
+ const workspaces = await this.getContainingWorkspacesForFile(uri); + workspaces.forEach((w) => { + w.service.setFileClosed(uri); + }); + + this.openFileMap.delete(uri.key); + } + + protected async onDiagnostics(params: DocumentDiagnosticParams, token: CancellationToken) { + const uri = this.convertLspUriStringToUri(params.textDocument.uri); + const workspace = await this.getWorkspaceForFile(uri); + let sourceFile = workspace.service.getSourceFile(uri); + let diagnosticsVersion = sourceFile?.isCheckingRequired() + ? UncomputedDiagnosticsVersion + : sourceFile?.getDiagnosticVersion() ?? UncomputedDiagnosticsVersion; + const result: DocumentDiagnosticReport = { + kind: 'full', + resultId: sourceFile?.getDiagnosticVersion()?.toString(), + items: [], + }; + if ( + workspace.disableLanguageServices || + !canNavigateToFile(workspace.service.fs, uri) || + token.isCancellationRequested + ) { + return result; + } + + // Send a progress message to the client. + this.incrementAnalysisProgress(); + + try { + // Reanalyze the file if it's not up to date. + if (params.previousResultId !== diagnosticsVersion.toString() && sourceFile) { + let diagnosticsVersionAfter = UncomputedDiagnosticsVersion - 1; // Just has to be different + let serverDiagnostics: AnalyzerDiagnostic[] = []; + + // Loop until we analyze the same version that we started with. + while (diagnosticsVersion !== diagnosticsVersionAfter && !token.isCancellationRequested && sourceFile) { + // Reset the version we're analyzing + sourceFile = workspace.service.getSourceFile(uri); + diagnosticsVersion = sourceFile?.getDiagnosticVersion() ?? UncomputedDiagnosticsVersion; + + // Then reanalyze the file (this should go to the background thread so this thread can handle other requests). + if (sourceFile) { + serverDiagnostics = await workspace.service.analyzeFileAndGetDiagnostics(uri, token); + } + + // If any text edits came in, make sure we reanalyze the file. 
Diagnostics version should be reset to zero + // if a text edit comes in. + const sourceFileAfter = workspace.service.getSourceFile(uri); + diagnosticsVersionAfter = sourceFileAfter?.getDiagnosticVersion() ?? UncomputedDiagnosticsVersion; + } + + // Then convert the diagnostics to the LSP format. + const lspDiagnostics = this._convertDiagnostics(workspace.service.fs, serverDiagnostics).filter( + (d) => d !== undefined + ) as Diagnostic[]; + + result.resultId = + diagnosticsVersionAfter === UncomputedDiagnosticsVersion + ? undefined + : diagnosticsVersionAfter.toString(); + result.items = lspDiagnostics; + } else { + (result as any).kind = 'unchanged'; + result.resultId = + diagnosticsVersion === UncomputedDiagnosticsVersion ? undefined : diagnosticsVersion.toString(); + delete (result as any).items; + } + } finally { + this.decrementAnalysisProgress(); + } + + return result; + } + + protected async onWorkspaceDiagnostics( + params: WorkspaceDiagnosticParams, + token: CancellationToken, + workDoneProgress: WorkDoneProgressReporter, + resultReporter?: ResultProgressReporter + ) { + // Resolve any pending workspace diagnostics. We only allow one at a time. + this._workspaceDiagnosticsResolve?.({ items: [] }); + this._workspaceDiagnosticsResolve = undefined; + + // Save the progress reporters and force a refresh of analysis. + this._workspaceDiagnosticsProgressReporter = !isNullProgressReporter(workDoneProgress) + ? wrapProgressReporter(workDoneProgress) + : undefined; + this._workspaceDiagnosticsReporter = resultReporter; + this.workspaceFactory.getNonDefaultWorkspaces().forEach((workspace) => { + workspace.service.invalidateAndScheduleReanalysis(InvalidatedReason.Reanalyzed); + }); + + return new Promise((resolve, reject) => { + // We never resolve as this should be a continually occurring process. Scheduling analysis + // should cause a new workspace diagnostic to be generated. 
+ + // Save the resolve callback to be used during shutdown so that tests don't crash + // on the unresolved promise for the workspace diagnostics. + this._workspaceDiagnosticsResolve = resolve; + }); + } + + protected onDidChangeWatchedFiles(params: DidChangeWatchedFilesParams) { + params.changes.forEach((change) => { + const filePath = this.fs.realCasePath(this.convertLspUriStringToUri(change.uri)); + const eventType: FileWatcherEventType = change.type === 1 ? 'add' : 'change'; + this.serverOptions.fileWatcherHandler.onFileChange(eventType, filePath); + }); + } + + protected async onExecuteCommand( + params: ExecuteCommandParams, + token: CancellationToken, + reporter: WorkDoneProgressReporter + ) { + // Cancel running command if there is one. + if (this._pendingCommandCancellationSource) { + this._pendingCommandCancellationSource.cancel(); + this._pendingCommandCancellationSource = undefined; + } + + const executeCommand = async (token: CancellationToken) => { + const result = await this.executeCommand(params, token); + if (WorkspaceEdit.is(result)) { + // Tell client to apply edits. + // Do not await; the client isn't expecting a result. + this.connection.workspace.applyEdit({ + label: `Command '${params.command}'`, + edit: result, + metadata: { isRefactoring: this.isRefactoringCommand(params.command) }, + }); + } + + if (CommandResult.is(result)) { + // Tell client to apply edits. + // Await so that we return after the edit is complete. + await this.connection.workspace.applyEdit({ + label: result.label, + edit: result.edits, + metadata: { isRefactoring: this.isRefactoringCommand(params.command) }, + }); + } + + return result; + }; + + if (this.isLongRunningCommand(params.command)) { + // Create a progress dialog for long-running commands. 
+ const progress = await this.getProgressReporter(reporter, Localizer.CodeAction.executingCommand(), token); + + const source = progress.source; + this._pendingCommandCancellationSource = source; + + try { + const result = await executeCommand(source.token); + return result; + } finally { + progress.reporter.done(); + source.dispose(); + } + } else { + const result = await executeCommand(token); + return result; + } + } + + protected onShutdown(token: CancellationToken) { + // Shutdown remaining workspaces. + this._workspaceDiagnosticsResolve?.({ items: [] }); + this.workspaceFactory.clear(); + + // Stop tracking all open files. + this.openFileMap.clear(); + this.serviceProvider.dispose(); + + return Promise.resolve(); + } + + protected convertDiagnostics(fs: FileSystem, fileDiagnostics: FileDiagnostics): PublishDiagnosticsParams[] { + return [ + { + uri: convertUriToLspUriString(fs, fileDiagnostics.fileUri), + version: fileDiagnostics.version, + diagnostics: this._convertDiagnostics(fs, fileDiagnostics.diagnostics), + }, + ]; + } + + protected getDiagCode(_diag: AnalyzerDiagnostic, rule: string | undefined): string | undefined { + return rule; + } + + protected onAnalysisCompletedHandler(fs: FileSystem, results: AnalysisResults): void { + // Send the computed diagnostics to the client. + results.diagnostics.forEach((fileDiag) => { + if (!this.canNavigateToFile(fileDiag.fileUri, fs)) { + return; + } + + this.sendDiagnostics(this.convertDiagnostics(fs, fileDiag)); + }); + + const reporter = this.getAnalysisProgressReporter(); + if (!reporter.isEnabled(results)) { + // Make sure to disable progress bar if it is currently active. + // This can happen if a user changes typeCheckingMode in the middle + // of analysis. + // end() is noop if there is no active progress bar. + reporter.end(); + return; + } + + // Update progress. 
+ this.sendProgressMessage(results.requiringAnalysisCount.files, results.requiringAnalysisCount.cells); + } + + protected incrementAnalysisProgress() { + this._progressReportCounter += 1; + this.sendProgressMessage(this._progressReportCounter); + } + + protected decrementAnalysisProgress() { + this._progressReportCounter -= 1; + if (this._progressReportCounter < 0) { + this._progressReportCounter = 0; + } + this.sendProgressMessage(this._progressReportCounter); + } + + protected getAnalysisProgressReporter(): ProgressReporter { + if (this._workspaceDiagnosticsProgressReporter) { + return this._workspaceDiagnosticsProgressReporter; + } + return this._progressReporter; + } + + protected sendProgressMessage(fileCount: number, cellCount?: number) { + const reporter = this.getAnalysisProgressReporter(); + if (fileCount <= 0) { + reporter.end(); + return; + } + const progressMessage = + fileCount === 1 + ? Localizer.CodeAction.filesToAnalyzeOne() + : Localizer.CodeAction.filesToAnalyzeCount().format({ + count: fileCount, + }); + + // Update progress. + if (!reporter.isDisplayingProgress()) { + reporter.begin(); + } + reporter.report(progressMessage); + } + + protected onWorkspaceCreated(workspace: Workspace) { + // Update settings on this workspace (but only if initialize has happened) + if (this._initialized) { + this.updateSettingsForWorkspace(workspace, workspace.isInitialized).catch(() => {}); + } + + // Otherwise the initialize completion should cause settings to be updated on all workspaces. 
+ } + + protected onWorkspaceRemoved(workspace: Workspace) { + const documentsWithDiagnosticsList = [...this.documentsWithDiagnostics]; + const otherWorkspaces = this.workspaceFactory.items().filter((w) => w !== workspace); + + for (const uri of documentsWithDiagnosticsList) { + const fileUri = this.convertLspUriStringToUri(uri); + + if (workspace.service.isTracked(fileUri)) { + // Do not clean up diagnostics for files tracked by multiple workspaces + if (otherWorkspaces.some((w) => w.service.isTracked(fileUri))) { + continue; + } + this.sendDiagnostics([ + { + uri: uri, + diagnostics: [], + }, + ]); + } + } + } + + protected createAnalyzerServiceForWorkspace( + name: string, + workspaceRoot: Uri | undefined, + kinds: string[], + services?: WorkspaceServices + ): AnalyzerService { + // 5 seconds default + const defaultBackOffTime = 5 * 1000; + + return this.createAnalyzerService(name, workspaceRoot || Uri.empty(), services, () => defaultBackOffTime); + } + + protected recordUserInteractionTime() { + // Tell all of the services that the user is actively + // interacting with one or more editors, so they should + // back off from performing any work. + this.workspaceFactory.items().forEach((workspace: { service: { recordUserInteractionTime: () => void } }) => { + workspace.service.recordUserInteractionTime(); + }); + } + + protected getDocumentationUrlForDiagnostic(diag: AnalyzerDiagnostic): string | undefined { + const rule = diag.getRule(); + if (rule) { + // Configuration.md is configured to have a link for every rule name. 
+ return `https://github.com/microsoft/pyright/blob/main/docs/configuration.md#${rule}`; + } + return undefined; + } + + protected abstract createProgressReporter(): ProgressReporter; + + protected canNavigateToFile(path: Uri, fs: FileSystem): boolean { + return canNavigateToFile(fs, path); + } + + protected async getProgressReporter(reporter: WorkDoneProgressReporter, title: string, token: CancellationToken) { + // This is a bit ugly, but we need to determine whether the provided reporter + // is an actual client-side progress reporter or a dummy (null) progress reporter + // created by the LSP library. If it's the latter, we'll create a server-initiated + // progress reporter. + if (!isNullProgressReporter(reporter)) { + return { reporter: reporter, source: CancelAfter(this.serviceProvider.cancellationProvider(), token) }; + } + + const serverInitiatedReporter = await this.connection.window.createWorkDoneProgress(); + serverInitiatedReporter.begin( + title, + /* percentage */ undefined, + /* message */ undefined, + /* cancellable */ true + ); + + return { + reporter: serverInitiatedReporter, + source: CancelAfter(this.serviceProvider.cancellationProvider(), token, serverInitiatedReporter.token), + }; + } + + protected sendDiagnostics(params: PublishDiagnosticsParams[]) { + for (const param of params) { + if (param.diagnostics.length === 0) { + this.documentsWithDiagnostics.delete(param.uri); + } else { + this.documentsWithDiagnostics.add(param.uri); + } + // If we're waiting for a pending workspace diagnostic, send a partial result. + if (this._workspaceDiagnosticsReporter) { + // Skip storing previous result ids, just send new results every time. + this._workspaceDiagnosticsReporter.report({ + items: [{ ...param, kind: 'full', version: param.version || null, items: param.diagnostics }], + }); + } else { + // Otherwise send a publish diagnostic notification. 
+ this.connection.sendDiagnostics(param); + } + } + } + + protected convertLspUriStringToUri(uri: string) { + return Uri.parse(uri, this.serverOptions.serviceProvider); + } + + protected addDynamicFeature(feature: DynamicFeature) { + this.dynamicFeatures.add(feature); + } + + private _getCompatibleMarkupKind(clientSupportedFormats: MarkupKind[] | undefined) { + const serverSupportedFormats = [MarkupKind.PlainText, MarkupKind.Markdown]; + + for (const format of clientSupportedFormats ?? []) { + if (serverSupportedFormats.includes(format)) { + return format; + } + } + + return MarkupKind.PlainText; + } + private _convertDiagnostics(fs: FileSystem, diags: AnalyzerDiagnostic[]): Diagnostic[] { + const convertedDiags: Diagnostic[] = []; + + diags.forEach((diag) => { + const severity = convertCategoryToSeverity(diag.category); + const rule = diag.getRule(); + const code = this.getDiagCode(diag, rule); + const vsDiag = Diagnostic.create(diag.range, diag.message, severity, code, this.serverOptions.productName); + + // Save all of the actions in the data. + const actions = diag.getActions(); + if (actions?.length) { + vsDiag.data = { ...vsDiag.data, actions: actions }; + } + + if ( + diag.category === DiagnosticCategory.UnusedCode || + diag.category === DiagnosticCategory.UnreachableCode + ) { + vsDiag.tags = [DiagnosticTag.Unnecessary]; + vsDiag.severity = DiagnosticSeverity.Hint; + vsDiag.data = { ...vsDiag.data, category: diag.category, rule: rule }; + + // If the client doesn't support "unnecessary" tags, don't report unused code. + if (!this.client.supportsUnnecessaryDiagnosticTag) { + return; + } + } else if (diag.category === DiagnosticCategory.Deprecated) { + vsDiag.tags = [DiagnosticTag.Deprecated]; + vsDiag.severity = DiagnosticSeverity.Hint; + + // If the client doesn't support "deprecated" tags, don't report. 
+ if (!this.client.supportsDeprecatedDiagnosticTag) { + return; + } + } else if (diag.category === DiagnosticCategory.TaskItem) { + // TaskItem is not supported. + return; + } + + if (rule) { + const ruleDocUrl = this.getDocumentationUrlForDiagnostic(diag); + if (ruleDocUrl) { + vsDiag.codeDescription = { + href: ruleDocUrl, + }; + } + } + + const relatedInfo = diag.getRelatedInfo(); + if (relatedInfo.length > 0) { + vsDiag.relatedInformation = relatedInfo + .filter((info) => this.canNavigateToFile(info.uri, fs)) + .map((info) => + DiagnosticRelatedInformation.create( + Location.create(convertUriToLspUriString(fs, info.uri), info.range), + info.message + ) + ); + } + + convertedDiags.push(vsDiag); + }); + + function convertCategoryToSeverity(category: DiagnosticCategory) { + switch (category) { + case DiagnosticCategory.Error: + return DiagnosticSeverity.Error; + + case DiagnosticCategory.Warning: + return DiagnosticSeverity.Warning; + + case DiagnosticCategory.Information: + case DiagnosticCategory.TaskItem: // task items only show up in the task list if they are information or above. + return DiagnosticSeverity.Information; + + case DiagnosticCategory.UnusedCode: + case DiagnosticCategory.UnreachableCode: + case DiagnosticCategory.Deprecated: + return DiagnosticSeverity.Hint; + } + } + + return convertedDiags; + } +} diff --git a/python-parser/packages/pyright-internal/src/languageService/analyzerServiceExecutor.ts b/python-parser/packages/pyright-internal/src/languageService/analyzerServiceExecutor.ts new file mode 100644 index 00000000..cae66a9b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/analyzerServiceExecutor.ts @@ -0,0 +1,165 @@ +/* + * analyzerServiceExecutor.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Runs the analyzer service of a given workspace service instance + * with a specified set of options. 
+ */ + +import { isPythonBinary } from '../analyzer/pythonPathUtils'; +import { AnalyzerService, getNextServiceId } from '../analyzer/service'; +import { CommandLineOptions } from '../common/commandLineOptions'; +import { LogLevel } from '../common/console'; +import { FileSystem } from '../common/fileSystem'; +import { LanguageServerBaseInterface, ServerSettings } from '../common/languageServerInterface'; +import { EmptyUri } from '../common/uri/emptyUri'; +import { Uri } from '../common/uri/uri'; + +import { WellKnownWorkspaceKinds, Workspace, createInitStatus } from '../workspaceFactory'; + +export interface CloneOptions { + useBackgroundAnalysis?: boolean; + typeStubTargetImportName?: string; + fileSystem?: FileSystem; +} + +export interface RunOptions { + typeStubTargetImportName?: string; + trackFiles?: boolean; + pythonEnvironmentName?: string; +} + +export class AnalyzerServiceExecutor { + static runWithOptions(workspace: Workspace, serverSettings: ServerSettings, options?: RunOptions): void { + const commandLineOptions = getEffectiveCommandLineOptions( + workspace.rootUri, + serverSettings, + options?.trackFiles ?? true, + options?.typeStubTargetImportName, + options?.pythonEnvironmentName + ); + + // Setting options causes the analyzer service to re-analyze everything. + workspace.service.setOptions(commandLineOptions); + } + + static async cloneService( + ls: LanguageServerBaseInterface, + workspace: Workspace, + options?: CloneOptions + ): Promise { + // Allocate a temporary pseudo-workspace to perform this job. + const instanceName = 'cloned service'; + const serviceId = getNextServiceId(instanceName); + + options = options ?? {}; + + const tempWorkspace: Workspace = { + ...workspace, + workspaceName: `temp workspace for cloned service`, + rootUri: workspace.rootUri, + kinds: [...workspace.kinds, WellKnownWorkspaceKinds.Cloned], + service: workspace.service.clone( + instanceName, + serviceId, + options.useBackgroundAnalysis + ? 
ls.createBackgroundAnalysis(serviceId, workspace.rootUri || EmptyUri.instance) + : undefined, + options.fileSystem + ), + disableLanguageServices: true, + disableTaggedHints: true, + disableOrganizeImports: true, + disableWorkspaceSymbol: true, + isInitialized: createInitStatus(), + searchPathsToWatch: [], + }; + + const serverSettings = await ls.getSettings(workspace); + AnalyzerServiceExecutor.runWithOptions(tempWorkspace, serverSettings, { + typeStubTargetImportName: options.typeStubTargetImportName, + trackFiles: false, + }); + + return tempWorkspace.service; + } +} + +export function getEffectiveCommandLineOptions( + workspaceRootUri: Uri | undefined, + serverSettings: ServerSettings, + trackFiles: boolean, + typeStubTargetImportName?: string, + pythonEnvironmentName?: string +) { + const commandLineOptions = new CommandLineOptions(workspaceRootUri, true); + commandLineOptions.languageServerSettings.checkOnlyOpenFiles = serverSettings.openFilesOnly; + commandLineOptions.configSettings.useLibraryCodeForTypes = serverSettings.useLibraryCodeForTypes; + commandLineOptions.configSettings.typeCheckingMode = serverSettings.typeCheckingMode; + commandLineOptions.languageServerSettings.autoImportCompletions = serverSettings.autoImportCompletions; + commandLineOptions.languageServerSettings.indexing = serverSettings.indexing; + commandLineOptions.languageServerSettings.taskListTokens = serverSettings.taskListTokens; + commandLineOptions.languageServerSettings.logTypeEvaluationTime = serverSettings.logTypeEvaluationTime ?? false; + commandLineOptions.languageServerSettings.typeEvaluationTimeThreshold = + serverSettings.typeEvaluationTimeThreshold ?? 
50; + commandLineOptions.languageServerSettings.enableAmbientAnalysis = trackFiles; + commandLineOptions.configSettings.pythonEnvironmentName = pythonEnvironmentName; + commandLineOptions.languageServerSettings.disableTaggedHints = serverSettings.disableTaggedHints; + + if (!trackFiles) { + commandLineOptions.languageServerSettings.watchForSourceChanges = false; + commandLineOptions.languageServerSettings.watchForLibraryChanges = false; + commandLineOptions.languageServerSettings.watchForConfigChanges = false; + } else { + commandLineOptions.languageServerSettings.watchForSourceChanges = serverSettings.watchForSourceChanges; + commandLineOptions.languageServerSettings.watchForLibraryChanges = serverSettings.watchForLibraryChanges; + commandLineOptions.languageServerSettings.watchForConfigChanges = serverSettings.watchForConfigChanges; + } + + if (serverSettings.venvPath) { + commandLineOptions.languageServerSettings.venvPath = serverSettings.venvPath.getFilePath(); + } + + if (serverSettings.pythonPath) { + // The Python VS Code extension treats the value "python" specially. This means + // the local python interpreter should be used rather than interpreting the + // setting value as a path to the interpreter. We'll simply ignore it in this case. + if (!isPythonBinary(serverSettings.pythonPath.getFilePath())) { + commandLineOptions.languageServerSettings.pythonPath = serverSettings.pythonPath.getFilePath(); + } + } + + if (serverSettings.typeshedPath) { + // Pyright supports only one typeshed path currently, whereas the + // official VS Code Python extension supports multiple typeshed paths. + // We'll use the first one specified and ignore the rest. 
+ commandLineOptions.configSettings.typeshedPath = serverSettings.typeshedPath.getFilePath(); + } + + if (serverSettings.stubPath) { + commandLineOptions.configSettings.stubPath = serverSettings.stubPath.getFilePath(); + } + + if (serverSettings.logLevel === LogLevel.Log) { + // When logLevel is "Trace", turn on verboseOutput as well + // so we can get detailed log from analysis service. + commandLineOptions.configSettings.verboseOutput = true; + } + + if (typeStubTargetImportName) { + commandLineOptions.languageServerSettings.typeStubTargetImportName = typeStubTargetImportName; + } + + commandLineOptions.configSettings.autoSearchPaths = serverSettings.autoSearchPaths; + commandLineOptions.configSettings.extraPaths = serverSettings.extraPaths?.map((e) => e.getFilePath()) ?? []; + commandLineOptions.configSettings.diagnosticSeverityOverrides = serverSettings.diagnosticSeverityOverrides; + commandLineOptions.configSettings.diagnosticBooleanOverrides = serverSettings.diagnosticBooleanOverrides; + + commandLineOptions.configSettings.includeFileSpecs = serverSettings.includeFileSpecs ?? []; + commandLineOptions.configSettings.excludeFileSpecs = serverSettings.excludeFileSpecs ?? []; + commandLineOptions.configSettings.ignoreFileSpecs = serverSettings.ignoreFileSpecs ?? []; + + return commandLineOptions; +} diff --git a/python-parser/packages/pyright-internal/src/languageService/autoImporter.ts b/python-parser/packages/pyright-internal/src/languageService/autoImporter.ts new file mode 100644 index 00000000..8cad91b4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/autoImporter.ts @@ -0,0 +1,858 @@ +/* + * autoImporter.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Logic for performing auto-import completions. 
+ */ + +import { CancellationToken, CompletionItem, CompletionItemKind, SymbolKind } from 'vscode-languageserver'; + +import { DeclarationType } from '../analyzer/declaration'; +import { ImportResolver, ModuleNameAndType } from '../analyzer/importResolver'; +import { ImportType } from '../analyzer/importResult'; +import { + ImportGroup, + ImportNameInfo, + ImportStatements, + ModuleNameInfo, + getImportGroup, + getImportGroupFromModuleNameAndType, + getTextEditsForAutoImportInsertion, + getTextEditsForAutoImportSymbolAddition, + getTopLevelImports, +} from '../analyzer/importStatementUtils'; +import { isUserCode } from '../analyzer/sourceFileInfoUtils'; +import { Symbol } from '../analyzer/symbol'; +import * as SymbolNameUtils from '../analyzer/symbolNameUtils'; +import { isVisibleExternally } from '../analyzer/symbolUtils'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { appendArray } from '../common/collectionUtils'; +import { ExecutionEnvironment } from '../common/configOptions'; +import { TextEditAction } from '../common/editAction'; +import { ProgramView, SourceFileInfo } from '../common/extensibility'; +import { stripFileExtension } from '../common/pathUtils'; +import * as StringUtils from '../common/stringUtils'; +import { Position } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { ParseNodeType } from '../parser/parseNodes'; +import { ParseFileResults } from '../parser/parser'; +import { CompletionItemData, CompletionMap } from './completionProvider'; +import { IndexAliasData } from './symbolIndexer'; +import { fromLSPAny } from '../common/lspUtils'; + +export interface AutoImportSymbol { + readonly name: string; + readonly library: boolean; + + readonly kind?: SymbolKind; + readonly itemKind?: CompletionItemKind; + readonly importAlias?: IndexAliasData; + + readonly symbol?: Symbol; + readonly inDunderAll?: boolean; + readonly hasRedundantAlias?: boolean; +} + +export interface 
ModuleSymbolTable { + readonly uri: Uri; + getSymbols(): Generator; +} + +export type ModuleSymbolMap = Map; + +export interface AutoImportResult { + readonly name: string; + readonly declUri: Uri; + readonly originalName: string; + readonly originalDeclUri: Uri; + readonly insertionText: string; + readonly symbol?: Symbol; + readonly source?: string; + readonly edits?: TextEditAction[]; + readonly alias?: string; + readonly kind?: CompletionItemKind; +} + +export interface AutoImportOptions { + readonly patternMatcher?: (pattern: string, name: string) => boolean; + readonly lazyEdit?: boolean; +} + +export interface ImportParts { + // The name of the module or symbol including alias from the `import` or `from ... import` statement + readonly importName: string; + + // The actual name of the symbol (not alias) + readonly symbolName?: string; + + // The name of the module from `from ... import` statement + readonly importFrom?: string; + + // Uri of the module + readonly fileUri: Uri; + + // The number of dots in the module name, indicating its depth in the module hierarchy + readonly dotCount: number; + + // `ModuleNameAndType` of the module. + readonly moduleNameAndType: ModuleNameAndType; +} + +export interface ImportAliasData { + readonly importParts: ImportParts; + readonly importGroup: ImportGroup; + readonly symbol?: Symbol; + readonly kind?: SymbolKind; + readonly itemKind?: CompletionItemKind; + readonly inDunderAll?: boolean; + readonly hasRedundantAlias?: boolean; + + // Uri pointing to the original module that contains the actual symbol that the alias resolves to. + readonly fileUri: Uri; +} + +export type AutoImportResultMap = Map; + +// Build a map of all modules within this program and the module- +// level scope that contains the symbol table for the module. 
+export function buildModuleSymbolsMap(program: ProgramView, files: readonly SourceFileInfo[]): ModuleSymbolMap { + const moduleSymbolMap = new Map(); + + files.forEach((file) => { + if (file.shadows.length > 0) { + // There is corresponding stub file. Don't add + // duplicated files in the map. + return; + } + + const uri = file.uri; + const symbolTable = program.getModuleSymbolTable(uri); + if (!symbolTable) { + return; + } + + const fileName = stripFileExtension(uri.fileName); + + // Don't offer imports from files that are named with private + // naming semantics like "_ast.py" unless they're in the current userfile list. + if (SymbolNameUtils.isPrivateOrProtectedName(fileName) && !isUserCode(file)) { + return; + } + + moduleSymbolMap.set(uri.key, { + uri, + *getSymbols() { + for (const [name, symbol] of symbolTable) { + if (!isVisibleExternally(symbol)) { + continue; + } + + const declarations = symbol.getDeclarations(); + if (!declarations || declarations.length === 0) { + continue; + } + + const declaration = declarations[0]; + if (!declaration) { + continue; + } + + if (declaration.type === DeclarationType.Alias && isUserCode(file)) { + // We don't include import alias in auto import + // for workspace files. + continue; + } + + const variableKind = + declaration.type === DeclarationType.Variable && !declaration.isConstant && !declaration.isFinal + ? 
SymbolKind.Variable + : undefined; + + yield { + name, + symbol, + kind: variableKind, + library: !isUserCode(file), + inDunderAll: symbol.isInDunderAll(), + }; + } + }, + }); + return; + }); + + return moduleSymbolMap; +} + +export class AutoImporter { + private readonly _importStatements: ImportStatements; + + constructor( + protected readonly program: ProgramView, + protected readonly execEnvironment: ExecutionEnvironment, + protected readonly parseResults: ParseFileResults, + private readonly _invocationPosition: Position, + private readonly _excludes: CompletionMap, + protected readonly moduleSymbolMap: ModuleSymbolMap, + protected readonly options: AutoImportOptions + ) { + this._importStatements = getTopLevelImports( + this.parseResults.parserOutput.parseTree, + /* includeImplicitImports */ true + ); + } + + getAutoImportCandidates( + word: string, + similarityLimit: number, + abbrFromUsers: string | undefined, + token: CancellationToken + ) { + const results: AutoImportResult[] = []; + const map = this.getCandidates(word, similarityLimit, abbrFromUsers, token); + + map.forEach((v) => appendArray(results, v)); + return results; + } + + protected get importResolver(): ImportResolver { + return this.program.importResolver; + } + + protected getCompletionItemData(item: CompletionItem): CompletionItemData | undefined { + return fromLSPAny(item.data); + } + + protected getCandidates( + word: string, + similarityLimit: number, + abbrFromUsers: string | undefined, + token: CancellationToken + ) { + const resultMap = new Map(); + const importAliasMap = new Map>(); + + this.addImportsFromModuleMap(word, similarityLimit, abbrFromUsers, importAliasMap, resultMap, token); + this.addImportsFromImportAliasMap(importAliasMap, abbrFromUsers, resultMap, token); + + return resultMap; + } + + protected addImportsFromModuleMap( + word: string, + similarityLimit: number, + abbrFromUsers: string | undefined, + aliasMap: Map>, + results: AutoImportResultMap, + token: 
CancellationToken + ) { + this.moduleSymbolMap.forEach((topLevelSymbols, key) => { + // See if this file should be offered as an implicit import. + const uriProperties = this.getUriProperties(this.moduleSymbolMap!, topLevelSymbols.uri); + this.processModuleSymbolTable( + topLevelSymbols, + topLevelSymbols.uri, + word, + similarityLimit, + uriProperties, + abbrFromUsers, + aliasMap, + results, + token + ); + }); + } + + protected addImportsFromImportAliasMap( + importAliasMap: Map>, + abbrFromUsers: string | undefined, + results: AutoImportResultMap, + token: CancellationToken + ) { + throwIfCancellationRequested(token); + + importAliasMap.forEach((mapPerSymbolName) => { + mapPerSymbolName.forEach((importAliasData, originalName) => { + if (abbrFromUsers) { + // When alias name is used, our regular exclude mechanism would not work. we need to check + // whether import, the alias is referring to, already exists. + // ex) import numpy + // np| <= auto-import here. + // or + // from scipy import io as spio + // io| <= auto-import here + + // If import statement for the module already exist, then bail out. + // ex) import module[.submodule] or from module[.submodule] import symbol + if (this._importStatements.mapByFilePath.has(importAliasData.importParts.fileUri.key)) { + return; + } + + // If it is the module itself that got imported, make sure we don't import it again. 
+ // ex) from module import submodule as ss + // submodule <= auto-import here + if (importAliasData.importParts.importFrom) { + const imported = this._importStatements.orderedImports.find( + (i) => i.moduleName === importAliasData.importParts.importFrom + ); + if ( + imported && + imported.node.nodeType === ParseNodeType.ImportFrom && + imported.node.d.imports.some( + (i) => i.d.name.d.value === importAliasData.importParts.symbolName + ) + ) { + return; + } + } + } + + const alreadyIncluded = this._containsName( + importAliasData.importParts.importName, + importAliasData.importParts.importFrom, + results + ); + if (alreadyIncluded) { + return; + } + + const autoImportTextEdits = this._getTextEditsForAutoImportByFilePath( + { name: importAliasData.importParts.symbolName, alias: abbrFromUsers }, + { + name: importAliasData.importParts.importFrom ?? importAliasData.importParts.importName, + }, + importAliasData.importParts.importName, + importAliasData.importGroup, + importAliasData.importParts.fileUri + ); + + this._addResult(results, { + name: importAliasData.importParts.importName, + alias: abbrFromUsers, + symbol: importAliasData.symbol, + kind: importAliasData.itemKind ?? 
convertSymbolKindToCompletionItemKind(importAliasData.kind), + source: importAliasData.importParts.importFrom, + insertionText: autoImportTextEdits.insertionText, + edits: autoImportTextEdits.edits, + declUri: importAliasData.importParts.fileUri, + originalName, + originalDeclUri: importAliasData.fileUri, + }); + }); + }); + } + + protected processModuleSymbolTable( + topLevelSymbols: ModuleSymbolTable, + moduleUri: Uri, + word: string, + similarityLimit: number, + fileProperties: { isStub: boolean; hasInit: boolean; isUserCode: boolean }, + abbrFromUsers: string | undefined, + importAliasMap: Map>, + results: AutoImportResultMap, + token: CancellationToken + ) { + throwIfCancellationRequested(token); + + const [importSource, importGroup, moduleNameAndType] = this._getImportPartsForSymbols(moduleUri); + if (!importSource) { + return; + } + + const dotCount = StringUtils.getCharacterCount(importSource, '.'); + for (const autoSymbol of topLevelSymbols.getSymbols()) { + if (!this.shouldIncludeVariable(autoSymbol, fileProperties.isStub)) { + continue; + } + + // For very short matching strings, we will require an exact match. Otherwise + // we will tend to return a list that's too long. Once we get beyond two + // characters, we can do a fuzzy match. 
+ const name = autoSymbol.name; + const isSimilar = this._isSimilar(word, name, similarityLimit); + if (!isSimilar) { + continue; + } + + const alreadyIncluded = this._containsName(name, importSource, results); + if (alreadyIncluded) { + continue; + } + + // We will collect all aliases and then process it later + if (autoSymbol.importAlias) { + this._addToImportAliasMap( + autoSymbol.importAlias, + { + importParts: { + symbolName: name, + importName: name, + importFrom: importSource, + fileUri: moduleUri, + dotCount, + moduleNameAndType, + }, + importGroup, + symbol: autoSymbol.symbol, + kind: autoSymbol.importAlias.kind, + itemKind: autoSymbol.importAlias.itemKind, + inDunderAll: autoSymbol.inDunderAll, + hasRedundantAlias: autoSymbol.hasRedundantAlias, + fileUri: autoSymbol.importAlias.moduleUri, + }, + importAliasMap + ); + continue; + } + + const nameForImportFrom = this.getNameForImportFrom(/* library */ !fileProperties.isUserCode, moduleUri); + const autoImportTextEdits = this._getTextEditsForAutoImportByFilePath( + { name, alias: abbrFromUsers }, + { name: importSource, nameForImportFrom }, + name, + importGroup, + moduleUri + ); + + this._addResult(results, { + name, + alias: abbrFromUsers, + symbol: autoSymbol.symbol, + source: importSource, + kind: autoSymbol.itemKind ?? convertSymbolKindToCompletionItemKind(autoSymbol.kind), + insertionText: autoImportTextEdits.insertionText, + edits: autoImportTextEdits.edits, + declUri: moduleUri, + originalName: name, + originalDeclUri: moduleUri, + }); + } + + // If the current file is in a directory that also contains an "__init__.py[i]" + // file, we can use that directory name as an implicit import target. + // Or if the file is a stub file, we can use it as import target. + // Skip this check for user code. 
+ if (!fileProperties.isStub && !fileProperties.hasInit && !fileProperties.isUserCode) { + return; + } + + const importParts = this._getImportParts(moduleUri); + if (!importParts) { + return; + } + + const isSimilar = this._isSimilar(word, importParts.importName, similarityLimit); + if (!isSimilar) { + return; + } + + const alreadyIncluded = this._containsName(importParts.importName, importParts.importFrom, results); + if (alreadyIncluded) { + return; + } + + this._addToImportAliasMap( + { + moduleUri, + originalName: importParts.importName, + kind: SymbolKind.Module, + itemKind: CompletionItemKind.Module, + }, + { + importParts, + importGroup, + kind: SymbolKind.Module, + itemKind: CompletionItemKind.Module, + fileUri: moduleUri, + }, + importAliasMap + ); + } + + protected getNameForImportFrom(library: boolean, moduleUri: Uri): string | undefined { + return undefined; + } + + protected getUriProperties(map: Map, uri: Uri) { + const fileDir = uri.getDirectory(); + const initPathPy = fileDir.initPyUri; + const initPathPyi = fileDir.initPyiUri; + const isStub = uri.hasExtension('.pyi'); + const hasInit = map.has(initPathPy.key) || map.has(initPathPyi.key); + const sourceFileInfo = this.program.getSourceFileInfo(uri); + return { isStub, hasInit, isUserCode: isUserCode(sourceFileInfo) }; + } + + protected compareImportAliasData(left: ImportAliasData, right: ImportAliasData) { + // Choose a better alias for the same declaration based on where the alias is defined. + // For example, we would prefer alias defined in builtin over defined in user files. 
+ const groupComparison = left.importGroup - right.importGroup; + if (groupComparison !== 0) { + return groupComparison; + } + + const dotComparison = left.importParts.dotCount - right.importParts.dotCount; + if (dotComparison !== 0) { + return dotComparison; + } + + if (left.symbol && !right.symbol) { + return -1; + } + + if (!left.symbol && right.symbol) { + return 1; + } + + return StringUtils.getStringComparer()(left.importParts.importName, right.importParts.importName); + } + + protected shouldIncludeVariable(autoSymbol: AutoImportSymbol, isStub: boolean) { + // If it is not a stub file and symbol is Variable, we only include it if + // name is public constant or type alias + if (isStub || autoSymbol.kind !== SymbolKind.Variable) { + return true; + } + + return SymbolNameUtils.isPublicConstantOrTypeAlias(autoSymbol.name); + } + + private _addToImportAliasMap( + alias: IndexAliasData, + data: ImportAliasData, + importAliasMap: Map> + ) { + // Since we don't resolve alias declaration using type evaluator, there is still a chance + // where we show multiple aliases for same symbols. but this should still reduce number of + // such cases. + if (!importAliasMap.has(alias.moduleUri.key)) { + const map = new Map(); + map.set(alias.originalName, data); + importAliasMap.set(alias.moduleUri.key, map); + return; + } + + const map = importAliasMap.get(alias.moduleUri.key)!; + if (!map.has(alias.originalName)) { + map.set(alias.originalName, data); + return; + } + + const existingData = map.get(alias.originalName)!; + const comparison = this.compareImportAliasData(existingData, data); + if (comparison <= 0) { + // Existing data is better than new one. + return; + } + + // Keep the new data. 
+ map.set(alias.originalName, data); + } + + private _getImportPartsForSymbols(uri: Uri): [string | undefined, ImportGroup, ModuleNameAndType] { + const localImport = this._importStatements.mapByFilePath.get(uri.key); + if (localImport) { + return [ + localImport.moduleName, + getImportGroup(localImport), + { + importType: ImportType.Local, + isLocalTypingsFile: false, + moduleName: localImport.moduleName, + }, + ]; + } else { + const moduleNameAndType = this._getModuleNameAndTypeFromFilePath(uri); + return [ + moduleNameAndType.moduleName, + getImportGroupFromModuleNameAndType(moduleNameAndType), + moduleNameAndType, + ]; + } + } + + private _getImportParts(uri: Uri) { + const name = stripFileExtension(uri.fileName); + + // See if we can import module as "import xxx" + if (name === '__init__') { + return createImportParts(this._getModuleNameAndTypeFromFilePath(uri.getDirectory())); + } + + return createImportParts(this._getModuleNameAndTypeFromFilePath(uri)); + + function createImportParts(module: ModuleNameAndType): ImportParts | undefined { + const moduleName = module.moduleName; + if (!moduleName) { + return undefined; + } + + const index = moduleName.lastIndexOf('.'); + const importNamePart = index > 0 ? moduleName.substring(index + 1) : undefined; + const importFrom = index > 0 ? moduleName.substring(0, index) : undefined; + return { + symbolName: importNamePart, + importName: importNamePart ?? moduleName, + importFrom, + fileUri: uri, + dotCount: StringUtils.getCharacterCount(moduleName, '.'), + moduleNameAndType: module, + }; + } + } + + private _isSimilar(word: string, name: string, similarityLimit: number) { + if (similarityLimit === 1) { + return word === name; + } + + if (word.length <= 0 || name.length <= 0) { + return false; + } + + if (!this.options.patternMatcher) { + const index = word[0] !== '_' && name[0] === '_' && name.length > 1 ? 
1 : 0; + if (word[0].toLocaleLowerCase() !== name[index].toLocaleLowerCase()) { + return false; + } + + return StringUtils.isPatternInSymbol(word, name); + } + + return this.options.patternMatcher(word, name); + } + + private _shouldExclude(name: string) { + return this._excludes.has(name, (i) => + CompletionMap.labelOnlyIgnoringAutoImports(i, this.getCompletionItemData.bind(this)) + ); + } + + private _containsName(name: string, source: string | undefined, results: AutoImportResultMap) { + if (this._shouldExclude(name)) { + return true; + } + + const match = results.get(name); + if (match?.some((r) => r.source === source)) { + return true; + } + + return false; + } + + // Given the file path of a module that we want to import, + // convert to a module name that can be used in an + // 'import from' statement. + private _getModuleNameAndTypeFromFilePath(uri: Uri): ModuleNameAndType { + return this.importResolver.getModuleNameForImport(uri, this.execEnvironment); + } + + private _getTextEditsForAutoImportByFilePath( + importNameInfo: ImportNameInfo, + moduleNameInfo: ModuleNameInfo, + insertionText: string, + importGroup: ImportGroup, + fileUri: Uri + ): { insertionText: string; edits?: TextEditAction[] | undefined } { + // If there is no symbolName, there can't be existing import statement. + const importStatement = this._importStatements.mapByFilePath.get(fileUri.key); + if (importStatement) { + // Found import for given module. See whether we can use the module as it is. + if (importStatement.node.nodeType === ParseNodeType.Import) { + // For now, we don't check whether alias or moduleName got overwritten at + // given position + const importAlias = importStatement.subnode?.d.alias?.d.value; + if (importNameInfo.name) { + // ex) import module + // method | <= auto-import + return { + insertionText: `${importAlias ?? 
importStatement.moduleName}.${importNameInfo.name}`, + edits: [], + }; + } else if (importAlias) { + // ex) import module as m + // m | <= auto-import + return { + insertionText: `${importAlias}`, + edits: [], + }; + } + } + + // Does an 'import from' statement already exist? + if ( + importNameInfo.name && + importStatement.node.nodeType === ParseNodeType.ImportFrom && + !importStatement.node.d.isWildcardImport + ) { + // If so, see whether what we want already exist. + const importNode = importStatement.node.d.imports.find((i) => i.d.name.d.value === importNameInfo.name); + if (importNode) { + // For now, we don't check whether alias or moduleName got overwritten at + // given position + const importAlias = importNode.d.alias?.d.value; + return { + insertionText: `${importAlias ?? importNameInfo.name}`, + edits: [], + }; + } + + // If not, add what we want at the existing 'import from' statement as long as + // what is imported is not module itself. + // ex) don't add "path" to existing "from os.path import dirname" statement. + if (moduleNameInfo.name === importStatement.moduleName) { + return { + insertionText: importNameInfo.alias ?? insertionText, + edits: this.options.lazyEdit + ? undefined + : getTextEditsForAutoImportSymbolAddition( + importNameInfo, + importStatement, + this.parseResults + ), + }; + } + } + } else if (importNameInfo.name) { + // If it is the module itself that got imported, make sure we don't import it again. + // ex) from module import submodule + const imported = this._importStatements.orderedImports.find((i) => i.moduleName === moduleNameInfo.name); + if (imported && imported.node.nodeType === ParseNodeType.ImportFrom && !imported.node.d.isWildcardImport) { + const importFrom = imported.node.d.imports.find((i) => i.d.name.d.value === importNameInfo.name); + if (importFrom) { + // For now, we don't check whether alias or moduleName got overwritten at + // given position. 
only move to alias, but not the other way around + const importAlias = importFrom.d.alias?.d.value; + if (importAlias) { + return { + insertionText: `${importAlias}`, + edits: [], + }; + } + } else { + // If not, add what we want at the existing import from statement. + return { + insertionText: importNameInfo.alias ?? insertionText, + edits: this.options.lazyEdit + ? undefined + : getTextEditsForAutoImportSymbolAddition(importNameInfo, imported, this.parseResults), + }; + } + } + + // Check whether it is one of implicit imports + const importFrom = this._importStatements.implicitImports?.get(fileUri.key); + if (importFrom) { + // For now, we don't check whether alias or moduleName got overwritten at + // given position + const importAlias = importFrom.d.alias?.d.value; + return { + insertionText: `${importAlias ?? importFrom.d.name.d.value}.${importNameInfo.name}`, + edits: [], + }; + } + } + + return { + insertionText: importNameInfo.alias ?? insertionText, + edits: this.options.lazyEdit + ? 
undefined + : getTextEditsForAutoImportInsertion( + importNameInfo, + moduleNameInfo, + this._importStatements, + importGroup, + this.parseResults, + this._invocationPosition + ), + }; + } + + private _addResult(results: AutoImportResultMap, result: AutoImportResult) { + let entries = results.get(result.name); + if (!entries) { + entries = []; + results.set(result.name, entries); + } + + entries.push(result); + } +} + +export function convertSymbolKindToCompletionItemKind(kind: SymbolKind | undefined) { + switch (kind) { + case SymbolKind.File: + return CompletionItemKind.File; + + case SymbolKind.Module: + case SymbolKind.Namespace: + return CompletionItemKind.Module; + + case SymbolKind.Package: + return CompletionItemKind.Folder; + + case SymbolKind.Class: + return CompletionItemKind.Class; + + case SymbolKind.Method: + return CompletionItemKind.Method; + + case SymbolKind.Property: + return CompletionItemKind.Property; + + case SymbolKind.Field: + return CompletionItemKind.Field; + + case SymbolKind.Constructor: + return CompletionItemKind.Constructor; + + case SymbolKind.Enum: + return CompletionItemKind.Enum; + + case SymbolKind.Interface: + return CompletionItemKind.Interface; + + case SymbolKind.Function: + return CompletionItemKind.Function; + + case SymbolKind.Variable: + case SymbolKind.Array: + return CompletionItemKind.Variable; + + case SymbolKind.String: + return CompletionItemKind.Constant; + + case SymbolKind.Number: + case SymbolKind.Boolean: + return CompletionItemKind.Value; + + case SymbolKind.Constant: + case SymbolKind.Null: + return CompletionItemKind.Constant; + + case SymbolKind.Object: + case SymbolKind.Key: + return CompletionItemKind.Value; + + case SymbolKind.EnumMember: + return CompletionItemKind.EnumMember; + + case SymbolKind.Struct: + return CompletionItemKind.Struct; + + case SymbolKind.Event: + return CompletionItemKind.Event; + + case SymbolKind.Operator: + return CompletionItemKind.Operator; + + case SymbolKind.TypeParameter: 
+ return CompletionItemKind.TypeParameter; + + default: + return undefined; + } +} diff --git a/python-parser/packages/pyright-internal/src/languageService/callHierarchyProvider.ts b/python-parser/packages/pyright-internal/src/languageService/callHierarchyProvider.ts new file mode 100644 index 00000000..75ec9aeb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/callHierarchyProvider.ts @@ -0,0 +1,627 @@ +/* + * callHierarchyProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Logic that provides a list of callers or callees associated with + * a position. + */ + +import { CancellationToken, SymbolKind } from 'vscode-languageserver'; +import { + CallHierarchyIncomingCall, + CallHierarchyItem, + CallHierarchyOutgoingCall, + Range, +} from 'vscode-languageserver-types'; + +import { Declaration, DeclarationType } from '../analyzer/declaration'; +import * as DeclarationUtils from '../analyzer/declarationUtils'; +import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; +import { ParseTreeWalker } from '../analyzer/parseTreeWalker'; +import { isUserCode } from '../analyzer/sourceFileInfoUtils'; +import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { MemberAccessFlags, doForEachSubtype, lookUpClassMember, lookUpObjectMember } from '../analyzer/typeUtils'; +import { ClassType, isClassInstance, isFunction, isInstantiableClass } from '../analyzer/types'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { appendArray } from '../common/collectionUtils'; +import { isDefined } from '../common/core'; +import { ProgramView, ReferenceUseCase, SymbolUsageProvider } from '../common/extensibility'; +import { ReadOnlyFileSystem } from '../common/fileSystem'; +import { getSymbolKind } from '../common/lspUtils'; +import { convertOffsetsToRange } from '../common/positionUtils'; +import { ServiceKeys } from '../common/serviceKeys'; 
+import { Position, rangesAreEqual } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { convertUriToLspUriString } from '../common/uri/uriUtils'; +import { ReferencesProvider, ReferencesResult } from '../languageService/referencesProvider'; +import { CallNode, MemberAccessNode, NameNode, ParseNode, ParseNodeType } from '../parser/parseNodes'; +import { ParseFileResults } from '../parser/parser'; +import { DocumentSymbolCollector } from './documentSymbolCollector'; +import { canNavigateToFile } from './navigationUtils'; + +export class CallHierarchyProvider { + private readonly _parseResults: ParseFileResults | undefined; + + constructor( + private _program: ProgramView, + private _fileUri: Uri, + private _position: Position, + private _token: CancellationToken + ) { + this._parseResults = this._program.getParseResults(this._fileUri); + } + + onPrepare(): CallHierarchyItem[] | null { + throwIfCancellationRequested(this._token); + if (!this._parseResults) { + return null; + } + + const referencesResult = this._getDeclaration(); + if (!referencesResult || referencesResult.declarations.length === 0) { + return null; + } + + const { targetDecl, callItemUri, symbolName } = this._getTargetDeclaration(referencesResult); + if ( + targetDecl.type !== DeclarationType.Function && + targetDecl.type !== DeclarationType.Class && + targetDecl.type !== DeclarationType.Alias + ) { + return null; + } + + // make sure the alias is resolved to class or function + if (targetDecl.type === DeclarationType.Alias) { + const resolvedDecl = this._evaluator.resolveAliasDeclaration(targetDecl, true); + if (!resolvedDecl) { + return null; + } + + if (resolvedDecl.type !== DeclarationType.Function && resolvedDecl.type !== DeclarationType.Class) { + return null; + } + } + + const callItem: CallHierarchyItem = { + name: symbolName, + kind: getSymbolKind(targetDecl, this._evaluator, symbolName) ?? 
SymbolKind.Module, + uri: convertUriToLspUriString(this._program.fileSystem, callItemUri), + range: targetDecl.range, + selectionRange: targetDecl.range, + }; + + if (!canNavigateToFile(this._program.fileSystem, Uri.parse(callItem.uri, this._program.serviceProvider))) { + return null; + } + + return [callItem]; + } + + getIncomingCalls(): CallHierarchyIncomingCall[] | null { + throwIfCancellationRequested(this._token); + if (!this._parseResults) { + return null; + } + + const referencesResult = this._getDeclaration(); + if (!referencesResult || referencesResult.declarations.length === 0) { + return null; + } + + const { targetDecl, symbolName } = this._getTargetDeclaration(referencesResult); + + const items: CallHierarchyIncomingCall[] = []; + const sourceFiles = + targetDecl.type === DeclarationType.Alias + ? [this._program.getSourceFileInfo(this._fileUri)!] + : this._program.getSourceFileInfoList(); + for (const curSourceFileInfo of sourceFiles) { + if (isUserCode(curSourceFileInfo) || curSourceFileInfo.isOpenByClient) { + const filePath = curSourceFileInfo.uri; + const itemsToAdd = this._getIncomingCallsForDeclaration(filePath, symbolName, targetDecl); + + if (itemsToAdd) { + appendArray(items, itemsToAdd); + } + + // This operation can consume significant memory, so check + // for situations where we need to discard the type cache. 
+ this._program.handleMemoryHighUsage(); + } + } + + if (items.length === 0) { + return null; + } + + return items.filter((item) => + canNavigateToFile(this._program.fileSystem, Uri.parse(item.from.uri, this._program.serviceProvider)) + ); + } + + getOutgoingCalls(): CallHierarchyOutgoingCall[] | null { + throwIfCancellationRequested(this._token); + if (!this._parseResults) { + return null; + } + + const referencesResult = this._getDeclaration(); + if (!referencesResult || referencesResult.declarations.length === 0) { + return null; + } + + const { targetDecl } = this._getTargetDeclaration(referencesResult); + + // Find the parse node root corresponding to the function or class. + let parseRoot: ParseNode | undefined; + const resolvedDecl = this._evaluator.resolveAliasDeclaration(targetDecl, /* resolveLocalNames */ true); + if (!resolvedDecl) { + return null; + } + + if (resolvedDecl.type === DeclarationType.Function) { + parseRoot = resolvedDecl.node; + } else if (resolvedDecl.type === DeclarationType.Class) { + // Look up the __init__ method for this class. + const classType = this._evaluator.getTypeForDeclaration(resolvedDecl)?.type; + if (classType && isInstantiableClass(classType)) { + // Don't perform a recursive search of parent classes in this + // case because we don't want to find an inherited __init__ + // method defined in a different module. 
+ const initMethodMember = lookUpClassMember( + classType, + '__init__', + MemberAccessFlags.SkipInstanceMembers | + MemberAccessFlags.SkipObjectBaseClass | + MemberAccessFlags.SkipBaseClasses + ); + if (initMethodMember) { + const initMethodType = this._evaluator.getTypeOfMember(initMethodMember); + if (initMethodType && isFunction(initMethodType)) { + const initDecls = initMethodMember.symbol.getDeclarations(); + if (initDecls && initDecls.length > 0) { + const primaryInitDecl = initDecls[0]; + if (primaryInitDecl.type === DeclarationType.Function) { + parseRoot = primaryInitDecl.node; + } + } + } + } + } + } + + if (!parseRoot) { + return null; + } + + const callFinder = new FindOutgoingCallTreeWalker( + this._program.fileSystem, + parseRoot, + this._parseResults, + this._evaluator, + this._token + ); + const outgoingCalls = callFinder.findCalls(); + if (outgoingCalls.length === 0) { + return null; + } + + return outgoingCalls.filter((item) => + canNavigateToFile(this._program.fileSystem, Uri.parse(item.to.uri, this._program.serviceProvider)) + ); + } + + private get _evaluator(): TypeEvaluator { + return this._program.evaluator!; + } + + private _getTargetDeclaration(referencesResult: ReferencesResult): { + targetDecl: Declaration; + callItemUri: Uri; + symbolName: string; + } { + // If there's more than one declaration, pick the target one. + // We'll always prefer one with a declared type, and we'll always + // prefer later declarations. + const declarations = referencesResult.declarations; + const node = referencesResult.nodeAtOffset; + let targetDecl = declarations[0]; + for (const decl of declarations) { + if (DeclarationUtils.hasTypeForDeclaration(decl) || !DeclarationUtils.hasTypeForDeclaration(targetDecl)) { + if (decl.type === DeclarationType.Function || decl.type === DeclarationType.Class) { + targetDecl = decl; + + // If the specified node is an exact match, use this declaration + // as the primary even if it's not the last. 
+ if (decl.node === node) { + break; + } + } + } + } + + let symbolName; + + // Although the LSP specification requires a URI, we are using a file path + // here because it is converted to the proper URI by the caller. + // This simplifies our code and ensures compatibility with the LSP specification. + let callItemUri: Uri; + if (targetDecl.type === DeclarationType.Alias) { + symbolName = (referencesResult.nodeAtOffset as NameNode).d.value; + callItemUri = this._fileUri; + } else { + symbolName = DeclarationUtils.getNameFromDeclaration(targetDecl) || referencesResult.symbolNames[0]; + callItemUri = targetDecl.uri; + } + + return { targetDecl, callItemUri, symbolName }; + } + + private _getIncomingCallsForDeclaration( + fileUri: Uri, + symbolName: string, + declaration: Declaration + ): CallHierarchyIncomingCall[] | undefined { + throwIfCancellationRequested(this._token); + + const callFinder = new FindIncomingCallTreeWalker(this._program, fileUri, symbolName, declaration, this._token); + + const incomingCalls = callFinder.findCalls(); + return incomingCalls.length > 0 ? 
incomingCalls : undefined; + } + + private _getDeclaration(): ReferencesResult | undefined { + return ReferencesProvider.getDeclarationForPosition( + this._program, + this._fileUri, + this._position, + /* reporter */ undefined, + ReferenceUseCase.References, + this._token + ); + } +} + +class FindOutgoingCallTreeWalker extends ParseTreeWalker { + private _outgoingCalls: CallHierarchyOutgoingCall[] = []; + + constructor( + private _fs: ReadOnlyFileSystem, + private _parseRoot: ParseNode, + private _parseResults: ParseFileResults, + private _evaluator: TypeEvaluator, + private _cancellationToken: CancellationToken + ) { + super(); + } + + findCalls(): CallHierarchyOutgoingCall[] { + this.walk(this._parseRoot); + return this._outgoingCalls; + } + + override visitCall(node: CallNode): boolean { + throwIfCancellationRequested(this._cancellationToken); + + let nameNode: NameNode | undefined; + + if (node.d.leftExpr.nodeType === ParseNodeType.Name) { + nameNode = node.d.leftExpr; + } else if (node.d.leftExpr.nodeType === ParseNodeType.MemberAccess) { + nameNode = node.d.leftExpr.d.member; + } + + if (nameNode) { + const declarations = this._evaluator.getDeclInfoForNameNode(nameNode)?.decls; + + if (declarations) { + // TODO - it would be better if we could match the call to the + // specific declaration (e.g. a specific overload of a property + // setter vs getter). For now, add callees for all declarations. + declarations.forEach((decl) => { + this._addOutgoingCallForDeclaration(nameNode!, decl); + }); + } + } + + return true; + } + + override visitMemberAccess(node: MemberAccessNode): boolean { + throwIfCancellationRequested(this._cancellationToken); + + // Determine whether the member corresponds to a property. + // If so, we'll treat it as a function call for purposes of + // finding outgoing calls. 
+ const leftHandType = this._evaluator.getType(node.d.leftExpr); + if (leftHandType) { + doForEachSubtype(leftHandType, (subtype) => { + let baseType = subtype; + + // This could be a bound TypeVar (e.g. used for "self" and "cls"). + baseType = this._evaluator.makeTopLevelTypeVarsConcrete(baseType); + + if (!isClassInstance(baseType)) { + return; + } + + const memberInfo = lookUpObjectMember(baseType, node.d.member.d.value); + if (!memberInfo) { + return; + } + + const memberType = this._evaluator.getTypeOfMember(memberInfo); + const propertyDecls = memberInfo.symbol.getDeclarations(); + + if (!memberType) { + return; + } + + if (isClassInstance(memberType) && ClassType.isPropertyClass(memberType)) { + propertyDecls.forEach((decl) => { + this._addOutgoingCallForDeclaration(node.d.member, decl); + }); + } + }); + } + + return true; + } + + private _addOutgoingCallForDeclaration(nameNode: NameNode, declaration: Declaration) { + const resolvedDecl = this._evaluator.resolveAliasDeclaration(declaration, /* resolveLocalNames */ true); + if (!resolvedDecl) { + return; + } + + if (resolvedDecl.type !== DeclarationType.Function && resolvedDecl.type !== DeclarationType.Class) { + return; + } + + const callDest: CallHierarchyItem = { + name: nameNode.d.value, + kind: getSymbolKind(resolvedDecl, this._evaluator, nameNode.d.value) ?? SymbolKind.Module, + uri: convertUriToLspUriString(this._fs, resolvedDecl.uri), + range: resolvedDecl.range, + selectionRange: resolvedDecl.range, + }; + + // Is there already a call recorded for this destination? If so, + // we'll simply add a new range. Otherwise, we'll create a new entry. 
+ let outgoingCall: CallHierarchyOutgoingCall | undefined = this._outgoingCalls.find( + (outgoing) => outgoing.to.uri === callDest.uri && rangesAreEqual(outgoing.to.range, callDest.range) + ); + + if (!outgoingCall) { + outgoingCall = { + to: callDest, + fromRanges: [], + }; + this._outgoingCalls.push(outgoingCall); + } + + if (outgoingCall && outgoingCall.to.name !== nameNode.d.value) { + // If both the function and its alias are called in the same function, + // the name of the call item will be the resolved declaration name, not the alias. + outgoingCall.to.name = DeclarationUtils.getNameFromDeclaration(resolvedDecl) ?? nameNode.d.value; + } + + const fromRange: Range = convertOffsetsToRange( + nameNode.start, + nameNode.start + nameNode.length, + this._parseResults.tokenizerOutput.lines + ); + outgoingCall.fromRanges.push(fromRange); + } +} + +class FindIncomingCallTreeWalker extends ParseTreeWalker { + private readonly _incomingCalls: CallHierarchyIncomingCall[] = []; + private readonly _declarations: Declaration[] = []; + + private readonly _usageProviders: SymbolUsageProvider[]; + private readonly _parseResults: ParseFileResults; + + constructor( + private readonly _program: ProgramView, + private readonly _fileUri: Uri, + private readonly _symbolName: string, + private readonly _targetDeclaration: Declaration, + private readonly _cancellationToken: CancellationToken + ) { + super(); + + this._parseResults = this._program.getParseResults(this._fileUri)!; + this._usageProviders = (this._program.serviceProvider.tryGet(ServiceKeys.symbolUsageProviderFactory) ?? 
[]) + .map((f) => + f.tryCreateProvider(ReferenceUseCase.References, [this._targetDeclaration], this._cancellationToken) + ) + .filter(isDefined); + + this._declarations.push(this._targetDeclaration); + this._usageProviders.forEach((p) => p.appendDeclarationsTo(this._declarations)); + } + + findCalls(): CallHierarchyIncomingCall[] { + this.walk(this._parseResults.parserOutput.parseTree); + return this._incomingCalls; + } + + override visitCall(node: CallNode): boolean { + throwIfCancellationRequested(this._cancellationToken); + + let nameNode: NameNode | undefined; + if (node.d.leftExpr.nodeType === ParseNodeType.Name) { + nameNode = node.d.leftExpr; + } else if (node.d.leftExpr.nodeType === ParseNodeType.MemberAccess) { + nameNode = node.d.leftExpr.d.member; + } + + // Don't bother doing any more work if the name doesn't match. + if (nameNode && nameNode.d.value === this._symbolName) { + const declarations = this._getDeclarations(nameNode); + if (declarations) { + if (this._targetDeclaration.type === DeclarationType.Alias) { + const resolvedCurDecls = this._evaluator.resolveAliasDeclaration( + this._targetDeclaration, + /* resolveLocalNames */ true + ); + if ( + resolvedCurDecls && + declarations.some((decl) => DeclarationUtils.areDeclarationsSame(decl!, resolvedCurDecls)) + ) { + this._addIncomingCallForDeclaration(nameNode!); + } + } else if ( + declarations.some((decl) => + this._declarations.some((t) => DeclarationUtils.areDeclarationsSame(decl, t)) + ) + ) { + this._addIncomingCallForDeclaration(nameNode!); + } + } + } + + return true; + } + + override visitMemberAccess(node: MemberAccessNode): boolean { + throwIfCancellationRequested(this._cancellationToken); + + if (node.d.member.d.value === this._symbolName) { + // Determine whether the member corresponds to a property. + // If so, we'll treat it as a function call for purposes of + // finding outgoing calls. 
+ const leftHandType = this._evaluator.getType(node.d.leftExpr); + if (leftHandType) { + doForEachSubtype(leftHandType, (subtype) => { + let baseType = subtype; + + // This could be a bound TypeVar (e.g. used for "self" and "cls"). + baseType = this._evaluator.makeTopLevelTypeVarsConcrete(baseType); + + if (!isClassInstance(baseType)) { + return; + } + + const memberInfo = lookUpObjectMember(baseType, node.d.member.d.value); + if (!memberInfo) { + return; + } + + const memberType = this._evaluator.getTypeOfMember(memberInfo); + const propertyDecls = memberInfo.symbol.getDeclarations(); + + if (!memberType) { + return; + } + + if ( + propertyDecls.some((decl) => + DeclarationUtils.areDeclarationsSame(decl!, this._targetDeclaration) + ) + ) { + this._addIncomingCallForDeclaration(node.d.member); + } + }); + } + } + + return true; + } + + private get _evaluator(): TypeEvaluator { + return this._program.evaluator!; + } + + private _getDeclarations(node: NameNode) { + const declarations = DocumentSymbolCollector.getDeclarationsForNode( + this._program, + node, + this._cancellationToken, + { resolveLocalNames: true } + ); + + const results = [...declarations]; + this._usageProviders.forEach((p) => p.appendDeclarationsAt(node, declarations, results)); + + return results; + } + + private _addIncomingCallForDeclaration(nameNode: NameNode) { + let executionNode = ParseTreeUtils.getExecutionScopeNode(nameNode); + while (executionNode && executionNode.nodeType === ParseNodeType.TypeParameterList) { + executionNode = ParseTreeUtils.getExecutionScopeNode(executionNode); + } + + if (!executionNode) { + return; + } + + let callSource: CallHierarchyItem; + if (executionNode.nodeType === ParseNodeType.Module) { + const moduleRange = convertOffsetsToRange(0, 0, this._parseResults.tokenizerOutput.lines); + const fileName = this._program.fileSystem.getOriginalUri(this._fileUri).fileName; + + callSource = { + name: `(module) ${fileName}`, + kind: SymbolKind.Module, + uri: 
convertUriToLspUriString(this._program.fileSystem, this._fileUri), + range: moduleRange, + selectionRange: moduleRange, + }; + } else if (executionNode.nodeType === ParseNodeType.Lambda) { + const lambdaRange = convertOffsetsToRange( + executionNode.start, + executionNode.start + executionNode.length, + this._parseResults.tokenizerOutput.lines + ); + + callSource = { + name: '(lambda)', + kind: SymbolKind.Function, + uri: convertUriToLspUriString(this._program.fileSystem, this._fileUri), + range: lambdaRange, + selectionRange: lambdaRange, + }; + } else { + const functionRange = convertOffsetsToRange( + executionNode.d.name.start, + executionNode.d.name.start + executionNode.d.name.length, + this._parseResults.tokenizerOutput.lines + ); + + callSource = { + name: executionNode.d.name.d.value, + kind: SymbolKind.Function, + uri: convertUriToLspUriString(this._program.fileSystem, this._fileUri), + range: functionRange, + selectionRange: functionRange, + }; + } + + // Is there already a call recorded for this caller? If so, + // we'll simply add a new range. Otherwise, we'll create a new entry. 
+ let incomingCall: CallHierarchyIncomingCall | undefined = this._incomingCalls.find( + (incoming) => incoming.from.uri === callSource.uri && rangesAreEqual(incoming.from.range, callSource.range) + ); + + if (!incomingCall) { + incomingCall = { + from: callSource, + fromRanges: [], + }; + this._incomingCalls.push(incomingCall); + } + + const fromRange: Range = convertOffsetsToRange( + nameNode.start, + nameNode.start + nameNode.length, + this._parseResults.tokenizerOutput.lines + ); + incomingCall.fromRanges.push(fromRange); + } +} diff --git a/python-parser/packages/pyright-internal/src/languageService/codeActionProvider.ts b/python-parser/packages/pyright-internal/src/languageService/codeActionProvider.ts new file mode 100644 index 00000000..cc71cafa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/codeActionProvider.ts @@ -0,0 +1,77 @@ +/* + * codeActionProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Handles 'code actions' requests from the client. 
+ */ + +import { CancellationToken, CodeAction, CodeActionKind } from 'vscode-languageserver'; + +import { Commands } from '../commands/commands'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { createCommand } from '../common/commandUtils'; +import { CreateTypeStubFileAction } from '../common/diagnostic'; +import { Range } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { Localizer } from '../localization/localize'; +import { Workspace } from '../workspaceFactory'; + +export class CodeActionProvider { + static mightSupport(kinds: CodeActionKind[] | undefined): boolean { + if (!kinds || kinds.length === 0) { + return true; + } + + // Only support quick fix actions + return kinds.some((s) => s.startsWith(CodeActionKind.QuickFix)); + } + + static async getCodeActionsForPosition( + workspace: Workspace, + fileUri: Uri, + range: Range, + kinds: CodeActionKind[] | undefined, + token: CancellationToken + ) { + throwIfCancellationRequested(token); + + const codeActions: CodeAction[] = []; + if (!workspace.rootUri || workspace.disableLanguageServices) { + return codeActions; + } + + if (!this.mightSupport(kinds)) { + // Early exit if code actions are going to be filtered anyway. + return codeActions; + } + + const diags = await workspace.service.getDiagnosticsForRange(fileUri, range, token); + const typeStubDiag = diags.find((d) => { + const actions = d.getActions(); + return actions && actions.find((a) => a.action === Commands.createTypeStub); + }); + + if (typeStubDiag) { + const action = typeStubDiag + .getActions()! 
+ .find((a) => a.action === Commands.createTypeStub) as CreateTypeStubFileAction; + if (action) { + const createTypeStubAction = CodeAction.create( + Localizer.CodeAction.createTypeStubFor().format({ moduleName: action.moduleName }), + createCommand( + Localizer.CodeAction.createTypeStub(), + Commands.createTypeStub, + workspace.rootUri.toString(), + action.moduleName, + fileUri.toString() + ), + CodeActionKind.QuickFix + ); + codeActions.push(createTypeStubAction); + } + } + + return codeActions; + } +} diff --git a/python-parser/packages/pyright-internal/src/languageService/completionProvider.ts b/python-parser/packages/pyright-internal/src/languageService/completionProvider.ts new file mode 100644 index 00000000..ac986dbf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/completionProvider.ts @@ -0,0 +1,3312 @@ +/* + * completionProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Logic that maps a position within a Python program file into + * a list of zero or more text completions that apply in the context. 
+ */ + +import { + CancellationToken, + CompletionItem, + CompletionItemKind, + CompletionList, + InsertTextFormat, + MarkupKind, + Range, + TextEdit, +} from 'vscode-languageserver'; + +import * as AnalyzerNodeInfo from '../analyzer/analyzerNodeInfo'; +import { + Declaration, + DeclarationType, + FunctionDeclaration, + isIntrinsicDeclaration, + isVariableDeclaration, + VariableDeclaration, +} from '../analyzer/declaration'; +import { isDefinedInFile } from '../analyzer/declarationUtils'; +import { transformTypeForEnumMember } from '../analyzer/enums'; +import { ImportedModuleDescriptor, ImportResolver } from '../analyzer/importResolver'; +import { ImportResult } from '../analyzer/importResult'; +import { getParamListDetails, ParamKind } from '../analyzer/parameterUtils'; +import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; +import { getCallNodeAndActiveParamIndex } from '../analyzer/parseTreeUtils'; +import { getScopeForNode } from '../analyzer/scopeUtils'; +import { isStubFile, SourceMapper } from '../analyzer/sourceMapper'; +import { Symbol, SymbolTable } from '../analyzer/symbol'; +import * as SymbolNameUtils from '../analyzer/symbolNameUtils'; +import { getLastTypedDeclarationForSymbol, isVisibleExternally } from '../analyzer/symbolUtils'; +import { getTypedDictMembersForClass } from '../analyzer/typedDicts'; +import { getModuleDocStringFromUris, isBuiltInModule } from '../analyzer/typeDocStringUtils'; +import { CallSignatureInfo, TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { printLiteralValue } from '../analyzer/typePrinter'; +import { + ClassType, + combineTypes, + EnumLiteral, + FunctionType, + isClass, + isClassInstance, + isFunction, + isFunctionOrOverloaded, + isInstantiableClass, + isModule, + isUnknown, + Type, + TypeBase, + TypeCategory, +} from '../analyzer/types'; +import { + containsLiteralType, + doForEachSignature, + doForEachSubtype, + getMembersForClass, + getMembersForModule, + isLiteralType, + 
isMaybeDescriptorInstance, + isNoneInstance, + lookUpClassMember, + MemberAccessFlags, +} from '../analyzer/typeUtils'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { appendArray } from '../common/collectionUtils'; +import { ExecutionEnvironment } from '../common/configOptions'; +import * as debug from '../common/debug'; +import { fail } from '../common/debug'; +import { ProgramView } from '../common/extensibility'; +import { fromLSPAny, toLSPAny } from '../common/lspUtils'; +import { convertOffsetToPosition, convertPositionToOffset } from '../common/positionUtils'; +import { PythonVersion, pythonVersion3_10, pythonVersion3_5 } from '../common/pythonVersion'; +import '../common/serviceProviderExtensions'; +import * as StringUtils from '../common/stringUtils'; +import { comparePositions, Position, TextRange } from '../common/textRange'; +import { TextRangeCollection } from '../common/textRangeCollection'; +import { Uri } from '../common/uri/uri'; +import { convertToTextEdits } from '../common/workspaceEditUtils'; +import { Localizer } from '../localization/localize'; +import { + ArgCategory, + DecoratorNode, + DictionaryKeyEntryNode, + DictionaryNode, + ErrorExpressionCategory, + ErrorNode, + ExpressionNode, + FormatStringNode, + ImportFromNode, + IndexNode, + isExpressionNode, + ModuleNameNode, + NameNode, + ParamCategory, + ParameterNode, + ParseNode, + ParseNodeType, + SetNode, + StringNode, + TypeAnnotationNode, +} from '../parser/parseNodes'; +import { ParseFileResults } from '../parser/parser'; +import { Tokenizer } from '../parser/tokenizer'; +import { + FStringStartToken, + OperatorToken, + OperatorType, + StringToken, + StringTokenFlags, + Token, + TokenType, +} from '../parser/tokenizerTypes'; +import { AutoImporter, AutoImportResult, buildModuleSymbolsMap } from './autoImporter'; +import { + CompletionDetail, + getCompletionItemDocumentation, + getTypeDetail, + SymbolDetail, +} from './completionProviderUtils'; 
+import { DocumentSymbolCollector } from './documentSymbolCollector'; +import { getAutoImportText, getDocumentationPartsForTypeAndDecl } from './tooltipUtils'; + +namespace Keywords { + const base: string[] = [ + // Expression keywords + 'True', + 'False', + 'None', + 'and', + 'or', + 'not', + 'is', + 'lambda', + 'yield', + + // Statement keywords + 'assert', + 'break', + 'class', + 'continue', + 'def', + 'del', + 'elif', + 'else', + 'except', + 'finally', + 'for', + 'from', + 'global', + 'if', + 'import', + 'in', + 'nonlocal', + 'pass', + 'raise', + 'return', + 'try', + 'type', + 'while', + 'with', + ]; + + const python3_5: string[] = [...base, 'async', 'await']; + + const python3_10: string[] = [...python3_5, 'case', 'match']; + + export function forVersion(version: PythonVersion): string[] { + if (PythonVersion.isGreaterOrEqualTo(version, pythonVersion3_10)) { + return python3_10; + } + if (PythonVersion.isGreaterOrEqualTo(version, pythonVersion3_5)) { + return python3_5; + } + return base; + } +} + +enum SortCategory { + // The order of the following is important. We use + // this to order the completion suggestions. + + // A keyword that must be entered for the syntax to be correct. + LikelyKeyword, + + // A module name recently used in an import statement. + RecentImportModuleName, + + // A module name used in an import statement. + ImportModuleName, + + // A literal string. + LiteralValue, + + // A named parameter in a call expression. + NamedParameter, + + // A keyword or symbol that was recently used for completion. + RecentKeywordOrSymbol, + + // An auto-import symbol that was recently used for completion. + RecentAutoImport, + + // A keyword in the python syntax. + Keyword, + + // An enum member. + EnumMember, + + // A normal symbol. + NormalSymbol, + + // A symbol that starts with _ or __ (used only when there is + // no matching filter). + PrivateSymbol, + + // A symbol with a dunder name (e.g. __init__). + DunderSymbol, + + // An auto-import symbol. 
+ AutoImport, +} + +// Completion items can have arbitrary data hanging off them. +// This data allows the resolve handling to disambiguate +// which item was selected. +export interface CompletionItemData { + uri: string; // Have to be strings because this data is passed across the LSP boundary. + position: Position; + autoImportText?: string; + symbolLabel?: string; + funcParensDisabled?: boolean; + moduleUri?: string; +} + +export interface CompletionOptions { + readonly format: MarkupKind; + readonly snippet: boolean; + readonly lazyEdit: boolean; + readonly triggerCharacter?: string; +} + +interface RecentCompletionInfo { + label: string; + autoImportText: string; +} + +interface QuoteInfo { + priorWord: string; + priorText: string; + filterText: string | undefined; + stringValue: string | undefined; + quoteCharacter: string; +} + +export const autoImportDetail = Localizer.Completion.autoImportDetail(); +export const indexValueDetail = Localizer.Completion.indexValueDetail(); + +// We'll use a somewhat-arbitrary cutoff value here to determine +// whether it's sufficiently similar. +const similarityLimit = 0.25; + +// We'll remember this many completions in the MRU list. +const maxRecentCompletions = 128; + +export class CompletionProvider { + private static _mostRecentCompletions: RecentCompletionInfo[] = []; + + // Indicates whether invocation position is inside of string literal + // token or an f-string expression. + private _stringLiteralContainer: StringToken | FStringStartToken | undefined = undefined; + + protected readonly execEnv: ExecutionEnvironment; + protected readonly parseResults: ParseFileResults; + protected readonly sourceMapper: SourceMapper; + + // If we're being asked to resolve a completion item, we run the + // original completion algorithm and look for this symbol. 
+ protected itemToResolve: CompletionItem | undefined; + + constructor( + protected readonly program: ProgramView, + protected readonly fileUri: Uri, + protected readonly position: Position, + protected readonly options: CompletionOptions, + protected readonly cancellationToken: CancellationToken + ) { + this.execEnv = this.configOptions.findExecEnvironment(this.fileUri); + + this.parseResults = this.program.getParseResults(this.fileUri)!; + this.sourceMapper = this.program.getSourceMapper(this.fileUri, this.cancellationToken, /* mapCompiled */ true); + } + + getCompletions(): CompletionList | null { + if (!this.program.getSourceFileInfo(this.fileUri)) { + return null; + } + + const completionMap = this._getCompletions(); + return CompletionList.create(completionMap?.toArray()); + } + + // When the user selects a completion, this callback is invoked, + // allowing us to record what was selected. This allows us to + // build our MRU cache so we can better predict entries. + resolveCompletionItem(completionItem: CompletionItem) { + throwIfCancellationRequested(this.cancellationToken); + + const completionItemData = this.getCompletionItemData(completionItem); + + const label = completionItem.label; + let autoImportText = ''; + if (completionItemData.autoImportText) { + autoImportText = completionItemData.autoImportText; + } + + const curIndex = CompletionProvider._mostRecentCompletions.findIndex( + (item) => item.label === label && item.autoImportText === autoImportText + ); + + if (curIndex > 0) { + // If there's an existing entry with the same name that's not at the + // beginning of the array, remove it. + CompletionProvider._mostRecentCompletions = CompletionProvider._mostRecentCompletions.splice(curIndex, 1); + } + + if (curIndex !== 0) { + // Add to the start of the array. 
+ CompletionProvider._mostRecentCompletions.unshift({ label, autoImportText }); + } + + if (CompletionProvider._mostRecentCompletions.length > maxRecentCompletions) { + // Prevent the MRU list from growing indefinitely. + CompletionProvider._mostRecentCompletions.pop(); + } + + if (!completionItemData.symbolLabel) { + return; + } + + if ( + completionItemData.moduleUri && + ImportResolver.isSupportedImportSourceFile( + Uri.parse(completionItemData.moduleUri, this.program.serviceProvider) + ) + ) { + const moduleUri = Uri.parse(completionItemData.moduleUri, this.program.serviceProvider); + const documentation = getModuleDocStringFromUris([moduleUri], this.sourceMapper); + if (!documentation) { + return; + } + + if (this.options.format === MarkupKind.Markdown) { + const markdownString = this.program.serviceProvider + .docStringService() + .convertDocStringToMarkdown(documentation, isBuiltInModule(moduleUri)); + completionItem.documentation = { + kind: MarkupKind.Markdown, + value: markdownString, + }; + } else if (this.options.format === MarkupKind.PlainText) { + const plainTextString = this.program.serviceProvider + .docStringService() + .convertDocStringToPlainText(documentation); + completionItem.documentation = { + kind: MarkupKind.PlainText, + value: plainTextString, + }; + } + return; + } + + this.itemToResolve = completionItem; + if (!completionItemData.autoImportText) { + // Rerun the completion lookup. It will fill in additional information + // about the item to be resolved. We'll ignore the rest of the returned + // list. This is a bit wasteful, but all of that information should be + // cached, so it's not as bad as it might seem. 
+ this.getCompletions(); + } else if (!completionItem.additionalTextEdits) { + const completionMap = new CompletionMap(); + this.addAutoImportCompletions( + completionItemData.symbolLabel, + /* similarityLimit */ 1, + /* lazyEdit */ false, + completionMap + ); + } + } + + protected get evaluator() { + return this.program.evaluator!; + } + + protected get importResolver() { + return this.program.importResolver; + } + + protected get configOptions() { + return this.program.configOptions; + } + + protected getCompletionItemData(item: CompletionItem): CompletionItemData { + return fromLSPAny(item.data); + } + + protected getMethodOverrideCompletions( + priorWord: string, + partialName: NameNode, + decorators?: DecoratorNode[] + ): CompletionMap | undefined { + const enclosingClass = ParseTreeUtils.getEnclosingClass(partialName, /* stopAtFunction */ true); + if (!enclosingClass) { + return undefined; + } + + const classResults = this.evaluator.getTypeOfClass(enclosingClass); + if (!classResults) { + return undefined; + } + + const symbolTable = new Map(); + for (let i = 1; i < classResults.classType.shared.mro.length; i++) { + const mroClass = classResults.classType.shared.mro[i]; + if (isInstantiableClass(mroClass)) { + getMembersForClass(mroClass, symbolTable, /* includeInstanceVars */ false); + } + } + + const staticmethod = decorators?.some((d) => ParseTreeUtils.checkDecorator(d, 'staticmethod')) ?? false; + const classmethod = decorators?.some((d) => ParseTreeUtils.checkDecorator(d, 'classmethod')) ?? 
false; + + const completionMap = new CompletionMap(); + + symbolTable.forEach((symbol, name) => { + let decl = getLastTypedDeclarationForSymbol(symbol); + if (decl && decl.type === DeclarationType.Function) { + if (StringUtils.isPatternInSymbol(partialName.d.value, name)) { + const declaredType = this.evaluator.getTypeForDeclaration(decl)?.type; + if (!declaredType) { + return; + } + + let isProperty = isClassInstance(declaredType) && ClassType.isPropertyClass(declaredType); + + if (SymbolNameUtils.isDunderName(name)) { + // Don't offer suggestions for built-in properties like "__class__", etc. + isProperty = false; + } + + if (!isFunction(declaredType) && !isProperty) { + return; + } + + if (isProperty) { + // For properties, we should override the "getter", which is typically + // the first declaration. + const typedDecls = symbol.getTypedDeclarations(); + if (typedDecls.length > 0 && typedDecls[0].type === DeclarationType.Function) { + decl = typedDecls[0]; + } + } + + const isDeclaredStaticMethod = + isFunction(declaredType) && FunctionType.isStaticMethod(declaredType); + + // Special-case the "__init_subclass__" method because it's an implicit + // classmethod that the type evaluator flags as a real classmethod. 
+ const isDeclaredClassMethod = + isFunction(declaredType) && + FunctionType.isClassMethod(declaredType) && + name !== '__init_subclass__'; + + if (staticmethod !== isDeclaredStaticMethod || classmethod !== isDeclaredClassMethod) { + return; + } + + const methodSignature = this._printMethodSignature(classResults.classType, decl); + + let text: string; + if (isStubFile(this.fileUri)) { + text = `${methodSignature}: ...`; + } else { + const methodBody = this.printOverriddenMethodBody( + classResults.classType, + isDeclaredStaticMethod, + isProperty, + decl, + decl.node.d.isAsync + ); + text = `${methodSignature}:\n${methodBody}`; + } + + const textEdit = this.createReplaceEdits(priorWord, partialName, text); + + this.addSymbol(name, symbol, partialName.d.value, completionMap, { + // method signature already contains () + funcParensDisabled: true, + edits: { + format: this.options.snippet ? InsertTextFormat.Snippet : undefined, + textEdit, + }, + }); + } + } + }); + + return completionMap; + } + + protected printOverriddenMethodBody( + classType: ClassType, + isStaticMethod: boolean, + isProperty: boolean, + decl: FunctionDeclaration, + insertAwait?: boolean + ) { + let sb = this.parseResults.tokenizerOutput.predominantTabSequence; + + if ( + classType.shared.baseClasses.length === 1 && + isClass(classType.shared.baseClasses[0]) && + classType.shared.baseClasses[0].shared.fullName === 'builtins.object' + ) { + sb += this.options.snippet ? '${0:pass}' : 'pass'; + return sb; + } + + if (decl.node.d.params.length === 0) { + sb += this.options.snippet ? '${0:pass}' : 'pass'; + return sb; + } + + const parameters = getParameters(isStaticMethod ? 
decl.node.d.params : decl.node.d.params.slice(1)); + if (decl.node.d.name.d.value !== '__init__') { + sb += 'return '; + } + + if (insertAwait) { + sb += 'await '; + } + + if (isProperty) { + return sb + `super().${decl.node.d.name.d.value}`; + } + + return sb + `super().${decl.node.d.name.d.value}(${parameters.map(convertToString).join(', ')})`; + + function getParameters(parameters: ParameterNode[]) { + const results: [node: ParameterNode, keywordOnly: boolean][] = []; + + let sawKeywordOnlySeparator = false; + for (const parameter of parameters) { + if (parameter.d.name) { + results.push([ + parameter, + parameter.d.category === ParamCategory.Simple && !!parameter.d.name && sawKeywordOnlySeparator, + ]); + } + + // All simple parameters after a `*` or `*args` parameter + // are considered keyword only. + if (parameter.d.category === ParamCategory.ArgsList) { + sawKeywordOnlySeparator = true; + } + } + + return results; + } + + function convertToString(parameter: [node: ParameterNode, keywordOnly: boolean]) { + const name = parameter[0].d.name?.d.value; + if (parameter[0].d.category === ParamCategory.ArgsList) { + return `*${name}`; + } + + if (parameter[0].d.category === ParamCategory.KwargsDict) { + return `**${name}`; + } + + return parameter[1] ? `${name}=${name}` : name; + } + } + + protected createReplaceEdits(priorWord: string, node: ParseNode | undefined, text: string) { + const replaceOrInsertEndChar = + node?.nodeType === ParseNodeType.Name + ? this.position.character - priorWord.length + node.d.value.length + : this.position.character; + + const range: Range = { + start: { line: this.position.line, character: this.position.character - priorWord.length }, + end: { line: this.position.line, character: replaceOrInsertEndChar }, + }; + + return TextEdit.replace(range, text); + } + + protected shouldProcessDeclaration(declaration: Declaration | undefined) { + // By default, we allow all symbol/decl to be included in the completion. 
+ return true; + } + + protected addSymbol( + name: string, + symbol: Symbol, + priorWord: string, + completionMap: CompletionMap, + detail: SymbolDetail + ) { + // Make sure we don't crash due to OOM. + this.program.handleMemoryHighUsage(); + + let primaryDecl = getLastTypedDeclarationForSymbol(symbol); + if (!primaryDecl) { + const declarations = symbol.getDeclarations(); + if (declarations.length > 0) { + primaryDecl = declarations[declarations.length - 1]; + } + } + + if (!this.shouldProcessDeclaration(primaryDecl)) { + return; + } + + primaryDecl = primaryDecl + ? this.evaluator.resolveAliasDeclaration(primaryDecl, /* resolveLocalNames */ true) ?? primaryDecl + : undefined; + + const autoImportText = + detail.autoImportSource && this.program.configOptions.autoImportCompletions + ? this.getAutoImportText(name, detail.autoImportSource, detail.autoImportAlias) + : undefined; + + // Are we resolving a completion item? If so, see if this symbol + // is the one that we're trying to match. + if (this.itemToResolve) { + const completionItemData = this.getCompletionItemData(this.itemToResolve); + + if (completionItemData.symbolLabel !== name) { + // It's not what we are looking for. + return; + } + + if ( + this.itemToResolve.additionalTextEdits === undefined && + detail.edits?.additionalTextEdits !== undefined + ) { + this.itemToResolve.additionalTextEdits = convertToTextEdits(detail.edits.additionalTextEdits); + } + + if (completionItemData.autoImportText) { + return; + } + + // This call can be expensive to perform on every completion item + // that we return, so we do it lazily in the "resolve" callback. + const type = this.evaluator.getEffectiveTypeOfSymbol(symbol); + if (!type) { + // Can't resolve. so bail out. 
+ return; + } + + const typeDetail = getTypeDetail( + this.evaluator, + type, + primaryDecl, + name, + detail, + this.configOptions.functionSignatureDisplay + ); + const documentation = getDocumentationPartsForTypeAndDecl( + this.sourceMapper, + type, + primaryDecl, + this.evaluator, + { + name, + symbol, + boundObjectOrClass: detail.boundObjectOrClass, + } + ); + + if (this.options.format === MarkupKind.Markdown || this.options.format === MarkupKind.PlainText) { + this.itemToResolve.documentation = getCompletionItemDocumentation( + this.program.serviceProvider, + typeDetail, + documentation, + this.options.format, + primaryDecl + ); + } else { + fail(`Unsupported markup type: ${this.options.format}`); + } + + // Bail out. We don't need to add items to completion. + return; + } + + if (primaryDecl) { + let itemKind = this._convertDeclarationTypeToItemKind(primaryDecl); + + // Handle enum members specially. Enum members normally look like + // variables, but the are declared using assignment expressions + // within an enum class. + if (this._isEnumMember(detail.boundObjectOrClass, name)) { + itemKind = CompletionItemKind.EnumMember; + } + + this.addNameToCompletions(detail.autoImportAlias ?? name, itemKind, priorWord, completionMap, { + autoImportText, + extraCommitChars: detail.extraCommitChars, + funcParensDisabled: detail.funcParensDisabled, + edits: detail.edits, + }); + } else { + // Does the symbol have no declaration but instead has a synthesized type? 
+ const synthesizedType = symbol.getSynthesizedType()?.type; + if (synthesizedType) { + const itemKind: CompletionItemKind = this._convertTypeToItemKind(synthesizedType); + this.addNameToCompletions(name, itemKind, priorWord, completionMap, { + extraCommitChars: detail.extraCommitChars, + funcParensDisabled: detail.funcParensDisabled, + edits: detail.edits, + }); + } + } + } + + protected getMemberAccessCompletions(leftExprNode: ExpressionNode, priorWord: string): CompletionMap | undefined { + const symbolTable = new Map(); + const completionMap = new CompletionMap(); + + let leftType = this.evaluator.getType(leftExprNode); + if (!leftType) { + return completionMap; + } + + leftType = this.evaluator.makeTopLevelTypeVarsConcrete(leftType); + + // If this is an unknown type with a "possible type" associated with + // it, use the possible type. + if (isUnknown(leftType) && leftType.priv.possibleType) { + leftType = this.evaluator.makeTopLevelTypeVarsConcrete(leftType.priv.possibleType); + } + + doForEachSubtype(leftType, (subtype) => { + subtype = this.evaluator.makeTopLevelTypeVarsConcrete(subtype); + + if (isClass(subtype)) { + const instance = TypeBase.isInstance(subtype); + getMembersForClass(subtype, symbolTable, instance); + + if (ClassType.isEnumClass(subtype) && instance) { + // Don't show enum member out of another enum member + // ex) Enum.Member. <= shouldn't show `Member` again. 
+ for (const name of symbolTable.keys()) { + if (this._isEnumMember(subtype, name)) { + symbolTable.delete(name); + } + } + } + } else if (isModule(subtype)) { + getMembersForModule(subtype, symbolTable); + } else if (isFunctionOrOverloaded(subtype)) { + const functionClass = this.evaluator.getBuiltInType(leftExprNode, 'function'); + if (functionClass && isInstantiableClass(functionClass)) { + getMembersForClass(functionClass, symbolTable, /* includeInstanceVars */ true); + } + } else if (isNoneInstance(subtype)) { + const objectClass = this.evaluator.getBuiltInType(leftExprNode, 'object'); + if (objectClass && isInstantiableClass(objectClass)) { + getMembersForClass(objectClass, symbolTable, TypeBase.isInstance(subtype)); + } + } + + this._addSymbolsForSymbolTable( + symbolTable, + () => true, + priorWord, + leftExprNode, + /* isInImport */ false, + isClass(subtype) ? subtype : undefined, + completionMap + ); + }); + + return completionMap; + } + + protected createAutoImporter(completionMap: CompletionMap, lazyEdit: boolean) { + const currentFile = this.program.getSourceFileInfo(this.fileUri); + const moduleSymbolMap = buildModuleSymbolsMap( + this.program, + this.program.getSourceFileInfoList().filter((s) => s !== currentFile) + ); + + return new AutoImporter( + this.program, + this.execEnv, + this.parseResults, + this.position, + completionMap, + moduleSymbolMap, + { + lazyEdit, + } + ); + } + + protected addAutoImportCompletions( + priorWord: string, + similarityLimit: number, + lazyEdit: boolean, + completionMap: CompletionMap, + parensDisabled?: boolean + ) { + if (!this.configOptions.autoImportCompletions) { + // If auto import on the server is turned off or this particular invocation + // is turned off (ex, notebook), don't do any thing. 
+ return; + } + + const autoImporter = this.createAutoImporter(completionMap, lazyEdit); + + const results: AutoImportResult[] = []; + appendArray( + results, + autoImporter.getAutoImportCandidates( + priorWord, + similarityLimit, + /* abbrFromUsers */ undefined, + this.cancellationToken + ) + ); + + this.addImportResults(results, priorWord, completionMap, parensDisabled); + } + + protected addImportResults( + results: AutoImportResult[], + priorWord: string, + completionMap: CompletionMap, + parensDisabled?: boolean + ) { + for (const result of results) { + if (result.symbol) { + this.addSymbol(result.name, result.symbol, priorWord, completionMap, { + extraCommitChars: true, + autoImportSource: result.source, + autoImportAlias: result.alias, + edits: { + textEdit: this.createReplaceEdits(priorWord, /* node */ undefined, result.insertionText), + additionalTextEdits: result.edits, + }, + funcParensDisabled: parensDisabled, + }); + } else { + this.addNameToCompletions( + result.alias ?? result.name, + result.kind ?? CompletionItemKind.Module, + this.program.configOptions.autoImportCompletions ? priorWord : result.alias ?? result.name, + completionMap, + this.program.configOptions.autoImportCompletions + ? { + extraCommitChars: true, + autoImportText: this.getAutoImportText(result.name, result.source, result.alias), + edits: { + textEdit: this.createReplaceEdits( + priorWord, + /* node */ undefined, + result.insertionText + ), + additionalTextEdits: result.edits, + }, + funcParensDisabled: parensDisabled, + } + : undefined + ); + } + } + } + + protected addExtraCommitChar(item: CompletionItem) { + // extra commit char is not supported. + } + + protected addNameToCompletions( + name: string, + itemKind: CompletionItemKind, + filter: string, + completionMap: CompletionMap, + detail?: CompletionDetail + ) { + // Auto importer already filtered out unnecessary ones. No need to do it again. + const similarity = detail?.autoImportText ? 
true : StringUtils.isPatternInSymbol(filter, name); + if (!similarity) { + return; + } + + if ( + completionMap.has( + name, + (i) => CompletionMap.matchKindAndImportText(i, this.getCompletionItemData.bind(this), itemKind), + itemKind, + detail?.autoImportText?.importText + ) + ) { + return; + } + + const completionItem = CompletionItem.create(name); + completionItem.kind = itemKind; + + if (detail?.extraCommitChars) { + this.addExtraCommitChar(completionItem); + } + + const completionItemData: CompletionItemData = { + uri: this.fileUri.toString(), + position: this.position, + }; + + if (detail?.funcParensDisabled || !this.options.snippet) { + completionItemData.funcParensDisabled = true; + } + + if (detail?.moduleUri) { + completionItemData.moduleUri = detail.moduleUri.toString(); + } + + completionItem.data = toLSPAny(completionItemData); + + if (detail?.sortText || detail?.itemDetail) { + completionItem.sortText = detail.sortText; + completionItem.detail = detail.itemDetail; + } else if (detail?.autoImportText) { + // Force auto-import entries to the end. + completionItem.sortText = this._makeSortText( + SortCategory.AutoImport, + `${name}.${this._formatInteger(detail.autoImportText.source.length, 2)}.${ + detail.autoImportText.source + }`, + detail.autoImportText.importText + ); + completionItemData.autoImportText = detail.autoImportText.importText; + completionItem.detail = autoImportDetail; + + if (detail.autoImportText.source) { + completionItem.labelDetails = { description: detail.autoImportText.source }; + } + } else if (itemKind === CompletionItemKind.EnumMember) { + // Handle enum members separately so they are sorted above other symbols. + completionItem.sortText = this._makeSortText(SortCategory.EnumMember, name); + } else if (SymbolNameUtils.isDunderName(name)) { + // Force dunder-named symbols to appear after all other symbols. 
+ completionItem.sortText = this._makeSortText(SortCategory.DunderSymbol, name); + } else if (filter === '' && SymbolNameUtils.isPrivateOrProtectedName(name)) { + // Distinguish between normal and private symbols only if there is + // currently no filter text. Once we get a single character to filter + // upon, we'll no longer differentiate. + completionItem.sortText = this._makeSortText(SortCategory.PrivateSymbol, name); + } else { + completionItem.sortText = this._makeSortText(SortCategory.NormalSymbol, name); + } + + completionItemData.symbolLabel = name; + + if (this.options.format === MarkupKind.Markdown) { + let markdownString = ''; + + if (detail?.autoImportText) { + markdownString += detail.autoImportText.importText; + if (detail.typeDetail || detail.documentation) { + // Micro perf optimization to not create new string from trimEnd. + markdownString += '\n\n'; + } + } + + if (detail?.typeDetail) { + markdownString += '```python\n' + detail.typeDetail + '\n```\n'; + } + + if (detail?.documentation) { + markdownString += '---\n'; + markdownString += this.program.serviceProvider + .docStringService() + .convertDocStringToMarkdown(detail.documentation, isBuiltInModule(detail.moduleUri)); + } + + markdownString = markdownString.trimEnd(); + + if (markdownString) { + completionItem.documentation = { + kind: MarkupKind.Markdown, + value: markdownString, + }; + } + } else if (this.options.format === MarkupKind.PlainText) { + let plainTextString = ''; + + if (detail?.autoImportText) { + plainTextString += detail.autoImportText.importText; + if (detail.typeDetail || detail.documentation) { + // Micro perf optimization to not create new string from trimEnd. 
+ plainTextString += '\n\n'; + } + } + + if (detail?.typeDetail) { + plainTextString += detail.typeDetail + '\n'; + } + + if (detail?.documentation) { + plainTextString += + '\n' + + this.program.serviceProvider.docStringService().convertDocStringToPlainText(detail.documentation); + } + + plainTextString = plainTextString.trimEnd(); + + if (plainTextString) { + completionItem.documentation = { + kind: MarkupKind.PlainText, + value: plainTextString, + }; + } + } else { + fail(`Unsupported markup type: ${this.options.format}`); + } + + if (detail?.edits?.format) { + completionItem.insertTextFormat = detail.edits.format; + } + + if (detail?.edits?.textEdit) { + completionItem.textEdit = detail.edits.textEdit; + } + + if (detail?.edits?.additionalTextEdits) { + completionItem.additionalTextEdits = convertToTextEdits(detail.edits.additionalTextEdits); + + // This is for auto import entries from indices which skip symbols. + if (this.itemToResolve) { + const data = this.getCompletionItemData(this.itemToResolve); + if (data.autoImportText === completionItemData.autoImportText) { + this.itemToResolve.additionalTextEdits = completionItem.additionalTextEdits; + } + } + } + + completionMap.set(completionItem); + } + + protected getAutoImportText(importName: string, importFrom?: string, importAlias?: string) { + const autoImportText = getAutoImportText(importName, importFrom, importAlias); + + let importText = ''; + if (this.options.format === MarkupKind.Markdown) { + importText = `\`\`\`\n${autoImportText}\n\`\`\``; + } else if (this.options.format === MarkupKind.PlainText) { + importText = autoImportText; + } else { + fail(`Unsupported markup type: ${this.options.format}`); + } + + return { + source: importFrom ?? '', + importText, + }; + } + + private get _fileContents() { + return this.parseResults?.text ?? 
    // Entry point for computing the completion list at `this.position`.
    // Maps the position to an offset and a parse node, determines the
    // syntactic context (string literal, import, member access, error node,
    // dictionary key, suite, etc.), and dispatches to the matching
    // specialized completion routine. Returns undefined when no completions
    // should be offered at this position.
    private _getCompletions(): CompletionMap | undefined {
        const offset = convertPositionToOffset(this.position, this.parseResults.tokenizerOutput.lines);
        if (offset === undefined) {
            return undefined;
        }

        let node = ParseTreeUtils.findNodeByOffset(this.parseResults.parserOutput.parseTree, offset);

        // See if we're inside a string literal or an f-string statement.
        const token = ParseTreeUtils.getTokenOverlapping(this.parseResults.tokenizerOutput.tokens, offset);
        if (token?.type === TokenType.String) {
            const stringToken = token as StringToken;
            // Record the containing string token when the offset is inside it,
            // or when the string is unterminated (the cursor may sit past its
            // current end while the user is still typing the literal).
            this._stringLiteralContainer = TextRange.contains(stringToken, offset)
                ? stringToken
                : stringToken.flags & StringTokenFlags.Unterminated
                ? stringToken
                : undefined;
        } else if (node) {
            const fStringContainer = ParseTreeUtils.getParentNodeOfType(
                node,
                ParseNodeType.FormatString
            );
            if (fStringContainer) {
                this._stringLiteralContainer = fStringContainer.d.token;
            }
        }

        // See if we can get to a "better" node by backing up a few columns.
        // A "better" node is defined as one that's deeper than the current
        // node.
        const initialNode = node;
        const initialDepth = node ? ParseTreeUtils.getNodeDepth(node) : 0;

        if (!initialNode || initialNode.nodeType !== ParseNodeType.Name) {
            let curOffset = offset;
            let sawComma = false;

            while (curOffset >= 0) {
                curOffset--;

                // Stop scanning backward if we hit certain stop characters.
                const curChar = this._fileContents.substr(curOffset, 1);
                if (curChar === '(' || curChar === '\n' || curChar === '}') {
                    break;
                }
                if (curChar === ',') {
                    sawComma = true;
                }

                const curNode = ParseTreeUtils.findNodeByOffset(this.parseResults.parserOutput.parseTree, curOffset);
                if (curNode && curNode !== initialNode) {
                    if (ParseTreeUtils.getNodeDepth(curNode) > initialDepth) {
                        node = curNode;

                        // If we're at the end of a list with a hanging comma, handle the
                        // special case of "from x import y, ".
                        if (sawComma && node.parent?.nodeType === ParseNodeType.ImportFromAs) {
                            node = node.parent;
                        }
                    }
                    break;
                }
            }
        }

        if (node === undefined) {
            return undefined;
        }

        // Get the text on that line prior to the insertion point.
        const lineTextRange = this.parseResults.tokenizerOutput.lines.getItemAt(this.position.line);
        const textOnLine = this._fileContents.substr(lineTextRange.start, lineTextRange.length);
        const priorText = textOnLine.substr(0, this.position.character);
        const postText = textOnLine.substr(this.position.character);
        // Unicode-aware identifier scan: letters, digits, connector
        // punctuation, and combining marks at the end of the prior text.
        const priorWordIndex = priorText.search(/[\p{L}\p{N}\p{Pc}\p{Mn}\p{Mc}]+$/u);
        const priorWord = priorWordIndex >= 0 ? priorText.substr(priorWordIndex) : '';

        // Don't offer completions if we're within a comment.
        if (this._isWithinComment(offset)) {
            return undefined;
        }

        // See if the node is part of an error node. If so, that takes
        // precedence.
        let errorNode: ParseNode | undefined = node;
        while (errorNode) {
            if (errorNode.nodeType === ParseNodeType.Error) {
                break;
            }

            errorNode = errorNode.parent;
        }

        // Determine the context based on the parse node's type and
        // that of its ancestors.
        let curNode = errorNode || node;
        while (true) {
            throwIfCancellationRequested(this.cancellationToken);

            if (curNode.nodeType === ParseNodeType.String) {
                return this._getLiteralCompletions(curNode, offset, priorWord, priorText, postText);
            }

            if (curNode.nodeType === ParseNodeType.StringList || curNode.nodeType === ParseNodeType.FormatString) {
                return undefined;
            }

            if (curNode.nodeType === ParseNodeType.ModuleName) {
                return this._getImportModuleCompletions(curNode);
            }

            if (curNode.nodeType === ParseNodeType.Error) {
                return this._getExpressionErrorCompletions(curNode, offset, priorWord, priorText, postText);
            }

            if (curNode.nodeType === ParseNodeType.MemberAccess) {
                return this.getMemberAccessCompletions(curNode.d.leftExpr, priorWord);
            }

            if (curNode.nodeType === ParseNodeType.Dictionary) {
                const completionMap = new CompletionMap();
                if (
                    this._tryAddTypedDictKeysFromDictionary(
                        curNode,
                        /* stringNode */ undefined,
                        priorWord,
                        priorText,
                        postText,
                        completionMap
                    )
                ) {
                    return completionMap;
                }
            }

            const dictionaryEntry = ParseTreeUtils.getFirstAncestorOrSelfOfKind(
                curNode,
                ParseNodeType.DictionaryKeyEntry
            );
            if (dictionaryEntry) {
                if (dictionaryEntry.parent?.nodeType === ParseNodeType.Dictionary) {
                    const dictionaryNode = dictionaryEntry.parent;
                    // Only offer TypedDict keys when the cursor sits after a
                    // trailing comma in the dictionary display.
                    if (dictionaryNode.d.trailingCommaToken && dictionaryNode.d.trailingCommaToken.start < offset) {
                        const completionMap = new CompletionMap();
                        if (
                            this._tryAddTypedDictKeysFromDictionary(
                                dictionaryNode,
                                /* stringNode */ undefined,
                                priorWord,
                                priorText,
                                postText,
                                completionMap
                            )
                        ) {
                            return completionMap;
                        }
                    }
                }
            }

            if (curNode.nodeType === ParseNodeType.Name) {
                // This condition is little different than others since it does its own
                // tree walk up to find context and let outer tree walk up to proceed if it can't find
                // one to show completion.
                const result = this._tryGetNameCompletions(curNode, offset, priorWord, priorText, postText);
                // `false` means "keep walking up the tree"; a CompletionMap or
                // undefined is a final answer.
                if (result || result === undefined) {
                    return result;
                }
            }

            if (curNode.nodeType === ParseNodeType.List && this.options.triggerCharacter === '[') {
                // If this is an empty list, don't start putting completions up yet.
                return undefined;
            }

            if (curNode.nodeType === ParseNodeType.ImportFrom) {
                return this._getImportFromCompletions(curNode, offset, priorWord);
            }

            if (isExpressionNode(curNode)) {
                return this._getExpressionCompletions(curNode, priorWord, priorText, postText);
            }

            if (curNode.nodeType === ParseNodeType.Suite) {
                if (
                    curNode.parent &&
                    curNode.parent.nodeType === ParseNodeType.Except &&
                    !curNode.parent.d.name &&
                    curNode.parent.d.typeExpr &&
                    TextRange.getEnd(curNode.parent.d.typeExpr) < offset &&
                    offset <= curNode.parent.d.exceptSuite.start
                ) {
                    // except Exception as []
                    return undefined;
                }

                if (
                    curNode.parent &&
                    curNode.parent.nodeType === ParseNodeType.Class &&
                    (!curNode.parent.d.name || !curNode.parent.d.name.d.value) &&
                    curNode.parent.d.arguments.length === 0 &&
                    offset <= curNode.parent.d.suite.start
                ) {
                    // class []
                    return undefined;
                }

                return this._getStatementCompletions(curNode, priorWord, priorText, postText);
            }

            if (curNode.nodeType === ParseNodeType.Module) {
                return this._getStatementCompletions(curNode, priorWord, priorText, postText);
            }

            if (
                curNode.nodeType === ParseNodeType.Parameter &&
                curNode.length === 0 &&
                curNode.parent &&
                curNode.parent.nodeType === ParseNodeType.Lambda
            ) {
                // lambda [] or lambda x, []
                return undefined;
            }

            if (!curNode.parent) {
                break;
            }

            curNode = curNode.parent;
        }

        return undefined;
    }
    // This method returns false if it wants the caller to walk up the
    // tree. It returns CompletionResults or undefined if it wants the
    // caller to return.
    //
    // Each branch recognizes one syntactic context for a Name node (import
    // aliases, module names, member access, function/class/parameter names,
    // for-loop targets, assignments, class-variable definitions) and either
    // produces a completion map, suppresses completions (undefined), or
    // declines to decide (false).
    private _tryGetNameCompletions(
        curNode: NameNode,
        offset: number,
        priorWord: string,
        priorText: string,
        postText: string
    ): false | CompletionMap | undefined {
        if (!curNode.parent) {
            return false;
        }

        if (curNode.parent.nodeType === ParseNodeType.ImportAs && curNode.parent.d.alias === curNode) {
            // Are we within a "import Y as [Z]"?
            return undefined;
        }

        if (curNode.parent.nodeType === ParseNodeType.ModuleName) {
            // Are we within a "import Y as []"?
            if (
                curNode.parent.parent &&
                curNode.parent.parent.nodeType === ParseNodeType.ImportAs &&
                !curNode.parent.parent.d.alias &&
                TextRange.getEnd(curNode.parent.parent) < offset
            ) {
                return undefined;
            }

            // Are we within a "from X import Y as Z" statement and
            // more specifically within the "Y"?
            return this._getImportModuleCompletions(curNode.parent);
        }

        if (curNode.parent.nodeType === ParseNodeType.ImportFromAs) {
            if (curNode.parent.d.alias === curNode) {
                // Are we within a "from X import Y as [Z]"?
                return undefined;
            }

            const parentNode = curNode.parent.parent;
            if (parentNode && parentNode.nodeType === ParseNodeType.ImportFrom) {
                // Are we within a "from X import Y as []"?
                if (!curNode.parent.d.alias && TextRange.getEnd(curNode.parent) < offset) {
                    return undefined;
                }

                if (curNode.parent.d.name === curNode) {
                    return this._getImportFromCompletions(parentNode, offset, priorWord);
                }

                // Cursor is on the alias side; offer the full import list with
                // no filtering word.
                return this._getImportFromCompletions(parentNode, offset, '');
            }

            return false;
        }

        if (curNode.parent.nodeType === ParseNodeType.MemberAccess && curNode === curNode.parent.d.member) {
            return this.getMemberAccessCompletions(curNode.parent.d.leftExpr, priorWord);
        }

        if (curNode.parent.nodeType === ParseNodeType.Except && curNode === curNode.parent.d.name) {
            // "except E as [name]" — the name being bound gets no completions.
            return undefined;
        }

        if (curNode.parent.nodeType === ParseNodeType.Function && curNode === curNode.parent.d.name) {
            // A partially-typed function name: only offer completions when it
            // carries an @overload decorator (to complete sibling overloads).
            if (curNode.parent.d.decorators?.some((d) => this._isOverload(d))) {
                return this._getMethodOverloadsCompletions(priorWord, curNode);
            }

            return undefined;
        }

        if (curNode.parent.nodeType === ParseNodeType.Parameter && curNode === curNode.parent.d.name) {
            return undefined;
        }

        if (curNode.parent.nodeType === ParseNodeType.Class && curNode === curNode.parent.d.name) {
            return undefined;
        }

        if (
            curNode.parent.nodeType === ParseNodeType.For &&
            TextRange.contains(curNode.parent.d.targetExpr, curNode.start)
        ) {
            return undefined;
        }

        if (
            curNode.parent.nodeType === ParseNodeType.ComprehensionFor &&
            TextRange.contains(curNode.parent.d.targetExpr, curNode.start)
        ) {
            return undefined;
        }

        // For assignments that implicitly declare variables, remove itself (var decl) from completion.
        if (
            curNode.parent.nodeType === ParseNodeType.Assignment ||
            curNode.parent.nodeType === ParseNodeType.AssignmentExpression
        ) {
            const leftNode =
                curNode.parent.nodeType === ParseNodeType.AssignmentExpression
                    ? curNode.parent.d.name
                    : curNode.parent.d.leftExpr;

            if (leftNode !== curNode || priorWord.length === 0) {
                return false;
            }

            // Only applies when this node is the sole declaration of the
            // variable being assigned.
            const decls = this.evaluator.getDeclInfoForNameNode(curNode)?.decls;
            if (decls?.length !== 1 || !isVariableDeclaration(decls[0]) || decls[0].node !== curNode) {
                return false;
            }

            const completionMap = this._getExpressionCompletions(curNode, priorWord, priorText, postText);
            if (completionMap) {
                // Exclude the name currently being declared from its own list.
                completionMap.delete(curNode.d.value);
            }

            return completionMap;
        }

        // Defining class variables.
        // ex) class A:
        //         variable = 1
        if (
            curNode.parent.nodeType === ParseNodeType.StatementList &&
            curNode.parent.parent?.nodeType === ParseNodeType.Suite &&
            curNode.parent.parent.parent?.nodeType === ParseNodeType.Class
        ) {
            const completionList = this._getClassVariableCompletions(curNode);
            if (completionList) {
                return completionList;
            }
        }

        return false;
    }
    // Returns true when `offset` falls inside a comment. Comments are not
    // parse nodes; they are attached to the token that follows them, so we
    // locate the token at/after the offset and test its associated comments.
    private _isWithinComment(offset: number): boolean {
        const token = getTokenAfter(offset, this.parseResults.tokenizerOutput.tokens);
        if (!token) {
            // If we're in the middle of a token, we're not in a comment.
            return false;
        }

        return token.comments?.some((c) => TextRange.overlaps(c, offset)) ?? false;

        // NOTE(review): the type argument of TextRangeCollection appears to
        // have been lost in extraction (presumably TextRangeCollection<Token>)
        // — confirm against the original source.
        function getTokenAfter(offset: number, tokens: TextRangeCollection) {
            const tokenIndex = tokens.getItemAtPosition(offset);
            if (tokenIndex < 0) {
                return undefined;
            }

            let token = tokens.getItemAt(tokenIndex);
            // If we're in the middle of a token, we can't be within a comment.
            if (offset > token.start && offset < token.start + token.length) {
                return undefined;
            }

            // Multiple zero length tokens can occupy same position.
            // But comment is associated with the first one. loop
            // backward to find the first token if position is same.
            for (let i = tokenIndex - 1; i >= 0; i--) {
                const prevToken = tokens.getItemAt(i);
                if (token.start !== prevToken.start) {
                    break;
                }

                token = prevToken;
            }

            if (offset <= token.start) {
                return token;
            }

            // If offset > token.start, tokenIndex + 1 < tokens.length
            // should be always true.
            debug.assert(tokenIndex + 1 < tokens.length);
            return tokens.getItemAt(tokenIndex + 1);
        }
    }
    // Computes completions for positions inside an ErrorNode (incomplete or
    // malformed code), dispatching on the error category. Handles missing
    // "in"/"else" keywords, missing member-access names, missing expressions,
    // decorator names, patterns/index slices, and missing function parameter
    // lists.
    private _getExpressionErrorCompletions(
        node: ErrorNode,
        offset: number,
        priorWord: string,
        priorText: string,
        postText: string
    ): CompletionMap | undefined {
        // Is the error due to a missing member access name? If so,
        // we can evaluate the left side of the member access expression
        // to determine its type and offer suggestions based on it.
        switch (node.d.category) {
            case ErrorExpressionCategory.MissingIn: {
                return this._createSingleKeywordCompletion('in');
            }

            case ErrorExpressionCategory.MissingElse: {
                return this._createSingleKeywordCompletion('else');
            }

            case ErrorExpressionCategory.MissingMemberAccessName:
            case ErrorExpressionCategory.MissingExpression: {
                // Don't show completion after random dots.
                const tokenizerOutput = this.parseResults.tokenizerOutput;
                // Shadows the parameter intentionally: re-derive the offset
                // from the current position for token-level inspection.
                const offset = convertPositionToOffset(this.position, tokenizerOutput.lines);
                const index = ParseTreeUtils.getTokenIndexAtLeft(tokenizerOutput.tokens, offset!);
                const token = ParseTreeUtils.getTokenAtIndex(tokenizerOutput.tokens, index);
                const prevToken = ParseTreeUtils.getTokenAtIndex(tokenizerOutput.tokens, index - 1);

                if (node.d.category === ErrorExpressionCategory.MissingExpression) {
                    // Skip dots on expressions.
                    if (token?.type === TokenType.Dot || token?.type === TokenType.Ellipsis) {
                        break;
                    }

                    // ex) class MyType:
                    //         def is_str(self): ...
                    //     myType = MyType()
                    //
                    // In incomplete code such as "myType.is" <= "is" will be tokenized as keyword not identifier,
                    // so even if user's intention is writing "is_str", completion after "is" won't include "is_str"
                    // since parser won't see "is" as partially written member name instead it will see it as
                    // expression statement with missing expression after "is" keyword.
                    // In such case, use "MyType." to get completion.
                    if (token?.type !== TokenType.Keyword || TextRange.getEnd(token) !== offset) {
                        return this._getExpressionCompletions(node, priorWord, priorText, postText);
                    }

                    if (prevToken?.type !== TokenType.Dot) {
                        return this._getExpressionCompletions(node, priorWord, priorText, postText);
                    }

                    const previousOffset = TextRange.getEnd(prevToken);
                    const previousNode = ParseTreeUtils.findNodeByOffset(
                        this.parseResults.parserOutput.parseTree,
                        previousOffset
                    );
                    if (
                        previousNode?.nodeType !== ParseNodeType.Error ||
                        previousNode.d.category !== ErrorExpressionCategory.MissingMemberAccessName
                    ) {
                        return this._getExpressionCompletions(node, priorWord, priorText, postText);
                    } else {
                        // Update node to previous node so we get the member access completions.
                        node = previousNode;
                    }
                } else if (node.d.category === ErrorExpressionCategory.MissingMemberAccessName) {
                    // Skip double dots on member access.
                    if (
                        (token?.type === TokenType.Dot || token?.type === TokenType.Ellipsis) &&
                        (prevToken?.type === TokenType.Dot || prevToken?.type === TokenType.Ellipsis)
                    ) {
                        return undefined;
                    }
                }

                return this._getMissingMemberAccessNameCompletions(node, priorWord);
            }

            case ErrorExpressionCategory.MissingDecoratorCallName: {
                return this._getExpressionCompletions(node, priorWord, priorText, postText);
            }

            case ErrorExpressionCategory.MissingPattern:
            case ErrorExpressionCategory.MissingIndexOrSlice: {
                // Prefer literal (e.g. TypedDict key) completions; fall back
                // to general expression completions.
                let completionResults = this._getLiteralCompletions(node, offset, priorWord, priorText, postText);

                if (!completionResults) {
                    completionResults = this._getExpressionCompletions(node, priorWord, priorText, postText);
                }

                return completionResults;
            }

            case ErrorExpressionCategory.MissingFunctionParameterList: {
                if (node.d.child && node.d.child.nodeType === ParseNodeType.Name) {
                    if (node.d.decorators?.some((d) => this._isOverload(d))) {
                        return this._getMethodOverloadsCompletions(priorWord, node.d.child);
                    }

                    // Determine if the partial name is a method that's overriding
                    // a method in a base class.
                    return this.getMethodOverrideCompletions(priorWord, node.d.child, node.d.decorators);
                }
                break;
            }

            case ErrorExpressionCategory.MissingTupleCloseParen: {
                return this._getExpressionCompletions(node, priorWord, priorText, postText);
            }
        }

        return undefined;
    }
    // Offers member-access completions for an error node whose child is the
    // expression to the left of the missing member name.
    private _getMissingMemberAccessNameCompletions(node: ErrorNode, priorWord: string) {
        if (!node.d.child || !isExpressionNode(node.d.child)) {
            return undefined;
        }

        return this.getMemberAccessCompletions(node.d.child, priorWord);
    }

    // True when the decorator node is an @overload decorator.
    private _isOverload(node: DecoratorNode): boolean {
        return ParseTreeUtils.checkDecorator(node, 'overload');
    }

    // Builds a completion map containing a single keyword item (used for
    // contexts such as a missing "in" or "else").
    private _createSingleKeywordCompletion(keyword: string): CompletionMap {
        const completionItem = CompletionItem.create(keyword);
        completionItem.kind = CompletionItemKind.Keyword;
        completionItem.sortText = this._makeSortText(SortCategory.LikelyKeyword, keyword);
        const completionMap = new CompletionMap();
        completionMap.set(completionItem);
        return completionMap;
    }

    // When the cursor sits in the type-annotation position of a class
    // variable ("class T: f: |"), suggests the annotation used for the same
    // attribute in a base class — first via the evaluator's semantic type,
    // then, failing that, by syntactically reusing a base-class declaration's
    // annotation text.
    private _addClassVariableTypeAnnotationCompletions(
        priorWord: string,
        parseNode: ParseNode,
        completionMap: CompletionMap
    ): void {
        // class T:
        //     f: |<= here
        const isTypeAnnotationOfClassVariable =
            parseNode.parent?.nodeType === ParseNodeType.TypeAnnotation &&
            parseNode.parent.d.valueExpr.nodeType === ParseNodeType.Name &&
            parseNode.parent.d.annotation === parseNode &&
            parseNode.parent.parent?.nodeType === ParseNodeType.StatementList &&
            parseNode.parent.parent.parent?.nodeType === ParseNodeType.Suite &&
            parseNode.parent.parent.parent.parent?.nodeType === ParseNodeType.Class;

        if (!isTypeAnnotationOfClassVariable) {
            return;
        }

        const enclosingClass = ParseTreeUtils.getEnclosingClass(parseNode, false);
        if (!enclosingClass) {
            return;
        }

        const classResults = this.evaluator.getTypeOfClass(enclosingClass);
        if (!classResults) {
            return undefined;
        }

        const classVariableName = ((parseNode.parent as TypeAnnotationNode).d.valueExpr as NameNode).d.value;
        // Look only at base classes, skipping instance members and the class
        // being defined.
        const classMember = lookUpClassMember(
            classResults.classType,
            classVariableName,
            MemberAccessFlags.SkipInstanceMembers | MemberAccessFlags.SkipOriginalClass
        );

        // First, see whether we can use semantic info to get variable type.
        if (classMember) {
            const memberType = this.evaluator.getTypeOfMember(classMember);

            const text = this.evaluator.printType(memberType, {
                enforcePythonSyntax: true,
                expandTypeAlias: false,
            });

            this.addNameToCompletions(text, CompletionItemKind.Reference, priorWord, completionMap, {
                sortText: this._makeSortText(SortCategory.LikelyKeyword, text),
            });
            return;
        }

        // If we can't do that using semantic info, then try syntactic info.
        // NOTE(review): type arguments appear stripped by extraction here
        // (presumably Map<string, Symbol>); confirm against the original.
        const symbolTable = new Map();
        for (const mroClass of classResults.classType.shared.mro) {
            if (mroClass === classResults.classType) {
                // Ignore current type.
                continue;
            }

            if (isInstantiableClass(mroClass)) {
                getMembersForClass(mroClass, symbolTable, /* includeInstanceVars */ false);
            }
        }

        const symbol = symbolTable.get(classVariableName);
        if (!symbol) {
            return;
        }

        const decls = symbol
            .getDeclarations()
            .filter((d) => isVariableDeclaration(d) && d.moduleName !== 'builtins') as VariableDeclaration[];

        // Skip any symbols invalid such as defined in the same class.
        if (
            decls.length === 0 ||
            decls.some((d) => d.node && ParseTreeUtils.getEnclosingClass(d.node, false) === enclosingClass)
        ) {
            return;
        }

        const declWithTypeAnnotations = decls.filter((d) => d.typeAnnotationNode);
        if (declWithTypeAnnotations.length === 0) {
            return;
        }

        // Stub files may use forward declarations freely.
        const printFlags = isStubFile(this.fileUri)
            ? ParseTreeUtils.PrintExpressionFlags.ForwardDeclarations |
              ParseTreeUtils.PrintExpressionFlags.DoNotLimitStringLength
            : ParseTreeUtils.PrintExpressionFlags.DoNotLimitStringLength;

        // Use the most recent annotated declaration.
        const text = `${ParseTreeUtils.printExpression(
            declWithTypeAnnotations[declWithTypeAnnotations.length - 1].typeAnnotationNode!,
            printFlags
        )}`;

        this.addNameToCompletions(text, CompletionItemKind.Reference, priorWord, completionMap, {
            sortText: this._makeSortText(SortCategory.LikelyKeyword, text),
        });
    }
ParseTreeUtils.PrintExpressionFlags.ForwardDeclarations | + ParseTreeUtils.PrintExpressionFlags.DoNotLimitStringLength + : ParseTreeUtils.PrintExpressionFlags.DoNotLimitStringLength; + + const text = `${ParseTreeUtils.printExpression( + declWithTypeAnnotations[declWithTypeAnnotations.length - 1].typeAnnotationNode!, + printFlags + )}`; + + this.addNameToCompletions(text, CompletionItemKind.Reference, priorWord, completionMap, { + sortText: this._makeSortText(SortCategory.LikelyKeyword, text), + }); + } + + private _getClassVariableCompletions(partialName: NameNode): CompletionMap | undefined { + const enclosingClass = ParseTreeUtils.getEnclosingClass(partialName, false); + if (!enclosingClass) { + return undefined; + } + + const classResults = this.evaluator.getTypeOfClass(enclosingClass); + if (!classResults) { + return undefined; + } + + const symbolTable = new Map(); + for (const mroClass of classResults.classType.shared.mro) { + if (isInstantiableClass(mroClass)) { + getMembersForClass(mroClass, symbolTable, /* includeInstanceVars */ false); + } + } + + const completionMap = new CompletionMap(); + symbolTable.forEach((symbol, name) => { + if ( + SymbolNameUtils.isPrivateName(name) || + symbol.isPrivateMember() || + symbol.isExternallyHidden() || + !StringUtils.isPatternInSymbol(partialName.d.value, name) + ) { + return; + } + + const decls = symbol + .getDeclarations() + .filter((d) => isVariableDeclaration(d) && d.moduleName !== 'builtins') as VariableDeclaration[]; + + // Skip any symbols invalid such as defined in the same class. + if ( + decls.length === 0 || + decls.some((d) => d.node && ParseTreeUtils.getEnclosingClass(d.node, false) === enclosingClass) + ) { + return; + } + + this.addSymbol(name, symbol, partialName.d.value, completionMap, {}); + }); + + return completionMap.size > 0 ? 
completionMap : undefined; + } + + private _getMethodOverloadsCompletions(priorWord: string, partialName: NameNode): CompletionMap | undefined { + const symbolTable = getSymbolTable(this.evaluator, partialName); + if (!symbolTable) { + return undefined; + } + + const funcParensDisabled = partialName.parent?.nodeType === ParseNodeType.Function ? true : undefined; + const completionMap = new CompletionMap(); + + const enclosingFunc = ParseTreeUtils.getEnclosingFunction(partialName); + symbolTable.forEach((symbol, name) => { + const decl = getLastTypedDeclarationForSymbol(symbol); + if (!decl || decl.type !== DeclarationType.Function) { + return; + } + + if (!decl.node.d.decorators.some((d) => this._isOverload(d))) { + // Only consider ones that have overload decorator. + return; + } + + const decls = symbol.getDeclarations(); + if (decls.length === 1 && decls.some((d) => d.node === enclosingFunc)) { + // Don't show itself. + return; + } + + if (StringUtils.isPatternInSymbol(partialName.d.value, name)) { + const textEdit = this.createReplaceEdits(priorWord, partialName, decl.node.d.name.d.value); + this.addSymbol(name, symbol, partialName.d.value, completionMap, { + funcParensDisabled, + edits: { textEdit }, + }); + } + }); + + return completionMap; + + function getSymbolTable(evaluator: TypeEvaluator, partialName: NameNode) { + const enclosingClass = ParseTreeUtils.getEnclosingClass(partialName, false); + if (enclosingClass) { + const classResults = evaluator.getTypeOfClass(enclosingClass); + if (!classResults) { + return undefined; + } + + const symbolTable = new Map(); + for (const mroClass of classResults.classType.shared.mro) { + if (isInstantiableClass(mroClass)) { + getMembersForClass(mroClass, symbolTable, /* includeInstanceVars */ false); + } + } + + return symbolTable; + } + + // For function overload, we only care about top level functions + const moduleNode = ParseTreeUtils.getEnclosingModule(partialName); + if (moduleNode) { + const moduleScope = 
AnalyzerNodeInfo.getScope(moduleNode); + return moduleScope?.symbolTable; + } + + return undefined; + } + } + + private _printMethodSignature(classType: ClassType, decl: FunctionDeclaration): string { + const node = decl.node; + + let ellipsisForDefault: boolean | undefined; + if (isStubFile(this.fileUri)) { + // In stubs, always use "...". + ellipsisForDefault = true; + } else if (classType.shared.moduleName === decl.moduleName) { + // In the same file, always print the full default. + ellipsisForDefault = false; + } + + const printFlags = isStubFile(this.fileUri) + ? ParseTreeUtils.PrintExpressionFlags.ForwardDeclarations | + ParseTreeUtils.PrintExpressionFlags.DoNotLimitStringLength + : ParseTreeUtils.PrintExpressionFlags.DoNotLimitStringLength; + + const paramList = node.d.params + .map((param, index) => { + let paramString = ''; + if (param.d.category === ParamCategory.ArgsList) { + paramString += '*'; + } else if (param.d.category === ParamCategory.KwargsDict) { + paramString += '**'; + } + + if (param.d.name) { + paramString += param.d.name.d.value; + } + + // Currently, we don't automatically add import if the type used in the annotation is not imported + // in current file. + const paramTypeAnnotation = ParseTreeUtils.getTypeAnnotationForParam(node, index); + if (paramTypeAnnotation) { + paramString += ': ' + ParseTreeUtils.printExpression(paramTypeAnnotation, printFlags); + } + + if (param.d.defaultValue) { + paramString += paramTypeAnnotation ? ' = ' : '='; + + const useEllipsis = ellipsisForDefault ?? !ParseTreeUtils.isSimpleDefault(param.d.defaultValue); + paramString += useEllipsis + ? '...' 
+ : ParseTreeUtils.printExpression(param.d.defaultValue, printFlags); + } + + if (!paramString && !param.d.name && param.d.category === ParamCategory.Simple) { + return '/'; + } + + return paramString; + }) + .join(', '); + + let methodSignature = node.d.name.d.value + '(' + paramList + ')'; + + if (node.d.returnAnnotation) { + methodSignature += ' -> ' + ParseTreeUtils.printExpression(node.d.returnAnnotation, printFlags); + } else if (node.d.funcAnnotationComment) { + methodSignature += + ' -> ' + ParseTreeUtils.printExpression(node.d.funcAnnotationComment.d.returnAnnotation, printFlags); + } + + return methodSignature; + } + + private _getStatementCompletions( + parseNode: ParseNode, + priorWord: string, + priorText: string, + postText: string + ): CompletionMap | undefined { + // For now, use the same logic for expressions and statements. + return this._getExpressionCompletions(parseNode, priorWord, priorText, postText); + } + + private _getExpressionCompletions( + parseNode: ParseNode, + priorWord: string, + priorText: string, + postText: string + ): CompletionMap | undefined { + const isIndexArgument = this._isIndexArgument(parseNode); + + // If the user typed a "." as part of a number, don't present + // any completion options. + if (!isIndexArgument && parseNode.nodeType === ParseNodeType.Number) { + return undefined; + } + + // Are we within a "with Y as []"? + // Don't add any completion options. + if ( + parseNode.parent?.nodeType === ParseNodeType.WithItem && + parseNode.parent === parseNode.parent.d.target?.parent + ) { + return undefined; + } + + const completionMap = new CompletionMap(); + + // Return empty completionList for Ellipsis + if (priorText.slice(-2) === '..') { + return completionMap; + } + + // Defining type annotation for class variables. + // ex) class A: + // variable: | <= here + this._addClassVariableTypeAnnotationCompletions(priorWord, parseNode, completionMap); + + // Add call argument completions. 
+ this._addCallArgumentCompletions( + parseNode, + priorWord, + priorText, + postText, + /* atArgument */ false, + completionMap + ); + + // Add symbols that are in scope. + this._addSymbols(parseNode, priorWord, completionMap); + + // Add keywords. + this._findMatchingKeywords(Keywords.forVersion(this.execEnv.pythonVersion), priorWord).map((keyword) => { + if (completionMap.has(keyword)) { + return; + } + const completionItem = CompletionItem.create(keyword); + completionItem.kind = CompletionItemKind.Keyword; + completionItem.sortText = this._makeSortText(SortCategory.Keyword, keyword); + completionMap.set(completionItem); + }); + + // Add auto-import suggestions from other modules. + // Ignore this check for privates, since they are not imported. + if (!priorWord.startsWith('_') && !this.itemToResolve) { + const parensDisabled = parseNode.parent?.nodeType === ParseNodeType.Decorator; + this.addAutoImportCompletions( + priorWord, + similarityLimit, + this.options.lazyEdit, + completionMap, + parensDisabled + ); + } + + // Add literal values if appropriate. + this._tryAddLiterals(parseNode, priorWord, priorText, postText, completionMap); + + return completionMap; + } + + private _isIndexArgument(node: ParseNode) { + const currentNode = node.parent; + return ( + currentNode && + currentNode.nodeType === ParseNodeType.Argument && + currentNode.d.argCategory === ArgCategory.Simple && + currentNode.parent && + currentNode.parent.nodeType === ParseNodeType.Index && + currentNode.parent.d.leftExpr && + currentNode.parent.d.leftExpr.nodeType === ParseNodeType.Name + ); + } + + private _addCallArgumentCompletions( + parseNode: ParseNode, + priorWord: string, + priorText: string, + postText: string, + atArgument: boolean, + completionMap: CompletionMap + ) { + // If we're within the argument list of a call, add parameter names. 
+ const offset = convertPositionToOffset(this.position, this.parseResults.tokenizerOutput.lines)!; + const callInfo = getCallNodeAndActiveParamIndex(parseNode, offset, this.parseResults.tokenizerOutput.tokens); + + if (!callInfo) { + return; + } + + const signatureInfo = this.evaluator.getCallSignatureInfo( + callInfo.callNode, + callInfo.activeIndex, + callInfo.activeOrFake + ); + + if (signatureInfo) { + // Are we past the call expression and within the argument list? + const callNameEnd = convertOffsetToPosition( + signatureInfo.callNode.d.leftExpr.start + signatureInfo.callNode.d.leftExpr.length, + this.parseResults.tokenizerOutput.lines + ); + + if (comparePositions(this.position, callNameEnd) > 0) { + if (!atArgument) { + this._addNamedParameters(signatureInfo, priorWord, completionMap); + } + + // Add literals that apply to this parameter. + this._addLiteralValuesForArgument(signatureInfo, priorWord, priorText, postText, completionMap); + } + } + } + + private _addLiteralValuesForArgument( + signatureInfo: CallSignatureInfo, + priorWord: string, + priorText: string, + postText: string, + completionMap: CompletionMap + ) { + signatureInfo.signatures.forEach((signature) => { + if (!signature.activeParam) { + return undefined; + } + + const type = signature.type; + const paramIndex = type.shared.parameters.indexOf(signature.activeParam); + + if (paramIndex < 0) { + return undefined; + } + + const paramType = FunctionType.getParamType(type, paramIndex); + this._addLiteralValuesForTargetType(paramType, priorWord, priorText, postText, completionMap); + return undefined; + }); + } + + private _addLiteralValuesForTargetType( + type: Type, + priorWord: string, + priorText: string, + postText: string, + completionMap: CompletionMap + ) { + const quoteValue = this._getQuoteInfo(priorWord, priorText); + this._getSubTypesWithLiteralValues(type).forEach((v) => { + if (ClassType.isBuiltIn(v, 'str')) { + const value = printLiteralValue(v, quoteValue.quoteCharacter); + if 
(quoteValue.stringValue === undefined) { + this.addNameToCompletions(value, CompletionItemKind.Constant, priorWord, completionMap, { + sortText: this._makeSortText(SortCategory.LiteralValue, v.priv.literalValue as string), + }); + } else { + this._addStringLiteralToCompletions( + value.substr(1, value.length - 2), + quoteValue, + postText, + completionMap + ); + } + } + }); + } + + private _getDictExpressionStringKeys(parseNode: ParseNode, excludeIds?: Set) { + const node = getDictionaryLikeNode(parseNode); + if (!node) { + return []; + } + + return node.d.items.flatMap((entry) => { + if (entry.nodeType !== ParseNodeType.DictionaryKeyEntry || excludeIds?.has(entry.d.keyExpr.id)) { + return []; + } + + if (entry.d.keyExpr.nodeType === ParseNodeType.StringList) { + return [entry.d.keyExpr.d.strings.map((s) => s.d.value).join('')]; + } + + return []; + }); + + function getDictionaryLikeNode(parseNode: ParseNode) { + // this method assumes the given parseNode is either a child of a dictionary or a dictionary itself + if (parseNode.nodeType === ParseNodeType.Dictionary) { + return parseNode; + } + + let curNode: ParseNode | undefined = parseNode; + while (curNode && curNode.nodeType !== ParseNodeType.Dictionary && curNode.nodeType !== ParseNodeType.Set) { + curNode = curNode.parent; + if (!curNode) { + return; + } + } + + return curNode; + } + } + + private _getSubTypesWithLiteralValues(type: Type) { + const values: ClassType[] = []; + + doForEachSubtype(type, (subtype) => { + if (isClassInstance(subtype) && isLiteralType(subtype)) { + values.push(subtype); + } + }); + + return values; + } + + private _getIndexKeyType(baseType: ClassType) { + // Handle __getitem__. + const getItemType = this.evaluator.getBoundMagicMethod(baseType, '__getitem__'); + if (getItemType) { + const typesToCombine: Type[] = []; + + // Handle both overloaded and non-overloaded functions. 
    // Collects candidate key strings for an index expression "base[...]".
    // Tries, in order: Literal key types from __getitem__; then, for a local
    // variable/parameter base, a syntactic scan of its other references to
    // harvest keys from dict literals, dict(...) keyword calls, and other
    // index expressions.
    private _getIndexKeys(indexNode: IndexNode, invocationNode: ParseNode) {
        const baseType = this.evaluator.getType(indexNode.d.leftExpr);
        if (!baseType || !isClassInstance(baseType)) {
            return [];
        }

        // See whether subscript is typed using Literal types. If it is, return those literal keys.
        const subscriptType = this._getIndexKeyType(baseType);
        if (subscriptType) {
            const keys: string[] = [];

            this._getSubTypesWithLiteralValues(subscriptType).forEach((v) => {
                // Only literal-capable builtins and enums make sensible keys.
                if (
                    !ClassType.isBuiltIn(v, 'str') &&
                    !ClassType.isBuiltIn(v, 'int') &&
                    !ClassType.isBuiltIn(v, 'bool') &&
                    !ClassType.isBuiltIn(v, 'bytes') &&
                    !ClassType.isEnumClass(v)
                ) {
                    return;
                }

                keys.push(printLiteralValue(v, this.parseResults.tokenizerOutput.predominantSingleQuoteCharacter));
            });

            if (keys.length > 0) {
                return keys;
            }
        }

        if (indexNode.d.leftExpr.nodeType !== ParseNodeType.Name) {
            // This completion only supports simple name case
            return [];
        }

        // Must be local variable/parameter
        const declarations = this.evaluator.getDeclInfoForNameNode(indexNode.d.leftExpr)?.decls ?? [];
        const declaration = declarations.length > 0 ? declarations[0] : undefined;
        if (
            !declaration ||
            (declaration.type !== DeclarationType.Variable && declaration.type !== DeclarationType.Param)
        ) {
            return [];
        }

        if (!declaration.uri.equals(this.fileUri)) {
            return [];
        }

        let startingNode: ParseNode = indexNode.d.leftExpr;
        if (declaration.node) {
            const scopeRoot = ParseTreeUtils.getEvaluationScopeNode(declaration.node).node;

            // Find the lowest tree to search the symbol.
            if (
                ParseTreeUtils.getFileInfoFromNode(startingNode)?.fileUri.equals(
                    ParseTreeUtils.getFileInfoFromNode(scopeRoot)?.fileUri
                )
            ) {
                startingNode = scopeRoot;
            }
        }

        const results = DocumentSymbolCollector.collectFromNode(
            this.program,
            indexNode.d.leftExpr,
            this.cancellationToken,
            startingNode
        );

        // NOTE(review): the type argument appears stripped by extraction
        // (presumably Set<string>); confirm against the original.
        const keys: Set = new Set();
        for (const result of results) {
            const node =
                result.node.parent?.nodeType === ParseNodeType.TypeAnnotation ? result.node.parent : result.node;

            if (
                node.parent?.nodeType === ParseNodeType.Assignment ||
                node.parent?.nodeType === ParseNodeType.AssignmentExpression
            ) {
                if (node.parent.d.rightExpr.nodeType === ParseNodeType.Dictionary) {
                    // Harvest keys from a dict literal assigned to the base.
                    const dictionary = node.parent.d.rightExpr;
                    for (const entry of dictionary.d.items.filter(
                        (e) => e.nodeType === ParseNodeType.DictionaryKeyEntry
                    ) as DictionaryKeyEntryNode[]) {
                        const key = this.parseResults.text.substr(entry.d.keyExpr.start, entry.d.keyExpr.length).trim();
                        if (key.length > 0) keys.add(key);
                    }
                }

                if (node.parent.d.rightExpr.nodeType === ParseNodeType.Call) {
                    // Harvest keys from "x = dict(a=1, b=2)" keyword args.
                    const call = node.parent.d.rightExpr;
                    const type = this.evaluator.getType(call.d.leftExpr);
                    if (!type || !isInstantiableClass(type) || !ClassType.isBuiltIn(type, 'dict')) {
                        continue;
                    }

                    for (const arg of call.d.args) {
                        const key = arg.d.name?.d.value.trim() ?? '';
                        const quote = this.parseResults.tokenizerOutput.predominantSingleQuoteCharacter;
                        if (key.length > 0) {
                            keys.add(`${quote}${key}${quote}`);
                        }
                    }
                }
            }

            if (
                node.parent?.nodeType === ParseNodeType.Index &&
                node.parent.d.items.length === 1 &&
                node.parent.d.items[0].d.valueExpr.nodeType !== ParseNodeType.Error &&
                !TextRange.containsRange(node.parent, invocationNode)
            ) {
                // Harvest keys from other subscript usages of the same base,
                // excluding the one currently being typed.
                const indexArgument = node.parent.d.items[0];
                const key = this.parseResults.text
                    .substr(indexArgument.d.valueExpr.start, indexArgument.d.valueExpr.length)
                    .trim();
                if (key.length > 0) keys.add(key);
            }
        }

        return Array.from(keys);
    }
''; + const quote = this.parseResults.tokenizerOutput.predominantSingleQuoteCharacter; + if (key.length > 0) { + keys.add(`${quote}${key}${quote}`); + } + } + } + } + + if ( + node.parent?.nodeType === ParseNodeType.Index && + node.parent.d.items.length === 1 && + node.parent.d.items[0].d.valueExpr.nodeType !== ParseNodeType.Error && + !TextRange.containsRange(node.parent, invocationNode) + ) { + const indexArgument = node.parent.d.items[0]; + const key = this.parseResults.text + .substr(indexArgument.d.valueExpr.start, indexArgument.d.valueExpr.length) + .trim(); + if (key.length > 0) keys.add(key); + } + } + + return Array.from(keys); + } + + private _getLiteralCompletions( + parseNode: StringNode | ErrorNode, + offset: number, + priorWord: string, + priorText: string, + postText: string + ): CompletionMap | undefined { + if (this.options.triggerCharacter === '"' || this.options.triggerCharacter === "'") { + if (parseNode.start !== offset - 1) { + // If completion is triggered by typing " or ', it must be the one that starts a string + // literal. In another word, it can't be something inside of another string or comment + return undefined; + } + } + + const completionMap = new CompletionMap(); + if (!this._tryAddLiterals(parseNode, priorWord, priorText, postText, completionMap)) { + return undefined; + } + + return completionMap; + } + + private _tryAddLiterals( + parseNode: ParseNode, + priorWord: string, + priorText: string, + postText: string, + completionMap: CompletionMap + ): boolean { + const parentAndChild = getParentSkippingStringList(parseNode); + if (!parentAndChild) { + return false; + } + + // See if the type evaluator can determine the expected type for this node. + // ex) a: Literal["str"] = /* here */ + const nodeForExpectedType = + parentAndChild.parent.nodeType === ParseNodeType.Assignment + ? parentAndChild.parent.d.rightExpr === parentAndChild.child + ? parentAndChild.child + : undefined + : isExpressionNode(parentAndChild.child) + ? 
parentAndChild.child + : undefined; + + const offset = convertPositionToOffset(this.position, this.parseResults.tokenizerOutput.lines)!; + const inCallArg = !!getCallNodeAndActiveParamIndex(parseNode, offset, this.parseResults.tokenizerOutput.tokens); + + if (nodeForExpectedType) { + const expectedTypeResult = this.evaluator.getExpectedType(nodeForExpectedType); + if (expectedTypeResult && containsLiteralType(expectedTypeResult.type)) { + this._addLiteralValuesForTargetType( + expectedTypeResult.type, + priorWord, + priorText, + postText, + completionMap + ); + + if (!inCallArg) { + return true; + } + } + } + + // ex) a: TypedDictType = { "/* here */" } or a: TypedDictType = { A/* here */ } + const nodeForKey = parentAndChild.parent; + if (nodeForKey) { + // If the dictionary is not yet filled in, it will appear as though it's + // a set initially. + let dictOrSet: DictionaryNode | SetNode | undefined; + + if ( + nodeForKey.nodeType === ParseNodeType.DictionaryKeyEntry && + nodeForKey.d.keyExpr === parentAndChild.child && + nodeForKey.parent?.nodeType === ParseNodeType.Dictionary + ) { + dictOrSet = nodeForKey.parent; + } else if (nodeForKey?.nodeType === ParseNodeType.Set) { + dictOrSet = nodeForKey; + } + + if (dictOrSet) { + if ( + this._tryAddTypedDictKeysFromDictionary( + dictOrSet, + parseNode.nodeType === ParseNodeType.String ? parseNode : undefined, + priorWord, + priorText, + postText, + completionMap + ) + ) { + return true; + } + } + } + + // a: DictType = { .... } + // a[/* here */] or a['/* here */'] or a[variable/*here*/] + const argument = parentAndChild.parent; + if (argument.nodeType === ParseNodeType.Argument && argument.parent?.nodeType === ParseNodeType.Index) { + const priorTextInString = parseNode.nodeType === ParseNodeType.String ? 
priorText : ''; + if ( + this._tryAddTypedDictKeysFromIndexer( + argument.parent, + priorWord, + priorTextInString, + postText, + completionMap + ) + ) { + return true; + } + + const quoteInfo = this._getQuoteInfo(priorWord, priorTextInString); + const keys = this._getIndexKeys(argument.parent, parseNode); + + let keyFound = false; + for (const key of keys) { + if (completionMap.has(key)) { + // Don't add key if it already exists in the completion. + // ex) key = "dictKey" + // dict[key] = 1 + // print(dict[])) + continue; + } + + const stringLiteral = /^["|'].*["|']$/.test(key); + if (parseNode.nodeType === ParseNodeType.String && !stringLiteral) { + continue; + } + + keyFound = true; + if (stringLiteral) { + const keyWithoutQuote = key.substr(1, key.length - 2); + + this._addStringLiteralToCompletions( + keyWithoutQuote, + quoteInfo, + postText, + completionMap, + indexValueDetail + ); + } else { + this.addNameToCompletions(key, CompletionItemKind.Constant, priorWord, completionMap, { + sortText: this._makeSortText(SortCategory.LiteralValue, key), + itemDetail: indexValueDetail, + }); + } + } + + if (keyFound) { + return true; + } + } + + // if c == "/* here */" + const comparison = parentAndChild.parent; + const supportedOperators = [OperatorType.Assign, OperatorType.Equals, OperatorType.NotEquals]; + if ( + comparison.nodeType === ParseNodeType.BinaryOperation && + supportedOperators.includes(comparison.d.operator) + ) { + const type = this.evaluator.getType(comparison.d.leftExpr); + if (type && containsLiteralType(type)) { + this._addLiteralValuesForTargetType(type, priorWord, priorText, postText, completionMap); + return true; + } + } + + // if c := "/* here */" + const assignmentExpression = parentAndChild.parent; + if ( + assignmentExpression.nodeType === ParseNodeType.AssignmentExpression && + assignmentExpression.d.rightExpr === parentAndChild.child + ) { + const type = this.evaluator.getType(assignmentExpression.d.name); + if (type && 
containsLiteralType(type)) { + this._addLiteralValuesForTargetType(type, priorWord, priorText, postText, completionMap); + return true; + } + } + + // For now, we only support simple cases. no complex pattern matching. + // match c: + // case /* here */ + const caseNode = parentAndChild.parent; + if ( + caseNode.nodeType === ParseNodeType.Case && + caseNode.d.pattern.nodeType === ParseNodeType.Error && + caseNode.d.pattern.d.category === ErrorExpressionCategory.MissingPattern && + caseNode.d.suite === parentAndChild.child && + caseNode.parent?.nodeType === ParseNodeType.Match + ) { + const type = this.evaluator.getType(caseNode.parent.d.expr); + if (type && containsLiteralType(type)) { + this._addLiteralValuesForTargetType(type, priorWord, priorText, postText, completionMap); + return true; + } + } + + // match c: + // case "/* here */" + // case Sym/*here*/ + const patternLiteral = parentAndChild.parent; + if ( + (patternLiteral.nodeType === ParseNodeType.PatternLiteral || + patternLiteral.nodeType === ParseNodeType.PatternCapture) && + patternLiteral.parent?.nodeType === ParseNodeType.PatternAs && + patternLiteral.parent.parent?.nodeType === ParseNodeType.Case && + patternLiteral.parent.parent.parent?.nodeType === ParseNodeType.Match + ) { + const type = this.evaluator.getType(patternLiteral.parent.parent.parent.d.expr); + if (type && containsLiteralType(type)) { + this._addLiteralValuesForTargetType(type, priorWord, priorText, postText, completionMap); + return true; + } + } + + if (parseNode.nodeType === ParseNodeType.String) { + const offset = convertPositionToOffset(this.position, this.parseResults.tokenizerOutput.lines)!; + const atArgument = parseNode.parent!.start < offset && offset < TextRange.getEnd(parseNode); + this._addCallArgumentCompletions(parseNode, priorWord, priorText, postText, atArgument, completionMap); + return true; + } + + return false; + + function getParentSkippingStringList(node: ParseNode): { parent: ParseNode; child: ParseNode } | 
undefined { + if (!node.parent) { + return undefined; + } + + if (node.nodeType !== ParseNodeType.String) { + return { parent: node.parent, child: node }; + } + + if (!node.parent.parent) { + return undefined; + } + + if (node.parent?.nodeType !== ParseNodeType.StringList || node.parent.d.strings.length > 1) { + return undefined; + } + + return { parent: node.parent.parent, child: node.parent }; + } + } + + private _tryAddTypedDictKeys( + type: Type, + existingKeys: string[], + priorWord: string, + priorText: string, + postText: string, + completionMap: CompletionMap + ) { + let typedDicts: ClassType[] = []; + + doForEachSubtype(type, (subtype) => { + if (isClassInstance(subtype) && ClassType.isTypedDictClass(subtype)) { + typedDicts.push(subtype); + } + }); + + if (typedDicts.length === 0) { + return false; + } + + typedDicts = this._tryNarrowTypedDicts(typedDicts, existingKeys); + + const quoteInfo = this._getQuoteInfo(priorWord, priorText); + const excludes = new Set(existingKeys); + + typedDicts.forEach((typedDict) => { + getTypedDictMembersForClass(this.evaluator, typedDict, /* allowNarrowed */ true).knownItems.forEach( + (_, key) => { + // Unions of TypedDicts may define the same key. + if (excludes.has(key) || completionMap.has(key)) { + return; + } + + excludes.add(key); + + this._addStringLiteralToCompletions(key, quoteInfo, postText, completionMap); + } + ); + }); + + return true; + } + + private _tryAddTypedDictKeysFromDictionary( + dictionaryNode: DictionaryNode | SetNode, + stringNode: StringNode | undefined, + priorWord: string, + priorText: string, + postText: string, + completionMap: CompletionMap + ) { + const expectedTypeResult = this.evaluator.getExpectedType(dictionaryNode); + if (!expectedTypeResult) { + return false; + } + + // If the expected type result is associated with a node above the + // dictionaryNode in the parse tree, there are no typed dict keys to add. 
+ if (ParseTreeUtils.getNodeDepth(expectedTypeResult.node) < ParseTreeUtils.getNodeDepth(dictionaryNode)) { + return false; + } + + const keys = this._getDictExpressionStringKeys( + dictionaryNode, + stringNode ? new Set([stringNode.parent?.id]) : undefined + ); + + return this._tryAddTypedDictKeys(expectedTypeResult.type, keys, priorWord, priorText, postText, completionMap); + } + + private _tryNarrowTypedDicts(types: ClassType[], keys: string[]): ClassType[] { + const newTypes = types.flatMap((type) => { + const entries = getTypedDictMembersForClass(this.evaluator, type, /* allowNarrowed */ true); + + for (let index = 0; index < keys.length; index++) { + if (!entries.knownItems.has(keys[index])) { + return []; + } + } + + return [type]; + }); + + if (newTypes.length === 0) { + // Couldn't narrow to any typed dicts. Just include all. + return types; + } + + return newTypes; + } + + // Find quotation and string prefix to use for string literals + // completion under current context. + private _getQuoteInfo(priorWord: string, priorText: string): QuoteInfo { + let filterText = priorWord; + let stringValue = undefined; + let quoteCharacter = this.parseResults.tokenizerOutput.predominantSingleQuoteCharacter; + + // If completion is not inside of the existing string literal + // ex) typedDict[ |<= here + // use default quotation char without any string prefix. 
+ if (!this._stringLiteralContainer) { + return { priorWord, priorText, filterText, stringValue, quoteCharacter }; + } + + const singleQuote = "'"; + const doubleQuote = '"'; + + // If completion is inside of string literal and has prior text + // ex) typedDict["key |<= here + // find quotation user has used (ex, ") and string prefix (ex, key) + if (priorText !== undefined) { + const lastSingleQuote = priorText.lastIndexOf(singleQuote); + const lastDoubleQuote = priorText.lastIndexOf(doubleQuote); + + if (lastSingleQuote > lastDoubleQuote) { + stringValue = priorText.substr(lastSingleQuote + 1); + quoteCharacter = singleQuote; + } else if (lastDoubleQuote > lastSingleQuote) { + stringValue = priorText.substr(lastDoubleQuote + 1); + quoteCharacter = doubleQuote; + } + } + + // If the invocation position is within an f-string, use a double or + // single quote that doesn't match the f-string. Prior to Python 3.12, + // using the same quotation mark nested within an f-string was not + // permitted. For example, f"..{typedDict[|<= here ]}", we need to use + // single quotes. Note that this doesn't account for deeper nested + // f-strings. + if (this._stringLiteralContainer.flags & (StringTokenFlags.Format | StringTokenFlags.Template)) { + quoteCharacter = + this._stringLiteralContainer.flags & StringTokenFlags.SingleQuote ? 
doubleQuote : singleQuote; + } + + if (stringValue) { + filterText = stringValue; + } + + return { priorWord, priorText, filterText, stringValue, quoteCharacter }; + } + + private _tryAddTypedDictKeysFromIndexer( + indexNode: IndexNode, + priorWord: string, + priorText: string, + postText: string, + completionMap: CompletionMap + ) { + if (!indexNode) { + return false; + } + + const baseType = this.evaluator.getType(indexNode.d.leftExpr); + if (!baseType) { + return false; + } + + return this._tryAddTypedDictKeys(baseType, [], priorWord, priorText, postText, completionMap); + } + + private _addStringLiteralToCompletions( + value: string, + quoteInfo: QuoteInfo, + postText: string | undefined, + completionMap: CompletionMap, + detail?: string + ) { + if (!StringUtils.isPatternInSymbol(quoteInfo.filterText || '', value)) { + return; + } + + const valueWithQuotes = `${quoteInfo.quoteCharacter}${value}${quoteInfo.quoteCharacter}`; + if (completionMap.has(valueWithQuotes)) { + return; + } + + const completionItem = CompletionItem.create(valueWithQuotes); + + completionItem.kind = CompletionItemKind.Constant; + completionItem.sortText = this._makeSortText(SortCategory.LiteralValue, valueWithQuotes); + let rangeStartCol = this.position.character; + if (quoteInfo.stringValue !== undefined) { + rangeStartCol -= quoteInfo.stringValue.length + 1; + } else if (quoteInfo.priorWord) { + rangeStartCol -= quoteInfo.priorWord.length; + } + + // If the text after the insertion point is the closing quote, + // replace it. 
+ let rangeEndCol = this.position.character; + if (postText !== undefined) { + if (postText.startsWith(quoteInfo.quoteCharacter)) { + rangeEndCol++; + } + } + + const range: Range = { + start: { line: this.position.line, character: rangeStartCol }, + end: { line: this.position.line, character: rangeEndCol }, + }; + completionItem.textEdit = TextEdit.replace(range, valueWithQuotes); + completionItem.detail = detail; + + completionMap.set(completionItem); + } + + private _getImportFromCompletions( + importFromNode: ImportFromNode, + offset: number, + priorWord: string + ): CompletionMap | undefined { + // Don't attempt to provide completions for "from X import *". + if (importFromNode.d.isWildcardImport) { + return undefined; + } + + // Access the imported module information, which is hanging + // off the ImportFromNode. + const importInfo = AnalyzerNodeInfo.getImportInfo(importFromNode.d.module); + if (!importInfo) { + return undefined; + } + + const completionMap = new CompletionMap(); + const resolvedPath = + importInfo.resolvedUris.length > 0 + ? importInfo.resolvedUris[importInfo.resolvedUris.length - 1] + : Uri.empty(); + + const parseResults = this.program.getParseResults(resolvedPath); + if (!parseResults) { + // Add the implicit imports. + this._addImplicitImportsToCompletion(importInfo, importFromNode, priorWord, completionMap); + return completionMap; + } + + const symbolTable = AnalyzerNodeInfo.getScope(parseResults.parserOutput.parseTree)?.symbolTable; + if (!symbolTable) { + return completionMap; + } + + this._addSymbolsForSymbolTable( + symbolTable, + (symbol, name) => { + return ( + // Don't suggest built in symbols. + symbol.getDeclarations().some((d) => !isIntrinsicDeclaration(d)) && + // Don't suggest symbols that have already been imported elsewhere + // in this import statement. 
+ !importFromNode.d.imports.find( + (imp) => + imp.d.name.d.value === name && + !(TextRange.contains(imp, offset) || TextRange.getEnd(imp) === offset) + ) + ); + }, + priorWord, + importFromNode, + /* isInImport */ true, + /* boundObject */ undefined, + completionMap + ); + + // Add the implicit imports. + this._addImplicitImportsToCompletion(importInfo, importFromNode, priorWord, completionMap); + return completionMap; + } + + private _addImplicitImportsToCompletion( + importInfo: ImportResult, + importFromNode: ImportFromNode, + priorWord: string, + completionMap: CompletionMap + ) { + importInfo.implicitImports?.forEach((implImport) => { + if (!importFromNode.d.imports.find((imp) => imp.d.name.d.value === implImport.name)) { + this.addNameToCompletions(implImport.name, CompletionItemKind.Module, priorWord, completionMap, { + moduleUri: implImport.uri, + }); + } + }); + } + + private _findMatchingKeywords(keywordList: string[], partialMatch: string): string[] { + return keywordList.filter((keyword) => { + if (partialMatch) { + return StringUtils.isPatternInSymbol(partialMatch, keyword); + } else { + return true; + } + }); + } + + private _addNamedParameters(signatureInfo: CallSignatureInfo, priorWord: string, completionMap: CompletionMap) { + const argNameSet = new Set(); + + signatureInfo.signatures.forEach((signature) => { + this._addNamedParametersToMap(signature.type, argNameSet); + }); + + // Add keys from typed dict outside signatures. + signatureInfo.signatures.forEach((signature) => { + if (signature.type.priv.boundToType) { + const keys = Array.from( + signature.type.priv.boundToType.shared.typedDictEntries?.knownItems.keys() || [] + ); + keys.forEach((key: string) => argNameSet.add(key)); + } + }); + + // Remove any named parameters that are already provided. + signatureInfo.callNode.d.args!.forEach((arg) => { + if (arg.d.name) { + argNameSet.delete(arg.d.name.d.value); + } + }); + + // Add the remaining unique parameter names to the completion list. 
+ argNameSet.forEach((argName) => { + if (StringUtils.isPatternInSymbol(priorWord, argName)) { + const label = argName + '='; + if (completionMap.has(label)) { + return; + } + + const completionItem = CompletionItem.create(label); + completionItem.kind = CompletionItemKind.Variable; + + const completionItemData: CompletionItemData = { + uri: this.fileUri.toString(), + position: this.position, + }; + completionItem.data = toLSPAny(completionItemData); + completionItem.sortText = this._makeSortText(SortCategory.NamedParameter, argName); + completionItem.filterText = argName; + + completionMap.set(completionItem); + } + }); + } + + private _addNamedParametersToMap(type: FunctionType, names: Set) { + const paramDetails = getParamListDetails(type); + + paramDetails.params.forEach((paramInfo) => { + if ( + paramInfo.param.name && + paramInfo.kind !== ParamKind.Positional && + paramInfo.kind !== ParamKind.ExpandedArgs + ) { + if ( + !SymbolNameUtils.isPrivateOrProtectedName(paramInfo.param.name) && + Tokenizer.isPythonIdentifier(paramInfo.param.name) + ) { + names.add(paramInfo.param.name); + } + } + }); + } + + private _addSymbols(node: ParseNode, priorWord: string, completionMap: CompletionMap) { + let curNode: ParseNode | undefined = node; + + while (curNode) { + // Does this node have a scope associated with it? + let scope = getScopeForNode(curNode); + if (scope) { + while (scope) { + this._addSymbolsForSymbolTable( + scope.symbolTable, + () => true, + priorWord, + node, + /* isInImport */ false, + /* boundObject */ undefined, + completionMap + ); + scope = scope.parent; + } + + // If this is a class scope, add symbols from parent classes. 
+ if (curNode.nodeType === ParseNodeType.Class) { + const classType = this.evaluator.getTypeOfClass(curNode); + if (classType && isInstantiableClass(classType.classType)) { + classType.classType.shared.mro.forEach((baseClass, index) => { + if (isInstantiableClass(baseClass)) { + this._addSymbolsForSymbolTable( + ClassType.getSymbolTable(baseClass), + (symbol) => { + if (!symbol.isClassMember()) { + return false; + } + + // Return only variables, not methods or classes. + return symbol + .getDeclarations() + .some((decl) => decl.type === DeclarationType.Variable); + }, + priorWord, + node, + /* isInImport */ false, + /* boundObject */ undefined, + completionMap + ); + } + }); + } + } + break; + } + + curNode = curNode.parent; + } + } + + private _addSymbolsForSymbolTable( + symbolTable: SymbolTable, + includeSymbolCallback: (symbol: Symbol, name: string) => boolean, + priorWord: string, + node: ParseNode, + isInImport: boolean, + boundObjectOrClass: ClassType | undefined, + completionMap: CompletionMap + ) { + const insideTypeAnnotation = + ParseTreeUtils.isWithinAnnotationComment(node) || + ParseTreeUtils.isWithinTypeAnnotation(node, /* requireQuotedAnnotation */ false); + symbolTable.forEach((symbol, name) => { + // If there are no declarations or the symbol is not + // exported from this scope, don't include it in the + // suggestion list unless we are in the same file. + const hidden = + !isVisibleExternally(symbol) && !symbol.getDeclarations().some((d) => isDefinedInFile(d, this.fileUri)); + if (!hidden && includeSymbolCallback(symbol, name)) { + // Don't add a symbol more than once. It may have already been + // added from an inner scope's symbol table. 
+ if (!completionMap.has(name)) { + // Skip func parens for classes when not a direct assignment or an argument (passed as a value) + const skipForClass = !this._shouldShowAutoParensForClass(symbol, node); + const skipForDecorator = node.parent?.nodeType === ParseNodeType.Decorator; + this.addSymbol(name, symbol, priorWord, completionMap, { + boundObjectOrClass, + funcParensDisabled: isInImport || insideTypeAnnotation || skipForClass || skipForDecorator, + extraCommitChars: !isInImport && !!priorWord, + }); + } + } + }); + } + + private _shouldShowAutoParensForClass(symbol: Symbol, node: ParseNode) { + if (symbol.getDeclarations().every((d) => d.type !== DeclarationType.Class)) { + // Not actually a class, so yes show parens. + return true; + } + + // If an argument then show parens for classes if not a class argument. + if (node.parent?.nodeType === ParseNodeType.Argument && node.parent.parent?.nodeType !== ParseNodeType.Class) { + return true; + } + + // Otherwise only show when the class is being assigned to a variable. + const nodeIndex = ParseTreeUtils.getTokenIndexAtLeft(this.parseResults.tokenizerOutput.tokens, node.start); + const prevToken = ParseTreeUtils.getTokenAtIndex(this.parseResults.tokenizerOutput.tokens, nodeIndex); + return ( + prevToken && + prevToken.type === TokenType.Operator && + (prevToken as OperatorToken).operatorType === OperatorType.Assign + ); + } + + private _getRecentListIndex(name: string, autoImportText: string) { + return CompletionProvider._mostRecentCompletions.findIndex( + (item) => item.label === name && item.autoImportText === autoImportText + ); + } + + private _makeSortText(sortCategory: SortCategory, name: string, autoImportText = ''): string { + const recentListIndex = this._getRecentListIndex(name, autoImportText); + + // If the label is in the recent list, modify the category + // so it appears higher in our list. 
+ if (recentListIndex >= 0) { + if (sortCategory === SortCategory.AutoImport) { + sortCategory = SortCategory.RecentAutoImport; + } else if (sortCategory === SortCategory.ImportModuleName) { + sortCategory = SortCategory.RecentImportModuleName; + } else if ( + sortCategory === SortCategory.Keyword || + sortCategory === SortCategory.NormalSymbol || + sortCategory === SortCategory.PrivateSymbol || + sortCategory === SortCategory.DunderSymbol + ) { + sortCategory = SortCategory.RecentKeywordOrSymbol; + } + } + + // Generate a sort string of the format + // XX.YYYY.name + // where XX is the sort category + // and YYYY is the index of the item in the MRU list + return this._formatInteger(sortCategory, 2) + '.' + this._formatInteger(recentListIndex, 4) + '.' + name; + } + + private _formatInteger(val: number, digits: number): string { + const charCodeZero = '0'.charCodeAt(0); + + let result = ''; + for (let i = 0; i < digits; i++) { + // Prepend the next digit. + let digit = Math.floor(val % 10); + if (digit < 0) { + digit = 9; + } + result = String.fromCharCode(digit + charCodeZero) + result; + val = Math.floor(val / 10); + } + + return result; + } + + private _convertDeclarationTypeToItemKind(declaration: Declaration): CompletionItemKind { + const resolvedDeclaration = this.evaluator.resolveAliasDeclaration(declaration, /* resolveLocalNames */ true); + if (!resolvedDeclaration) { + return CompletionItemKind.Variable; + } + + switch (resolvedDeclaration.type) { + case DeclarationType.Intrinsic: + return resolvedDeclaration.intrinsicType === '__class__' + ? CompletionItemKind.Class + : CompletionItemKind.Variable; + + case DeclarationType.Param: + return CompletionItemKind.Variable; + + case DeclarationType.TypeParam: + return CompletionItemKind.TypeParameter; + + case DeclarationType.Variable: + return resolvedDeclaration.isConstant || resolvedDeclaration.isFinal + ? 
CompletionItemKind.Constant + : CompletionItemKind.Variable; + + case DeclarationType.TypeAlias: + return CompletionItemKind.Variable; + + case DeclarationType.Function: { + if (this._isPossiblePropertyDeclaration(resolvedDeclaration)) { + const functionType = this.evaluator.getTypeOfFunction(resolvedDeclaration.node); + if ( + functionType && + isMaybeDescriptorInstance(functionType.decoratedType, /* requireSetter */ false) + ) { + return CompletionItemKind.Property; + } + } + return resolvedDeclaration.isMethod ? CompletionItemKind.Method : CompletionItemKind.Function; + } + + case DeclarationType.Class: + case DeclarationType.SpecialBuiltInClass: + return CompletionItemKind.Class; + + case DeclarationType.Alias: + return CompletionItemKind.Module; + } + } + + private _convertTypeToItemKind(type: Type): CompletionItemKind { + switch (type.category) { + case TypeCategory.Module: + return CompletionItemKind.Module; + case TypeCategory.Class: + return CompletionItemKind.Class; + case TypeCategory.Function: + case TypeCategory.Overloaded: + if (isMaybeDescriptorInstance(type, /* requireSetter */ false)) { + return CompletionItemKind.Property; + } + + return CompletionItemKind.Function; + case TypeCategory.TypeVar: + return CompletionItemKind.TypeParameter; + + default: + return CompletionItemKind.Variable; + } + } + + private _getImportModuleCompletions(node: ModuleNameNode): CompletionMap { + const moduleDescriptor: ImportedModuleDescriptor = { + leadingDots: node.d.leadingDots, + hasTrailingDot: node.d.hasTrailingDot || false, + nameParts: node.d.nameParts.map((part) => part.d.value), + importedSymbols: new Set(), + }; + + const completions = this.importResolver.getCompletionSuggestions(this.fileUri, this.execEnv, moduleDescriptor); + + const completionMap = new CompletionMap(); + + // If we're in the middle of a "from X import Y" statement, offer + // the "import" keyword as a completion. 
+ if ( + !node.d.hasTrailingDot && + node.parent && + node.parent.nodeType === ParseNodeType.ImportFrom && + node.parent.d.missingImport + ) { + const keyword = 'import'; + const completionItem = CompletionItem.create(keyword); + completionItem.kind = CompletionItemKind.Keyword; + completionItem.sortText = this._makeSortText(SortCategory.Keyword, keyword); + completionMap.set(completionItem); + } + + completions.forEach((modulePath, completionName) => { + this.addNameToCompletions(completionName, CompletionItemKind.Module, '', completionMap, { + sortText: this._makeSortText(SortCategory.ImportModuleName, completionName), + moduleUri: modulePath, + }); + }); + + return completionMap; + } + + private _isPossiblePropertyDeclaration(decl: FunctionDeclaration) { + // Do cheap check using only nodes that will cover 99.9% cases + // before doing more expensive type evaluation. + return decl.isMethod && decl.node.d.decorators.length > 0; + } + + private _isEnumMember(containingType: ClassType | undefined, name: string) { + if (!containingType || !ClassType.isEnumClass(containingType)) { + return false; + } + + const symbolType = transformTypeForEnumMember(this.evaluator, containingType, name); + + return ( + symbolType && + isClassInstance(symbolType) && + ClassType.isSameGenericClass( + symbolType, + TypeBase.isInstance(containingType) ? 
containingType : ClassType.cloneAsInstance(containingType) + ) && + symbolType.priv.literalValue instanceof EnumLiteral + ); + } +} + +export class CompletionMap { + private _completions: Map = new Map(); + + get size() { + return this._completions.size; + } + + set(value: CompletionItem): void { + const existing = this._completions.get(value.label); + if (!existing) { + this._completions.set(value.label, value); + } else if (Array.isArray(existing)) { + existing.push(value); + } else { + this._completions.set(value.label, [existing, value]); + } + } + + get(key: string): CompletionItem | CompletionItem[] | undefined { + return this._completions.get(key); + } + + has( + label: string, + predicate?: ( + other: CompletionItem | CompletionItem[], + kind?: CompletionItemKind, + autoImportText?: string + ) => boolean, + kind?: CompletionItemKind, + autImportText?: string + ): boolean { + const existing = this._completions.get(label); + if (!existing) { + return false; + } + + if (predicate) { + return predicate(existing, kind, autImportText); + } + return true; + } + + clear(): void { + this._completions.clear(); + } + + delete(key: string): boolean { + return this._completions.delete(key); + } + + toArray(): CompletionItem[] { + const items: CompletionItem[] = []; + this._completions?.forEach((value) => { + if (Array.isArray(value)) { + value.forEach((item) => { + items.push(item); + }); + } else { + items.push(value); + } + }); + return items; + } + + static matchKindAndImportText( + completionItemOrItems: CompletionItem | CompletionItem[], + getCompletionData: (d: CompletionItem) => CompletionItemData | undefined, + kind?: CompletionItemKind, + autoImportText?: string + ): boolean { + if (!Array.isArray(completionItemOrItems)) { + return ( + completionItemOrItems.kind === kind && + getCompletionData(completionItemOrItems)?.autoImportText === autoImportText + ); + } else { + return !!completionItemOrItems.find( + (c) => c.kind === kind && 
getCompletionData(c)?.autoImportText === autoImportText + ); + } + } + + static labelOnlyIgnoringAutoImports( + completionItemOrItems: CompletionItem | CompletionItem[], + getCompletionData: (d: CompletionItem) => CompletionItemData | undefined + ): boolean { + if (!Array.isArray(completionItemOrItems)) { + if (!getCompletionData(completionItemOrItems)?.autoImportText) { + return true; + } + } else { + if (completionItemOrItems.find((c) => !getCompletionData(c)?.autoImportText)) { + return true; + } + } + + return false; + } +} diff --git a/python-parser/packages/pyright-internal/src/languageService/completionProviderUtils.ts b/python-parser/packages/pyright-internal/src/languageService/completionProviderUtils.ts new file mode 100644 index 00000000..4e278f46 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/completionProviderUtils.ts @@ -0,0 +1,192 @@ +/* + * completionProviderUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Helper functions for providing completions + */ + +import { InsertTextFormat, MarkupContent, MarkupKind, TextEdit } from 'vscode-languageserver-types'; + +import { Declaration, DeclarationType } from '../analyzer/declaration'; +import { isBuiltInModule } from '../analyzer/typeDocStringUtils'; +import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { isProperty } from '../analyzer/typeUtils'; +import { + ClassType, + Type, + TypeBase, + TypeCategory, + UnknownType, + getTypeAliasInfo, + isClassInstance, + isFunctionOrOverloaded, + isModule, +} from '../analyzer/types'; +import { SignatureDisplayType } from '../common/configOptions'; +import { TextEditAction } from '../common/editAction'; +import { ServiceProvider } from '../common/serviceProvider'; +import { Uri } from '../common/uri/uri'; +import { getToolTipForType } from './tooltipUtils'; + +export interface Edits { + format?: InsertTextFormat; + textEdit?: TextEdit; + additionalTextEdits?: TextEditAction[]; +} + +export interface CommonDetail { + funcParensDisabled?: boolean; + edits?: Edits; + extraCommitChars?: boolean; +} + +export interface SymbolDetail extends CommonDetail { + autoImportSource?: string; + autoImportAlias?: string; + boundObjectOrClass?: ClassType; +} + +export interface CompletionDetail extends CommonDetail { + typeDetail?: string; + documentation?: string; + autoImportText?: { + source: string; + importText: string; + }; + sortText?: string; + itemDetail?: string; + moduleUri?: Uri; +} + +export function getTypeDetail( + evaluator: TypeEvaluator, + type: Type, + primaryDecl: Declaration | undefined, + name: string, + detail: SymbolDetail | undefined, + functionSignatureDisplay: SignatureDisplayType +) { + if (!primaryDecl) { + if (isModule(type)) { + // Special casing import modules. + // submodule imported through `import` statement doesn't have + // corresponding decls. so use given name as it is. 
+ // + // ex) import X.Y + // X.[Y] + return name; + } + + return; + } + + switch (primaryDecl.type) { + case DeclarationType.Intrinsic: + case DeclarationType.Variable: + case DeclarationType.Param: + case DeclarationType.TypeParam: { + let expandTypeAlias = false; + if (type && TypeBase.isInstantiable(type)) { + const typeAliasInfo = getTypeAliasInfo(type); + if (typeAliasInfo) { + if (typeAliasInfo.shared.name === name) { + expandTypeAlias = true; + } + } + } + // Handle the case where type is a function and was assigned to a variable. + if (type.category === TypeCategory.Overloaded || type.category === TypeCategory.Function) { + return getToolTipForType( + type, + /* label */ '', + name, + evaluator, + /* isProperty */ false, + functionSignatureDisplay + ); + } else { + return name + ': ' + evaluator.printType(type, { expandTypeAlias }); + } + } + + case DeclarationType.Function: { + const functionType = + detail?.boundObjectOrClass && isFunctionOrOverloaded(type) + ? evaluator.bindFunctionToClassOrObject(detail.boundObjectOrClass, type) + : type; + if (!functionType) { + return undefined; + } + + if (isProperty(functionType) && detail?.boundObjectOrClass && isClassInstance(detail.boundObjectOrClass)) { + const propertyType = + evaluator.getGetterTypeFromProperty(functionType as ClassType) || UnknownType.create(); + return name + ': ' + evaluator.printType(propertyType) + ' (property)'; + } + + return getToolTipForType( + functionType, + /* label */ '', + name, + evaluator, + /* isProperty */ false, + functionSignatureDisplay + ); + } + + case DeclarationType.Class: + case DeclarationType.SpecialBuiltInClass: { + return 'class ' + name + '()'; + } + + case DeclarationType.Alias: { + return name; + } + + default: { + return name; + } + } +} + +export function getCompletionItemDocumentation( + serviceProvider: ServiceProvider, + typeDetail: string | undefined, + documentation: string | undefined, + markupKind: MarkupKind, + declaration: Declaration | undefined 
+): MarkupContent | undefined { + if (markupKind === MarkupKind.Markdown) { + let markdownString = '```python\n' + typeDetail + '\n```\n'; + + if (documentation) { + markdownString += '---\n'; + markdownString += serviceProvider + .docStringService() + .convertDocStringToMarkdown(documentation, isBuiltInModule(declaration?.uri)); + } + + markdownString = markdownString.trimEnd(); + + return { + kind: MarkupKind.Markdown, + value: markdownString, + }; + } else if (markupKind === MarkupKind.PlainText) { + let plainTextString = typeDetail + '\n'; + + if (documentation) { + plainTextString += '\n'; + plainTextString += serviceProvider.docStringService().convertDocStringToPlainText(documentation); + } + + plainTextString = plainTextString.trimEnd(); + + return { + kind: MarkupKind.PlainText, + value: plainTextString, + }; + } + return undefined; +} diff --git a/python-parser/packages/pyright-internal/src/languageService/definitionProvider.ts b/python-parser/packages/pyright-internal/src/languageService/definitionProvider.ts new file mode 100644 index 00000000..bedc71fc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/definitionProvider.ts @@ -0,0 +1,347 @@ +/* + * definitionProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Logic that maps a position within a Python program file into + * a "definition" of the item that is referred to at that position. + * For example, if the location is within an import name, the + * definition is the top of the resolved import file. 
+ */ + +import { CancellationToken } from 'vscode-languageserver'; + +import { getFileInfo } from '../analyzer/analyzerNodeInfo'; +import { + Declaration, + DeclarationType, + isFunctionDeclaration, + isUnresolvedAliasDeclaration, +} from '../analyzer/declaration'; +import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; +import { SourceMapper, isStubFile } from '../analyzer/sourceMapper'; +import { SynthesizedTypeInfo } from '../analyzer/symbol'; +import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { doForEachSubtype } from '../analyzer/typeUtils'; +import { OverloadedType, TypeCategory, isOverloaded } from '../analyzer/types'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { appendArray } from '../common/collectionUtils'; +import { isDefined } from '../common/core'; +import { DocumentRange } from '../common/docRange'; +import { ProgramView } from '../common/extensibility'; +import { convertOffsetsToRange, convertPositionToOffset } from '../common/positionUtils'; +import { ServiceKeys } from '../common/serviceKeys'; +import { ServiceProvider } from '../common/serviceProvider'; +import { Position, rangesAreEqual } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { ParseNode, ParseNodeType } from '../parser/parseNodes'; +import { ParseFileResults } from '../parser/parser'; + +export enum DefinitionFilter { + All = 'all', + PreferSource = 'preferSource', + PreferStubs = 'preferStubs', +} + +export function addDeclarationsToDefinitions( + evaluator: TypeEvaluator, + sourceMapper: SourceMapper, + declarations: Declaration[] | undefined, + definitions: DocumentRange[] +) { + if (!declarations) { + return; + } + + declarations.forEach((decl) => { + let resolvedDecl = evaluator.resolveAliasDeclaration(decl, /* resolveLocalNames */ true, { + allowExternallyHiddenAccess: true, + }); + + if (!resolvedDecl || resolvedDecl.uri.isEmpty()) { + return; + } + + // If the decl is an 
unresolved import, skip it. + if (resolvedDecl.type === DeclarationType.Alias) { + if (resolvedDecl.isUnresolved || isUnresolvedAliasDeclaration(resolvedDecl)) { + return; + } + } + + // If the resolved decl is still an alias, it means it + // resolved to a module. We need to apply loader actions + // to determine its path. + if ( + resolvedDecl.type === DeclarationType.Alias && + resolvedDecl.symbolName && + resolvedDecl.submoduleFallback && + !resolvedDecl.submoduleFallback.uri.isEmpty() + ) { + resolvedDecl = resolvedDecl.submoduleFallback; + } + + _addIfUnique(definitions, { + uri: resolvedDecl.uri, + range: resolvedDecl.range, + }); + + if (isFunctionDeclaration(resolvedDecl)) { + // Handle overloaded function case + const functionType = evaluator.getTypeForDeclaration(resolvedDecl)?.type; + if (functionType && isOverloaded(functionType)) { + for (const overloadDecl of OverloadedType.getOverloads(functionType) + .map((o) => o.shared.declaration) + .filter(isDefined)) { + _addIfUnique(definitions, { + uri: overloadDecl.uri, + range: overloadDecl.range, + }); + } + } + } + + if (!isStubFile(resolvedDecl.uri)) { + return; + } + + if (resolvedDecl.type === DeclarationType.Alias) { + // Add matching source module + sourceMapper + .findModules(resolvedDecl.uri) + .map((m) => getFileInfo(m)?.fileUri) + .filter(isDefined) + .forEach((f) => _addIfUnique(definitions, _createModuleEntry(f))); + return; + } + + const implDecls = sourceMapper.findDeclarations(resolvedDecl); + for (const implDecl of implDecls) { + if (implDecl && !implDecl.uri.isEmpty()) { + _addIfUnique(definitions, { + uri: implDecl.uri, + range: implDecl.range, + }); + } + } + }); +} + +export function filterDefinitions(filter: DefinitionFilter, definitions: DocumentRange[]) { + if (filter === DefinitionFilter.All) { + return definitions; + } + + // If go-to-declaration is supported, attempt to only show only pyi files in go-to-declaration + // and none in go-to-definition, unless filtering would produce 
an empty list. + const preferStubs = filter === DefinitionFilter.PreferStubs; + const wantedFile = (v: DocumentRange) => preferStubs === isStubFile(v.uri); + if (definitions.find(wantedFile)) { + return definitions.filter(wantedFile); + } + + return definitions; +} + +class DefinitionProviderBase { + protected constructor( + protected readonly sourceMapper: SourceMapper, + protected readonly evaluator: TypeEvaluator, + private readonly _serviceProvider: ServiceProvider | undefined, + protected readonly node: ParseNode | undefined, + protected readonly offset: number, + private readonly _filter: DefinitionFilter, + protected readonly token: CancellationToken + ) {} + + getDefinitionsForNode(node: ParseNode, offset: number) { + throwIfCancellationRequested(this.token); + + const definitions: DocumentRange[] = []; + + const factories = this._serviceProvider?.tryGet(ServiceKeys.symbolDefinitionProvider); + if (factories) { + factories.forEach((f) => { + const declarations = f.tryGetDeclarations(node, offset, this.token); + this.resolveDeclarations(declarations, definitions); + }); + } + + // There should be only one 'definition', so only if extensions failed should we try again. 
+ if (definitions.length === 0) { + if (node.nodeType === ParseNodeType.Name) { + const declInfo = this.evaluator.getDeclInfoForNameNode(node); + if (declInfo) { + this.resolveDeclarations(declInfo.decls, definitions); + this.addSynthesizedTypes(declInfo.synthesizedTypes, definitions); + } + } else if (node.nodeType === ParseNodeType.String) { + const declInfo = this.evaluator.getDeclInfoForStringNode(node); + if (declInfo) { + this.resolveDeclarations(declInfo.decls, definitions); + this.addSynthesizedTypes(declInfo.synthesizedTypes, definitions); + } + } + } + + if (definitions.length === 0) { + return undefined; + } + + return filterDefinitions(this._filter, definitions); + } + + protected resolveDeclarations(declarations: Declaration[] | undefined, definitions: DocumentRange[]) { + addDeclarationsToDefinitions(this.evaluator, this.sourceMapper, declarations, definitions); + } + + protected addSynthesizedTypes(synthTypes: SynthesizedTypeInfo[], definitions: DocumentRange[]) { + for (const synthType of synthTypes) { + if (!synthType.node) { + continue; + } + + const fileInfo = getFileInfo(synthType.node); + const range = convertOffsetsToRange( + synthType.node.start, + synthType.node.start + synthType.node.length, + fileInfo.lines + ); + + definitions.push({ uri: fileInfo.fileUri, range }); + } + } +} + +export class DefinitionProvider extends DefinitionProviderBase { + constructor( + program: ProgramView, + fileUri: Uri, + position: Position, + filter: DefinitionFilter, + token: CancellationToken + ) { + const sourceMapper = program.getSourceMapper(fileUri, token); + const parseResults = program.getParseResults(fileUri); + const { node, offset } = _tryGetNode(parseResults, position); + + super(sourceMapper, program.evaluator!, program.serviceProvider, node, offset, filter, token); + } + + static getDefinitionsForNode( + sourceMapper: SourceMapper, + evaluator: TypeEvaluator, + node: ParseNode, + offset: number, + token: CancellationToken + ) { + const provider = 
new DefinitionProviderBase( + sourceMapper, + evaluator, + undefined, + node, + offset, + DefinitionFilter.All, + token + ); + return provider.getDefinitionsForNode(node, offset); + } + + getDefinitions(): DocumentRange[] | undefined { + if (this.node === undefined) { + return undefined; + } + + return this.getDefinitionsForNode(this.node, this.offset); + } +} + +export class TypeDefinitionProvider extends DefinitionProviderBase { + private readonly _fileUri: Uri; + + constructor(program: ProgramView, fileUri: Uri, position: Position, token: CancellationToken) { + const sourceMapper = program.getSourceMapper(fileUri, token, /*mapCompiled*/ false, /*preferStubs*/ true); + const parseResults = program.getParseResults(fileUri); + const { node, offset } = _tryGetNode(parseResults, position); + + super(sourceMapper, program.evaluator!, program.serviceProvider, node, offset, DefinitionFilter.All, token); + this._fileUri = fileUri; + } + + getDefinitions(): DocumentRange[] | undefined { + throwIfCancellationRequested(this.token); + if (this.node === undefined) { + return undefined; + } + + const definitions: DocumentRange[] = []; + + if (this.node.nodeType === ParseNodeType.Name) { + const type = this.evaluator.getType(this.node); + + if (type) { + let declarations: Declaration[] = []; + + doForEachSubtype(type, (subtype) => { + if (subtype?.category === TypeCategory.Class) { + appendArray( + declarations, + this.sourceMapper.findClassDeclarationsByType(this._fileUri, subtype) + ); + } + }); + + // Fall back to Go To Definition if the type can't be found (ex. Go To Type Definition + // was executed on a type name) + if (declarations.length === 0) { + declarations = this.evaluator.getDeclInfoForNameNode(this.node)?.decls ?? 
[]; + } + + this.resolveDeclarations(declarations, definitions); + } + } else if (this.node.nodeType === ParseNodeType.String) { + const declarations = this.evaluator.getDeclInfoForStringNode(this.node)?.decls; + this.resolveDeclarations(declarations, definitions); + } + + if (definitions.length === 0) { + return undefined; + } + + return definitions; + } +} + +function _tryGetNode(parseResults: ParseFileResults | undefined, position: Position) { + if (!parseResults) { + return { node: undefined, offset: 0 }; + } + + const offset = convertPositionToOffset(position, parseResults.tokenizerOutput.lines); + if (offset === undefined) { + return { node: undefined, offset: 0 }; + } + + return { node: ParseTreeUtils.findNodeByOffset(parseResults.parserOutput.parseTree, offset), offset }; +} + +function _createModuleEntry(uri: Uri): DocumentRange { + return { + uri, + range: { + start: { line: 0, character: 0 }, + end: { line: 0, character: 0 }, + }, + }; +} + +function _addIfUnique(definitions: DocumentRange[], itemToAdd: DocumentRange) { + for (const def of definitions) { + if (def.uri.equals(itemToAdd.uri) && rangesAreEqual(def.range, itemToAdd.range)) { + return; + } + } + + definitions.push(itemToAdd); +} diff --git a/python-parser/packages/pyright-internal/src/languageService/documentHighlightProvider.ts b/python-parser/packages/pyright-internal/src/languageService/documentHighlightProvider.ts new file mode 100644 index 00000000..a807d6de --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/documentHighlightProvider.ts @@ -0,0 +1,75 @@ +/* + * documentHighlightProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Logic that maps a position within a Python program file into + * one or more highlight types. 
+ */ + +import { CancellationToken, DocumentHighlight, DocumentHighlightKind } from 'vscode-languageserver'; + +import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { ProgramView, ReferenceUseCase } from '../common/extensibility'; +import { convertOffsetsToRange, convertPositionToOffset } from '../common/positionUtils'; +import { Position, TextRange } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { ParseNodeType } from '../parser/parseNodes'; +import { ParseFileResults } from '../parser/parser'; +import { DocumentSymbolCollector } from './documentSymbolCollector'; + +export class DocumentHighlightProvider { + private readonly _parseResults: ParseFileResults | undefined; + + constructor( + private _program: ProgramView, + private _fileUri: Uri, + private _position: Position, + private _token: CancellationToken + ) { + this._parseResults = this._program.getParseResults(this._fileUri); + } + + getDocumentHighlight(): DocumentHighlight[] | undefined { + throwIfCancellationRequested(this._token); + if (!this._parseResults) { + return undefined; + } + + const offset = convertPositionToOffset(this._position, this._parseResults.tokenizerOutput.lines); + if (offset === undefined) { + return undefined; + } + + const node = ParseTreeUtils.findNodeByOffset(this._parseResults.parserOutput.parseTree, offset); + if (node === undefined) { + return undefined; + } + + if (node.nodeType !== ParseNodeType.Name) { + return undefined; + } + + const results = DocumentSymbolCollector.collectFromNode( + this._program, + node, + this._token, + this._parseResults.parserOutput.parseTree, + { + treatModuleInImportAndFromImportSame: true, + useCase: ReferenceUseCase.References, + } + ); + + const lines = this._parseResults.tokenizerOutput.lines; + return results.map((r) => ({ + kind: + r.node.nodeType === ParseNodeType.Name && ParseTreeUtils.isWriteAccess(r.node) + ? 
DocumentHighlightKind.Write + : DocumentHighlightKind.Read, + range: convertOffsetsToRange(r.range.start, TextRange.getEnd(r.range), lines), + })); + } +} diff --git a/python-parser/packages/pyright-internal/src/languageService/documentSymbolCollector.ts b/python-parser/packages/pyright-internal/src/languageService/documentSymbolCollector.ts new file mode 100644 index 00000000..ca033101 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/documentSymbolCollector.ts @@ -0,0 +1,584 @@ +/* + * documentSymbolCollector.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Collects symbols within the given tree that are semantically + * equivalent to the requested symbol. + */ + +import { CancellationToken } from 'vscode-languageserver'; + +import * as AnalyzerNodeInfo from '../analyzer/analyzerNodeInfo'; +import { AliasDeclaration, Declaration, DeclarationType, isAliasDeclaration } from '../analyzer/declaration'; +import { + areDeclarationsSame, + getDeclarationsWithUsesLocalNameRemoved, + synthesizeAliasDeclaration, +} from '../analyzer/declarationUtils'; +import { getModuleNode, getStringNodeValueRange } from '../analyzer/parseTreeUtils'; +import { ParseTreeWalker } from '../analyzer/parseTreeWalker'; +import { ScopeType } from '../analyzer/scope'; +import * as ScopeUtils from '../analyzer/scopeUtils'; +import { IPythonMode } from '../analyzer/sourceFile'; +import { collectImportedByCells } from '../analyzer/sourceFileInfoUtils'; +import { isStubFile } from '../analyzer/sourceMapper'; +import { Symbol } from '../analyzer/symbol'; +import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { TypeCategory } from '../analyzer/types'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { appendArray } from '../common/collectionUtils'; +import { isDefined } from '../common/core'; +import { assert } from '../common/debug'; +import { ProgramView, 
ReferenceUseCase, SymbolUsageProvider } from '../common/extensibility'; +import { ServiceKeys } from '../common/serviceKeys'; +import { TextRange } from '../common/textRange'; +import { ImportAsNode, NameNode, ParseNode, ParseNodeType, StringListNode, StringNode } from '../parser/parseNodes'; + +export type CollectionResult = { + node: NameNode | StringNode; + range: TextRange; +}; + +export interface DocumentSymbolCollectorOptions { + readonly treatModuleInImportAndFromImportSame?: boolean; + readonly skipUnreachableCode?: boolean; + readonly useCase?: ReferenceUseCase; + + /** + * If `providers` are set, `collector` will assume + * `appendSymbolNamesTo` and `appendDeclarationsTo` have already + * been handled and will not call them again. + * + * If `collector` will result in the same `providers`, `symbolNames`, and `decls` for + * all files, set `providers` so that `collector` doesn't need to perform the same work + * repeatedly for all files. + */ + readonly providers?: readonly SymbolUsageProvider[]; +} + +// 99% of time, `find all references` is looking for a symbol imported from the other file to this file. +// By caching the result of `resolveAlias` we only need to resolve it once per a file. +const withLocalNamesCacheIndex = 0; +const withoutLocalNamesCacheIndex = 1; + +type CacheEntry = { original: Declaration; resolved: Declaration | undefined } | undefined; + +export class AliasResolver { + private readonly _caches: CacheEntry[] = [undefined, undefined]; + + constructor(private readonly _evaluator: TypeEvaluator) { + // Empty + } + + resolve(declaration: Declaration, resolveLocalNames: boolean): Declaration | undefined { + const index = resolveLocalNames ? 
withLocalNamesCacheIndex : withoutLocalNamesCacheIndex; + + if (this._caches[index] && this._caches[index]!.original === declaration) { + return this._caches[index]!.resolved; + } + + const resolved = this._evaluator.resolveAliasDeclaration(declaration, resolveLocalNames, { + allowExternallyHiddenAccess: true, + skipFileNeededCheck: true, + }); + + this._caches[index] = { original: declaration, resolved }; + return resolved; + } +} + +// This walker looks for symbols that are semantically equivalent +// to the requested symbol. +export class DocumentSymbolCollector extends ParseTreeWalker { + private readonly _results: CollectionResult[] = []; + private readonly _dunderAllNameNodes = new Set(); + private readonly _symbolNames: Set = new Set(); + private readonly _declarations: Declaration[] = []; + + private readonly _usageProviders: readonly SymbolUsageProvider[]; + private readonly _treatModuleInImportAndFromImportSame: boolean; + private readonly _skipUnreachableCode: boolean; + private readonly _useCase: ReferenceUseCase; + + private _aliasResolver: AliasResolver; + + constructor( + private readonly _program: ProgramView, + symbolNames: string[], + declarations: Declaration[], + private readonly _startingNode: ParseNode, + private readonly _cancellationToken: CancellationToken, + options?: DocumentSymbolCollectorOptions + ) { + super(); + + this._aliasResolver = new AliasResolver(this._program.evaluator!); + + // Start with the symbols passed in + symbolNames.forEach((s) => this._symbolNames.add(s)); + this._declarations.push(...declarations); + + this._treatModuleInImportAndFromImportSame = options?.treatModuleInImportAndFromImportSame ?? false; + this._skipUnreachableCode = options?.skipUnreachableCode ?? true; + this._useCase = options?.useCase ?? ReferenceUseCase.References; + + this._usageProviders = + options?.providers ?? + (this._program.serviceProvider.tryGet(ServiceKeys.symbolUsageProviderFactory) ?? 
[]) + .map((f) => f.tryCreateProvider(this._useCase, declarations, this._cancellationToken)) + .filter(isDefined); + + if (options?.providers === undefined) { + // Check whether we need to add new symbol names and declarations. + this._usageProviders.forEach((p) => { + p.appendSymbolNamesTo(this._symbolNames); + p.appendDeclarationsTo(this._declarations); + }); + } + + // Don't report strings in __all__ right away, that will + // break the assumption on the result ordering. + this._setDunderAllNodes(this._startingNode); + } + + static collectFromNode( + program: ProgramView, + node: NameNode, + cancellationToken: CancellationToken, + startingNode?: ParseNode, + options?: DocumentSymbolCollectorOptions + ): CollectionResult[] { + const declarations = this.getDeclarationsForNode(program, node, cancellationToken, { resolveLocalNames: true }); + + startingNode = startingNode ?? getModuleNode(node); + if (!startingNode) { + return []; + } + + const collector = new DocumentSymbolCollector( + program, + [node.d.value], + declarations, + startingNode, + cancellationToken, + options + ); + + return collector.collect(); + } + + static getDeclarationsForNode( + program: ProgramView, + node: NameNode, + token: CancellationToken, + options?: { + resolveLocalNames?: boolean; + findImplementations?: boolean; + } + ): Declaration[] { + throwIfCancellationRequested(token); + + const evaluator = program.evaluator; + if (!evaluator) { + return []; + } + + const declarations = getDeclarationsForNameNode(evaluator, node, /* skipUnreachableCode */ false); + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + const fileUri = fileInfo.fileUri; + + const resolveLocalNames = options?.resolveLocalNames ?? true; + const findImplementations = options?.findImplementations ?? true; + + const resolvedDeclarations: Declaration[] = []; + const sourceMapper = findImplementations ? 
program.getSourceMapper(fileUri, token) : undefined; + declarations.forEach((decl) => { + const resolvedDecl = evaluator.resolveAliasDeclaration(decl, resolveLocalNames); + if (resolvedDecl) { + addDeclarationIfUnique(resolvedDeclarations, resolvedDecl); + if (sourceMapper && isStubFile(resolvedDecl.uri)) { + const implDecls = sourceMapper.findDeclarations(resolvedDecl); + for (const implDecl of implDecls) { + if (implDecl && !implDecl.uri.isEmpty()) { + addDeclarationIfUnique(resolvedDeclarations, implDecl); + } + } + } + } + }); + + const sourceFileInfo = program.getSourceFileInfo(fileUri); + if (sourceFileInfo && sourceFileInfo.ipythonMode === IPythonMode.CellDocs) { + // Add declarations from chained source files + let builtinsScope = fileInfo.builtinsScope; + while (builtinsScope && builtinsScope.type === ScopeType.Module) { + const symbol = builtinsScope?.lookUpSymbol(node.d.value); + appendSymbolDeclarations(symbol, resolvedDeclarations); + builtinsScope = builtinsScope?.parent; + } + + // Add declarations from files that implicitly import the target file. 
+ const implicitlyImportedBy = collectImportedByCells(program, sourceFileInfo); + implicitlyImportedBy.forEach((implicitImport) => { + const parseTree = program.getParseResults(implicitImport.uri)?.parserOutput.parseTree; + if (parseTree) { + const scope = AnalyzerNodeInfo.getScope(parseTree); + const symbol = scope?.lookUpSymbol(node.d.value); + appendSymbolDeclarations(symbol, resolvedDeclarations); + } + }); + } + + return resolvedDeclarations; + + function appendSymbolDeclarations(symbol: Symbol | undefined, declarations: Declaration[]) { + symbol + ?.getDeclarations() + .filter((d) => !isAliasDeclaration(d)) + .forEach((decl) => { + const resolvedDecl = evaluator!.resolveAliasDeclaration(decl, resolveLocalNames); + if (resolvedDecl) { + addDeclarationIfUnique(declarations, resolvedDecl); + } + }); + } + } + + collect() { + this.walk(this._startingNode); + return this._results; + } + + override walk(node: ParseNode) { + if (!this._skipUnreachableCode || !AnalyzerNodeInfo.isCodeUnreachable(node)) { + super.walk(node); + } + } + + override visitName(node: NameNode): boolean { + throwIfCancellationRequested(this._cancellationToken); + + // No need to do any more work if the symbol name doesn't match. + if (!this._symbolNames.has(node.d.value)) { + return false; + } + + if (this._declarations.length > 0) { + const declarations = getDeclarationsForNameNode(this._evaluator, node, this._skipUnreachableCode); + if (declarations && declarations.length > 0) { + // Does this name share a declaration with the symbol of interest? + if (this._resultsContainsDeclaration(node, declarations)) { + this._addResult(node); + } + } + } else { + // There were no declarations + this._addResult(node); + } + + return false; + } + + override visitStringList(node: StringListNode): boolean { + // See if we have reference that matches this node. 
+ if (this._declarations.some((d) => d.node?.id === node.id)) { + // Then the matching string should be included + const matching = node.d.strings.find((s) => this._symbolNames.has(s.d.value)); + if (matching && matching.nodeType === ParseNodeType.String) { + this._addResult(matching); + } + } + + return super.visitStringList(node); + } + + override visitString(node: StringNode): boolean { + throwIfCancellationRequested(this._cancellationToken); + + if (this._dunderAllNameNodes.has(node)) { + this._addResult(node); + } + + return false; + } + + private get _evaluator(): TypeEvaluator { + return this._program.evaluator!; + } + + private _addResult(node: NameNode | StringNode) { + const range: TextRange = node.nodeType === ParseNodeType.Name ? node.d.token : getStringNodeValueRange(node); + this._results.push({ node, range }); + } + + private _isDeclarationAllowed(resolvedDecl: Declaration) { + return this._declarations.some((decl) => + areDeclarationsSame( + decl, + resolvedDecl, + this._treatModuleInImportAndFromImportSame, + /* skipRangeForAliases */ true + ) + ); + } + + private _resultsContainsDeclaration(usage: ParseNode, declarations: readonly Declaration[]) { + const results = [...declarations]; + this._usageProviders.forEach((p) => p.appendDeclarationsAt(usage, declarations, results)); + + return results.some((declaration) => { + // Resolve the declaration. + const resolvedDecl = this._aliasResolver.resolve(declaration, /* resolveLocalNames */ false); + if (!resolvedDecl) { + return false; + } + + // The reference results declarations are already resolved, so we don't + // need to call resolveAliasDeclaration on them. + if (this._isDeclarationAllowed(resolvedDecl)) { + return true; + } + + // We didn't find the declaration using local-only alias resolution. Attempt + // it again by fully resolving the alias. 
+ const resolvedDeclNonlocal = this._getResolveAliasDeclaration(resolvedDecl); + if (!resolvedDeclNonlocal || resolvedDeclNonlocal === resolvedDecl) { + return false; + } + + return this._isDeclarationAllowed(resolvedDeclNonlocal); + }); + } + + private _getResolveAliasDeclaration(declaration: Declaration) { + // TypeEvaluator.resolveAliasDeclaration only resolve alias in AliasDeclaration in the form of + // "from x import y as [y]" but don't do thing for alias in "import x as [x]" + // Here, alias should have same name as module name. + if (isAliasDeclFromImportAsWithAlias(declaration)) { + return getDeclarationsWithUsesLocalNameRemoved([declaration])[0]; + } + + const resolvedDecl = this._aliasResolver.resolve(declaration, /* resolveLocalNames */ true); + return isAliasDeclFromImportAsWithAlias(resolvedDecl) + ? getDeclarationsWithUsesLocalNameRemoved([resolvedDecl])[0] + : resolvedDecl; + + function isAliasDeclFromImportAsWithAlias(decl?: Declaration): decl is AliasDeclaration { + return ( + !!decl && + decl.type === DeclarationType.Alias && + decl.node && + decl.usesLocalName && + decl.node.nodeType === ParseNodeType.ImportAs + ); + } + } + + private _setDunderAllNodes(node: ParseNode) { + if (node.nodeType !== ParseNodeType.Module) { + return; + } + + const dunderAllInfo = AnalyzerNodeInfo.getDunderAllInfo(node); + if (!dunderAllInfo) { + return; + } + + const moduleScope = ScopeUtils.getScopeForNode(node); + if (!moduleScope) { + return; + } + + dunderAllInfo.stringNodes.forEach((stringNode) => { + if (!this._symbolNames.has(stringNode.d.value)) { + return; + } + + const symbolInScope = moduleScope.lookUpSymbolRecursive(stringNode.d.value); + if (!symbolInScope) { + return; + } + + if (!this._resultsContainsDeclaration(stringNode, symbolInScope.symbol.getDeclarations())) { + return; + } + + this._dunderAllNameNodes.add(stringNode); + }); + } +} + +export function getDeclarationsForNameNode(evaluator: TypeEvaluator, node: NameNode, skipUnreachableCode = true) 
{ + // This can handle symbols brought in by wildcard (import *) as long as the declarations that the symbol collector + // compares against point to the actual alias declaration, not one that uses local name (ex, import alias) + if (node.parent?.nodeType !== ParseNodeType.ModuleName) { + return _getDeclarationsForNonModuleNameNode(evaluator, node, skipUnreachableCode); + } + + return _getDeclarationsForModuleNameNode(evaluator, node); +} + +export function addDeclarationIfUnique(declarations: Declaration[], itemToAdd: Declaration) { + for (const def of declarations) { + if ( + areDeclarationsSame( + def, + itemToAdd, + /* treatModuleInImportAndFromImportSame */ false, + /* skipRangeForAliases */ true + ) + ) { + return; + } + } + + declarations.push(itemToAdd); +} + +function _getDeclarationsForNonModuleNameNode( + evaluator: TypeEvaluator, + node: NameNode, + skipUnreachableCode = true +): Declaration[] { + assert(node.parent?.nodeType !== ParseNodeType.ModuleName); + + let decls = evaluator.getDeclInfoForNameNode(node, skipUnreachableCode)?.decls || []; + if (node.parent?.nodeType === ParseNodeType.ImportFromAs) { + // Make sure we get the decl for this specific "from import" statement + decls = decls.filter((d) => d.node === node.parent); + } + + // If we can't get decl, see whether we can get type from the node. + // Some might have synthesized type for the node such as subModule in import X.Y statement. + if (decls.length === 0) { + const type = evaluator.getType(node); + if (type?.category === TypeCategory.Module) { + // Synthesize decl for the module. + return [synthesizeAliasDeclaration(type.priv.fileUri)]; + } + } + + // We would like to make X in import X and import X.Y as Y to match, but path for + // X in import X and one in import X.Y as Y might not match since path in X.Y will point + // to X.Y rather than X if import statement has an alias. + // so, for such case, we put synthesized one so we can treat X in both statement same. 
+ for (const aliasDecl of decls.filter((d) => isAliasDeclaration(d) && !d.loadSymbolsFromPath)) { + const node = (aliasDecl as AliasDeclaration).node; + if (node.nodeType === ParseNodeType.ImportFromAs) { + // from ... import X case, decl in the submodule fallback has the path. + continue; + } + + appendArray( + decls, + evaluator.getDeclInfoForNameNode(node.d.module.d.nameParts[0], skipUnreachableCode)?.decls || [] + ); + } + + return decls; +} + +function _getDeclarationsForModuleNameNode(evaluator: TypeEvaluator, node: NameNode): Declaration[] { + assert(node.parent?.nodeType === ParseNodeType.ModuleName); + + // We don't have symbols corresponding to ModuleName in our system since those + // are not referenceable. but in "find all reference", we want to match those + // if it refers to the same module file. Code below handles different kind of + // ModuleName cases. + const moduleName = node.parent; + if ( + moduleName.parent?.nodeType === ParseNodeType.ImportAs || + moduleName.parent?.nodeType === ParseNodeType.ImportFrom + ) { + const index = moduleName.d.nameParts.findIndex((n) => n === node); + + // Special case, first module name part. + if (index === 0) { + // 1. import X or from X import ... + const decls: Declaration[] = []; + + // First, we need to put decls for module names type evaluator synthesized so that + // we can match both "import X" and "from X import ..." + appendArray( + decls, + evaluator + .getDeclInfoForNameNode(moduleName.d.nameParts[0]) + ?.decls?.filter((d) => isAliasDeclaration(d)) || [] + ); + + if (decls.length === 0 || moduleName.parent.nodeType !== ParseNodeType.ImportAs) { + return decls; + } + + // If module name belong to "import xxx" not "from xxx", then see whether + // we can get regular decls (decls created from binder, not synthesized from type eval) + // from symbol as well. 
+ // ex, import X as x + const isImportAsWithAlias = + moduleName.d.nameParts.length === 1 && + moduleName.parent.nodeType === ParseNodeType.ImportAs && + !!moduleName.parent.d.alias; + + // if "import" has alias, symbol is assigned to alias, not the module. + const importName = isImportAsWithAlias + ? (moduleName.parent as ImportAsNode).d.alias!.d.value + : moduleName.d.nameParts[0].d.value; + + // And we also need to re-use "decls for X" binder has created + // so that it matches with decls type evaluator returns for "references for X". + // ex) import X or from .X import ... in init file and etc. + const symbolWithScope = ScopeUtils.getScopeForNode(node)?.lookUpSymbolRecursive(importName); + if (symbolWithScope && moduleName.d.nameParts.length === 1) { + let declsFromSymbol: Declaration[] = []; + + appendArray( + declsFromSymbol, + symbolWithScope.symbol.getDeclarations().filter((d) => isAliasDeclaration(d)) + ); + + // If symbols are re-used, then find one that belong to this import statement. + if (declsFromSymbol.length > 1) { + declsFromSymbol = declsFromSymbol.filter((d) => { + d = d as AliasDeclaration; + + if (d.firstNamePart !== undefined) { + // For multiple import statements with sub modules, decl can be re-used. + // ex) import X.Y and import X.Z or from .X import ... in init file. + // Decls for X will be reused for both import statements, and node will point + // to first import statement. For those case, use firstNamePart instead to check. + return d.firstNamePart === moduleName.d.nameParts[0].d.value; + } + + return d.node === moduleName.parent; + }); + } + + // ex, import X as x + // We have decls for the alias "x" not the module name "X". Convert decls for the "X" + if (isImportAsWithAlias) { + declsFromSymbol = getDeclarationsWithUsesLocalNameRemoved(declsFromSymbol); + } + + appendArray(decls, declsFromSymbol); + } + + return decls; + } + + if (index > 0) { + // 2. import X.Y or from X.Y import .... 
+ // For submodule "Y", we just use synthesized decls from type evaluator. + // Decls for these sub module don't actually exist in the system. Instead, symbol for Y in + // "import X.Y" hold onto synthesized module type (without any decl). + // And "from X.Y import ..." doesn't have any symbol associated module names. + // they can't be referenced in the module. + return evaluator.getDeclInfoForNameNode(moduleName.d.nameParts[index])?.decls || []; + } + + return []; + } + + return []; +} diff --git a/python-parser/packages/pyright-internal/src/languageService/documentSymbolProvider.ts b/python-parser/packages/pyright-internal/src/languageService/documentSymbolProvider.ts new file mode 100644 index 00000000..203252e4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/documentSymbolProvider.ts @@ -0,0 +1,146 @@ +/* + * documentSymbolProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Logic that enumerates all of the symbols within a specified + * source file document. 
+ */ + +import { CancellationToken, DocumentSymbol, Location, SymbolInformation } from 'vscode-languageserver'; + +import { getFileInfo } from '../analyzer/analyzerNodeInfo'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { ProgramView } from '../common/extensibility'; +import { ReadOnlyFileSystem } from '../common/fileSystem'; +import { Uri } from '../common/uri/uri'; +import { convertUriToLspUriString } from '../common/uri/uriUtils'; +import { ParseFileResults } from '../parser/parser'; +import { IndexOptions, IndexSymbolData, SymbolIndexer } from './symbolIndexer'; + +export function convertToFlatSymbols( + program: ProgramView, + uri: Uri, + symbolList: DocumentSymbol[] +): SymbolInformation[] { + const flatSymbols: SymbolInformation[] = []; + + for (const symbol of symbolList) { + _appendToFlatSymbolsRecursive(program.fileSystem, flatSymbols, uri, symbol); + } + + return flatSymbols; +} + +export class DocumentSymbolProvider { + private _parseResults: ParseFileResults | undefined; + + constructor( + protected readonly program: ProgramView, + protected readonly uri: Uri, + private readonly _supportHierarchicalDocumentSymbol: boolean, + private readonly _indexOptions: IndexOptions, + private readonly _token: CancellationToken + ) { + this._parseResults = this.program.getParseResults(this.uri); + } + + getSymbols(): DocumentSymbol[] | SymbolInformation[] { + if (!this._parseResults) { + return []; + } + + const symbolList = this.getHierarchicalSymbols(); + if (this._supportHierarchicalDocumentSymbol) { + return symbolList; + } + + return convertToFlatSymbols(this.program, this.uri, symbolList); + } + + protected getHierarchicalSymbols() { + const symbolList: DocumentSymbol[] = []; + const parseResults = this.program.getParseResults(this.uri); + if (!parseResults) { + return symbolList; + } + + const fileInfo = getFileInfo(parseResults.parserOutput.parseTree); + if (!fileInfo) { + return symbolList; + } + + const 
indexSymbolData = SymbolIndexer.indexSymbols(fileInfo, parseResults, this._indexOptions, this._token); + this.appendDocumentSymbolsRecursive(indexSymbolData, symbolList); + + return symbolList; + } + + protected appendDocumentSymbolsRecursive( + indexSymbolData: IndexSymbolData[] | undefined, + symbolList: DocumentSymbol[] + ) { + throwIfCancellationRequested(this._token); + + if (!indexSymbolData) { + return; + } + + for (const symbolData of indexSymbolData) { + if (symbolData.alias) { + continue; + } + + // It's possible for a name to be '' under certain error + // conditions (such as a decorator with no associated function + // or class). + if (!symbolData.name) { + continue; + } + + const children: DocumentSymbol[] = []; + this.appendDocumentSymbolsRecursive(symbolData.children, children); + + const symbolInfo: DocumentSymbol = { + name: symbolData.name, + kind: symbolData.kind, + range: symbolData.range!, + selectionRange: symbolData.selectionRange!, + children: children!, + }; + + symbolList.push(symbolInfo); + } + } +} + +function _appendToFlatSymbolsRecursive( + fs: ReadOnlyFileSystem, + flatSymbols: SymbolInformation[], + documentUri: Uri, + symbol: DocumentSymbol, + parent?: DocumentSymbol +) { + const flatSymbol: SymbolInformation = { + name: symbol.name, + kind: symbol.kind, + location: Location.create(convertUriToLspUriString(fs, documentUri), symbol.range), + }; + + if (symbol.tags) { + flatSymbol.tags = symbol.tags; + } + + if (parent) { + flatSymbol.containerName = parent.name; + } + + flatSymbols.push(flatSymbol); + + if (symbol.children) { + for (const child of symbol.children) { + _appendToFlatSymbolsRecursive(fs, flatSymbols, documentUri, child, symbol); + } + } +} diff --git a/python-parser/packages/pyright-internal/src/languageService/dynamicFeature.ts b/python-parser/packages/pyright-internal/src/languageService/dynamicFeature.ts new file mode 100644 index 00000000..efd61559 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/languageService/dynamicFeature.ts @@ -0,0 +1,68 @@ +/* + * dynamicFeature.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * LanguageServer features that can be dynamically added or removed from LSP server + */ +import { Disposable } from 'vscode-languageserver'; +import { ServerSettings } from '../common/languageServerInterface'; + +export abstract class DynamicFeature { + private _lastRegistration: Disposable | undefined; + + constructor(readonly name: string) { + // Empty + } + + register() { + this.registerFeature().then((d) => { + this.dispose(); + this._lastRegistration = d; + }); + } + + update(settings: ServerSettings) { + // Default is no-op + } + + dispose() { + this._lastRegistration?.dispose(); + this._lastRegistration = undefined; + } + + protected abstract registerFeature(): Promise<Disposable>; +} + +export class DynamicFeatures { + private readonly _map = new Map<string, DynamicFeature>(); + + add(feature: DynamicFeature) { + const old = this._map.get(feature.name); + if (old) { + old.dispose(); + } + + this._map.set(feature.name, feature); + } + + update(settings: ServerSettings) { + for (const feature of this._map.values()) { + feature.update(settings); + } + } + + register() { + for (const feature of this._map.values()) { + feature.register(); + } + } + + unregister() { + for (const feature of this._map.values()) { + feature.dispose(); + } + + this._map.clear(); + } +} diff --git a/python-parser/packages/pyright-internal/src/languageService/fileWatcherDynamicFeature.ts b/python-parser/packages/pyright-internal/src/languageService/fileWatcherDynamicFeature.ts new file mode 100644 index 00000000..fcfcb929 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/fileWatcherDynamicFeature.ts @@ -0,0 +1,78 @@ +/* + * fileWatcherDynamicFeature.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license.
+ * + * implementation of file watcher feature dynamic registration + */ +import { + Connection, + DidChangeWatchedFilesNotification, + Disposable, + FileSystemWatcher, + WatchKind, +} from 'vscode-languageserver'; +import { FileSystem } from '../common/fileSystem'; +import { deduplicateFolders, isFile } from '../common/uri/uriUtils'; +import { DynamicFeature } from './dynamicFeature'; +import { Workspace } from '../workspaceFactory'; +import { isDefined } from '../common/core'; +import { configFileName } from '../common/pathConsts'; + +export class FileWatcherDynamicFeature extends DynamicFeature { + constructor( + private readonly _connection: Connection, + private readonly _hasWatchFileRelativePathCapability: boolean, + private readonly _fs: FileSystem, + private readonly _workspaceFactory: IWorkspaceFactory + ) { + super('file watcher'); + } + + protected override registerFeature(): Promise<Disposable> { + const watchKind = WatchKind.Create | WatchKind.Change | WatchKind.Delete; + + // Set default (config files and all workspace files) first. + const watchers: FileSystemWatcher[] = [ + { globPattern: `**/${configFileName}`, kind: watchKind }, + { globPattern: '**', kind: watchKind }, + ]; + + // Add all python search paths to watch list + if (this._hasWatchFileRelativePathCapability) { + // Dedup search paths from all workspaces. + // Get rid of any search path under workspace root since it is already watched by + // "**" above. + const searchPaths = this._workspaceFactory.getNonDefaultWorkspaces().map((w) => [ + ...w.searchPathsToWatch, + ...w.service + .getConfigOptions() + .getExecutionEnvironments() + .map((e) => e.extraPaths) + .flat(), + ]); + + const foldersToWatch = deduplicateFolders( + searchPaths, + this._workspaceFactory + .getNonDefaultWorkspaces() + .map((w) => w.rootUri) + .filter(isDefined) + ); + + foldersToWatch.forEach((p) => { + const globPattern = isFile(this._fs, p, /* treatZipDirectoryAsFile */ true) + ?
{ baseUri: p.getDirectory().toString(), pattern: p.fileName } + : { baseUri: p.toString(), pattern: '**' }; + + watchers.push({ globPattern, kind: watchKind }); + }); + } + + return this._connection.client.register(DidChangeWatchedFilesNotification.type, { watchers }); + } +} + +interface IWorkspaceFactory { + getNonDefaultWorkspaces(kind?: string): Workspace[]; +} diff --git a/python-parser/packages/pyright-internal/src/languageService/hoverProvider.ts b/python-parser/packages/pyright-internal/src/languageService/hoverProvider.ts new file mode 100644 index 00000000..d96fa43c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/hoverProvider.ts @@ -0,0 +1,601 @@ +/* + * hoverProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Logic that maps a position within a Python program file into + * markdown text that is displayed when the user hovers over that + * position within a smart editor. + */ + +import { CancellationToken, Hover, MarkupKind } from 'vscode-languageserver'; + +import { + Declaration, + DeclarationType, + VariableDeclaration, + isUnresolvedAliasDeclaration, +} from '../analyzer/declaration'; +import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; +import { SourceMapper } from '../analyzer/sourceMapper'; +import { SynthesizedTypeInfo } from '../analyzer/symbol'; +import { isBuiltInModule } from '../analyzer/typeDocStringUtils'; +import { PrintTypeOptions, TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { convertToInstance, doForEachSubtype, isMaybeDescriptorInstance } from '../analyzer/typeUtils'; +import { + ClassType, + Type, + TypeCategory, + getTypeAliasInfo, + isAnyOrUnknown, + isClassInstance, + isFunctionOrOverloaded, + isModule, + isParamSpec, + isTypeVar, +} from '../analyzer/types'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { SignatureDisplayType } from '../common/configOptions'; 
+import { assertNever, fail } from '../common/debug'; +import { ProgramView } from '../common/extensibility'; +import { convertOffsetToPosition, convertPositionToOffset } from '../common/positionUtils'; +import { ServiceProvider } from '../common/serviceProvider'; +import { Position, Range, TextRange } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { ExpressionNode, NameNode, ParseNode, ParseNodeType, StringNode } from '../parser/parseNodes'; +import { ParseFileResults } from '../parser/parser'; +import { + getClassAndConstructorTypes, + getConstructorTooltip, + getDocumentationPartsForTypeAndDecl, + getToolTipForType, + getTypeForToolTip, +} from './tooltipUtils'; + +export interface HoverTextPart { + python?: boolean; + text: string; +} + +export interface HoverResults { + parts: HoverTextPart[]; + range: Range; +} + +export function convertHoverResults(hoverResults: HoverResults | null, format: MarkupKind): Hover | null { + if (!hoverResults) { + return null; + } + + const markupString = hoverResults.parts + .map((part) => { + if (part.python) { + if (format === MarkupKind.Markdown) { + return '```python\n' + part.text + '\n```\n'; + } else if (format === MarkupKind.PlainText) { + return part.text + '\n\n'; + } else { + fail(`Unsupported markup type: ${format}`); + } + } + return part.text; + }) + .join('') + .trimEnd(); + + return { + contents: { + kind: format, + value: markupString, + }, + range: hoverResults.range, + }; +} + +export function addParameterResultsPart( + serviceProvider: ServiceProvider, + paramNameNode: NameNode, + resolvedDecl: Declaration | undefined, + format: MarkupKind, + parts: HoverTextPart[] +) { + // See if we have a docstring for the parent function. + let docString: string | undefined = undefined; + const funcNode = ParseTreeUtils.getEnclosingFunction(resolvedDecl?.node || paramNameNode); + if (funcNode) { + docString = ParseTreeUtils.getDocString(funcNode?.d.suite?.d.statements ?? 
[]); + if (docString) { + // Compute the docstring now. + docString = serviceProvider + .docStringService() + .extractParameterDocumentation(docString, paramNameNode.d.value, format); + } + } + if (!docString) { + return; + } + + parts.push({ + python: false, + text: docString, + }); +} + +export function addDocumentationResultsPart( + serviceProvider: ServiceProvider, + docString: string | undefined, + format: MarkupKind, + parts: HoverTextPart[], + resolvedDecl: Declaration | undefined +) { + if (!docString) { + return; + } + + if (format === MarkupKind.Markdown) { + const markDown = serviceProvider + .docStringService() + .convertDocStringToMarkdown(docString, isBuiltInModule(resolvedDecl?.uri)); + + if (parts.length > 0 && markDown.length > 0) { + parts.push({ text: '---\n' }); + } + + parts.push({ text: markDown, python: false }); + return; + } + + if (format === MarkupKind.PlainText) { + parts.push({ text: serviceProvider.docStringService().convertDocStringToPlainText(docString), python: false }); + return; + } + + fail(`Unsupported markup type: ${format}`); +} + +export function getVariableTypeText( + evaluator: TypeEvaluator, + declaration: VariableDeclaration | undefined, + name: string, + type: Type, + typeNode: ExpressionNode, + functionSignatureDisplay: SignatureDisplayType +) { + let label = 'variable'; + if (declaration) { + label = declaration.isConstant || evaluator.isFinalVariableDeclaration(declaration) ? 'constant' : 'variable'; + } + + let typeVarName: string | undefined; + + if (type.props?.typeAliasInfo && typeNode.nodeType === ParseNodeType.Name) { + const typeAliasInfo = getTypeAliasInfo(type); + if (typeAliasInfo?.shared.name === typeNode.d.value) { + if (isTypeVar(type)) { + label = isParamSpec(type) ? 'param spec' : 'type variable'; + typeVarName = type.shared.name; + } else { + // Handle type aliases specially. 
+ const typeText = evaluator.printType(convertToInstance(getTypeForToolTip(evaluator, typeNode)), { + expandTypeAlias: true, + }); + + return `(type) ${name} = ` + typeText; + } + } + } + + if ( + type.category === TypeCategory.Function || + type.category === TypeCategory.Overloaded || + typeNode.parent?.nodeType === ParseNodeType.Call + ) { + return getToolTipForType( + type, + label, + name, + evaluator, + /* isProperty */ false, + functionSignatureDisplay, + typeNode + ); + } + + const typeText = typeVarName ?? name + ': ' + evaluator.printType(getTypeForToolTip(evaluator, typeNode)); + + return `(${label}) ` + typeText; +} + +export class HoverProvider { + private readonly _parseResults: ParseFileResults | undefined; + private readonly _sourceMapper: SourceMapper; + + constructor( + private readonly _program: ProgramView, + private readonly _fileUri: Uri, + private readonly _position: Position, + private readonly _format: MarkupKind, + private readonly _token: CancellationToken + ) { + this._parseResults = this._program.getParseResults(this._fileUri); + this._sourceMapper = this._program.getSourceMapper(this._fileUri, this._token, /* mapCompiled */ true); + } + + getHover(): Hover | null { + return convertHoverResults(this._getHoverResult(), this._format); + } + + static getPrimaryDeclaration(declarations: Declaration[]) { + // In most cases, it's best to treat the first declaration as the + // "primary". This works well for properties that have setters + // which often have doc strings on the getter but not the setter. + // The one case where using the first declaration doesn't work as + // well is the case where an import statement within an __init__.py + // file uses the form "from .A import A". In this case, if we use + // the first declaration, it will show up as a module rather than + // the imported symbol type. 
+ const primaryDeclaration = declarations[0]; + if (primaryDeclaration.type === DeclarationType.Alias && declarations.length > 1) { + return declarations[1]; + } else if ( + primaryDeclaration.type === DeclarationType.Variable && + declarations.length > 1 && + primaryDeclaration.isDefinedBySlots + ) { + // Slots cannot have docstrings, so pick the secondary. + return declarations[1]; + } + + return primaryDeclaration; + } + + private get _evaluator(): TypeEvaluator { + return this._program.evaluator!; + } + + private get _functionSignatureDisplay() { + return this._program.configOptions.functionSignatureDisplay; + } + + private _getHoverResult(): HoverResults | null { + throwIfCancellationRequested(this._token); + + if (!this._parseResults) { + return null; + } + + const offset = convertPositionToOffset(this._position, this._parseResults.tokenizerOutput.lines); + if (offset === undefined) { + return null; + } + + const node = ParseTreeUtils.findNodeByOffset(this._parseResults.parserOutput.parseTree, offset); + if (node === undefined) { + return null; + } + + const results: HoverResults = { + parts: [], + range: { + start: convertOffsetToPosition(node.start, this._parseResults.tokenizerOutput.lines), + end: convertOffsetToPosition(TextRange.getEnd(node), this._parseResults.tokenizerOutput.lines), + }, + }; + + if (node.nodeType === ParseNodeType.Name) { + const declInfo = this._evaluator.getDeclInfoForNameNode(node); + const declarations = declInfo?.decls; + + if (declarations && declarations.length > 0) { + const primaryDeclaration = HoverProvider.getPrimaryDeclaration(declarations); + this._addResultsForDeclaration(results.parts, primaryDeclaration, node); + } else if (declInfo && declInfo.synthesizedTypes.length > 0) { + const nameNode = node; + declInfo?.synthesizedTypes.forEach((type) => { + this._addResultsForSynthesizedType(results.parts, type, nameNode); + }); + this._addDocumentationPart(results.parts, node, /* resolvedDecl */ undefined); + } else if 
(!node.parent || node.parent.nodeType !== ParseNodeType.ModuleName) { + // If we had no declaration, see if we can provide a minimal tooltip. We'll skip + // this if it's part of a module name, since a module name part with no declaration + // is a directory (a namespace package), and we don't want to provide any hover + // information in that case. + if (results.parts.length === 0) { + const type = this._getType(node); + let typeText: string; + if (isModule(type)) { + // Handle modules specially because submodules aren't associated with + // declarations, but we want them to be presented in the same way as + // the top-level module, which does have a declaration. + typeText = '(module) ' + node.d.value; + } else { + let label = 'function'; + let isProperty = false; + + if (isMaybeDescriptorInstance(type, /* requireSetter */ false)) { + isProperty = true; + label = 'property'; + } + + typeText = getToolTipForType( + type, + label, + node.d.value, + this._evaluator, + isProperty, + this._functionSignatureDisplay + ); + } + + this._addResultsPart(results.parts, typeText, /* python */ true); + this._addDocumentationPart(results.parts, node, /* resolvedDecl */ undefined); + } + } + } else if (node.nodeType === ParseNodeType.String) { + const type = this._evaluator.getExpectedType(node)?.type; + if (type !== undefined) { + this._tryAddPartsForTypedDictKey(node, type, results.parts); + } + } + + return results.parts.length > 0 ? results : null; + } + + private _addResultsForDeclaration(parts: HoverTextPart[], declaration: Declaration, node: NameNode): void { + const resolvedDecl = + declaration.type === DeclarationType.Alias + ? 
this._evaluator.resolveAliasDeclaration(declaration, /* resolveLocalNames */ true) + : declaration; + if ( + !resolvedDecl || + (resolvedDecl.type === DeclarationType.Alias && isUnresolvedAliasDeclaration(resolvedDecl)) + ) { + this._addResultsPart(parts, `(import) ` + node.d.value + this._getTypeText(node), /* python */ true); + return; + } + + switch (resolvedDecl.type) { + case DeclarationType.Intrinsic: { + this._addResultsPart(parts, node.d.value + this._getTypeText(node), /* python */ true); + this._addDocumentationPart(parts, node, resolvedDecl); + break; + } + + case DeclarationType.Variable: { + // If the named node is an aliased import symbol, we can't call + // getType on the original name because it's not in the symbol + // table. Instead, use the node from the resolved alias. + let typeNode: ParseNode = node; + if ( + declaration.node.nodeType === ParseNodeType.ImportAs || + declaration.node.nodeType === ParseNodeType.ImportFromAs + ) { + if (declaration.node.d.alias && node !== declaration.node.d.alias) { + if (resolvedDecl.node.nodeType === ParseNodeType.Name) { + typeNode = resolvedDecl.node; + } + } + } else if (node.parent?.nodeType === ParseNodeType.Argument && node.parent.d.name === node) { + // If this is a named argument, we would normally have received a Parameter declaration + // rather than a variable declaration, but we can get here in the case of a dataclass. + // Replace the typeNode with the node of the variable declaration. + if (declaration.node.nodeType === ParseNodeType.Name) { + typeNode = declaration.node; + } + } + + // Determine if this identifier is a type alias. If so, expand + // the type alias when printing the type information. 
+ const type = this._getType(typeNode); + const typeText = getVariableTypeText( + this._evaluator, + resolvedDecl, + node.d.value, + type, + typeNode, + this._functionSignatureDisplay + ); + + this._addResultsPart(parts, typeText, /* python */ true); + this._addDocumentationPart(parts, node, resolvedDecl); + break; + } + + case DeclarationType.Param: { + this._addResultsPart(parts, '(parameter) ' + node.d.value + this._getTypeText(node), /* python */ true); + addParameterResultsPart(this._program.serviceProvider, node, resolvedDecl, this._format, parts); + this._addDocumentationPart(parts, node, resolvedDecl); + break; + } + + case DeclarationType.TypeParam: { + // If the user is hovering over a type parameter name in a class type parameter + // list, display the computed variance of the type param. + const typeParamListNode = ParseTreeUtils.getParentNodeOfType(node, ParseNodeType.TypeParameterList); + const nodeType = typeParamListNode?.parent?.nodeType; + const printTypeVarVariance = nodeType === ParseNodeType.Class || nodeType === ParseNodeType.TypeAlias; + + this._addResultsPart( + parts, + '(type parameter) ' + node.d.value + this._getTypeText(node, { printTypeVarVariance }), + /* python */ true + ); + this._addDocumentationPart(parts, node, resolvedDecl); + break; + } + + case DeclarationType.Class: + case DeclarationType.SpecialBuiltInClass: { + if (this._addInitOrNewMethodInsteadIfCallNode(node, parts, resolvedDecl)) { + return; + } + + const nameNode = resolvedDecl.type === DeclarationType.Class ? 
resolvedDecl.node.d.name : node; + this._addResultsPart(parts, '(class) ' + nameNode.d.value, /* python */ true); + this._addDocumentationPart(parts, node, resolvedDecl); + break; + } + + case DeclarationType.Function: { + let label = 'function'; + let isProperty = false; + if (resolvedDecl.isMethod) { + const declaredType = this._evaluator.getTypeForDeclaration(resolvedDecl)?.type; + isProperty = !!declaredType && isMaybeDescriptorInstance(declaredType, /* requireSetter */ false); + label = isProperty ? 'property' : 'method'; + } + + let type = this._getType(node); + const resolvedType = this._getType(resolvedDecl.node.d.name); + type = isAnyOrUnknown(type) ? resolvedType : type; + const signatureString = getToolTipForType( + type, + label, + node.d.value, + this._evaluator, + isProperty, + this._functionSignatureDisplay + ); + + this._addResultsPart(parts, signatureString, /* python */ true); + this._addDocumentationPart(parts, node, resolvedDecl); + break; + } + + case DeclarationType.Alias: { + // First the 'module' header. + this._addResultsPart(parts, '(module) ' + node.d.value, /* python */ true); + this._addDocumentationPart(parts, node, resolvedDecl); + break; + } + + case DeclarationType.TypeAlias: { + const type = convertToInstance(this._getType(node)); + const typeText = this._evaluator.printType(type, { expandTypeAlias: true }); + this._addResultsPart(parts, `(type) ${node.d.value} = ${typeText}`, /* python */ true); + this._addDocumentationPart(parts, node, resolvedDecl); + break; + } + + default: + assertNever(resolvedDecl); + } + } + + private _addResultsForSynthesizedType(parts: HoverTextPart[], typeInfo: SynthesizedTypeInfo, hoverNode: NameNode) { + let typeText: string | undefined; + + if (isModule(typeInfo.type)) { + typeText = '(module) ' + hoverNode.d.value; + } else { + const node = typeInfo.node ?? 
hoverNode; + + const type = this._getType(node); + typeText = getVariableTypeText( + this._evaluator, + /* declaration */ undefined, + node.d.value, + type, + node, + this._functionSignatureDisplay + ); + } + + if (typeText) { + this._addResultsPart(parts, typeText, /* python */ true); + } + } + + private _tryAddPartsForTypedDictKey(node: StringNode, type: Type, parts: HoverTextPart[]) { + // If the expected type is a TypedDict and the current node is a key entry then we can provide a tooltip + // with the type of the TypedDict key and its docstring, if available. + doForEachSubtype(type, (subtype) => { + if (isClassInstance(subtype) && ClassType.isTypedDictClass(subtype)) { + const entry = subtype.shared.typedDictEntries?.knownItems.get(node.d.value); + if (entry) { + // If we have already added parts for another declaration (e.g. for a union of TypedDicts that share the same key) + // then we need to add a separator to prevent a visual bug. + if (parts.length > 0) { + parts.push({ text: '\n\n---\n' }); + } + + // e.g. (key) name: str + const text = '(key) ' + node.d.value + ': ' + this._evaluator.printType(entry.valueType); + this._addResultsPart(parts, text, /* python */ true); + + const declarations = ClassType.getSymbolTable(subtype).get(node.d.value)?.getDeclarations(); + if (declarations !== undefined && declarations?.length !== 0) { + // As we are just interested in the docString we don't have to worry about + // anything other than the first declaration. There also shouldn't be more + // than one declaration for a TypedDict key variable. 
+ const declaration = declarations[0]; + if (declaration.type === DeclarationType.Variable && declaration.docString !== undefined) { + this._addDocumentationPartForType(parts, subtype, declaration); + } + } + } + } + }); + } + + private _addInitOrNewMethodInsteadIfCallNode(node: NameNode, parts: HoverTextPart[], declaration: Declaration) { + const result = getClassAndConstructorTypes(node, this._evaluator); + if (!result) { + return false; + } + + if (result.methodType && isFunctionOrOverloaded(result.methodType)) { + this._addResultsPart( + parts, + getConstructorTooltip(node.d.value, result.methodType, this._evaluator, this._functionSignatureDisplay), + /* python */ true + ); + + const addedDoc = this._addDocumentationPartForType(parts, result.methodType, declaration); + + if (!addedDoc) { + this._addDocumentationPartForType(parts, result.classType, declaration); + } + return true; + } + return false; + } + + private _getType(node: ExpressionNode) { + // It does common work necessary for hover for a type we got + // from raw type evaluator. 
+ return getTypeForToolTip(this._evaluator, node); + } + + private _getTypeText(node: ExpressionNode, options?: PrintTypeOptions): string { + const type = this._getType(node); + return ': ' + this._evaluator.printType(type, options); + } + + private _addDocumentationPart(parts: HoverTextPart[], node: NameNode, resolvedDecl: Declaration | undefined) { + const type = this._getType(node); + this._addDocumentationPartForType(parts, type, resolvedDecl, node.d.value); + } + + private _addDocumentationPartForType( + parts: HoverTextPart[], + type: Type | undefined, + resolvedDecl: Declaration | undefined, + name?: string + ): boolean { + const docString = getDocumentationPartsForTypeAndDecl(this._sourceMapper, type, resolvedDecl, this._evaluator, { + name, + }); + + addDocumentationResultsPart(this._program.serviceProvider, docString, this._format, parts, resolvedDecl); + return !!docString; + } + + private _addResultsPart(parts: HoverTextPart[], text: string, python = false) { + parts.push({ + python, + text, + }); + } +} diff --git a/python-parser/packages/pyright-internal/src/languageService/importSorter.ts b/python-parser/packages/pyright-internal/src/languageService/importSorter.ts new file mode 100644 index 00000000..0e56b57c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/importSorter.ts @@ -0,0 +1,196 @@ +/* + * importSorter.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides code that sorts and formats import statements within a + * Python source file. 
+ */ + +import { CancellationToken } from 'vscode-languageserver'; + +import { + compareImportStatements, + getImportGroup, + getTopLevelImports, + ImportStatement, +} from '../analyzer/importStatementUtils'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { TextEditAction } from '../common/editAction'; +import { convertOffsetToPosition } from '../common/positionUtils'; +import { Range, TextRange } from '../common/textRange'; +import { ImportAsNode, ImportFromAsNode, ImportFromNode, ParseNodeType } from '../parser/parseNodes'; +import { ParseFileResults } from '../parser/parser'; + +// We choose a line length that matches the default for the popular +// "black" formatter used in many Python projects. +const _maxLineLength = 88; + +export class ImportSorter { + constructor(private _parseResults: ParseFileResults, private _cancellationToken: CancellationToken) {} + + sort(): TextEditAction[] { + throwIfCancellationRequested(this._cancellationToken); + + const actions: TextEditAction[] = []; + const importStatements = getTopLevelImports(this._parseResults.parserOutput.parseTree); + + const sortedStatements = importStatements.orderedImports + .map((s) => s) + .sort((a, b) => { + return compareImportStatements(a, b); + }); + + if (sortedStatements.length === 0) { + // Nothing to do. + return []; + } + + const primaryRange = this._getPrimaryReplacementRange(importStatements.orderedImports); + + actions.push({ + range: primaryRange, + replacementText: this._generateSortedImportText(sortedStatements), + }); + + this._addSecondaryReplacementRanges(importStatements.orderedImports, actions); + + return actions; + } + + // Determines the text range for the existing primary block of import statements. + // If there are other blocks of import statements separated by other statements, + // we'll ignore these other blocks for now. 
+ private _getPrimaryReplacementRange(statements: ImportStatement[]): Range { + let statementLimit = statements.findIndex((s) => s.followsNonImportStatement); + if (statementLimit < 0) { + statementLimit = statements.length; + } + + const lastStatement = statements[statementLimit - 1].node; + return { + start: convertOffsetToPosition(statements[0].node.start, this._parseResults.tokenizerOutput.lines), + end: convertOffsetToPosition(TextRange.getEnd(lastStatement), this._parseResults.tokenizerOutput.lines), + }; + } + + // If import statements are separated by other statements, we will remove the old + // secondary blocks. + private _addSecondaryReplacementRanges(statements: ImportStatement[], actions: TextEditAction[]) { + let secondaryBlockStart = statements.findIndex((s) => s.followsNonImportStatement); + if (secondaryBlockStart < 0) { + return; + } + + while (true) { + let secondaryBlockLimit = statements.findIndex( + (s, index) => index > secondaryBlockStart && s.followsNonImportStatement + ); + if (secondaryBlockLimit < 0) { + secondaryBlockLimit = statements.length; + } + + actions.push({ + range: { + start: convertOffsetToPosition( + statements[secondaryBlockStart].node.start, + this._parseResults.tokenizerOutput.lines + ), + end: convertOffsetToPosition( + TextRange.getEnd(statements[secondaryBlockLimit - 1].node), + this._parseResults.tokenizerOutput.lines + ), + }, + replacementText: '', + }); + + secondaryBlockStart = secondaryBlockLimit; + if (secondaryBlockStart >= statements.length) { + break; + } + } + } + + private _generateSortedImportText(sortedStatements: ImportStatement[]): string { + let importText = ''; + let prevImportGroup = getImportGroup(sortedStatements[0]); + + for (const statement of sortedStatements) { + // Insert a blank space between import type groups. 
+ const curImportType = getImportGroup(statement); + if (prevImportGroup !== curImportType) { + importText += this._parseResults.tokenizerOutput.predominantEndOfLineSequence; + prevImportGroup = curImportType; + } + + let importLine: string; + if (statement.node.nodeType === ParseNodeType.Import) { + importLine = this._formatImportNode(statement.subnode!, statement.moduleName); + } else { + importLine = this._formatImportFromNode(statement.node, statement.moduleName); + } + + // If this isn't the last statement, add a newline. + if (statement !== sortedStatements[sortedStatements.length - 1]) { + importLine += this._parseResults.tokenizerOutput.predominantEndOfLineSequence; + } + + importText += importLine; + } + + return importText; + } + + private _formatImportNode(subnode: ImportAsNode, moduleName: string): string { + let importText = `import ${moduleName}`; + if (subnode.d.alias) { + importText += ` as ${subnode.d.alias.d.value}`; + } + + return importText; + } + + private _formatImportFromNode(node: ImportFromNode, moduleName: string): string { + const symbols = node.d.imports + .slice(0) + .sort((a, b) => this._compareSymbols(a, b)) + .map((symbol) => { + let symbolText = symbol.d.name.d.value; + if (symbol.d.alias) { + symbolText += ` as ${symbol.d.alias.d.value}`; + } + + return symbolText; + }); + + let cumulativeText = `from ${moduleName} import `; + if (node.d.isWildcardImport) { + return cumulativeText + '*'; + } + + const symbolText = symbols.join(', '); + if (cumulativeText.length + symbolText.length <= _maxLineLength) { + return cumulativeText + symbolText; + } + + // We need to split across multiple lines with parens. 
+ cumulativeText += '(' + this._parseResults.tokenizerOutput.predominantEndOfLineSequence; + + for (const symbol of symbols) { + cumulativeText += + this._parseResults.tokenizerOutput.predominantTabSequence + + symbol + + ',' + + this._parseResults.tokenizerOutput.predominantEndOfLineSequence; + } + + cumulativeText += ')'; + + return cumulativeText; + } + + private _compareSymbols(a: ImportFromAsNode, b: ImportFromAsNode) { + return a.d.name.d.value < b.d.name.d.value ? -1 : 1; + } +} diff --git a/python-parser/packages/pyright-internal/src/languageService/navigationUtils.ts b/python-parser/packages/pyright-internal/src/languageService/navigationUtils.ts new file mode 100644 index 00000000..dd1d522c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/navigationUtils.ts @@ -0,0 +1,32 @@ +/* + * navigationUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Helper functions for navigating files. + */ +import { Location } from 'vscode-languageserver-types'; +import { DocumentRange } from '../common/docRange'; +import { ReadOnlyFileSystem } from '../common/fileSystem'; +import { Uri } from '../common/uri/uri'; +import { convertUriToLspUriString } from '../common/uri/uriUtils'; + +export function canNavigateToFile(fs: ReadOnlyFileSystem, path: Uri): boolean { + return !fs.isInZip(path); +} + +export function convertDocumentRangesToLocation( + fs: ReadOnlyFileSystem, + ranges: DocumentRange[], + converter: (fs: ReadOnlyFileSystem, range: DocumentRange) => Location | undefined = convertDocumentRangeToLocation +): Location[] { + return ranges.map((range) => converter(fs, range)).filter((loc) => !!loc) as Location[]; +} + +export function convertDocumentRangeToLocation(fs: ReadOnlyFileSystem, range: DocumentRange): Location | undefined { + if (!canNavigateToFile(fs, range.uri)) { + return undefined; + } + + return Location.create(convertUriToLspUriString(fs, range.uri), range.range); +} diff --git 
a/python-parser/packages/pyright-internal/src/languageService/pullDiagnosticsDynamicFeature.ts b/python-parser/packages/pyright-internal/src/languageService/pullDiagnosticsDynamicFeature.ts new file mode 100644 index 00000000..cf09fe4d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/pullDiagnosticsDynamicFeature.ts @@ -0,0 +1,46 @@ +/* + * PullDiagnosticsDynamicFeature.ts + * Copyright (c) Microsoft Corporation. + * + * implementation of pull mode diagnostics feature registration + */ +import { + Connection, + DiagnosticRegistrationOptions, + Disposable, + DocumentDiagnosticRequest, +} from 'vscode-languageserver'; +import { DynamicFeature } from './dynamicFeature'; +import { ServerSettings } from '../common/languageServerInterface'; + +export class PullDiagnosticsDynamicFeature extends DynamicFeature { + private _workspaceSupport = false; + private _registered = false; + + constructor(private readonly _connection: Connection, private readonly _id: string = 'pyright') { + super('pull diagnostics'); + } + + override update(settings: ServerSettings): void { + // There is one caveat with these settings. These settings can be set + // per workspace, but these features apply to the entire language server. + // Therefore, if the user has set these settings differently per workspace, + // the last setting will take precedence. 
+ const workspaceSupport = settings.openFilesOnly === false; + if (this._workspaceSupport !== workspaceSupport || !this._registered) { + this._workspaceSupport = workspaceSupport; + this.register(); + } + } + + protected override registerFeature(): Promise { + this._registered = true; + const options: DiagnosticRegistrationOptions = { + interFileDependencies: true, + workspaceDiagnostics: this._workspaceSupport, + documentSelector: null, + identifier: this._id, + }; + return this._connection.client.register(DocumentDiagnosticRequest.type, options); + } +} diff --git a/python-parser/packages/pyright-internal/src/languageService/quickActions.ts b/python-parser/packages/pyright-internal/src/languageService/quickActions.ts new file mode 100644 index 00000000..d11b731b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/quickActions.ts @@ -0,0 +1,44 @@ +/* + * quickActions.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides support for miscellaneous quick actions. + */ + +import { CancellationToken } from 'vscode-languageserver'; + +import { Commands } from '../commands/commands'; +import { ProgramView } from '../common/extensibility'; +import { Uri } from '../common/uri/uri'; +import { ImportSorter } from './importSorter'; + +export function performQuickAction( + programView: ProgramView, + uri: Uri, + command: string, + args: any[], + token: CancellationToken +) { + const sourceFileInfo = programView.getSourceFileInfo(uri); + + // This command should be called only for open files, in which + // case we should have the file contents already loaded. + if (!sourceFileInfo || !sourceFileInfo.isOpenByClient) { + return []; + } + + // If we have no completed analysis job, there's nothing to do. 
+ const parseResults = programView.getParseResults(uri); + if (!parseResults) { + return []; + } + + if (command === Commands.orderImports) { + const importSorter = new ImportSorter(parseResults, token); + return importSorter.sort(); + } + + return []; +} diff --git a/python-parser/packages/pyright-internal/src/languageService/referencesProvider.ts b/python-parser/packages/pyright-internal/src/languageService/referencesProvider.ts new file mode 100644 index 00000000..8efa697b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/referencesProvider.ts @@ -0,0 +1,526 @@ +/* + * referencesProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Logic that finds all of the references to a symbol specified + * by a location within a file. + */ + +import { CancellationToken, Location, ResultProgressReporter } from 'vscode-languageserver'; + +import { Declaration, DeclarationType, isAliasDeclaration } from '../analyzer/declaration'; +import { getNameFromDeclaration } from '../analyzer/declarationUtils'; +import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; +import { isUserCode } from '../analyzer/sourceFileInfoUtils'; +import { Symbol } from '../analyzer/symbol'; +import { isVisibleExternally } from '../analyzer/symbolUtils'; +import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { maxTypeRecursionCount } from '../analyzer/types'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { appendArray } from '../common/collectionUtils'; +import { isDefined } from '../common/core'; +import { assertNever } from '../common/debug'; +import { DocumentRange } from '../common/docRange'; +import { ProgramView, ReferenceUseCase, SymbolUsageProvider } from '../common/extensibility'; +import { ReadOnlyFileSystem } from '../common/fileSystem'; +import { convertOffsetToPosition, convertPositionToOffset } from '../common/positionUtils'; +import 
{ ServiceKeys } from '../common/serviceKeys'; +import { isRangeInRange, Position, Range, TextRange } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { NameNode, ParseNode, ParseNodeType } from '../parser/parseNodes'; +import { ParseFileResults } from '../parser/parser'; +import { CollectionResult, DocumentSymbolCollector } from './documentSymbolCollector'; +import { convertDocumentRangesToLocation } from './navigationUtils'; + +export type ReferenceCallback = (locations: DocumentRange[]) => void; + +export interface LocationWithNode { + location: DocumentRange; + parentRange?: Range; + node: ParseNode; +} + +export class ReferencesResult { + private readonly _results: LocationWithNode[] = []; + + readonly nonImportDeclarations: Declaration[]; + + constructor( + readonly requiresGlobalSearch: boolean, + readonly nodeAtOffset: ParseNode, + readonly symbolNames: string[], + readonly declarations: Declaration[], + readonly useCase: ReferenceUseCase, + readonly providers: readonly SymbolUsageProvider[], + private readonly _reporter?: ReferenceCallback + ) { + // Filter out any import decls. but leave one with alias. + this.nonImportDeclarations = declarations.filter((d) => { + if (!isAliasDeclaration(d)) { + return true; + } + + // We must have alias and decl node that point to import statement. + if (!d.usesLocalName || !d.node) { + return false; + } + + // d.node can't be ImportFrom if usesLocalName is true. + // but we are doing this for type checker. + if (d.node.nodeType === ParseNodeType.ImportFrom) { + return false; + } + + // Extract alias for comparison (symbolNames.some can't know d is for an Alias). + const alias = d.node.d.alias?.d.value; + + // Check alias and what we are renaming is same thing. 
+ if (!symbolNames.some((s) => s === alias)) { + return false; + } + + return true; + }); + } + + get containsOnlyImportDecls(): boolean { + return this.declarations.length > 0 && this.nonImportDeclarations.length === 0; + } + + get locations(): readonly DocumentRange[] { + return this._results.map((l) => l.location); + } + + get results(): readonly LocationWithNode[] { + return this._results; + } + + addResults(...locs: LocationWithNode[]) { + if (locs.length === 0) { + return; + } + + if (this._reporter) { + this._reporter(locs.map((l) => l.location)); + } + + appendArray(this._results, locs); + } +} + +export class FindReferencesTreeWalker { + private _parseResults: ParseFileResults | undefined; + + constructor( + private _program: ProgramView, + private _fileUri: Uri, + private _referencesResult: ReferencesResult, + private _includeDeclaration: boolean, + private _cancellationToken: CancellationToken, + private readonly _createDocumentRange: ( + fileUri: Uri, + result: CollectionResult, + parseResults: ParseFileResults + ) => DocumentRange = FindReferencesTreeWalker.createDocumentRange + ) { + this._parseResults = this._program.getParseResults(this._fileUri); + } + + findReferences(rootNode = this._parseResults?.parserOutput.parseTree) { + const results: LocationWithNode[] = []; + if (!this._parseResults) { + return results; + } + + const collector = new DocumentSymbolCollector( + this._program, + this._referencesResult.symbolNames, + this._referencesResult.declarations, + rootNode!, + this._cancellationToken, + { + treatModuleInImportAndFromImportSame: true, + skipUnreachableCode: false, + useCase: this._referencesResult.useCase, + providers: this._referencesResult.providers, + } + ); + + for (const result of collector.collect()) { + // Is it the same symbol? 
+ if (this._includeDeclaration || result.node !== this._referencesResult.nodeAtOffset) { + results.push({ + node: result.node, + location: this._createDocumentRange(this._fileUri, result, this._parseResults), + parentRange: result.node.parent + ? { + start: convertOffsetToPosition( + result.node.parent.start, + this._parseResults.tokenizerOutput.lines + ), + end: convertOffsetToPosition( + TextRange.getEnd(result.node.parent), + this._parseResults.tokenizerOutput.lines + ), + } + : undefined, + }); + } + } + + return results; + } + + static createDocumentRange(fileUri: Uri, result: CollectionResult, parseResults: ParseFileResults): DocumentRange { + return { + uri: fileUri, + range: { + start: convertOffsetToPosition(result.range.start, parseResults.tokenizerOutput.lines), + end: convertOffsetToPosition(TextRange.getEnd(result.range), parseResults.tokenizerOutput.lines), + }, + }; + } +} + +export class ReferencesProvider { + constructor( + private _program: ProgramView, + private _token: CancellationToken, + private readonly _createDocumentRange?: ( + fileUri: Uri, + result: CollectionResult, + parseResults: ParseFileResults + ) => DocumentRange, + private readonly _convertToLocation?: (fs: ReadOnlyFileSystem, ranges: DocumentRange) => Location | undefined + ) { + // empty + } + + reportReferences( + fileUri: Uri, + position: Position, + includeDeclaration: boolean, + resultReporter?: ResultProgressReporter + ) { + const sourceFileInfo = this._program.getSourceFileInfo(fileUri); + if (!sourceFileInfo) { + return; + } + + const parseResults = this._program.getParseResults(fileUri); + if (!parseResults) { + return; + } + + const locations: Location[] = []; + const reporter: ReferenceCallback = resultReporter + ? 
(range) => + resultReporter.report( + convertDocumentRangesToLocation(this._program.fileSystem, range, this._convertToLocation) + ) + : (range) => + appendArray( + locations, + convertDocumentRangesToLocation(this._program.fileSystem, range, this._convertToLocation) + ); + + const invokedFromUserFile = isUserCode(sourceFileInfo); + const referencesResult = ReferencesProvider.getDeclarationForPosition( + this._program, + fileUri, + position, + reporter, + ReferenceUseCase.References, + this._token + ); + if (!referencesResult) { + return; + } + + // Do we need to do a global search as well? + if (!referencesResult.requiresGlobalSearch) { + this.addReferencesToResult(sourceFileInfo.uri, includeDeclaration, referencesResult); + } + + for (const curSourceFileInfo of this._program.getSourceFileInfoList()) { + throwIfCancellationRequested(this._token); + + // "Find all references" will only include references from user code + // unless the file is explicitly opened in the editor or it is invoked from non user files. + if (curSourceFileInfo.isOpenByClient || !invokedFromUserFile || isUserCode(curSourceFileInfo)) { + // See if the reference symbol's string is located somewhere within the file. + // If not, we can skip additional processing for the file. + const fileContents = curSourceFileInfo.contents; + if (!fileContents || referencesResult.symbolNames.some((s) => fileContents.indexOf(s) >= 0)) { + this.addReferencesToResult(curSourceFileInfo.uri, includeDeclaration, referencesResult); + } + + // This operation can consume significant memory, so check + // for situations where we need to discard the type cache. + this._program.handleMemoryHighUsage(); + } + } + + // Make sure to include declarations regardless where they are defined + // if includeDeclaration is set. 
+ if (includeDeclaration) { + for (const decl of referencesResult.declarations) { + throwIfCancellationRequested(this._token); + + if (referencesResult.locations.some((l) => l.uri.equals(decl.uri))) { + // Already included. + continue; + } + + const declFileInfo = this._program.getSourceFileInfo(decl.uri); + if (!declFileInfo) { + // The file the declaration belongs to doesn't belong to the program. + continue; + } + + const tempResult = new ReferencesResult( + referencesResult.requiresGlobalSearch, + referencesResult.nodeAtOffset, + referencesResult.symbolNames, + referencesResult.declarations, + referencesResult.useCase, + referencesResult.providers + ); + + this.addReferencesToResult(declFileInfo.uri, includeDeclaration, tempResult); + for (const result of tempResult.results) { + // Include declarations only. And throw away any references + if (result.location.uri.equals(decl.uri) && isRangeInRange(decl.range, result.location.range)) { + referencesResult.addResults(result); + } + } + } + } + + // Deduplicate locations before returning them. 
+ const locationsSet = new Set(); + const dedupedLocations: Location[] = []; + for (const loc of locations) { + const key = `${loc.uri.toString()}:${loc.range.start.line}:${loc.range.start.character}`; + if (!locationsSet.has(key)) { + locationsSet.add(key); + dedupedLocations.push(loc); + } + } + + return dedupedLocations; + } + + addReferencesToResult(fileUri: Uri, includeDeclaration: boolean, referencesResult: ReferencesResult): void { + const parseResults = this._program.getParseResults(fileUri); + if (!parseResults) { + return; + } + + const refTreeWalker = new FindReferencesTreeWalker( + this._program, + fileUri, + referencesResult, + includeDeclaration, + this._token, + this._createDocumentRange + ); + + referencesResult.addResults(...refTreeWalker.findReferences()); + } + + static getDeclarationForNode( + program: ProgramView, + fileUri: Uri, + node: NameNode, + reporter: ReferenceCallback | undefined, + useCase: ReferenceUseCase, + token: CancellationToken + ) { + throwIfCancellationRequested(token); + + const declarations = DocumentSymbolCollector.getDeclarationsForNode(program, node, token, { + resolveLocalNames: false, + }); + + if (declarations.length === 0) { + return undefined; + } + + const requiresGlobalSearch = isVisibleOutside(program.evaluator!, fileUri, node, declarations); + const symbolNames = new Set(declarations.map((d) => getNameFromDeclaration(d)!).filter((n) => !!n)); + symbolNames.add(node.d.value); + + const providers = (program.serviceProvider.tryGet(ServiceKeys.symbolUsageProviderFactory) ?? []) + .map((f) => f.tryCreateProvider(useCase, declarations, token)) + .filter(isDefined); + + // Check whether we need to add new symbol names and declarations. 
+ providers.forEach((p) => { + p.appendSymbolNamesTo(symbolNames); + p.appendDeclarationsTo(declarations); + }); + + return new ReferencesResult( + requiresGlobalSearch, + node, + Array.from(symbolNames.values()), + declarations, + useCase, + providers, + reporter + ); + } + + static getDeclarationForPosition( + program: ProgramView, + fileUri: Uri, + position: Position, + reporter: ReferenceCallback | undefined, + useCase: ReferenceUseCase, + token: CancellationToken + ): ReferencesResult | undefined { + throwIfCancellationRequested(token); + const parseResults = program.getParseResults(fileUri); + if (!parseResults) { + return undefined; + } + + const offset = convertPositionToOffset(position, parseResults.tokenizerOutput.lines); + if (offset === undefined) { + return undefined; + } + + const node = ParseTreeUtils.findNodeByOffset(parseResults.parserOutput.parseTree, offset); + if (node === undefined) { + return undefined; + } + + // If this isn't a name node, there are no references to be found. + if (node.nodeType !== ParseNodeType.Name) { + return undefined; + } + + return this.getDeclarationForNode(program, fileUri, node, reporter, useCase, token); + } +} + +function isVisibleOutside(evaluator: TypeEvaluator, currentUri: Uri, node: NameNode, declarations: Declaration[]) { + const result = evaluator.lookUpSymbolRecursive(node, node.d.value, /* honorCodeFlow */ false); + if (result && !isExternallyVisible(result.symbol)) { + return false; + } + + // A symbol's effective external visibility check is not enough to determine whether + // the symbol is visible to the outside. Something like the local variable inside + // a function will still say it is externally visible even if it can't be accessed from another module. + // So, we also need to determine whether the symbol is declared within an evaluation scope + // that is within the current file and cannot be imported directly from other modules. 
+ return declarations.some((decl) => { + // If the declaration is outside of this file, a global search is needed. + if (!decl.uri.equals(currentUri)) { + return true; + } + + const evalScope = ParseTreeUtils.getEvaluationScopeNode(decl.node).node; + + // If the declaration is at the module level or a class level, it can be seen + // outside of the current module, so a global search is needed. + if (evalScope.nodeType === ParseNodeType.Module || evalScope.nodeType === ParseNodeType.Class) { + return true; + } + + // If the name node is a member variable, we need to do a global search. + if (decl.node?.parent?.nodeType === ParseNodeType.MemberAccess && decl.node === decl.node.parent.d.member) { + return true; + } + + return false; + }); + + // Return true if the symbol is visible outside of current module, false if not. + function isExternallyVisible(symbol: Symbol, recursionCount = 0): boolean { + if (recursionCount > maxTypeRecursionCount) { + return false; + } + + recursionCount++; + + if (!isVisibleExternally(symbol)) { + return false; + } + + return symbol.getDeclarations().reduce((isVisible, decl) => { + if (!isVisible) { + return false; + } + + switch (decl.type) { + case DeclarationType.Alias: + case DeclarationType.Intrinsic: + case DeclarationType.SpecialBuiltInClass: + return isVisible; + + case DeclarationType.Class: + case DeclarationType.Function: + return isVisible && isContainerExternallyVisible(decl.node.d.name, recursionCount); + + case DeclarationType.Param: + return isVisible && isContainerExternallyVisible(decl.node.d.name!, recursionCount); + + case DeclarationType.TypeParam: + return false; + + case DeclarationType.Variable: + case DeclarationType.TypeAlias: { + if (decl.node.nodeType === ParseNodeType.Name) { + return isVisible && isContainerExternallyVisible(decl.node, recursionCount); + } + + // Symbol without name is not visible outside. 
+ return false; + } + + default: + assertNever(decl); + } + }, /* visible */ true); + } + + // Return true if the scope that contains the specified node is visible + // outside of the current module, false if not. + function isContainerExternallyVisible(node: NameNode, recursionCount: number) { + let scopingNodeInfo = ParseTreeUtils.getEvaluationScopeNode(node); + let scopingNode = scopingNodeInfo.node; + + // If this is a type parameter scope, it acts as a proxy for + // its outer (parent) scope. + while (scopingNodeInfo.useProxyScope && scopingNodeInfo.node.parent) { + scopingNodeInfo = ParseTreeUtils.getEvaluationScopeNode(scopingNodeInfo.node.parent); + scopingNode = scopingNodeInfo.node; + } + + switch (scopingNode.nodeType) { + case ParseNodeType.Class: + case ParseNodeType.Function: { + const name = scopingNode.d.name; + const result = evaluator.lookUpSymbolRecursive(name, name.d.value, /* honorCodeFlow */ false); + return result ? isExternallyVisible(result.symbol, recursionCount) : true; + } + + case ParseNodeType.Lambda: + case ParseNodeType.Comprehension: + case ParseNodeType.TypeParameterList: + // Symbols in this scope can't be visible outside. + return false; + + case ParseNodeType.Module: + return true; + + default: + assertNever(scopingNode); + } + } +} diff --git a/python-parser/packages/pyright-internal/src/languageService/renameProvider.ts b/python-parser/packages/pyright-internal/src/languageService/renameProvider.ts new file mode 100644 index 00000000..c66ccc57 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/renameProvider.ts @@ -0,0 +1,203 @@ +/* + * renameProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Logic that rename identifier on the given position and its references. 
+ */ + +import { CancellationToken, WorkspaceEdit } from 'vscode-languageserver'; + +import { isUserCode } from '../analyzer/sourceFileInfoUtils'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { assertNever } from '../common/debug'; +import { FileEditAction } from '../common/editAction'; +import { ProgramView, ReferenceUseCase } from '../common/extensibility'; +import { convertTextRangeToRange } from '../common/positionUtils'; +import { Position, Range } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { convertToWorkspaceEdit } from '../common/workspaceEditUtils'; +import { ReferencesProvider, ReferencesResult } from '../languageService/referencesProvider'; +import { ParseFileResults } from '../parser/parser'; + +export class RenameProvider { + private readonly _parseResults: ParseFileResults | undefined; + + constructor( + private _program: ProgramView, + private _fileUri: Uri, + private _position: Position, + private _token: CancellationToken + ) { + this._parseResults = this._program.getParseResults(this._fileUri); + } + + canRenameSymbol(isDefaultWorkspace: boolean, isUntitled: boolean): Range | null { + throwIfCancellationRequested(this._token); + if (!this._parseResults) { + return null; + } + + const referencesResult = this._getReferenceResult(); + if (!referencesResult) { + return null; + } + + const renameMode = RenameProvider.getRenameSymbolMode( + this._program, + this._fileUri, + referencesResult, + isDefaultWorkspace, + isUntitled + ); + if (renameMode === 'none') { + return null; + } + + // Return the range of the symbol. 
+ return convertTextRangeToRange(referencesResult.nodeAtOffset, this._parseResults.tokenizerOutput.lines); + } + + renameSymbol(newName: string, isDefaultWorkspace: boolean, isUntitled: boolean): WorkspaceEdit | null { + throwIfCancellationRequested(this._token); + if (!this._parseResults) { + return null; + } + + const referencesResult = this._getReferenceResult(); + if (!referencesResult) { + return null; + } + + const referenceProvider = new ReferencesProvider(this._program, this._token); + const renameMode = RenameProvider.getRenameSymbolMode( + this._program, + this._fileUri, + referencesResult, + isDefaultWorkspace, + isUntitled + ); + + switch (renameMode) { + case 'singleFileMode': + referenceProvider.addReferencesToResult(this._fileUri, /* includeDeclaration */ true, referencesResult); + break; + + case 'multiFileMode': { + for (const curSourceFileInfo of this._program.getSourceFileInfoList()) { + // Make sure we only add user code to the references to prevent us + // from accidentally changing third party library or type stub. + if (isUserCode(curSourceFileInfo)) { + // Make sure searching symbol name exists in the file. + const content = curSourceFileInfo.contents ?? ''; + if (!referencesResult.symbolNames.some((s) => content.search(s) >= 0)) { + continue; + } + + referenceProvider.addReferencesToResult( + curSourceFileInfo.uri, + /* includeDeclaration */ true, + referencesResult + ); + } + + // This operation can consume significant memory, so check + // for situations where we need to discard the type cache. + this._program.handleMemoryHighUsage(); + } + break; + } + + case 'none': + // Rename is not allowed. + // ex) rename symbols from libraries. + return null; + + default: + assertNever(renameMode); + } + + const edits: FileEditAction[] = []; + referencesResult.results.forEach((result) => { + // Special case the renames of keyword arguments. 
+ edits.push({ + fileUri: result.location.uri, + range: result.location.range, + replacementText: newName, + }); + }); + + return convertToWorkspaceEdit(this._program.fileSystem, { edits, fileOperations: [] }); + } + + static getRenameSymbolMode( + program: ProgramView, + fileUri: Uri, + referencesResult: ReferencesResult, + isDefaultWorkspace: boolean, + isUntitled: boolean + ) { + const sourceFileInfo = program.getSourceFileInfo(fileUri)!; + + // We have 2 different cases + // Single file mode. + // 1. rename on default workspace (ex, standalone file mode). + // 2. rename local symbols. + // 3. rename symbols defined in the non user open file. + // + // and Multi file mode. + // 1. rename public symbols defined in user files on regular workspace (ex, open folder mode). + const userFile = isUserCode(sourceFileInfo); + if ( + isDefaultWorkspace || + (userFile && !referencesResult.requiresGlobalSearch) || + (!userFile && + sourceFileInfo.isOpenByClient && + referencesResult.declarations.every((d) => program.getSourceFileInfo(d.uri) === sourceFileInfo)) + ) { + return 'singleFileMode'; + } + + if (referencesResult.declarations.every((d) => isUserCode(program.getSourceFileInfo(d.uri)))) { + return 'multiFileMode'; + } + + // Rename is not allowed. + // ex) rename symbols from libraries. + return 'none'; + } + + private _getReferenceResult() { + const referencesResult = ReferencesProvider.getDeclarationForPosition( + this._program, + this._fileUri, + this._position, + /* reporter */ undefined, + ReferenceUseCase.Rename, + this._token + ); + if (!referencesResult) { + return undefined; + } + + if (referencesResult.containsOnlyImportDecls) { + return undefined; + } + + if (referencesResult.nonImportDeclarations.length === 0) { + // There is no symbol we can rename. + return undefined; + } + + // Use declarations that doesn't contain import decls. 
+ return new ReferencesResult( + referencesResult.requiresGlobalSearch, + referencesResult.nodeAtOffset, + referencesResult.symbolNames, + referencesResult.nonImportDeclarations, + referencesResult.useCase, + referencesResult.providers + ); + } +} diff --git a/python-parser/packages/pyright-internal/src/languageService/signatureHelpProvider.ts b/python-parser/packages/pyright-internal/src/languageService/signatureHelpProvider.ts new file mode 100644 index 00000000..92cf8547 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/signatureHelpProvider.ts @@ -0,0 +1,379 @@ +/* + * signatureHelpProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Logic that maps a position within a Python call node into info + * that can be presented to the developer to help fill in the remaining + * arguments for the call. + */ + +import { + CancellationToken, + MarkupContent, + MarkupKind, + ParameterInformation, + SignatureHelp, + SignatureHelpContext, + SignatureHelpTriggerKind, + SignatureInformation, +} from 'vscode-languageserver'; + +import { getFileInfo } from '../analyzer/analyzerNodeInfo'; +import { getParamListDetails, ParamKind } from '../analyzer/parameterUtils'; +import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; +import { getCallNodeAndActiveParamIndex } from '../analyzer/parseTreeUtils'; +import { SourceMapper } from '../analyzer/sourceMapper'; +import { isBuiltInModule } from '../analyzer/typeDocStringUtils'; +import { CallSignature, TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { PrintTypeFlags } from '../analyzer/typePrinter'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { DocStringService } from '../common/docStringService'; +import { ProgramView } from '../common/extensibility'; +import { convertPositionToOffset } from '../common/positionUtils'; +import { Position } from '../common/textRange'; +import { Uri 
} from '../common/uri/uri'; +import { CallNode, NameNode, ParseNodeType } from '../parser/parseNodes'; +import { ParseFileResults } from '../parser/parser'; +import { Tokenizer } from '../parser/tokenizer'; +import { getDocumentationPartsForTypeAndDecl, getFunctionDocStringFromType } from './tooltipUtils'; + +export class SignatureHelpProvider { + private readonly _parseResults: ParseFileResults | undefined; + private readonly _sourceMapper: SourceMapper; + + constructor( + private _program: ProgramView, + private _fileUri: Uri, + private _position: Position, + private _format: MarkupKind, + private _hasSignatureLabelOffsetCapability: boolean, + private _hasActiveParameterCapability: boolean, + private _context: SignatureHelpContext | undefined, + private _docStringService: DocStringService, + private _token: CancellationToken + ) { + this._parseResults = this._program.getParseResults(this._fileUri); + this._sourceMapper = this._program.getSourceMapper(this._fileUri, this._token, /* mapCompiled */ true); + } + + getSignatureHelp(): SignatureHelp | undefined { + return this._convert(this._getSignatureHelp()); + } + + private get _evaluator(): TypeEvaluator { + return this._program.evaluator!; + } + + private _getSignatureHelp(): SignatureHelpResults | undefined { + throwIfCancellationRequested(this._token); + if (!this._parseResults) { + return undefined; + } + + const offset = convertPositionToOffset(this._position, this._parseResults.tokenizerOutput.lines); + if (offset === undefined) { + return undefined; + } + + let node = ParseTreeUtils.findNodeByOffset(this._parseResults.parserOutput.parseTree, offset); + + // See if we can get to a "better" node by backing up a few columns. + // A "better" node is defined as one that's deeper than the current + // node. + const initialNode = node; + const initialDepth = node ? 
ParseTreeUtils.getNodeDepth(node) : 0; + let curOffset = offset - 1; + while (curOffset >= 0) { + // Don't scan back across a comma because commas separate + // arguments, and we don't want to mistakenly think that we're + // pointing to a previous argument. Don't scan across open parenthesis so that + // we don't go into the wrong function call + const ch = this._parseResults.text.substr(curOffset, 1); + if (ch === ',' || ch === '(') { + break; + } + const curNode = ParseTreeUtils.findNodeByOffset(this._parseResults.parserOutput.parseTree, curOffset); + if (curNode && curNode !== initialNode) { + if (ParseTreeUtils.getNodeDepth(curNode) > initialDepth) { + node = curNode; + } + break; + } + + curOffset--; + } + + if (node === undefined) { + return undefined; + } + + const callInfo = getCallNodeAndActiveParamIndex(node, offset, this._parseResults.tokenizerOutput.tokens); + if (!callInfo) { + return; + } + + const callSignatureInfo = this._evaluator.getCallSignatureInfo( + callInfo.callNode, + callInfo.activeIndex, + callInfo.activeOrFake + ); + if (!callSignatureInfo) { + return undefined; + } + + const signatures = callSignatureInfo.signatures.map((sig) => + this._makeSignature(callSignatureInfo.callNode, sig) + ); + + const callHasParameters = !!callSignatureInfo.callNode.d.args?.length; + return { + signatures, + callHasParameters, + }; + } + + private _convert(signatureHelpResults: SignatureHelpResults | undefined) { + if (!signatureHelpResults) { + return undefined; + } + + const signatures = signatureHelpResults.signatures.map((sig) => { + let paramInfo: ParameterInformation[] = []; + if (sig.parameters) { + paramInfo = sig.parameters.map((param) => { + return { + label: this._hasSignatureLabelOffsetCapability + ? [param.startOffset, param.endOffset] + : param.text, + documentation: { + kind: this._format, + value: param.documentation ?? 
'', + }, + }; + }); + } + + const sigInfo = SignatureInformation.create(sig.label, /* documentation */ undefined, ...paramInfo); + if (sig.documentation !== undefined) { + sigInfo.documentation = sig.documentation; + } + + if (sig.activeParameter !== undefined) { + sigInfo.activeParameter = sig.activeParameter; + } + return sigInfo; + }); + + // A signature is active if it contains an active parameter, + // or if both the signature and its invocation have no parameters. + const isActive = (sig: SignatureInformation) => + sig.activeParameter !== undefined || (!signatureHelpResults.callHasParameters && !sig.parameters?.length); + + let activeSignature: number | undefined = signatures.findIndex(isActive); + if (activeSignature === -1) { + activeSignature = undefined; + } + + let activeParameter = activeSignature !== undefined ? signatures[activeSignature].activeParameter! : undefined; + + // Check if we should reuse the user's signature selection. If the retrigger was not "invoked" + // (i.e., the signature help call was automatically generated by the client due to some navigation + // or text change), check to see if the previous signature is still "active". If so, we mark it as + // active in our response. + // + // This isn't a perfect method. For nested calls, we can't tell when we are moving between them. + // Ideally, we would include a token in the signature help responses to compare later, allowing us + // to know when the user's navigated to a nested call (and therefore the old signature's info does + // not apply), but for now manually retriggering the signature help will work around the issue. 
+ if (this._context?.isRetrigger && this._context.triggerKind !== SignatureHelpTriggerKind.Invoked) { + const prevActiveSignature = this._context.activeSignatureHelp?.activeSignature; + if (prevActiveSignature !== undefined && prevActiveSignature < signatures.length) { + const sig = signatures[prevActiveSignature]; + if (isActive(sig)) { + activeSignature = prevActiveSignature; + activeParameter = sig.activeParameter ?? undefined; + } + } + } + + if (this._hasActiveParameterCapability || activeSignature === undefined) { + // If there is no active parameter, then we want the client to not highlight anything. + // Unfortunately, the LSP spec says that "undefined" or "out of bounds" values should be + // treated as 0, which is the first parameter. That's not what we want, but thankfully + // VS Code (and potentially other clients) choose to handle out of bounds values by + // not highlighting them, which is what we want. + // + // The spec defines activeParameter as uinteger, so use the maximum length of any + // signature's parameter list to ensure that the value is always out of range. + // + // We always set this even if some signature has an active parameter, as this + // value is used as the fallback for signatures that don't explicitly specify an + // active parameter (and we use "undefined" to mean "no active parameter"). + // + // We could apply this hack to each individual signature such that they all specify + // activeParameter, but that would make it more difficult to determine which actually + // are active when comparing, and we already have to set this for clients which don't + // support per-signature activeParameter. + // + // See: + // - https://github.com/microsoft/language-server-protocol/issues/1271 + // - https://github.com/microsoft/pyright/pull/1783 + activeParameter = Math.max(...signatures.map((s) => s.parameters?.length ?? 
0)); + } + + return { signatures, activeSignature, activeParameter }; + } + + private _makeSignature(callNode: CallNode, signature: CallSignature): SignatureInfo { + const functionType = signature.type; + const stringParts = this._evaluator.printFunctionParts(functionType, PrintTypeFlags.ExpandTypedDictArgs); + const parameters: ParamInfo[] = []; + const functionDocString = + getFunctionDocStringFromType(functionType, this._sourceMapper, this._evaluator) ?? + this._getDocStringFromCallNode(callNode); + const fileInfo = getFileInfo(callNode); + const paramListDetails = getParamListDetails(functionType); + + let label = '('; + let isFirstParamInLabel = true; + let activeParameter: number | undefined; + const params = functionType.shared.parameters; + + stringParts[0].forEach((paramString: string, paramIndex) => { + let paramName = ''; + if (paramIndex < params.length) { + paramName = params[paramIndex].name || ''; + } else if (params.length > 0) { + paramName = params[params.length - 1].name || ''; + } + + const isKeywordOnly = paramListDetails.params.some( + (param) => param.param.name === paramName && param.kind === ParamKind.Keyword + ); + + if (!isKeywordOnly || Tokenizer.isPythonIdentifier(paramName)) { + if (!isFirstParamInLabel) { + label += ', '; + } + isFirstParamInLabel = false; + + parameters.push({ + startOffset: label.length, + endOffset: label.length + paramString.length, + text: paramString, + }); + + // Name match for active parameter. The set of parameters from the function + // may not match the actual string output from the typeEvaluator (kwargs for TypedDict is an example). 
+ if (paramName && signature.activeParam && signature.activeParam.name === paramName) { + activeParameter = parameters.length - 1; + } + + label += paramString; + } + }); + + label += ') -> ' + stringParts[1]; + + if (signature.activeParam && activeParameter === undefined) { + activeParameter = params.indexOf(signature.activeParam); + if (activeParameter === -1) { + activeParameter = undefined; + } + } + + // Extract the documentation only for the active parameter. + if (activeParameter !== undefined) { + const activeParam = parameters[activeParameter]; + if (activeParam) { + activeParam.documentation = this._docStringService.extractParameterDocumentation( + functionDocString || '', + params[activeParameter].name || '', + this._format + ); + } + } + + const sigInfo: SignatureInfo = { + label, + parameters, + activeParameter, + }; + + if (functionDocString) { + if (this._format === MarkupKind.Markdown) { + sigInfo.documentation = { + kind: MarkupKind.Markdown, + value: this._docStringService.convertDocStringToMarkdown( + functionDocString, + isBuiltInModule(fileInfo?.fileUri) + ), + }; + } else { + sigInfo.documentation = { + kind: MarkupKind.PlainText, + value: this._docStringService.convertDocStringToPlainText(functionDocString), + }; + } + } + + return sigInfo; + } + + private _getDocStringFromCallNode(callNode: CallNode): string | undefined { + // This is a heuristic to see whether we can get some docstring + // from call node when all other methods failed. + // It only works if call is off a name node. + let name: NameNode | undefined; + const expr = callNode.d.leftExpr; + if (expr.nodeType === ParseNodeType.Name) { + name = expr; + } else if (expr.nodeType === ParseNodeType.MemberAccess) { + name = expr.d.member; + } + + if (!name) { + return undefined; + } + + for (const decl of this._evaluator.getDeclInfoForNameNode(name)?.decls ?? 
[]) { + const resolveDecl = this._evaluator.resolveAliasDeclaration(decl, /* resolveLocalNames */ true); + if (!resolveDecl) { + continue; + } + + const type = this._evaluator.getType(name); + if (!type) { + continue; + } + + const part = getDocumentationPartsForTypeAndDecl(this._sourceMapper, type, resolveDecl, this._evaluator); + if (part) { + return part; + } + } + + return undefined; + } +} + +interface ParamInfo { + startOffset: number; + endOffset: number; + text: string; + documentation?: string | undefined; +} + +interface SignatureInfo { + label: string; + documentation?: MarkupContent | undefined; + parameters?: ParamInfo[] | undefined; + activeParameter?: number | undefined; +} + +interface SignatureHelpResults { + signatures: SignatureInfo[]; + callHasParameters: boolean; +} diff --git a/python-parser/packages/pyright-internal/src/languageService/symbolIndexer.ts b/python-parser/packages/pyright-internal/src/languageService/symbolIndexer.ts new file mode 100644 index 00000000..b0351d2f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/symbolIndexer.ts @@ -0,0 +1,215 @@ +/* + * symbolIndexer.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Logic that collect all symbol decl information for a specified source file. 
+ */ + +import { CancellationToken, CompletionItemKind, SymbolKind } from 'vscode-languageserver'; + +import { AnalyzerFileInfo } from '../analyzer/analyzerFileInfo'; +import * as AnalyzerNodeInfo from '../analyzer/analyzerNodeInfo'; +import { AliasDeclaration, Declaration, DeclarationType } from '../analyzer/declaration'; +import { getLastTypedDeclarationForSymbol, isVisibleExternally } from '../analyzer/symbolUtils'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { getSymbolKind } from '../common/lspUtils'; +import { convertOffsetsToRange, convertTextRangeToRange } from '../common/positionUtils'; +import { Range } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { ParseNodeType } from '../parser/parseNodes'; +import { ParseFileResults } from '../parser/parser'; +import { convertSymbolKindToCompletionItemKind } from './autoImporter'; + +export interface IndexOptions { + includeAliases?: boolean; +} + +export interface IndexAliasData { + readonly originalName: string; + readonly moduleUri: Uri; + readonly kind: SymbolKind; + readonly itemKind?: CompletionItemKind | undefined; +} + +export const enum IndexSymbolVisibilityFlags { + None = 0, + + // Indicates that the symbol is visible externally + ExternallyVisible = 1 << 0, + + // Indicates that the symbol is included in `__all__` + InDunderAll = 1 << 1, + + // Indicates that the symbol is redundantly aliased, such as + // in 'from module import xx as xx' or 'import yy as yy' + RedundantAlias = 1 << 2, +} + +export function isIndexSymbolVisibleFlagSet(data: IndexSymbolData, flag: IndexSymbolVisibilityFlags) { + return !!(data.visibilityFlags & flag); +} + +export interface IndexSymbolData { + readonly name: string; + readonly visibilityFlags: IndexSymbolVisibilityFlags; + readonly kind: SymbolKind; + readonly itemKind?: CompletionItemKind | undefined; + readonly alias?: IndexAliasData | undefined; + readonly range?: Range | undefined; + readonly 
selectionRange?: Range | undefined; + readonly children?: IndexSymbolData[] | undefined; +} + +export class SymbolIndexer { + static indexSymbols( + fileInfo: AnalyzerFileInfo, + parseResults: ParseFileResults, + indexOptions: IndexOptions, + token: CancellationToken + ): IndexSymbolData[] { + // Here are the rule of what symbols are indexed for a file. + // 1. If it is a stub file, we index every public symbols defined by "https://www.python.org/dev/peps/pep-0484/#stub-files" + // 2. If it is a py file and it is py.typed package, we index public symbols + // defined by "https://github.com/microsoft/pyright/blob/main/docs/typed-libraries.md#library-interface" + // 3. If it is a py file and it is not py.typed package, we index only symbols that appear in + // __all__ to make sure we don't include too many symbols in the index. + + const indexSymbolData: IndexSymbolData[] = []; + collectSymbolIndexData( + fileInfo, + parseResults, + parseResults.parserOutput.parseTree, + indexOptions, + indexSymbolData, + token + ); + + return indexSymbolData; + } +} + +function collectSymbolIndexData( + fileInfo: AnalyzerFileInfo, + parseResults: ParseFileResults, + node: AnalyzerNodeInfo.ScopedNode, + indexOptions: IndexOptions, + indexSymbolData: IndexSymbolData[], + token: CancellationToken +) { + throwIfCancellationRequested(token); + + const scope = AnalyzerNodeInfo.getScope(node); + if (!scope) { + return; + } + + const symbolTable = scope.symbolTable; + symbolTable.forEach((symbol, name) => { + if (symbol.isIgnoredForProtocolMatch()) { + return; + } + + // Prefer declarations with a defined type. + let declaration = getLastTypedDeclarationForSymbol(symbol); + + // Fall back to declarations without a type. 
+ if (!declaration && symbol.hasDeclarations()) { + declaration = symbol.getDeclarations()[0]; + } + + if (!declaration) { + return; + } + + if (DeclarationType.Alias === declaration.type && !shouldAliasBeIndexed(declaration, indexOptions)) { + return; + } + + // We rely on ExternallyHidden flag to determine what + // symbols should be public (included in the index) + collectSymbolIndexDataForName( + fileInfo, + parseResults, + declaration, + indexOptions, + isVisibleExternally(symbol), + name, + indexSymbolData, + token + ); + }); +} + +function collectSymbolIndexDataForName( + fileInfo: AnalyzerFileInfo, + parseResults: ParseFileResults, + declaration: Declaration, + indexOptions: IndexOptions, + externallyVisible: boolean, + name: string, + indexSymbolData: IndexSymbolData[], + token: CancellationToken +) { + const symbolKind = getSymbolKind(declaration, undefined, name); + if (symbolKind === undefined) { + return; + } + + let selectionRange = declaration.range; + let range = selectionRange; + const children: IndexSymbolData[] = []; + + if (declaration.type === DeclarationType.Class || declaration.type === DeclarationType.Function) { + collectSymbolIndexData(fileInfo, parseResults, declaration.node, indexOptions, children, token); + + range = convertOffsetsToRange( + declaration.node.start, + declaration.node.start + declaration.node.length, + parseResults.tokenizerOutput.lines + ); + } + + if (DeclarationType.Alias === declaration.type) { + if (!shouldAliasBeIndexed(declaration, indexOptions)) { + return; + } + + // The default range for a module alias is the first character of the module's file. + // Replace that with the range of the alias token. 
+ if (declaration.node.nodeType === ParseNodeType.ImportAs && declaration.node.d.alias) { + selectionRange = range = convertTextRangeToRange( + declaration.node.d.alias.d.token, + parseResults.tokenizerOutput.lines + ); + } + } + + const data: IndexSymbolData = { + name, + visibilityFlags: IndexSymbolVisibilityFlags.ExternallyVisible, + kind: symbolKind, + itemKind: convertSymbolKindToCompletionItemKind(symbolKind), + alias: undefined, + range: range, + selectionRange: selectionRange, + children: children, + }; + + indexSymbolData.push(data); +} + +function shouldAliasBeIndexed(declaration: AliasDeclaration, indexOptions: IndexOptions) { + if (!indexOptions.includeAliases) { + return false; + } + + // Only allow import statements with an alias (`import module as alias` or + // `from module import symbol as alias`), since the alias is a symbol specific + // to the importing file. + return ( + (declaration.node.nodeType === ParseNodeType.ImportAs || + declaration.node.nodeType === ParseNodeType.ImportFromAs) && + declaration.node.d.alias !== undefined + ); +} diff --git a/python-parser/packages/pyright-internal/src/languageService/tooltipUtils.ts b/python-parser/packages/pyright-internal/src/languageService/tooltipUtils.ts new file mode 100644 index 00000000..a7471e02 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/tooltipUtils.ts @@ -0,0 +1,533 @@ +/* + * tooltipUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Helper functions for formatting text that can appear in hover text, + * completion suggestions, etc. 
+ */ + +import { getBoundCallMethod } from '../analyzer/constructors'; +import { Declaration, DeclarationType, VariableDeclaration } from '../analyzer/declaration'; +import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; +import { SourceMapper } from '../analyzer/sourceMapper'; +import { Symbol } from '../analyzer/symbol'; +import { + getClassDocString, + getFunctionDocStringInherited, + getModuleDocString, + getModuleDocStringFromUris, + getOverloadedDocStringsInherited, + getPropertyDocStringInherited, + getVariableDocString, +} from '../analyzer/typeDocStringUtils'; +import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { MemberAccessFlags, lookUpClassMember } from '../analyzer/typeUtils'; +import { + ClassType, + FunctionType, + OverloadedType, + Type, + TypeBase, + TypeCategory, + UnknownType, + combineTypes, + isClassInstance, + isFunction, + isFunctionOrOverloaded, + isInstantiableClass, + isModule, + isOverloaded, +} from '../analyzer/types'; +import { SignatureDisplayType } from '../common/configOptions'; +import { isDefined } from '../common/core'; +import { ExpressionNode, NameNode, ParseNode, ParseNodeType } from '../parser/parseNodes'; + +// The number of spaces to indent each parameter, after moving to a newline in tooltips. +const functionParamIndentOffset = 4; + +export function getToolTipForType( + type: Type, + label: string, + name: string, + evaluator: TypeEvaluator, + isProperty: boolean, + functionSignatureDisplay: SignatureDisplayType, + typeNode?: ExpressionNode +): string { + // Support __call__ method for class instances to show the signature of the method + if (type.category === TypeCategory.Class && isClassInstance(type) && typeNode) { + const callMethodResult = getBoundCallMethod(evaluator, typeNode, type); + if ( + callMethodResult?.type.category === TypeCategory.Function || + callMethodResult?.type.category === TypeCategory.Overloaded + ) { + // Eliminate overloads that are not applicable. 
+ const methodType = limitOverloadBasedOnCall(evaluator, callMethodResult.type, typeNode); + if (methodType) { + type = methodType; + } + } + } + let signatureString = ''; + if (isOverloaded(type)) { + signatureString = label.length > 0 ? `(${label})\n` : ''; + signatureString += `${getOverloadedTooltip(type, evaluator, functionSignatureDisplay)}`; + } else if (isFunction(type)) { + signatureString = `${getFunctionTooltip(label, name, type, evaluator, isProperty, functionSignatureDisplay)}`; + } else { + signatureString = label.length > 0 ? `(${label}) ` : ''; + signatureString += `${name}: ${evaluator.printType(type)}`; + } + + return signatureString; +} + +// 70 is vscode's default hover width size. +export function getOverloadedTooltip( + type: OverloadedType, + evaluator: TypeEvaluator, + functionSignatureDisplay: SignatureDisplayType, + columnThreshold = 70 +) { + let content = ''; + const overloads = OverloadedType.getOverloads(type).map((o) => + getFunctionTooltip( + /* label */ '', + o.shared.name, + o, + evaluator, + /* isProperty */ false, + functionSignatureDisplay + ) + ); + + for (let i = 0; i < overloads.length; i++) { + if (i !== 0 && overloads[i].length > columnThreshold && overloads[i - 1].length <= columnThreshold) { + content += '\n'; + } + + content += overloads[i] + `: ...`; + + if (i < overloads.length - 1) { + content += '\n'; + if (overloads[i].length > columnThreshold) { + content += '\n'; + } + } + } + + return content; +} + +export function getFunctionTooltip( + label: string, + functionName: string, + type: FunctionType, + evaluator: TypeEvaluator, + isProperty = false, + functionSignatureDisplay: SignatureDisplayType +) { + const labelFormatted = label.length === 0 ? '' : `(${label}) `; + const indentStr = + functionSignatureDisplay === SignatureDisplayType.formatted ? 
'\n' + ' '.repeat(functionParamIndentOffset) : ''; + const funcParts = evaluator.printFunctionParts(type); + const paramSignature = `${formatSignature(funcParts, indentStr, functionSignatureDisplay)} -> ${funcParts[1]}`; + + if (TypeBase.isInstantiable(type)) { + return `${labelFormatted}${functionName}: type[${paramSignature}]`; + } + + const sep = isProperty ? ': ' : ''; + let defKeyword = ''; + if (!isProperty) { + defKeyword = 'def '; + + if (FunctionType.isAsync(type)) { + defKeyword = 'async ' + defKeyword; + } + } + + return `${labelFormatted}${defKeyword}${functionName}${sep}${paramSignature}`; +} + +export function getConstructorTooltip( + constructorName: string, + type: Type, + evaluator: TypeEvaluator, + functionSignatureDisplay: SignatureDisplayType +) { + const classText = `class `; + let signature = ''; + + if (isOverloaded(type)) { + const overloads = OverloadedType.getOverloads(type).map((overload) => + getConstructorTooltip(constructorName, overload, evaluator, functionSignatureDisplay) + ); + overloads.forEach((overload, index) => { + signature += overload + ': ...' + '\n\n'; + }); + } else if (isFunction(type)) { + const indentStr = + functionSignatureDisplay === SignatureDisplayType.formatted + ? '\n' + ' '.repeat(functionParamIndentOffset) + : ' '; + const funcParts = evaluator.printFunctionParts(type); + const paramSignature = formatSignature(funcParts, indentStr, functionSignatureDisplay); + signature += `${classText}${constructorName}${paramSignature}`; + } + return signature; +} + +// Only formats signature if there is more than one parameter +function formatSignature( + funcParts: [string[], string], + indentStr: string, + functionSignatureDisplay: SignatureDisplayType +) { + return functionSignatureDisplay === SignatureDisplayType.formatted && + funcParts.length > 0 && + funcParts[0].length > 1 + ? 
`(${indentStr}${funcParts[0].join(',' + indentStr)}\n)` + : `(${funcParts[0].join(', ')})`; +} + +export function getFunctionDocStringFromType(type: FunctionType, sourceMapper: SourceMapper, evaluator: TypeEvaluator) { + const decl = type.shared.declaration; + const enclosingClass = decl ? ParseTreeUtils.getEnclosingClass(decl.node) : undefined; + const classResults = enclosingClass ? evaluator.getTypeOfClass(enclosingClass) : undefined; + + return getFunctionDocStringInherited(type, decl, sourceMapper, classResults?.classType); +} + +export function getOverloadedDocStringsFromType( + type: OverloadedType, + sourceMapper: SourceMapper, + evaluator: TypeEvaluator +) { + const overloads = OverloadedType.getOverloads(type); + if (overloads.length === 0) { + return []; + } + + const decl = overloads[0].shared.declaration; + const enclosingClass = decl ? ParseTreeUtils.getEnclosingClass(decl.node) : undefined; + const classResults = enclosingClass ? evaluator.getTypeOfClass(enclosingClass) : undefined; + + return getOverloadedDocStringsInherited( + type, + overloads.map((o) => o.shared.declaration).filter(isDefined), + sourceMapper, + evaluator, + + classResults?.classType + ); +} + +export function getDocumentationPartForTypeAlias( + sourceMapper: SourceMapper, + resolvedDecl: Declaration | undefined, + evaluator: TypeEvaluator, + symbol?: Symbol +) { + if (!resolvedDecl) { + return undefined; + } + + if (resolvedDecl.type === DeclarationType.TypeAlias) { + return resolvedDecl.docString; + } + + if (resolvedDecl.type === DeclarationType.Variable) { + if (resolvedDecl.typeAliasName && resolvedDecl.docString) { + return resolvedDecl.docString; + } + + const decl = (symbol?.getDeclarations().find((d) => d.type === DeclarationType.Variable && !!d.docString) ?? 
+ resolvedDecl) as VariableDeclaration; + const doc = getVariableDocString(decl, sourceMapper); + if (doc) { + return doc; + } + } + + if (resolvedDecl.type === DeclarationType.Function) { + // @property functions + const doc = getPropertyDocStringInherited(resolvedDecl, sourceMapper, evaluator); + if (doc) { + return doc; + } + } + + return undefined; +} + +export function getDocumentationPartForType( + sourceMapper: SourceMapper, + type: Type, + resolvedDecl: Declaration | undefined, + evaluator: TypeEvaluator, + boundObjectOrClass?: ClassType | undefined +) { + if (isModule(type)) { + const doc = getModuleDocString(type, resolvedDecl, sourceMapper); + if (doc) { + return doc; + } + } else if (isInstantiableClass(type)) { + const doc = getClassDocString(type, resolvedDecl, sourceMapper); + if (doc) { + return doc; + } + } else if (isFunction(type)) { + const functionType = boundObjectOrClass + ? evaluator.bindFunctionToClassOrObject(boundObjectOrClass, type) + : type; + if (functionType && isFunction(functionType)) { + const doc = getFunctionDocStringFromType(functionType, sourceMapper, evaluator); + if (doc) { + return doc; + } + } + } else if (isOverloaded(type)) { + const functionType = boundObjectOrClass + ? 
evaluator.bindFunctionToClassOrObject(boundObjectOrClass, type) + : type; + if (functionType && isOverloaded(functionType)) { + const doc = getOverloadedDocStringsFromType(functionType, sourceMapper, evaluator).find((d) => d); + + if (doc) { + return doc; + } + } + } + return undefined; +} + +export function getDocumentationPartsForTypeAndDecl( + sourceMapper: SourceMapper, + type: Type | undefined, + resolvedDecl: Declaration | undefined, + evaluator: TypeEvaluator, + optional?: { + name?: string; + symbol?: Symbol; + boundObjectOrClass?: ClassType | undefined; + } +): string | undefined { + // Get the alias first + const aliasDoc = getDocumentationPartForTypeAlias(sourceMapper, resolvedDecl, evaluator, optional?.symbol); + + // Combine this with the type doc + let typeDoc: string | undefined; + if (resolvedDecl?.type === DeclarationType.Alias) { + // Handle another alias decl special case. + // ex) import X.Y + // [X].Y + // Asking decl for X gives us "X.Y" rather than "X" since "X" is not actually a symbol. + // We need to get corresponding module name to use special code in type eval for this case. + if ( + resolvedDecl.type === DeclarationType.Alias && + resolvedDecl.node && + resolvedDecl.node.nodeType === ParseNodeType.ImportAs && + !!optional?.name && + !resolvedDecl.node.d.alias + ) { + const name = resolvedDecl.node.d.module.d.nameParts.find((n) => n.d.value === optional.name); + if (name) { + const aliasDecls = evaluator.getDeclInfoForNameNode(name)?.decls ?? [resolvedDecl]; + resolvedDecl = aliasDecls.length > 0 ? aliasDecls[0] : resolvedDecl; + } + } + + typeDoc = getModuleDocStringFromUris([resolvedDecl.uri], sourceMapper); + } + + typeDoc = + typeDoc ?? + (type + ? getDocumentationPartForType(sourceMapper, type, resolvedDecl, evaluator, optional?.boundObjectOrClass) + : undefined); + + // Combine with a new line if they both exist + return aliasDoc && typeDoc && aliasDoc !== typeDoc ? 
`${aliasDoc}\n\n${typeDoc}` : aliasDoc || typeDoc; +} + +export function getAutoImportText(name: string, from?: string, alias?: string): string { + let text: string | undefined; + if (!from) { + text = `import ${name}`; + } else { + text = `from ${from} import ${name}`; + } + + if (alias) { + text = `${text} as ${alias}`; + } + + return text; +} + +export function combineExpressionTypes(typeNodes: ExpressionNode[], evaluator: TypeEvaluator): Type { + const typeList = typeNodes.map((n) => evaluator.getType(n) || UnknownType.create()); + let result = combineTypes(typeList); + + // We're expecting a set of types, if there is only one and the outermost type is a list, take its inner type. This + // is probably an expression that at runtime would turn into a list. + if ( + typeList.length === 1 && + result.category === TypeCategory.Class && + ClassType.isBuiltIn(result, 'list') && + result.priv.typeArgs + ) { + result = result.priv.typeArgs[0]; + } else if ( + typeList.length === 1 && + result.category === TypeCategory.Class && + ClassType.isBuiltIn(result, 'range') + ) { + result = evaluator.getBuiltInObject(typeNodes[0], 'int'); + } + return result; +} + +export function getClassAndConstructorTypes(node: NameNode, evaluator: TypeEvaluator) { + // If the class is used as part of a call (i.e. it is being + // instantiated), include the constructor arguments within the + // hover text. + let callLeftNode: ParseNode | undefined = node; + + // Allow the left to be a member access chain (e.g. a.b.c) if the + // node in question is the last item in the chain. + if (callLeftNode?.parent?.nodeType === ParseNodeType.MemberAccess && node === callLeftNode.parent.d.member) { + callLeftNode = node.parent; + // Allow the left to be a generic class constructor (e.g. 
foo[int]()) + } else if (callLeftNode?.parent?.nodeType === ParseNodeType.Index) { + callLeftNode = node.parent; + } + + if ( + !callLeftNode || + !callLeftNode.parent || + callLeftNode.parent.nodeType !== ParseNodeType.Call || + callLeftNode.parent.d.leftExpr !== callLeftNode + ) { + return; + } + + // Get the init method for this class. + const classType = getTypeForToolTip(evaluator, node); + if (!isInstantiableClass(classType)) { + return; + } + + const instanceType = getTypeForToolTip(evaluator, callLeftNode.parent); + if (!isClassInstance(instanceType)) { + return; + } + + let methodType: Type | undefined; + + // Try to get the `__init__` method first because it typically has more type information than `__new__`. + // Don't exclude `object.__init__` since in the plain case we want to show Foo(). + const initMember = lookUpClassMember(classType, '__init__', MemberAccessFlags.SkipInstanceMembers); + + if (initMember) { + const functionType = evaluator.getTypeOfMember(initMember); + + if (isFunctionOrOverloaded(functionType)) { + methodType = bindFunctionToClassOrObjectToolTip(evaluator, node, instanceType, functionType); + } + } + + // If there was no `__init__`, excluding `object` class `__init__`, or if `__init__` only had default params (*args: Any, **kwargs: Any) or no params (), + // see if we can find a better `__new__` method. + if ( + !methodType || + (methodType && + isFunction(methodType) && + (FunctionType.hasDefaultParams(methodType) || methodType.shared.parameters.length === 0)) + ) { + const newMember = lookUpClassMember( + classType, + '__new__', + MemberAccessFlags.SkipObjectBaseClass | MemberAccessFlags.SkipInstanceMembers + ); + + if (newMember) { + const newMemberType = evaluator.getTypeOfMember(newMember); + + // Prefer `__new__` if it doesn't have default params (*args: Any, **kwargs: Any) or no params (). + if (isFunctionOrOverloaded(newMemberType)) { + // Set `treatConstructorAsClassMethod` to true to exclude `cls` as a parameter. 
+ methodType = bindFunctionToClassOrObjectToolTip( + evaluator, + node, + instanceType, + newMemberType, + /* treatConstructorAsClassMethod */ true + ); + } + } + } + + return { methodType, classType }; +} + +export function bindFunctionToClassOrObjectToolTip( + evaluator: TypeEvaluator, + node: ExpressionNode, + baseType: ClassType | undefined, + memberType: FunctionType | OverloadedType, + treatConstructorAsClassMethod?: boolean +): FunctionType | OverloadedType | undefined { + const methodType = evaluator.bindFunctionToClassOrObject( + baseType, + memberType, + /* memberClass */ undefined, + treatConstructorAsClassMethod + ); + + if (!methodType) { + return undefined; + } + + return limitOverloadBasedOnCall(evaluator, methodType, node); +} + +export function limitOverloadBasedOnCall( + evaluator: TypeEvaluator, + type: T, + node: ExpressionNode +): T | FunctionType | OverloadedType { + // If it's an overloaded function, see if it's part of a call expression. + // If so, we may be able to eliminate some of the overloads based on + // the overload resolution. + if (!isOverloaded(type) || node.nodeType !== ParseNodeType.Name) { + return type; + } + + const callNode = ParseTreeUtils.getCallForName(node); + if (!callNode) { + return type; + } + + const callTypeResult = evaluator.getTypeResult(callNode); + if (!callTypeResult || !callTypeResult.overloadsUsedForCall || callTypeResult.overloadsUsedForCall.length === 0) { + return type; + } + + if (callTypeResult.overloadsUsedForCall.length === 1) { + return callTypeResult.overloadsUsedForCall[0]; + } + + return OverloadedType.create(callTypeResult.overloadsUsedForCall); +} + +export function getTypeForToolTip(evaluator: TypeEvaluator, node: ExpressionNode) { + // It does common work necessary for hover for a type we got + // from raw type evaluator. + const type = evaluator.getType(node) ?? 
UnknownType.create(); + return limitOverloadBasedOnCall(evaluator, type, node); +} diff --git a/python-parser/packages/pyright-internal/src/languageService/workspaceSymbolProvider.ts b/python-parser/packages/pyright-internal/src/languageService/workspaceSymbolProvider.ts new file mode 100644 index 00000000..b18f84b2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/languageService/workspaceSymbolProvider.ts @@ -0,0 +1,161 @@ +/* + * workspaceSymbolProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Provide langue server workspace symbol functionality. + */ + +import { CancellationToken, Location, ResultProgressReporter, SymbolInformation } from 'vscode-languageserver'; +import { getFileInfo } from '../analyzer/analyzerNodeInfo'; +import { isUserCode } from '../analyzer/sourceFileInfoUtils'; +import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { appendArray } from '../common/collectionUtils'; +import { ProgramView } from '../common/extensibility'; +import * as StringUtils from '../common/stringUtils'; +import { Uri } from '../common/uri/uri'; +import { convertUriToLspUriString } from '../common/uri/uriUtils'; +import { Workspace } from '../workspaceFactory'; +import { IndexSymbolData, SymbolIndexer } from './symbolIndexer'; + +type WorkspaceSymbolCallback = (symbols: SymbolInformation[]) => void; + +export class WorkspaceSymbolProvider { + private _reporter: WorkspaceSymbolCallback; + private _allSymbols: SymbolInformation[] = []; + + constructor( + private readonly _workspaces: Workspace[], + resultReporter: ResultProgressReporter | undefined, + private readonly _query: string, + private readonly _token: CancellationToken + ) { + this._reporter = resultReporter + ? 
(symbols) => resultReporter.report(symbols) + : (symbols) => appendArray(this._allSymbols, symbols); + } + + reportSymbols(): SymbolInformation[] { + for (const workspace of this._workspaces) { + if (workspace.disableLanguageServices || workspace.disableWorkspaceSymbol) { + continue; + } + + if (!workspace.isInitialized.resolved()) { + // If workspace is not resolved, ignore this workspace and move on. + // We could wait for the initialization but that cause this to be async + // so for now, we will just ignore any workspace that is not initialized yet. + continue; + } + + workspace.service.run((program) => { + this._reportSymbolsForProgram(program); + }, this._token); + } + + return this._allSymbols; + } + + protected getSymbolsForDocument(program: ProgramView, fileUri: Uri): SymbolInformation[] { + const symbolList: SymbolInformation[] = []; + + const parseResults = program.getParseResults(fileUri); + if (!parseResults) { + return symbolList; + } + + const fileInfo = getFileInfo(parseResults.parserOutput.parseTree); + if (!fileInfo) { + return symbolList; + } + + const indexSymbolData = SymbolIndexer.indexSymbols( + fileInfo, + parseResults, + { includeAliases: false }, + this._token + ); + this.appendWorkspaceSymbolsRecursive(indexSymbolData, program, fileUri, '', symbolList); + + return symbolList; + } + + protected appendWorkspaceSymbolsRecursive( + indexSymbolData: IndexSymbolData[] | undefined, + program: ProgramView, + fileUri: Uri, + container: string, + symbolList: SymbolInformation[] + ) { + throwIfCancellationRequested(this._token); + + if (!indexSymbolData) { + return; + } + + for (const symbolData of indexSymbolData) { + if (symbolData.alias) { + continue; + } + + if (StringUtils.isPatternInSymbol(this._query, symbolData.name)) { + const location: Location = { + uri: convertUriToLspUriString(program.fileSystem, fileUri), + range: symbolData.selectionRange!, + }; + + const symbolInfo: SymbolInformation = { + name: symbolData.name, + kind: 
symbolData.kind, + location, + }; + + if (container.length) { + symbolInfo.containerName = container; + } + + symbolList.push(symbolInfo); + } + + this.appendWorkspaceSymbolsRecursive( + symbolData.children, + program, + fileUri, + this._getContainerName(container, symbolData.name), + symbolList + ); + } + } + + private _reportSymbolsForProgram(program: ProgramView) { + // Don't do a search if the query is empty. We'll return + // too many results in this case. + if (!this._query) { + return; + } + + // "Workspace symbols" searches symbols only from user code. + for (const sourceFileInfo of program.getSourceFileInfoList()) { + if (!isUserCode(sourceFileInfo)) { + continue; + } + + const symbolList = this.getSymbolsForDocument(program, sourceFileInfo.uri); + if (symbolList.length > 0) { + this._reporter(symbolList); + } + + // This operation can consume significant memory, so check + // for situations where we need to discard the type cache. + program.handleMemoryHighUsage(); + } + } + + private _getContainerName(container: string, name: string) { + if (container.length > 0) { + return `${container}.${name}`; + } + + return name; + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/localize.ts b/python-parser/packages/pyright-internal/src/localization/localize.ts new file mode 100644 index 00000000..fa30d396 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/localize.ts @@ -0,0 +1,1680 @@ +/* + * localize.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Code that localizes user-visible strings. 
+ */ + +import { fail } from '../common/debug'; + +import csStrings = require('./package.nls.cs.json'); +import deStrings = require('./package.nls.de.json'); +import enUsStrings = require('./package.nls.en-us.json'); +import esStrings = require('./package.nls.es.json'); +import frStrings = require('./package.nls.fr.json'); +import itStrings = require('./package.nls.it.json'); +import jaStrings = require('./package.nls.ja.json'); +import koStrings = require('./package.nls.ko.json'); +import plStrings = require('./package.nls.pl.json'); +import ptBrStrings = require('./package.nls.pt-br.json'); +import qpsPlocStrings = require('./package.nls.qps-ploc.json'); +import ruStrings = require('./package.nls.ru.json'); +import trStrings = require('./package.nls.tr.json'); +import zhCnStrings = require('./package.nls.zh-cn.json'); +import zhTwStrings = require('./package.nls.zh-tw.json'); + +export class ParameterizedString { + constructor(private _formatString: string) {} + + format(params: T): string { + let str = this._formatString; + Object.keys(params).forEach((key) => { + str = str.replace(new RegExp(`{${key}}`, 'g'), (params as any)[key].toString()); + }); + return str; + } + + getFormatString() { + return this._formatString; + } +} + +const defaultLocale = 'en-us'; +const stringMapsByLocale: Map = new Map([ + ['cs', csStrings as StringLookupMap], + ['de', deStrings], + ['en-us', enUsStrings], + ['en', enUsStrings], + ['es', esStrings], + ['fr', frStrings], + ['it', itStrings], + ['ja', jaStrings], + ['ko', koStrings], + ['pl', plStrings], + ['pt-br', ptBrStrings], + ['qps-ploc', qpsPlocStrings], + ['ru', ruStrings], + ['tr', trStrings], + ['zh-cn', zhCnStrings], + ['zh-tw', zhTwStrings], +]); + +type CommentedStringValue = { + message: string; + comment: string[]; +}; + +export type StringLookupMap = { [key: string]: string | CommentedStringValue | StringLookupMap }; +let localizedStrings: StringLookupMap | undefined = undefined; +let defaultStrings: StringLookupMap = 
{}; + +function getRawStringDefault(key: string): string { + if (localizedStrings === undefined) { + localizedStrings = initialize(); + } + + const keyParts = key.split('.'); + const isDiagnostic = keyParts[0] === 'Diagnostic' || keyParts[0] === 'DiagnosticAddendum'; + + const str = + isDiagnostic && forceEnglishDiagnostics + ? getRawStringFromMap(defaultStrings, keyParts) + : getRawStringFromMap(localizedStrings, keyParts) || getRawStringFromMap(defaultStrings, keyParts); + + if (str) { + return str; + } + + fail(`Missing localized string for key "${key}"`); +} + +let getRawString = getRawStringDefault; + +// Function allowing different strings to be used for messages. +// Returns the previous function used for getting messages. +export function setGetRawString(func: (key: string) => string): (key: string) => string { + const oldLookup = getRawString; + getRawString = func; + return oldLookup; +} + +export function getRawStringFromMap(map: StringLookupMap, keyParts: string[]): string | undefined { + let curObj: any = map; + + for (const keyPart of keyParts) { + if (!curObj[keyPart]) { + return undefined; + } + + curObj = curObj[keyPart]; + } + + return typeof curObj === 'string' ? curObj : curObj.message; +} + +function initialize(): StringLookupMap { + defaultStrings = loadDefaultStrings(); + const currentLocale = getLocaleFromEnv(); + return loadStringsForLocale(currentLocale, stringMapsByLocale); +} + +let localeOverride: string | undefined; +let forceEnglishDiagnostics = false; + +export function setLocaleOverride(locale: string) { + // Force a reload of the localized strings. + localizedStrings = undefined; + localeOverride = locale.toLowerCase(); +} + +export function setForceEnglishDiagnostics(force: boolean) { + forceEnglishDiagnostics = force; +} + +export function getLocaleFromEnv(): string { + if (localeOverride) { + return localeOverride; + } + + try { + const env = process?.env; + + // Start with the VSCode environment variables. 
+ const vscodeConfigString = env?.VSCODE_NLS_CONFIG; + if (vscodeConfigString) { + try { + return JSON.parse(vscodeConfigString).locale || defaultLocale; + } catch { + // Fall through + } + } + + // See if there is a language env variable. + const localeString = env?.LC_ALL || env?.LC_MESSAGES || env?.LANG || env?.LANGUAGE; + if (localeString) { + // This string may contain a local followed by an encoding (e.g. "en-us.UTF-8"). + const localeStringSplit = localeString.split('.'); + if (localeStringSplit.length > 0 && localeStringSplit[0]) { + return localeStringSplit[0] || defaultLocale; + } + } + } catch { + // Just use the default locale + } + + // Fall back to the default locale. + return defaultLocale; +} + +function loadDefaultStrings(): StringLookupMap { + const defaultStrings = stringMapsByLocale.get(defaultLocale); + if (defaultStrings) { + return defaultStrings; + } + console.error('Could not load default strings'); + return {}; +} + +export function loadStringsForLocale(locale: string, localeMap: Map): StringLookupMap { + if (locale === defaultLocale) { + // No need to load override if we're using the default. + return {}; + } + + let override = localeMap.get(locale); + if (override !== undefined) { + return override; + } + + // If we couldn't find the requested locale, try to fall back on a more + // general version. 
+ const localeSplit = locale.split('-'); + if (localeSplit.length > 0 && localeSplit[0]) { + override = localeMap.get(localeSplit[0]); + if (override !== undefined) { + return override; + } + } + + return {}; +} + +export namespace Localizer { + export namespace Diagnostic { + export const annotatedMetadataInconsistent = () => + new ParameterizedString<{ type: string; metadataType: string }>( + getRawString('Diagnostic.annotatedMetadataInconsistent') + ); + export const abstractMethodInvocation = () => + new ParameterizedString<{ method: string }>(getRawString('Diagnostic.abstractMethodInvocation')); + export const annotatedParamCountMismatch = () => + new ParameterizedString<{ expected: number; received: number }>( + getRawString('Diagnostic.annotatedParamCountMismatch') + ); + export const annotatedTypeArgMissing = () => getRawString('Diagnostic.annotatedTypeArgMissing'); + export const annotationBytesString = () => getRawString('Diagnostic.annotationBytesString'); + export const annotationFormatString = () => getRawString('Diagnostic.annotationFormatString'); + export const annotationNotSupported = () => getRawString('Diagnostic.annotationNotSupported'); + export const annotationRawString = () => getRawString('Diagnostic.annotationRawString'); + export const annotationSpansStrings = () => getRawString('Diagnostic.annotationSpansStrings'); + export const annotationStringEscape = () => getRawString('Diagnostic.annotationStringEscape'); + export const annotationTemplateString = () => getRawString('Diagnostic.annotationTemplateString'); + export const argAssignment = () => + new ParameterizedString<{ argType: string; paramType: string }>(getRawString('Diagnostic.argAssignment')); + export const argAssignmentFunction = () => + new ParameterizedString<{ argType: string; paramType: string; functionName: string }>( + getRawString('Diagnostic.argAssignmentFunction') + ); + export const argAssignmentParam = () => + new ParameterizedString<{ argType: string; paramType: 
string; paramName: string }>( + getRawString('Diagnostic.argAssignmentParam') + ); + export const argAssignmentParamFunction = () => + new ParameterizedString<{ argType: string; paramType: string; paramName: string; functionName: string }>( + getRawString('Diagnostic.argAssignmentParamFunction') + ); + export const argMissingForParam = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.argMissingForParam')); + export const argMissingForParams = () => + new ParameterizedString<{ names: string }>(getRawString('Diagnostic.argMissingForParams')); + export const argMorePositionalExpectedCount = () => + new ParameterizedString<{ expected: number }>(getRawString('Diagnostic.argMorePositionalExpectedCount')); + export const argMorePositionalExpectedOne = () => getRawString('Diagnostic.argMorePositionalExpectedOne'); + export const argPositional = () => getRawString('Diagnostic.argPositional'); + export const argPositionalExpectedCount = () => + new ParameterizedString<{ expected: number }>(getRawString('Diagnostic.argPositionalExpectedCount')); + export const argPositionalExpectedOne = () => getRawString('Diagnostic.argPositionalExpectedOne'); + export const argTypePartiallyUnknown = () => getRawString('Diagnostic.argTypePartiallyUnknown'); + export const argTypeUnknown = () => getRawString('Diagnostic.argTypeUnknown'); + export const assertAlwaysTrue = () => getRawString('Diagnostic.assertAlwaysTrue'); + export const assertTypeArgs = () => getRawString('Diagnostic.assertTypeArgs'); + export const assertTypeTypeMismatch = () => + new ParameterizedString<{ expected: string; received: string }>( + getRawString('Diagnostic.assertTypeTypeMismatch') + ); + export const assignmentExprContext = () => getRawString('Diagnostic.assignmentExprContext'); + export const assignmentExprComprehension = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.assignmentExprComprehension')); + export const assignmentExprInSubscript = () => 
getRawString('Diagnostic.assignmentExprInSubscript'); + export const assignmentInProtocol = () => getRawString('Diagnostic.assignmentInProtocol'); + export const assignmentTargetExpr = () => getRawString('Diagnostic.assignmentTargetExpr'); + export const asyncNotInAsyncFunction = () => getRawString('Diagnostic.asyncNotInAsyncFunction'); + export const awaitIllegal = () => getRawString('Diagnostic.awaitIllegal'); + export const awaitNotAllowed = () => getRawString('Diagnostic.awaitNotAllowed'); + export const awaitNotInAsync = () => getRawString('Diagnostic.awaitNotInAsync'); + export const backticksIllegal = () => getRawString('Diagnostic.backticksIllegal'); + export const baseClassCircular = () => getRawString('Diagnostic.baseClassCircular'); + export const baseClassFinal = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.baseClassFinal')); + export const baseClassIncompatible = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.baseClassIncompatible')); + export const baseClassInvalid = () => getRawString('Diagnostic.baseClassInvalid'); + export const baseClassMethodTypeIncompatible = () => + new ParameterizedString<{ classType: string; name: string }>( + getRawString('Diagnostic.baseClassMethodTypeIncompatible') + ); + export const baseClassVariableTypeIncompatible = () => + new ParameterizedString<{ classType: string; name: string }>( + getRawString('Diagnostic.baseClassVariableTypeIncompatible') + ); + export const baseClassUnknown = () => getRawString('Diagnostic.baseClassUnknown'); + export const binaryOperationNotAllowed = () => getRawString('Diagnostic.binaryOperationNotAllowed'); + export const bindParamMissing = () => + new ParameterizedString<{ methodName: string }>(getRawString('Diagnostic.bindParamMissing')); + export const bindTypeMismatch = () => + new ParameterizedString<{ type: string; methodName: string; paramName: string }>( + getRawString('Diagnostic.bindTypeMismatch') + ); + export const 
breakInExceptionGroup = () => getRawString('Diagnostic.breakInExceptionGroup'); + export const breakOutsideLoop = () => getRawString('Diagnostic.breakOutsideLoop'); + export const bytesUnsupportedEscape = () => getRawString('Diagnostic.bytesUnsupportedEscape'); + export const callableExtraArgs = () => getRawString('Diagnostic.callableExtraArgs'); + export const callableFirstArg = () => getRawString('Diagnostic.callableFirstArg'); + export const callableNotInstantiable = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.callableNotInstantiable')); + export const callableSecondArg = () => getRawString('Diagnostic.callableSecondArg'); + export const casePatternIsIrrefutable = () => getRawString('Diagnostic.casePatternIsIrrefutable'); + export const classAlreadySpecialized = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.classAlreadySpecialized')); + export const classDecoratorTypeUnknown = () => getRawString('Diagnostic.classDecoratorTypeUnknown'); + export const classDefinitionCycle = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.classDefinitionCycle')); + export const classGetItemClsParam = () => getRawString('Diagnostic.classGetItemClsParam'); + export const classMethodClsParam = () => getRawString('Diagnostic.classMethodClsParam'); + export const classNotRuntimeSubscriptable = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.classNotRuntimeSubscriptable')); + export const classPatternBuiltInArgPositional = () => + getRawString('Diagnostic.classPatternBuiltInArgPositional'); + export const classPatternNewType = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.classPatternNewType')); + export const classPatternPositionalArgCount = () => + new ParameterizedString<{ type: string; expected: number; received: number }>( + getRawString('Diagnostic.classPatternPositionalArgCount') + ); + export const classPatternTypeAlias = () => + new 
ParameterizedString<{ type: string }>(getRawString('Diagnostic.classPatternTypeAlias')); + export const classPropertyDeprecated = () => getRawString('Diagnostic.classPropertyDeprecated'); + export const classTypeParametersIllegal = () => getRawString('Diagnostic.classTypeParametersIllegal'); + export const classVarNotAllowed = () => getRawString('Diagnostic.classVarNotAllowed'); + export const classVarFirstArgMissing = () => getRawString('Diagnostic.classVarFirstArgMissing'); + export const classVarOverridesInstanceVar = () => + new ParameterizedString<{ name: string; className: string }>( + getRawString('Diagnostic.classVarOverridesInstanceVar') + ); + export const classVarTooManyArgs = () => getRawString('Diagnostic.classVarTooManyArgs'); + export const classVarWithTypeVar = () => getRawString('Diagnostic.classVarWithTypeVar'); + export const clsSelfParamTypeMismatch = () => + new ParameterizedString<{ name: string; classType: string }>( + getRawString('Diagnostic.clsSelfParamTypeMismatch') + ); + export const codeTooComplexToAnalyze = () => getRawString('Diagnostic.codeTooComplexToAnalyze'); + export const collectionAliasInstantiation = () => + new ParameterizedString<{ type: string; alias: string }>( + getRawString('Diagnostic.collectionAliasInstantiation') + ); + export const comparisonAlwaysFalse = () => + new ParameterizedString<{ leftType: string; rightType: string }>( + getRawString('Diagnostic.comparisonAlwaysFalse') + ); + export const comparisonAlwaysTrue = () => + new ParameterizedString<{ leftType: string; rightType: string }>( + getRawString('Diagnostic.comparisonAlwaysTrue') + ); + export const comprehensionInDict = () => getRawString('Diagnostic.comprehensionInDict'); + export const comprehensionInSet = () => getRawString('Diagnostic.comprehensionInSet'); + export const concatenateContext = () => getRawString('Diagnostic.concatenateContext'); + export const concatenateParamSpecMissing = () => getRawString('Diagnostic.concatenateParamSpecMissing'); 
+ export const concatenateTypeArgsMissing = () => getRawString('Diagnostic.concatenateTypeArgsMissing'); + export const conditionalOperandInvalid = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.conditionalOperandInvalid')); + export const constantRedefinition = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.constantRedefinition')); + export const coroutineInConditionalExpression = () => + getRawString('Diagnostic.coroutineInConditionalExpression'); + export const constructorParametersMismatch = () => + new ParameterizedString<{ classType: string }>(getRawString('Diagnostic.constructorParametersMismatch')); + export const containmentAlwaysFalse = () => + new ParameterizedString<{ leftType: string; rightType: string }>( + getRawString('Diagnostic.containmentAlwaysFalse') + ); + export const containmentAlwaysTrue = () => + new ParameterizedString<{ leftType: string; rightType: string }>( + getRawString('Diagnostic.containmentAlwaysTrue') + ); + export const continueInExceptionGroup = () => getRawString('Diagnostic.continueInExceptionGroup'); + export const continueOutsideLoop = () => getRawString('Diagnostic.continueOutsideLoop'); + export const dataClassBaseClassFrozen = () => getRawString('Diagnostic.dataClassBaseClassFrozen'); + export const dataClassBaseClassNotFrozen = () => getRawString('Diagnostic.dataClassBaseClassNotFrozen'); + export const dataClassConverterFunction = () => + new ParameterizedString<{ argType: string; fieldType: string; fieldName: string }>( + getRawString('Diagnostic.dataClassConverterFunction') + ); + export const dataClassConverterOverloads = () => + new ParameterizedString<{ funcName: string; fieldType: string; fieldName: string }>( + getRawString('Diagnostic.dataClassConverterOverloads') + ); + export const dataClassFieldInheritedDefault = () => + new ParameterizedString<{ fieldName: string }>(getRawString('Diagnostic.dataClassFieldInheritedDefault')); + export const 
dataClassFieldWithDefault = () => getRawString('Diagnostic.dataClassFieldWithDefault'); + export const dataClassFieldWithoutAnnotation = () => getRawString('Diagnostic.dataClassFieldWithoutAnnotation'); + export const dataClassFieldWithPrivateName = () => getRawString('Diagnostic.dataClassFieldWithPrivateName'); + export const dataClassPostInitParamCount = () => + new ParameterizedString<{ expected: number }>(getRawString('Diagnostic.dataClassPostInitParamCount')); + export const dataClassPostInitType = () => + new ParameterizedString<{ fieldName: string }>(getRawString('Diagnostic.dataClassPostInitType')); + export const dataClassSlotsOverwrite = () => getRawString('Diagnostic.dataClassSlotsOverwrite'); + export const dataClassTransformExpectedBoolLiteral = () => + getRawString('Diagnostic.dataClassTransformExpectedBoolLiteral'); + export const dataClassTransformFieldSpecifier = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.dataClassTransformFieldSpecifier')); + export const dataClassTransformPositionalParam = () => + getRawString('Diagnostic.dataClassTransformPositionalParam'); + export const dataClassTransformUnknownArgument = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.dataClassTransformUnknownArgument')); + export const dataProtocolInSubclassCheck = () => getRawString('Diagnostic.dataProtocolInSubclassCheck'); + export const declaredReturnTypePartiallyUnknown = () => + new ParameterizedString<{ returnType: string }>( + getRawString('Diagnostic.declaredReturnTypePartiallyUnknown') + ); + export const declaredReturnTypeUnknown = () => getRawString('Diagnostic.declaredReturnTypeUnknown'); + export const defaultValueContainsCall = () => getRawString('Diagnostic.defaultValueContainsCall'); + export const defaultValueNotAllowed = () => getRawString('Diagnostic.defaultValueNotAllowed'); + export const deprecatedClass = () => + new ParameterizedString<{ name: string 
}>(getRawString('Diagnostic.deprecatedClass')); + export const deprecatedConstructor = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.deprecatedConstructor')); + export const deprecatedDescriptorDeleter = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.deprecatedDescriptorDeleter')); + export const deprecatedDescriptorGetter = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.deprecatedDescriptorGetter')); + export const deprecatedDescriptorSetter = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.deprecatedDescriptorSetter')); + export const deprecatedFunction = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.deprecatedFunction')); + export const deprecatedMethod = () => + new ParameterizedString<{ name: string; className: string }>(getRawString('Diagnostic.deprecatedMethod')); + export const deprecatedPropertyDeleter = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.deprecatedPropertyDeleter')); + export const deprecatedPropertyGetter = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.deprecatedPropertyGetter')); + export const deprecatedPropertySetter = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.deprecatedPropertySetter')); + export const deprecatedType = () => + new ParameterizedString<{ version: string; replacement: string }>( + getRawString('Diagnostic.deprecatedType') + ); + export const dictExpandIllegalInComprehension = () => + getRawString('Diagnostic.dictExpandIllegalInComprehension'); + export const dictInAnnotation = () => getRawString('Diagnostic.dictInAnnotation'); + export const dictKeyValuePairs = () => getRawString('Diagnostic.dictKeyValuePairs'); + export const dictUnpackIsNotMapping = () => getRawString('Diagnostic.dictUnpackIsNotMapping'); + export const delTargetExpr = () => getRawString('Diagnostic.delTargetExpr'); + export 
const dunderAllSymbolNotPresent = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.dunderAllSymbolNotPresent')); + export const duplicateArgsParam = () => getRawString('Diagnostic.duplicateArgsParam'); + export const duplicateBaseClass = () => getRawString('Diagnostic.duplicateBaseClass'); + export const duplicateCatchAll = () => getRawString('Diagnostic.duplicateCatchAll'); + export const duplicateEnumMember = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.duplicateEnumMember')); + export const duplicateGenericAndProtocolBase = () => getRawString('Diagnostic.duplicateGenericAndProtocolBase'); + export const duplicateImport = () => + new ParameterizedString<{ importName: string }>(getRawString('Diagnostic.duplicateImport')); + export const duplicateKwargsParam = () => getRawString('Diagnostic.duplicateKwargsParam'); + export const duplicateKeywordOnly = () => getRawString('Diagnostic.duplicateKeywordOnly'); + export const duplicateParam = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.duplicateParam')); + export const duplicateCapturePatternTarget = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.duplicateCapturePatternTarget')); + export const duplicateStarPattern = () => getRawString('Diagnostic.duplicateStarPattern'); + export const duplicateStarStarPattern = () => getRawString('Diagnostic.duplicateStarStarPattern'); + export const duplicatePositionOnly = () => getRawString('Diagnostic.duplicatePositionOnly'); + export const duplicateUnpack = () => getRawString('Diagnostic.duplicateUnpack'); + export const ellipsisAfterUnpacked = () => getRawString('Diagnostic.ellipsisAfterUnpacked'); + export const ellipsisContext = () => getRawString('Diagnostic.ellipsisContext'); + export const ellipsisSecondArg = () => getRawString('Diagnostic.ellipsisSecondArg'); + export const enumClassOverride = () => + new ParameterizedString<{ name: string 
}>(getRawString('Diagnostic.enumClassOverride')); + export const enumMemberDelete = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.enumMemberDelete')); + export const enumMemberSet = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.enumMemberSet')); + export const enumMemberTypeAnnotation = () => getRawString('Diagnostic.enumMemberTypeAnnotation'); + export const exceptionGroupIncompatible = () => getRawString('Diagnostic.exceptionGroupIncompatible'); + export const exceptGroupMismatch = () => getRawString('Diagnostic.exceptGroupMismatch'); + export const exceptGroupRequiresType = () => getRawString('Diagnostic.exceptGroupRequiresType'); + export const exceptionGroupTypeIncorrect = () => getRawString('Diagnostic.exceptionGroupTypeIncorrect'); + export const exceptionTypeIncorrect = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.exceptionTypeIncorrect')); + export const exceptionTypeNotClass = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.exceptionTypeNotClass')); + export const exceptionTypeNotInstantiable = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.exceptionTypeNotInstantiable')); + export const exceptRequiresParens = () => getRawString('Diagnostic.exceptRequiresParens'); + export const exceptWithAsRequiresParens = () => getRawString('Diagnostic.exceptWithAsRequiresParens'); + export const expectedAfterDecorator = () => getRawString('Diagnostic.expectedAfterDecorator'); + export const expectedArrow = () => getRawString('Diagnostic.expectedArrow'); + export const expectedAsAfterException = () => getRawString('Diagnostic.expectedAsAfterException'); + export const expectedAssignRightHandExpr = () => getRawString('Diagnostic.expectedAssignRightHandExpr'); + export const expectedBinaryRightHandExpr = () => getRawString('Diagnostic.expectedBinaryRightHandExpr'); + export const expectedBoolLiteral = () => 
getRawString('Diagnostic.expectedBoolLiteral'); + export const expectedCase = () => getRawString('Diagnostic.expectedCase'); + export const expectedClassName = () => getRawString('Diagnostic.expectedClassName'); + export const expectedCloseBrace = () => getRawString('Diagnostic.expectedCloseBrace'); + export const expectedCloseBracket = () => getRawString('Diagnostic.expectedCloseBracket'); + export const expectedCloseParen = () => getRawString('Diagnostic.expectedCloseParen'); + export const expectedColon = () => getRawString('Diagnostic.expectedColon'); + export const expectedComplexNumberLiteral = () => getRawString('Diagnostic.expectedComplexNumberLiteral'); + export const expectedDecoratorExpr = () => getRawString('Diagnostic.expectedDecoratorExpr'); + export const expectedDecoratorName = () => getRawString('Diagnostic.expectedDecoratorName'); + export const expectedDecoratorNewline = () => getRawString('Diagnostic.expectedDecoratorNewline'); + export const expectedDelExpr = () => getRawString('Diagnostic.expectedDelExpr'); + export const expectedElse = () => getRawString('Diagnostic.expectedElse'); + export const expectedEquals = () => getRawString('Diagnostic.expectedEquals'); + export const expectedExceptionClass = () => getRawString('Diagnostic.expectedExceptionClass'); + export const expectedExceptionObj = () => getRawString('Diagnostic.expectedExceptionObj'); + export const expectedExpr = () => getRawString('Diagnostic.expectedExpr'); + export const expectedIdentifier = () => getRawString('Diagnostic.expectedIdentifier'); + export const expectedImport = () => getRawString('Diagnostic.expectedImport'); + export const expectedImportAlias = () => getRawString('Diagnostic.expectedImportAlias'); + export const expectedImportSymbols = () => getRawString('Diagnostic.expectedImportSymbols'); + export const expectedIndentedBlock = () => getRawString('Diagnostic.expectedIndentedBlock'); + export const expectedIn = () => getRawString('Diagnostic.expectedIn'); + 
export const expectedInExpr = () => getRawString('Diagnostic.expectedInExpr'); + export const expectedFunctionAfterAsync = () => getRawString('Diagnostic.expectedFunctionAfterAsync'); + export const expectedFunctionName = () => getRawString('Diagnostic.expectedFunctionName'); + export const expectedMemberName = () => getRawString('Diagnostic.expectedMemberName'); + export const expectedModuleName = () => getRawString('Diagnostic.expectedModuleName'); + export const expectedNameAfterAs = () => getRawString('Diagnostic.expectedNameAfterAs'); + export const expectedNamedParameter = () => getRawString('Diagnostic.expectedNamedParameter'); + export const expectedNewline = () => getRawString('Diagnostic.expectedNewline'); + export const expectedNewlineOrSemicolon = () => getRawString('Diagnostic.expectedNewlineOrSemicolon'); + export const expectedOpenParen = () => getRawString('Diagnostic.expectedOpenParen'); + export const expectedParamName = () => getRawString('Diagnostic.expectedParamName'); + export const expectedPatternExpr = () => getRawString('Diagnostic.expectedPatternExpr'); + export const expectedPatternSubjectExpr = () => getRawString('Diagnostic.expectedPatternSubjectExpr'); + export const expectedPatternValue = () => getRawString('Diagnostic.expectedPatternValue'); + export const expectedReturnExpr = () => getRawString('Diagnostic.expectedReturnExpr'); + export const expectedSliceIndex = () => getRawString('Diagnostic.expectedSliceIndex'); + export const expectedTypeNotString = () => getRawString('Diagnostic.expectedTypeNotString'); + export const expectedTypeParameterName = () => getRawString('Diagnostic.expectedTypeParameterName'); + export const expectedYieldExpr = () => getRawString('Diagnostic.expectedYieldExpr'); + export const finalClassIsAbstract = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.finalClassIsAbstract')); + export const finalContext = () => getRawString('Diagnostic.finalContext'); + export const finalInLoop 
= () => getRawString('Diagnostic.finalInLoop'); + export const finallyBreak = () => getRawString('Diagnostic.finallyBreak'); + export const finallyContinue = () => getRawString('Diagnostic.finallyContinue'); + export const finallyReturn = () => getRawString('Diagnostic.finallyReturn'); + export const finalMethodOverride = () => + new ParameterizedString<{ name: string; className: string }>( + getRawString('Diagnostic.finalMethodOverride') + ); + export const finalNonMethod = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.finalNonMethod')); + export const finalReassigned = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.finalReassigned')); + export const finalRedeclaration = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.finalRedeclaration')); + export const finalRedeclarationBySubclass = () => + new ParameterizedString<{ name: string; className: string }>( + getRawString('Diagnostic.finalRedeclarationBySubclass') + ); + export const finalTooManyArgs = () => getRawString('Diagnostic.finalTooManyArgs'); + export const finalUnassigned = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.finalUnassigned')); + export const formatStringBrace = () => getRawString('Diagnostic.formatStringBrace'); + export const formatStringBytes = () => getRawString('Diagnostic.formatStringBytes'); + export const formatStringDebuggingIllegal = () => getRawString('Diagnostic.formatStringDebuggingIllegal'); + export const formatStringEscape = () => getRawString('Diagnostic.formatStringEscape'); + export const formatStringExpectedConversion = () => getRawString('Diagnostic.formatStringExpectedConversion'); + export const formatStringInPattern = () => getRawString('Diagnostic.formatStringInPattern'); + export const formatStringIllegal = () => getRawString('Diagnostic.formatStringIllegal'); + export const formatStringNestedFormatSpecifier = () => + 
getRawString('Diagnostic.formatStringNestedFormatSpecifier'); + export const formatStringNestedQuote = () => getRawString('Diagnostic.formatStringNestedQuote'); + export const formatStringTemplate = () => getRawString('Diagnostic.formatStringTemplate'); + export const formatStringUnicode = () => getRawString('Diagnostic.formatStringUnicode'); + export const formatStringUnterminated = () => getRawString('Diagnostic.formatStringUnterminated'); + export const functionDecoratorTypeUnknown = () => getRawString('Diagnostic.functionDecoratorTypeUnknown'); + export const functionInConditionalExpression = () => getRawString('Diagnostic.functionInConditionalExpression'); + export const functionTypeParametersIllegal = () => getRawString('Diagnostic.functionTypeParametersIllegal'); + export const futureImportLocationNotAllowed = () => getRawString('Diagnostic.futureImportLocationNotAllowed'); + export const generatorAsyncReturnType = () => + new ParameterizedString<{ yieldType: string }>(getRawString('Diagnostic.generatorAsyncReturnType')); + export const generatorNotParenthesized = () => getRawString('Diagnostic.generatorNotParenthesized'); + export const generatorSyncReturnType = () => + new ParameterizedString<{ yieldType: string }>(getRawString('Diagnostic.generatorSyncReturnType')); + export const genericBaseClassNotAllowed = () => getRawString('Diagnostic.genericBaseClassNotAllowed'); + export const genericClassAssigned = () => getRawString('Diagnostic.genericClassAssigned'); + export const genericClassDeleted = () => getRawString('Diagnostic.genericClassDeleted'); + export const genericInstanceVariableAccess = () => getRawString('Diagnostic.genericInstanceVariableAccess'); + export const genericNotAllowed = () => getRawString('Diagnostic.genericNotAllowed'); + export const genericTypeAliasBoundTypeVar = () => + new ParameterizedString<{ names: string }>(getRawString('Diagnostic.genericTypeAliasBoundTypeVar')); + export const genericTypeArgMissing = () => 
getRawString('Diagnostic.genericTypeArgMissing'); + export const genericTypeArgTypeVar = () => getRawString('Diagnostic.genericTypeArgTypeVar'); + export const genericTypeArgUnique = () => getRawString('Diagnostic.genericTypeArgUnique'); + export const globalReassignment = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.globalReassignment')); + export const globalRedefinition = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.globalRedefinition')); + export const implicitStringConcat = () => getRawString('Diagnostic.implicitStringConcat'); + export const importCycleDetected = () => getRawString('Diagnostic.importCycleDetected'); + export const importDepthExceeded = () => + new ParameterizedString<{ depth: number }>(getRawString('Diagnostic.importDepthExceeded')); + export const importResolveFailure = () => + new ParameterizedString<{ importName: string; venv: string }>( + getRawString('Diagnostic.importResolveFailure') + ); + export const importSourceResolveFailure = () => + new ParameterizedString<{ importName: string; venv: string }>( + getRawString('Diagnostic.importSourceResolveFailure') + ); + export const importSymbolUnknown = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.importSymbolUnknown')); + export const incompatibleMethodOverride = () => + new ParameterizedString<{ name: string; className: string }>( + getRawString('Diagnostic.incompatibleMethodOverride') + ); + export const inconsistentIndent = () => getRawString('Diagnostic.inconsistentIndent'); + export const inconsistentTabs = () => getRawString('Diagnostic.inconsistentTabs'); + export const initMethodSelfParamTypeVar = () => getRawString('Diagnostic.initMethodSelfParamTypeVar'); + export const initMustReturnNone = () => getRawString('Diagnostic.initMustReturnNone'); + export const initSubclassClsParam = () => getRawString('Diagnostic.initSubclassClsParam'); + export const initSubclassCallFailed = () => 
getRawString('Diagnostic.initSubclassCallFailed'); + export const initVarNotAllowed = () => getRawString('Diagnostic.initVarNotAllowed'); + export const instanceMethodSelfParam = () => getRawString('Diagnostic.instanceMethodSelfParam'); + export const instanceVarOverridesClassVar = () => + new ParameterizedString<{ name: string; className: string }>( + getRawString('Diagnostic.instanceVarOverridesClassVar') + ); + export const instantiateAbstract = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.instantiateAbstract')); + export const instantiateProtocol = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.instantiateProtocol')); + export const internalBindError = () => + new ParameterizedString<{ file: string; message: string }>(getRawString('Diagnostic.internalBindError')); + export const internalParseError = () => + new ParameterizedString<{ file: string; message: string }>(getRawString('Diagnostic.internalParseError')); + export const internalTypeCheckingError = () => + new ParameterizedString<{ file: string; message: string }>( + getRawString('Diagnostic.internalTypeCheckingError') + ); + export const invalidIdentifierChar = () => getRawString('Diagnostic.invalidIdentifierChar'); + export const invalidStubStatement = () => getRawString('Diagnostic.invalidStubStatement'); + export const invalidTokenChars = () => + new ParameterizedString<{ text: string }>(getRawString('Diagnostic.invalidTokenChars')); + export const isInstanceInvalidType = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.isInstanceInvalidType')); + export const isSubclassInvalidType = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.isSubclassInvalidType')); + export const keyValueInSet = () => getRawString('Diagnostic.keyValueInSet'); + export const keywordArgInTypeArgument = () => getRawString('Diagnostic.keywordArgInTypeArgument'); + export const keywordOnlyAfterArgs = () => 
getRawString('Diagnostic.keywordOnlyAfterArgs'); + export const keywordParameterMissing = () => getRawString('Diagnostic.keywordParameterMissing'); + export const keywordSubscriptIllegal = () => getRawString('Diagnostic.keywordSubscriptIllegal'); + export const lambdaReturnTypeUnknown = () => getRawString('Diagnostic.lambdaReturnTypeUnknown'); + export const lambdaReturnTypePartiallyUnknown = () => + new ParameterizedString<{ returnType: string }>( + getRawString('Diagnostic.lambdaReturnTypePartiallyUnknown') + ); + export const listAssignmentMismatch = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.listAssignmentMismatch')); + export const listInAnnotation = () => getRawString('Diagnostic.listInAnnotation'); + export const literalNamedUnicodeEscape = () => getRawString('Diagnostic.literalNamedUnicodeEscape'); + export const literalUnsupportedType = () => getRawString('Diagnostic.literalUnsupportedType'); + export const literalEmptyArgs = () => getRawString('Diagnostic.literalEmptyArgs'); + export const literalNotAllowed = () => getRawString('Diagnostic.literalNotAllowed'); + export const literalNotCallable = () => getRawString('Diagnostic.literalNotCallable'); + export const matchIncompatible = () => getRawString('Diagnostic.matchIncompatible'); + export const matchIsNotExhaustive = () => getRawString('Diagnostic.matchIsNotExhaustive'); + export const maxParseDepthExceeded = () => getRawString('Diagnostic.maxParseDepthExceeded'); + export const memberAccess = () => + new ParameterizedString<{ name: string; type: string }>(getRawString('Diagnostic.memberAccess')); + export const memberDelete = () => + new ParameterizedString<{ name: string; type: string }>(getRawString('Diagnostic.memberDelete')); + export const memberSet = () => + new ParameterizedString<{ name: string; type: string }>(getRawString('Diagnostic.memberSet')); + export const metaclassConflict = () => getRawString('Diagnostic.metaclassConflict'); + export const 
metaclassDuplicate = () => getRawString('Diagnostic.metaclassDuplicate'); + export const metaclassIsGeneric = () => getRawString('Diagnostic.metaclassIsGeneric'); + export const methodNotDefined = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.methodNotDefined')); + export const methodNotDefinedOnType = () => + new ParameterizedString<{ name: string; type: string }>(getRawString('Diagnostic.methodNotDefinedOnType')); + export const methodOrdering = () => getRawString('Diagnostic.methodOrdering'); + export const methodOverridden = () => + new ParameterizedString<{ name: string; className: string; type: string }>( + getRawString('Diagnostic.methodOverridden') + ); + export const methodReturnsNonObject = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.methodReturnsNonObject')); + export const missingSuperCall = () => + new ParameterizedString<{ methodName: string }>(getRawString('Diagnostic.missingSuperCall')); + export const mixingBytesAndStr = () => getRawString('Diagnostic.mixingBytesAndStr'); + export const moduleAsType = () => getRawString('Diagnostic.moduleAsType'); + export const moduleNotCallable = () => getRawString('Diagnostic.moduleNotCallable'); + export const moduleUnknownMember = () => + new ParameterizedString<{ memberName: string; moduleName: string }>( + getRawString('Diagnostic.moduleUnknownMember') + ); + export const namedExceptAfterCatchAll = () => getRawString('Diagnostic.namedExceptAfterCatchAll'); + export const namedParamAfterParamSpecArgs = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.namedParamAfterParamSpecArgs')); + export const namedTupleEmptyName = () => getRawString('Diagnostic.namedTupleEmptyName'); + export const namedTupleEntryRedeclared = () => + new ParameterizedString<{ name: string; className: string }>( + getRawString('Diagnostic.namedTupleEntryRedeclared') + ); + export const namedTupleFieldUnderscore = () => 
getRawString('Diagnostic.namedTupleFieldUnderscore'); + export const namedTupleFirstArg = () => getRawString('Diagnostic.namedTupleFirstArg'); + export const namedTupleMultipleInheritance = () => getRawString('Diagnostic.namedTupleMultipleInheritance'); + export const namedTupleNameKeyword = () => getRawString('Diagnostic.namedTupleNameKeyword'); + export const namedTupleNameType = () => getRawString('Diagnostic.namedTupleNameType'); + export const namedTupleNameUnique = () => getRawString('Diagnostic.namedTupleNameUnique'); + export const namedTupleNoTypes = () => getRawString('Diagnostic.namedTupleNoTypes'); + export const namedTupleSecondArg = () => getRawString('Diagnostic.namedTupleSecondArg'); + export const newClsParam = () => getRawString('Diagnostic.newClsParam'); + export const newTypeAnyOrUnknown = () => getRawString('Diagnostic.newTypeAnyOrUnknown'); + export const newTypeBadName = () => getRawString('Diagnostic.newTypeBadName'); + export const newTypeLiteral = () => getRawString('Diagnostic.newTypeLiteral'); + export const newTypeNameMismatch = () => getRawString('Diagnostic.newTypeNameMismatch'); + export const newTypeNotAClass = () => getRawString('Diagnostic.newTypeNotAClass'); + export const newTypeParamCount = () => getRawString('Diagnostic.newTypeParamCount'); + export const newTypeProtocolClass = () => getRawString('Diagnostic.newTypeProtocolClass'); + export const nonDefaultAfterDefault = () => getRawString('Diagnostic.nonDefaultAfterDefault'); + export const noneNotCallable = () => getRawString('Diagnostic.noneNotCallable'); + export const noneNotIterable = () => getRawString('Diagnostic.noneNotIterable'); + export const noneNotSubscriptable = () => getRawString('Diagnostic.noneNotSubscriptable'); + export const noneNotUsableWith = () => getRawString('Diagnostic.noneNotUsableWith'); + export const noneNotUsableWithAsync = () => getRawString('Diagnostic.noneNotUsableWithAsync'); + export const noneOperator = () => + new ParameterizedString<{ 
operator: string }>(getRawString('Diagnostic.noneOperator')); + export const noneUnknownMember = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.noneUnknownMember')); + export const nonLocalInModule = () => getRawString('Diagnostic.nonLocalInModule'); + export const nonLocalNoBinding = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.nonLocalNoBinding')); + export const nonLocalReassignment = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.nonLocalReassignment')); + export const nonLocalRedefinition = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.nonLocalRedefinition')); + export const noOverload = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.noOverload')); + export const nonlocalTypeParam = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.nonlocalTypeParam')); + export const noReturnContainsReturn = () => getRawString('Diagnostic.noReturnContainsReturn'); + export const noReturnContainsYield = () => getRawString('Diagnostic.noReturnContainsYield'); + export const noReturnReturnsNone = () => getRawString('Diagnostic.noReturnReturnsNone'); + export const notRequiredArgCount = () => getRawString('Diagnostic.notRequiredArgCount'); + export const notRequiredNotInTypedDict = () => getRawString('Diagnostic.notRequiredNotInTypedDict'); + export const objectNotCallable = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.objectNotCallable')); + export const obscuredClassDeclaration = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.obscuredClassDeclaration')); + export const obscuredFunctionDeclaration = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.obscuredFunctionDeclaration')); + export const obscuredMethodDeclaration = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.obscuredMethodDeclaration')); + 
export const obscuredParameterDeclaration = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.obscuredParameterDeclaration')); + export const obscuredTypeAliasDeclaration = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.obscuredTypeAliasDeclaration')); + export const obscuredVariableDeclaration = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.obscuredVariableDeclaration')); + export const operatorLessOrGreaterDeprecated = () => getRawString('Diagnostic.operatorLessOrGreaterDeprecated'); + export const optionalExtraArgs = () => getRawString('Diagnostic.optionalExtraArgs'); + export const orPatternIrrefutable = () => getRawString('Diagnostic.orPatternIrrefutable'); + export const orPatternMissingName = () => getRawString('Diagnostic.orPatternMissingName'); + export const overlappingKeywordArgs = () => + new ParameterizedString<{ names: string }>(getRawString('Diagnostic.overlappingKeywordArgs')); + export const overlappingOverload = () => + new ParameterizedString<{ name: string; obscured: number; obscuredBy: number }>( + getRawString('Diagnostic.overlappingOverload') + ); + export const overloadAbstractMismatch = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.overloadAbstractMismatch')); + export const overloadAbstractImplMismatch = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.overloadAbstractImplMismatch')); + export const overloadClassMethodInconsistent = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.overloadClassMethodInconsistent')); + export const overloadFinalImpl = () => getRawString('Diagnostic.overloadFinalImpl'); + export const overloadFinalNoImpl = () => getRawString('Diagnostic.overloadFinalNoImpl'); + export const overloadImplementationMismatch = () => + new ParameterizedString<{ name: string; index: number }>( + getRawString('Diagnostic.overloadImplementationMismatch') + ); + export 
const overloadOverrideImpl = () => getRawString('Diagnostic.overloadOverrideImpl'); + export const overloadOverrideNoImpl = () => getRawString('Diagnostic.overloadOverrideNoImpl'); + export const overloadReturnTypeMismatch = () => + new ParameterizedString<{ name: string; newIndex: number; prevIndex: number }>( + getRawString('Diagnostic.overloadReturnTypeMismatch') + ); + export const overloadStaticMethodInconsistent = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.overloadStaticMethodInconsistent')); + export const overloadWithoutImplementation = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.overloadWithoutImplementation')); + export const overriddenMethodNotFound = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.overriddenMethodNotFound')); + export const overrideDecoratorMissing = () => + new ParameterizedString<{ name: string; className: string }>( + getRawString('Diagnostic.overrideDecoratorMissing') + ); + export const paramAfterKwargsParam = () => getRawString('Diagnostic.paramAfterKwargsParam'); + export const paramAlreadyAssigned = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.paramAlreadyAssigned')); + export const paramAnnotationMissing = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.paramAnnotationMissing')); + export const paramNameMissing = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.paramNameMissing')); + export const paramSpecArgsKwargsDuplicate = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.paramSpecArgsKwargsDuplicate')); + export const paramSpecArgsKwargsUsage = () => getRawString('Diagnostic.paramSpecArgsKwargsUsage'); + export const paramSpecArgsMissing = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.paramSpecArgsMissing')); + export const paramSpecArgsUsage = () => getRawString('Diagnostic.paramSpecArgsUsage'); + 
export const paramSpecAssignedName = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.paramSpecAssignedName')); + export const paramSpecContext = () => getRawString('Diagnostic.paramSpecContext'); + export const paramSpecDefaultNotTuple = () => getRawString('Diagnostic.paramSpecDefaultNotTuple'); + export const paramSpecFirstArg = () => getRawString('Diagnostic.paramSpecFirstArg'); + export const paramSpecKwargsUsage = () => getRawString('Diagnostic.paramSpecKwargsUsage'); + export const paramSpecNotUsedByOuterScope = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.paramSpecNotUsedByOuterScope')); + export const paramSpecUnknownArg = () => getRawString('Diagnostic.paramSpecUnknownArg'); + export const paramSpecUnknownMember = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.paramSpecUnknownMember')); + export const paramSpecUnknownParam = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.paramSpecUnknownParam')); + export const paramTypeCovariant = () => getRawString('Diagnostic.paramTypeCovariant'); + export const paramTypeUnknown = () => + new ParameterizedString<{ paramName: string }>(getRawString('Diagnostic.paramTypeUnknown')); + export const paramAssignmentMismatch = () => + new ParameterizedString<{ sourceType: string; paramType: string }>( + getRawString('Diagnostic.paramAssignmentMismatch') + ); + export const paramTypePartiallyUnknown = () => + new ParameterizedString<{ paramName: string }>(getRawString('Diagnostic.paramTypePartiallyUnknown')); + export const parenthesizedContextManagerIllegal = () => + getRawString('Diagnostic.parenthesizedContextManagerIllegal'); + export const patternNeverMatches = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.patternNeverMatches')); + export const positionArgAfterNamedArg = () => getRawString('Diagnostic.positionArgAfterNamedArg'); + export const positionArgAfterUnpackedDictArg = () => 
getRawString('Diagnostic.positionArgAfterUnpackedDictArg'); + export const privateImportFromPyTypedModule = () => + new ParameterizedString<{ name: string; module: string }>( + getRawString('Diagnostic.privateImportFromPyTypedModule') + ); + export const positionOnlyAfterArgs = () => getRawString('Diagnostic.positionOnlyAfterArgs'); + export const positionOnlyAfterKeywordOnly = () => getRawString('Diagnostic.positionOnlyAfterKeywordOnly'); + export const positionOnlyAfterNon = () => getRawString('Diagnostic.positionOnlyAfterNon'); + export const positionOnlyIncompatible = () => getRawString('Diagnostic.positionOnlyIncompatible'); + export const positionOnlyFirstParam = () => getRawString('Diagnostic.positionOnlyFirstParam'); + export const privateUsedOutsideOfClass = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.privateUsedOutsideOfClass')); + export const privateUsedOutsideOfModule = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.privateUsedOutsideOfModule')); + export const propertyOverridden = () => + new ParameterizedString<{ name: string; className: string }>(getRawString('Diagnostic.propertyOverridden')); + export const propertyStaticMethod = () => getRawString('Diagnostic.propertyStaticMethod'); + export const protectedUsedOutsideOfClass = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.protectedUsedOutsideOfClass')); + export const protocolBaseClass = () => + new ParameterizedString<{ classType: string; baseType: string }>( + getRawString('Diagnostic.protocolBaseClass') + ); + export const protocolBaseClassWithTypeArgs = () => getRawString('Diagnostic.protocolBaseClassWithTypeArgs'); + export const protocolIllegal = () => getRawString('Diagnostic.protocolIllegal'); + export const protocolNotAllowed = () => getRawString('Diagnostic.protocolNotAllowed'); + export const protocolTypeArgMustBeTypeParam = () => getRawString('Diagnostic.protocolTypeArgMustBeTypeParam'); + export 
const protocolUnsafeOverlap = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.protocolUnsafeOverlap')); + export const protocolVarianceContravariant = () => + new ParameterizedString<{ variable: string; class: string }>( + getRawString('Diagnostic.protocolVarianceContravariant') + ); + export const protocolVarianceCovariant = () => + new ParameterizedString<{ variable: string; class: string }>( + getRawString('Diagnostic.protocolVarianceCovariant') + ); + export const protocolVarianceInvariant = () => + new ParameterizedString<{ variable: string; class: string }>( + getRawString('Diagnostic.protocolVarianceInvariant') + ); + export const pyrightCommentInvalidDiagnosticBoolValue = () => + getRawString('Diagnostic.pyrightCommentInvalidDiagnosticBoolValue'); + export const pyrightCommentInvalidDiagnosticSeverityValue = () => + getRawString('Diagnostic.pyrightCommentInvalidDiagnosticSeverityValue'); + export const pyrightCommentMissingDirective = () => getRawString('Diagnostic.pyrightCommentMissingDirective'); + export const pyrightCommentNotOnOwnLine = () => getRawString('Diagnostic.pyrightCommentNotOnOwnLine'); + export const pyrightCommentUnknownDirective = () => + new ParameterizedString<{ directive: string }>(getRawString('Diagnostic.pyrightCommentUnknownDirective')); + export const pyrightCommentUnknownDiagnosticRule = () => + new ParameterizedString<{ rule: string }>(getRawString('Diagnostic.pyrightCommentUnknownDiagnosticRule')); + export const readOnlyArgCount = () => getRawString('Diagnostic.readOnlyArgCount'); + export const readOnlyNotInTypedDict = () => getRawString('Diagnostic.readOnlyNotInTypedDict'); + export const recursiveDefinition = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.recursiveDefinition')); + export const relativeImportNotAllowed = () => getRawString('Diagnostic.relativeImportNotAllowed'); + export const requiredArgCount = () => getRawString('Diagnostic.requiredArgCount'); + export 
const requiredNotInTypedDict = () => getRawString('Diagnostic.requiredNotInTypedDict'); + export const returnInAsyncGenerator = () => getRawString('Diagnostic.returnInAsyncGenerator'); + export const returnMissing = () => + new ParameterizedString<{ returnType: string }>(getRawString('Diagnostic.returnMissing')); + export const returnInExceptionGroup = () => getRawString('Diagnostic.returnInExceptionGroup'); + export const returnOutsideFunction = () => getRawString('Diagnostic.returnOutsideFunction'); + export const returnTypeContravariant = () => getRawString('Diagnostic.returnTypeContravariant'); + export const returnTypeMismatch = () => + new ParameterizedString<{ exprType: string; returnType: string }>( + getRawString('Diagnostic.returnTypeMismatch') + ); + export const returnTypeUnknown = () => getRawString('Diagnostic.returnTypeUnknown'); + export const returnTypePartiallyUnknown = () => + new ParameterizedString<{ returnType: string }>(getRawString('Diagnostic.returnTypePartiallyUnknown')); + export const revealLocalsArgs = () => getRawString('Diagnostic.revealLocalsArgs'); + export const revealLocalsNone = () => getRawString('Diagnostic.revealLocalsNone'); + export const revealTypeArgs = () => getRawString('Diagnostic.revealTypeArgs'); + export const revealTypeExpectedTextArg = () => getRawString('Diagnostic.revealTypeExpectedTextArg'); + export const revealTypeExpectedTextMismatch = () => + new ParameterizedString<{ expected: string; received: string }>( + getRawString('Diagnostic.revealTypeExpectedTextMismatch') + ); + export const revealTypeExpectedTypeMismatch = () => + new ParameterizedString<{ expected: string; received: string }>( + getRawString('Diagnostic.revealTypeExpectedTypeMismatch') + ); + export const selfTypeContext = () => getRawString('Diagnostic.selfTypeContext'); + export const selfTypeMetaclass = () => getRawString('Diagnostic.selfTypeMetaclass'); + export const selfTypeWithTypedSelfOrCls = () => 
getRawString('Diagnostic.selfTypeWithTypedSelfOrCls'); + export const sentinelBadName = () => getRawString('Diagnostic.sentinelBadName'); + export const sentinelNameMismatch = () => getRawString('Diagnostic.sentinelNameMismatch'); + export const sentinelParamCount = () => getRawString('Diagnostic.sentinelParamCount'); + export const setterGetterTypeMismatch = () => getRawString('Diagnostic.setterGetterTypeMismatch'); + export const starPatternInAsPattern = () => getRawString('Diagnostic.starPatternInAsPattern'); + export const starPatternInOrPattern = () => getRawString('Diagnostic.starPatternInOrPattern'); + export const singleOverload = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.singleOverload')); + export const slotsAttributeError = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.slotsAttributeError')); + export const slotsClassVarConflict = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.slotsClassVarConflict')); + export const starStarWildcardNotAllowed = () => getRawString('Diagnostic.starStarWildcardNotAllowed'); + export const staticClsSelfParam = () => getRawString('Diagnostic.staticClsSelfParam'); + export const stringNonAsciiBytes = () => getRawString('Diagnostic.stringNonAsciiBytes'); + export const stringNotSubscriptable = () => getRawString('Diagnostic.stringNotSubscriptable'); + export const stringUnsupportedEscape = () => getRawString('Diagnostic.stringUnsupportedEscape'); + export const stringUnterminated = () => getRawString('Diagnostic.stringUnterminated'); + export const stubFileMissing = () => + new ParameterizedString<{ importName: string }>(getRawString('Diagnostic.stubFileMissing')); + export const stubUsesGetAttr = () => getRawString('Diagnostic.stubUsesGetAttr'); + export const sublistParamsIncompatible = () => getRawString('Diagnostic.sublistParamsIncompatible'); + export const superCallArgCount = () => getRawString('Diagnostic.superCallArgCount'); + 
export const superCallFirstArg = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.superCallFirstArg')); + export const superCallSecondArg = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.superCallSecondArg')); + export const superCallZeroArgForm = () => getRawString('Diagnostic.superCallZeroArgForm'); + export const superCallZeroArgFormStaticMethod = () => + getRawString('Diagnostic.superCallZeroArgFormStaticMethod'); + export const symbolIsUnbound = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.symbolIsUnbound')); + export const symbolIsUndefined = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.symbolIsUndefined')); + export const symbolIsPossiblyUnbound = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.symbolIsPossiblyUnbound')); + export const symbolOverridden = () => + new ParameterizedString<{ name: string; className: string }>(getRawString('Diagnostic.symbolOverridden')); + export const templateStringBytes = () => getRawString('Diagnostic.templateStringBytes'); + export const templateStringIllegal = () => getRawString('Diagnostic.templateStringIllegal'); + export const templateStringUnicode = () => getRawString('Diagnostic.templateStringUnicode'); + export const ternaryNotAllowed = () => getRawString('Diagnostic.ternaryNotAllowed'); + export const totalOrderingMissingMethod = () => getRawString('Diagnostic.totalOrderingMissingMethod'); + export const trailingCommaInFromImport = () => getRawString('Diagnostic.trailingCommaInFromImport'); + export const tryWithoutExcept = () => getRawString('Diagnostic.tryWithoutExcept'); + export const tupleAssignmentMismatch = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.tupleAssignmentMismatch')); + export const tupleInAnnotation = () => getRawString('Diagnostic.tupleInAnnotation'); + export const tupleIndexOutOfRange = () => + new ParameterizedString<{ type: 
string; index: number }>(getRawString('Diagnostic.tupleIndexOutOfRange')); + export const typeAliasIllegalExpressionForm = () => getRawString('Diagnostic.typeAliasIllegalExpressionForm'); + export const typeAliasIsRecursiveDirect = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeAliasIsRecursiveDirect')); + export const typeAliasNotInModuleOrClass = () => getRawString('Diagnostic.typeAliasNotInModuleOrClass'); + export const typeAliasRedeclared = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeAliasRedeclared')); + export const typeAliasStatementIllegal = () => getRawString('Diagnostic.typeAliasStatementIllegal'); + export const typeAliasStatementBadScope = () => getRawString('Diagnostic.typeAliasStatementBadScope'); + export const typeAliasTypeBadScope = () => getRawString('Diagnostic.typeAliasTypeBadScope'); + export const typeAliasTypeBaseClass = () => getRawString('Diagnostic.typeAliasTypeBaseClass'); + export const typeAliasTypeMustBeAssigned = () => getRawString('Diagnostic.typeAliasTypeMustBeAssigned'); + export const typeAliasTypeNameArg = () => getRawString('Diagnostic.typeAliasTypeNameArg'); + export const typeAliasTypeNameMismatch = () => getRawString('Diagnostic.typeAliasTypeNameMismatch'); + export const typeAliasTypeParamInvalid = () => getRawString('Diagnostic.typeAliasTypeParamInvalid'); + export const typeAnnotationCall = () => getRawString('Diagnostic.typeAnnotationCall'); + export const typeAnnotationVariable = () => getRawString('Diagnostic.typeAnnotationVariable'); + export const typeAnnotationWithCallable = () => getRawString('Diagnostic.typeAnnotationWithCallable'); + export const typeArgListExpected = () => getRawString('Diagnostic.typeArgListExpected'); + export const typeArgListNotAllowed = () => getRawString('Diagnostic.typeArgListNotAllowed'); + export const typeArgsExpectingNone = () => + new ParameterizedString<{ name: string 
}>(getRawString('Diagnostic.typeArgsExpectingNone')); + export const typeArgsMismatchOne = () => + new ParameterizedString<{ received: number }>(getRawString('Diagnostic.typeArgsMismatchOne')); + export const typeArgsMissingForAlias = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeArgsMissingForAlias')); + export const typeArgsMissingForClass = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeArgsMissingForClass')); + export const typeArgsTooFew = () => + new ParameterizedString<{ name: string; expected: number; received: number }>( + getRawString('Diagnostic.typeArgsTooFew') + ); + export const typeArgsTooMany = () => + new ParameterizedString<{ name: string; expected: number; received: number }>( + getRawString('Diagnostic.typeArgsTooMany') + ); + export const typeAssignmentMismatch = () => + new ParameterizedString<{ sourceType: string; destType: string }>( + getRawString('Diagnostic.typeAssignmentMismatch') + ); + export const typeAssignmentMismatchWildcard = () => + new ParameterizedString<{ name: string; sourceType: string; destType: string }>( + getRawString('Diagnostic.typeAssignmentMismatchWildcard') + ); + export const typeCallNotAllowed = () => getRawString('Diagnostic.typeCallNotAllowed'); + export const typeCheckOnly = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeCheckOnly')); + export const typeCommentDeprecated = () => getRawString('Diagnostic.typeCommentDeprecated'); + export const typedDictAccess = () => getRawString('Diagnostic.typedDictAccess'); + export const typedDictAssignedName = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typedDictAssignedName')); + export const typedDictBadVar = () => getRawString('Diagnostic.typedDictBadVar'); + export const typedDictBaseClass = () => getRawString('Diagnostic.typedDictBaseClass'); + export const typedDictBoolParam = () => + new ParameterizedString<{ name: string 
}>(getRawString('Diagnostic.typedDictBoolParam')); + export const typedDictClosedExtras = () => + new ParameterizedString<{ name: string; type: string }>(getRawString('Diagnostic.typedDictClosedExtras')); + export const typedDictClosedNoExtras = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typedDictClosedNoExtras')); + export const typedDictClosedFalseNonOpenBase = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typedDictClosedFalseNonOpenBase')); + export const typedDictDelete = () => getRawString('Diagnostic.typedDictDelete'); + export const typedDictEmptyName = () => getRawString('Diagnostic.typedDictEmptyName'); + export const typedDictEntryName = () => getRawString('Diagnostic.typedDictEntryName'); + export const typedDictEntryUnique = () => getRawString('Diagnostic.typedDictEntryUnique'); + export const typedDictExtraArgs = () => getRawString('Diagnostic.typedDictExtraArgs'); + export const typedDictExtraItemsClosed = () => getRawString('Diagnostic.typedDictExtraItemsClosed'); + export const typedDictFieldNotRequiredRedefinition = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typedDictFieldNotRequiredRedefinition')); + export const typedDictFieldReadOnlyRedefinition = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typedDictFieldReadOnlyRedefinition')); + export const typedDictFieldRequiredRedefinition = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typedDictFieldRequiredRedefinition')); + export const typedDictFirstArg = () => getRawString('Diagnostic.typedDictFirstArg'); + export const typedDictInClassPattern = () => getRawString('Diagnostic.typedDictInClassPattern'); + export const typedDictInitsubclassParameter = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typedDictInitsubclassParameter')); + export const typedDictNotAllowed = () => getRawString('Diagnostic.typedDictNotAllowed'); + 
export const typedDictSecondArgDict = () => getRawString('Diagnostic.typedDictSecondArgDict'); + export const typedDictSecondArgDictEntry = () => getRawString('Diagnostic.typedDictSecondArgDictEntry'); + export const typedDictSet = () => getRawString('Diagnostic.typedDictSet'); + export const typeExpectedClass = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.typeExpectedClass')); + export const typeFormArgs = () => getRawString('Diagnostic.typeFormArgs'); + export const typeGuardArgCount = () => getRawString('Diagnostic.typeGuardArgCount'); + export const typeGuardParamCount = () => getRawString('Diagnostic.typeGuardParamCount'); + export const typeIsReturnType = () => + new ParameterizedString<{ type: string; returnType: string }>(getRawString('Diagnostic.typeIsReturnType')); + export const typeNotAwaitable = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.typeNotAwaitable')); + export const typeNotIntantiable = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.typeNotIntantiable')); + export const typeNotIterable = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.typeNotIterable')); + export const typeNotSpecializable = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.typeNotSpecializable')); + export const typeNotSubscriptable = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.typeNotSubscriptable')); + export const typeNotUsableWith = () => + new ParameterizedString<{ type: string; method: string }>(getRawString('Diagnostic.typeNotUsableWith')); + export const typeNotUsableWithAsync = () => + new ParameterizedString<{ type: string; method: string }>( + getRawString('Diagnostic.typeNotUsableWithAsync') + ); + export const typeNotSupportBinaryOperator = () => + new ParameterizedString<{ leftType: string; rightType: string; operator: string }>( + getRawString('Diagnostic.typeNotSupportBinaryOperator') + ); + 
export const typeNotSupportBinaryOperatorBidirectional = () => + new ParameterizedString<{ leftType: string; rightType: string; expectedType: string; operator: string }>( + getRawString('Diagnostic.typeNotSupportBinaryOperatorBidirectional') + ); + export const typeNotSupportUnaryOperator = () => + new ParameterizedString<{ type: string; operator: string }>( + getRawString('Diagnostic.typeNotSupportUnaryOperator') + ); + export const typeNotSupportUnaryOperatorBidirectional = () => + new ParameterizedString<{ type: string; expectedType: string; operator: string }>( + getRawString('Diagnostic.typeNotSupportUnaryOperatorBidirectional') + ); + export const typeParameterBoundNotAllowed = () => getRawString('Diagnostic.typeParameterBoundNotAllowed'); + export const typeParameterConstraintTuple = () => getRawString('Diagnostic.typeParameterConstraintTuple'); + export const typeParameterExistingTypeParameter = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeParameterExistingTypeParameter')); + export const typeParametersMissing = () => getRawString('Diagnostic.typeParametersMissing'); + export const typeParameterNotDeclared = () => + new ParameterizedString<{ name: string; container: string }>( + getRawString('Diagnostic.typeParameterNotDeclared') + ); + export const typePartiallyUnknown = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typePartiallyUnknown')); + export const typeUnknown = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeUnknown')); + export const typeVarAssignedName = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeVarAssignedName')); + export const typeVarAssignmentMismatch = () => + new ParameterizedString<{ type: string; name: string }>( + getRawString('Diagnostic.typeVarAssignmentMismatch') + ); + export const typeVarBoundAndConstrained = () => getRawString('Diagnostic.typeVarBoundAndConstrained'); + export const typeVarBoundGeneric 
= () => getRawString('Diagnostic.typeVarBoundGeneric'); + export const typeVarConstraintGeneric = () => getRawString('Diagnostic.typeVarConstraintGeneric'); + export const typeVarDefaultBoundMismatch = () => getRawString('Diagnostic.typeVarDefaultBoundMismatch'); + export const typeVarDefaultConstraintMismatch = () => + getRawString('Diagnostic.typeVarDefaultConstraintMismatch'); + export const typeVarDefaultIllegal = () => getRawString('Diagnostic.typeVarDefaultIllegal'); + export const typeVarDefaultInvalidTypeVar = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeVarDefaultInvalidTypeVar')); + export const typeVarFirstArg = () => getRawString('Diagnostic.typeVarFirstArg'); + export const typeVarInvalidForMemberVariable = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeVarInvalidForMemberVariable')); + export const typeVarNoMember = () => + new ParameterizedString<{ type: string; name: string }>(getRawString('Diagnostic.typeVarNoMember')); + export const typeVarNotSubscriptable = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.typeVarNotSubscriptable')); + export const typeVarNotUsedByOuterScope = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeVarNotUsedByOuterScope')); + export const typeVarPossiblyUnsolvable = () => + new ParameterizedString<{ name: string; param: string }>( + getRawString('Diagnostic.typeVarPossiblyUnsolvable') + ); + export const typeVarSingleConstraint = () => getRawString('Diagnostic.typeVarSingleConstraint'); + export const typeVarsNotInGenericOrProtocol = () => getRawString('Diagnostic.typeVarsNotInGenericOrProtocol'); + export const typeVarTupleContext = () => getRawString('Diagnostic.typeVarTupleContext'); + export const typeVarTupleDefaultNotUnpacked = () => getRawString('Diagnostic.typeVarTupleDefaultNotUnpacked'); + export const typeVarTupleMustBeUnpacked = () => 
getRawString('Diagnostic.typeVarTupleMustBeUnpacked'); + export const typeVarTupleConstraints = () => getRawString('Diagnostic.typeVarTupleConstraints'); + export const typeVarTupleUnknownParam = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeVarTupleUnknownParam')); + export const typeVarUnknownParam = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeVarUnknownParam')); + export const typeVarUsedByOuterScope = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeVarUsedByOuterScope')); + export const typeVarUsedOnlyOnce = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeVarUsedOnlyOnce')); + export const typeVarVariance = () => getRawString('Diagnostic.typeVarVariance'); + export const typeVarWithDefaultFollowsVariadic = () => + new ParameterizedString<{ variadicName: string; typeVarName: string }>( + getRawString('Diagnostic.typeVarWithDefaultFollowsVariadic') + ); + export const typeVarWithoutDefault = () => + new ParameterizedString<{ name: string; other: string }>(getRawString('Diagnostic.typeVarWithoutDefault')); + export const unaccessedClass = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.unaccessedClass')); + export const unaccessedFunction = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.unaccessedFunction')); + export const unaccessedImport = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.unaccessedImport')); + export const unaccessedSymbol = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.unaccessedSymbol')); + export const unaccessedVariable = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.unaccessedVariable')); + export const unannotatedFunctionSkipped = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.unannotatedFunctionSkipped')); + export const unaryOperationNotAllowed 
= () => getRawString('Diagnostic.unaryOperationNotAllowed'); + export const unexpectedAsyncToken = () => getRawString('Diagnostic.unexpectedAsyncToken'); + export const unexpectedEof = () => getRawString('Diagnostic.unexpectedEof'); + export const unexpectedExprToken = () => getRawString('Diagnostic.unexpectedExprToken'); + export const unexpectedIndent = () => getRawString('Diagnostic.unexpectedIndent'); + export const unexpectedUnindent = () => getRawString('Diagnostic.unexpectedUnindent'); + export const unhashableDictKey = () => getRawString('Diagnostic.unhashableDictKey'); + export const unhashableSetEntry = () => getRawString('Diagnostic.unhashableSetEntry'); + export const unionForwardReferenceNotAllowed = () => getRawString('Diagnostic.unionForwardReferenceNotAllowed'); + export const unionSyntaxIllegal = () => getRawString('Diagnostic.unionSyntaxIllegal'); + export const unionTypeArgCount = () => getRawString('Diagnostic.unionTypeArgCount'); + export const uninitializedAbstractVariables = () => + new ParameterizedString<{ classType: string }>(getRawString('Diagnostic.uninitializedAbstractVariables')); + export const uninitializedInstanceVariable = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.uninitializedInstanceVariable')); + export const unionUnpackedTuple = () => getRawString('Diagnostic.unionUnpackedTuple'); + export const unionUnpackedTypeVarTuple = () => getRawString('Diagnostic.unionUnpackedTypeVarTuple'); + export const unnecessaryCast = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.unnecessaryCast')); + export const unnecessaryIsInstanceAlways = () => + new ParameterizedString<{ testType: string; classType: string }>( + getRawString('Diagnostic.unnecessaryIsInstanceAlways') + ); + export const unnecessaryIsSubclassAlways = () => + new ParameterizedString<{ testType: string; classType: string }>( + getRawString('Diagnostic.unnecessaryIsSubclassAlways') + ); + export const 
unnecessaryIsInstanceNever = () => + new ParameterizedString<{ testType: string; classType: string }>( + getRawString('Diagnostic.unnecessaryIsInstanceNever') + ); + export const unnecessaryIsSubclassNever = () => + new ParameterizedString<{ testType: string; classType: string }>( + getRawString('Diagnostic.unnecessaryIsSubclassNever') + ); + export const unnecessaryPyrightIgnore = () => getRawString('Diagnostic.unnecessaryPyrightIgnore'); + export const unnecessaryPyrightIgnoreRule = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.unnecessaryPyrightIgnoreRule')); + export const unnecessaryTypeIgnore = () => getRawString('Diagnostic.unnecessaryTypeIgnore'); + export const unpackArgCount = () => getRawString('Diagnostic.unpackArgCount'); + export const unpackedArgInTypeArgument = () => getRawString('Diagnostic.unpackedArgInTypeArgument'); + export const unpackedArgWithVariadicParam = () => getRawString('Diagnostic.unpackedArgWithVariadicParam'); + export const unpackedDictArgumentNotMapping = () => getRawString('Diagnostic.unpackedDictArgumentNotMapping'); + export const unpackedDictSubscriptIllegal = () => getRawString('Diagnostic.unpackedDictSubscriptIllegal'); + export const unpackedSubscriptIllegal = () => getRawString('Diagnostic.unpackedSubscriptIllegal'); + export const unpackedTypedDictArgument = () => getRawString('Diagnostic.unpackedTypedDictArgument'); + export const unpackedTypeVarTupleExpected = () => + new ParameterizedString<{ name1: string; name2: string }>( + getRawString('Diagnostic.unpackedTypeVarTupleExpected') + ); + export const unpackExpectedTypedDict = () => getRawString('Diagnostic.unpackExpectedTypedDict'); + export const unpackExpectedTypeVarTuple = () => getRawString('Diagnostic.unpackExpectedTypeVarTuple'); + export const unpackIllegalInComprehension = () => getRawString('Diagnostic.unpackIllegalInComprehension'); + export const unpackInAnnotation = () => getRawString('Diagnostic.unpackInAnnotation'); + export 
const unpackInDict = () => getRawString('Diagnostic.unpackInDict'); + export const unpackInSet = () => getRawString('Diagnostic.unpackInSet'); + export const unpackNotAllowed = () => getRawString('Diagnostic.unpackNotAllowed'); + export const unpackOperatorNotAllowed = () => getRawString('Diagnostic.unpackOperatorNotAllowed'); + export const unpackTuplesIllegal = () => getRawString('Diagnostic.unpackTuplesIllegal'); + export const unreachableCodeCondition = () => getRawString('Diagnostic.unreachableCodeCondition'); + export const unreachableCodeStructure = () => getRawString('Diagnostic.unreachableCodeStructure'); + export const unreachableCodeType = () => getRawString('Diagnostic.unreachableCodeType'); + export const unreachableExcept = () => getRawString('Diagnostic.unreachableExcept'); + export const unsupportedDunderAllOperation = () => getRawString('Diagnostic.unsupportedDunderAllOperation'); + export const unusedCallResult = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.unusedCallResult')); + export const unusedCoroutine = () => getRawString('Diagnostic.unusedCoroutine'); + export const unusedExpression = () => getRawString('Diagnostic.unusedExpression'); + export const varAnnotationIllegal = () => getRawString('Diagnostic.varAnnotationIllegal'); + export const variableFinalOverride = () => + new ParameterizedString<{ className: string; name: string }>( + getRawString('Diagnostic.variableFinalOverride') + ); + export const variadicTypeArgsTooMany = () => getRawString('Diagnostic.variadicTypeArgsTooMany'); + export const variadicTypeParamTooManyAlias = () => + new ParameterizedString<{ names: string }>(getRawString('Diagnostic.variadicTypeParamTooManyAlias')); + export const variadicTypeParamTooManyClass = () => + new ParameterizedString<{ names: string }>(getRawString('Diagnostic.variadicTypeParamTooManyClass')); + export const walrusIllegal = () => getRawString('Diagnostic.walrusIllegal'); + export const walrusNotAllowed = () => 
getRawString('Diagnostic.walrusNotAllowed'); + export const wildcardInFunction = () => getRawString('Diagnostic.wildcardInFunction'); + export const wildcardPatternTypeUnknown = () => getRawString('Diagnostic.wildcardPatternTypeUnknown'); + export const wildcardPatternTypePartiallyUnknown = () => + getRawString('Diagnostic.wildcardPatternTypePartiallyUnknown'); + export const wildcardLibraryImport = () => getRawString('Diagnostic.wildcardLibraryImport'); + export const yieldFromIllegal = () => getRawString('Diagnostic.yieldFromIllegal'); + export const yieldFromOutsideAsync = () => getRawString('Diagnostic.yieldFromOutsideAsync'); + export const yieldOutsideFunction = () => getRawString('Diagnostic.yieldOutsideFunction'); + export const yieldWithinComprehension = () => getRawString('Diagnostic.yieldWithinComprehension'); + export const zeroCaseStatementsFound = () => getRawString('Diagnostic.zeroCaseStatementsFound'); + export const zeroLengthTupleNotAllowed = () => getRawString('Diagnostic.zeroLengthTupleNotAllowed'); + } + + export namespace DiagnosticAddendum { + export const annotatedNotAllowed = () => getRawString('DiagnosticAddendum.annotatedNotAllowed'); + export const argParam = () => + new ParameterizedString<{ paramName: string }>(getRawString('DiagnosticAddendum.argParam')); + export const argParamFunction = () => + new ParameterizedString<{ paramName: string; functionName: string }>( + getRawString('DiagnosticAddendum.argParamFunction') + ); + export const argsParamMissing = () => + new ParameterizedString<{ paramName: string }>(getRawString('DiagnosticAddendum.argsParamMissing')); + export const argsPositionOnly = () => + new ParameterizedString<{ expected: number; received: number }>( + getRawString('DiagnosticAddendum.argsPositionOnly') + ); + export const argumentType = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.argumentType')); + export const argumentTypes = () => + new ParameterizedString<{ types: string 
}>(getRawString('DiagnosticAddendum.argumentTypes')); + export const assignToNone = () => getRawString('DiagnosticAddendum.assignToNone'); + export const asyncHelp = () => getRawString('DiagnosticAddendum.asyncHelp'); + export const baseClassIncompatible = () => + new ParameterizedString<{ baseClass: string; type: string }>( + getRawString('DiagnosticAddendum.baseClassIncompatible') + ); + export const baseClassIncompatibleSubclass = () => + new ParameterizedString<{ baseClass: string; subclass: string; type: string }>( + getRawString('DiagnosticAddendum.baseClassIncompatibleSubclass') + ); + export const baseClassOverriddenType = () => + new ParameterizedString<{ baseClass: string; type: string }>( + getRawString('DiagnosticAddendum.baseClassOverriddenType') + ); + export const baseClassOverridesType = () => + new ParameterizedString<{ baseClass: string; type: string }>( + getRawString('DiagnosticAddendum.baseClassOverridesType') + ); + export const bytesTypePromotions = () => getRawString('DiagnosticAddendum.bytesTypePromotions'); + export const conditionalRequiresBool = () => + new ParameterizedString<{ operandType: string; boolReturnType: string }>( + getRawString('DiagnosticAddendum.conditionalRequiresBool') + ); + export const dataClassFrozen = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.dataClassFrozen')); + export const dataClassFieldLocation = () => getRawString('DiagnosticAddendum.dataClassFieldLocation'); + export const dataProtocolUnsupported = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.dataProtocolUnsupported')); + export const descriptorAccessBindingFailed = () => + new ParameterizedString<{ name: string; className: string }>( + getRawString('DiagnosticAddendum.descriptorAccessBindingFailed') + ); + export const descriptorAccessCallFailed = () => + new ParameterizedString<{ name: string; className: string }>( + getRawString('DiagnosticAddendum.descriptorAccessCallFailed') + 
); + export const finalMethod = () => getRawString('DiagnosticAddendum.finalMethod'); + export const functionParamDefaultMissing = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.functionParamDefaultMissing')); + export const functionParamName = () => + new ParameterizedString<{ destName: string; srcName: string }>( + getRawString('DiagnosticAddendum.functionParamName') + ); + export const functionParamPositionOnly = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.functionParamPositionOnly')); + export const functionReturnTypeMismatch = () => + new ParameterizedString<{ sourceType: string; destType: string }>( + getRawString('DiagnosticAddendum.functionReturnTypeMismatch') + ); + export const functionTooFewParams = () => + new ParameterizedString<{ expected: number; received: number }>( + getRawString('DiagnosticAddendum.functionTooFewParams') + ); + export const genericClassNotAllowed = () => getRawString('DiagnosticAddendum.genericClassNotAllowed'); + export const incompatibleGetter = () => getRawString('DiagnosticAddendum.incompatibleGetter'); + export const incompatibleSetter = () => getRawString('DiagnosticAddendum.incompatibleSetter'); + export const incompatibleDeleter = () => getRawString('DiagnosticAddendum.incompatibleDeleter'); + export const initMethodLocation = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.initMethodLocation')); + export const initMethodSignature = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.initMethodSignature')); + export const initSubclassLocation = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.initSubclassLocation')); + export const invariantSuggestionDict = () => getRawString('DiagnosticAddendum.invariantSuggestionDict'); + export const invariantSuggestionList = () => getRawString('DiagnosticAddendum.invariantSuggestionList'); + export const 
invariantSuggestionSet = () => getRawString('DiagnosticAddendum.invariantSuggestionSet'); + export const isinstanceClassNotSupported = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.isinstanceClassNotSupported')); + export const functionTooManyParams = () => + new ParameterizedString<{ expected: number; received: number }>( + getRawString('DiagnosticAddendum.functionTooManyParams') + ); + export const keyNotRequired = () => + new ParameterizedString<{ name: string; type: string }>(getRawString('DiagnosticAddendum.keyNotRequired')); + export const keyReadOnly = () => + new ParameterizedString<{ name: string; type: string }>(getRawString('DiagnosticAddendum.keyReadOnly')); + export const keyRequiredDeleted = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.keyRequiredDeleted')); + export const keyUndefined = () => + new ParameterizedString<{ name: string; type: string }>(getRawString('DiagnosticAddendum.keyUndefined')); + export const kwargsParamMissing = () => + new ParameterizedString<{ paramName: string }>(getRawString('DiagnosticAddendum.kwargsParamMissing')); + export const listAssignmentMismatch = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.listAssignmentMismatch')); + export const literalAssignmentMismatch = () => + new ParameterizedString<{ sourceType: string; destType: string }>( + getRawString('DiagnosticAddendum.literalAssignmentMismatch') + ); + export const literalNotAllowed = () => getRawString('DiagnosticAddendum.literalNotAllowed'); + export const matchIsNotExhaustiveType = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.matchIsNotExhaustiveType')); + export const matchIsNotExhaustiveHint = () => getRawString('DiagnosticAddendum.matchIsNotExhaustiveHint'); + export const memberAssignment = () => + new ParameterizedString<{ type: string; name: string; classType: string }>( + 
getRawString('DiagnosticAddendum.memberAssignment') + ); + export const memberIsAbstract = () => + new ParameterizedString<{ type: string; name: string }>( + getRawString('DiagnosticAddendum.memberIsAbstract') + ); + export const memberIsAbstractMore = () => + new ParameterizedString<{ count: number }>(getRawString('DiagnosticAddendum.memberIsAbstractMore')); + export const memberIsClassVarInProtocol = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberIsClassVarInProtocol')); + export const memberIsInitVar = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberIsInitVar')); + export const memberIsInvariant = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberIsInvariant')); + export const memberIsNotClassVarInClass = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberIsNotClassVarInClass')); + export const memberIsNotClassVarInProtocol = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberIsNotClassVarInProtocol')); + export const memberIsNotReadOnlyInProtocol = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberIsNotReadOnlyInProtocol')); + export const memberIsReadOnlyInProtocol = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberIsReadOnlyInProtocol')); + export const memberIsWritableInProtocol = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberIsWritableInProtocol')); + export const memberSetClassVar = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberSetClassVar')); + export const memberTypeMismatch = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberTypeMismatch')); + export const memberUnknown = () => + new ParameterizedString<{ name: string 
}>(getRawString('DiagnosticAddendum.memberUnknown')); + export const metaclassConflict = () => + new ParameterizedString<{ metaclass1: string; metaclass2: string }>( + getRawString('DiagnosticAddendum.metaclassConflict') + ); + export const missingGetter = () => getRawString('DiagnosticAddendum.missingGetter'); + export const missingSetter = () => getRawString('DiagnosticAddendum.missingSetter'); + export const missingDeleter = () => getRawString('DiagnosticAddendum.missingDeleter'); + export const namedParamMissingInDest = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.namedParamMissingInDest')); + export const namedParamMissingInSource = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.namedParamMissingInSource')); + export const namedParamTypeMismatch = () => + new ParameterizedString<{ name: string; sourceType: string; destType: string }>( + getRawString('DiagnosticAddendum.namedParamTypeMismatch') + ); + export const namedTupleNotAllowed = () => getRawString('DiagnosticAddendum.namedTupleNotAllowed'); + export const newMethodLocation = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.newMethodLocation')); + export const newMethodSignature = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.newMethodSignature')); + export const noneNotAllowed = () => getRawString('DiagnosticAddendum.noneNotAllowed'); + export const newTypeClassNotAllowed = () => getRawString('DiagnosticAddendum.newTypeClassNotAllowed'); + export const noOverloadAssignable = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.noOverloadAssignable')); + export const orPatternMissingName = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.orPatternMissingName')); + export const overloadIndex = () => + new ParameterizedString<{ index: number }>(getRawString('DiagnosticAddendum.overloadIndex')); + 
export const overloadSignature = () => getRawString('DiagnosticAddendum.overloadSignature'); + export const overloadNotAssignable = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.overloadNotAssignable')); + export const overriddenMethod = () => getRawString('DiagnosticAddendum.overriddenMethod'); + export const overriddenSymbol = () => getRawString('DiagnosticAddendum.overriddenSymbol'); + export const overrideIsInvariant = () => getRawString('DiagnosticAddendum.overrideIsInvariant'); + export const overrideInvariantMismatch = () => + new ParameterizedString<{ overrideType: string; baseType: string }>( + getRawString('DiagnosticAddendum.overrideInvariantMismatch') + ); + export const overrideNoOverloadMatches = () => getRawString('DiagnosticAddendum.overrideNoOverloadMatches'); + export const overrideNotClassMethod = () => getRawString('DiagnosticAddendum.overrideNotClassMethod'); + export const overrideNotInstanceMethod = () => getRawString('DiagnosticAddendum.overrideNotInstanceMethod'); + export const overrideNotStaticMethod = () => getRawString('DiagnosticAddendum.overrideNotStaticMethod'); + export const overrideOverloadNoMatch = () => getRawString('DiagnosticAddendum.overrideOverloadNoMatch'); + export const overrideOverloadOrder = () => getRawString('DiagnosticAddendum.overrideOverloadOrder'); + export const overrideParamKeywordNoDefault = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.overrideParamKeywordNoDefault')); + export const overrideParamKeywordType = () => + new ParameterizedString<{ name: string; baseType: string; overrideType: string }>( + getRawString('DiagnosticAddendum.overrideParamKeywordType') + ); + export const overrideParamName = () => + new ParameterizedString<{ index: number; baseName: string; overrideName: string }>( + getRawString('DiagnosticAddendum.overrideParamName') + ); + export const overrideParamNameExtra = () => + new ParameterizedString<{ name: string 
}>(getRawString('DiagnosticAddendum.overrideParamNameExtra')); + export const overrideParamNameMissing = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.overrideParamNameMissing')); + export const overrideParamNamePositionOnly = () => + new ParameterizedString<{ index: number; baseName: string }>( + getRawString('DiagnosticAddendum.overrideParamNamePositionOnly') + ); + export const overrideParamNoDefault = () => + new ParameterizedString<{ index: number }>(getRawString('DiagnosticAddendum.overrideParamNoDefault')); + export const overrideParamType = () => + new ParameterizedString<{ index: number; baseType: string; overrideType: string }>( + getRawString('DiagnosticAddendum.overrideParamType') + ); + export const overridePositionalParamCount = () => + new ParameterizedString<{ baseCount: number; overrideCount: number }>( + getRawString('DiagnosticAddendum.overridePositionalParamCount') + ); + export const overrideReturnType = () => + new ParameterizedString<{ baseType: string; overrideType: string }>( + getRawString('DiagnosticAddendum.overrideReturnType') + ); + export const overrideType = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.overrideType')); + export const paramAssignment = () => + new ParameterizedString<{ index: number; sourceType: string; destType: string }>( + getRawString('DiagnosticAddendum.paramAssignment') + ); + export const paramSpecMissingInOverride = () => getRawString('DiagnosticAddendum.paramSpecMissingInOverride'); + export const paramType = () => + new ParameterizedString<{ paramType: string }>(getRawString('DiagnosticAddendum.paramType')); + export const privateImportFromPyTypedSource = () => + new ParameterizedString<{ module: string }>( + getRawString('DiagnosticAddendum.privateImportFromPyTypedSource') + ); + export const propertyAccessFromProtocolClass = () => + getRawString('DiagnosticAddendum.propertyAccessFromProtocolClass'); + export const 
propertyMethodIncompatible = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.propertyMethodIncompatible')); + export const propertyMethodMissing = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.propertyMethodMissing')); + export const propertyMissingDeleter = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.propertyMissingDeleter')); + export const propertyMissingSetter = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.propertyMissingSetter')); + export const protocolIncompatible = () => + new ParameterizedString<{ sourceType: string; destType: string }>( + getRawString('DiagnosticAddendum.protocolIncompatible') + ); + export const protocolMemberMissing = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.protocolMemberMissing')); + export const protocolRequiresRuntimeCheckable = () => + getRawString('DiagnosticAddendum.protocolRequiresRuntimeCheckable'); + export const protocolSourceIsNotConcrete = () => + new ParameterizedString<{ sourceType: string; destType: string }>( + getRawString('DiagnosticAddendum.protocolSourceIsNotConcrete') + ); + export const protocolUnsafeOverlap = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.protocolUnsafeOverlap')); + export const pyrightCommentIgnoreTip = () => getRawString('DiagnosticAddendum.pyrightCommentIgnoreTip'); + export const readOnlyAttribute = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.readOnlyAttribute')); + export const seeDeclaration = () => getRawString('DiagnosticAddendum.seeDeclaration'); + export const seeClassDeclaration = () => getRawString('DiagnosticAddendum.seeClassDeclaration'); + export const seeFunctionDeclaration = () => getRawString('DiagnosticAddendum.seeFunctionDeclaration'); + export const seeMethodDeclaration = () => 
getRawString('DiagnosticAddendum.seeMethodDeclaration'); + export const seeParameterDeclaration = () => getRawString('DiagnosticAddendum.seeParameterDeclaration'); + export const seeTypeAliasDeclaration = () => getRawString('DiagnosticAddendum.seeTypeAliasDeclaration'); + export const seeVariableDeclaration = () => getRawString('DiagnosticAddendum.seeVariableDeclaration'); + export const tupleEntryTypeMismatch = () => + new ParameterizedString<{ entry: number }>(getRawString('DiagnosticAddendum.tupleEntryTypeMismatch')); + export const tupleAssignmentMismatch = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.tupleAssignmentMismatch')); + export const tupleSizeIndeterminateSrc = () => + new ParameterizedString<{ expected: number }>(getRawString('DiagnosticAddendum.tupleSizeIndeterminateSrc')); + export const tupleSizeIndeterminateSrcDest = () => + new ParameterizedString<{ expected: number }>( + getRawString('DiagnosticAddendum.tupleSizeIndeterminateSrcDest') + ); + export const tupleSizeMismatch = () => + new ParameterizedString<{ expected: number; received: number }>( + getRawString('DiagnosticAddendum.tupleSizeMismatch') + ); + export const tupleSizeMismatchIndeterminateDest = () => + new ParameterizedString<{ expected: number; received: number }>( + getRawString('DiagnosticAddendum.tupleSizeMismatchIndeterminateDest') + ); + export const typeAliasInstanceCheck = () => getRawString('DiagnosticAddendum.typeAliasInstanceCheck'); + export const typeAssignmentMismatch = () => + new ParameterizedString<{ sourceType: string; destType: string }>( + getRawString('DiagnosticAddendum.typeAssignmentMismatch') + ); + export const typeBound = () => + new ParameterizedString<{ sourceType: string; destType: string; name: string }>( + getRawString('DiagnosticAddendum.typeBound') + ); + export const typeConstrainedTypeVar = () => + new ParameterizedString<{ type: string; name: string }>( + 
getRawString('DiagnosticAddendum.typeConstrainedTypeVar') + ); + export const typedDictBaseClass = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.typedDictBaseClass')); + export const typedDictClassNotAllowed = () => getRawString('DiagnosticAddendum.typedDictClassNotAllowed'); + export const typedDictExtraFieldNotAllowed = () => + new ParameterizedString<{ name: string; type: string }>( + getRawString('DiagnosticAddendum.typedDictExtraFieldNotAllowed') + ); + export const typedDictExtraFieldTypeMismatch = () => + new ParameterizedString<{ name: string; type: string }>( + getRawString('DiagnosticAddendum.typedDictExtraFieldTypeMismatch') + ); + export const typedDictFieldMissing = () => + new ParameterizedString<{ name: string; type: string }>( + getRawString('DiagnosticAddendum.typedDictFieldMissing') + ); + export const typedDictClosedExtraNotAllowed = () => + new ParameterizedString<{ name: string }>( + getRawString('DiagnosticAddendum.typedDictClosedExtraNotAllowed') + ); + export const typedDictClosedExtraTypeMismatch = () => + new ParameterizedString<{ name: string; type: string }>( + getRawString('DiagnosticAddendum.typedDictClosedExtraTypeMismatch') + ); + export const typedDictClosedFieldNotReadOnly = () => + new ParameterizedString<{ name: string }>( + getRawString('DiagnosticAddendum.typedDictClosedFieldNotReadOnly') + ); + export const typedDictClosedFieldNotRequired = () => + new ParameterizedString<{ name: string }>( + getRawString('DiagnosticAddendum.typedDictClosedFieldNotRequired') + ); + export const typedDictFieldNotReadOnly = () => + new ParameterizedString<{ name: string; type: string }>( + getRawString('DiagnosticAddendum.typedDictFieldNotReadOnly') + ); + export const typedDictFieldNotRequired = () => + new ParameterizedString<{ name: string; type: string }>( + getRawString('DiagnosticAddendum.typedDictFieldNotRequired') + ); + export const typedDictFieldRequired = () => + new ParameterizedString<{ name: 
string; type: string }>( + getRawString('DiagnosticAddendum.typedDictFieldRequired') + ); + export const typedDictFieldTypeMismatch = () => + new ParameterizedString<{ name: string; type: string }>( + getRawString('DiagnosticAddendum.typedDictFieldTypeMismatch') + ); + export const typedDictFieldUndefined = () => + new ParameterizedString<{ name: string; type: string }>( + getRawString('DiagnosticAddendum.typedDictFieldUndefined') + ); + export const typedDictKeyAccess = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.typedDictKeyAccess')); + export const typedDictNotAllowed = () => getRawString('DiagnosticAddendum.typedDictNotAllowed'); + export const typeIncompatible = () => + new ParameterizedString<{ sourceType: string; destType: string }>( + getRawString('DiagnosticAddendum.typeIncompatible') + ); + export const typeNotClass = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.typeNotClass')); + export const typeParamSpec = () => + new ParameterizedString<{ type: string; name: string }>(getRawString('DiagnosticAddendum.typeParamSpec')); + export const typeNotStringLiteral = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.typeNotStringLiteral')); + export const typeOfSymbol = () => + new ParameterizedString<{ name: string; type: string }>(getRawString('DiagnosticAddendum.typeOfSymbol')); + export const typeUnsupported = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.typeUnsupported')); + export const typeVarDefaultOutOfScope = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.typeVarDefaultOutOfScope')); + export const typeVarIsContravariant = () => + new ParameterizedString<{ name: string; sourceType: string; destType: string }>( + getRawString('DiagnosticAddendum.typeVarIsContravariant') + ); + export const typeVarIsCovariant = () => + new ParameterizedString<{ name: string; sourceType: 
string; destType: string }>( + getRawString('DiagnosticAddendum.typeVarIsCovariant') + ); + export const typeVarIsInvariant = () => + new ParameterizedString<{ name: string; sourceType: string; destType: string }>( + getRawString('DiagnosticAddendum.typeVarIsInvariant') + ); + export const typeVarsMissing = () => + new ParameterizedString<{ names: string }>(getRawString('DiagnosticAddendum.typeVarsMissing')); + export const typeVarNotAllowed = () => getRawString('DiagnosticAddendum.typeVarNotAllowed'); + export const typeVarTupleRequiresKnownLength = () => + getRawString('DiagnosticAddendum.typeVarTupleRequiresKnownLength'); + export const typeVarUnnecessarySuggestion = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.typeVarUnnecessarySuggestion')); + export const typeVarUnsolvableRemedy = () => getRawString('DiagnosticAddendum.typeVarUnsolvableRemedy'); + export const unhashableType = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.unhashableType')); + export const uninitializedAbstractVariable = () => + new ParameterizedString<{ name: string; classType: string }>( + getRawString('DiagnosticAddendum.uninitializedAbstractVariable') + ); + export const unreachableExcept = () => + new ParameterizedString<{ exceptionType: string; parentType: string }>( + getRawString('DiagnosticAddendum.unreachableExcept') + ); + export const useDictInstead = () => getRawString('DiagnosticAddendum.useDictInstead'); + export const useListInstead = () => getRawString('DiagnosticAddendum.useListInstead'); + export const useTupleInstead = () => getRawString('DiagnosticAddendum.useTupleInstead'); + export const useTypeInstead = () => getRawString('DiagnosticAddendum.useTypeInstead'); + export const varianceMismatchForClass = () => + new ParameterizedString<{ typeVarName: string; className: string }>( + getRawString('DiagnosticAddendum.varianceMismatchForClass') + ); + export const varianceMismatchForTypeAlias = () => + 
new ParameterizedString<{ typeVarName: string; typeAliasParam: string }>( + getRawString('DiagnosticAddendum.varianceMismatchForTypeAlias') + ); + } + + export namespace CodeAction { + export const createTypeStub = () => getRawString('CodeAction.createTypeStub'); + export const createTypeStubFor = () => + new ParameterizedString<{ moduleName: string }>(getRawString('CodeAction.createTypeStubFor')); + export const executingCommand = () => getRawString('CodeAction.executingCommand'); + export const filesToAnalyzeOne = () => getRawString('CodeAction.filesToAnalyzeOne'); + export const filesToAnalyzeCount = () => + new ParameterizedString<{ count: number }>(getRawString('CodeAction.filesToAnalyzeCount')); + export const findingReferences = () => getRawString('CodeAction.findingReferences'); + export const organizeImports = () => getRawString('CodeAction.organizeImports'); + } + + export namespace Completion { + export const autoImportDetail = () => getRawString('Completion.autoImportDetail'); + export const indexValueDetail = () => getRawString('Completion.indexValueDetail'); + } + + export namespace Service { + export const longOperation = () => getRawString('Service.longOperation'); + } +} + +export const LocMessage = Localizer.Diagnostic; +export const LocAddendum = Localizer.DiagnosticAddendum; diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.cs.json b/python-parser/packages/pyright-internal/src/localization/package.nls.cs.json new file mode 100644 index 00000000..230553aa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.cs.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "Vytvořit zástupnou proceduru (Stub) typu", + "createTypeStubFor": "Vytvořit zástupnou proceduru typu (Stub) pro modul {moduleName}", + "executingCommand": "Spouští se příkaz", + "filesToAnalyzeCount": "Počet souborů k analýze: {count}", + "filesToAnalyzeOne": "1 soubor k analýze", + "findingReferences": 
"Hledají se odkazy", + "organizeImports": "Uspořádat direktivy Import" + }, + "Completion": { + "autoImportDetail": "Automatický import", + "indexValueDetail": "Hodnota indexu" + }, + "Diagnostic": { + "abstractMethodInvocation": "Metodu „{method}“ nelze volat, protože je abstraktní a neimplementovaná.", + "annotatedMetadataInconsistent": "Typ metadat s poznámkami „{metadataType}“ není kompatibilní s typem „{type}“.", + "annotatedParamCountMismatch": "Počet poznámek parametrů se neshoduje: očekával(o/y) se {expected}, ale přijal(o/y) se {received}.", + "annotatedTypeArgMissing": "Byl očekáván jeden argument typu a jedna nebo více poznámek pro Annotated", + "annotationBytesString": "Výrazy typu nemůžou používat řetězcové literály bajtů.", + "annotationFormatString": "Výrazy typu nemůžou používat formátovací řetězcové literály (f-strings).", + "annotationNotSupported": "Poznámka typu není pro tento příkaz podporována", + "annotationRawString": "Výrazy typu nemůžou používat literály nezpracovaného řetězce.", + "annotationSpansStrings": "Výrazy typu nemůžou zahrnovat více řetězcových literálů.", + "annotationStringEscape": "Výrazy typu nemůžou obsahovat řídicí znaky.", + "annotationTemplateString": "Výrazy typu nemůžou používat řetězcové literály šablon (f-strings)", + "argAssignment": "Argument typu {argType} není možné přiřadit k parametru typu {paramType}", + "argAssignmentFunction": "Argument typu {argType} není možné přiřadit k parametru typu {paramType} ve funkci {functionName}", + "argAssignmentParam": "Argument typu {argType} není možné přiřadit k parametru {paramName} typu {paramType}", + "argAssignmentParamFunction": "Argument typu {argType} není možné přiřadit k parametru {paramName} typu {paramType} ve funkci {functionName}", + "argMissingForParam": "Chybí argument pro parametr {name}", + "argMissingForParams": "Chybí argumenty pro parametry {names}", + "argMorePositionalExpectedCount": "Očekával se tento počet dalších pozičních argumentů: {expected}", + 
"argMorePositionalExpectedOne": "Očekával se 1 další poziční argument", + "argPositional": "Očekávaný poziční argument", + "argPositionalExpectedCount": "Očekávaný počet pozičních argumentů: {expected}", + "argPositionalExpectedOne": "Očekával se 1 poziční argument", + "argTypePartiallyUnknown": "Typ argumentu je částečně neznámý", + "argTypeUnknown": "Typ argumentu je neznámý", + "assertAlwaysTrue": "Výraz Assert se vždy vyhodnotí jako true", + "assertTypeArgs": "assert_type očekává dva poziční argumenty", + "assertTypeTypeMismatch": "Neshoda assert_type: očekávalo se {expected}, ale přijalo se {received}", + "assignmentExprComprehension": "Cíl výrazu přiřazení „{name}“ nemůže používat stejný název jako porozumění cíli", + "assignmentExprContext": "Výraz přiřazení musí být uvnitř modulu, funkce nebo výrazu lambda", + "assignmentExprInSubscript": "Výrazy přiřazení v dolním indexu se podporují jenom v Pythonu 3.10 a novějším", + "assignmentInProtocol": "Proměnné instance nebo třídy v rámci třídy Protocol musí být explicitně deklarovány v těle třídy", + "assignmentTargetExpr": "Výraz nemůže být cílem přiřazení", + "asyncNotInAsyncFunction": "Použití „async“ není povolené mimo funkci async", + "awaitIllegal": "Použití operátoru await vyžaduje Python 3.5 nebo novější", + "awaitNotAllowed": "Výrazy typu nemůžou používat výraz await.", + "awaitNotInAsync": "Operátor await je povolený jenom v rámci asynchronní funkce", + "backticksIllegal": "V Pythonu 3.x nejsou podporovány výrazy obklopené zpětnými tečkami; místo toho použijte repr", + "baseClassCircular": "Třída se nemůže odvozovat od sebe sama", + "baseClassFinal": "Základní třída {type} je označená jako final a nemůže být podtřídou", + "baseClassIncompatible": "Základní třídy typu {type} jsou vzájemně nekompatibilní", + "baseClassInvalid": "Argument třídy musí být základní třída", + "baseClassMethodTypeIncompatible": "Základní třídy pro třídu {classType} definují metodu {name} nekompatibilním způsobem", + 
"baseClassUnknown": "Typ základní třídy je neznámý, co zakrývá typ odvozené třídy", + "baseClassVariableTypeIncompatible": "Základní třídy pro třídu {classType} definují proměnnou {name} nekompatibilním způsobem", + "binaryOperationNotAllowed": "Ve výrazu typu není povolený binární operátor.", + "bindParamMissing": "Nepovedlo se vytvořit vazbu metody {methodName}, protože chybí parametr self nebo cls", + "bindTypeMismatch": "Nepovedlo se vytvořit vazbu metody „{methodName}“, protože „{type}“ nejde přiřadit k parametru „{paramName}“", + "breakInExceptionGroup": "V bloku except* není povolená možnost break.", + "breakOutsideLoop": "„break“ se dá použít jenom ve smyčce", + "bytesUnsupportedEscape": "Nepodporovaná řídicí sekvence v literálu typu bytes", + "callableExtraArgs": "Pro Callable se očekávaly pouze dva argumenty typu", + "callableFirstArg": "Očekával se seznam typů parametrů nebo ...", + "callableNotInstantiable": "Není možné vytvořit instanci typu {type}", + "callableSecondArg": "Očekával se návratový typ jako druhý argument typu pro Callable", + "casePatternIsIrrefutable": "Nevyvratitelný vzorec je povolený jenom pro poslední výraz velikosti písmen", + "classAlreadySpecialized": "Typ {type} je už specializovaný", + "classDecoratorTypeUnknown": "Dekoratér netypové třídy překrývá typ třídy. 
dekoratér se ignoruje", + "classDefinitionCycle": "Definice třídy pro „{name}“ závisí sama na sobě", + "classGetItemClsParam": "Přepsání __class_getitem__ by mělo mít parametr cls", + "classMethodClsParam": "Metody třídy by měly mít parametr „cls“", + "classNotRuntimeSubscriptable": "Dolní index pro třídu {name} vygeneruje výjimku modulu runtime; výraz typu uzavřete do uvozovek.", + "classPatternBuiltInArgPositional": "Vzor třídy přijímá pouze poziční dílčí vzor", + "classPatternNewType": "„{type}“ nelze použít ve vzoru třídy, protože je definován pomocí NewType", + "classPatternPositionalArgCount": "Příliš mnoho pozičních vzorů pro třídu \"{type}\"; očekávalo se {expected}, ale přijalo se {received}", + "classPatternTypeAlias": "Typ „{type}“ nelze použít ve vzorci třídy, protože se jedná o specializovaný alias typu", + "classPropertyDeprecated": "Vlastnosti třídy jsou v Pythonu 3.11 zastaralé a v Pythonu 3.13 se nebudou podporovat.", + "classTypeParametersIllegal": "Syntaxe parametru typu třídy vyžaduje Python 312 nebo novější", + "classVarFirstArgMissing": "Za ClassVar byl očekáván argument typu", + "classVarNotAllowed": "ClassVar se v tomto kontextu nepovoluje", + "classVarOverridesInstanceVar": "Proměnná třídy {name} přepíše proměnnou instance se stejným názvem ve třídě {className}", + "classVarTooManyArgs": "Za „ClassVar“ byl očekáván pouze jeden argument typu", + "classVarWithTypeVar": "Typ ClassVar nemůže obsahovat proměnné typu", + "clsSelfParamTypeMismatch": "Typ parametru „{name}“ musí být nadtyp třídy „{classType}“", + "codeTooComplexToAnalyze": "Kód je příliš složitý na analýzu; snižte složitost refaktorizací do podprogramů nebo redukcí podmíněných cest kódu", + "collectionAliasInstantiation": "Nelze vytvořit instanci typu „{type}“. 
Použijte místo toho „{alias}“", + "comparisonAlwaysFalse": "Podmínka se vždy vyhodnotí jako False, protože typy {leftType} a {rightType} se nepřekrývají", + "comparisonAlwaysTrue": "Podmínka se vždy vyhodnotí jako True, protože typy {leftType} a {rightType} se nepřekrývají", + "comprehensionInDict": "Porozumění není možné použít s jinými položkami slovníku", + "comprehensionInSet": "Porozumění nelze použít s jinými položkami sady (set).", + "concatenateContext": "Možnost „Concatenate“ není v tomto kontextu povolená.", + "concatenateParamSpecMissing": "Poslední argument typu pro „Concatenate“ musí být „ParamSpec“ nebo „...“", + "concatenateTypeArgsMissing": "Možnost „Concatenate“ vyžaduje alespoň dva argumenty typu", + "conditionalOperandInvalid": "Neplatný podmíněný operand typu {type}", + "constantRedefinition": "„{name}“ je konstanta (protože je velkými písmeny) a nedá se předefinovat", + "constructorParametersMismatch": "Neshoda mezi signaturou __new__ a __init__ ve třídě“ {classType}“", + "containmentAlwaysFalse": "Výraz se vždy vyhodnotí jako False, protože typy „{leftType}“ a „{rightType}“ se nepřekrývají", + "containmentAlwaysTrue": "Výraz se vždy vyhodnotí jako True, protože typy „{leftType}“ a „{rightType}“ se nepřekrývají.", + "continueInExceptionGroup": "V bloku except* není povolená možnost continue.", + "continueOutsideLoop": "continue se dá použít jenom ve smyčce", + "coroutineInConditionalExpression": "Podmíněný výraz odkazuje na korutinu, která se vždy vyhodnotí jako True.", + "dataClassBaseClassFrozen": "Nezablokovaná třída nemůže dědit z zmrazené třídy", + "dataClassBaseClassNotFrozen": "Zablokovaná třída nemůže dědit z třídy, která není zablokovaná", + "dataClassConverterFunction": "Argument typu {argType} není platný převaděč pro pole {fieldName} typu {fieldType}", + "dataClassConverterOverloads": "Žádná přetížení {funcName} nejsou platné převaděče pro pole {fieldName} typu {fieldType}", + "dataClassFieldInheritedDefault": "{fieldName} přepíše 
pole se stejným názvem, ale chybí mu výchozí hodnota.", + "dataClassFieldWithDefault": "Pole bez výchozích hodnot se nemůžou zobrazit po polích s výchozími hodnotami", + "dataClassFieldWithPrivateName": "Pole datové třídy nemůže používat privátní název", + "dataClassFieldWithoutAnnotation": "Pole dataclass bez poznámky typu způsobí výjimku modulu runtime", + "dataClassPostInitParamCount": "Datová třída __post_init__ má nesprávný počet parametrů; počet polí InitVar je {expected}", + "dataClassPostInitType": "Neshoda typu parametru metody __post_init__ datové třídy pro pole {fieldName}", + "dataClassSlotsOverwrite": "__slots__ je už ve třídě definovaný", + "dataClassTransformExpectedBoolLiteral": "Očekával se výraz, který se staticky vyhodnotí jako True nebo False", + "dataClassTransformFieldSpecifier": "Očekávala se řazená kolekce členů (tuple) tříd nebo funkcí, ale byl přijat typ „{type}“.", + "dataClassTransformPositionalParam": "Všechny argumenty dataclass_transform musí být argumenty klíčových slov", + "dataClassTransformUnknownArgument": "Argument {name} není v dataclass_transform podporován", + "dataProtocolInSubclassCheck": "Datové protokoly (které zahrnují atributy bez metody) nejsou ve voláních issubclass povolené.", + "declaredReturnTypePartiallyUnknown": "Deklarovaný návratový typ {returnType} je částečně neznámý", + "declaredReturnTypeUnknown": "Deklarovaný návratový typ je neznámý", + "defaultValueContainsCall": "Volání funkcí a měnitelné objekty nejsou povoleny ve výrazu výchozí hodnoty parametru", + "defaultValueNotAllowed": "Parametr s * nebo ** nemůže mít výchozí hodnotu", + "delTargetExpr": "Výraz se nedá odstranit", + "deprecatedClass": "Třída {name} je zastaralá", + "deprecatedConstructor": "Konstruktor pro třídu {name} je zastaralý", + "deprecatedDescriptorDeleter": "Metoda „__delete__“ pro popisovač „{name}“ je zastaralá", + "deprecatedDescriptorGetter": "Metoda „__get__“ pro popisovač „{name}“ je zastaralá", + "deprecatedDescriptorSetter": 
"Metoda „__set__“ pro popisovač „{name}“ je zastaralá", + "deprecatedFunction": "Funkce {name} je zastaralá.", + "deprecatedMethod": "Metoda {name} ve třídě {className} je zastaralá.", + "deprecatedPropertyDeleter": "Metoda deleter pro property „{name}“ je zastaralá.", + "deprecatedPropertyGetter": "Metoda getter pro property „{name}“ je zastaralá.", + "deprecatedPropertySetter": "Metoda setter pro property „{name}“ je zastaralá.", + "deprecatedType": "Tento typ je zastaralý jako Python {version}; místo toho použijte {replacement}", + "dictExpandIllegalInComprehension": "Rozšíření slovníku není v porozumění povoleno", + "dictInAnnotation": "Výraz slovníku není ve výrazu typu povolený.", + "dictKeyValuePairs": "Položky slovníku musí obsahovat páry klíč/hodnota", + "dictUnpackIsNotMapping": "Očekávalo se mapování pro operátor rozbalení slovníku", + "dunderAllSymbolNotPresent": "{name} je zadáno v __all__, ale v modulu se nenachází", + "duplicateArgsParam": "Je povolený jenom jeden parametr *", + "duplicateBaseClass": "Duplicitní základní třída není povolena", + "duplicateCapturePatternTarget": "Cíl zachytávání {name} se v rámci stejného vzoru nemůže vyskytovat více než jednou", + "duplicateCatchAll": "Je povolena pouze jedna klauzule catch-all except", + "duplicateEnumMember": "Člen Enum {name} je už deklarovaný.", + "duplicateGenericAndProtocolBase": "Je povolena pouze jedna základní třída Generic[...] 
nebo Protocol[...].", + "duplicateImport": "Import {importName} je importován více než jednou", + "duplicateKeywordOnly": "Je povolený jenom jeden oddělovač *", + "duplicateKwargsParam": "Je povolený jenom jeden parametr **", + "duplicateParam": "duplicitní parametr {name}", + "duplicatePositionOnly": "Je povolený jenom jeden parametr „/“", + "duplicateStarPattern": "V sekvenci vzorů je povolený jenom jeden vzor „*“", + "duplicateStarStarPattern": "Je povolena pouze jedna položka „**“", + "duplicateUnpack": "V seznamu (list) je povolena pouze jedna operace rozbalení.", + "ellipsisAfterUnpacked": "„…“ nelze použít s rozbalenou kolekcí TypeVarTuple nebo tuple.", + "ellipsisContext": "„...“ se v tomto kontextu nepovoluje", + "ellipsisSecondArg": "„...“ je povoleno pouze jako druhý ze dvou argumentů", + "enumClassOverride": "Třída Enum {name} je final a nemůže být podtřídou.", + "enumMemberDelete": "Člen Enum {name} se nedá odstranit.", + "enumMemberSet": "Člen Enum {name} se nedá přiřadit.", + "enumMemberTypeAnnotation": "Poznámky typu nejsou pro členy enum povolené.", + "exceptGroupMismatch": "Příkaz Try nemůže obsahovat jak except, tak i except*.", + "exceptGroupRequiresType": "Syntaxe skupiny výjimek (\"except*\") vyžaduje typ výjimky.", + "exceptRequiresParens": "Před Pythonem 3.14 musí být více typů výjimek v závorkách", + "exceptWithAsRequiresParens": "Při použití „as“ musí být více typů výjimek v závorkách", + "exceptionGroupIncompatible": "Syntaxe skupiny výjimek (\"except*\") vyžaduje Python 3.11 nebo novější", + "exceptionGroupTypeIncorrect": "Typ výjimky v except* se nedá odvodit z BaseGroupException.", + "exceptionTypeIncorrect": "„{type}“ se neodvozuje od BaseException", + "exceptionTypeNotClass": "{type} není platná třída výjimky", + "exceptionTypeNotInstantiable": "Konstruktor pro výjimku typu {type} vyžaduje jeden nebo více argumentů", + "expectedAfterDecorator": "Očekávaná deklarace funkce nebo třídy po dekoratéru", + "expectedArrow": "Byl očekáván 
znak -> následovaný anotací návratového typu", + "expectedAsAfterException": "Za typem výjimky byl očekáván znak „as“", + "expectedAssignRightHandExpr": "Byl očekáván výraz napravo od znaku =", + "expectedBinaryRightHandExpr": "Očekával se výraz napravo od operátoru", + "expectedBoolLiteral": "Očekávala se hodnota True nebo False", + "expectedCase": "Očekával se příkaz case", + "expectedClassName": "Očekával se název třídy", + "expectedCloseBrace": "{ nebyla uzavřena", + "expectedCloseBracket": "[ nebyla uzavřena", + "expectedCloseParen": "( nebyla uzavřena", + "expectedColon": "Očekával se znak :", + "expectedComplexNumberLiteral": "Očekával se komplexní číselný literál pro porovnávání vzorů", + "expectedDecoratorExpr": "Forma výrazu není podporována pro dekorátor před verzí Python 3.9", + "expectedDecoratorName": "Očekával se název dekoratéru", + "expectedDecoratorNewline": "Na konci dekoratéru byl očekáván nový řádek", + "expectedDelExpr": "Za del se očekával výraz", + "expectedElse": "Očekávalo se else", + "expectedEquals": "Očekával se znak =", + "expectedExceptionClass": "Neplatná třída nebo objekt výjimky", + "expectedExceptionObj": "Byl očekáván objekt výjimky, třída výjimky nebo None", + "expectedExpr": "Očekávaný výraz", + "expectedFunctionAfterAsync": "Očekávaná definice funkce po „async“", + "expectedFunctionName": "Za def se očekával název funkce", + "expectedIdentifier": "Očekávaný identifikátor", + "expectedImport": "Očekával se import", + "expectedImportAlias": "Za as byl očekáván symbol", + "expectedImportSymbols": "Po možnosti import se očekával jeden nebo více názvů symbolů.", + "expectedIn": "Očekávalo se in", + "expectedInExpr": "Za in byl očekáván výraz", + "expectedIndentedBlock": "Očekával se odsazený blok", + "expectedMemberName": "Za tečkou (.) 
byl očekáván název atributu.", + "expectedModuleName": "Očekávaný název modulu", + "expectedNameAfterAs": "Za „as“ se očekával název symbolu", + "expectedNamedParameter": "Parametr klíčového slova musí následovat za znakem *", + "expectedNewline": "Očekával se nový řádek", + "expectedNewlineOrSemicolon": "Příkazy musí být oddělené novými řádky nebo středníky", + "expectedOpenParen": "Očekával se znak „(“", + "expectedParamName": "Očekával se název parametru", + "expectedPatternExpr": "Očekávaný výraz vzoru", + "expectedPatternSubjectExpr": "Očekávaný výraz předmětu vzoru", + "expectedPatternValue": "Očekávaný výraz hodnoty vzoru ve formátu a.b", + "expectedReturnExpr": "Za return se očekával výraz", + "expectedSliceIndex": "Očekávaný výraz indexu nebo řezu", + "expectedTypeNotString": "Očekával se typ, ale přijal se řetězcový literál", + "expectedTypeParameterName": "Očekávaný název parametru typu", + "expectedYieldExpr": "Očekávaný výraz v příkazu yield", + "finalClassIsAbstract": "Třída „{type}“ je označena jako final a musí implementovat všechny abstraktní symboly.", + "finalContext": "Final se v tomto kontextu nepovoluje", + "finalInLoop": "Proměnnou Final nelze přiřadit ve smyčce.", + "finalMethodOverride": "Metoda {name} nemůže přepsat metodu final definovanou ve třídě {className}.", + "finalNonMethod": "Funkci „{name}“ nelze označit @final, protože se nejedná o metodu.", + "finalReassigned": "„{name}“ se deklaruje jako Final a nedá se znovu přiřadit", + "finalRedeclaration": "{name} se dříve deklarovalo jako Final", + "finalRedeclarationBySubclass": "{name} se nedá deklarovat znovu, protože nadřazená třída {className} ji deklaruje jako Final.", + "finalTooManyArgs": "Za Final byl očekáván jeden argument typu", + "finalUnassigned": "{name} se deklaruje jako Final, ale hodnota není přiřazená.", + "finallyBreak": "K ukončení bloku „finally“ nelze použít „break“.", + "finallyContinue": "K ukončení bloku „finally“ nelze použít „continue“.", + "finallyReturn": "K 
ukončení bloku „finally“ nelze použít „return“.", + "formatStringBrace": "Jednoduchá pravá složená závorka není v literálu f-string povolena. Použijte dvojitou pravou složenou závorku", + "formatStringBytes": "Formátovací řetězcové literály (f-strings) nemůžou být binární", + "formatStringDebuggingIllegal": "Specifikátor ladění F-string „=“ vyžaduje Python 3.8 nebo novější", + "formatStringEscape": "Řídicí sekvence (zpětné lomítko) není povolená v části výrazu f-string před Python 3.12", + "formatStringExpectedConversion": "V řetězci f-string byl za znakem ! očekáván specifikátor převodu", + "formatStringIllegal": "Formátovací řetězcové literály (f-string) vyžadují Python 3.6 nebo novější", + "formatStringInPattern": "Formátovací řetězec není ve vzoru povolený", + "formatStringNestedFormatSpecifier": "Příliš hluboko vnořené výrazy v rámci specifikátoru formátovacího řetězce", + "formatStringNestedQuote": "Řetězce vnořené v řetězci f-string nemůžou používat stejný znak uvozovek jako řetězec f-string před Python 3.12", + "formatStringTemplate": "Formátovací řetězcové literály (f-string) nemůžou být také řetězce šablon (t-strings)", + "formatStringUnicode": "Formátovací řetězcové literály (f-strings) nemůžou být unicode", + "formatStringUnterminated": "Neukončený výraz v f-string; očekává se „}“", + "functionDecoratorTypeUnknown": "Dekorátor netypové funkce překrývá typ funkce; ignoruje se dekoratér", + "functionInConditionalExpression": "Podmíněný výraz odkazuje na funkci, která se vždy vyhodnotí jako True", + "functionTypeParametersIllegal": "Syntaxe parametru typu funkce vyžaduje Python 3.12 nebo novější", + "futureImportLocationNotAllowed": "Importy z __future__ musí být na začátku souboru", + "generatorAsyncReturnType": "Návratový typ funkce asynchronního generátoru musí být kompatibilní s typem AsyncGenerator[{yieldType}, Any]", + "generatorNotParenthesized": "Výrazy generátoru musí být v závorkách, pokud nejsou jediným argumentem", + "generatorSyncReturnType": 
"Návratový typ funkce generátoru musí být kompatibilní s Generator[{yieldType}, Any, Any]", + "genericBaseClassNotAllowed": "Základní třídu „Generic“ nejde použít se syntaxí parametru typu", + "genericClassAssigned": "Obecný typ třídy není možné přiřadit", + "genericClassDeleted": "Obecný typ třídy nelze odstranit", + "genericInstanceVariableAccess": "Přístup k obecné proměnné instance prostřednictvím třídy je nejednoznačný.", + "genericNotAllowed": "Generic není v tomto kontextu platný", + "genericTypeAliasBoundTypeVar": "Alias obecného typu v rámci třídy nemůže používat proměnné vázaného typu {names}", + "genericTypeArgMissing": "Generic vyžaduje alespoň jeden argument typu", + "genericTypeArgTypeVar": "Argument typu pro Generic musí být proměnná typu", + "genericTypeArgUnique": "Argumenty typu pro Generic musí být jedinečné", + "globalReassignment": "{name} je přiřazen před deklarací global.", + "globalRedefinition": "Název {name} už je deklarován jako global.", + "implicitStringConcat": "Implicitní zřetězení řetězců není povolené", + "importCycleDetected": "V řetězci importu byl zjištěn cyklus", + "importDepthExceeded": "Hloubka řetězu importu překročila {depth}", + "importResolveFailure": "Import {importName} se nepovedlo vyřešit", + "importSourceResolveFailure": "Import {importName} se nepovedlo přeložit ze zdroje", + "importSymbolUnknown": "{name} je neznámý symbol importu", + "incompatibleMethodOverride": "Metoda {name} přepisuje třídu {className} nekompatibilním způsobem", + "inconsistentIndent": "Množství zrušeného odsazení neodpovídá předchozímu odsazení", + "inconsistentTabs": "Nekonzistentní použití tabulátorů a mezer v odsazení", + "initMethodSelfParamTypeVar": "Anotace typu pro parametr self metody __init__ nemůže obsahovat proměnné typu s oborem třídy.", + "initMustReturnNone": "Návratový typ __init__ musí být None", + "initSubclassCallFailed": "Nesprávné argumenty klíčového slova pro metodu __init_subclass__", + "initSubclassClsParam": "Přepsání 
__init_subclass__ by mělo mít parametr cls", + "initVarNotAllowed": "InitVar se v tomto kontextu nepovoluje.", + "instanceMethodSelfParam": "Metody instance by měly mít parametr self", + "instanceVarOverridesClassVar": "Proměnná instance „{name}“ přepíše proměnnou třídy se stejným názvem ve třídě „{className}“", + "instantiateAbstract": "Nelze vytvořit instanci abstraktní třídy „{type}“", + "instantiateProtocol": "Nelze vytvořit instanci třídy Protocol „{type}“.", + "internalBindError": "Při vytváření vazby souboru {file} došlo k vnitřní chybě: {message}", + "internalParseError": "Při analýze souboru {file} došlo k vnitřní chybě: {message}", + "internalTypeCheckingError": "Při kontrole typu souboru {file} došlo k vnitřní chybě: {message}", + "invalidIdentifierChar": "Neplatný znak v identifikátoru", + "invalidStubStatement": "Příkaz je v souboru zástupné procedury (stub) typu bezvýznamný.", + "invalidTokenChars": "Neplatný znak „{text}“ v tokenu", + "isInstanceInvalidType": "Druhý argument pro „isinstance“ musí být třída nebo řazená kolekce členů (tuple) tříd.", + "isSubclassInvalidType": "Druhý argument pro issubclass musí být třída nebo řazená kolekce členů (tuple) tříd.", + "keyValueInSet": "Páry klíč-hodnota nejsou v rámci sady (set) povoleny.", + "keywordArgInTypeArgument": "Argumenty klíčových slov nelze použít v seznamech argumentů typu", + "keywordOnlyAfterArgs": "Oddělovač argumentů jen pro klíčová slova není povolený za parametrem *", + "keywordParameterMissing": "Jeden nebo více parametrů klíčového slova musí následovat za parametrem „*“", + "keywordSubscriptIllegal": "Argumenty klíčových slov v dolních indexech nejsou podporovány", + "lambdaReturnTypePartiallyUnknown": "Návratový typ lambda {returnType} je částečně neznámý", + "lambdaReturnTypeUnknown": "Návratový typ výrazu lambda je neznámý", + "listAssignmentMismatch": "Výraz s typem {type} se nedá přiřadit k cílovému seznamu", + "listInAnnotation": "Výraz List není ve výrazu typu povolený.", + 
"literalEmptyArgs": "Za literálem (Literal) se očekával jeden nebo více argumentů typu.", + "literalNamedUnicodeEscape": "Pojmenované řídicí sekvence Unicode nejsou v poznámkách řetězců Literal podporovány.", + "literalNotAllowed": "„Literal“ nejde v tomto kontextu použít bez argumentu typu.", + "literalNotCallable": "Není možné vytvořit instanci typu Literal.", + "literalUnsupportedType": "Argumenty typu pro Literal musí být None, hodnota literálu (int, bool, str nebo bytes) nebo hodnota enum.", + "matchIncompatible": "Příkazy match vyžadují Python 3.10 nebo novější", + "matchIsNotExhaustive": "Případy v rámci příkazu match nezpracovávají kompletně všechny hodnoty", + "maxParseDepthExceeded": "Byla překročena maximální hloubka analýzy; rozdělte výraz na dílčí výrazy", + "memberAccess": "Nelze získat přístup k atributu {name} pro třídu {type}.", + "memberDelete": "Nelze odstranit atribut {name} pro třídu {type}.", + "memberSet": "Nelze přiřadit k atributu {name} pro třídu {type}.", + "metaclassConflict": "Metatřída odvozené třídy musí být podtřídou metatříd všech jejích základních tříd", + "metaclassDuplicate": "Je možné zadat pouze jednu metatřídu", + "metaclassIsGeneric": "Metatřída nemůže být obecná", + "methodNotDefined": "Metoda {name} není definována", + "methodNotDefinedOnType": "Metoda {name} není u typu {type} definována", + "methodOrdering": "Není možné vytvořit konzistentní řazení metod", + "methodOverridden": "„{name}“ přepisuje metodu se stejným názvem ve třídě „{className}“ s nekompatibilním typem {type}", + "methodReturnsNonObject": "Metoda {name} nevrací objekt", + "missingSuperCall": "Metoda {methodName} nevolá metodu se stejným názvem v nadřazené třídě", + "mixingBytesAndStr": "Hodnoty bytes a str nelze zřetězit.", + "moduleAsType": "Modul nejde použít jako typ", + "moduleNotCallable": "Modul není volatelný", + "moduleUnknownMember": "{memberName} není známý atribut modulu {moduleName}.", + "namedExceptAfterCatchAll": "Za klauzulí catch-all except 
se nemůže objevit pojmenovaná klauzule except", + "namedParamAfterParamSpecArgs": "Parametr klíčového slova {name} se nemůže objevit v signatuře za parametrem ParamSpec args.", + "namedTupleEmptyName": "Názvy v pojmenované řazené kolekci členů (tuple) nemůžou být prázdné.", + "namedTupleEntryRedeclared": "{name} nejde přepsat, protože nadřazená třída {className} je pojmenovaná řazená kolekce členů (tuple).", + "namedTupleFieldUnderscore": "Názvy polí pojmenované řazené kolekce členů (Named tuple) nemůžou začínat podtržítkem.", + "namedTupleFirstArg": "Jako první argument byl očekáván název pojmenované třídy řazené kolekce členů (tuple).", + "namedTupleMultipleInheritance": "Vícenásobná dědičnost s NamedTuple se nepodporuje", + "namedTupleNameKeyword": "Názvy polí nemůžou být klíčové slovo.", + "namedTupleNameType": "Očekávala se řazená kolekce členů (tuple) se dvěma položkami určující název a typ položky.", + "namedTupleNameUnique": "Názvy v pojmenované řazené kolekci členů (tuple) musí být jedinečné.", + "namedTupleNoTypes": "namedtuple neposkytuje žádné typy pro položky tuple; místo toho použijte NamedTuple", + "namedTupleSecondArg": "Jako druhý argument byl očekáván pojmenovaný seznam (list) řazené kolekce členů (tuple).", + "newClsParam": "Přepsání __new__ by mělo mít parametr cls", + "newTypeAnyOrUnknown": "Druhý argument pro NewType musí být známá třída, nikoli Any nebo Unknown.", + "newTypeBadName": "Prvním argumentem pro NewType musí být řetězcový literál", + "newTypeLiteral": "Typ NewType není možné použít s typem Literal", + "newTypeNameMismatch": "Typ NewType musí být přiřazen proměnné se stejným názvem.", + "newTypeNotAClass": "Očekávaná třída jako druhý argument pro NewType", + "newTypeParamCount": "NewType vyžaduje dva poziční argumenty", + "newTypeProtocolClass": "NewType nelze použít se strukturálním typem (třída Protocol nebo TypedDict).", + "noOverload": "Zadaným argumentům neodpovídají žádná přetížení pro {name}", + "noReturnContainsReturn": 
"Funkce s deklarovaným návratovým typem NoReturn nemůže obsahovat příkaz return.", + "noReturnContainsYield": "Funkce s deklarovaným návratovým typem NoReturn nemůže obsahovat příkaz yield", + "noReturnReturnsNone": "Funkce s deklarovaným návratovým typem „NoReturn“ nemůže vrátit „None“.", + "nonDefaultAfterDefault": "Nevýchozí argument následuje za výchozím argumentem", + "nonLocalInModule": "Deklarace Nonlocal není povolená na úrovni modulu.", + "nonLocalNoBinding": "Nenašla se žádná vazba pro nonlocal {name}.", + "nonLocalReassignment": "{name} je přiřazeno před deklarací nonlocal.", + "nonLocalRedefinition": "{name} již bylo deklarováno jako nonlocal.", + "noneNotCallable": "Objekt typu „None“ nelze volat.", + "noneNotIterable": "Objekt typu None není možné použít jako iterovatelnou hodnotu", + "noneNotSubscriptable": "Objekt typu „None“ nelze zadat jako dolní index", + "noneNotUsableWith": "Objekt typu None není možné použít s typem with.", + "noneNotUsableWithAsync": "Objekt typu None není možné použít s typem async with.", + "noneOperator": "Operátor {operator} se pro None nepodporuje", + "noneUnknownMember": "{name} není známý atribut None.", + "nonlocalTypeParam": "Pro parametr typu {name} není povolená vazba nonlocal.", + "notRequiredArgCount": "Za NotRequired byl očekáván jeden argument typu", + "notRequiredNotInTypedDict": "NotRequired není v tomto kontextu povolené", + "objectNotCallable": "Objekt typu {type} není možné volat", + "obscuredClassDeclaration": "Deklarace třídy {name} je zakrytá deklarací stejného jména", + "obscuredFunctionDeclaration": "Deklarace funkce {name} je zakrytá deklarací stejného názvu", + "obscuredMethodDeclaration": "Deklarace metody {name} je zakrytá deklarací stejného názvu", + "obscuredParameterDeclaration": "Deklarace parametru {name} je zakrytá deklarací stejného názvu", + "obscuredTypeAliasDeclaration": "Deklarace aliasu typu {name} je zakrytá deklarací stejného názvu", + "obscuredVariableDeclaration": 
"Deklarace {name} je zakrytá deklarací stejného názvu", + "operatorLessOrGreaterDeprecated": "Operátor <> se v Pythonu 3 nepodporuje; místo toho použijte !=", + "optionalExtraArgs": "Za nepovinnou hodnotou (Optional) se očekával jeden argument typu.", + "orPatternIrrefutable": "Nevratný vzor je povolený jenom jako poslední dílčí vzorec ve vzorci „or“", + "orPatternMissingName": "Všechny dílčí vzory v rámci vzoru or musí cílit na stejné názvy", + "overlappingKeywordArgs": "Slovník silného typu se překrývá s parametrem klíčového slova: {names}", + "overlappingOverload": "Přetížení {obscured} pro {name} se nikdy nepoužije, protože jeho parametry se překrývají s přetížením {obscuredBy}", + "overloadAbstractImplMismatch": "Přetížení musí odpovídat abstraktnímu stavu implementace.", + "overloadAbstractMismatch": "Buď musí být všechna přetížení abstraktní, nebo naopak nesmí být žádné z nich abstraktní.", + "overloadClassMethodInconsistent": "Přetížení pro {name} používají @classmethod nekonzistentně.", + "overloadFinalImpl": "dekoratér @final by se měl používat jenom pro implementaci.", + "overloadFinalNoImpl": "Pouze první přetížení by mělo být označeno @final", + "overloadImplementationMismatch": "Přetížená implementace není konzistentní se signaturou přetížení {index}", + "overloadOverrideImpl": "dekoratér @override by se měl používat jenom pro implementaci.", + "overloadOverrideNoImpl": "Pouze první přetížení by mělo být označeno @override", + "overloadReturnTypeMismatch": "Přetížení {prevIndex} pro {name} se překrývá s přetížením {newIndex} a vrací nekompatibilní typ", + "overloadStaticMethodInconsistent": "Přetížení pro {name} používají @staticmethod nekonzistentně.", + "overloadWithoutImplementation": "„{name}“ je označené jako přetížení (overload), ale není zadaná žádná implementace.", + "overriddenMethodNotFound": "Metoda „{name}“ je označená jako přepsání (override), ale neexistuje žádná základní metoda se stejným názvem.", + "overrideDecoratorMissing": "Metoda 
„{name}“ není označená jako přepsání (override), ale přepisuje metodu ve třídě „{className}“.", + "paramAfterKwargsParam": "Parametr nemůže následovat za parametrem „**“", + "paramAlreadyAssigned": "Parametr {name} je už přiřazený", + "paramAnnotationMissing": "Chybí poznámka typu pro parametr „{name}“", + "paramAssignmentMismatch": "Výraz typu „{sourceType}“ nelze přiřadit k parametru typu „{paramType}“", + "paramNameMissing": "Žádný parametr s názvem {name}", + "paramSpecArgsKwargsDuplicate": "Argumenty pro ParamSpec {type} již byly zadány.", + "paramSpecArgsKwargsUsage": "Atributy args a kwargs ParamSpec se musí vyskytovat v signatuře funkce.", + "paramSpecArgsMissing": "Chybí argumenty pro parametr ParamSpec {type}", + "paramSpecArgsUsage": "Atribut args ParamSpec je platný jenom v případě, že se používá s parametrem *args.", + "paramSpecAssignedName": "Parametr ParamSpec musí být přiřazen proměnné s názvem {name}", + "paramSpecContext": "ParamSpec se v tomto kontextu nepovoluje", + "paramSpecDefaultNotTuple": "Očekávaly se tři tečky, výraz řazené kolekce členů (tuple) nebo ParamSpec pro výchozí hodnotu ParamSpec.", + "paramSpecFirstArg": "Očekával se název parametru ParamSpec jako první argument", + "paramSpecKwargsUsage": "Atribut kwargs ParamSpec je platný jenom v případě, že se používá s parametrem **kwargs.", + "paramSpecNotUsedByOuterScope": "Parametr ParamSpec {name} nemá v tomto kontextu žádný význam", + "paramSpecUnknownArg": "Parametr ParamSpec nepodporuje více než jeden argument", + "paramSpecUnknownMember": "{name} není známý atribut ParamSpec.", + "paramSpecUnknownParam": "„{name}“ je neznámý parametr parametru ParamSpec", + "paramTypeCovariant": "Proměnnou kovariantní typu není možné použít v typu parametru", + "paramTypePartiallyUnknown": "Typ parametru {paramName} je částečně neznámý", + "paramTypeUnknown": "Typ parametru {paramName} je neznámý", + "parenthesizedContextManagerIllegal": "Závorky v rámci příkazu „with“ vyžadují 
Python 3.9 nebo novější", + "patternNeverMatches": "Vzor se nikdy nebude shodovat s typem předmětu {type}", + "positionArgAfterNamedArg": "Poziční argument se nemůže objevit za argumenty klíčového slova", + "positionArgAfterUnpackedDictArg": "Poziční argument se nemůže objevit po rozbalení argumentu klíčového slova.", + "positionOnlyAfterArgs": "Oddělovač parametrů jen pro pozici není povolený za parametrem *.", + "positionOnlyAfterKeywordOnly": "Parametr / se musí zobrazit před parametrem *", + "positionOnlyAfterNon": "Parametr jen pro pozici není povolený za parametrem, který není jen pro pozici.", + "positionOnlyFirstParam": "Oddělovač parametrů jen pro pozici není povolený jako první parametr.", + "positionOnlyIncompatible": "Oddělovač parametrů jen pro pozici vyžaduje Python 3.8 nebo novější.", + "privateImportFromPyTypedModule": "{name} se neexportuje z modulu {module}", + "privateUsedOutsideOfClass": "{name} je privátní a používá se mimo třídu, ve které je deklarovaná", + "privateUsedOutsideOfModule": "{name} je privátní a používá se mimo modul, ve kterém je deklarován", + "propertyOverridden": "„{name}“ nesprávně přepíše vlastnost (property) se stejným názvem ve třídě „{className}“.", + "propertyStaticMethod": "Statické metody nejsou povoleny pro metodu getter, setter nebo deleter vlastnosti (property).", + "protectedUsedOutsideOfClass": "„{name}“ je chráněn(ý/o/é) a používá se mimo třídu, ve které je deklarovaná", + "protocolBaseClass": "Třída Protocol {classType} se nemůže odvozovat od třídy, která není třídou Protocol {baseType}.", + "protocolBaseClassWithTypeArgs": "Argumenty typu nejsou u třídy Protocol povoleny při použití syntaxe parametru typu", + "protocolIllegal": "Použití třídy Protocol vyžaduje Python 3.7 nebo novější.", + "protocolNotAllowed": "„Protocol“ nejde v tomto kontextu použít.", + "protocolTypeArgMustBeTypeParam": "Argument typu pro „Protocol“ musí být parametr typu.", + "protocolUnsafeOverlap": "Třída se nebezpečně překrývá s názvem „{name}“ a může 
vytvořit shodu při spuštění.", + "protocolVarianceContravariant": "Proměnná typu {variable} použitá v obecné třídě Protocol {class} by měla být kontravariantní.", + "protocolVarianceCovariant": "Proměnná typu {variable} použitá v obecné třídě Protocol {class} by měla být kovariantní.", + "protocolVarianceInvariant": "Proměnná typu {variable} použitá v obecné třídě Protocol {class} by měla být invariantní.", + "pyrightCommentInvalidDiagnosticBoolValue": "Za direktivou komentářů Pyright musí následovat znak =a hodnota true nebo false", + "pyrightCommentInvalidDiagnosticSeverityValue": "Za direktivou komentářů Pyright musí následovat = a hodnota true, false, error, warning, information nebo none", + "pyrightCommentMissingDirective": "Za komentářem Pyright musí následovat direktiva (basic nebo strict) nebo diagnostické pravidlo.", + "pyrightCommentNotOnOwnLine": "Komentáře Pyright používané k řízení nastavení na úrovni souborů se musí zobrazovat na vlastním řádku", + "pyrightCommentUnknownDiagnosticRule": "{rule} je neznámé diagnostické pravidlo pro komentář pyright", + "pyrightCommentUnknownDiagnosticSeverityValue": "{value} je neplatná hodnota pro komentář pyright; očekávalo se true, false, error, warning, information nebo none", + "pyrightCommentUnknownDirective": "Direktiva {directive} je neznámá direktiva pro komentář pyright; očekávalo se strict, standard nebo basic", + "readOnlyArgCount": "Za „ReadOnly“ se očekával jeden argument typu", + "readOnlyNotInTypedDict": "ReadOnly není v tomto kontextu povolené", + "recursiveDefinition": "Typ „{name}“ nelze určit, protože odkazuje sám na sebe", + "relativeImportNotAllowed": "Relativní importy se nedají použít s formulářem „import .a“; místo toho použijte „from . 
import a“.", + "requiredArgCount": "Za povinným argumentem (Required) se očekával jeden argument typu.", + "requiredNotInTypedDict": "Required není v tomto kontextu povoleno", + "returnInAsyncGenerator": "Příkaz Return s hodnotou není v asynchronním generátoru povolený", + "returnInExceptionGroup": "V bloku except* není povolená možnost return.", + "returnMissing": "Funkce s deklarovaným návratovým typem „{returnType}“ musí vracet hodnotu na všech cestách kódu", + "returnOutsideFunction": "„return“ se dá použít jenom v rámci funkce", + "returnTypeContravariant": "Kontravariantní proměnnou typu nejde použít v návratovém typu", + "returnTypeMismatch": "Typ {exprType} se nedá přiřadit k návratovému typu {returnType}.", + "returnTypePartiallyUnknown": "Návratový typ {returnType} je částečně neznámý", + "returnTypeUnknown": "Návratový typ je neznámý", + "revealLocalsArgs": "Pro volání reveal_locals se neočekávaly žádné argumenty", + "revealLocalsNone": "V tomto oboru nejsou žádné místní hodnoty (locals).", + "revealTypeArgs": "Pro volání reveal_type byl očekáván jeden poziční argument", + "revealTypeExpectedTextArg": "Argument „expected_text“ pro funkci „reveal_type“ musí být hodnota literálu str", + "revealTypeExpectedTextMismatch": "Neshoda typu textu; očekávaný počet: {expected}, počet, který byl přijat: {received}", + "revealTypeExpectedTypeMismatch": "Neshoda typů; Očekával(o/y) se „{expected}“, ale přijal(o/y) se „{received}“", + "selfTypeContext": "Self není v tomto kontextu platné", + "selfTypeMetaclass": "„Self“ nelze použít v rámci metatřídy (podtřídy „type“).", + "selfTypeWithTypedSelfOrCls": "Self není možné použít ve funkci s parametrem self nebo cls, která má jinou poznámku typu než Self", + "sentinelBadName": "Prvním argumentem pro Sentinel musí být řetězcový literál", + "sentinelNameMismatch": "Sentinel musí být přiřazen proměnné se stejným názvem", + "sentinelParamCount": "Sentinel vyžaduje jeden poziční argument", + "setterGetterTypeMismatch": "Typ 
hodnoty metody setter vlastnosti (property) není možné přiřadit návratovému typu getter.", + "singleOverload": "{name} je označené jako přetížení, ale chybí další přetížení", + "slotsAttributeError": "„{name}“ není zadaný v __slots__", + "slotsClassVarConflict": "{name} je v konfliktu s proměnnou instance deklarovanou v __slots__", + "starPatternInAsPattern": "Vzor hvězdy se nedá použít s cílem as", + "starPatternInOrPattern": "Vzor hvězdy nemůže být ORed v jiných vzorech", + "starStarWildcardNotAllowed": "** není možné použít se zástupným znakem _", + "staticClsSelfParam": "Statické metody by neměly přijímat parametr self nebo cls", + "stringNonAsciiBytes": "Znak jiný než ASCII není povolený v bajtech řetězcového literálu", + "stringNotSubscriptable": "Řetězcový výraz není možné ve výrazu typu zadat jako dolní index. Uzavřete celý výraz do uvozovek.", + "stringUnsupportedEscape": "Nepodporovaná řídicí sekvence v řetězcovém literálu", + "stringUnterminated": "Řetězcový literál je neukončený", + "stubFileMissing": "Soubor zástupné procedury (stub) pro „{importName}“ se nenašel.", + "stubUsesGetAttr": "Soubor zástupné procedury (stub) typu je neúplný; __getattr__ zakrývá typové chyby pro modul.", + "sublistParamsIncompatible": "Parametry sublist nejsou v Pythonu 3.x podporované.", + "superCallArgCount": "Pro volání „super“ se očekávaly maximálně dva argumenty", + "superCallFirstArg": "Jako první argument pro volání super se očekával typ třídy, ale přijal se {type}", + "superCallSecondArg": "Druhý argument volání super musí být objekt nebo třída odvozená z typu {type}", + "superCallZeroArgForm": "Forma nulového argumentu „super“ volání je platná pouze v rámci metody.", + "superCallZeroArgFormStaticMethod": "Forma nulového argumentu „super“ volání není platná v rámci statické metody.", + "symbolIsPossiblyUnbound": "{name} je pravděpodobně nevázané", + "symbolIsUnbound": "Název {name} je nevázaný", + "symbolIsUndefined": "{name} není definované", + "symbolOverridden": 
"{name} přepíše symbol stejného názvu ve třídě {className}", + "templateStringBytes": "Řetězcové literály šablon (t-strings) nemůžou být binární", + "templateStringIllegal": "Řetězcové literály šablon (t-string) vyžadují Python 3.14 nebo novější", + "templateStringUnicode": "Řetězcové literály šablon (t-strings) nemůžou být unicode", + "ternaryNotAllowed": "Výraz ternary není ve výrazu typu povolený.", + "totalOrderingMissingMethod": "Třída musí definovat jednu z __lt__, __le__, __gt__ nebo __ge__, aby bylo možné používat total_ordering", + "trailingCommaInFromImport": "Koncová čárka není povolena bez okolních závorek", + "tryWithoutExcept": "Příkaz Try musí mít alespoň jednu klauzuli except nebo finally", + "tupleAssignmentMismatch": "Výraz s typem „{type}“ se nedá přiřadit cílové řazené kolekci členů (tuple).", + "tupleInAnnotation": "Výraz řazené kolekce členů (tuple) není ve výrazu typu povolený.", + "tupleIndexOutOfRange": "Index {index} je pro typ {type} mimo rozsah", + "typeAliasIllegalExpressionForm": "Neplatný formulář výrazu pro definici aliasu typu", + "typeAliasIsRecursiveDirect": "Alias typu „{name}“ nemůže ve své definici používat sám sebe", + "typeAliasNotInModuleOrClass": "Typ TypeAlias je možné definovat pouze v rámci oboru modulu nebo třídy", + "typeAliasRedeclared": "„{name}“ se deklaruje jako TypeAlias a dá se přiřadit jenom jednou.", + "typeAliasStatementBadScope": "Příkaz type se dá použít jenom v rámci oboru modulu nebo třídy.", + "typeAliasStatementIllegal": "Příkaz alias typu vyžaduje Python 3.12 nebo novější", + "typeAliasTypeBadScope": "Alias typu se dá definovat jenom v rámci oboru modulu nebo třídy", + "typeAliasTypeBaseClass": "Alias typu definovaný v příkazu \"type\" nejde použít jako základní třídu.", + "typeAliasTypeMustBeAssigned": "Typ TypeAliasType musí být přiřazen proměnné se stejným názvem jako alias typu", + "typeAliasTypeNameArg": "První argument TypeAliasType musí být řetězcový literál představující název aliasu typu.", + 
"typeAliasTypeNameMismatch": "Název aliasu typu se musí shodovat s názvem proměnné, ke které je přiřazená", + "typeAliasTypeParamInvalid": "Seznam parametrů typu musí být řazená kolekce členů (tuple) obsahující pouze TypeVar, TypeVarTuple nebo ParamSpec.", + "typeAnnotationCall": "Výraz volání není ve výrazu typu povolený", + "typeAnnotationVariable": "Proměnná není ve výrazu typu povolená", + "typeAnnotationWithCallable": "Argument typu pro „type“ musí být třída; volatelné objekty se nepodporují.", + "typeArgListExpected": "Očekával se parametr ParamSpec, tři tečky nebo seznam (list) typů.", + "typeArgListNotAllowed": "Výraz seznamu (list) není pro tento argument typu povolený.", + "typeArgsExpectingNone": "Pro třídu {name} se neočekávaly žádné argumenty typu", + "typeArgsMismatchOne": "Očekával se jeden argument typu, ale bylo přijato {received}", + "typeArgsMissingForAlias": "Pro alias obecného typu {name} se očekávaly argumenty typu", + "typeArgsMissingForClass": "Očekávané argumenty typu pro obecnou třídu „{name}“", + "typeArgsTooFew": "Pro {name} se zadalo příliš málo argumentů typu. Očekávalo se {expected}, ale přijalo se {received}", + "typeArgsTooMany": "Pro „{name}“ se zadalo příliš mnoho argumentů typu. 
Očekával(o/y) se {expected}, ale přijal(o/y) se {received}", + "typeAssignmentMismatch": "Typ {sourceType} se nedá přiřadit k deklarovanému typu {destType}.", + "typeAssignmentMismatchWildcard": "Symbol importu {name} má typ {sourceType}, který se nedá přiřadit k deklarovanému typu {destType}.", + "typeCallNotAllowed": "Volání type() by se nemělo používat ve výrazu typu.", + "typeCheckOnly": "Název {name} je označený jako @type_check_only a dá se použít jenom v poznámkách typu", + "typeCommentDeprecated": "Použití komentářů type je zastaralé místo toho použít anotaci type.", + "typeExpectedClass": "Očekávala se třída, ale byl přijat typ {type}.", + "typeFormArgs": "TypeForm přijímá jeden poziční argument.", + "typeGuardArgCount": "Za TypeGuard nebo TypeIs byl očekáván jeden argument typu.", + "typeGuardParamCount": "Funkce a metody ochrany typů definované uživatelem musí mít alespoň jeden vstupní parametr", + "typeIsReturnType": "Návratový typ TypeIs ({returnType}) není konzistentní s typem parametru hodnoty ({type}).", + "typeNotAwaitable": "„{type}“ není awaitable.", + "typeNotIntantiable": "Není možné vytvořit instanci {type}", + "typeNotIterable": "{type} není možné iterovat", + "typeNotSpecializable": "Nepovedlo se specializovat typ „{type}“", + "typeNotSubscriptable": "Objekt typu {type} není možné zadat jako dolní index", + "typeNotSupportBinaryOperator": "Operátor „{operator}“ se pro typy „{leftType}“ a „{rightType}“ nepodporuje", + "typeNotSupportBinaryOperatorBidirectional": "Operátor {operator} není podporován pro typy „{leftType}“ a „{rightType}“, pokud se očekává typ „{expectedType}“", + "typeNotSupportUnaryOperator": "Operátor {operator} se pro typ {type} nepodporuje", + "typeNotSupportUnaryOperatorBidirectional": "Operátor {operator} není podporován pro typ {type}, když očekávaný typ je {expectedType}", + "typeNotUsableWith": "Objekt typu {type} není možné použít s typem with, protože neimplementuje správně metodu {method}.", + 
"typeNotUsableWithAsync": "Objekt typu {type} není možné použít s typem async with, protože neimplementuje správně metodu {method}.", + "typeParameterBoundNotAllowed": "Vazbu nebo omezení nelze použít s parametrem variadického typu ani s parametrem ParamSpec", + "typeParameterConstraintTuple": "Omezení parametru typu musí být řazená kolekce členů dvou nebo více typů", + "typeParameterExistingTypeParameter": "Parametr typu {name} se už používá", + "typeParameterNotDeclared": "Parametr typu {name} není zahrnutý v seznamu parametrů typu pro kontejner {container}", + "typeParametersMissing": "Musí být zadán alespoň jeden parametr typu", + "typePartiallyUnknown": "Typ {name} je částečně neznámý", + "typeUnknown": "Typ {name} je neznámý", + "typeVarAssignedName": "Typ TypeVar musí být přiřazen proměnné s názvem {name}", + "typeVarAssignmentMismatch": "Typ „{type}“ se nedá přiřadit proměnné typu „{name}“", + "typeVarBoundAndConstrained": "TypeVar nemůže být současně vázaný i omezený", + "typeVarBoundGeneric": "Vázaný typ TypeVar nemůže být obecný", + "typeVarConstraintGeneric": "Typ omezení TypeVar nemůže být obecný", + "typeVarDefaultBoundMismatch": "Výchozí typ TypeVar musí být podtyp vázaného typu", + "typeVarDefaultConstraintMismatch": "Výchozí typ TypeVar musí být jeden z omezených typů", + "typeVarDefaultIllegal": "Výchozí typy proměnných typů vyžadují Python 3.13 nebo novější", + "typeVarDefaultInvalidTypeVar": "Parametr typu {name} má výchozí typ, který odkazuje na jednu nebo více proměnných typu, které jsou mimo obor", + "typeVarFirstArg": "Očekával se název TypeVar jako první argument", + "typeVarInvalidForMemberVariable": "Typ atributu nemůže používat proměnnou typu {name} vymezenou na místní metodu.", + "typeVarNoMember": "TypeVar {type} nemá žádný atribut {name}.", + "typeVarNotSubscriptable": "TypeVar {type} není možné zadat jako dolní index", + "typeVarNotUsedByOuterScope": "Proměnná typu {name} nemá v tomto kontextu žádný význam", + 
"typeVarPossiblyUnsolvable": "Proměnná typu „{name}“ může být nevyřešená, pokud volající nezadá žádný argument pro parametr „{param}“", + "typeVarSingleConstraint": "TypeVar musí mít alespoň dva omezené typy", + "typeVarTupleConstraints": "TypeVarTuple nemůže mít omezení hodnoty", + "typeVarTupleContext": "TypeVarTuple se v tomto kontextu nepovoluje", + "typeVarTupleDefaultNotUnpacked": "Výchozí typ TypeVarTuple musí být rozbalená řazená kolekce členů (tuple) nebo TypeVarTuple.", + "typeVarTupleMustBeUnpacked": "Pro hodnotu TypeVarTuple se vyžaduje operátor rozbalení", + "typeVarTupleUnknownParam": "{name} je neznámý parametr pro TypeVarTuple", + "typeVarUnknownParam": "„{name}“ je neznámý parametr pro TypeVar", + "typeVarUsedByOuterScope": "TypeVar „{name}“ se už používá ve vnějším oboru", + "typeVarUsedOnlyOnce": "TypeVar {name} se v signatuře obecné funkce zobrazuje jenom jednou", + "typeVarVariance": "TypeVar nemůže být kovariantní i kontravariantní", + "typeVarWithDefaultFollowsVariadic": "TypeVar {typeVarName} má výchozí hodnotu a nemůže následovat po TypeVarTuple {variadicName}.", + "typeVarWithoutDefault": "„{name}“ se v seznamu parametrů typu nemůže zobrazit za „{other}“ , protože nemá žádný výchozí typ", + "typeVarsNotInGenericOrProtocol": "Generic[] nebo Protocol[] musí obsahovat všechny proměnné typu", + "typedDictAccess": "Nepovedlo se získat přístup k položce v TypedDict", + "typedDictAssignedName": "TypedDict se musí přiřadit proměnné s názvem „{name}“.", + "typedDictBadVar": "Třídy TypedDict můžou obsahovat jenom poznámky typu", + "typedDictBaseClass": "Všechny základní třídy pro třídy TypedDict musí být také třídami TypedDict", + "typedDictBoolParam": "Očekávalo se, že parametr {name} bude mít hodnotu True nebo False", + "typedDictClosedExtras": "Základní \"{name}\" třídy je TypedDict, který omezuje typ dalších položek na typ \"{type}\"", + "typedDictClosedFalseNonOpenBase": "Základní třída {name} není otevřená třída TypedDict; closed=False není 
povolené", + "typedDictClosedNoExtras": "Základní třída {name} je closed TypedDict; položky navíc nejsou povolené.", + "typedDictDelete": "Nepovedlo se odstranit položku v TypedDict", + "typedDictEmptyName": "Názvy v rámci TypedDict nemůžou být prázdné", + "typedDictEntryName": "Očekával se řetězcový literál pro název položky slovníku", + "typedDictEntryUnique": "Názvy ve slovníku musí být jedinečné", + "typedDictExtraArgs": "Nadbytečné argumenty TypedDict nejsou podporovány", + "typedDictExtraItemsClosed": "TypedDict může používat buď \"closed\", nebo \"extra_items\", ale ne obojí.", + "typedDictFieldNotRequiredRedefinition": "Položka TypedDict {name} nejde předefinovat jako NotRequired.", + "typedDictFieldReadOnlyRedefinition": "Položka TypedDict {name} nejde předefinovat jako ReadOnly.", + "typedDictFieldRequiredRedefinition": "Položka TypedDict {name} nejde předefinovat jako Required.", + "typedDictFirstArg": "Jako první argument byl očekáván název třídy TypedDict", + "typedDictInClassPattern": "Třída TypedDict není ve vzoru třídy povolena.", + "typedDictInitsubclassParameter": "TypedDict nepodporuje parametr __init_subclass__ „{name}“.", + "typedDictNotAllowed": "„TypedDict“ se v tomto kontextu nedá použít.", + "typedDictSecondArgDict": "Jako druhý parametr se očekával parametr dict nebo keyword.", + "typedDictSecondArgDictEntry": "Očekávaná jednoduchá položka slovníku", + "typedDictSet": "Nelze přiřadit položku v TypedDict", + "unaccessedClass": "Třída „{name}“ není zpřístupněna", + "unaccessedFunction": "Funkce {name} není zpřístupněn", + "unaccessedImport": "Import {name} není zpřístupněn", + "unaccessedSymbol": "{name} není zpřístupněn", + "unaccessedVariable": "Proměnná {name} není zpřístupněná", + "unannotatedFunctionSkipped": "Analýza funkce „{name}“ se přeskočila, protože není označená", + "unaryOperationNotAllowed": "Ve výrazu typu není povolený unární operátor.", + "unexpectedAsyncToken": "Očekávalo se, že za async bude následovat def, with nebo 
for", + "unexpectedEof": "Neočekávaný EOF", + "unexpectedExprToken": "Neočekávaný token na konci výrazu", + "unexpectedIndent": "Neočekávané odsazení", + "unexpectedUnindent": "Neočekává se unindent", + "unhashableDictKey": "Klíč slovníku musí být hashovatelný", + "unhashableSetEntry": "Položka set musí být hashovatelná.", + "uninitializedAbstractVariables": "Proměnné definované v abstraktní základní třídě nejsou inicializovány ve třídě final {classType}.", + "uninitializedInstanceVariable": "Proměnná instance {name} není inicializována v těle třídy nebo v metodě __init__", + "unionForwardReferenceNotAllowed": "Syntaxi Union není možné použít s operandem řetězce; použijte uvozovky kolem celého výrazu.", + "unionSyntaxIllegal": "Alternativní syntaxe pro sjednocení vyžaduje Python 3.10 nebo novější", + "unionTypeArgCount": "Union vyžaduje dva nebo více argumentů typu.", + "unionUnpackedTuple": "Union nemůže obsahovat rozbalenou řazenou kolekci členů (tuple).", + "unionUnpackedTypeVarTuple": "Union nemůže obsahovat rozbalený typ TypeVarTuple.", + "unnecessaryCast": "Nepotřebné volání „cast“; typ už je „{type}“.", + "unnecessaryIsInstanceAlways": "Zbytečné volání isinstance; {testType} je vždy instancí třídy {classType}", + "unnecessaryIsInstanceNever": "Zbytečné volání isinstance; {testType} není nikdy instancí třídy {classType}", + "unnecessaryIsSubclassAlways": "Nepotřebné volání issubclass; „{testType}“ je vždy podtřídou třídy „{classType}“", + "unnecessaryIsSubclassNever": "Zbytečné volání issubclass; {testType} není nikdy podtřídou třídy {classType}", + "unnecessaryPyrightIgnore": "Nepotřebný komentář „# pyright: ignore“", + "unnecessaryPyrightIgnoreRule": "Nepotřebné pravidlo # pyright: ignore: {name}", + "unnecessaryTypeIgnore": "Nepotřebný komentář „# type: ignore“", + "unpackArgCount": "Po rozbalení „Unpack“ se očekával jeden argument typu", + "unpackExpectedTypeVarTuple": "Jako argument typu pro Unpack byl očekáván typ TypeVarTuple nebo tuple.", + 
"unpackExpectedTypedDict": "Byl očekáván argument typu TypedDict pro rozbalení Unpack", + "unpackIllegalInComprehension": "Operace rozbalení není v porozumění povolená", + "unpackInAnnotation": "Ve výrazu typu není povolený operátor rozbalení.", + "unpackInDict": "Operace rozbalení není ve slovnících povolena", + "unpackInSet": "Operátor rozbalení není v sadě (set) povolený.", + "unpackNotAllowed": "Unpack se v tomto kontextu nepovoluje.", + "unpackOperatorNotAllowed": "Operace rozbalení není v tomto kontextu povolená", + "unpackTuplesIllegal": "Operace rozbalení není povolená v řazených kolekcích členů před Pythonem 3.8", + "unpackedArgInTypeArgument": "V tomto kontextu nelze použít rozbalené argumenty.", + "unpackedArgWithVariadicParam": "Pro parametr TypeVarTuple nejde použít rozbalený argument", + "unpackedDictArgumentNotMapping": "Výraz argumentu za ** musí být mapování s typem klíče str", + "unpackedDictSubscriptIllegal": "Operátor rozbalení slovníku v dolním indexu není povolený", + "unpackedSubscriptIllegal": "Operátor rozbalení v dolním indexu vyžaduje Python 3.11 nebo novější", + "unpackedTypeVarTupleExpected": "Byl očekáván rozbalený typ TypeVarTuple; použijte Unpack[{name1}] nebo *{name2}", + "unpackedTypedDictArgument": "Nepovedlo se spárovat nebalený argument TypedDict s parametry", + "unreachableCodeCondition": "Kód není analyzován, protože podmínka je staticky vyhodnocena jako False", + "unreachableCodeStructure": "Kód je strukturálně nedostupný", + "unreachableCodeType": "Analýza typů indikuje, že kód není dostupný.", + "unreachableExcept": "Klauzule Except je nedosažitelná, protože výjimka je již zpracována", + "unsupportedDunderAllOperation": "Operace s __all__ se nepodporuje, takže exportovaný seznam symbolů nemusí být správný", + "unusedCallResult": "Výsledek výrazu volání je typu „{type}“ a nepoužívá se. 
přiřadit proměnné „_“, pokud je to záměrné", + "unusedCoroutine": "Výsledek volání asynchronní funkce se nepoužívá; použijte operátor await nebo přiřaďte výsledek proměnné", + "unusedExpression": "Hodnota výrazu se nepoužívá", + "varAnnotationIllegal": "Poznámky type pro proměnné vyžadují Python 3.6 nebo novější; pro kompatibilitu s předchozími verzemi použijte komentáře type.", + "variableFinalOverride": "Proměnná {name} je označená jako Final a přepíše proměnnou non-Final se stejným názvem ve třídě {className}.", + "variadicTypeArgsTooMany": "Seznam argumentů typů může mít maximálně jeden rozbalený typ TypeVarTuple nebo tuple.", + "variadicTypeParamTooManyAlias": "Alias typu může mít maximálně jeden parametr typu TypeVarTuple, ale přijal několik ({names})", + "variadicTypeParamTooManyClass": "Obecná třída může mít maximálně jeden parametr typu TypeVarTuple, ale přijala více ({names})", + "walrusIllegal": "Operátor := vyžaduje Python 3.8 nebo novější", + "walrusNotAllowed": "Operátor := není v tomto kontextu povolen bez okolních závorek", + "wildcardInFunction": "V rámci třídy nebo funkce není povolen import se zástupnými znaky.", + "wildcardLibraryImport": "Není povolený import se zástupnými znaky z knihovny.", + "wildcardPatternTypePartiallyUnknown": "Typ zachycený vzorem se zástupnými znaky je částečně neznámý", + "wildcardPatternTypeUnknown": "Typ zachycený vzorem se zástupnými znaky je neznámý", + "yieldFromIllegal": "Použití příkazu yield from vyžaduje Python 3.3 nebo novější", + "yieldFromOutsideAsync": "yield from není v asynchronní funkci povoleno", + "yieldOutsideFunction": "„yield“ není povoleno mimo funkci nebo lambdu", + "yieldWithinComprehension": "„yield“ není povolené uvnitř porozumění", + "zeroCaseStatementsFound": "Výraz shody (match) obsahovat alespoň jeden výraz velikosti písmen (case).", + "zeroLengthTupleNotAllowed": "Řazená kolekce členů (tuple) s nulovou délkou není v tomto kontextu povolená." 
+ }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "Speciální formulář Annotated nejde použít s kontrolami instancí a tříd.", + "argParam": "Argument odpovídá parametru {paramName}", + "argParamFunction": "Argument odpovídá parametru {paramName} ve funkci {functionName}", + "argsParamMissing": "Parametr „*{paramName}“ nemá žádný odpovídající parametr", + "argsPositionOnly": "Neshoda parametrů pouze s pozicí; Očekával(o/y) se {expected}, ale přijal(o/y) se {received}", + "argumentType": "Typ argumentu je {type}", + "argumentTypes": "Typy argumentů: ({types})", + "assignToNone": "Typ se nedá přiřadit k None.", + "asyncHelp": "Měli jste na mysli „async with“?", + "baseClassIncompatible": "Základní třída {baseClass} není kompatibilní s typem {type}", + "baseClassIncompatibleSubclass": "Základní třída {baseClass} je odvozená od třídy {subclass}, která není kompatibilní s typem {type}", + "baseClassOverriddenType": "Základní třída {baseClass} poskytuje typ {type}, který je přepsán", + "baseClassOverridesType": "Základní třída „{baseClass}“ přepisuje typ „{type}“", + "bytesTypePromotions": "Pokud chcete povolit chování povýšení typu pro „bytearray“ a „memoryview“, nastavte disableBytesTypePromotions na false", + "conditionalRequiresBool": "Metoda __bool__ pro typ {operandType} vrací typ {boolReturnType} místo bool", + "dataClassFieldLocation": "Deklarace pole", + "dataClassFrozen": "{name} je zablokované", + "dataProtocolUnsupported": "„{name}“ je datový protokol.", + "descriptorAccessBindingFailed": "Nepovedlo se vytvořit vazbu metody {name} pro třídu popisovače {className}.", + "descriptorAccessCallFailed": "Nepovedlo se volat metodu {name} pro třídu popisovače {className}.", + "finalMethod": "Metoda Final", + "functionParamDefaultMissing": "V parametru „{name}“ chybí výchozí argument", + "functionParamName": "Neshoda názvu parametru: {destName} a {srcName}", + "functionParamPositionOnly": "Neshoda parametrů pouze s pozicí; Parametr „{name}“ není jen pro pozici.", 
+ "functionReturnTypeMismatch": "Návratový typ funkce „{sourceType}“ není kompatibilní s typem „{destType}“", + "functionTooFewParams": "Funkce přijímá příliš málo pozičních parametrů; očekávaný počet: {expected}, počet, který byl přijat: {received}", + "functionTooManyParams": "Funkce přijímá příliš mnoho pozičních parametrů; očekávaný počet: {expected}, počet, který byl přijat: {received}", + "genericClassNotAllowed": "Obecný typ s argumenty obecného typu se pro kontroly instancí nebo tříd nepovoluje.", + "incompatibleDeleter": "Metoda deleter vlastnosti (property) je nekompatibilní.", + "incompatibleGetter": "Metoda getter vlastnosti (property) je nekompatibilní.", + "incompatibleSetter": "Metoda setter vlastnosti (property) je nekompatibilní.", + "initMethodLocation": "Metoda __init__ je definována ve třídě {type}", + "initMethodSignature": "Podpis __init__ je {type}", + "initSubclassLocation": "Metoda __init_subclass__ je definována ve třídě {name}.", + "invariantSuggestionDict": "Zvažte přepnutí z možnosti „dict“ na možnost „Mapping“, která je v typu hodnoty kovariantní.", + "invariantSuggestionList": "Zvažte přepnutí z možnosti „list“ na možnost „Sequence“, která je kovariantní.", + "invariantSuggestionSet": "Zvažte přepnutí z možnosti „set“ na možnost „Container“, která je kovariantní.", + "isinstanceClassNotSupported": "{type} se pro kontroly instancí a tříd nepodporuje.", + "keyNotRequired": "„{name}! 
není v typu „{type}“ povinný klíč, takže přístup může vést k výjimce modulu runtime", + "keyReadOnly": "{name} je klíč jen pro čtení v {type}", + "keyRequiredDeleted": "{name} je povinný klíč a nedá se odstranit", + "keyUndefined": "{name} není definovaný klíč v typu {type}", + "kwargsParamMissing": "Parametr „**{paramName}“ nemá žádný odpovídající parametr", + "listAssignmentMismatch": "Typ {type} není kompatibilní s cílovým seznamem", + "literalAssignmentMismatch": "{sourceType} se nedá přiřadit k typu {destType}.", + "literalNotAllowed": "Speciální formulář Literal nejde použít s kontrolami instancí a tříd.", + "matchIsNotExhaustiveHint": "Pokud není zamýšleno vyčerpávající zpracování, přidejte case _: pass", + "matchIsNotExhaustiveType": "Nezpracovaný typ: {type}", + "memberAssignment": "Výraz typu {type} nelze přiřadit k atributu {name} třídy {classType}.", + "memberIsAbstract": "„{type}.{name}“ není implementováno.", + "memberIsAbstractMore": "a tento počet dalších: {count}...", + "memberIsClassVarInProtocol": "„{name}“ je v protokolu definován jako ClassVar.", + "memberIsInitVar": "{name} je pole init-only.", + "memberIsInvariant": "{name} je invariantní, protože je proměnlivé", + "memberIsNotClassVarInClass": "„{name}“ musí být definováno jako ClassVar, aby bylo kompatibilní s protokolem.", + "memberIsNotClassVarInProtocol": "„{name}“ není v protokolu definován jako ClassVar.", + "memberIsNotReadOnlyInProtocol": "{name} není v protokolu jen pro čtení.", + "memberIsReadOnlyInProtocol": "{name} je v protokolu jen pro čtení.", + "memberIsWritableInProtocol": "{name} se dá zapisovat do protokolu.", + "memberSetClassVar": "Atribut {name} nelze přiřadit prostřednictvím instance třídy, protože jde o ClassVar.", + "memberTypeMismatch": "{name} je nekompatibilní typ", + "memberUnknown": "Atribut {name} je neznámý.", + "metaclassConflict": "Metatřída {metaclass1} je v konfliktu s metatřídou {metaclass2}.", + "missingDeleter": "Chybí metoda deleter vlastnosti 
(property).", + "missingGetter": "Chybí metoda getter vlastnosti (property).", + "missingSetter": "Chybí metoda setter vlastnosti (property).", + "namedParamMissingInDest": "Další parametr „{name}“", + "namedParamMissingInSource": "Chybí parametr klíčového slova „{name}“.", + "namedParamTypeMismatch": "Parametr klíčového slova {name} typu {sourceType} není kompatibilní s typem {destType}.", + "namedTupleNotAllowed": "NamedTuple se nedá použít pro kontroly instancí nebo tříd.", + "newMethodLocation": "Metoda __new__ je definována ve třídě {type}", + "newMethodSignature": "Podpis __new__ je {type}", + "newTypeClassNotAllowed": "Typ vytvořený pomocí NewType nelze použít s kontrolami instancí a tříd", + "noOverloadAssignable": "Typ {type} neodpovídá žádné přetížené funkci", + "noneNotAllowed": "Možnost None se nedá použít pro kontroly instancí nebo tříd.", + "orPatternMissingName": "Chybějící názvy: {name}", + "overloadIndex": "Přetížení {index} je nejbližší shoda.", + "overloadNotAssignable": "Nejméně jedno přetížení {name} není možné přiřadit", + "overloadSignature": "Tady je definován podpis přetížení", + "overriddenMethod": "Přepsaná metoda", + "overriddenSymbol": "Přepsaný symbol", + "overrideInvariantMismatch": "Typ přepsání „{overrideType}“není stejný jako základní typ „{baseType}“", + "overrideIsInvariant": "Proměnná je proměnlivá, takže její typ je invariantní", + "overrideNoOverloadMatches": "Signatura přetížení v přepsání není kompatibilní se základní metodou", + "overrideNotClassMethod": "Základní metoda je deklarována jako metoda classmethod, ale přepsání není", + "overrideNotInstanceMethod": "Základní metoda je deklarována jako instanční metoda, ale přepsání není", + "overrideNotStaticMethod": "Základní metoda je deklarována jako staticmethod, ale přepsání není", + "overrideOverloadNoMatch": "Přepsání nezahrnuje všechna přetížení metody základní třídy", + "overrideOverloadOrder": "Přetížení přepisované metody musí být ve stejném pořadí jako základní 
metoda", + "overrideParamKeywordNoDefault": "Neshoda parametru klíčového slova „{name}“: základní parametr má výchozí hodnotu argumentu, parametr pro přepsání ne", + "overrideParamKeywordType": "Neshoda typu parametru klíčového slova „{name}“: základní parametr je typu „{baseType}“, parametr přepsání je typu „{overrideType}“", + "overrideParamName": "Neshoda názvu parametru {index}: základní parametr má název „{baseName}“, parametr přepsání má název „{overrideName}“", + "overrideParamNameExtra": "V základu chybí parametr {name}", + "overrideParamNameMissing": "V přepsání chybí parametr „{name}“", + "overrideParamNamePositionOnly": "Neshoda parametru {index}: základní parametr {baseName} je parametr klíčového slova, přepisující parametr je pouze pozice", + "overrideParamNoDefault": "Neshoda parametru {index}: Základní parametr má výchozí hodnotu argumentu, parametr přepsání se neshoduje.", + "overrideParamType": "Neshoda typu parametru {index}: Základní parametr je typu „{baseType}“, parametr přepsání je typu „{overrideType}“.", + "overridePositionalParamCount": "Neshoda počtu pozičních parametrů; Základní metoda má „{baseCount}“, ale přepsání má {overrideCount}", + "overrideReturnType": "Neshoda návratového typu: Základní metoda vrací typ {baseType}, přepsání vrací typ {overrideType}", + "overrideType": "Základní třída definuje typ jako {type}", + "paramAssignment": "Parametr {index}: typ {sourceType} není kompatibilní s typem {destType}.", + "paramSpecMissingInOverride": "V metodě přepsání chybí parametry ParamSpec", + "paramType": "Typ parametru je {paramType}", + "privateImportFromPyTypedSource": "Místo toho importovat z modulu {module}", + "propertyAccessFromProtocolClass": "Vlastnost definovaná v rámci třídy protokolu není přístupná jako proměnná třídy", + "propertyMethodIncompatible": "Metoda vlastnosti (property) {name} není kompatibilní.", + "propertyMethodMissing": "V přepsání (override) chybí metoda vlastnosti (property) „{name}“.", + 
"propertyMissingDeleter": "Vlastnost (property) {name} nemá definovanou metodu deleter.", + "propertyMissingSetter": "Vlastnost (property) {name} nemá definovanou metodu setter.", + "protocolIncompatible": "{sourceType} není kompatibilní s protokolem {destType}", + "protocolMemberMissing": "{name} není k dispozici", + "protocolRequiresRuntimeCheckable": "Třída Protocol musí být @runtime_checkable, aby se použila při kontrolách instancí a tříd.", + "protocolSourceIsNotConcrete": "„{sourceType}“ není konkrétní typ třídy a nedá se přiřadit k typu „{destType}“", + "protocolUnsafeOverlap": "Atributy „{name}“ mají stejné názvy jako protokol.", + "pyrightCommentIgnoreTip": "Pokud chcete potlačit diagnostiku pro jeden řádek, použijte # pyright: ignore[]", + "readOnlyAttribute": "Atribut {name} je jen pro čtení", + "seeClassDeclaration": "Viz deklarace třídy", + "seeDeclaration": "Viz deklarace", + "seeFunctionDeclaration": "Viz deklarace funkce", + "seeMethodDeclaration": "Viz deklarace metody", + "seeParameterDeclaration": "Viz deklarace parametru", + "seeTypeAliasDeclaration": "Zobrazit deklaraci aliasu typu", + "seeVariableDeclaration": "Zobrazit deklaraci proměnné", + "tupleAssignmentMismatch": "Typ „{type}“ není kompatibilní s cílovou řazenou kolekcí členů (tuple).", + "tupleEntryTypeMismatch": "Položka řazené kolekce členů (tuple) {entry} je nesprávného typu.", + "tupleSizeIndeterminateSrc": "Neshoda velikosti řazené kolekce členů (tuple); očekávalo se {expected}, ale přijalo se neurčité.", + "tupleSizeIndeterminateSrcDest": "Neshoda velikosti řazené kolekce členů (tuple); očekávalo se min. 
{expected}, ale přijalo se neurčité.", + "tupleSizeMismatch": "Neshoda velikosti řazené kolekce členů (tuple); očekávalo se {expected}, ale přijalo se {received}.", + "tupleSizeMismatchIndeterminateDest": "Neshoda velikosti řazené kolekce členů (tuple); Očekávalo se {expected}, ale přijalo se {received}.", + "typeAliasInstanceCheck": "Alias typu vytvořený pomocí příkazu „type“ se nedá použít s kontrolami instancí a tříd.", + "typeAssignmentMismatch": "Typ {sourceType} se nedá přiřadit k typu {destType}.", + "typeBound": "Typ {sourceType} se nedá přiřadit k horní hranici {destType} pro proměnnou typu {name}.", + "typeConstrainedTypeVar": "Typ {type} se nedá přiřadit k proměnné omezeného typu {name}.", + "typeIncompatible": "{sourceType} se nedá přiřadit k {destType}.", + "typeNotClass": "{type} není třída", + "typeNotStringLiteral": "„{type}“ není řetězcový literál", + "typeOfSymbol": "Typ „{name}“ je „{type}“", + "typeParamSpec": "Typ {type} není kompatibilní s parametrem ParamSpec {name}", + "typeUnsupported": "Typ {type} není podporován", + "typeVarDefaultOutOfScope": "Proměnná typu {name} není v oboru", + "typeVarIsContravariant": "Parametr typu „{name}“ je kontravariantní, ale „{sourceType}“ není nadtyp „{destType}“.", + "typeVarIsCovariant": "Parametr typu „{name}“ je kovariantní, ale „{sourceType}“ není podtyp „{destType}“.", + "typeVarIsInvariant": "Parametr typu „{name}“ je invariantní, ale „{sourceType}“ není stejný jako „{destType}“.", + "typeVarNotAllowed": "TypeVar se pro kontroly instancí nebo tříd nepovoluje.", + "typeVarTupleRequiresKnownLength": "Typ TypeVarTuple nemůže být vázaný na řazenou kolekci členů (tuple) neznámé délky.", + "typeVarUnnecessarySuggestion": "Místo toho použijte {type}.", + "typeVarUnsolvableRemedy": "Zadejte přetížení, které určuje návratový typ, pokud argument není zadán", + "typeVarsMissing": "Chybějící proměnné typu: {names}", + "typedDictBaseClass": "Třída „{type}“ není TypedDict.", + "typedDictClassNotAllowed": "Třída 
TypedDict není povolená pro kontroly instancí nebo tříd.", + "typedDictClosedExtraNotAllowed": "Do {name} nejde přidat položku.", + "typedDictClosedExtraTypeMismatch": "Nelze přidat položku {name} s typem {type}.", + "typedDictClosedFieldNotReadOnly": "Nelze přidat položku {name}, protože musí být ReadOnly", + "typedDictClosedFieldNotRequired": "Nelze přidat položku {name}, protože musí být NotRequired.", + "typedDictExtraFieldNotAllowed": "{name} není k dispozici v {type}", + "typedDictExtraFieldTypeMismatch": "Typ {name} není kompatibilní s typem „extra_items“ v typu {type}.", + "typedDictFieldMissing": "V {type} chybí {name}", + "typedDictFieldNotReadOnly": "{name} není v {type} jen pro čtení", + "typedDictFieldNotRequired": "{name} se v typu {type} nevyžaduje", + "typedDictFieldRequired": "{name} se vyžaduje v {type}", + "typedDictFieldTypeMismatch": "Typ {type} se nedá přiřadit k položce {name}", + "typedDictFieldUndefined": "{name} je nedefinovaná položka v typu {type}", + "typedDictKeyAccess": "Použít [\"{name}\"] k odkazování na položku v TypedDict", + "typedDictNotAllowed": "TypedDict se nedá použít pro kontroly instancí nebo tříd.", + "unhashableType": "Typ „{type}“ nejde zatřiďovat", + "uninitializedAbstractVariable": "Proměnná instance {name} je definovaná v abstraktní základní třídě {classType}, ale neinicializovala se", + "unreachableExcept": "{exceptionType} je podtřídou {parentType}", + "useDictInstead": "Označte typ slovníku pomocí dict[T1, T2]", + "useListInstead": "Použijte list[T] k označení typu seznamu (list) nebo T1 | T2 k označení typu sjednocení (union).", + "useTupleInstead": "Použijte tuple[T1, ..., Tn] k označení typu řazené kolekce členů (tuple) nebo T1 | T2 k označení typu sjednocení (union).", + "useTypeInstead": "Místo toho použít type[T]", + "varianceMismatchForClass": "Odchylka argumentu typu „{typeVarName}“ není kompatibilní se základní třídou „{className}“", + "varianceMismatchForTypeAlias": "Rozptyl argumentu typu 
„{typeVarName}“ není kompatibilní s typem „{typeAliasParam}“" + }, + "Service": { + "longOperation": "Výčet zdrojových souborů pracovního prostoru trvá dlouho. Zvažte raději otevření podsložky. [Další informace](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.de.json b/python-parser/packages/pyright-internal/src/localization/package.nls.de.json new file mode 100644 index 00000000..b8bed3ed --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.de.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "Type Stub erstellen", + "createTypeStubFor": "Type Stub für \"{moduleName}\" erstellen", + "executingCommand": "Der Befehl wird ausgeführt.", + "filesToAnalyzeCount": "{count} Dateien zu analysieren", + "filesToAnalyzeOne": "1 zu analysierende Datei", + "findingReferences": "Verweise werden gesucht", + "organizeImports": "Import-Direktiven organisieren" + }, + "Completion": { + "autoImportDetail": "Autoimport", + "indexValueDetail": "Indexwert" + }, + "Diagnostic": { + "abstractMethodInvocation": "Die Methode „{method}“ kann nicht aufgerufen werden, da sie abstrakt und nicht implementiert ist.", + "annotatedMetadataInconsistent": "Der mit Anmerkungen versehene Metadatentyp „{metadataType}“ ist nicht mit dem Typ „{type}“ kompatibel.", + "annotatedParamCountMismatch": "Nicht übereinstimmende Parameteranmerkungsanzahl: {expected} erwartet, aber {received} empfangen", + "annotatedTypeArgMissing": "Es wurde ein Typargument und mindestens eine Anmerkung für \"Annotated\" erwartet.", + "annotationBytesString": "In Typausdrücken dürfen keine Bytes-Zeichenfolgenliterale verwendet werden", + "annotationFormatString": "Typausdrücke dürfen keine Formatzeichenfolgenliterale (f-strings) enthalten", + "annotationNotSupported": "Typanmerkung wird für diese Anweisung nicht unterstützt.", + "annotationRawString": "Typausdrücke dürfen keine unformatierten 
Zeichenfolgenliterale enthalten", + "annotationSpansStrings": "Typausdrücke dürfen nicht mehrere Zeichenfolgenliterale umfassen.", + "annotationStringEscape": "Typausdrücke dürfen keine Escapezeichen enthalten", + "annotationTemplateString": "Typausdrücke dürfen keine Vorlagenzeichenfolgenliterale (t-strings) verwenden.", + "argAssignment": "Ein Argument vom Typ \"{argType}\" kann dem Parameter vom Typ \"{paramType}\" nicht zugewiesen werden.", + "argAssignmentFunction": "Ein Argument vom Typ \"{argType}\" kann dem Parameter vom Typ \"{paramType}\" in der Funktion \"{functionName}\" nicht zugewiesen werden.", + "argAssignmentParam": "Ein Argument vom Typ \"{argType}\" kann dem Parameter \"{paramName}\" vom Typ \"{paramType}\" nicht zugewiesen werden.", + "argAssignmentParamFunction": "Ein Argument vom Typ \"{argType}\" kann dem Parameter \"{paramName}\" vom Typ \"{paramType}\" in der Funktion \"{functionName}\" nicht zugewiesen werden.", + "argMissingForParam": "Für Parameter {name} fehlt ein Argument.", + "argMissingForParams": "Fehlende Argumente für die Parameter {names}", + "argMorePositionalExpectedCount": "Es wurden {expected} weitere Positionsargumente erwartet.", + "argMorePositionalExpectedOne": "Es wurde 1 weiteres Positionsargument erwartet.", + "argPositional": "Positionsargument erwartet", + "argPositionalExpectedCount": "Es wurden {expected} Positionsargumente erwartet.", + "argPositionalExpectedOne": "Es wurde 1 Positionsargument erwartet.", + "argTypePartiallyUnknown": "Der Argumenttyp ist teilweise unbekannt", + "argTypeUnknown": "Argumenttyp ist unbekannt", + "assertAlwaysTrue": "Assertausdruck wird immer als „true“ ausgewertet.", + "assertTypeArgs": "\"assert_type\" erwartet zwei Positionsargumente.", + "assertTypeTypeMismatch": "\"assert_type\" Konflikt: \"{expected}\" erwartet, aber \"{received}\" empfangen", + "assignmentExprComprehension": "Ziel des Zuweisungsausdrucks \"{name}\" kann nicht denselben Namen wie das Verständnis für das Ziel 
verwenden.", + "assignmentExprContext": "Der Zuweisungsausdruck muss sich innerhalb des Moduls, der Funktion oder der Lambdafunktion befinden.", + "assignmentExprInSubscript": "Zuweisungsausdrücke innerhalb eines Tiefgestellten werden nur in Python 3.10 und höher unterstützt.", + "assignmentInProtocol": "Instanzen- oder Klassenvariablen innerhalb einer Protocol Klasse müssen explizit innerhalb des Klassentexts deklariert werden.", + "assignmentTargetExpr": "Der Ausdruck kann kein Zuweisungsziel sein.", + "asyncNotInAsyncFunction": "Die Verwendung von \"async\" ist außerhalb einer asynchronen Funktion nicht zulässig.", + "awaitIllegal": "Die Verwendung von \"await\" erfordert Python 3.5 oder höher.", + "awaitNotAllowed": "In Typausdrücken darf „await“ nicht verwendet werden", + "awaitNotInAsync": "\"await\" ist nur innerhalb einer asynchronen Funktion zulässig.", + "backticksIllegal": "Ausdrücke, die von Backticks umgeben sind, werden in Python 3.x nicht unterstützt; verwenden Sie stattdessen repr", + "baseClassCircular": "Die Klasse kann nicht von sich selbst abgeleitet werden.", + "baseClassFinal": "Die Basisklasse \"{type}\" ist als final gekennzeichnet und kann nicht als Unterklasse verwendet werden.", + "baseClassIncompatible": "Basisklassen von {type} sind gegenseitig inkompatibel.", + "baseClassInvalid": "Das Argument für die Klasse muss eine Basisklasse sein.", + "baseClassMethodTypeIncompatible": "Basisklassen für die Klasse \"{classType}\" definieren die Methode \"{name}\" auf inkompatible Weise.", + "baseClassUnknown": "Der Basisklassentyp ist unbekannt, sodass der Typ der abgeleiteten Klasse verdeckt wird.", + "baseClassVariableTypeIncompatible": "Basisklassen für die Klasse \"{classType}\" definieren die Variable \"{name}\" auf inkompatible Weise.", + "binaryOperationNotAllowed": "Der binärer Operator ist im Typausdruck nicht zulässig", + "bindParamMissing": "Die Methode „{methodName}“ konnte nicht gebunden werden, weil der Parameter „self“ oder „cls“ 
fehlt.", + "bindTypeMismatch": "Die Methode \"{methodName}\" konnte nicht gebunden werden, da \"{type}\" dem Parameter \"{paramName}\" nicht zugewiesen werden kann.", + "breakInExceptionGroup": "„break“ ist in einem „except*“ Block nicht zulässig.", + "breakOutsideLoop": "\"break\" kann nur innerhalb einer Schleife verwendet werden.", + "bytesUnsupportedEscape": "Nicht unterstützte Escapesequenz in bytes-Literal", + "callableExtraArgs": "Es wurden nur zwei Typargumente für \"Callable\" erwartet.", + "callableFirstArg": "Parametertypliste oder \"...\" erwartet.", + "callableNotInstantiable": "Der Typ \"{type}\" kann nicht instanziiert werden.", + "callableSecondArg": "Rückgabetyp als zweites Typargument für \"Callable\" erwartet", + "casePatternIsIrrefutable": "Ein unwiderlegbares Muster ist nur für die letzte case-Anweisung zulässig.", + "classAlreadySpecialized": "Der Typ \"{type}\" ist bereits spezialisiert.", + "classDecoratorTypeUnknown": "Der nicht typisierte Klassendekorator verdeckt den Typ der Klasse; Decorator wird ignoriert.", + "classDefinitionCycle": "Die Klassendefinition für \"{name}\" hängt von sich selbst ab.", + "classGetItemClsParam": "__class_getitem__ Außerkraftsetzung sollte einen \"cls\"-Parameter annehmen.", + "classMethodClsParam": "Klassenmethoden sollten einen \"cls\"-Parameter verwenden.", + "classNotRuntimeSubscriptable": "Tiefgestellte Zeichen für die Klasse „{name}“ generieren eine Laufzeitausnahme; schließen Sie den Typausdruck in Anführungszeichen ein", + "classPatternBuiltInArgPositional": "Das Klassenmuster akzeptiert nur positionsbezogenes Untermuster.", + "classPatternNewType": "„{type}“ kann in einem Klassenmuster nicht verwendet werden, da es mit NewType definiert ist.", + "classPatternPositionalArgCount": "Zu viele Positionsmuster für Klasse \"{type}\". 
Erwartet: {expected}, empfangen: {received}.", + "classPatternTypeAlias": "\"{type}\" kann nicht in einem Klassenmuster verwendet werden, da es sich um einen spezialisierten Typalias handelt.", + "classPropertyDeprecated": "Klasseneigenschaften sind in Python 3.11 veraltet und werden in Python 3.13 nicht unterstützt.", + "classTypeParametersIllegal": "Die Syntax des Klassentypparameters erfordert Python 3.12 oder höher.", + "classVarFirstArgMissing": "Nach \"ClassVar\" wurde ein Typargument erwartet.", + "classVarNotAllowed": "\"ClassVar\" ist in diesem Kontext nicht zulässig.", + "classVarOverridesInstanceVar": "Die Klassenvariable \"{name}\" überschreibt die Instanzvariable desselben Namens in der Klasse \"{className}\"", + "classVarTooManyArgs": "Nach \"ClassVar\" wird ein Typargument erwartet.", + "classVarWithTypeVar": "Der Typ \"ClassVar\" darf keine Typvariablen enthalten.", + "clsSelfParamTypeMismatch": "Der Typ des Parameters \"{name}\" muss ein Obertyp seiner Klasse \"{classType}\" sein", + "codeTooComplexToAnalyze": "Der Code ist zu komplex für die Analyse; verringern Sie die Komplexität durch Refactoring in Unterroutinen oder durch Reduzieren bedingter Codepfade", + "collectionAliasInstantiation": "Der Typ \"{type}\" kann nicht instanziiert werden, verwenden Sie stattdessen \"{alias}\".", + "comparisonAlwaysFalse": "Die Bedingung wird immer als False ausgewertet, da die Typen \"{leftType}\" und \"{rightType}\" keine Überlappung aufweisen.", + "comparisonAlwaysTrue": "Die Bedingung wird immer als True ausgewertet, da die Typen \"{leftType}\" und \"{rightType}\" keine Überlappung aufweisen.", + "comprehensionInDict": "Verständnis kann nicht mit anderen Wörterbucheinträgen verwendet werden.", + "comprehensionInSet": "Verständnis kann nicht mit anderen „set“ Einträgen verwendet werden.", + "concatenateContext": "„Concatenate“ ist in diesem Kontext nicht zulässig.", + "concatenateParamSpecMissing": "Das letzte Typargument für \"Concatenate\" muss ein 
ParamSpec oder \"...\" sein.", + "concatenateTypeArgsMissing": "„Concatenate„ erfordert mindestens zwei Typargumente.", + "conditionalOperandInvalid": "Ungültiger bedingter Operand vom Typ \"{type}\"", + "constantRedefinition": "\"{name}\" ist konstant (da es sich um Großbuchstaben handelt) und kann nicht neu definiert werden.", + "constructorParametersMismatch": "Keine Übereinstimmung zwischen der Signatur von __new__ und __init__ in der Klasse \"{classType}\"", + "containmentAlwaysFalse": "Der Ausdruck wird immer als False ausgewertet, da die Typen \"{leftType}\" und \"{rightType}\" keine Überlappung aufweisen.", + "containmentAlwaysTrue": "Der Ausdruck wird immer als True ausgewertet, da die Typen \"{leftType}\" und \"{rightType}\" keine Überlappung aufweisen.", + "continueInExceptionGroup": "„continue“ ist in einem „except*“ Block nicht zulässig.", + "continueOutsideLoop": "\"continue\" kann nur innerhalb einer Schleife verwendet werden.", + "coroutineInConditionalExpression": "Bedingter Ausdruck verweist auf eine Coroutine, die immer zu \"True\" ausgewertet wird.", + "dataClassBaseClassFrozen": "Eine nicht fixierte Klasse kann nicht von einer fixierten Klasse erben.", + "dataClassBaseClassNotFrozen": "Eine fixierte Klasse kann nicht von einer nicht fixierten Klasse erben.", + "dataClassConverterFunction": "Das Argument vom Typ \"{argType}\" ist kein gültiger Konverter für das Feld \"{fieldName}\" vom Typ \"{fieldType}\"", + "dataClassConverterOverloads": "Keine Überladungen von \"{funcName}\" sind gültige Konverter für das Feld \"{fieldName}\" vom Typ \"{fieldType}\"", + "dataClassFieldInheritedDefault": "„{fieldName}“ überschreibt ein Feld mit demselben Namen, aber es fehlt ein Standardwert", + "dataClassFieldWithDefault": "Felder ohne Standardwerte dürfen nicht nach Feldern mit Standardwerten angezeigt werden.", + "dataClassFieldWithPrivateName": "Das Feld \"Dataclass\" kann keinen privaten Namen verwenden.", + "dataClassFieldWithoutAnnotation": 
"Datenklassenfeld ohne Typanmerkung verursacht eine Laufzeitausnahme", + "dataClassPostInitParamCount": "Dataclass __post_init__ falsche Parameteranzahl; Anzahl von InitVar-Feldern ist {expected}", + "dataClassPostInitType": "Dataclass __post_init__ Methodenparametertypkonflikt für Feld \"{fieldName}\"", + "dataClassSlotsOverwrite": "__slots__ ist bereits in der Klasse definiert.", + "dataClassTransformExpectedBoolLiteral": "Es wurde ein Ausdruck erwartet, der statisch als True oder False ausgewertet wird.", + "dataClassTransformFieldSpecifier": "Es wurde tuple von Klassen oder Funktionen erwartet, es wurde jedoch der Typ \"{type}\" empfangen", + "dataClassTransformPositionalParam": "Alle Argumente für \"dataclass_transform\" müssen Schlüsselwortargumente sein.", + "dataClassTransformUnknownArgument": "Argument \"{name}\" wird von dataclass_transform nicht unterstützt.", + "dataProtocolInSubclassCheck": "Datenprotokolle (die Nicht-Methodenattribute enthalten) sind in „issubclass“-Aufrufen nicht zulässig.", + "declaredReturnTypePartiallyUnknown": "Der deklarierte Rückgabetyp \"{returnType}\" ist teilweise unbekannt.", + "declaredReturnTypeUnknown": "Unbekannter Rückgabetyp deklariert", + "defaultValueContainsCall": "Funktionsaufrufe und änderbare Objekte sind innerhalb des Parameterstandardwertausdrucks nicht zulässig.", + "defaultValueNotAllowed": "Ein Parameter mit \"*\" oder \"**\" darf keinen Standardwert aufweisen.", + "delTargetExpr": "Der Ausdruck kann nicht gelöscht werden", + "deprecatedClass": "Die Klasse \"{name}\" ist veraltet.", + "deprecatedConstructor": "Der Konstruktor für die Klasse \"{name}\" ist veraltet.", + "deprecatedDescriptorDeleter": "Die Methode \"__delete__\" für den Deskriptor \"{name}\" ist veraltet.", + "deprecatedDescriptorGetter": "Die Methode \"__get__\" für den Deskriptor \"{name}\" ist veraltet.", + "deprecatedDescriptorSetter": "Die Methode \"__set__\" für den Deskriptor \"{name}\" ist veraltet.", + "deprecatedFunction": "Die 
Funktion \"{name}\" ist veraltet.", + "deprecatedMethod": "Die Methode \"{name}\" in der Klasse \"{className}\" ist veraltet.", + "deprecatedPropertyDeleter": "Der deleter für property \"{name}\" ist veraltet.", + "deprecatedPropertyGetter": "Der getter für property \"{name}\" ist veraltet.", + "deprecatedPropertySetter": "Der setter für property \"{name}\" ist veraltet.", + "deprecatedType": "Dieser Typ ist ab python-{version} veraltet; verwenden Sie stattdessen \"{replacement}\"", + "dictExpandIllegalInComprehension": "Wörterbucherweiterung ist im Verständnis nicht zulässig.", + "dictInAnnotation": "Der Wörterbuchausdruck ist im Typausdruck nicht zulässig", + "dictKeyValuePairs": "Wörterbucheinträge müssen Schlüssel-Wert-Paare enthalten.", + "dictUnpackIsNotMapping": "Es wird eine Zuordnung für den Operator zum Entpacken des Wörterbuchs erwartet.", + "dunderAllSymbolNotPresent": "\"{name}\" ist in __all__ angegeben, aber nicht im Modul vorhanden.", + "duplicateArgsParam": "Es ist nur ein \"*\"-Parameter zulässig.", + "duplicateBaseClass": "Doppelte Basisklasse nicht zulässig", + "duplicateCapturePatternTarget": "\"{name}\" des Erfassungsziels darf nicht mehrmals innerhalb desselben Musters vorkommen.", + "duplicateCatchAll": "Es ist nur eine catch-all except-Klausel zulässig.", + "duplicateEnumMember": "Der Enumerationsmember \"{name}\" wurde bereits deklariert.", + "duplicateGenericAndProtocolBase": "Es ist nur eine Generic[...]- oder ein Protocol[...]-Basisklasse zulässig.", + "duplicateImport": "\"{importName}\" wurde mehrmals importiert.", + "duplicateKeywordOnly": "Es ist nur ein \"*\"-Trennzeichen zulässig.", + "duplicateKwargsParam": "Es ist nur ein \"**\"-Parameter zulässig.", + "duplicateParam": "Doppelter Parameter \"{name}\"", + "duplicatePositionOnly": "Es ist nur ein \"/\"-Parameter zulässig.", + "duplicateStarPattern": "In einer Mustersequenz ist nur ein \"*\"-Muster zulässig.", + "duplicateStarStarPattern": "Es ist nur ein \"**\"-Eintrag 
zulässig.", + "duplicateUnpack": "In list ist nur ein Vorgang zum Entpacken zulässig.", + "ellipsisAfterUnpacked": "„...“ kann nicht mit einem entpackten „TypeVarTuple“ oder „tuple“ verwendet werden.", + "ellipsisContext": "\"...\" ist in diesem Kontext nicht zulässig.", + "ellipsisSecondArg": "\"...\" ist nur als zweites von zwei Argumenten zulässig.", + "enumClassOverride": "Die Enumerationsklasse \"{name}\" ist final und kann nicht in eine Unterklasse aufgenommen werden.", + "enumMemberDelete": "Das Enumerationselement \"{name}\" kann nicht gelöscht werden.", + "enumMemberSet": "Das Enumerationselement \"{name}\" kann nicht zugewiesen werden.", + "enumMemberTypeAnnotation": "Typanmerkungen sind für enum Member nicht zulässig", + "exceptGroupMismatch": "Die „try“-Anweisung darf nicht sowohl „except“ als auch „except*“ enthalten.", + "exceptGroupRequiresType": "Die Ausnahmegruppensyntax („except*“) erfordert einen Ausnahmetyp.", + "exceptRequiresParens": "Vor Python 3.14 müssen mehrere Ausnahmetypen in Klammern stehen.", + "exceptWithAsRequiresParens": "Bei Verwendung von „as“ müssen mehrere Ausnahmetypen in Klammern stehen.", + "exceptionGroupIncompatible": "Die Ausnahmegruppensyntax (\"except*\") erfordert Python 3.11 oder höher.", + "exceptionGroupTypeIncorrect": "Der Ausnahmetyp in except* kann nicht von BaseGroupException abgeleitet werden.", + "exceptionTypeIncorrect": "\"{type}\" ist nicht von BaseException abgeleitet.", + "exceptionTypeNotClass": "\"{type}\" ist keine gültige Ausnahmeklasse.", + "exceptionTypeNotInstantiable": "Der Konstruktor für den Ausnahmetyp \"{type}\" erfordert mindestens ein Argument.", + "expectedAfterDecorator": "Es wurde eine Funktions- oder Klassendeklaration nach dem Decorator erwartet.", + "expectedArrow": "\"->\" gefolgt von Rückgabetypanmerkung erwartet", + "expectedAsAfterException": "\"as\" nach Ausnahmetyp erwartet", + "expectedAssignRightHandExpr": "Ausdruck rechts von \"=\" erwartet", + "expectedBinaryRightHandExpr": 
"Ausdruck rechts vom Operator", + "expectedBoolLiteral": "\"True\" oder \"False\" erwartet", + "expectedCase": "\"case\"-Anweisung erwartet", + "expectedClassName": "Klassenname erwartet", + "expectedCloseBrace": "\"{\" wurde nicht geschlossen.", + "expectedCloseBracket": "\"[\" wurde nicht geschlossen.", + "expectedCloseParen": "\"(\" wurde nicht geschlossen.", + "expectedColon": "\":\" erwartet", + "expectedComplexNumberLiteral": "Für den Musterabgleich wurde ein komplexes Zahlenliteral erwartet.", + "expectedDecoratorExpr": "Das Ausdrucksformular wird für den Decorator vor Python 3.9 nicht unterstützt.", + "expectedDecoratorName": "Decoratorname erwartet", + "expectedDecoratorNewline": "Am Ende des Decorators wurde eine neue Zeile erwartet.", + "expectedDelExpr": "Ausdruck nach \"del\" erwartet", + "expectedElse": "\"else\" erwartet", + "expectedEquals": "\"=\" erwartet", + "expectedExceptionClass": "Ungültige Ausnahmeklasse oder ungültiges Objekt.", + "expectedExceptionObj": "Ausnahmeobjekt, Ausnahmeklasse oder None erwartet", + "expectedExpr": "Ausdruck erwartet.", + "expectedFunctionAfterAsync": "Funktionsdefinition nach \"async\" erwartet", + "expectedFunctionName": "Funktionsname nach \"def\" erwartet", + "expectedIdentifier": "Bezeichner erwartet", + "expectedImport": "\"import\" erwartet", + "expectedImportAlias": "Symbol nach \"as\" erwartet", + "expectedImportSymbols": "Nach dem \"import\" wurde mindestens ein Symbolname erwartet.", + "expectedIn": "\"in\" wurde erwartet.", + "expectedInExpr": "Ausdruck nach \"in\" erwartet", + "expectedIndentedBlock": "Eingerückter Block erwartet", + "expectedMemberName": "Attributname nach „.“ erwartet", + "expectedModuleName": "Modulname erwartet", + "expectedNameAfterAs": "Symbolname nach \"as\" erwartet", + "expectedNamedParameter": "Der Schlüsselwortparameter muss auf \"*\" folgen.", + "expectedNewline": "Zeilenumbruch erwartet", + "expectedNewlineOrSemicolon": "Anweisungen müssen durch Zeilenumbrüche oder 
Semikolons getrennt werden.", + "expectedOpenParen": "\"(\" erwartet", + "expectedParamName": "Parametername erwartet", + "expectedPatternExpr": "Musterausdruck erwartet", + "expectedPatternSubjectExpr": "Musterbetreffausdruck erwartet", + "expectedPatternValue": "Es wurde ein Musterwertausdruck im Format \"a.b\" erwartet.", + "expectedReturnExpr": "Ausdruck nach \"return\" erwartet", + "expectedSliceIndex": "Index- oder Sliceausdruck erwartet", + "expectedTypeNotString": "Typ erwartet, aber Zeichenfolgenliteral empfangen", + "expectedTypeParameterName": "Name für Typparameter erwartet", + "expectedYieldExpr": "Ausdruck in yield-Anweisung erwartet", + "finalClassIsAbstract": "Die Klasse „{type}“ ist als final markiert und muss alle abstrakten Symbole implementieren.", + "finalContext": "\"Final\" ist in diesem Kontext nicht zulässig.", + "finalInLoop": "Eine \"Final\"-Variable kann nicht innerhalb einer Schleife zugewiesen werden.", + "finalMethodOverride": "Die Methode \"{name}\" kann die in der Klasse definierte final Methode \"{className}\" nicht überschreiben.", + "finalNonMethod": "Die Funktion „{name}“ kann nicht @final markiert werden, da sie keine Methode ist.", + "finalReassigned": "\"{name}\" ist als \"Final\" deklariert und kann nicht neu zugewiesen werden.", + "finalRedeclaration": "\"{name}\" wurde zuvor als \"Final\" deklariert.", + "finalRedeclarationBySubclass": "\"{name}\" kann nicht neu deklariert werden, da die übergeordnete Klasse \"{className}\" es als Final deklariert.", + "finalTooManyArgs": "Nach \"Final\" wurde ein einzelnes Typargument erwartet.", + "finalUnassigned": "\"{name}\" ist als Final deklariert, aber der Wert ist nicht zugewiesen.", + "finallyBreak": "Ein „break“ kann nicht verwendet werden, um einen „finally“-Block zu beenden.", + "finallyContinue": "Ein „continue“ kann nicht verwendet werden, um einen „finally“-Block zu beenden.", + "finallyReturn": "Ein „return“ kann nicht verwendet werden, um einen „finally“-Block zu 
beenden.", + "formatStringBrace": "Eine einzelne schließende geschweifte Klammer ist innerhalb des f-string-Literals nicht zulässig; verwenden Sie doppelte schließende geschweifte Klammern.", + "formatStringBytes": "Formatzeichenfolgenliterale (f-strings) dürfen nicht binär sein.", + "formatStringDebuggingIllegal": "Der F-String-Debugspezifizierer \"=\" erfordert Python 3.8 oder höher.", + "formatStringEscape": "Escapesequenz (umgekehrter Schrägstrich) ist im Ausdrucksteil der f-Zeichenfolge vor Python 3.12 nicht zulässig.", + "formatStringExpectedConversion": "Nach \"!\" in f-string wurde ein Konvertierungsspezifizierer erwartet.", + "formatStringIllegal": "Formatzeichenfolgenliterale (f-strings) erfordern Python 3.6 oder höher.", + "formatStringInPattern": "Die Formatzeichenfolge ist im Muster nicht zulässig.", + "formatStringNestedFormatSpecifier": "Ausdrücke sind zu tief im Formatzeichenfolgenspezifizierer geschachtelt.", + "formatStringNestedQuote": "Zeichenfolgen, die in einer f-Zeichenfolge geschachtelt sind, dürfen nicht dasselbe Anführungszeichen wie die f-Zeichenfolge vor Python 3.12 verwenden.", + "formatStringTemplate": "Formatzeichenfolgenliterale (f-strings) können nicht gleichzeitig Vorlagenzeichenfolgen (t-strings) sein.", + "formatStringUnicode": "Formatzeichenfolgenliterale (f-strings) dürfen nicht Unicode sein.", + "formatStringUnterminated": "Nicht abgeschlossener Ausdruck in f-string; \"}\" wird erwartet.", + "functionDecoratorTypeUnknown": "Ein nicht typisierter Funktionsdekorator verdeckt den Funktionstyp; Decorator wird ignoriert.", + "functionInConditionalExpression": "Bedingter Ausdruck verweist auf eine Funktion, die immer zu \"True\" ausgewertet wird.", + "functionTypeParametersIllegal": "Die Syntax des Funktionstypparameters erfordert Python 3.12 oder höher.", + "futureImportLocationNotAllowed": "Importe von __future__ müssen am Anfang der Datei auftreten", + "generatorAsyncReturnType": "Der Rückgabetyp der asynchronen Generatorfunktion 
muss mit \"AsyncGenerator[{yieldType}, Any]\" kompatibel sein.", + "generatorNotParenthesized": "Generatorausdrücke müssen in Klammern gesetzt werden, wenn sie nicht das einzige Argument sind.", + "generatorSyncReturnType": "Der Rückgabetyp der Generatorfunktion muss mit \"Generator[{yieldType}, Any, Any]\" kompatibel sein.", + "genericBaseClassNotAllowed": "Die Basisklasse \"Generic\" kann nicht mit der Typparametersyntax verwendet werden.", + "genericClassAssigned": "Der generische Klassentyp kann nicht zugewiesen werden.", + "genericClassDeleted": "Der generische Klassentyp kann nicht gelöscht werden.", + "genericInstanceVariableAccess": "Der Zugriff auf die generische Instanzvariable über die Klasse ist mehrdeutig.", + "genericNotAllowed": "\"Generic\" ist in diesem Kontext nicht gültig.", + "genericTypeAliasBoundTypeVar": "Der generische Typalias innerhalb der Klasse kann keine gebundenen Typvariablen {names} verwenden.", + "genericTypeArgMissing": "\"Generic\" erfordert mindestens ein Typargument.", + "genericTypeArgTypeVar": "Das Typargument für \"Generic\" muss eine Typvariable sein.", + "genericTypeArgUnique": "Typargumente für \"Generic\" müssen eindeutig sein.", + "globalReassignment": "\"{name}\" wird vor einer globalen Deklaration zugewiesen.", + "globalRedefinition": "\"{name}\" wurde bereits als global deklariert.", + "implicitStringConcat": "Implizite Zeichenfolgenverkettung nicht zulässig", + "importCycleDetected": "Zyklus in Importkette erkannt", + "importDepthExceeded": "Importkettentiefe überschritten {depth}", + "importResolveFailure": "Import \"{importName}\" konnte nicht aufgelöst werden.", + "importSourceResolveFailure": "Import \"{importName}\" konnte aus der Quelle nicht aufgelöst werden.", + "importSymbolUnknown": "\"{name}\" ist ein unbekanntes Importsymbol.", + "incompatibleMethodOverride": "Die Methode \"{name}\" überschreibt die Klasse \"{className}\" auf inkompatible Weise.", + "inconsistentIndent": "Der Betrag für Nichteinzug stimmt 
nicht mit dem vorherigen Einzug überein.", + "inconsistentTabs": "Inkonsistente Verwendung von Tabulatoren und Leerzeichen im Einzug.", + "initMethodSelfParamTypeVar": "Typ-Anmerkung für \"self\"-Parameter der \"__init__\"-Methode kann keine klassenübergreifenden Typvariablen enthalten", + "initMustReturnNone": "Der Rückgabetyp von \"__init__\" muss \"None\" sein.", + "initSubclassCallFailed": "Falsche Schlüsselwortargumente für __init_subclass__ Methode.", + "initSubclassClsParam": "__init_subclass__ Außerkraftsetzung sollte einen \"cls\"-Parameter annehmen.", + "initVarNotAllowed": "„InitVar“ ist in diesem Kontext nicht zulässig.", + "instanceMethodSelfParam": "Instanzmethoden sollten einen \"self\"-Parameter verwenden.", + "instanceVarOverridesClassVar": "Die Instanzvariable \"{name}\" überschreibt die Klassenvariable desselben Namens in der Klasse \"{className}\"", + "instantiateAbstract": "Abstrakte Klasse \"{type}\" kann nicht erstellt werden.", + "instantiateProtocol": "Die Protocol-Klasse \"{type}\" kann nicht instanziiert werden.", + "internalBindError": "Interner Fehler beim Binden der Datei \"{file}\": {message}", + "internalParseError": "Interner Fehler beim Parsen der Datei \"{file}\": {message}", + "internalTypeCheckingError": "Interner Fehler bei der Typüberprüfung der Datei \"{file}\": {message}", + "invalidIdentifierChar": "Ungültiges Zeichen in Bezeichner", + "invalidStubStatement": "Die Anweisung ist innerhalb einer Typstubdatei bedeutungslos.", + "invalidTokenChars": "Ungültiges Zeichen \"{text}\" im Token", + "isInstanceInvalidType": "Das zweite Argument für \"isinstance\" muss eine Klasse oder tuple von Klassen sein.", + "isSubclassInvalidType": "Das zweite Argument für \"issubclass\" muss eine Klasse oder tuple von Klassen sein.", + "keyValueInSet": "Schlüssel-Wert-Paare sind innerhalb einer Menge „set“ nicht zulässig.", + "keywordArgInTypeArgument": "Schlüsselwortargumente können nicht in Typargumentlisten verwendet werden.", + 
"keywordOnlyAfterArgs": "Schlüsselworttrennzeichen ist nach dem Parameter \"*\" nicht zulässig.", + "keywordParameterMissing": "Mindestens ein Schlüsselwortparameter muss dem Parameter \"*\" folgen.", + "keywordSubscriptIllegal": "Schlüsselwortargumente innerhalb von Tiefskripts werden nicht unterstützt.", + "lambdaReturnTypePartiallyUnknown": "Der Rückgabetyp des Lambdaausdrucks \"{returnType}\" ist teilweise unbekannt.", + "lambdaReturnTypeUnknown": "Der Rückgabetyp der Lambdafunktion ist unbekannt.", + "listAssignmentMismatch": "Ein Ausdruck vom Typ \"{type}\" kann der Zielliste nicht zugewiesen werden.", + "listInAnnotation": "Der Listenausdruck ist im Typausdruck nicht zulässig", + "literalEmptyArgs": "Nach \"Literal\" wurde mindestens ein Typargument erwartet.", + "literalNamedUnicodeEscape": "Benannte Escapesequenz für Unicodezeichen werden in Zeichenfolgenanmerkungen vom Typ „Literal“ nicht unterstützt.", + "literalNotAllowed": "\"Literal\" kann in diesem Kontext nicht ohne Typargument verwendet werden.", + "literalNotCallable": "Der Literaltyp kann nicht instanziiert werden.", + "literalUnsupportedType": "Typargumente für \"Literal\" müssen None, ein Literalwert (int, bool, str oder bytes) oder ein enum Wert sein.", + "matchIncompatible": "Match Anweisungen erfordern Python 3.10 oder höher", + "matchIsNotExhaustive": "Fälle innerhalb der match-Anweisung behandeln nicht umfassend alle Werte.", + "maxParseDepthExceeded": "Maximale Analysetiefe überschritten; brechen Sie den Ausdruck in kleinere Unterausdrücke um", + "memberAccess": "Auf das Attribut „{name}“ für die Klasse „{type}“ kann nicht zugegriffen werden", + "memberDelete": "Das Attribut „{name}“ für die Klasse „{type}“ kann nicht gelöscht werden", + "memberSet": "Zum Attribut „{name}“ für die Klasse „{type}“ kann nicht zugewiesen werden", + "metaclassConflict": "Die Metaklasse einer abgeleiteten Klasse muss eine Unterklasse der Metaklassen aller ihrer Basisklassen sein.", + "metaclassDuplicate": "Es 
kann nur eine Metaklasse angegeben werden.", + "metaclassIsGeneric": "Metaklasse kann nicht generisch sein.", + "methodNotDefined": "\"{name}\" Methode nicht definiert", + "methodNotDefinedOnType": "Die Methode \"{name}\" ist für den Typ \"{type}\" nicht definiert.", + "methodOrdering": "Es kann keine konsistente Methodenreihenfolge erstellt werden.", + "methodOverridden": "\"{name}\" überschreibt die Methode mit demselben Namen in der Klasse \"{className}\" mit inkompatiblem Typ \"{type}\"", + "methodReturnsNonObject": "Die Methode \"{name}\" gibt kein Objekt zurück.", + "missingSuperCall": "Die Methode \"{methodName}\" ruft nicht die Methode mit demselben Namen in der übergeordneten Klasse auf.", + "mixingBytesAndStr": "Bytes- und str-Werte können nicht verkettet werden", + "moduleAsType": "Das Modul kann nicht als Typ verwendet werden.", + "moduleNotCallable": "Das Modul kann nicht aufgerufen werden.", + "moduleUnknownMember": "„{memberName}“ ist kein bekanntes Attribut des Moduls „{moduleName}“", + "namedExceptAfterCatchAll": "Eine benannte except-Klausel darf nicht nach catch-all except-Klausel auftreten.", + "namedParamAfterParamSpecArgs": "Der Schlüsselwortparameter \"{name}\" kann nicht in der Signatur nach dem Parameter \"ParamSpec args\" verwendet werden.", + "namedTupleEmptyName": "Namen innerhalb benannten tuple dürfen nicht leer sein.", + "namedTupleEntryRedeclared": "\"{name}\" kann nicht überschrieben werden, da die übergeordnete benannte tuple Klasse \"{className}\" ist.", + "namedTupleFieldUnderscore": "„Named tuple“-Feldnamen dürfen nicht mit einem Unterstrich beginnen.", + "namedTupleFirstArg": "Es wird ein benannter tuple Klassenname als erstes Argument erwartet.", + "namedTupleMultipleInheritance": "Mehrfachvererbung mit NamedTuple wird nicht unterstützt.", + "namedTupleNameKeyword": "Feldnamen dürfen kein Schlüsselwort sein.", + "namedTupleNameType": "Es wurde tuple mit zwei Einträgen unter Angabe von Eintragsname und -typ erwartet.", + 
"namedTupleNameUnique": "Namen innerhalb benannten tuple müssen eindeutig sein.", + "namedTupleNoTypes": "\"namedtuple\" stellt keine Typen für tuple bereit; verwenden Sie stattdessen \"NamedTuple\".", + "namedTupleSecondArg": "Benannte tuple Eintragsliste als zweites Argument erwartet", + "newClsParam": "__new__ Außerkraftsetzung sollte einen \"cls\"-Parameter annehmen.", + "newTypeAnyOrUnknown": "Das zweite Argument für NewType muss eine bekannte Klasse sein, nicht „Any“ oder „Unknown“.", + "newTypeBadName": "Das erste Argument für NewType muss ein Zeichenfolgenliteral sein.", + "newTypeLiteral": "\"NewType\" kann nicht mit dem Literaltyp verwendet werden.", + "newTypeNameMismatch": "NewType muss einer Variablen mit demselben Namen zugewiesen werden.", + "newTypeNotAClass": "Klasse als zweites Argument für NewType erwartet", + "newTypeParamCount": "NewType erfordert zwei Positionsargumente.", + "newTypeProtocolClass": "NewType kann nicht mit strukturellem Typ (Protocol- oder TypedDict-Klasse) verwendet werden.", + "noOverload": "Keine Überladungen für \"{name}\" stimmen mit den angegebenen Argumenten überein.", + "noReturnContainsReturn": "Eine Funktion mit dem deklarierten return Typ \"NoReturn\" kann keine return-Anweisung enthalten.", + "noReturnContainsYield": "Eine Funktion mit dem deklarierten Rückgabetyp \"NoReturn\" kann keine yield-Anweisung enthalten.", + "noReturnReturnsNone": "Eine Funktion mit dem deklarierten Rückgabetyp \"NoReturn\" kann nicht \"None\" zurückgeben.", + "nonDefaultAfterDefault": "Das nicht standardmäßige Argument folgt dem Standardargument.", + "nonLocalInModule": "Nonlocal Deklaration auf Modulebene nicht zulässig", + "nonLocalNoBinding": "Es wurde keine Bindung für nonlocal \"{name}\" gefunden.", + "nonLocalReassignment": "\"{name}\" wird vor einer nonlocal Deklaration zugewiesen.", + "nonLocalRedefinition": "\"{name}\" wurde bereits als nonlocal deklariert.", + "noneNotCallable": "Ein Objekt vom Typ \"None\" kann nicht aufgerufen 
werden.", + "noneNotIterable": "Ein Objekt vom Typ \"None\" kann nicht als iterierbarer Wert verwendet werden.", + "noneNotSubscriptable": "Das Objekt vom Typ \"None\" kann nicht tiefgestellt werden.", + "noneNotUsableWith": "Das Objekt vom Typ „None“ kann nicht mit „with“ verwendet werden.", + "noneNotUsableWithAsync": "Das Objekt vom Typ „None“ kann nicht mit „async with“ verwendet werden.", + "noneOperator": "Der Operator \"{operator}\" wird für den \"{None}\" nicht unterstützt.", + "noneUnknownMember": "„{name}“ ist kein bekanntes Attribut von „None“", + "nonlocalTypeParam": "Die Bindung „Nonlocal“ ist für den Typparameter „{name}“ nicht zulässig.", + "notRequiredArgCount": "Nach \"NotRequired\" wurde ein einzelnes Typargument erwartet.", + "notRequiredNotInTypedDict": "\"NotRequired\" ist in diesem Kontext nicht zulässig.", + "objectNotCallable": "Das Objekt vom Typ \"{type}\" kann nicht aufgerufen werden.", + "obscuredClassDeclaration": "Die Klassendeklaration \"{name}\" wird durch eine Deklaration desselben Namens verdeckt.", + "obscuredFunctionDeclaration": "Die Funktionsdeklaration \"{name}\" wird durch eine Deklaration desselben Namens verdeckt.", + "obscuredMethodDeclaration": "Die Methodendeklaration \"{name}\" wird durch eine Deklaration desselben Namens verdeckt.", + "obscuredParameterDeclaration": "Die Parameterdeklaration \"{name}\" wird durch eine Deklaration desselben Namens verdeckt.", + "obscuredTypeAliasDeclaration": "Die Typaliasdeklaration \"{name}\" wird durch eine Deklaration desselben Namens verdeckt.", + "obscuredVariableDeclaration": "Die Deklaration \"{name}\" wird durch eine Deklaration desselben Namens verdeckt.", + "operatorLessOrGreaterDeprecated": "Der Operator \"<>\" wird in Python 3 nicht unterstützt; verwenden Sie stattdessen \"!=\".", + "optionalExtraArgs": "Nach \"Optional\" wurde ein Typargument erwartet.", + "orPatternIrrefutable": "Ein irrefutables Muster ist nur als letztes Unterpattern in einem \"or\"-Muster zulässig.", + 
"orPatternMissingName": "Alle Teilmuster innerhalb eines \"or\"-Musters müssen auf dieselben Namen zielen.", + "overlappingKeywordArgs": "Typisierte Wörterbuchüberlappungen mit Schlüsselwortparameter: {names}", + "overlappingOverload": "Überladung {obscured} für \"{name}\" wird nie verwendet, da sich die Parameter überlappen, {obscuredBy}", + "overloadAbstractImplMismatch": "Überladungen müssen dem abstrakten Status der Implementierung entsprechen.", + "overloadAbstractMismatch": "Überladungen müssen alle abstrakt sein oder nicht.", + "overloadClassMethodInconsistent": "Überladungen für \"{name}\" verwenden @classmethod inkonsistent", + "overloadFinalImpl": "@final Decorator sollte nur auf die Implementierung angewendet werden.", + "overloadFinalNoImpl": "Nur die erste Überladung sollte @final markiert werden.", + "overloadImplementationMismatch": "Die überladene Implementierung ist nicht konsistent mit der Signatur der Überladung {index}", + "overloadOverrideImpl": "@override Decorator sollte nur auf die Implementierung angewendet werden.", + "overloadOverrideNoImpl": "Nur die erste Überladung sollte @override markiert werden.", + "overloadReturnTypeMismatch": "Überladung {prevIndex} für \"{name}\" überlappt {newIndex} und gibt einen inkompatiblen Typ zurück.", + "overloadStaticMethodInconsistent": "Überladungen für \"{name}\" verwenden @staticmethod inkonsistent", + "overloadWithoutImplementation": "\"{name}\" ist als overload markiert, es wurde jedoch keine Implementierung bereitgestellt.", + "overriddenMethodNotFound": "Die Methode \"{name}\" ist als override markiert, aber es ist keine Basismethode mit demselben Namen vorhanden.", + "overrideDecoratorMissing": "Die Methode \"{name}\" ist nicht als override markiert, überschreibt jedoch eine Methode in der Klasse \"{className}\"", + "paramAfterKwargsParam": "Der Parameter kann nicht auf den Parameter \"**\" folgen.", + "paramAlreadyAssigned": "Der Parameter \"{name}\" ist bereits zugewiesen.", + 
"paramAnnotationMissing": "Typanmerkung fehlt für Parameter \"{name}\"", + "paramAssignmentMismatch": "Ein Ausdruck vom Typ \"{sourceType}\" kann keinem Parameter vom Typ \"{paramType}\" zugewiesen werden.", + "paramNameMissing": "Kein Parameter mit dem Namen \"{name}\"", + "paramSpecArgsKwargsDuplicate": "Es wurden bereits Argumente für ParamSpec \"{type}\" bereitgestellt", + "paramSpecArgsKwargsUsage": "Die Attribute „args“ und „kwargs“ von ParamSpec müssen beide innerhalb einer Funktionssignatur auftreten", + "paramSpecArgsMissing": "Argumente für ParamSpec \"{type}\" fehlen.", + "paramSpecArgsUsage": "Das Attribut „args“ von ParamSpec ist nur gültig, wenn es mit dem Parameter „*args“ verwendet wird", + "paramSpecAssignedName": "ParamSpec muss einer Variablen mit dem Namen \"{name}\" zugewiesen werden.", + "paramSpecContext": "ParamSpec ist in diesem Kontext nicht zulässig.", + "paramSpecDefaultNotTuple": "Es wurde ein Auslassungszeichen, ein tuple Ausdruck oder ParamSpec für den Standardwert von ParamSpec erwartet.", + "paramSpecFirstArg": "Der Name von ParamSpec wurde als erstes Argument erwartet.", + "paramSpecKwargsUsage": "Das Attribut „kwargs“ von ParamSpec ist nur gültig, wenn es mit dem Parameter „**kwargs“ verwendet wird", + "paramSpecNotUsedByOuterScope": "ParamSpec \"{name}\" hat in diesem Kontext keine Bedeutung.", + "paramSpecUnknownArg": "ParamSpec unterstützt nur ein Argument.", + "paramSpecUnknownMember": "„{name}“ ist kein bekanntes Attribut von ParamSpec", + "paramSpecUnknownParam": "\"{name}\" ist ein unbekannter Parameter für ParamSpec.", + "paramTypeCovariant": "Eine Variable vom Typ \"Covariant\" kann nicht im Parametertyp verwendet werden.", + "paramTypePartiallyUnknown": "Der Typ des Parameters \"{paramName}\" ist teilweise unbekannt.", + "paramTypeUnknown": "Der Typ des Parameters \"{paramName}\" ist unbekannt.", + "parenthesizedContextManagerIllegal": "Klammern innerhalb einer \"with\"-Anweisung erfordern Python 3.9 oder höher.", + 
"patternNeverMatches": "Das Muster wird für den Antragstellertyp \"{type}\" nie abgeglichen", + "positionArgAfterNamedArg": "Ein Positionsargument darf nicht nach Schlüsselwortargumenten stehen.", + "positionArgAfterUnpackedDictArg": "Das positionelle Argument kann nach dem Entpacken des Schlüsselwortarguments nicht mehr angezeigt werden.", + "positionOnlyAfterArgs": "Das Parametertrennzeichen \"Nur Position\" ist nach dem Parameter \"*\" nicht zulässig.", + "positionOnlyAfterKeywordOnly": "Der Parameter \"/\" muss vor dem Parameter \"*\" stehen.", + "positionOnlyAfterNon": "Der Parameter \"Nur Position\" ist nach einem Parameter, der nicht vom Typ \"Nur Position\" ist, nicht zulässig.", + "positionOnlyFirstParam": "Das Parametertrennzeichen \"Nur Position\" ist als erster Parameter nicht zulässig.", + "positionOnlyIncompatible": "Für das Parametertrennzeichen \"Nur Position\" ist Python 3.8 oder höher erforderlich.", + "privateImportFromPyTypedModule": "\"{name}\" wird nicht aus dem Modul \"{module}\" exportiert.", + "privateUsedOutsideOfClass": "\"{name}\" ist privat und wird außerhalb der Klasse verwendet, in dem es deklariert ist.", + "privateUsedOutsideOfModule": "\"{name}\" ist privat und wird außerhalb des Moduls verwendet, in dem es deklariert ist.", + "propertyOverridden": "\"{name}\" überschreibt property desselben Namens in der Klasse \"{className}\" nicht ordnungsgemäß", + "propertyStaticMethod": "Statische Methoden sind für property getter, setter oder deleter nicht zulässig.", + "protectedUsedOutsideOfClass": "\"{name}\" ist geschützt und wird außerhalb der Klasse verwendet, in der es deklariert ist.", + "protocolBaseClass": "Die Protocol-Klasse \"{classType}\" kann nicht von einer non-Protocol-Klasse \"{baseType}\" abgeleitet werden", + "protocolBaseClassWithTypeArgs": "Typargumente sind mit der Protocol Klasse nicht zulässig, wenn die Typparametersyntax verwendet wird.", + "protocolIllegal": "Die Verwendung von \"Protocol\" erfordert Python 3.7 oder 
höher.", + "protocolNotAllowed": "\"Protocol\" kann in diesem Kontext nicht verwendet werden.", + "protocolTypeArgMustBeTypeParam": "Das Typargument für „Protocol“ muss ein Typparameter sein.", + "protocolUnsafeOverlap": "Die Klasse überlappt unsicher mit „{name}“ und könnte zur Laufzeit eine Übereinstimmung erzeugen.", + "protocolVarianceContravariant": "Die Typvariable \"{variable}\", die im generischen Protocol \"{class}\" verwendet wird, muss \"contravariant\" sein.", + "protocolVarianceCovariant": "Die Typvariable \"{variable}\", die im generischen Protocol \"{class}\" verwendet wird, muss \"covariant\" sein.", + "protocolVarianceInvariant": "Die Typvariable \"{variable}\", die im generischen Protocol \"{class}\" verwendet wird, muss \"invariant\" sein.", + "pyrightCommentInvalidDiagnosticBoolValue": "Auf die Pyright-Kommentardirektive muss \"=\" und der Wert \"true\" oder \"false\" folgen.", + "pyrightCommentInvalidDiagnosticSeverityValue": "Auf die Pyright-Kommentardirektive muss \"=\" und der Wert \"true\", \"false\", \"error\", \"warning\", \"information\" oder \"none\" folgen.", + "pyrightCommentMissingDirective": "Auf einen Pyright-Kommentar muss eine Direktive (basic oder strict) oder eine Diagnoseregel folgen.", + "pyrightCommentNotOnOwnLine": "Pyright-Kommentare, die zum Steuern von Einstellungen auf Dateiebene verwendet werden, müssen in ihrer eigenen Zeile angezeigt werden.", + "pyrightCommentUnknownDiagnosticRule": "\"{rule}\" ist eine unbekannte Diagnoseregel für pyright-Kommentar.", + "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" ist ein ungültiger Wert für den pyright-Kommentar; \"true\", \"false\", \"error\", \"warning\", \"information\" oder \"none\" erwartet.", + "pyrightCommentUnknownDirective": "„{directive}“ ist eine unbekannte Direktive für pyright-Kommentar; „strict“, „standard“ oder „basic“ erwartet", + "readOnlyArgCount": "Nach \"ReadOnly\" wurde ein einzelnes Typargument erwartet.", + "readOnlyNotInTypedDict": 
"\"ReadOnly\" ist in diesem Kontext nicht zulässig.", + "recursiveDefinition": "Der Typ von \"{name}\" konnte nicht bestimmt werden, da er sich auf selbst bezieht.", + "relativeImportNotAllowed": "Relative Importe können nicht mit der Form \"import .a\" verwendet werden; verwenden Sie stattdessen \"from . import a\"", + "requiredArgCount": "Nach \"Required\" wurde ein einzelnes Typargument erwartet.", + "requiredNotInTypedDict": "\"Required\" ist in diesem Kontext nicht zulässig.", + "returnInAsyncGenerator": "Eine Return-Anweisung mit einem Wert ist im asynchronen Generator nicht zulässig.", + "returnInExceptionGroup": "„return“ ist in einem „except*“ Block nicht zulässig.", + "returnMissing": "Die Funktion mit dem deklarierten Rückgabetyp \"{returnType}\" muss einen Wert für alle Codepfade zurückgeben.", + "returnOutsideFunction": "\"return\" kann nur innerhalb einer Funktion verwendet werden.", + "returnTypeContravariant": "Die Variable vom Typ \"contravariant\" kann nicht im Rückgabetyp verwendet werden.", + "returnTypeMismatch": "Der Typ „{exprType}“ kann dem Rückgabetyp „{returnType}“ nicht zugewiesen werden", + "returnTypePartiallyUnknown": "Der Rückgabetyp \"{returnType}\" ist teilweise unbekannt.", + "returnTypeUnknown": "Unbekannter Rückgabetyp", + "revealLocalsArgs": "Es wurden keine Argumente für den Aufruf \"reveal_locals\" erwartet.", + "revealLocalsNone": "Keine locals Elemente in diesem Bereich", + "revealTypeArgs": "Für den Aufruf \"reveal_type\" wurde ein einzelnes Positionsargument erwartet.", + "revealTypeExpectedTextArg": "Das Argument \"expected_text\" für die Funktion \"reveal_type\" muss ein str-Literalwert sein.", + "revealTypeExpectedTextMismatch": "Typentextkonflikt; \"{expected}\" erwartet, aber \"{received}\" empfangen", + "revealTypeExpectedTypeMismatch": "Typenkonflikt; \"{expected}\" erwartet, aber \"{received}\" empfangen", + "selfTypeContext": "\"Self\" ist in diesem Kontext ungültig.", + "selfTypeMetaclass": "„Self“ kann nicht 
innerhalb einer Metaklasse (einer Unterklasse von „type“) verwendet werden.", + "selfTypeWithTypedSelfOrCls": "\"Self\" kann nicht in einer Funktion mit einem Parameter \"self\" oder \"cls\" verwendet werden, der eine andere Typanmerkung als \"Self\" aufweist.", + "sentinelBadName": "Das erste Argument für Sentinel muss ein Zeichenfolgenliteral sein.", + "sentinelNameMismatch": "Sentinel muss einer Variablen mit demselben Namen zugewiesen werden.", + "sentinelParamCount": "Sentinel benötigt ein Positionsargument.", + "setterGetterTypeMismatch": "Der Property setter Werttyp kann dem getter Rückgabetyp nicht zugewiesen werden.", + "singleOverload": "\"{name}\" ist als Überladung markiert, aber es fehlen weitere Überladungen.", + "slotsAttributeError": "\"{name}\" ist in __slots__ nicht angegeben.", + "slotsClassVarConflict": "\"{name}\" steht in Konflikt mit Instanzvariablen, die in __slots__ deklariert sind.", + "starPatternInAsPattern": "Das Sternmuster kann nicht mit dem Ziel \"as\" verwendet werden.", + "starPatternInOrPattern": "Das Sternmuster kann in anderen Mustern nicht ORed sein.", + "starStarWildcardNotAllowed": "** kann nicht zusammen mit Platzhalter \"_\" verwendet werden", + "staticClsSelfParam": "Statische Methoden dürfen keinen \"self\"- oder \"cls\"-Parameter annehmen.", + "stringNonAsciiBytes": "Ein Nicht-ASCII-Zeichen ist im Zeichenfolgenliteral in Bytes nicht zulässig.", + "stringNotSubscriptable": "Der Zeichenfolgenausdruck kann im Typausdruck nicht tiefgestellt werden; schließen Sie den gesamten Ausdruck in Anführungszeichen ein", + "stringUnsupportedEscape": "Nicht unterstützte Escapesequenz im Zeichenfolgenliteral.", + "stringUnterminated": "Das Zeichenfolgenliteral ist nicht beendet.", + "stubFileMissing": "Die Stubdatei wurde für \"{importName}\" nicht gefunden.", + "stubUsesGetAttr": "Die Typ-Stub-Datei ist unvollständig; \"__getattr__\" verdeckt Typfehler für Modul", + "sublistParamsIncompatible": "Sublist Parameter werden in Python 3.x 
nicht unterstützt.", + "superCallArgCount": "Es werden nicht mehr als zwei Argumente für den „super“ Aufruf erwartet.", + "superCallFirstArg": "Klassentyp als erstes Argument für super-Aufruf erwartet, aber \"{type}\" empfangen", + "superCallSecondArg": "Das zweite Argument für den \"super\"-Aufruf muss ein Objekt oder eine Klasse sein, das bzw. die von \"{type}\" abgeleitet wird.", + "superCallZeroArgForm": "Die Nullargumentform des „super“ Aufrufs ist nur innerhalb einer Methode gültig.", + "superCallZeroArgFormStaticMethod": "Die Nullargumentform des „super“ Aufrufs ist nicht innerhalb einer statischen Methode gültig.", + "symbolIsPossiblyUnbound": "\"{name}\" ist möglicherweise ungebunden.", + "symbolIsUnbound": "\"{name}\" ist ungebunden.", + "symbolIsUndefined": "\"{name}\" ist nicht definiert.", + "symbolOverridden": "\"{name}\" überschreibt das Symbol desselben Namens in der Klasse \"{className}\"", + "templateStringBytes": "Vorlagenzeichenfolgenliterale (t-strings) dürfen nicht binär sein.", + "templateStringIllegal": "Vorlagenzeichenfolgenliterale (t-strings) erfordern Python 3.14 oder höher.", + "templateStringUnicode": "Vorlagenzeichenfolgenliterale (t-strings) dürfen kein Unicode enthalten.", + "ternaryNotAllowed": "Der ternäre Ausdruck ist im Typausdruck nicht zulässig", + "totalOrderingMissingMethod": "Die Klasse muss \"__lt__\", \"__le__\", \"__gt__\" oder \"__ge__\" definieren, um total_ordering zu verwenden.", + "trailingCommaInFromImport": "Nachgestelltes Komma ist ohne umgebende Klammern nicht zulässig.", + "tryWithoutExcept": "Die try-Anweisung muss mindestens eine except- oder finally-Klausel aufweisen.", + "tupleAssignmentMismatch": "Ein Ausdruck vom Typ \"{type}\" kann dem Ziel-tuple nicht zugewiesen werden.", + "tupleInAnnotation": "Der Tuple-ausdruck ist im Typausdruck nicht zulässig", + "tupleIndexOutOfRange": "Der Index {index} liegt für den Typ {type} außerhalb des gültigen Bereichs.", + "typeAliasIllegalExpressionForm": "Ungültiges 
Ausdrucksformular für Typaliasdefinition", + "typeAliasIsRecursiveDirect": "Der Typalias \"{name}\" kann sich nicht selbst in seiner Definition verwenden.", + "typeAliasNotInModuleOrClass": "TypeAlias kann nur innerhalb eines Moduls oder Klassenbereichs definiert werden.", + "typeAliasRedeclared": "\"{name}\" ist als TypeAlias deklariert und kann nur einmal zugewiesen werden.", + "typeAliasStatementBadScope": "Eine type Anweisung kann nur innerhalb eines Moduls oder Klassenbereichs verwendet werden.", + "typeAliasStatementIllegal": "Die Typaliasanweisung erfordert Python 3.12 oder höher.", + "typeAliasTypeBadScope": "Ein Typalias kann nur innerhalb eines Modul- oder Klassenbereichs definiert werden.", + "typeAliasTypeBaseClass": "Ein in einer \"type\"-Anweisung definierter type Alias kann nicht als Basisklasse verwendet werden.", + "typeAliasTypeMustBeAssigned": "TypeAliasType muss einer Variablen mit dem gleichen Namen wie der Typalias zugewiesen werden.", + "typeAliasTypeNameArg": "Das erste Argument für TypeAliasType muss ein Zeichenfolgenliteral sein, das den Namen des Typalias darstellt.", + "typeAliasTypeNameMismatch": "Der Name des Typalias muss mit dem Namen der Variablen übereinstimmen, der er zugewiesen ist.", + "typeAliasTypeParamInvalid": "Die Typparameterliste muss tuple sein, das nur TypeVar, TypeVarTuple oder ParamSpec enthält.", + "typeAnnotationCall": "Der Aufrufausdruck ist im Typausdruck nicht zulässig", + "typeAnnotationVariable": "Variable im Typausdruck nicht zulässig", + "typeAnnotationWithCallable": "Das Typargument für \"type\" muss eine Klasse sein. 
Aufrufbare Elemente werden nicht unterstützt.", + "typeArgListExpected": "ParamSpec, Ellipse oder list der Typen erwartet", + "typeArgListNotAllowed": "Der Listenausdruck ist für dieses Typargument nicht zulässig.", + "typeArgsExpectingNone": "Für die Klasse \"{name}\" werden keine Typargumente erwartet.", + "typeArgsMismatchOne": "Es wurde ein Typargument erwartet, es wurde jedoch {received} empfangen.", + "typeArgsMissingForAlias": "Für den generischen Typalias \"{name}\" werden Typargumente erwartet.", + "typeArgsMissingForClass": "Für die generische Klasse \"{name}\" werden Typargumente erwartet.", + "typeArgsTooFew": "Für \"{name}\" wurden zu wenige Typargumente angegeben; {expected} erwartet, aber {received} empfangen", + "typeArgsTooMany": "Für \"{name}\" wurden zu viele Typargumente angegeben; {expected} erwartet, aber {received} empfangen", + "typeAssignmentMismatch": "Der Typ „{sourceType}“ kann dem deklarierten Typ „{destType}“ nicht zugewiesen werden", + "typeAssignmentMismatchWildcard": "Das Importsymbol „{name}“ weist den Typ „{sourceType}“ auf, der dem deklarierten Typ „{destType}“ nicht zugewiesen werden kann.", + "typeCallNotAllowed": "Der type()-Aufruf darf nicht im Typausdruck verwendet werden", + "typeCheckOnly": "\"{name}\" ist als @type_check_only markiert und kann nur in Typanmerkungen verwendet werden.", + "typeCommentDeprecated": "Die Verwendung von type Kommentaren ist veraltet; verwenden Sie stattdessen type Anmerkung", + "typeExpectedClass": "Die Klasse wurde erwartet, aber „{type}“ wurde empfangen.", + "typeFormArgs": "„TypeForm“ akzeptiert ein einzelnes positionelles Argument", + "typeGuardArgCount": "Nach \"TypeGuard\" oder \"TypeIs\" wurde ein einzelnes Typargument erwartet.", + "typeGuardParamCount": "Benutzerdefinierte Typenschutzfunktionen und -methoden müssen mindestens einen Eingabeparameter aufweisen.", + "typeIsReturnType": "Der Rückgabetyp von TypeIs (\"{returnType}\") ist nicht konsistent mit dem Wertparametertyp 
(\"{type}\").", + "typeNotAwaitable": "\"{type}\" ist nicht awaitable.", + "typeNotIntantiable": "\"{type}\" kann nicht instanziiert werden.", + "typeNotIterable": "\"{type}\" ist nicht iterierbar.", + "typeNotSpecializable": "Der Typ \"{type}\" konnte nicht spezialisiert werden", + "typeNotSubscriptable": "Das Objekt vom Typ \"{type}\" kann nicht tiefgestellt werden.", + "typeNotSupportBinaryOperator": "Der Operator \"{operator}\" wird für Typen \"{leftType}\" und \"{rightType}\" nicht unterstützt.", + "typeNotSupportBinaryOperatorBidirectional": "Der Operator \"{operator}\" wird für die Typen \"{leftType}\" und \"{rightType}\" nicht unterstützt, wenn der erwartete Typ \"{expectedType}\" ist.", + "typeNotSupportUnaryOperator": "Der Operator \"{operator}\" wird für den Typ \"{type}\" nicht unterstützt.", + "typeNotSupportUnaryOperatorBidirectional": "Der Operator \"{operator}\" wird für den Typ \"{type}\" nicht unterstützt, wenn der erwartete Typ \"{expectedType}\" ist.", + "typeNotUsableWith": "Das Objekt vom Typ „{type}“ kann nicht mit „with“ verwendet werden, da es {method} nicht ordnungsgemäß implementiert.", + "typeNotUsableWithAsync": "Das Objekt vom Typ „{type}“ kann nicht mit „async with“ verwendet werden, da es {method} nicht ordnungsgemäß implementiert.", + "typeParameterBoundNotAllowed": "\"Bound\" oder \"constraint\" kann nicht mit einem variadic-Typparameter oder ParamSpec verwendet werden.", + "typeParameterConstraintTuple": "Die Typparametereinschränkung muss ein Tupel mit mindestens zwei Typen sein.", + "typeParameterExistingTypeParameter": "Der Typparameter \"{name}\" wird bereits verwendet.", + "typeParameterNotDeclared": "Der Typparameter \"{name}\" ist nicht in der Typparameterliste für \"{container}\" enthalten.", + "typeParametersMissing": "Es muss mindestens ein Typparameter angegeben werden.", + "typePartiallyUnknown": "Der Typ von \"{name}\" ist teilweise unbekannt.", + "typeUnknown": "Der Typ von \"{name}\" ist unbekannt.", + 
"typeVarAssignedName": "TypeVar muss einer Variablen mit dem Namen \"{name}\" zugewiesen werden.", + "typeVarAssignmentMismatch": "Der Typ \"{type}\" kann der Typvariablen \"{name}\" nicht zugewiesen werden.", + "typeVarBoundAndConstrained": "TypeVar kann nicht gleichzeitig gebunden und eingeschränkt sein.", + "typeVarBoundGeneric": "Der gebundene Typ \"TypeVar\" darf nicht generisch sein.", + "typeVarConstraintGeneric": "Der Typ der TypeVar-Einschränkung darf nicht generisch sein.", + "typeVarDefaultBoundMismatch": "Der Standardtyp \"TypeVar\" muss ein Untertyp des gebundenen Typs sein.", + "typeVarDefaultConstraintMismatch": "Der Standardtyp \"TypeVar\" muss einer der eingeschränkten Typen sein.", + "typeVarDefaultIllegal": "Für Standardtypen von Typvariablen ist Python 3.13 oder höher erforderlich.", + "typeVarDefaultInvalidTypeVar": "Der Typparameter \"{name}\" weist einen Standardtyp auf, der auf eine oder mehrere Typvariablen verweist, die außerhalb des Gültigkeitsbereichs liegen.", + "typeVarFirstArg": "Der Name von TypeVar wurde als erstes Argument erwartet.", + "typeVarInvalidForMemberVariable": "Der Attributtyp kann keine Typvariable \"{name}\" verwenden, die auf die lokale Methode festgelegt ist.", + "typeVarNoMember": "TypeVar „{type}“ weist kein Attribut „{name}“ auf", + "typeVarNotSubscriptable": "TypeVar \"{type}\" kann nicht tiefgestellt werden.", + "typeVarNotUsedByOuterScope": "Die Typvariable \"{name}\" hat in diesem Kontext keine Bedeutung.", + "typeVarPossiblyUnsolvable": "Die Typvariable \"{name}\" wird möglicherweise nicht aufgelöst, wenn der Aufrufer kein Argument für Parameter \"{param}\" bereitstellt", + "typeVarSingleConstraint": "TypeVar muss mindestens zwei eingeschränkte Typen aufweisen.", + "typeVarTupleConstraints": "TypeVarTuple darf keine Werteinschränkungen aufweisen.", + "typeVarTupleContext": "TypeVarTuple ist in diesem Kontext nicht zulässig.", + "typeVarTupleDefaultNotUnpacked": "Der Standardtyp \"TypeVarTuple\" muss 
entpacktes tuple oder ein TypeVarTuple sein.", + "typeVarTupleMustBeUnpacked": "Der Entpackungsoperator ist für den TypeVarTuple-Wert erforderlich.", + "typeVarTupleUnknownParam": "\"{name}\" ist ein unbekannter Parameter für TypeVarTuple.", + "typeVarUnknownParam": "\"{name}\" ist ein unbekannter Parameter für TypeVar.", + "typeVarUsedByOuterScope": "TypeVar \"{name}\" wird bereits von einem äußeren Bereich verwendet.", + "typeVarUsedOnlyOnce": "TypeVar \"{name}\" wird in der generischen Funktionssignatur nur einmal angezeigt.", + "typeVarVariance": "TypeVar darf nicht gleichzeitig \"covariant\" und \"contravariant\" sein.", + "typeVarWithDefaultFollowsVariadic": "TypeVar „{typeVarName}“ weist einen Standardwert auf und kann TypeVarTuple „{variadicName}“ nicht folgen.", + "typeVarWithoutDefault": "\"{name}\" kann nicht nach \"{other}\" in der Typparameterliste angezeigt werden, da es keinen Standardtyp aufweist.", + "typeVarsNotInGenericOrProtocol": "Generic[] oder Protocol[] müssen alle Typvariablen enthalten.", + "typedDictAccess": "Auf das Element in TypedDict konnte nicht zugegriffen werden.", + "typedDictAssignedName": "TypedDict muss einer Variablen mit dem Namen „{name}“ zugewiesen werden.", + "typedDictBadVar": "TypedDict-Klassen dürfen nur Typanmerkungen enthalten.", + "typedDictBaseClass": "Alle Basisklassen für TypedDict-Klassen müssen auch TypedDict-Klassen sein.", + "typedDictBoolParam": "Es wird erwartet, dass \"{name}\" Parameter den Wert \"True\" oder \"False\" aufweist.", + "typedDictClosedExtras": "Die Basisklasse \"{name}\" ist eine TypedDict, die den Typ zusätzlicher Elemente auf den Typ \"{type}\" beschränkt.", + "typedDictClosedFalseNonOpenBase": "Die Basisklasse „{name}“ ist kein offenes TypedDict; closed=False ist nicht zulässig.", + "typedDictClosedNoExtras": "Die Basisklasse „{name}“ ist ein closed TypedDict; zusätzliche Elemente sind nicht zulässig.", + "typedDictDelete": "Das Element in TypedDict konnte nicht gelöscht werden.", + 
"typedDictEmptyName": "Namen innerhalb eines TypedDict dürfen nicht leer sein.", + "typedDictEntryName": "Für den Wörterbucheintragsnamen wurde ein Zeichenfolgenliteral erwartet.", + "typedDictEntryUnique": "Namen innerhalb eines Wörterbuchs müssen eindeutig sein.", + "typedDictExtraArgs": "Zusätzliche TypedDict-Argumente werden nicht unterstützt.", + "typedDictExtraItemsClosed": "TypedDict können entweder \"closed\" oder \"extra_items\" verwenden, aber nicht beides.", + "typedDictFieldNotRequiredRedefinition": "Das TypedDict-Element „{name}“ kann nicht als „NotRequired“ neu definiert werden.", + "typedDictFieldReadOnlyRedefinition": "Das TypedDict-Element „{name}“ kann nicht als „ReadOnly“ neu definiert werden.", + "typedDictFieldRequiredRedefinition": "Das TypedDict-Element „{name}“ kann nicht als „Required“ neu definiert werden.", + "typedDictFirstArg": "Der Klassenname \"TypedDict\" wird als erstes Argument erwartet.", + "typedDictInClassPattern": "Die TypedDict-Klasse ist im Klassenmuster nicht zulässig.", + "typedDictInitsubclassParameter": "TypedDict unterstützt __init_subclass__ Parameter „{name}“ nicht.", + "typedDictNotAllowed": "\"TypedDict\" kann in diesem Kontext nicht verwendet werden.", + "typedDictSecondArgDict": "Es wird ein dict- oder Schlüsselwortparameter als zweiter Parameter erwartet.", + "typedDictSecondArgDictEntry": "Einfacher Wörterbucheintrag erwartet", + "typedDictSet": "Element konnte in TypedDict nicht zugewiesen werden.", + "unaccessedClass": "Auf Klasse \"{name}\" wird nicht zugegriffen", + "unaccessedFunction": "Auf Funktion \"{name}\" wird nicht zugegriffen", + "unaccessedImport": "Auf Import \"{name}\" wird nicht zugegriffen", + "unaccessedSymbol": "Auf \"{name}\" wird nicht zugegriffen", + "unaccessedVariable": "Auf Variable \"{name}\" wird nicht zugegriffen", + "unannotatedFunctionSkipped": "Die Analyse der Funktion \"{name}\" wird übersprungen, da sie nicht kommentiert wurde.", + "unaryOperationNotAllowed": "Der unäre Operator 
ist im Typausdruck nicht zulässig", + "unexpectedAsyncToken": "Es wurde erwartet, dass \"def\", \"with\" oder \"for\" auf \"async\" folgt.", + "unexpectedEof": "Unerwartetes Dateiende.", + "unexpectedExprToken": "Unerwartetes Token am Ende des Ausdrucks.", + "unexpectedIndent": "Unerwarteter Einzug", + "unexpectedUnindent": "\"Unindent\" nicht erwartet.", + "unhashableDictKey": "Der Wörterbuchschlüssel muss hashbar sein.", + "unhashableSetEntry": "Der Eintrag \"Set\" muss hashbar sein.", + "uninitializedAbstractVariables": "In der abstrakten Basisklasse definierte Variablen sind in der final Klasse \"{classType}\" nicht initialisiert", + "uninitializedInstanceVariable": "Die Instanzvariable \"{name}\" ist im Klassentext oder in der __init__ Methode nicht initialisiert.", + "unionForwardReferenceNotAllowed": "Die Unionsyntax kann nicht mit einem Zeichenfolgenoperanden verwendet werden; verwenden Sie Anführungszeichen um den gesamten Ausdruck", + "unionSyntaxIllegal": "Alternative Syntax für Unions erfordert Python 3.10 oder höher.", + "unionTypeArgCount": "Union erfordert mindestens zwei Typargumente.", + "unionUnpackedTuple": "Union kann kein entpacktes tuple enthalten.", + "unionUnpackedTypeVarTuple": "Die Union kann kein entpacktes TypeVarTuple enthalten.", + "unnecessaryCast": "Nicht erforderlicher \"cast\"-Aufruf; der Typ ist bereits \"{type}\".", + "unnecessaryIsInstanceAlways": "Nicht erforderlicher isinstance-Aufruf; \"{testType}\" ist immer eine Instanz von \"{classType}\"", + "unnecessaryIsInstanceNever": "Nicht erforderlicher isinstance-Aufruf; \"{testType}\" ist nie eine Instanz von \"{classType}\"", + "unnecessaryIsSubclassAlways": "Nicht erforderlicher issubclass-Aufruf; \"{testType}\" ist immer eine Unterklasse von \"{classType}\"", + "unnecessaryIsSubclassNever": "Nicht erforderlicher issubclass-Aufruf; \"{testType}\" ist nie eine Unterklasse von \"{classType}\"", + "unnecessaryPyrightIgnore": "Unnötiger \"# pyright: ignore\"-Kommentar", + 
"unnecessaryPyrightIgnoreRule": "Unnötige \"# pyright: ignore\"-Regel: \"{name}\"", + "unnecessaryTypeIgnore": "Nicht erforderlicher \"# type: ignore\"-Kommentar", + "unpackArgCount": "Nach \"Unpack\" wurde ein einzelnes Typargument erwartet.", + "unpackExpectedTypeVarTuple": "„TypeVarTuple“ oder „tuple“ als Typargument für „Unpack“ erwartet", + "unpackExpectedTypedDict": "TypedDict-Typargument für Unpack erwartet", + "unpackIllegalInComprehension": "Der Entpackvorgang ist in Verständnis nicht zulässig.", + "unpackInAnnotation": "Der Operator zum Entpacken ist im Typausdruck nicht zulässig", + "unpackInDict": "Der Entpackvorgang ist in Wörterbüchern nicht zulässig.", + "unpackInSet": "Der Operator zum Entpacken ist innerhalb einer Menge „set“ nicht zulässig.", + "unpackNotAllowed": "\"Unpack\" ist in diesem Kontext nicht zulässig.", + "unpackOperatorNotAllowed": "Der Entpackvorgang ist in diesem Kontext nicht zulässig.", + "unpackTuplesIllegal": "Der Entpackvorgang ist in Tupeln vor Python 3.8 nicht zulässig.", + "unpackedArgInTypeArgument": "Nicht gepackte Argumente können in diesem Kontext nicht verwendet werden.", + "unpackedArgWithVariadicParam": "Das nicht gepackte Argument kann nicht für den Parameter \"TypeVarTuple\" verwendet werden.", + "unpackedDictArgumentNotMapping": "Der Argumentausdruck nach ** muss eine Zuordnung mit dem Schlüsseltyp \"str\" sein.", + "unpackedDictSubscriptIllegal": "Der Operator zum Entpacken des Wörterbuchs in tiefgestellten Zeichen ist nicht zulässig.", + "unpackedSubscriptIllegal": "Das Entpacken des Operators im Tiefstellungsskript erfordert Python 3.11 oder höher.", + "unpackedTypeVarTupleExpected": "Nicht gepackter TypeVarTuple erwartet; verwenden Sie Unpack[{name1}] oder *{name2}", + "unpackedTypedDictArgument": "Das entpackte TypedDict-Argument kann nicht mit Parametern abgeglichen werden.", + "unreachableCodeCondition": "Der Code wird nicht analysiert, da die Bedingung statisch als falsch ausgewertet wird.", + 
"unreachableCodeStructure": "Code ist aufgrund der Struktur nicht erreichbar", + "unreachableCodeType": "Typanalyse weist darauf hin, dass Code nicht erreichbar ist", + "unreachableExcept": "Die except-Klausel ist nicht erreichbar, weil die Ausnahme bereits behandelt wird.", + "unsupportedDunderAllOperation": "Der Vorgang für \"__all__\" wird nicht unterstützt, daher ist die exportierte Symbolliste möglicherweise falsch.", + "unusedCallResult": "Das Ergebnis des Aufrufausdrucks ist vom Typ \"{type}\" und wird nicht verwendet; der Variablen \"_\" zuweisen, wenn dies beabsichtigt ist", + "unusedCoroutine": "Das Ergebnis eines asynchronen Funktionsaufrufs wird nicht verwendet; verwenden Sie \"await\", oder weisen Sie der Variablen ein Ergebnis zu.", + "unusedExpression": "Der Ausdruckswert wird nicht verwendet.", + "varAnnotationIllegal": "Type Anmerkungen für Variablen erfordern Python 3.6 oder höher; verwenden Sie den type Kommentar für Kompatibilität mit früheren Versionen", + "variableFinalOverride": "Die Variable \"{name}\" ist als \"Final\" gekennzeichnet und überschreibt die Nicht-Final-Variable desselben Namens in der Klasse \"{className}\"", + "variadicTypeArgsTooMany": "Die Liste der Typargumente darf höchstens ein entpacktes „TypeVarTuple“ oder „tuple“ enthalten.", + "variadicTypeParamTooManyAlias": "Der Typalias darf höchstens einen TypeVarTuple-Typparameter aufweisen, es wurden jedoch mehrere ({names}) empfangen.", + "variadicTypeParamTooManyClass": "Die generische Klasse darf höchstens einen TypeVarTuple-Typparameter aufweisen, es wurden jedoch mehrere ({names}) empfangen.", + "walrusIllegal": "Der Operator \":=\" erfordert Python 3.8 oder höher.", + "walrusNotAllowed": "Der Operator \":=\" ist in diesem Kontext ohne umgebende Klammern nicht zulässig.", + "wildcardInFunction": "Der Platzhalterimport ist innerhalb einer Klasse oder Funktion nicht zulässig.", + "wildcardLibraryImport": "Der Platzhalterimport aus einer Bibliothek ist nicht zulässig.", + 
"wildcardPatternTypePartiallyUnknown": "Der durch das Platzhaltermuster erfasste Typ ist teilweise unbekannt.", + "wildcardPatternTypeUnknown": "Der durch das Platzhaltermuster erfasste Typ ist unbekannt.", + "yieldFromIllegal": "Die Verwendung von \"yield from\" erfordert Python 3.3 oder höher.", + "yieldFromOutsideAsync": "\"yield from\" ist in einer asynchronen Funktion nicht zulässig.", + "yieldOutsideFunction": "\"yield\" ist außerhalb einer Funktion oder eines Lambdas nicht zulässig.", + "yieldWithinComprehension": "„yield“ ist innerhalb eines Verständnisses nicht zulässig", + "zeroCaseStatementsFound": "Die match-Anweisung muss mindestens eine case-Anweisung enthalten", + "zeroLengthTupleNotAllowed": "Ein Tupel der Länge null ist in diesem Kontext nicht zulässig" + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "Das Sonderformular „Annotated“ kann nicht mit Instanz- und Klassenüberprüfungen verwendet werden.", + "argParam": "Argument entspricht Parameter \"{paramName}\"", + "argParamFunction": "Argument entspricht Parameter \"{paramName}\" in Funktion \"{functionName}\"", + "argsParamMissing": "Der Parameter \"*{paramName}\" weist keinen entsprechenden Parameter auf.", + "argsPositionOnly": "Nicht übereinstimmende Parameteranmerkungsanzahl: {expected} erwartet, aber {received} empfangen", + "argumentType": "Argumenttyp ist \"{type}\"", + "argumentTypes": "Argumenttypen: ({types})", + "assignToNone": "Der Typ kann nicht „None“ zugewiesen werden.", + "asyncHelp": "Meinten Sie \"async with\"?", + "baseClassIncompatible": "Die Basisklasse \"{baseClass}\" ist nicht mit dem Typ \"{type}\" kompatibel.", + "baseClassIncompatibleSubclass": "Die Basisklasse \"{baseClass}\" wird von \"{subclass}\" abgeleitet, die mit dem Typ \"{type}\" nicht kompatibel ist.", + "baseClassOverriddenType": "Die Basisklasse \"{baseClass}\" stellt einen Typ \"{type}\" bereit, der überschrieben wird.", + "baseClassOverridesType": "Basisklasse \"{baseClass}\" überschreibt mit Typ \"{type}\"", + 
"bytesTypePromotions": "Legen Sie disableBytesTypePromotions auf false fest, um das Typerweiterungsverhalten für \"bytearray\" und \"memoryview\" zu aktivieren.", + "conditionalRequiresBool": "Die Methode __bool__ für den Typ \"{operandType}\" gibt den Typ \"{boolReturnType}\" anstelle von \"bool\" zurück", + "dataClassFieldLocation": "Felddeklaration", + "dataClassFrozen": "\"{name}\" ist fixiert", + "dataProtocolUnsupported": "„{name}“ ist ein Datenprotokoll.", + "descriptorAccessBindingFailed": "Fehler beim Binden der Methode „{name}“ für die Deskriptorklasse „{className}“", + "descriptorAccessCallFailed": "Fehler beim Aufrufen der Methode „{name}“ für die Deskriptorklasse „{className}“", + "finalMethod": "Final Methode", + "functionParamDefaultMissing": "Standardargument für Parameter \"{name}\" fehlt.", + "functionParamName": "Parameternamen stimmen nicht überein: \"{destName}\" und \"{srcName}\"", + "functionParamPositionOnly": "Nicht übereinstimmender Parameter „nur für Position“. 
Der Parameter „{name}“ ist nicht „nur für Position“.", + "functionReturnTypeMismatch": "Der Funktionsrückgabetyp \"{sourceType}\" ist nicht mit dem Typ \"{destType}\" kompatibel.", + "functionTooFewParams": "Die Funktion akzeptiert zu wenige Positionsparameter; {expected} erwartet, aber {received} empfangen", + "functionTooManyParams": "Die Funktion akzeptiert zu viele Positionsparameter; {expected} erwartet, aber {received} empfangen", + "genericClassNotAllowed": "Ein generischer Typ mit Typargumenten ist für Instanz- oder Klassenprüfungen nicht zulässig.", + "incompatibleDeleter": "Die deleter Methode der Property ist nicht kompatibel.", + "incompatibleGetter": "Die Property getter Methode ist nicht kompatibel.", + "incompatibleSetter": "Die Property setter Methode ist nicht kompatibel.", + "initMethodLocation": "Die __init__ Methode ist in der Klasse \"{type}\" definiert.", + "initMethodSignature": "Die Signatur von __init__ ist \"{type}\".", + "initSubclassLocation": "Die __init_subclass__ Methode ist in der Klasse \"{name}\" definiert.", + "invariantSuggestionDict": "Erwägen Sie den Wechsel von \"dict\" zu \"Mapping\" (im Werttyp covariant).", + "invariantSuggestionList": "Erwägen Sie den Wechsel von \"list\" zu \"Sequence\" (covariant).", + "invariantSuggestionSet": "Erwägen Sie den Wechsel von „set“ zu „Container“, der Kovariante ist.", + "isinstanceClassNotSupported": "„{type}“ wird für Instanz- und Klassenüberprüfungen nicht unterstützt.", + "keyNotRequired": "\"{name}\" ist kein erforderlicher Schlüssel in \"{type}\". 
Der Zugriff kann daher zu einer Laufzeitausnahme führen.", + "keyReadOnly": "\"{name}\" ist ein schreibgeschützter Schlüssel in \"{type}\"", + "keyRequiredDeleted": "\"{name}\" ist ein erforderlicher Schlüssel und kann nicht gelöscht werden.", + "keyUndefined": "\"{name}\" ist kein definierter Schlüssel in \"{type}\"", + "kwargsParamMissing": "Der Parameter \"**{paramName}\" weist keinen entsprechenden Parameter auf.", + "listAssignmentMismatch": "Der Typ \"{type}\" ist nicht mit der Zielliste kompatibel.", + "literalAssignmentMismatch": "„{sourceType}“ kann dem Typ „{destType}“ nicht zugewiesen werden.", + "literalNotAllowed": "Das Sonderformular „Literal“ kann nicht mit Instanz- und Klassenüberprüfungen verwendet werden.", + "matchIsNotExhaustiveHint": "Wenn keine ausführliche Behandlung beabsichtigt ist, fügen Sie \"case _: pass\" hinzu.", + "matchIsNotExhaustiveType": "Unbehandelter Typ: \"{type}\"", + "memberAssignment": "Ein Ausdruck vom Typ „{type}“ kann dem Attribut „{name}“ der Klasse „{classType}“ nicht zugewiesen werden", + "memberIsAbstract": "„{type}. 
{name}“ ist nicht implementiert.", + "memberIsAbstractMore": "und {count} weitere...", + "memberIsClassVarInProtocol": "„{name}“ ist als ClassVar im Protokoll definiert.", + "memberIsInitVar": "„{name}“ ist ein init-only-Feld.", + "memberIsInvariant": "\"{name}\" ist invariant, da es änderbar ist.", + "memberIsNotClassVarInClass": "„{name}“ muss als ClassVar definiert sein, um mit dem Protokoll kompatibel zu sein.", + "memberIsNotClassVarInProtocol": "„{name}“ ist nicht als ClassVar im Protokoll definiert.", + "memberIsNotReadOnlyInProtocol": "\"{name}\" ist im Protokoll nicht schreibgeschützt.", + "memberIsReadOnlyInProtocol": "\"{name}\" ist im Protokoll schreibgeschützt.", + "memberIsWritableInProtocol": "\"{name}\" ist im Protokoll beschreibbar.", + "memberSetClassVar": "Attribut „{name}“ kann nicht über eine Klasseninstanz zugewiesen werden, da es sich um eine ClassVar handelt", + "memberTypeMismatch": "\"{name}\" ist ein inkompatibler Typ.", + "memberUnknown": "Das Attribut „{name}“ ist unbekannt", + "metaclassConflict": "Die Metaklasse \"{metaclass1}\" verursacht einen Konflikt mit \"{metaclass2}\"", + "missingDeleter": "Die Property deleter Methode fehlt.", + "missingGetter": "Die Property getter Methode fehlt.", + "missingSetter": "Die Property setter Methode fehlt.", + "namedParamMissingInDest": "Zusätzlicher Parameter \"{name}\"", + "namedParamMissingInSource": "Fehlender Schlüsselwortparameter \"{name}\"", + "namedParamTypeMismatch": "Der Schlüsselwortparameter „{name}“ vom Typ „{sourceType}“ ist nicht mit dem Typ „{destType}“ kompatibel", + "namedTupleNotAllowed": "NamedTuple kann nicht für Instanzen- oder Klassenüberprüfungen verwendet werden.", + "newMethodLocation": "Die __new__ Methode ist in der Klasse \"{type}\" definiert.", + "newMethodSignature": "Signatur von __new__ ist \"{type}\"", + "newTypeClassNotAllowed": "Der mit NewType erstellte Typ kann nicht mit Instanz- und Klassenüberprüfungen verwendet werden.", + "noOverloadAssignable": "Keine 
überladene Funktion stimmt mit dem Typ \"{type}\" überein.", + "noneNotAllowed": "\"None\" kann nicht für Instanz- oder Klassenprüfungen verwendet werden.", + "orPatternMissingName": "Fehlende Namen: {name}", + "overloadIndex": "Überladung \"{index}\" ist die nächste Übereinstimmung.", + "overloadNotAssignable": "Mindestens eine Überladung von \"{name}\" kann nicht zugewiesen werden.", + "overloadSignature": "Die Überladungssignatur ist hier definiert.", + "overriddenMethod": "Überschriebene Methode", + "overriddenSymbol": "Außer Kraft gesetztes Symbol", + "overrideInvariantMismatch": "Der Überschreibungstyp \"{overrideType}\" ist nicht identisch mit dem Basistyp \"{baseType}\".", + "overrideIsInvariant": "Die Variable ist veränderlich, sodass ihr Typ unveränderlich ist.", + "overrideNoOverloadMatches": "Keine Überladungssignatur in Überschreibung ist mit der Basismethode kompatibel.", + "overrideNotClassMethod": "Die Basismethode ist als classmethod deklariert, die Überschreibung jedoch nicht", + "overrideNotInstanceMethod": "Die Basismethode ist als Instanz deklariert, die Überschreibung jedoch nicht", + "overrideNotStaticMethod": "Die Basismethode ist als staticmethod deklariert, die Überschreibung jedoch nicht", + "overrideOverloadNoMatch": "Außerkraftsetzung behandelt nicht alle Überladungen der Basismethode.", + "overrideOverloadOrder": "Überladungen für die Überschreibungsmethode müssen in der gleichen Reihenfolge wie die Basismethode vorliegen.", + "overrideParamKeywordNoDefault": "Konflikt mit Schlüsselwortparameter \"{name}\": Der Basisparameter hat einen Standardargumentwert, der Außerkraftsetzungsparameter ist nicht identisch.", + "overrideParamKeywordType": "Schlüsselwortparameter \"{name}\" Typenkonflikt: Basisparameter ist Typ \"{baseType}\", Überschreibungsparameter ist Typ \"{overrideType}\"", + "overrideParamName": "Namenskonflikt mit Parameter {index}: Der Basisparameter hat den Namen \"{baseName}\", der Außerkraftsetzungsparameter ist 
\"{overrideName}\"", + "overrideParamNameExtra": "Der Parameter \"{name}\" fehlt in der Basis.", + "overrideParamNameMissing": "Der Parameter \"{name}\" fehlt in der Außerkraftsetzung.", + "overrideParamNamePositionOnly": "Parameter „{index}“ stimmt nicht überein: Basisparameter „{baseName}“ ist Schlüsselwortparameter, Außerkraftsetzungsparameter ist „position-only“.", + "overrideParamNoDefault": "Konflikt mit Parameter {index}: Der Basisparameter hat einen Standardargumentwert, der Außerkraftsetzungsparameter ist nicht identisch.", + "overrideParamType": "Parameter {index} Typenkonflikt: Basisparameter ist Typ \"{baseType}\", Überschreibungsparameter ist Typ \"{overrideType}\"", + "overridePositionalParamCount": "Anzahl der Positionsparameter stimmt nicht überein; die Basismethode hat {baseCount}, aber die Überschreibung hat {overrideCount}", + "overrideReturnType": "Rückgabetypkonflikt: Basismethode gibt Typ \"{baseType}\" zurück, Überschreibung gibt Typ \"{overrideType}\" zurück", + "overrideType": "Die Basisklasse definiert den Typ als \"{type}\"", + "paramAssignment": "Parameter „{index}“: Typ „{sourceType}“ ist nicht mit dem Typ „{destType}“ kompatibel", + "paramSpecMissingInOverride": "Parameter \"ParamSpec\" fehlen in der Überschreibungsmethode.", + "paramType": "Parametertyp ist \"{paramType}\"", + "privateImportFromPyTypedSource": "Stattdessen aus \"{module}\" importieren", + "propertyAccessFromProtocolClass": "Auf eine in einer Protokollklasse definierte Eigenschaft kann nicht als Klassenvariable zugegriffen werden.", + "propertyMethodIncompatible": "Die Property-Methode \"{name}\" ist inkompatibel.", + "propertyMethodMissing": "Die Property-Methode \"{name}\" fehlt in der Überschreibung.", + "propertyMissingDeleter": "Property \"{name}\" hat keinen definierten deleter.", + "propertyMissingSetter": "Property \"{name}\" hat keinen definierten setter.", + "protocolIncompatible": "\"{sourceType}\" ist nicht mit dem Protokoll \"{destType}\" kompatibel.", + 
"protocolMemberMissing": "\"{name}\" ist nicht vorhanden.", + "protocolRequiresRuntimeCheckable": "Die Protocol Klasse muss @runtime_checkable sein, damit sie mit Instanz- und Klassenprüfungen verwendet werden kann.", + "protocolSourceIsNotConcrete": "\"{sourceType}\" ist kein konkreter Klassentyp und kann dem Typ \"{destType}\" nicht zugewiesen werden.", + "protocolUnsafeOverlap": "Attribute von „{name}“ weisen die gleichen Namen wie das Protokoll auf.", + "pyrightCommentIgnoreTip": "Verwenden Sie \"# pyright: ignore[]\", um die Diagnose für eine einzelne Zeile zu unterdrücken.", + "readOnlyAttribute": "Das Attribut \"{name}\" ist schreibgeschützt.", + "seeClassDeclaration": "Siehe Klassendeklaration", + "seeDeclaration": "Siehe Deklaration", + "seeFunctionDeclaration": "Siehe Funktionsdeklaration", + "seeMethodDeclaration": "Siehe Methodendeklaration", + "seeParameterDeclaration": "Siehe Parameterdeklaration", + "seeTypeAliasDeclaration": "Siehe Typaliasdeklaration", + "seeVariableDeclaration": "Siehe Variablendeklaration", + "tupleAssignmentMismatch": "Der Typ \"{type}\" ist nicht mit dem Ziel-tuple kompatibel.", + "tupleEntryTypeMismatch": "Der Tuple-eintrag {entry} ist ein falscher Typ.", + "tupleSizeIndeterminateSrc": "Nicht übereinstimmende Tuple Größe; {expected} erwartet, aber unbestimmt empfangen", + "tupleSizeIndeterminateSrcDest": "Nicht übereinstimmende Tuple Größe; {expected} oder mehr erwartet, aber „unbestimmt“ empfangen", + "tupleSizeMismatch": "Nicht übereinstimmende Tuple Größe; {expected} erwartet, aber {received} empfangen", + "tupleSizeMismatchIndeterminateDest": "Nicht übereinstimmende Tuple Größe; {expected} oder mehr erwartet, aber {received} empfangen", + "typeAliasInstanceCheck": "Der mit der „type“-Anweisung erstellte Typalias kann nicht mit Instanz- und Klassenüberprüfungen verwendet werden.", + "typeAssignmentMismatch": "Der Typ „{sourceType}“ kann dem Typ „{destType}“ nicht zugewiesen werden.", + "typeBound": "Der Typ „{sourceType}“ 
kann der oberen Grenze „{destType}“ für die Typvariable „{name}“ nicht zugewiesen werden.", + "typeConstrainedTypeVar": "Der Typ „{type}“ kann der eingeschränkten Typvariablen „{name}“ nicht zugewiesen werden", + "typeIncompatible": "„{sourceType}“ kann „{destType}“ nicht zugewiesen werden.", + "typeNotClass": "\"{type}\" ist keine Klasse.", + "typeNotStringLiteral": "\"{type}\" ist kein Zeichenfolgenliteral.", + "typeOfSymbol": "Der Typ von \"{name}\" ist \"{type}\".", + "typeParamSpec": "Typ \"{type}\" ist nicht mit ParamSpec \"{name}\" kompatibel.", + "typeUnsupported": "Typ \"{type}\" wird nicht unterstützt.", + "typeVarDefaultOutOfScope": "Die Typvariable \"{name}\" befindet sich nicht im Bereich.", + "typeVarIsContravariant": "Der Typparameter \"{name}\" ist kontravariant, aber \"{sourceType}\" ist kein Supertyp von \"{destType}\"", + "typeVarIsCovariant": "Der Typparameter \"{name}\" ist kovariant, aber \"{sourceType}\" ist kein Untertyp von \"{destType}\"", + "typeVarIsInvariant": "Der Typparameter \"{name}\" ist invariant, \"{sourceType}\" ist jedoch nicht identisch mit \"{destType}\"", + "typeVarNotAllowed": "TypeVar ist für Instanz- oder Klassenprüfungen nicht zulässig.", + "typeVarTupleRequiresKnownLength": "TypeVarTuple kann nicht an tuple unbekannter Länge gebunden werden.", + "typeVarUnnecessarySuggestion": "Stattdessen {type} verwenden", + "typeVarUnsolvableRemedy": "Geben Sie eine Überladung an, die den Rückgabetyp angibt, wenn das Argument nicht angegeben ist.", + "typeVarsMissing": "Fehlende Typvariablen: {names}", + "typedDictBaseClass": "Die Klasse \"{type}\" ist kein TypedDict.", + "typedDictClassNotAllowed": "Die TypedDict-Klasse ist für Instanz- oder Klassenüberprüfungen nicht zulässig.", + "typedDictClosedExtraNotAllowed": "Das Element „{name}“ kann nicht hinzugefügt werden.", + "typedDictClosedExtraTypeMismatch": "Das Element „{name}“ mit dem Typ „{type}“ kann nicht hinzugefügt werden.", + "typedDictClosedFieldNotReadOnly": "Das Element 
„{name}“ kann nicht hinzugefügt werden, da es ReadOnly sein muss.", + "typedDictClosedFieldNotRequired": "Das Element „{name}“ kann nicht hinzugefügt werden, da es „NotRequired“ sein muss.", + "typedDictExtraFieldNotAllowed": "„{name}“ ist in „{type}“ nicht vorhanden.", + "typedDictExtraFieldTypeMismatch": "Der Typ von „{name}“ ist nicht mit dem Typ „extra_items“ in „{type}“ kompatibel.", + "typedDictFieldMissing": "\"{name}\" fehlt in \"{type}\"", + "typedDictFieldNotReadOnly": "\"{name}\" ist in \"{type}\" nicht schreibgeschützt.", + "typedDictFieldNotRequired": "\"{name}\" ist in \"{type}\" nicht erforderlich.", + "typedDictFieldRequired": "\"{name}\" ist in \"{type}\" erforderlich.", + "typedDictFieldTypeMismatch": "Der Typ „{type}“ kann dem Element „{name}“ nicht zugewiesen werden.", + "typedDictFieldUndefined": "„{name}“ ist ein nicht definiertes Element im Typ „{type}“.", + "typedDictKeyAccess": "[\"{name}\"] verwenden, um in TypedDict auf ein Element zu verweisen", + "typedDictNotAllowed": "TypedDict kann nicht für Instanzen- oder Klassenüberprüfungen verwendet werden.", + "unhashableType": "Der Typ \"{type}\" kann nicht mit einem Hash erstellt werden.", + "uninitializedAbstractVariable": "Die Instanzvariable \"{name}\" ist in einer abstrakten Basisklasse \"{classType}\" definiert, aber nicht initialisiert.", + "unreachableExcept": "\"{exceptionType}\" ist eine Unterklasse von \"{parentType}\"", + "useDictInstead": "Verwenden Sie dict[T1, T2], um einen Wörterbuchtyp anzugeben.", + "useListInstead": "Verwenden Sie list[T], um einen list-Typ anzugeben, oder T1 | T2, um einen union-Typ anzugeben.", + "useTupleInstead": "Verwenden Sie tuple[T1, ..., Tn], um einen tuple-Typ anzugeben, oder T1 | T2, um einen union-Typ anzugeben.", + "useTypeInstead": "Verwenden Sie stattdessen type[T].", + "varianceMismatchForClass": "Die Varianz des Typarguments \"{typeVarName}\" ist nicht mit der Basisklasse \"{className}\" kompatibel", + "varianceMismatchForTypeAlias": "Die 
Varianz des Typarguments \"{typeVarName}\" ist nicht mit \"{typeAliasParam}\" kompatibel" + }, + "Service": { + "longOperation": "Das Aufzählen von Arbeitsbereichsquelldateien nimmt viel Zeit in Anspruch. Erwägen Sie stattdessen, einen Unterordner zu öffnen. [Weitere Informationen](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.en-us.json b/python-parser/packages/pyright-internal/src/localization/package.nls.en-us.json new file mode 100644 index 00000000..63f1bebf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.en-us.json @@ -0,0 +1,2169 @@ +{ + "CodeAction": { + "createTypeStub": { + "message": "Create Type Stub", + "comment": "{Locked='Stub'}" + }, + "createTypeStubFor": { + "message": "Create Type Stub For \"{moduleName}\"", + "comment": "{Locked='Stub'}" + }, + "executingCommand": "Executing command", + "filesToAnalyzeCount": "{count} files to analyze", + "filesToAnalyzeOne": "1 file to analyze", + "findingReferences": "Finding references", + "organizeImports": "Organize Imports" + }, + "Completion": { + "autoImportDetail": "Auto-import", + "indexValueDetail": "Index value" + }, + "Diagnostic": { + "abstractMethodInvocation": "Method \"{method}\" cannot be called because it is abstract and unimplemented", + "annotatedMetadataInconsistent": "Annotated metadata type \"{metadataType}\" is not compatible with type \"{type}\"", + "annotatedParamCountMismatch": "Parameter annotation count mismatch: expected {expected} but received {received}", + "annotatedTypeArgMissing": { + "message": "Expected one type argument and one or more annotations for \"Annotated\"", + "comment": "{Locked='Annotated'}" + }, + "annotationBytesString": "Type expressions cannot use bytes string literals", + "annotationFormatString": { + "message": "Type expressions cannot use format string literals (f-strings)", + "comment": "'f-string' is the common English slang for a 
Python format string" + }, + "annotationNotSupported": "Type annotation not supported for this statement", + "annotationRawString": "Type expressions cannot use raw string literals", + "annotationSpansStrings": "Type expressions cannot span multiple string literals", + "annotationStringEscape": "Type expressions cannot contain escape characters", + "annotationTemplateString": { + "message": "Type expressions cannot use template string literals (t-strings)", + "comment": "'t-string' is the common English slang for a Python template string" + }, + "argAssignment": "Argument of type \"{argType}\" cannot be assigned to parameter of type \"{paramType}\"", + "argAssignmentFunction": "Argument of type \"{argType}\" cannot be assigned to parameter of type \"{paramType}\" in function \"{functionName}\"", + "argAssignmentParam": "Argument of type \"{argType}\" cannot be assigned to parameter \"{paramName}\" of type \"{paramType}\"", + "argAssignmentParamFunction": "Argument of type \"{argType}\" cannot be assigned to parameter \"{paramName}\" of type \"{paramType}\" in function \"{functionName}\"", + "argMissingForParam": "Argument missing for parameter {name}", + "argMissingForParams": "Arguments missing for parameters {names}", + "argMorePositionalExpectedCount": "Expected {expected} more positional arguments", + "argMorePositionalExpectedOne": "Expected 1 more positional argument", + "argPositional": "Expected positional argument", + "argPositionalExpectedCount": "Expected {expected} positional arguments", + "argPositionalExpectedOne": "Expected 1 positional argument", + "argTypePartiallyUnknown": "Argument type is partially unknown", + "argTypeUnknown": "Argument type is unknown", + "assertAlwaysTrue": { + "message": "Assert expression always evaluates to true", + "comment": "{Locked='true'}" + }, + "assertTypeArgs": { + "message": "\"assert_type\" expects two positional arguments", + "comment": "{Locked='assert_type'}" + }, + "assertTypeTypeMismatch": { + "message": 
"\"assert_type\" mismatch: expected \"{expected}\" but received \"{received}\"", + "comment": "{Locked='assert_type'}" + }, + "assignmentExprComprehension": { + "message": "Assignment expression target \"{name}\" cannot use same name as comprehension for target", + "comment": "A comprehension is a 'set of looping and filtering instructions' applied to a collection to generate a new collection; the word may not be translatable" + }, + "assignmentExprContext": "Assignment expression must be within module, function or lambda", + "assignmentExprInSubscript": "Assignment expressions within a subscript are supported only in Python 3.10 and newer", + "assignmentInProtocol": { + "message": "Instance or class variables within a Protocol class must be explicitly declared within the class body", + "comment": "{Locked='Protocol'}" + }, + "assignmentTargetExpr": "Expression cannot be assignment target", + "asyncNotInAsyncFunction": { + "message": "Use of \"async\" not allowed outside of async function", + "comment": "{Locked='async'}" + }, + "awaitIllegal": { + "message": "Use of \"await\" requires Python 3.5 or newer", + "comment": "{Locked='await'}" + }, + "awaitNotAllowed": { + "message": "Type expressions cannot use \"await\"", + "comment": "{Locked='await'}" + }, + "awaitNotInAsync": { + "message": "\"await\" allowed only within async function", + "comment": "{Locked='await','async'}" + }, + "backticksIllegal": { + "message": "Expressions surrounded by backticks are not supported in Python 3.x; use repr instead", + "comment": "{Locked='repr'}" + }, + "baseClassCircular": "Class cannot derive from itself", + "baseClassFinal": { + "message": "Base class \"{type}\" is marked final and cannot be subclassed", + "comment": "{Locked='final'}" + }, + "baseClassIncompatible": "Base classes of {type} are mutually incompatible", + "baseClassInvalid": "Argument to class must be a base class", + "baseClassMethodTypeIncompatible": "Base classes for class \"{classType}\" define method 
\"{name}\" in incompatible way", + "baseClassUnknown": "Base class type is unknown, obscuring type of derived class", + "baseClassVariableTypeIncompatible": "Base classes for class \"{classType}\" define variable \"{name}\" in incompatible way", + "binaryOperationNotAllowed": "Binary operator not allowed in type expression", + "bindParamMissing": { + "message": "Could not bind method \"{methodName}\" because it is missing a \"self\" or \"cls\" parameter", + "comment": "Binding is the process through which Pyright determines what object a name refers to" + }, + "bindTypeMismatch": { + "message": "Could not bind method \"{methodName}\" because \"{type}\" is not assignable to parameter \"{paramName}\"", + "comment": "Binding is the process through which Pyright determines what object a name refers to" + }, + "breakInExceptionGroup": { + "message": "\"break\" is not allowed in an \"except*\" block", + "comment": "{Locked='break','except*'}" + }, + "breakOutsideLoop": { + "message": "\"break\" can be used only within a loop", + "comment": "{Locked='break'}" + }, + "bytesUnsupportedEscape": { + "message": "Unsupported escape sequence in bytes literal", + "comment": "{Locked='bytes'}" + }, + "callableExtraArgs": { + "message": "Expected only two type arguments to \"Callable\"", + "comment": "{Locked='Callable'}" + }, + "callableFirstArg": "Expected parameter type list or \"...\"", + "callableNotInstantiable": "Cannot instantiate type \"{type}\"", + "callableSecondArg": { + "message": "Expected return type as second type argument for \"Callable\"", + "comment": "{Locked='Callable'}" + }, + "casePatternIsIrrefutable": "Irrefutable pattern is allowed only for the last case statement", + "classAlreadySpecialized": "Type \"{type}\" is already specialized", + "classDecoratorTypeUnknown": "Untyped class decorator obscures type of class; ignoring decorator", + "classDefinitionCycle": "Class definition for \"{name}\" depends on itself", + "classGetItemClsParam": { + "message": 
"__class_getitem__ override should take a \"cls\" parameter", + "comment": "{Locked='__class_getitem__','cls'}" + }, + "classMethodClsParam": { + "message": "Class methods should take a \"cls\" parameter", + "comment": "{Locked='cls'}" + }, + "classNotRuntimeSubscriptable": "Subscript for class \"{name}\" will generate runtime exception; enclose type expression in quotes", + "classPatternBuiltInArgPositional": "Class pattern accepts only positional sub-pattern", + "classPatternNewType": "\"{type}\" cannot be used in a class pattern because it is defined using NewType", + "classPatternPositionalArgCount": "Too many positional patterns for class \"{type}\"; expected {expected} but received {received}", + "classPatternTypeAlias": "\"{type}\" cannot be used in a class pattern because it is a specialized type alias", + "classPropertyDeprecated": "Class properties are deprecated in Python 3.11 and will not be supported in Python 3.13", + "classTypeParametersIllegal": "Class type parameter syntax requires Python 3.12 or newer", + "classVarFirstArgMissing": { + "message": "Expected a type argument after \"ClassVar\"", + "comment": "{Locked='ClassVar'}" + }, + "classVarNotAllowed": { + "message": "\"ClassVar\" is not allowed in this context", + "comment": "{Locked='ClassVar'}" + }, + "classVarOverridesInstanceVar": "Class variable \"{name}\" overrides instance variable of same name in class \"{className}\"", + "classVarTooManyArgs": { + "message": "Expected only one type argument after \"ClassVar\"", + "comment": "{Locked='ClassVar'}" + }, + "classVarWithTypeVar": { + "message": "\"ClassVar\" type cannot include type variables", + "comment": "{Locked='ClassVar'}" + }, + "clsSelfParamTypeMismatch": "Type of parameter \"{name}\" must be a supertype of its class \"{classType}\"", + "codeTooComplexToAnalyze": "Code is too complex to analyze; reduce complexity by refactoring into subroutines or reducing conditional code paths", + "collectionAliasInstantiation": "Type \"{type}\" 
cannot be instantiated, use \"{alias}\" instead", + "comparisonAlwaysFalse": { + "message": "Condition will always evaluate to False since the types \"{leftType}\" and \"{rightType}\" have no overlap", + "comment": "{Locked='False'}" + }, + "comparisonAlwaysTrue": { + "message": "Condition will always evaluate to True since the types \"{leftType}\" and \"{rightType}\" have no overlap", + "comment": "{Locked='True'}" + }, + "comprehensionInDict": { + "message": "Comprehension cannot be used with other dictionary entries", + "comment": "A comprehension is a 'set of looping and filtering instructions' applied to a collection to generate a new collection; the word may not be translatable" + }, + "comprehensionInSet": { + "message": "Comprehension cannot be used with other set entries", + "comment": ["{Locked='set'}", "A comprehension is a 'set of looping and filtering instructions' applied to a collection to generate a new collection; the word may not be translatable"] + }, + "concatenateContext": { + "message": "\"Concatenate\" is not allowed in this context", + "comment": "{Locked='Concatenate'}" + }, + "concatenateParamSpecMissing": { + "message": "Last type argument for \"Concatenate\" must be a ParamSpec or \"...\"", + "comment": "{Locked='Concatenate','ParamSpec','...'}" + }, + "concatenateTypeArgsMissing": { + "message": "\"Concatenate\" requires at least two type arguments", + "comment": "{Locked='Concatenate'}" + }, + "conditionalOperandInvalid": "Invalid conditional operand of type \"{type}\"", + "constantRedefinition": "\"{name}\" is constant (because it is uppercase) and cannot be redefined", + "constructorParametersMismatch": { + "message": "Mismatch between signature of __new__ and __init__ in class \"{classType}\"", + "comment": "{Locked='__new__','__init__'}" + }, + "containmentAlwaysFalse": { + "message": "Expression will always evaluate to False since the types \"{leftType}\" and \"{rightType}\" have no overlap", + "comment": "{Locked='False'}" + }, + 
"containmentAlwaysTrue": { + "message": "Expression will always evaluate to True since the types \"{leftType}\" and \"{rightType}\" have no overlap", + "comment": "{Locked='True'}" + }, + "continueInExceptionGroup": { + "message": "\"continue\" is not allowed in an \"except*\" block", + "comment": "{Locked='continue','except*'}" + }, + "continueOutsideLoop": { + "message": "\"continue\" can be used only within a loop", + "comment": "{Locked='continue'}" + }, + "coroutineInConditionalExpression": { + "message": "Conditional expression references coroutine which always evaluates to True", + "comment": "{Locked='True'}" + }, + "dataClassBaseClassFrozen": "A non-frozen class cannot inherit from a class that is frozen", + "dataClassBaseClassNotFrozen": "A frozen class cannot inherit from a class that is not frozen", + "dataClassConverterFunction": "Argument of type \"{argType}\" is not a valid converter for field \"{fieldName}\" of type \"{fieldType}\"", + "dataClassConverterOverloads": "No overloads of \"{funcName}\" are valid converters for field \"{fieldName}\" of type \"{fieldType}\"", + "dataClassFieldInheritedDefault": "\"{fieldName}\" overrides a field of the same name but is missing a default value", + "dataClassFieldWithDefault": "Fields without default values cannot appear after fields with default values", + "dataClassFieldWithPrivateName": "Dataclass field cannot use private name", + "dataClassFieldWithoutAnnotation": "Dataclass field without type annotation will cause runtime exception", + "dataClassPostInitParamCount": { + "message": "Dataclass __post_init__ incorrect parameter count; number of InitVar fields is {expected}", + "comment": "{Locked='__post_init__','InitVar'}" + }, + "dataClassPostInitType": { + "message": "Dataclass __post_init__ method parameter type mismatch for field \"{fieldName}\"", + "comment": "{Locked='__post_init__'}" + }, + "dataClassSlotsOverwrite": { + "message": "__slots__ is already defined in class", + "comment": 
"{Locked='__slots__'}" + }, + "dataClassTransformExpectedBoolLiteral": { + "message": "Expected expression that statically evaluates to True or False", + "comment": "{Locked='True','False'}" + }, + "dataClassTransformFieldSpecifier": { + "message": "Expected tuple of classes or functions but received type \"{type}\"", + "comment": "{Locked='tuple'}" + }, + "dataClassTransformPositionalParam": { + "message": "All arguments to \"dataclass_transform\" must be keyword arguments", + "comment": "{Locked='dataclass_transform'}" + }, + "dataClassTransformUnknownArgument": { + "message": "Argument \"{name}\" is not supported by dataclass_transform", + "comment": "{Locked='dataclass_transform'}" + }, + "dataProtocolInSubclassCheck": { + "message": "Data protocols (which include non-method attributes) are not allowed in issubclass calls", + "comment": "{Locked='issubclass'}" + }, + "declaredReturnTypePartiallyUnknown": "Declared return type, \"{returnType}\", is partially unknown", + "declaredReturnTypeUnknown": "Declared return type is unknown", + "defaultValueContainsCall": "Function calls and mutable objects not allowed within parameter default value expression", + "defaultValueNotAllowed": "Parameter with \"*\" or \"**\" cannot have default value", + "delTargetExpr": "Expression cannot be deleted", + "deprecatedClass": "The class \"{name}\" is deprecated", + "deprecatedConstructor": "The constructor for class \"{name}\" is deprecated", + "deprecatedDescriptorDeleter": "The \"__delete__\" method for descriptor \"{name}\" is deprecated", + "deprecatedDescriptorGetter": "The \"__get__\" method for descriptor \"{name}\" is deprecated", + "deprecatedDescriptorSetter": "The \"__set__\" method for descriptor \"{name}\" is deprecated", + "deprecatedFunction": "The function \"{name}\" is deprecated", + "deprecatedMethod": "The method \"{name}\" in class \"{className}\" is deprecated", + "deprecatedPropertyDeleter": { + "message": "The deleter for property \"{name}\" is 
deprecated", + "comment": "{Locked='deleter','property'}" + }, + "deprecatedPropertyGetter": { + "message": "The getter for property \"{name}\" is deprecated", + "comment": "{Locked='getter','property'}" + }, + "deprecatedPropertySetter": { + "message": "The setter for property \"{name}\" is deprecated", + "comment": "{Locked='setter','property'}" + }, + "deprecatedType": "This type is deprecated as of Python {version}; use \"{replacement}\" instead", + "dictExpandIllegalInComprehension": { + "message": "Dictionary expansion not allowed in comprehension", + "comment": "A comprehension is a 'set of looping and filtering instructions' applied to a collection to generate a new collection; the word may not be translatable" + }, + "dictInAnnotation": "Dictionary expression not allowed in type expression", + "dictKeyValuePairs": "Dictionary entries must contain key/value pairs", + "dictUnpackIsNotMapping": "Expected mapping for dictionary unpack operator", + "dunderAllSymbolNotPresent": { + "message": "\"{name}\" is specified in __all__ but is not present in module", + "comment": "{Locked='__all__'}" + }, + "duplicateArgsParam": "Only one \"*\" parameter allowed", + "duplicateBaseClass": "Duplicate base class not allowed", + "duplicateCapturePatternTarget": "Capture target \"{name}\" cannot appear more than once within the same pattern", + "duplicateCatchAll": { + "message": "Only one catch-all except clause allowed", + "comment": "{Locked='except'}" + }, + "duplicateEnumMember": { + "message": "Enum member \"{name}\" is already declared", + "comment": "{Locked='Enum'}" + }, + "duplicateGenericAndProtocolBase": { + "message": "Only one Generic[...] or Protocol[...] 
base class allowed", + "comment": "{Locked='Generic[...]','Protocol[...]'}" + }, + "duplicateImport": "\"{importName}\" is imported more than once", + "duplicateKeywordOnly": "Only one \"*\" separator allowed", + "duplicateKwargsParam": "Only one \"**\" parameter allowed", + "duplicateParam": "Duplicate parameter \"{name}\"", + "duplicatePositionOnly": "Only one \"/\" parameter allowed", + "duplicateStarPattern": "Only one \"*\" pattern allowed in a pattern sequence", + "duplicateStarStarPattern": "Only one \"**\" entry allowed", + "duplicateUnpack": { + "message": "Only one unpack operation allowed in list", + "comment": "{Locked='list'}" + }, + "ellipsisAfterUnpacked": { + "message": "\"...\" cannot be used with an unpacked TypeVarTuple or tuple", + "comment": "{Locked='TypeVarTuple','tuple'}" + }, + "ellipsisContext": "\"...\" is not allowed in this context", + "ellipsisSecondArg": "\"...\" is allowed only as the second of two arguments", + "enumClassOverride": { + "message": "Enum class \"{name}\" is final and cannot be subclassed", + "comment": "{Locked='Enum','final'}" + }, + "enumMemberDelete": { + "message": "Enum member \"{name}\" cannot be deleted", + "comment": "{Locked='Enum'}" + }, + "enumMemberSet": { + "message": "Enum member \"{name}\" cannot be assigned", + "comment": "{Locked='Enum'}" + }, + "enumMemberTypeAnnotation": { + "message": "Type annotations are not allowed for enum members", + "comment": "{Locked='enum'}" + }, + "exceptionGroupIncompatible": { + "message": "Exception group syntax (\"except*\") requires Python 3.11 or newer", + "comment": "{Locked='except*'}" + }, + "exceptGroupMismatch": { + "message": "Try statement cannot include both \"except\" and \"except*\"", + "comment": "{Locked='except','except*'}" + }, + "exceptGroupRequiresType": { + "message": "Exception group syntax (\"except*\") requires an exception type", + "comment": "{Locked='except*'}" + }, + "exceptionGroupTypeIncorrect": { + "message": "Exception type in except* 
cannot derive from BaseExceptionGroup", + "comment": "{Locked='except*','BaseExceptionGroup'}" + }, + "exceptionTypeIncorrect": { + "message": "\"{type}\" does not derive from BaseException", + "comment": "{Locked='BaseException'}" + }, + "exceptionTypeNotClass": "\"{type}\" is not a valid exception class", + "exceptionTypeNotInstantiable": "Constructor for exception type \"{type}\" requires one or more arguments", + "exceptWithAsRequiresParens": "Multiple exception types must be parenthesized when using \"as\"", + "exceptRequiresParens": "Multiple exception types must be parenthesized prior to Python 3.14", + "expectedAfterDecorator": "Expected function or class declaration after decorator", + "expectedArrow": "Expected \"->\" followed by return type annotation", + "expectedAsAfterException": { + "message": "Expected \"as\" after exception type", + "comment": "{Locked='as'}" + }, + "expectedAssignRightHandExpr": "Expected expression to the right of \"=\"", + "expectedBinaryRightHandExpr": "Expected expression to the right of operator", + "expectedBoolLiteral": { + "message": "Expected True or False", + "comment": "{Locked='True','False'}" + }, + "expectedCase": { + "message": "Expected \"case\" statement", + "comment": "{Locked='case'}" + }, + "expectedClassName": "Expected class name", + "expectedCloseBrace": "\"{\" was not closed", + "expectedCloseBracket": "\"[\" was not closed", + "expectedCloseParen": "\"(\" was not closed", + "expectedColon": "Expected \":\"", + "expectedComplexNumberLiteral": { + "message": "Expected complex number literal for pattern matching", + "comment": "Complex numbers are a mathematical concept consisting of a real number and an imaginary number" + }, + "expectedDecoratorExpr": "Expression form not supported for decorator prior to Python 3.9", + "expectedDecoratorName": "Expected decorator name", + "expectedDecoratorNewline": "Expected new line at end of decorator", + "expectedDelExpr": { + "message": "Expected expression after
\"del\"", + "comment": "{Locked='del'}" + }, + "expectedElse": { + "message": "Expected \"else\"", + "comment": "{Locked='else'}" + }, + "expectedEquals": "Expected \"=\"", + "expectedExceptionClass": "Invalid exception class or object", + "expectedExceptionObj": { + "message": "Expected exception object, exception class or None", + "comment": "{Locked='None'}" + }, + "expectedExpr": "Expected expression", + "expectedFunctionAfterAsync": { + "message": "Expected function definition after \"async\"", + "comment": "{Locked='async'}" + }, + "expectedFunctionName": { + "message": "Expected function name after \"def\"", + "comment": "{Locked='def'}" + }, + "expectedIdentifier": "Expected identifier", + "expectedImport": { + "message": "Expected \"import\"", + "comment": "{Locked='import'}" + }, + "expectedImportAlias": { + "message": "Expected symbol after \"as\"", + "comment": "{Locked='as'}" + }, + "expectedImportSymbols": { + "message": "Expected one or more symbol names after \"import\"", + "comment": "{Locked='import'}" + }, + "expectedIn": { + "message": "Expected \"in\"", + "comment": "{Locked='in'}" + }, + "expectedInExpr": { + "message": "Expected expression after \"in\"", + "comment": "{Locked='in'}" + }, + "expectedIndentedBlock": "Expected indented block", + "expectedMemberName": "Expected attribute name after \".\"", + "expectedModuleName": "Expected module name", + "expectedNameAfterAs": { + "message": "Expected symbol name after \"as\"", + "comment": "{Locked='as'}" + }, + "expectedNamedParameter": "Keyword parameter must follow \"*\"", + "expectedNewline": "Expected newline", + "expectedNewlineOrSemicolon": "Statements must be separated by newlines or semicolons", + "expectedOpenParen": "Expected \"(\"", + "expectedParamName": "Expected parameter name", + "expectedPatternExpr": "Expected pattern expression", + "expectedPatternSubjectExpr": "Expected pattern subject expression", + "expectedPatternValue": { + "message": "Expected pattern value expression 
of the form \"a.b\"", + "comment": "{Locked='a.b'}" + }, + "expectedReturnExpr": { + "message": "Expected expression after \"return\"", + "comment": "{Locked='return'}" + }, + "expectedSliceIndex": "Expected index or slice expression", + "expectedTypeNotString": "Expected type but received a string literal", + "expectedTypeParameterName": "Expected type parameter name", + "expectedYieldExpr": { + "message": "Expected expression in yield statement", + "comment": "{Locked='yield'}" + }, + "finalClassIsAbstract": { + "message": "Class \"{type}\" is marked final and must implement all abstract symbols", + "comment": "{Locked='final'}" + }, + "finalContext": { + "message": "\"Final\" is not allowed in this context", + "comment": "{Locked='Final'}" + }, + "finalInLoop": { + "message": "A \"Final\" variable cannot be assigned within a loop", + "comment": "{Locked='Final'}" + }, + "finallyBreak": { + "message": "A \"break\" cannot be used to exit a \"finally\" block", + "comment": "{Locked='break', 'finally'}" + }, + "finallyContinue": { + "message": "A \"continue\" cannot be used to exit a \"finally\" block", + "comment": "{Locked='continue', 'finally'}" + }, + "finallyReturn": { + "message": "A \"return\" cannot be used to exit a \"finally\" block", + "comment": "{Locked='return', 'finally'}" + }, + "finalMethodOverride": { + "message": "Method \"{name}\" cannot override final method defined in class \"{className}\"", + "comment": "{Locked='final'}" + }, + "finalNonMethod": { + "message": "Function \"{name}\" cannot be marked @final because it is not a method", + "comment": "{Locked='@final'}" + }, + "finalReassigned": { + "message": "\"{name}\" is declared as Final and cannot be reassigned", + "comment": "{Locked='Final'}" + }, + "finalRedeclaration": { + "message": "\"{name}\" was previously declared as Final", + "comment": "{Locked='Final'}" + }, + "finalRedeclarationBySubclass": { + "message": "\"{name}\" cannot be redeclared because parent class \"{className}\" 
declares it as Final", + "comment": "{Locked='Final'}" + }, + "finalTooManyArgs": { + "message": "Expected a single type argument after \"Final\"", + "comment": "{Locked='Final'}" + }, + "finalUnassigned": { + "message": "\"{name}\" is declared Final, but value is not assigned", + "comment": "{Locked='Final'}" + }, + "formatStringBrace": { + "message": "Single close brace not allowed within f-string literal; use double close brace", + "comment": "'f-string' is the common English slang for a Python format string" + }, + "formatStringBytes": { + "message": "Format string literals (f-strings) cannot be binary", + "comment": "'f-string' is the common English slang for a Python format string" + }, + "formatStringDebuggingIllegal": { + "message": "F-string debugging specifier \"=\" requires Python 3.8 or newer", + "comment": "'f-string' is the common English slang for a Python format string" + }, + "formatStringEscape": { + "message": "Escape sequence (backslash) not allowed in expression portion of f-string prior to Python 3.12", + "comment": "'f-string' is the common English slang for a Python format string" + }, + "formatStringExpectedConversion": { + "message": "Expected a conversion specifier after \"!\" in f-string", + "comment": "'f-string' is the common English slang for a Python format string" + }, + "formatStringIllegal": { + "message": "Format string literals (f-strings) require Python 3.6 or newer", + "comment": "'f-string' is the common English slang for a Python format string" + }, + "formatStringInPattern": "Format string not allowed in pattern", + "formatStringNestedFormatSpecifier": "Expressions nested too deeply within format string specifier", + "formatStringNestedQuote": { + "message": "Strings nested within an f-string cannot use the same quote character as the f-string prior to Python 3.12", + "comment": "'f-string' is the common English slang for a Python format string" + }, + "formatStringTemplate": { + "message": "Format string literals 
(f-strings) cannot also be template strings (t-strings)", + "comment": "'f-string' is the common English slang for a Python format string, 't-string' is the common English slang for a Python template string" + }, + "formatStringUnicode": { + "message": "Format string literals (f-strings) cannot be unicode", + "comment": "'f-string' is the common English slang for a Python format string" + }, + "formatStringUnterminated": { + "message": "Unterminated expression in f-string; expecting \"}\"", + "comment": "'f-string' is the common English slang for a Python format string" + }, + "functionDecoratorTypeUnknown": "Untyped function decorator obscures type of function; ignoring decorator", + "functionInConditionalExpression": { + "message": "Conditional expression references function which always evaluates to True", + "comment": "{Locked='True'}" + }, + "functionTypeParametersIllegal": "Function type parameter syntax requires Python 3.12 or newer", + "futureImportLocationNotAllowed": { + "message": "Imports from __future__ must be at the beginning of the file", + "comment": "{Locked='__future__'}" + }, + "generatorAsyncReturnType": { + "message": "Return type of async generator function must be compatible with \"AsyncGenerator[{yieldType}, Any]\"", + "comment": "{Locked='async','AsyncGenerator[{yieldType}, Any]'}" + }, + "generatorNotParenthesized": "Generator expressions must be parenthesized if not sole argument", + "generatorSyncReturnType": { + "message": "Return type of generator function must be compatible with \"Generator[{yieldType}, Any, Any]\"", + "comment": "{Locked='Generator[{yieldType}, Any, Any]'}" + }, + "genericBaseClassNotAllowed": { + "message": "\"Generic\" base class cannot be used with type parameter syntax", + "comment": "{Locked='Generic'}" + }, + "genericClassAssigned": { + "message": "Generic class type cannot be assigned", + "comment": "A generic type is a parameterized type, for example a container where the generic type parameter specifies the 
type of elements in the container" + }, + "genericClassDeleted": { + "message": "Generic class type cannot be deleted", + "comment": "A generic type is a parameterized type, for example a container where the generic type parameter specifies the type of elements in the container" + }, + "genericInstanceVariableAccess": { + "message": "Access to generic instance variable through class is ambiguous", + "comment": "A generic type is a parameterized type, for example a container where the generic type parameter specifies the type of elements in the container" + }, + "genericNotAllowed": { + "message": "\"Generic\" is not valid in this context", + "comment": "{Locked='Generic'}" + }, + "genericTypeAliasBoundTypeVar": { + "message": "Generic type alias within class cannot use bound type variables {names}", + "comment": "A generic type is a parameterized type, for example a container where the generic type parameter specifies the type of elements in the container" + }, + "genericTypeArgMissing": { + "message": "\"Generic\" requires at least one type argument", + "comment": "{Locked='Generic'}" + }, + "genericTypeArgTypeVar": { + "message": "Type argument for \"Generic\" must be a type variable", + "comment": "{Locked='Generic'}" + }, + "genericTypeArgUnique": { + "message": "Type arguments for \"Generic\" must be unique", + "comment": "{Locked='Generic'}" + }, + "globalReassignment": { + "message": "\"{name}\" is assigned before global declaration", + "comment": "{Locked='global'}" + }, + "globalRedefinition": { + "message": "\"{name}\" was already declared global", + "comment": "{Locked='global'}" + }, + "implicitStringConcat": "Implicit string concatenation not allowed", + "importCycleDetected": "Cycle detected in import chain", + "importDepthExceeded": "Import chain depth exceeded {depth}", + "importResolveFailure": "Import \"{importName}\" could not be resolved", + "importSourceResolveFailure": "Import \"{importName}\" could not be resolved from source", + 
"importSymbolUnknown": "\"{name}\" is unknown import symbol", + "incompatibleMethodOverride": "Method \"{name}\" overrides class \"{className}\" in an incompatible manner", + "inconsistentIndent": "Unindent amount does not match previous indent", + "inconsistentTabs": "Inconsistent use of tabs and spaces in indentation", + "initMethodSelfParamTypeVar": { + "message": "Type annotation for \"self\" parameter of \"__init__\" method cannot contain class-scoped type variables", + "comment": "{Locked='self','__init__'}" + }, + "initMustReturnNone": { + "message": "Return type of \"__init__\" must be None", + "comment": "{Locked='__init__','None'}" + }, + "initSubclassCallFailed": { + "message": "Incorrect keyword arguments for __init_subclass__ method", + "comment": "{Locked='__init_subclass__'}" + }, + "initSubclassClsParam": { + "message": "__init_subclass__ override should take a \"cls\" parameter", + "comment": "{Locked='__init_subclass__','cls'}" + }, + "initVarNotAllowed": { + "message": "\"InitVar\" is not allowed in this context", + "comment": "{Locked='InitVar'}" + }, + "instanceMethodSelfParam": { + "message": "Instance methods should take a \"self\" parameter", + "comment": "{Locked='self'}" + }, + "instanceVarOverridesClassVar": "Instance variable \"{name}\" overrides class variable of same name in class \"{className}\"", + "instantiateAbstract": "Cannot instantiate abstract class \"{type}\"", + "instantiateProtocol": { + "message": "Cannot instantiate Protocol class \"{type}\"", + "comment": "{Locked='Protocol'}" + }, + "internalBindError": { + "message": "An internal error occurred while binding file \"{file}\": {message}", + "comment": "Binding is the process through which Pyright determines what object a name refers to" + }, + "internalParseError": "An internal error occurred while parsing file \"{file}\": {message}", + "internalTypeCheckingError": "An internal error occurred while type checking file \"{file}\": {message}", + "invalidIdentifierChar": 
"Invalid character in identifier", + "invalidStubStatement": { + "message": "Statement is meaningless within a type stub file", + "comment": "{StrContains=i'stub'}" + }, + "invalidTokenChars": "Invalid character \"{text}\" in token", + "isInstanceInvalidType": { + "message": "Second argument to \"isinstance\" must be a class or tuple of classes", + "comment": "{Locked='isinstance','tuple'}" + }, + "isSubclassInvalidType": { + "message": "Second argument to \"issubclass\" must be a class or tuple of classes", + "comment": "{Locked='issubclass','tuple'}" + }, + "keyValueInSet": { + "message": "Key/value pairs are not allowed within a set", + "comment": "{Locked='set'}" + }, + "keywordArgInTypeArgument": "Keyword arguments cannot be used in type argument lists", + "keywordOnlyAfterArgs": "Keyword-only argument separator not allowed after \"*\" parameter", + "keywordParameterMissing": "One or more keyword parameters must follow \"*\" parameter", + "keywordSubscriptIllegal": "Keyword arguments within subscripts are not supported", + "lambdaReturnTypePartiallyUnknown": "Return type of lambda, \"{returnType}\", is partially unknown", + "lambdaReturnTypeUnknown": "Return type of lambda is unknown", + "listAssignmentMismatch": "Expression with type \"{type}\" cannot be assigned to target list", + "listInAnnotation": { + "message": "List expression not allowed in type expression", + "comment": "{Locked='List'}" + }, + "literalEmptyArgs": { + "message": "Expected one or more type arguments after \"Literal\"", + "comment": "{Locked='Literal'}" + }, + "literalNamedUnicodeEscape": { + "message": "Named unicode escape sequences are not supported in \"Literal\" string annotations", + "comment": "{Locked='Literal'}" + }, + "literalNotAllowed": { + "message": "\"Literal\" cannot be used in this context without a type argument", + "comment": "{Locked='Literal'}" + }, + "literalNotCallable": { + "message": "Literal type cannot be instantiated", + "comment": "{Locked='Literal'}" + }, + 
"literalUnsupportedType": { + "message": "Type arguments for \"Literal\" must be None, a literal value (int, bool, str, or bytes), or an enum value", + "comment": "{Locked='Literal','None','int','bool','str','bytes','enum'}" + }, + "matchIncompatible": { + "message": "Match statements require Python 3.10 or newer", + "comment": ["{StrContains=i'match'}", "'match' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "matchIsNotExhaustive": { + "message": "Cases within match statement do not exhaustively handle all values", + "comment": ["{Locked='match'}", "Case statements are children of match statements where 'case' is a keyword. It may be best to keep 'case' in English"] + }, + "maxParseDepthExceeded": "Maximum parse depth exceeded; break expression into smaller sub-expressions", + "memberAccess": "Cannot access attribute \"{name}\" for class \"{type}\"", + "memberDelete": "Cannot delete attribute \"{name}\" for class \"{type}\"", + "memberSet": "Cannot assign to attribute \"{name}\" for class \"{type}\"", + "metaclassConflict": { + "message": "The metaclass of a derived class must be a subclass of the metaclasses of all its base classes", + "comment": "Metaclasses are a complex concept and it may be best to not localize the term" + }, + "metaclassDuplicate": { + "message": "Only one metaclass can be provided", + "comment": "Metaclasses are a complex concept and it may be best to not localize the term" + }, + "metaclassIsGeneric": { + "message": "Metaclass cannot be generic", + "comment": ["Metaclasses are a complex concept and it may be best to not localize the term", "A generic type is a parameterized type, for example a container where the generic type parameter specifies the type of elements in the container"] + }, + "methodNotDefined": "\"{name}\" method not defined", + "methodNotDefinedOnType": "\"{name}\" method not defined on type \"{type}\"", + "methodOrdering": "Cannot create 
consistent method ordering", + "methodOverridden": "\"{name}\" overrides method of same name in class \"{className}\" with incompatible type \"{type}\"", + "methodReturnsNonObject": "\"{name}\" method does not return an object", + "missingSuperCall": "Method \"{methodName}\" does not call the method of the same name in parent class", + "mixingBytesAndStr": { + "message": "Bytes and str values cannot be concatenated", + "comment": ["{Locked='str'}", "{StrContains=i'bytes'}", "'bytes' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "moduleAsType": "Module cannot be used as a type", + "moduleNotCallable": "Module is not callable", + "moduleUnknownMember": "\"{memberName}\" is not a known attribute of module \"{moduleName}\"", + "namedExceptAfterCatchAll": { + "message": "A named except clause cannot appear after catch-all except clause", + "comment": "{Locked='except'}" + }, + "namedParamAfterParamSpecArgs": { + "message": "Keyword parameter \"{name}\" cannot appear in signature after ParamSpec args parameter", + "comment": "{Locked='ParamSpec','args'}" + }, + "namedTupleEmptyName": { + "message": "Names within a named tuple cannot be empty", + "comment": "{Locked='tuple'}" + }, + "namedTupleEntryRedeclared": { + "message": "Cannot override \"{name}\" because parent class \"{className}\" is a named tuple", + "comment": "{Locked='tuple'}" + }, + "namedTupleFieldUnderscore": { + "message": "Named tuple field names cannot start with an underscore", + "comment": "{Locked='Named','tuple'}" + }, + "namedTupleFirstArg": { + "message": "Expected named tuple class name as first argument", + "comment": "{Locked='tuple'}" + }, + "namedTupleMultipleInheritance": { + "message": "Multiple inheritance with NamedTuple is not supported", + "comment": "{Locked='NamedTuple'}" + }, + "namedTupleNameKeyword": "Field names cannot be a keyword", + "namedTupleNameType": { + "message": "Expected two-entry tuple 
specifying entry name and type", + "comment": "{Locked='tuple'}" + }, + "namedTupleNameUnique": { + "message": "Names within a named tuple must be unique", + "comment": "{Locked='tuple'}" + }, + "namedTupleNoTypes": { + "message": "\"namedtuple\" provides no types for tuple entries; use \"NamedTuple\" instead", + "comment": "{Locked='namedtuple','tuple','NamedTuple'}" + }, + "namedTupleSecondArg": { + "message": "Expected named tuple entry list as second argument", + "comment": "{Locked='tuple','list'}" + }, + "newClsParam": { + "message": "__new__ override should take a \"cls\" parameter", + "comment": "{Locked='__new__','cls'}" + }, + "newTypeAnyOrUnknown": { + "message": "The second argument to NewType must be a known class, not Any or Unknown", + "comment": "{Locked='NewType','Any','Unknown'}" + }, + "newTypeBadName": { + "message": "The first argument to NewType must be a string literal", + "comment": "{Locked='NewType'}" + }, + "newTypeLiteral": { + "message": "NewType cannot be used with Literal type", + "comment": "{Locked='NewType','Literal'}" + }, + "newTypeNameMismatch": { + "message": "NewType must be assigned to a variable with the same name", + "comment": "{Locked='NewType'}" + }, + "newTypeNotAClass": { + "message": "Expected class as second argument to NewType", + "comment": "{Locked='NewType'}" + }, + "newTypeParamCount": { + "message": "NewType requires two positional arguments", + "comment": "{Locked='NewType'}" + }, + "newTypeProtocolClass": { + "message": "NewType cannot be used with structural type (a Protocol or TypedDict class)", + "comment": "{Locked='NewType','Protocol','TypedDict'}" + }, + "noOverload": "No overloads for \"{name}\" match the provided arguments", + "noReturnContainsReturn": { + "message": "Function with declared return type \"NoReturn\" cannot include a return statement", + "comment": "{Locked='NoReturn','return'}" + }, + "noReturnContainsYield": { + "message": "Function with declared return type \"NoReturn\" cannot 
include a yield statement", + "comment": "{Locked='NoReturn','yield'}" + }, + "noReturnReturnsNone": { + "message": "Function with declared return type \"NoReturn\" cannot return \"None\"", + "comment": "{Locked='NoReturn','None'}" + }, + "nonDefaultAfterDefault": "Non-default argument follows default argument", + "nonLocalInModule": { + "message": "Nonlocal declaration not allowed at module level", + "comment": ["{StrContains=i'nonlocal'}", "'nonlocal' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "nonLocalNoBinding": { + "message": "No binding for nonlocal \"{name}\" found", + "comment": ["{Locked='nonlocal'}", "'No binding found' means that Pyright couldn't resolve the variable name to an object"] + }, + "nonLocalReassignment": { + "message": "\"{name}\" is assigned before nonlocal declaration", + "comment": "{Locked='nonlocal'}" + }, + "nonLocalRedefinition": { + "message": "\"{name}\" was already declared nonlocal", + "comment": "{Locked='nonlocal'}" + }, + "nonlocalTypeParam": { + "message": "Nonlocal binding is not allowed for type parameter \"{name}\"", + "comment": ["{StrContains=i'nonlocal'}", "'nonlocal' is a keyword and should not be localized. 
It is only capitalized here because it is the first word in the sentence"] + }, + "noneNotCallable": { + "message": "Object of type \"None\" cannot be called", + "comment": "{Locked='None'}" + }, + "noneNotIterable": { + "message": "Object of type \"None\" cannot be used as iterable value", + "comment": "{Locked='None'}" + }, + "noneNotSubscriptable": { + "message": "Object of type \"None\" is not subscriptable", + "comment": "{Locked='None'}" + }, + "noneNotUsableWith": { + "message": "Object of type \"None\" cannot be used with \"with\"", + "comment": "{Locked='None','with'}" + }, + "noneNotUsableWithAsync": { + "message": "Object of type \"None\" cannot be used with \"async with\"", + "comment": "{Locked='None','with', 'async'}" + }, + "noneOperator": { + "message": "Operator \"{operator}\" not supported for \"None\"", + "comment": "{Locked='None'}" + }, + "noneUnknownMember": { + "message": "\"{name}\" is not a known attribute of \"None\"", + "comment": "{Locked='None'}" + }, + "notRequiredArgCount": { + "message": "Expected a single type argument after \"NotRequired\"", + "comment": "{Locked='NotRequired'}" + }, + "notRequiredNotInTypedDict": { + "message": "\"NotRequired\" is not allowed in this context", + "comment": "{Locked='NotRequired'}" + }, + "objectNotCallable": "Object of type \"{type}\" is not callable", + "obscuredClassDeclaration": "Class declaration \"{name}\" is obscured by a declaration of the same name", + "obscuredFunctionDeclaration": "Function declaration \"{name}\" is obscured by a declaration of the same name", + "obscuredMethodDeclaration": "Method declaration \"{name}\" is obscured by a declaration of the same name", + "obscuredParameterDeclaration": "Parameter declaration \"{name}\" is obscured by a declaration of the same name", + "obscuredTypeAliasDeclaration": "Type alias declaration \"{name}\" is obscured by a declaration of the same name", + "obscuredVariableDeclaration": "Declaration \"{name}\" is obscured by a declaration of the
same name", + "operatorLessOrGreaterDeprecated": "Operator \"<>\" is not supported in Python 3; use \"!=\" instead", + "optionalExtraArgs": { + "message": "Expected one type argument after \"Optional\"", + "comment": "{Locked='Optional'}" + }, + "orPatternIrrefutable": { + "message": "Irrefutable pattern allowed only as the last subpattern in an \"or\" pattern", + "comment": "{Locked='or'}" + }, + "orPatternMissingName": { + "message": "All subpatterns within an \"or\" pattern must target the same names", + "comment": "{Locked='or'}" + }, + "overlappingKeywordArgs": "Typed dictionary overlaps with keyword parameter: {names}", + "overlappingOverload": "Overload {obscured} for \"{name}\" will never be used because its parameters overlap overload {obscuredBy}", + "overloadAbstractImplMismatch": "Overloads must match abstract status of implementation", + "overloadAbstractMismatch": "Overloads must all be abstract or not", + "overloadClassMethodInconsistent": { + "message": "Overloads for \"{name}\" use @classmethod inconsistently", + "comment": "{Locked='@classmethod'}" + }, + "overloadFinalImpl": { + "message": "@final decorator should be applied only to the implementation", + "comment": "{Locked='@final'}" + }, + "overloadFinalNoImpl": { + "message": "Only the first overload should be marked @final", + "comment": "{Locked='@final'}" + }, + "overloadImplementationMismatch": "Overloaded implementation is not consistent with signature of overload {index}", + "overloadOverrideImpl": { + "message": "@override decorator should be applied only to the implementation", + "comment": "{Locked='@override'}" + }, + "overloadOverrideNoImpl": { + "message": "Only the first overload should be marked @override", + "comment": "{Locked='@override'}" + }, + "overloadReturnTypeMismatch": "Overload {prevIndex} for \"{name}\" overlaps overload {newIndex} and returns an incompatible type", + "overloadStaticMethodInconsistent": { + "message": "Overloads for \"{name}\" use @staticmethod 
inconsistently", + "comment": "{Locked='@staticmethod'}" + }, + "overloadWithoutImplementation": { + "message": "\"{name}\" is marked as overload, but no implementation is provided", + "comment": "{Locked='overload'}" + }, + "overriddenMethodNotFound": { + "message": "Method \"{name}\" is marked as override, but no base method of same name is present", + "comment": "{Locked='override'}" + }, + "overrideDecoratorMissing": { + "message": "Method \"{name}\" is not marked as override but is overriding a method in class \"{className}\"", + "comment": "{Locked='override'}" + }, + "paramAfterKwargsParam": "Parameter cannot follow \"**\" parameter", + "paramAlreadyAssigned": "Parameter \"{name}\" is already assigned", + "paramAnnotationMissing": "Type annotation is missing for parameter \"{name}\"", + "paramAssignmentMismatch": "Expression of type \"{sourceType}\" cannot be assigned to parameter of type \"{paramType}\"", + "paramNameMissing": "No parameter named \"{name}\"", + "paramSpecArgsKwargsDuplicate": { + "message": "Arguments for ParamSpec \"{type}\" have already been provided", + "comment": "{Locked='ParamSpec'}" + }, + "paramSpecArgsKwargsUsage": { + "message": "\"args\" and \"kwargs\" attributes of ParamSpec must both appear within a function signature", + "comment": "{Locked='args','kwargs','ParamSpec'}" + }, + "paramSpecArgsMissing": { + "message": "Arguments for ParamSpec \"{type}\" are missing", + "comment": "{Locked='ParamSpec'}" + }, + "paramSpecArgsUsage": { + "message": "\"args\" attribute of ParamSpec is valid only when used with *args parameter", + "comment": "{Locked='args','ParamSpec','*args'}" + }, + "paramSpecAssignedName": { + "message": "ParamSpec must be assigned to a variable named \"{name}\"", + "comment": "{Locked='ParamSpec'}" + }, + "paramSpecContext": { + "message": "ParamSpec is not allowed in this context", + "comment": "{Locked='ParamSpec'}" + }, + "paramSpecDefaultNotTuple": { + "message": "Expected ellipsis, a tuple expression, or 
ParamSpec for default value of ParamSpec", + "comment": "{Locked='tuple','ParamSpec'}" + }, + "paramSpecFirstArg": { + "message": "Expected name of ParamSpec as first argument", + "comment": "{Locked='ParamSpec'}" + }, + "paramSpecKwargsUsage": { + "message": "\"kwargs\" attribute of ParamSpec is valid only when used with **kwargs parameter", + "comment": "{Locked='kwargs','ParamSpec','**kwargs'}" + }, + "paramSpecNotUsedByOuterScope": { + "message": "ParamSpec \"{name}\" has no meaning in this context", + "comment": "{Locked='ParamSpec'}" + }, + "paramSpecUnknownArg": { + "message": "ParamSpec does not support more than one argument", + "comment": "{Locked='ParamSpec'}" + }, + "paramSpecUnknownMember": { + "message": "\"{name}\" is not a known attribute of ParamSpec", + "comment": "{Locked='ParamSpec'}" + }, + "paramSpecUnknownParam": { + "message": "\"{name}\" is unknown parameter to ParamSpec", + "comment": "{Locked='ParamSpec'}" + }, + "paramTypeCovariant": "Covariant type variable cannot be used in parameter type", + "paramTypePartiallyUnknown": "Type of parameter \"{paramName}\" is partially unknown", + "paramTypeUnknown": "Type of parameter \"{paramName}\" is unknown", + "parenthesizedContextManagerIllegal": { + "message": "Parentheses within \"with\" statement requires Python 3.9 or newer", + "comment": "{Locked='with'}" + }, + "patternNeverMatches": "Pattern will never be matched for subject type \"{type}\"", + "positionArgAfterNamedArg": "Positional argument cannot appear after keyword arguments", + "positionArgAfterUnpackedDictArg": "Positional argument cannot appear after keyword argument unpacking", + "positionOnlyAfterArgs": "Position-only parameter separator not allowed after \"*\" parameter", + "positionOnlyAfterKeywordOnly": "\"/\" parameter must appear before \"*\" parameter", + "positionOnlyAfterNon": "Position-only parameter not allowed after parameter that is not position-only", + "positionOnlyFirstParam": "Position-only parameter separator not 
allowed as first parameter", + "positionOnlyIncompatible": "Position-only parameter separator requires Python 3.8 or newer", + "privateImportFromPyTypedModule": "\"{name}\" is not exported from module \"{module}\"", + "privateUsedOutsideOfClass": "\"{name}\" is private and used outside of the class in which it is declared", + "privateUsedOutsideOfModule": "\"{name}\" is private and used outside of the module in which it is declared", + "propertyOverridden": { + "message": "\"{name}\" incorrectly overrides property of same name in class \"{className}\"", + "comment": "{Locked='property'}" + }, + "propertyStaticMethod": { + "message": "Static methods not allowed for property getter, setter or deleter", + "comment": "{Locked='property','getter','setter','deleter'}" + }, + "protectedUsedOutsideOfClass": "\"{name}\" is protected and used outside of the class in which it is declared", + "protocolBaseClass": { + "message": "Protocol class \"{classType}\" cannot derive from non-Protocol class \"{baseType}\"", + "comment": "{Locked='Protocol'}" + }, + "protocolBaseClassWithTypeArgs": { + "message": "Type arguments are not allowed with Protocol class when using type parameter syntax", + "comment": "{Locked='Protocol'}" + }, + "protocolIllegal": { + "message": "Use of \"Protocol\" requires Python 3.7 or newer", + "comment": "{Locked='Protocol'}" + }, + "protocolNotAllowed": { + "message": "\"Protocol\" cannot be used in this context", + "comment": "{Locked='Protocol'}" + }, + "protocolTypeArgMustBeTypeParam": { + "message": "Type argument for \"Protocol\" must be a type parameter", + "comment": "{Locked='Protocol'}" + }, + "protocolUnsafeOverlap": "Class overlaps \"{name}\" unsafely and could produce a match at runtime", + "protocolVarianceContravariant": { + "message": "Type variable \"{variable}\" used in generic Protocol \"{class}\" should be contravariant", + "comment": ["{Locked='Protocol'}", "A generic type is a parameterized type, for example a container where the 
generic type parameter specifies the type of elements in the container"] + }, + "protocolVarianceCovariant": { + "message": "Type variable \"{variable}\" used in generic Protocol \"{class}\" should be covariant", + "comment": ["{Locked='Protocol'}", "A generic type is a parameterized type, for example a container where the generic type parameter specifies the type of elements in the container"] + }, + "protocolVarianceInvariant": { + "message": "Type variable \"{variable}\" used in generic Protocol \"{class}\" should be invariant", + "comment": ["{Locked='Protocol'}", "A generic type is a parameterized type, for example a container where the generic type parameter specifies the type of elements in the container"] + }, + "pyrightCommentInvalidDiagnosticBoolValue": { + "message": "Pyright comment directive must be followed by \"=\" and a value of true or false", + "comment": "{Locked='Pyright','true','false'}" + }, + "pyrightCommentInvalidDiagnosticSeverityValue": { + "message": "Pyright comment directive must be followed by \"=\" and a value of true, false, error, warning, information, or none", + "comment": "{Locked='Pyright','true','false','error','warning','information','none'}" + }, + "pyrightCommentMissingDirective": { + "message": "Pyright comment must be followed by a directive (basic or strict) or a diagnostic rule", + "comment": "{Locked='Pyright','basic','strict'}" + }, + "pyrightCommentNotOnOwnLine": { + "message": "Pyright comments used to control file-level settings must appear on their own line", + "comment": "{Locked='Pyright'}" + }, + "pyrightCommentUnknownDiagnosticRule": { + "message": "\"{rule}\" is an unknown diagnostic rule for pyright comment", + "comment": "{Locked='pyright'}" + }, + "pyrightCommentUnknownDiagnosticSeverityValue": { + "message": "\"{value}\" is invalid value for pyright comment; expected true, false, error, warning, information, or none", + "comment": "{Locked='pyright','true','false','error','warning','information','none'}" + 
}, + "pyrightCommentUnknownDirective": { + "message": "\"{directive}\" is an unknown directive for pyright comment; expected \"strict\", \"standard\", or \"basic\"", + "comment": "{Locked='pyright','strict','standard','basic'}" + }, + "readOnlyArgCount": { + "message": "Expected a single type argument after \"ReadOnly\"", + "comment": "{Locked='ReadOnly'}" + }, + "readOnlyNotInTypedDict": { + "message": "\"ReadOnly\" is not allowed in this context", + "comment": "{Locked='ReadOnly'}" + }, + "recursiveDefinition": "Type of \"{name}\" could not be determined because it refers to itself", + "relativeImportNotAllowed": { + "message": "Relative imports cannot be used with \"import .a\" form; use \"from . import a\" instead", + "comment": "{Locked='import .a','from . import a'}" + }, + "requiredArgCount": { + "message": "Expected a single type argument after \"Required\"", + "comment": "{Locked='Required'}" + }, + "requiredNotInTypedDict": { + "message": "\"Required\" is not allowed in this context", + "comment": "{Locked='Required'}" + }, + "returnInAsyncGenerator": { + "message": "Return statement with value is not allowed in async generator", + "comment": "{Locked='async'}" + }, + "returnMissing": "Function with declared return type \"{returnType}\" must return value on all code paths", + "returnInExceptionGroup": { + "message": "\"return\" is not allowed in an \"except*\" block", + "comment": "{Locked='return','except*'}" + }, + "returnOutsideFunction": { + "message": "\"return\" can be used only within a function", + "comment": "{Locked='return'}" + }, + "returnTypeContravariant": "Contravariant type variable cannot be used in return type", + "returnTypeMismatch": "Type \"{exprType}\" is not assignable to return type \"{returnType}\"", + "returnTypePartiallyUnknown": "Return type, \"{returnType}\", is partially unknown", + "returnTypeUnknown": "Return type is unknown", + "revealLocalsArgs": { + "message": "Expected no arguments for \"reveal_locals\" call", + 
"comment": "{Locked='reveal_locals'}" + }, + "revealLocalsNone": { + "message": "No locals in this scope", + "comment": "{Locked='locals'}" + }, + "revealTypeArgs": { + "message": "Expected a single positional argument for \"reveal_type\" call", + "comment": "{Locked='reveal_type'}" + }, + "revealTypeExpectedTextArg": { + "message": "The \"expected_text\" argument for function \"reveal_type\" must be a str literal value", + "comment": "{Locked='expected_text','reveal_type','str'}" + }, + "revealTypeExpectedTextMismatch": "Type text mismatch; expected \"{expected}\" but received \"{received}\"", + "revealTypeExpectedTypeMismatch": "Type mismatch; expected \"{expected}\" but received \"{received}\"", + "selfTypeContext": { + "message": "\"Self\" is not valid in this context", + "comment": "{Locked='Self'}" + }, + "selfTypeMetaclass": { + "message": "\"Self\" cannot be used within a metaclass (a subclass of \"type\")", + "comment": ["{Locked='Self'}", "Metaclasses are a complex concept and it may be best to not localize the term"] + }, + "selfTypeWithTypedSelfOrCls": { + "message": "\"Self\" cannot be used in a function with a `self` or `cls` parameter that has a type annotation other than \"Self\"", + "comment": "{Locked='Self','self','cls'}" + }, + "sentinelBadName": { + "message": "The first argument to Sentinel must be a string literal", + "comment": "{Locked='Sentinel'}" + }, + "sentinelParamCount": { + "message": "Sentinel requires one positional argument", + "comment": "{Locked='Sentinel'}" + }, + "sentinelNameMismatch": { + "message": "Sentinel must be assigned to a variable with the same name", + "comment": "{Locked='Sentinel'}" + }, + "setterGetterTypeMismatch": { + "message": "Property setter value type is not assignable to the getter return type", + "comment": ["{Locked='setter','getter'}", "{StrContains=i'property'}", "'property' is a keyword and should not be localized. 
It is only capitalized here because it is the first word in the sentence"] + }, + "singleOverload": "\"{name}\" is marked as overload, but additional overloads are missing", + "slotsAttributeError": { + "message": "\"{name}\" is not specified in __slots__", + "comment": "{Locked='__slots__'}" + }, + "slotsClassVarConflict": { + "message": "\"{name}\" conflicts with instance variable declared in __slots__", + "comment": "{Locked='__slots__'}" + }, + "starPatternInAsPattern": { + "message": "Star pattern cannot be used with \"as\" target", + "comment": ["{Locked='as'}", "Star pattern refers to the use of the * (star) character to represent a variable length pattern match"] + }, + "starPatternInOrPattern": { + "message": "Star pattern cannot be ORed within other patterns", + "comment": ["Star pattern refers to the use of the * (star) character to represent a variable length pattern match", "'ORed' means joined together with a binary 'or' operation"] + }, + "starStarWildcardNotAllowed": "** cannot be used with wildcard \"_\"", + "staticClsSelfParam": { + "message": "Static methods should not take a \"self\" or \"cls\" parameter", + "comment": "{Locked='self','cls'}" + }, + "stringNonAsciiBytes": { + "message": "Non-ASCII character not allowed in bytes string literal", + "comment": "{Locked='ASCII'}" + }, + "stringNotSubscriptable": "String expression cannot be subscripted in type expression; enclose entire expression in quotes", + "stringUnsupportedEscape": "Unsupported escape sequence in string literal", + "stringUnterminated": "String literal is unterminated", + "stubFileMissing": { + "message": "Stub file not found for \"{importName}\"", + "comment": "{StrContains=i'stub'}" + }, + "stubUsesGetAttr": { + "message": "Type stub file is incomplete; \"__getattr__\" obscures type errors for module", + "comment": ["{Locked='__getattr__'}", "{StrContains=i'stub'}"] + }, + "sublistParamsIncompatible": { + "message": "Sublist parameters are not supported in Python 3.x", + 
"comment": "{StrContains=i'sublist'}" + }, + "superCallArgCount": { + "message": "Expected no more than two arguments to \"super\" call", + "comment": "{Locked='super'}" + }, + "superCallFirstArg": { + "message": "Expected class type as first argument to \"super\" call but received \"{type}\"", + "comment": "{Locked='super'}" + }, + "superCallSecondArg": { + "message": "Second argument to \"super\" call must be object or class that derives from \"{type}\"", + "comment": "{Locked='super'}" + }, + "superCallZeroArgForm": { + "message": "Zero-argument form of \"super\" call is valid only within a method", + "comment": "{Locked='super'}" + }, + "superCallZeroArgFormStaticMethod": { + "message": "Zero-argument form of \"super\" call is not valid within a static method", + "comment": "{Locked='super'}" + }, + "symbolIsPossiblyUnbound": "\"{name}\" is possibly unbound", + "symbolIsUnbound": "\"{name}\" is unbound", + "symbolIsUndefined": "\"{name}\" is not defined", + "symbolOverridden": "\"{name}\" overrides symbol of same name in class \"{className}\"", + "templateStringBytes": { + "message": "Template string literals (t-strings) cannot be binary", + "comment": "'t-string' is the common English slang for a Python template string" + }, + "templateStringIllegal": { + "message": "Template string literals (t-strings) require Python 3.14 or newer", + "comment": "'t-string' is the common English slang for a Python template string" + }, + "templateStringUnicode": { + "message": "Template string literals (t-strings) cannot be unicode", + "comment": "'t-string' is the common English slang for a Python template string" + }, + "ternaryNotAllowed": "Ternary expression not allowed in type expression", + "totalOrderingMissingMethod": { + "message": "Class must define one of \"__lt__\", \"__le__\", \"__gt__\", or \"__ge__\" to use total_ordering", + "comment": "{Locked='__lt__','__le__','__gt__','__ge__','total_ordering'}" + }, + "trailingCommaInFromImport": "Trailing comma not 
allowed without surrounding parentheses", + "tryWithoutExcept": { + "message": "Try statement must have at least one except or finally clause", + "comment": ["{Locked='except','finally'}", "{StrContains=i'try'}", "'try' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "tupleAssignmentMismatch": { + "message": "Expression with type \"{type}\" cannot be assigned to target tuple", + "comment": "{Locked='tuple'}" + }, + "tupleInAnnotation": { + "message": "Tuple expression not allowed in type expression", + "comment": ["{StrContains=i'tuple'}", "'tuple' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "tupleIndexOutOfRange": "Index {index} is out of range for type {type}", + "typeAliasIllegalExpressionForm": "Invalid expression form for type alias definition", + "typeAliasIsRecursiveDirect": "Type alias \"{name}\" cannot use itself in its definition", + "typeAliasNotInModuleOrClass": { + "message": "A TypeAlias can be defined only within a module or class scope", + "comment": "{Locked='TypeAlias'}" + }, + "typeAliasRedeclared": { + "message": "\"{name}\" is declared as a TypeAlias and can be assigned only once", + "comment": "{Locked='TypeAlias'}" + }, + "typeAliasStatementBadScope": { + "message": "A type statement can be used only within a module or class scope", + "comment": "{Locked='type'}" + }, + "typeAliasStatementIllegal": "Type alias statement requires Python 3.12 or newer", + "typeAliasTypeBadScope": "A type alias can be defined only within a module or class scope", + "typeAliasTypeBaseClass": { + "message": "A type alias defined in a \"type\" statement cannot be used as a base class", + "comment": "{Locked='\"type\"'}" + }, + "typeAliasTypeMustBeAssigned": { + "message": "TypeAliasType must be assigned to a variable with the same name as the type alias", + "comment": "{Locked='TypeAliasType'}" + }, + 
"typeAliasTypeNameArg": { + "message": "First argument to TypeAliasType must be a string literal representing the name of the type alias", + "comment": "{Locked='TypeAliasType'}" + }, + "typeAliasTypeNameMismatch": "Name of type alias must match the name of the variable to which it is assigned", + "typeAliasTypeParamInvalid": { + "message": "Type parameter list must be a tuple containing only TypeVar, TypeVarTuple, or ParamSpec", + "comment": "{Locked='tuple','TypeVar','TypeVarTuple','ParamSpec'}" + }, + "typeAnnotationCall": "Call expression not allowed in type expression", + "typeAnnotationVariable": "Variable not allowed in type expression", + "typeAnnotationWithCallable": { + "message": "Type argument for \"type\" must be a class; callables are not supported", + "comment": ["{Locked='type'}", "'callables' are objects that can be called like a function"] + }, + "typeArgListExpected": { + "message": "Expected ParamSpec, ellipsis, or list of types", + "comment": "{Locked='ParamSpec','list'}" + }, + "typeArgListNotAllowed": { + "message": "List expression not allowed for this type argument", + "comment": ["{StrContains=i'list'}", "'list' is a keyword and should not be localized. 
It is only capitalized here because it is the first word in the sentence"] + }, + "typeArgsExpectingNone": "Expected no type arguments for class \"{name}\"", + "typeArgsMismatchOne": "Expected one type argument but received {received}", + "typeArgsMissingForAlias": { + "message": "Expected type arguments for generic type alias \"{name}\"", + "comment": "A generic type is a parameterized type, for example a container where the generic type parameter specifies the type of elements in the container" + }, + "typeArgsMissingForClass": { + "message": "Expected type arguments for generic class \"{name}\"", + "comment": "A generic type is a parameterized type, for example a container where the generic type parameter specifies the type of elements in the container" + }, + "typeArgsTooFew": "Too few type arguments provided for \"{name}\"; expected {expected} but received {received}", + "typeArgsTooMany": "Too many type arguments provided for \"{name}\"; expected {expected} but received {received}", + "typeAssignmentMismatch": "Type \"{sourceType}\" is not assignable to declared type \"{destType}\"", + "typeAssignmentMismatchWildcard": "Import symbol \"{name}\" has type \"{sourceType}\", which is not assignable to declared type \"{destType}\"", + "typeCallNotAllowed": { + "message": "type() call should not be used in type expression", + "comment": "{Locked='type()'}" + }, + "typeCheckOnly": { + "message": "\"{name}\" is marked as @type_check_only and can be used only in type annotations", + "comment": "{Locked='@type_check_only'}" + }, + "typeCommentDeprecated": { + "message": "Use of type comments is deprecated; use type annotation instead", + "comment": "{Locked='type'}" + }, + "typeExpectedClass": "Expected class but received \"{type}\"", + "typeFormArgs": { + "message": "\"TypeForm\" accepts a single positional argument", + "comment": "{Locked='TypeForm'}" + }, + "typeGuardArgCount": { + "message": "Expected a single type argument after \"TypeGuard\" or \"TypeIs\"", + 
"comment": "{Locked='TypeGuard','TypeIs'}" + }, + "typeGuardParamCount": "User-defined type guard functions and methods must have at least one input parameter", + "typeIsReturnType": { + "message": "Return type of TypeIs (\"{returnType}\") is not consistent with value parameter type (\"{type}\")", + "comment": "{Locked='TypeIs'}" + }, + "typeNotAwaitable": { + "message": "\"{type}\" is not awaitable", + "comment": "{Locked='awaitable'}" + }, + "typeNotIntantiable": "\"{type}\" cannot be instantiated", + "typeNotIterable": "\"{type}\" is not iterable", + "typeNotSpecializable": "Could not specialize type \"{type}\"", + "typeNotSubscriptable": "Object of type \"{type}\" is not subscriptable", + "typeNotSupportBinaryOperator": "Operator \"{operator}\" not supported for types \"{leftType}\" and \"{rightType}\"", + "typeNotSupportBinaryOperatorBidirectional": "Operator \"{operator}\" not supported for types \"{leftType}\" and \"{rightType}\" when expected type is \"{expectedType}\"", + "typeNotSupportUnaryOperator": "Operator \"{operator}\" not supported for type \"{type}\"", + "typeNotSupportUnaryOperatorBidirectional": "Operator \"{operator}\" not supported for type \"{type}\" when expected type is \"{expectedType}\"", + "typeNotUsableWith": "Object of type \"{type}\" cannot be used with \"with\" because it does not correctly implement {method}", + "typeNotUsableWithAsync": { + "message": "Object of type \"{type}\" cannot be used with \"async with\" because it does not correctly implement {method}", + "comment": ["{Locked='async','with'}"] + }, + "typeParameterBoundNotAllowed": { + "message": "Bound or constraint cannot be used with a variadic type parameter or ParamSpec", + "comment": ["{Locked='ParamSpec'}", "'variadic' means that it accepts a variable number of arguments"] + }, + "typeParameterConstraintTuple": "Type parameter constraint must be a tuple of two or more types", + "typeParameterExistingTypeParameter": "Type parameter \"{name}\" is already in use", + 
"typeParameterNotDeclared": "Type parameter \"{name}\" is not included in the type parameter list for \"{container}\"", + "typeParametersMissing": "At least one type parameter must be specified", + "typePartiallyUnknown": "Type of \"{name}\" is partially unknown", + "typeUnknown": "Type of \"{name}\" is unknown", + "typeVarAssignedName": { + "message": "TypeVar must be assigned to a variable named \"{name}\"", + "comment": "{Locked='TypeVar'}" + }, + "typeVarAssignmentMismatch": "Type \"{type}\" cannot be assigned to type variable \"{name}\"", + "typeVarBoundAndConstrained": { + "message": "TypeVar cannot be both bound and constrained", + "comment": "{Locked='TypeVar'}" + }, + "typeVarBoundGeneric": { + "message": "TypeVar bound type cannot be generic", + "comment": ["{Locked='TypeVar'}", "A generic type is a parameterized type, for example a container where the generic type parameter specifies the type of elements in the container"] + }, + "typeVarConstraintGeneric": { + "message": "TypeVar constraint type cannot be generic", + "comment": ["{Locked='TypeVar'}", "A generic type is a parameterized type, for example a container where the generic type parameter specifies the type of elements in the container"] + }, + "typeVarDefaultBoundMismatch": { + "message": "TypeVar default type must be a subtype of the bound type", + "comment": "{Locked='TypeVar'}" + }, + "typeVarDefaultConstraintMismatch": { + "message": "TypeVar default type must be one of the constrained types", + "comment": "{Locked='TypeVar'}" + }, + "typeVarDefaultIllegal": "Type variable default types require Python 3.13 or newer", + "typeVarDefaultInvalidTypeVar": "Type parameter \"{name}\" has a default type that refers to one or more type variables that are out of scope", + "typeVarFirstArg": { + "message": "Expected name of TypeVar as first argument", + "comment": "{Locked='TypeVar'}" + }, + "typeVarInvalidForMemberVariable": "Attribute type cannot use type variable \"{name}\" scoped to local method", 
+ "typeVarNoMember": { + "message": "TypeVar \"{type}\" has no attribute \"{name}\"", + "comment": "{Locked='TypeVar'}" + }, + "typeVarNotSubscriptable": { + "message": "TypeVar \"{type}\" is not subscriptable", + "comment": "{Locked='TypeVar'}" + }, + "typeVarNotUsedByOuterScope": "Type variable \"{name}\" has no meaning in this context", + "typeVarPossiblyUnsolvable": "Type variable \"{name}\" may go unsolved if caller supplies no argument for parameter \"{param}\"", + "typeVarSingleConstraint": { + "message": "TypeVar must have at least two constrained types", + "comment": "{Locked='TypeVar'}" + }, + "typeVarTupleConstraints": { + "message": "TypeVarTuple cannot have value constraints", + "comment": "{Locked='TypeVarTuple'}" + }, + "typeVarTupleContext": { + "message": "TypeVarTuple is not allowed in this context", + "comment": "{Locked='TypeVarTuple'}" + }, + "typeVarTupleDefaultNotUnpacked": { + "message": "TypeVarTuple default type must be an unpacked tuple or TypeVarTuple", + "comment": "{Locked='TypeVarTuple','tuple'}" + }, + "typeVarTupleMustBeUnpacked": { + "message": "Unpack operator is required for TypeVarTuple value", + "comment": "{Locked='TypeVarTuple'}" + }, + "typeVarTupleUnknownParam": { + "message": "\"{name}\" is unknown parameter to TypeVarTuple", + "comment": "{Locked='TypeVarTuple'}" + }, + "typeVarUnknownParam": { + "message": "\"{name}\" is unknown parameter to TypeVar", + "comment": "{Locked='TypeVar'}" + }, + "typeVarUsedByOuterScope": { + "message": "TypeVar \"{name}\" is already in use by an outer scope", + "comment": "{Locked='TypeVar'}" + }, + "typeVarUsedOnlyOnce": { + "message": "TypeVar \"{name}\" appears only once in generic function signature", + "comment": ["{Locked='TypeVar'}", "A generic type is a parameterized type, for example a container where the generic type parameter specifies the type of elements in the container"] + }, + "typeVarVariance": { + "message": "TypeVar cannot be both covariant and contravariant", + 
"comment": "{Locked='TypeVar'}" + }, + "typeVarWithDefaultFollowsVariadic": { + "message": "TypeVar \"{typeVarName}\" has a default value and cannot follow TypeVarTuple \"{variadicName}\"", + "comment": "{Locked='TypeVar','TypeVarTuple'}" + }, + "typeVarWithoutDefault": "\"{name}\" cannot appear after \"{other}\" in type parameter list because it has no default type", + "typeVarsNotInGenericOrProtocol": { + "message": "Generic[] or Protocol[] must include all type variables", + "comment": "{Locked='Generic[]','Protocol[]'}" + }, + "typedDictAccess": { + "message": "Could not access item in TypedDict", + "comment": "{Locked='TypedDict'}" + }, + "typedDictAssignedName": { + "message": "TypedDict must be assigned to a variable named \"{name}\"", + "comment": "{Locked='TypedDict'}" + }, + "typedDictBadVar": { + "message": "TypedDict classes can contain only type annotations", + "comment": "{Locked='TypedDict'}" + }, + "typedDictBaseClass": { + "message": "All base classes for TypedDict classes must also be TypedDict classes", + "comment": "{Locked='TypedDict'}" + }, + "typedDictBoolParam": { + "message": "Expected \"{name}\" parameter to have a value of True or False", + "comment": "{Locked='True','False'}" + }, + "typedDictClosedExtras": { + "message": "Base class \"{name}\" is a TypedDict that limits the type of extra items to type \"{type}\"", + "comment": "{Locked='closed','TypedDict'}" + }, + "typedDictClosedNoExtras": { + "message": "Base class \"{name}\" is a closed TypedDict; extra items are not allowed", + "comment": "{Locked='closed','TypedDict'}" + }, + "typedDictClosedFalseNonOpenBase": { + "message": "Base class \"{name}\" is not an open TypedDict; closed=False is not allowed", + "comment": "{Locked='TypedDict','closed'}" + }, + "typedDictDelete": { + "message": "Could not delete item in TypedDict", + "comment": "{Locked='TypedDict'}" + }, + "typedDictEmptyName": { + "message": "Names within a TypedDict cannot be empty", + "comment": "{Locked='TypedDict'}" 
+ }, + "typedDictEntryName": "Expected string literal for dictionary entry name", + "typedDictEntryUnique": "Names within a dictionary must be unique", + "typedDictExtraArgs": { + "message": "Extra TypedDict arguments not supported", + "comment": "{Locked='TypedDict'}" + }, + "typedDictExtraItemsClosed": { + "message": "TypedDict can use either \"closed\" or \"extra_items\" but not both", + "comment": "{Locked='TypedDict','closed','extra_items'}" + }, + "typedDictFieldNotRequiredRedefinition": { + "message": "TypedDict item \"{name}\" cannot be redefined as NotRequired", + "comment": "{Locked='TypedDict','NotRequired'}" + }, + "typedDictFieldReadOnlyRedefinition": { + "message": "TypedDict item \"{name}\" cannot be redefined as ReadOnly", + "comment": "{Locked='TypedDict','ReadOnly'}" + }, + "typedDictFieldRequiredRedefinition": { + "message": "TypedDict item \"{name}\" cannot be redefined as Required", + "comment": "{Locked='TypedDict','Required'}" + }, + "typedDictFirstArg": { + "message": "Expected TypedDict class name as first argument", + "comment": "{Locked='TypedDict'}" + }, + "typedDictInClassPattern": { + "message": "TypedDict class not allowed in class pattern", + "comment": "{Locked='TypedDict'}" + }, + "typedDictInitsubclassParameter": { + "message": "TypedDict does not support __init_subclass__ parameter \"{name}\"", + "comment": "{Locked='TypedDict','__init_subclass__'}" + }, + "typedDictNotAllowed": { + "message": "\"TypedDict\" cannot be used in this context", + "comment": "{Locked='TypedDict'}" + }, + "typedDictSecondArgDict": { + "message": "Expected dict or keyword parameter as second parameter", + "comment": "{Locked='dict'}" + }, + "typedDictSecondArgDictEntry": "Expected simple dictionary entry", + "typedDictSet": { + "message": "Could not assign item in TypedDict", + "comment": "{Locked='TypedDict'}" + }, + "unaccessedClass": "Class \"{name}\" is not accessed", + "unaccessedFunction": "Function \"{name}\" is not accessed", + 
"unaccessedImport": "Import \"{name}\" is not accessed", + "unaccessedSymbol": "\"{name}\" is not accessed", + "unaccessedVariable": "Variable \"{name}\" is not accessed", + "unannotatedFunctionSkipped": "Analysis of function \"{name}\" is skipped because it is unannotated", + "unaryOperationNotAllowed": "Unary operator not allowed in type expression", + "unexpectedAsyncToken": { + "message": "Expected \"def\", \"with\" or \"for\" to follow \"async\"", + "comment": "{Locked='def','with','for','async'}" + }, + "unexpectedEof": "Unexpected EOF", + "unexpectedExprToken": "Unexpected token at end of expression", + "unexpectedIndent": "Unexpected indentation", + "unexpectedUnindent": "Unindent not expected", + "unhashableDictKey": "Dictionary key must be hashable", + "unhashableSetEntry": { + "message": "Set entry must be hashable", + "comment": ["{StrContains=i'set'}", "'set' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "uninitializedAbstractVariables": { + "message": "Variables defined in abstract base class are not initialized in final class \"{classType}\"", + "comment": "{Locked='final'}" + }, + "uninitializedInstanceVariable": { + "message": "Instance variable \"{name}\" is not initialized in the class body or __init__ method", + "comment": "{Locked='__init__'}" + }, + "unionForwardReferenceNotAllowed": { + "message": "Union syntax cannot be used with string operand; use quotes around entire expression", + "comment": "{Locked='Union'}" + }, + "unionSyntaxIllegal": { + "message": "Alternative syntax for unions requires Python 3.10 or newer", + "comment": "'unions' as in the mathematical set theory term" + }, + "unionTypeArgCount": { + "message": "Union requires two or more type arguments", + "comment": "{Locked='Union'}" + }, + "unionUnpackedTuple": { + "message": "Union cannot include an unpacked tuple", + "comment": "{Locked='Union','tuple'}" + }, + "unionUnpackedTypeVarTuple": { + 
"message": "Union cannot include an unpacked TypeVarTuple", + "comment": "{Locked='Union','TypeVarTuple'}" + }, + "unnecessaryCast": { + "message": "Unnecessary \"cast\" call; type is already \"{type}\"", + "comment": "{Locked='cast'}" + }, + "unnecessaryIsInstanceAlways": { + "message": "Unnecessary isinstance call; \"{testType}\" is always an instance of \"{classType}\"", + "comment": "{Locked='isinstance'}" + }, + "unnecessaryIsSubclassAlways": { + "message": "Unnecessary issubclass call; \"{testType}\" is always a subclass of \"{classType}\"", + "comment": "{Locked='issubclass'}" + }, + "unnecessaryIsInstanceNever": { + "message": "Unnecessary isinstance call; \"{testType}\" is never an instance of \"{classType}\"", + "comment": "{Locked='isinstance'}" + }, + "unnecessaryIsSubclassNever": { + "message": "Unnecessary issubclass call; \"{testType}\" is never a subclass of \"{classType}\"", + "comment": "{Locked='issubclass'}" + }, + "unnecessaryPyrightIgnore": { + "message": "Unnecessary \"# pyright: ignore\" comment", + "comment": "{Locked='# pyright: ignore'}" + }, + "unnecessaryPyrightIgnoreRule": { + "message": "Unnecessary \"# pyright: ignore\" rule: \"{name}\"", + "comment": "{Locked='# pyright: ignore'}" + }, + "unnecessaryTypeIgnore": { + "message": "Unnecessary \"# type: ignore\" comment", + "comment": "{Locked='# type: ignore'}" + }, + "unpackArgCount": { + "message": "Expected a single type argument after \"Unpack\"", + "comment": "{Locked='Unpack'}" + }, + "unpackExpectedTypeVarTuple": { + "message": "Expected TypeVarTuple or tuple as type argument for Unpack", + "comment": "{Locked='TypeVarTuple','tuple','Unpack'}" + }, + "unpackExpectedTypedDict": { + "message": "Expected TypedDict type argument for Unpack", + "comment": "{Locked='TypedDict','Unpack'}" + }, + "unpackIllegalInComprehension": { + "message": "Unpack operation not allowed in comprehension", + "comment": "A comprehension is a 'set of looping and filtering instructions' applied to a 
collection to generate a new collection; the word may not be translatable" + }, + "unpackInAnnotation": "Unpack operator not allowed in type expression", + "unpackInDict": "Unpack operation not allowed in dictionaries", + "unpackInSet": { + "message": "Unpack operator not allowed within a set", + "comment": "{Locked='set'}" + }, + "unpackNotAllowed": { + "message": "Unpack is not allowed in this context", + "comment": "{Locked='Unpack'}" + }, + "unpackOperatorNotAllowed": "Unpack operation is not allowed in this context", + "unpackTuplesIllegal": { + "message": "Unpack operation not allowed in tuples prior to Python 3.8", + "comment": "'tuple' is a keyword and should not be localized, but here it is pluralized" + }, + "unpackedArgInTypeArgument": "Unpacked arguments cannot be used in this context", + "unpackedArgWithVariadicParam": { + "message": "Unpacked argument cannot be used for TypeVarTuple parameter", + "comment": "{Locked='TypeVarTuple'}" + }, + "unpackedDictArgumentNotMapping": { + "message": "Argument expression after ** must be a mapping with a \"str\" key type", + "comment": "{Locked='str'}" + }, + "unpackedDictSubscriptIllegal": "Dictionary unpack operator in subscript is not allowed", + "unpackedSubscriptIllegal": "Unpack operator in subscript requires Python 3.11 or newer", + "unpackedTypeVarTupleExpected": { + "message": "Expected unpacked TypeVarTuple; use Unpack[{name1}] or *{name2}", + "comment": "{Locked='TypeVarTuple','Unpack[{name1}]','*{name2}'}" + }, + "unpackedTypedDictArgument": { + "message": "Unable to match unpacked TypedDict argument to parameters", + "comment": "{Locked='TypedDict'}" + }, + "unreachableCodeCondition": "Code is not analyzed because condition is statically evaluated as false", + "unreachableCodeStructure": "Code is structurally unreachable", + "unreachableCodeType": "Type analysis indicates code is unreachable", + "unreachableExcept": { + "message": "Except clause is unreachable because exception is already handled", + 
"comment": ["{StrContains=i'except'}", "'except' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "unsupportedDunderAllOperation": { + "message": "Operation on \"__all__\" is not supported, so exported symbol list may be incorrect", + "comment": "{Locked='__all__'}" + }, + "unusedCallResult": "Result of call expression is of type \"{type}\" and is not used; assign to variable \"_\" if this is intentional", + "unusedCoroutine": { + "message": "Result of async function call is not used; use \"await\" or assign result to variable", + "comment": "{Locked='async'}" + }, + "unusedExpression": "Expression value is unused", + "varAnnotationIllegal": { + "message": "Type annotations for variables requires Python 3.6 or newer; use type comment for compatibility with previous versions", + "comment": "{Locked='type'}" + }, + "variableFinalOverride": { + "message": "Variable \"{name}\" is marked Final and overrides non-Final variable of same name in class \"{className}\"", + "comment": "{Locked='Final'}" + }, + "variadicTypeArgsTooMany": { + "message": "Type argument list can have at most one unpacked TypeVarTuple or tuple", + "comment": "{Locked='TypeVarTuple','tuple'}" + }, + "variadicTypeParamTooManyAlias": { + "message": "Type alias can have at most one TypeVarTuple type parameter but received multiple ({names})", + "comment": "{Locked='TypeVarTuple'}" + }, + "variadicTypeParamTooManyClass": { + "message": "Generic class can have at most one TypeVarTuple type parameter but received multiple ({names})", + "comment": ["{Locked='TypeVarTuple'}", "A generic type is a parameterized type, for example a container where the generic type parameter specifies the type of elements in the container"] + }, + "walrusIllegal": "Operator \":=\" requires Python 3.8 or newer", + "walrusNotAllowed": "Operator \":=\" is not allowed in this context without surrounding parentheses", + "wildcardInFunction": { + "message": 
"Wildcard import not allowed within a class or function", + "comment": "{Locked='import'}" + }, + "wildcardLibraryImport": { + "message": "Wildcard import from a library not allowed", + "comment": "{Locked='import'}" + }, + "wildcardPatternTypePartiallyUnknown": "Type captured by wildcard pattern is partially unknown", + "wildcardPatternTypeUnknown": "Type captured by wildcard pattern is unknown", + "yieldFromIllegal": { + "message": "Use of \"yield from\" requires Python 3.3 or newer", + "comment": "{Locked='yield from'}" + }, + "yieldFromOutsideAsync": { + "message": "\"yield from\" not allowed in an async function", + "comment": "{Locked='yield from','async'}" + }, + "yieldOutsideFunction": { + "message": "\"yield\" not allowed outside of a function or lambda", + "comment": "{Locked='yield'}" + }, + "yieldWithinComprehension": { + "message": "\"yield\" not allowed inside a comprehension", + "comment": ["{Locked='yield'}", "A comprehension is a 'set of looping and filtering instructions' applied to a collection to generate a new collection; the word may not be translatable"] + }, + "zeroCaseStatementsFound": { + "message": "Match statement must include at least one case statement", + "comment": ["{Locked='case'}", "{StrContains=i'match'}", "'match' is a keyword and should not be localized. 
It is only capitalized here because it is the first word in the sentence"] + }, + "zeroLengthTupleNotAllowed": { + "message": "Zero-length tuple is not allowed in this context", + "comment": "{Locked='tuple'}" + } + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": { + "message": "\"Annotated\" special form cannot be used with instance and class checks", + "comment": "{Locked='Annotated'}" + }, + "argParam": "Argument corresponds to parameter \"{paramName}\"", + "argParamFunction": "Argument corresponds to parameter \"{paramName}\" in function \"{functionName}\"", + "argsParamMissing": "Parameter \"*{paramName}\" has no corresponding parameter", + "argsPositionOnly": "Position-only parameter mismatch; expected {expected} but received {received}", + "argumentType": "Argument type is \"{type}\"", + "argumentTypes": "Argument types: ({types})", + "assignToNone": { + "message": "Type is not assignable to \"None\"", + "comment": "{Locked='None'}" + }, + "asyncHelp": { + "message": "Did you mean \"async with\"?", + "comment": "{Locked='async with'}" + }, + "baseClassIncompatible": "Base class \"{baseClass}\" is incompatible with type \"{type}\"", + "baseClassIncompatibleSubclass": "Base class \"{baseClass}\" derives from \"{subclass}\" which is incompatible with type \"{type}\"", + "baseClassOverriddenType": "Base class \"{baseClass}\" provides type \"{type}\", which is overridden", + "baseClassOverridesType": "Base class \"{baseClass}\" overrides with type \"{type}\"", + "bytesTypePromotions": { + "message": "Set disableBytesTypePromotions to false to enable type promotion behavior for \"bytearray\" and \"memoryview\"", + "comment": "{Locked='disableBytesTypePromotions','false','bytearray','memoryview'}" + }, + "conditionalRequiresBool": { + "message": "Method __bool__ for type \"{operandType}\" returns type \"{boolReturnType}\" rather than \"bool\"", + "comment": "{Locked='__bool__'}" + }, + "dataClassFieldLocation": "Field declaration", + "dataClassFrozen": 
"\"{name}\" is frozen", + "dataProtocolUnsupported": "\"{name}\" is a data protocol", + "descriptorAccessBindingFailed": { + "message": "Failed to bind method \"{name}\" for descriptor class \"{className}\"", + "comment": "Binding is the process through which Pyright determines what object a name refers to" + }, + "descriptorAccessCallFailed": "Failed to call method \"{name}\" for descriptor class \"{className}\"", + "finalMethod": { + "message": "Final method", + "comment": "{Locked='Final'}" + }, + "functionParamDefaultMissing": "Parameter \"{name}\" is missing default argument", + "functionParamName": "Parameter name mismatch: \"{destName}\" versus \"{srcName}\"", + "functionParamPositionOnly": "Position-only parameter mismatch; parameter \"{name}\" is not position-only", + "functionReturnTypeMismatch": "Function return type \"{sourceType}\" is incompatible with type \"{destType}\"", + "functionTooFewParams": "Function accepts too few positional parameters; expected {expected} but received {received}", + "functionTooManyParams": "Function accepts too many positional parameters; expected {expected} but received {received}", + "genericClassNotAllowed": { + "message": "Generic type with type arguments not allowed for instance or class checks", + "comment": "A generic type is a parameterized type, for example a container where the generic type parameter specifies the type of elements in the container" + }, + "incompatibleDeleter": { + "message": "Property deleter method is incompatible", + "comment": ["{Locked='deleter'}", "{StrContains=i'property'}", "'property' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "incompatibleGetter": { + "message": "Property getter method is incompatible", + "comment": ["{Locked='getter'}", "{StrContains=i'property'}", "'property' is a keyword and should not be localized. 
It is only capitalized here because it is the first word in the sentence"] + }, + "incompatibleSetter": { + "message": "Property setter method is incompatible", + "comment": ["{Locked='setter'}", "{StrContains=i'property'}", "'property' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "initMethodLocation": { + "message": "The __init__ method is defined in class \"{type}\"", + "comment": "{Locked='__init__'}" + }, + "initMethodSignature": { + "message": "Signature of __init__ is \"{type}\"", + "comment": "{Locked='__init__'}" + }, + "initSubclassLocation": { + "message": "The __init_subclass__ method is defined in class \"{name}\"", + "comment": "{Locked='__init_subclass__'}" + }, + "invariantSuggestionDict": { + "message": "Consider switching from \"dict\" to \"Mapping\" which is covariant in the value type", + "comment": "{Locked='dict','Mapping'}" + }, + "invariantSuggestionList": { + "message": "Consider switching from \"list\" to \"Sequence\" which is covariant", + "comment": "{Locked='list','Sequence'}" + }, + "invariantSuggestionSet": { + "message": "Consider switching from \"set\" to \"Container\" which is covariant", + "comment": "{Locked='set','Container'}" + }, + "isinstanceClassNotSupported": "\"{type}\" is not supported for instance and class checks", + "keyNotRequired": "\"{name}\" is not a required key in \"{type}\", so access may result in runtime exception", + "keyReadOnly": "\"{name}\" is a read-only key in \"{type}\"", + "keyRequiredDeleted": "\"{name}\" is a required key and cannot be deleted", + "keyUndefined": "\"{name}\" is not a defined key in \"{type}\"", + "kwargsParamMissing": "Parameter \"**{paramName}\" has no corresponding parameter", + "listAssignmentMismatch": "Type \"{type}\" is incompatible with target list", + "literalAssignmentMismatch": "\"{sourceType}\" is not assignable to type \"{destType}\"", + "literalNotAllowed": { + "message": "\"Literal\" special 
form cannot be used with instance and class checks", + "comment": "{Locked='Literal'}" + }, + "matchIsNotExhaustiveHint": { + "message": "If exhaustive handling is not intended, add \"case _: pass\"", + "comment": "{Locked='case _: pass'}" + }, + "matchIsNotExhaustiveType": "Unhandled type: \"{type}\"", + "memberAssignment": "Expression of type \"{type}\" cannot be assigned to attribute \"{name}\" of class \"{classType}\"", + "memberIsAbstract": "\"{type}.{name}\" is not implemented", + "memberIsAbstractMore": { + "message": "and {count} more...", + "comment": "{StrEnds='...'}" + }, + "memberIsClassVarInProtocol": { + "message": "\"{name}\" is defined as a ClassVar in protocol", + "comment": "{Locked='ClassVar'}" + }, + "memberIsInitVar": { + "message": "\"{name}\" is an init-only field", + "comment": "{Locked='init-only'}" + }, + "memberIsInvariant": "\"{name}\" is invariant because it is mutable", + "memberIsNotClassVarInClass": { + "message": "\"{name}\" must be defined as a ClassVar to be compatible with protocol", + "comment": "{Locked='ClassVar'}" + }, + "memberIsNotClassVarInProtocol": { + "message": "\"{name}\" is not defined as a ClassVar in protocol", + "comment": "{Locked='ClassVar'}" + }, + "memberIsNotReadOnlyInProtocol": "\"{name}\" is not read-only in protocol", + "memberIsReadOnlyInProtocol": "\"{name}\" is read-only in protocol", + "memberIsWritableInProtocol": "\"{name}\" is writable in protocol", + "memberSetClassVar": { + "message": "Attribute \"{name}\" cannot be assigned through a class instance because it is a ClassVar", + "comment": "{Locked='ClassVar'}" + }, + "memberTypeMismatch": "\"{name}\" is an incompatible type", + "memberUnknown": "Attribute \"{name}\" is unknown", + "metaclassConflict": { + "message": "Metaclass \"{metaclass1}\" conflicts with \"{metaclass2}\"", + "comment": "Metaclasses are a complex concept and it may be best to not localize the term" + }, + "missingDeleter": { + "message": "Property deleter method is missing", + 
"comment": ["{Locked='deleter'}", "{StrContains=i'property'}", "'property' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "missingGetter": { + "message": "Property getter method is missing", + "comment": ["{Locked='getter'}", "{StrContains=i'property'}", "'property' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "missingSetter": { + "message": "Property setter method is missing", + "comment": ["{Locked='setter'}", "{StrContains=i'property'}", "'property' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "namedParamMissingInDest": "Extra parameter \"{name}\"", + "namedParamMissingInSource": "Missing keyword parameter \"{name}\"", + "namedParamTypeMismatch": "Keyword parameter \"{name}\" of type \"{sourceType}\" is incompatible with type \"{destType}\"", + "namedTupleNotAllowed": { + "message": "NamedTuple cannot be used for instance or class checks", + "comment": "{Locked='NamedTuple'}" + }, + "newMethodLocation": { + "message": "The __new__ method is defined in class \"{type}\"", + "comment": "{Locked='__new__'}" + }, + "newMethodSignature": { + "message": "Signature of __new__ is \"{type}\"", + "comment": "{Locked='__new__'}" + }, + "newTypeClassNotAllowed": { + "message": "Type created with NewType cannot be used with instance and class checks", + "comment": "{Locked='NewType'}" + }, + "noOverloadAssignable": "No overloaded function matches type \"{type}\"", + "noneNotAllowed": { + "message": "None cannot be used for instance or class checks", + "comment": "{Locked='None'}" + }, + "orPatternMissingName": "Missing names: {name}", + "overloadIndex": "Overload {index} is the closest match", + "overloadNotAssignable": "One or more overloads of \"{name}\" is not assignable", + "overloadSignature": "Overload signature is defined here", + 
"overriddenMethod": "Overridden method", + "overriddenSymbol": "Overridden symbol", + "overrideInvariantMismatch": "Override type \"{overrideType}\" is not the same as base type \"{baseType}\"", + "overrideIsInvariant": "Variable is mutable so its type is invariant", + "overrideNoOverloadMatches": "No overload signature in override is compatible with base method", + "overrideNotClassMethod": { + "message": "Base method is declared as a classmethod but override is not", + "comment": "{Locked='classmethod'}" + }, + "overrideNotInstanceMethod": "Base method is declared as an instance method but override is not", + "overrideNotStaticMethod": { + "message": "Base method is declared as a staticmethod but override is not", + "comment": "{Locked='staticmethod'}" + }, + "overrideOverloadNoMatch": "Override does not handle all overloads of base method", + "overrideOverloadOrder": "Overloads for override method must be in the same order as the base method", + "overrideParamKeywordNoDefault": "Keyword parameter \"{name}\" mismatch: base parameter has default argument value, override parameter does not", + "overrideParamKeywordType": "Keyword parameter \"{name}\" type mismatch: base parameter is type \"{baseType}\", override parameter is type \"{overrideType}\"", + "overrideParamName": "Parameter {index} name mismatch: base parameter is named \"{baseName}\", override parameter is named \"{overrideName}\"", + "overrideParamNameExtra": "Parameter \"{name}\" is missing in base", + "overrideParamNameMissing": "Parameter \"{name}\" is missing in override", + "overrideParamNamePositionOnly": "Parameter {index} mismatch: base parameter \"{baseName}\" is keyword parameter, override parameter is position-only", + "overrideParamNoDefault": "Parameter {index} mismatch: base parameter has default argument value, override parameter does not", + "overrideParamType": "Parameter {index} type mismatch: base parameter is type \"{baseType}\", override parameter is type \"{overrideType}\"", + 
"overridePositionalParamCount": "Positional parameter count mismatch; base method has {baseCount}, but override has {overrideCount}", + "overrideReturnType": "Return type mismatch: base method returns type \"{baseType}\", override returns type \"{overrideType}\"", + "overrideType": "Base class defines type as \"{type}\"", + "paramAssignment": "Parameter {index}: type \"{sourceType}\" is incompatible with type \"{destType}\"", + "paramSpecMissingInOverride": { + "message": "ParamSpec parameters are missing in override method", + "comment": "{Locked='ParamSpec'}" + }, + "paramType": "Parameter type is \"{paramType}\"", + "privateImportFromPyTypedSource": "Import from \"{module}\" instead", + "propertyAccessFromProtocolClass": "A property defined within a protocol class cannot be accessed as a class variable", + "propertyMethodIncompatible": { + "message": "Property method \"{name}\" is incompatible", + "comment": ["{StrContains=i'property'}", "'property' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "propertyMethodMissing": { + "message": "Property method \"{name}\" is missing in override", + "comment": ["{StrContains=i'property'}", "'property' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "propertyMissingDeleter": { + "message": "Property \"{name}\" has no defined deleter", + "comment": ["{Locked='deleter'}", "{StrContains=i'property'}", "'property' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "propertyMissingSetter": { + "message": "Property \"{name}\" has no defined setter", + "comment": ["{Locked='setter'}", "{StrContains=i'property'}", "'property' is a keyword and should not be localized. 
It is only capitalized here because it is the first word in the sentence"] + }, + "protocolIncompatible": "\"{sourceType}\" is incompatible with protocol \"{destType}\"", + "protocolMemberMissing": "\"{name}\" is not present", + "protocolRequiresRuntimeCheckable": { + "message": "Protocol class must be @runtime_checkable to be used with instance and class checks", + "comment": "{Locked='Protocol','@runtime_checkable'}" + }, + "protocolSourceIsNotConcrete": "\"{sourceType}\" is not a concrete class type and cannot be assigned to type \"{destType}\"", + "protocolUnsafeOverlap": "Attributes of \"{name}\" have the same names as the protocol", + "pyrightCommentIgnoreTip": { + "message": "Use \"# pyright: ignore[]\" to suppress diagnostics for a single line", + "comment": "{Locked='# pyright: ignore[]'}" + }, + "readOnlyAttribute": "Attribute \"{name}\" is read-only", + "seeClassDeclaration": "See class declaration", + "seeDeclaration": "See declaration", + "seeFunctionDeclaration": "See function declaration", + "seeMethodDeclaration": "See method declaration", + "seeParameterDeclaration": "See parameter declaration", + "seeTypeAliasDeclaration": "See type alias declaration", + "seeVariableDeclaration": "See variable declaration", + "tupleAssignmentMismatch": { + "message": "Type \"{type}\" is incompatible with target tuple", + "comment": "{Locked='tuple'}" + }, + "tupleEntryTypeMismatch": { + "message": "Tuple entry {entry} is incorrect type", + "comment": ["{StrContains=i'tuple'}", "'tuple' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "tupleSizeIndeterminateSrc": { + "message": "Tuple size mismatch; expected {expected} but received indeterminate", + "comment": ["{StrContains=i'tuple'}", "'tuple' is a keyword and should not be localized. 
It is only capitalized here because it is the first word in the sentence"] + }, + "tupleSizeIndeterminateSrcDest": { + "message": "Tuple size mismatch; expected {expected} or more but received indeterminate", + "comment": ["{StrContains=i'tuple'}", "'tuple' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "tupleSizeMismatch": { + "message": "Tuple size mismatch; expected {expected} but received {received}", + "comment": ["{StrContains=i'tuple'}", "'tuple' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "tupleSizeMismatchIndeterminateDest": { + "message": "Tuple size mismatch; expected {expected} or more but received {received}", + "comment": ["{StrContains=i'tuple'}", "'tuple' is a keyword and should not be localized. It is only capitalized here because it is the first word in the sentence"] + }, + "typeAliasInstanceCheck": { + "message": "Type alias created with \"type\" statement cannot be used with instance and class checks", + "comment": "{Locked='type'}" + }, + "typeAssignmentMismatch": "Type \"{sourceType}\" is not assignable to type \"{destType}\"", + "typeBound": "Type \"{sourceType}\" is not assignable to upper bound \"{destType}\" for type variable \"{name}\"", + "typeConstrainedTypeVar": "Type \"{type}\" is not assignable to constrained type variable \"{name}\"", + "typeIncompatible": "\"{sourceType}\" is not assignable to \"{destType}\"", + "typeNotClass": "\"{type}\" is not a class", + "typeNotStringLiteral": "\"{type}\" is not a string literal", + "typeOfSymbol": "Type of \"{name}\" is \"{type}\"", + "typeParamSpec": { + "message": "Type \"{type}\" is incompatible with ParamSpec \"{name}\"", + "comment": "{Locked='ParamSpec'}" + }, + "typeUnsupported": "Type \"{type}\" is unsupported", + "typeVarDefaultOutOfScope": "Type variable \"{name}\" is not in scope", + "typeVarIsContravariant": "Type parameter 
\"{name}\" is contravariant, but \"{sourceType}\" is not a supertype of \"{destType}\"", + "typeVarIsCovariant": "Type parameter \"{name}\" is covariant, but \"{sourceType}\" is not a subtype of \"{destType}\"", + "typeVarIsInvariant": "Type parameter \"{name}\" is invariant, but \"{sourceType}\" is not the same as \"{destType}\"", + "typeVarNotAllowed": { + "message": "TypeVar not allowed for instance or class checks", + "comment": "{Locked='TypeVar'}" + }, + "typeVarTupleRequiresKnownLength": { + "message": "TypeVarTuple cannot be bound to a tuple of unknown length", + "comment": "{Locked='TypeVarTuple','tuple'}" + }, + "typeVarUnnecessarySuggestion": "Use {type} instead", + "typeVarUnsolvableRemedy": "Provide an overload that specifies the return type when the argument is not supplied", + "typeVarsMissing": "Missing type variables: {names}", + "typedDictBaseClass": { + "message": "Class \"{type}\" is not a TypedDict", + "comment": "{Locked='TypedDict'}" + }, + "typedDictClassNotAllowed": { + "message": "TypedDict class not allowed for instance or class checks", + "comment": "{Locked='TypedDict'}" + }, + "typedDictClosedExtraNotAllowed": "Cannot add item \"{name}\"", + "typedDictClosedExtraTypeMismatch": "Cannot add item \"{name}\" with type \"{type}\"", + "typedDictClosedFieldNotReadOnly": { + "message": "Cannot add item \"{name}\" because it must be ReadOnly", + "comment": "{Locked='ReadOnly'}" + }, + "typedDictClosedFieldNotRequired": { + "message": "Cannot add item \"{name}\" because it must be NotRequired", + "comment": "{Locked='NotRequired'}" + }, + "typedDictExtraFieldNotAllowed": "\"{name}\" is not present in \"{type}\"", + "typedDictExtraFieldTypeMismatch": { + "message": "Type of \"{name}\" is incompatible with type of \"extra_items\" in \"{type}\"", + "comment": "{Locked='extra_items'}" + }, + "typedDictFieldMissing": "\"{name}\" is missing from \"{type}\"", + "typedDictFieldNotReadOnly": "\"{name}\" is not read-only in \"{type}\"", + 
"typedDictFieldNotRequired": "\"{name}\" is not required in \"{type}\"", + "typedDictFieldRequired": "\"{name}\" is required in \"{type}\"", + "typedDictFieldTypeMismatch": "Type \"{type}\" is not assignable to item \"{name}\"", + "typedDictFieldUndefined": "\"{name}\" is an undefined item in type \"{type}\"", + "typedDictKeyAccess": { + "message": "Use [\"{name}\"] to reference item in TypedDict", + "comment": "{Locked='TypedDict'}" + }, + "typedDictNotAllowed": { + "message": "TypedDict cannot be used for instance or class checks", + "comment": "{Locked='TypedDict'}" + }, + "unhashableType": "Type \"{type}\" is not hashable", + "uninitializedAbstractVariable": "Instance variable \"{name}\" is defined in abstract base class \"{classType}\" but not initialized", + "unreachableExcept": "\"{exceptionType}\" is a subclass of \"{parentType}\"", + "useDictInstead": { + "message": "Use dict[T1, T2] to indicate a dictionary type", + "comment": "{Locked='dict[T1, T2]'}" + }, + "useListInstead": { + "message": "Use list[T] to indicate a list type or T1 | T2 to indicate a union type", + "comment": "{Locked='list[T]','list','T1 | T2','union'}" + }, + "useTupleInstead": { + "message": "Use tuple[T1, ..., Tn] to indicate a tuple type or T1 | T2 to indicate a union type", + "comment": "{Locked='tuple[T1, ..., Tn]','tuple','T1 | T2','union'}" + }, + "useTypeInstead": { + "message": "Use type[T] instead", + "comment": "{Locked='type[T]'}" + }, + "varianceMismatchForClass": "Variance of type argument \"{typeVarName}\" is incompatible with base class \"{className}\"", + "varianceMismatchForTypeAlias": "Variance of type argument \"{typeVarName}\" is incompatible with \"{typeAliasParam}\"" + }, + "Service": { + "longOperation": "Enumeration of workspace source files is taking a long time. Consider opening a sub-folder instead. 
[Learn more](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.es.json b/python-parser/packages/pyright-internal/src/localization/package.nls.es.json new file mode 100644 index 00000000..cc5cf110 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.es.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "Crear Tipo Stub", + "createTypeStubFor": "Crear Tipo Stub Para \"{moduleName}\"", + "executingCommand": "Ejecutando el comando", + "filesToAnalyzeCount": "{count} archivos para analizar", + "filesToAnalyzeOne": "1 archivo para analizar", + "findingReferences": "Buscando referencias", + "organizeImports": "Organizar Importaciones" + }, + "Completion": { + "autoImportDetail": "Importación automática", + "indexValueDetail": "Valor de índice" + }, + "Diagnostic": { + "abstractMethodInvocation": "No se puede llamar al método \"{method}\" porque es abstracto y no se ha implementado.", + "annotatedMetadataInconsistent": "El tipo de metadatos anotados \"{metadataType}\" no es compatible con el tipo \"{type}\"", + "annotatedParamCountMismatch": "El recuento de anotaciones del parámetro no coincide: se esperaba {expected}, pero se recibió {received}", + "annotatedTypeArgMissing": "Se espera un argumento de tipo y una o más anotaciones para \"Annotated\".", + "annotationBytesString": "Las expresiones de tipo no pueden usar literales de cadena de bytes", + "annotationFormatString": "Las expresiones de tipo no pueden usar literales de cadena de formato (f-strings)", + "annotationNotSupported": "No se admite la anotación de tipo para esta declaración", + "annotationRawString": "Las expresiones de tipo no pueden usar literales de cadena sin formato", + "annotationSpansStrings": "Las expresiones de tipo no pueden abarcar varios literales de cadena", + "annotationStringEscape": "Las expresiones de tipo no pueden contener caracteres de escape", + 
"annotationTemplateString": "Las expresiones de tipo no pueden usar literales de cadena de plantilla (cadenas t)", + "argAssignment": "Argumento de tipo \"{argType}\" no puede ser asignado a parámetro de tipo \"{paramType}\"", + "argAssignmentFunction": "El argumento de tipo \"{argType}\" no puede ser asignado a parámetro de tipo \"{paramType}\" en función \"{functionName}\"", + "argAssignmentParam": "Argumento de tipo \"{argType}\" no puede ser asignado a parámetro \"{paramName}\" de tipo \"{paramType}\"", + "argAssignmentParamFunction": "Argumento de tipo \"{argType}\" no puede ser asignado a parámetro \"{paramName}\" de tipo \"{paramType}\" en función \"{functionName}\"", + "argMissingForParam": "Falta el argumento para el parámetro {name}", + "argMissingForParams": "Faltan argumentos para los parámetros {names}", + "argMorePositionalExpectedCount": "Más argumentos posicionales {expected} esperados", + "argMorePositionalExpectedOne": "Se espera 1 argumento posicional más", + "argPositional": "Argumento posicional esperado", + "argPositionalExpectedCount": "Argumentos posicionales esperados {expected}", + "argPositionalExpectedOne": "Se espera 1 argumento posicional", + "argTypePartiallyUnknown": "El tipo de argumento es parcialmente desconocido", + "argTypeUnknown": "Tipo de argumento desconocido", + "assertAlwaysTrue": "La expresión Assert siempre se evalúa como true", + "assertTypeArgs": "\"assert_type\" espera dos argumentos posicionales", + "assertTypeTypeMismatch": "Error de coincidencia \"assert_type\": se esperaba \"{expected}\" pero se ha recibido \"{received}\"", + "assignmentExprComprehension": "El destino de la expresión de asignación \"{name}\" no puede usar el mismo nombre que la comprensión para el destino", + "assignmentExprContext": "La expresión de asignación debe estar dentro de un módulo, función o lambda", + "assignmentExprInSubscript": "Las expresiones de asignación dentro de un subíndice solo se admiten en Python 3.10 y versiones 
posteriores.", + "assignmentInProtocol": "Las variables de instancia o clase dentro de una clase Protocol deben declararse explícitamente en el cuerpo de la clase.", + "assignmentTargetExpr": "La expresión no puede ser objetivo de asignación", + "asyncNotInAsyncFunction": "No se permite el uso de \"async\" fuera de la función async", + "awaitIllegal": "El uso de \"await\" requiere Python 3.5 o posterior.", + "awaitNotAllowed": "Las expresiones de tipo no pueden usar \"await\"", + "awaitNotInAsync": "\"await\" solo se permite dentro de una función async", + "backticksIllegal": "En Python 3.x no se admiten expresiones rodeadas de acentos graves; utilice repr en su lugar.", + "baseClassCircular": "La clase no se puede derivar de sí misma", + "baseClassFinal": "La clase base \"{type}\" está marcada como final y no puede ser subclasificada", + "baseClassIncompatible": "Las clases base de {type} son mutuamente incompatibles", + "baseClassInvalid": "El argumento de la clase debe ser una clase base", + "baseClassMethodTypeIncompatible": "Las clases base para la clase \"{classType}\" definen el método \"{name}\" de forma incompatible", + "baseClassUnknown": "Se desconoce el tipo de la clase base, lo que oculta el tipo de la clase derivada.", + "baseClassVariableTypeIncompatible": "Las clases base para la clase \"{classType}\" definen la variable \"{name}\" de forma incompatible", + "binaryOperationNotAllowed": "Operador binario no permitido en la expresión de tipo", + "bindParamMissing": "No se pudo enlazar el método \"{methodName}\" porque falta un parámetro \"self\" o \"cls\"", + "bindTypeMismatch": "No se pudo enlazar el método \"{methodName}\" porque \"{type}\" no se puede asignar al parámetro \"{paramName}\"", + "breakInExceptionGroup": "No se permite \"break\" en un bloque \"except*\"", + "breakOutsideLoop": "\"break\" solo se puede usar dentro de un bucle", + "bytesUnsupportedEscape": "Secuencia de escape no admitida en el literal de bytes", + 
"callableExtraArgs": "Se esperaban solo dos argumentos de tipo para \"Callable\".", + "callableFirstArg": "Lista de tipos de parámetros esperados o \"...\"", + "callableNotInstantiable": "No se puede instanciar el tipo \"{type}\"", + "callableSecondArg": "Tipo de retorno esperado como segundo argumento de tipo para \"Callable\"", + "casePatternIsIrrefutable": "El patrón irrefutable solo se permite para la última instrucción case", + "classAlreadySpecialized": "El tipo \"{type}\" ya está especializado", + "classDecoratorTypeUnknown": "El decorador de clase sin tipo oculta el tipo de clase; omitiendo el elemento Decorator", + "classDefinitionCycle": "La definición de clase para \"{name}\" depende de sí misma.", + "classGetItemClsParam": "__class_getitem__ debe tomar un parámetro \"cls\"", + "classMethodClsParam": "Los métodos de clase deben tomar un parámetro \"cls\"", + "classNotRuntimeSubscriptable": "El subíndice para la clase \"{name}\" generará una excepción en tiempo de ejecución; encierre la expresión de tipo entre comillas", + "classPatternBuiltInArgPositional": "El patrón de clase solo acepta subpatrones posicionales", + "classPatternNewType": "\"{type}\" no se puede usar en un patrón de clase porque se define mediante NewType", + "classPatternPositionalArgCount": "Demasiados patrones posicionales para la clase \"{type}\"; esperado {expected} pero recibido {received}", + "classPatternTypeAlias": "\"{type}\" no se puede usar en un patrón de clase porque es un alias de tipo especializado", + "classPropertyDeprecated": "Las propiedades de clase están en desuso en Python 3.11 y no se admitirán en Python 3.13.", + "classTypeParametersIllegal": "La sintaxis de los parámetros de tipo de clase requiere Python 3.12 o posterior.", + "classVarFirstArgMissing": "Se esperaba un argumento de tipo después de \"ClassVar\"", + "classVarNotAllowed": "\"ClassVar\" no está permitido en este contexto", + "classVarOverridesInstanceVar": "La variable de clase \"{name}\" anula la 
variable de instancia del mismo nombre en la clase \"{className}\"", + "classVarTooManyArgs": "Solo se esperaba un argumento de tipo después de \"ClassVar\"", + "classVarWithTypeVar": "El tipo \"ClassVar\" no puede incluir variables de tipo", + "clsSelfParamTypeMismatch": "El tipo de parámetro \"{name}\" debe ser un supertipo de su clase \"{classType}\"", + "codeTooComplexToAnalyze": "El código es demasiado complejo para analizarlo; reduzca la complejidad refactorizándolo en subrutinas o reduciendo las rutas de código condicional.", + "collectionAliasInstantiation": "No se puede crear una instancia del tipo \"{type}\"; use \"{alias}\" en su lugar.", + "comparisonAlwaysFalse": "La condición siempre se evaluará como False, ya que los tipos \"{leftType}\" y \"{rightType}\" no se superponen.", + "comparisonAlwaysTrue": "La condición siempre se evaluará como True, ya que los tipos \"{leftType}\" y \"{rightType}\" no se superponen.", + "comprehensionInDict": "La comprensión no puede utilizarse con otras entradas del diccionario", + "comprehensionInSet": "La comprensión no se puede usar con otras entradas de set", + "concatenateContext": "\"Concatenate\" no se permite en este contexto", + "concatenateParamSpecMissing": "El último argumento de tipo para \"Concatenate\" debe ser un ParamSpec o \"...\"", + "concatenateTypeArgsMissing": "\"Concatenate\" requiere al menos dos argumentos de tipo", + "conditionalOperandInvalid": "Operando condicional no válido de tipo \"{type}\"", + "constantRedefinition": "\"{name}\" es constante (porque está en mayúsculas) y no se puede volver a definir", + "constructorParametersMismatch": "Error de coincidencia entre la firma de __new__ y __init__ en la clase \"{classType}\"", + "containmentAlwaysFalse": "La expresión siempre se evaluará como False, ya que los tipos \"{leftType}\" y \"{rightType}\" no tienen superposición", + "containmentAlwaysTrue": "La expresión siempre se evaluará como True, ya que los tipos \"{leftType}\" y 
\"{rightType}\" no tienen superposición", + "continueInExceptionGroup": "No se permite \"continue\" en un bloque \"except*\"", + "continueOutsideLoop": "\"continue\" solo puede utilizarse dentro de un bucle", + "coroutineInConditionalExpression": "La expresión condicional hace referencia a una corrutina que siempre se evalúa como True", + "dataClassBaseClassFrozen": "Una clase no inmovilizada no puede heredar de una clase inmovilizada", + "dataClassBaseClassNotFrozen": "Una clase congelada no puede heredar de una clase que no esté congelada", + "dataClassConverterFunction": "Argumento de tipo \"{argType}\" no es un convertidor válido para el campo \"{fieldName}\" de tipo \"{fieldType}\"", + "dataClassConverterOverloads": "No hay sobrecargas de \"{funcName}\" que sean convertidores válidos para el campo \"{fieldName}\" de tipo \"{fieldType}\"", + "dataClassFieldInheritedDefault": "\"{fieldName}\" invalida un campo con el mismo nombre, pero falta un valor predeterminado", + "dataClassFieldWithDefault": "Los campos sin valores predeterminados no pueden aparecer después de los campos con valores predeterminados", + "dataClassFieldWithPrivateName": "El campo Dataclass no puede utilizar un nombre privado", + "dataClassFieldWithoutAnnotation": "El campo Dataclass sin anotación de tipo provocará una excepción en tiempo de ejecución", + "dataClassPostInitParamCount": "Dataclass __post_init__ recuento de parámetros incorrecto; el número de campos InitVar es {expected}.", + "dataClassPostInitType": "El tipo de parámetro del método __post_init__ de la clase de datos no coincide con el del campo \"{fieldName}\".", + "dataClassSlotsOverwrite": "__slots__ ya está definido en la clase", + "dataClassTransformExpectedBoolLiteral": "Expresión esperada que se evalúa estáticamente como True o False", + "dataClassTransformFieldSpecifier": "Se esperaba una tuple de clases o funciones, pero se recibió el tipo \"{type}\"", + "dataClassTransformPositionalParam": "Todos los argumentos de 
\"dataclass_transform\" deben ser argumentos de palabra clave", + "dataClassTransformUnknownArgument": "El argumento \"{name}\" no es compatible con dataclass_transform", + "dataProtocolInSubclassCheck": "No se permiten protocolos de datos (que incluyen atributos que no son de método) en llamadas issubclass", + "declaredReturnTypePartiallyUnknown": "El tipo de retorno declarado, \"{returnType}\", es parcialmente desconocido", + "declaredReturnTypeUnknown": "El tipo de retorno declarado es desconocido", + "defaultValueContainsCall": "No se permiten llamadas de función y objetos mutables dentro de la expresión de valor predeterminado del parámetro", + "defaultValueNotAllowed": "Los parámetros con \"*\" o \"**\" no pueden tener valor por defecto", + "delTargetExpr": "No se puede eliminar la expresión", + "deprecatedClass": "La clase \"{name}\" está en desuso", + "deprecatedConstructor": "El constructor de la clase \"{name}\" está obsoleto", + "deprecatedDescriptorDeleter": "El método \"__delete__\" para el \"{name}\" de descriptor está en desuso", + "deprecatedDescriptorGetter": "El método \"__get__\" para el \"{name}\" de descriptor está en desuso", + "deprecatedDescriptorSetter": "El método \"__set__\" para el \"{name}\" de descriptor está en desuso", + "deprecatedFunction": "La función \"{name}\" está obsoleta", + "deprecatedMethod": "El método \"{name}\" en la clase \"{className}\" está en desuso", + "deprecatedPropertyDeleter": "El deleter de la property \"{name}\" está en desuso", + "deprecatedPropertyGetter": "El getter de la property \"{name}\" está en desuso", + "deprecatedPropertySetter": "El setter de la property \"{name}\" está en desuso", + "deprecatedType": "Este tipo está obsoleto a partir de la {version} de Python; utilice en su lugar \"{replacement}\".", + "dictExpandIllegalInComprehension": "No se permite la ampliación del diccionario en la comprensión", + "dictInAnnotation": "Expresión de diccionario no permitida en expresión de tipo", + 
"dictKeyValuePairs": "Las entradas del diccionario deben contener pares clave/valor", + "dictUnpackIsNotMapping": "Asignación esperada para el operador de desempaquetado del diccionario", + "dunderAllSymbolNotPresent": "\"{name}\" se especifica en __all__ pero no está presente en el módulo", + "duplicateArgsParam": "Solo se permite un parámetro \"*\".", + "duplicateBaseClass": "Clase base duplicada no permitida", + "duplicateCapturePatternTarget": "El destino de captura \"{name}\" no puede aparecer más de una vez dentro del mismo patrón", + "duplicateCatchAll": "Solo se permite una cláusula de except", + "duplicateEnumMember": "El miembro Enum \"{name}\" ya está declarado", + "duplicateGenericAndProtocolBase": "Solo se permite una clase base Generic[...] o Protocol[...].", + "duplicateImport": "\"{importName}\" se importa más de una vez", + "duplicateKeywordOnly": "Solo se permite un separador \"*\".", + "duplicateKwargsParam": "Solo se permite un parámetro \"**\".", + "duplicateParam": "Parámetro duplicado \"{name}\"", + "duplicatePositionOnly": "Solo se permite un parámetro \"/\"", + "duplicateStarPattern": "Solo se permite un patrón \"*\" en una secuencia de patrones", + "duplicateStarStarPattern": "Solo se permite una entrada \"**\"", + "duplicateUnpack": "Solo se permite una operación de desempaquetado en la lista", + "ellipsisAfterUnpacked": "\"...\" no se puede usar con una TypeVarTuple o tuple sin empaquetar", + "ellipsisContext": "\"...\" no está permitido en este contexto", + "ellipsisSecondArg": "\"...\" está permitido sólo como el segundo de dos argumentos", + "enumClassOverride": "La clase Enum \"{name}\" es final y no puede ser subclasificada", + "enumMemberDelete": "No se puede eliminar el miembro de Enum \"{name}\"", + "enumMemberSet": "No se puede asignar el miembro de Enum \"{name}\"", + "enumMemberTypeAnnotation": "No se permiten anotaciones de tipo para miembros de enumeración", + "exceptGroupMismatch": "La instrucción Try no puede incluir 
\"except\" y \"except*\"", + "exceptGroupRequiresType": "La sintaxis del grupo de excepciones (\"except*\") requiere un tipo de excepción", + "exceptRequiresParens": "Se deben incluir entre paréntesis varios tipos de excepción antes de Python 3.14", + "exceptWithAsRequiresParens": "Se deben incluir entre paréntesis varios tipos de excepción al usar \"as\"", + "exceptionGroupIncompatible": "La sintaxis de grupo de excepciones (\"except*\") requiere Python 3.11 o posterior.", + "exceptionGroupTypeIncorrect": "El tipo de excepción en except* no puede derivarse de BaseGroupException", + "exceptionTypeIncorrect": "\"{type}\" no se deriva de BaseException", + "exceptionTypeNotClass": "\"{type}\" no es una clase de excepción válida", + "exceptionTypeNotInstantiable": "El constructor para el tipo de excepción \"{type}\" requiere uno o más argumentos", + "expectedAfterDecorator": "Se esperaba una declaración de función o clase después del decorador", + "expectedArrow": "Se esperaba \"->\" seguido de una anotación de tipo de retorno", + "expectedAsAfterException": "Se esperaba \"as\" después del tipo de excepción", + "expectedAssignRightHandExpr": "Expresión esperada a la derecha de \"=\"", + "expectedBinaryRightHandExpr": "Expresión esperada a la derecha del operador", + "expectedBoolLiteral": "Se esperaba True o False", + "expectedCase": "Declaración \"case\" esperada", + "expectedClassName": "Nombre de clase esperado", + "expectedCloseBrace": "\"{\" no estaba cerrado", + "expectedCloseBracket": "\"[\" no estaba cerrado", + "expectedCloseParen": "\"(\" no estaba cerrado", + "expectedColon": "Se esperaba \":\"", + "expectedComplexNumberLiteral": "Número complejo literal esperado para la concordancia de patrones", + "expectedDecoratorExpr": "Forma de expresión no compatible con Decorator anterior a Python 3.9", + "expectedDecoratorName": "Nombre esperado del Decorator", + "expectedDecoratorNewline": "Nueva línea esperada al final de Decorator", + "expectedDelExpr": 
"Expresión esperada después de \"del\"", + "expectedElse": "Se espera \"else\"", + "expectedEquals": "Se esperaba \"=\"", + "expectedExceptionClass": "Clase o objeto de excepción no válido", + "expectedExceptionObj": "Objeto de excepción esperado, clase de excepción o None", + "expectedExpr": "Se esperaba una expresión", + "expectedFunctionAfterAsync": "Definición de función esperada después de \"async\"", + "expectedFunctionName": "Se esperaba nombre de la función luego de \"def\"", + "expectedIdentifier": "Identificador esperado", + "expectedImport": "Se espera \"import\"", + "expectedImportAlias": "Símbolo esperado después de \"as\"", + "expectedImportSymbols": "Se esperan uno o más nombres de símbolos tras la importación", + "expectedIn": "Se esperaba \"in\"", + "expectedInExpr": "Expresión esperada después de \"in\"", + "expectedIndentedBlock": "Bloque con sangría previsto", + "expectedMemberName": "Se esperaba un nombre de atributo después de \".\"", + "expectedModuleName": "Nombre de módulo esperado", + "expectedNameAfterAs": "Se esperaba un nombre de símbolo después de \"as\"", + "expectedNamedParameter": "El parámetro de palabra clave debe ir después de \"*\".", + "expectedNewline": "Nueva línea esperada", + "expectedNewlineOrSemicolon": "Las declaraciones deben ir separadas por nuevas líneas o punto y coma", + "expectedOpenParen": "Se espera \"(\"", + "expectedParamName": "Nombre esperado del parámetro", + "expectedPatternExpr": "Expresión del patrón esperado", + "expectedPatternSubjectExpr": "Expresión de asunto de patrón esperada", + "expectedPatternValue": "Expresión de valor de patrón esperada de la forma \"a.b\"", + "expectedReturnExpr": "Expresión esperada después de \"return\"", + "expectedSliceIndex": "Expresión de índice o segmento esperada", + "expectedTypeNotString": "Se esperaba un tipo pero se ha recibido una cadena literal", + "expectedTypeParameterName": "Nombre de parámetro de tipo esperado", + "expectedYieldExpr": "Expresión esperada en 
la instrucción yield", + "finalClassIsAbstract": "La clase \"{type}\" está marcada como final y debe implementar todos los símbolos abstractos", + "finalContext": "\"Final\" no está permitido en este contexto", + "finalInLoop": "No se puede asignar una variable \"Final\" dentro de un bucle", + "finalMethodOverride": "El método \"{name}\" no puede anular el método final definido en la clase \"{className}\"", + "finalNonMethod": "La función \"{name}\" no se puede marcar @final porque no es un método", + "finalReassigned": "\"{name}\" se declara como Final y no se puede reasignar", + "finalRedeclaration": "\"{name}\" se declaró anteriormente como Final", + "finalRedeclarationBySubclass": "\"{name}\" no puede ser redeclarado porque la clase padre \"{className}\" lo declara como Final", + "finalTooManyArgs": "Se esperaba un único argumento de tipo después de \"Final\".", + "finalUnassigned": "\"{name}\" se declara Final, pero no se asigna valor", + "finallyBreak": "No se puede usar un elemento \"break\" para salir de un bloque \"finally\"", + "finallyContinue": "No se puede usar un elemento \"continue\" para salir de un bloque \"finally\"", + "finallyReturn": "No se puede usar un elemento \"return\" para salir de un bloque \"finally\"", + "formatStringBrace": "No se permite una llave de cierre única dentro del literal de cadena f; usar llave de cierre doble", + "formatStringBytes": "Los literales de cadena de formato (f-strings) no pueden ser binarios", + "formatStringDebuggingIllegal": "El especificador de depuración de cadena F \"=\" requiere Python 3.8 o posterior", + "formatStringEscape": "Secuencia de escape (barra diagonal inversa) no permitida en la parte de expresión de f-string anterior a Python 3.12", + "formatStringExpectedConversion": "Se esperaba un especificador de conversión después de \"!\" en f-string", + "formatStringIllegal": "Los literales de cadena de formato (f-strings) requieren Python 3.6 o posterior", + "formatStringInPattern": "Cadena de 
formato no permitida en el patrón", + "formatStringNestedFormatSpecifier": "Expresiones anidadas demasiado profundamente dentro del especificador de cadena de formato", + "formatStringNestedQuote": "Las cadenas anidadas dentro de una cadena f no pueden usar el mismo carácter de comillas que la cadena f anterior a Python 3.12", + "formatStringTemplate": "Los literales de cadena de formato (f-strings) tampoco pueden ser cadenas de plantilla (cadenas t)", + "formatStringUnicode": "Los literales de cadena de formato (cadenas f) no pueden ser unicode", + "formatStringUnterminated": "Expresión sin terminar en f-string; se esperaba \"}\"", + "functionDecoratorTypeUnknown": "Un decorator de función no tipificado oculta el tipo de función; ignorar el decorator", + "functionInConditionalExpression": "La expresión condicional hace referencia a una función que siempre se evalúa como True", + "functionTypeParametersIllegal": "La sintaxis del parámetro de tipo de función requiere Python 3.12 o posterior", + "futureImportLocationNotAllowed": "Las importaciones desde __future__ deben estar al principio del fichero", + "generatorAsyncReturnType": "El tipo de retorno de la función generadora async debe ser compatible con \"AsyncGenerator[{yieldType}, Any]\"", + "generatorNotParenthesized": "Las expresiones del generador deben ir entre paréntesis si no son el único argumento", + "generatorSyncReturnType": "El tipo de retorno de la función generadora debe ser compatible con \"Generator[{yieldType}, Any, Any]\"", + "genericBaseClassNotAllowed": "La clase base \"Generic\" no se puede usar con la sintaxis de parámetro de tipo", + "genericClassAssigned": "No se puede asignar un tipo de clase genérico", + "genericClassDeleted": "No se puede eliminar el tipo de clase genérica", + "genericInstanceVariableAccess": "El acceso a la variable de instancia genérica a través de la clase es ambiguo", + "genericNotAllowed": "\"Generic\" no es válido en este contexto", + 
"genericTypeAliasBoundTypeVar": "Los alias de tipo genérico dentro de una clase no pueden utilizar variables de tipo vinculadas {names}.", + "genericTypeArgMissing": "\"Generic\" requiere al menos un argumento de tipo", + "genericTypeArgTypeVar": "El argumento de tipo para \"Generic\" debe ser una variable de tipo", + "genericTypeArgUnique": "Los argumentos de tipo para \"Generic\" deben ser únicos", + "globalReassignment": "\"{name}\" se asigna antes de la declaración global", + "globalRedefinition": "\"{name}\" ya ha sido declarado global", + "implicitStringConcat": "No se permite la concatenación implícita de cadenas", + "importCycleDetected": "Ciclo detectado en la cadena de importación", + "importDepthExceeded": "La profundidad de la cadena de importación superó {depth}", + "importResolveFailure": "No se ha podido resolver la importación \"{importName}\".", + "importSourceResolveFailure": "La importación \"{importName}\" no se ha podido resolver desde el origen", + "importSymbolUnknown": "\"{name}\" es un símbolo de importación desconocido", + "incompatibleMethodOverride": "El método \"{name}\" sobrescribe la clase \"{className}\" de forma incompatible", + "inconsistentIndent": "La cantidad sin sangría no coincide con la sangría anterior", + "inconsistentTabs": "Uso incoherente de tabuladores y espacios en la sangría", + "initMethodSelfParamTypeVar": "La anotación de tipo para el parámetro \"self\" del método \"__init__\" no puede contener variables de tipo con ámbito de clase", + "initMustReturnNone": "El tipo de retorno de \"__init__\" debe ser None", + "initSubclassCallFailed": "Argumentos de palabra clave incorrectos para el método __init_subclass__", + "initSubclassClsParam": "__init_subclass__ debe tomar un parámetro \"cls\"", + "initVarNotAllowed": "\"InitVar\" no se permite en este contexto", + "instanceMethodSelfParam": "Los métodos de instancia deben tomar un parámetro \"self\"", + "instanceVarOverridesClassVar": "La variable de instancia \"{name}\" 
invalida la variable de clase del mismo nombre en la clase \"{className}\"", + "instantiateAbstract": "No se puede instanciar la clase abstracta \"{type}\"", + "instantiateProtocol": "No se puede crear una instancia de la clase Protocol \"{type}\"", + "internalBindError": "Se ha producido un error interno al vincular el archivo \"{file}\": {message}", + "internalParseError": "Se ha producido un error interno al procesar el archivo \"{file}\": {message}", + "internalTypeCheckingError": "Se ha producido un error interno al comprobar el tipo de archivo \"{file}\":{message}", + "invalidIdentifierChar": "Carácter no válido en el identificador", + "invalidStubStatement": "La declaración no tiene sentido dentro de un archivo de tipo stub", + "invalidTokenChars": "Carácter \"{text}\" no válido en el token", + "isInstanceInvalidType": "El segundo argumento de \"isinstance\" debe ser una clase o tuple de clases", + "isSubclassInvalidType": "El segundo argumento de \"issubclass\" debe ser una clase o tuple de clases", + "keyValueInSet": "No se permiten pares de clave/valor dentro de un set", + "keywordArgInTypeArgument": "No se pueden usar argumentos de palabra clave en listas de argumentos de tipo", + "keywordOnlyAfterArgs": "No se permite el separador de argumentos por palabra clave después del parámetro \"*\".", + "keywordParameterMissing": "Uno o varios parámetros de palabra clave deben seguir el parámetro \"*\"", + "keywordSubscriptIllegal": "No se admiten argumentos de palabra clave dentro de subíndices", + "lambdaReturnTypePartiallyUnknown": "El tipo de retorno de la lambda \"{returnType}\" es parcialmente desconocido.", + "lambdaReturnTypeUnknown": "Se desconoce el tipo de retorno de la lambda", + "listAssignmentMismatch": "La expresión con el tipo \"{type}\" no puede asignarse a la lista de destino", + "listInAnnotation": "No se permite la expresión de List en la expresión de tipo", + "literalEmptyArgs": "Se esperaban uno o varios argumentos de tipo después de 
\"Literal\"", + "literalNamedUnicodeEscape": "No se admiten secuencias de escape Unicode con nombre en las anotaciones de cadena de \"Literales\".", + "literalNotAllowed": "\"Literal\" no se puede usar en este contexto sin un argumento de tipo", + "literalNotCallable": "El tipo Literal no puede instanciarse", + "literalUnsupportedType": "Los argumentos de tipo para \"Literal\" deben ser None, un valor literal (int, bool, str, o bytes), o un valor enum", + "matchIncompatible": "Las declaraciones de Match requieren Python 3.10 o posterior", + "matchIsNotExhaustive": "Los casos dentro de la declaración de match no tratan exhaustivamente todos los valores", + "maxParseDepthExceeded": "Se ha superado la profundidad máxima de análisis; divida la expresión en subexpresiones más pequeñas.", + "memberAccess": "No se puede tener acceso al atributo \"{name}\" para la clase \"{type}\"", + "memberDelete": "No se puede eliminar el atributo \"{name}\" de la clase \"{type}\"", + "memberSet": "No se puede asignar al atributo \"{name}\" para la clase \"{type}\"", + "metaclassConflict": "La metaclase de una clase derivada debe ser una subclase de las metaclases de todas sus clases base", + "metaclassDuplicate": "Solo se puede proporcionar una metaclase", + "metaclassIsGeneric": "La metaclase no puede ser genérica", + "methodNotDefined": "Método \"{name}\" no definido", + "methodNotDefinedOnType": "Método \"{name}\" no definido en el tipo \"{type}\"", + "methodOrdering": "No se puede crear una ordenación coherente de los métodos", + "methodOverridden": "\"{name}\" invalida el método del mismo nombre en la clase \"{className}\" con el tipo incompatible \"{type}\"", + "methodReturnsNonObject": "El método \"{name}\" no devuelve un objeto", + "missingSuperCall": "El método \"{methodName}\" no llama al método del mismo nombre en la clase principal.", + "mixingBytesAndStr": "No se pueden concatenar los valores de bytes y str", + "moduleAsType": "El módulo no se puede usar como tipo.", + 
"moduleNotCallable": "No se puede llamar al módulo", + "moduleUnknownMember": "\"{memberName}\" no es un atributo conocido del módulo \"{moduleName}\"", + "namedExceptAfterCatchAll": "Una cláusula except con nombre no puede aparecer después de la cláusula catch-all except", + "namedParamAfterParamSpecArgs": "El parámetro de palabra clave \"{name}\" no puede aparecer en la firma después del parámetro ParamSpec args", + "namedTupleEmptyName": "Los nombres de una tuple con nombre no pueden estar vacíos", + "namedTupleEntryRedeclared": "No se puede invalidar \"{name}\" porque la clase primaria \"{className}\" es una tuple con nombre", + "namedTupleFieldUnderscore": "Los nombres de campo de una tuple con nombre no pueden empezar por un carácter de subrayado", + "namedTupleFirstArg": "Nombre de clase de tuple como primer argumento", + "namedTupleMultipleInheritance": "No se admite la herencia múltiple con NamedTuple", + "namedTupleNameKeyword": "Los nombres de campo no pueden ser una palabra clave", + "namedTupleNameType": "tuple de dos entradas esperada que especifica el nombre y el tipo de entrada", + "namedTupleNameUnique": "Los nombres dentro de una tuple con nombre deben ser únicos", + "namedTupleNoTypes": "\"namedtuple\" no proporciona tipos para las entradas de tuple; utilice en su lugar \"NamedTuple\".", + "namedTupleSecondArg": "list de entradas de tuple con nombre esperada como segundo argumento", + "newClsParam": "__new__ debe tomar un parámetro \"cls\"", + "newTypeAnyOrUnknown": "El segundo argumento de NewType debe ser una clase conocida, no Any ni Unknown", + "newTypeBadName": "El primer argumento de NewType debe ser una cadena literal", + "newTypeLiteral": "NewType no se puede usar con el tipo Literal", + "newTypeNameMismatch": "NewType debe asignarse a una variable con el mismo nombre", + "newTypeNotAClass": "Clase esperada como segundo argumento de NewType", + "newTypeParamCount": "NewType requiere dos argumentos posicionales", + "newTypeProtocolClass": "NewType 
no se puede usar con un tipo estructural (Protocol o clase TypedDict)", + "noOverload": "Ninguna sobrecarga para \"{name}\" coincide con los argumentos proporcionados", + "noReturnContainsReturn": "La función con tipo de return declarado \"NoReturn\" no puede incluir una sentencia return", + "noReturnContainsYield": "La función con tipo de retorno declarado \"NoReturn\" no puede incluir una instrucción yield", + "noReturnReturnsNone": "La función con el tipo de valor devuelto declarado \"NoReturn\" no puede devolver \"None\"", + "nonDefaultAfterDefault": "El argumento no predeterminado sigue al argumento predeterminado", + "nonLocalInModule": "Declaración Nonlocal no permitida a nivel de módulo", + "nonLocalNoBinding": "No se ha encontrado ningún enlace para \"{name}\" nonlocal.", + "nonLocalReassignment": "\"{name}\" se asigna antes de la declaración nonlocal", + "nonLocalRedefinition": "\"{name}\" ya fue declarado nonlocal", + "noneNotCallable": "No se puede llamar al objeto de tipo \"None\"", + "noneNotIterable": "No se puede utilizar un objeto de tipo \"None\" como valor iterable", + "noneNotSubscriptable": "El objeto de tipo \"None\" no se puede suscribir", + "noneNotUsableWith": "El objeto de tipo \"None\" no se puede usar con \"with\"", + "noneNotUsableWithAsync": "El objeto de tipo \"None\" no se puede usar con \"async with\"", + "noneOperator": "El operador \"{operator}\" no es compatible con \"None\".", + "noneUnknownMember": "\"{name}\" no es un atributo conocido de \"None\"", + "nonlocalTypeParam": "No se permite el enlace nonlocal para el parámetro de tipo \"{name}\"", + "notRequiredArgCount": "Se esperaba un único argumento de tipo después de \"NotRequired\".", + "notRequiredNotInTypedDict": "\"NotRequired\" no está permitido en este contexto", + "objectNotCallable": "El objeto de tipo \"{type}\" no es invocable", + "obscuredClassDeclaration": "La declaración de clase \"{name}\" queda oculta por una declaración del mismo nombre", + 
"obscuredFunctionDeclaration": "La declaración de función \"{name}\" queda oculta por una declaración del mismo nombre", + "obscuredMethodDeclaration": "La declaración de método \"{name}\" queda oculta por una declaración del mismo nombre", + "obscuredParameterDeclaration": "La declaración de parámetro \"{name}\" queda oculta por una declaración del mismo nombre", + "obscuredTypeAliasDeclaration": "La declaración de alias de tipo \"{name}\" queda oculta por una declaración del mismo nombre", + "obscuredVariableDeclaration": "La declaración \"{name}\" está oculta por una declaración del mismo nombre", + "operatorLessOrGreaterDeprecated": "El operador \"<>\" no es admitido en Python 3; utilice en su lugar \"!=\".", + "optionalExtraArgs": "Se esperaba un argumento de tipo después de \"Optional\"", + "orPatternIrrefutable": "El patrón irrefutable solo se permite como el último subpatrón en un patrón \"or\".", + "orPatternMissingName": "Todos los subpatrones de un patrón \"or\" deben tener los mismos nombres", + "overlappingKeywordArgs": "El diccionario escrito se superpone con el parámetro de palabra clave: {names}", + "overlappingOverload": "La sobrecarga {obscured} para \"{name}\" nunca se utilizará porque sus parámetros se superponen con la sobrecarga {obscuredBy}.", + "overloadAbstractImplMismatch": "Las sobrecargas deben coincidir con el estado abstracto de la implementación", + "overloadAbstractMismatch": "Todos los métodos sobrecargados deben ser abstractos o no", + "overloadClassMethodInconsistent": "Las sobrecargas de \"{name}\" usan @classmethod de forma incoherente", + "overloadFinalImpl": "@final elemento Decorator solo se debe aplicar a la implementación.", + "overloadFinalNoImpl": "Solo la primera sobrecarga debe marcarse @final", + "overloadImplementationMismatch": "La implementación de la sobrecarga no es consistente con la firma de la sobrecarga {index}", + "overloadOverrideImpl": "@override elemento Decorator solo se debe aplicar a la implementación.", 
+ "overloadOverrideNoImpl": "Solo la primera sobrecarga debe marcarse @override", + "overloadReturnTypeMismatch": "La sobrecarga {prevIndex} para \"{name}\" se superpone con la sobrecarga {newIndex} y devuelve un tipo incompatible", + "overloadStaticMethodInconsistent": "Las sobrecargas de \"{name}\" usan @staticmethod de forma incoherente", + "overloadWithoutImplementation": "\"{name}\" está marcado como overload, pero no se proporciona ninguna implementación.", + "overriddenMethodNotFound": "El método \"{name}\" está marcado como override, pero no existe ningún método base con el mismo nombre", + "overrideDecoratorMissing": "El método \"{name}\" no está marcado como override, pero está reemplazando un método de la clase \"{className}\"", + "paramAfterKwargsParam": "El parámetro no puede seguir el parámetro \"**\"", + "paramAlreadyAssigned": "El parámetro \"{name}\" ya está asignado", + "paramAnnotationMissing": "Falta la anotación de tipo para el parámetro \"{name}\"", + "paramAssignmentMismatch": "La expresión de tipo \"{sourceType}\" no se puede asignar al parámetro de tipo \"{paramType}\"", + "paramNameMissing": "Ningún parámetro llamado \"{name}\"", + "paramSpecArgsKwargsDuplicate": "Ya se han proporcionado los argumentos para ParamSpec \"{type}\".", + "paramSpecArgsKwargsUsage": "Los atributos \"args\" y \"kwargs\" de ParamSpec deben aparecer ambos dentro de una firma de función", + "paramSpecArgsMissing": "Faltan argumentos para ParamSpec \"{type}\".", + "paramSpecArgsUsage": "El atributo \"args\" de ParamSpec solo es válido cuando se usa con el parámetro *args.", + "paramSpecAssignedName": "ParamSpec debe asignarse a una variable llamada \"{name}\"", + "paramSpecContext": "ParamSpec no está permitido en este contexto", + "paramSpecDefaultNotTuple": "Se esperaban puntos suspensivos, una expresión de tuple o ParamSpec para el valor predeterminado de ParamSpec", + "paramSpecFirstArg": "Se esperaba el nombre de ParamSpec como primer argumento", + 
"paramSpecKwargsUsage": "El miembro \"kwargs\" de ParamSpec solo es válido cuando se utiliza con el parámetro **kwargs", + "paramSpecNotUsedByOuterScope": "ParamSpec \"{name}\" no tiene significado en este contexto", + "paramSpecUnknownArg": "ParamSpec no admite más de un argumento", + "paramSpecUnknownMember": "\"{name}\" no es un atributo conocido de ParamSpec", + "paramSpecUnknownParam": "\"{name}\" es un parámetro desconocido para ParamSpec", + "paramTypeCovariant": "La variable de tipo covariante no puede utilizarse en el tipo de parámetro", + "paramTypePartiallyUnknown": "El tipo de parámetro \"{paramName}\" es parcialmente desconocido", + "paramTypeUnknown": "Se desconoce el tipo del parámetro \"{paramName}\".", + "parenthesizedContextManagerIllegal": "Los paréntesis dentro de la instrucción \"with\" requieren Python 3.9 o posterior", + "patternNeverMatches": "El patrón nunca coincidirá para el tipo de asunto \"{type}\"", + "positionArgAfterNamedArg": "El argumento posicional no puede aparecer después de los argumentos de palabra clave", + "positionArgAfterUnpackedDictArg": "El argumento posicional no puede aparecer después de desempaquetar el argumento de palabra clave", + "positionOnlyAfterArgs": "No se permite el separador de parámetros solo posicionales después del parámetro \"*\".", + "positionOnlyAfterKeywordOnly": "El parámetro \"/\" debe aparecer antes del parámetro \"*\".", + "positionOnlyAfterNon": "Parámetro solo posicional no permitido después de un parámetro que no es solo posicional", + "positionOnlyFirstParam": "Separador de parámetros solo posicionales no permitido como primer parámetro", + "positionOnlyIncompatible": "El separador de parámetros solo posicionales requiere Python 3.8 o posterior", + "privateImportFromPyTypedModule": "\"{name}\" no se exporta desde el módulo \"{module}\"", + "privateUsedOutsideOfClass": "\"{name}\" es privado y se utiliza fuera de la clase en la que se declara", + "privateUsedOutsideOfModule": "\"{name}\" es privado y se 
utiliza fuera del módulo en el que se declara", + "propertyOverridden": "\"{name}\" invalida incorrectamente la property del mismo nombre en la clase \"{className}\"", + "propertyStaticMethod": "Métodos estáticos no permitidos para los valores de property getter, setter o deleter", + "protectedUsedOutsideOfClass": "\"{name}\" está protegido y se usa fuera de la clase en la que se declara", + "protocolBaseClass": "La clase de Protocol \"{classType}\" no se puede derivar de la clase que no es Protocol \"{baseType}\"", + "protocolBaseClassWithTypeArgs": "No se permiten argumentos de tipo con la clase Protocol cuando se usa la sintaxis de parámetro de tipo", + "protocolIllegal": "El uso de \"Protocol\" requiere Python 3.7 o posterior.", + "protocolNotAllowed": "\"Protocol\" no puede utilizarse en este contexto", + "protocolTypeArgMustBeTypeParam": "El argumento de tipo para \"Protocol\" debe ser un parámetro de tipo", + "protocolUnsafeOverlap": "La clase se superpone con \"{name}\" de forma no segura y podría producir una coincidencia en tiempo de ejecución", + "protocolVarianceContravariant": "La variable de tipo \"{variable}\" usada en Protocol genérico \"{class}\" debe ser contravariante", + "protocolVarianceCovariant": "La variable de tipo \"{variable}\" usada en Protocol genérico \"{class}\" debe ser covariante", + "protocolVarianceInvariant": "La variable de tipo \"{variable}\" usada en Protocol genérico \"{class}\" debe ser invariable", + "pyrightCommentInvalidDiagnosticBoolValue": "La directiva de comentario Pyright debe ir seguida de \"=\" y un valor de true o false", + "pyrightCommentInvalidDiagnosticSeverityValue": "La directiva de comentario Pyright debe ir seguida de \"=\" y un valor de true, false, error, warning, information o none.", + "pyrightCommentMissingDirective": "El comentario de Pyright debe ir seguido de una directiva (basic o strict) o de una regla de diagnóstico", + "pyrightCommentNotOnOwnLine": "Los comentarios de Pyright utilizados para 
controlar los ajustes a nivel de archivo deben aparecer en su propia línea", + "pyrightCommentUnknownDiagnosticRule": "\"{rule}\" es una regla de diagnóstico desconocida para el comentario pyright", + "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" es un valor no válido para el comentario pyright; se espera true, false, error, warning, information o none.", + "pyrightCommentUnknownDirective": "\"{directive}\" es una directiva desconocida para el comentario pyright; se esperaba \"strict\", \"standard\" o \"basic\"", + "readOnlyArgCount": "Se esperaba un único argumento de tipo después de \"ReadOnly\"", + "readOnlyNotInTypedDict": "\"ReadOnly\" no está permitido en este contexto", + "recursiveDefinition": "No se pudo determinar el tipo de \"{name}\" porque hace referencia a sí mismo.", + "relativeImportNotAllowed": "Las importaciones relativas no pueden utilizarse con la forma \"import .a\"; utilice en su lugar \"from . import a\"", + "requiredArgCount": "Se esperaba un único argumento de tipo después de \"Required\"", + "requiredNotInTypedDict": "\"Required\" no está permitido en este contexto", + "returnInAsyncGenerator": "No se permite la instrucción Return con valor en el generador async", + "returnInExceptionGroup": "No se permite \"return\" en un bloque \"except*\"", + "returnMissing": "La función con el tipo de valor devuelto declarado \"{returnType}\" debe devolver un valor en todas las rutas de acceso del código.", + "returnOutsideFunction": "\"return\" solo se puede usar dentro de una función", + "returnTypeContravariant": "La variable de tipo contravariante no se puede usar en el tipo de valor devuelto", + "returnTypeMismatch": "El tipo \"{exprType}\" no se puede asignar al tipo de valor devuelto \"{returnType}\"", + "returnTypePartiallyUnknown": "El tipo de retorno, \"{returnType}\", es parcialmente desconocido", + "returnTypeUnknown": "Tipo de retorno desconocido", + "revealLocalsArgs": "No se esperaba ningún argumento para la llamada 
\"reveal_locals\"", + "revealLocalsNone": "No hay locals en este ámbito", + "revealTypeArgs": "Se esperaba un único argumento posicional para la llamada \"reveal_type\"", + "revealTypeExpectedTextArg": "El argumento \"expected_text\" de la función \"reveal_type\" debe ser un valor literal str.", + "revealTypeExpectedTextMismatch": "El tipo de texto no coincide; se esperaba \"{expected}\" pero se ha recibido \"{received}\".", + "revealTypeExpectedTypeMismatch": "Error de coincidencia de tipos; se esperaba \"{expected}\", pero se recibió \"{received}\"", + "selfTypeContext": "\"Self\" no es válido en este contexto", + "selfTypeMetaclass": "\"Self\" no se puede usar dentro de una metaclase (una subclase de \"type\")", + "selfTypeWithTypedSelfOrCls": "\"Self\" no puede utilizarse en una función con un parámetro `self` o `cls` que tenga una anotación de tipo distinta de \"Self\".", + "sentinelBadName": "El primer argumento de Sentinel debe ser un literal de cadena", + "sentinelNameMismatch": "Sentinel debe asignarse a una variable con el mismo nombre", + "sentinelParamCount": "Sentinel requiere un argumento posicional", + "setterGetterTypeMismatch": "El tipo de valor setter de Property no se puede asignar al tipo devuelto por el valor getter", + "singleOverload": "\"{name}\" está marcado como sobrecarga, pero faltan sobrecargas adicionales", + "slotsAttributeError": "\"{name}\" no se especificó en __slots__", + "slotsClassVarConflict": "\"{name}\" entra en conflicto con la variable de instancia declarada en __slots__", + "starPatternInAsPattern": "El patrón estrella no puede utilizarse con el objetivo \"as\"", + "starPatternInOrPattern": "El patrón de estrella no puede unirse a otros patrones", + "starStarWildcardNotAllowed": "** no puede utilizarse con el comodín \"_\".", + "staticClsSelfParam": "Los métodos estáticos no deben tomar un parámetro \"self\" o \"cls\".", + "stringNonAsciiBytes": "Carácter no ASCII no permitido en el literal de cadena de bytes", + 
"stringNotSubscriptable": "La expresión de cadena no puede tener subíndices en la expresión de tipo; encierre toda la expresión entre comillas.", + "stringUnsupportedEscape": "Secuencia de escape no admitida en el literal de cadena", + "stringUnterminated": "La cadena literal no está terminada", + "stubFileMissing": "Archivo Stub no encontrado para \"{importName}\"", + "stubUsesGetAttr": "El archivo stub de tipo está incompleto; \"__getattr__\" oculta errores de tipo para el módulo", + "sublistParamsIncompatible": "Los parámetros de sublista no están soportados en Python 3.x", + "superCallArgCount": "No se esperaban más de dos argumentos para la llamada \"super\"", + "superCallFirstArg": "Se esperaba el tipo de clase como primer argumento de la llamada a \"super\" pero se recibió \"{type}\"", + "superCallSecondArg": "El segundo argumento de la llamada a \"super\" debe ser un objeto o clase que derive de \"{type}\"", + "superCallZeroArgForm": "La forma sin argumentos de la llamada \"super\" sólo es válida dentro de un método", + "superCallZeroArgFormStaticMethod": "La forma sin argumentos de la llamada \"super\" no es válida en un método estático", + "symbolIsPossiblyUnbound": "\"{name}\" está posiblemente desvinculado", + "symbolIsUnbound": "\"{name}\" está desvinculado", + "symbolIsUndefined": "\"{name}\" no está definido", + "symbolOverridden": "\"{name}\" anula el símbolo del mismo nombre en la clase \"{className}\"", + "templateStringBytes": "Los literales de cadena de plantilla (cadenas t) no pueden ser binarios", + "templateStringIllegal": "Los literales de cadena de plantilla (cadenas t) requieren Python 3.14 o posterior", + "templateStringUnicode": "Los literales de cadena de plantilla (cadenas t) no pueden ser Unicode", + "ternaryNotAllowed": "No se permite la expresión de ternario en la expresión de tipo", + "totalOrderingMissingMethod": "La clase debe definir uno de \"__lt__\", \"__le__\", \"__gt__\", o \"__ge__\" para utilizar total_ordering", + 
"trailingCommaInFromImport": "No se permite la coma final sin paréntesis alrededor", + "tryWithoutExcept": "La instrucción Try debe tener al menos una cláusula except o finally", + "tupleAssignmentMismatch": "La expresión con el tipo \"{type}\" no se puede asignar a la tuple de destino", + "tupleInAnnotation": "No se permite la expresión de tuple en la expresión de tipo", + "tupleIndexOutOfRange": "El índice {index} está fuera de rango para el tipo {type}.", + "typeAliasIllegalExpressionForm": "Forma de expresión no válida para la definición de alias de tipo", + "typeAliasIsRecursiveDirect": "El alias de tipo \"{name}\" no puede usarse a sí mismo en su definición", + "typeAliasNotInModuleOrClass": "Un TypeAlias solo puede definirse en el ámbito de un módulo o de una clase", + "typeAliasRedeclared": "\"{name}\" se declara como TypeAlias y solo puede asignarse una vez", + "typeAliasStatementBadScope": "Una instrucción de type solo se puede usar en el ámbito de un módulo o de una clase", + "typeAliasStatementIllegal": "La sentencia Type alias requiere Python 3.12 o posterior", + "typeAliasTypeBadScope": "Un alias de tipo solo se puede definir dentro de un ámbito de módulo o clase", + "typeAliasTypeBaseClass": "Un alias de tipo definido en una instrucción \"type\" no se puede usar como clase base", + "typeAliasTypeMustBeAssigned": "TypeAliasType debe asignarse a una variable con el mismo nombre que el alias de tipo", + "typeAliasTypeNameArg": "El primer argumento de TypeAliasType debe ser un literal de cadena que represente el nombre del alias de tipo", + "typeAliasTypeNameMismatch": "El nombre del alias de tipo debe coincidir con el nombre de la variable a la que se asigna", + "typeAliasTypeParamInvalid": "La lista de parámetros de tipo debe ser una tuple que contenga solo TypeVar, TypeVarTuple o ParamSpec.", + "typeAnnotationCall": "No se permite la expresión de llamada en la expresión de tipo", + "typeAnnotationVariable": "Variable no permitida en la expresión de 
tipo", + "typeAnnotationWithCallable": "El argumento de tipo para \"type\" debe ser una clase; no se admiten invocables", + "typeArgListExpected": "ParamSpec esperado, elipsis o lista de tipos", + "typeArgListNotAllowed": "Expresión de lista no permitida para este argumento de tipo", + "typeArgsExpectingNone": "No se esperaban argumentos de tipo para la clase \"{name}\"", + "typeArgsMismatchOne": "Se esperaba un argumento de tipo pero ha recibido {received}", + "typeArgsMissingForAlias": "Argumentos de tipo esperados para el alias de tipo genérico \"{name}\"", + "typeArgsMissingForClass": "Se esperaban argumentos de tipo para la clase genérica \"{name}\"", + "typeArgsTooFew": "Se han proporcionado muy pocos argumentos de tipo para \"{name}\"; se esperaba {expected} pero se ha recibido {received}.", + "typeArgsTooMany": "Se proporcionaron demasiados argumentos de tipo para \"{name}\"; se esperaba {expected}, pero se recibieron {received}", + "typeAssignmentMismatch": "El tipo \"{sourceType}\" no se puede asignar al tipo declarado \"{destType}\"", + "typeAssignmentMismatchWildcard": "El símbolo de importación \"{name}\" tiene el tipo \"{sourceType}\", que no se puede asignar al tipo declarado \"{destType}\"", + "typeCallNotAllowed": "La llamada a type() no debe utilizarse en la expresión de tipo", + "typeCheckOnly": "\"{name}\" está marcado como @type_check_only y solo se puede usar en anotaciones de tipo", + "typeCommentDeprecated": "El uso de comentarios de type está obsoleto; utilice en su lugar anotaciones de type.", + "typeExpectedClass": "Se esperaba la clase pero se recibió \"{type}\"", + "typeFormArgs": "\"TypeForm\" acepta un único argumento posicional", + "typeGuardArgCount": "Se esperaba un único argumento de tipo después de \"TypeGuard\" o \"TypeIs\"", + "typeGuardParamCount": "Las funciones y métodos de protección de tipo definidos por el usuario deben tener al menos un parámetro de entrada", + "typeIsReturnType": "El tipo de valor devuelto de TypeIs 
(\"{returnType}\") no es coherente con el tipo de parámetro de valor (\"{type}\")", + "typeNotAwaitable": "\"{type}\" no es awaitable", + "typeNotIntantiable": "\"{type}\" no se puede instanciar", + "typeNotIterable": "\"{type}\" no es iterable", + "typeNotSpecializable": "No se pudo especializar el tipo \"{type}\"", + "typeNotSubscriptable": "Objeto de tipo \"{type}\" no es subscriptible", + "typeNotSupportBinaryOperator": "El operador \"{operator}\" no se admite para los tipos \"{leftType}\" y \"{rightType}\"", + "typeNotSupportBinaryOperatorBidirectional": "No se admite el operador \"{operator}\" para los tipos \"{leftType}\" y \"{rightType}\" cuando el tipo esperado es \"{expectedType}\"", + "typeNotSupportUnaryOperator": "El operador \"{operator}\" no se admite para el tipo \"{type}\"", + "typeNotSupportUnaryOperatorBidirectional": "Operador \"{operator}\" no admitido para el tipo \"{type}\" cuando el tipo esperado es \"{expectedType}\"", + "typeNotUsableWith": "El objeto de tipo \"{type}\" no se puede usar con \"with\" porque no implementa correctamente {method}", + "typeNotUsableWithAsync": "El objeto de tipo \"{type}\" no se puede usar con \"async with\" porque no implementa correctamente {method}", + "typeParameterBoundNotAllowed": "No se pueden usar límites o restricciones con un parámetro de tipo variádico o ParamSpec", + "typeParameterConstraintTuple": "La restricción del parámetro de tipo debe ser una tupla de dos o más tipos", + "typeParameterExistingTypeParameter": "El parámetro de tipo \"{name}\" ya está en uso", + "typeParameterNotDeclared": "El parámetro de tipo \"{name}\" no está incluido en la lista de parámetros de tipo para \"{container}\"", + "typeParametersMissing": "Debe especificarse al menos un parámetro de tipo", + "typePartiallyUnknown": "El tipo de \"{name}\" es parcialmente desconocido", + "typeUnknown": "El tipo de \"{name}\" es desconocido", + "typeVarAssignedName": "TypeVar debe asignarse a una variable llamada \"{name}\"", + 
"typeVarAssignmentMismatch": "No se puede asignar el tipo \"{type}\" a la variable de tipo \"{name}\"", + "typeVarBoundAndConstrained": "TypeVar no puede estar ligado y restringido a la vez", + "typeVarBoundGeneric": "El tipo vinculado TypeVar no puede ser genérico", + "typeVarConstraintGeneric": "El tipo de restricción TypeVar no puede ser genérico", + "typeVarDefaultBoundMismatch": "El tipo predeterminado TypeVar debe ser un subtipo del tipo enlazado.", + "typeVarDefaultConstraintMismatch": "El tipo predeterminado TypeVar debe ser uno de los tipos restringidos.", + "typeVarDefaultIllegal": "Los tipos predeterminados de variable de tipo requieren Python 3.13 o posterior", + "typeVarDefaultInvalidTypeVar": "El parámetro de tipo \"{name}\" tiene un tipo por defecto que hace referencia a una o más variables de tipo que están fuera de ámbito.", + "typeVarFirstArg": "Nombre esperado de TypeVar como primer argumento", + "typeVarInvalidForMemberVariable": "El tipo de atributo no puede usar una variable de tipo \"{name}\" con ámbito de método local", + "typeVarNoMember": "TypeVar \"{type}\" no tiene ningún atributo \"{name}\"", + "typeVarNotSubscriptable": "TypeVar \"{type}\" no es subscribible", + "typeVarNotUsedByOuterScope": "La variable de tipo \"{name}\" no tiene ningún significado en este contexto", + "typeVarPossiblyUnsolvable": "La variable de tipo \"{name}\" puede quedar sin resolver si el autor de la llamada no proporciona ningún argumento para el parámetro \"{param}\"", + "typeVarSingleConstraint": "TypeVar debe tener al menos dos tipos restringidos", + "typeVarTupleConstraints": "TypeVarTuple no puede tener restricciones de valor", + "typeVarTupleContext": "TypeVarTuple no está permitido en este contexto", + "typeVarTupleDefaultNotUnpacked": "El tipo predeterminado TypeVarTuple debe ser una tuple desempaquetada o TypeVarTuple", + "typeVarTupleMustBeUnpacked": "Se requiere el operador Unpack para el valor TypeVarTuple.", + "typeVarTupleUnknownParam": 
"\"{name}\" es un parámetro desconocido para TypeVarTuple", + "typeVarUnknownParam": "\"{name}\" es un parámetro desconocido para TypeVar", + "typeVarUsedByOuterScope": "TypeVar \"{name}\" ya está en uso por un ámbito externo", + "typeVarUsedOnlyOnce": "TypeVar \"{name}\" solo aparece una vez en la firma de la función genérica", + "typeVarVariance": "TypeVar no puede ser covariante y contravariante", + "typeVarWithDefaultFollowsVariadic": "TypeVar \"{typeVarName}\" tiene un valor predeterminado y no puede seguir a TypeVarTuple \"{variadicName}\"", + "typeVarWithoutDefault": "\"{name}\" no puede aparecer después de \"{other}\" en la lista de parámetros de tipo porque no tiene ningún tipo predeterminado", + "typeVarsNotInGenericOrProtocol": "Generic[] o Protocol[] deben incluir todas las variables de tipo", + "typedDictAccess": "No se ha podido acceder al elemento en TypedDict", + "typedDictAssignedName": "TypedDict debe asignarse a una variable denominada \"{name}\"", + "typedDictBadVar": "Las clases TypedDict solo pueden contener anotaciones de tipo", + "typedDictBaseClass": "Todas las clases base de las clases TypedDict deben ser también clases TypedDict", + "typedDictBoolParam": "Se esperaba que el parámetro \"{name}\" tuviera un valor de True o False.", + "typedDictClosedExtras": "El \"{name}\" de clase base es un TypedDict que limita el tipo de elementos adicionales al tipo \"{type}\"", + "typedDictClosedFalseNonOpenBase": "La clase base \"{name}\" no es un TypedDict abierto; closed=False no está permitido", + "typedDictClosedNoExtras": "La clase base \"{name}\" es un TypedDict closed; no se permiten elementos adicionales", + "typedDictDelete": "No se puede eliminar un elemento en TypedDict", + "typedDictEmptyName": "Los nombres de un TypedDict no pueden estar vacíos", + "typedDictEntryName": "Cadena literal esperada para el nombre de la entrada del diccionario", + "typedDictEntryUnique": "Los nombres dentro de un diccionario deben ser únicos", + 
"typedDictExtraArgs": "No se admiten argumentos TypedDict adicionales", + "typedDictExtraItemsClosed": "TypedDict puede usar \"closed\" o \"extra_items\", pero no ambos", + "typedDictFieldNotRequiredRedefinition": "El elemento TypedDict \"{name}\" no se puede redefinir como NotRequired", + "typedDictFieldReadOnlyRedefinition": "El elemento TypedDict \"{name}\" no se puede redefinir como ReadOnly", + "typedDictFieldRequiredRedefinition": "El elemento TypedDict \"{name}\" no se puede redefinir como Required", + "typedDictFirstArg": "Nombre de clase TypedDict esperado como primer argumento", + "typedDictInClassPattern": "No se permite la clase TypedDict en el patrón de clase", + "typedDictInitsubclassParameter": "TypedDict no admite __init_subclass__ parámetro \"{name}\"", + "typedDictNotAllowed": "\"TypedDict\" no puede utilizarse en este contexto", + "typedDictSecondArgDict": "Parámetro dict o palabra clave esperado como segundo parámetro", + "typedDictSecondArgDictEntry": "Entrada de diccionario simple esperada", + "typedDictSet": "No se pudo asignar el elemento en TypedDict", + "unaccessedClass": "No se accede a la clase \"{name}\"", + "unaccessedFunction": "No se accede a la función \"{name}\"", + "unaccessedImport": "No se accede a la importación \"{name}\"", + "unaccessedSymbol": "No se accede a \"{name}\"", + "unaccessedVariable": "No se accede a la variable \"{name}\".", + "unannotatedFunctionSkipped": "Se omite el análisis de la función \"{name}\" porque no está anotada", + "unaryOperationNotAllowed": "Operador unario no permitido en la expresión de tipo", + "unexpectedAsyncToken": "Se esperaba que \"def\", \"with\" o \"for\" siguieran a \"async\".", + "unexpectedEof": "EOF inesperado", + "unexpectedExprToken": "Token inesperado al final de la expresión", + "unexpectedIndent": "Sangría inesperada", + "unexpectedUnindent": "Anulación de sangría inesperada", + "unhashableDictKey": "La clave del diccionario debe ser hashable", + "unhashableSetEntry": "La 
entrada del set debe ser hashable", + "uninitializedAbstractVariables": "Las variables definidas en la clase base abstracta no se inicializan en la clase final \"{classType}\"", + "uninitializedInstanceVariable": "La variable de instancia \"{name}\" no está inicializada en el cuerpo de la clase o en el método __init__.", + "unionForwardReferenceNotAllowed": "La sintaxis de Union no se puede usar con un operando de cadena; use comillas alrededor de toda la expresión", + "unionSyntaxIllegal": "La sintaxis alternativa para las uniones requiere Python 3.10 o posterior.", + "unionTypeArgCount": "Union requiere dos o más argumentos de tipo", + "unionUnpackedTuple": "La Union no puede incluir una tuple desempaquetada", + "unionUnpackedTypeVarTuple": "La Union no puede incluir un TypeVarTuple desempaquetado", + "unnecessaryCast": "Llamada \"cast\" innecesaria; el tipo ya es \"{type}\"", + "unnecessaryIsInstanceAlways": "Llamada isinstance innecesaria; \"{testType}\" es siempre una instancia de \"{classType}\"", + "unnecessaryIsInstanceNever": "Llamada isinstance innecesaria; \"{testType}\" nunca es una instancia de \"{classType}\"", + "unnecessaryIsSubclassAlways": "Llamada de issubclass innecesaria; \"{testType}\" siempre es una subclase de \"{classType}\"", + "unnecessaryIsSubclassNever": "Llamada de issubclass innecesaria; \"{testType}\" nunca es una subclase de \"{classType}\"", + "unnecessaryPyrightIgnore": "Comentario \"# pyright: ignore\" innecesario", + "unnecessaryPyrightIgnoreRule": "Regla innecesaria \"# pyright: ignore\": \"{name}\"", + "unnecessaryTypeIgnore": "Comentario \"# type: ignore\" innecesario", + "unpackArgCount": "Se esperaba un único argumento de tipo después de \"Unpack\"", + "unpackExpectedTypeVarTuple": "Se esperaba TypeVarTuple o tuple como argumento de tipo para Unpack", + "unpackExpectedTypedDict": "Se esperaba un argumento de tipo TypedDict para Unpack", + "unpackIllegalInComprehension": "Operación de desempaquetado no permitida en la comprensión", + 
"unpackInAnnotation": "No se permite el operador desempaquetado en la expresión de tipo", + "unpackInDict": "Operación de desempaquetado no permitida en diccionarios", + "unpackInSet": "No se permite el operador Unpack dentro de un set", + "unpackNotAllowed": "Unpack no está permitido en este contexto", + "unpackOperatorNotAllowed": "La operación de desempaquetado no está permitida en este contexto", + "unpackTuplesIllegal": "Operación de desempaquetado no permitida en tuplas anteriores a Python 3.8", + "unpackedArgInTypeArgument": "No se pueden usar argumentos sin empaquetar en este contexto", + "unpackedArgWithVariadicParam": "No se puede usar un argumento desempaquetado para el parámetro TypeVarTuple", + "unpackedDictArgumentNotMapping": "La expresión del argumento después de ** debe ser un mapeo con un tipo de clave \"str\".", + "unpackedDictSubscriptIllegal": "El operador de desempaquetado del diccionario en el subíndice no está permitido", + "unpackedSubscriptIllegal": "El operador de desempaquetado en el subíndice requiere Python 3.11 o posterior.", + "unpackedTypeVarTupleExpected": "Se espera un TypeVarTuple desempaquetado; use Unpack[{name1}] o *{name2}", + "unpackedTypedDictArgument": "No se puede emparejar el argumento TypedDict desempaquetado con los parámetros", + "unreachableCodeCondition": "El código no se analiza porque la condición se evalúa estáticamente como false", + "unreachableCodeStructure": "El código es estructuralmente inaccesible", + "unreachableCodeType": "El análisis de tipos indica que no se puede acceder al código", + "unreachableExcept": "La cláusula Excepto es inalcanzable porque la excepción ya está administrada", + "unsupportedDunderAllOperation": "No se admite la operación en \"__all__\", por lo que la lista de símbolos exportada puede ser incorrecta.", + "unusedCallResult": "El resultado de la expresión de llamada es de tipo \"{type}\" y no se usa; asignar a la variable \"_\" si esto es intencionado", + "unusedCoroutine": "El 
resultado de la llamada a una función async no se utiliza; utilice \"await\" o asigne el resultado a una variable.", + "unusedExpression": "El valor de expresión no se usa", + "varAnnotationIllegal": "Las anotaciones de type para variables requieren Python 3.6 o posterior; utilice el comentario de tipo para la compatibilidad con versiones anteriores.", + "variableFinalOverride": "La variable \"{name}\" está marcada como Final y anula la variable no Final del mismo nombre en la clase \"{className}\".", + "variadicTypeArgsTooMany": "La lista de argumentos de tipo puede tener como máximo una TypeVarTuple o tuple desempaquetada", + "variadicTypeParamTooManyAlias": "Los alias de tipo pueden tener como máximo un parámetro de tipo TypeVarTuple, pero reciben varios ({names})", + "variadicTypeParamTooManyClass": "La clase genérica puede tener como máximo un parámetro de tipo TypeVarTuple pero recibió múltiples ({names})", + "walrusIllegal": "El operador \":=\" requiere Python 3.8 o posterior", + "walrusNotAllowed": "El operador \":=\" no está permitido en este contexto sin paréntesis alrededor", + "wildcardInFunction": "No se permite la importación de comodines dentro de una clase o función", + "wildcardLibraryImport": "No se permite la importación de caracteres comodín desde una biblioteca", + "wildcardPatternTypePartiallyUnknown": "El tipo capturado por el patrón comodín es parcialmente desconocido", + "wildcardPatternTypeUnknown": "Se desconoce el tipo capturado por el patrón de caracteres comodín", + "yieldFromIllegal": "El uso de \"yield from\" requiere Python 3.3 o posterior.", + "yieldFromOutsideAsync": "\"yield from\" no permitido en una función async", + "yieldOutsideFunction": "\"yield\" no se permite fuera de una función o lambda", + "yieldWithinComprehension": "\"yield\" no está permitido dentro de una comprensión de lista", + "zeroCaseStatementsFound": "La instrucción Match debe incluir al menos una instrucción case", + "zeroLengthTupleNotAllowed": "La tuple de 
longitud cero no está permitida en este contexto" + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "El formulario especial \"Annotated\" no se puede usar con comprobaciones de instancia y clase", + "argParam": "El argumento corresponde al parámetro \"{paramName}\"", + "argParamFunction": "El argumento corresponde al parámetro \"{paramName}\" en la función \"{functionName}\"", + "argsParamMissing": "El parámetro \"*{paramName}\" no tiene ningún parámetro correspondiente", + "argsPositionOnly": "Error de coincidencia del parámetro de solo posición; se esperaba {expected}, pero se recibieron {received}", + "argumentType": "El tipo de argumento es \"{type}\"", + "argumentTypes": "Tipos de argumento: ({types})", + "assignToNone": "El tipo no se puede asignar a \"None\"", + "asyncHelp": "¿Quería decir \"async with\"?", + "baseClassIncompatible": "La clase base \"{baseClass}\" no es compatible con el tipo \"{type}\"", + "baseClassIncompatibleSubclass": "La clase base \"{baseClass}\" deriva de \"{subclass}\", que no es compatible con el tipo \"{type}\"", + "baseClassOverriddenType": "La clase base \"{baseClass}\" proporciona el tipo \"{type}\", que se sobrescribe", + "baseClassOverridesType": "Invalidaciones de clase base \"{baseClass}\" con el tipo \"{type}\"", + "bytesTypePromotions": "Establezca disableBytesTypePromotions en false para activar el comportamiento de promoción de tipos para \"bytearray\" y \"memoryview\".", + "conditionalRequiresBool": "El método __bool__ para el tipo \"{operandType}\" devuelve el tipo \"{boolReturnType}\" en lugar de \"bool\"", + "dataClassFieldLocation": "en declaración de campo", + "dataClassFrozen": "\"{name}\" está congelado", + "dataProtocolUnsupported": "\"{name}\" es un protocolo de datos", + "descriptorAccessBindingFailed": "No se pudo enlazar el método \"{name}\" para la clase de descriptor \"{className}\"", + "descriptorAccessCallFailed": "No se pudo llamar al método \"{name}\" para la clase de descriptor 
\"{className}\"", + "finalMethod": "Método final", + "functionParamDefaultMissing": "Falta el argumento predeterminado en el parámetro \"{name}\"", + "functionParamName": "Nombre de parámetro no coincidente: \"{destName}\" frente a \"{srcName}\"", + "functionParamPositionOnly": "Error de coincidencia del parámetro de solo posición; el parámetro \"{name}\" no es de solo posición", + "functionReturnTypeMismatch": "El tipo de valor devuelto de la función \"{sourceType}\" no es compatible con el tipo \"{destType}\"", + "functionTooFewParams": "La función acepta muy pocos parámetros posicionales; esperado {expected} pero recibido {received}", + "functionTooManyParams": "La función acepta demasiados parámetros posicionales; esperado {expected} pero recibido {received}", + "genericClassNotAllowed": "Tipo genérico con argumentos de tipo no permitidos para comprobaciones de instancia o clase", + "incompatibleDeleter": "El método de deleter de property no es compatible", + "incompatibleGetter": "El método getter de property no es compatible", + "incompatibleSetter": "El método setter de la property no es compatible", + "initMethodLocation": "El método __init__ se define en la clase \"{type}\"", + "initMethodSignature": "La firma de __init__ es \"{type}\"", + "initSubclassLocation": "El método __init_subclass__ se define en la clase \"{name}\"", + "invariantSuggestionDict": "Considere cambiar de \"dict\" a \"Mapping\" que es covariante en el tipo de valor", + "invariantSuggestionList": "Considere la posibilidad de cambiar de \"list\" a \"Sequence\" que es covariante", + "invariantSuggestionSet": "Considere la posibilidad de cambiar de \"set\" a \"Container\" que es covariante", + "isinstanceClassNotSupported": "\"{type}\" no se admite para las comprobaciones de instancia y clase", + "keyNotRequired": "\"{name}\" no es una clave necesaria en \"{type}\", por lo que el acceso puede dar lugar a una excepción en tiempo de ejecución", + "keyReadOnly": "\"{name}\" es una clave de 
solo lectura en \"{type}\"", + "keyRequiredDeleted": "\"{name}\" es una clave necesaria y no se puede eliminar", + "keyUndefined": "\"{name}\" no es una clave definida en \"{type}\"", + "kwargsParamMissing": "El parámetro \"**{paramName}\" no tiene ningún parámetro correspondiente.", + "listAssignmentMismatch": "El tipo \"{type}\" es incompatible con la lista de objetivos", + "literalAssignmentMismatch": "\"{sourceType}\" no se puede asignar al tipo \"{destType}\"", + "literalNotAllowed": "El formulario especial \"Literal\" no se puede usar con comprobaciones de instancia y clase", + "matchIsNotExhaustiveHint": "Si no se pretende un tratamiento exhaustivo, agregue \"case _: pass\"", + "matchIsNotExhaustiveType": "Tipo no manejado: \"{type}\"", + "memberAssignment": "La expresión de tipo \"{type}\" no se puede asignar al atributo \"{name}\" de la clase \"{classType}\"", + "memberIsAbstract": "\"{type}. {name}\" no está implementado", + "memberIsAbstractMore": "y {count} más...", + "memberIsClassVarInProtocol": "\"{name}\" se define como ClassVar en el protocolo", + "memberIsInitVar": "\"{name}\" es un campo init-only", + "memberIsInvariant": "\"{name}\" es invariable porque es mutable", + "memberIsNotClassVarInClass": "\"{name}\" debe definirse como ClassVar para que sea compatible con el protocolo", + "memberIsNotClassVarInProtocol": "\"{name}\" no está definido como ClassVar en el protocolo", + "memberIsNotReadOnlyInProtocol": "\"{name}\" no es de solo lectura en el protocolo", + "memberIsReadOnlyInProtocol": "\"{name}\" es de solo lectura en el protocolo", + "memberIsWritableInProtocol": "\"{name}\" se puede escribir en el protocolo", + "memberSetClassVar": "El atributo \"{name}\" no se puede asignar a través de una instancia de clase porque es un ClassVar.", + "memberTypeMismatch": "\"{name}\" es un tipo incompatible", + "memberUnknown": "Atributo \"{name}\" desconocido", + "metaclassConflict": "La metaclase \"{metaclass1}\" entra en conflicto con 
\"{metaclass2}\"", + "missingDeleter": "Falta el método de deleter de property", + "missingGetter": "Falta el método getter de la property", + "missingSetter": "Falta el método setter de property", + "namedParamMissingInDest": "Parámetro adicional \"{name}\"", + "namedParamMissingInSource": "Falta el parámetro de palabra clave \"{name}\"", + "namedParamTypeMismatch": "El parámetro de palabra clave \"{name}\" de tipo \"{sourceType}\" no es compatible con el tipo \"{destType}\"", + "namedTupleNotAllowed": "No se puede usar NamedTuple para comprobaciones de instancia o clase", + "newMethodLocation": "El método __new__ está definido en la clase \"{type}\"", + "newMethodSignature": "La firma de __new__ es \"{type}\"", + "newTypeClassNotAllowed": "El tipo creado con NewType no se puede usar con comprobaciones de instancia y clase", + "noOverloadAssignable": "Ninguna función sobrecargada coincide con el tipo \"{type}\"", + "noneNotAllowed": "No se puede usar None para comprobaciones de instancia o clase", + "orPatternMissingName": "Nombres que faltan: {name}", + "overloadIndex": "La sobrecarga {index} es la coincidencia más cercana", + "overloadNotAssignable": "Una o más sobrecargas de \"{name}\" no son asignables", + "overloadSignature": "Aquí se define la firma de la sobrecarga", + "overriddenMethod": "Método reemplazado", + "overriddenSymbol": "Símbolo anulado", + "overrideInvariantMismatch": "El tipo de invalidación \"{overrideType}\" no es el mismo que el tipo básico \"{baseType}\"", + "overrideIsInvariant": "La variable es mutable, por lo que su tipo es invariable", + "overrideNoOverloadMatches": "Ninguna firma de sobrecarga en anulación es compatible con el método base", + "overrideNotClassMethod": "El método base se declara como classmethod pero la invalidación no", + "overrideNotInstanceMethod": "El método base se declara como método de instancia, pero la invalidación no", + "overrideNotStaticMethod": "El método base se declara como staticmethod pero el reemplazo no", + 
"overrideOverloadNoMatch": "La invalidación no controla todas las sobrecargas del método base", + "overrideOverloadOrder": "Las sobrecargas para el método de anulación deben estar en el mismo orden que el método base", + "overrideParamKeywordNoDefault": "El parámetro de palabra clave \"{name}\" no coincide: el parámetro base tiene el valor de argumento predeterminado, el parámetro de invalidación no", + "overrideParamKeywordType": "El parámetro de palabra clave \"{name}\" no coincide: el parámetro base es de tipo \"{baseType}\", el parámetro de invalidación es de tipo \"{overrideType}\"", + "overrideParamName": "El nombre del parámetro {index} no coincide: el parámetro base se denomina \"{baseName}\", el parámetro de invalidación se denomina \"{overrideName}\"", + "overrideParamNameExtra": "Falta el parámetro \"{name}\" en la base", + "overrideParamNameMissing": "Falta el parámetro \"{name}\" en la invalidación", + "overrideParamNamePositionOnly": "El parámetro {index} no coincide: el parámetro base \"{baseName}\" es un parámetro de palabra clave, el parámetro de invalidación es de solo posición", + "overrideParamNoDefault": "El parámetro {index} no coincide: el parámetro base tiene un valor de argumento predeterminado, el parámetro de invalidación no", + "overrideParamType": "El parámetro {index} no coincide: el parámetro base es de tipo \"{baseType}\", el parámetro de invalidación es de tipo \"{overrideType}\"", + "overridePositionalParamCount": "El recuento de parámetros posicionales no coincide; el método base tiene {baseCount}, pero la invalidación tiene {overrideCount}", + "overrideReturnType": "Error de tipo de retorno: el método base devuelve el tipo \"{baseType}\", el reemplazo devuelve el tipo \"{overrideType}\".", + "overrideType": "La clase base define el tipo como \"{type}\"", + "paramAssignment": "El parámetro {index}: el tipo \"{sourceType}\" no es compatible con el tipo \"{destType}\"", + "paramSpecMissingInOverride": "Faltan parámetros ParamSpec en 
el método de invalidación", + "paramType": "El tipo de parámetro es \"{paramType}\"", + "privateImportFromPyTypedSource": "Importar desde \"{module}\" en su lugar", + "propertyAccessFromProtocolClass": "No se puede tener acceso a una propiedad definida dentro de una clase de protocolo como variable de clase", + "propertyMethodIncompatible": "El método de property \"{name}\" no es compatible", + "propertyMethodMissing": "Falta el método de property \"{name}\" en la invalidación", + "propertyMissingDeleter": "Property \"{name}\" no tiene un supresor deleter", + "propertyMissingSetter": "Property \"{name}\" no tiene el valor setter definido", + "protocolIncompatible": "\"{sourceType}\" no es compatible con el protocolo \"{destType}\"", + "protocolMemberMissing": "\"{name}\" no está presente.", + "protocolRequiresRuntimeCheckable": "La clase de Protocol debe ser @runtime_checkable para usarse con comprobaciones de instancia y clase", + "protocolSourceIsNotConcrete": "\"{sourceType}\" no es un tipo de clase concreto y no se puede asignar al tipo \"{destType}\"", + "protocolUnsafeOverlap": "Los atributos de \"{name}\" tienen los mismos nombres que el protocolo", + "pyrightCommentIgnoreTip": "Utilice \"# pyright: ignore[]\" para suprimir el diagnóstico de una sola línea", + "readOnlyAttribute": "El atributo \"{name}\" es de solo lectura", + "seeClassDeclaration": "Ver declaración de clase", + "seeDeclaration": "Ver declaración", + "seeFunctionDeclaration": "Ver declaración de función", + "seeMethodDeclaration": "Consulte la declaración del método", + "seeParameterDeclaration": "Declaración de parámetro", + "seeTypeAliasDeclaration": "Véase la declaración de alias de tipo", + "seeVariableDeclaration": "Ver declaración de variable", + "tupleAssignmentMismatch": "El tipo \"{type}\" no es compatible con la tuple de destino", + "tupleEntryTypeMismatch": "La entrada {entry} de la tuple es de tipo incorrecto", + "tupleSizeIndeterminateSrc": "El tamaño de la tuple no coincide; 
se esperaba {expected} pero se recibió uno indeterminado", + "tupleSizeIndeterminateSrcDest": "El tamaño de la tuple no coincide; se esperaba {expected} o más, pero se recibió uno indeterminado", + "tupleSizeMismatch": "El tamaño de la tuple no coincide; se esperaba {expected} pero se recibió {received}", + "tupleSizeMismatchIndeterminateDest": "El tamaño de la tuple no coincide; se esperaba {expected} o más, pero se recibió {received}", + "typeAliasInstanceCheck": "El alias de tipo creado con la instrucción \"type\" no se puede usar con comprobaciones de instancia y clase", + "typeAssignmentMismatch": "El tipo \"{sourceType}\" no se puede asignar al tipo \"{destType}\"", + "typeBound": "El tipo \"{sourceType}\" no se puede asignar al límite superior \"{destType}\" para la variable de tipo \"{name}\"", + "typeConstrainedTypeVar": "El tipo \"{type}\" no se puede asignar a la variable de tipo restringido \"{name}\"", + "typeIncompatible": "\"{sourceType}\" no se puede asignar a \"{destType}\"", + "typeNotClass": "\"{type}\" no es una clase", + "typeNotStringLiteral": "\"{type}\" no es un literal de cadena", + "typeOfSymbol": "El tipo de \"{name}\" es \"{type}\"", + "typeParamSpec": "El tipo \"{type}\" no es compatible con ParamSpec \"{name}\"", + "typeUnsupported": "El tipo \"{type}\" no es compatible", + "typeVarDefaultOutOfScope": "La variable de tipo \"{name} \" no está en el ámbito de aplicación", + "typeVarIsContravariant": "El parámetro de tipo \"{name}\" es contravariante, pero \"{sourceType}\" no es un supertipo de \"{destType}\"", + "typeVarIsCovariant": "El parámetro de tipo \"{name}\" es covariante, pero \"{sourceType}\" no es un subtipo de \"{destType}\"", + "typeVarIsInvariant": "El parámetro de tipo \"{name}\" es invariable, pero \"{sourceType}\" no es el mismo que \"{destType}\"", + "typeVarNotAllowed": "TypeVar no se permite para comprobaciones de instancia o clase", + "typeVarTupleRequiresKnownLength": "TypeVarTuple no se puede enlazar a una tuple de 
longitud desconocida", + "typeVarUnnecessarySuggestion": "Usar {type} en su lugar", + "typeVarUnsolvableRemedy": "Proporciona una sobrecarga que especifica el tipo de retorno cuando no se proporciona el argumento", + "typeVarsMissing": "Faltan variables de tipo: {names}", + "typedDictBaseClass": "La clase \"{type}\" no es un TypedDict", + "typedDictClassNotAllowed": "No se permite la clase TypedDict para comprobaciones de instancia o clase", + "typedDictClosedExtraNotAllowed": "No se puede agregar el elemento \"{name}\"", + "typedDictClosedExtraTypeMismatch": "No se puede agregar el elemento \"{name}\" con el tipo \"{type}\"", + "typedDictClosedFieldNotReadOnly": "No se puede agregar el elemento \"{name}\" porque debe ser ReadOnly", + "typedDictClosedFieldNotRequired": "No se puede agregar el elemento \"{name}\" porque debe ser NotRequired.", + "typedDictExtraFieldNotAllowed": "\"{name}\" no está presente en \"{type}\"", + "typedDictExtraFieldTypeMismatch": "El tipo de \"{name}\" no es compatible con el tipo de \"extra_items\" en \"{type}\"", + "typedDictFieldMissing": "\"{name}\" falta en \"{type}\"", + "typedDictFieldNotReadOnly": "\"{name}\" no es de solo lectura en \"{type}\"", + "typedDictFieldNotRequired": "\"{name}\" no es obligatorio en \"{type}\"", + "typedDictFieldRequired": "\"{name}\" es obligatorio en \"{type}\"", + "typedDictFieldTypeMismatch": "El tipo \"{type}\" no se puede asignar al elemento \"{name}\"", + "typedDictFieldUndefined": "\"{name}\" es un elemento no definido en el tipo \"{type}\"", + "typedDictKeyAccess": "Utilizar [\"{name}\"] para hacer referencia al elemento en TypedDict", + "typedDictNotAllowed": "No se puede usar TypedDict para comprobaciones de instancia o clase", + "unhashableType": "El tipo \"{type}\" no admite hash", + "uninitializedAbstractVariable": "La variable de instancia \"{name}\" está definida en la clase base abstracta \"{classType}\" pero no inicializada.", + "unreachableExcept": "\"{exceptionType}\" es una subclase 
de \"{parentType}\"", + "useDictInstead": "Utilice dict[T1, T2] para indicar un tipo de diccionario", + "useListInstead": "Utilice list[T] para indicar un tipo de list o T1 | T2 para indicar un tipo de union", + "useTupleInstead": "Utilice tuple[T1, ..., Tn] para indicar un tipo de tuple o T1 | T2 para indicar un tipo de union", + "useTypeInstead": "Utilice type[T] en su lugar", + "varianceMismatchForClass": "La varianza del argumento de tipo \"{typeVarName}\" no es compatible con la clase base \"{className}\"", + "varianceMismatchForTypeAlias": "La varianza del argumento de tipo \"{typeVarName}\" no es compatible con \"{typeAliasParam}\"" + }, + "Service": { + "longOperation": "La enumeración de los archivos de origen del área de trabajo está tardando mucho tiempo. Considere la posibilidad de abrir una subcarpeta en su lugar. [Más información](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.fr.json b/python-parser/packages/pyright-internal/src/localization/package.nls.fr.json new file mode 100644 index 00000000..7ab15681 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.fr.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "Créer un Stub de type", + "createTypeStubFor": "Créer un Stub de type pour « {moduleName} »", + "executingCommand": "Exécution de la commande", + "filesToAnalyzeCount": "{count} fichiers à analyser", + "filesToAnalyzeOne": "1 fichier à analyser", + "findingReferences": "Recherche de références", + "organizeImports": "Organiser les importations" + }, + "Completion": { + "autoImportDetail": "Importation automatique", + "indexValueDetail": "Valeur de l'indice" + }, + "Diagnostic": { + "abstractMethodInvocation": "Désolé, nous n’avons pas pu appeler la méthode « {method} », car elle est abstraite et non implémentée", + "annotatedMetadataInconsistent": "Le type de métadonnées annoté « {metadataType} » n’est pas 
compatible avec le type « {type} »", + "annotatedParamCountMismatch": "Non-concordance du nombre d'annotations de paramètre : attendu {expected} mais reçu {received}", + "annotatedTypeArgMissing": "Un argument de type et une ou plusieurs annotations sont attendus pour « Annotated »", + "annotationBytesString": "Les expressions de type ne peuvent pas utiliser de littéraux de chaîne d'octets", + "annotationFormatString": "Les expressions de type ne peuvent pas utiliser de littéraux de chaîne de format (chaînes f)", + "annotationNotSupported": "Annotation de type non prise en charge pour cette instruction", + "annotationRawString": "Les expressions de type ne peuvent pas utiliser de littéraux de chaîne bruts", + "annotationSpansStrings": "Les expressions de type ne peuvent pas s'étendre sur plusieurs littéraux de chaîne", + "annotationStringEscape": "Les expressions de type ne peuvent pas contenir de caractères d'échappement", + "annotationTemplateString": "Les expressions de type ne peuvent pas utiliser de littéraux de chaîne de modèle (t-strings)", + "argAssignment": "Impossible d’affecter l’argument de type « {argType} » au paramètre de type « {paramType} »", + "argAssignmentFunction": "Impossible d’affecter l’argument de type « {argType} » au paramètre de type « {paramType} » dans la fonction « {functionName} »", + "argAssignmentParam": "Impossible d’affecter l’argument de type « {argType} » au paramètre « {paramName} » de type « {paramType} »", + "argAssignmentParamFunction": "Impossible d’affecter l’argument de type « {argType} » au paramètre « {paramName} » de type « {paramType} » dans la fonction « {functionName} »", + "argMissingForParam": "Argument manquant pour le paramètre {name}", + "argMissingForParams": "Arguments manquants pour les paramètres {names}", + "argMorePositionalExpectedCount": "{expected} arguments positionnels supplémentaires attendus", + "argMorePositionalExpectedOne": "1 argument positionnel supplémentaire attendu", + "argPositional": 
"Argument positionnel attendu", + "argPositionalExpectedCount": "Arguments positionnels {expected} attendus", + "argPositionalExpectedOne": "1 argument positionnel attendu", + "argTypePartiallyUnknown": "Le type d'argument est partiellement inconnu", + "argTypeUnknown": "Le type d’argument est inconnu", + "assertAlwaysTrue": "L’expression Assert prend toujours la valeur true", + "assertTypeArgs": "« assert_type » attend deux arguments positionnels", + "assertTypeTypeMismatch": "Non-concordance de « assert_type » : « {expected} » attendu, mais « {received} »", + "assignmentExprComprehension": "La cible d'expression d'affectation \"{name}\" ne peut pas utiliser le même nom que la compréhension pour la cible", + "assignmentExprContext": "L’expression d’assignation doit se trouver dans le module, la fonction ou l’expression lambda", + "assignmentExprInSubscript": "Les expressions d’assignation dans un indice sont prises en charge uniquement dans Python 3.10 et versions ultérieures", + "assignmentInProtocol": "Les variables d'instance ou de classe au sein d'une classe Protocol doivent être explicitement déclarées dans le corps de la classe", + "assignmentTargetExpr": "L’expression ne peut pas être une cible d’assignation", + "asyncNotInAsyncFunction": "L'utilisation de \"async\" n'est pas autorisée en dehors de la fonction async", + "awaitIllegal": "L’utilisation de « await » nécessite Python 3.5 ou version ultérieure", + "awaitNotAllowed": "Les expressions de type ne peuvent pas utiliser « await »", + "awaitNotInAsync": "« await » autorisé uniquement dans la fonction asynchrone", + "backticksIllegal": "Les expressions entourées de accents inverses ne sont pas prises en charge dans Python 3.x; utiliser repr à la place", + "baseClassCircular": "La classe ne peut pas dériver d'elle-même", + "baseClassFinal": "La classe de base « {type} » est marquée comme finale et ne peut pas être sous-classée", + "baseClassIncompatible": "Les classes de base de {type} sont mutuellement 
incompatibles", + "baseClassInvalid": "L’argument pour la classe doit être une classe de base", + "baseClassMethodTypeIncompatible": "Les classes de base de la classe « {classType} » définissent la méthode « {name} » de manière incompatible", + "baseClassUnknown": "Le type de classe de base est inconnu, ce qui masque le type de classe dérivée", + "baseClassVariableTypeIncompatible": "Les classes de base de la classe « {classType} » définissent la variable « {name} » de manière incompatible", + "binaryOperationNotAllowed": "Opérateur binaire non autorisé dans l'expression de type", + "bindParamMissing": "Impossible de lier la méthode « {methodName} », car il manque un paramètre « self » ou « cls »", + "bindTypeMismatch": "Impossible de lier la méthode \"{methodName}\" car \"{type}\" n'est pas attribuable au paramètre \"{paramName}\"", + "breakInExceptionGroup": "« break » n’est pas autorisé dans un bloc « except* »", + "breakOutsideLoop": "\"break\" ne peut être utilisé qu'à l'intérieur d'une boucle", + "bytesUnsupportedEscape": "Séquence d’échappement non prise en charge dans le littéral de bytes", + "callableExtraArgs": "Seuls deux arguments de type sont attendus pour « Callable »", + "callableFirstArg": "Liste de types de paramètres attendue ou « ... 
»", + "callableNotInstantiable": "Impossible d’instancier le type « {type} »", + "callableSecondArg": "Type de retour attendu en tant que deuxième argument de type pour « Callable »", + "casePatternIsIrrefutable": "Le motif irréfutable n'est autorisé que pour la dernière instruction case", + "classAlreadySpecialized": "Le type « {type} » est déjà spécialisé", + "classDecoratorTypeUnknown": "Le décorateur de classe non typé masque le type de classe ; décorateur ignorant", + "classDefinitionCycle": "La définition de classe pour \"{name}\" dépend d'elle-même", + "classGetItemClsParam": "__class_getitem__ remplacement doit prendre un paramètre « cls »", + "classMethodClsParam": "Les méthodes de classe doivent prendre un paramètre \"cls\"", + "classNotRuntimeSubscriptable": "L'indice pour la classe « {name} » générera une exception d'exécution ; placez l'expression de type entre guillemets", + "classPatternBuiltInArgPositional": "Le modèle de classe accepte uniquement le sous-modèle positionnel", + "classPatternNewType": "« {type} » ne peut pas être utilisé dans un modèle de classe, car il est défini à l’aide de NewType", + "classPatternPositionalArgCount": "Trop de modèles positionnels pour les \"{type}\" de classe ; {expected} attendue mais {received} reçues", + "classPatternTypeAlias": "\"{type}\" ne peut pas être utilisé dans un modèle de classe car il s'agit d'un alias de type spécialisé", + "classPropertyDeprecated": "Les propriétés de classe sont obsolètes dans Python 3.11 et ne seront pas prises en charge dans Python 3.13", + "classTypeParametersIllegal": "La syntaxe du paramètre de type de classe nécessite Python 3.12 ou version ultérieure", + "classVarFirstArgMissing": "Attendu un argument de type après \"ClassVar\"", + "classVarNotAllowed": "« ClassVar » n’est pas autorisé dans ce contexte", + "classVarOverridesInstanceVar": "La variable de classe « {name} » remplace la variable d’instance du même nom dans la classe « {className} »", + "classVarTooManyArgs": 
"Attendu un seul argument de type après \"ClassVar\"", + "classVarWithTypeVar": "Le type \"ClassVar\" ne peut pas inclure de variables de type", + "clsSelfParamTypeMismatch": "Le type de paramètre \"{name}\" doit être un supertype de sa classe \"{classType}\"", + "codeTooComplexToAnalyze": "Le code est trop complexe à analyser ; réduire la complexité en refactorisant en sous-routines ou en réduisant les chemins de code conditionnels", + "collectionAliasInstantiation": "Le type \"{type}\" ne peut pas être instancié, utilisez \"{alias}\" à la place", + "comparisonAlwaysFalse": "La condition prend toujours la valeur False, car les types « {leftType} » et « {rightType} » ne se chevauchent pas", + "comparisonAlwaysTrue": "La condition prend toujours la valeur True, car les types « {leftType} » et « {rightType} » ne se chevauchent pas", + "comprehensionInDict": "La compréhension ne peut pas être utilisée avec d’autres entrées de dictionnaire", + "comprehensionInSet": "La compréhension ne peut pas être utilisée avec d’autres entrées set", + "concatenateContext": "« Concatenate » n’est pas autorisé dans ce contexte", + "concatenateParamSpecMissing": "Le dernier argument de type pour « Concatenate » doit être un ParamSpec ou bien « ... 
»", + "concatenateTypeArgsMissing": "« Concatenate » nécessite au moins deux arguments de type", + "conditionalOperandInvalid": "Opérande conditionnel non valide de type \"{type}\"", + "constantRedefinition": "\"{name}\" est constant (car il est en majuscule) et ne peut pas être redéfini", + "constructorParametersMismatch": "Non-concordance entre la signature de __new__ et __init__ dans la classe \"{classType}\"", + "containmentAlwaysFalse": "L'expression sera toujours évaluée à False car les types \"{leftType}\" et \"{rightType}\" ne se chevauchent pas", + "containmentAlwaysTrue": "L'expression sera toujours évaluée à True puisque les types \"{leftType}\" et \"{rightType}\" ne se chevauchent pas", + "continueInExceptionGroup": "« continue » n’est pas autorisé dans un bloc « except* »", + "continueOutsideLoop": "« continue » ne peut être utilisé qu’au sein d’une boucle", + "coroutineInConditionalExpression": "L'expression conditionnelle fait référence à une coroutine qui est toujours évaluée à True", + "dataClassBaseClassFrozen": "Une classe non gelée ne peut pas hériter d'une classe gelée", + "dataClassBaseClassNotFrozen": "Une classe figée ne peut pas hériter d’une classe qui n’est pas figée", + "dataClassConverterFunction": "L’argument de type « {argType} » n’est pas un convertisseur valide pour le champ « {fieldName} » de type « {fieldType} »", + "dataClassConverterOverloads": "Aucune surcharge de « {funcName} » n’est valide pour le champ « {fieldName} » de type « {fieldType} »", + "dataClassFieldInheritedDefault": "« {fieldName} » remplace un champ du même nom mais n’a pas de valeur par défaut", + "dataClassFieldWithDefault": "Les champs sans valeurs par défaut ne peuvent pas apparaître après les champs avec des valeurs par défaut", + "dataClassFieldWithPrivateName": "Le champ Dataclass ne peut pas utiliser de nom privé", + "dataClassFieldWithoutAnnotation": "Le champ Dataclass sans annotation de type provoquera une exception d'exécution", + 
"dataClassPostInitParamCount": "La classe de données __post_init__ nombre de paramètres incorrect ; le nombre de champs InitVar est {expected}", + "dataClassPostInitType": "Incompatibilité du type de paramètre de méthode __post_init__ Dataclass pour le champ « {fieldName} »", + "dataClassSlotsOverwrite": "__slots__ est déjà défini dans la classe", + "dataClassTransformExpectedBoolLiteral": "Expression attendue qui prend statiquement la valeur True ou False", + "dataClassTransformFieldSpecifier": "Tuple de classes ou de fonctions attendu, mais réception du type « {type} »", + "dataClassTransformPositionalParam": "Tous les arguments de « dataclass_transform » doivent être des arguments de mot clé", + "dataClassTransformUnknownArgument": "L’argument « {name} » n’est pas pris en charge par dataclass_transform", + "dataProtocolInSubclassCheck": "Les protocoles de données (qui incluent des attributs non méthode) ne sont pas autorisés dans les appels de issubclass", + "declaredReturnTypePartiallyUnknown": "Le type de retour déclaré « {returnType} » est partiellement inconnu", + "declaredReturnTypeUnknown": "Le type de retour déclaré est inconnu", + "defaultValueContainsCall": "Les appels de fonction et les objets mutables ne sont pas autorisés dans l'expression de la valeur par défaut du paramètre", + "defaultValueNotAllowed": "Le paramètre avec « * » ou « ** » ne peut pas avoir de valeur par défaut", + "delTargetExpr": "L'expression ne peut pas être supprimée", + "deprecatedClass": "La classe \"{name}\" est obsolète", + "deprecatedConstructor": "Le constructeur de la classe « {name} » est déconseillé", + "deprecatedDescriptorDeleter": "La méthode « __delete__ » du descripteur « {name} » est déconseillée", + "deprecatedDescriptorGetter": "La méthode « __get__ » du descripteur « {name} » est déconseillée", + "deprecatedDescriptorSetter": "La méthode « __set__ » du descripteur « {name} » est déconseillée", + "deprecatedFunction": "La fonction \"{name}\" est obsolète", + 
"deprecatedMethod": "La méthode \"{name}\" dans la classe \"{className}\" est obsolète", + "deprecatedPropertyDeleter": "Le deleter de la property « {name} » est déconseillé", + "deprecatedPropertyGetter": "Le getter de la property « {name} » est déconseillé", + "deprecatedPropertySetter": "Le setter de la property « {name} » est déconseillé", + "deprecatedType": "Ce type est déconseillé à compter de Python {version}; utiliser « {replacement} » à la place", + "dictExpandIllegalInComprehension": "Expansion du dictionnaire non autorisée dans la compréhension", + "dictInAnnotation": "Expression de dictionnaire non autorisée dans l'expression de type", + "dictKeyValuePairs": "Les entrées de dictionnaire doivent contenir des paires clé/valeur", + "dictUnpackIsNotMapping": "Mappage attendu pour l’opérateur de décompression de dictionnaire", + "dunderAllSymbolNotPresent": "« {name} » est spécifié dans __all__ mais n’est pas présent dans le module", + "duplicateArgsParam": "Un seul paramètre « * » est autorisé", + "duplicateBaseClass": "Classe de base en double non autorisée", + "duplicateCapturePatternTarget": "La cible Capture \"{name}\" ne peut pas apparaître plus d'une fois dans le même modèle", + "duplicateCatchAll": "Une seule clause catch-all except autorisée", + "duplicateEnumMember": "Le membre Enum « {name} » est déjà déclaré", + "duplicateGenericAndProtocolBase": "Une seule classe de base Generic[...] ou Protocol[...] 
autorisée", + "duplicateImport": "« {importName} » est importé plusieurs fois", + "duplicateKeywordOnly": "Un seul séparateur « * » autorisé", + "duplicateKwargsParam": "Un seul paramètre « ** » est autorisé", + "duplicateParam": "Dupliquer le paramètre « {name} »", + "duplicatePositionOnly": "Un seul paramètre \"/\" autorisé", + "duplicateStarPattern": "Un seul motif \"*\" autorisé dans une séquence de motifs", + "duplicateStarStarPattern": "Une seule entrée \"**\" autorisée", + "duplicateUnpack": "Une seule opération de décompression autorisée dans la liste", + "ellipsisAfterUnpacked": "\"...\" ne peut pas être utilisé avec un TypeVarTuple ou un tuple décompressé", + "ellipsisContext": "« ... » n’est pas autorisé dans ce contexte", + "ellipsisSecondArg": "« ... » n’est autorisé qu’en tant que second des deux arguments", + "enumClassOverride": "La classe Enum « {name} » est finale et ne peut pas être sous-classée", + "enumMemberDelete": "Le membre Enum « {name} » ne peut pas être supprimé", + "enumMemberSet": "Le membre Enum « {name} » ne peut pas être affecté", + "enumMemberTypeAnnotation": "Les annotations de type ne sont pas autorisées pour les membres enum", + "exceptGroupMismatch": "L’instruction Try ne peut pas inclure à la fois « except » et « except* »", + "exceptGroupRequiresType": "La syntaxe du groupe d’exceptions (« except* ») nécessite un type d’exception", + "exceptRequiresParens": "Plusieurs types d’exception doivent être entre parenthèses avant Python 3.14", + "exceptWithAsRequiresParens": "Plusieurs types d’exception doivent être entre parenthèses lors de l’utilisation de « as »", + "exceptionGroupIncompatible": "La syntaxe du groupe d’exceptions (« except* ») nécessite Python 3.11 ou version ultérieure", + "exceptionGroupTypeIncorrect": "Le type d’exception dans except* ne peut pas dériver de BaseExceptionGroup", + "exceptionTypeIncorrect": "\"{type}\" ne dérive pas de BaseException", + "exceptionTypeNotClass": "« {type} » n’est pas une classe 
d’exception valide", + "exceptionTypeNotInstantiable": "Le constructeur pour le type d’exception « {type} » requiert un ou plusieurs arguments", + "expectedAfterDecorator": "Fonction attendue ou déclaration de classe après le décorateur", + "expectedArrow": "« -> » attendu suivi d’une annotation de type de retour", + "expectedAsAfterException": "« as » attendu après le type d’exception", + "expectedAssignRightHandExpr": "Expression attendue à droite de « = »", + "expectedBinaryRightHandExpr": "Expression attendue à droite de l’opérateur", + "expectedBoolLiteral": "Attendu True ou False", + "expectedCase": "Instruction « case » attendue", + "expectedClassName": "Nom de classe attendu", + "expectedCloseBrace": "« { » n’a pas été fermé", + "expectedCloseBracket": "« [ » n’a pas été fermé", + "expectedCloseParen": "« ( » n’a pas été fermé", + "expectedColon": "« : » attendu", + "expectedComplexNumberLiteral": "Littéral de nombre complexe attendu pour la correspondance de modèle", + "expectedDecoratorExpr": "Formulaire d’expression non pris en charge pour l’élément décoratif antérieur à Python 3.9", + "expectedDecoratorName": "Nom de l’élément décoratif attendu", + "expectedDecoratorNewline": "Nouvelle ligne attendue à la fin de l’élément décoratif", + "expectedDelExpr": "Expression attendue après « del »", + "expectedElse": "« else » attendu", + "expectedEquals": "« = » attendu", + "expectedExceptionClass": "Classe ou objet d'exception non valide", + "expectedExceptionObj": "Objet d’exception attendu, classe d’exception ou None", + "expectedExpr": "Expression attendue", + "expectedFunctionAfterAsync": "Définition de fonction attendue après \"async\"", + "expectedFunctionName": "Nom de fonction attendu après « def »", + "expectedIdentifier": "Identifiant attendu", + "expectedImport": "« importation » attendue", + "expectedImportAlias": "Symbole attendu après « as »", + "expectedImportSymbols": "Un ou plusieurs noms de symboles attendus après « l’importation »", + 
"expectedIn": "« in » attendu", + "expectedInExpr": "Expression attendue après « in »", + "expectedIndentedBlock": "Bloc en retrait attendu", + "expectedMemberName": "Nom d’attribut attendu après « . »", + "expectedModuleName": "Nom de module attendu", + "expectedNameAfterAs": "Nom de symbole attendu après \"as\"", + "expectedNamedParameter": "Le paramètre de mot clé doit suivre « * »", + "expectedNewline": "Nouvelle ligne attendue", + "expectedNewlineOrSemicolon": "Les instructions doivent être séparées par des nouvelles lignes ou des points-virgules", + "expectedOpenParen": "Attendu \"(\"", + "expectedParamName": "Nom du paramètre attendu", + "expectedPatternExpr": "Expression de modèle attendue", + "expectedPatternSubjectExpr": "Expression du sujet du modèle attendu", + "expectedPatternValue": "Expression de valeur de modèle attendue sous la forme « a.b »", + "expectedReturnExpr": "Expression attendue après « return »", + "expectedSliceIndex": "Expression d'index ou de tranche attendue", + "expectedTypeNotString": "Type attendu mais réception d’un littéral de chaîne", + "expectedTypeParameterName": "Nom de paramètre de type attendu", + "expectedYieldExpr": "Expression attendue dans l'instruction yield", + "finalClassIsAbstract": "La classe « {type} » est marquée comme finale et doit implémenter toutes les symboles abstraits", + "finalContext": "« Final » n’est pas autorisé dans ce contexte", + "finalInLoop": "Impossible d’assigner une variable « Final » dans une boucle", + "finalMethodOverride": "La méthode « {name} » ne peut pas remplacer la méthode finale définie dans la classe « {className} »", + "finalNonMethod": "La fonction « {name} » ne peut pas être marquée @final, car il ne s’agit pas d’une méthode", + "finalReassigned": "« {name} » est déclaré Final et ne peut pas être réaffecté", + "finalRedeclaration": "« {name} » a été déclaré comme Final", + "finalRedeclarationBySubclass": "« {name} » ne peut pas être redéclaré, car la classe parente « {className} 
» la déclare Final", + "finalTooManyArgs": "Argument de type unique attendu après « Final »", + "finalUnassigned": "« {name} » est déclaré Final, mais la valeur n’est pas affectée", + "finallyBreak": "Vous ne pouvez pas utiliser « break » pour quitter un bloc « finally »", + "finallyContinue": "Vous ne pouvez pas utiliser « continue » pour quitter un bloc « finally »", + "finallyReturn": "Vous ne pouvez pas utiliser « return » pour quitter un bloc « finally »", + "formatStringBrace": "Accolade fermante unique non autorisée dans le littéral f-string ; utiliser une double accolade fermée", + "formatStringBytes": "Les littéraux de chaîne de format (chaînes f) ne peuvent pas être binaires", + "formatStringDebuggingIllegal": "Le spécificateur de débogage de chaîne F \"=\" nécessite Python 3.8 ou une version plus récente", + "formatStringEscape": "Séquence d'échappement (barre oblique inverse) non autorisée dans la partie expression de la chaîne f avant Python 3.12", + "formatStringExpectedConversion": "Spécificateur de conversion attendu après « ! 
» dans f-string", + "formatStringIllegal": "Les littéraux de chaîne de format (chaînes f) nécessitent Python 3.6 ou une version plus récente", + "formatStringInPattern": "Chaîne de format non autorisée dans le modèle", + "formatStringNestedFormatSpecifier": "Expressions imbriqués trop profondément dans le spécificateur de chaîne de format", + "formatStringNestedQuote": "Les chaînes imbriquées dans une chaîne f ne peuvent pas utiliser le même guillemet que la chaîne f avant Python 3.12", + "formatStringTemplate": "Les littéraux de chaîne de format (f-strings) ne peuvent pas non plus être des chaînes de modèle (t-strings)", + "formatStringUnicode": "Les littéraux de chaîne de format (f-strings) ne peuvent pas être unicode", + "formatStringUnterminated": "Expression non terminée dans f-string ; attendant \"}\"", + "functionDecoratorTypeUnknown": "Le décorateur de fonction non typé masque le type de fonction ; élément décoratif ignoré", + "functionInConditionalExpression": "L’expression conditionnelle fait référence à une fonction qui prend toujours la valeur True", + "functionTypeParametersIllegal": "La syntaxe des paramètres de type de fonction nécessite Python 3.12 ou une version plus récente", + "futureImportLocationNotAllowed": "Les importations à partir de __future__ doivent se trouver au début du fichier", + "generatorAsyncReturnType": "Le type de retour de la fonction de générateur asynchrone doit être compatible avec « AsyncGenerator[{yieldType}, Any] »", + "generatorNotParenthesized": "Les expressions de générateur doivent être entre parenthèses si elles ne sont pas uniquement des arguments", + "generatorSyncReturnType": "Le type de retour de la fonction de générateur doit être compatible avec « Generator[{yieldType}, Any, Any] »", + "genericBaseClassNotAllowed": "La classe de base « Generic » ne peut pas être utilisée avec la syntaxe de paramètre de type", + "genericClassAssigned": "Impossible d’attribuer le type de classe générique", + 
"genericClassDeleted": "Le type de classe générique ne peut pas être supprimé", + "genericInstanceVariableAccess": "L’accès à une variable d’instance générique via une classe est ambigu", + "genericNotAllowed": "« Generic » n’est pas valide dans ce contexte", + "genericTypeAliasBoundTypeVar": "L’alias de type générique dans la classe ne peut pas utiliser les variables de type lié {names}", + "genericTypeArgMissing": "« Generic » nécessite au moins un argument de type", + "genericTypeArgTypeVar": "L’argument de type pour « Generic » doit être une variable de type", + "genericTypeArgUnique": "Les arguments de type pour « Generic » doivent être uniques", + "globalReassignment": "« {name} » est attribué avant la déclaration globale", + "globalRedefinition": "« {name} » a déjà été déclaré global", + "implicitStringConcat": "Concaténation implicite de chaînes non autorisée", + "importCycleDetected": "Cycle détecté dans la chaîne d'importation", + "importDepthExceeded": "La profondeur de la chaîne d'importation a dépassé {depth}", + "importResolveFailure": "Impossible de résoudre l’importation « {importName} »", + "importSourceResolveFailure": "Impossible de résoudre l’importation « {importName} » à partir de la source", + "importSymbolUnknown": "« {name} » est un symbole d’importation inconnu", + "incompatibleMethodOverride": "La méthode « {name} » remplace la classe « {className} » de manière incompatible", + "inconsistentIndent": "Le montant du retrait ne correspond pas au retrait précédent", + "inconsistentTabs": "Utilisation incohérente des onglets et des espaces dans la mise en retrait", + "initMethodSelfParamTypeVar": "L’annotation de type pour le paramètre « self » de la méthode « __init__ » ne peut pas contenir de variables de type de portée de classe", + "initMustReturnNone": "Le type de retour de « __init__ » doit être None", + "initSubclassCallFailed": "Arguments de mot clé incorrects pour la méthode __init_subclass__", + "initSubclassClsParam": 
"__init_subclass__ remplacement doit prendre un paramètre « cls »", + "initVarNotAllowed": "« InitVar » n’est pas autorisé dans ce contexte", + "instanceMethodSelfParam": "Les méthodes d’instance doivent prendre un paramètre « self »", + "instanceVarOverridesClassVar": "La variable d'instance \"{name}\" remplace la variable de classe du même nom dans la classe \"{className}\"", + "instantiateAbstract": "Impossible d'instancier la classe abstraite \"{type}\"", + "instantiateProtocol": "Impossible d’instancier la classe de Protocol \"{type}\"", + "internalBindError": "Une erreur interne s’est produite lors de la liaison du fichier « {file} » : {message}", + "internalParseError": "Une erreur interne s’est produite lors de l’analyse du fichier « {file} » : {message}", + "internalTypeCheckingError": "Une erreur interne s’est produite lors de la vérification de type du fichier « {file} » : {message}", + "invalidIdentifierChar": "Caractère invalide dans l'identifiant", + "invalidStubStatement": "L’instruction n’a aucun sens dans un fichier stub de type", + "invalidTokenChars": "Caractère non valide \"{text}\" dans le jeton", + "isInstanceInvalidType": "Le deuxième argument de \"isinstance\" doit être une classe ou un tuple de classes", + "isSubclassInvalidType": "Le deuxième argument de « issubclass » doit être une classe ou un tuple de classes", + "keyValueInSet": "Les paires clé/valeur ne sont pas autorisées dans un set", + "keywordArgInTypeArgument": "Les arguments de mot-clé ne peuvent pas être utilisés dans les listes d'arguments de type", + "keywordOnlyAfterArgs": "Séparateur d’arguments mot clé uniquement non autorisé après le paramètre « * »", + "keywordParameterMissing": "Un ou plusieurs paramètres de mot-clé doivent suivre le paramètre \"*\"", + "keywordSubscriptIllegal": "Les arguments de mot-clé dans les indices ne sont pas pris en charge", + "lambdaReturnTypePartiallyUnknown": "Le type de retour de lambda, « {returnType} », est partiellement inconnu", + 
"lambdaReturnTypeUnknown": "Le type de retour de lambda est inconnu", + "listAssignmentMismatch": "Impossible d’affecter l’expression de type « {type} » à la liste cible", + "listInAnnotation": "Expression de List non autorisée dans l’expression de type", + "literalEmptyArgs": "Attendu un ou plusieurs arguments de type après \"Literal\"", + "literalNamedUnicodeEscape": "Les séquences d’échappement Unicode nommées ne sont pas prises en charge dans les annotations de chaîne « Literal »", + "literalNotAllowed": "« Literal » ne peut pas être utilisé dans ce contexte sans argument de type", + "literalNotCallable": "Impossible d’instancier le type Literal", + "literalUnsupportedType": "Les arguments de type pour « Literal » doivent être None, une valeur littérale (int, bool, str ou bytes) ou une valeur enum", + "matchIncompatible": "Les instructions de Match nécessitent Python 3.10 ou version ultérieure", + "matchIsNotExhaustive": "Les cas dans l’instruction match ne gèrent pas toutes les valeurs de manière exhaustive", + "maxParseDepthExceeded": "Profondeur d’analyse maximale dépassée ; scinder l’expression en sous-expressions plus petites", + "memberAccess": "Désolé... Nous ne pouvons pas accéder à l’attribut « {name} » pour la classe « {type} »", + "memberDelete": "Désolé... Nous ne pouvons pas supprimer l’attribut « {name} » pour la classe « {type} »", + "memberSet": "Désolé... 
Nous ne pouvons pas affecter l’attribut « {name} » pour la classe « {type} »", + "metaclassConflict": "La métaclasse d’une classe dérivée doit être une sous-classe des métaclasses de toutes ses classes de base", + "metaclassDuplicate": "Une seule métaclasse peut être fournie", + "metaclassIsGeneric": "La métaclasse ne peut pas être générique", + "methodNotDefined": "Méthode « {name} » non définie", + "methodNotDefinedOnType": "Méthode « {name} » non définie sur le type « {type} »", + "methodOrdering": "Impossible de créer un classement de méthode cohérent", + "methodOverridden": "\"{name}\" remplace la méthode du même nom dans la classe \"{className}\" avec un type incompatible \"{type}\"", + "methodReturnsNonObject": "La méthode « {name} » ne retourne pas d’objet", + "missingSuperCall": "La méthode « {methodName} » n’appelle pas la méthode du même nom dans la classe parente", + "mixingBytesAndStr": "Les valeurs Bytes et str ne peuvent pas être concaténées", + "moduleAsType": "Le module ne peut pas être utilisé comme type", + "moduleNotCallable": "Le module ne peut pas être appelé", + "moduleUnknownMember": "« {memberName} » n’est pas un attribut connu du module « {moduleName} »", + "namedExceptAfterCatchAll": "Une clause except nommée ne peut pas apparaître après une clause except fourre-tout", + "namedParamAfterParamSpecArgs": "Le paramètre de mot clé « {name} » ne peut pas apparaître dans la signature après le paramètre ParamSpec args", + "namedTupleEmptyName": "Les noms dans un tuple nommé ne peuvent pas être vides", + "namedTupleEntryRedeclared": "Impossible de remplacer « {name} », car la classe parente « {className} » est un tuple nommé", + "namedTupleFieldUnderscore": "Les noms de champs Named tuple ne peuvent pas commencer par un trait de soulignement", + "namedTupleFirstArg": "Nom de classe de tuple nommé attendu en tant que premier argument", + "namedTupleMultipleInheritance": "L’héritage multiple avec NamedTuple n’est pas pris en charge", + 
"namedTupleNameKeyword": "Les noms de champs ne peuvent pas être un mot-clé", + "namedTupleNameType": "Tuple à deux entrées attendu spécifiant le nom et le type de l’entrée", + "namedTupleNameUnique": "Les noms dans un tuple nommé doivent être uniques", + "namedTupleNoTypes": "« namedtuple » ne fournit aucun type pour les entrées de tuple ; utilisez « NamedTuple » à la place", + "namedTupleSecondArg": "Liste d’entrées de tuple nommé attendue comme deuxième argument", + "newClsParam": "__new__ remplacement doit prendre un paramètre « cls »", + "newTypeAnyOrUnknown": "Le deuxième argument de NewType doit être une classe connue, et non Any ou Unknown", + "newTypeBadName": "Le premier argument de NewType doit être un littéral de chaîne", + "newTypeLiteral": "Impossible d’utiliser NewType avec le type Literal", + "newTypeNameMismatch": "NewType doit être affecté à une variable portant le même nom", + "newTypeNotAClass": "Classe attendue comme deuxième argument de NewType", + "newTypeParamCount": "NewType requiert deux arguments positionnels", + "newTypeProtocolClass": "Désolé, nous n’avons pas pu utiliser NewType avec un type structurel (un Protocol ou une classe TypedDict)", + "noOverload": "Aucune surcharge pour « {name} » ne correspond aux arguments fournis", + "noReturnContainsReturn": "La fonction avec le type de return déclaré « NoReturn » ne peut pas inclure d’instruction de return", + "noReturnContainsYield": "La fonction avec le type de retour déclaré « NoReturn » ne peut pas inclure d’instruction yield", + "noReturnReturnsNone": "La fonction avec le type de retour déclaré \"NoReturn\" ne peut pas renvoyer \"None\"", + "nonDefaultAfterDefault": "L’argument autre que l’argument par défaut suit l’argument par défaut", + "nonLocalInModule": "Déclaration nonlocal non autorisée au niveau du module", + "nonLocalNoBinding": "Aucune liaison pour le « {name} » nonlocal trouvé", + "nonLocalReassignment": "« {name} » est attribué avant la déclaration nonlocal", + "nonLocalRedefinition": "« {name} 
» a déjà été déclaré nonlocal", + "noneNotCallable": "L’objet de type « None » ne peut pas être appelé", + "noneNotIterable": "L’objet de type « None » ne peut pas être utilisé en tant que valeur itérable", + "noneNotSubscriptable": "L’objet de type « None » n’est pas inscriptible", + "noneNotUsableWith": "L’objet de type « None » ne peut pas être utilisé avec « with »", + "noneNotUsableWithAsync": "L’objet de type « None » ne peut pas être utilisé avec « async with »", + "noneOperator": "L’opérateur « {operator} » n’est pas pris en charge pour « None »", + "noneUnknownMember": "« {name} » n’est pas un attribut connu de « None »", + "nonlocalTypeParam": "La liaison nonlocale n’est pas autorisée pour le paramètre de type \"{name}\"", + "notRequiredArgCount": "Argument de type unique attendu après « NotRequired »", + "notRequiredNotInTypedDict": "« NotRequired » n’est pas autorisé dans ce contexte", + "objectNotCallable": "L’objet de type « {type} » ne peut pas être appelé", + "obscuredClassDeclaration": "La déclaration de classe « {name} » est masquée par une déclaration du même nom", + "obscuredFunctionDeclaration": "La déclaration de fonction « {name} » est masquée par une déclaration du même nom", + "obscuredMethodDeclaration": "La déclaration de méthode « {name} » est masquée par une déclaration du même nom", + "obscuredParameterDeclaration": "La déclaration de paramètre « {name} » est masquée par une déclaration du même nom", + "obscuredTypeAliasDeclaration": "La déclaration d’alias de type « {name} » est masquée par une déclaration du même nom", + "obscuredVariableDeclaration": "La déclaration « {name} » est masquée par une déclaration du même nom", + "operatorLessOrGreaterDeprecated": "L’opérateur « <> » n’est pas pris en charge dans Python 3 ; utilisez « != » à la place", + "optionalExtraArgs": "Attendu un argument de type après « Optional »", + "orPatternIrrefutable": "Modèle irréfutable autorisé uniquement en tant que dernier sous-modèle dans un modèle \"ou\"", + 
"orPatternMissingName": "Tous les sous-modèles d’un modèle « or » doivent cibler les mêmes noms", + "overlappingKeywordArgs": "Le dictionnaire tapé chevauche avec le mot clé paramètre : {names}", + "overlappingOverload": "La surcharge {obscured} pour « {name} » ne sera jamais utilisée, car ses paramètres chevauchent la surcharge {obscuredBy}", + "overloadAbstractImplMismatch": "Les surcharges doivent correspondre à l’état abstrait de l’implémentation", + "overloadAbstractMismatch": "Les surcharges doivent toutes être abstraites ou non", + "overloadClassMethodInconsistent": "Les surcharges pour « {name} » utilisent @classmethod de manière incohérente", + "overloadFinalImpl": "@final decorator ne doit être appliqué qu’à l’implémentation", + "overloadFinalNoImpl": "Seule la première surcharge doit être marquée @final", + "overloadImplementationMismatch": "L’implémentation surchargée n’est pas cohérente avec la signature de la surcharge {index}", + "overloadOverrideImpl": "@override decorator ne doit être appliqué qu’à l’implémentation", + "overloadOverrideNoImpl": "Seule la première surcharge doit être marquée @override", + "overloadReturnTypeMismatch": "La surcharge {prevIndex} pour « {name} » chevauche la surcharge {newIndex} et retourne un type incompatible", + "overloadStaticMethodInconsistent": "Les surcharges pour « {name} » utilisent @staticmethod de manière incohérente", + "overloadWithoutImplementation": "« {name} » est marqué comme overload, mais aucune implémentation n’est fournie", + "overriddenMethodNotFound": "La méthode « {name} » est marquée comme override, mais aucune méthode de base du même nom n’est présente", + "overrideDecoratorMissing": "La méthode \"{name}\" n'est pas marquée comme override mais remplace une méthode dans la classe \"{className}\"", + "paramAfterKwargsParam": "Le paramètre ne peut pas suivre le paramètre \"**\"", + "paramAlreadyAssigned": "Le paramètre « {name} » est déjà affecté", + "paramAnnotationMissing": "L'annotation de 
type est manquante pour le paramètre \"{name}\"", + "paramAssignmentMismatch": "L'expression de type \"{sourceType}\" ne peut pas être affectée au paramètre de type \"{paramType}\"", + "paramNameMissing": "Aucun paramètre nommé « {name} »", + "paramSpecArgsKwargsDuplicate": "Des arguments pour ParamSpec « {type} » ont déjà été fournis", + "paramSpecArgsKwargsUsage": "Les attributs « args » et « kwargs » de ParamSpec doivent apparaître tous les deux dans une signature de fonction", + "paramSpecArgsMissing": "Les arguments pour ParamSpec « {type} » sont manquants", + "paramSpecArgsUsage": "L’attribut « args » de ParamSpec n’est valide que lorsqu’il est utilisé avec le paramètre *args", + "paramSpecAssignedName": "ParamSpec doit être affecté à une variable nommée « {name} »", + "paramSpecContext": "ParamSpec n’est pas autorisé dans ce contexte", + "paramSpecDefaultNotTuple": "Points de suspension attendus, expression de tuple ou ParamSpec pour la valeur par défaut de ParamSpec", + "paramSpecFirstArg": "Nom attendu de ParamSpec comme premier argument", + "paramSpecKwargsUsage": "L’attribut « kwargs » de ParamSpec n’est valide que lorsqu’il est utilisé avec le paramètre **kwargs", + "paramSpecNotUsedByOuterScope": "ParamSpec « {name} » n’a aucune signification dans ce contexte", + "paramSpecUnknownArg": "ParamSpec ne prend pas en charge plus d'un argument", + "paramSpecUnknownMember": "« {name} » n’est pas un attribut connu de ParamSpec", + "paramSpecUnknownParam": "\"{name}\" est un paramètre inconnu pour ParamSpec", + "paramTypeCovariant": "Impossible d’utiliser la variable de type Covariant dans le type de paramètre", + "paramTypePartiallyUnknown": "Le type du paramètre « {paramName} » est partiellement inconnu", + "paramTypeUnknown": "Le type de paramètre « {paramName} » est inconnu", + "parenthesizedContextManagerIllegal": "Parentheses within \"with\" statement requires Python 3.9 or newer", + "patternNeverMatches": "Le modèle ne sera jamais mis en correspondance 
pour le type d’objet « {type} »", + "positionArgAfterNamedArg": "L’argument positionnel ne peut pas apparaître après les arguments de mot clé", + "positionArgAfterUnpackedDictArg": "L’argument positionnel ne peut pas apparaître après la décompression d’argument de mot clé", + "positionOnlyAfterArgs": "Séparateur de paramètres de position seule non autorisé après le paramètre « * »", + "positionOnlyAfterKeywordOnly": "Le paramètre « / » doit apparaître avant le paramètre « * »", + "positionOnlyAfterNon": "Paramètre de position seule non autorisé après un paramètre qui n’est pas en position seule", + "positionOnlyFirstParam": "Séparateur de paramètres de position seule non autorisé en tant que premier paramètre", + "positionOnlyIncompatible": "Le séparateur de paramètres de position seule requiert Python 3.8 ou version ultérieure", + "privateImportFromPyTypedModule": "« {name} » n’est pas exporté à partir du module « {module} »", + "privateUsedOutsideOfClass": "« {name} » est privé et utilisé en dehors de la classe dans laquelle il est déclaré", + "privateUsedOutsideOfModule": "« {name} » est privé et utilisé en dehors du module dans lequel il est déclaré", + "propertyOverridden": "« {name} » remplace à tort la property du même nom dans la classe « {className} »", + "propertyStaticMethod": "Méthodes statiques non autorisées pour la property getter, setter ou deleter", + "protectedUsedOutsideOfClass": "\"{name}\" est protégé et utilisé en dehors de la classe dans laquelle il est déclaré", + "protocolBaseClass": "La classe de Protocol \"{classType}\" ne peut pas dériver de la classe non Protocol \"{baseType}\"", + "protocolBaseClassWithTypeArgs": "Les arguments de type ne sont pas autorisés avec la classe Protocol lors de l'utilisation de la syntaxe des paramètres de type", + "protocolIllegal": "L’utilisation de « Protocol » nécessite Python 3.7 ou une version plus récente", + "protocolNotAllowed": "\"Protocol\" ne peut pas être utilisé dans ce contexte", + 
"protocolTypeArgMustBeTypeParam": "L’argument de type pour « Protocol » doit être un paramètre de type", + "protocolUnsafeOverlap": "La classe chevauche « {name} » de manière non sécurisée et peut produire une correspondance au moment de l’exécution", + "protocolVarianceContravariant": "La variable de type \"{variable}\" utilisée dans le Protocol générique \"{class}\" doit être contravariante", + "protocolVarianceCovariant": "La variable de type \"{variable}\" utilisée dans le Protocol générique \"{class}\" doit être covariante", + "protocolVarianceInvariant": "La variable de type \"{variable}\" utilisée dans le Protocol générique \"{class}\" doit être invariante", + "pyrightCommentInvalidDiagnosticBoolValue": "La directive de commentaire Pyright doit être suivie de « = » et d’une valeur true ou false", + "pyrightCommentInvalidDiagnosticSeverityValue": "La directive de commentaire Pyright doit être suivie de « = » et avoir la valeur true, false, error, warning, information ou none", + "pyrightCommentMissingDirective": "Le commentaire Pyright doit être suivi d’une directive (basic ou strict) ou d’une règle de diagnostic", + "pyrightCommentNotOnOwnLine": "Les commentaires Pyright utilisés pour contrôler les paramètres au niveau du fichier doivent apparaître sur leur propre ligne", + "pyrightCommentUnknownDiagnosticRule": "« {rule} » est une règle de diagnostic inconnue pour le commentaire pyright", + "pyrightCommentUnknownDiagnosticSeverityValue": "« {value} » n’est pas valide pour le commentaire pyright ; true, false, error, warning, information ou none attendu", + "pyrightCommentUnknownDirective": "« {directive} » est une directive inconnue pour le commentaire pyright ; « strict », « standard » ou « basic » attendu", + "readOnlyArgCount": "Attendu un seul argument de type après \"ReadOnly\"", + "readOnlyNotInTypedDict": "« ReadOnly » n’est pas autorisé dans ce contexte", + "recursiveDefinition": "Le type de \"{name}\" n'a pas pu être déterminé car il fait référence 
à lui-même", + "relativeImportNotAllowed": "Les importations relatives ne peuvent pas être utilisées avec le formulaire « import .a » ; utiliser « from . import a » à la place", + "requiredArgCount": "Attendu un argument de type unique après \"Required\"", + "requiredNotInTypedDict": "« Required » n’est pas autorisé dans ce contexte", + "returnInAsyncGenerator": "L'instruction de retour avec valeur n'est pas autorisée dans le générateur asynchrone", + "returnInExceptionGroup": "« return » n’est pas autorisé dans un bloc « except* »", + "returnMissing": "La fonction avec le type de retour déclaré \"{returnType}\" doit renvoyer une valeur sur tous les chemins de code", + "returnOutsideFunction": "\"return\" ne peut être utilisé que dans une fonction", + "returnTypeContravariant": "La variable de type contravariant ne peut pas être utilisée dans le type de retour", + "returnTypeMismatch": "Le type « {exprType} » n’est pas assignable au type de retour « {returnType} »", + "returnTypePartiallyUnknown": "Le type de retour « {returnType} » est partiellement inconnu", + "returnTypeUnknown": "Le type de retour est inconnu", + "revealLocalsArgs": "Aucun argument attendu pour l'appel \"reveal_locals\"", + "revealLocalsNone": "Aucun élément locals dans cette étendue", + "revealTypeArgs": "Argument positionnel unique attendu pour l’appel « reveal_type »", + "revealTypeExpectedTextArg": "L'argument \"expected_text\" pour la fonction \"reveal_type\" doit être une valeur littérale str", + "revealTypeExpectedTextMismatch": "Incompatibilité de texte de type ; « {expected} » attendu, mais a reçu « {received} »", + "revealTypeExpectedTypeMismatch": "Incompatibilité de type; attendu \"{expected}\" mais reçu \"{received}\"", + "selfTypeContext": "« Self » n’est pas valide dans ce contexte", + "selfTypeMetaclass": "« Self » ne peut pas être utilisé dans une métaclasse (une sous-classe de « type »)", + "selfTypeWithTypedSelfOrCls": "« Self » ne peut pas être utilisé dans une fonction avec 
un paramètre « self » ou « cls » qui a une annotation de type autre que « Self »", + "sentinelBadName": "Le premier argument de Sentinel doit être un littéral de chaîne", + "sentinelNameMismatch": "Sentinel doit être affecté à une variable portant le même nom", + "sentinelParamCount": "Sentinel requiert un argument positionnel", + "setterGetterTypeMismatch": "Le type valeur setter de property n’est pas assignable au type de retour getter", + "singleOverload": "« {name} » est marqué comme surcharge, mais des surcharges supplémentaires sont manquantes", + "slotsAttributeError": "\"{name}\" n'est pas spécifié dans __slots__", + "slotsClassVarConflict": "\"{name}\" est en conflit avec la variable d'instance déclarée dans __slots__", + "starPatternInAsPattern": "Le modèle d’étoile ne peut pas être utilisé avec la cible « as »", + "starPatternInOrPattern": "Le modèle d’étoile ne peut pas être supprimé dans d’autres modèles", + "starStarWildcardNotAllowed": "** ne peut pas être utilisé avec le caractère générique « _ »", + "staticClsSelfParam": "Les méthodes statiques ne doivent pas prendre de paramètre « self » ou « cls »", + "stringNonAsciiBytes": "Caractère non-ASCII non autorisé dans le littéral de chaîne d'octets", + "stringNotSubscriptable": "L'expression de chaîne ne peut pas être indexée dans une expression de type ; placez l'expression entière entre guillemets", + "stringUnsupportedEscape": "Séquence d'échappement non prise en charge dans le littéral de chaîne", + "stringUnterminated": "Le littéral de chaîne n’est pas terminé", + "stubFileMissing": "Fichier stub introuvable pour « {importName} »", + "stubUsesGetAttr": "Le fichier stub de type est incomplet ; « __getattr__ » masque les erreurs de type pour le module", + "sublistParamsIncompatible": "Les paramètres de Sublist ne sont pas pris en charge dans Python 3.x", + "superCallArgCount": "Pas plus de deux arguments attendus pour l'appel \"super\"", + "superCallFirstArg": "Type de classe attendu en tant que 
premier argument de l’appel « super », mais « {type} » reçu", + "superCallSecondArg": "Le deuxième argument de l’appel « super » doit être un objet ou une classe dérivé de « {type} »", + "superCallZeroArgForm": "La forme sans argument d'appel \"super\" n'est valide que dans une méthode", + "superCallZeroArgFormStaticMethod": "La forme à zéro argument de l’appel « super » n’est pas valide dans une méthode statique", + "symbolIsPossiblyUnbound": "« {name} » n’est peut-être pas lié", + "symbolIsUnbound": "« {name} » n’est pas lié", + "symbolIsUndefined": "« {name} » n’est pas défini", + "symbolOverridden": "« {name} » remplace le symbole du même nom dans la classe « {className} »", + "templateStringBytes": "Les littéraux de chaîne de modèle (t-strings) ne peuvent pas être binaires", + "templateStringIllegal": "Les littéraux de chaîne de modèle (t-strings) nécessitent Python 3.14 ou version ultérieure", + "templateStringUnicode": "Les littéraux de chaîne de modèle (t-strings) ne peuvent pas être unicode", + "ternaryNotAllowed": "Expression ternaire non autorisée dans l'expression de type", + "totalOrderingMissingMethod": "La classe doit définir « __lt__ », « __le__ », « __gt__ » ou « __ge__ » pour utiliser total_ordering", + "trailingCommaInFromImport": "Virgule de fin non autorisée sans parenthèses adjacentes", + "tryWithoutExcept": "L'instruction try doit avoir au moins une clause except ou finally", + "tupleAssignmentMismatch": "L'expression avec le type \"{type}\" ne peut pas être assignée au tuple cible", + "tupleInAnnotation": "Expression de tuple non autorisée dans l'expression de type", + "tupleIndexOutOfRange": "L’index {index} est hors limites pour le type {type}", + "typeAliasIllegalExpressionForm": "Formulaire d’expression non valide pour la définition d’alias de type", + "typeAliasIsRecursiveDirect": "L'alias de type \"{name}\" ne peut pas s'utiliser lui-même dans sa définition", + "typeAliasNotInModuleOrClass": "Un TypeAlias ne peut être défini qu’au 
sein d’un module ou d’une étendue de classe", + "typeAliasRedeclared": "« {name} » est déclaré en tant que TypeAlias et ne peut être attribué qu’une seule fois", + "typeAliasStatementBadScope": "Une instruction de type ne peut être utilisée que dans une étendue de module ou de classe", + "typeAliasStatementIllegal": "L’instruction d’alias de type nécessite Python 3.12 ou version ultérieure", + "typeAliasTypeBadScope": "Vous ne pouvez définir un alias de type qu’au sein d’un module ou d’une étendue de classe", + "typeAliasTypeBaseClass": "A type alias defined in a \"type\" statement cannot be used as a base class", + "typeAliasTypeMustBeAssigned": "TypeAliasType doit être affecté à une variable portant le même nom que l'alias de type", + "typeAliasTypeNameArg": "Le premier argument de TypeAliasType doit être un littéral de chaîne représentant le nom de l'alias de type", + "typeAliasTypeNameMismatch": "Le nom de l’alias de type doit correspondre au nom de la variable à laquelle il est affecté", + "typeAliasTypeParamInvalid": "La liste de paramètres de type doit être un tuple contenant uniquement TypeVar, TypeVarTuple ou ParamSpec", + "typeAnnotationCall": "Expression d'appel non autorisée dans l'expression de type", + "typeAnnotationVariable": "Variable non autorisée dans l'expression de type", + "typeAnnotationWithCallable": "L'argument de type pour \"type\" doit être une classe ; les callables ne sont pas pris en charge", + "typeArgListExpected": "ParamSpec, ellipse ou liste de types attendue", + "typeArgListNotAllowed": "Expression de liste non autorisée pour cet argument de type", + "typeArgsExpectingNone": "Aucun argument de type attendu pour la classe « {name} »", + "typeArgsMismatchOne": "Argument de type attendu mais {received} reçu", + "typeArgsMissingForAlias": "Arguments de type attendus pour l’alias de type générique « {name} »", + "typeArgsMissingForClass": "Arguments de type attendus pour la classe générique \"{name}\"", + "typeArgsTooFew": "Trop peu 
d’arguments de type fournis pour « {name} » ; {expected} attendu, mais {received} reçu", + "typeArgsTooMany": "Trop d'arguments de type fournis pour \"{name}\" ; attendu {expected} mais reçu {received}", + "typeAssignmentMismatch": "Le type « {sourceType} » n’est pas assignable au type déclaré « {destType} »", + "typeAssignmentMismatchWildcard": "Le symbole d’importation « {name} » a le type « {sourceType} », qui n’est pas assignable au type déclaré « {destType} »", + "typeCallNotAllowed": "l'appel type() ne doit pas être utilisé dans une expression de type", + "typeCheckOnly": "\"{name}\" est marqué comme @type_check_only et ne peut être utilisé que dans les annotations de type", + "typeCommentDeprecated": "L’utilisation de commentaires de type est déconseillée ; utiliser l’annotation de type à la place", + "typeExpectedClass": "Classe attendue mais « {type} » reçu", + "typeFormArgs": "« TypeForm » accepte un seul argument positionnel", + "typeGuardArgCount": "Argument de type unique attendu après « TypeGuard » ou « TypeIs »", + "typeGuardParamCount": "Les méthodes et fonctions de protection de type définies par l’utilisateur doivent avoir au moins un paramètre d’entrée", + "typeIsReturnType": "Le type de retour des TypeIs (« {returnType} ») n’est pas cohérent avec le type de paramètre de valeur (« {type} »)", + "typeNotAwaitable": "« {type} » n’est pas awaitable", + "typeNotIntantiable": "« {type} » ne peut pas être instancié", + "typeNotIterable": "« {type} » n’est pas itérable", + "typeNotSpecializable": "Impossible de spécialiser le type \"{type}\"", + "typeNotSubscriptable": "L’objet de type « {type} » n’est pas sous-scriptible", + "typeNotSupportBinaryOperator": "Opérateur \"{operator}\" non pris en charge pour les types \"{leftType}\" et \"{rightType}\"", + "typeNotSupportBinaryOperatorBidirectional": "L'opérateur \"{operator}\" n'est pas pris en charge pour les types \"{leftType}\" et \"{rightType}\" lorsque le type attendu est \"{expectedType}\"", + 
"typeNotSupportUnaryOperator": "L'opérateur \"{operator}\" n'est pas pris en charge pour le type \"{type}\"", + "typeNotSupportUnaryOperatorBidirectional": "L’opérateur « {operator} » n’est pas pris en charge pour le type « {type} » quand le type attendu est « {expectedType} »", + "typeNotUsableWith": "L’objet de type « {type} » ne peut pas être utilisé avec « with », car il n’implémente pas correctement {method}", + "typeNotUsableWithAsync": "L’objet de type « {type} » ne peut pas être utilisé avec « async with », car il n’implémente pas correctement {method}", + "typeParameterBoundNotAllowed": "La limite ou la contrainte ne peut pas être utilisée avec un paramètre de type variadique ou ParamSpec", + "typeParameterConstraintTuple": "La contrainte de paramètre de type doit être un tuple de plusieurs types", + "typeParameterExistingTypeParameter": "Le paramètre de type « {name} » est déjà utilisé", + "typeParameterNotDeclared": "Le paramètre de type « {name} » n’est pas inclus dans la liste des paramètres de type pour « {container} »", + "typeParametersMissing": "Au moins un paramètre de type doit être spécifié", + "typePartiallyUnknown": "Le type de « {name} » est partiellement inconnu", + "typeUnknown": "Le type de « {name} » est inconnu", + "typeVarAssignedName": "TypeVar doit être affecté à une variable nommée « {name} »", + "typeVarAssignmentMismatch": "Le type \"{type}\" ne peut pas être affecté à la variable de type \"{name}\"", + "typeVarBoundAndConstrained": "TypeVar ne peut pas être à la fois lié et contraint", + "typeVarBoundGeneric": "Le type lié TypeVar ne peut pas être générique", + "typeVarConstraintGeneric": "Le type de contrainte TypeVar ne peut pas être générique", + "typeVarDefaultBoundMismatch": "Le type par défaut TypeVar doit être un sous-type du type lié", + "typeVarDefaultConstraintMismatch": "Le type par défaut TypeVar doit être l'un des types contraints", + "typeVarDefaultIllegal": "Les types de variables de type par défaut nécessitent 
Python 3.13 ou une version plus récente", + "typeVarDefaultInvalidTypeVar": "Le paramètre de type « {name} » a un type par défaut qui fait référence à une ou plusieurs variables de type hors de portée", + "typeVarFirstArg": "Nom attendu de TypeVar comme premier argument", + "typeVarInvalidForMemberVariable": "Le type d’attribut ne peut pas utiliser de variable de type « {name} » étendue à une méthode locale", + "typeVarNoMember": "TypeVar « {type} » n’a aucun attribut « {name} »", + "typeVarNotSubscriptable": "TypeVar « {type} » n’est pas subscriptible", + "typeVarNotUsedByOuterScope": "La variable de type « {name} » n’a aucune signification dans ce contexte", + "typeVarPossiblyUnsolvable": "La variable de type \"{name}\" peut rester non résolue si l'appelant ne fournit aucun argument pour le paramètre \"{param}\"", + "typeVarSingleConstraint": "TypeVar doit avoir au moins deux types contraints", + "typeVarTupleConstraints": "TypeVarTuple ne peut pas avoir de contraintes de valeur", + "typeVarTupleContext": "TypeVarTuple n’est pas autorisé dans ce contexte", + "typeVarTupleDefaultNotUnpacked": "Le type par défaut TypeVarTuple doit être un tuple décompressé ou TypeVarTuple", + "typeVarTupleMustBeUnpacked": "L'opérateur Unpack est requis pour la valeur TypeVarTuple", + "typeVarTupleUnknownParam": "« {name} » est un paramètre inconnu de TypeVarTuple", + "typeVarUnknownParam": "\"{name}\" est un paramètre inconnu pour TypeVar", + "typeVarUsedByOuterScope": "TypeVar \"{name}\" est déjà utilisé par une portée externe", + "typeVarUsedOnlyOnce": "TypeVar « {name} » n’apparaît qu’une seule fois dans la signature de fonction générique", + "typeVarVariance": "TypeVar ne peut pas être à la fois covariant et contravariant", + "typeVarWithDefaultFollowsVariadic": "TypeVar « {typeVarName} » a une valeur par défaut et ne peut pas suivre TypeVarTuple « {variadicName} »", + "typeVarWithoutDefault": "\"{name}\" ne peut pas apparaître après \"{other}\" dans la liste des paramètres de 
type car il n'a pas de type par défaut", + "typeVarsNotInGenericOrProtocol": "Generic[] ou Protocol[] doit inclure toutes les variables de type", + "typedDictAccess": "Impossible d’accéder à l’élément dans TypedDict", + "typedDictAssignedName": "TypedDict doit être affecté à une variable nommée « {name} »", + "typedDictBadVar": "Les classes TypedDict ne peuvent contenir que des annotations de type", + "typedDictBaseClass": "Toutes les classes de base pour les classes TypedDict doivent également être des classes TypedDict", + "typedDictBoolParam": "Paramètre « {name} » attendu avec la valeur True ou False", + "typedDictClosedExtras": "Le \"{name}\" de classe de base est un TypedDict qui limite le type d’éléments supplémentaires au type \"{type}\"", + "typedDictClosedFalseNonOpenBase": "La classe de base « {name} » n’est pas un TypedDict ouvert ; closed=False n’est pas autorisé", + "typedDictClosedNoExtras": "La classe de base « {name} » est un TypedDict closed, les éléments supplémentaires ne sont pas autorisés", + "typedDictDelete": "Impossible de supprimer l’élément dans TypedDict", + "typedDictEmptyName": "Les noms dans un TypedDict ne peuvent pas être vides", + "typedDictEntryName": "Littéral de chaîne attendu pour le nom d’entrée du dictionnaire", + "typedDictEntryUnique": "Les noms dans un dictionnaire doivent être uniques", + "typedDictExtraArgs": "Arguments TypedDict supplémentaires non pris en charge", + "typedDictExtraItemsClosed": "TypedDict peut utiliser \"closed\" ou \"extra_items\", mais pas les deux", + "typedDictFieldNotRequiredRedefinition": "L’élément TypedDict « {name} » ne peut pas être redéfini comme étant NotRequired", + "typedDictFieldReadOnlyRedefinition": "L’élément TypedDict « {name} » ne peut pas être redéfini comme étant ReadOnly", + "typedDictFieldRequiredRedefinition": "L’élément TypedDict « {name} » ne peut pas être redéfini comme étant Required", + "typedDictFirstArg": "Nom de classe TypedDict attendu comme premier argument", + 
"typedDictInClassPattern": "La classe TypedDict n'est pas autorisée dans le modèle de classe", + "typedDictInitsubclassParameter": "TypedDict ne prend pas en charge __init_subclass__ paramètre « {name} »", + "typedDictNotAllowed": "\"TypedDict\" ne peut pas être utilisé dans ce contexte", + "typedDictSecondArgDict": "Paramètre de mot clé ou de dict attendu en tant que deuxième paramètre", + "typedDictSecondArgDictEntry": "Entrée de dictionnaire simple attendue", + "typedDictSet": "Impossible d'attribuer l'élément dans TypedDict", + "unaccessedClass": "La classe \"{name}\" n'a pas été accédée", + "unaccessedFunction": "La fonction « {name} » n’a pas été accédée", + "unaccessedImport": "L’importation « {name} » n’a pas été accédée", + "unaccessedSymbol": "« {name} » n’a pas été accédé(e)", + "unaccessedVariable": "La variable « {name} » n’a pas été accédée", + "unannotatedFunctionSkipped": "L'analyse de la fonction \"{name}\" est ignorée car elle n'est pas annotée", + "unaryOperationNotAllowed": "L'opérateur unaire n'est pas autorisé dans l'expression de type", + "unexpectedAsyncToken": "« def », « with » ou « for » attendu pour suivre « async »", + "unexpectedEof": "EOF (fin de fichier) inattendue", + "unexpectedExprToken": "Jeton inattendu à la fin de l’expression", + "unexpectedIndent": "Retrait inattendu", + "unexpectedUnindent": "Unindent non attendu", + "unhashableDictKey": "La clé du dictionnaire doit être hachable", + "unhashableSetEntry": "L’entrée set doit être hachable", + "uninitializedAbstractVariables": "Les variables définies dans la classe de base abstraite ne sont pas initialisées dans la classe finale « {classType} »", + "uninitializedInstanceVariable": "La variable d’instance « {name} » n’est pas initialisée dans le corps de la classe ou dans la méthode __init__", + "unionForwardReferenceNotAllowed": "La syntaxe Union ne peut pas être utilisée avec l’opérande de chaîne ; utiliser des guillemets autour de l’expression entière", + 
"unionSyntaxIllegal": "Une autre syntaxe pour les unions nécessite Python 3.10 ou une version plus récente", + "unionTypeArgCount": "L’Union requiert au moins deux arguments de type", + "unionUnpackedTuple": "Union ne peut pas inclure un tuple décompressé", + "unionUnpackedTypeVarTuple": "Union ne peut pas inclure un TypeVarTuple décompressé", + "unnecessaryCast": "Appel \"cast\" inutile ; le type est déjà \"{type}\"", + "unnecessaryIsInstanceAlways": "Appel d’isinstance inutile ; « {testType} » est toujours une instance de « {classType} »", + "unnecessaryIsInstanceNever": "Appel d’isinstance inutile ; « {testType} » n’est jamais une instance de « {classType} »", + "unnecessaryIsSubclassAlways": "Appel issubclass inutile ; \"{testType}\" est toujours une sous-classe de \"{classType}\"", + "unnecessaryIsSubclassNever": "Appel issubclass inutile ; « {testType} » n’est jamais une sous-classe de « {classType} »", + "unnecessaryPyrightIgnore": "Commentaire \"# pyright: ignore\" inutile", + "unnecessaryPyrightIgnoreRule": "Règle inutile « # pyright: ignore » : « {name} »", + "unnecessaryTypeIgnore": "Commentaire \"# type: ignore\" inutile", + "unpackArgCount": "Attendu un seul argument de type après \"Unpack\"", + "unpackExpectedTypeVarTuple": "TypeVarTuple ou tuple attendu en tant qu’argument de type pour Unpack", + "unpackExpectedTypedDict": "Argument de type TypedDict attendu pour Unpack", + "unpackIllegalInComprehension": "Opération de décompression non autorisée dans la compréhension", + "unpackInAnnotation": "L'opérateur de déballage n'est pas autorisé dans l'expression de type", + "unpackInDict": "Opération de décompression non autorisée dans les dictionnaires", + "unpackInSet": "Opérateur de déballage non autorisé dans un set", + "unpackNotAllowed": "Le Unpack n’est pas autorisé dans ce contexte", + "unpackOperatorNotAllowed": "L’opération de décompression n’est pas autorisée dans ce contexte", + "unpackTuplesIllegal": "Opération de décompression non autorisée dans 
les tuples avant Python 3.8", + "unpackedArgInTypeArgument": "Les arguments décompressés ne peuvent pas être utilisés dans ce contexte", + "unpackedArgWithVariadicParam": "L'argument décompressé ne peut pas être utilisé pour le paramètre TypeVarTuple", + "unpackedDictArgumentNotMapping": "L’expression d’argument après ** doit être un mappage avec un type de clé « str »", + "unpackedDictSubscriptIllegal": "L’opérateur de décompression de dictionnaire dans l’indice n’est pas autorisé", + "unpackedSubscriptIllegal": "L’opérateur de décompression en indice requiert Python 3.11 ou version ultérieure", + "unpackedTypeVarTupleExpected": "TypeVarTuple décompressé attendu ; utiliser Unpack[{name1}] ou *{name2}", + "unpackedTypedDictArgument": "Impossible de faire correspondre l’argument TypedDict décompressé aux paramètres", + "unreachableCodeCondition": "Le code n’est pas analysé, car la condition est évaluée statiquement comme false", + "unreachableCodeStructure": "Le code est structurellement inaccessible", + "unreachableCodeType": "L’analyse de type indique que le code est inaccessible", + "unreachableExcept": "La clause Except est inaccessible, car l’exception est déjà gérée", + "unsupportedDunderAllOperation": "L’opération sur « __all__ » n’est pas prise en charge. 
Par conséquent, la liste de symboles exportée peut être incorrecte", + "unusedCallResult": "Le résultat de l'expression d'appel est de type \"{type}\" et n'est pas utilisé ; affecter à la variable \"_\" si c'est intentionnel", + "unusedCoroutine": "Le résultat de l’appel de fonction asynchrone n’est pas utilisé ; utiliser « await » ou affecter le résultat à la variable", + "unusedExpression": "La valeur de l'expression n'est pas utilisée", + "varAnnotationIllegal": "Les annotations de type pour les variables nécessitent Python 3.6 ou une version ultérieure ; utiliser le commentaire de type pour la compatibilité avec les versions précédentes", + "variableFinalOverride": "La variable « {name} » est marquée comme Final et remplace la variable non-Final du même nom dans la classe « {className} »", + "variadicTypeArgsTooMany": "La liste d’arguments de type peut avoir au plus un TypeVarTuple ou tuple décompressé", + "variadicTypeParamTooManyAlias": "L’alias de type peut avoir au plus un paramètre de type TypeVarTuple, mais a reçu plusieurs ({names})", + "variadicTypeParamTooManyClass": "La classe générique peut avoir au plus un paramètre de type TypeVarTuple, mais en a reçu plusieurs ({names})", + "walrusIllegal": "L’opérateur « := » nécessite Python 3.8 ou version ultérieure", + "walrusNotAllowed": "L’opérateur « := » n’est pas autorisé dans ce contexte sans parenthèses adjacentes", + "wildcardInFunction": "import de caractères génériques non autorisée dans une classe ou une fonction", + "wildcardLibraryImport": "import de caractères génériques à partir d’une bibliothèque non autorisée", + "wildcardPatternTypePartiallyUnknown": "Le type capturé par le modèle générique est partiellement inconnu", + "wildcardPatternTypeUnknown": "Le type capturé par le modèle générique est inconnu", + "yieldFromIllegal": "L’utilisation de « yield from » nécessite Python 3.3 ou version ultérieure", + "yieldFromOutsideAsync": "« yield from » non autorisé dans une fonction asynchrone", + 
"yieldOutsideFunction": "« yield » non autorisé en dehors d’une fonction ou d’un lambda", + "yieldWithinComprehension": "« yield » n’est pas autorisé dans une compréhension de liste", + "zeroCaseStatementsFound": "L’instruction de Match doit inclure au moins une instruction case", + "zeroLengthTupleNotAllowed": "Le tuple de longueur nulle n’est pas autorisé dans ce contexte" + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "Impossible d’utiliser le formulaire spécial « Annotated » avec les vérifications d’instance et de classe", + "argParam": "L’argument correspond au paramètre « {paramName} »", + "argParamFunction": "L’argument correspond au paramètre « {paramName} » dans la fonction « {functionName} »", + "argsParamMissing": "Le paramètre \"*{paramName}\" n'a pas de paramètre correspondant", + "argsPositionOnly": "Non-concordance des paramètres de position uniquement ; attendu {expected} mais reçu {received}", + "argumentType": "Le type d’argument est « {type} »", + "argumentTypes": "Types d'argument : ({types})", + "assignToNone": "Le type n’est pas assignable à « None »", + "asyncHelp": "Vouliez-vous dire « async with » ?", + "baseClassIncompatible": "La classe de base « {baseClass} » n’est pas compatible avec le type « {type} »", + "baseClassIncompatibleSubclass": "La classe de base « {baseClass} » dérive de « {subclass} » qui est incompatible avec le type « {type} »", + "baseClassOverriddenType": "La classe de base « {baseClass} » fournit le type « {type} », qui est remplacé", + "baseClassOverridesType": "La classe de base \"{baseClass}\" remplace le type \"{type}\"", + "bytesTypePromotions": "Définir disableBytesTypePromotions sur false pour activer le comportement de promotion de type pour « bytearray » et « memoryview »", + "conditionalRequiresBool": "La méthode __bool__ pour le type « {operandType} » retourne le type « {boolReturnType} » plutôt que « bool »", + "dataClassFieldLocation": "Déclaration de champ", + "dataClassFrozen": "« {name} » est 
figé", + "dataProtocolUnsupported": "« {name} » est un protocole de données", + "descriptorAccessBindingFailed": "Échec de la liaison du « {name} » de méthode pour la classe de descripteur « {className} »", + "descriptorAccessCallFailed": "Échec de l’appel du « {name} » de méthode pour la classe de descripteur « {className} »", + "finalMethod": "Méthode Final", + "functionParamDefaultMissing": "Le paramètre \"{name}\" n'a pas d'argument par défaut", + "functionParamName": "Incompatibilité de nom de paramètre : « {destName} » et « {srcName} »", + "functionParamPositionOnly": "Non-correspondance des paramètres position uniquement ; le paramètre « {name} » n’est pas en position seule", + "functionReturnTypeMismatch": "Le type de retour de fonction \"{sourceType}\" est incompatible avec le type \"{destType}\"", + "functionTooFewParams": "La fonction accepte trop peu de paramètres positionnels ; {expected} attendu, mais {received} reçu", + "functionTooManyParams": "La fonction accepte trop de paramètres positionnels ; {expected} attendu, mais {received} reçu", + "genericClassNotAllowed": "Type générique avec des arguments de type non autorisé pour les vérifications d’instance ou de classe", + "incompatibleDeleter": "La méthode du deleter de property n’est pas compatible", + "incompatibleGetter": "La méthode de getter de property est incompatible", + "incompatibleSetter": "La méthode setter de property n’est pas compatible", + "initMethodLocation": "La méthode __init__ est définie dans la classe « {type} »", + "initMethodSignature": "La signature de __init__ est « {type} »", + "initSubclassLocation": "La méthode __init_subclass__ est définie dans la classe « {name} »", + "invariantSuggestionDict": "Envisagez de passer de « dict » à « Mapping », qui est covariant dans le type valeur", + "invariantSuggestionList": "Envisagez de passer de « list » à « Sequence » qui est covariant", + "invariantSuggestionSet": "Pensez à passer de \"set\" à \"Container\" qui est covariant", + 
"isinstanceClassNotSupported": "« {type} » n’est pas pris en charge pour les vérifications d’instance et de classe", + "keyNotRequired": "\"{name}\" n'est pas une clé requise dans \"{type}\", donc l'accès peut entraîner une exception d'exécution", + "keyReadOnly": "« {name} » est une clé en lecture seule dans « {type} »", + "keyRequiredDeleted": "« {name} » est une clé obligatoire et ne peut pas être supprimée", + "keyUndefined": "« {name} » n’est pas une clé définie dans « {type} »", + "kwargsParamMissing": "Le paramètre \"**{paramName}\" n'a pas de paramètre correspondant", + "listAssignmentMismatch": "Le type « {type} » n’est pas compatible avec la liste cible", + "literalAssignmentMismatch": "« {sourceType} » n’est pas assignable au type « {destType} »", + "literalNotAllowed": "Le formulaire spécial « Literal » ne peut pas être utilisé avec les vérifications d’instances et de classes", + "matchIsNotExhaustiveHint": "Si la gestion exhaustive n’est pas prévue, ajoutez « case _: pass »", + "matchIsNotExhaustiveType": "Type non géré : « {type} »", + "memberAssignment": "L'expression de type « {type} » ne peut pas être attribuée à l’attribut « {name} » de la classe « {classType} »", + "memberIsAbstract": "« {type}.{name} » n’est pas implémenté", + "memberIsAbstractMore": "et {count} autres...", + "memberIsClassVarInProtocol": "« {name} » est défini en tant que ClassVar dans le protocole", + "memberIsInitVar": "« {name} » est un champ init-only", + "memberIsInvariant": "« {name} » est invariant, car il est mutable", + "memberIsNotClassVarInClass": "« {name} » doit être défini en tant que ClassVar pour être compatible avec le protocole", + "memberIsNotClassVarInProtocol": "« {name} » n’est pas défini en tant que ClassVar dans le protocole", + "memberIsNotReadOnlyInProtocol": "« {name} » n’est pas en lecture seule dans le protocole", + "memberIsReadOnlyInProtocol": "« {name} » est en lecture seule dans le protocole", + "memberIsWritableInProtocol": "« {name} » est 
accessible en écriture dans le protocole", + "memberSetClassVar": "L’attribut « {name} » ne peut pas être attribué via une instance de classe car il s’agit d’une ClassVar", + "memberTypeMismatch": "« {name} » est un type incompatible", + "memberUnknown": "L’attribut « {name} » est inconnu", + "metaclassConflict": "La métaclasse « {metaclass1} » est en conflit avec « {metaclass2} »", + "missingDeleter": "La méthode de deleter de property est manquante", + "missingGetter": "La méthode getter de property est manquante", + "missingSetter": "La méthode setter de property est manquante", + "namedParamMissingInDest": "Paramètre supplémentaire « {name} »", + "namedParamMissingInSource": "Paramètre de mot clé manquant « {name} »", + "namedParamTypeMismatch": "Le paramètre de mot clé « {name} » de type « {sourceType} » est incompatible avec le type « {destType} »", + "namedTupleNotAllowed": "NamedTuple ne peut pas être utilisé pour les vérifications d’instance ou de classe", + "newMethodLocation": "La méthode __new__ est définie dans la classe « {type} »", + "newMethodSignature": "La signature de __new__ est « {type} »", + "newTypeClassNotAllowed": "Le type créé avec NewType ne peut pas être utilisé avec des vérifications d’instance et de classe", + "noOverloadAssignable": "Aucune fonction surchargée ne correspond au type « {type} »", + "noneNotAllowed": "None ne peut être utilisé pour les vérifications d’instance ou de classe", + "orPatternMissingName": "Noms manquants : {name}", + "overloadIndex": "La surcharge {index} est la correspondance la plus proche", + "overloadNotAssignable": "Une ou plusieurs surcharges de « {name} » ne sont pas assignables", + "overloadSignature": "La signature de surcharge est définie ici", + "overriddenMethod": "Méthode substituée", + "overriddenSymbol": "Symbole substitué", + "overrideInvariantMismatch": "Le type de remplacement \"{overrideType}\" n'est pas le même que le type de base \"{baseType}\"", + "overrideIsInvariant": "La variable 
est mutable donc son type est invariant", + "overrideNoOverloadMatches": "Aucune signature de surcharge dans le remplacement n’est compatible avec la méthode de base", + "overrideNotClassMethod": "La méthode de base est déclarée en tant que classmethod, mais la substitution n’est pas", + "overrideNotInstanceMethod": "La méthode de base est déclarée en tant que méthode d'instance mais la substitution n'est pas", + "overrideNotStaticMethod": "La méthode de base est déclarée comme staticmethod, mais la substitution n’est pas", + "overrideOverloadNoMatch": "Le remplacement ne gère pas toutes les surcharges de la méthode de base", + "overrideOverloadOrder": "Les surcharges pour la méthode de remplacement doivent être dans le même ordre que la méthode de base", + "overrideParamKeywordNoDefault": "Non-concordance du paramètre de mot-clé \"{name}\" : le paramètre de base a une valeur d'argument par défaut, le paramètre de remplacement n'en a pas", + "overrideParamKeywordType": "Incompatibilité de type du paramètre de mot clé \"{name}\" : le paramètre de base est de type \"{baseType}\", le paramètre de remplacement est de type \"{overrideType}\"", + "overrideParamName": "Incompatibilité de nom de paramètre {index} : le paramètre de base est nommé \"{baseName}\", le paramètre de remplacement est nommé \"{overrideName}\"", + "overrideParamNameExtra": "Le paramètre « {name} » est manquant dans la base", + "overrideParamNameMissing": "Le paramètre \"{name}\" est manquant dans le remplacement", + "overrideParamNamePositionOnly": "Incompatibilité du paramètre {index} : le paramètre de base \"{baseName}\" est un paramètre de mot-clé, le paramètre de remplacement est uniquement de position", + "overrideParamNoDefault": "Non-concordance du paramètre {index} : le paramètre de base a une valeur d'argument par défaut, le paramètre de remplacement n'en a pas", + "overrideParamType": "Incompatibilité de type de paramètre {index} : le paramètre de base est de type \"{baseType}\", le 
paramètre de remplacement est de type \"{overrideType}\"", + "overridePositionalParamCount": "Non-concordance du nombre de paramètres positionnels ; la méthode de base a {baseCount}, mais la substitution a {overrideCount}", + "overrideReturnType": "Incompatibilité de type de retour : la méthode de base retourne le type « {baseType} », la substitution retourne le type « {overrideType} »", + "overrideType": "La classe de base définit le type comme « {type} »", + "paramAssignment": "Paramètre {index} : le type « {sourceType} » est incompatible avec le type « {destType} »", + "paramSpecMissingInOverride": "Les paramètres ParamSpec sont manquants dans la méthode de remplacement", + "paramType": "Le type de paramètre est « {paramType} »", + "privateImportFromPyTypedSource": "Importer à partir de « {module} » à la place", + "propertyAccessFromProtocolClass": "Une propriété définie dans une classe de protocole n'est pas accessible en tant que variable de classe", + "propertyMethodIncompatible": "La méthode de property « {name} » n’est pas compatible", + "propertyMethodMissing": "La méthode de property « {name} » est manquante dans le remplacement", + "propertyMissingDeleter": "La property « {name} » n’a pas de deleter défini", + "propertyMissingSetter": "La property « {name} » n’a pas de méthode setter définie", + "protocolIncompatible": "\"{sourceType}\" est incompatible avec le protocole \"{destType}\"", + "protocolMemberMissing": "« {name} » n’est pas présent", + "protocolRequiresRuntimeCheckable": "La classe de Protocol doit être @runtime_checkable à utiliser avec des vérifications d’instance et de classe", + "protocolSourceIsNotConcrete": "\"{sourceType}\" n'est pas un type de classe concret et ne peut pas être affecté au type \"{destType}\"", + "protocolUnsafeOverlap": "Les attributs de « {name} » ont les mêmes noms que le protocole", + "pyrightCommentIgnoreTip": "Utilisez « # pyright: ignore[] » pour supprimer les diagnostics pour une seule ligne", + 
"readOnlyAttribute": "L’attribut « {name} » est en lecture seule", + "seeClassDeclaration": "Voir la déclaration de classe", + "seeDeclaration": "Voir la déclaration", + "seeFunctionDeclaration": "Voir la déclaration de fonction", + "seeMethodDeclaration": "Voir la déclaration de méthode", + "seeParameterDeclaration": "Voir la déclaration des paramètres", + "seeTypeAliasDeclaration": "Voir la déclaration d’alias de type", + "seeVariableDeclaration": "Voir déclaration de variable", + "tupleAssignmentMismatch": "Le type \"{type}\" est incompatible avec le tuple cible", + "tupleEntryTypeMismatch": "Le type de l’entrée de tuple {entry} est incorrect", + "tupleSizeIndeterminateSrc": "Incompatibilité de taille de tuple ; attendu {expected} mais reçu pour une durée indéterminée", + "tupleSizeIndeterminateSrcDest": "Incompatibilité de taille de tuple : attente de {expected} ou plus, mais réception indéterminée", + "tupleSizeMismatch": "Incompatibilité de taille de tuple ; attendu {expected} mais reçu {received}", + "tupleSizeMismatchIndeterminateDest": "Incompatibilité de taille de tuple : attente de {expected} ou plus, mais réception de {received}", + "typeAliasInstanceCheck": "L’alias de type créé avec l’instruction « type » ne peut pas être utilisé avec des vérifications d’instance et de classe", + "typeAssignmentMismatch": "Le type « {sourceType} » n’est pas assignable au type « {destType} »", + "typeBound": "Le type « {sourceType} » n’est pas assignable à la limite supérieure « {destType} » pour la variable de type « {name} »", + "typeConstrainedTypeVar": "Le type « {type} » n’est pas assignable à la variable de type contrainte « {name} »", + "typeIncompatible": "« {sourceType} » n’est pas assignable à « {destType} »", + "typeNotClass": "« {type} » n’est pas une classe", + "typeNotStringLiteral": "\"{type}\" n'est pas un littéral de chaîne", + "typeOfSymbol": "Le type de \"{name}\" est \"{type}\"", + "typeParamSpec": "Le type « {type} » n’est pas compatible avec 
ParamSpec « {name} »", + "typeUnsupported": "Le type « {type} » n’est pas pris en charge", + "typeVarDefaultOutOfScope": "La variable de type « {name} » n’est pas dans l’étendue", + "typeVarIsContravariant": "Le paramètre de type \"{name}\" est contravariant, mais \"{sourceType}\" n'est pas un supertype de \"{destType}\"", + "typeVarIsCovariant": "Le paramètre de type \"{name}\" est covariant, mais \"{sourceType}\" n'est pas un sous-type de \"{destType}\"", + "typeVarIsInvariant": "Le paramètre de type \"{name}\" est invariant, mais \"{sourceType}\" n'est pas le même que \"{destType}\"", + "typeVarNotAllowed": "TypeVar non autorisé pour les vérifications d’instance ou de classe", + "typeVarTupleRequiresKnownLength": "TypeVarTuple ne peut pas être lié à un tuple de longueur inconnue", + "typeVarUnnecessarySuggestion": "Utilisez plutôt {type}", + "typeVarUnsolvableRemedy": "Fournir une surcharge qui spécifie le type de retour lorsque l’argument n’est pas fourni", + "typeVarsMissing": "Variables de type manquantes : {names}", + "typedDictBaseClass": "La classe \"{type}\" n'est pas un TypedDict", + "typedDictClassNotAllowed": "Classe TypedDict non autorisée pour les vérifications d’instance ou de classe", + "typedDictClosedExtraNotAllowed": "Impossible d’ajouter l’élément « {name} »", + "typedDictClosedExtraTypeMismatch": "Impossible d’ajouter l’élément « {name} » avec le type « {type} »", + "typedDictClosedFieldNotReadOnly": "Impossible d’ajouter l’élément « {name} », car il doit être ReadOnly", + "typedDictClosedFieldNotRequired": "Impossible d’ajouter l’élément « {name} », car il doit être NotRequired", + "typedDictExtraFieldNotAllowed": "« {name} » n’est pas présent dans « {type} »", + "typedDictExtraFieldTypeMismatch": "Le type de « {name} » est incompatible avec le type « extra_items » dans « {type} »", + "typedDictFieldMissing": "« {name} » est manquant dans « {type} »", + "typedDictFieldNotReadOnly": "« {name} » n’est pas en lecture seule dans « {type} »", + 
"typedDictFieldNotRequired": "« {name} » n’est pas obligatoire dans « {type} »", + "typedDictFieldRequired": "« {name} » est obligatoire dans « {type} »", + "typedDictFieldTypeMismatch": "Le type « {type} » n'est pas attribuable à l’élément « {name} »", + "typedDictFieldUndefined": "« {name} » est un élément non défini dans le type « {type} »", + "typedDictKeyAccess": "Utilisez [« {name} »] pour référencer l’élément dans TypedDict", + "typedDictNotAllowed": "TypedDict ne peut pas être utilisé pour les vérifications d’instance ou de classe", + "unhashableType": "Le type \"{type}\" n'est pas hachable", + "uninitializedAbstractVariable": "La variable d’instance « {name} » est définie dans la classe de base abstraite « {classType} » mais n’est pas initialisée", + "unreachableExcept": "« {exceptionType} » est une sous-classe de « {parentType} »", + "useDictInstead": "Utilisez dict[T1, T2] pour indiquer un type de dictionnaire", + "useListInstead": "Utiliser list[T] pour indiquer un type de liste ou T1 | T2 pour indiquer un type d’union", + "useTupleInstead": "Utiliser tuple[T1, ..., Tn] pour indiquer un type de tuple ou T1 | T2 pour indiquer un type d’union", + "useTypeInstead": "Utiliser type[T] à la place", + "varianceMismatchForClass": "La variance de l'argument de type \"{typeVarName}\" est incompatible avec la classe de base \"{className}\"", + "varianceMismatchForTypeAlias": "La variance de l'argument de type \"{typeVarName}\" est incompatible avec \"{typeAliasParam}\"" + }, + "Service": { + "longOperation": "L’énumération des fichiers sources de l’espace de travail prend beaucoup de temps. Envisagez plutôt d’ouvrir un sous-dossier. 
[En savoir plus](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.it.json b/python-parser/packages/pyright-internal/src/localization/package.nls.it.json new file mode 100644 index 00000000..03036d60 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.it.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "Crea Stub di tipo", + "createTypeStubFor": "Crea Stub di tipo per \"{moduleName}\"", + "executingCommand": "Esecuzione del comando", + "filesToAnalyzeCount": "{count} file da analizzare", + "filesToAnalyzeOne": "1 file da analizzare", + "findingReferences": "Ricerca dei riferimenti in corso", + "organizeImports": "Organizza importazioni" + }, + "Completion": { + "autoImportDetail": "Importazione automatica", + "indexValueDetail": "Valore dell’indice" + }, + "Diagnostic": { + "abstractMethodInvocation": "Impossibile chiamare il metodo \"{method}\" perché è astratto e non implementato", + "annotatedMetadataInconsistent": "Il tipo di metadati annotati \"{metadataType}\" non è compatibile con il tipo \"{type}\"", + "annotatedParamCountMismatch": "Numero di annotazioni dei parametro non corrispondente: previsto {expected} ma ricevuto {received}", + "annotatedTypeArgMissing": "Previsto un argomento di tipo e una o più annotazioni per \"Annotated\"", + "annotationBytesString": "Le espressioni di tipo non possono usare valori letterali stringa byte", + "annotationFormatString": "Le espressioni di tipo non possono usare valori letterali stringa di formato (stringhe f)", + "annotationNotSupported": "Annotazione di tipo non supportata per questa istruzione", + "annotationRawString": "Le espressioni di tipo non possono usare valori letterali stringa non elaborata", + "annotationSpansStrings": "Le espressioni di tipo non possono estendersi su più valori letterali stringa", + "annotationStringEscape": "Le espressioni di tipo non possono contenere 
caratteri di escape", + "annotationTemplateString": "Le espressioni di tipo non possono usare valori letterali stringa modello (stringhe t)", + "argAssignment": "Non è possibile assegnare l'argomento di tipo \"{argType}\" al parametro di tipo \"{paramType}\"", + "argAssignmentFunction": "Non è possibile assegnare l'argomento di tipo \"{argType}\" al parametro di tipo \"{paramType}\" nella funzione \"{functionName}\"", + "argAssignmentParam": "Non è possibile assegnare l'argomento di tipo \"{argType}\" al parametro \"{paramName}\" di tipo \"{paramType}\"", + "argAssignmentParamFunction": "Non è possibile assegnare l'argomento di tipo \"{argType}\" al parametro \"{paramName}\" di tipo \"{paramType}\" nella funzione \"{functionName}\"", + "argMissingForParam": "Manca l'argomento per il parametro {name}", + "argMissingForParams": "Argomenti mancanti per i parametri {names}", + "argMorePositionalExpectedCount": "Sono previsti {expected} altri argomenti posizionali", + "argMorePositionalExpectedOne": "Previsto 1 altro argomento posizionale", + "argPositional": "Argomento posizionale previsto", + "argPositionalExpectedCount": "Sono previsti {expected} argomenti posizionali", + "argPositionalExpectedOne": "Previsto 1 argomento posizionale", + "argTypePartiallyUnknown": "Tipo di argomento parzialmente sconosciuto", + "argTypeUnknown": "Il tipo di argomento è sconosciuto", + "assertAlwaysTrue": "L'espressione assert restituisce sempre true", + "assertTypeArgs": "\"assert_type\" prevede due argomenti posizionali", + "assertTypeTypeMismatch": "\"assert_type\" non corrispondente: previsto \"{expected}\" ma ricevuto \"{received}\"", + "assignmentExprComprehension": "La destinazione dell'espressione di assegnazione \"{name}\" non può usare lo stesso nome della comprensione per la destinazione", + "assignmentExprContext": "L'espressione di assegnazione deve essere all'interno di modulo, funzione o lambda", + "assignmentExprInSubscript": "Le espressioni di assegnazione all'interno 
di un pedice sono supportate solo in Python 3.10 e versioni successive", + "assignmentInProtocol": "Le variabili di istanza o di classe all'interno di una classe Protocollo devono essere dichiarate esplicitamente nel corpo della classe", + "assignmentTargetExpr": "L'espressione non può essere una destinazione di assegnazione", + "asyncNotInAsyncFunction": "L'uso di \"async\" non è consentito al di fuori della funzione async", + "awaitIllegal": "L'uso di \"await\" richiede Python 3.5 o versione successiva", + "awaitNotAllowed": "Le espressioni di tipo non possono usare \"await\"", + "awaitNotInAsync": "\"await\" consentito solo all'interno della funzione async", + "backticksIllegal": "Le espressioni racchiuse tra backticks non sono supportate in Python 3.x; usare repr invece", + "baseClassCircular": "La classe non può derivare da se stessa", + "baseClassFinal": "La classe di base \"{type}\" è contrassegnata come finale e non può essere sottoclassata", + "baseClassIncompatible": "Le classi di base di {type} sono incompatibili tra di loro", + "baseClassInvalid": "L'argomento della classe deve essere una classe base", + "baseClassMethodTypeIncompatible": "Le classi di base per la classe \"{classType}\" definiscono il metodo \"{name}\" in modo incompatibile", + "baseClassUnknown": "Il tipo della classe di base è sconosciuto. 
È in corso il tentativo di determinare il tipo della classe derivata", + "baseClassVariableTypeIncompatible": "Le classi di base per la classe \"{classType}\" definiscono la variabile \"{name}\" in modo incompatibile", + "binaryOperationNotAllowed": "Operatore binario non consentito nell'espressione di tipo", + "bindParamMissing": "Impossibile associare il metodo \"{methodName}\" dal momento che il parametro \"self\" o \"cls\" risulta mancante", + "bindTypeMismatch": "Non è stato possibile associare il metodo \"{methodName}\" perché non è possibile assegnare \"{type}\" al parametro \"{paramName}\"", + "breakInExceptionGroup": "\"break\" non consentito in un blocco \"except*\"", + "breakOutsideLoop": "\"break\" può essere usato solo all'interno di un ciclo", + "bytesUnsupportedEscape": "Sequenza di escape non supportata nel valore letterale bytes", + "callableExtraArgs": "Sono previsti solo due argomenti di tipo per \"Callable\"", + "callableFirstArg": "Previsto elenco dei tipi di parametro o \"...\"", + "callableNotInstantiable": "Non è possibile creare un'istanza del tipo \"{type}\"", + "callableSecondArg": "È previsto un tipo restituito come secondo argomento di tipo per \"Callable\"", + "casePatternIsIrrefutable": "Il criterio inconfutabile è consentito solo per l'ultima istruzione case", + "classAlreadySpecialized": "Il tipo \"{type}\" è già specializzato", + "classDecoratorTypeUnknown": "L'elemento Decorator di classe non tipizzato nasconde il tipo di classe. 
l'elemento Decorator verrà ignorato", + "classDefinitionCycle": "La definizione della classe per \"{name}\" dipende da se stessa", + "classGetItemClsParam": "__class_getitem__ override deve accettare un parametro \"cls\"", + "classMethodClsParam": "I metodi di classe devono accettare un parametro \"cls\"", + "classNotRuntimeSubscriptable": "Il pedice per la classe \"{name}\" genererà un'eccezione di runtime; racchiudere l'espressione di tipo tra virgolette", + "classPatternBuiltInArgPositional": "Il modello di classe accetta solo un sotto pattern posizionale", + "classPatternNewType": "Non è possibile usare \"{type}\" in un modello di classe perché è definito tramite NewType", + "classPatternPositionalArgCount": "Troppi modelli posizionale per la classe \"{type}\"; previsto {expected} ma ottenuto {received}", + "classPatternTypeAlias": "\"{type}\" non può essere usato in uno schema di classe, perché è un alias di tipo specializzato", + "classPropertyDeprecated": "Le proprietà della classe sono deprecate in Python 3.11 e non saranno supportate in Python 3.13", + "classTypeParametersIllegal": "La sintassi del parametro del tipo di classe richiede Python 3.12 o versione successiva", + "classVarFirstArgMissing": "È previsto un argomento tipo dopo \"ClassVar\"", + "classVarNotAllowed": "\"ClassVar\" non consentito in questo contesto", + "classVarOverridesInstanceVar": "La variabile di classe \"{name}\" esegue l'override della variabile di istanza con lo stesso nome nella classe \"{className}\"", + "classVarTooManyArgs": "Previsto un solo argomento tipo dopo \"ClassVar\"", + "classVarWithTypeVar": "Il tipo \"ClassVar\" non può includere variabili di tipo", + "clsSelfParamTypeMismatch": "Il tipo di parametro \"{name}\" deve essere un supertipo della relativa classe \"{classType}\"", + "codeTooComplexToAnalyze": "Il codice è troppo complesso per l'analisi. 
ridurre la complessità eseguendo il refactoring in subroutine o riducendo i percorsi del codice condizionale", + "collectionAliasInstantiation": "Non è possibile creare un'istanza del tipo \"{type}\". In alternativa, usare \"{alias}\"", + "comparisonAlwaysFalse": "La condizione restituisce sempre False perché i tipi \"{leftType}\" e \"{rightType}\" non si sovrappongono", + "comparisonAlwaysTrue": "La condizione restituisce sempre True perché i tipi \"{leftType}\" e \"{rightType}\" non si sovrappongono", + "comprehensionInDict": "Non è possibile usare la comprensione con altre voci del dizionario", + "comprehensionInSet": "Non è possibile usare la comprensione con altre voci del set", + "concatenateContext": "\"Concatenate\" non è consentito in questo contesto", + "concatenateParamSpecMissing": "L'ultimo argomento di tipo per \"Concatenate\" deve essere un ParamSpec o \"...\"", + "concatenateTypeArgsMissing": "\"Concatenate\" richiede almeno due argomenti di tipo", + "conditionalOperandInvalid": "Operando condizionale non valido di tipo \"{type}\"", + "constantRedefinition": "\"{name}\" è costante (perché è in maiuscolo) e non può essere ridefinita", + "constructorParametersMismatch": "Mancata corrispondenza tra firma di __new__ e __init__ nella classe \"{classType}\"", + "containmentAlwaysFalse": "L'espressione restituisce sempre False perché i tipi \"{leftType}\" e \"{rightType}\" non si sovrappongono", + "containmentAlwaysTrue": "L'espressione restituisce sempre True perché i tipi \"{leftType}\" e \"{rightType}\" non si sovrappongono", + "continueInExceptionGroup": "\"continue\" non consentito in un blocco \"except*\"", + "continueOutsideLoop": "\"continue\" può essere usato solo all'interno di un ciclo", + "coroutineInConditionalExpression": "L'espressione condizionale fa riferimento a una coroutine che restituisce sempre True", + "dataClassBaseClassFrozen": "Una classe non bloccata non può ereditare da una classe bloccata", + "dataClassBaseClassNotFrozen": "Una 
classe bloccata non può ereditare da una classe non bloccata", + "dataClassConverterFunction": "L'argomento di tipo \"{argType}\" non è un convertitore valido per il campo \"{fieldName}\" di tipo \"{fieldType}\"", + "dataClassConverterOverloads": "Nessun overload di \"{funcName}\" è un convertitore valido per il campo \"{fieldName}\" di tipo \"{fieldType}\"", + "dataClassFieldInheritedDefault": "\"{fieldName}\" esegue l'override di un campo con lo stesso nome, ma manca un valore predefinito", + "dataClassFieldWithDefault": "I campi senza valori predefiniti non possono essere visualizzati dopo i campi con valori predefiniti", + "dataClassFieldWithPrivateName": "Il campo dataclass non può usare un nome privato", + "dataClassFieldWithoutAnnotation": "Il campo dataclass senza annotazione del tipo causerà un'eccezione di runtime", + "dataClassPostInitParamCount": "Dataclass __post_init__ conteggio dei parametri non corretto; il numero di campi InitVar è {expected}", + "dataClassPostInitType": "Dataclass __post_init__ tipo di parametro del metodo non corrispondente per il campo \"{fieldName}\"", + "dataClassSlotsOverwrite": "__slots__ è già definito nella classe", + "dataClassTransformExpectedBoolLiteral": "Espressione prevista che restituisce in modo statico True o False", + "dataClassTransformFieldSpecifier": "È prevista una tuple di classi o funzioni ma è stato ricevuto il tipo \"{type}\"", + "dataClassTransformPositionalParam": "Tutti gli argomenti di \"dataclass_transform\" devono essere argomenti di parole chiave", + "dataClassTransformUnknownArgument": "Argomento \"{name}\" non supportato da dataclass_transform", + "dataProtocolInSubclassCheck": "I protocolli dati (che includono attributi non di metodo) non sono consentiti nelle chiamate issubclass", + "declaredReturnTypePartiallyUnknown": "Il tipo restituito dichiarato \"{returnType}\" è parzialmente sconosciuto", + "declaredReturnTypeUnknown": "Il tipo restituito dichiarato è sconosciuto", + 
"defaultValueContainsCall": "Chiamate di funzione e oggetti modificabili non consentiti all'interno dell'espressione del valore predefinito del parametro", + "defaultValueNotAllowed": "Il parametro con \"*\" o \"**\" non può avere un valore predefinito", + "delTargetExpr": "Non è possibile eliminare l'espressione", + "deprecatedClass": "La classe \"{name}\" è deprecata", + "deprecatedConstructor": "Il costruttore per la classe \"{name}\" è deprecato", + "deprecatedDescriptorDeleter": "Il metodo \"__delete__\" per il descrittore \"{name}\" è deprecato", + "deprecatedDescriptorGetter": "Il metodo \"__get__\" per il descrittore \"{name}\" è deprecato", + "deprecatedDescriptorSetter": "Il metodo \"__set__\" per il descrittore \"{name}\" è deprecato", + "deprecatedFunction": "La funzione \"{name}\" è deprecata", + "deprecatedMethod": "Il metodo \"{name}\" nella classe \"{className}\" è deprecato", + "deprecatedPropertyDeleter": "Il deleter per la property \"{name}\" è deprecato", + "deprecatedPropertyGetter": "Il getter per la property \"{name}\" è deprecato", + "deprecatedPropertySetter": "Il setter per la property \"{name}\" è deprecato", + "deprecatedType": "Questo tipo è deprecato a partire da Python {version}; usa \"{replacement}\"", + "dictExpandIllegalInComprehension": "Espansione del dizionario non consentita nella comprensione", + "dictInAnnotation": "Espressione dizionario non consentita nell'espressione di tipo", + "dictKeyValuePairs": "Le voci del dizionario devono contenere coppie chiave-valore", + "dictUnpackIsNotMapping": "Mapping previsto per l'operatore di decompressione del dizionario", + "dunderAllSymbolNotPresent": "\"{name}\" è specificato in __all__ ma non è presente nel modulo", + "duplicateArgsParam": "È consentito un solo parametro \"*\"", + "duplicateBaseClass": "Classe di base duplicata non consentita", + "duplicateCapturePatternTarget": "La destinazione di acquisizione \"{name}\" non può comparire più di una volta all'interno dello stesso 
schema", + "duplicateCatchAll": "È consentita una sola clausola catch-all except", + "duplicateEnumMember": "Il membro di Enum \"{name}\" è già dichiarato", + "duplicateGenericAndProtocolBase": "È consentita una sola classe di base Generic[...] o Protocol[...]", + "duplicateImport": "\"{importName}\" è stato importato più di una volta", + "duplicateKeywordOnly": "È consentito un solo separatore \"*\"", + "duplicateKwargsParam": "È consentito un solo parametro \"**\"", + "duplicateParam": "\"{name}\" parametro duplicato", + "duplicatePositionOnly": "È consentito un solo parametro \"/\"", + "duplicateStarPattern": "In una sequenza di criteri è consentito un solo criterio \"*\"", + "duplicateStarStarPattern": "È consentita una sola voce \"**\"", + "duplicateUnpack": "Nell'elenco è consentita una sola operazione di decompressione", + "ellipsisAfterUnpacked": "\"...\" non può essere usato con un argomento TypeVarTuple non compresso o tuple", + "ellipsisContext": "\"...\" non è consentito in questo contesto", + "ellipsisSecondArg": "\"...\" è consentito solo come secondo di due argomenti", + "enumClassOverride": "La classe di Enum \"{name}\" è finale e non può essere sottoclassificata", + "enumMemberDelete": "Non è possibile eliminare il membro di Enum \"{name}\"", + "enumMemberSet": "Non è possibile assegnare il membro di Enum \"{name}\"", + "enumMemberTypeAnnotation": "Le annotazioni di tipo non sono consentite per i membri di enumerazione", + "exceptGroupMismatch": "L'istruzione Try non può includere sia \"except\" che \"except*\"", + "exceptGroupRequiresType": "La sintassi del gruppo di eccezioni (\"except*\") richiede un tipo di eccezione", + "exceptRequiresParens": "Più tipi di eccezione devono essere racchiusi tra parentesi prima di Python 3.14", + "exceptWithAsRequiresParens": "Più tipi di eccezione devono essere racchiusi tra parentesi quando si usa \"as\"", + "exceptionGroupIncompatible": "La sintassi del gruppo di eccezioni (\"except*\") richiede Python 3.11 o 
versione successiva", + "exceptionGroupTypeIncorrect": "Il tipo di eccezione in except* non può derivare da BaseGroupException", + "exceptionTypeIncorrect": "\"{type}\" non deriva da BaseException", + "exceptionTypeNotClass": "\"{type}\" non è una classe di eccezione valida", + "exceptionTypeNotInstantiable": "Il costruttore per il tipo di eccezione \"{type}\" richiede uno o più argomenti", + "expectedAfterDecorator": "Dichiarazione di funzione o classe prevista dopo l’elemento Decorator", + "expectedArrow": "Previsto \"->\" seguito da un'annotazione di tipo restituito", + "expectedAsAfterException": "Previsto \"as\" dopo il tipo di eccezione", + "expectedAssignRightHandExpr": "Espressione prevista a destra di \"=\"", + "expectedBinaryRightHandExpr": "Espressione prevista a destra dell'operatore", + "expectedBoolLiteral": "È previsto True o False", + "expectedCase": "Prevista istruzione \"case\"", + "expectedClassName": "Nome classe previsto", + "expectedCloseBrace": "\"{\" non è stato chiuso", + "expectedCloseBracket": "\"[\" non è stato chiuso", + "expectedCloseParen": "\"(\" non è stato chiuso", + "expectedColon": "Previsto \":\"", + "expectedComplexNumberLiteral": "È previsto un valore letterale di numero complesso per i criteri di ricerca", + "expectedDecoratorExpr": "Il modulo dell'espressione non è supportato per l'elemento Decorator precedente a Python 3.9", + "expectedDecoratorName": "È previsto un nome di elemento Decorator", + "expectedDecoratorNewline": "Prevista nuova riga alla fine dell'elemento Decorator", + "expectedDelExpr": "Espressione prevista dopo \"del\"", + "expectedElse": "Previsto \"else\"", + "expectedEquals": "Previsto \"=\"", + "expectedExceptionClass": "Classe od oggetto di eccezione non valido", + "expectedExceptionObj": "Previsto oggetto eccezione, classe eccezione o None", + "expectedExpr": "Espressione prevista", + "expectedFunctionAfterAsync": "Prevista definizione di funzione dopo \"async\"", + "expectedFunctionName": "È previsto 
un nome di funzione dopo \"def\"", + "expectedIdentifier": "Identificatore previsto", + "expectedImport": "Previsto \"import\"", + "expectedImportAlias": "Simbolo previsto dopo \"as\"", + "expectedImportSymbols": "Sono previsti uno o più nomi di simboli dopo \"import\"", + "expectedIn": "Previsto \"in\"", + "expectedInExpr": "Espressione prevista dopo \"in\"", + "expectedIndentedBlock": "Previsto un blocco rientrato", + "expectedMemberName": "Nome dell'attributo previsto dopo \".\"", + "expectedModuleName": "Nome del modulo previsto", + "expectedNameAfterAs": "È previsto il nome del simbolo dopo \"as\"", + "expectedNamedParameter": "Il parametro della parola chiave deve seguire \"*\"", + "expectedNewline": "Prevista nuova riga", + "expectedNewlineOrSemicolon": "Le istruzioni devono essere separate da nuove righe o punti e virgola", + "expectedOpenParen": "Previsto \"(\"", + "expectedParamName": "Nome del parametro previsto", + "expectedPatternExpr": "Espressione del criterio prevista", + "expectedPatternSubjectExpr": "Prevista espressione del soggetto del criterio", + "expectedPatternValue": "Espressione del valore del criterio prevista nel formato \"a.b\"", + "expectedReturnExpr": "Espressione prevista dopo \"return\"", + "expectedSliceIndex": "Prevista espressione di indice o sezione", + "expectedTypeNotString": "È previsto un tipo ma è stato ricevuto un valore letterale stringa", + "expectedTypeParameterName": "Nome del parametro del tipo previsto", + "expectedYieldExpr": "Espressione prevista nell'istruzione yield", + "finalClassIsAbstract": "La classe \"{type}\" è contrassegnata come finale e deve implementare tutti i simboli astratti", + "finalContext": "\"Finale\" non è consentito in questo contesto", + "finalInLoop": "Non è possibile assegnare una variabile \"Final\" all'interno di un ciclo", + "finalMethodOverride": "Il metodo \"{name}\" non può eseguire l'override del metodo finale definito nella classe \"{className}\"", + "finalNonMethod": "Impossibile 
contrassegnare la funzione “{name}” @final perché non è un metodo", + "finalReassigned": "\"{name}\" è dichiarato come Finale e non può essere riassegnato", + "finalRedeclaration": "\"{name}\" è stato dichiarato in precedenza come Finale", + "finalRedeclarationBySubclass": "Non è possibile ridichiarare \"{name}\" perché la classe padre \"{className}\" la dichiara come Finale", + "finalTooManyArgs": "Previsto un singolo argomento tipo dopo \"Final\"", + "finalUnassigned": "\"{name}\" è dichiarato Final, ma il valore non è assegnato", + "finallyBreak": "Non è possibile utilizzare \"break\" per uscire da un blocco \"finally\"", + "finallyContinue": "Non è possibile utilizzare \"continue\" per uscire da un blocco \"finally\"", + "finallyReturn": "Non è possibile utilizzare \"return\" per uscire da un blocco \"finally\"", + "formatStringBrace": "Parentesi graffa di chiusura singola non consentita all'interno del valore letterale f-string; usa parentesi graffa chiusa doppia", + "formatStringBytes": "I valori letterali stringa di formato (f-string) non possono essere binari", + "formatStringDebuggingIllegal": "L’identificatore di debug delle stringhe F \"=\" richiede Python 3.8 o versione successiva", + "formatStringEscape": "Sequenza di escape (barra rovesciata) non consentita nella porzione di espressione di f-string prima di Python 3.12", + "formatStringExpectedConversion": "È previsto un identificatore di conversione dopo \"!\" in f-string", + "formatStringIllegal": "I valori letterali stringa di formato (f-string) richiedono Python 3.6 o versione successiva", + "formatStringInPattern": "Stringa di formato non consentita nel criterio", + "formatStringNestedFormatSpecifier": "Espressioni annidate troppo in profondità nell'identificatore di stringa di formato", + "formatStringNestedQuote": "Le stringhe annidate all'interno di una stringa f non possono usare lo stesso carattere virgolette della stringa f prima di Python 3.12", + "formatStringTemplate": "I valori 
letterali stringa di formato (f-strings) non possono essere anche stringhe modello (stringhe t)", + "formatStringUnicode": "I valori letterali stringa di formato (f-string) non possono essere unicode", + "formatStringUnterminated": "Espressione senza terminazione in f-string; previsto \"}\".", + "functionDecoratorTypeUnknown": "L'elemento Decorator della funzione non tipizzato nasconde il tipo di funzione; l'elemento Decorator verrà ignorato", + "functionInConditionalExpression": "L'espressione condizionale fa riferimento a una funzione che restituisce sempre True", + "functionTypeParametersIllegal": "La sintassi del parametro del tipo di funzione richiede Python 3.12 o versione successiva", + "futureImportLocationNotAllowed": "Le importazioni da __future__ devono trovarsi all'inizio del file", + "generatorAsyncReturnType": "Il tipo restituito della funzione del generatore async deve essere compatibile con \"AsyncGenerator[{yieldType}, Any]\"", + "generatorNotParenthesized": "Le espressioni del generatore devono essere racchiuse tra parentesi se non è l'unico argomento", + "generatorSyncReturnType": "Il tipo restituito della funzione del generatore deve essere compatibile con \"Generator[{yieldType}, Any, Any]\"", + "genericBaseClassNotAllowed": "Non è possibile usare la classe di base \"Generic\" con la sintassi del parametro di tipo", + "genericClassAssigned": "Non è possibile assegnare il tipo di classe generico", + "genericClassDeleted": "Non è possibile eliminare il tipo di classe generico", + "genericInstanceVariableAccess": "L'accesso alla variabile di istanza generica tramite la classe è ambiguo", + "genericNotAllowed": "\"Generic\" non valido in questo contesto", + "genericTypeAliasBoundTypeVar": "L'alias di tipo generico all'interno della classe non può usare variabili di tipo associate {names}", + "genericTypeArgMissing": "\"Generico\" richiede almeno un argomento di tipo", + "genericTypeArgTypeVar": "L'argomento di tipo per \"Generic\" deve essere una 
variabile di tipo", + "genericTypeArgUnique": "Gli argomenti di tipo per \"Generic\" devono essere univoci", + "globalReassignment": "\"{name}\" è assegnato prima della dichiarazione globale", + "globalRedefinition": "\"{name}\" è già stato dichiarato globale", + "implicitStringConcat": "Concatenazione implicita di stringhe non consentita", + "importCycleDetected": "Ciclo rilevato nella catena di importazione", + "importDepthExceeded": "La profondità della catena di importazione ha superato {depth}", + "importResolveFailure": "Non è stato possibile risolvere l'importazione \"{importName}\"", + "importSourceResolveFailure": "Non è stato possibile risolvere l'importazione \"{importName}\" dall’origine", + "importSymbolUnknown": "\"{name}\" è un simbolo di importazione sconosciuto", + "incompatibleMethodOverride": "Il metodo \"{name}\" esegue l'override della classe \"{className}\" in modo incompatibile", + "inconsistentIndent": "Il valore dell'annullamento del rientro non corrisponde al rientro precedente", + "inconsistentTabs": "Uso incoerente di tabulazioni e spazi nel rientro", + "initMethodSelfParamTypeVar": "L'annotazione di tipo per il parametro \"self\" del metodo \"__init__\" non può contenere variabili di tipo con ambito classe", + "initMustReturnNone": "Il tipo restituito di \"__init__\" deve essere None", + "initSubclassCallFailed": "Argomenti di parola chiave non corretti per il metodo __init_subclass__", + "initSubclassClsParam": "__init_subclass__ override deve accettare un parametro \"cls\"", + "initVarNotAllowed": "\"InitVar\" non consentito in questo contesto", + "instanceMethodSelfParam": "I metodi di istanza devono accettare un parametro \"self\"", + "instanceVarOverridesClassVar": "La variabile di istanza \"{name}\" esegue l'override della variabile di classe con lo stesso nome nella classe \"{className}\"", + "instantiateAbstract": "Non è possibile creare un'istanza di classe astratta \"{type}\"", + "instantiateProtocol": "Non è possibile creare 
un'istanza della classe Protocol \"{type}\"", + "internalBindError": "Errore interno durante l'associazione del file \"{file}\": {message}", + "internalParseError": "Si è verificato un errore interno durante l'analisi del file \"{file}\": {message}", + "internalTypeCheckingError": "Errore interno durante il controllo del tipo del file \"{file}\": {message}", + "invalidIdentifierChar": "Carattere non valido nell'identificatore", + "invalidStubStatement": "L'istruzione non ha significato all'interno di un file stub di tipo", + "invalidTokenChars": "Carattere non valido \"{text}\" nel token", + "isInstanceInvalidType": "Il secondo argomento di \"isinstance\" deve essere una classe o una tuple di classi", + "isSubclassInvalidType": "Il secondo argomento di \"issubclass\" deve essere una classe o una tuple di classi", + "keyValueInSet": "Le coppie chiave-valore non sono consentite all'interno di un set", + "keywordArgInTypeArgument": "Gli argomenti delle parole chiave non possono essere usati negli elenchi di argomenti tipo", + "keywordOnlyAfterArgs": "Separatore di argomenti solo parola chiave non consentito dopo il parametro \"*\"", + "keywordParameterMissing": "Uno o più parametri di parole chiave devono seguire il parametro \"*\"", + "keywordSubscriptIllegal": "Gli argomenti delle parole chiave all'interno di pedici non sono supportati", + "lambdaReturnTypePartiallyUnknown": "Il tipo restituito dell'espressione lambda \"{returnType}\" è parzialmente sconosciuto", + "lambdaReturnTypeUnknown": "Il tipo restituito di lambda è sconosciuto", + "listAssignmentMismatch": "Non è possibile assegnare l'espressione con tipo \"{type}\" all'elenco di destinazione", + "listInAnnotation": "Espressione List non consentita nell'espressione type", + "literalEmptyArgs": "Sono previsti uno o più argomenti di tipo dopo \"Literal\"", + "literalNamedUnicodeEscape": "Le sequenze di escape Unicode denominate non sono supportate nelle annotazioni stringa \"Literal\"", + "literalNotAllowed": 
"Non è possibile usare \"Literal\" in questo contesto senza un argomento tipo", + "literalNotCallable": "Non è possibile creare un'istanza del tipo Literal", + "literalUnsupportedType": "Gli argomenti di tipo per \"Literal\" devono essere None, un valore letterale (int, bool, str o bytes) o un valore di enumerazione", + "matchIncompatible": "Le istruzioni match richiedono Python 3.10 o versione successiva", + "matchIsNotExhaustive": "I case all'interno dell'istruzione match non gestiscono in modo completo tutti i valori", + "maxParseDepthExceeded": "È stata superata la profondità massima di analisi; suddividere l'espressione in sottoespressioni più piccole", + "memberAccess": "Non è possibile accedere all'attributo \"{name}\" per la classe \"{type}\"", + "memberDelete": "Non è possibile eliminare l'attributo \"{name}\" per la classe \"{type}\"", + "memberSet": "Non è possibile assegnare all'attributo \"{name}\" per la classe \"{type}\"", + "metaclassConflict": "La metaclasse di una classe derivata deve essere una sottoclasse delle metaclassi di tutte le relative classi di base", + "metaclassDuplicate": "È possibile specificare una sola metaclasse", + "metaclassIsGeneric": "La metaclasse non può essere generica", + "methodNotDefined": "Metodo \"{name}\" non definito", + "methodNotDefinedOnType": "\"{name}\" metodo non definito nel tipo \"{type}\"", + "methodOrdering": "Non è possibile creare un ordinamento coerente del metodo", + "methodOverridden": "\"{name}\" esegue l'override del metodo con lo stesso nome nella classe \"{className}\" con un tipo non compatibile \"{type}\".", + "methodReturnsNonObject": "Il metodo \"{name}\" non restituisce un oggetto", + "missingSuperCall": "Il metodo \"{methodName}\" non chiama il metodo con lo stesso nome nella classe padre", + "mixingBytesAndStr": "Bytes e valori str non possono essere concatenati", + "moduleAsType": "Il modulo non può essere usato come tipo", + "moduleNotCallable": "Modulo non chiamabile", + 
"moduleUnknownMember": "\"{memberName}\" non è un attributo noto del modulo \"{moduleName}\"", + "namedExceptAfterCatchAll": "Una clausola except denominata non può trovarsi dopo la clausola catch-all except", + "namedParamAfterParamSpecArgs": "Il parametro della parola chiave \"{name}\" non può essere visualizzato nella firma dopo il parametro ParamSpec args", + "namedTupleEmptyName": "I nomi all'interno di un tuple denominato non possono essere vuoti", + "namedTupleEntryRedeclared": "Non è possibile eseguire l'override di \"{name}\" perché la classe padre \"{className}\" è un tuple denominato", + "namedTupleFieldUnderscore": "I nomi dei campi di una “Named tuple” non possono iniziare con un carattere di sottolineatura", + "namedTupleFirstArg": "È previsto il nome della classe di tuple denominata come primo argomento", + "namedTupleMultipleInheritance": "L'ereditarietà multipla con NamedTuple non è supportata", + "namedTupleNameKeyword": "I nomi dei campi non possono essere una parola chiave", + "namedTupleNameType": "È prevista una tuple a due voci che specifica il nome e il tipo della voce", + "namedTupleNameUnique": "I nomi all'interno di una tuple denominata devono essere univoci", + "namedTupleNoTypes": "\"namedtuple\" non fornisce tipi per le voci di tuple; usare invece \"NamedTuple\"", + "namedTupleSecondArg": "È previsto un list di voci di tuple denominate come secondo argomento", + "newClsParam": "__new__ override deve accettare un parametro \"cls\"", + "newTypeAnyOrUnknown": "Il secondo argomento di NewType deve essere una classe nota, non Any o Unknown", + "newTypeBadName": "Il primo argomento di NewType deve essere un valore letterale stringa", + "newTypeLiteral": "Non è possibile usare NewType con il tipo Literal", + "newTypeNameMismatch": "NewType deve essere assegnato a una variabile con lo stesso nome", + "newTypeNotAClass": "Classe prevista come secondo argomento di NewType", + "newTypeParamCount": "NewType richiede due argomenti posizionali", + 
"newTypeProtocolClass": "Non è possibile usare NewType con il tipo strutturale (una classe Protocol o TypedDict)", + "noOverload": "Nessun overload per \"{name}\" corrisponde agli argomenti specificati", + "noReturnContainsReturn": "La funzione con tipo return dichiarato \"NoReturn\" non può includere un'istruzione return", + "noReturnContainsYield": "La funzione con il tipo restituito dichiarato \"NoReturn\" non può includere un'istruzione yield", + "noReturnReturnsNone": "La funzione con tipo restituito dichiarato \"NoReturn\" non può restituire \"None\"", + "nonDefaultAfterDefault": "L'argomento non predefinito segue l'argomento predefinito", + "nonLocalInModule": "Dichiarazione nonlocale non consentita a livello di modulo", + "nonLocalNoBinding": "Non è stata trovata alcuna associazione per \"{name}\" nonlocal", + "nonLocalReassignment": "\"{name}\" viene assegnato prima della dichiarazione nonlocal", + "nonLocalRedefinition": "\"{name}\" è già stato dichiarato nonlocal", + "noneNotCallable": "Non è possibile chiamare l'oggetto di tipo \"None\"", + "noneNotIterable": "Impossibile utilizzare l'oggetto di tipo \"None\" come valore iterabile", + "noneNotSubscriptable": "L'oggetto di tipo \"None\" non è sottoponibile a pedice", + "noneNotUsableWith": "Impossibile utilizzare l'oggetto di tipo \"None\" con \"with\"", + "noneNotUsableWithAsync": "Impossibile utilizzare l'oggetto di tipo \"None\" con \"async with\"", + "noneOperator": "Operatore \"{operator}\" non supportato per \"None\"", + "noneUnknownMember": "\"{name}\" non è un attributo noto di \"None\"", + "nonlocalTypeParam": "Non è consentita l'associazione nonlocal per il parametro di tipo \"{name}\"", + "notRequiredArgCount": "Previsto un singolo argomento tipo dopo \"NotRequired\"", + "notRequiredNotInTypedDict": "\"NotRequired\" non è consentito in questo contesto", + "objectNotCallable": "L'oggetto di tipo \"{type}\" non è chiamabile", + "obscuredClassDeclaration": "La dichiarazione di classe \"{name}\" è oscurata 
da una dichiarazione con lo stesso nome", + "obscuredFunctionDeclaration": "La dichiarazione della funzione \"{name}\" è oscurata da una dichiarazione con lo stesso nome", + "obscuredMethodDeclaration": "La dichiarazione del metodo \"{name}\" è oscurata da una dichiarazione con lo stesso nome", + "obscuredParameterDeclaration": "La dichiarazione del parametro \"{name}\" è oscurata da una dichiarazione con lo stesso nome", + "obscuredTypeAliasDeclaration": "La dichiarazione dell'alias di tipo \"{name}\" è nascosta da una dichiarazione con lo stesso nome", + "obscuredVariableDeclaration": "La dichiarazione \"{name}\" è oscurata da una dichiarazione con lo stesso nome", + "operatorLessOrGreaterDeprecated": "L'operatore \"<>\" non è supportato in Python 3. Usare invece \"!=\"", + "optionalExtraArgs": "È previsto un argomento di tipo dopo \"Optional\"", + "orPatternIrrefutable": "Criterio inconfutabile consentito solo come ultimo criterio secondario in un criterio \"or\"", + "orPatternMissingName": "Tutti i criteri secondari all'interno di un criterio \"or\" devono avere come destinazione gli stessi nomi", + "overlappingKeywordArgs": "Il dizionario tipizzato si sovrappone al parametro della parola chiave: {names}", + "overlappingOverload": "L'overload {obscured} per \"{name}\" non verrà mai usato perché i parametri si sovrappongono all'overload {obscuredBy}", + "overloadAbstractImplMismatch": "Gli overload devono corrispondere allo stato astratto dell'implementazione", + "overloadAbstractMismatch": "Gli overload devono essere tutti astratti o no", + "overloadClassMethodInconsistent": "Gli overload per \"{name}\" usano @classmethod in modo incoerente", + "overloadFinalImpl": "@final'elemento Decorator deve essere applicato solo all'implementazione", + "overloadFinalNoImpl": "Solo il primo overload deve essere contrassegnato @final", + "overloadImplementationMismatch": "L'implementazione di overload non è coerente con la firma dell'overload {index}", + 
"overloadOverrideImpl": "@override'elemento Decorator deve essere applicato solo all'implementazione", + "overloadOverrideNoImpl": "Solo il primo overload deve essere contrassegnato @override", + "overloadReturnTypeMismatch": "L'overload {prevIndex} per \"{name}\" si sovrappone all'overload {newIndex} e restituisce un tipo incompatibile", + "overloadStaticMethodInconsistent": "Gli overload per \"{name}\" usano @staticmethod in modo incoerente", + "overloadWithoutImplementation": "\"{name}\" è contrassegnato come overload, ma non viene fornita alcuna implementazione", + "overriddenMethodNotFound": "Il metodo \"{name}\" è contrassegnato come override, ma non è presente alcun metodo di base con lo stesso nome", + "overrideDecoratorMissing": "Il metodo \"{name}\" non è contrassegnato come override, ma esegue l'override di un metodo nella classe \"{className}\"", + "paramAfterKwargsParam": "Il parametro non può seguire il parametro \"**\"", + "paramAlreadyAssigned": "Il parametro \"{name}\" è già assegnato", + "paramAnnotationMissing": "Annotazione di tipo mancante per il parametro \"{name}\"", + "paramAssignmentMismatch": "Non è possibile assegnare l'espressione di tipo \"{sourceType}\" al parametro di tipo \"{paramType}\"", + "paramNameMissing": "Nessun parametro denominato \"{name}\"", + "paramSpecArgsKwargsDuplicate": "Gli argomenti per ParamSpec \"{type}\" sono già stati specificati", + "paramSpecArgsKwargsUsage": "Gli attributi \"args\" e \"kwargs\" di ParamSpec devono essere entrambi visualizzati all'interno di una firma di funzione", + "paramSpecArgsMissing": "Gli argomenti per ParamSpec \"{type}\" sono mancanti", + "paramSpecArgsUsage": "L'attributo \"args\" di ParamSpec è valido solo se usato con il parametro *args", + "paramSpecAssignedName": "ParamSpec deve essere assegnato a una variabile denominata \"{name}\"", + "paramSpecContext": "ParamSpec non è consentito in questo contesto", + "paramSpecDefaultNotTuple": "Sono previsti puntini di sospensione, 
un'espressione di tuple o ParamSpec per il valore predefinito di ParamSpec", + "paramSpecFirstArg": "Nome previsto di ParamSpec come primo argomento", + "paramSpecKwargsUsage": "L'attributo \"kwargs\" di ParamSpec è valido solo se usato con il parametro **kwargs", + "paramSpecNotUsedByOuterScope": "ParamSpec \"{name}\" non ha significato in questo contesto", + "paramSpecUnknownArg": "ParamSpec non supporta più di un argomento", + "paramSpecUnknownMember": "\"{name}\" non è un attributo noto di ParamSpec", + "paramSpecUnknownParam": "\"{name}\" è un parametro sconosciuto per ParamSpec", + "paramTypeCovariant": "Non è possibile usare la variabile di tipo covariante nel tipo di parametro", + "paramTypePartiallyUnknown": "Tipo di parametro \"{paramName}\" parzialmente sconosciuto", + "paramTypeUnknown": "Tipo di parametro \"{paramName}\" sconosciuto", + "parenthesizedContextManagerIllegal": "Parentheses within \"with\" statement requires Python 3.9 or newer", + "patternNeverMatches": "Il criterio non verrà mai confrontato per il tipo di oggetto \"{type}\"", + "positionArgAfterNamedArg": "L'argomento posizionale non può essere visualizzato dopo gli argomenti della parola chiave", + "positionArgAfterUnpackedDictArg": "L'argomento posizionale non può essere visualizzato dopo la decompressione degli argomenti della parola chiave", + "positionOnlyAfterArgs": "Separatore di parametri di sola posizione non consentito dopo il parametro \"*\"", + "positionOnlyAfterKeywordOnly": "Il parametro \"/\" deve essere visualizzato prima del parametro \"*\"", + "positionOnlyAfterNon": "Il parametro di sola posizione non è consentito dopo un parametro che non è di sola posizione", + "positionOnlyFirstParam": "Separatore di argomenti di sola posizione non consentito come primo parametro", + "positionOnlyIncompatible": "Il separatore di parametri di sola posizione richiede Python 3.8 o versione successiva", + "privateImportFromPyTypedModule": "\"{name}\" non è esportato dal modulo 
\"{module}\"", + "privateUsedOutsideOfClass": "\"{name}\" è privato e utilizzato all'esterno del modulo in cui è dichiarato", + "privateUsedOutsideOfModule": "\"{name}\" è privato e utilizzato all'esterno del modulo in cui è dichiarato", + "propertyOverridden": "\"{name}\" esegue erroneamente l’override di una property con lo stesso nome nella classe \"{className}\"", + "propertyStaticMethod": "Metodi statici non consentiti per getter, setter o deleter di property", + "protectedUsedOutsideOfClass": "\"{name}\" è protetto e usato al di fuori della classe in cui è dichiarato", + "protocolBaseClass": "La classe Protocol \"{classType}\" non può derivare dalla classe non Protocol \"{baseType}\"", + "protocolBaseClassWithTypeArgs": "Gli argomenti tipo non sono consentiti con la classe Protocollo quando si usa la sintassi dei parametri tipo", + "protocolIllegal": "L'uso di \"Protocol\" richiede Python 3.7 o versione successiva", + "protocolNotAllowed": "\"Protocol\" non può essere usato in questo contesto", + "protocolTypeArgMustBeTypeParam": "L'argomento di tipo per \"Protocol\" deve essere un parametro di tipo", + "protocolUnsafeOverlap": "La classe si sovrappone a \"{name}\" in modo non sicuro e può produrre una corrispondenza in fase di esecuzione", + "protocolVarianceContravariant": "La variabile di tipo \"{variable}\" usata in \"{class}\" Protocol generico deve essere controvariante", + "protocolVarianceCovariant": "La variabile di tipo \"{variable}\" usata in \"{class}\" Protocol generico deve essere covariante", + "protocolVarianceInvariant": "La variabile di tipo \"{variable}\" usata in \"{class}\" Protocol generico deve essere invariante", + "pyrightCommentInvalidDiagnosticBoolValue": "La direttiva di commento Pyright deve essere seguita da \"=\" e da un valore true o false", + "pyrightCommentInvalidDiagnosticSeverityValue": "La direttiva di commento Pyright deve essere seguita da \"=\" e da un valore true, false, error, warning, information o none", + 
"pyrightCommentMissingDirective": "Il commento Pyright deve essere seguito da una direttiva (basic o strict) o da una regola di diagnostica", + "pyrightCommentNotOnOwnLine": "I commenti Pyright usati per controllare le impostazioni a livello di file devono essere visualizzati nella propria riga", + "pyrightCommentUnknownDiagnosticRule": "\"{rule}\" è una regola di diagnostica sconosciuta per il commento pyright", + "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" non è un valore valido per il commento pyright; previsto true, false, error, warning, information o none", + "pyrightCommentUnknownDirective": "\"{directive}\" è una direttiva sconosciuta per il commento pyright; previsto \"strict\", \"standard\" o \"basic\"", + "readOnlyArgCount": "Previsto un singolo argomento tipo dopo \"ReadOnly\"", + "readOnlyNotInTypedDict": "\"ReadOnly\" non consentito in questo contesto", + "recursiveDefinition": "Non è stato possibile determinare il tipo di \"{name}\" perché fa riferimento a se stesso", + "relativeImportNotAllowed": "Le importazioni relative non possono essere usate con il modulo \"import .a\". Usare invece \"from . 
import a\"", + "requiredArgCount": "È previsto un singolo argomento di tipo dopo \"Required\"", + "requiredNotInTypedDict": "\"Required\" non è consentito in questo contesto", + "returnInAsyncGenerator": "L’istruzione return con valore non è consentita nel generatore async", + "returnInExceptionGroup": "\"return\" non consentito in un blocco \"except*\"", + "returnMissing": "La funzione con tipo restituito dichiarato \"{returnType}\" deve restituire un valore in tutti i percorsi di codice", + "returnOutsideFunction": "\"return\" può essere usata solo all'interno di una funzione.", + "returnTypeContravariant": "Non è possibile usare la variabile di tipo controvariante nel tipo restituito", + "returnTypeMismatch": "Il tipo \"{exprType}\" non è assegnabile al tipo restituito \"{returnType}\"", + "returnTypePartiallyUnknown": "Il tipo restituito \"{returnType}\" è parzialmente sconosciuto", + "returnTypeUnknown": "Il tipo restituito è sconosciuto", + "revealLocalsArgs": "Non è previsto alcun argomento per la chiamata \"reveal_locals\"", + "revealLocalsNone": "Non sono presenti variabili locals in questo ambito", + "revealTypeArgs": "Previsto un singolo argomento posizionale per la chiamata \"reveal_type\"", + "revealTypeExpectedTextArg": "L'argomento \"expected_text\" per la funzione \"reveal_type\" deve essere un valore letterale str", + "revealTypeExpectedTextMismatch": "Testo di tipo non corrispondente; previsto \"{expected}\" ma ricevuto \"{received}\"", + "revealTypeExpectedTypeMismatch": "Tipo non corrispondente; previsto \"{expected}\" ma ricevuto \"{received}\"", + "selfTypeContext": "\"Self\" non è valido in questo contesto", + "selfTypeMetaclass": "Impossibile utilizzare “Self” all'interno di una metaclasse (una sottoclasse di “type”)", + "selfTypeWithTypedSelfOrCls": "Non è possibile usare \"Self\" in una funzione con un parametro 'self' o 'cls' con un'annotazione di tipo diversa da \"Self\"", + "sentinelBadName": "Il primo argomento di Sentinel deve essere 
un valore letterale stringa", + "sentinelNameMismatch": "Sentinel deve essere assegnato a una variabile con lo stesso nome", + "sentinelParamCount": "Sentinel richiede un argomento posizionale", + "setterGetterTypeMismatch": "Il tipo di valore del setter di Property non è assegnabile al tipo restituito del getter", + "singleOverload": "\"{name}\" è contrassegnato come overload, ma mancano altri overload", + "slotsAttributeError": "\"{name}\" non è specificato in __slots__", + "slotsClassVarConflict": "\"{name}\" è in conflitto con la variabile di istanza dichiarata in __slots__", + "starPatternInAsPattern": "Il modello a stella non può essere usato con la destinazione \"as\"", + "starPatternInOrPattern": "Il modello a stella non può essere ORed all'interno di altri modelli", + "starStarWildcardNotAllowed": "** non può essere usato con il carattere jolly \"_\"", + "staticClsSelfParam": "I metodi statici non devono accettare un parametro \"self\" o \"cls\"", + "stringNonAsciiBytes": "Carattere non ASCII non consentito nel valore letterale stringa dei byte", + "stringNotSubscriptable": "L'espressione stringa non può essere in pedice nell'espressione di tipo. 
Racchiudere l'intera espressione tra virgolette", + "stringUnsupportedEscape": "Sequenza di escape non supportata nel valore letterale stringa", + "stringUnterminated": "Il valore letterale stringa non è terminato", + "stubFileMissing": "File di stub non trovato per \"{importName}\"", + "stubUsesGetAttr": "Il file dello stub di tipo è incompleto; \"__getattr__\" nasconde gli errori di tipo per il modulo", + "sublistParamsIncompatible": "I parametri di sublist non sono supportati in Python 3.x", + "superCallArgCount": "Non sono previsti più di due argomenti per la chiamata \"super\".", + "superCallFirstArg": "È previsto un tipo di classe come primo argomento della chiamata \"super\", ma è stato ricevuto \"{type}\"", + "superCallSecondArg": "Il secondo argomento della chiamata \"super\" deve essere un oggetto o una classe che deriva da \"{type}\"", + "superCallZeroArgForm": "Il modulo zero-argument della chiamata \"super\" è valido solo all'interno di un metodo", + "superCallZeroArgFormStaticMethod": "Il modulo zero-argument della chiamata \"super\" non è valido all'interno di un metodo statico", + "symbolIsPossiblyUnbound": "\"{name}\" potrebbe non essere associato", + "symbolIsUnbound": "\"{name}\" non associato", + "symbolIsUndefined": "\"{name}\" non è definito", + "symbolOverridden": "\"{name}\" esegue l'override del simbolo con lo stesso nome nella classe \"{className}\"", + "templateStringBytes": "I valori letterali stringa modello (stringhe t) non possono essere binari", + "templateStringIllegal": "I valori letterali stringa modello (stringhe t) richiedono Python 3.14 o versione successiva", + "templateStringUnicode": "I valori letterali stringa modello (stringhe t) non possono essere Unicode", + "ternaryNotAllowed": "Espressione ternaria non consentita nell'espressione di tipo", + "totalOrderingMissingMethod": "La classe deve definire uno dei valori di \"__lt__\", \"__le__\", \"__gt__\" o \"__ge__\" per usare total_ordering", + "trailingCommaInFromImport": 
"Virgola finale non consentita senza parentesi circostanti", + "tryWithoutExcept": "L'istruzione Try deve contenere almeno una clausola except or finally", + "tupleAssignmentMismatch": "Non è possibile assegnare l'espressione con tipo \"{type}\" al tuple di destinazione", + "tupleInAnnotation": "Espressione di tuple non consentita nell'espressione del tipo", + "tupleIndexOutOfRange": "L'indice {index} non è compreso nell'intervallo per il tipo {type}", + "typeAliasIllegalExpressionForm": "Modulo di espressione non valido per la definizione dell'alias di tipo", + "typeAliasIsRecursiveDirect": "L'alias di tipo \"{name}\" non può usare se stesso nella relativa definizione", + "typeAliasNotInModuleOrClass": "TypeAlias può essere definito solo all'interno di un modulo o di una classe", + "typeAliasRedeclared": "\"{name}\" è dichiarato come TypeAlias e può essere assegnato una sola volta", + "typeAliasStatementBadScope": "Un'istruzione type può essere usata solo all'interno di un modulo o di un ambito della classe", + "typeAliasStatementIllegal": "L'istruzione alias di tipo richiede Python 3.12 o versione successiva", + "typeAliasTypeBadScope": "È possibile definire un alias di tipo solo all'interno di un ambito classe o modulo", + "typeAliasTypeBaseClass": "Impossibile utilizzare come classe di base un alias di tipo definito in un'istruzione \"type\"", + "typeAliasTypeMustBeAssigned": "TypeAliasType deve essere assegnato a una variabile con lo stesso nome dell'alias di tipo", + "typeAliasTypeNameArg": "Il primo argomento di TypeAliasType deve essere un valore letterale stringa che rappresenta il nome dell'alias di tipo", + "typeAliasTypeNameMismatch": "Il nome dell'alias di tipo deve corrispondere al nome della variabile a cui è assegnato", + "typeAliasTypeParamInvalid": "L'elenco dei parametri di tipo deve essere un tuple contenente solo TypeVar, TypeVarTuple o ParamSpec.", + "typeAnnotationCall": "Espressione di chiamata non consentita nell'espressione di tipo", + 
"typeAnnotationVariable": "Variabile non consentita nell'espressione di tipo", + "typeAnnotationWithCallable": "L'argomento di tipo per \"type\" deve essere una classe. I callable non sono supportati", + "typeArgListExpected": "Sono previsti ParamSpec, puntini di sospensione o elenco di list", + "typeArgListNotAllowed": "Espressione di List non consentita per questo argomento di tipo", + "typeArgsExpectingNone": "Non sono previsti argomenti di tipo per la classe \"{name}\"", + "typeArgsMismatchOne": "Previsto un argomento di tipo, ricevuto {received}", + "typeArgsMissingForAlias": "Sono previsti argomenti di tipo per l'alias di tipo generico \"{name}\"", + "typeArgsMissingForClass": "Argomenti tipo previsti per la classe generica \"{name}\"", + "typeArgsTooFew": "Troppo pochi argomenti tipo forniti per \"{name}\"; previsto {expected} ma ricevuto {received}", + "typeArgsTooMany": "Troppi argomenti tipo forniti per \"{name}\"; previsto {expected} ma ricevuto {received}", + "typeAssignmentMismatch": "Il tipo \"{sourceType}\" non è assegnabile al tipo dichiarato \"{destType}\"", + "typeAssignmentMismatchWildcard": "Il simbolo di importazione \"{name}\" ha il tipo \"{sourceType}\", che non è assegnabile al tipo dichiarato \"{destType}\"", + "typeCallNotAllowed": "la chiamata type() non deve essere usata nell'espressione di tipo", + "typeCheckOnly": "\"{name}\" è contrassegnato come @type_check_only e può essere utilizzato solo nelle annotazioni tipo", + "typeCommentDeprecated": "L'uso dei commenti di type è deprecato. 
Usare invece l'annotazione type", + "typeExpectedClass": "Classe prevista ma ricevuta \"{type}\"", + "typeFormArgs": "\"TypeForm\" accetta un singolo argomento posizionale", + "typeGuardArgCount": "È previsto un singolo argomento di tipo dopo \"TypeGuard\" o \"TypeIs\"", + "typeGuardParamCount": "Le funzioni e i metodi di protezione dei tipi definiti dall'utente devono avere almeno un parametro di input", + "typeIsReturnType": "Il tipo restituito di TypeIs (\"{returnType}\") non è coerente con il tipo di parametro di valore (\"{type}\")", + "typeNotAwaitable": "\"{type}\" non è awaitable", + "typeNotIntantiable": "Non è possibile creare un'istanza di \"{type}\"", + "typeNotIterable": "\"{type}\" non è iterabile", + "typeNotSpecializable": "Non è stato possibile specializzare il tipo \"{type}\"", + "typeNotSubscriptable": "L'oggetto di tipo \"{type}\" non è sottoponibile a script", + "typeNotSupportBinaryOperator": "L'operatore \"{operator}\" non è supportato per i tipi \"{leftType}\" e \"{rightType}\".", + "typeNotSupportBinaryOperatorBidirectional": "L'operatore \"{operator}\" non è supportato per i tipi \"{leftType}\" e \"{rightType}\" quando il tipo previsto è \"{expectedType}\"", + "typeNotSupportUnaryOperator": "Operatore \"{operator}\" non supportato per il tipo \"{type}\"", + "typeNotSupportUnaryOperatorBidirectional": "L'operatore \"{operator}\" non è supportato per il tipo \"{type}\" quando il tipo previsto è \"{expectedType}\"", + "typeNotUsableWith": "Impossibile utilizzare l'oggetto di tipo \"{type}\" con \"with\" perché non implementa correttamente {method}", + "typeNotUsableWithAsync": "Impossibile utilizzare l'oggetto di tipo \"{type}\" con \"async with\" perché non implementa {method}", + "typeParameterBoundNotAllowed": "Il vincolo o il binding non possono essere usati con un parametro di tipo variadic o ParamSpec", + "typeParameterConstraintTuple": "Il vincolo del parametro di tipo deve essere una tupla di due o più tipi", + 
"typeParameterExistingTypeParameter": "Il parametro di tipo \"{name}\" è già in uso", + "typeParameterNotDeclared": "Il parametro di tipo \"{name}\" non è incluso nell'elenco dei parametri di tipo per \"{container}\"", + "typeParametersMissing": "È necessario specificare almeno un parametro di tipo", + "typePartiallyUnknown": "Tipo di \"{name}\" parzialmente sconosciuto", + "typeUnknown": "Il tipo di \"{name}\" è sconosciuto", + "typeVarAssignedName": "TypeVar deve essere assegnato a una variabile denominata \"{name}\"", + "typeVarAssignmentMismatch": "Non è possibile assegnare il tipo \"{type}\" alla variabile di tipo \"{name}\"", + "typeVarBoundAndConstrained": "TypeVar non può essere contemporaneamente associato e vincolato", + "typeVarBoundGeneric": "Il tipo associato a TypeVar non può essere generico", + "typeVarConstraintGeneric": "Il tipo di vincolo TypeVar non può essere generico", + "typeVarDefaultBoundMismatch": "Il tipo predefinito TypeVar deve essere un sottotipo del tipo associato", + "typeVarDefaultConstraintMismatch": "Il tipo predefinito TypeVar deve essere uno dei tipi vincolati", + "typeVarDefaultIllegal": "I tipi predefiniti delle variabili di tipo richiedono Python 3.13 o versione successiva", + "typeVarDefaultInvalidTypeVar": "Il parametro di tipo \"{name}\" ha un tipo predefinito che fa riferimento a una o più variabili di tipo non compreso nell'ambito", + "typeVarFirstArg": "Nome previsto di TypeVar come primo argomento", + "typeVarInvalidForMemberVariable": "Il tipo di attributo non può usare la variabile di tipo \"{name}\" con ambito del metodo locale", + "typeVarNoMember": "TypeVar \"{type}\" non ha alcun attributo \"{name}\"", + "typeVarNotSubscriptable": "TypeVar \"{type}\" non sottoponibile a script", + "typeVarNotUsedByOuterScope": "La variabile di tipo \"{name}\" non ha significato in questo contesto", + "typeVarPossiblyUnsolvable": "La variabile di tipo \"{name}\" potrebbe non essere risolta se il chiamante non fornisce alcun 
argomento per il parametro \"{param}\"", + "typeVarSingleConstraint": "TypeVar deve contenere almeno due tipi vincolati", + "typeVarTupleConstraints": "TypeVarTuple non può avere vincoli di valore", + "typeVarTupleContext": "TypeVarTuple non è consentito in questo contesto", + "typeVarTupleDefaultNotUnpacked": "Il tipo predefinito TypeVarTuple deve essere un tuple non compresso o TypeVarTuple", + "typeVarTupleMustBeUnpacked": "L'operatore Decomprimi è obbligatorio per il valore TypeVarTuple", + "typeVarTupleUnknownParam": "\"{name}\" è un parametro sconosciuto per TypeVarTuple", + "typeVarUnknownParam": "\"{name}\" è un parametro sconosciuto per TypeVar", + "typeVarUsedByOuterScope": "TypeVar \"{name}\" già in uso da un ambito esterno", + "typeVarUsedOnlyOnce": "TypeVar \"{name}\" viene visualizzato una sola volta nella firma della funzione generica", + "typeVarVariance": "TypeVar non può essere covariante e controvariante", + "typeVarWithDefaultFollowsVariadic": "TypeVar \"{typeVarName}\" ha un valore predefinito e non può seguire TypeVarTuple \"{variadicName}\"", + "typeVarWithoutDefault": "\"{name}\" non può essere visualizzato dopo \"{other}\" nell'elenco dei parametri del tipo, perché non ha un tipo predefinito", + "typeVarsNotInGenericOrProtocol": "Generic[] o Protocol[] deve includere tutte le variabili di tipo", + "typedDictAccess": "Non è stato possibile accedere all'elemento in TypedDict", + "typedDictAssignedName": "TypedDict deve essere assegnato a una variabile denominata \"{name}\"", + "typedDictBadVar": "Le classi TypedDict possono contenere solo annotazioni di tipo", + "typedDictBaseClass": "Anche tutte le classi di base per le classi TypedDict devono essere classi TypedDict", + "typedDictBoolParam": "È previsto che il parametro \"{name}\" abbia il valore True o False", + "typedDictClosedExtras": "La classe di base \"{name}\" è una TypedDict che limita il tipo di elementi aggiuntivi al tipo \"{type}\"", + "typedDictClosedFalseNonOpenBase": "La 
classe base \"{name}\" non è un TypedDict aperto. closed=False non consentito", + "typedDictClosedNoExtras": "La classe di base \"{name}\" è un TypedDict closed; elementi aggiuntivi non consentiti", + "typedDictDelete": "Non è stato possibile eliminare l'elemento in TypedDict", + "typedDictEmptyName": "I nomi all'interno di un TypedDict non possono essere vuoti", + "typedDictEntryName": "Valore letterale stringa previsto per il nome della voce del dizionario", + "typedDictEntryUnique": "I nomi all'interno di un dizionario devono essere univoci", + "typedDictExtraArgs": "Argomenti TypedDict aggiuntivi non supportati", + "typedDictExtraItemsClosed": "TypedDict possono utilizzare \"closed\" o \"extra_items\" ma non entrambi", + "typedDictFieldNotRequiredRedefinition": "Non è possibile ridefinire il campo TypedDict \"{name}\" come NotRequired", + "typedDictFieldReadOnlyRedefinition": "Non è possibile ridefinire l’elemento TypedDict \"{name}\" come ReadOnly", + "typedDictFieldRequiredRedefinition": "Non è possibile ridefinire il campo TypedDict \"{name}\" come Required", + "typedDictFirstArg": "È previsto il nome della classe TypedDict come primo argomento", + "typedDictInClassPattern": "Classe TypedDict non consentita nel modello di classe", + "typedDictInitsubclassParameter": "TypedDict non supporta __init_subclass__ parametro “{name}”", + "typedDictNotAllowed": "\"TypedDict\" non può essere usato in questo contesto", + "typedDictSecondArgDict": "Previsto parametro dict o keyword come secondo parametro", + "typedDictSecondArgDictEntry": "Voce di dizionario semplice prevista", + "typedDictSet": "Non è stato possibile assegnare l'elemento in TypedDict", + "unaccessedClass": "Non è stato eseguito l'accesso alla classe \"{name}\"", + "unaccessedFunction": "Non è stato eseguito l'accesso alla funzione \"{name}\"", + "unaccessedImport": "Non è stato eseguito l'accesso all'importazione \"{name}\"", + "unaccessedSymbol": "Non è stato eseguito l'accesso a \"{name}\"", + 
"unaccessedVariable": "Non è stato eseguito l'accesso alla variabile \"{name}\"", + "unannotatedFunctionSkipped": "L'analisi della funzione \"{name}\" è stata ignorata perché non è annotata", + "unaryOperationNotAllowed": "Operatore unario non consentito nell'espressione di tipo", + "unexpectedAsyncToken": "È previsto che \"def\", \"with\" o \"for\" seguano \"async\"", + "unexpectedEof": "EOF imprevisto", + "unexpectedExprToken": "Token imprevisto alla fine dell'espressione", + "unexpectedIndent": "Rientro imprevisto", + "unexpectedUnindent": "Riduci rientro non previsto", + "unhashableDictKey": "La chiave del dizionario deve essere hashable", + "unhashableSetEntry": "La voce set deve essere hashable", + "uninitializedAbstractVariables": "Le variabili definite nella classe di base astratta non vengono inizializzate nella classe finale \"{classType}\"", + "uninitializedInstanceVariable": "La variabile di istanza \"{name}\" non è inizializzata nel corpo della classe o nel metodo __init__", + "unionForwardReferenceNotAllowed": "Impossibile utilizzare la sintassi di Union con l'operando stringa. 
Usare virgolette intorno all'intera espressione", + "unionSyntaxIllegal": "La sintassi alternativa per le unioni richiede Python 3.10 o versione successiva", + "unionTypeArgCount": "Unione richiede due o più argomenti di tipo", + "unionUnpackedTuple": "Union non può includere un tuple decompresso", + "unionUnpackedTypeVarTuple": "Union non può includere un TypeVarTuple non compresso", + "unnecessaryCast": "Chiamata \"cast\" non necessaria; il tipo è già \"{type}\"", + "unnecessaryIsInstanceAlways": "Chiamata isinstance non necessaria; \"{testType}\" è sempre un'istanza di \"{classType}\"", + "unnecessaryIsInstanceNever": "Chiamata isinstance non necessaria; \"{testType}\" non è mai un'istanza di \"{classType}\"", + "unnecessaryIsSubclassAlways": "Chiamata issubclass non necessaria; \"{testType}\" è sempre una sottoclasse di \"{classType}\"", + "unnecessaryIsSubclassNever": "Chiamata issubclass non necessaria; \"{testType}\" non è mai una sottoclasse di \"{classType}\"", + "unnecessaryPyrightIgnore": "Commento \"# pyright: ignore\" non necessario", + "unnecessaryPyrightIgnoreRule": "Regola \"# pyright: ignore\" non necessaria: \"{name}\"", + "unnecessaryTypeIgnore": "Commento \"# type: ignore\" non necessario", + "unpackArgCount": "Previsto un singolo argomento di tipo dopo \"Unpack\"", + "unpackExpectedTypeVarTuple": "È previsto TypeVarTuple o tuple come argomento di tipo per Unpack", + "unpackExpectedTypedDict": "Previsto argomento di tipo TypedDict per Unpack", + "unpackIllegalInComprehension": "Operazione di decompressione non consentita nella comprensione", + "unpackInAnnotation": "Operatore di decompressione non consentito nell'espressione di tipo", + "unpackInDict": "Operazione di decompressione non consentita nei dizionari", + "unpackInSet": "Operatore di decompressione non consentito all’interno di un set", + "unpackNotAllowed": "Unpack non è consentito in questo contesto", + "unpackOperatorNotAllowed": "L’operazione di decompressione non è consentita in 
questo contesto", + "unpackTuplesIllegal": "L'operazione di decompressione non è consentita nelle tuple precedenti a Python 3.8", + "unpackedArgInTypeArgument": "Non è possibile usare argomenti decompressi in questo contesto", + "unpackedArgWithVariadicParam": "Non è possibile usare l'argomento decompresso per il parametro TypeVarTuple", + "unpackedDictArgumentNotMapping": "L'espressione dell'argomento dopo ** deve essere un mapping con un tipo di chiave \"str\"", + "unpackedDictSubscriptIllegal": "L'operatore di decompressione del dizionario nel pedice non è consentito", + "unpackedSubscriptIllegal": "L'operatore di decompressione nel pedice richiede Python 3.11 o versione successiva", + "unpackedTypeVarTupleExpected": "Previsto TypeVarTuple decompresso; usa Unpack[{name1}] o *{name2}", + "unpackedTypedDictArgument": "Impossibile trovare una corrispondenza tra l'argomento TypedDict non compresso e i parametri", + "unreachableCodeCondition": "Il codice non viene analizzato perché la condizione viene valutata in modo statico come false", + "unreachableCodeStructure": "Il codice è strutturalmente irraggiungibile", + "unreachableCodeType": "L’analisi dei tipi indica che il codice non è raggiungibile.", + "unreachableExcept": "La clausola Except non è raggiungibile perché l'eccezione è già gestita", + "unsupportedDunderAllOperation": "L'operazione su \"__all__\" non è supportata, di conseguenza l'elenco dei simboli esportati potrebbe non essere corretto", + "unusedCallResult": "Il risultato dell'espressione di chiamata è di tipo \"{type}\" e non è usato. Assegnare alla variabile \"_\" se è intenzionale", + "unusedCoroutine": "Il risultato della chiamata alla funzione async non viene usato. Usare \"await\" o assegnare il risultato alla variabile", + "unusedExpression": "Il valore dell'espressione non è utilizzato", + "varAnnotationIllegal": "Le annotazioni type per le variabili richiedono Python 3.6 o versione successiva. 
Usare il commento di type per la compatibilità con le versioni precedenti", + "variableFinalOverride": "La variabile \"{name}\" è contrassegnata come Final ed esegue l'override della variabile non Final con lo stesso nome nella classe \"{className}\"", + "variadicTypeArgsTooMany": "L'elenco di argomenti del tipo può contenere al massimo un tuple o TypeVarTuple non compresso", + "variadicTypeParamTooManyAlias": "L'alias di tipo può avere al massimo un parametro di tipo TypeVarTuple, ma ne ha ricevuti più ({names})", + "variadicTypeParamTooManyClass": "La classe generica può avere al massimo un parametro di tipo TypeVarTuple, ma ne ha ricevuti più ({names})", + "walrusIllegal": "L'operatore \":=\" richiede Python 3.8 o versione successiva", + "walrusNotAllowed": "L'operatore \":=\" non è consentito in questo contesto senza parentesi circostanti", + "wildcardInFunction": "Wildcard import non consentito all'interno di una classe o di una funzione", + "wildcardLibraryImport": "Wildcard import da una libreria non consentito", + "wildcardPatternTypePartiallyUnknown": "Il tipo acquisito dal modello con caratteri jolly è parzialmente sconosciuto", + "wildcardPatternTypeUnknown": "Il tipo acquisito dal criterio con caratteri jolly è sconosciuto", + "yieldFromIllegal": "L'uso di \"yield from\" richiede Python 3.3 o versione successiva", + "yieldFromOutsideAsync": "\"yield from\" non consentito in una funzione async", + "yieldOutsideFunction": "\"yield\" non consentito all'esterno di una funzione o di un'espressione lambda", + "yieldWithinComprehension": "\"yield\" non consentito all'interno di una comprensione", + "zeroCaseStatementsFound": "L’istruzione Match deve includere almeno un’istruzione case", + "zeroLengthTupleNotAllowed": "Zero-length tuple is not allowed in this context" + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "Non è possibile usare il modulo speciale \"Annotated\" con controlli di istanza e classe", + "argParam": "L'argomento corrisponde al 
parametro \"{paramName}\"", + "argParamFunction": "L'argomento corrisponde al parametro \"{paramName}\" nella funzione \"{functionName}\"", + "argsParamMissing": "Il parametro \"*{paramName}\" non ha un parametro corrispondente", + "argsPositionOnly": "Parametro di sola posizione non corrispondente; previsto {expected} ma ricevuto {received}", + "argumentType": "Il tipo di argomento è \"{type}\"", + "argumentTypes": "Tipi di argomento: ({types})", + "assignToNone": "Il tipo non è assegnabile a \"None\"", + "asyncHelp": "Intendevi \"async with\"?", + "baseClassIncompatible": "La classe base \"{baseClass}\" non è compatibile con il tipo \"{type}\"", + "baseClassIncompatibleSubclass": "La classe base \"{baseClass}\" deriva da \"{subclass}\", che non è compatibile con il tipo \"{type}\"", + "baseClassOverriddenType": "La classe di base \"{baseClass}\" fornisce il tipo \"{type}\", di cui viene eseguito l'override", + "baseClassOverridesType": "Override della classe base \"{baseClass}\" con tipo \"{type}\"", + "bytesTypePromotions": "Imposta disableBytesTypePromotions su false per abilitare il comportamento di innalzamento di livello del tipo per \"bytearray\" e \"memoryview\"", + "conditionalRequiresBool": "Il metodo __bool__ per il tipo \"{operandType}\" restituisce il tipo \"{boolReturnType}\" anziché \"bool\"", + "dataClassFieldLocation": "Dichiarazione di campo", + "dataClassFrozen": "\"{name}\" è bloccato", + "dataProtocolUnsupported": "“{name}” è un protocollo dati", + "descriptorAccessBindingFailed": "Impossibile associare il metodo \"{name}\" per la classe descrittore \"{className}\"", + "descriptorAccessCallFailed": "Impossibile chiamare il metodo \"{name}\" per la classe descrittore \"{className}\"", + "finalMethod": "Metodo Final", + "functionParamDefaultMissing": "Nel parametro \"{name}\" manca un argomento predefinito", + "functionParamName": "Nome del parametro non corrispondente: \"{destName}\" rispetto a \"{srcName}\"", + "functionParamPositionOnly": 
"Parametro di sola posizione non corrispondente; il parametro “{name}” non è di sola posizione", + "functionReturnTypeMismatch": "Il tipo restituito della funzione\"{sourceType}\" non è compatibile con il tipo \"{destType}\"", + "functionTooFewParams": "La funzione accetta un numero insufficiente di parametri posizionale. Previsto {expected} ma ricevuto {received}", + "functionTooManyParams": "La funzione accetta un numero eccessivo di parametri posizionale. Previsto {expected} ma ricevuto {received}", + "genericClassNotAllowed": "Tipo generico con argomenti di tipo non consentiti per i controlli di istanza o classe", + "incompatibleDeleter": "Il metodo deleter di Property non è compatibile", + "incompatibleGetter": "Il metodo getter di Property non è compatibile", + "incompatibleSetter": "Il metodo setter di Property non è compatibile", + "initMethodLocation": "Il metodo __init__ è definito nella classe \"{type}\"", + "initMethodSignature": "Firma del __init__ \"{type}\"", + "initSubclassLocation": "Il metodo __init_subclass__ è definito nella classe \"{name}\"", + "invariantSuggestionDict": "Prova a passare da \"dict\" a \"Mapping\", che è covariante nel tipo di valore", + "invariantSuggestionList": "Prova a passare da \"list\" a \"Sequence\", che è covariante", + "invariantSuggestionSet": "Prova a passare da \"set\" a \"Container\", che è covariante", + "isinstanceClassNotSupported": "\"{type}\" non è supportata per i controlli delle istanze e delle classi", + "keyNotRequired": "\"{name}\" non è una chiave obbligatoria in \"{type}\", quindi l'accesso potrebbe causare un'eccezione di runtime", + "keyReadOnly": "\"{name}\" è una chiave di sola lettura in \"{type}\"", + "keyRequiredDeleted": "\"{name}\" è una chiave obbligatoria e non può essere eliminata", + "keyUndefined": "\"{name}\" non è una chiave definita in \"{type}\"", + "kwargsParamMissing": "Il parametro \"**{paramName}\" non ha un parametro corrispondente", + "listAssignmentMismatch": "Il tipo 
\"{type}\" non è compatibile con l'elenco di destinazione", + "literalAssignmentMismatch": "\"{sourceType}\" non è assegnabile al tipo \"{destType}\"", + "literalNotAllowed": "Non è possibile usare il modulo speciale \"Literal\" con controlli di istanza e classe", + "matchIsNotExhaustiveHint": "Se la gestione completa non è prevista, aggiungere \"case _: pass\"", + "matchIsNotExhaustiveType": "Tipo non gestito: \"{type}\"", + "memberAssignment": "L'espressione di tipo \"{type}\" non può essere assegnata all'attributo \"{name}\" della classe \"{classType}\".", + "memberIsAbstract": "\"{type}.{name}\" non implementato", + "memberIsAbstractMore": "e {{count}} altro...", + "memberIsClassVarInProtocol": "“{name}” è definito come ClassVar nel protocollo", + "memberIsInitVar": "\"{name}\" è un campo di init-only", + "memberIsInvariant": "\"{name}\" è invariante perché modificabile", + "memberIsNotClassVarInClass": "\"{name}\" deve essere definito come ClassVar per essere compatibile con il protocollo", + "memberIsNotClassVarInProtocol": "“{name}” non è definito come ClassVar nel protocollo", + "memberIsNotReadOnlyInProtocol": "\"{name}\" non è di sola lettura nel protocollo", + "memberIsReadOnlyInProtocol": "\"{name}\" è di sola lettura nel protocollo", + "memberIsWritableInProtocol": "\"{name}\" è scrivibile nel protocollo", + "memberSetClassVar": "Non è possibile assegnare l'attributo \"{name}\" tramite un'istanza di classe perché è una ClassVar", + "memberTypeMismatch": "\"{name}\" è un tipo non compatibile", + "memberUnknown": "L'attributo \"{name}\" è sconosciuto", + "metaclassConflict": "La metaclasse \"{metaclass1}\" è in conflitto con \"{metaclass2}\"", + "missingDeleter": "Metodo deleter di Property mancante", + "missingGetter": "Metodo getter di Property mancante", + "missingSetter": "Metodo setter di Property mancante", + "namedParamMissingInDest": "Parametro aggiuntivo “{name}”", + "namedParamMissingInSource": "Parametro della parola chiave “{name}” mancante", 
+ "namedParamTypeMismatch": "Il parametro \"{name}\" della parola chiave di tipo \"{sourceType}\" non è compatibile con il tipo \"{destType}\"", + "namedTupleNotAllowed": "Non è possibile usare NamedTuple per i controlli di istanze o classi", + "newMethodLocation": "Il metodo __new__ è definito nella classe \"{type}\"", + "newMethodSignature": "La firma del __new__ è \"{type}\"", + "newTypeClassNotAllowed": "Impossibile utilizzare il tipo creato con NewType con controlli di classe e di istanza", + "noOverloadAssignable": "Nessuna funzione di overload corrisponde al tipo \"{type}\"", + "noneNotAllowed": "Non è possibile usare None per i controlli di istanze o classi", + "orPatternMissingName": "Nomi mancanti: {name}", + "overloadIndex": "L'overload {index} è la corrispondenza più vicina", + "overloadNotAssignable": "Uno o più overload di \"{name}\" non sono assegnabili", + "overloadSignature": "La firma di overload è definita qui", + "overriddenMethod": "Metodo sottoposto a override", + "overriddenSymbol": "Simbolo sottoposto a override", + "overrideInvariantMismatch": "Il tipo di override \"{overrideType}\" non è uguale al tipo di base \"{baseType}\"", + "overrideIsInvariant": "La variabile è modificabile, quindi il relativo tipo è invariante", + "overrideNoOverloadMatches": "Nessuna firma di overload nell'override è compatibile con il metodo di base", + "overrideNotClassMethod": "Il metodo di base viene dichiarato come classmethod, ma l'override non lo è", + "overrideNotInstanceMethod": "Il metodo di base è dichiarato come metodo di istanza, ma l’override non lo è", + "overrideNotStaticMethod": "Il metodo di base viene dichiarato come staticmethod, ma l'override non lo è", + "overrideOverloadNoMatch": "La sostituzione non gestisce tutti gli overload del metodo di base", + "overrideOverloadOrder": "Gli overload per il metodo di override devono essere nello stesso ordine del metodo di base", + "overrideParamKeywordNoDefault": "Parametro della parola chiave 
\"{name}\" non corrispondente: il parametro di base ha un valore di argomento predefinito, il parametro di override non ce l'ha", + "overrideParamKeywordType": "Tipo del parametro della parola chiave \"{name}\" non corrispondente: il parametro di base è di tipo \"{baseType}\", il parametro di override è di tipo \"{overrideType}\"", + "overrideParamName": "Nome del parametro {index} non corrispondente: il parametro di base è denominato \"{baseName}\", il parametro di override è denominato \"{overrideName}\"", + "overrideParamNameExtra": "Parametro \"{name}\" mancante nella base", + "overrideParamNameMissing": "Parametro \"{name}\" mancante nell'override", + "overrideParamNamePositionOnly": "Mancata corrispondenza del parametro {index}: il parametro di base \"{baseName}\" è un parametro di parola chiave, il parametro di override è di sola posizione", + "overrideParamNoDefault": "Parametro \"{index}\" non corrispondente: il parametro di base ha un valore di argomento predefinito, il parametro di override non ce l'ha", + "overrideParamType": "Tipo di parametro {index} non corrispondente: il parametro di base è di tipo \"{baseType}\", il parametro di override è di tipo \"{overrideType}\"", + "overridePositionalParamCount": "Numero di parametri posizionali non corrispondente. 
Il metodo di base ne ha {baseCount}, ma l'override ne ha {overrideCount}", + "overrideReturnType": "Tipo restituito non corrispondente: il metodo di base restituisce il tipo \"{baseType}\", l'override restituisce il tipo \"{overrideType}\"", + "overrideType": "La classe di base definisce il tipo come \"{type}\"", + "paramAssignment": "Parametro {index}: il tipo \"{sourceType}\" non è compatibile con il tipo \"{destType}\"", + "paramSpecMissingInOverride": "Parametri ParamSpec mancanti nel metodo di override", + "paramType": "Tipo di parametro \"{paramType}\"", + "privateImportFromPyTypedSource": "Importa da \"{module}\"", + "propertyAccessFromProtocolClass": "Non è possibile accedere a una proprietà definita all'interno di una classe di protocollo come variabile di classe", + "propertyMethodIncompatible": "Il metodo di Property \"{name}\" non è compatibile", + "propertyMethodMissing": "Metodo di Property \"{name}\" mancante nell'override", + "propertyMissingDeleter": "Property \"{name}\" non dispone di un deleter definito", + "propertyMissingSetter": "Property \"{name}\" non dispone di un setter definito", + "protocolIncompatible": "\"{sourceType}\" non è compatibile con il protocollo \"{destType}\"", + "protocolMemberMissing": "\"{name}\" non è presente", + "protocolRequiresRuntimeCheckable": "La classe di Protocol deve essere @runtime_checkable in modo che sia possibile usarla con i controlli di istanza e classe", + "protocolSourceIsNotConcrete": "\"{sourceType}\" non è un tipo di classe concreto e non può essere assegnato al tipo \"{destType}\"", + "protocolUnsafeOverlap": "Gli attributi di “{name}” hanno gli stessi nomi del protocollo", + "pyrightCommentIgnoreTip": "Usa \"# pyright: ignore[]\" per eliminare la diagnostica per una singola riga", + "readOnlyAttribute": "L'attributo \"{name}\" è di sola lettura", + "seeClassDeclaration": "Vedere la dichiarazione di classe", + "seeDeclaration": "Vedere la dichiarazione", + "seeFunctionDeclaration": "Vedere la 
dichiarazione di funzione", + "seeMethodDeclaration": "Vedere la dichiarazione del metodo", + "seeParameterDeclaration": "Vedere la dichiarazione del parametro", + "seeTypeAliasDeclaration": "Vedere la dichiarazione di alias di tipo", + "seeVariableDeclaration": "Vedere la dichiarazione di variabile", + "tupleAssignmentMismatch": "Il tipo \"{type}\" non è compatibile con il tuple di destinazione", + "tupleEntryTypeMismatch": "Il tipo della voce di Tuple {entry} non è corretto", + "tupleSizeIndeterminateSrc": "Dimensioni del tuple non corrispondenti; previsto {expected} ma ricevuto indeterminato", + "tupleSizeIndeterminateSrcDest": "Dimensioni del tuple non corrispondenti; previsto {expected} o più, ma ricevuto indeterminato", + "tupleSizeMismatch": "Dimensioni tuple non corrispondenti; previsto {expected} ma ricevuto {received}", + "tupleSizeMismatchIndeterminateDest": "Dimensioni del tuple non corrispondenti; previsto {expected} o più ma ricevuto {received}", + "typeAliasInstanceCheck": "Non è possibile usare l'alias di tipo creato con l'istruzione \"type\" con controlli di classe e istanza", + "typeAssignmentMismatch": "Il tipo \"{sourceType}\" non è assegnabile al tipo \"{destType}\"", + "typeBound": "Il tipo \"{sourceType}\" non è assegnabile al limite superiore \"{destType}\" per la variabile di tipo \"{name}\"", + "typeConstrainedTypeVar": "Il tipo \"{type}\" non è assegnabile alla variabile di tipo vincolato \"{name}\"", + "typeIncompatible": "\"{sourceType}\" non è assegnabile a \"{destType}\"", + "typeNotClass": "\"{type}\" non è una classe", + "typeNotStringLiteral": "\"{type}\" non è un valore letterale stringa", + "typeOfSymbol": "Il tipo di \"{name}\" è \"{type}\"", + "typeParamSpec": "Il tipo \"{type}\" non è compatibile con il \"{name}\" ParamSpec", + "typeUnsupported": "Il tipo \"{type}\" non è supportato", + "typeVarDefaultOutOfScope": "La variabile di tipo \"{name}\" non è nell'ambito", + "typeVarIsContravariant": "Il parametro di tipo \"{name}\" 
è controvariante, ma \"{sourceType}\" non è un supertipo di \"{destType}\"", + "typeVarIsCovariant": "Il parametro di tipo \"{name}\" è covariante, ma \"{sourceType}\" non è un sottotipo di \"{destType}\"", + "typeVarIsInvariant": "Il parametro di tipo \"{name}\" è invariante, ma \"{sourceType}\" non è uguale a \"{destType}\"", + "typeVarNotAllowed": "TypeVar non consentito per i controlli di istanze o classi", + "typeVarTupleRequiresKnownLength": "Non è possibile associare TypeVarTuple a un tuple di lunghezza sconosciuta", + "typeVarUnnecessarySuggestion": "Usare invece {type}", + "typeVarUnsolvableRemedy": "Specificare un overload che specifica il tipo restituito quando l'argomento non viene fornito", + "typeVarsMissing": "Variabili di tipo mancanti: {names}", + "typedDictBaseClass": "La classe \"{type}\" non è un TypedDict", + "typedDictClassNotAllowed": "Classe TypedDict non consentita per i controlli di istanze o classi", + "typedDictClosedExtraNotAllowed": "Non è possibile aggiungere l'elemento \"{name}\"", + "typedDictClosedExtraTypeMismatch": "Non è possibile aggiungere l'elemento \"{name}\" con tipo \"{type}\"", + "typedDictClosedFieldNotReadOnly": "Non è possibile aggiungere l'elemento \"{name}\" perché deve essere ReadOnly", + "typedDictClosedFieldNotRequired": "Non è possibile aggiungere l'elemento \"{name}\" perché deve essere NotRequired", + "typedDictExtraFieldNotAllowed": "\"{name}\" non è presente in \"{type}\"", + "typedDictExtraFieldTypeMismatch": "Il tipo di \"{name}\" non è compatibile con il tipo \"extra_items\" in \"{type}\"", + "typedDictFieldMissing": "\"{name}\" mancante nel \"{type}\"", + "typedDictFieldNotReadOnly": "\"{name}\" non è di sola lettura in \"{type}\"", + "typedDictFieldNotRequired": "\"{name}\" non è obbligatorio in \"{type}\"", + "typedDictFieldRequired": "\"{name}\" è obbligatorio in \"{type}\"", + "typedDictFieldTypeMismatch": "Il tipo \"{type}\" non può essere assegnato all'elemento \"{name}\"", + 
"typedDictFieldUndefined": "\"{name}\" è un elemento non definito nel tipo \"{type}\"", + "typedDictKeyAccess": "Usare [\"{name}\"] per fare riferimento all'elemento in TypedDict", + "typedDictNotAllowed": "Non è possibile usare TypedDict per i controlli di istanze o classi", + "unhashableType": "Il tipo \"{type}\" non è hashable", + "uninitializedAbstractVariable": "La variabile di istanza \"{name}\" è definita nella classe di base astratta \"{classType}\" ma non è inizializzata", + "unreachableExcept": "\"{exceptionType}\" è una sottoclasse di \"{parentType}\"", + "useDictInstead": "Usare dict[T1, T2] per indicare un tipo di dizionario", + "useListInstead": "Usare list[T] per indicare un tipo di list o T1 | T2 per indicare un tipo di union", + "useTupleInstead": "Usare tuple[T1, ..., Tn] per indicare un tipo di tuple o T1 | T2 per indicare un tipo di unione", + "useTypeInstead": "Usare invece type[T]", + "varianceMismatchForClass": "La varianza dell'argomento tipo \"{typeVarName}\" non è compatibile con la classe di base \"{className}\"", + "varianceMismatchForTypeAlias": "La varianza dell'argomento tipo \"{typeVarName}\" non è compatibile con \"{typeAliasParam}\"" + }, + "Service": { + "longOperation": "L’enumerazione dei file di origine dell’area di lavoro sta richiedendo tempo. Provare ad aprire una sottocartella. 
[Altre informazioni](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.ja.json b/python-parser/packages/pyright-internal/src/localization/package.nls.ja.json new file mode 100644 index 00000000..0aeeb96f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.ja.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "型 Stub を作成する", + "createTypeStubFor": "\"{moduleName}\" の型 Stub を作成する", + "executingCommand": "コマンドの実行中", + "filesToAnalyzeCount": "分析する {count} 個のファイル", + "filesToAnalyzeOne": "分析する 1 つのファイル", + "findingReferences": "参照を検索しています", + "organizeImports": "インポートを整理" + }, + "Completion": { + "autoImportDetail": "自動インポート", + "indexValueDetail": "インデックス値" + }, + "Diagnostic": { + "abstractMethodInvocation": "メソッド \"{method}\" は抽象メソッドであり、実装されていないため、呼び出すことができません", + "annotatedMetadataInconsistent": "注釈付きのメタデータ型 \"{metadataType}\" は型 \"{type}\" と互換性がありません", + "annotatedParamCountMismatch": "パラメーター注釈数の不一致: {expected} が必要ですが、{received} を受信しました", + "annotatedTypeArgMissing": "\"Annotated\" には 1 つの型引数と 1 つ以上の注釈が必要です", + "annotationBytesString": "型式では、バイト文字列リテラルは使用できません", + "annotationFormatString": "型式では、書式指定文字列リテラル (f 文字列) を使用できません", + "annotationNotSupported": "このステートメントでは型注釈はサポートされていません", + "annotationRawString": "型式では、生文字列リテラルは使用できません", + "annotationSpansStrings": "型式は複数の文字列リテラルにまたがることはできません", + "annotationStringEscape": "型式にエスケープ文字を含めることはできません", + "annotationTemplateString": "型式では、テンプレート文字列リテラル (t 文字列) を使用できません", + "argAssignment": "型 \"{argType}\" の引数を型 \"{paramType}\" のパラメーターに割り当てることはできません", + "argAssignmentFunction": "型 \"{argType}\" の引数を関数 \"{functionName}\" の型 \"{paramType}\" のパラメーターに割り当てることはできません", + "argAssignmentParam": "型 \"{argType}\" の引数を型 \"{paramType}\" のパラメーター \"{paramName}\" に割り当てることはできません", + "argAssignmentParamFunction": "型 \"{argType}\" の引数を、関数 \"{functionName}\" の型 \"{paramType}\" のパラメーター \"{paramName}\" に割り当てることはできません", 
+ "argMissingForParam": "パラメーター {name} に引数がありません", + "argMissingForParams": "パラメーター {names} に引数がありません", + "argMorePositionalExpectedCount": "さらに {expected} 個の位置引数が必要です", + "argMorePositionalExpectedOne": "さらに 1 つの位置引数が必要です", + "argPositional": "必要な位置引数", + "argPositionalExpectedCount": "{expected} 個の位置引数が必要です", + "argPositionalExpectedOne": "1 個の位置引数が必要です", + "argTypePartiallyUnknown": "引数の型が部分的に不明です", + "argTypeUnknown": "引数の型が不明です", + "assertAlwaysTrue": "Assert 式は常に true に評価されます", + "assertTypeArgs": "\"assert_type\" には 2 つの位置引数が必要です", + "assertTypeTypeMismatch": "\"assert_type\" の不一致: \"{expected}\" が必要ですが、\"{received}\" を受信しました", + "assignmentExprComprehension": "代入式のターゲット \"{name}\" は、ターゲットの理解と同じ名前を使用できません", + "assignmentExprContext": "代入式は、モジュール、関数、またはラムダ内に存在する必要があります", + "assignmentExprInSubscript": "下付き文字内の代入式は、Python 3.10 以降でのみサポートされます", + "assignmentInProtocol": "Protocol クラス内のインスタンス変数またはクラス変数は、クラス本体内で明示的に宣言する必要があります", + "assignmentTargetExpr": "式を代入先にすることはできません", + "asyncNotInAsyncFunction": "async 関数の外部では \"async\" の使用は許可されていません", + "awaitIllegal": "\"await\" を使用するには Python 3.5 以降が必要です", + "awaitNotAllowed": "型式では、\"await\" は使用できません", + "awaitNotInAsync": "\"await\" は async 関数内でのみ許可されます", + "backticksIllegal": "バッククォートで囲まれた式は、Python 3.x ではサポートされていません。代わりに repr を使用してください", + "baseClassCircular": "クラス自体から派生することはできません", + "baseClassFinal": "基底クラス \"{type}\" は final とマークされており、サブクラス化できません", + "baseClassIncompatible": "{type} の基底クラスは相互に互換性がありません", + "baseClassInvalid": "クラスへの引数は基底クラスである必要があります", + "baseClassMethodTypeIncompatible": "\"{classType}\" の基底クラスは、互換性のない方法でメソッド \"{name}\" を定義します", + "baseClassUnknown": "基底クラスの型が不明で、派生クラスの型が不明です", + "baseClassVariableTypeIncompatible": "クラス \"{classType}\" の基底クラスは、互換性のない方法で変数 \"{name}\" を定義します", + "binaryOperationNotAllowed": "2 項演算子は型式では使用できません", + "bindParamMissing": "\"self\" または \"cls\" パラメーターがないため、メソッド \"{methodName}\" をバインドできませんでした", + "bindTypeMismatch": "\"{type}\" がパラメーター \"{paramName}\" に割り当てできないため、メソッド 
\"{methodName}\" をバインドできませんでした", + "breakInExceptionGroup": "\"except*\" ブロックでは \"break\" を使用できません", + "breakOutsideLoop": "\"break\" はループ内でのみ使用できます", + "bytesUnsupportedEscape": "bytes リテラルでサポートされていないエスケープ シーケンス", + "callableExtraArgs": "\"Callable\" に必要な型引数は 2 つだけです", + "callableFirstArg": "パラメーターの型リストまたは \"...\" が必要です。", + "callableNotInstantiable": "型 \"{type}\" をインスタンス化できません", + "callableSecondArg": "\"Callable\" の 2 番目の型引数として戻り値の型が必要です", + "casePatternIsIrrefutable": "参照不可能なパターンは、最後の case ステートメントに対してのみ許可されます", + "classAlreadySpecialized": "型 \"{type}\" は既に特殊化されています", + "classDecoratorTypeUnknown": "型指定されていないクラス デコレーターはクラスの型を隠します。デコレーターを無視する", + "classDefinitionCycle": "\"{name}\" のクラス定義は、それ自体に依存します", + "classGetItemClsParam": "__class_getitem__ override は \"cls\" パラメーターを受け取る必要があります", + "classMethodClsParam": "クラス メソッドは \"cls\" パラメーターを受け取る必要があります", + "classNotRuntimeSubscriptable": "クラス \"{name}\" の添字はランタイム例外を生成します。型式を引用符で囲んでください", + "classPatternBuiltInArgPositional": "クラス パターンは位置指定サブパターンのみを受け入れます", + "classPatternNewType": "\"{type}\" は NewType を使用して定義されているため、クラス パターンでは使用できません", + "classPatternPositionalArgCount": "クラス \"{type}\" の位置指定パターンが多すぎます。{expected} が必要ですが、{received} を受信しました", + "classPatternTypeAlias": "\"{type}\" は特殊な型エイリアスであるため、クラス パターンでは使用できません", + "classPropertyDeprecated": "クラス プロパティは Python 3.11 では非推奨であり、Python 3.13 ではサポートされなくなります", + "classTypeParametersIllegal": "クラス型パラメーターの構文には Python 3.12 以降が必要です", + "classVarFirstArgMissing": "\"ClassVar\" の後に型引数が必要です", + "classVarNotAllowed": "\"ClassVar\" はこのコンテキストでは許可されていません", + "classVarOverridesInstanceVar": "クラス変数 \"{name}\" は、クラス \"{className}\" の同じ名前のインスタンス変数をオーバーライドします", + "classVarTooManyArgs": "\"ClassVar\" の後に必要な型引数は 1 つだけです", + "classVarWithTypeVar": "\"ClassVar\" 型に型変数を含めることはできません", + "clsSelfParamTypeMismatch": "パラメーター \"{name}\" の型は、そのクラス \"{classType}\" のスーパータイプである必要があります", + "codeTooComplexToAnalyze": "コードが複雑すぎるため、分析できません。サブルーチンにリファクタリングするか、条件付きコード パスを減らすことで複雑さを軽減してください", + 
"collectionAliasInstantiation": "型 \"{type}\" はインスタンス化できません。代わりに \"{alias}\" を使用してください", + "comparisonAlwaysFalse": "型 \"{leftType}\" と \"{rightType}\" に重複がないため、条件は常に False に評価されます", + "comparisonAlwaysTrue": "型 \"{leftType}\" と \"{rightType}\" に重複がないため、条件は常に True に評価されます", + "comprehensionInDict": "他の辞書エントリと共に理解することはできません", + "comprehensionInSet": "読解は他の set エントリと併用できません。", + "concatenateContext": "\"Concatenate\" はこのコンテキストで許可されていません", + "concatenateParamSpecMissing": "\"Concatenate\" の最後の型引数は ParamSpec または \"...\" である必要があります", + "concatenateTypeArgsMissing": "\"Concatenate\" には少なくとも 2 つの型引数が必要です", + "conditionalOperandInvalid": "型 \"{type}\" の条件オペランドが無効です", + "constantRedefinition": "\"{name}\" は定数であり (大文字であるため)、再定義できません", + "constructorParametersMismatch": "クラス \"{classType}\" の__new__と__init__のシグネチャの不一致", + "containmentAlwaysFalse": "型 \"{leftType}\" と \"{rightType}\" に重複がないため、式は常に False に評価されます", + "containmentAlwaysTrue": "型 \"{leftType}\" と \"{rightType}\" に重複がないため、式は常に True に評価されます", + "continueInExceptionGroup": "\"except*\" ブロックでは \"continue\" を使用できません", + "continueOutsideLoop": "\"continue\" はループ内でのみ使用できます", + "coroutineInConditionalExpression": "常に True に評価される条件式参照コルーチン", + "dataClassBaseClassFrozen": "固定されていないクラスは、固定されているクラスから継承できません", + "dataClassBaseClassNotFrozen": "固定されたクラスは、固定されていないクラスから継承できません", + "dataClassConverterFunction": "型 \"{argType}\" の引数は、型 \"{fieldType}\" のフィールド \"{fieldName}\" の有効なコンバーターではありません", + "dataClassConverterOverloads": "{funcName}\" のオーバーロードは、型 \"{fieldType}\" のフィールド \"{fieldName}\" に対して有効なコンバーターではありません", + "dataClassFieldInheritedDefault": "\"{fieldName}\" は同じ名前のフィールドをオーバーライドしますが、既定値がありません", + "dataClassFieldWithDefault": "既定値のないフィールドは、既定値を持つフィールドの後に表示できません", + "dataClassFieldWithPrivateName": "データクラス フィールドはプライベート名を使用できません", + "dataClassFieldWithoutAnnotation": "型注釈のないデータクラス フィールドが原因でランタイム例外が発生する", + "dataClassPostInitParamCount": "Dataclass __post_init__ パラメーター数が正しくありません。InitVar フィールドの数は {expected} です", + 
"dataClassPostInitType": "フィールド \"{fieldName}\" の Dataclass __post_init__ メソッド パラメーターの型が一致しません", + "dataClassSlotsOverwrite": "__slots__はクラスで既に定義されています", + "dataClassTransformExpectedBoolLiteral": "静的に True または False に評価される式が必要です", + "dataClassTransformFieldSpecifier": "クラスまたは関数の tuple が必要ですが、型 \"{type}\" を受け取りました", + "dataClassTransformPositionalParam": "\"dataclass_transform\" に対するすべての引数はキーワード引数である必要があります", + "dataClassTransformUnknownArgument": "引数 \"{name}\" はdataclass_transform でサポートされていません", + "dataProtocolInSubclassCheck": "データ プロトコル (メソッド以外の属性を含む) は、issubclass 呼び出しで使用できません", + "declaredReturnTypePartiallyUnknown": "宣言された戻り値の型 \"{returnType}\" は部分的に不明です", + "declaredReturnTypeUnknown": "宣言された戻り値の型が不明です", + "defaultValueContainsCall": "パラメーターの既定値の式内では、関数呼び出しと変更可能なオブジェクトは許可されません", + "defaultValueNotAllowed": "\"*\" または \"**\" のパラメーターに既定値を指定することはできません", + "delTargetExpr": "式を削除できません", + "deprecatedClass": "クラス \"{name}\" は非推奨です", + "deprecatedConstructor": "クラス \"{name}\" のコンストラクターは非推奨です", + "deprecatedDescriptorDeleter": "記述子 \"{name}\" の \"__delete__\" メソッドは非推奨です", + "deprecatedDescriptorGetter": "記述子 \"{name}\" の \"__get__\" メソッドは非推奨です", + "deprecatedDescriptorSetter": "記述子 \"{name}\" の \"__set__\" メソッドは非推奨です", + "deprecatedFunction": "関数 \"{name}\" は非推奨です", + "deprecatedMethod": "クラス \"{className}\" のメソッド \"{name}\" は非推奨です", + "deprecatedPropertyDeleter": "プロパティ \"{name}\" の deleter は非推奨です", + "deprecatedPropertyGetter": "プロパティ \"{name}\" の getter は非推奨です", + "deprecatedPropertySetter": "プロパティ \"{name}\" の setter は非推奨です", + "deprecatedType": "この型は Python {version} では非推奨です。代わりに\"{replacement}\"を使用してください", + "dictExpandIllegalInComprehension": "辞書の展開は理解できません", + "dictInAnnotation": "辞書式は型式では使用できません", + "dictKeyValuePairs": "辞書エントリにはキー/値のペアが含まれている必要があります", + "dictUnpackIsNotMapping": "ディクショナリ アンパック演算子に必要なマッピング", + "dunderAllSymbolNotPresent": "\"{name}\" は __all__ で指定されていますが、モジュールには存在しません", + "duplicateArgsParam": 
"許可される \"*\" パラメーターは 1 つだけです", + "duplicateBaseClass": "重複する基底クラスは許可されていません", + "duplicateCapturePatternTarget": "Capture ターゲット \"{name}\" を同じパターン内に複数回出現させることはできません", + "duplicateCatchAll": "許可される catch-all except 句は 1 つだけです", + "duplicateEnumMember": "Enum メンバー \"{name}\" は既に宣言されています", + "duplicateGenericAndProtocolBase": "許可される Generic[...] または Protocol[...] 基底クラスは 1 つだけです", + "duplicateImport": "\"{importName}\" が複数回インポートされています", + "duplicateKeywordOnly": "\"*\" 区切り記号を 1 つだけ使用できます", + "duplicateKwargsParam": "許可される \"**\" パラメーターは 1 つだけです", + "duplicateParam": "パラメーター \"{name}\" が重複しています", + "duplicatePositionOnly": "許可される \"/\" パラメーターは 1 つだけです", + "duplicateStarPattern": "パターン シーケンスで使用できる \"*\" パターンは 1 つだけです", + "duplicateStarStarPattern": "許可されている \"**\" エントリは 1 つだけです", + "duplicateUnpack": "list 内で許可されるアンパック操作は 1 つのみです", + "ellipsisAfterUnpacked": "\"...\" はアンパックされた TypeVarTuple または tuple と共に使用することはできません", + "ellipsisContext": "\"...\" はこのコンテキストでは許可されていません", + "ellipsisSecondArg": "\"...\" は2 つの引数の 2 番目の引数としてのみ使用できます", + "enumClassOverride": "Enum クラス \"{name}\" は final であり、サブクラス化できません", + "enumMemberDelete": "Enum メンバー \"{name}\" を削除できません", + "enumMemberSet": "Enum メンバー \"{name}\" を割り当てることはできません", + "enumMemberTypeAnnotation": "Enum メンバーには型注釈は使用できません", + "exceptGroupMismatch": "Try ステートメントに \"except\" と \"except*\" の両方を含めることはできません", + "exceptGroupRequiresType": "例外グループ構文 (\"except*\") には例外の種類が必要です", + "exceptRequiresParens": "Python 3.14 より前の複数の例外の種類をかっこで囲む必要がある", + "exceptWithAsRequiresParens": "\"as\" を使用する場合は、複数の例外の種類をかっこで囲む必要があります", + "exceptionGroupIncompatible": "例外グループの構文 (\"except*\") には Python 3.11 以降が必要です", + "exceptionGroupTypeIncorrect": "except* の例外型は BaseGroupException から派生できません", + "exceptionTypeIncorrect": "\"{type}\" は BaseException から派生していません", + "exceptionTypeNotClass": "\"{type}\" は有効な例外クラスではありません", + "exceptionTypeNotInstantiable": "例外の種類 \"{type}\" のコンストラクターには 1 つ以上の引数が必要です", + "expectedAfterDecorator": 
"デコレーターの後に必要な関数またはクラス宣言", + "expectedArrow": "\"->\" の後に戻り値の型注釈が続く必要があります", + "expectedAsAfterException": "例外の種類の後に \"as\" が必要です", + "expectedAssignRightHandExpr": "\"=\" の右側に式が必要です", + "expectedBinaryRightHandExpr": "演算子の右側に式が必要です", + "expectedBoolLiteral": "True または False が必要です", + "expectedCase": "\"case\" ステートメントが必要です", + "expectedClassName": "必要なクラス名", + "expectedCloseBrace": "\"{\" は閉じられていません", + "expectedCloseBracket": "\"[\" は閉じられていません", + "expectedCloseParen": "\"(\" が閉じられませんでした", + "expectedColon": "\":\" が必要です", + "expectedComplexNumberLiteral": "パターン マッチングに必要な複素数リテラル", + "expectedDecoratorExpr": "Python 3.9 より前のデコレーターでは、式フォームはサポートされていません", + "expectedDecoratorName": "デコレーター名が必要です", + "expectedDecoratorNewline": "デコレーターの末尾に新しい行が必要です", + "expectedDelExpr": "\"del\" の後に式が必要です", + "expectedElse": "\"else\" が必要です", + "expectedEquals": "\"=\" が必要です", + "expectedExceptionClass": "例外クラスまたはオブジェクトが無効です", + "expectedExceptionObj": "例外オブジェクト、例外クラス、または None が必要です", + "expectedExpr": "式が必要です", + "expectedFunctionAfterAsync": "\"async\" の後に必要な関数定義", + "expectedFunctionName": "\"def\" の後に関数名が必要です", + "expectedIdentifier": "必要な識別子", + "expectedImport": "\"import\" が必要です", + "expectedImportAlias": "\"as\" の後にシンボルが必要です", + "expectedImportSymbols": "\"import\" の後に 1 つ以上のシンボル名が必要です", + "expectedIn": "'in' が必要です", + "expectedInExpr": "\"in\" の後に式が必要です", + "expectedIndentedBlock": "インデントされたブロックが必要です", + "expectedMemberName": "\"\" の後に属性名が必要です。", + "expectedModuleName": "必要なモジュール名", + "expectedNameAfterAs": "\"as\" の後にシンボル名が必要です", + "expectedNamedParameter": "キーワード パラメーターは \"*\" の後に続く必要があります", + "expectedNewline": "改行が必要です", + "expectedNewlineOrSemicolon": "ステートメントは改行またはセミコロンで区切る必要があります", + "expectedOpenParen": "\"(\" が必要です", + "expectedParamName": "必要なパラメーター名", + "expectedPatternExpr": "必要なパターン式", + "expectedPatternSubjectExpr": "必要なパターンの件名の式", + "expectedPatternValue": "\"a.b\" 形式のパターン値式が必要です", + "expectedReturnExpr": "\"return\" の後に式が必要です", + "expectedSliceIndex": 
"インデックスまたはスライス式が必要です", + "expectedTypeNotString": "必要な型ですが、文字列リテラルを受け取りました", + "expectedTypeParameterName": "必要な型パラメーター名", + "expectedYieldExpr": "yield ステートメントで必要な式", + "finalClassIsAbstract": "クラス \"{type}\" は final とマークされており、すべての抽象なシンボルを実装する必要があります", + "finalContext": "\"Final\" はこのコンテキストでは許可されていません", + "finalInLoop": "\"Final\" 変数をループ内で割り当てることはできません", + "finalMethodOverride": "メソッド \"{name}\" は、クラス \"{className}\" で定義されている final メソッドをオーバーライドできません", + "finalNonMethod": "関数 \"{name}\" はメソッドではないため、@final としてマークできません", + "finalReassigned": "\"{name}\" は Final として宣言されており、再割り当てできません", + "finalRedeclaration": "\"{name}\" は以前に Final として宣言されました", + "finalRedeclarationBySubclass": "親クラス \"{className}\" が Final として宣言しているため、\"{name}\" を再宣言できません", + "finalTooManyArgs": "\"Final\" の後に 1 つの型引数が必要です", + "finalUnassigned": "\"{name}\" は Final と宣言されていますが、値は割り当てされていません", + "finallyBreak": "\"break\" を使用して \"finally\" ブロックを終了することはできません", + "finallyContinue": "\"continue\" を使用して \"finally\" ブロックを終了することはできません", + "finallyReturn": "\"return\" を使用して \"finally\" ブロックを終了することはできません", + "formatStringBrace": "f-string リテラル内では、1 つの閉じかっこは使用できません。二重閉じかっこを使用してください", + "formatStringBytes": "書式指定文字列リテラル (f 文字列) をバイナリにすることはできません", + "formatStringDebuggingIllegal": "F-string デバッグ指定子 \"=\" には Python 3.8 以降が必要です", + "formatStringEscape": "Python 3.12 より前の f-string の式部分ではエスケープ シーケンス (バックスラッシュ) は使用できません", + "formatStringExpectedConversion": "f-string の \"!\" の後に変換指定子が必要です", + "formatStringIllegal": "書式文字列リテラル (f 文字列) には Python 3.6 以降が必要です", + "formatStringInPattern": "書式指定文字列はパターンでは使用できません", + "formatStringNestedFormatSpecifier": "書式指定子内で入れ子になった式が深すぎます", + "formatStringNestedQuote": "f-string 内で入れ子になった文字列は、Python 3.12 より前の f-string と同じ引用符文字を使用できません", + "formatStringTemplate": "書式文字列リテラル (f 文字列) をテンプレート文字列 (t 文字列) にすることもできません", + "formatStringUnicode": "書式指定文字列リテラル (f 文字列) を Unicode にすることはできません", + "formatStringUnterminated": "f-string の式が終了していません。\"}\" が必要です", + "functionDecoratorTypeUnknown": 
"型指定されていない関数デコレーターは、関数の型を隠します。デコレーターを無視しています", + "functionInConditionalExpression": "常に True に評価される条件式参照関数", + "functionTypeParametersIllegal": "関数型パラメーターの構文には Python 3.12 以降が必要です", + "futureImportLocationNotAllowed": "__future__ からのインポートは、ファイルの先頭にある必要があります", + "generatorAsyncReturnType": "async ジェネレーター関数の戻り値の型は、\"AsyncGenerator[{yieldType}, Any]\" と互換性がある必要があります", + "generatorNotParenthesized": "ジェネレーター式は、唯一の引数でない場合はかっこで囲む必要があります", + "generatorSyncReturnType": "ジェネレーター関数の戻り値の型は、\"Generator[{yieldType}, Any, Any]\" と互換性がある必要があります", + "genericBaseClassNotAllowed": "\"Generic\" 基底クラスを型パラメーター構文と共に使用することはできません", + "genericClassAssigned": "ジェネリック クラス型を割り当てることはできません", + "genericClassDeleted": "ジェネリック クラス型を削除できません", + "genericInstanceVariableAccess": "クラスを介したジェネリック インスタンス変数へのアクセスがあいまいです", + "genericNotAllowed": "\"Generic\" はこのコンテキストでは無効です", + "genericTypeAliasBoundTypeVar": "クラス内のジェネリック型エイリアスはバインドされた型変数 {names} を使用できません", + "genericTypeArgMissing": "\"Generic\" には少なくとも 1 つの型引数が必要です", + "genericTypeArgTypeVar": "\"Generic\" の型引数は型変数である必要があります", + "genericTypeArgUnique": "\"Generic\" の型引数は一意である必要があります", + "globalReassignment": "\"{name}\" は global 宣言の前に割り当てられます", + "globalRedefinition": "\"{name}\" は既に global として宣言されています", + "implicitStringConcat": "暗黙的な文字列連結は許可されていません", + "importCycleDetected": "インポート チェーンで循環が検出されました", + "importDepthExceeded": "インポート チェーンの深さが {depth} を超えました", + "importResolveFailure": "インポート \"{importName}\" を解決できませんでした", + "importSourceResolveFailure": "インポート \"{importName}\" をソースから解決できませんでした", + "importSymbolUnknown": "\"{name}\" は不明なインポート シンボルです", + "incompatibleMethodOverride": "メソッド \"{name}\" は互換性のない方法でクラス \"{className}\" をオーバーライドします", + "inconsistentIndent": "元のサイズが前のインデントと一致しません", + "inconsistentTabs": "インデントでのタブとスペースの一貫性のない使用", + "initMethodSelfParamTypeVar": "\"__init__\" メソッドの \"self\" パラメーターの型注釈に、クラス スコープ型の変数を含めることはできません", + "initMustReturnNone": "\"__init__\" の戻り値の型は None でなければなりません", + "initSubclassCallFailed": "__init_subclass__ 
メソッドのキーワード引数が正しくありません", + "initSubclassClsParam": "__init_subclass__ オーバーライドは \"cls\" パラメーターを受け取る必要があります", + "initVarNotAllowed": "\"InitVar\" はこのコンテキストでは許可されていません", + "instanceMethodSelfParam": "インスタンス メソッドは \"self\" パラメーターを受け取る必要があります", + "instanceVarOverridesClassVar": "インスタンス変数 \"{name}\" は、クラス \"{className}\" の同じ名前のクラス変数をオーバーライドします", + "instantiateAbstract": "抽象クラス \"{type}\" をインスタンス化できません", + "instantiateProtocol": "Protocol クラス \"{type}\" をインスタンス化できません", + "internalBindError": "ファイル \"{file}\" のバインド中に内部エラーが発生しました: {message}", + "internalParseError": "ファイル \"{file}\" の解析中に内部エラーが発生しました: {message}", + "internalTypeCheckingError": "ファイル \"{file}\" の種類チェック中に内部エラーが発生しました: {message}", + "invalidIdentifierChar": "識別子の無効な文字", + "invalidStubStatement": "ステートメントは、型 stub ファイル内では意味がありません", + "invalidTokenChars": "トークン内の無効な文字 \"{text}\"", + "isInstanceInvalidType": "\"isinstance\" の 2 番目の引数は、クラスまたはクラスの tuple である必要があります", + "isSubclassInvalidType": "\"issubclass\" の 2 番目の引数は、クラスまたはクラスの tuple である必要があります", + "keyValueInSet": "キーと値のペアは set 内では使用できません", + "keywordArgInTypeArgument": "キーワード引数は型引数リストでは使用できません", + "keywordOnlyAfterArgs": "キーワードのみの引数の区切り記号は、\"*\" パラメーターの後には使用できません", + "keywordParameterMissing": "1 つ以上のキーワード パラメーターが \"*\" パラメーターの後に続く必要があります", + "keywordSubscriptIllegal": "添字内のキーワード引数はサポートされていません", + "lambdaReturnTypePartiallyUnknown": "ラムダの戻り値の型、\"{returnType}\" が部分的に不明です", + "lambdaReturnTypeUnknown": "ラムダの戻り値の型が不明です", + "listAssignmentMismatch": "型 \"{type}\" の式をターゲット リストに割り当てることはできません", + "listInAnnotation": "List 式は型式では使用できません", + "literalEmptyArgs": "\"Literal\" の後に 1 つ以上の型引数が必要です", + "literalNamedUnicodeEscape": "名前付き Unicode エスケープ シーケンスは、\"Literal\" 文字列注釈ではサポートされていません", + "literalNotAllowed": "\"Literal\" は、型引数なしでこのコンテキストでは使用できません", + "literalNotCallable": "Literal 型はインスタンス化できません", + "literalUnsupportedType": "\"Literal\" の型引数は None、literal 値 (int、bool、str、または bytes)、または enum 値である必要があります", + "matchIncompatible": "Match ステートメントには Python 3.10 以降が必要です", + 
"matchIsNotExhaustive": "match ステートメント内のケースでは、すべての値が完全に処理されるわけではありません", + "maxParseDepthExceeded": "解析の最大深さを超えました。式を小さい部分式に分割する", + "memberAccess": "クラス \"{type}\" の属性 \"{name}\" にアクセスできません", + "memberDelete": "クラス \"{type}\" の属性 \"{name}\" を削除できません", + "memberSet": "クラス \"{type}\" の属性 \"{name}\" に割り当てることはできません", + "metaclassConflict": "派生クラスのメタクラスは、そのすべての基底クラスのメタクラスのサブクラスである必要があります", + "metaclassDuplicate": "指定できるメタクラスは 1 つだけです", + "metaclassIsGeneric": "メタクラスをジェネリックにすることはできません", + "methodNotDefined": "\"{name}\" メソッドが定義されていません", + "methodNotDefinedOnType": "型 \"{type}\" に \"{name}\" メソッドが定義されていません", + "methodOrdering": "一貫性のあるメソッドの順序を作成できません", + "methodOverridden": "\"{name}\" は、クラス \"{className}\" の同じ名前のメソッドを互換性のない型 \"{type}\" でオーバーライドします", + "methodReturnsNonObject": "\"{name}\" メソッドはオブジェクトを返しません", + "missingSuperCall": "メソッド \"{methodName}\" は親クラスで同じ名前のメソッドを呼び出しません", + "mixingBytesAndStr": "Bytes 値と str 値を連結することはできません", + "moduleAsType": "モジュールを型として使用することはできません", + "moduleNotCallable": "モジュールは呼び出し可能ではありません", + "moduleUnknownMember": "\"{memberName}\" はモジュール \"{moduleName}\" の既知の属性ではありません", + "namedExceptAfterCatchAll": "名前付き except 句は、catch-all except 句の後には使用できません", + "namedParamAfterParamSpecArgs": "ParamSpec args パラメーターの後にキーワード パラメーター \"{name}\" をシグネチャに含めることはできません", + "namedTupleEmptyName": "名前付き tuple 内の名前を空にすることはできません", + "namedTupleEntryRedeclared": "親クラス \"{className}\" が名前付き tuple であるため、\"{name}\" をオーバーライドできません", + "namedTupleFieldUnderscore": "Named tuple フィールド名をアンダースコアで始めることはできません", + "namedTupleFirstArg": "最初の引数として名前付き tuple クラス名が必要です", + "namedTupleMultipleInheritance": "NamedTuple による複数の継承はサポートされていません", + "namedTupleNameKeyword": "フィールド名をキーワードにすることはできません", + "namedTupleNameType": "エントリ名と型を指定する 2 エントリの tuple が必要です", + "namedTupleNameUnique": "名前付き tuple 内の名前は一意である必要があります", + "namedTupleNoTypes": "\"namedtuple\" は tuple エントリに型を提供しません。代わりに \"NamedTuple\" を使用してください", + "namedTupleSecondArg": "2 番目の引数として名前付き tuple エントリ list が必要です", + "newClsParam": 
"__new__ override は \"cls\" パラメーターを受け取る必要があります", + "newTypeAnyOrUnknown": "NewType の 2 番目の引数は、Any や Unknown ではなく、既知のクラスでなければなりません", + "newTypeBadName": "NewType の最初の引数は文字列リテラルである必要があります", + "newTypeLiteral": "NewType は Literal 型では使用できません", + "newTypeNameMismatch": "NewType は同じ名前の変数に割り当てる必要があります", + "newTypeNotAClass": "NewType の 2 番目の引数としてクラスが必要です", + "newTypeParamCount": "NewType には 2 つの位置引数が必要です", + "newTypeProtocolClass": "NewType は構造型 (Protocolまたは TypedDict クラス) では使用できません", + "noOverload": "指定された引数に一致する \"{name}\" のオーバーロードがありません", + "noReturnContainsReturn": "戻り値の型 \"NoReturn\" を宣言した関数に return ステートメントを含めることはできません", + "noReturnContainsYield": "戻り値の型 \"NoReturn\" を宣言した関数に yield ステートメントを含めることはできません", + "noReturnReturnsNone": "戻り値の型が \"NoReturn\" として宣言されている関数は \"None\" を返すことができません", + "nonDefaultAfterDefault": "既定以外の引数は既定の引数の後に続きます", + "nonLocalInModule": "モジュール レベルでは nonlocal 宣言は許可されません", + "nonLocalNoBinding": "nonlocal \"{name}\" のバインドが見つかりません", + "nonLocalReassignment": "\"{name}\" は nonlocal 宣言の前に割り当てられます", + "nonLocalRedefinition": "\"{name}\" は既に nonlocal として宣言されています", + "noneNotCallable": "\"None\" 型のオブジェクトを呼び出すことはできません", + "noneNotIterable": "型 \"None\" のオブジェクトを反復可能な値として使用することはできません", + "noneNotSubscriptable": "\"None\" 型のオブジェクトは添字可能ではありません", + "noneNotUsableWith": "型 \"None\" と \"with\" は同時に使用できません", + "noneNotUsableWithAsync": "型 \"None\" と \"async with\" は同時に使用できません", + "noneOperator": "演算子 \"{operator}\" は \"None\" ではサポートされていません", + "noneUnknownMember": "\"{name}\" は \"None\" の既知の属性ではありません", + "nonlocalTypeParam": "型パラメーター \"{name}\" に nonlocal バインドは許可されていません", + "notRequiredArgCount": "\"NotRequired\" の後に 1 つの型引数が必要です", + "notRequiredNotInTypedDict": "\"NotRequired\" はこのコンテキストでは許可されていません", + "objectNotCallable": "型 \"{type}\" のオブジェクトは呼び出し可能ではありません", + "obscuredClassDeclaration": "クラス宣言 \"{name}\" は、同じ名前の宣言によって隠されています", + "obscuredFunctionDeclaration": "関数宣言 \"{name}\" は、同じ名前の宣言によって隠されています", + 
"obscuredMethodDeclaration": "メソッド宣言 \"{name}\" は、同じ名前の宣言によって隠されています", + "obscuredParameterDeclaration": "パラメーター宣言 \"{name}\" は、同じ名前の宣言によって隠されています", + "obscuredTypeAliasDeclaration": "型エイリアス宣言 \"{name}\" は、同じ名前の宣言によって隠されています", + "obscuredVariableDeclaration": "宣言 \"{name}\" は、同じ名前の宣言によって隠されています", + "operatorLessOrGreaterDeprecated": "演算子 \"<>\" は Python 3 ではサポートされていません。代わりに \"!=\" を使用してください", + "optionalExtraArgs": "\"Optional\" の後に 1 つの型引数が必要です", + "orPatternIrrefutable": "参照不可能なパターンは、\"or\" パターンの最後のサブパターンとしてのみ許可されます", + "orPatternMissingName": "\"or\" パターン内のすべてのサブパターンは、同じ名前をターゲットにする必要があります", + "overlappingKeywordArgs": "入力された辞書はキーワード パラメーターと重複しています: {names}", + "overlappingOverload": "パラメーターがオーバーロード {obscuredBy} と重複しているため、\"{name}\" のオーバーロード {obscured} は使用されません", + "overloadAbstractImplMismatch": "オーバーロードは実装の抽象状態と一致する必要があります", + "overloadAbstractMismatch": "オーバーロードはすべて抽象であるか抽象でない必要があります", + "overloadClassMethodInconsistent": "\"{name}\" のオーバーロードでは、@classmethod を不整合に使用します", + "overloadFinalImpl": "@final デコレーターは実装にのみ適用する必要があります", + "overloadFinalNoImpl": "最初のオーバーロードのみを @final に設定する必要があります", + "overloadImplementationMismatch": "オーバーロードされた実装がオーバーロード {index} のシグネチャと一致しません", + "overloadOverrideImpl": "@override デコレーターは実装にのみ適用する必要があります", + "overloadOverrideNoImpl": "最初のオーバーロードのみを @override に設定する必要があります", + "overloadReturnTypeMismatch": "\"{name}\" のオーバーロード {prevIndex} はオーバーロード {newIndex} と重複し、互換性のない型を返します", + "overloadStaticMethodInconsistent": "\"{name}\" のオーバーロードでは、@staticmethod を不整合に使用します", + "overloadWithoutImplementation": "\"{name}\" は overload としてマークされていますが、実装が提供されていません", + "overriddenMethodNotFound": "メソッド \"{name}\" は override としてマークされていますが、同じ名前の基本メソッドが存在しません", + "overrideDecoratorMissing": "メソッド \"{name}\" は override としてマークされていませんが、クラス \"{className}\" のメソッドをオーバーライドしています", + "paramAfterKwargsParam": "パラメーターは \"**\" パラメーターの後に続けることはできません", + "paramAlreadyAssigned": "パラメーター \"{name}\" は既に割り当て済みです", + "paramAnnotationMissing": "パラメーター \"{name}\" に型注釈がありません", + 
"paramAssignmentMismatch": "型 \"{sourceType}\" の式を型 \"{paramType}\" のパラメーターに割り当てることはできません", + "paramNameMissing": "\"{name}\" という名前のパラメーターがありません", + "paramSpecArgsKwargsDuplicate": "ParamSpec \"{type}\" の引数は既に指定されています", + "paramSpecArgsKwargsUsage": "ParamSpec の \"args\" 属性と \"kwargs\" 属性の両方が関数シグネチャ内に含まれている必要があります", + "paramSpecArgsMissing": "ParamSpec \"{type}\" の引数がありません", + "paramSpecArgsUsage": "ParamSpec の \"args\" 属性は、*args パラメーターと共に使用する場合にのみ有効です", + "paramSpecAssignedName": "ParamSpec は 、\"{name}\" という名前の変数に割り当てる必要があります", + "paramSpecContext": "ParamSpec はこのコンテキストでは許可されていません", + "paramSpecDefaultNotTuple": "ParamSpec の既定値には、省略記号、tuple 式、または ParamSpec が必要です", + "paramSpecFirstArg": "最初の引数として ParamSpec の名前が必要です", + "paramSpecKwargsUsage": "ParamSpec の \"kwargs\" 属性は、**kwargs パラメーターと共に使用する場合にのみ有効です", + "paramSpecNotUsedByOuterScope": "ParamSpec \"{name}\" はこのコンテキストでは意味がありません", + "paramSpecUnknownArg": "ParamSpec は複数の引数をサポートしていません", + "paramSpecUnknownMember": "\"{name}\" は ParamSpec の既知の属性ではありません", + "paramSpecUnknownParam": "\"{name}\" は ParamSpec に対する不明なパラメーターです", + "paramTypeCovariant": "共変の型変数はパラメーター型では使用できません", + "paramTypePartiallyUnknown": "パラメーター \"{paramName}\" の型が部分的に不明です", + "paramTypeUnknown": "パラメーター \"{paramName}\" の型が不明です", + "parenthesizedContextManagerIllegal": "\"with\" ステートメント内のかっこを使用するには Python 3.9 以降が必要です", + "patternNeverMatches": "サブジェクトの種類 \"{type}\" のパターンは一致しません", + "positionArgAfterNamedArg": "キーワード引数の後に位置引数を指定することはできません", + "positionArgAfterUnpackedDictArg": "キーワード引数のアンパックの後に位置引数を指定することはできません", + "positionOnlyAfterArgs": "\"*\" パラメーターの後に位置のみのパラメーターの区切り文字を使用することはできません", + "positionOnlyAfterKeywordOnly": "\"/\" パラメーターは 、\"*\" パラメーターの前に指定する必要があります", + "positionOnlyAfterNon": "位置のみのパラメーターの後に位置のみのパラメーターを指定することはできません", + "positionOnlyFirstParam": "位置のみのパラメーターの区切り記号を最初のパラメーターとして使用することはできません", + "positionOnlyIncompatible": "位置のみのパラメーターの区切り文字には Python 3.8 以降が必要です", + "privateImportFromPyTypedModule": "\"{name}\" はモジュール 
\"{module}\" からエクスポートされていません", + "privateUsedOutsideOfClass": "\"{name}\" はプライベートであり、宣言されているクラスの外部で使用されます", + "privateUsedOutsideOfModule": "\"{name}\" はプライベートであり、それが宣言されているモジュールの外部で使用されています", + "propertyOverridden": "\"{name}\" は、クラス \"{className}\" の同じ名前の property を誤ってオーバーライドします", + "propertyStaticMethod": "静的メソッドは、property の getter、setter または deleter に対して許可されません", + "protectedUsedOutsideOfClass": "\"{name}\" は保護され、宣言されているクラスの外部で使用されます", + "protocolBaseClass": "Protocol クラス \"{classType}\" は非 Protocol クラス \"{baseType}\" から派生できません", + "protocolBaseClassWithTypeArgs": "型パラメーター構文を使用する場合、Protocol クラスでは型引数を使用できません", + "protocolIllegal": "\"Protocol\" を使用するには Python 3.7 以降が必要です", + "protocolNotAllowed": "\"Protocol\" はこのコンテキストでは使用できません", + "protocolTypeArgMustBeTypeParam": "\"Protocol\" の型引数は型パラメーターである必要があります", + "protocolUnsafeOverlap": "クラスが安全でない方法で \"{name}\" と重複しており、実行時に一致する可能性があります", + "protocolVarianceContravariant": "ジェネリック Protocol \"{class}\" で使用される型変数 \"{variable}\" は反変である必要があります", + "protocolVarianceCovariant": "ジェネリック Protocol \"{class}\" で使用される型変数 \"{variable}\" は共変である必要があります", + "protocolVarianceInvariant": "ジェネリック Protocol \"{class}\" で使用される型変数 \"{variable}\" は不変である必要があります", + "pyrightCommentInvalidDiagnosticBoolValue": "Pyright コメント ディレクティブの後には \"=\" と値 true または false を指定する必要があります", + "pyrightCommentInvalidDiagnosticSeverityValue": "Pyright コメント ディレクティブの後に \"=\" と true、false、error、warning、information または none の値を指定する必要があります", + "pyrightCommentMissingDirective": "Pyright コメントの後にディレクティブ (basic または strict) または診断規則を指定する必要があります", + "pyrightCommentNotOnOwnLine": "ファイル レベルの設定を制御するために使用する Pyright コメントは、独自の行に表示する必要があります", + "pyrightCommentUnknownDiagnosticRule": "\"{rule}\" は pyright コメントの不明な診断規則です", + "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" は、pyright コメントの無効な値です。true、false、error、warning、information または none が必要です", + "pyrightCommentUnknownDirective": "\"{directive}\" は、pyright コメントの不明なディレクティブです。\"strict\"、\"standard\"、または \"basic\" が必要です", 
+ "readOnlyArgCount": "\"ReadOnly\" の後に 1 つの型引数が必要です", + "readOnlyNotInTypedDict": "\"ReadOnly\" はこのコンテキストでは許可されていません", + "recursiveDefinition": "\"{name}\" の型は、それ自体を参照しているため、特定できませんでした", + "relativeImportNotAllowed": "相対インポートは、\"import .a\" フォームでは使用できません。代わりに \"from . import a\" を使用します。", + "requiredArgCount": "\"Required\" の後に 1 つの型引数が必要です", + "requiredNotInTypedDict": "このコンテキストでは \"Required\" は許可されません", + "returnInAsyncGenerator": "値を持つ return ステートメントは、async ジェネレーターでは使用できません", + "returnInExceptionGroup": "\"except*\" ブロックでは \"return\" を使用できません", + "returnMissing": "戻り値の型が \"{returnType}\" として宣言されている関数は、すべてのコード パスで値を返す必要があります", + "returnOutsideFunction": "\"return\" は関数内でのみ使用できます", + "returnTypeContravariant": "反変の型変数は戻り値の型では使用できません", + "returnTypeMismatch": "型 \"{exprType}\" は戻り値の型 \"{returnType}\" に割り当てできません", + "returnTypePartiallyUnknown": "戻り値の型 \"{returnType}\" は部分的に不明です", + "returnTypeUnknown": "戻り値の型が不明です", + "revealLocalsArgs": "\"reveal_locals\" 呼び出しに引数が必要ありません", + "revealLocalsNone": "このスコープには locals がありません", + "revealTypeArgs": "\"reveal_type\" 呼び出しに 1 つの位置引数が必要です", + "revealTypeExpectedTextArg": "関数 \"reveal_type\" の \"expected_text\" 引数は、str リテラル値である必要があります", + "revealTypeExpectedTextMismatch": "入力テキストの不一致;\"{expected}\" が必要ですが、\"{received}\" を受信しました", + "revealTypeExpectedTypeMismatch": "型が一致しません。\"{expected}\" が必要ですが、\"{received}\" を受信しました", + "selfTypeContext": "\"Self\" はこのコンテキストでは無効です", + "selfTypeMetaclass": "\"Self\" はメタクラス (\"type\" のサブクラス) 内では使用できません", + "selfTypeWithTypedSelfOrCls": "\"Self\" は、\"Self\" 以外の型注釈を持つ 'self' または 'cls' パラメーターを持つ関数では使用できません", + "sentinelBadName": "Sentinel の最初の引数は文字列リテラルである必要があります", + "sentinelNameMismatch": "Sentinel は、同じ名前の変数に割り当てる必要があります", + "sentinelParamCount": "Sentinel には 1 つの位置引数が必要です", + "setterGetterTypeMismatch": "property setter 値の型は、getter の戻り値の型に割り当てることができません", + "singleOverload": "\"{name}\" はオーバーロードとしてマークされていますが、追加のオーバーロードがありません", + "slotsAttributeError": "__slots__で \"{name}\" が指定されていません", + 
"slotsClassVarConflict": "\"{name}\" が __slots__ で宣言されたインスタンス変数と競合しています", + "starPatternInAsPattern": "スター パターンを \"as\" ターゲットと共に使用することはできません", + "starPatternInOrPattern": "スター パターンを他のパターン内で OR 化することはできません", + "starStarWildcardNotAllowed": "** はワイルドカード \"_\" と共に使用できません", + "staticClsSelfParam": "静的メソッドに \"self\" または \"cls\" パラメーターを指定することはできません", + "stringNonAsciiBytes": "非 ASCII 文字はバイト文字列リテラルでは使用できません", + "stringNotSubscriptable": "型式では文字列式を添字にすることはできません。式全体を引用符で囲んでください", + "stringUnsupportedEscape": "文字列リテラルでサポートされていないエスケープ シーケンス", + "stringUnterminated": "文字列リテラルが未終了です", + "stubFileMissing": "\"{importName}\" の stub ファイルが見つかりません", + "stubUsesGetAttr": "型 stub ファイルが不完全です。\"__getattr__\" はモジュールの型エラーを隠します", + "sublistParamsIncompatible": "Python 3.x では sublist パラメーターはサポートされていません", + "superCallArgCount": "\"super\" 呼び出しには 2 つ以下の引数が必要です", + "superCallFirstArg": "\"super\" 呼び出しの最初の引数としてクラス型が必要ですが、\"{type}\" を受け取りました", + "superCallSecondArg": "\"super\" 呼び出しの 2 番目の引数は、\"{type}\" から派生したオブジェクトまたはクラスである必要があります", + "superCallZeroArgForm": "\"super\" 呼び出しの 0 引数形式は、メソッド内でのみ有効です", + "superCallZeroArgFormStaticMethod": "\"super\" 呼び出しの 0 引数形式は、静的メソッド内では有効ではありません", + "symbolIsPossiblyUnbound": "\"{name}\" はバインドされていない可能性があります", + "symbolIsUnbound": "\"{name}\" はバインドされていません", + "symbolIsUndefined": "\"{name}\" が定義されていません", + "symbolOverridden": "\"{name}\" はクラス \"{className}\" の同じ名前のシンボルをオーバーライドします", + "templateStringBytes": "テンプレート文字列リテラル (t 文字列) をバイナリにすることはできません", + "templateStringIllegal": "テンプレート文字列リテラル (t 文字列) には Python 3.14 以降が必要です", + "templateStringUnicode": "テンプレート文字列リテラル (t 文字列) を Unicode にすることはできません", + "ternaryNotAllowed": "3 項式は型式では使用できません", + "totalOrderingMissingMethod": "total_orderingを使用するには、クラスで \"__lt__\"、\"__le__\"、\"__gt__\"、または \"__ge__\" のいずれかを定義する必要があります", + "trailingCommaInFromImport": "末尾のコンマはかっこで囲まずには使用できません", + "tryWithoutExcept": "Try ステートメントには、少なくとも 1 つの except 句または finally 句が必要です", + "tupleAssignmentMismatch": "型 \"{type}\" の式はターゲット tuple 
に割り当てることができません", + "tupleInAnnotation": "tuple 式は型式では使用できません", + "tupleIndexOutOfRange": "インデックス {index} が型 {type} の範囲外です", + "typeAliasIllegalExpressionForm": "型エイリアス定義の式フォームが無効です", + "typeAliasIsRecursiveDirect": "型エイリアス \"{name}\" は、その定義でそれ自体を使用できません", + "typeAliasNotInModuleOrClass": "TypeAlias は、モジュールまたはクラススコープ内でのみ定義できます", + "typeAliasRedeclared": "\"{name}\" は TypeAlias として宣言されており、1 回だけ割り当てることができます", + "typeAliasStatementBadScope": "type ステートメントは、モジュールまたはクラススコープ内でのみ使用できます", + "typeAliasStatementIllegal": "型エイリアス ステートメントには Python 3.12 以降が必要です", + "typeAliasTypeBadScope": "型エイリアスは、モジュールまたはクラス スコープ内でのみ定義できます", + "typeAliasTypeBaseClass": "\"type\" ステートメントで定義された型エイリアスを基底クラスとして使用することはできません", + "typeAliasTypeMustBeAssigned": "TypeAliasType は、型エイリアスと同じ名前の変数に割り当てる必要があります", + "typeAliasTypeNameArg": "TypeAliasType の最初の引数は、型エイリアスの名前を表す文字列リテラルである必要があります", + "typeAliasTypeNameMismatch": "型エイリアスの名前は、それが割り当てられている変数の名前と一致する必要があります", + "typeAliasTypeParamInvalid": "型パラメーター リストは、TypeVar、TypeVarTuple、または ParamSpec のみを含む tuple である必要があります", + "typeAnnotationCall": "型式では呼び出し式を使用できません", + "typeAnnotationVariable": "型式では変数を使用できません", + "typeAnnotationWithCallable": "\"type\" の型引数はクラスである必要があります。呼び出し可能関数はサポートされていません", + "typeArgListExpected": "ParamSpec、省略記号、または型の list が必要です", + "typeArgListNotAllowed": "この型引数には list 式は使用できません", + "typeArgsExpectingNone": "クラス \"{name}\" に型引数が必要ありません", + "typeArgsMismatchOne": "1 つの型引数が必要ですが、{received} を受け取りました", + "typeArgsMissingForAlias": "ジェネリック型エイリアス \"{name}\" に必要な型引数", + "typeArgsMissingForClass": "ジェネリック クラス \"{name}\" に必要な型引数", + "typeArgsTooFew": "\"{name}\" に指定された型引数が少なすぎます。{expected} が必要ですが、{received} を受信しました", + "typeArgsTooMany": "\"{name}\" に指定された型引数が多すぎます。{expected} が必要ですが、{received} を受信しました", + "typeAssignmentMismatch": "型 \"{sourceType}\" は宣言された型 \"{destType}\" に割り当てできません", + "typeAssignmentMismatchWildcard": "インポート シンボル \"{name}\" には型 \"{sourceType}\" があり、宣言された型 \"{destType}\" には割り当てできません", + "typeCallNotAllowed": "type() 
呼び出しは型式で使用しないでください", + "typeCheckOnly": "\"{name}\" は@type_check_onlyとしてマークされており、型注釈でのみ使用できます", + "typeCommentDeprecated": "type コメントの使用は非推奨です。代わりに type 注釈を使用してください", + "typeExpectedClass": "クラスが必要ですが、\"{type}\" を受け取りました", + "typeFormArgs": "\"TypeForm\" は 1 つの位置引数を受け取ります", + "typeGuardArgCount": "\"TypeGuard\" または \"TypeIs\" の後に 1 つの型引数が必要です", + "typeGuardParamCount": "ユーザー定義型ガード関数とメソッドには、少なくとも 1 つの入力パラメーターが必要です", + "typeIsReturnType": "TypeIs の戻り値の型 (\"{returnType}\") と値パラメーターの型 (\"{type}\") が一致しません", + "typeNotAwaitable": "\"{type}\" は awaitable ではありません", + "typeNotIntantiable": "\"{type}\" をインスタンス化できません", + "typeNotIterable": "\"{type}\" は反復できません", + "typeNotSpecializable": "型 \"{type}\" を特殊化できませんでした", + "typeNotSubscriptable": "型 \"{type}\" のオブジェクトは添字可能ではありません", + "typeNotSupportBinaryOperator": "演算子 \"{operator}\" は型 \"{leftType}\" と \"{rightType}\" ではサポートされていません", + "typeNotSupportBinaryOperatorBidirectional": "型 \"{leftType}\" と \"{rightType}\" に対して演算子 \"{operator}\" はサポートされていません。予期された型が \"{expectedType}\" の場合", + "typeNotSupportUnaryOperator": "演算子 \"{operator}\" は型 \"{type}\" ではサポートされていません", + "typeNotSupportUnaryOperatorBidirectional": "型 \"{type}\" が \"{expectedType}\" の場合、演算子 \"{operator}\" はサポートされていません", + "typeNotUsableWith": "型 \"{type}\" のオブジェクトは、{method} を正しく実装していないため、\"with\" と共に使用できません", + "typeNotUsableWithAsync": "型 \"{type}\" のオブジェクトは、{method} を正しく実装していないため、\"async with\" と共に使用できません", + "typeParameterBoundNotAllowed": "バインドまたは制約を可変個引数型パラメーターまたは ParamSpec と共に使用することはできません", + "typeParameterConstraintTuple": "型パラメーター制約は、2 つ以上の型のタプルである必要があります", + "typeParameterExistingTypeParameter": "型パラメーター \"{name}\" は既に使用されています", + "typeParameterNotDeclared": "型パラメーター \"{name}\" は、\"{container}\" の型パラメーターリストに含まれていません", + "typeParametersMissing": "少なくとも 1 つの型パラメーターを指定する必要があります", + "typePartiallyUnknown": "\"{name}\" の種類が部分的に不明です", + "typeUnknown": "\"{name}\" の種類が不明です", + "typeVarAssignedName": "TypeVar は 、\"{name}\" という名前の変数に割り当てる必要があります", + 
"typeVarAssignmentMismatch": "型 \"{type}\" を型変数 \"{name}\" に割り当てることはできません", + "typeVarBoundAndConstrained": "TypeVar をバインドと制約の両方にすることはできません", + "typeVarBoundGeneric": "TypeVar バインド型をジェネリックにすることはできません", + "typeVarConstraintGeneric": "TypeVar 制約型をジェネリックにすることはできません", + "typeVarDefaultBoundMismatch": "TypeVar の既定の型はバインドされた型のサブタイプである必要があります", + "typeVarDefaultConstraintMismatch": "TypeVar の既定の型は、制約付き型のいずれかである必要があります", + "typeVarDefaultIllegal": "型変数の既定の型には Python 3.13 以降が必要です", + "typeVarDefaultInvalidTypeVar": "型パラメーター \"{name}\" には、スコープ外の 1 つ以上の型変数を参照する既定の型があります", + "typeVarFirstArg": "最初の引数として TypeVar の名前が必要です", + "typeVarInvalidForMemberVariable": "属性型は、ローカル メソッドにスコープ指定された型変数 \"{name}\" を使用できません", + "typeVarNoMember": "TypeVar \"{type}\" には属性 \"{name}\" がありません", + "typeVarNotSubscriptable": "TypeVar \"{type}\" は添字可能ではありません", + "typeVarNotUsedByOuterScope": "型変数 \"{name}\" は、このコンテキストでは意味がありません", + "typeVarPossiblyUnsolvable": "呼び出し元がパラメーター \"{param}\" に引数を指定しない場合、型変数 \"{name}\" は解決されない可能性があります", + "typeVarSingleConstraint": "TypeVar には少なくとも 2 つの制約付き型が必要です", + "typeVarTupleConstraints": "TypeVarTuple に値制約を持たせることはできません", + "typeVarTupleContext": "TypeVarTuple はこのコンテキストでは許可されていません", + "typeVarTupleDefaultNotUnpacked": "TypeVarTuple の既定の型は、アンパックされた tuple または TypeVarTuple である必要があります", + "typeVarTupleMustBeUnpacked": "TypeVarTuple 値にはアンパック演算子が必要です", + "typeVarTupleUnknownParam": "\"{name}\" は TypeVarTuple に対する不明なパラメーターです", + "typeVarUnknownParam": "\"{name}\" は TypeVar に対する不明なパラメーターです", + "typeVarUsedByOuterScope": "TypeVar \"{name}\" は外部スコープで既に使用されています", + "typeVarUsedOnlyOnce": "TypeVar \"{name}\" はジェネリック関数シグネチャに 1 回だけ出現します", + "typeVarVariance": "TypeVar を共変と反変の両方にすることはできません", + "typeVarWithDefaultFollowsVariadic": "TypeVar \"{typeVarName}\" には既定値があり、TypeVarTuple \"{variadicName}\" の後に続けることはできません", + "typeVarWithoutDefault": "\"{name}\" は既定の型がないため、型パラメーター リストの \"{other}\" の後に表示できません", + "typeVarsNotInGenericOrProtocol": "Generic[] または Protocol[] には、すべての型変数を含める必要があります", 
+ "typedDictAccess": "TypedDict の項目にアクセスできませんでした", + "typedDictAssignedName": "TypedDict は \"{name}\" という名前の変数に割り当てる必要があります", + "typedDictBadVar": "TypedDict クラスには型注釈のみを含めることができます", + "typedDictBaseClass": "TypedDict クラスのすべての基底クラスも TypedDict クラスである必要があります", + "typedDictBoolParam": "\"{name}\" パラメーターの値は True または False である必要があります", + "typedDictClosedExtras": "基底クラス \"{name}\" は、余分な項目の型を型 \"{type}\" に制限する TypedDict です", + "typedDictClosedFalseNonOpenBase": "基底クラス \"{name}\" は開いている TypedDict; ではありません。closed=False は許可されていません", + "typedDictClosedNoExtras": "基底クラス \"{name}\" は closed した TypedDict です。追加の項目は許可されていません", + "typedDictDelete": "TypedDict の項目を削除できませんでした", + "typedDictEmptyName": "TypedDict 内の名前を空にすることはできません", + "typedDictEntryName": "辞書エントリ名に文字列リテラルが必要です", + "typedDictEntryUnique": "ディクショナリ内の名前は一意である必要があります", + "typedDictExtraArgs": "追加の TypedDict 引数はサポートされていません", + "typedDictExtraItemsClosed": "TypedDict は \"closed\" または \"extra_items\" を使用できますが、両方を使用することはできません", + "typedDictFieldNotRequiredRedefinition": "TypedDict アイテム \"{name}\" を NotRequired として再定義することはできません", + "typedDictFieldReadOnlyRedefinition": "TypedDict アイテム \"{name}\" を ReadOnly として再定義することはできません", + "typedDictFieldRequiredRedefinition": "TypedDict アイテム \"{name}\" を Required として再定義することはできません", + "typedDictFirstArg": "最初の引数として TypedDict クラス名が必要です", + "typedDictInClassPattern": "TypedDict クラスはクラス パターンでは使用できません", + "typedDictInitsubclassParameter": "TypedDict は __init_subclass__パラメーター \"{name}\" をサポートしていません", + "typedDictNotAllowed": "\"TypedDict\" はこのコンテキストでは使用できません", + "typedDictSecondArgDict": "2 番目のパラメーターとして dict パラメーターまたはキーワード パラメーターが必要です", + "typedDictSecondArgDictEntry": "単純な辞書エントリが必要です", + "typedDictSet": "TypedDict で項目を割り当てることができませんでした", + "unaccessedClass": "クラス \"{name}\" は参照されていません", + "unaccessedFunction": "関数 \"{name}\" は参照されていません", + "unaccessedImport": "インポート \"{name}\" は参照されていません", + "unaccessedSymbol": "\"{name}\" は参照されていません", + "unaccessedVariable": "変数 \"{name}\" は参照されていません", + 
"unannotatedFunctionSkipped": "関数 \"{name}\" の分析は、注釈が付けられていないためスキップされます", + "unaryOperationNotAllowed": "単項演算子は型式では使用できません", + "unexpectedAsyncToken": "\"def\"、\"with\"、または \"for\" が \"async\" の後に続く必要があります", + "unexpectedEof": "予期しない EOF", + "unexpectedExprToken": "式の最後に予期しないトークンが含まれています", + "unexpectedIndent": "予期しないインデント", + "unexpectedUnindent": "インデント解除は予期されていません", + "unhashableDictKey": "辞書キーはハッシュ可能である必要があります", + "unhashableSetEntry": "set エントリはハッシュ可能である必要があります", + "uninitializedAbstractVariables": "抽象基底クラスで定義された変数が、final クラス \"{classType}\" で初期化されていません", + "uninitializedInstanceVariable": "インスタンス変数 \"{name}\" は、クラス本体または__init__ メソッドで初期化されていません", + "unionForwardReferenceNotAllowed": "Union 構文は文字列オペランドで使用できません。式全体を引用符で囲んでください", + "unionSyntaxIllegal": "共用体の代替構文には Python 3.10 以降が必要です", + "unionTypeArgCount": "Union には 2 つ以上の型引数が必要です", + "unionUnpackedTuple": "Union にアンパックされた tuple を含めることはできません", + "unionUnpackedTypeVarTuple": "Union にアンパックされた TypeVarTuple を含めることはできません", + "unnecessaryCast": "不要な \"cast\" 呼び出し。型は既に \"{type}\" です", + "unnecessaryIsInstanceAlways": "不要な isinstance 呼び出し。\"{testType}\" は常に \"{classType}\" のインスタンスです", + "unnecessaryIsInstanceNever": "不要な isinstance 呼び出し; \"{testType}\" は\"{classType}\" のインスタンスであったことはありません", + "unnecessaryIsSubclassAlways": "不要な issubclass 呼び出し。\"{testType}\" は常に \"{classType}\" のサブクラスです", + "unnecessaryIsSubclassNever": "不要な issubclass 呼び出し; \"{testType}\" は \"{classType}\" のサブクラスであったことはありません", + "unnecessaryPyrightIgnore": "不要な \"# pyright: ignore\" コメント", + "unnecessaryPyrightIgnoreRule": "不要な \"# pyright: ignore\" ルール: \"{name}\"", + "unnecessaryTypeIgnore": "不要な \"# type: ignore\" コメント", + "unpackArgCount": "\"Unpack\" の後に 1 つの型引数が必要です", + "unpackExpectedTypeVarTuple": "Unpack の型引数として TypeVarTuple または tuple が必要です", + "unpackExpectedTypedDict": "Unpack に必要な TypedDict 型引数", + "unpackIllegalInComprehension": "内包表記ではアンパック操作は許可されていません", + "unpackInAnnotation": "アンパック演算子は型式では使用できません", + "unpackInDict": 
"アンパック操作はディクショナリで許可されていません", + "unpackInSet": "アンパック演算子は set 内では使用できません", + "unpackNotAllowed": "Unpack はこのコンテキストでは許可されていません", + "unpackOperatorNotAllowed": "このコンテキストではアンパック操作は許可されていません", + "unpackTuplesIllegal": "Python 3.8 より前のタプルではアンパック操作は許可されていません", + "unpackedArgInTypeArgument": "アンパックされた引数は、このコンテキストでは使用できません", + "unpackedArgWithVariadicParam": "アンパックされた引数は TypeVarTuple パラメーターには使用できません", + "unpackedDictArgumentNotMapping": "** の後の引数式は、\"str\" キー型のマッピングである必要があります", + "unpackedDictSubscriptIllegal": "下付き文字の辞書アンパック演算子は使用できません", + "unpackedSubscriptIllegal": "下付き文字の Unpack 演算子には Python 3.11 以降が必要です", + "unpackedTypeVarTupleExpected": "アンパックされた TypeVarTuple が必要です。Unpack[{name1}] または *{name2} を使用してください", + "unpackedTypedDictArgument": "アンパックされた TypedDict 引数をパラメーターと一致させることはできません", + "unreachableCodeCondition": "条件が静的に false として評価されるため、コードは分析されません", + "unreachableCodeStructure": "コードに構造的に到達できない", + "unreachableCodeType": "型分析はコードに到達不能であることを示します", + "unreachableExcept": "例外が既に処理されているため、Except 句に到達できません", + "unsupportedDunderAllOperation": "\"__all__\" に対する操作はサポートされていないため、エクスポートされたシンボル リストが正しくない可能性があります", + "unusedCallResult": "呼び出し式の結果は \"{type}\" 型であり、使用されません。これが意図的な場合は変数 \"_\" に代入する", + "unusedCoroutine": "async 関数呼び出しの結果が使用されていません。\"await\" を使用するか、結果を変数に代入してください。", + "unusedExpression": "式の値が使用されていません", + "varAnnotationIllegal": "変数の type 注釈には Python 3.6 以降が必要です。以前のバージョンとの互換性を保つために type コメントを使用してください", + "variableFinalOverride": "変数 \"{name}\" は Final とマークされ、クラス \"{className}\" の同じ名前の Final 以外の変数をオーバーライドします", + "variadicTypeArgsTooMany": "型引数リストには、アンパックされた TypeVarTuple または tuple を最大 1 つ含めることができます", + "variadicTypeParamTooManyAlias": "型エイリアスには TypeVarTuple 型パラメーターを最大 1 つ含めることができますが、複数の ({names}) を受け取りました", + "variadicTypeParamTooManyClass": "ジェネリック クラスには最大 1 つの TypeVarTuple 型パラメーターを指定できますが、複数の ({names}) を受け取りました", + "walrusIllegal": "演算子 \":=\" には Python 3.8 以降が必要です", + "walrusNotAllowed": "演算子 \":=\" は、かっこを囲まないこのコンテキストでは使用できません", + "wildcardInFunction": "ワイルドカードの 
import は、クラス内または関数内では許可されません", + "wildcardLibraryImport": "ライブラリからのワイルドカードの import は許可されていません", + "wildcardPatternTypePartiallyUnknown": "ワイルドカード パターンによってキャプチャされた型は部分的に不明です", + "wildcardPatternTypeUnknown": "ワイルドカード パターンによってキャプチャされた型が不明です", + "yieldFromIllegal": "\"yield from\" を使用するには Python 3.3 以降が必要です", + "yieldFromOutsideAsync": "async 関数では \"yield from\" は使用できません", + "yieldOutsideFunction": "関数またはラムダの外部では \"yield\" は許可されません", + "yieldWithinComprehension": "\"yield\" は内包表記内では使用できません", + "zeroCaseStatementsFound": "Match ステートメントには、少なくとも 1 つの case ステートメントを含める必要があります", + "zeroLengthTupleNotAllowed": "このコンテキストでは長さ 0 の tuple は使用できません" + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "\"Annotated\" 特殊フォームは、インスタンスおよびクラスのチェックでは使用できません", + "argParam": "引数はパラメーター \"{paramName}\" に対応します", + "argParamFunction": "引数は関数 \"{functionName}\" のパラメーター \"{paramName}\" に対応します", + "argsParamMissing": "パラメーター \"*{paramName}\" に対応するパラメーターがありません", + "argsPositionOnly": "位置のみのパラメーターの不一致。{expected} が必要ですが、{received} を受信しました", + "argumentType": "引数の型は \"{type}\" です", + "argumentTypes": "引数の型: ({types})", + "assignToNone": "型は \"None\" に割り当てできません", + "asyncHelp": "\"async with\" を意味しましたか?", + "baseClassIncompatible": "基底クラス \"{baseClass}\" は型 \"{type}\" と互換性がありません", + "baseClassIncompatibleSubclass": "基底クラス \"{baseClass}\" は、型 \"{type}\" と互換性のない \"{subclass}\" から派生しています", + "baseClassOverriddenType": "基底クラス \"{baseClass}\" は、オーバーライドされる型 \"{type}\" を提供します", + "baseClassOverridesType": "基底クラス \"{baseClass}\" は型 \"{type}\" でオーバーライドします", + "bytesTypePromotions": "disableBytesTypePromotions を false に設定して、\"bytearray\" と \"memoryview\" の型昇格動作を有効にします", + "conditionalRequiresBool": "型 \"{operandType}\" のメソッド __bool__は、\"bool\" ではなく型 \"{boolReturnType}\" を返します", + "dataClassFieldLocation": "フィールド宣言", + "dataClassFrozen": "\"{name}\" は固定されています", + "dataProtocolUnsupported": "\"{name}\" はデータ プロトコルです", + "descriptorAccessBindingFailed": "記述子クラス \"{className}\" のメソッド \"{name}\" をバインドできませんでした", + 
"descriptorAccessCallFailed": "記述子クラス \"{className}\" のメソッド \"{name}\" を呼び出せませんでした", + "finalMethod": "Final メソッド", + "functionParamDefaultMissing": "パラメーター \"{name}\" に既定の引数がありません", + "functionParamName": "パラメーター名の不一致: \"{destName}\" と \"{srcName}\"", + "functionParamPositionOnly": "位置のみのパラメーターの不一致; パラメーター \"{name}\" は位置のみではありません", + "functionReturnTypeMismatch": "関数の戻り値の型 \"{sourceType}\" は型 \"{destType}\" と互換性がありません", + "functionTooFewParams": "関数が受け入れる位置指定パラメーターが少なすぎます。{expected} が必要ですが、{received} を受信しました", + "functionTooManyParams": "関数が受け入れる位置指定パラメーターが多すぎます。{expected} が必要ですが、{received} を受信しました", + "genericClassNotAllowed": "インスタンスまたはクラスのチェックでは、型引数を含むジェネリック型は使用できません", + "incompatibleDeleter": "property deleter メソッドは互換性がありません", + "incompatibleGetter": "property getter メソッドは互換性がありません", + "incompatibleSetter": "property setter メソッドは互換性がありません", + "initMethodLocation": "__init__ メソッドはクラス \"{type}\" で定義されています", + "initMethodSignature": "__init__の署名は \"{type}\" です", + "initSubclassLocation": "__init_subclass__ メソッドはクラス \"{name}\" で定義されています", + "invariantSuggestionDict": "\"dict\" から値の型の共変である \"Mapping\" への切り替えを検討してください", + "invariantSuggestionList": "\"list\" から共変である \"Sequence\" への切り替えを検討してください", + "invariantSuggestionSet": "\"set\" から共変である \"Container\" への切り替えを検討してください", + "isinstanceClassNotSupported": "インスタンスとクラスのチェックでは、\"{type}\" はサポートされていません", + "keyNotRequired": "\"{name}\" は \"{type}\" の必須キーではないため、アクセスすると実行時例外が発生する可能性があります", + "keyReadOnly": "\"{name}\" は \"{type}\" の読み取り専用キーです", + "keyRequiredDeleted": "\"{name}\" は必須キーであり、削除できません", + "keyUndefined": "\"{name}\" は \"{type}\" で定義されたキーではありません", + "kwargsParamMissing": "パラメーター \"**{paramName}\" に対応するパラメーターがありません", + "listAssignmentMismatch": "型 \"{type}\" はターゲット リストと互換性がありません", + "literalAssignmentMismatch": "\"{sourceType}\" は型 \"{destType}\" に割り当てできません", + "literalNotAllowed": "\"Literal\" 特殊フォームは、インスタンスおよびクラスのチェックとともに使用できません", + "matchIsNotExhaustiveHint": "完全な処理が意図されていない場合は、\"case _: pass\" を追加します", + 
"matchIsNotExhaustiveType": "ハンドルされない型: \"{type}\"", + "memberAssignment": "型 \"{type}\" の式をクラス \"{classType}\" の属性 \"{name}\" に割り当てることはできません", + "memberIsAbstract": "\"{type}.{name}\" は実装されていません", + "memberIsAbstractMore": "その他 {count} 件...", + "memberIsClassVarInProtocol": "\"{name}\" はプロトコルで ClassVar として定義されています", + "memberIsInitVar": "\"{name}\" は init-only フィールドです", + "memberIsInvariant": "\"{name}\" は変更可能であるため、不変です", + "memberIsNotClassVarInClass": "プロトコルと互換性を持たせるには、\"{name}\" を ClassVar として定義する必要があります", + "memberIsNotClassVarInProtocol": "\"{name}\" はプロトコルで ClassVar として定義されていません", + "memberIsNotReadOnlyInProtocol": "\"{name}\" はプロトコルで読み取り専用ではありません", + "memberIsReadOnlyInProtocol": "\"{name}\" はプロトコルで読み取り専用です", + "memberIsWritableInProtocol": "\"{name}\" はプロトコルで書き込み可能です", + "memberSetClassVar": "属性 \"{name}\" は ClassVar であるため、クラス インスタンスを介して割り当てることはできません", + "memberTypeMismatch": "\"{name}\" は互換性のない型です", + "memberUnknown": "属性 \"{name}\" が不明です", + "metaclassConflict": "メタクラス \"{metaclass1}\" が \"{metaclass2}\" と競合しています", + "missingDeleter": "property deleter メソッドがありません", + "missingGetter": "property getter メソッドがありません", + "missingSetter": "property setter メソッドがありません", + "namedParamMissingInDest": "余分なパラメーター \"{name}\"", + "namedParamMissingInSource": "キーワード パラメーター \"{name}\" が見つかりません", + "namedParamTypeMismatch": "型 \"{sourceType}\" のキーワード パラメーター \"{name}\" は型 \"{destType}\" と互換性がありません", + "namedTupleNotAllowed": "NamedTuple はインスタンスまたはクラスのチェックには使用できません", + "newMethodLocation": "__new__ メソッドはクラス \"{type}\" で定義されています", + "newMethodSignature": "__new__の署名は \"{type}\" です", + "newTypeClassNotAllowed": "NewType で作成された型は、インスタンスとクラスのチェックでは使用できません", + "noOverloadAssignable": "型 \"{type}\" に一致するオーバーロードされた関数はありません", + "noneNotAllowed": "インスタンスまたはクラスのチェックには None 使用できません", + "orPatternMissingName": "名前がありません: {name}", + "overloadIndex": "オーバーロード {index} が最も近い一致です", + "overloadNotAssignable": "\"{name}\" の 1 つ以上のオーバーロードが割り当て可能ではありません", + "overloadSignature": 
"オーバーロードシグネチャはここで定義されています", + "overriddenMethod": "オーバーライドされたメソッド", + "overriddenSymbol": "オーバーライドされたシンボル", + "overrideInvariantMismatch": "オーバーライドの型 \"{overrideType}\" が基本データ型 \"{baseType}\" と同じではありません", + "overrideIsInvariant": "変数は変更可能であるため、その型は不変です", + "overrideNoOverloadMatches": "オーバーライドのオーバーロード シグネチャが基本メソッドと互換性がありません", + "overrideNotClassMethod": "基本メソッドは classmethod として宣言されていますが、オーバーライドはされていません", + "overrideNotInstanceMethod": "基本メソッドはインスタンス メソッドとして宣言されていますが、オーバーライドは宣言されていません", + "overrideNotStaticMethod": "基本メソッドは staticmethod として宣言されていますが、オーバーライドは宣言されていません", + "overrideOverloadNoMatch": "オーバーライドは基本メソッドのすべてのオーバーロードを処理しません", + "overrideOverloadOrder": "オーバーライド メソッドのオーバーロードは、基本メソッドと同じ順序にする必要があります", + "overrideParamKeywordNoDefault": "キーワード パラメーター \"{name}\" の不一致: 基本パラメーターに既定の引数値があり、オーバーライド パラメーターにはありません", + "overrideParamKeywordType": "キーワード パラメーター \"{name}\" の型が一致しません: 基本パラメーターは型 \"{baseType}\"、オーバーライド パラメーターは型 \"{overrideType}\" です", + "overrideParamName": "パラメーター {index} 名が一致しません: ベース パラメーターの名前は \"{baseName}\"、オーバーライド パラメーターは \"{overrideName}\" です", + "overrideParamNameExtra": "パラメーター \"{name}\" が ベース に見つかりません", + "overrideParamNameMissing": "パラメーター \"{name}\" がオーバーライドに見つかりません", + "overrideParamNamePositionOnly": "パラメーター {index} の不一致: ベース パラメーター \"{baseName}\" はキーワード パラメーターで、オーバーライド パラメーターは位置のみです", + "overrideParamNoDefault": "パラメーター {index} の不一致: 基本パラメーターに既定の引数値があり、オーバーライド パラメーターが指定されていません", + "overrideParamType": "パラメーター {index} の型が一致しません: 基本パラメーターは型 \"{baseType}\"、オーバーライド パラメーターは型 \"{overrideType}\" です", + "overridePositionalParamCount": "位置指定パラメーター数が一致しません。基本メソッドには {baseCount} がありますが、オーバーライドには {overrideCount} があります", + "overrideReturnType": "戻り値の型の不一致: 基本メソッドは型 \"{baseType}\" を返し、オーバーライドは型 \"{overrideType}\" を返します", + "overrideType": "基底クラスは型を \"{type}\" として定義します", + "paramAssignment": "パラメーター {index}: 型 \"{sourceType}\" は型 \"{destType}\" と互換性がありません", + "paramSpecMissingInOverride": "ParamSpec パラメーターが override メソッドに見つかりません", + "paramType": "パラメーターの型は 
\"{paramType}\" です", + "privateImportFromPyTypedSource": "代わりに \"{module}\" からインポートする", + "propertyAccessFromProtocolClass": "プロトコル クラス内で定義されたプロパティにクラス変数としてアクセスできない", + "propertyMethodIncompatible": "property メソッド \"{name}\" は互換性がありません", + "propertyMethodMissing": "property メソッド \"{name}\" がオーバーライドにありません", + "propertyMissingDeleter": "property \"{name}\" に定義された deleter がありません", + "propertyMissingSetter": "property \"{name}\" に定義された setter がありません", + "protocolIncompatible": "\"{sourceType}\" はプロトコル \"{destType}\" と互換性がありません", + "protocolMemberMissing": "\"{name}\" が存在しません", + "protocolRequiresRuntimeCheckable": "インスタンスとクラスのチェックで使用するには、Protocol クラスが @runtime_checkable である必要があります", + "protocolSourceIsNotConcrete": "\"{sourceType}\" は具象クラス型ではないため、型 \"{destType}\" に割り当てることはできません", + "protocolUnsafeOverlap": "\"{name}\" の属性の名前がプロトコルの名前と同じです", + "pyrightCommentIgnoreTip": "\"# pyright: ignore[<rule>]\" を使用して 1 行の診断を抑制します", + "readOnlyAttribute": "属性 \"{name}\" は読み取り専用です", + "seeClassDeclaration": "クラス宣言を参照してください", + "seeDeclaration": "宣言を参照してください", + "seeFunctionDeclaration": "関数の宣言を参照してください", + "seeMethodDeclaration": "メソッド宣言を参照してください", + "seeParameterDeclaration": "パラメーター宣言を参照してください", + "seeTypeAliasDeclaration": "型のエイリアス宣言を参照してください", + "seeVariableDeclaration": "変数宣言を参照してください", + "tupleAssignmentMismatch": "型 \"{type}\" はターゲット tuple と互換性がありません", + "tupleEntryTypeMismatch": "tuple エントリ {entry} の型が正しくありません", + "tupleSizeIndeterminateSrc": "Tuple のサイズが一致しません。{expected} が必要ですが、受け取りは不確定です", + "tupleSizeIndeterminateSrcDest": "Tuple のサイズが一致しません。{expected} 以上が必要ですが、受け取りは不確定です", + "tupleSizeMismatch": "tuple のサイズが一致しません。{expected} が必要ですが、{received} を受信しました", + "tupleSizeMismatchIndeterminateDest": "Tuple のサイズが一致しません。{expected} 以上が必要ですが、{received} を受信しました", + "typeAliasInstanceCheck": "\"type\" ステートメントで作成された型エイリアスは、インスタンスとクラスのチェックでは使用できません", + "typeAssignmentMismatch": "型 \"{sourceType}\" は型 \"{destType}\" に割り当てできません", + "typeBound": "型 \"{sourceType}\" は、型変数 \"{name}\" の上限 
\"{destType}\" に割り当てできません", + "typeConstrainedTypeVar": "型 \"{type}\" は制約付き型変数 \"{name}\" に割り当てできません", + "typeIncompatible": "\"{sourceType}\" は \"{destType}\" に割り当てできません", + "typeNotClass": "\"{type}\" はクラスではありません", + "typeNotStringLiteral": "\"{type}\" は文字列リテラルではありません", + "typeOfSymbol": "\"{name}\" の型は \"{type}\" です", + "typeParamSpec": "型 \"{type}\" は ParamSpec \"{name}\" と互換性がありません", + "typeUnsupported": "型 \"{type}\" はサポートされていません", + "typeVarDefaultOutOfScope": "型変数 \"{name}\" はスコープ内にありません", + "typeVarIsContravariant": "型パラメーター \"{name}\" は反変ですが、\"{sourceType}\" は \"{destType}\" のスーパータイプではありません", + "typeVarIsCovariant": "型パラメーター \"{name}\" は共変ですが、\"{sourceType}\" は \"{destType}\" のサブタイプではありません", + "typeVarIsInvariant": "型パラメーター \"{name}\" は不変ですが、\"{sourceType}\" は \"{destType}\" と同じではありません", + "typeVarNotAllowed": "TypeVar は、インスタンスまたはクラスのチェックには使用できません", + "typeVarTupleRequiresKnownLength": "TypeVarTuple を不明な長さの tuple にバインドすることはできません", + "typeVarUnnecessarySuggestion": "代わりに {type} を使用してください", + "typeVarUnsolvableRemedy": "引数が指定されていない場合に戻り値の型を指定するオーバーロードを指定します", + "typeVarsMissing": "型変数がありません: {names}", + "typedDictBaseClass": "クラス \"{type}\" は TypedDict ではありません", + "typedDictClassNotAllowed": "TypedDict クラスはインスタンスまたはクラスのチェックには使用できません", + "typedDictClosedExtraNotAllowed": "アイテム \"{name}\" を追加できません", + "typedDictClosedExtraTypeMismatch": "型 \"{type}\" のアイテム \"{name}\" を追加できません", + "typedDictClosedFieldNotReadOnly": "項目 \"{name}\" は ReadOnly である必要があるため、追加できません", + "typedDictClosedFieldNotRequired": "アイテム \"{name}\" を追加できません。これは NotRequired である必要があるためです。", + "typedDictExtraFieldNotAllowed": "\"{name}\" は \"{type}\" に存在しません", + "typedDictExtraFieldTypeMismatch": "\"{name}\" の型は、\"{type}\" の \"extra_items\" 型と互換性がありません", + "typedDictFieldMissing": "\"{name}\" が \"{type}\" に見つかりません", + "typedDictFieldNotReadOnly": "\"{name}\" は \"{type}\" では読み取り専用ではありません", + "typedDictFieldNotRequired": "\"{name}\" は \"{type}\" には必要ありません", + "typedDictFieldRequired": "\"{name}\" は 
\"{type}\" に必要です", + "typedDictFieldTypeMismatch": "型 \"{type}\" は、アイテム \"{name}\" に割り当てできません", + "typedDictFieldUndefined": "\"{name}\" は型 \"{type}\" の未定義のアイテムです", + "typedDictKeyAccess": "[\"{name}\"] を使用して TypedDict の項目を参照する", + "typedDictNotAllowed": "TypedDict はインスタンスまたはクラスのチェックには使用できません", + "unhashableType": "型 \"{type}\" はハッシュ可能ではありません", + "uninitializedAbstractVariable": "インスタンス変数 \"{name}\" は抽象基本クラス \"{classType}\" で定義されていますが、初期化されていません", + "unreachableExcept": "\"{exceptionType}\" は \"{parentType}\" のサブクラスです", + "useDictInstead": "辞書の種類を示すには、dict[T1, T2] を使用します", + "useListInstead": "list[T] を使用して list 型を示すか、T1 | T2 を使用して union 型を示します", + "useTupleInstead": "tuple[T1, ..., Tn] を使用して tuple 型を示すか、T1 | T2 を使用して union 型を示します", + "useTypeInstead": "代わりに type[T] を使用する", + "varianceMismatchForClass": "型引数 \"{typeVarName}\" の分散は、基底クラス \"{className}\" と互換性がありません", + "varianceMismatchForTypeAlias": "型引数 \"{typeVarName}\" の分散は \"{typeAliasParam}\" と互換性がありません" + }, + "Service": { + "longOperation": "ワークスペース ソース ファイルの列挙に時間がかかっています。代わりにサブフォルダーを開く方法を検討してください。[詳細情報](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.ko.json b/python-parser/packages/pyright-internal/src/localization/package.nls.ko.json new file mode 100644 index 00000000..81e5172e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.ko.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "형식 Stub 만들기", + "createTypeStubFor": "\"{moduleName}\"에 대한 형식 Stub 만들기", + "executingCommand": "명령 실행", + "filesToAnalyzeCount": "분석할 파일 {count}개", + "filesToAnalyzeOne": "분석할 파일 1개", + "findingReferences": "참조 찾기", + "organizeImports": "가져오기 구성" + }, + "Completion": { + "autoImportDetail": "자동 가져오기", + "indexValueDetail": "인덱스 값" + }, + "Diagnostic": { + "abstractMethodInvocation": "메서드 \"{method}\"은(는) 추상적이고 구현되지 않았으므로 호출할 수 없습니다.", + "annotatedMetadataInconsistent": "주석이 추가된 \"{metadataType}\" 
메타데이터 형식이 \"{type}\" 형식과 호환되지 않습니다.", + "annotatedParamCountMismatch": "매개 변수 주석 개수가 일치하지 않습니다. {expected}이)(가) 필요하지만 {received}을(를) 받았습니다.", + "annotatedTypeArgMissing": "\"Annotated\"에 대해 하나의 형식 인수와 하나 이상의 주석이 필요합니다.", + "annotationBytesString": "형식 식은 바이트 문자열 리터럴을 사용할 수 없습니다.", + "annotationFormatString": "형식 식은 형식 문자열 리터럴(f 문자열)을 사용할 수 없습니다.", + "annotationNotSupported": "이 문에는 형식 주석이 지원되지 않습니다.", + "annotationRawString": "형식 식은 원시 문자열 리터럴을 사용할 수 없습니다.", + "annotationSpansStrings": "형식 식은 여러 문자열 리터럴에 걸쳐 사용할 수 없습니다.", + "annotationStringEscape": "형식 식에는 이스케이프 문자를 포함할 수 없습니다.", + "annotationTemplateString": "형식 식은 템플릿 문자열 리터럴(t-strings)을 사용할 수 없습니다.", + "argAssignment": "\"{argType}\" 형식의 인수를 \"{paramType}\" 형식의 매개 변수에 할당할 수 없습니다.", + "argAssignmentFunction": "\"{argType}\" 형식의 인수를 \"{functionName}\" 함수의 \"{paramType}\" 형식의 매개 변수에 할당할 수 없습니다.", + "argAssignmentParam": "\"{argType}\" 형식의 인수를 \"{paramType}\" 형식의 \"{paramName}\" 매개 변수에 할당할 수 없습니다.", + "argAssignmentParamFunction": "\"{argType}\" 형식의 인수를 \"{functionName}\" 함수에서 \"{paramType}\" 형식의 \"{paramName}\" 매개 변수에 할당할 수 없습니다.", + "argMissingForParam": "매개 변수 {name}에 대한 인수가 없습니다.", + "argMissingForParams": "매개 변수 {names}에 대한 인수가 없습니다.", + "argMorePositionalExpectedCount": "{expected}개 이상의 위치 인수가 필요합니다.", + "argMorePositionalExpectedOne": "1개의 위치 인수가 더 필요합니다.", + "argPositional": "위치 인수가 필요합니다.", + "argPositionalExpectedCount": "{expected} 위치 인수가 필요합니다.", + "argPositionalExpectedOne": "1개의 위치 인수가 필요합니다.", + "argTypePartiallyUnknown": "인수 형식을 부분적으로 알 수 없습니다.", + "argTypeUnknown": "인수 형식을 알 수 없습니다.", + "assertAlwaysTrue": "어설션 식은 항상 true로 평가됩니다.", + "assertTypeArgs": "\"assert_type\"에는 두 개의 위치 인수가 필요합니다.", + "assertTypeTypeMismatch": "\"assert_type\" 불일치: \"{expected}\"이(가) 필요하지만 \"{received}\"을(를) 받았습니다.", + "assignmentExprComprehension": "‘{name}’ 할당 식 대상은 대상에 대한 이해력과 같은 이름을 사용할 수 없습니다.", + "assignmentExprContext": "할당 식은 모듈, 함수 또는 람다 내에 있어야 합니다.", + "assignmentExprInSubscript": "아래 첨자 내의 할당 식은 Python 3.10 이상에서만 
지원됩니다.", + "assignmentInProtocol": "Protocol 클래스 내의 인스턴스 또는 클래스 변수는 클래스 본문 내에서 명시적으로 선언해야 합니다.", + "assignmentTargetExpr": "식은 할당 대상이 될 수 없습니다.", + "asyncNotInAsyncFunction": "async 함수 외부에서는 “async”가 허용되지 않습니다.", + "awaitIllegal": "\"await\"를 사용하려면 Python 3.5 이상이 필요합니다.", + "awaitNotAllowed": "형식 식은 \"await\"를 사용할 수 없습니다.", + "awaitNotInAsync": "\"await\" allowed only within async function", + "backticksIllegal": "백틱으로 묶인 식은 Python 3.x에서 지원되지 않습니다. 대신 repr 사용", + "baseClassCircular": "클래스는 스스로에서 파생될 수 없습니다.", + "baseClassFinal": "기본 클래스 \"{type}\"이(가) final로 표시되어 서브클래스할 수 없습니다.", + "baseClassIncompatible": "{type}의 기본 클래스는 상호 호환되지 않습니다.", + "baseClassInvalid": "클래스에 대한 인수는 기본 클래스여야 합니다.", + "baseClassMethodTypeIncompatible": "\"{classType}\" 클래스의 기본 클래스가 호환되지 않는 방식으로 \"{name}\" 메서드를 정의합니다.", + "baseClassUnknown": "기본 클래스 형식을 알 수 없으므로 파생 클래스의 형식이 모호합니다.", + "baseClassVariableTypeIncompatible": "\"{classType}\" 클래스의 기본 클래스가 \"{name}\" 변수를 호환되지 않는 방식으로 정의합니다.", + "binaryOperationNotAllowed": "형식 식에는 이항 연산자를 사용할 수 없습니다.", + "bindParamMissing": "\"self\" 또는 \"cls\" 매개 변수가 누락되었기 때문에 메서드 \"{methodName}\"을(를) 바인딩할 수 없습니다.", + "bindTypeMismatch": "‘{type}’을(를) 매개 변수 ‘{paramName}’에 할당할 수 없으므로 ‘{methodName}’ 메서드를 바인딩할 수 없습니다.", + "breakInExceptionGroup": "\"except*\" 블록에는 \"break\"를 사용할 수 없습니다", + "breakOutsideLoop": "‘break’는 루프 내에서만 사용할 수 있습니다.", + "bytesUnsupportedEscape": "bytes 리터럴에 지원되지 않는 이스케이프 시퀀스가 있습니다.", + "callableExtraArgs": "\"Callable\"에 두 개의 형식 인수만 필요합니다.", + "callableFirstArg": "매개 변수 형식 목록 또는 \"...\"가 필요합니다.", + "callableNotInstantiable": "\"{type}\" 형식을 인스턴스화할 수 없습니다.", + "callableSecondArg": "반환 형식이 \"Callable\"에 대한 두 번째 형식 인수로 필요합니다.", + "casePatternIsIrrefutable": "되돌릴 수 없는 패턴은 마지막 case 문에만 사용할 수 있습니다.", + "classAlreadySpecialized": "\"{type}\" 형식이 이미 특수화되어 있습니다.", + "classDecoratorTypeUnknown": "형식화되지 않은 클래스 데코레이터는 클래스 형식을 모호하게 합니다. 
데코레이터를 무시합니다.", + "classDefinitionCycle": "‘{name}’에 대한 클래스 정의가 스스로에 종속됩니다.", + "classGetItemClsParam": "__class_getitem__ 재정의는 \"cls\" 매개 변수를 사용해야 합니다.", + "classMethodClsParam": "클래스 메서드는 ‘cls’ 매개 변수를 사용해야 합니다.", + "classNotRuntimeSubscriptable": "클래스 \"{name}\"에 대한 첨자는 런타임 예외를 생성합니다. 형식 식을 따옴표로 묶습니다.", + "classPatternBuiltInArgPositional": "클래스 패턴은 위치 하위 패턴만 허용합니다.", + "classPatternNewType": "\"{type}\"은 NewType을 사용하여 정의되어 있으므로 클래스 패턴에서 사용할 수 없습니다.", + "classPatternPositionalArgCount": "클래스 \"{type}\"에 대한 위치 패턴이 너무 많습니다. {expected}이(가) 필요하지만 {received}을(를) 받았습니다.", + "classPatternTypeAlias": "‘{type}’은(는) 특수 형식 별칭이므로 클래스 패턴에서 사용할 수 없습니다.", + "classPropertyDeprecated": "클래스 속성은 Python 3.11에서 더 이상 사용되지 않으며 Python 3.13에서 지원되지 않습니다.", + "classTypeParametersIllegal": "클래스 형식 매개 변수 구문에는 Python 3.12 이상이 필요합니다.", + "classVarFirstArgMissing": "‘ClassVar’ 뒤에 형식 인수가 필요합니다.", + "classVarNotAllowed": "이 컨텍스트에서는 \"ClassVar\"를 사용할 수 없습니다.", + "classVarOverridesInstanceVar": "클래스 변수 \"{name}\"이(가) \"{className}\" 클래스에서 같은 이름의 인스턴스 변수를 재정의합니다.", + "classVarTooManyArgs": "‘ClassVar’ 뒤에는 형식 인수가 하나만 필요합니다.", + "classVarWithTypeVar": "‘ClassVar’ 형식에는 형식 변수를 포함할 수 없습니다.", + "clsSelfParamTypeMismatch": "‘{name}’ 매개 변수의 형식은 해당 ‘{classType}’ 클래스의 상위 형식이어야 합니다.", + "codeTooComplexToAnalyze": "코드가 너무 복잡하여 분석할 수 없습니다. 하위 경로로 리팩터링하거나 조건부 코드 경로를 줄여 복잡성 감소", + "collectionAliasInstantiation": "‘{type}’ 형식을 인스턴스화할 수 없습니다. 
대신 ‘{alias}’을(를) 사용하세요.", + "comparisonAlwaysFalse": "\"{leftType}\" 및 \"{rightType}\" 형식이 겹치지 않으므로 조건은 항상 False로 평가됩니다.", + "comparisonAlwaysTrue": "\"{leftType}\" 및 \"{rightType}\" 형식이 겹치지 않으므로 조건은 항상 True로 평가됩니다.", + "comprehensionInDict": "이해력은 다른 사전 항목과 함께 사용할 수 없습니다.", + "comprehensionInSet": "이해력은 다른 set 항목과 함께 사용할 수 없습니다.", + "concatenateContext": "이 컨텍스트에서는 \"Concatenate\"를 사용할 수 없습니다.", + "concatenateParamSpecMissing": "\"Concatenate\"의 마지막 형식 인수는 ParamSpec 또는 \"...\"이어야 합니다.", + "concatenateTypeArgsMissing": "\"Concatenate\"에는 적어도 두 개의 형식 인수가 필요합니다.", + "conditionalOperandInvalid": "’{type}’ 형식의 조건부 피연산자입니다.", + "constantRedefinition": "‘{name}’은(는) 대문자이므로 상수이고 다시 정의할 수 없습니다.", + "constructorParametersMismatch": "‘{classType}’ 클래스에서 __new__ 서명과 __init__가 불일치합니다.", + "containmentAlwaysFalse": "‘{leftType}’ 및 ‘{rightType}’ 형식이 겹치지 않으므로 식은 항상 False로 평가됩니다.", + "containmentAlwaysTrue": "‘{leftType}’ 및 ‘{rightType}’ 형식이 겹치지 않으므로 식은 항상 True로 평가됩니다.", + "continueInExceptionGroup": "\"except*\" 블록에는 \"continue\"를 사용할 수 없습니다", + "continueOutsideLoop": "\"continue\"는 루프 내에서만 사용할 수 있습니다.", + "coroutineInConditionalExpression": "조건식은 항상 True로 평가되는 코루틴을 참조합니다.", + "dataClassBaseClassFrozen": "고정되지 않은 클래스는 고정된 클래스에서 상속할 수 없습니다.", + "dataClassBaseClassNotFrozen": "고정 클래스는 고정되지 않은 클래스에서 상속할 수 없습니다.", + "dataClassConverterFunction": "\"{argType}\" 형식의 인수는 \"{fieldType}\" 형식의 \"{fieldName}\" 필드에 유효한 변환기가 아닙니다.", + "dataClassConverterOverloads": "\"{funcName}\"의 오버로드는 \"{fieldType}\" 형식의 \"{fieldName}\" 필드에 유효한 변환기가 아닙니다.", + "dataClassFieldInheritedDefault": "\"{fieldName}\"이(가) 같은 이름의 필드를 재정의하지만 기본값이 없음", + "dataClassFieldWithDefault": "기본값이 없는 필드는 기본값이 있는 필드 뒤에 나타날 수 없습니다.", + "dataClassFieldWithPrivateName": "데이터 클래스 필드는 프라이빗 이름을 사용할 수 없습니다.", + "dataClassFieldWithoutAnnotation": "형식 주석이 없는 데이터 클래스 필드를 사용하면 런타임 예외가 발생합니다.", + "dataClassPostInitParamCount": "데이터 클래스 __post_init__의 잘못된 매개 변수 수입니다. 
InitVar 필드 수가 {expected}개입니다.", + "dataClassPostInitType": "데이터 클래스 __post_init__ 메서드 매개 변수 형식이 필드 \"{fieldName}\"에 대해 일치하지 않습니다.", + "dataClassSlotsOverwrite": "__slots__ 클래스에 이미 정의되어 있습니다.", + "dataClassTransformExpectedBoolLiteral": "정적으로 True 또는 False로 계산되는 식이 필요합니다.", + "dataClassTransformFieldSpecifier": "클래스 또는 함수의 tuple이 필요하지만 “{type}” 형식을 받았습니다.", + "dataClassTransformPositionalParam": "\"dataclass_transform\"에 대한 모든 인수는 키워드 인수여야 합니다.", + "dataClassTransformUnknownArgument": "dataclass_transform은 \"{name}\" 인수를 지원하지 않습니다.", + "dataProtocolInSubclassCheck": "데이터 프로토콜(비 메서드 특성 포함)은 issubclass 호출에서 허용되지 않습니다.", + "declaredReturnTypePartiallyUnknown": "선언된 반환 형식 \"{returnType}\"을(를) 부분적으로 알 수 없습니다.", + "declaredReturnTypeUnknown": "선언된 반환 형식을 알 수 없습니다.", + "defaultValueContainsCall": "매개 변수 기본값 식 내에서는 함수 호출 및 변경 가능한 개체를 사용할 수 없습니다.", + "defaultValueNotAllowed": "\"*\" 또는 \"**\"가 있는 매개 변수는 기본값을 가질 수 없습니다.", + "delTargetExpr": "식을 삭제할 수 없습니다.", + "deprecatedClass": "‘{name}’ 클래스는 사용되지 않습니다.", + "deprecatedConstructor": "클래스 \"{name}\"의 생성자는 더 이상 사용되지 않습니다.", + "deprecatedDescriptorDeleter": "\"{name}\" 설명자에 대한 \"____delete____\" 메서드는 사용되지 않습니다.", + "deprecatedDescriptorGetter": "\"{name}\" 설명자에 대한 \"__get__\" 메서드는 사용되지 않습니다.", + "deprecatedDescriptorSetter": "\"{name}\" 설명자에 대한 \"__set__\" 메서드는 사용되지 않습니다.", + "deprecatedFunction": "\"{name}\" 함수는 더 이상 사용되지 않습니다.", + "deprecatedMethod": "\"{className}\" 클래스의 \"{name}\" 메서드는 더 이상 사용되지 않습니다.", + "deprecatedPropertyDeleter": "\"{name}\" property에 대한 deleter는 사용되지 않습니다.", + "deprecatedPropertyGetter": "\"{name}\" property에 대한 getter는 사용되지 않습니다.", + "deprecatedPropertySetter": "\"{name}\" property에 대한 setter는 사용되지 않습니다.", + "deprecatedType": "이 형식은 Python {version}부터 사용되지 않습니다. 
대신 \"{replacement}\"을(를) 사용하세요.", + "dictExpandIllegalInComprehension": "사전 확장은 이해에 사용할 수 없습니다.", + "dictInAnnotation": "형식 식에는 사전 식을 사용할 수 없습니다.", + "dictKeyValuePairs": "사전 항목은 키/값 쌍을 포함해야 합니다.", + "dictUnpackIsNotMapping": "사전 압축 풀기 연산자에 대한 매핑이 필요합니다.", + "dunderAllSymbolNotPresent": "\"{name}\"이(가) __all__에 지정되었지만 모듈에 없습니다.", + "duplicateArgsParam": "\"*\" 매개 변수 하나만 허용됨", + "duplicateBaseClass": "중복 기본 클래스는 허용되지 않습니다.", + "duplicateCapturePatternTarget": "‘{name}’ 캡처 대상이 동일한 패턴 내에 두 번 이상 나타날 수 없습니다.", + "duplicateCatchAll": "하나의 catch-all except 절만 허용됨", + "duplicateEnumMember": "Enum 멤버 \"{name}\"이(가) 이미 선언되었습니다.", + "duplicateGenericAndProtocolBase": "하나의 Generic[...] 또는 Protocol[...] 기본 클래스만 허용됩니다.", + "duplicateImport": "\"{importName}\"을(를) 두 번 이상 가져왔습니다.", + "duplicateKeywordOnly": "\"*\" 구분 기호는 하나만 사용할 수 있습니다.", + "duplicateKwargsParam": "\"**\" 매개 변수 하나만 허용됨", + "duplicateParam": "매개 변수 \"{name}\"이(가) 중복되었습니다.", + "duplicatePositionOnly": "‘/’ 매개 변수 하나민 허용됩니다.", + "duplicateStarPattern": "패턴 시퀀스에는 ‘*’ 패턴을 하나만 사용할 수 있습니다.", + "duplicateStarStarPattern": "‘**’ 항목 하나만 허용됩니다.", + "duplicateUnpack": "list에서는 한 개의 압축 풀기 작업만 허용됩니다.", + "ellipsisAfterUnpacked": "\"...\" 압축을 풀고 있는 TypeVarTuple 또는 tuple과 함께 사용할 수 없습니다.", + "ellipsisContext": "\"...\"는 이 컨텍스트에서는 허용되지 않습니다.", + "ellipsisSecondArg": "\"...\"는 두 인수 중 두 번째 인수로만 허용됩니다.", + "enumClassOverride": "Enum 클래스 \"{name}\"은(는) final 클래스이며 서브클래스할 수 없습니다.", + "enumMemberDelete": "Enum 멤버 \"{name}\"을(를) 삭제할 수 없음", + "enumMemberSet": "Enum 멤버 \"{name}\"을(를) 할당할 수 없음", + "enumMemberTypeAnnotation": "Type annotations are not allowed for enum members", + "exceptGroupMismatch": "Try 문에는 \"except\"와 \"except*\"를 둘 다 포함할 수 없습니다", + "exceptGroupRequiresType": "예외 그룹 구문(\"except*\")에는 예외 형식이 필요합니다", + "exceptRequiresParens": "Python 3.14 이전에는 여러 예외 형식은 괄호로 묶어야 합니다.", + "exceptWithAsRequiresParens": "\"as\"를 사용할 때 여러 예외 형식은 괄호로 묶어야 합니다.", + "exceptionGroupIncompatible": "예외 그룹 구문(\"except*\")에는 Python 3.11 이상이 필요합니다.", + 
"exceptionGroupTypeIncorrect": "except*의 예외 형식은 BaseGroupException에서 파생될 수 없습니다.", + "exceptionTypeIncorrect": "‘{type}’은 BaseException에서 파생되지 않습니다.", + "exceptionTypeNotClass": "\"{type}\"은(는) 올바른 예외 클래스가 아닙니다.", + "exceptionTypeNotInstantiable": "예외 형식 \"{type}\"에 대한 생성자에는 하나 이상의 인수가 필요합니다.", + "expectedAfterDecorator": "데코레이터 다음에 함수 또는 클래스 선언이 필요합니다.", + "expectedArrow": "\"->\" 다음에 반환 형식 주석이 와야 합니다.", + "expectedAsAfterException": "예외 형식 뒤에 ‘as’가 필요합니다.", + "expectedAssignRightHandExpr": "\"=\" 오른쪽에 식이 필요합니다.", + "expectedBinaryRightHandExpr": "연산자 오른쪽에 식이 필요합니다.", + "expectedBoolLiteral": "True 또는 False가 필요합니다.", + "expectedCase": "\"case\" 문이 필요합니다.", + "expectedClassName": "클래스 이름이 필요합니다.", + "expectedCloseBrace": "\"{\"가 닫혀 있지 않습니다.", + "expectedCloseBracket": "\"[{0}\"이(가) 닫혀 있지 않습니다.", + "expectedCloseParen": "\"(\"가 닫혀 있지 않음", + "expectedColon": "':'가 필요합니다.", + "expectedComplexNumberLiteral": "패턴 일치에 복소수 리터럴이 필요합니다.", + "expectedDecoratorExpr": "Python 3.9 이전의 데코레이터에는 식 형식이 지원되지 않습니다.", + "expectedDecoratorName": "데코레이터 이름이 필요합니다.", + "expectedDecoratorNewline": "데코레이터 끝에 새 줄이 필요합니다.", + "expectedDelExpr": "\"del\" 뒤에 식이 필요합니다.", + "expectedElse": "\"else\"가 필요합니다.", + "expectedEquals": "\"=\"가 필요합니다.", + "expectedExceptionClass": "잘못된 예외 클래스 또는 개체", + "expectedExceptionObj": "필요한 예외 개체, 예외 클래스 또는 None", + "expectedExpr": "식이 필요합니다.", + "expectedFunctionAfterAsync": "‘async’ 다음에 함수 정의가 필요합니다.", + "expectedFunctionName": "\"def\" 뒤에 함수 이름이 필요합니다.", + "expectedIdentifier": "식별자가 필요합니다.", + "expectedImport": "\"import\"가 필요합니다.", + "expectedImportAlias": "\"as\" 뒤에 기호가 필요합니다.", + "expectedImportSymbols": "\"import\" 뒤에 하나 이상의 기호 이름이 필요합니다.", + "expectedIn": "\"in\"이 필요합니다.", + "expectedInExpr": "\"in\" 뒤에 식이 필요합니다.", + "expectedIndentedBlock": "들여쓰기 블록이 필요합니다.", + "expectedMemberName": "\".\" 뒤에 특성 이름 필요", + "expectedModuleName": "필요한 모듈 이름", + "expectedNameAfterAs": "‘as’ 뒤에는 기호 이름이 와야 합니다.", + "expectedNamedParameter": "키워드 매개 변수는 \"*\"를 따라야 합니다.", + 
"expectedNewline": "줄 바꿈이 필요합니다.", + "expectedNewlineOrSemicolon": "문은 줄 바꿈 또는 세미콜론으로 구분해야 합니다.", + "expectedOpenParen": "’(‘가 필요합니다.", + "expectedParamName": "매개 변수 이름이 필요합니다.", + "expectedPatternExpr": "패턴 식이 필요합니다.", + "expectedPatternSubjectExpr": "패턴 제목 식이 필요합니다.", + "expectedPatternValue": "\"a.b\" 형식의 패턴 값 식이 필요합니다.", + "expectedReturnExpr": "\"return\" 뒤에 식이 필요합니다.", + "expectedSliceIndex": "인덱스 또는 조각 식이 필요합니다.", + "expectedTypeNotString": "형식이 필요하지만 문자열 리터럴을 받았습니다.", + "expectedTypeParameterName": "형식 매개 변수 이름이 필요합니다.", + "expectedYieldExpr": "yield 문에 식이 필요합니다.", + "finalClassIsAbstract": "\"{type}\" 클래스가 final로 표시되어 있으며 모든 추상 기호를 구현해야 합니다.", + "finalContext": "\"Final\"은 이 컨텍스트에서 허용되지 않습니다.", + "finalInLoop": "루프 내에는 \"Final\" 변수를 할당할 수 없습니다.", + "finalMethodOverride": "\"{name}\" 메서드는 \"{className}\" 클래스에 정의된 final 메서드를 재정의할 수 없습니다.", + "finalNonMethod": "함수 \"{name}\"은(는) 메서드가 아니므로 @final로 표시할 수 없습니다.", + "finalReassigned": "‘{name}’이 Final로 선언되었으므로 다시 할당할 수 없습니다.", + "finalRedeclaration": "\"{name}\"이(가) 이전에 Final로 선언되었습니다.", + "finalRedeclarationBySubclass": "부모 클래스 \"{className}\"이(가) Final로 선언하므로 \"{name}\"을(를) 다시 선언할 수 없습니다.", + "finalTooManyArgs": "\"Final\" 뒤에 단일 형식 인수가 필요합니다.", + "finalUnassigned": "\"{name}\"이(가) Final로 선언되었지만 값이 할당되지 않았습니다.", + "finallyBreak": "\"break\"를 사용하여 \"finally\" 블록을 종료할 수 없습니다", + "finallyContinue": "\"continue\"를 사용하여 \"finally\" 블록을 종료할 수 없습니다", + "finallyReturn": "\"return\"을 사용하여 \"finally\" 블록을 종료할 수 없습니다", + "formatStringBrace": "f-string 리터럴 내에서는 단일 닫는 중괄호를 사용할 수 없습니다. 
이중 닫는 중괄호를 사용하세요.", + "formatStringBytes": "형식 문자열 리터럴(f-strings)은 이진일 수 없습니다.", + "formatStringDebuggingIllegal": "F-string 디버깅 지정자인 ‘=’는 Python 3.8 이상이 필요합니다.", + "formatStringEscape": "Python 3.12 이전의 f-string의 식 부분에 이스케이프 시퀀스(백슬래시)를 사용할 수 없습니다.", + "formatStringExpectedConversion": "f-string에서 \"!\" 뒤에 변환 지정자가 필요합니다.", + "formatStringIllegal": "형식 문자열 리터럴(f-strings)은 Python 3.6 이상이 필요합니다.", + "formatStringInPattern": "패턴에서 형식 문자열을 사용할 수 없습니다.", + "formatStringNestedFormatSpecifier": "형식 문자열 지정자 내에 너무 깊게 중첩된 식", + "formatStringNestedQuote": "f-string 내에 중첩된 문자열은 Python 3.12 이전의 f-string과 같은 따옴표를 사용할 수 없습니다.", + "formatStringTemplate": "서식 문자열 리터럴(f-strings)은 템플릿 문자열(t-strings)일 수 없습니다.", + "formatStringUnicode": "형식 문자열 리터럴(f-문자열)은 유니코드일 수 없습니다.", + "formatStringUnterminated": "f-string에 종결되지 않은 식이 있습니다. ‘}‘가 필요합니다.", + "functionDecoratorTypeUnknown": "형식화되지 않은 함수 데코레이터는 함수 형식을 모호하게 합니다. 데코레이터 무시", + "functionInConditionalExpression": "조건식은 항상 True로 평가되는 함수를 참조합니다.", + "functionTypeParametersIllegal": "함수 형식 매개 변수 구문에는 Python 3.12 이상이 필요합니다.", + "futureImportLocationNotAllowed": "__future__ 가져오기는 파일의 시작 부분에 있어야 합니다.", + "generatorAsyncReturnType": "async 생성기 함수의 반환 형식은 \"AsyncGenerator[{yieldType}, Any]\"와 호환되어야 합니다.", + "generatorNotParenthesized": "생성기 식은 단독 인수가 아닌 경우 괄호로 지정해야 합니다.", + "generatorSyncReturnType": "생성기 함수의 반환 형식은 \"Generator[{yieldType}, Any, Any]\"와 호환되어야 합니다.", + "genericBaseClassNotAllowed": "\"Generic\" 기본 클래스는 형식 매개 변수 구문과 함께 사용할 수 없습니다.", + "genericClassAssigned": "제네릭 클래스 형식을 할당할 수 없습니다.", + "genericClassDeleted": "제네릭 클래스 형식을 삭제할 수 없습니다.", + "genericInstanceVariableAccess": "클래스를 통한 제네릭 인스턴스 변수에 대한 액세스가 모호합니다.", + "genericNotAllowed": "이 컨텍스트에서 \"Generic\"이 잘못되었습니다.", + "genericTypeAliasBoundTypeVar": "클래스 내의 제네릭 형식 별칭은 바인딩된 형식 변수 {names}을(를) 사용할 수 없습니다.", + "genericTypeArgMissing": "\"Generic\"에는 하나 이상의 형식 인수가 필요합니다.", + "genericTypeArgTypeVar": "\"Generic\"의 형식 인수는 형식 변수여야 합니다.", + "genericTypeArgUnique": "\"Generic\"의 형식 인수는 고유해야 
합니다.", + "globalReassignment": "global 선언 전에 \"{name}\"이(가) 할당되었습니다.", + "globalRedefinition": "\"{name}\"이(가) 이미 global로 선언되었습니다.", + "implicitStringConcat": "암시적 문자열 연결이 허용되지 않습니다.", + "importCycleDetected": "가져오기 체인에서 순환이 검색되었습니다.", + "importDepthExceeded": "가져오기 체인 깊이가 {depth}을(를) 초과했습니다.", + "importResolveFailure": "가져오기 \"{importName}\"을(를) 확인할 수 없습니다.", + "importSourceResolveFailure": "원본에서 가져오기 \"{importName}\"을(를) 확인할 수 없습니다.", + "importSymbolUnknown": "\"{name}\"은(는) 알 수 없는 가져오기 기호입니다.", + "incompatibleMethodOverride": "\"{name}\" 메서드가 호환되지 않는 방식으로 \"{className}\" 클래스를 재정의합니다.", + "inconsistentIndent": "들여쓰기하지 않은 양이 이전 들여쓰기와 일치하지 않습니다.", + "inconsistentTabs": "들여쓰기에서 탭 및 공백의 일관성 없는 사용", + "initMethodSelfParamTypeVar": "\"__init__\" 메서드의 \"self\" 매개 변수에 대한 형식 주석에는 클래스 범위 형식 변수를 포함할 수 없음", + "initMustReturnNone": "\"__init__\"의 반환 형식은 None이어야 합니다.", + "initSubclassCallFailed": "__init_subclass__ 메서드의 키워드 인수가 잘못됨", + "initSubclassClsParam": "__init_subclass__ 재정의는 \"cls\" 매개 변수를 사용해야 합니다.", + "initVarNotAllowed": "이 컨텍스트에서는 \"InitVar\"가 허용되지 않습니다.", + "instanceMethodSelfParam": "인스턴스 메서드는 \"self\" 매개 변수를 사용해야 합니다.", + "instanceVarOverridesClassVar": "‘{name}’ 인스턴스 변수가 ‘{className}’ 클래스에서 같은 이름의 클래스 변수를 재정의합니다.", + "instantiateAbstract": "'{type}' 추상 클래스를 인스턴스화할 수 없습니다.", + "instantiateProtocol": "Protocol 클래스 \"{type}\"을(를) 인스턴스화할 수 없습니다.", + "internalBindError": "파일 \"{file}\"을(를) 바인딩하는 동안 내부 오류가 발생했습니다. {message}", + "internalParseError": "파일 \"{file}\"을(를) 구문 분석하는 동안 내부 오류가 발생했습니다. {message}", + "internalTypeCheckingError": "파일 \"{file}\"의 형식을 확인하는 동안 내부 오류가 발생했습니다. 
{message}", + "invalidIdentifierChar": "식별자에 잘못된 문자가 있습니다.", + "invalidStubStatement": "형식 stub 파일 내에서는 문이 의미가 없습니다.", + "invalidTokenChars": "토큰에 잘못된 문자 ‘{text}’이(가) 있습니다.", + "isInstanceInvalidType": "\"isinstance\"에 대한 두 번째 인수는 클래스 또는 클래스의 tuple이어야 합니다.", + "isSubclassInvalidType": "\"issubclass\"에 대한 두 번째 인수는 클래스 또는 클래스의 tuple이어야 합니다.", + "keyValueInSet": "set 내에서 키/값 쌍을 사용할 수 없습니다.", + "keywordArgInTypeArgument": "키워드 인수는 형식 인수 목록에서 사용할 수 없습니다.", + "keywordOnlyAfterArgs": "키워드 전용 인수 구분 기호는 \"*\" 매개 변수 뒤에 사용할 수 없습니다.", + "keywordParameterMissing": "하나 이상의 키워드 매개 변수는 ’*‘ 매개 변수 뒤에 와야 합니다.", + "keywordSubscriptIllegal": "아래 첨자 내의 키워드 인수는 지원되지 않습니다.", + "lambdaReturnTypePartiallyUnknown": "람다의 반환 형식 \"{returnType}\"을(를) 부분적으로 알 수 없습니다.", + "lambdaReturnTypeUnknown": "람다의 반환 형식을 알 수 없습니다.", + "listAssignmentMismatch": "형식이 \"{type}\"인 식을 대상 목록에 할당할 수 없습니다.", + "listInAnnotation": "형식 식에는 List 식을 사용할 수 없습니다.", + "literalEmptyArgs": "‘Literal’ 뒤에 하나 이상의 형식 인수가 필요합니다.", + "literalNamedUnicodeEscape": "명명된 유니코드 이스케이프 시퀀스는 \"Literal\" 문자열 주석에서 지원되지 않습니다.", + "literalNotAllowed": "형식 인수가 없으면 이 컨텍스트에서 \"Literal\"을 사용할 수 없습니다.", + "literalNotCallable": "Literal 형식은 인스턴스화할 수 없습니다.", + "literalUnsupportedType": "\"Literal\"의 형식 인수는 None, 리터럴 값(int, bool, str 또는 bytes) 또는 enum 값이어야 합니다.", + "matchIncompatible": "Match 문에는 Python 3.10 이상이 필요합니다.", + "matchIsNotExhaustive": "match 문 내의 사례는 모든 값을 완전히 처리하지 않습니다.", + "maxParseDepthExceeded": "최대 구문 분석 깊이를 초과했습니다. 
식을 더 작은 하위 식으로 나누기", + "memberAccess": "\"{type}\" 클래스의 \"{name}\" 특성에 액세스할 수 없음", + "memberDelete": "\"{type}\" 클래스의 \"{name}\" 특성을 삭제할 수 없음", + "memberSet": "\"{type}\" 클래스의 \"{name}\" 특성에 할당할 수 없음", + "metaclassConflict": "파생 클래스의 메타클래스는 모든 기본 클래스의 메타클래스의 서브클래스여야 합니다.", + "metaclassDuplicate": "메타클래스를 하나만 제공할 수 있습니다.", + "metaclassIsGeneric": "메타클래스는 제네릭일 수 없습니다.", + "methodNotDefined": "\"{name}\" 메서드가 정의되지 않았습니다.", + "methodNotDefinedOnType": "\"{name}\" 메서드가 \"{type}\" 형식에 정의되지 않았습니다.", + "methodOrdering": "일관된 메서드 순서를 만들 수 없습니다.", + "methodOverridden": "‘{name}’은(는) ‘{className}’ 클래스에서 같은 이름의 메서드를 호환되지 않는 ‘{type}’ 형식으로 재정의합니다.", + "methodReturnsNonObject": "\"{name}\" 메서드가 개체를 반환하지 않습니다.", + "missingSuperCall": "\"{methodName}\" 메서드가 부모 클래스에서 같은 이름의 메서드를 호출하지 않습니다.", + "mixingBytesAndStr": "Bytes 및 str 값을 연결할 수 없습니다.", + "moduleAsType": "모듈은 형식으로 사용할 수 없습니다.", + "moduleNotCallable": "모듈을 호출할 수 없습니다.", + "moduleUnknownMember": "‘{memberName}’은(는) ‘{moduleName}’ 모듈의 알려진 특성이 아님", + "namedExceptAfterCatchAll": "명명된 except 절은 catch-all except 절 뒤에 나타날 수 없습니다.", + "namedParamAfterParamSpecArgs": "ParamSpec args 매개 변수 뒤에 키워드 매개 변수 \"{name}\"을(를) 시그니처에 표시할 수 없습니다.", + "namedTupleEmptyName": "명명된 tuple 내의 이름은 비워 둘 수 없습니다.", + "namedTupleEntryRedeclared": "부모 클래스 \"{className}\"이(가) 명명된 tuple이므로 \"{name}\"을(를) 재정의할 수 없습니다.", + "namedTupleFieldUnderscore": "Named tuple 필드 이름은 밑줄로 시작할 수 없습니다", + "namedTupleFirstArg": "명명된 tuple 클래스 이름이 첫 번째 인수로 필요합니다.", + "namedTupleMultipleInheritance": "NamedTuple을 사용한 여러 상속은 지원되지 않습니다.", + "namedTupleNameKeyword": "필드 이름은 키워드일 수 없습니다.", + "namedTupleNameType": "항목 이름 및 형식을 지정하는 2개 항목 tuple이 필요합니다.", + "namedTupleNameUnique": "명명된 tuple 내의 이름은 고유해야 합니다.", + "namedTupleNoTypes": "\"namedtuple\"은 tuple 항목에 대한 형식을 제공하지 않습니다. 
대신 \"NamedTuple\" 사용", + "namedTupleSecondArg": "두 번째 인수로 명명된 tuple 항목 list가 필요합니다.", + "newClsParam": "__new__ 재정의는 \"cls\" 매개 변수를 사용해야 합니다.", + "newTypeAnyOrUnknown": "NewType에 대한 두 번째 인수는 Any 또는 Unknown이 아닌 알려진 클래스여야 합니다.", + "newTypeBadName": "NewType의 첫 번째 인수는 문자열 리터럴이어야 합니다.", + "newTypeLiteral": "NewType은 Literal 형식과 함께 사용할 수 없습니다.", + "newTypeNameMismatch": "NewType은 이름이 같은 변수에 할당되어야 합니다.", + "newTypeNotAClass": "NewType에 대한 두 번째 인수로 클래스가 필요합니다.", + "newTypeParamCount": "NewType에는 두 개의 위치 인수가 필요합니다.", + "newTypeProtocolClass": "구조 형식(Protocol 또는 TypedDict 클래스)과 함께 NewType을 사용할 수 없습니다.", + "noOverload": "제공된 인수와 일치하는 \"{name}\"에 대한 오버로드가 없습니다.", + "noReturnContainsReturn": "선언된 return 형식이 \"NoReturn\"인 함수는 return 문을 포함할 수 없습니다.", + "noReturnContainsYield": "선언된 반환 형식이 \"NoReturn\"인 함수는 yield 문을 포함할 수 없습니다.", + "noReturnReturnsNone": "선언된 반환 형식이 \"NoReturn\"인 함수는 \"None\"을 반환할 수 없습니다.", + "nonDefaultAfterDefault": "기본값이 아닌 인수가 기본 인수를 따릅니다.", + "nonLocalInModule": "모듈 수준에서는 Nonlocal 선언을 사용할 수 없습니다.", + "nonLocalNoBinding": "No binding for nonlocal \"{name}\" found", + "nonLocalReassignment": "\"{name}\" is assigned before nonlocal declaration", + "nonLocalRedefinition": "\"{name}\"이(가) 이미 nonlocal로 선언되었습니다.", + "noneNotCallable": "‘None’ 유형의 개체를 호출할 수 없습니다.", + "noneNotIterable": "\"None\" 형식의 개체는 반복 가능한 값으로 사용할 수 없습니다.", + "noneNotSubscriptable": "’None’ 유형의 개체는 아래 첨자를 사용할 수 없습니다.", + "noneNotUsableWith": "Object of type \"None\" cannot be used with \"with\"", + "noneNotUsableWithAsync": "\"None\" 형식의 개체는 \"async with\"와 함께 사용할 수 없습니다.", + "noneOperator": "\"None\"에 대해 연산자 \"{operator}\"이(가) 지원되지 않습니다.", + "noneUnknownMember": "\"{name}\"은(는) \"None\"의 알려진 특성이 아님", + "nonlocalTypeParam": "Nonlocal 바인딩은 형식 매개변수 \"{name}\"에 사용할 수 없습니다.", + "notRequiredArgCount": "\"NotRequired\" 뒤에 단일 형식 인수가 필요합니다.", + "notRequiredNotInTypedDict": "이 컨텍스트에서는 \"NotRequired\"를 사용할 수 없습니다.", + "objectNotCallable": "\"{type}\" 형식의 개체를 호출할 수 없습니다.", + "obscuredClassDeclaration": 
"클래스 선언 \"{name}\"이(가) 같은 이름의 선언으로 가려져 있습니다.", + "obscuredFunctionDeclaration": "함수 선언 \"{name}\"이(가) 동일한 이름의 선언으로 가려집니다.", + "obscuredMethodDeclaration": "메서드 선언 \"{name}\"이(가) 동일한 이름의 선언으로 가려집니다.", + "obscuredParameterDeclaration": "매개 변수 선언 \"{name}\"이(가) 동일한 이름의 선언으로 가려집니다.", + "obscuredTypeAliasDeclaration": "형식 별칭 선언 \"{name}\"이(가) 동일한 이름의 선언으로 가려집니다.", + "obscuredVariableDeclaration": "\"{name}\" 선언이 같은 이름의 선언으로 가려집니다.", + "operatorLessOrGreaterDeprecated": "\"<>\" 연산자는 Python 3에서 지원되지 않습니다. 대신 \"!=\"를 사용하세요.", + "optionalExtraArgs": "\"Optional\" 뒤에 1개의 형식 인수가 필요합니다.", + "orPatternIrrefutable": "되돌릴 수 없는 패턴은 ‘or’ 패턴의 마지막 하위 페이지로만 허용됩니다.", + "orPatternMissingName": "\"or\" 패턴 내의 모든 하위 패턴은 동일한 이름을 대상으로 해야 합니다.", + "overlappingKeywordArgs": "형식화된 사전이 키워드 매개 변수 {names}과(와) 겹칩니다.", + "overlappingOverload": "매개 변수가 오버로드 {obscuredBy}과(와) 겹치므로 \"{name}\"에 대한 오버로드 {obscured}이(가) 사용되지 않습니다.", + "overloadAbstractImplMismatch": "오버로드는 구현의 추상 상태와 일치해야 합니다.", + "overloadAbstractMismatch": "오버로드는 모두 추상이거나 아니어야 합니다", + "overloadClassMethodInconsistent": "\"{name}\"의 오버로드가 @classmethod를 일관되지 않게 사용합니다.", + "overloadFinalImpl": "@final 데코레이터는 구현에만 적용해야 합니다.", + "overloadFinalNoImpl": "첫 번째 오버로드만 @final 표시해야 합니다.", + "overloadImplementationMismatch": "오버로드된 구현이 오버로드 {index}의 시그니처와 일치하지 않습니다.", + "overloadOverrideImpl": "@override 데코레이터는 구현에만 적용해야 합니다.", + "overloadOverrideNoImpl": "첫 번째 오버로드만 @override 표시해야 합니다.", + "overloadReturnTypeMismatch": "\"{name}\"에 대한 {prevIndex} 오버로드가 오버로드 {newIndex}과(와) 겹치고 호환되지 않는 형식을 반환합니다.", + "overloadStaticMethodInconsistent": "\"{name}\"의 오버로드가 @staticmethod를 일관되지 않게 사용합니다.", + "overloadWithoutImplementation": "“{name}“이(가) overload로 표시되어 있지만 구현이 제공되지 않았습니다.", + "overriddenMethodNotFound": "“{name}“ 메서드가 override로 표시되어 있지만 이름이 같은 기본 메서드가 없습니다.", + "overrideDecoratorMissing": "“{name}“ 메서드가 override로 표시되지 않았지만 “{className}“ 클래스에서 메서드를 재정의하고 있습니다.", + "paramAfterKwargsParam": "매개 변수는 ‘**’ 매개 변수 다음에 와야 합니다.", + "paramAlreadyAssigned": "매개 변수 
\"{name}\"이(가) 이미 할당되었습니다.", + "paramAnnotationMissing": "‘{name}’ 매개 변수에 대한 형식 주석이 없습니다.", + "paramAssignmentMismatch": "‘{sourceType}’ 형식의 식을 ‘{paramType}’ 형식의 매개 변수에 할당할 수 없습니다.", + "paramNameMissing": "이름이 \"{name}\"인 매개 변수가 없습니다.", + "paramSpecArgsKwargsDuplicate": "ParamSpec \"{type}\" 인수가 이미 제공되었습니다.", + "paramSpecArgsKwargsUsage": "ParamSpec의 \"args\" 및 \"kwargs\" 특성은 모두 함수 서명 내에 나타나야 함", + "paramSpecArgsMissing": "ParamSpec \"{type}\"에 대한 인수가 없습니다.", + "paramSpecArgsUsage": "ParamSpec의 \"args\" 특성은 *args 매개 변수와 함께 사용할 경우에만 유효함", + "paramSpecAssignedName": "ParamSpec을 \"{name}\"이라는 변수에 할당해야 합니다.", + "paramSpecContext": "ParamSpec은 이 컨텍스트에서 허용되지 않습니다.", + "paramSpecDefaultNotTuple": "ParamSpec의 기본값에는 줄임표, tuple 식 또는 ParamSpec이 필요합니다.", + "paramSpecFirstArg": "첫 번째 인수로 ParamSpec의 이름이 필요합니다.", + "paramSpecKwargsUsage": "ParamSpec의 \"kwargs\" 특성은 **kwargs 매개 변수와 함께 사용할 경우에만 유효함", + "paramSpecNotUsedByOuterScope": "ParamSpec \"{name}\"은(는) 이 컨텍스트에서 의미가 없습니다.", + "paramSpecUnknownArg": "ParamSpec은 한 개 이상의 인수를 지원하지 않습니다.", + "paramSpecUnknownMember": "\"{name}\"은(는) ParamSpec의 알려진 특성이 아님", + "paramSpecUnknownParam": "‘{name}’은(는) ParamSpec에 대한 알 수 없는 매개 변수입니다.", + "paramTypeCovariant": "공변(Covariant) 형식 변수는 매개 변수 형식에 사용할 수 없습니다.", + "paramTypePartiallyUnknown": "매개 변수 \"{paramName}\"의 형식을 부분적으로 알 수 없습니다.", + "paramTypeUnknown": "매개 변수 \"{paramName}\"의 형식을 알 수 없습니다.", + "parenthesizedContextManagerIllegal": "Parentheses within \"with\" statement requires Python 3.9 or newer", + "patternNeverMatches": "제목 형식 \"{type}\"에 대해 패턴이 일치하지 않습니다.", + "positionArgAfterNamedArg": "위치 인수는 키워드 인수 뒤에 나타날 수 없습니다.", + "positionArgAfterUnpackedDictArg": "위치 인수는 키워드 인수 압축 해제 후에 나타날 수 없습니다.", + "positionOnlyAfterArgs": "위치 전용 매개 변수 구분 기호는 \"*\" 매개 변수 뒤에 사용할 수 없습니다.", + "positionOnlyAfterKeywordOnly": "\"/\" 매개 변수는 \"*\" 매개 변수 앞에 나타나야 합니다.", + "positionOnlyAfterNon": "위치 전용이 아닌 매개 변수 다음에는 위치 전용 매개 변수를 사용할 수 없습니다.", + "positionOnlyFirstParam": "위치 전용 매개 변수 구분 기호는 첫 번째 매개 변수로 허용되지 
않습니다.", + "positionOnlyIncompatible": "위치 전용 매개 변수 구분 기호에는 Python 3.8 이상이 필요합니다.", + "privateImportFromPyTypedModule": "\"{name}\"은(는) \"{module}\" 모듈에서 내보내지지 않습니다.", + "privateUsedOutsideOfClass": "\"{name}\"은(는) 프라이빗이며 선언된 클래스 외부에서 사용됩니다.", + "privateUsedOutsideOfModule": "\"{name}\"은(는) 프라이빗이며 선언된 모듈 외부에서 사용됩니다.", + "propertyOverridden": "“{name}“은(는) “{className}“ 클래스에서 같은 이름의 property를 잘못 재정의합니다.", + "propertyStaticMethod": "Static methods not allowed for property getter, setter or deleter", + "protectedUsedOutsideOfClass": "‘{name}’은(는) 선언된 클래스 외부에서 보호되고 사용됩니다.", + "protocolBaseClass": "Protocol 클래스 \"{classType}\"은(는) Protocol 아닌 클래스 \"{baseType}\"에서 파생될 수 없습니다.", + "protocolBaseClassWithTypeArgs": "형식 매개 변수 구문을 사용할 때는 Protocol 클래스에 형식 인수가 허용되지 않습니다.", + "protocolIllegal": "\"Protocol\"을 사용하려면 Python 3.7 이상이 필요합니다.", + "protocolNotAllowed": "이 컨텍스트에서는 \"Protocol\"을 사용할 수 없습니다.", + "protocolTypeArgMustBeTypeParam": "\"Protocol\"의 형식 인수는 형식 매개 변수여야 합니다.", + "protocolUnsafeOverlap": "클래스가 \"{name}\"과(와) 안전하지 않게 겹치며 런타임에 일치 항목을 생성할 수 있습니다.", + "protocolVarianceContravariant": "제네릭 Protocol \"{class}\"에 사용되는 형식 변수 \"{variable}\"은(는) 반공변이어야 합니다.", + "protocolVarianceCovariant": "제네릭 Protocol \"{class}\"에 사용되는 형식 변수 \"{variable}\"은(는) 공변(covariant)이어야 합니다.", + "protocolVarianceInvariant": "제네릭 Protocol \"{class}\"에 사용되는 형식 변수 \"{variable}\"은(는) 고정되어야 합니다.", + "pyrightCommentInvalidDiagnosticBoolValue": "Pyright 주석 지시문 뒤에는 \"=\"와 true 또는 false 값이 와야 합니다.", + "pyrightCommentInvalidDiagnosticSeverityValue": "Pyright 주석 지시문 뒤에는 \"=\"와 true, false, error, warning, information 또는 none 값이 와야 합니다.", + "pyrightCommentMissingDirective": "Pyright 메모 뒤에는 지시문(basic 또는 strict) 또는 진단 규칙이 있어야 합니다.", + "pyrightCommentNotOnOwnLine": "파일 수준 설정을 제어하는 데 사용되는 Pyright 주석은 고유의 줄에 표시되어야 합니다.", + "pyrightCommentUnknownDiagnosticRule": "\"{rule}\"은(는) pyright 주석에 대한 알 수 없는 진단 규칙입니다.", + "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\"이(가) pyright 주석에 대해 잘못된 값입니다. 
true, false, error, warning, information 또는 none이 필요합니다.", + "pyrightCommentUnknownDirective": "\"{directive}\"은(는) pyright 주석에 대한 알 수 없는 지시문입니다. \"strict\", \"standard\" 또는 \"basic\"이 필요합니다.", + "readOnlyArgCount": "‘ReadOnly‘ 뒤에는 단일 형식 인수가 필요합니다.", + "readOnlyNotInTypedDict": "이 컨텍스트에서는 \"ReadOnly\"를 사용할 수 없습니다.", + "recursiveDefinition": "‘{name}’ 형식이 스스로를 참조하므로 확인할 수 없습니다.", + "relativeImportNotAllowed": "상대 가져오기는 \"import .a\" 양식과 함께 사용할 수 없습니다. 대신 \"from . import a\"을(를) 사용합니다.", + "requiredArgCount": "‘Required’ 뒤에 단일 형식 인수가 필요합니다.", + "requiredNotInTypedDict": "이 컨텍스트에서는 \"Required\"를 사용할 수 없습니다.", + "returnInAsyncGenerator": "값이 있는 Return 문은 async 생성기에서 사용할 수 없습니다.", + "returnInExceptionGroup": "\"except*\" 블록에는 \"return\"을 사용할 수 없습니다", + "returnMissing": "선언된 반환 형식이 \"{returnType}\"인 함수는 모든 코드 경로에서 값을 반환해야 합니다.", + "returnOutsideFunction": "‘return’은 함수 내에서만 사용할 수 있습니다.", + "returnTypeContravariant": "반공변 유형 변수는 반환 형식에 사용할 수 없습니다.", + "returnTypeMismatch": "형식 \"{exprType}\"을 형식 \"{returnType}\"에 반환하도록 할당할 수 없습니다.", + "returnTypePartiallyUnknown": "반환 형식 \"{returnType}\"을(를) 부분적으로 알 수 없습니다.", + "returnTypeUnknown": "반환 유형을 알 수 없습니다.", + "revealLocalsArgs": "‘reveal_locals’ 호출은 인수가 필요하지 않습니다.", + "revealLocalsNone": "이 범위에 locals가 없습니다.", + "revealTypeArgs": "\"reveal_type\" 호출에는 단일 위치 인수가 필요합니다.", + "revealTypeExpectedTextArg": "‘reveal_type’ 함수의 ‘expected_text’ 인수는 str 리터럴 값이어야 합니다.", + "revealTypeExpectedTextMismatch": "텍스트 형식이 일치하지 않습니다. \"{expected}\"이(가) 필요하지만 \"{received}\"을(를) 받았습니다.", + "revealTypeExpectedTypeMismatch": "텍스트 형식이 일치하지 않습니다. 
‘{expected}’이(가) 필요하지만 ‘{received}’을(를) 받았습니다.", + "selfTypeContext": "이 컨텍스트에서는 \"Self\"가 잘못되었습니다.", + "selfTypeMetaclass": "메타클래스(\"type\"의 서브클래스) 내에서 \"Self\"를 사용할 수 없습니다.", + "selfTypeWithTypedSelfOrCls": "\"Self\"는 \"Self\" 이외의 형식 주석이 있는 'self' 또는 'cls' 매개 변수가 있는 함수에서 사용할 수 없습니다.", + "sentinelBadName": "Sentinel의 첫 번째 인수는 문자열 리터럴이어야 합니다.", + "sentinelNameMismatch": "Sentinel은 이름이 같은 변수에 할당되어야 합니다.", + "sentinelParamCount": "Sentinel에는 하나의 위치 인수가 필요합니다.", + "setterGetterTypeMismatch": "Property setter 값 형식을 getter 반환 형식에 할당할 수 없습니다.", + "singleOverload": "\"{name}\"이(가) 오버로드로 표시되었지만 추가 오버로드가 없습니다.", + "slotsAttributeError": "__slots__에서 ‘{name}’이(가) 지정되지 않았습니다.", + "slotsClassVarConflict": "‘{name}‘이(가) __slots__에 선언된 instance 변수와 충돌합니다.", + "starPatternInAsPattern": "별 무늬는 \"as\" 대상과 함께 사용할 수 없습니다.", + "starPatternInOrPattern": "별 무늬는 다른 패턴 내에서 ORed할 수 없습니다.", + "starStarWildcardNotAllowed": "**는 와일드카드 \"_\"와 함께 사용할 수 없습니다.", + "staticClsSelfParam": "정적 메서드는 \"self\" 또는 \"cls\" 매개 변수를 사용하면 안 됩니다.", + "stringNonAsciiBytes": "ASCII가 아닌 문자는 바이트 문자열 리터럴에 허용되지 않습니다.", + "stringNotSubscriptable": "형식 식에서는 문자열 식을 첨자할 수 없습니다. 전체 식을 따옴표로 묶습니다.", + "stringUnsupportedEscape": "문자열 리터럴에 지원되지 않는 이스케이프 시퀀스가 있습니다.", + "stringUnterminated": "문자열 리터럴이 종료되지 않았습니다.", + "stubFileMissing": "\"{importName}\"에 대한 stub 파일을 찾을 수 없습니다.", + "stubUsesGetAttr": "형식 stub 파일이 불완전합니다. 
\"__getattr__\"는 모듈에 대한 형식 오류를 모호하게 합니다.", + "sublistParamsIncompatible": "Sublist 매개 변수는 Python 3.x에서 지원되지 않습니다.", + "superCallArgCount": "‘super’ 호출에는 인수가 2개 이하여야 합니다.", + "superCallFirstArg": "\"super\" 호출에 대한 첫 번째 인수로 클래스 형식이 필요하지만 \"{type}\"을(를) 받았습니다.", + "superCallSecondArg": "\"super\" 호출에 대한 두 번째 인수는 \"{type}\"에서 파생된 개체 또는 클래스여야 합니다.", + "superCallZeroArgForm": "\"super\" 호출의 인수가 0인 형식은 메서드 내에서만 유효합니다.", + "superCallZeroArgFormStaticMethod": "\"super\" 호출의 인수가 0인 형식은 정적 메서드 내에서 유효하지 않습니다.", + "symbolIsPossiblyUnbound": "\"{name}\"은(는) 바인딩되지 않은 것일 수 있습니다.", + "symbolIsUnbound": "\"{name}\"의 바인딩이 해제되었습니다.", + "symbolIsUndefined": "\"{name}\"이(가) 정의되지 않았습니다.", + "symbolOverridden": "\"{name}\"이(가) 클래스 \"{className}\"에서 동일한 이름의 기호를 재정의합니다.", + "templateStringBytes": "템플릿 문자열 리터럴(t-strings)은 이진일 수 없습니다.", + "templateStringIllegal": "템플릿 문자열 리터럴(t-strings)에는 Python 3.14 이상이 필요합니다.", + "templateStringUnicode": "템플릿 문자열 리터럴(t-strings)은 유니코드일 수 없습니다.", + "ternaryNotAllowed": "형식 식에는 3항 식이 허용되지 않습니다.", + "totalOrderingMissingMethod": "클래스는 total_ordering을 사용하려면 \"__lt__\", \"__le__\", \"__gt__\" 또는 \"__ge__\" 중 하나를 정의해야 합니다.", + "trailingCommaInFromImport": "주변 괄호 없이는 후행 쉼표를 사용할 수 없습니다.", + "tryWithoutExcept": "try 문에는 except 또는 finally 절이 하나 이상 있어야 합니다.", + "tupleAssignmentMismatch": "형식이 “{type}“인 식을 대상 tuple에 할당할 수 없습니다.", + "tupleInAnnotation": "형식 식에는 tuple 식을 사용할 수 없습니다.", + "tupleIndexOutOfRange": "{index} 인덱스가 {type} 형식의 범위를 벗어났습니다.", + "typeAliasIllegalExpressionForm": "형식 별칭 정의에 대한 식 양식이 잘못되었습니다.", + "typeAliasIsRecursiveDirect": "형식 별칭 ‘{name}’의 정의에서 스스로를 사용할 수 없습니다.", + "typeAliasNotInModuleOrClass": "TypeAlias는 모듈 또는 클래스 범위 내에서만 정의할 수 있습니다.", + "typeAliasRedeclared": "\"{name}\"은(는) TypeAlias로 선언되며 한 번만 할당할 수 있습니다.", + "typeAliasStatementBadScope": "type 문은 모듈 또는 클래스 범위 내에서만 사용할 수 있습니다.", + "typeAliasStatementIllegal": "형식 별칭 문에는 Python 3.12 이상이 필요합니다.", + "typeAliasTypeBadScope": "형식 별칭은 모듈 또는 클래스 범위 내에서만 정의할 수 있습니다.", + "typeAliasTypeBaseClass": 
"\"type\" 문에 정의된 형식 별칭은 기본 클래스로 사용할 수 없습니다.", + "typeAliasTypeMustBeAssigned": "TypeAliasType은 형식 별칭과 이름이 같은 변수에 할당해야 합니다.", + "typeAliasTypeNameArg": "TypeAliasType의 첫 번째 인수는 형식 별칭의 이름을 나타내는 문자열 리터럴이어야 합니다.", + "typeAliasTypeNameMismatch": "형식 별칭의 이름은 할당된 변수의 이름과 일치해야 합니다.", + "typeAliasTypeParamInvalid": "형식 매개 변수 목록은 TypeVar, TypeVarTuple 또는 ParamSpec만 포함하는 tuple이어야 합니다.", + "typeAnnotationCall": "형식 식에는 호출 식을 사용할 수 없습니다.", + "typeAnnotationVariable": "형식 식에는 변수를 사용할 수 없습니다.", + "typeAnnotationWithCallable": "\"type\"에 대한 형식 인수는 클래스여야 합니다. 콜러블은 지원되지 않습니다.", + "typeArgListExpected": "ParamSpec, 줄임표 또는 형식의 list가 필요합니다.", + "typeArgListNotAllowed": "이 형식 인수에는 list 식을 사용할 수 없습니다.", + "typeArgsExpectingNone": "클래스 \"{name}\"에 형식 인수가 필요하지 않습니다.", + "typeArgsMismatchOne": "하나의 형식 인수가 필요하지만 {received}을(를) 받았습니다.", + "typeArgsMissingForAlias": "제네릭 형식 별칭 \"{name}\"에 대한 형식 인수가 필요합니다.", + "typeArgsMissingForClass": "‘{name}’ 제네릭 클래스에 대한 형식 인수가 필요합니다.", + "typeArgsTooFew": "\"{name}\"에 대해 제공된 형식 인수가 너무 적습니다. {expected}이(가) 필요하지만 {received}을(를) 받았습니다.", + "typeArgsTooMany": "‘{name}’에 대한 형식 인수가 너무 많습니다. {expected}이(가) 필요하지만 {received}을(를) 받았습니다.", + "typeAssignmentMismatch": "형식 \"{sourceType}\"을 선언된 형식 \"{destType}\"에 할당할 수 없습니다.", + "typeAssignmentMismatchWildcard": "가져오기 기호 \"{name}\"에 선언된 형식 \"{destType}\"에 할당할 수 없는 \"{sourceType}\" 형식이 있습니다.", + "typeCallNotAllowed": "type() 호출은 형식 식에 사용하면 안 됩니다.", + "typeCheckOnly": "\"{name}\"이(가) @type_check_only로 표시되어 있으므로 형식 주석에서만 사용할 수 있습니다.", + "typeCommentDeprecated": "type 메모는 더 이상 사용되지 않습니다. 
대신 type 주석 사용", + "typeExpectedClass": "클래스가 필요하지만 \"{type}\"이(가) 수신됨", + "typeFormArgs": "\"TypeForm\"은 단일 위치 인수를 허용합니다.", + "typeGuardArgCount": "\"TypeGuard\" 또는 \"TypeIs\" 뒤에 단일 형식 인수가 필요합니다.", + "typeGuardParamCount": "사용자 정의 type guard 함수 및 메서드에는 하나 이상의 입력 매개 변수가 있어야 합니다.", + "typeIsReturnType": "TypeIs의 반환 형식(\"{returnType}\")이 값 매개 변수 형식(\"{type}\")과 일치하지 않습니다.", + "typeNotAwaitable": "“{type}“은(는) awaitable이 아닙니다.", + "typeNotIntantiable": "\"{type}\"을(를) 인스턴스화할 수 없습니다.", + "typeNotIterable": "\"{type}\" 반복할 수 없습니다.", + "typeNotSpecializable": "‘{type}’ 형식을 특수화할 수 없습니다.", + "typeNotSubscriptable": "\"{type}\" 형식의 개체를 첨자할 수 없습니다.", + "typeNotSupportBinaryOperator": "‘{operator}’ 연산자는 ‘{leftType}’ 및 ‘{rightType}’ 형식에 대해 지원되지 않습니다.", + "typeNotSupportBinaryOperatorBidirectional": "예상 형식이 ‘{expectedType}’인 경우 ‘{leftType}’ 및 ‘{rightType}’ 형식에 대해 ‘{operator}’ 연산자가 지원되지 않습니다.", + "typeNotSupportUnaryOperator": "‘{type}’‘에 대해 ’{operator}‘ 연산자가 지원되지 않습니다.", + "typeNotSupportUnaryOperatorBidirectional": "예상 형식이 \"{expectedType}\" 경우 형식 \"{type}\"에 대해 연산자 \"{operator}\"이(가) 지원되지 않습니다.", + "typeNotUsableWith": "\"{type}\" 형식의 개체는 {method}을(를) 올바르게 구현하지 않으므로 \"with\"와 함께 사용할 수 없습니다.", + "typeNotUsableWithAsync": "\"{type}\" 형식의 개체는 {method}을(를) 올바르게 구현하지 않으므로 \"async with\"와 함께 사용할 수 없습니다.", + "typeParameterBoundNotAllowed": "바운드 또는 제약 조건은 가변 인자 형식 매개 변수 또는 ParamSpec와 함께 사용할 수 없습니다.", + "typeParameterConstraintTuple": "형식 매개 변수 제약 조건은 두 개 이상의 형식 튜플이어야 합니다.", + "typeParameterExistingTypeParameter": "형식 매개 변수 \"{name}\"이(가) 이미 사용 중입니다.", + "typeParameterNotDeclared": "형식 매개 변수 \"{name}\"이(가) \"{container}\"의 형식 매개 변수 목록에 포함되어 있지 않습니다.", + "typeParametersMissing": "하나 이상의 형식 매개 변수를 지정해야 합니다.", + "typePartiallyUnknown": "\"{name}\"의 형식을 부분적으로 알 수 없습니다.", + "typeUnknown": "\"{name}\" 유형을 알 수 없습니다.", + "typeVarAssignedName": "TypeVar을 \"{name}\"이라는 변수에 할당해야 합니다.", + "typeVarAssignmentMismatch": "‘{type}’ 형식을 ‘{name}’ 형식 변수에 할당할 수 없습니다.", + "typeVarBoundAndConstrained": 
"TypeVar는 바인딩되고 제한될 수 없습니다.", + "typeVarBoundGeneric": "TypeVar 바인딩 형식은 제네릭일 수 없습니다.", + "typeVarConstraintGeneric": "TypeVar 제약 조건 형식은 제네릭일 수 없습니다.", + "typeVarDefaultBoundMismatch": "TypeVar 기본 형식은 바인딩된 형식의 하위 형식이어야 합니다.", + "typeVarDefaultConstraintMismatch": "TypeVar 기본 형식은 제약이 있는 형식 중 하나여야 합니다.", + "typeVarDefaultIllegal": "형식 변수 기본 형식은 Python 3.13 이상이 필요합니다.", + "typeVarDefaultInvalidTypeVar": "형식 매개 변수 \"{name}\"에 범위를 벗어난 하나 이상의 형식 변수를 참조하는 기본 형식이 있습니다.", + "typeVarFirstArg": "TypeVar의 이름이 첫 번째 인수로 필요합니다.", + "typeVarInvalidForMemberVariable": "특성 형식은 로컬 메서드로 범위가 지정된 \"{name}\" 형식 변수를 사용할 수 없음", + "typeVarNoMember": "TypeVar \"{type}\"에 특성 \"{name}\"이(가) 없음", + "typeVarNotSubscriptable": "TypeVar \"{type}\"을(를) 첨자할 수 없습니다.", + "typeVarNotUsedByOuterScope": "형식 변수 \"{name}\"은(는) 이 컨텍스트에서 의미가 없습니다.", + "typeVarPossiblyUnsolvable": "호출자가 ‘{param}’ 매개 변수에 대한 인수를 제공하지 않으면 ‘{name}’ 형식 변수가 확인되지 않을 수 있습니다.", + "typeVarSingleConstraint": "TypeVar에는 두 개 이상의 제한된 형식이 있어야 합니다.", + "typeVarTupleConstraints": "TypeVarTuple에는 값 제약 조건이 있을 수 없습니다.", + "typeVarTupleContext": "TypeVarTuple은 이 컨텍스트에서 허용되지 않습니다.", + "typeVarTupleDefaultNotUnpacked": "TypeVarTuple 기본 형식은 압축을 푼 tuple 또는 TypeVarTuple이어야 합니다.", + "typeVarTupleMustBeUnpacked": "TypeVarTuple 값에는 압축 풀기 연산자가 필요합니다.", + "typeVarTupleUnknownParam": "\"{name}\"은(는) TypeVarTuple에 대한 알 수 없는 매개 변수입니다.", + "typeVarUnknownParam": "‘{name}’은(는) TypeVar에 대한 알 수 없는 매개 변수입니다.", + "typeVarUsedByOuterScope": "TypeVar ‘{name}’은(는) 외부 범위에서 이미 사용 중입니다.", + "typeVarUsedOnlyOnce": "TypeVar \"{name}\"이(가) 제네릭 함수 시그니처에 한 번만 나타납니다.", + "typeVarVariance": "TypeVar는 공변이면서 반공변일 수 없습니다.", + "typeVarWithDefaultFollowsVariadic": "TypeVar \"{typeVarName}\"에 기본값이 있으며 TypeVarTuple \"{variadicName}\"을(를) 따를 수 없습니다.", + "typeVarWithoutDefault": "‘{name}’은 기본 형식이 없으므로 형식 매개 변수 목록에서 ‘{other}’ 뒤에 나타날 수 없습니다.", + "typeVarsNotInGenericOrProtocol": "Generic[] 또는 Protocol[]에는 모든 형식 변수가 포함되어야 합니다.", + "typedDictAccess": "TypedDict의 항목에 액세스할 수 없습니다.", + 
"typedDictAssignedName": "TypedDict를 \"{name}\"이라는 변수에 할당해야 합니다.", + "typedDictBadVar": "TypedDict 클래스는 형식 주석만 포함할 수 있습니다.", + "typedDictBaseClass": "TypedDict 클래스의 모든 기본 클래스도 TypedDict 클래스여야 합니다.", + "typedDictBoolParam": "\"{name}\" 매개 변수에 True 또는 False 값이 있어야 합니다.", + "typedDictClosedExtras": "기본 클래스 \"{name}\" 추가 항목의 유형을 \"{type}\" 형식으로 제한하는 TypedDict.", + "typedDictClosedFalseNonOpenBase": "기본 클래스 \"{name}\"은 열린 TypedDict가 아니므로 closed=False는 허용되지 않습니다.", + "typedDictClosedNoExtras": "기본 클래스 \"{name}\"은(는) closed TypedDict입니다. 추가 항목은 허용되지 않습니다.", + "typedDictDelete": "TypedDict에서 항목을 삭제할 수 없습니다.", + "typedDictEmptyName": "TypedDict 내의 이름은 비워 둘 수 없습니다.", + "typedDictEntryName": "사전 항목 이름에 필요한 문자열 리터럴", + "typedDictEntryUnique": "사전 내의 이름은 고유해야 합니다.", + "typedDictExtraArgs": "추가 TypedDict 인수가 지원되지 않음", + "typedDictExtraItemsClosed": "TypedDict \"closed\" 또는 \"extra_items\" 중 하나만 사용할 수 있습니다.", + "typedDictFieldNotRequiredRedefinition": "TypedDict 항목 \"{name}\"은(는) NotRequired로 재정의될 수 없습니다.", + "typedDictFieldReadOnlyRedefinition": "TypedDict 항목 \"{name}\"은(는) ReadOnly로 재정의될 수 없습니다.", + "typedDictFieldRequiredRedefinition": "TypedDict 항목 \"{name}\"은(는) Required로 재정의될 수 없습니다.", + "typedDictFirstArg": "TypedDict 클래스 이름이 첫 번째 인수로 필요합니다.", + "typedDictInClassPattern": "TypedDict 클래스는 클래스 패턴에서 사용할 수 없습니다.", + "typedDictInitsubclassParameter": "TypedDict는 __init_subclass__ 매개 변수 \"{name}\"을(를) 지원하지 않습니다.", + "typedDictNotAllowed": "이 컨텍스트에서는 \"TypedDict\"를 사용할 수 없습니다.", + "typedDictSecondArgDict": "두 번째 매개 변수로 dict 또는 키워드 매개 변수가 필요합니다.", + "typedDictSecondArgDictEntry": "단순 사전 항목이 필요합니다.", + "typedDictSet": "TypedDict에서 항목을 할당할 수 없습니다.", + "unaccessedClass": "클래스 \"{name}\"에 액세스하지 않았습니다.", + "unaccessedFunction": "함수 \"{name}\"에 액세스하지 않았습니다.", + "unaccessedImport": "가져오기 \"{name}\"에 액세스하지 않았습니다.", + "unaccessedSymbol": "\"{name}\"에 액세스하지 않았습니다.", + "unaccessedVariable": "변수 \"{name}\"에 액세스하지 않았습니다.", + "unannotatedFunctionSkipped": "주석이 없으므로 ‘{name}’ 함수 분석을 건너뜁니다.", + 
"unaryOperationNotAllowed": "단항 연산자는 형식 식에 사용할 수 없습니다.", + "unexpectedAsyncToken": "\"async\"를 따르려면 \"def\", \"with\" 또는 \"for\"가 필요합니다.", + "unexpectedEof": "예기치 않은 EOF", + "unexpectedExprToken": "식 끝에 예기치 않은 토큰이 있습니다.", + "unexpectedIndent": "예기치 않은 들여쓰기", + "unexpectedUnindent": "들여쓰기가 필요 없음", + "unhashableDictKey": "사전 키는 해시 가능해야 합니다.", + "unhashableSetEntry": "Set 항목은 해시가 가능해야 합니다.", + "uninitializedAbstractVariables": "추상 기본 클래스에 정의된 변수가 final 클래스 \"{classType}\"에서 초기화되지 않았습니다.", + "uninitializedInstanceVariable": "인스턴스 변수 \"{name}\"이(가) 클래스 본문 또는 __init__ 메서드에서 초기화되지 않았습니다.", + "unionForwardReferenceNotAllowed": "Union 구문은 문자열 피연산자에서 사용할 수 없습니다. 전체 식 주위에 따옴표 사용", + "unionSyntaxIllegal": "공용 구조체에 대한 대체 구문에는 Python 3.10 이상이 필요합니다.", + "unionTypeArgCount": "Union에는 둘 이상의 형식 인수가 필요합니다.", + "unionUnpackedTuple": "Union은 압축을 푼 tuple을 포함할 수 없습니다.", + "unionUnpackedTypeVarTuple": "Union은 압축을 푼 TypeVarTuple을 포함할 수 없습니다.", + "unnecessaryCast": "불필요한 \"cast\" 호출입니다. 형식이 이미 “{type}“입니다.", + "unnecessaryIsInstanceAlways": "불필요한 isinstance 호출입니다. \"{testType}\"은(는) 항상 \"{classType}\"의 인스턴스입니다.", + "unnecessaryIsInstanceNever": "불필요한 isinstance 호출입니다. \"{testType}\"은(는) 항상 \"{classType}\"의 인스턴스입니다.", + "unnecessaryIsSubclassAlways": "불필요한 issubclass 호출입니다. ’{testType}‘은(는) 항상 ’{classType}‘의 하위 클래스입니다.", + "unnecessaryIsSubclassNever": "불필요한 issubclass 호출입니다. 
’{testType}‘은(는) 항상 ’{classType}‘의 하위 클래스입니다.", + "unnecessaryPyrightIgnore": "불필요한 \"# pyright: ignore\" 메모입니다.", + "unnecessaryPyrightIgnoreRule": "불필요한 \"# pyright: ignore\" 규칙: \"{name}\"", + "unnecessaryTypeIgnore": "불필요한 \"# type: ignore\" 메모입니다.", + "unpackArgCount": "\"Unpack\" 뒤에는 단일 형식 인수가 필요합니다.", + "unpackExpectedTypeVarTuple": "Unpack에 대한 형식 인수로 TypeVarTuple 또는 tuple이 필요합니다.", + "unpackExpectedTypedDict": "Unpack을 위해서는 TypedDict 형식 인수가 필요합니다.", + "unpackIllegalInComprehension": "압축 풀기 작업은 이해에서 사용할 수 없습니다.", + "unpackInAnnotation": "형식 식에는 Unpack 연산자를 사용할 수 없습니다.", + "unpackInDict": "사전에서 압축 풀기 작업이 허용되지 않음", + "unpackInSet": "set 내에서는 압축 풀기 연산자를 사용할 수 없습니다.", + "unpackNotAllowed": "이 컨텍스트에서는 Unpack이 허용되지 않습니다.", + "unpackOperatorNotAllowed": "이 컨텍스트에서는 압축 풀기 작업이 허용되지 않습니다.", + "unpackTuplesIllegal": "Python 3.8 이전의 튜플에서는 압축 풀기 작업이 허용되지 않습니다.", + "unpackedArgInTypeArgument": "압축을 푼 인수는 이 컨텍스트에서 사용할 수 없음", + "unpackedArgWithVariadicParam": "압축을 푼 인수는 TypeVarTuple 매개 변수에 사용할 수 없습니다.", + "unpackedDictArgumentNotMapping": "** 뒤의 인수 식은 \"str\" 키 형식의 매핑이어야 합니다.", + "unpackedDictSubscriptIllegal": "아래 첨자에서 사전 압축 풀기 연산자는 사용할 수 없습니다.", + "unpackedSubscriptIllegal": "아래 첨자의 압축 풀기 연산자에는 Python 3.11 이상이 필요합니다.", + "unpackedTypeVarTupleExpected": "압축 해제된 TypeVarTuple이 필요합니다. Unpack[{name1}] 또는 *{name2} 사용", + "unpackedTypedDictArgument": "압축되지 않은 TypedDict 인수를 매개 변수와 일치시킬 수 없습니다.", + "unreachableCodeCondition": "조건이 정적으로 false로 평가되어 코드가 분석되지 않습니다.", + "unreachableCodeStructure": "코드에 구조적으로 도달할 수 없음", + "unreachableCodeType": "형식 분석을 통해 코드에 연결할 수 없음을 나타냅니다.", + "unreachableExcept": "예외가 이미 처리되었으므로 Except 절에 연결할 수 없습니다.", + "unsupportedDunderAllOperation": "\"__all__\"에 대한 작업이 지원되지 않으므로 내보낸 기호 목록이 잘못되었을 수 있습니다.", + "unusedCallResult": "호출 식의 결과가 ‘{type}’ 형식이므로 사용되지 않습니다. 의도적인 경우 변수 ‘_’에 할당하세요.", + "unusedCoroutine": "async 함수 호출의 결과가 사용되지 않습니다. 
\"await\"를 사용하거나 변수에 결과 할당", + "unusedExpression": "식 값은 사용되지 않습니다.", + "varAnnotationIllegal": "변수에 대한 type 주석에는 Python 3.6 이상이 필요합니다. 이전 버전과의 호환성을 위해 type 메모 사용", + "variableFinalOverride": "변수 \"{name}\"이(가) Final로 표시되고 \"{className}\" 클래스에서 이름이 같은 비-Final 변수를 재정의합니다.", + "variadicTypeArgsTooMany": "형식 인수 목록에는 압축을 풀고 있는 TypeVarTuple 또는 tuple이 하나만 있을 수 있습니다.", + "variadicTypeParamTooManyAlias": "형식 별칭에는 TypeVarTuple 형식 매개 변수가 최대 하나만 있을 수 있지만 여러 ({names})가 수신되었습니다.", + "variadicTypeParamTooManyClass": "제네릭 클래스에는 TypeVarTuple 형식 매개 변수가 하나만 있을 수 있지만 여러 ({names})을(를) 받았습니다.", + "walrusIllegal": "연산자 \":=\"에는 Python 3.8 이상이 필요합니다.", + "walrusNotAllowed": "주변 괄호 없이는 이 컨텍스트에서 \":=\" 연산자를 사용할 수 없습니다.", + "wildcardInFunction": "클래스 또는 함수 내에서 와일드카드 import가 허용되지 않음", + "wildcardLibraryImport": "라이브러리에서 와일드카드 import가 허용되지 않습니다.", + "wildcardPatternTypePartiallyUnknown": "와일드카드 패턴으로 캡처된 형식을 부분적으로 알 수 없습니다.", + "wildcardPatternTypeUnknown": "와일드카드 패턴으로 캡처된 형식을 알 수 없습니다.", + "yieldFromIllegal": "\"yield from\"을 사용하려면 Python 3.3 이상이 필요합니다.", + "yieldFromOutsideAsync": "async 함수에서는 \"yield from\"을 사용할 수 없습니다.", + "yieldOutsideFunction": "함수 또는 람다 외부에서는 ‘yield’를 사용할 수 없습니다.", + "yieldWithinComprehension": "comprehension 내에서는 \"yield\"를 사용할 수 없습니다.", + "zeroCaseStatementsFound": "Match 문에는 case 문이 하나 이상 포함되어야 합니다.", + "zeroLengthTupleNotAllowed": "길이가 0인 tuple은 이 컨텍스트에서 허용되지 않습니다." + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "\"Annotated\" 특수 양식은 인스턴스 및 클래스 검사와 함께 사용할 수 없습니다.", + "argParam": "인수가 \"{paramName}\" 매개 변수에 해당합니다.", + "argParamFunction": "인수가 \"{functionName}\" 함수의 \"{paramName}\" 매개 변수에 해당합니다.", + "argsParamMissing": "‘*{paramName}’ 매개 변수에 해당하는 매개 변수가 없습니다.", + "argsPositionOnly": "위치 전용 매개 변수가 일치하지 않습니다. 
{expected}이(가) 필요하지만 {received}을(를) 받았습니다.", + "argumentType": "인수 형식이 \"{type}\"입니다.", + "argumentTypes": "인수 형식: ({types})", + "assignToNone": "형식을 \"None\"에 할당할 수 없습니다.", + "asyncHelp": "‘async with’를 사용하시겠습니까?", + "baseClassIncompatible": "기본 클래스 \"{baseClass}\"은(는) \"{type}\" 유형과 호환되지 않습니다.", + "baseClassIncompatibleSubclass": "기본 클래스 \"{baseClass}\"은(는) \"{type}\" 유형과 호환되지 않는 \"{subclass}\"에서 파생됩니다.", + "baseClassOverriddenType": "기본 클래스 \"{baseClass}\"이(가) 재정의된 \"{type}\" 형식을 제공합니다.", + "baseClassOverridesType": "‘{baseClass}’ 기본 클래스가 ‘{type}’ 형식을 재정의합니다.", + "bytesTypePromotions": "disableBytesTypePromotions를 false로 설정하여 \"bytearray\" 및 \"memoryview\"에 대한 형식 승격 동작을 사용하도록 설정합니다.", + "conditionalRequiresBool": "\"{operandType}\" 형식에 대한 메서드 __bool__에서 \"bool\" 대신 \"{boolReturnType}\" 형식을 반환합니다.", + "dataClassFieldLocation": "필드 선언", + "dataClassFrozen": "\"{name}\"이(가) 고정되어 있습니다.", + "dataProtocolUnsupported": "\"{name}\"은(는) 데이터 프로토콜입니다.", + "descriptorAccessBindingFailed": "설명자 클래스 \"{className}\"에 대한 메서드 \"{name}\"을(를) 바인딩하지 못했습니다.", + "descriptorAccessCallFailed": "설명자 클래스 \"{className}\"에 대한 메서드 \"{name}\"을(를) 호출하지 못했습니다.", + "finalMethod": "Final 메서드", + "functionParamDefaultMissing": "‘{name}’ 매개 변수에 기본 인수가 없습니다.", + "functionParamName": "매개 변수 이름 불일치: \"{destName}\" 및 \"{srcName}\"", + "functionParamPositionOnly": "위치 전용 매개 변수가 일치하지 않습니다. 매개 변수 \"{name}\"은(는) 위치 전용이 아닙니다.", + "functionReturnTypeMismatch": "‘{sourceType}’ 함수 반환 형식은 ‘{destType}’ 형식과 호환되지 않습니다.", + "functionTooFewParams": "함수가 너무 적은 위치 매개 변수를 허용합니다. {expected}이(가) 필요하지만 {received}을(를) 받았습니다.", + "functionTooManyParams": "함수가 너무 많은 위치 매개 변수를 허용합니다. 
{expected}이(가) 필요하지만 {received}을(를) 받았습니다.", + "genericClassNotAllowed": "인스턴스 또는 클래스 검사에 형식 인수가 허용되지 않는 제네릭 형식", + "incompatibleDeleter": "Property deleter 메서드가 호환되지 않습니다.", + "incompatibleGetter": "Property getter 메서드가 호환되지 않습니다.", + "incompatibleSetter": "Property setter 메서드가 호환되지 않습니다.", + "initMethodLocation": "__init__ 메서드가 \"{type}\" 클래스에 정의되어 있습니다.", + "initMethodSignature": "__init__의 서명은 \"{type}\"입니다.", + "initSubclassLocation": "__init_subclass__ 메서드는 \"{name}\" 클래스에 정의되어 있음", + "invariantSuggestionDict": "값 형식에서 공변(covariant)인 \"dict\"에서 \"Mapping\"(매핑)으로 전환하는 것이 좋습니다.", + "invariantSuggestionList": "공변(covariant)인 \"list\"에서 \"Sequence\"로 전환하는 것이 좋습니다.", + "invariantSuggestionSet": "공변(covariant)인 \"set\"에서 \"Container\"로 전환하는 것이 좋습니다.", + "isinstanceClassNotSupported": "\"{type}\"은(는) 인스턴스 및 클래스 검사에 지원되지 않음", + "keyNotRequired": "‘{name}’은(는) ‘{type}’에서 필수 키가 아니므로 액세스로 인해 런타임 예외가 발생할 수 있습니다.", + "keyReadOnly": "\"{name}\"은(는) \"{type}\"의 읽기 전용 키입니다.", + "keyRequiredDeleted": "\"{name}\"은(는) 필수 키이므로 삭제할 수 없습니다.", + "keyUndefined": "\"{name}\"이 \"{type}\"에 정의된 키가 아닙니다.", + "kwargsParamMissing": "‘**{paramName}’ 매개 변수에 해당하는 매개 변수가 없습니다.", + "listAssignmentMismatch": "\"{type}\" 형식이 대상 목록과 호환되지 않습니다.", + "literalAssignmentMismatch": "\"{sourceType}\"은 형식 \"{destType}\"에 할당할 수 없습니다.", + "literalNotAllowed": "\"Literal\" 특수 양식은 인스턴스 및 클래스 검사와 함께 사용할 수 없습니다", + "matchIsNotExhaustiveHint": "전체 처리가 의도되지 않은 경우 \"case _: pass\"를 추가합니다.", + "matchIsNotExhaustiveType": "처리되지 않은 형식: \"{type}\"", + "memberAssignment": "\"{type}\" 형식의 식을 \"{classType}\" 클래스의 \"{name}\" 특성에 할당할 수 없음", + "memberIsAbstract": "\"{type}.{name}\"이(가) 구현되지 않았습니다.", + "memberIsAbstractMore": "{count}개 더...", + "memberIsClassVarInProtocol": "\"{name}\"은(는) 프로토콜에서 ClassVar로 정의됩니다.", + "memberIsInitVar": "\"{name}\"은(는) init-only 필드임", + "memberIsInvariant": "\"{name}\"은(는) 변경 가능하므로 고정되지 않습니다.", + "memberIsNotClassVarInClass": "\"{name}\"은(는) 프로토콜과 호환되려면 ClassVar로 정의해야 합니다.", + 
"memberIsNotClassVarInProtocol": "\"{name}\"이(가) 프로토콜에서 ClassVar로 정의되지 않았습니다.", + "memberIsNotReadOnlyInProtocol": "\"{name}\"(이)가 프로토콜에서 읽기 전용이 아닙니다.", + "memberIsReadOnlyInProtocol": "\"{name}\"은(는) 프로토콜에서 읽기 전용입니다.", + "memberIsWritableInProtocol": "\"{name}\"은(는) 프로토콜에서 쓸 수 있습니다.", + "memberSetClassVar": "\"{name}\" 특성은 ClassVar이므로 클래스 인스턴스를 통해 할당할 수 없음", + "memberTypeMismatch": "\"{name}\"은(는) 호환되지 않는 형식입니다.", + "memberUnknown": "특성 \"{name}\" 알 수 없음", + "metaclassConflict": "메타클래스 \"{metaclass1}\"이(가) \"{metaclass2}\"과(와) 충돌합니다.", + "missingDeleter": "Property deleter 메서드가 없습니다.", + "missingGetter": "Property getter 메서드가 없습니다.", + "missingSetter": "Property setter 메서드가 없습니다.", + "namedParamMissingInDest": "\"{name}\" 추가 매개 변수", + "namedParamMissingInSource": "\"{name}\" 키워드 매개 변수 누락", + "namedParamTypeMismatch": "\"{sourceType}\" 형식의 키워드 매개 변수 \"{name}\"이(가) \"{destType}\" 형식과 호환되지 않음", + "namedTupleNotAllowed": "인스턴스 또는 클래스 검사에는 NamedTuple을 사용할 수 없습니다.", + "newMethodLocation": "__new__ 메서드가 \"{type}\" 클래스에 정의되어 있습니다.", + "newMethodSignature": "__new__ 의 서명은 \"{type}\"입니다.", + "newTypeClassNotAllowed": "NewType으로 만든 형식은 인스턴스 및 클래스 검사에 사용할 수 없습니다.", + "noOverloadAssignable": "\"{type}\" 형식과 일치하는 오버로드된 함수가 없습니다.", + "noneNotAllowed": "인스턴스 또는 클래스 검사에는 None을 사용할 수 없음", + "orPatternMissingName": "누락된 이름: {name}", + "overloadIndex": "오버로드 {index}이(가) 가장 가까운 일치 항목입니다.", + "overloadNotAssignable": "\"{name}\"의 오버로드를 하나 이상 할당할 수 없습니다.", + "overloadSignature": "오버로드 서명은 여기에 정의되어 있습니다.", + "overriddenMethod": "재정의된 메서드", + "overriddenSymbol": "재정의된 기호", + "overrideInvariantMismatch": "\"{overrideType}\" 재정의 형식이 \"{baseType}\" 기본 형식과 같지 않습니다.", + "overrideIsInvariant": "변수를 변경할 수 있으므로 해당 형식은 고정됩니다.", + "overrideNoOverloadMatches": "재정의의 오버로드 서명이 기본 메서드와 호환되지 않습니다.", + "overrideNotClassMethod": "기본 메서드가 classmethod로 선언되었지만 재정의는 그렇지 않은 경우", + "overrideNotInstanceMethod": "기본 메서드가 instance 메서드로 선언되었지만 재정의가", + "overrideNotStaticMethod": "기본 메서드가 staticmethod로 선언되었지만 재정의는 
그렇지 않습니다.", + "overrideOverloadNoMatch": "재정의는 기본 메서드의 모든 오버로드를 처리하지 않습니다.", + "overrideOverloadOrder": "재정의 메서드에 대한 오버로드는 기본 메서드와 동일한 순서여야 합니다.", + "overrideParamKeywordNoDefault": "‘{name}’ 키워드 매개 변수가 불일치합니다. 기본 매개 변수에 기본 인수 값이 있습니다. 재정의 매개 변수에는 없습니다.", + "overrideParamKeywordType": "‘{name}’ 키워드 매개 변수 형식이 일치하지 않습니다. 기본 매개 변수는 형식이 ‘{baseType}’, 재정의 매개 변수는 형식이 ‘{overrideType}’입니다.", + "overrideParamName": "{index} 매개 변수 이름이 일치하지 않습니다. 기본 매개 변수는 이름이 ‘{baseName}’입니다. 재정의 매개 변수는 이름이 ‘{overrideName}’입니다.", + "overrideParamNameExtra": "기본에 매개 변수 \"{name}\"이(가) 없습니다.", + "overrideParamNameMissing": "재정의에 ‘{name}’ 매개 변수가 없습니다.", + "overrideParamNamePositionOnly": "매개 변수 {index} 불일치: 기본 매개 변수 \"{baseName}\"은(는) 키워드 매개 변수이며 재정의 매개 변수는 위치 전용임", + "overrideParamNoDefault": "{index} 매개 변수가 불일치합니다. 기본 매개 변수에 기본 인수 값이 있습니다. 재정의 매개 변수에는 없습니다.", + "overrideParamType": "‘{index}’ 매개 변수 형식이 일치하지 않습니다. 기본 매개 변수는 형식이 ‘{baseType}’, 재정의 매개 변수는 형식이 ‘{overrideType}’입니다.", + "overridePositionalParamCount": "위치 매개 변수 개수가 일치하지 않습니다. 
기본 메서드에 {baseCount}개가 있지만 재정의에는 {overrideCount}개가 있습니다.", + "overrideReturnType": "반환 형식 불일치: 기본 메서드는 \"{baseType}\" 형식을 반환하고 재정의는 \"{overrideType}\" 형식을 반환합니다.", + "overrideType": "기본 클래스는 형식을 \"{type}\"으로 정의합니다.", + "paramAssignment": "매개 변수 {index}: \"{sourceType}\" 형식이 \"{destType}\" 형식과 호환되지 않음", + "paramSpecMissingInOverride": "재정의 메서드에 ParamSpec 매개 변수가 없습니다.", + "paramType": "매개 변수 형식은 \"{paramType}\"입니다.", + "privateImportFromPyTypedSource": "대신 \"{module}\"에서 가져오기", + "propertyAccessFromProtocolClass": "프로토콜 클래스 내에 정의된 속성은 클래스 변수로 액세스할 수 없습니다.", + "propertyMethodIncompatible": "Property 메서드 \"{name}\"이(가) 호환되지 않습니다.", + "propertyMethodMissing": "Property 메서드 “{name}”에 재정의가 없습니다.", + "propertyMissingDeleter": "\"{name}\" property에 정의된 deleter가 없습니다.", + "propertyMissingSetter": "\"{name}\" property에 정의된 setter가 없습니다.", + "protocolIncompatible": "‘{sourceType}’은(는) ‘{destType}’ 프로토콜과 호환되지 않습니다.", + "protocolMemberMissing": "\"{name}\"이(가) 없습니다.", + "protocolRequiresRuntimeCheckable": "인스턴스 및 클래스 검사와 함께 사용하려면 Protocol 클래스를 @runtime_checkable 합니다.", + "protocolSourceIsNotConcrete": "‘{sourceType}’은(는) 구체적인 클래스 형식이 아니므로 ‘{destType}’ 형식에 할당할 수 없습니다.", + "protocolUnsafeOverlap": "\"{name}\"의 특성은 프로토콜과 이름이 같습니다.", + "pyrightCommentIgnoreTip": "\"# pyright: ignore[]\"을 사용하여 한 줄에 대한 진단을 표시하지 않습니다.", + "readOnlyAttribute": "특성 \"{name}\"은(는) 읽기 전용입니다.", + "seeClassDeclaration": "클래스 선언 참조", + "seeDeclaration": "선언 참조", + "seeFunctionDeclaration": "함수 선언 참조", + "seeMethodDeclaration": "메서드 선언 참조", + "seeParameterDeclaration": "매개 변수 선언 보기", + "seeTypeAliasDeclaration": "형식 별칭 선언 참조", + "seeVariableDeclaration": "변수 선언 보기", + "tupleAssignmentMismatch": "\"{type}\" 형식이 대상 tuple과 호환되지 않습니다.", + "tupleEntryTypeMismatch": "Tuple 항목 {entry}이(가) 잘못된 형식입니다.", + "tupleSizeIndeterminateSrc": "Tuple 크기 불일치: {expected}이(가) 필요하지만 미정을 받았습니다.", + "tupleSizeIndeterminateSrcDest": "Tuple 크기 불일치: {expected} 이상이 필요하지만 미정을 받았습니다.", + "tupleSizeMismatch": "Tuple 크기 불일치: {expected}이(가) 
필요하지만 {received}을(를) 받았습니다.", + "tupleSizeMismatchIndeterminateDest": "Tuple 크기 불일치: {expected} 이상이 필요하지만 {received}을(를) 받았습니다.", + "typeAliasInstanceCheck": "\"type\" 문을 사용해 만든 형식 별칭은 인스턴스 및 클래스 검사에 사용할 수 없습니다.", + "typeAssignmentMismatch": "형식 \"{sourceType}\"은 형식 \"{destType}\"에 할당할 수 없습니다.", + "typeBound": "형식 변수 \"{name}\"에 대한 상한 \"{destType}\"에 형식 \"{sourceType}\"을 할당할 수 없습니다.", + "typeConstrainedTypeVar": "형식 \"{type}\"을 제한된 형식 변수 \"{name}\"에 할당할 수 없습니다.", + "typeIncompatible": "\"{sourceType}\"은 \"{destType}\"에 할당할 수 없습니다.", + "typeNotClass": "\"{type}\"이 클래스가 아닙니다.", + "typeNotStringLiteral": "‘{type}’은(는) 문자열 리터럴이 아닙니다.", + "typeOfSymbol": "‘{name}’의 유형이 ‘{type}’입니다.", + "typeParamSpec": "\"{type}\" 형식이 ParamSpec \"{name}\"과(와) 호환되지 않습니다.", + "typeUnsupported": "\"{type}\" 형식은 지원되지 않습니다.", + "typeVarDefaultOutOfScope": "형식 변수 \"{name}\"이(가) 범위에 없습니다.", + "typeVarIsContravariant": "\"{name}\" 형식 매개 변수는 반공변형식 매개 변수)이지만 \"{sourceType}\"은(는) \"{destType}\"의 상위 형식이 아닙니다.", + "typeVarIsCovariant": "\"{name}\" 형식 매개 변수는 공변(covariant)이지만 \"{sourceType}\"은(는) \"{destType}\"의 하위 형식이 아닙니다.", + "typeVarIsInvariant": "\"{name}\" 형식 매개 변수는 고정이지만 \"{sourceType}\"은(는) \"{destType}\"와 같지 않습니다.", + "typeVarNotAllowed": "인스턴스 또는 클래스 검사에 TypeVar가 허용되지 않음", + "typeVarTupleRequiresKnownLength": "TypeVarTuple을 알 수 없는 길이의 tuple에 바인딩할 수 없습니다.", + "typeVarUnnecessarySuggestion": "대신 {type}을(를) 사용하세요.", + "typeVarUnsolvableRemedy": "인수가 제공되지 않을 때 반환 형식을 지정하는 오버로드를 제공합니다.", + "typeVarsMissing": "누락된 형식 변수: {names}", + "typedDictBaseClass": "\"{type}\" 클래스는 TypedDict가 아닙니다.", + "typedDictClassNotAllowed": "인스턴스 또는 클래스 검사에 TypedDict 클래스를 사용할 수 없습니다.", + "typedDictClosedExtraNotAllowed": "항목 \"{name}\"을(를) 추가할 수 없음", + "typedDictClosedExtraTypeMismatch": "형식이 \"{type}\"인 항목 \"{name}\"을(를) 추가할 수 없음", + "typedDictClosedFieldNotReadOnly": "\"{name}\" 항목은 ReadOnly여야 하므로 추가할 수 없습니다.", + "typedDictClosedFieldNotRequired": "\"{name}\" 항목은 NotRequired여야 하므로 추가할 수 없습니다.", + 
"typedDictExtraFieldNotAllowed": "\"{name}\"이(가) \"{type}\"에 없음", + "typedDictExtraFieldTypeMismatch": "\"{name}\" 형식은 \"{type}\"의 \"extra_items\" 형식과 호환되지 않습니다.", + "typedDictFieldMissing": "\"{name}\"이(가) \"{type}\"에 없습니다.", + "typedDictFieldNotReadOnly": "\"{name}\"은(는) \"{type}\"에서 읽기 전용이 아닙니다.", + "typedDictFieldNotRequired": "\"{name}\"은(는) \"{type}\"에 필요하지 않습니다.", + "typedDictFieldRequired": "\"{type}\"에 \"{name}\"이(가) 필요합니다.", + "typedDictFieldTypeMismatch": "\"{type}\" 형식은 \"{name}\" 항목에 할당할 수 없습니다.", + "typedDictFieldUndefined": "\"{name}\"은(는) \"{type}\" 형식의 정의되지 않은 항목입니다.", + "typedDictKeyAccess": "TypedDict에서 항목을 참조하려면 [\"{name}\"]을(를) 사용하세요.", + "typedDictNotAllowed": "TypedDict는 인스턴스 또는 클래스 검사에 사용할 수 없습니다.", + "unhashableType": "‘{type}’ 형식을 해시할 수 없습니다.", + "uninitializedAbstractVariable": "인스턴스 변수 \"{name}\"이(가) 추상 기본 클래스 \"{classType}\"에 정의되어 있지만 초기화되지 않았습니다.", + "unreachableExcept": "\"{exceptionType}\"은(는) \"{parentType}\"의 서브클래스입니다.", + "useDictInstead": "dict[T1, T2]를 사용하여 사전 형식을 나타냅니다.", + "useListInstead": "list[T]를 사용하여 list 형식을 나타내거나 T1 | T2를 사용하여 union 형식을 나타냅니다.", + "useTupleInstead": "tuple[T1, ..., Tn]을 사용하여 tuple 형식을 나타내거나 T1 | T2를 사용하여 union 형식을 나타냅니다.", + "useTypeInstead": "대신 type[T]를 사용합니다.", + "varianceMismatchForClass": "‘{typeVarName}’ 형식 인수의 차이는 ‘{className}’ 기본 클래스와 호환되지 않습니다.", + "varianceMismatchForTypeAlias": "‘{typeVarName}’ 형식 인수의 차이는 ‘{typeAliasParam}’와(과) 호환되지 않습니다." + }, + "Service": { + "longOperation": "작업 영역 소스 파일을 열거하는 데는 시간이 오래 걸립니다. 대신 하위 폴더를 여는 것이 좋습니다. 
[자세히 알아보기](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.pl.json b/python-parser/packages/pyright-internal/src/localization/package.nls.pl.json new file mode 100644 index 00000000..61a55ef5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.pl.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "Utwórz typ zastępczy Stub", + "createTypeStubFor": "Utwórz typ Stub dla „{moduleName}”", + "executingCommand": "Wykonywanie polecenia", + "filesToAnalyzeCount": "Pliki do przeanalizowania: {count}", + "filesToAnalyzeOne": "1 plik do analizy", + "findingReferences": "Znajdowanie odwołań", + "organizeImports": "Organizuj dyrektywy Import" + }, + "Completion": { + "autoImportDetail": "Automatyczne importowanie", + "indexValueDetail": "Wartość indeksu" + }, + "Diagnostic": { + "abstractMethodInvocation": "Nie można wywołać metody „{method}”, ponieważ jest abstrakcyjna i niezaimplementowana", + "annotatedMetadataInconsistent": "Opisany typ metadanych „{metadataType}” nie jest zgodny z typem „{type}”", + "annotatedParamCountMismatch": "Niezgodność liczby adnotacji parametru; oczekiwano {expected}, a uzyskano {received}", + "annotatedTypeArgMissing": "Oczekiwano jednego argumentu typu i co najmniej jednej adnotacji dla wartości „Annotated”", + "annotationBytesString": "Wyrażenia typu nie mogą używać literałów ciągu bajtów", + "annotationFormatString": "Wyrażenia typu nie mogą używać literałów ciągów formatu (ciągów f)", + "annotationNotSupported": "Adnotacja typu nie jest obsługiwana dla tej instrukcji", + "annotationRawString": "Wyrażenia typu nie mogą używać nieprzetworzonych literałów ciągów", + "annotationSpansStrings": "Wyrażenia typu nie mogą obejmować wielu literałów ciągów", + "annotationStringEscape": "Wyrażenia typu nie mogą zawierać znaków ucieczki", + "annotationTemplateString": "Wyrażenia typu nie mogą używać literałów ciągu szablonu 
(t-string)", + "argAssignment": "Argumentu typu „{argType}” nie można przypisać do parametru typu „{paramType}”", + "argAssignmentFunction": "Argumentu typu „{argType}” nie można przypisać do parametru typu „{paramType}” w funkcji „{functionName}”", + "argAssignmentParam": "Argumentu typu „{argType}” nie można przypisać do parametru „{paramName}” typu „{paramType}”", + "argAssignmentParamFunction": "Argumentu typu „{argType}” nie można przypisać do parametru „{paramName}” typu „{paramType}” w funkcji „{functionName}”", + "argMissingForParam": "Brak argumentu dla parametru {name}", + "argMissingForParams": "Brak argumentów dla parametrów {names}", + "argMorePositionalExpectedCount": "Oczekiwano większej liczby argumentów pozycyjnych: {expected}", + "argMorePositionalExpectedOne": "Oczekiwano jeszcze 1 argumentu pozycyjnego", + "argPositional": "Oczekiwano argumentu pozycyjnego", + "argPositionalExpectedCount": "Oczekiwano liczby argumentów pozycyjnych: {expected}", + "argPositionalExpectedOne": "Oczekiwano 1 argumentu pozycyjnego", + "argTypePartiallyUnknown": "Typ argumentu jest częściowo nieznany", + "argTypeUnknown": "Typ argumentu jest nieznany", + "assertAlwaysTrue": "Wyrażenie Assert zawsze ma wartość true", + "assertTypeArgs": "Typ „assert_type” oczekuje dwóch argumentów pozycyjnych", + "assertTypeTypeMismatch": "Niezgodność „assert_type”; oczekiwano „{expected}”, ale otrzymano „{received}”", + "assignmentExprComprehension": "Element docelowy wyrażenia przypisania „{name}” nie może używać tej samej nazwy co zrozumienie dla elementu docelowego", + "assignmentExprContext": "Wyrażenie przypisania musi należeć do modułu, funkcji lub wyrażenia lambda", + "assignmentExprInSubscript": "Wyrażenia przypisania w indeksie dolnym są obsługiwane tylko w języku Python w wersji 3.10 i nowszej", + "assignmentInProtocol": "Zmienne wystąpienia lub klasy w klasie Protocol muszą być jawnie zadeklarowane w treści klasy", + "assignmentTargetExpr": "Wyrażenie nie może być elementem 
docelowym przypisania", + "asyncNotInAsyncFunction": "Użycie wartość „async” jest niedozwolone poza funkcją asynchroniczną", + "awaitIllegal": "Użycie „await” wymaga języka Python w wersji 3.5 lub nowszej", + "awaitNotAllowed": "Wyrażenia typu nie mogą używać instrukcji „await”", + "awaitNotInAsync": "Wartość „await” jest dozwolona tylko w ramach funkcji asynchronicznej", + "backticksIllegal": "Wyrażenia otoczone znakami wstecznymi nie są obsługiwane w języku Python w wersji 3.x; zamiast tego użyj wyrażenia repr", + "baseClassCircular": "Klasa nie może pochodzić od samej siebie", + "baseClassFinal": "Klasa bazowa „{type}” jest oznaczona jako final i nie można jej podzielić na podklasy", + "baseClassIncompatible": "Klasy bazowe typu {type} są wzajemnie niezgodne", + "baseClassInvalid": "Argument klasy musi być klasą bazową", + "baseClassMethodTypeIncompatible": "Klasy bazowe dla klasy „{classType}” definiują metodę „{name}” w niezgodny sposób", + "baseClassUnknown": "Typ klasy bazowej jest nieznany, zasłaniając typ klasy pochodnej", + "baseClassVariableTypeIncompatible": "Klasy bazowe dla klasy „{classType}” definiują zmienną „{name}” w niezgodny sposób", + "binaryOperationNotAllowed": "Operator binarny nie jest dozwolony w wyrażeniu typu", + "bindParamMissing": "Nie można powiązać metody „{methodName}”, ponieważ brakuje w niej parametru „self” lub „cls”", + "bindTypeMismatch": "Nie można powiązać metody „{methodName}”, ponieważ nie można przypisać typu „{type}” do parametru „{paramName}”", + "breakInExceptionGroup": "„break” nie jest dozwolone w bloku „except*”", + "breakOutsideLoop": "Wartość „break” może być używana tylko w pętli", + "bytesUnsupportedEscape": "Nieobsługiwana sekwencja ucieczki w literałach bytes", + "callableExtraArgs": "Oczekiwano tylko dwóch argumentów typu „Callable”", + "callableFirstArg": "Oczekiwano listy typów parametrów lub znaków „...”", + "callableNotInstantiable": "Nie można utworzyć wystąpienia typu „{type}”", + "callableSecondArg": 
"Oczekiwano zwracanego typu jako drugiego argumentu typu dla elementu „Callable”", + "casePatternIsIrrefutable": "Niepodważalny wzorzec jest dozwolony tylko dla ostatniej instrukcji dotyczącej wielkości liter", + "classAlreadySpecialized": "Typ „{type}” jest już wyspecjalizowany", + "classDecoratorTypeUnknown": "Dekorator klasy bez typu przesłania typ klasy; ignorowanie dekoratora", + "classDefinitionCycle": "Definicja klasy dla „{name}” zależy od niej samej", + "classGetItemClsParam": "Przesłonięcie __class_getitem__ powinno przyjmować parametr „cls”.", + "classMethodClsParam": "Metody klasy powinny przyjmować parametr „cls”", + "classNotRuntimeSubscriptable": "Indeks dolny dla klasy „{name}” wygeneruje wyjątek środowiska uruchomieniowego; umieścić wyrażenie typu w cudzysłowy", + "classPatternBuiltInArgPositional": "Wzorzec klasy akceptuje tylko podwzorzec pozycyjny", + "classPatternNewType": "Nie można użyć elementu „{type}” we wzorcu klasy, ponieważ jest on zdefiniowany przy użyciu elementu NewType", + "classPatternPositionalArgCount": "Zbyt wiele wzorców pozycyjnych dla klasy „{type}”; oczekiwano {expected}, ale otrzymano {received}", + "classPatternTypeAlias": "„{type}” nie może być używany we wzorcu klasy, ponieważ jest to alias typu specjalnego", + "classPropertyDeprecated": "Właściwości klasy są przestarzałe w języku Python 3.11 i nie będą obsługiwane w języku Python 3.13", + "classTypeParametersIllegal": "Składnia parametru typu klasy wymaga języka Python w wersji 3.12 lub nowszej", + "classVarFirstArgMissing": "Oczekiwano argumentu typu po wartości „ClassVar”", + "classVarNotAllowed": "Element „ClassVar” jest niedozwolony w tym kontekście", + "classVarOverridesInstanceVar": "Zmienna klasy „{name}” przesłania zmienną wystąpienia o tej samej nazwie w klasie „{className}”", + "classVarTooManyArgs": "Oczekiwano tylko jednego argumentu typu po wartości „ClassVar”", + "classVarWithTypeVar": "Typ „ClassVar” nie może zawierać zmiennych typu", + 
"clsSelfParamTypeMismatch": "Typ parametru „{name}” musi być nadtypem jego klasy „{classType}”", + "codeTooComplexToAnalyze": "Kod jest zbyt złożony, aby go analizować; zmniejsz złożoność przez refaktoryzację w podprocedury lub poprzez zmniejszenie ścieżek kodu warunkowego", + "collectionAliasInstantiation": "Nie można utworzyć wystąpienia typu „{type}”. Zamiast niego użyj „{alias}”", + "comparisonAlwaysFalse": "Warunek zawsze będzie miał wartość False, ponieważ typy „{leftType}” i „{rightType}” nie nakładają się", + "comparisonAlwaysTrue": "Warunek zawsze będzie miał wartość True, ponieważ typy „{leftType}” i „{rightType}” nie nakładają się", + "comprehensionInDict": "Zrozumienia nie można używać z innymi wpisami słownika", + "comprehensionInSet": "Nie można używać rozumienia z innymi wpisami set", + "concatenateContext": "Klasa „Concatenate” jest niedozwolona w tym kontekście", + "concatenateParamSpecMissing": "Ostatni argument typu dla elementu „Concatenate” musi mieć wartość ParamSpec lub „...”", + "concatenateTypeArgsMissing": "Element „Concatenate” wymaga co najmniej dwóch argumentów typu", + "conditionalOperandInvalid": "Nieprawidłowy warunkowy argument operacji typu „{type}”", + "constantRedefinition": "Nazwa „{name}” jest stałą (ponieważ jest pisana wielkimi literami) i nie można jej ponownie zdefiniować", + "constructorParametersMismatch": "Niezgodność między sygnaturą „__new__” i „__init__” w klasie „{classType}”", + "containmentAlwaysFalse": "Warunek zawsze będzie miał wartość False, ponieważ typy „{leftType}” i „{rightType}” nie nakładają się na siebie", + "containmentAlwaysTrue": "Warunek zawsze będzie miał wartość „True”, ponieważ typy „{leftType}” i „{rightType}” nie nakładają się na siebie", + "continueInExceptionGroup": "„continue” nie jest dozwolone w bloku „except*”", + "continueOutsideLoop": "Wartość „continue” może być używana tylko w pętli", + "coroutineInConditionalExpression": "Wyrażenie warunkowe odwołuje się do koprocedury, która zawsze 
wyznacza wartość True", + "dataClassBaseClassFrozen": "Klasa niezablokowana nie może dziedziczyć po klasie zablokowanej", + "dataClassBaseClassNotFrozen": "Zamrożona klasa nie może dziedziczyć po klasie niezamrożonej", + "dataClassConverterFunction": "Argument typu „{argType}” nie jest prawidłowym konwerterem pola „{fieldName}” typu „{fieldType}”", + "dataClassConverterOverloads": "Żadne przeciążenia „{funcName}” nie są prawidłowymi konwerterami dla pola „{fieldName}” typu „{fieldType}”", + "dataClassFieldInheritedDefault": "Pole „{fieldName}” zastępuje pole o tej samej nazwie, ale brakuje wartości domyślnej", + "dataClassFieldWithDefault": "Pola bez wartości domyślnych nie mogą występować po polach z wartościami domyślnymi", + "dataClassFieldWithPrivateName": "Pole klasy danych nie może używać nazwy prywatnej", + "dataClassFieldWithoutAnnotation": "Pole klasy danych bez adnotacji typu spowoduje wyjątek środowiska uruchomieniowego", + "dataClassPostInitParamCount": "Klasa danych __post_init__ ma niepoprawną liczbę parametrów; oczekiwana liczba pól InitVar to: {expected}", + "dataClassPostInitType": "Klasa danych __post_init__ ma niezgodność typu parametru metody dla pola „{fieldName}”", + "dataClassSlotsOverwrite": "Element __slots__ jest już zdefiniowany w klasie", + "dataClassTransformExpectedBoolLiteral": "Oczekiwano wyrażenia, które statycznie daje w wyniku wartość True lub False", + "dataClassTransformFieldSpecifier": "Oczekiwano spójnej kolekcji (tuple) klas lub funkcji, a uzyskano typ „{type}”", + "dataClassTransformPositionalParam": "Wszystkie argumenty elementu „dataclass_transform” muszą być argumentami słów kluczowych", + "dataClassTransformUnknownArgument": "Argument „{name}” nie jest obsługiwany przez dataclass_transform", + "dataProtocolInSubclassCheck": "Protokoły danych (które zawierają atrybuty niebędące atrybutami metody) są niedozwolone w wywołaniach klasy issubclass", + "declaredReturnTypePartiallyUnknown": "Zadeklarowany zwracany typ 
„{returnType}” jest częściowo nieznany", + "declaredReturnTypeUnknown": "Deklarowany zwracany typ jest nieznany", + "defaultValueContainsCall": "Wywołania funkcji i modyfikowalne obiekty są niedozwolone w wyrażeniu wartości domyślnej parametru", + "defaultValueNotAllowed": "Parametr o wartości „*” lub „**” nie może mieć wartości domyślnej", + "delTargetExpr": "Nie można usunąć wyrażenia", + "deprecatedClass": "Klasa „{name}” jest przestarzała", + "deprecatedConstructor": "Konstruktor klasy „{name}” jest przestarzały", + "deprecatedDescriptorDeleter": "Metoda „__set__” dla deskryptora „{name}” jest przestarzała", + "deprecatedDescriptorGetter": "Metoda „__set__” dla deskryptora „{name}” jest przestarzała", + "deprecatedDescriptorSetter": "Metoda „__set__” dla deskryptora „{name}” jest przestarzała", + "deprecatedFunction": "Ta funkcja „{name}” jest przestarzała", + "deprecatedMethod": "Metoda „{name}” w klasie „{className}” jest przestarzała", + "deprecatedPropertyDeleter": "deleter dla property „{name}” jest przestarzała", + "deprecatedPropertyGetter": "getter dla property „{name}” jest przestarzała", + "deprecatedPropertySetter": "setter dla property „{name}” jest przestarzała", + "deprecatedType": "Ten typ jest przestarzały dla języka Python w wersji {version}; zamiast tego użyj „{replacement}”.", + "dictExpandIllegalInComprehension": "Rozszerzanie słownika jest niedozwolone w rozumieniu", + "dictInAnnotation": "Wyrażenie słownika jest niedozwolone w wyrażeniu typu", + "dictKeyValuePairs": "Wpisy słownika muszą zawierać pary klucz/wartość", + "dictUnpackIsNotMapping": "Oczekiwano mapowania dla operatora rozpakowywania słownika", + "dunderAllSymbolNotPresent": "Nazwa „{name}” jest określona w wartości __all__, ale nie występuje w module", + "duplicateArgsParam": "Dozwolony tylko jeden parametr „*”", + "duplicateBaseClass": "Zduplikowana klasa bazowa jest niedozwolona", + "duplicateCapturePatternTarget": "Element docelowy przechwytywania „{name}” nie może 
występować więcej niż raz w obrębie tego samego wzorca", + "duplicateCatchAll": "Dozwolona jest tylko jedna klauzula typu catch-all except klauzuli", + "duplicateEnumMember": "Składowa Enum „{name}” jest już zadeklarowana", + "duplicateGenericAndProtocolBase": "Dozwolona jest tylko jedna klasa bazowa Generic[...] lub Protocol[...].", + "duplicateImport": "Nazwa „{importName}” została zaimportowana więcej niż raz", + "duplicateKeywordOnly": "Dozwolony tylko jeden separator „*”.", + "duplicateKwargsParam": "Dozwolony tylko jeden parametr „**”.", + "duplicateParam": "Duplikuj parametr „{name}”", + "duplicatePositionOnly": "Dozwolony tylko jeden parametr „/”", + "duplicateStarPattern": "W sekwencji wzorca dozwolony jest tylko jeden wzorzec „*”", + "duplicateStarStarPattern": "Dozwolony jest tylko jeden wpis „**”", + "duplicateUnpack": "Na list dozwolona jest tylko jedna operacja rozpakowywania", + "ellipsisAfterUnpacked": "Nie można używać „...” z rozpakowanym parametrem TypeVarTuple lub kolekcją tuple", + "ellipsisContext": "Wartość „...” jest niedozwolona w tym kontekście", + "ellipsisSecondArg": "Wartość „...” jest dozwolona tylko jako drugi z dwóch argumentów", + "enumClassOverride": "Klasa Enum „{name}” jest final i nie można jej podzielić na podklasy", + "enumMemberDelete": "Nie można usunąć składowej Enum \"{name}\"", + "enumMemberSet": "Nie można przypisać składowej Enum „{name}”", + "enumMemberTypeAnnotation": "Adnotacje typu nie są dozwolone dla składowych enum", + "exceptGroupMismatch": "Instrukcja Try nie może zawierać jednocześnie „except” i „except*”", + "exceptGroupRequiresType": "Składnia grupy wyjątków (\"except*\") wymaga typu wyjątku", + "exceptRequiresParens": "Wiele typów wyjątków musi być w nawiasach starszych niż Python 3.14", + "exceptWithAsRequiresParens": "Wiele typów wyjątków musi być w nawiasach podczas używania ciągu „as”", + "exceptionGroupIncompatible": "Składnia grupy wyjątków („except*”) wymaga języka Python w wersji 3.11 lub nowszej", 
"exceptionGroupTypeIncorrect": "Typ wyjątku w wyrażeniu except* nie może pochodzić z grupy BaseExceptionGroup",
nazwy funkcji po wyrażeniu „def”", + "expectedIdentifier": "Oczekiwany identyfikator", + "expectedImport": "Oczekiwano wartości „import”", + "expectedImportAlias": "Oczekiwano symbolu po parametrze „as”", + "expectedImportSymbols": "Oczekiwano jednej lub więcej nazw symboli po wyrażeniu „import”", + "expectedIn": "Oczekiwano parametru „in”", + "expectedInExpr": "Oczekiwano wyrażenia po „in”", + "expectedIndentedBlock": "Oczekiwano wciętego bloku", + "expectedMemberName": "Oczekiwano nazwy atrybutu po „.”", + "expectedModuleName": "Oczekiwana nazwa modułu", + "expectedNameAfterAs": "Oczekiwano nazwy symbolu po wartości „as”", + "expectedNamedParameter": "Parametr słowa kluczowego musi następować po znaku „*”", + "expectedNewline": "Oczekiwano nowego wiersza", + "expectedNewlineOrSemicolon": "Instrukcje muszą być oddzielone znakami nowych wierszy lub średnikami", + "expectedOpenParen": "Oczekiwano „(”", + "expectedParamName": "Oczekiwano nazwy parametru", + "expectedPatternExpr": "Oczekiwano wyrażenia wzorca", + "expectedPatternSubjectExpr": "Oczekiwano wyrażenia tematu wzorca", + "expectedPatternValue": "Oczekiwano wyrażenia wartości wzorca w postaci „a.b”", + "expectedReturnExpr": "Oczekiwano wyrażenia po „return”.", + "expectedSliceIndex": "Oczekiwano wyrażenia indeksu lub wycinka", + "expectedTypeNotString": "Oczekiwano typu, ale otrzymano literał ciągu", + "expectedTypeParameterName": "Oczekiwano nazwy parametru typu", + "expectedYieldExpr": "Oczekiwano wyrażenia w instrukcji yield", + "finalClassIsAbstract": "Klasa „{type}” jest oznaczona jako final i musi implementować wszystkie symbole abstrakcyjne", + "finalContext": "Wartość „Final” jest niedozwolona w tym kontekście", + "finalInLoop": "Nie można przypisać zmiennej „Final” w pętli", + "finalMethodOverride": "Metoda „{name}” nie może przesłonić metody final zdefiniowanej w klasie „{className}”", + "finalNonMethod": "Nie można oznaczyć funkcji „{name}” jako @final, ponieważ nie jest to metoda", + 
"finalReassigned": "Element „{name}” jest zadeklarowany jako wersja Final i nie można go ponownie przypisać", + "finalRedeclaration": "Nazwa „{name}” została wcześniej zadeklarowana jako Final", + "finalRedeclarationBySubclass": "Nie można ponownie zadeklarować nazwy „{name}”, ponieważ klasa nadrzędna „{className}” deklaruje ją jako Final", + "finalTooManyArgs": "Oczekiwano jednego argumentu typu po wartości „Final”", + "finalUnassigned": "Nazwa „{name}” jest zadeklarowana jako wartość Final, ale wartość nie jest przypisana", + "finallyBreak": "Nie można użyć elementu „break” do zakończenia bloku „finally”", + "finallyContinue": "Nie można użyć elementu „continue” do zakończenia bloku „finally”", + "finallyReturn": "Nie można użyć elementu „return” do wyjścia z bloku „finally”", + "formatStringBrace": "Pojedynczy zamykający nawias klamrowy jest niedozwolony w literale ciągu f; użyj podwójnego zamykającego nawiasu klamrowego", + "formatStringBytes": "Literały ciągów formatu (ciągi f) nie mogą być binarne", + "formatStringDebuggingIllegal": "Specyfikator debugowania ciągu f „=” wymaga wersji języka Python 3.8 lub nowszej", + "formatStringEscape": "Sekwencja ucieczki (ukośnik odwrotny) jest niedozwolona w części wyrażenia ciągu f w wersji języka wcześniejszej niż Python 3.12", + "formatStringExpectedConversion": "Oczekiwano specyfikatora konwersji po znaku „!” w ciągu f-string", + "formatStringIllegal": "Literały ciągów formatu (ciągi f) wymagają wersji języka Python 3.6 lub nowszej", + "formatStringInPattern": "Ciąg formatu jest niedozwolony we wzorcu", + "formatStringNestedFormatSpecifier": "Wyrażenia zagnieżdżone zbyt głęboko w specyfikatorze ciągu formatu", + "formatStringNestedQuote": "Ciągi zagnieżdżone w ciągu f nie mogą używać tego samego znaku cudzysłowu co ciąg f w wersji języka wcześniejszej niż Python 3.12", + "formatStringTemplate": "Literały ciągów formatu (f-string) nie mogą być również ciągami szablonu (t-string)", + "formatStringUnicode": "Literały 
ciągu formatu (f-strings) nie mogą być formatu unicode", + "formatStringUnterminated": "Niezakończone wyrażenie w ciągu f; oczekiwano znaku „}”", + "functionDecoratorTypeUnknown": "Nietypowany dekorator funkcji zasłania typ funkcji; ignorując dekoratora", + "functionInConditionalExpression": "Wyrażenie warunkowe odwołuje się do funkcji, której wynikiem zawsze jest wartość True", + "functionTypeParametersIllegal": "Składnia parametru typu klasy wymaga wersji języka Python 3.12 lub nowszej", + "futureImportLocationNotAllowed": "Importy z __future__ muszą znajdować się na początku pliku", + "generatorAsyncReturnType": "Zwracany typ funkcji generatora asynchronicznego musi być zgodny z elementem „AsyncGenerator[{yieldType}, Any]”", + "generatorNotParenthesized": "Wyrażenia generatora muszą być ujęte w nawiasy, jeśli nie są jedynym argumentem", + "generatorSyncReturnType": "Zwracany typ funkcji generatora musi być zgodny z elementem „Generator[{yieldType}, Any, Any]”", + "genericBaseClassNotAllowed": "Nie można użyć klasy bazowej „Generic” ze składnią parametru typu", + "genericClassAssigned": "Nie można przypisać ogólnego typu klasy", + "genericClassDeleted": "Nie można usunąć ogólnego typu klasy", + "genericInstanceVariableAccess": "Dostęp do ogólnej zmiennej wystąpienia za pośrednictwem klasy jest niejednoznaczny", + "genericNotAllowed": "Element „Generic” jest nieprawidłowy w tym kontekście", + "genericTypeAliasBoundTypeVar": "Alias typu ogólnego w klasie nie może używać zmiennych typu powiązanego {names}", + "genericTypeArgMissing": "Wartość „Generic” wymaga co najmniej jednego argumentu typu", + "genericTypeArgTypeVar": "Argument typu dla wartości „Generic” musi być zmienną typu", + "genericTypeArgUnique": "Argumenty typu dla elementu „Generic” muszą być unikatowe", + "globalReassignment": "Nazwa „{name}” jest przypisywana przed deklaracją globalną", + "globalRedefinition": "Nazwa „{name}” została już zadeklarowana jako globalna", + "implicitStringConcat": 
"Niejawne łączenie ciągów jest niedozwolone", + "importCycleDetected": "Wykryto cykl w łańcuchu importu", + "importDepthExceeded": "Głębokość łańcucha importu przekroczyła {depth}", + "importResolveFailure": "Nie można rozpoznać importu „{importName}”.", + "importSourceResolveFailure": "Nie można rozpoznać importu „{importName}” ze źródła", + "importSymbolUnknown": "Nazwa „{name}” jest nieznanym symbolem importu", + "incompatibleMethodOverride": "Metoda „{name}” przesłania klasę „{className}” w niezgodny sposób", + "inconsistentIndent": "Wartość zmniejszenia wcięcia jest niezgodna z poprzednim wcięciem", + "inconsistentTabs": "Niespójne użycie tabulatorów i spacji we wcięciach", + "initMethodSelfParamTypeVar": "Adnotacja typu dla parametru „self” metody „__init__” nie może zawierać zmiennych typu o zakresie klasy", + "initMustReturnNone": "Zwracany typ „__init__” musi mieć wartość None", + "initSubclassCallFailed": "Nieprawidłowe argumenty słów kluczowych dla metody __init_subclass__", + "initSubclassClsParam": "Przesłonięcie __init_subclass__ powinno przyjmować parametr „cls”.", + "initVarNotAllowed": "Element „InitVar” jest niedozwolony w tym kontekście", + "instanceMethodSelfParam": "Metody wystąpienia powinny przyjmować parametr „self”", + "instanceVarOverridesClassVar": "Zmienna wystąpienia „{name}” zastępuje zmienną klasy o tej samej nazwie w klasie „{className}”", + "instantiateAbstract": "Nie można utworzyć wystąpienia klasy abstrakcyjnej „{type}”", + "instantiateProtocol": "Nie można utworzyć wystąpienia klasy Protocol typu „{type}”", + "internalBindError": "Wystąpił błąd wewnętrzny podczas wiązania pliku „{file}”: {message}", + "internalParseError": "Wystąpił błąd wewnętrzny podczas analizowania pliku „{file}”: {message}", + "internalTypeCheckingError": "Wystąpił błąd wewnętrzny podczas sprawdzania typu pliku „{file}”: {message}", + "invalidIdentifierChar": "Nieprawidłowy znak w identyfikatorze", + "invalidStubStatement": "Instrukcja nie ma znaczenia w 
pliku stub typu", + "invalidTokenChars": "Nieprawidłowy znak „{text}” w tokenie", + "isInstanceInvalidType": "Drugi argument instrukcji „isinstance” musi być klasą lub tuple", + "isSubclassInvalidType": "Drugi argument „issubclass” musi być klasą lub tuple", + "keyValueInSet": "Pary klucz/wartość nie są dozwolone w set", + "keywordArgInTypeArgument": "Argumentów słów kluczowych nie można używać na listach argumentów typu", + "keywordOnlyAfterArgs": "Separator argumentów tylko ze słowami kluczowymi jest niedozwolony po parametrze „*”", + "keywordParameterMissing": "Co najmniej jeden parametr słowa kluczowego musi występować po parametrze „*”", + "keywordSubscriptIllegal": "Argumenty słów kluczowych w indeksach podrzędnych nie są obsługiwane", + "lambdaReturnTypePartiallyUnknown": "Zwracany typ wyrażenia lambda „{returnType}” jest częściowo nieznany", + "lambdaReturnTypeUnknown": "Zwracany typ wyrażenia lambda jest nieznany", + "listAssignmentMismatch": "Wyrażenia typu „{type}” nie można przypisać do listy docelowej", + "listInAnnotation": "Wyrażenie List jest niedozwolone w wyrażeniu typu", + "literalEmptyArgs": "Oczekiwano co najmniej jednego argumentu typu po wartości „Literal”", + "literalNamedUnicodeEscape": "Nazwane sekwencje ucieczki Unicode nie są obsługiwane w adnotacjach ciągów „Literal”", + "literalNotAllowed": "Klasa „Literal” nie może być używana w tym kontekście bez argumentu typu", + "literalNotCallable": "Nie można utworzyć wystąpienia typu Literal", + "literalUnsupportedType": "Argumenty typu dla elementu „Literal” muszą mieć wartość None, wartość literału (int, bool, str lub bytes) lub wartość enum", + "matchIncompatible": "Instrukcje Match wymagają języka Python w wersji 3.10 lub nowszej", + "matchIsNotExhaustive": "Przypadki w instrukcji match nie obsługują wyczerpująco wszystkich wartości", + "maxParseDepthExceeded": "Przekroczono maksymalną głębokość analizy; podziel wyrażenie na mniejsze wyrażenia podrzędne", + "memberAccess": "Nie można 
uzyskać dostępu do atrybutu „{name}” dla klasy „{type}”", + "memberDelete": "Nie można usunąć atrybutu „{name}” dla klasy „{type}”", + "memberSet": "Nie można przypisać atrybutu „{name}” dla klasy „{type}”", + "metaclassConflict": "Metaklasa klasy pochodnej musi być podklasą metaklas wszystkich jej klas bazowych", + "metaclassDuplicate": "Można podać tylko jedną metaklasę", + "metaclassIsGeneric": "Metaklasa nie może być ogólna", + "methodNotDefined": "Nie zdefiniowano metody „{name}”.", + "methodNotDefinedOnType": "Metoda „{name}” nie została zdefiniowana dla typu „{type}”", + "methodOrdering": "Nie można utworzyć spójnej kolejności metod", + "methodOverridden": "„{name}” przesłania metodę o tej samej nazwie w klasie „{className}” o niezgodnym typie „{type}”", + "methodReturnsNonObject": "Metoda „{name}” nie zwraca obiektu", + "missingSuperCall": "Metoda „{methodName}” nie wywołuje metody o tej samej nazwie w klasie nadrzędnej", + "mixingBytesAndStr": "Nie można łączyć wartości bytes i str", + "moduleAsType": "Nie można użyć modułu jako typu", + "moduleNotCallable": "Moduł nie jest wywoływalny", + "moduleUnknownMember": "„{memberName}” nie jest znanym atrybutem modułu „{moduleName}”", + "namedExceptAfterCatchAll": "Nazwana klauzula „except” nie może występować po klauzuli „catch-all except”", + "namedParamAfterParamSpecArgs": "Parametr słowa kluczowego „{name}” nie może występować w sygnaturze po parametrze ParamSpec args", + "namedTupleEmptyName": "Nazwy w ramach nazwanej kolekcji tuple nie mogą być puste", + "namedTupleEntryRedeclared": "Nie można nadpisać nazwy „{name}”, ponieważ klasa nadrzędna „{className}” jest nazwaną kolekcją tuple", + "namedTupleFieldUnderscore": "Nazwy pola Named tuple nie mogą rozpoczynać się od podkreślenia", + "namedTupleFirstArg": "Oczekiwano nazwanej nazwy klasy tuple jako pierwszego argumentu", + "namedTupleMultipleInheritance": "Wielokrotne dziedziczenie z kotki NamedTuple nie jest obsługiwane", + "namedTupleNameKeyword": "Nazwy 
pól nie mogą być słowem kluczowym", + "namedTupleNameType": "Oczekiwano tuple z dwoma wpisami określającej nazwę i typ wpisu", + "namedTupleNameUnique": "Nazwy w nazwanej tuple muszą być unikatowe", + "namedTupleNoTypes": "Krotka „namedtuple” nie zapewnia typów wpisów tuple; zamiast tego użyj „NamedTuple”.", + "namedTupleSecondArg": "Oczekiwano nazwanej listy wpisów kolekcji tuple jako drugiego argumentu", + "newClsParam": "Przesłonięcie __new__ powinno przyjmować parametr „cls”.", + "newTypeAnyOrUnknown": "Drugi argument parametru NewType musi być znaną klasą, a nie wartością Any lub Unknown", + "newTypeBadName": "Pierwszy argument elementu NewType musi być literałem ciągu", + "newTypeLiteral": "Typ NewType nie może być używany z typem Literal", + "newTypeNameMismatch": "Element NewType musi być przypisany do zmiennej o tej samej nazwie", + "newTypeNotAClass": "Oczekiwano klasy jako drugiego argumentu dla elementu NewType", + "newTypeParamCount": "Typ NewType wymaga dwóch argumentów pozycyjnych", + "newTypeProtocolClass": "Elementu NewType nie można używać z typem strukturalnym (klasy Protocol lub TypedDict)", + "noOverload": "Żadne przeciążenia dla nazwy „{name}” nie pasują do podanych argumentów", + "noReturnContainsReturn": "Funkcja z zadeklarowanym return typem „NoReturn” nie może zawierać instrukcji return", + "noReturnContainsYield": "Funkcja z zadeklarowanym zwracanym typem „NoReturn” nie może zawierać instrukcji yield", + "noReturnReturnsNone": "Funkcja z zadeklarowanym typem zwracanym „NoReturn” nie może zwracać wartości „None”", + "nonDefaultAfterDefault": "Argument inny niż domyślny następuje po argumencie domyślnym", + "nonLocalInModule": "Deklaracja nonlocal nie jest dozwolona na poziomie modułu", + "nonLocalNoBinding": "Nie znaleziono powiązania dla nonlocal „{name}”.", + "nonLocalReassignment": "Nazwa „{name}” jest przypisywana przed deklaracją nonlocal", + "nonLocalRedefinition": "Nazwa „{name}” została już zadeklarowana jako nonlocal", + 
"noneNotCallable": "Nie można wywołać obiektu typu „None”", + "noneNotIterable": "Obiekt typu „None” nie może być używany jako wartość iterowalna", + "noneNotSubscriptable": "Obiekt typu „None” nie może być użyty w indeksie dolnym", + "noneNotUsableWith": "Object of type \"None\" cannot be used with \"with\"", + "noneNotUsableWithAsync": "Obiekt typu „None” nie może być używany z parametrem „async with”", + "noneOperator": "Operator „{operator}” nie jest obsługiwany dla wartości „None”", + "noneUnknownMember": "Nazwa „{name}” nie jest znanym atrybutem „None”", + "nonlocalTypeParam": "Powiązanie nonlocal nie jest dozwolone dla parametru typu „{name}”", + "notRequiredArgCount": "Oczekiwano jednego argumentu typu po wartości „NotRequired”", + "notRequiredNotInTypedDict": "Element „NotRequired” jest niedozwolony w tym kontekście", + "objectNotCallable": "Obiekt typu „{type}” nie jest wywoływalny", + "obscuredClassDeclaration": "Deklaracja klasy „{name}” jest zasłonięta przez deklarację o tej samej nazwie", + "obscuredFunctionDeclaration": "Deklaracja funkcji „{name}” jest zasłonięta przez deklarację o tej samej nazwie", + "obscuredMethodDeclaration": "Deklaracja metody „{name}” jest zasłonięta przez deklarację o tej samej nazwie", + "obscuredParameterDeclaration": "Deklaracja parametru „{name}” jest zasłonięta przez deklarację o tej samej nazwie", + "obscuredTypeAliasDeclaration": "Deklaracja aliasu typu „{name}” jest zasłonięta przez deklarację o tej samej nazwie", + "obscuredVariableDeclaration": "Deklaracja „{name}” jest zasłonięta przez deklarację o tej samej nazwie", + "operatorLessOrGreaterDeprecated": "Operator „<>” nie jest obsługiwany w języku Python w wersji 3; zamiast tego użyj „!=”.", + "optionalExtraArgs": "Oczekiwano jednego argumentu typu po parametrze „Optional”", + "orPatternIrrefutable": "Niepodważalny wzorzec jest dozwolony tylko jako ostatni podwzorzec we wzorcu „or”", + "orPatternMissingName": "Wszystkie wzorce podrzędne we wzorcu „or” muszą 
dotyczyć tych samych nazw", + "overlappingKeywordArgs": "Wpisany słownik nakłada się na parametr słowa kluczowego: {names}", + "overlappingOverload": "Przeciążenie {obscured} dla nazwy „{name}” nigdy nie zostanie użyte, ponieważ jego parametry nakładają się na przeciążenie {obscuredBy}", + "overloadAbstractImplMismatch": "Przeciążenia muszą być zgodne ze stanem abstrakcyjnym implementacji", + "overloadAbstractMismatch": "Przeciążenia muszą być abstrakcyjne lub nieabstrakcyjne", + "overloadClassMethodInconsistent": "Przeciążenia dla nazwy „{name}” używają metody @classmethod niekonsekwentnie", + "overloadFinalImpl": "@final dekorator powinien być stosowany tylko do implementacji", + "overloadFinalNoImpl": "Tylko pierwsze przeciążenie powinno być oznaczone @final", + "overloadImplementationMismatch": "Przeciążone wdrożenie jest niespójne z sygnaturą przeciążenia {index}", + "overloadOverrideImpl": "@override dekorator powinien być stosowany tylko do implementacji", + "overloadOverrideNoImpl": "Tylko pierwsze przeciążenie powinno być oznaczone @override", + "overloadReturnTypeMismatch": "Przeciążenie {prevIndex} dla nazwy „{name}” nakłada się na przeciążenie {newIndex} i zwraca niezgodny typ", + "overloadStaticMethodInconsistent": "Przeciążenia dla nazwy „{name}” używają metody @staticmethod niekonsekwentnie", + "overloadWithoutImplementation": "Nazwa „{name}” jest oznaczona jako overload, ale nie zapewniono implementacji", + "overriddenMethodNotFound": "Metoda „{name}” jest oznaczona jako override, ale nie istnieje metoda bazowa o tej samej nazwie", + "overrideDecoratorMissing": "Metoda „{name}” nie jest oznaczona jako override, ale zastępuje metodę w klasie „{className}”", + "paramAfterKwargsParam": "Parametr nie może następować po parametrze „**”", + "paramAlreadyAssigned": "Parametr „{name}” jest już przypisany", + "paramAnnotationMissing": "Brak adnotacji typu dla parametru „{name}”", + "paramAssignmentMismatch": "Wyrażenia typu „{sourceType}” nie można przypisać 
do parametru typu „{paramType}”", + "paramNameMissing": "Brak parametru o nazwie „{name}”", + "paramSpecArgsKwargsDuplicate": "Argumenty parametru ParamSpec „{type}” zostały już podane", + "paramSpecArgsKwargsUsage": "Atrybuty „args” i „kwargs” specyfikacji ParamSpec muszą znajdować się w sygnaturze funkcji", + "paramSpecArgsMissing": "Brak argumentów dla parametru ParamSpec „{type}”.", + "paramSpecArgsUsage": "Atrybut „args” parametru ParamSpec jest ważny tylko wtedy, gdy jest używany z parametrem *args", + "paramSpecAssignedName": "Parametr ParamSpec musi być przypisany do zmiennej o nazwie „{name}”", + "paramSpecContext": "Wartość ParamSpec jest niedozwolona w tym kontekście", + "paramSpecDefaultNotTuple": "Oczekiwano wielokropka, wyrażenia kolekcji tuple lub parametru ParamSpec dla wartości domyślnej ParamSpec", + "paramSpecFirstArg": "Oczekiwano nazwy parametru ParamSpec jako pierwszego argumentu", + "paramSpecKwargsUsage": "Atrybut „kwargs” parametru ParamSpec jest ważny tylko wtedy, gdy jest używany z parametrem **kwargs", + "paramSpecNotUsedByOuterScope": "Element ParamSpec „{name}” nie ma znaczenia w tym kontekście", + "paramSpecUnknownArg": "Parametr ParamSpec nie obsługuje więcej niż jednego argumentu", + "paramSpecUnknownMember": "„{name}” nie jest znanym atrybutem parametru ParamSpec", + "paramSpecUnknownParam": "„{name}” jest nieznanym parametrem dla parametru ParamSpec", + "paramTypeCovariant": "Zmienna typu kowariantnego nie może być używana w typie parametru", + "paramTypePartiallyUnknown": "Typ parametru „{paramName}” jest częściowo nieznany", + "paramTypeUnknown": "Typ parametru „{paramName}” jest nieznany", + "parenthesizedContextManagerIllegal": "Parentheses within \"with\" statement requires Python 3.9 or newer", + "patternNeverMatches": "Wzorzec nigdy nie zostanie dopasowany do typu podmiotu „{type}”", + "positionArgAfterNamedArg": "Argument pozycyjny nie może występować po argumentach słów kluczowych", + "positionArgAfterUnpackedDictArg": 
"Argument pozycyjny nie może występować po rozpakowaniu argumentu słowa kluczowego", + "positionOnlyAfterArgs": "Separator parametru tylko do pozycjonowania jest niedozwolony po parametrze „*”", + "positionOnlyAfterKeywordOnly": "Parametr „/” musi występować przed parametrem „*”.", + "positionOnlyAfterNon": "Parametr tylko do pozycjonowania jest niedozwolony po parametrze, który nie jest tylko do pozycjonowania", + "positionOnlyFirstParam": "Separator parametrów tylko do pozycjonowania nie jest dozwolony jako pierwszy parametr", + "positionOnlyIncompatible": "Separator parametrów tylko do pozycjonowania wymaga języka Python 3.8 lub nowszego", + "privateImportFromPyTypedModule": "Nazwa „{name}” nie jest eksportowana z modułu „{module}”", + "privateUsedOutsideOfClass": "Nazwa „{name}” jest prywatna i używana poza klasą, w której została zadeklarowana", + "privateUsedOutsideOfModule": "Nazwa „{name}” jest prywatna i używana poza modułem, w którym została zadeklarowana", + "propertyOverridden": "Nazwa „{name}” nieprawidłowo zastępuje property o tej samej nazwie w klasie „{className}”", + "propertyStaticMethod": "Metody statyczne nie są dozwolone w przypadku getter, setter lub deleter property", + "protectedUsedOutsideOfClass": "Nazwa „{name}” jest chroniona i używana poza klasą, w której została zadeklarowana", + "protocolBaseClass": "Klasa Protocol typu „{classType}” nie może pochodzić od klasy niebędącej klasą Protocol typu „{baseType}”", + "protocolBaseClassWithTypeArgs": "Argumenty typu są niedozwolone z klasą Protocol, gdy jest używana składnia parametru typu", + "protocolIllegal": "Użycie elementu „Protocol” wymaga języka Python w wersji 3.7 lub nowszej", + "protocolNotAllowed": "Klasa „Protocol” nie może być używana w tym kontekście", + "protocolTypeArgMustBeTypeParam": "Argument typu dla elementu „Protocol” musi być parametrem typu", + "protocolUnsafeOverlap": "Klasa nakłada się niebezpiecznie na element „{name}” i może utworzyć dopasowanie w czasie 
wykonywania", + "protocolVarianceContravariant": "Zmienna typu „{variable}” używana w klasie ogólnej Protocol „{class}” powinna być kontrawariantna", + "protocolVarianceCovariant": "Zmienna typu „{variable}” używana w klasie ogólnej Protocol „{class}” powinna być kowariantna", + "protocolVarianceInvariant": "Zmienna typu „{variable}” używana w klasie ogólnego Protocol „{class}” powinna być niezmienna", + "pyrightCommentInvalidDiagnosticBoolValue": "Po dyrektywie komentarza Pyright musi następować znak „=” oraz wartość true lub false", + "pyrightCommentInvalidDiagnosticSeverityValue": "Po dyrektywie komentarza Pyright musi następować znak „=” oraz wartość true, false, error, warning, information lub none", + "pyrightCommentMissingDirective": "Po komentarzu Pyright musi następować dyrektywa (basic lub strict) lub reguła diagnostyczna", + "pyrightCommentNotOnOwnLine": "Komentarze Pyright używane do kontrolowania ustawień na poziomie plików muszą pojawiać się w oddzielnych wierszach", + "pyrightCommentUnknownDiagnosticRule": "Reguła „{rule}” jest nieznaną regułą diagnostyczną dla komentarza pyright", + "pyrightCommentUnknownDiagnosticSeverityValue": "Wartość „{value}” jest nieprawidłowa dla komentarza pyright; oczekiwano wartości: true, false, error, warning, information lub none", + "pyrightCommentUnknownDirective": "„{directive}” jest nieznaną dyrektywą dla komentarza pyright; oczekiwano „strict”, „standard” lub „basic”", + "readOnlyArgCount": "Oczekiwano jednego argumentu typu po wartości „ReadOnly”", + "readOnlyNotInTypedDict": "Element „ReadOnly” jest niedozwolony w tym kontekście", + "recursiveDefinition": "Nie można określić typu „{name}”, ponieważ odwołuje się on do samego siebie", + "relativeImportNotAllowed": "Importy względne nie mogą być używane z formularzem „import .a”; zamiast tego użyj „from . 
import a”.", + "requiredArgCount": "Oczekiwano jednego argumentu typu po wartości „Required”", + "requiredNotInTypedDict": "Element „Required” jest niedozwolony w tym kontekście", + "returnInAsyncGenerator": "Instrukcja „return” z wartością jest niedozwolona w generatorze asynchronicznym", + "returnInExceptionGroup": "„return” nie jest dozwolone w bloku „except*”", + "returnMissing": "Funkcja z zadeklarowanym typem zwracanym „{returnType}” musi zwracać wartość we wszystkich ścieżkach kodu", + "returnOutsideFunction": "Instrukcja „return” może być używana tylko w ramach funkcji", + "returnTypeContravariant": "Kontrawariantna zmienna typu nie może być używana w zwracanym typie", + "returnTypeMismatch": "Nie można przypisać typu „{exprType}” do zwracanego typu „{returnType}”", + "returnTypePartiallyUnknown": "Zwracany typ „{returnType}” jest częściowo nieznany", + "returnTypeUnknown": "Zwracany typ jest nieznany", + "revealLocalsArgs": "Oczekiwano braku argumentów dla wywołania „reveal_locals”", + "revealLocalsNone": "Brak locals w tym zakresie", + "revealTypeArgs": "Oczekiwano pojedynczego argumentu pozycyjnego dla wywołania „reveal_type”", + "revealTypeExpectedTextArg": "Argument „expected_text” dla funkcji „reveal_type” musi być wartością literału str", + "revealTypeExpectedTextMismatch": "Niezgodność tekstu typu; oczekiwano „{expected}”, ale otrzymano „{received}”", + "revealTypeExpectedTypeMismatch": "Niezgodność typu; oczekiwano wartości „{expected}”, a uzyskano „{received}”", + "selfTypeContext": "Wartość „Self” jest nieprawidłowa w tym kontekście", + "selfTypeMetaclass": "Nie można użyć elementu „Self” w ramach metaklasy (podklasy elementu „type”)", + "selfTypeWithTypedSelfOrCls": "Nie można użyć wartości „Self” w funkcji z parametrem „self” lub „cls”, która ma adnotację typu inną niż „Self”", + "sentinelBadName": "Pierwszy argument usługi Sentinel musi być literałem ciągu", + "sentinelNameMismatch": "Usługa Sentinel musi być przypisana do zmiennej o tej 
samej nazwie", + "sentinelParamCount": "Usługa Sentinel wymaga jednego argumentu pozycyjnego", + "setterGetterTypeMismatch": "Typu wartości setter property nie można przypisać do zwracanego typu getter", + "singleOverload": "Nazwa „{name}” jest oznaczona jako przeciążona, ale brakuje dodatkowych przeciążeń", + "slotsAttributeError": "Nie określono atrybutu „{name}” w elemencie __slots__", + "slotsClassVarConflict": "„{name}” powoduje konflikt ze zmienną wystąpienia zadeklarowaną w elemencie „__slots__”", + "starPatternInAsPattern": "Wzór gwiazdy nie może być użyty z miejscem docelowym parametru „as”.", + "starPatternInOrPattern": "Wzór gwiazdy nie może mieć wartości ORed w ramach innych wzorów", + "starStarWildcardNotAllowed": "Symbolu ** nie można używać z symbolem wieloznacznym „_”", + "staticClsSelfParam": "Metody statyczne nie powinny przyjmować parametru „self” ani „cls”.", + "stringNonAsciiBytes": "Znak inny niż ASCII jest niedozwolony w literale ciągu bajtów", + "stringNotSubscriptable": "Wyrażenie ciągu nie może być indeksowane w wyrażeniu typu; ujmij całe wyrażenie w cudzysłowy", + "stringUnsupportedEscape": "Nieobsługiwana sekwencja ucieczki w literale ciągu", + "stringUnterminated": "Literał ciągu jest niezakończony", + "stubFileMissing": "Nie znaleziono pliku stub dla nazwy „{importName}”", + "stubUsesGetAttr": "Plik stub typu jest niekompletny; element „__getattr__” przesłania błędy w przypadku modułu", + "sublistParamsIncompatible": "Parametry sublisty nie są obsługiwane w wersji języka Python 3.x", + "superCallArgCount": "Oczekiwano nie więcej niż dwóch argumentów wywołania „super”", + "superCallFirstArg": "Oczekiwano typu klasy jako pierwszego argumentu wywołania „super”, ale otrzymano „{type}”", + "superCallSecondArg": "Drugi argument wywołania „super” musi być obiektem lub klasą wywodzącą się z typu „{type}”", + "superCallZeroArgForm": "Forma bez argumentów wywołania „super” jest prawidłowa tylko w ramach metody", + 
"superCallZeroArgFormStaticMethod": "Forma z zerowym argumentem wywołania „super” jest nieprawidłowa w metodzie statycznej", + "symbolIsPossiblyUnbound": "Nazwa „{name}” jest prawdopodobnie niepowiązana", + "symbolIsUnbound": "Nazwa „{name}” jest niepowiązana", + "symbolIsUndefined": "Nazwa „{name}” nie jest zdefiniowana", + "symbolOverridden": "Nazwa „{name}” przesłania symbol o tej samej nazwie w klasie „{className}”", + "templateStringBytes": "Literały ciągu szablonu (t-string) nie mogą być binarne", + "templateStringIllegal": "Literały ciągu szablonu (t-string) wymagają języka Python 3.14 lub nowszego", + "templateStringUnicode": "Literały ciągu szablonu (t-string) nie mogą być w formacie unicode", + "ternaryNotAllowed": "Wyrażenie trójskładnikowe nie jest dozwolone w wyrażeniu typu", + "totalOrderingMissingMethod": "Klasa musi definiować jedną z następujących wartości: „__lt__”, „__le__”, „__gt__” lub „__ge__”, aby użyć parametru total_ordering", + "trailingCommaInFromImport": "Końcowy przecinek nie jest dozwolony bez otaczających nawiasów", + "tryWithoutExcept": "Instrukcja „Try” musi mieć co najmniej jedną klauzulę „except” lub „finally”", + "tupleAssignmentMismatch": "Nie można przypisywać wyrażenia w ramach typu „{type}” do docelowej kolekcji tuple", + "tupleInAnnotation": "Wyrażenie kolekcji tuple jest niedozwolone w wyrażeniu typu", + "tupleIndexOutOfRange": "Indeks {index} jest poza zakresem dla typu {type}", + "typeAliasIllegalExpressionForm": "Nieprawidłowy formularz wyrażenia dla definicji aliasu typu", + "typeAliasIsRecursiveDirect": "Alias typu „{name}” nie może używać samego siebie w swojej definicji", + "typeAliasNotInModuleOrClass": "Typ TypeAlias można zdefiniować tylko w zakresie modułu lub klasy", + "typeAliasRedeclared": "Nazwa „{name}” jest zadeklarowana jako TypeAlias i może być przypisana tylko raz", + "typeAliasStatementBadScope": "Instrukcja type może być użyta tylko w zakresie modułu lub klasy", + "typeAliasStatementIllegal": 
"Instrukcja typu alias wymaga języka Python w wersji 3.12 lub nowszej", + "typeAliasTypeBadScope": "Alias typu można zdefiniować tylko w zakresie modułu lub klasy", + "typeAliasTypeBaseClass": "A type alias defined in a \"type\" statement cannot be used as a base class", + "typeAliasTypeMustBeAssigned": "Typ TypeAliasType musi być przypisany do zmiennej o takiej samej nazwie jak alias typu", + "typeAliasTypeNameArg": "Pierwszy argument dla typu TypeAliasType musi być literałem ciągu reprezentującym nazwę aliasu typu", + "typeAliasTypeNameMismatch": "Nazwa aliasu typu musi być zgodna z nazwą zmiennej, do której jest przypisana", + "typeAliasTypeParamInvalid": "Lista parametrów typu musi być kolekcją tuple zawierającą tylko parametry TypeVar, TypeVarTuple lub ParamSpec", + "typeAnnotationCall": "Wyrażenie wywołania jest niedozwolone w wyrażeniu typu", + "typeAnnotationVariable": "Zmienna niedozwolona w wyrażeniu typu", + "typeAnnotationWithCallable": "Argument typu „type” musi być klasą; elementy wywoływane nie są obsługiwane", + "typeArgListExpected": "Oczekiwano parametru ParamSpec, wielokropka lub listy typów", + "typeArgListNotAllowed": "Wyrażenie listy jest niedozwolone dla tego argumentu typu", + "typeArgsExpectingNone": "Oczekiwano braku argumentów typu dla klasy „{name}”", + "typeArgsMismatchOne": "Oczekiwano jednego argumentu typu, ale otrzymano {received}", + "typeArgsMissingForAlias": "Oczekiwano argumentów typu dla aliasu typu ogólnego „{name}”", + "typeArgsMissingForClass": "Oczekiwano argumentów typu dla ogólnej klasy „{name}”", + "typeArgsTooFew": "Podano zbyt mało argumentów typu dla „{name}”; oczekiwano wartości {expected}, ale otrzymano {received}", + "typeArgsTooMany": "Podano zbyt wiele argumentów typu dla nazwy „{name}”; oczekiwano {expected}, a uzyskano {received}", + "typeAssignmentMismatch": "Nie można przypisać typu „{sourceType}” do zadeklarowanego typu „{destType}”", + "typeAssignmentMismatchWildcard": "Symbol importu „{name}” ma typ 
„{sourceType}”, którego nie można przypisać do zadeklarowanego typu „{destType}”", + "typeCallNotAllowed": "wywołanie type() nie powinno być używane w wyrażeniu typu", + "typeCheckOnly": "Nazwa „{name}” jest oznaczona jako @type_check_only i może być używana tylko w adnotacjach typu", + "typeCommentDeprecated": "Use of type comments is deprecated; use type annotation instead", + "typeExpectedClass": "Oczekiwano klasy, ale odebrano typ „{type}”", + "typeFormArgs": "„TypeForm” akceptuje pojedynczy argument pozycyjny", + "typeGuardArgCount": "Oczekiwano pojedynczego argumentu typu po parametrze „TypeGuard” lub „TypeIs”", + "typeGuardParamCount": "Funkcje i metody zabezpieczające typu zdefiniowane przez użytkownika muszą mieć co najmniej jeden parametr wejściowy", + "typeIsReturnType": "Zwracany typ TypeIs („{returnType}”) jest niezgodny z typem parametru wartości („{type}”)", + "typeNotAwaitable": "\"{type}\" is not awaitable", + "typeNotIntantiable": "Nie można utworzyć wystąpienia „{type}”", + "typeNotIterable": "Typ „{type}” nie jest iterowalny", + "typeNotSpecializable": "Nie można specjalizować typu „{type}”", + "typeNotSubscriptable": "Obiekt typu „{type}” nie może być użyty w indeksie dolnym", + "typeNotSupportBinaryOperator": "Operator „{operator}” nieobsługiwany dla typów „{leftType}” i „{rightType}”", + "typeNotSupportBinaryOperatorBidirectional": "Operator „{operator}” nie jest obsługiwany dla typów „{leftType}” i „{rightType}”, gdy oczekiwanym typem jest „{expectedType}”", + "typeNotSupportUnaryOperator": "Operator „{operator}” nie jest obsługiwany dla typu „{type}”", + "typeNotSupportUnaryOperatorBidirectional": "Operator „{operator}” nie jest obsługiwany dla typu „{type}”, gdy oczekiwanym typem jest „{expectedType}”", + "typeNotUsableWith": "Obiekt typu „{type}” nie może być używany z parametrem „with”, ponieważ nie wdraża poprawnie metody {method}", + "typeNotUsableWithAsync": "Obiekt typu „{type}” nie może być używany z parametrem „with”, ponieważ nie 
wdraża on poprawnie metody {method}", + "typeParameterBoundNotAllowed": "Powiązanie lub ograniczenie nie może być używane z parametrem typu ze zmienną liczbą argumentów lub parametrem ParamSpec", + "typeParameterConstraintTuple": "Ograniczenie parametru typu musi być krotką dwóch lub więcej typów", + "typeParameterExistingTypeParameter": "Parametr typu „{name}” jest już używany", + "typeParameterNotDeclared": "Parametr typu „{name}” nie znajduje się na liście parametrów typu dla „{container}”", + "typeParametersMissing": "Należy określić co najmniej jeden parametr typu", + "typePartiallyUnknown": "Typ „{name}” jest częściowo nieznany", + "typeUnknown": "Typ „{name}” jest nieznany", + "typeVarAssignedName": "Typ TypeVar musi być przypisany do zmiennej o nazwie „{name}”", + "typeVarAssignmentMismatch": "Nie można przypisać typu „{type}” do zmiennej typu „{name}”", + "typeVarBoundAndConstrained": "Typ TypeVar nie może być jednocześnie powiązany i ograniczony", + "typeVarBoundGeneric": "Typ powiązany TypeVar nie może być ogólny", + "typeVarConstraintGeneric": "Typ ograniczenia TypeVar nie może być ogólny", + "typeVarDefaultBoundMismatch": "Domyślny typ TypeVar musi być podtypem powiązanego typu", + "typeVarDefaultConstraintMismatch": "Typ domyślny TypeVar musi być jednym z typów z ograniczeniami", + "typeVarDefaultIllegal": "Typy domyślne zmiennych typów wymagają wersji języka Python 3.13 lub nowszej", + "typeVarDefaultInvalidTypeVar": "Parametr typu „{name}” ma typ domyślny, który odnosi się do co najmniej jednej zmiennej typu, która jest poza zakresem", + "typeVarFirstArg": "Oczekiwano nazwy TypeVar jako pierwszego argumentu", + "typeVarInvalidForMemberVariable": "Typ atrybutu nie może używać zmiennej typu „{name}” w zakresie metody lokalnej", + "typeVarNoMember": "Typ atrybutu TypeVar „{type}” nie ma atrybutu „{name}”", + "typeVarNotSubscriptable": "Typ TypeVar „{type}” nie może być subskrybowany", + "typeVarNotUsedByOuterScope": "Zmienna typu „{name}” nie ma w tym 
kontekście żadnego znaczenia", + "typeVarPossiblyUnsolvable": "Zmienna typu „{name}” może zostać nierozwiązana, jeśli obiekt wywołujący nie poda argumentu dla parametru „{param}”", + "typeVarSingleConstraint": "Typ TypeVar musi mieć co najmniej dwa typy ograniczone", + "typeVarTupleConstraints": "Element TypeVarTuple nie może mieć ograniczeń wartości", + "typeVarTupleContext": "Wartość TypeVarTuple jest niedozwolona w tym kontekście", + "typeVarTupleDefaultNotUnpacked": "Typ domyślny TypeVarTuple musi być nierozpakowaną kolekcją tuple lub parametrem TypeVarTuple", + "typeVarTupleMustBeUnpacked": "Operator rozpakowywania jest wymagany dla wartości parametru TypeVarTuple", + "typeVarTupleUnknownParam": "Nazwa „{name}” jest nieznanym parametrem typu TypeVarTuple", + "typeVarUnknownParam": "„{name}” jest nieznanym parametrem dla argumentu TypeVar", + "typeVarUsedByOuterScope": "Argument TypeVar „{name}” jest już używany przez zakres zewnętrzny", + "typeVarUsedOnlyOnce": "Typ TypeVar „{name}” pojawia się tylko raz w sygnaturze funkcji ogólnej", + "typeVarVariance": "Zmienna typu TypeVar nie może być jednocześnie kowariantna i kontrawariantna", + "typeVarWithDefaultFollowsVariadic": "Wartość TypeVar „{typeVarName}” ma wartość domyślną i nie może następować po wartości TypeVarTuple „{variadicName}”.", + "typeVarWithoutDefault": "Element „{name}” nie może występować po elemencie „{other}” na liście parametrów typu, ponieważ nie ma typu domyślnego", + "typeVarsNotInGenericOrProtocol": "Elementy Generic[] lub Protocol[] muszą zawierać wszystkie zmienne typu", + "typedDictAccess": "Nie można uzyskać dostępu do elementu w TypedDict", + "typedDictAssignedName": "Element TypedDict musi być przypisany do zmiennej o nazwie „{name}”", + "typedDictBadVar": "Klasy TypedDict mogą zawierać tylko adnotacje typu", + "typedDictBaseClass": "Wszystkie klasy bazowe dla klas TypedDict muszą być również klasami TypedDict", + "typedDictBoolParam": "Oczekiwano, że parametr „{name}” będzie miał 
wartość True lub False", + "typedDictClosedExtras": "\"{name}\" klasy bazowej to TypedDict ograniczająca typ dodatkowych elementów do typu \"{type}\"", + "typedDictClosedFalseNonOpenBase": "Klasa bazowa „{name}” nie jest otwartą klasą TypedDict; closed=False jest niedozwolone", + "typedDictClosedNoExtras": "Klasa bazowa „{name}” jest closed TypedDict; dodatkowe elementy są niedozwolone", + "typedDictDelete": "Nie można usunąć elementu w typie TypedDict", + "typedDictEmptyName": "Nazwy w elemencie TypedDict nie mogą być puste", + "typedDictEntryName": "Oczekiwano literału ciągu dla nazwy wpisu słownika", + "typedDictEntryUnique": "Nazwy w słowniku muszą być unikatowe", + "typedDictExtraArgs": "Dodatkowe argumenty TypedDict nie są obsługiwane", + "typedDictExtraItemsClosed": "TypedDict mogą używać \"closed\" lub \"extra_items\", ale nie obu", + "typedDictFieldNotRequiredRedefinition": "Element TypedDict „{name}” nie może zostać przedefiniowany jako NotRequired", + "typedDictFieldReadOnlyRedefinition": "Element TypedDict „{name}” nie może być przedefiniowany jako ReadOnly.", + "typedDictFieldRequiredRedefinition": "Element TypedDict „{name}” nie może zostać przedefiniowany jako Required", + "typedDictFirstArg": "Oczekiwano nazwy klasy TypedDict jako pierwszego argumentu", + "typedDictInClassPattern": "klasa TypedDict nie jest dozwolona we wzorcu klasy", + "typedDictInitsubclassParameter": "Element TypedDict nie obsługuje parametru __init_subclass__ „{name}”", + "typedDictNotAllowed": "Nie można użyć elementu „TypedDict” w tym kontekście", + "typedDictSecondArgDict": "Oczekiwano parametru dict lub słowa kluczowego jako drugiego parametru", + "typedDictSecondArgDictEntry": "Oczekiwano prostego wpisu słownika", + "typedDictSet": "Nie można przypisać elementu w TypedDict", + "unaccessedClass": "Nie uzyskano dostępu do klasy „{name}”", + "unaccessedFunction": "Brak dostępu do funkcji „{name}”.", + "unaccessedImport": "Import „{name}” nie jest dostępny", + 
"unaccessedSymbol": "Brak dostępu do „{name}”.", + "unaccessedVariable": "Brak dostępu do zmiennej „{name}”.", + "unannotatedFunctionSkipped": "Analiza funkcji „{name}” została pominięta, ponieważ nie ma adnotacji", + "unaryOperationNotAllowed": "Operator jednoargumentowy nie jest dozwolony w wyrażeniu typu", + "unexpectedAsyncToken": "Oczekiwano wartości „def”, „with” lub „for” po „async”", + "unexpectedEof": "Nieoczekiwany EOF", + "unexpectedExprToken": "Nieoczekiwany token na końcu wyrażenia", + "unexpectedIndent": "Nieoczekiwane wcięcie", + "unexpectedUnindent": "Nieoczekiwany brak wcięcia", + "unhashableDictKey": "Klucz słownika musi być wartością skrótu", + "unhashableSetEntry": "Set wpis musi być wartością skrótu", + "uninitializedAbstractVariables": "Zmienne zdefiniowane w abstrakcyjnej klasie bazowej nie są inicjowane w klasie final „{classType}”", + "uninitializedInstanceVariable": "Zmienna wystąpienia „{name}” nie została zainicjowana w treści klasy ani w metodzie __init__", + "unionForwardReferenceNotAllowed": "Składnia elementu Union nie może być używana z operandem ciągu; użyj cudzysłowów wokół całego wyrażenia", + "unionSyntaxIllegal": "Alternatywna składnia unii wymaga języka Python w wersji 3.10 lub nowszej", + "unionTypeArgCount": "Element Union wymaga co najmniej dwóch argumentów typu", + "unionUnpackedTuple": "Typ Union nie może zawierać niespakowanej kolekcji tuple", + "unionUnpackedTypeVarTuple": "Typ Union nie może zawierać niespakowanego parametru TypeVarTuple", + "unnecessaryCast": "Niepotrzebne wywołanie „cast”; typ jest już „{type}”", + "unnecessaryIsInstanceAlways": "Niepotrzebne wywołanie elementu isinstance; „{testType}” jest zawsze wystąpieniem „{classType}”", + "unnecessaryIsInstanceNever": "Niepotrzebne wywołanie funkcji isinstance; „{testType}” nigdy nie jest instancją „{classType}”", + "unnecessaryIsSubclassAlways": "Niepotrzebne wywołanie „issubclass”; „{testType}” jest zawsze podklasą klasy „{classType}”", + 
"unnecessaryIsSubclassNever": "Niepotrzebne wywołanie funkcji issubclass; „{testType}” nigdy nie jest podklasą „{classType}”", + "unnecessaryPyrightIgnore": "Niepotrzebny komentarz „# pyright: ignore”", + "unnecessaryPyrightIgnoreRule": "Niepotrzebna reguła „# pyright: ignore”: „{name}”", + "unnecessaryTypeIgnore": "Niepotrzebny komentarz „# type: ignore”", + "unpackArgCount": "Oczekiwano jednego argumentu typu po wartości „Unpack”", + "unpackExpectedTypeVarTuple": "Oczekiwano typu TypeVarTuple lub tuple jako argumentu typu dla elementu Unpack", + "unpackExpectedTypedDict": "Oczekiwano argumentu typu TypedDict dla elementu Unpack", + "unpackIllegalInComprehension": "Operacja rozpakowywania nie jest dozwolona w rozumieniu", + "unpackInAnnotation": "Operator rozpakowywania nie jest dozwolony w wyrażeniu typu", + "unpackInDict": "Operacja rozpakowywania nie jest dozwolona w słownikach", + "unpackInSet": "Rozpakowywanie operatora jest niedozwolone w set", + "unpackNotAllowed": "Element Unpack jest niedozwolony w tym kontekście", + "unpackOperatorNotAllowed": "Operacja rozpakowywania jest niedozwolona w tym kontekście", + "unpackTuplesIllegal": "Operacja rozpakowywania nie jest dozwolona w krotkach przed językiem Python w wersji 3.8", + "unpackedArgInTypeArgument": "Nie można użyć nierozpakowanych argumentów w tym kontekście", + "unpackedArgWithVariadicParam": "Nie można użyć nierozpakowanego argumentu dla parametru TypeVarTuple", + "unpackedDictArgumentNotMapping": "Wyrażenie argumentu po znakach ** musi być mapowaniem z typem klucza „str”", + "unpackedDictSubscriptIllegal": "Operator rozpakowywania słownika w indeksie dolnym jest niedozwolony", + "unpackedSubscriptIllegal": "Operator rozpakowywania w indeksie dolnym wymaga języka Python w wersji 3.11 lub nowszej", + "unpackedTypeVarTupleExpected": "Oczekiwano nierozpakowanego typu TypeVarTuple; użyj Unpack[{name1}] lub *{name2}", + "unpackedTypedDictArgument": "Nie można dopasować nierozpakowanego argumentu TypedDict 
do parametrów", + "unreachableCodeCondition": "Kod nie jest analizowany, ponieważ warunek jest statycznie oceniany jako fałsz", + "unreachableCodeStructure": "Kod jest strukturalnie nieosiągalny", + "unreachableCodeType": "Analiza typów wskazuje, że kod jest nieosiągalny", + "unreachableExcept": "Klauzula Except jest nieosiągalna, ponieważ wyjątek jest już obsługiwany", + "unsupportedDunderAllOperation": "Operacja na elemencie „__all__” nie jest obsługiwana, więc wyeksportowana lista symboli może być nieprawidłowa", + "unusedCallResult": "Wynik wyrażenia wywołania jest typu „{type}” i nie jest używany; przypisz do zmiennej „_”, jeśli jest to zamierzone", + "unusedCoroutine": "Wynik wywołania funkcji asynchronicznej nie jest używany; użyj wartości „await” lub przypisz wynik do zmiennej", + "unusedExpression": "Wartość wyrażenia jest nieużywana", + "varAnnotationIllegal": "Type annotations for variables requires Python 3.6 or newer; use type comment for compatibility with previous versions", + "variableFinalOverride": "Zmienna „{name}” jest oznaczona jako Final i zastępuje zmienną inną non-Final o tej samej nazwie w klasie „{className}”", + "variadicTypeArgsTooMany": "Lista argumentów typu może zawierać co najwyżej jeden nierozpakowany typ TypeVarTuple lub tuple", + "variadicTypeParamTooManyAlias": "Alias typu może mieć co najwyżej jeden parametr typu TypeVarTuple, ale otrzymał wiele ({names})", + "variadicTypeParamTooManyClass": "Klasa ogólna może mieć co najwyżej jeden parametr typu TypeVarTuple, ale otrzymał wiele ({names})", + "walrusIllegal": "Operator „:=” wymaga języka Python w wersji 3.8 lub nowszej", + "walrusNotAllowed": "Operator „:=” jest niedozwolony w tym kontekście bez otaczających nawiasów", + "wildcardInFunction": "Wildcard import not allowed within a class or function", + "wildcardLibraryImport": "Wildcard import from a library not allowed", + "wildcardPatternTypePartiallyUnknown": "Typ przechwycony przez wzorzec symboli wieloznacznych jest 
częściowo nieznany", + "wildcardPatternTypeUnknown": "Typ przechwycony przez wzorzec symboli wieloznacznych jest nieznany", + "yieldFromIllegal": "Użycie wartości „yield from” wymaga języka Python w wersji 3.3 lub nowszej", + "yieldFromOutsideAsync": "Instrukcja „yield from” jest niedozwolona w funkcji asynchronicznej", + "yieldOutsideFunction": "Instrukcja „yield” jest niedozwolona poza funkcją lub wyrażeniem lambda", + "yieldWithinComprehension": "Instrukcja „yield” nie jest dozwolona w rozumieniu", + "zeroCaseStatementsFound": "Match statement must include at least one case statement", + "zeroLengthTupleNotAllowed": "Zero-length tuple is not allowed in this context" + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "Formularza specjalnego „Annotated” nie można używać z kontrolami wystąpień i klas", + "argParam": "Argument odpowiada parametrowi „{paramName}”", + "argParamFunction": "Argument odpowiada parametrowi „{paramName}” w funkcji „{functionName}”", + "argsParamMissing": "Parametr „*{paramName}” nie ma odpowiadającego mu parametru", + "argsPositionOnly": "Niezgodność parametrów tylko dla pozycji; oczekiwano wartości „{expected}”, a uzyskano „{received}”", + "argumentType": "Typ argumentu to „{type}”", + "argumentTypes": "Typy argumentów: ({types})", + "assignToNone": "Nie można przypisać typu do elementu „None”", + "asyncHelp": "Czy chodziło o wartość „async with”?", + "baseClassIncompatible": "Klasa bazowa „{baseClass}” jest niezgodna z typem „{type}”", + "baseClassIncompatibleSubclass": "Klasa bazowa „{baseClass}” pochodzi od klasy podrzędnej „{subclass}”, która jest niezgodna z typem „{type}”", + "baseClassOverriddenType": "Klasa bazowa „{baseClass}” udostępnia typ „{type}”, który jest przesłonięty", + "baseClassOverridesType": "Zastąpienie klasy bazowej „{baseClass}” typem „{type}”", + "bytesTypePromotions": "Ustaw właściwość disableBytesTypePromotions na wartość false, aby włączyć zachowanie promocji typu dla elementów „bytearray” i 
„memoryview”", + "conditionalRequiresBool": "Metoda __bool__ dla typu „{operandType}” zwraca typ \"{boolReturnType}\" zamiast „bool”", + "dataClassFieldLocation": "Deklaracja pola", + "dataClassFrozen": "Nazwa „{name}” jest zamrożona", + "dataProtocolUnsupported": "Element „{name}” to protokół danych", + "descriptorAccessBindingFailed": "Nie można powiązać metody „{name}” dla klasy deskryptora „{className}”", + "descriptorAccessCallFailed": "Nie można wywołać metody „{name}” dla klasy deskryptora „{className}”", + "finalMethod": "Final method", + "functionParamDefaultMissing": "Brak domyślnego argumentu dla parametru „{name}”", + "functionParamName": "Niezgodność nazw parametrów: „{destName}” a „{srcName}”", + "functionParamPositionOnly": "Niezgodność parametrów tylko do położenia; parametr „{name}” nie jest tylko pozycją", + "functionReturnTypeMismatch": "Typ zwracanego typu funkcji „{sourceType}” jest niezgodny z typem „{destType}”", + "functionTooFewParams": "Funkcja akceptuje zbyt mało parametrów pozycyjnych; oczekiwano {expected}, ale otrzymano {received}", + "functionTooManyParams": "Funkcja akceptuje zbyt wiele parametrów pozycyjnych; oczekiwano {expected}, ale otrzymano {received}", + "genericClassNotAllowed": "Typ ogólny z argumentami typu jest niedozwolony dla sprawdzania wystąpienia lub klasy", + "incompatibleDeleter": "Property deleter method is incompatible", + "incompatibleGetter": "Property getter method is incompatible", + "incompatibleSetter": "Property setter method is incompatible", + "initMethodLocation": "Metoda __init__ jest zdefiniowana w klasie „{type}”", + "initMethodSignature": "Sygnatura __init__ to typ „{type}”", + "initSubclassLocation": "Metoda __init_subclass__ jest zdefiniowana w klasie „{name}”", + "invariantSuggestionDict": "Rozważ przełączenie z wartości „dict” na „Mapping”, która jest kowariantna w typie wartości", + "invariantSuggestionList": "Rozważ zmianę wartości „list” na „Sequence”, która jest kowariantna", + 
"invariantSuggestionSet": "Rozważ przełączenie z wartości „set” na „Container”, która jest kowariantna", + "isinstanceClassNotSupported": "Typ „{type}” nie jest obsługiwany w przypadku kontroli wystąpień i klas", + "keyNotRequired": "„{name}” nie jest wymaganym kluczem w typie „{type}”, więc dostęp może spowodować wyjątek środowiska uruchomieniowego", + "keyReadOnly": "Nazwa „{name}” jest kluczem tylko do odczytu w typie „{type}”", + "keyRequiredDeleted": "Nazwa „{name}” jest wymaganym kluczem i nie można go usunąć", + "keyUndefined": "Nazwa „{name}” nie jest zdefiniowanym kluczem w typie „{type}”", + "kwargsParamMissing": "Parametr „**{paramName}” nie ma odpowiadającego mu parametru", + "listAssignmentMismatch": "Typ „{type}” jest niezgodny z listą docelową", + "literalAssignmentMismatch": "Nie można przypisać typu „{sourceType}” do typu „{destType}”", + "literalNotAllowed": "Formularza specjalnego „Literal” nie można używać z kontrolami wystąpień i klas", + "matchIsNotExhaustiveHint": "Jeśli kompleksowa obsługa nie jest zamierzona, dodaj „case _: pass”", + "matchIsNotExhaustiveType": "Nieobsługiwany typ: „{type}”", + "memberAssignment": "Wyrażenia typu „{type}” nie można przypisać do atrybutu „{name}” klasy „{classType}”", + "memberIsAbstract": "„{type}.{name}” nie zostało zaimplementowane", + "memberIsAbstractMore": "i jeszcze {count}...", + "memberIsClassVarInProtocol": "Element „{name}” jest zdefiniowany jako element ClassVar w protokole", + "memberIsInitVar": "Składowa „{name}” jest polem do operacji init-only", + "memberIsInvariant": "Nazwa „{name}” jest niezmienna, ponieważ jest modyfikowalna", + "memberIsNotClassVarInClass": "Element „{name}” musi być zdefiniowany jako ClassVar, aby był zgodny z protokołem", + "memberIsNotClassVarInProtocol": "Element „{name}” nie jest zdefiniowany jako ClassVar w protokole", + "memberIsNotReadOnlyInProtocol": "„{name}” nie jest tylko do odczytu w protokole", + "memberIsReadOnlyInProtocol": "„{name}” jest tylko do odczytu 
w protokole", + "memberIsWritableInProtocol": "Nazwa „{name}” jest zapisywalna w protokole", + "memberSetClassVar": "Atrybut „{name}” nie może zostać przypisany przez wystąpienie klasy, ponieważ jest to element ClassVar", + "memberTypeMismatch": "Nazwa „{name}” jest niezgodnym typem", + "memberUnknown": "Atrybut „{name}” jest nieznany", + "metaclassConflict": "Metaklasa „{metaclass1}” powoduje konflikt z „{metaclass2}”", + "missingDeleter": "Brak metody usuwającej właściwości", + "missingGetter": "Brak metody pobierającej właściwości", + "missingSetter": "Brak metody ustawiającej właściwości", + "namedParamMissingInDest": "Dodatkowy parametr „{name}”", + "namedParamMissingInSource": "Brak parametru słowa kluczowego „{name}”", + "namedParamTypeMismatch": "Parametr słowa kluczowego „{name}” typu „{sourceType}” jest niezgodny z typem „{destType}”", + "namedTupleNotAllowed": "Funkcja NamedTuple nie może być używana do sprawdzania wystąpień lub klas", + "newMethodLocation": "Metoda __new__ jest zdefiniowana w klasie „{type}”", + "newMethodSignature": "Sygnatura __new__ to typ „{type}”", + "newTypeClassNotAllowed": "Typ utworzony za pomocą elementu NewType nie może być używany ze sprawdzaniem wystąpienia i klasy", + "noOverloadAssignable": "Żadna przeciążona funkcja nie pasuje do typu „{type}”", + "noneNotAllowed": "Wartość None nie może być używana w przypadku kontroli wystąpień lub klas", + "orPatternMissingName": "Brak nazw: {name}", + "overloadIndex": "Przeciążenie {index} jest najbardziej zbliżonym dopasowaniem", + "overloadNotAssignable": "Nie można przypisać jednego lub więcej przeciążeń „{name}”.", + "overloadSignature": "Sygnatura przeciążenia jest zdefiniowana tutaj", + "overriddenMethod": "Przesłonięta metoda", + "overriddenSymbol": "Przesłonięty symbol", + "overrideInvariantMismatch": "Typ zastąpienia „{overrideType}” nie jest taki sam jak typ podstawowy „{baseType}”", + "overrideIsInvariant": "Zmienna podlega przeobrażeniom, dlatego jej typ jest niezmienny", + 
"overrideNoOverloadMatches": "Żadna sygnatura przeciążenia w przesłonięciu nie jest zgodna z metodą bazową", + "overrideNotClassMethod": "Metoda bazowa jest zadeklarowana jako metoda classmethod, ale przesłonięcie nie", + "overrideNotInstanceMethod": "Metoda bazowa jest zadeklarowana jako metoda wystąpienia, ale zastąpienie nie jest", + "overrideNotStaticMethod": "Metoda bazowa jest zadeklarowana jako staticmethod, ale przesłonięcie nie", + "overrideOverloadNoMatch": "Zastąpienie nie obsługuje wszystkich przeciążeń metody podstawowej", + "overrideOverloadOrder": "Przeciążenia dla metody przesłaniania muszą być w takiej samej kolejności, co metoda bazowa", + "overrideParamKeywordNoDefault": "Niezgodność parametru słowa kluczowego „{name}”: parametr bazowy ma domyślną wartość argumentu, parametr zastąpienia nie ma jej", + "overrideParamKeywordType": "Niezgodność typu parametru słowa kluczowego „{name}”: parametr bazowy jest typu „{baseType}”, a parametr zastąpienia jest typu „{overrideType}”", + "overrideParamName": "Niezgodność nazwy parametru {index}: parametr podstawowy nosi nazwę „{baseName}”, a parametr zastąpienia — „{overrideName}”", + "overrideParamNameExtra": "W bazie brakuje parametru „{name}”.", + "overrideParamNameMissing": "Brak parametru „{name}” w zastąpieniu", + "overrideParamNamePositionOnly": "Niezgodność parametru {index}: parametr podstawowy „{baseName}” jest parametrem słowa kluczowego, parametr przesłonięcia jest tylko w pozycji", + "overrideParamNoDefault": "Niezgodność parametru {index}: parametr bazowy ma domyślną wartość argumentu, a parametr zastąpienia nie ma jej", + "overrideParamType": "Niezgodność typów parametru {index}: parametr bazowy jest typu „{baseType}”, parametr zastąpienia jest typu „{overrideType}”", + "overridePositionalParamCount": "Niezgodność liczby parametrów pozycyjnych; metoda podstawowa ma {baseCount}, a zastąpienie {overrideCount}", + "overrideReturnType": "Niezgodność zwracanego typu: metoda podstawowa zwraca typ 
„{baseType}”, przesłonięcie zwraca typ „{overrideType}”", + "overrideType": "Klasa bazowa definiuje typ jako „{type}”", + "paramAssignment": "Parametr {index}: typ „{sourceType}” jest niezgodny z typem „{destType}”", + "paramSpecMissingInOverride": "Brak parametrów ParamSpec w metodzie zastąpienia", + "paramType": "Typ parametru to „{paramType}”", + "privateImportFromPyTypedSource": "Zamiast tego importuj z modułu „{module}”.", + "propertyAccessFromProtocolClass": "Nie można uzyskać dostępu do właściwości zdefiniowanej w klasie protokołu jako zmiennej klasy", + "propertyMethodIncompatible": "Metoda właściwości „{name}” jest niezgodna", + "propertyMethodMissing": "Brak metody właściwości „{name}” w przesłonięciu", + "propertyMissingDeleter": "Właściwość „{name}” nie ma zdefiniowanej metody usuwającej", + "propertyMissingSetter": "Właściwość „{name}” nie ma zdefiniowanej metody ustawiającej", + "protocolIncompatible": "Protokół „{sourceType}” jest niezgodny z protokołem „{destType}”", + "protocolMemberMissing": "Brak nazwy „{name}”.", + "protocolRequiresRuntimeCheckable": "Klasa Protocol musi być @runtime_checkable, aby mogła być używana z kontrolami wystąpień i klas", + "protocolSourceIsNotConcrete": "„{sourceType}” nie jest typem specyficznej klasy i nie można go przypisać do typu „{destType}”", + "protocolUnsafeOverlap": "Atrybuty „{name}” mają takie same nazwy jak protokół", + "pyrightCommentIgnoreTip": "Użyj polecenia „# pyright: ignore[]”, aby wyłączyć diagnostykę dla pojedynczego wiersza", + "readOnlyAttribute": "Atrybut „{name}” jest tylko do odczytu", + "seeClassDeclaration": "Zobacz deklarację klasy", + "seeDeclaration": "Zobacz deklarację", + "seeFunctionDeclaration": "Zobacz deklarację funkcji", + "seeMethodDeclaration": "Zobacz deklarację metody", + "seeParameterDeclaration": "Zobacz deklarację parametru", + "seeTypeAliasDeclaration": "Zobacz deklarację aliasu typu", + "seeVariableDeclaration": "Zobacz deklarację zmiennej", + "tupleAssignmentMismatch": "Typ „{type}” jest niezgodny z 
docelową tuple", + "tupleEntryTypeMismatch": "Wpis tuple {entry} jest nieprawidłowego typu", + "tupleSizeIndeterminateSrc": "Niezgodność rozmiaru kolekcji tuple; oczekiwano {expected}, ale otrzymano rozmiar nieokreślony", + "tupleSizeIndeterminateSrcDest": "Niezgodność rozmiaru kolekcji tuple; oczekiwano {expected} lub więcej, a otrzymano rozmiar nieokreślony", + "tupleSizeMismatch": "Niezgodność rozmiaru tuple; oczekiwano {expected}, ale otrzymano {received}", + "tupleSizeMismatchIndeterminateDest": "Niezgodność rozmiaru kolekcji tuple; oczekiwano {expected} lub więcej, a otrzymano {received}", + "typeAliasInstanceCheck": "Alias typu utworzony za pomocą instrukcji „{type}” nie może być użyty do sprawdzania wystąpień i klas", + "typeAssignmentMismatch": "Typu „{sourceType}” nie można przypisać do typu „{destType}”", + "typeBound": "Typu „{sourceType}” nie można przypisać do górnej granicy „{destType}” dla zmiennej typu „{name}”", + "typeConstrainedTypeVar": "Typu „{type}” nie można przypisać do zmiennej typu ograniczonego „{name}”", + "typeIncompatible": "Nie można przypisać typu „{sourceType}” do typu „{destType}”", + "typeNotClass": "Typ „{type}” nie jest klasą", + "typeNotStringLiteral": "„{type}” nie jest literałem ciągu", + "typeOfSymbol": "Typ nazwy „{name}” jest to „{type}”", + "typeParamSpec": "Typ „{type}” jest niezgodny ze specyfikacją ParamSpec „{name}”", + "typeUnsupported": "Typ „{type}” nie jest obsługiwany", + "typeVarDefaultOutOfScope": "Zmienna typu „{name}” nie wchodzi w zakres", + "typeVarIsContravariant": "Parametr typu „{name}” jest kontrawariantny, ale „{sourceType}” nie jest nadtypem „{destType}”", + "typeVarIsCovariant": "Parametr typu „{name}” jest kowariantny, ale „{sourceType}” nie jest podtypem „{destType}”", + "typeVarIsInvariant": "Parametr typu „{name}” jest niezmienny, ale „{sourceType}” nie jest taki sam jak „{destType}”", + "typeVarNotAllowed": "Typ TypeVar nie jest dozwolony dla sprawdzania wystąpienia lub klasy", + 
"typeVarTupleRequiresKnownLength": "Nie można powiązać parametru TypeVarTuple ze tuple o nieznanej długości", + "typeVarUnnecessarySuggestion": "Zamiast tego użyj elementu {type}", + "typeVarUnsolvableRemedy": "Podaj przeciążenie, które określa zwracany typ, gdy nie podano argumentu", + "typeVarsMissing": "Brak zmiennych typu: {names}", + "typedDictBaseClass": "Klasa „{type}” nie jest typem TypedDict", + "typedDictClassNotAllowed": "Klasa TypedDict nie jest dozwolona na potrzeby sprawdzania wystąpienia lub klasy", + "typedDictClosedExtraNotAllowed": "Nie można dodać elementu \"{name}\"", + "typedDictClosedExtraTypeMismatch": "Nie można dodać elementu \"{name}\" z typem „{type}”", + "typedDictClosedFieldNotReadOnly": "Nie można dodać elementu „{name}”, ponieważ musi on mieć wartość ReadOnly", + "typedDictClosedFieldNotRequired": "Nie można dodać elementu \"{name}\", ponieważ musi on mieć wartość NotRequired", + "typedDictExtraFieldNotAllowed": "Element „{name}” nie jest obecny w typie „{type}”", + "typedDictExtraFieldTypeMismatch": "Typ „{name}” jest niezgodny z typem „extra_items” w typie „{type}”", + "typedDictFieldMissing": "Brak nazwy „{name}” w „{type}”", + "typedDictFieldNotReadOnly": "Nazwa „{name}” nie jest tylko do odczytu w „{type}”", + "typedDictFieldNotRequired": "Nazwa „{name}” nie jest wymagana w typie „{type}”", + "typedDictFieldRequired": "Nazwa „{name}” jest wymagana w typie „{type}”", + "typedDictFieldTypeMismatch": "Nie można przypisać typu „{type}” do elementu „{name}”", + "typedDictFieldUndefined": "Nazwa „{name}” jest niezdefiniowanym elementem w typie „{type}”", + "typedDictKeyAccess": "Użyj elementu [\"{name}\"], aby odwołać się do elementu w TypedDict", + "typedDictNotAllowed": "Funkcja TypedDict nie może być używana do sprawdzania wystąpień lub klas", + "unhashableType": "Typ „{type}” nie jest wartością skrótu", + "uninitializedAbstractVariable": "zmienna wystąpienia „{name}” jest zdefiniowana w abstrakcyjnej klasie bazowej „{classType}” 
ale nie została zainicjowana", + "unreachableExcept": "Typ „{exceptionType}” jest podklasą typu „{parentType}”", + "useDictInstead": "Użyj funkcji dict[T1, T2], aby wskazać typ słownika", + "useListInstead": "Użyj list[T], aby wskazać typ listy, lub T1 | T2, aby wskazać typ unii", + "useTupleInstead": "Użyj tuple[T1, ..., Tn], aby wskazać typ kolekcji tuple, lub T1 | T2, aby wskazać typ unii", + "useTypeInstead": "Zamiast tego użyj funkcji type[T]", + "varianceMismatchForClass": "Wariancja argumentu typu „{typeVarName}” jest niezgodna z klasą bazową „{className}”", + "varianceMismatchForTypeAlias": "Wariancja argumentu typu „{typeVarName}” jest niezgodna z parametrem „{typeAliasParam}”" + }, + "Service": { + "longOperation": "Wyliczanie plików źródłowych obszaru roboczego zajmuje dużo czasu. Zamiast tego rozważ otwarcie podfolderu. [Dowiedz się więcej](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.pt-br.json b/python-parser/packages/pyright-internal/src/localization/package.nls.pt-br.json new file mode 100644 index 00000000..0febab0e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.pt-br.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "Criar Stub de Tipo", + "createTypeStubFor": "Criar Stub de tipo para \"{moduleName}\"", + "executingCommand": "Executando comando", + "filesToAnalyzeCount": "{count} arquivos a serem analisados", + "filesToAnalyzeOne": "1 arquivo a ser analisado", + "findingReferences": "Localizando referências", + "organizeImports": "Organizar as importações" + }, + "Completion": { + "autoImportDetail": "Importação automática", + "indexValueDetail": "Valor do índice" + }, + "Diagnostic": { + "abstractMethodInvocation": "O método \"{method}\" não pode ser chamado porque é abstrato e não está implementado", + "annotatedMetadataInconsistent": "O tipo de metadados anotados \"{metadataType}\" não é compatível com o tipo 
\"{type}\"", + "annotatedParamCountMismatch": "Incompatibilidade de contagem de anotações de parâmetro: esperado {expected}, mas recebido {received}", + "annotatedTypeArgMissing": "Esperava-se um argumento de tipo e uma ou mais anotações para \"Annotated\"", + "annotationBytesString": "Expressões de tipo não podem usar literais de cadeia de caracteres de bytes", + "annotationFormatString": "As expressões de tipo não podem usar literais de cadeia de caracteres de formato (cadeias de caracteres f)", + "annotationNotSupported": "Anotação de tipo sem suporte para esta instrução", + "annotationRawString": "Expressões de tipo não podem usar literais de cadeia de caracteres brutas", + "annotationSpansStrings": "Expressões de tipo não podem abranger vários literais de cadeia de caracteres", + "annotationStringEscape": "Expressões de tipo não podem conter caracteres de escape", + "annotationTemplateString": "Expressões de tipo não podem usar literais de cadeia de caracteres de modelo (cadeias de caracteres t)", + "argAssignment": "O argumento do tipo \"{argType}\" não pode ser atribuído ao parâmetro do tipo \"{paramType}\"", + "argAssignmentFunction": "O argumento do tipo \"{argType}\" não pode ser atribuído ao parâmetro do tipo \"{paramType}\" na função \"{functionName}\"", + "argAssignmentParam": "O argumento do tipo \"{argType}\" não pode ser atribuído ao parâmetro \"{paramName}\" do tipo \"{paramType}\"", + "argAssignmentParamFunction": "O argumento do tipo \"{argType}\" não pode ser atribuído ao parâmetro \"{paramName}\" do tipo \"{paramType}\" na função \"{functionName}\"", + "argMissingForParam": "Argumento ausente para o parâmetro {name}", + "argMissingForParams": "Argumentos ausentes para os parâmetros {names}", + "argMorePositionalExpectedCount": "Esperavam-se mais {expected} argumentos posicionais", + "argMorePositionalExpectedOne": "Espera-se mais um argumento posicional", + "argPositional": "Argumento posicional esperado", + "argPositionalExpectedCount": 
"{expected} argumentos posicionais esperados", + "argPositionalExpectedOne": "Argumento posicional esperado 1", + "argTypePartiallyUnknown": "O tipo de argumento é parcialmente desconhecido", + "argTypeUnknown": "O tipo de argumento é desconhecido", + "assertAlwaysTrue": "A expressão assert sempre é avaliada como true", + "assertTypeArgs": "\"assert_type\" espera dois argumentos posicionais", + "assertTypeTypeMismatch": "Incompatibilidade \"assert_type\": esperava-se \"{expected}\", mas recebeu \"{received}\"", + "assignmentExprComprehension": "O destino da expressão de atribuição \"{name}\" não pode usar o mesmo nome da compreensão para o destino", + "assignmentExprContext": "A expressão de atribuição deve estar dentro de módulo, função ou lambda", + "assignmentExprInSubscript": "Expressões de atribuição em um subscrito são compatíveis apenas no Python 3.10 e mais recente", + "assignmentInProtocol": "As variáveis de instância ou classe dentro de uma classe Protocol devem ser declaradas explicitamente dentro do corpo da classe", + "assignmentTargetExpr": "A expressão não pode ser o destino de atribuição", + "asyncNotInAsyncFunction": "Uso de \"async\" não permitido fora da função async", + "awaitIllegal": "O uso de \"await\" requer o Python 3.5 ou mais recente", + "awaitNotAllowed": "Expressões de tipo não podem usar \"await\"", + "awaitNotInAsync": "\"await\" permitido somente dentro da função async", + "backticksIllegal": "Não há suporte para expressões delimitadas por backticks no Python 3.x. 
Use repr em vez disso", + "baseClassCircular": "A classe não pode derivar de si mesma", + "baseClassFinal": "A classe base \"{type}\" está marcada como final e não pode ser subclasse", + "baseClassIncompatible": "Classes base de {type} são mutuamente incompatíveis", + "baseClassInvalid": "O argumento para a classe deve ser uma classe base", + "baseClassMethodTypeIncompatible": "Classes base para a classe \"{classType}\" definem o método \"{name}\" de maneira incompatível", + "baseClassUnknown": "O tipo de classe base é desconhecido, ocultando o tipo de classe derivada", + "baseClassVariableTypeIncompatible": "Classes base para a classe \"{classType}\" definem a variável \"{name}\" de maneira incompatível", + "binaryOperationNotAllowed": "Operador binário não permitido na expressão de tipo", + "bindParamMissing": "Não foi possível vincular o método \"{methodName}\" porque está faltando um parâmetro \"self\" ou \"cls\"", + "bindTypeMismatch": "Não foi possível associar o método \"{methodName}\" porque \"{type}\" não é atribuível ao parâmetro \"{paramName}\"", + "breakInExceptionGroup": "\"break\" não é permitido em um bloco \"except*\"", + "breakOutsideLoop": "\"break\" só pode ser usado dentro de um loop", + "bytesUnsupportedEscape": "Sequência de escape sem suporte em literal de bytes", + "callableExtraArgs": "Esperava-se apenas dois argumentos de tipo para \"Callable\"", + "callableFirstArg": "Lista de tipos de parâmetro esperado ou \"...\"", + "callableNotInstantiable": "Não é possível criar uma instância do tipo \"{type}\"", + "callableSecondArg": "Tipo de retorno esperado como segundo argumento de tipo para \"Callable\"", + "casePatternIsIrrefutable": "O padrão irrefutável é permitido somente para a última instrução case", + "classAlreadySpecialized": "O tipo \"{type}\" já é especializado", + "classDecoratorTypeUnknown": "Um decorador de classe sem tipo obscurece o tipo de classe. 
Ignorando o decorador", + "classDefinitionCycle": "A definição de classe para \"{name}\" depende de si mesma", + "classGetItemClsParam": "A substituição__class_getitem__ deve usar um parâmetro \"cls\"", + "classMethodClsParam": "Os métodos de classe devem usar um parâmetro \"cls\"", + "classNotRuntimeSubscriptable": "O subscrito para a classe \"{name}\" gerará uma exceção de runtime. Coloque a expressão de tipo entre aspas", + "classPatternBuiltInArgPositional": "O padrão de classe aceita apenas sub-padrão posicional", + "classPatternNewType": "\"{type}\" não pode ser usado em um padrão de classe porque é definido usando NewType", + "classPatternPositionalArgCount": "Muitos padrões posicionais para a classe \"{type}\"; esperado {expected} mas recebido {received}", + "classPatternTypeAlias": "\"{type}\" não pode ser usado em um padrão de classe porque é um alias de tipo especializado", + "classPropertyDeprecated": "As propriedades de classe foram preteridas no Python 3.11 e não terão suporte no Python 3.13", + "classTypeParametersIllegal": "A sintaxe do parâmetro de tipo de classe requer o Python 3.12 ou mais recente", + "classVarFirstArgMissing": "Um argumento de tipo era esperado após \"ClassVar\"", + "classVarNotAllowed": "\"ClassVar\" não é permitido neste contexto", + "classVarOverridesInstanceVar": "A variável de classe \"{name}\" substitui a variável de instância do mesmo nome na classe \"{className}\"", + "classVarTooManyArgs": "Espera-se apenas um argumento de tipo após \"ClassVar\"", + "classVarWithTypeVar": "O tipo \"ClassVar\" não pode incluir variáveis de tipo", + "clsSelfParamTypeMismatch": "O tipo de parâmetro \"{name}\" deve ser um supertipo de sua classe \"{classType}\"", + "codeTooComplexToAnalyze": "O código é muito complexo para ser analisado. 
Reduza a complexidade refatorando em sub-rotinas ou reduzindo caminhos de código condicionais", + "collectionAliasInstantiation": "O tipo \"{type}\" não pode ser instanciado, use \"{alias}\" em vez disso", + "comparisonAlwaysFalse": "A condição sempre será avaliada como False, pois os tipos \"{leftType}\" e \"{rightType}\" não têm sobreposição", + "comparisonAlwaysTrue": "A condição sempre será avaliada como True, pois os tipos \"{leftType}\" e \"{rightType}\" não têm sobreposição", + "comprehensionInDict": "A compreensão não pode ser usada com outras entradas de dicionário", + "comprehensionInSet": "A compreensão não pode ser usada com outras entradas de set", + "concatenateContext": "\"Concatenate\" não é permitido nesse contexto", + "concatenateParamSpecMissing": "O último tipo de argumento para \"Concatenate\" deve ser um ParamSpec ou \"...\"", + "concatenateTypeArgsMissing": "\"Concatenate\" requer pelo menos dois argumentos de tipo", + "conditionalOperandInvalid": "Operando condicional inválido do tipo \"{type}\"", + "constantRedefinition": "\"{name}\" é constante (porque está em maiúsculas) e não pode ser redefinido", + "constructorParametersMismatch": "Incompatibilidade entre a assinatura de __new__ e __init__ na classe \"{classType}\"", + "containmentAlwaysFalse": "A expressão sempre será avaliada como False, pois os tipos \"{leftType}\" e \"{rightType}\" não têm sobreposição", + "containmentAlwaysTrue": "A expressão sempre será avaliada como True, pois os tipos \"{leftType}\" e \"{rightType}\" não têm sobreposição", + "continueInExceptionGroup": "\"continue\" não é permitido em um bloco \"except*\"", + "continueOutsideLoop": "\"continue\" só pode ser usado dentro de um loop", + "coroutineInConditionalExpression": "A expressão condicional faz referência à corrotina, que sempre é avaliada como True", + "dataClassBaseClassFrozen": "Uma classe não congelada não pode herdar de uma classe congelada", + "dataClassBaseClassNotFrozen": "Uma classe congelada não 
pode herdar de uma classe que não está congelada", + "dataClassConverterFunction": "O argumento do tipo \"{argType}\" não é um conversor válido para o campo \"{fieldName}\" do tipo \"{fieldType}\"", + "dataClassConverterOverloads": "Nenhuma sobrecarga de \"{funcName}\" são conversores válidos para o campo \"{fieldName}\" do tipo \"{fieldType}\"", + "dataClassFieldInheritedDefault": "\"{fieldName}\" substitui um campo com o mesmo nome, mas não possui um valor padrão", + "dataClassFieldWithDefault": "Campos sem valores padrão não podem aparecer após campos com valores padrão", + "dataClassFieldWithPrivateName": "O campo Dataclass não pode usar o nome privado", + "dataClassFieldWithoutAnnotation": "O campo Dataclass sem anotação de tipo causará uma exceção de runtime", + "dataClassPostInitParamCount": "Contagem de parâmetros de dataclasse __post_init__ incorreta. O número de campos InitVar é {expected}", + "dataClassPostInitType": "Incompatibilidade de tipo de parâmetro de método de dataclasse __post_init__ para o campo \"{fieldName}\"", + "dataClassSlotsOverwrite": "__slots__ já está definido na classe", + "dataClassTransformExpectedBoolLiteral": "Expressão esperada que é avaliada estaticamente como True ou False", + "dataClassTransformFieldSpecifier": "Esperava-se tuple de classes ou funções, mas recebeu o tipo \"{type}\"", + "dataClassTransformPositionalParam": "Todos os argumentos para \"dataclass_transform\" devem ser argumentos de palavra-chave", + "dataClassTransformUnknownArgument": "O argumento \"{name}\" dataclass_transform não é compatível", + "dataProtocolInSubclassCheck": "Protocolos de dados (que incluem atributos que não são de método) não são permitidos em chamadas issubclass", + "declaredReturnTypePartiallyUnknown": "O tipo de retorno declarado, \"{returnType}\", é parcialmente desconhecido", + "declaredReturnTypeUnknown": "O tipo de retorno declarado é desconhecido", + "defaultValueContainsCall": "Chamadas de função e objetos mutáveis não permitidos 
na expressão de valor padrão do parâmetro", + "defaultValueNotAllowed": "O parâmetro com \"*\" ou \"**\" não pode ter valor padrão", + "delTargetExpr": "A expressão não pode ser excluída", + "deprecatedClass": "A classe \"{name}\" foi preterida", + "deprecatedConstructor": "O construtor da classe \"{name}\" foi preterido", + "deprecatedDescriptorDeleter": "O método \"__delete__\" para o descritor \"{name}\" está preterido", + "deprecatedDescriptorGetter": "O método \"__get__\" para o descritor \"{name}\" está preterido", + "deprecatedDescriptorSetter": "O método \"__set__\" para o descritor \"{name}\" está preterido", + "deprecatedFunction": "A função \"{name}\" está obsoleta", + "deprecatedMethod": "O método \"{name}\" na classe \"{className}\" está obsoleto", + "deprecatedPropertyDeleter": "O deleter da property \"{name}\" foi preterido", + "deprecatedPropertyGetter": "O getter da property \"{name}\" foi preterido", + "deprecatedPropertySetter": "O setter da property \"{name}\" está preterido", + "deprecatedType": "Este tipo foi preterido no Python {version}. 
Use \"{replacement}\" em vez disso", + "dictExpandIllegalInComprehension": "Expansão de dicionário não permitida na compreensão", + "dictInAnnotation": "Expressão de dicionário não permitida na expressão de tipo", + "dictKeyValuePairs": "Entradas de dicionário devem conter pares chave/valor", + "dictUnpackIsNotMapping": "Mapeamento esperado para o operador de desempacotamento de dicionário", + "dunderAllSymbolNotPresent": "\"{name}\" está especificado no __all__ mas não está presente no módulo", + "duplicateArgsParam": "Somente um parâmetro \"*\" permitido", + "duplicateBaseClass": "Classe base duplicada não permitida", + "duplicateCapturePatternTarget": "O destino de captura \"{name}\" não pode aparecer mais de uma vez dentro do mesmo padrão", + "duplicateCatchAll": "Somente uma cláusula de except catch-all é permitida", + "duplicateEnumMember": "O membro de Enum \"{name}\" já está declarado", + "duplicateGenericAndProtocolBase": "Somente uma classe base Generic[...] ou Protocol[...] é permitida", + "duplicateImport": "\"{importName}\" foi importado mais de uma vez", + "duplicateKeywordOnly": "Somente um separador \"*\" permitido", + "duplicateKwargsParam": "Somente um parâmetro \"**\" é permitido", + "duplicateParam": "Nome de parâmetro \"{name}\" duplicado", + "duplicatePositionOnly": "Somente um parâmetro \"/\" permitido", + "duplicateStarPattern": "Somente um padrão \"*\" permitido em uma sequência de padrões", + "duplicateStarStarPattern": "Somente uma entrada \"**\" é permitida", + "duplicateUnpack": "Somente uma operação unpack é permitida na lista", + "ellipsisAfterUnpacked": "\"...\" não pode ser usado com um TypeVarTuple ou tuple descompactado", + "ellipsisContext": "\"...\" não é permitido neste contexto", + "ellipsisSecondArg": "\"...\" é permitido apenas como o segundo de dois argumentos", + "enumClassOverride": "A classe Enum \"{name}\" é final e não pode ser subclasse", + "enumMemberDelete": "O membro Enum \"{name}\" não pode ser excluído", + 
"enumMemberSet": "O membro Enum \"{name}\" não pode ser atribuído", + "enumMemberTypeAnnotation": "Anotações de tipo não são permitidas para membros de enumeração", + "exceptGroupMismatch": "A instrução Try não pode incluir \"except\" e \"except*\"", + "exceptGroupRequiresType": "A sintaxe do grupo de exceção (\"except*\") requer um tipo de exceção", + "exceptRequiresParens": "Vários tipos de exceção devem estar entre parênteses antes do Python 3.14", + "exceptWithAsRequiresParens": "Vários tipos de exceção devem estar entre parênteses ao usar \"as\"", + "exceptionGroupIncompatible": "A sintaxe do grupo de exceção (\"except*\") requer o Python 3.11 ou mais recente", + "exceptionGroupTypeIncorrect": "O tipo de exceção em except* não pode derivar de BaseGroupException", + "exceptionTypeIncorrect": "\"{type}\" não deriva de BaseException", + "exceptionTypeNotClass": "\"{type}\" não é uma classe de exceção válida", + "exceptionTypeNotInstantiable": "O construtor para o tipo de exceção \"{type}\" requer um ou mais argumentos", + "expectedAfterDecorator": "Função ou declaração de classe esperada após o decorador", + "expectedArrow": "Esperava-se \"->\" seguido pela anotação de tipo de retorno", + "expectedAsAfterException": "Espera-se \"as\" após o tipo de exceção", + "expectedAssignRightHandExpr": "Expressão esperada à direita de \"=\"", + "expectedBinaryRightHandExpr": "Expressão esperada à direita do operador", + "expectedBoolLiteral": "Espera-se True ou False", + "expectedCase": "Instrução \"case\" esperada", + "expectedClassName": "Nome de classe esperado", + "expectedCloseBrace": "\"{\" não foi fechado", + "expectedCloseBracket": "\"[\" não foi fechado", + "expectedCloseParen": "\"(\" não foi fechado", + "expectedColon": "Esperava-se \":\"", + "expectedComplexNumberLiteral": "Literal de número complexo esperado para correspondência de padrões", + "expectedDecoratorExpr": "Formulário de expressão sem suporte para decorador antes do Python 3.9", + 
"expectedDecoratorName": "Nome do decorador esperado", + "expectedDecoratorNewline": "Nova linha esperada no final do decorador", + "expectedDelExpr": "Expressão esperada após \"del\"", + "expectedElse": "Esperava-se #else", + "expectedEquals": "Esperava-se \"=\"", + "expectedExceptionClass": "Classe ou objeto de exceção inválido", + "expectedExceptionObj": "Objeto de exceção esperado: classe de exceção ou None", + "expectedExpr": "Expressão esperada", + "expectedFunctionAfterAsync": "Definição de função esperada após \"async\"", + "expectedFunctionName": "Nome da função esperado após \"def\"", + "expectedIdentifier": "Identificador esperado", + "expectedImport": "\"importação\" esperada", + "expectedImportAlias": "Símbolo esperado após \"as\"", + "expectedImportSymbols": "Esperado um ou mais nomes de símbolos após “importar”", + "expectedIn": "Esperava-se \"in\"", + "expectedInExpr": "Expressão esperada após \"in\"", + "expectedIndentedBlock": "Bloco recuado esperado", + "expectedMemberName": "Nome de atributo esperado após \".\"", + "expectedModuleName": "Nome do módulo esperado", + "expectedNameAfterAs": "Nome de símbolo esperado após \"as\"", + "expectedNamedParameter": "O parâmetro de palavra-chave deve seguir \"*\"", + "expectedNewline": "Nova linha esperada", + "expectedNewlineOrSemicolon": "As instruções devem ser separadas por novas linhas ou ponto e vírgula", + "expectedOpenParen": "Esperava-se \"(\"", + "expectedParamName": "Nome do parâmetro esperado", + "expectedPatternExpr": "Expressão padrão esperada", + "expectedPatternSubjectExpr": "Expressão de assunto padrão esperada", + "expectedPatternValue": "Expressão de valor padrão esperada da forma \"a.b\"", + "expectedReturnExpr": "Expressão esperada após \"return\" ", + "expectedSliceIndex": "Expressão de índice ou fatia esperada", + "expectedTypeNotString": "Tipo esperado, mas recebeu um literal de cadeia de caracteres", + "expectedTypeParameterName": "Nome do parâmetro de tipo esperado", + 
"expectedYieldExpr": "Expressão esperada na instrução yield", + "finalClassIsAbstract": "A classe \"{type}\" está marcada como final e deve implementar todos os símbolos abstratos", + "finalContext": "\"Final\" não é permitido neste contexto", + "finalInLoop": "Uma variável \"Final\" não pode ser atribuída em um loop", + "finalMethodOverride": "O método \"{name}\" não pode substituir o método final definido na classe \"{className}\"", + "finalNonMethod": "A função \"{name}\" não pode ser marcada como @final porque não é um método", + "finalReassigned": "\"{name}\" está declarado como Final e não pode ser reatribuído", + "finalRedeclaration": "\"{name}\" foi declarado anteriormente como Final", + "finalRedeclarationBySubclass": "\"{name}\" não pode ser redeclarado porque a classe pai \"{className}\" a declara como Final", + "finalTooManyArgs": "Argumento de tipo único esperado após \"Final\"", + "finalUnassigned": "\"{name}\" está declarado como Final, mas o valor não foi atribuído", + "finallyBreak": "Uma \"break\" não pode ser usado para sair de um bloco \"finally\"", + "finallyContinue": "Um \"continue\" não pode ser usado para sair de um bloco \"finally\"", + "finallyReturn": "Um \"return\" não pode ser usado para sair de um bloco \"finally\"", + "formatStringBrace": "Chave de fechamento única não permitida no literal de cadeia de caracteres f. 
Usar chave de fechamento dupla", + "formatStringBytes": "Literais de cadeia de caracteres de formato (cadeia de caracteres f) não podem ser binárias", + "formatStringDebuggingIllegal": "O especificador de depuração de cadeia de caracteres f \"=\" requer o Python 3.8 ou mais recente", + "formatStringEscape": "Sequência de escape (barra invertida) não permitida na parte de expressão da cadeia de caracteres f em versões anteriores ao Python 3.12", + "formatStringExpectedConversion": "Esperava-se um especificador de conversão após \"!\" na cadeia de caracteres f", + "formatStringIllegal": "Literais de cadeia de caracteres de formato (cadeias de caracteres f) exigem Python 3.6 ou mais recente", + "formatStringInPattern": "Cadeia de caracteres de formato não permitida no padrão", + "formatStringNestedFormatSpecifier": "Expressões aninhadas muito profundamente dentro do especificador de cadeia de caracteres de formato", + "formatStringNestedQuote": "Cadeias de caracteres aninhadas em uma cadeia de caracteres f não podem usar o mesmo caractere de aspas que a cadeia de caracteres f antes do Python 3.12", + "formatStringTemplate": "Literais de cadeia de caracteres de formato (cadeia de caracteres f) também não podem ser cadeias de caracteres de modelo (cadeia de caracteres t)", + "formatStringUnicode": "As literais de cadeia de caracteres de formato (f-strings) não podem ser unicode", + "formatStringUnterminated": "Expressão não finalizada na cadeia de caracteres f. \"}\" era esperado", + "functionDecoratorTypeUnknown": "Um decorador de função sem tipo obscurece o tipo de função. 
Ignorando o decorador", + "functionInConditionalExpression": "Função de referências de expressão condicional que sempre é avaliada como True", + "functionTypeParametersIllegal": "A sintaxe do parâmetro de tipo de função requer o Python 3.12 ou mais recente", + "futureImportLocationNotAllowed": "As importações __future__ devem estar no início do arquivo", + "generatorAsyncReturnType": "O tipo de retorno da função geradora async deve ser compatível com \"AsyncGenerator[{yieldType}, Any]\"", + "generatorNotParenthesized": "As expressões de gerador devem estar entre parênteses se não forem argumentos exclusivos", + "generatorSyncReturnType": "O tipo de retorno da função de gerador deve ser compatível com \"Generator[{yieldType}, Any, Any]\"", + "genericBaseClassNotAllowed": "A classe base \"Generic\" não pode ser usada com sintaxe de parâmetro de tipo", + "genericClassAssigned": "O tipo de classe genérica não pode ser atribuído", + "genericClassDeleted": "O tipo de classe genérica não pode ser excluído", + "genericInstanceVariableAccess": "O acesso à variável de instância genérica por meio da classe é ambíguo", + "genericNotAllowed": "\"Generic\" não é válido neste contexto", + "genericTypeAliasBoundTypeVar": "O alias de tipo genérico dentro da classe não pode usar variáveis de tipo associado {names}", + "genericTypeArgMissing": "\"Generic\" requer pelo menos um argumento de tipo", + "genericTypeArgTypeVar": "O argumento de tipo para \"Generic\" deve ser uma variável de tipo", + "genericTypeArgUnique": "Os argumentos de tipo para \"Generic\" devem ser exclusivos", + "globalReassignment": "\"{name}\" é atribuído antes da declaração global", + "globalRedefinition": "\"{name}\" já foi declarado global", + "implicitStringConcat": "Concatenação de cadeia de caracteres implícita não permitida", + "importCycleDetected": "Ciclo detectado na cadeia de importação", + "importDepthExceeded": "A profundidade da cadeia de importação excedeu {depth}", + "importResolveFailure": "Não 
foi possível resolver a importação \"{importName}\"", + "importSourceResolveFailure": "Não foi possível resolver a importação \"{importName}\" da origem", + "importSymbolUnknown": "\"{name}\" é um símbolo de importação desconhecido", + "incompatibleMethodOverride": "O método \"{name}\" substitui a classe \"{className}\" de maneira incompatível", + "inconsistentIndent": "O valor de recuo não corresponde ao recuo anterior", + "inconsistentTabs": "Uso inconsistente de guias e espaços no recuo", + "initMethodSelfParamTypeVar": "A anotação de tipo para o parâmetro \"self\" do método \"__init__\" não pode conter variáveis ​​de tipo com escopo de classe", + "initMustReturnNone": "O tipo de retorno de \"__init__\" deve ser None", + "initSubclassCallFailed": "Argumentos de palavra-chave incorretos para o método __init_subclass__", + "initSubclassClsParam": "A substituição __init_subclass__ deve usar um parâmetro \"cls\"", + "initVarNotAllowed": "\"InitVar\" não é permitido neste contexto", + "instanceMethodSelfParam": "Os métodos de instância devem usar um parâmetro \"self\"", + "instanceVarOverridesClassVar": "A variável de instância \"{name}\" substitui a variável de classe de mesmo nome na classe \"{className}\"", + "instantiateAbstract": "Não é possível instanciar a classe abstrata \"{type}\"", + "instantiateProtocol": "Não é possível instanciar a classe Protocol \"{type}\"", + "internalBindError": "Erro interno ao associar o arquivo de associação \"{file}\": {message}", + "internalParseError": "Ocorreu um erro interno ao analisar o arquivo \"{file}\": {message}", + "internalTypeCheckingError": "Erro interno ao digitar o arquivo de verificação \"{file}\": {message}", + "invalidIdentifierChar": "Caractere inválido no identificador", + "invalidStubStatement": "A instrução não faz sentido dentro de um arquivo stub de tipo", + "invalidTokenChars": "Caractere inválido \"{text}\" no token", + "isInstanceInvalidType": "O segundo argumento para \"isinstance\" deve ser uma 
classe ou tuple de classes", + "isSubclassInvalidType": "O segundo argumento para \"issubclass\" deve ser uma classe ou tuple de classes", + "keyValueInSet": "Pares chave/valor não são permitidos em um set", + "keywordArgInTypeArgument": "Argumentos de palavra-chave não podem ser usados em listas de argumentos de tipo", + "keywordOnlyAfterArgs": "Separador de argumento somente palavra-chave não permitido após o parâmetro \"*\"", + "keywordParameterMissing": "Um ou mais parâmetros de palavra-chave devem seguir o parâmetro \"*\"", + "keywordSubscriptIllegal": "Não há suporte para argumentos de palavra-chave em subscritos", + "lambdaReturnTypePartiallyUnknown": "O tipo de retorno de lambda, \"{returnType}\", é parcialmente desconhecido", + "lambdaReturnTypeUnknown": "O tipo de retorno de lambda é desconhecido", + "listAssignmentMismatch": "A expressão com o tipo \"{type}\" não pode ser atribuída à lista de destino", + "listInAnnotation": "Expressão de List não permitida na expressão de tipo", + "literalEmptyArgs": "Um ou mais argumentos de tipo esperados após \"Literal\"", + "literalNamedUnicodeEscape": "Não há suporte para sequências de escape unicode nomeadas em anotações de cadeia de caracteres \"Literal\"", + "literalNotAllowed": "\"Literal\" não pode ser usado nesse contexto sem um argumento de tipo", + "literalNotCallable": "O tipo Literal não pode ser instanciado", + "literalUnsupportedType": "Os argumentos de tipo para \"Literal\" devem ser None, um valor literal (int, bool, str ou bytes) ou um valor de enumeração", + "matchIncompatible": "As match de correspondência exigem Python 3.10 ou mais recente", + "matchIsNotExhaustive": "Os casos dentro da instrução match não lidam exaustivamente com todos os valores", + "maxParseDepthExceeded": "Profundidade máxima de análise excedida. 
Divida a expressão em subexpressões menores", + "memberAccess": "Não é possível acessar o atributo \"{name}\" para a classe \"{type}\"", + "memberDelete": "Não é possível excluir o atributo \"{name}\" para a classe \"{type}\"", + "memberSet": "Não é possível atribuir o atributo \"{name}\" para a classe \"{type}\"", + "metaclassConflict": "A metaclasse de uma classe derivada deve ser uma subclasse das metaclasses de todas as suas classes base", + "metaclassDuplicate": "Apenas uma metaclasse pode ser fornecida", + "metaclassIsGeneric": "A metaclasse não pode ser genérica", + "methodNotDefined": "Método \"{name}\" não definido", + "methodNotDefinedOnType": "Método \"{name}\" não definido no tipo \"{type}\"", + "methodOrdering": "Não é possível criar ordenação de método consistente", + "methodOverridden": "\"{name}\" substitui o método de mesmo nome na classe \"{className}\" pelo tipo incompatível \"{type}\"", + "methodReturnsNonObject": "O método \"{name}\" não retorna um objeto", + "missingSuperCall": "O método \"{methodName}\" não chama o método do mesmo nome na classe pai", + "mixingBytesAndStr": "Valores de bytes e str não podem ser concatenados", + "moduleAsType": "O módulo não pode ser usado como um tipo.", + "moduleNotCallable": "O módulo não pode ser chamado", + "moduleUnknownMember": "\"{memberName}\" não é um atributo conhecido do módulo \"{moduleName}\"", + "namedExceptAfterCatchAll": "Uma cláusula except nomeada não pode aparecer após a cláusula catch-all except", + "namedParamAfterParamSpecArgs": "O parâmetro de palavra-chave \"{name}\" não pode aparecer na assinatura após o parâmetro args ParamSpec", + "namedTupleEmptyName": "Nomes dentro de uma tuple nomeada não podem ficar vazios", + "namedTupleEntryRedeclared": "Não é possível substituir \"{name}\" porque a classe pai \"{className}\" é uma tuple nomeada", + "namedTupleFieldUnderscore": "Nomes de campos de Named tuple não podem começar com um sublinhado", + "namedTupleFirstArg": "Nome de classe de 
tuple nomeado esperado como primeiro argumento", + "namedTupleMultipleInheritance": "Não há suporte para herança múltipla com NamedTuple", + "namedTupleNameKeyword": "Os nomes dos campos não podem ser uma palavra-chave", + "namedTupleNameType": "Esperava-se uma tuple de duas entradas especificando o nome e o tipo da entrada", + "namedTupleNameUnique": "Os nomes dentro de uma tuple nomeada devem ser exclusivos", + "namedTupleNoTypes": "\"namedtuple\" não fornece tipos para entradas de tuple. Em vez disso, use \"NamedTuple\"", + "namedTupleSecondArg": "Esperava-se uma lista de entradas de tuple nomeada como segundo argumento", + "newClsParam": "A substituição __new__ deve usar um parâmetro \"cls\"", + "newTypeAnyOrUnknown": "O segundo argumento para NewType deve ser uma classe conhecida, não Any ou Unknown", + "newTypeBadName": "O primeiro argumento para NewType deve ser um literal de cadeia de caracteres", + "newTypeLiteral": "NewType não pode ser usado com o tipo Literal", + "newTypeNameMismatch": "NewType deve ser atribuído a uma variável com o mesmo nome", + "newTypeNotAClass": "Classe esperada como segundo argumento para NewType", + "newTypeParamCount": "NewType requer dois argumentos posicionais", + "newTypeProtocolClass": "NewType não pode ser usado com um tipo estrutural (uma classe Protocol ou TypedDict)", + "noOverload": "Nenhuma sobrecarga para \"{name}\" corresponde aos argumentos fornecidos", + "noReturnContainsReturn": "A função com o tipo de return declarado \"NoReturn\" não pode incluir uma instrução return", + "noReturnContainsYield": "A função com o tipo de retorno declarado \"NoReturn\" não pode incluir uma instrução yield", + "noReturnReturnsNone": "Função com tipo de retorno declarado \"NoReturn\" não pode retornar \"None\"", + "nonDefaultAfterDefault": "O argumento não padrão segue o argumento padrão", + "nonLocalInModule": "Declaração nonlocal não permitida no nível do módulo", + "nonLocalNoBinding": "Nenhuma associação para \"{name}\" nonlocal encontrada", + "nonLocalReassignment": 
"\"{name}\" é atribuído antes da declaração nonlocal", + "nonLocalRedefinition": "\"{name}\" já foi declarado nonlocal", + "noneNotCallable": "O objeto do tipo \"None\" não pode ser chamado", + "noneNotIterable": "O objeto do tipo \"None\" não pode ser usado como valor iterável", + "noneNotSubscriptable": "O objeto do tipo \"None\" não é subscrito", + "noneNotUsableWith": "Objeto do tipo \"None\" não pode ser usado com \"with\"", + "noneNotUsableWithAsync": "Objeto do tipo \"None\" não pode ser usado com \"async with\"", + "noneOperator": "Operador \"{operator}\" incompatível com \"None\"", + "noneUnknownMember": "\"{name}\" não é um atributo conhecido de \"None\"", + "nonlocalTypeParam": "Associação nonlocal não é permitida para o parâmetro de tipo \"{name}\"", + "notRequiredArgCount": "Argumento de tipo único esperado após \"NotRequired\"", + "notRequiredNotInTypedDict": "\"NotRequired\" não é permitido neste contexto", + "objectNotCallable": "O objeto do tipo \"{type}\" não pode ser chamado", + "obscuredClassDeclaration": "A declaração de classe \"{name}\" está obscurecida por uma declaração de mesmo nome", + "obscuredFunctionDeclaration": "A declaração de função \"{name}\" é obscurecida por uma declaração de mesmo nome", + "obscuredMethodDeclaration": "A declaração de método \"{name}\" é obscurecida por uma declaração de mesmo nome", + "obscuredParameterDeclaration": "A declaração de parâmetro \"{name}\" é obscurecida por uma declaração de mesmo nome", + "obscuredTypeAliasDeclaration": "A declaração de alias de tipo \"{name}\" é obscurecida por uma declaração de mesmo nome", + "obscuredVariableDeclaration": "A declaração \"{name}\" é obscurecida por uma declaração de mesmo nome", + "operatorLessOrGreaterDeprecated": "O operador \"<>\" não é compatível no Python 3. 
Use \"!=\" em vez disso", + "optionalExtraArgs": "Espera-se um argumento de tipo após \"Optional\"", + "orPatternIrrefutable": "Padrão irrefutável permitido somente como o último subpadrão em um padrão \"or\"", + "orPatternMissingName": "Todos os subpadrões dentro de um padrão \"or\" devem ter como destino os mesmos nomes", + "overlappingKeywordArgs": "O dicionário digitado se sobrepõe ao parâmetro de palavra-chave: {names}", + "overlappingOverload": "A sobrecarga {obscured} para \"{name}\" nunca será usada porque seus parâmetros se sobrepõem à sobrecarga {obscuredBy}", + "overloadAbstractImplMismatch": "As sobrecargas devem corresponder ao status abstrato da implementação", + "overloadAbstractMismatch": "As sobrecargas devem ser abstratas ou não", + "overloadClassMethodInconsistent": "Sobrecargas para \"{name}\" usam @classmethod inconsistentemente", + "overloadFinalImpl": "@final decorador deve ser aplicado somente à implementação", + "overloadFinalNoImpl": "Somente a primeira sobrecarga deve ser marcada como @final", + "overloadImplementationMismatch": "A implementação sobrecarregada não é consistente com a assinatura da sobrecarga {index}", + "overloadOverrideImpl": "@override decorador deve ser aplicado somente à implementação", + "overloadOverrideNoImpl": "Somente a primeira sobrecarga deve ser marcada como @override", + "overloadReturnTypeMismatch": "A sobrecarga {prevIndex} para \"{name}\" sobrepõe a sobrecarga {newIndex} e retorna um tipo incompatível", + "overloadStaticMethodInconsistent": "Sobrecargas para \"{name}\" usam @staticmethod inconsistentemente", + "overloadWithoutImplementation": "\"{name}\" está marcado como overload, mas nenhuma implementação foi fornecida", + "overriddenMethodNotFound": "O método \"{name}\" está marcado como override, mas nenhum método base de mesmo nome está presente", + "overrideDecoratorMissing": "O método \"{name}\" não está marcado como override, mas está substituindo um método na classe \"{className}\"", + 
"paramAfterKwargsParam": "O parâmetro não pode seguir o parâmetro \"**\"", + "paramAlreadyAssigned": "O parâmetro \"{name}\" já está atribuído", + "paramAnnotationMissing": "A anotação de tipo está ausente para o parâmetro \"{name}\"", + "paramAssignmentMismatch": "A expressão do tipo \"{sourceType}\" não pode ser atribuída ao parâmetro do tipo \"{paramType}\"", + "paramNameMissing": "Nenhum parâmetro chamado \"{name}\"", + "paramSpecArgsKwargsDuplicate": "Os argumentos para ParamSpec \"{type}\" já foram fornecidos", + "paramSpecArgsKwargsUsage": "Os atributos \"args\" e \"kwargs\" de ParamSpec devem aparecer dentro de uma assinatura de função", + "paramSpecArgsMissing": "Argumentos para ParamSpec \"{type}\" estão ausentes", + "paramSpecArgsUsage": "O atributo \"args\" de ParamSpec é válido somente quando usado com o parâmetro *args", + "paramSpecAssignedName": "ParamSpec deve ser atribuído a uma variável chamada \"{name}\"", + "paramSpecContext": "ParamSpec não é permitido neste contexto", + "paramSpecDefaultNotTuple": "Reticências esperadas, uma expressão de tuple ou ParamSpec para o valor padrão de ParamSpec", + "paramSpecFirstArg": "Nome esperado de ParamSpec como primeiro argumento", + "paramSpecKwargsUsage": "O atributo \"kwargs\" de ParamSpec é válido somente quando usado com o parâmetro **kwargs", + "paramSpecNotUsedByOuterScope": "O ParamSpec \"{name}\" não tem significado neste contexto", + "paramSpecUnknownArg": "ParamSpec não é compatível com mais de um argumento", + "paramSpecUnknownMember": "\"{name}\" não é um atributo conhecido de ParamSpec", + "paramSpecUnknownParam": "\"{name}\" é um parâmetro desconhecido para ParamSpec", + "paramTypeCovariant": "Variável de tipo covariante não pode ser usada no tipo de parâmetro", + "paramTypePartiallyUnknown": "O tipo de parâmetro \"{paramName}\" é parcialmente desconhecido", + "paramTypeUnknown": "O tipo de parâmetro \"{paramName}\" é desconhecido", + "parenthesizedContextManagerIllegal": "Parênteses dentro da instrução \"with\" exigem o Python 3.9 ou mais recente", +
"patternNeverMatches": "O padrão nunca será correspondido para o tipo de assunto \"{type}\"", + "positionArgAfterNamedArg": "O argumento posicional não pode aparecer após argumentos de palavra-chave", + "positionArgAfterUnpackedDictArg": "O argumento posicional não pode aparecer após desempacotamento de argumento de palavra-chave", + "positionOnlyAfterArgs": "Separador de parâmetro somente de posição não permitido após o parâmetro \"*\"", + "positionOnlyAfterKeywordOnly": "O parâmetro \"/\" deve aparecer antes do parâmetro \"*\"", + "positionOnlyAfterNon": "Parâmetro somente de posição não permitido após o parâmetro que não é somente posição", + "positionOnlyFirstParam": "Separador de parâmetro somente de posição não permitido como primeiro parâmetro", + "positionOnlyIncompatible": "O separador de parâmetro somente de posição requer o Python 3.8 ou mais recente", + "privateImportFromPyTypedModule": "\"{name}\" não é exportado do módulo \"{module}\"", + "privateUsedOutsideOfClass": "\"{name}\" é privado e usado fora da classe na qual é declarado", + "privateUsedOutsideOfModule": "\"{name}\" é privado e usado fora do módulo no qual ele é declarado", + "propertyOverridden": "\"{name}\" override incorretamente a property de mesmo nome na classe \"{className}\"", + "propertyStaticMethod": "Métodos estáticos não permitidos para as property getter, setter ou deleter", + "protectedUsedOutsideOfClass": "\"{name}\" está protegido e usado fora da classe na qual está declarado", + "protocolBaseClass": "A classe \"{classType}\" Protocol não pode derivar da classe não Protocol \"{baseType}\"", + "protocolBaseClassWithTypeArgs": "Argumentos de tipo não são permitidos com a classe Protocol ao usar a sintaxe de parâmetro de tipo", + "protocolIllegal": "O uso de \"Protocol\" requer o Python 3.7 ou mais recente", + "protocolNotAllowed": "\"Protocol\" não pode ser usado nesse contexto", + "protocolTypeArgMustBeTypeParam": "O 
argumento de tipo para o \"Protocol\" deve ser um parâmetro de tipo", + "protocolUnsafeOverlap": "A classe se sobrepõe a \"{name}\" de forma não segura e pode produzir uma correspondência em runtime", + "protocolVarianceContravariant": "A variável de tipo \"{variable}\" usada na \"{class}\" Protocol genérica deve ser contravariante", + "protocolVarianceCovariant": "A variável de tipo \"{variable}\" usada na \"{class}\" Protocol genérica deve ser covariante", + "protocolVarianceInvariant": "A variável de tipo \"{variable}\" usada na \"{class}\" Protocol genérica deve ser invariável", + "pyrightCommentInvalidDiagnosticBoolValue": "A diretiva de comentário Pyright deve ser seguida por \"=\" e um valor true ou false", + "pyrightCommentInvalidDiagnosticSeverityValue": "A diretiva de comentário Pyright deve ser seguida por \"=\" e um valor de true, false, error, warning, information ou none", + "pyrightCommentMissingDirective": "O comentário Pyright deve ser seguido por uma diretiva (basic ou strict) ou uma regra de diagnóstico", + "pyrightCommentNotOnOwnLine": "Comentários Pyright usados para controlar as configurações de nível de arquivo devem aparecer em sua própria linha", + "pyrightCommentUnknownDiagnosticRule": "\"{rule}\" é uma regra de diagnóstico desconhecida para o comentário pyright", + "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" é um valor inválido para o comentário pyright. 
true, false, error, warning, information ou none esperados.", + "pyrightCommentUnknownDirective": "\"{directive}\" é uma diretiva desconhecida para o comentário pyright; esperava-se \"strict\", \"standard\" ou \"basic\"", + "readOnlyArgCount": "Argumento de tipo único esperado após \"ReadOnly\"", + "readOnlyNotInTypedDict": "\"ReadOnly\" não é permitido neste contexto", + "recursiveDefinition": "Não foi possível determinar o tipo de \"{name}\" porque ele refere-se a si mesmo", + "relativeImportNotAllowed": "Importações relativas não podem ser usadas com o formulário \"import .a\". Use \"from . import a\" em vez disso", + "requiredArgCount": "Argumento de tipo único esperado após \"Required\"", + "requiredNotInTypedDict": "\"Required\" não é permitido neste contexto", + "returnInAsyncGenerator": "A instrução return com valor não é permitida no gerador async", + "returnInExceptionGroup": "\"return\" não é permitido em um bloco \"except*\"", + "returnMissing": "Função com tipo de retorno declarado \"{returnType}\" deve retornar valor em todos os caminhos de código", + "returnOutsideFunction": "\"return\" só pode ser usado dentro de uma função", + "returnTypeContravariant": "A variável de tipo contravariante não pode ser usada no tipo de retorno", + "returnTypeMismatch": "O tipo \"{exprType}\" não pode ser atribuído ao tipo \"{returnType}\"", + "returnTypePartiallyUnknown": "O tipo de retorno, \"{returnType}\", é parcialmente desconhecido", + "returnTypeUnknown": "O tipo de retorno é desconhecido", + "revealLocalsArgs": "Nenhum argumento esperado para a chamada \"reveal_locals\"", + "revealLocalsNone": "Nenhum locals neste escopo", + "revealTypeArgs": "Esperava-se um único argumento posicional para a chamada \"reveal_type\"", + "revealTypeExpectedTextArg": "O argumento \"expected_text\" para a função \"reveal_type\" deve ser um valor literal str", + "revealTypeExpectedTextMismatch": "Tipo de incompatibilidade de texto. 
O esperado era \"{expected}\", mas recebeu \"{received}\"", + "revealTypeExpectedTypeMismatch": "Incompatibilidade de tipo. Esperava-se \"{expected}\", mas recebeu \"{received}\"", + "selfTypeContext": "\"Self\" não é válido neste contexto.", + "selfTypeMetaclass": "\"Self\" não pode ser usado em uma metaclasse (uma subclasse de \"type\")", + "selfTypeWithTypedSelfOrCls": "\"Self\" não pode ser usado em uma função com um parâmetro `self` ou `cls que tenha uma anotação de tipo diferente de \"Self\"", + "sentinelBadName": "O primeiro argumento para Sentinel deve ser um literal de cadeia de caracteres", + "sentinelNameMismatch": "Sentinel deve ser atribuído a uma variável com o mesmo nome", + "sentinelParamCount": "O Sentinel requer um argumento posicional", + "setterGetterTypeMismatch": "O tipo de valor do setter da property não é atribuível ao tipo de retorno getter", + "singleOverload": "\"{name}\" está marcado como sobrecarga, mas sobrecargas adicionais estão ausentes", + "slotsAttributeError": "\"{name}\" não está especificado em __slots__", + "slotsClassVarConflict": "\"{name}\" está em conflito com a variável de instância declarada __slots__", + "starPatternInAsPattern": "O padrão de estrela não pode ser usado com o destino \"as\"", + "starPatternInOrPattern": "O padrão de estrela não pode ser ORed em outros padrões", + "starStarWildcardNotAllowed": "** não pode ser usado com curinga \"_\"", + "staticClsSelfParam": "Os métodos estáticos não devem usar um parâmetro \"self\" ou \"cls\"", + "stringNonAsciiBytes": "Caractere não ASCII não permitido em literal de cadeia de caracteres de bytes", + "stringNotSubscriptable": "A expressão de cadeia de caracteres não pode ser subscrita na expressão de tipo. 
Coloque toda a expressão entre aspas", + "stringUnsupportedEscape": "Sequência de escape sem suporte no literal de cadeia de caracteres", + "stringUnterminated": "Literal de cadeia de caracteres não finalizado", + "stubFileMissing": "Arquivo stub não encontrado para \"{importName}\"", + "stubUsesGetAttr": "O arquivo stub de tipo está incompleto. \"__getattr__\" obscurece erros de tipo para o módulo", + "sublistParamsIncompatible": "parâmetros de sublista não são suportados no Python 3.x", + "superCallArgCount": "Não mais que dois argumentos eram esperados para a chamada \"super\"", + "superCallFirstArg": "Tipo de classe esperado como o primeiro argumento para a chamada \"super\", mas recebeu \"{type}\"", + "superCallSecondArg": "O segundo argumento para a chamada \"super\" deve ser objeto ou classe que deriva de \"{type}\"", + "superCallZeroArgForm": "A forma de chamada \"super\" com argumento zero é válida apenas dentro de um método", + "superCallZeroArgFormStaticMethod": "A forma de chamada \"super\" com argumento zero é válida apenas dentro de um método", + "symbolIsPossiblyUnbound": "\"{name}\" possivelmente não está associado", + "symbolIsUnbound": "\"{name}\" não está associado", + "symbolIsUndefined": "\"{name}\" não está definido", + "symbolOverridden": "\"{name}\" substitui o símbolo de mesmo nome na classe \"{className}\"", + "templateStringBytes": "Literais de cadeia de caracteres de modelo (cadeias de caracteres t) não podem ser binários", + "templateStringIllegal": "Literais de cadeia de caracteres de modelo (cadeias de caracteres t) exigem Python 3.14 ou mais recente", + "templateStringUnicode": "Literais de cadeia de caracteres de modelo (cadeias de caracteres t) não podem ser unicode", + "ternaryNotAllowed": "Expressão de ternário não permitida na expressão de tipo", + "totalOrderingMissingMethod": "A classe deve definir um dos \"__lt__\", \"__le__\", \"__gt__\" ou \"__ge__\" para usar total_ordering", + "trailingCommaInFromImport": "A vírgula à 
direita não é permitida sem parênteses ao redor", + "tryWithoutExcept": "A instrução Try deve ter pelo menos uma cláusula except ou finally", + "tupleAssignmentMismatch": "A expressão com o tipo \"{type}\" não pode ser atribuída à tuple de destino", + "tupleInAnnotation": "Expressão de tuple não permitida na expressão de tipo", + "tupleIndexOutOfRange": "O índice {index} está fora do intervalo para o tipo {type}", + "typeAliasIllegalExpressionForm": "Formulário de expressão inválido para definição de alias de tipo", + "typeAliasIsRecursiveDirect": "O alias de tipo \"{name}\" não pode usar a si mesmo em sua definição", + "typeAliasNotInModuleOrClass": "Um TypeAlias só pode ser definido dentro de um módulo ou escopo de classe", + "typeAliasRedeclared": "\"{name}\" é declarado como um TypeAlias e só pode ser atribuído uma vez", + "typeAliasStatementBadScope": "Uma instrução type só pode ser usada dentro de um módulo ou escopo de classe", + "typeAliasStatementIllegal": "A instrução de alias de tipo requer o Python 3.12 ou mais recente", + "typeAliasTypeBadScope": "Um alias de tipo só pode ser definido dentro de um módulo ou escopo de classe", + "typeAliasTypeBaseClass": "Um alias de tipo definido em uma instrução \"type\" não pode ser usado como uma classe base", + "typeAliasTypeMustBeAssigned": "TypeAliasType deve ser atribuído a uma variável com o mesmo nome que o alias de tipo", + "typeAliasTypeNameArg": "O primeiro argumento para TypeAliasType deve ser um literal de cadeia de caracteres que representa o nome do alias de tipo", + "typeAliasTypeNameMismatch": "O nome do alias de tipo deve corresponder ao nome da variável à qual ela está atribuída", + "typeAliasTypeParamInvalid": "A lista de parâmetros de tipo deve ser uma tuple contendo apenas TypeVar, TypeVarTuple ou ParamSpec", + "typeAnnotationCall": "Expressão de chamada não permitida na expressão de tipo", + "typeAnnotationVariable": "Variável não permitida na expressão de tipo", + "typeAnnotationWithCallable": 
"O argumento de tipo para \"type\" deve ser uma classe; não há suporte para callables", + "typeArgListExpected": "ParamSpec, reticências ou lista de tipos esperados", + "typeArgListNotAllowed": "Expressão de lista não permitida para este argumento de tipo", + "typeArgsExpectingNone": "Nenhum argumento de tipo era esperado para a classe \"{name}\"", + "typeArgsMismatchOne": "Esperava-se um argumento de tipo, mas recebeu {received}", + "typeArgsMissingForAlias": "Argumentos de tipo esperados para o alias de tipo genérico \"{name}\"", + "typeArgsMissingForClass": "Argumentos de tipo esperados para a classe genérica \"{name}\"", + "typeArgsTooFew": "Poucos argumentos de tipo fornecidos para \"{name}\". Esperava-se {expected}, mas recebeu {received}", + "typeArgsTooMany": "Muitos argumentos de tipo fornecidos para \"{name}\". Esperava-se {expected}, mas recebeu {received}", + "typeAssignmentMismatch": "O tipo \"{sourceType}\" não pode ser atribuído ao tipo declarado \"{destType}\"", + "typeAssignmentMismatchWildcard": "O símbolo de importação \"{name}\" tem o tipo \"{sourceType}\", que não pode ser atribuído ao tipo declarado \"{destType}\"", + "typeCallNotAllowed": "A chamada type() não deve ser usada na expressão de tipo", + "typeCheckOnly": "\"{name}\" está marcado como @type_check_only e pode ser usado apenas em anotações de tipo", + "typeCommentDeprecated": "O uso de comentários de type foi preterido. 
Use anotação de type em vez disso", + "typeExpectedClass": "Classe esperada, mas a recebida foi \"{type}\"", + "typeFormArgs": "\"TypeForm\" aceita um único argumento posicional", + "typeGuardArgCount": "Argumento de tipo único esperado após \"TypeGuard\" ou \"TypeIs\"", + "typeGuardParamCount": "Funções e métodos de proteção de tipo definidos pelo usuário devem ter pelo menos um parâmetro de entrada", + "typeIsReturnType": "O tipo de retorno de TypeIs (\"{returnType}\") não é consistente com o tipo de parâmetro de valor (\"{type}\")", + "typeNotAwaitable": "\"{type}\" não é awaitable", + "typeNotIntantiable": "\"{type}\" não pode ser instanciado", + "typeNotIterable": "\"{type}\" não é iterável", + "typeNotSpecializable": "Não foi possível especializar o tipo \"{type}\"", + "typeNotSubscriptable": "O objeto do tipo \"{type}\" não é subscrito", + "typeNotSupportBinaryOperator": "Operador \"{operator}\" sem suporte para os tipos \"{leftType}\" e \"{rightType}\"", + "typeNotSupportBinaryOperatorBidirectional": "O operador \"{operator}\" não tem suporte para os tipos \"{leftType}\" e \"{rightType}\" quando o tipo esperado é \"{expectedType}\"", + "typeNotSupportUnaryOperator": "Operador \"{operator}\" sem suporte para o tipo \"{type}\"", + "typeNotSupportUnaryOperatorBidirectional": "O operador \"{operator}\" não tem suporte para o tipo \"{type}\" quando o tipo esperado é \"{expectedType}\"", + "typeNotUsableWith": "Objeto do tipo \"{type}\" não pode ser usado com \"with\" porque não implementa corretamente o {method}", + "typeNotUsableWithAsync": "Objeto do tipo \"{type}\" não pode ser usado com \"async with\" porque não implementa corretamente o {method}", + "typeParameterBoundNotAllowed": "Associação ou restrição não pode ser usada com um parâmetro de tipo variadic ou ParamSpec", + "typeParameterConstraintTuple": "A restrição de parâmetro de tipo deve ser uma tupla de dois ou mais tipos", + "typeParameterExistingTypeParameter": "O parâmetro de tipo \"{name}\" já 
está em uso", + "typeParameterNotDeclared": "O parâmetro de tipo \"{name}\" não está incluído na lista de parâmetros de tipo para \"{container}\"", + "typeParametersMissing": "Pelo menos um parâmetro de tipo deve ser especificado", + "typePartiallyUnknown": "O tipo de \"{name}\" é parcialmente desconhecido", + "typeUnknown": "O tipo de \"{name}\" é desconhecido", + "typeVarAssignedName": "TypeVar deve ser atribuído a uma variável chamada \"{name}\"", + "typeVarAssignmentMismatch": "O tipo \"{type}\" não pode ser atribuído à variável de tipo \"{name}\"", + "typeVarBoundAndConstrained": "TypeVar não pode ser associado e restrito", + "typeVarBoundGeneric": "O tipo associado TypeVar não pode ser genérico", + "typeVarConstraintGeneric": "O tipo de restrição TypeVar não pode ser genérico", + "typeVarDefaultBoundMismatch": "O tipo padrão TypeVar deve ser um subtipo do tipo associado", + "typeVarDefaultConstraintMismatch": "O tipo padrão TypeVar deve ser um dos tipos restritos", + "typeVarDefaultIllegal": "Tipos padrão de variável de tipo exigem Python 3.13 ou mais recente", + "typeVarDefaultInvalidTypeVar": "O parâmetro de tipo \"{name}\" tem um tipo padrão que se refere a uma ou mais variáveis de tipo que estão fora do escopo", + "typeVarFirstArg": "Nome esperado de TypeVar como primeiro argumento", + "typeVarInvalidForMemberVariable": "O tipo de atributo não pode usar a variável de tipo \"{name}\" com escopo para o método local", + "typeVarNoMember": "TypeVar \"{type}\" não tem atributo \"{name}\"", + "typeVarNotSubscriptable": "Não é possível subscrever TypeVar \"{type}\"", + "typeVarNotUsedByOuterScope": "A variável de tipo \"{name}\" não tem significado neste contexto", + "typeVarPossiblyUnsolvable": "A variável de tipo \"{name}\" pode ficar sem resolução se o chamador não fornecer nenhum argumento para o parâmetro \"{param}\"", + "typeVarSingleConstraint": "TypeVar deve ter pelo menos dois tipos restritos", + "typeVarTupleConstraints": "TypeVarTuple não pode ter 
restrições de valor", + "typeVarTupleContext": "TypeVarTuple não é permitido neste contexto", + "typeVarTupleDefaultNotUnpacked": "O tipo padrão TypeVarTuple deve ser uma tuple desempacotamento ou TypeVarTuple", + "typeVarTupleMustBeUnpacked": "O operador Unpack é necessário para o valor TypeVarTuple", + "typeVarTupleUnknownParam": "\"{name}\" é um parâmetro desconhecido para TypeVarTuple", + "typeVarUnknownParam": "\"{name}\" é um parâmetro desconhecido para TypeVar", + "typeVarUsedByOuterScope": "TypeVar \"{name}\" já está em uso por um escopo externo", + "typeVarUsedOnlyOnce": "TypeVar \"{name}\" aparece apenas uma vez na assinatura de função genérica", + "typeVarVariance": "TypeVar não pode ser covariante e contravariante", + "typeVarWithDefaultFollowsVariadic": "O TypeVar \"{typeVarName}\" tem um valor padrão e não pode seguir TypeVarTuple \"{variadicName}\"", + "typeVarWithoutDefault": "\"{name}\" não pode aparecer após \"{other}\" na lista de parâmetros de tipo porque não tem nenhum tipo padrão", + "typeVarsNotInGenericOrProtocol": "Generic[] ou Protocol[] deve incluir todas as variáveis de tipo", + "typedDictAccess": "Não foi possível acessar o item em TypedDict", + "typedDictAssignedName": "TypedDict deve ser atribuído a uma variável chamada \"{name}\"", + "typedDictBadVar": "As classes TypedDict podem conter apenas anotações de tipo", + "typedDictBaseClass": "Todas as classes base para classes TypedDict também devem ser classes TypedDict", + "typedDictBoolParam": "Esperava-se que o parâmetro \"{name}\" tivesse um valor True ou False", + "typedDictClosedExtras": "A classe \"{name}\" é um TypedDict que limita o tipo de itens extras a serem digitados \"{type}\"", + "typedDictClosedFalseNonOpenBase": "A classe base \"{name}\" não é um TypedDict aberto; closed=False não é permitido", + "typedDictClosedNoExtras": "A classe base \"{name}\" é um TypedDict closed; itens extras não são permitidos", + "typedDictDelete": "Não foi possível excluir o item em 
TypedDict", + "typedDictEmptyName": "Os nomes dentro de um TypedDict não podem estar vazios", + "typedDictEntryName": "Literal de cadeia de caracteres esperado para o nome da entrada do dicionário", + "typedDictEntryUnique": "Os nomes dentro de um dicionário devem ser exclusivos", + "typedDictExtraArgs": "Argumentos TypedDict extras são incompatíveis", + "typedDictExtraItemsClosed": "TypedDict pode usar \"closed\" ou \"extra_items\" mas não ambos", + "typedDictFieldNotRequiredRedefinition": "O item TypedDict \"{name}\" não pode ser redefinido como NotRequired", + "typedDictFieldReadOnlyRedefinition": "O item TypedDict \"{name}\" não pode ser redefinido como ReadOnly", + "typedDictFieldRequiredRedefinition": "O item TypedDict \"{name}\" não pode ser redefinido como Required", + "typedDictFirstArg": "Nome da classe TypedDict esperado como primeiro argumento", + "typedDictInClassPattern": "A classe TypedDict não é permitida no padrão de classe", + "typedDictInitsubclassParameter": "TypedDict não dá suporte ao parâmetro __init_subclass__ \"{name}\"", + "typedDictNotAllowed": "\"TypedDict\" não pode ser usado neste contexto", + "typedDictSecondArgDict": "Parâmetro dict ou de palavra-chave esperado como segundo parâmetro", + "typedDictSecondArgDictEntry": "Entrada de dicionário simples esperada", + "typedDictSet": "Não foi possível atribuir o item em TypedDict", + "unaccessedClass": "A classe \"{name}\" não foi acessada", + "unaccessedFunction": "A função \"{name}\" não foi acessada", + "unaccessedImport": "A importação \"{name}\" não foi acessada", + "unaccessedSymbol": "\"{name}\" não foi acessado", + "unaccessedVariable": "A variável \"{name}\" não foi acessada", + "unannotatedFunctionSkipped": "A análise da função \"{name}\" foi ignorada porque não foi anotada", + "unaryOperationNotAllowed": "Operador unário não permitido na expressão de tipo", + "unexpectedAsyncToken": "Esperado \"def\", \"with\" ou \"for\" para acompanhar \"async\"", + "unexpectedEof": "EOF 
inesperado", + "unexpectedExprToken": "Token inesperado no final da expressão", + "unexpectedIndent": "Recuo inesperado", + "unexpectedUnindent": "Recuo não esperado", + "unhashableDictKey": "A chave do dicionário deve ser hash", + "unhashableSetEntry": "A entrada set deve ser permitir hash", + "uninitializedAbstractVariables": "As variáveis definidas na classe base abstrata não são inicializadas na classe final \"{classType}\"", + "uninitializedInstanceVariable": "A variável de instância \"{name}\" não foi inicializada no corpo da classe ou no método __init__", + "unionForwardReferenceNotAllowed": "A sintaxe de Union não pode ser usada com operando de cadeia de caracteres. Use aspas em toda a expressão", + "unionSyntaxIllegal": "A sintaxe alternativa para uniões requer o Python 3.10 ou mais recente", + "unionTypeArgCount": "A Union requer dois ou mais argumentos de tipo", + "unionUnpackedTuple": "A Union não pode incluir uma tuple desempacotada", + "unionUnpackedTypeVarTuple": "A Union não pode incluir um TypeVarTuple desempacotado", + "unnecessaryCast": "Chamada \"cast\" desnecessária. O tipo já é \"{type}\"", + "unnecessaryIsInstanceAlways": "Chamada de isinstance desnecessária. \"{testType}\" é sempre uma instância de \"{classType}\"", + "unnecessaryIsInstanceNever": "Chamada de isinstance desnecessária; '{testType}' nunca é uma instância de '{classType}'", + "unnecessaryIsSubclassAlways": "Chamada issubclass desnecessária. 
\"{testType}\" é sempre uma subclasse de \"{classType}\"", + "unnecessaryIsSubclassNever": "Chamada issubclass desnecessária; '{testType}' nunca é uma subclasse de '{classType}'", + "unnecessaryPyrightIgnore": "Comentário desnecessário \"# pyright: ignore\"", + "unnecessaryPyrightIgnoreRule": "Regra desnecessária \"# pyright: ignore\": \"{name}\"", + "unnecessaryTypeIgnore": "Comentário \"# type: ignore\" desnecessário", + "unpackArgCount": "Argumento de tipo único esperado após \"Unpack\"", + "unpackExpectedTypeVarTuple": "TypeVarTuple ou tuple esperado como argumento de tipo para Unpack", + "unpackExpectedTypedDict": "Argumento de tipo TypedDict esperado para Unpack", + "unpackIllegalInComprehension": "Operação de desempacotamento não permitida na compreensão", + "unpackInAnnotation": "Operador de desempacotamento não permitido na expressão de tipo", + "unpackInDict": "Operação de desempacotamento não permitida em dicionários", + "unpackInSet": "Operador unpack não permitido em um set", + "unpackNotAllowed": "Unpack não é permitido neste contexto", + "unpackOperatorNotAllowed": "A operação de descompactação não é permitida neste contexto", + "unpackTuplesIllegal": "Operação de desempacotamento não permitida em tuplas anteriores ao Python 3.8", + "unpackedArgInTypeArgument": "Os argumentos descompactados não podem ser usados nesse contexto", + "unpackedArgWithVariadicParam": "O argumento desempacotado não pode ser usado para o parâmetro TypeVarTuple", + "unpackedDictArgumentNotMapping": "A expressão de argumento após ** deve ser um mapeamento com um tipo de chave \"str\"", + "unpackedDictSubscriptIllegal": "O operador de desempacotamento de dicionário no subscrito não é permitido", + "unpackedSubscriptIllegal": "O operador Unpack no subscrito requer o Python 3.11 ou mais recente", + "unpackedTypeVarTupleExpected": "Esperava-se TypeVarTuple desempacotada. 
Use Unpack[{name1}] ou *{name2}", + "unpackedTypedDictArgument": "Não é possível corresponder o argumento TypedDict desempacotado aos parâmetros", + "unreachableCodeCondition": "O código não é analisado porque a condição é avaliada estaticamente como falsa", + "unreachableCodeStructure": "O código está estruturalmente inacessível", + "unreachableCodeType": "A análise de tipo indica que o código está inacessível", + "unreachableExcept": "A cláusula Except está inacessível porque a exceção já foi tratada", + "unsupportedDunderAllOperation": "A operação em \"__all__\" não é compatível, portanto, a lista de símbolos exportada pode estar incorreta", + "unusedCallResult": "O resultado da expressão de chamada é do tipo \"{type}\" e não é usado. Atribua à variável \"_\" se isso for intencional", + "unusedCoroutine": "O resultado da chamada de função async não foi usado. Use \"await\" ou atribua o resultado à variável", + "unusedExpression": "O valor da expressão não é usado", + "varAnnotationIllegal": "As anotações de tipo para variáveis exigem Python 3.6 ou mais recente. 
Use comentário de type para compatibilidade com versões anteriores", + "variableFinalOverride": "A variável \"{name}\" está marcada como Final e substitui a variável não Final de mesmo nome na classe \"{className}\"", + "variadicTypeArgsTooMany": "A lista de argumentos de tipo pode ter no máximo um TypeVarTuple ou tuple descompactado", + "variadicTypeParamTooManyAlias": "O alias de tipo pode ter no máximo um parâmetro de tipo TypeVarTuple, mas recebeu vários ({names})", + "variadicTypeParamTooManyClass": "A classe genérica pode ter no máximo um parâmetro de tipo TypeVarTuple, mas recebeu vários ({names})", + "walrusIllegal": "O operador \":=\" requer o Python 3.8 ou mais recente", + "walrusNotAllowed": "Operador \":=\" não é permitido neste contexto sem parênteses", + "wildcardInFunction": "Wildcard import not allowed within a class or function", + "wildcardLibraryImport": "Wildcard import from a library not allowed", + "wildcardPatternTypePartiallyUnknown": "O tipo capturado pelo padrão curinga é parcialmente desconhecido", + "wildcardPatternTypeUnknown": "O tipo capturado pelo padrão curinga é desconhecido", + "yieldFromIllegal": "O uso de \"yield from\" requer o Python 3.3 ou mais recente", + "yieldFromOutsideAsync": "\"yield from\" não é permitido em uma função async", + "yieldOutsideFunction": "\"yield\" não permitido fora de uma função ou lambda", + "yieldWithinComprehension": "\"yield\" não é permitido dentro de uma compreensão", + "zeroCaseStatementsFound": "A instrução Match deve incluir pelo menos uma instrução case", + "zeroLengthTupleNotAllowed": "Zero-length tuple is not allowed in this context" + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "O formulário especial \"Annotated\" não pode ser usado com verificações de instância e classe", + "argParam": "O argumento corresponde ao parâmetro \"{paramName}\"", + "argParamFunction": "O argumento corresponde ao parâmetro \"{paramName}\" na função \"{functionName}\"", + "argsParamMissing": "O 
parâmetro \"*{paramName}\" não tem nenhum parâmetro correspondente", + "argsPositionOnly": "Incompatibilidade de parâmetro somente de posição; esperava-se {expected}, mas recebeu {received}", + "argumentType": "O tipo de argumento é \"{type}\"", + "argumentTypes": "Tipos de argumento: ({types})", + "assignToNone": "O tipo não pode ser atribuído a \"None\"", + "asyncHelp": "Você quis dizer \"async with\"?", + "baseClassIncompatible": "A classe base \"{baseClass}\" é incompatível com o tipo \"{type}\"", + "baseClassIncompatibleSubclass": "A classe base \"{baseClass}\" deriva de \"{subclass}\" que é incompatível com o tipo \"{type}\"", + "baseClassOverriddenType": "A classe base \"{baseClass}\" fornece o tipo \"{type}\", que é substituído", + "baseClassOverridesType": "A classe base \"{baseClass}\" substitui pelo tipo \"{type}\"", + "bytesTypePromotions": "Defina disableBytesTypePromotions como false para habilitar o comportamento de promoção de tipo para \"bytearray\" e \"memoryview\"", + "conditionalRequiresBool": "O método __bool__ para o tipo \"{operandType}\" retorna o tipo \"{boolReturnType}\" em vez de \"bool\"", + "dataClassFieldLocation": "Declaração de campo", + "dataClassFrozen": "\"{name}\" está congelado", + "dataProtocolUnsupported": "\"{name}\" é um protocolo de dados", + "descriptorAccessBindingFailed": "Falha ao associar o método \"{name}\" para a classe de descritor \"{className}\"", + "descriptorAccessCallFailed": "Falha ao chamar o método \"{name}\" para a classe de descritor \"{className}\"", + "finalMethod": "Método Final", + "functionParamDefaultMissing": "O parâmetro \"{name}\" não tem um argumento padrão", + "functionParamName": "Incompatibilidade de nome de parâmetro: \"{destName}\" versus \"{srcName}\"", + "functionParamPositionOnly": "Incompatibilidade de parâmetro somente posição; o parâmetro \"{name}\" não é somente posição", + "functionReturnTypeMismatch": "O tipo de retorno de função \"{sourceType}\" é incompatível com o tipo 
\"{destType}\"", + "functionTooFewParams": "A função aceita poucos parâmetros posicionais. Esperava-se {expected}, mas recebeu {received}", + "functionTooManyParams": "A função aceita muitos parâmetros posicionais. Esperava-se {expected}, mas recebeu {received}", + "genericClassNotAllowed": "Tipo genérico com argumentos de tipo não permitidos para verificações de instância ou de classe", + "incompatibleDeleter": "O método de deleter de property é incompatível", + "incompatibleGetter": "O método getter de property é incompatível", + "incompatibleSetter": "O método setter de property é incompatível", + "initMethodLocation": "O método __init__ é definido na classe \"{type}\"", + "initMethodSignature": "A assinatura de __init__ é \"{type}\"", + "initSubclassLocation": "O método __init_subclass__ é definido na classe \"{name}\"", + "invariantSuggestionDict": "Considere alternar de \"dict\" para \"Mapping\", que é covariante no tipo de valor", + "invariantSuggestionList": "Considere alternar de \"list\" para \"Sequence\", que é covariante", + "invariantSuggestionSet": "Considere alternar de \"set\" para \"Container\", que é covariante", + "isinstanceClassNotSupported": "\"{type}\" não tem suporte para verificações de instância e classe", + "keyNotRequired": "\"{name}\" não é uma chave necessária em \"{type}\", portanto, o acesso pode resultar em exceção de runtime", + "keyReadOnly": "\"{name}\" é uma chave somente leitura em \"{type}\"", + "keyRequiredDeleted": "\"{name}\" é uma chave obrigatória e não pode ser excluída", + "keyUndefined": "\"{name}\" não é uma chave definida em \"{type}\"", + "kwargsParamMissing": "O parâmetro \"**{paramName}\" não tem nenhum parâmetro correspondente", + "listAssignmentMismatch": "O tipo \"{type}\" é incompatível com a lista de destino", + "literalAssignmentMismatch": "\"{sourceType}\" não pode ser atribuído a o tipo\"{destType}\"", + "literalNotAllowed": "O formulário especial \"Literal\" não pode ser usado com verificações de 
instância e classe", + "matchIsNotExhaustiveHint": "Se não pretende usar a manipulação exaustiva, adicione \"case _: pass\"", + "matchIsNotExhaustiveType": "Tipo sem tratamento: \"{type}\"", + "memberAssignment": "A expressão do tipo \"{type}\" não pode ser atribuída ao atributo \"{name}\" da classe \"{classType}\"", + "memberIsAbstract": "\"{type}.{name}\" não está implementado", + "memberIsAbstractMore": "e mais {count}...", + "memberIsClassVarInProtocol": "\"{name}\" é definido como um ClassVar no protocolo", + "memberIsInitVar": "\"{name}\" é um campo somente de init-only", + "memberIsInvariant": "\"{name}\" é invariável porque é mutável", + "memberIsNotClassVarInClass": "\"{name}\" deve ser definido como um ClassVar para ser compatível com o protocolo", + "memberIsNotClassVarInProtocol": "\"{name}\" não está definido como um ClassVar no protocolo", + "memberIsNotReadOnlyInProtocol": "\"{name}\" não é somente leitura no protocolo", + "memberIsReadOnlyInProtocol": "\"{name}\" é somente leitura no protocolo", + "memberIsWritableInProtocol": "\"{name}\" é gravável no protocolo", + "memberSetClassVar": "O atributo \"{name}\" não pode ser atribuído por meio de uma instância de classe porque é um ClassVar", + "memberTypeMismatch": "\"{name}\" é um tipo incompatível", + "memberUnknown": "O atributo \"{name}\" é desconhecido", + "metaclassConflict": "A metaclasse \"{metaclass1}\" entra em conflito com \"{metaclass2}\"", + "missingDeleter": "O método de deleter de property está ausente", + "missingGetter": "O método getter da property está ausente", + "missingSetter": "O método setter da property está ausente", + "namedParamMissingInDest": "Parâmetro extra \"{name}\"", + "namedParamMissingInSource": "Parâmetro de palavra-chave ausente \"{name}\"", + "namedParamTypeMismatch": "O parâmetro de palavra-chave \"{name}\" do tipo \"{sourceType}\" é incompatível com o tipo \"{destType}\"", + "namedTupleNotAllowed": "NamedTuple não pode ser usado para verificações de instância 
ou de classe", + "newMethodLocation": "O método __new__ é definido na classe \"{type}\"", + "newMethodSignature": "A assinatura de__new__ é \"{type}\"", + "newTypeClassNotAllowed": "O tipo criado com NewType não pode ser usado em verificações de instância e classe", + "noOverloadAssignable": "Nenhuma função sobrecarregada corresponde ao tipo \"{type}\"", + "noneNotAllowed": "None não pode ser usado para verificações de instância ou de classe", + "orPatternMissingName": "Nomes ausentes: {name}", + "overloadIndex": "Sobrecarga {index} é a correspondência mais próxima", + "overloadNotAssignable": "Uma ou mais sobrecargas de \"{name}\" não podem ser atribuídas", + "overloadSignature": "A assinatura de sobrecarga é definida aqui", + "overriddenMethod": "Método substituído", + "overriddenSymbol": "Símbolo substituído", + "overrideInvariantMismatch": "O tipo da substituição \"{overrideType}\" não é o mesmo do tipo básico \"{baseType}\"", + "overrideIsInvariant": "A variável é mutável, então seu tipo é invariável", + "overrideNoOverloadMatches": "Nenhuma assinatura de sobrecarga na substituição é compatível com o método base", + "overrideNotClassMethod": "O método base é declarado como um classmethod, mas a substituição não é", + "overrideNotInstanceMethod": "O método base é declarado como um método de instância, mas a substituição não é", + "overrideNotStaticMethod": "O método base é declarado como staticmethod, mas a substituição não é", + "overrideOverloadNoMatch": "Override não lida com todas as sobrecargas do método base", + "overrideOverloadOrder": "As sobrecargas para o método de substituição devem estar na mesma ordem que o método base", + "overrideParamKeywordNoDefault": "Incompatibilidade de parâmetro de palavra-chave \"{name}\": o parâmetro base tem valor de argumento padrão, o parâmetro de substituição não", + "overrideParamKeywordType": "Incompatibilidade de tipo de parâmetro de palavra-chave \"{name}\": o parâmetro base é do tipo \"{baseType}\", o parâmetro 
de substituição é do tipo \"{overrideType}\"", + "overrideParamName": "Incompatibilidade de nome de parâmetro {index}: o parâmetro base é denominado \"{baseName}\", o parâmetro de substituição é denomidado \"{overrideName}\"", + "overrideParamNameExtra": "O parâmetro \"{name}\" está ausente na base", + "overrideParamNameMissing": "O parâmetro \"{name}\" está ausente na substituição", + "overrideParamNamePositionOnly": "Incompatibilidade de parâmetro {index}: o parâmetro base \"{baseName}\" é o parâmetro de palavra-chave, o parâmetro de substituição é somente posição", + "overrideParamNoDefault": "Parâmetro {index} incompatível: o parâmetro base tem valor de argumento padrão, o parâmetro de substituição não", + "overrideParamType": "Incompatibilidade de tipo de parâmetro {index}: o parâmetro base é do tipo \"{baseType}\", o parâmetro de substituição é do tipo \"{overrideType}\"", + "overridePositionalParamCount": "Incompatibilidade de contagem de parâmetros posicionais. O método base tem {baseCount}, mas a substituição tem {overrideCount}", + "overrideReturnType": "Incompatibilidade de tipo de retorno: o método base retorna o tipo \"{baseType}\", a substituição retorna o tipo \"{overrideType}\"", + "overrideType": "A classe base define o tipo como \"{type}\"", + "paramAssignment": "Parâmetro {index}: o tipo \"{sourceType}\" é incompatível com o tipo \"{destType}\"", + "paramSpecMissingInOverride": "Os parâmetros ParamSpec estão ausentes no método de substituição", + "paramType": "O tipo de parâmetro é \"{paramType}\"", + "privateImportFromPyTypedSource": "Em vez disso, importe de \"{module}\"", + "propertyAccessFromProtocolClass": "Uma propriedade definida dentro de uma classe de protocolo não pode ser acessada como uma variável de classe", + "propertyMethodIncompatible": "O método de property \"{name}\" é incompatível", + "propertyMethodMissing": "O método de property \"{name}\" está ausente na substituição", + "propertyMissingDeleter": "A property \"{name}\" não 
tem nenhum deleter definido", + "propertyMissingSetter": "A property \"{name}\" não tem um setter definido", + "protocolIncompatible": "\"{sourceType}\" é incompatível com o protocolo \"{destType}\"", + "protocolMemberMissing": "\"{name}\" não está presente", + "protocolRequiresRuntimeCheckable": "A classe do Protocol deve ser @runtime_checkable para ser usada com verificações de instância e de classe", + "protocolSourceIsNotConcrete": "\"{sourceType}\" não é um tipo de classe concreta e não pode ser atribuído ao tipo \"{destType}\"", + "protocolUnsafeOverlap": "Os atributos de \"{name}\" têm os mesmos nomes que o protocolo", + "pyrightCommentIgnoreTip": "Use \"# pyright: ignore[]\" para suprimir o diagnóstico de uma única linha", + "readOnlyAttribute": "O atributo \"{name}\" é somente leitura", + "seeClassDeclaration": "Consulte a declaração de classe", + "seeDeclaration": "Consulte a declaração", + "seeFunctionDeclaration": "Ver declaração de função", + "seeMethodDeclaration": "Consulte a declaração de método", + "seeParameterDeclaration": "Consulte a declaração de parâmetro", + "seeTypeAliasDeclaration": "Ver declaração de alias de tipo", + "seeVariableDeclaration": "Consulte a declaração de variável", + "tupleAssignmentMismatch": "O tipo \"{type}\" é incompatível com a tuple de destino", + "tupleEntryTypeMismatch": "A entrada de tuple {entry} é do tipo incorreto", + "tupleSizeIndeterminateSrc": "Incompatibilidade de tamanho de tuple; esperado {expected} mas recebido indeterminado", + "tupleSizeIndeterminateSrcDest": "Incompatibilidade de tamanho de tuple; {expected} ou mais esperado, mas indeterminado recebido", + "tupleSizeMismatch": "Incompatibilidade de tamanho de tuple; esperado {expected} mas recebido {received}", + "tupleSizeMismatchIndeterminateDest": "Incompatibilidade de tamanho de tuple; {expected} ou mais esperado, mas {received} recebido", + "typeAliasInstanceCheck": "O alias de tipo criado com a instrução \"type\" não pode ser usado com 
verificações de instância e de classe", + "typeAssignmentMismatch": "\"{sourceType}\" não pode ser atribuído ao tipo\"{destType}\"", + "typeBound": "O tipo \"{sourceType}\" não pode ser atribuído ao limite superior \"{destType}\" na variável do tipo \"{name}\"", + "typeConstrainedTypeVar": "O tipo \"{type}\" não pode ser atribuído à variável do tipo restrita \"{name}\"", + "typeIncompatible": "\"{sourceType}\" não pode ser atribuído a \"{destType}\"", + "typeNotClass": "\"{type}\" não é uma classe.", + "typeNotStringLiteral": "\"{type}\" não é um literal de cadeia de caracteres", + "typeOfSymbol": "O tipo de \"{name}\" é \"{type}\"", + "typeParamSpec": "O tipo \"{type}\" é incompatível com ParamSpec \"{name}\"", + "typeUnsupported": "O tipo \"{type}\" é incompatível", + "typeVarDefaultOutOfScope": "A variável de tipo \"{name}\" não está no escopo", + "typeVarIsContravariant": "O parâmetro de tipo \"{name}\" é contravariante, mas \"{sourceType}\" não é um supertipo de \"{destType}\"", + "typeVarIsCovariant": "O parâmetro de tipo \"{name}\" é covariante, mas \"{sourceType}\" não é um subtipo de \"{destType}\"", + "typeVarIsInvariant": "O parâmetro de tipo \"{name}\" é invariável, mas \"{sourceType}\" não é o mesmo que \"{destType}\"", + "typeVarNotAllowed": "TypeVar não permitido para verificações de instância ou de classe", + "typeVarTupleRequiresKnownLength": "TypeVarTuple não pode ser associado a uma tuple de comprimento desconhecido", + "typeVarUnnecessarySuggestion": "Use {type} em vez disso", + "typeVarUnsolvableRemedy": "Forneça uma sobrecarga que especifica o tipo de retorno quando o argumento não é fornecido", + "typeVarsMissing": "Variáveis de tipo ausentes: {names}", + "typedDictBaseClass": "A classe \"{type}\" não é um TypedDict", + "typedDictClassNotAllowed": "A classe TypedDict não é permitida para verificações de instância ou classe", + "typedDictClosedExtraNotAllowed": "Não é possível adicionar o item \"{name}\"", + "typedDictClosedExtraTypeMismatch": 
"Não é possível adicionar o item \"{name}\" com o tipo \"{type}\"", + "typedDictClosedFieldNotReadOnly": "Não é possível adicionar o item \"{name}\" porque ele deve ser ReadOnly", + "typedDictClosedFieldNotRequired": "Não é possível adicionar o item \"{name}\" porque ele deve ser NotRequired", + "typedDictExtraFieldNotAllowed": "\"{name}\" não está presente em \"{type}\"", + "typedDictExtraFieldTypeMismatch": "Tipo de \"{name}\" é incompatível com tipo de \"extra_items\" em \"{type}\"", + "typedDictFieldMissing": "\"{name}\" está ausente de \"{type}\"", + "typedDictFieldNotReadOnly": "\"{name}\" não é somente leitura em \"{type}\"", + "typedDictFieldNotRequired": "\"{name}\" não é obrigatório em \"{type}\"", + "typedDictFieldRequired": "\"{name}\" é necessário em \"{type}\"", + "typedDictFieldTypeMismatch": "O tipo \"{type}\" não é atribuível ao item \"{name}\"", + "typedDictFieldUndefined": "\"{name}\" é um item indefinido no tipo \"{type}\"", + "typedDictKeyAccess": "Usar [\"{name}\"] para fazer referência ao item em TypedDict", + "typedDictNotAllowed": "TypedDict não pode ser usado para verificações de instância ou de classe", + "unhashableType": "O tipo \"{type}\" não é pode fazer hash", + "uninitializedAbstractVariable": "A variável de instância \"{name}\" está definida na classe base abstrata \"{classType}\", mas não foi inicializada", + "unreachableExcept": "\"{exceptionType}\" é uma subclasse de \"{parentType}\"", + "useDictInstead": "Usar dict[T1, T2] para indicar um tipo de dicionário", + "useListInstead": "Usar list[T] para indicar um tipo de list ou T1 | T2 para indicar um tipo de union", + "useTupleInstead": "Usar tuple[T1, ..., Tn] para indicar um tipo de tuple ou T1 | T2 para indicar um tipo de union", + "useTypeInstead": "Usar type[T] em vez disso", + "varianceMismatchForClass": "A variação do argumento de tipo \"{typeVarName}\" é incompatível com a classe base \"{className}\"", + "varianceMismatchForTypeAlias": "A variação do argumento de tipo 
\"{typeVarName}\" é incompatível com \"{typeAliasParam}\"" + }, + "Service": { + "longOperation": "A enumeração de arquivos de origem do espaço de trabalho está demorando muito. Em vez disso, considere abrir uma subpasta. [Saiba mais](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.qps-ploc.json b/python-parser/packages/pyright-internal/src/localization/package.nls.qps-ploc.json new file mode 100644 index 00000000..e086924f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.qps-ploc.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "[4i3uH][นั้Çrëætë Tÿpë StubẤğ倪İЂҰนั้ढूँ]", + "createTypeStubFor": "[oXYb0][นั้Çrëætë Tÿpë Stub Før \"{møðµlëÑæmë}\"Ấğ倪İЂҰक्र्तिृまนั้ढूँ]", + "executingCommand": "[AxS1Z][นั้Ëxëçµtïñg çømmæñðẤğ倪İЂҰक्นั้ढूँ]", + "filesToAnalyzeCount": "[94Ml3][นั้{çøµñt} fïlës tø æñælÿzëẤğ倪İЂҰक्र्นั้ढूँ]", + "filesToAnalyzeOne": "[2zuMu][นั้1 fïlë tø æñælÿzëẤğ倪İЂҰक्นั้ढूँ]", + "findingReferences": "[0UmwS][นั้Fïñðïñg rëfërëñçësẤğ倪İЂҰक्นั้ढूँ]", + "organizeImports": "[KhOyl][นั้Ørgæñïzë ÏmpørtsẤğ倪İЂҰนั้ढूँ]" + }, + "Completion": { + "autoImportDetail": "[WdNQG][นั้Ƶtø-ïmpørtẤğ倪İนั้ढूँ]", + "indexValueDetail": "[mX94Q][นั้Ïñðëx vælµëẤğ倪İนั้ढूँ]" + }, + "Diagnostic": { + "abstractMethodInvocation": "[fE8MD][นั้Mëthøð \"{mëthøð}\" çæññøt þë çællëð þëçæµsë ït ïs æþstræçt æñð µñïmplëmëñtëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "annotatedMetadataInconsistent": "[iOP70][นั้Æññøtætëð mëtæðætæ tÿpë \"{mëtæðætæTÿpë}\" ïs ñøt çømpætïþlë wïth tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "annotatedParamCountMismatch": "[VZvZc][นั้Pæræmëtër æññøtætïøñ çøµñt mïsmætçh: ëxpëçtëð {ëxpëçtëð} þµt rëçëïvëð {rëçëïvëð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "annotatedTypeArgMissing": "[mTgtG][นั้Ëxpëçtëð øñë tÿpë ærgµmëñt æñð øñë ør mørë æññøtætïøñs før \"Annotated\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + 
"annotationBytesString": "[W1g86][นั้Tÿpë ëxprëssïøñs çæññøt µsë þÿtës strïñg lïtërælsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "annotationFormatString": "[zaI8H][นั้Tÿpë ëxprëssïøñs çæññøt µsë førmæt strïñg lïtëræls (f-strïñgs)Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "annotationNotSupported": "[xYlM8][นั้Tÿpë æññøtætïøñ ñøt sµppørtëð før thïs stætëmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "annotationRawString": "[WOMum][นั้Tÿpë ëxprëssïøñs çæññøt µsë ræw strïñg lïtërælsẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "annotationSpansStrings": "[6Gg9x][นั้Tÿpë ëxprëssïøñs çæññøt spæñ mµltïplë strïñg lïtërælsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "annotationStringEscape": "[MQdsm][นั้Tÿpë ëxprëssïøñs çæññøt çøñtæïñ ësçæpë çhæræçtërsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "annotationTemplateString": "[KaTjj][นั้Tÿpë ëxprëssïøñs çæññøt µsë tëmplætë strïñg lïtëræls (t-strïñgs)Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "argAssignment": "[7pdVt][นั้Ærgµmëñt øf tÿpë \"{ærgTÿpë}\" çæññøt þë æssïgñëð tø pæræmëtër øf tÿpë \"{pæræmTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "argAssignmentFunction": "[J08ms][นั้Ærgµmëñt øf tÿpë \"{ærgTÿpë}\" çæññøt þë æssïgñëð tø pæræmëtër øf tÿpë \"{pæræmTÿpë}\" ïñ fµñçtïøñ \"{fµñçtïøñÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "argAssignmentParam": "[hEBRl][นั้Ærgµmëñt øf tÿpë \"{ærgTÿpë}\" çæññøt þë æssïgñëð tø pæræmëtër \"{pæræmÑæmë}\" øf tÿpë \"{pæræmTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "argAssignmentParamFunction": "[0sIuD][นั้Ærgµmëñt øf tÿpë \"{ærgTÿpë}\" çæññøt þë æssïgñëð tø pæræmëtër \"{pæræmÑæmë}\" øf tÿpë \"{pæræmTÿpë}\" ïñ fµñçtïøñ \"{fµñçtïøñÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "argMissingForParam": "[1oqiG][นั้Ærgµmëñt mïssïñg før pæræmëtër {ñæmë}Ấğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "argMissingForParams": "[iAF8C][นั้Ærgµmëñts mïssïñg før pæræmëtërs {ñæmës}Ấğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "argMorePositionalExpectedCount": "[q3jgC][นั้Ëxpëçtëð {ëxpëçtëð} mørë pøsïtïøñæl ærgµmëñtsẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + 
"argMorePositionalExpectedOne": "[DNcCo][นั้Ëxpëçtëð 1 mørë pøsïtïøñæl ærgµmëñtẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "argPositional": "[XBHa9][นั้Ëxpëçtëð pøsïtïøñæl ærgµmëñtẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "argPositionalExpectedCount": "[F5Y6t][นั้Ëxpëçtëð {ëxpëçtëð} pøsïtïøñæl ærgµmëñtsẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "argPositionalExpectedOne": "[XW4kV][นั้Ëxpëçtëð 1 pøsïtïøñæl ærgµmëñtẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "argTypePartiallyUnknown": "[Y02o3][นั้Ærgµmëñt tÿpë ïs pærtïællÿ µñkñøwñẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "argTypeUnknown": "[l0ccD][นั้Ærgµmëñt tÿpë ïs µñkñøwñẤğ倪İЂҰक्र्นั้ढूँ]", + "assertAlwaysTrue": "[5Weld][นั้Æssërt ëxprëssïøñ ælwæÿs ëvælµætës tø trueẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "assertTypeArgs": "[QHRQ7][นั้\"assert_type\" ëxpëçts twø pøsïtïøñæl ærgµmëñtsẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "assertTypeTypeMismatch": "[fc1Kk][นั้\"assert_type\" mïsmætçh: ëxpëçtëð \"{ëxpëçtëð}\" þµt rëçëïvëð \"{rëçëïvëð}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "assignmentExprComprehension": "[F5OTr][นั้Æssïgñmëñt ëxprëssïøñ tærgët \"{ñæmë}\" çæññøt µsë sæmë ñæmë æs çømprëhëñsïøñ før tærgëtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "assignmentExprContext": "[U4d41][นั้Æssïgñmëñt ëxprëssïøñ mµst þë wïthïñ møðµlë, fµñçtïøñ ør læmþðæẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "assignmentExprInSubscript": "[mnJzw][นั้Æssïgñmëñt ëxprëssïøñs wïthïñ æ sµþsçrïpt ærë sµppørtëð øñlÿ ïñ Pÿthøñ 3.10 æñð ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "assignmentInProtocol": "[vey5h][นั้Ïñstæñçë ør çlæss værïæþlës wïthïñ æ Protocol çlæss mµst þë ëxplïçïtlÿ ðëçlærëð wïthïñ thë çlæss þøðÿẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "assignmentTargetExpr": "[22xbu][นั้Ëxprëssïøñ çæññøt þë æssïgñmëñt tærgëtẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "asyncNotInAsyncFunction": "[u0Y7U][นั้Üsë øf \"async\" ñøt ælløwëð øµtsïðë øf async fµñçtïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "awaitIllegal": "[2Wa68][นั้Üsë øf \"await\" rëqµïrës Pÿthøñ 3.5 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "awaitNotAllowed": "[TpX77][นั้Tÿpë ëxprëssïøñs çæññøt µsë 
\"await\"Ấğ倪İЂҰक्र्तिृまนั้ढूँ]", + "awaitNotInAsync": "[qau2Q][นั้\"await\" ælløwëð øñlÿ wïthïñ async fµñçtïøñẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "backticksIllegal": "[V1LZI][นั้Ëxprëssïøñs sµrrøµñðëð þÿ þæçktïçks ærë ñøt sµppørtëð ïñ Pÿthøñ 3.x; µsë repr ïñstëæðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "baseClassCircular": "[frqWt][นั้Çlæss çæññøt ðërïvë frøm ïtsëlfẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "baseClassFinal": "[C9i92][นั้ßæsë çlæss \"{tÿpë}\" ïs mærkëð final æñð çæññøt þë sµþçlæssëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "baseClassIncompatible": "[K3wZ2][นั้ßæsë çlæssës øf {tÿpë} ærë mµtµællÿ ïñçømpætïþlëẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "baseClassInvalid": "[qULQr][นั้Ærgµmëñt tø çlæss mµst þë æ þæsë çlæssẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "baseClassMethodTypeIncompatible": "[2lM0z][นั้ßæsë çlæssës før çlæss \"{çlæssTÿpë}\" ðëfïñë mëthøð \"{ñæmë}\" ïñ ïñçømpætïþlë wæÿẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "baseClassUnknown": "[QQxIX][นั้ßæsë çlæss tÿpë ïs µñkñøwñ, øþsçµrïñg tÿpë øf ðërïvëð çlæssẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "baseClassVariableTypeIncompatible": "[YmxlD][นั้ßæsë çlæssës før çlæss \"{çlæssTÿpë}\" ðëfïñë værïæþlë \"{ñæmë}\" ïñ ïñçømpætïþlë wæÿẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "binaryOperationNotAllowed": "[1lzlz][นั้ßïñærÿ øpërætør ñøt ælløwëð ïñ tÿpë ëxprëssïøñẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "bindParamMissing": "[6ggqn][นั้Çøµlð ñøt þïñð mëthøð \"{mëthøðÑæmë}\" þëçæµsë ït ïs mïssïñg æ \"sëlf\" ør \"çls\" pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "bindTypeMismatch": "[x4sbf][นั้Çøµlð ñøt þïñð mëthøð \"{mëthøðÑæmë}\" þëçæµsë \"{tÿpë}\" ïs ñøt æssïgñæþlë tø pæræmëtër \"{pæræmÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "breakInExceptionGroup": "[Kwoku][นั้\"break\" ïs ñøt ælløwëð ïñ æñ \"except*\" þløçkẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "breakOutsideLoop": "[Ca4Ip][นั้\"break\" çæñ þë µsëð øñlÿ wïthïñ æ løøpẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "bytesUnsupportedEscape": "[TxAaf][นั้Üñsµppørtëð ësçæpë sëqµëñçë ïñ bytes 
lïtërælẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "callableExtraArgs": "[M3UIb][นั้Ëxpëçtëð øñlÿ twø tÿpë ærgµmëñts tø \"Callable\"Ấğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "callableFirstArg": "[W1wTU][นั้Ëxpëçtëð pæræmëtër tÿpë lïst ør \"...\"Ấğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "callableNotInstantiable": "[sJ0Q8][นั้Çæññøt ïñstæñtïætë tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "callableSecondArg": "[9c1cS][นั้Ëxpëçtëð rëtµrñ tÿpë æs sëçøñð tÿpë ærgµmëñt før \"Callable\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "casePatternIsIrrefutable": "[NR6tj][นั้Ïrrëfµtæþlë pættërñ ïs ælløwëð øñlÿ før thë læst çæsë stætëmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "classAlreadySpecialized": "[Puetc][นั้Tÿpë \"{tÿpë}\" ïs ælrëæðÿ spëçïælïzëðẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "classDecoratorTypeUnknown": "[FhL8V][นั้Üñtÿpëð çlæss ðëçørætør øþsçµrës tÿpë øf çlæss; ïgñørïñg ðëçørætørẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "classDefinitionCycle": "[21Tlp][นั้Çlæss ðëfïñïtïøñ før \"{ñæmë}\" ðëpëñðs øñ ïtsëlfẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "classGetItemClsParam": "[A2iHF][นั้__class_getitem__ øvërrïðë shøµlð tækë æ \"cls\" pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "classMethodClsParam": "[aWMN3][นั้Çlæss mëthøðs shøµlð tækë æ \"cls\" pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "classNotRuntimeSubscriptable": "[O9BL6][นั้§µþsçrïpt før çlæss \"{ñæmë}\" wïll gëñërætë rµñtïmë ëxçëptïøñ; ëñçløsë tÿpë ëxprëssïøñ ïñ qµøtësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "classPatternBuiltInArgPositional": "[DOfs5][นั้Çlæss pættërñ æççëpts øñlÿ pøsïtïøñæl sµþ-pættërñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "classPatternNewType": "[9l6u3][นั้\"{tÿpë}\" çæññøt þë µsëð ïñ æ çlæss pættërñ þëçæµsë ït ïs ðëfïñëð µsïñg ÑëwTÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "classPatternPositionalArgCount": "[B65y5][นั้Tøø mæñÿ pøsïtïøñæl pættërñs før çlæss \"{tÿpë}\"; ëxpëçtëð {ëxpëçtëð} þµt rëçëïvëð {rëçëïvëð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "classPatternTypeAlias": "[AxDtv][นั้\"{tÿpë}\" çæññøt þë µsëð ïñ æ çlæss pættërñ þëçæµsë ït ïs æ spëçïælïzëð 
tÿpë ælïæsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "classPropertyDeprecated": "[Q6JgP][นั้Çlæss prøpërtïës ærë ðëprëçætëð ïñ Pÿthøñ 3.11 æñð wïll ñøt þë sµppørtëð ïñ Pÿthøñ 3.13Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "classTypeParametersIllegal": "[GybXD][นั้Çlæss tÿpë pæræmëtër sÿñtæx rëqµïrës Pÿthøñ 3.12 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "classVarFirstArgMissing": "[VtcEd][นั้Ëxpëçtëð æ tÿpë ærgµmëñt æftër \"ClassVar\"Ấğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "classVarNotAllowed": "[BU07G][นั้\"ClassVar\" ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "classVarOverridesInstanceVar": "[UEaro][นั้Çlæss værïæþlë \"{ñæmë}\" øvërrïðës ïñstæñçë værïæþlë øf sæmë ñæmë ïñ çlæss \"{çlæssÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "classVarTooManyArgs": "[Mj1R5][นั้Ëxpëçtëð øñlÿ øñë tÿpë ærgµmëñt æftër \"ClassVar\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "classVarWithTypeVar": "[6mnjY][นั้\"ClassVar\" tÿpë çæññøt ïñçlµðë tÿpë værïæþlësẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "clsSelfParamTypeMismatch": "[MBrCQ][นั้Tÿpë øf pæræmëtër \"{ñæmë}\" mµst þë æ sµpërtÿpë øf ïts çlæss \"{çlæssTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "codeTooComplexToAnalyze": "[FNQd7][นั้Çøðë ïs tøø çømplëx tø æñælÿzë; rëðµçë çømplëxïtÿ þÿ rëfæçtørïñg ïñtø sµþrøµtïñës ør rëðµçïñg çøñðïtïøñæl çøðë pæthsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "collectionAliasInstantiation": "[rZb8i][นั้Tÿpë \"{tÿpë}\" çæññøt þë ïñstæñtïætëð, µsë \"{ælïæs}\" ïñstëæðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "comparisonAlwaysFalse": "[N16ve][นั้Çøñðïtïøñ wïll ælwæÿs ëvælµætë tø False sïñçë thë tÿpës \"{lëftTÿpë}\" æñð \"{rïghtTÿpë}\" hævë ñø øvërlæpẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "comparisonAlwaysTrue": "[0TOLo][นั้Çøñðïtïøñ wïll ælwæÿs ëvælµætë tø True sïñçë thë tÿpës \"{lëftTÿpë}\" æñð \"{rïghtTÿpë}\" hævë ñø øvërlæpẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "comprehensionInDict": "[Orm2O][นั้Çømprëhëñsïøñ çæññøt þë µsëð wïth øthër ðïçtïøñærÿ 
ëñtrïësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "comprehensionInSet": "[YUnu9][นั้Çømprëhëñsïøñ çæññøt þë µsëð wïth øthër set ëñtrïësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "concatenateContext": "[8tRy6][นั้\"Concatenate\" ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "concatenateParamSpecMissing": "[3s1CV][นั้£æst tÿpë ærgµmëñt før \"Concatenate\" mµst þë æ ParamSpec ør \"...\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "concatenateTypeArgsMissing": "[aH5g8][นั้\"Concatenate\" rëqµïrës æt lëæst twø tÿpë ærgµmëñtsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "conditionalOperandInvalid": "[HnbrG][นั้Ïñvælïð çøñðïtïøñæl øpëræñð øf tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "constantRedefinition": "[oRKBh][นั้\"{ñæmë}\" ïs çøñstæñt (þëçæµsë ït ïs µppërçæsë) æñð çæññøt þë rëðëfïñëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "constructorParametersMismatch": "[WWloK][นั้Mïsmætçh þëtwëëñ sïgñætµrë øf __new__ æñð __init__ ïñ çlæss \"{çlæssTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "containmentAlwaysFalse": "[e6PIv][นั้Ëxprëssïøñ wïll ælwæÿs ëvælµætë tø False sïñçë thë tÿpës \"{lëftTÿpë}\" æñð \"{rïghtTÿpë}\" hævë ñø øvërlæpẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "containmentAlwaysTrue": "[8OhUO][นั้Ëxprëssïøñ wïll ælwæÿs ëvælµætë tø True sïñçë thë tÿpës \"{lëftTÿpë}\" æñð \"{rïghtTÿpë}\" hævë ñø øvërlæpẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "continueInExceptionGroup": "[h1tNi][นั้\"continue\" ïs ñøt ælløwëð ïñ æñ \"except*\" þløçkẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "continueOutsideLoop": "[6ACvd][นั้\"continue\" çæñ þë µsëð øñlÿ wïthïñ æ løøpẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "coroutineInConditionalExpression": "[ygK2r][นั้Çøñðïtïøñæl ëxprëssïøñ rëfërëñçës çørøµtïñë whïçh ælwæÿs ëvælµætës tø TrueẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "dataClassBaseClassFrozen": "[jjiw4][นั้Æ ñøñ-frøzëñ çlæss çæññøt ïñhërït frøm æ çlæss thæt ïs frøzëñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "dataClassBaseClassNotFrozen": "[KOz4K][นั้Æ frøzëñ çlæss çæññøt ïñhërït frøm æ çlæss thæt ïs ñøt 
frøzëñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "dataClassConverterFunction": "[FxD8r][นั้Ærgµmëñt øf tÿpë \"{ærgTÿpë}\" ïs ñøt æ vælïð çøñvërtër før fïëlð \"{fïëlðÑæmë}\" øf tÿpë \"{fïëlðTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "dataClassConverterOverloads": "[ZJ0SE][นั้Ñø øvërløæðs øf \"{fµñçÑæmë}\" ærë vælïð çøñvërtërs før fïëlð \"{fïëlðÑæmë}\" øf tÿpë \"{fïëlðTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "dataClassFieldInheritedDefault": "[BKxvn][นั้\"{fïëlðÑæmë}\" øvërrïðës æ fïëlð øf thë sæmë ñæmë þµt ïs mïssïñg æ ðëfæµlt vælµëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "dataClassFieldWithDefault": "[iJuju][นั้Fïëlðs wïthøµt ðëfæµlt vælµës çæññøt æppëær æftër fïëlðs wïth ðëfæµlt vælµësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "dataClassFieldWithPrivateName": "[miQYb][นั้Ðætæçlæss fïëlð çæññøt µsë prïvætë ñæmëẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "dataClassFieldWithoutAnnotation": "[zq5t5][นั้Ðætæçlæss fïëlð wïthøµt tÿpë æññøtætïøñ wïll çæµsë rµñtïmë ëxçëptïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "dataClassPostInitParamCount": "[yl0Bg][นั้Ðætæçlæss __post_init__ ïñçørrëçt pæræmëtër çøµñt; ñµmþër øf InitVar fïëlðs ïs {ëxpëçtëð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "dataClassPostInitType": "[74TW4][นั้Ðætæçlæss __post_init__ mëthøð pæræmëtër tÿpë mïsmætçh før fïëlð \"{fïëlðÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "dataClassSlotsOverwrite": "[D17er][นั้__slots__ ïs ælrëæðÿ ðëfïñëð ïñ çlæssẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "dataClassTransformExpectedBoolLiteral": "[y2upJ][นั้Ëxpëçtëð ëxprëssïøñ thæt stætïçællÿ ëvælµætës tø True ør FalseẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "dataClassTransformFieldSpecifier": "[xE1Cp][นั้Ëxpëçtëð tuple øf çlæssës ør fµñçtïøñs þµt rëçëïvëð tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "dataClassTransformPositionalParam": "[Cu7w4][นั้Æll ærgµmëñts tø \"dataclass_transform\" mµst þë këÿwørð ærgµmëñtsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "dataClassTransformUnknownArgument": "[hLQXL][นั้Ærgµmëñt 
\"{ñæmë}\" ïs ñøt sµppørtëð þÿ dataclass_transformẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "dataProtocolInSubclassCheck": "[kIIkO][นั้Ðætæ prøtøçøls (whïçh ïñçlµðë ñøñ-mëthøð ættrïþµtës) ærë ñøt ælløwëð ïñ issubclass çællsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "declaredReturnTypePartiallyUnknown": "[pDeOu][นั้Ðëçlærëð rëtµrñ tÿpë, \"{rëtµrñTÿpë}\", ïs pærtïællÿ µñkñøwñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "declaredReturnTypeUnknown": "[XRFJs][นั้Ðëçlærëð rëtµrñ tÿpë ïs µñkñøwñẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "defaultValueContainsCall": "[G3smw][นั้Fµñçtïøñ çælls æñð mµtæþlë øþjëçts ñøt ælløwëð wïthïñ pæræmëtër ðëfæµlt vælµë ëxprëssïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "defaultValueNotAllowed": "[SuRpb][นั้Pæræmëtër wïth \"*\" ør \"**\" çæññøt hævë ðëfæµlt vælµëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "delTargetExpr": "[VLtXk][นั้Ëxprëssïøñ çæññøt þë ðëlëtëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "deprecatedClass": "[DIyRn][นั้Thë çlæss \"{ñæmë}\" ïs ðëprëçætëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "deprecatedConstructor": "[jq4aQ][นั้Thë çøñstrµçtør før çlæss \"{ñæmë}\" ïs ðëprëçætëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "deprecatedDescriptorDeleter": "[ljYHS][นั้Thë \"__ðëlëtë__\" mëthøð før ðësçrïptør \"{ñæmë}\" ïs ðëprëçætëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "deprecatedDescriptorGetter": "[06Y3N][นั้Thë \"__gët__\" mëthøð før ðësçrïptør \"{ñæmë}\" ïs ðëprëçætëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "deprecatedDescriptorSetter": "[6nQQu][นั้Thë \"__sët__\" mëthøð før ðësçrïptør \"{ñæmë}\" ïs ðëprëçætëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "deprecatedFunction": "[GdF0l][นั้Thë fµñçtïøñ \"{ñæmë}\" ïs ðëprëçætëðẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "deprecatedMethod": "[GxfND][นั้Thë mëthøð \"{ñæmë}\" ïñ çlæss \"{çlæssÑæmë}\" ïs ðëprëçætëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "deprecatedPropertyDeleter": "[BUlI2][นั้Thë deleter før property \"{ñæmë}\" ïs ðëprëçætëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "deprecatedPropertyGetter": "[54BuI][นั้Thë getter før property \"{ñæmë}\" ïs ðëprëçætëðẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + 
"deprecatedPropertySetter": "[EHGoz][นั้Thë setter før property \"{ñæmë}\" ïs ðëprëçætëðẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "deprecatedType": "[IWdSs][นั้Thïs tÿpë ïs ðëprëçætëð æs øf Pÿthøñ {vërsïøñ}; µsë \"{rëplæçëmëñt}\" ïñstëæðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "dictExpandIllegalInComprehension": "[3B8LL][นั้Ðïçtïøñærÿ ëxpæñsïøñ ñøt ælløwëð ïñ çømprëhëñsïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "dictInAnnotation": "[0UcII][นั้Ðïçtïøñærÿ ëxprëssïøñ ñøt ælløwëð ïñ tÿpë ëxprëssïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "dictKeyValuePairs": "[Hnd6W][นั้Ðïçtïøñærÿ ëñtrïës mµst çøñtæïñ këÿ/vælµë pæïrsẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "dictUnpackIsNotMapping": "[RhO60][นั้Ëxpëçtëð mæppïñg før ðïçtïøñærÿ µñpæçk øpërætørẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "dunderAllSymbolNotPresent": "[mlrcI][นั้\"{ñæmë}\" ïs spëçïfïëð ïñ __all__ þµt ïs ñøt prësëñt ïñ møðµlëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "duplicateArgsParam": "[bt3Os][นั้Øñlÿ øñë \"*\" pæræmëtër ælløwëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "duplicateBaseClass": "[HIzyw][นั้еplïçætë þæsë çlæss ñøt ælløwëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "duplicateCapturePatternTarget": "[zq38Z][นั้Çæptµrë tærgët \"{ñæmë}\" çæññøt æppëær mørë thæñ øñçë wïthïñ thë sæmë pættërñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "duplicateCatchAll": "[6gO00][นั้Øñlÿ øñë çætçh-æll except çlæµsë ælløwëðẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "duplicateEnumMember": "[k9W8A][นั้Enum mëmþër \"{ñæmë}\" ïs ælrëæðÿ ðëçlærëðẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "duplicateGenericAndProtocolBase": "[4EO4W][นั้Øñlÿ øñë Generic[...] ør Protocol[...] 
þæsë çlæss ælløwëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "duplicateImport": "[qgZGm][นั้\"{ïmpørtÑæmë}\" ïs ïmpørtëð mørë thæñ øñçëẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "duplicateKeywordOnly": "[pbf3W][นั้Øñlÿ øñë \"*\" sëpærætør ælløwëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "duplicateKwargsParam": "[4QsUE][นั้Øñlÿ øñë \"**\" pæræmëtër ælløwëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "duplicateParam": "[s0XGf][นั้еplïçætë pæræmëtër \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "duplicatePositionOnly": "[9hzW4][นั้Øñlÿ øñë \"/\" pæræmëtër ælløwëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "duplicateStarPattern": "[8quwQ][นั้Øñlÿ øñë \"*\" pættërñ ælløwëð ïñ æ pættërñ sëqµëñçëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "duplicateStarStarPattern": "[wScoI][นั้Øñlÿ øñë \"**\" ëñtrÿ ælløwëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "duplicateUnpack": "[wjeOP][นั้Øñlÿ øñë µñpæçk øpërætïøñ ælløwëð ïñ listẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "ellipsisAfterUnpacked": "[4EsWH][นั้\"...\" çæññøt þë µsëð wïth æñ µñpæçkëð TypeVarTuple ør tupleẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "ellipsisContext": "[Y4jK3][นั้\"...\" ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "ellipsisSecondArg": "[pvXJA][นั้\"...\" ïs ælløwëð øñlÿ æs thë sëçøñð øf twø ærgµmëñtsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "enumClassOverride": "[2JsL1][นั้Enum çlæss \"{ñæmë}\" ïs final æñð çæññøt þë sµþçlæssëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "enumMemberDelete": "[5wmRY][นั้Enum mëmþër \"{ñæmë}\" çæññøt þë ðëlëtëðẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "enumMemberSet": "[mBLro][นั้Enum mëmþër \"{ñæmë}\" çæññøt þë æssïgñëðẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "enumMemberTypeAnnotation": "[z8FaL][นั้Tÿpë æññøtætïøñs ærë ñøt ælløwëð før enum mëmþërsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "exceptGroupMismatch": "[XbzjN][นั้Trÿ stætëmëñt çæññøt ïñçlµðë þøth \"except\" æñð \"except*\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "exceptGroupRequiresType": "[740wj][นั้Ëxçëptïøñ grøµp sÿñtæx (\"except*\") rëqµïrës æñ ëxçëptïøñ tÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "exceptRequiresParens": "[6Rr0v][นั้Mµltïplë ëxçëptïøñ tÿpës mµst þë pærëñthësïzëð prïør tø Pÿthøñ 
3.14Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "exceptWithAsRequiresParens": "[TBH3f][นั้Mµltïplë ëxçëptïøñ tÿpës mµst þë pærëñthësïzëð whëñ µsïñg \"æs\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "exceptionGroupIncompatible": "[d0SLP][นั้Ëxçëptïøñ grøµp sÿñtæx (\"except*\") rëqµïrës Pÿthøñ 3.11 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "exceptionGroupTypeIncorrect": "[Kanvz][นั้Ëxçëptïøñ tÿpë ïñ except* çæññøt ðërïvë frøm BaseGroupExceptionẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "exceptionTypeIncorrect": "[G7AZt][นั้\"{tÿpë}\" ðøës ñøt ðërïvë frøm BaseExceptionẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "exceptionTypeNotClass": "[v1FmY][นั้\"{tÿpë}\" ïs ñøt æ vælïð ëxçëptïøñ çlæssẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "exceptionTypeNotInstantiable": "[PfdeG][นั้Çøñstrµçtør før ëxçëptïøñ tÿpë \"{tÿpë}\" rëqµïrës øñë ør mørë ærgµmëñtsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "expectedAfterDecorator": "[rzMVF][นั้Ëxpëçtëð fµñçtïøñ ør çlæss ðëçlærætïøñ æftër ðëçørætørẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "expectedArrow": "[DrZKr][นั้Ëxpëçtëð \"->\" følløwëð þÿ rëtµrñ tÿpë æññøtætïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "expectedAsAfterException": "[FDdTe][นั้Ëxpëçtëð \"as\" æftër ëxçëptïøñ tÿpëẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "expectedAssignRightHandExpr": "[mPzHP][นั้Ëxpëçtëð ëxprëssïøñ tø thë rïght øf \"=\"Ấğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "expectedBinaryRightHandExpr": "[MgqnF][นั้Ëxpëçtëð ëxprëssïøñ tø thë rïght øf øpërætørẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "expectedBoolLiteral": "[bhZAe][นั้Ëxpëçtëð True ør FalseẤğ倪İЂҰक्र्นั้ढूँ]", + "expectedCase": "[kQ1sa][นั้Ëxpëçtëð \"case\" stætëmëñtẤğ倪İЂҰक्र्นั้ढूँ]", + "expectedClassName": "[f0XRc][นั้Ëxpëçtëð çlæss ñæmëẤğ倪İЂҰक्นั้ढूँ]", + "expectedCloseBrace": "[MQHKY][นั้\"{\" wæs ñøt çløsëðẤğ倪İЂҰक्นั้ढूँ]", + "expectedCloseBracket": "[YfM0n][นั้\"[\" wæs ñøt çløsëðẤğ倪İЂҰक्นั้ढूँ]", + "expectedCloseParen": "[tAuag][นั้\"(\" wæs ñøt çløsëðẤğ倪İЂҰक्นั้ढूँ]", + "expectedColon": "[KmzHa][นั้Ëxpëçtëð \":\"Ấğ倪İЂนั้ढूँ]", + "expectedComplexNumberLiteral": "[dc2rM][นั้Ëxpëçtëð çømplëx 
ñµmþër lïtëræl før pættërñ mætçhïñgẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "expectedDecoratorExpr": "[415JG][นั้Ëxprëssïøñ førm ñøt sµppørtëð før ðëçørætør prïør tø Pÿthøñ 3.9Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "expectedDecoratorName": "[IKO4m][นั้Ëxpëçtëð ðëçørætør ñæmëẤğ倪İЂҰक्र्นั้ढूँ]", + "expectedDecoratorNewline": "[Bsyx3][นั้Ëxpëçtëð ñëw lïñë æt ëñð øf ðëçørætørẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "expectedDelExpr": "[u8JgL][นั้Ëxpëçtëð ëxprëssïøñ æftër \"del\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "expectedElse": "[eROaU][นั้Ëxpëçtëð \"else\"Ấğ倪İЂҰนั้ढूँ]", + "expectedEquals": "[TXK4x][นั้Ëxpëçtëð \"=\"Ấğ倪İЂนั้ढूँ]", + "expectedExceptionClass": "[sYtUr][นั้Ïñvælïð ëxçëptïøñ çlæss ør øþjëçtẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "expectedExceptionObj": "[w4tAQ][นั้Ëxpëçtëð ëxçëptïøñ øþjëçt, ëxçëptïøñ çlæss ør NoneẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "expectedExpr": "[iKSsw][นั้Ëxpëçtëð ëxprëssïøñẤğ倪İЂҰक्นั้ढूँ]", + "expectedFunctionAfterAsync": "[fWBMb][นั้Ëxpëçtëð fµñçtïøñ ðëfïñïtïøñ æftër \"async\"Ấğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "expectedFunctionName": "[cR036][นั้Ëxpëçtëð fµñçtïøñ ñæmë æftër \"def\"Ấğ倪İЂҰक्र्तिृまนั้ढूँ]", + "expectedIdentifier": "[Lj4l5][นั้Ëxpëçtëð ïðëñtïfïërẤğ倪İЂҰक्นั้ढूँ]", + "expectedImport": "[FNK2F][นั้Ëxpëçtëð \"import\"Ấğ倪İЂҰक्นั้ढूँ]", + "expectedImportAlias": "[mb4fF][นั้Ëxpëçtëð sÿmþøl æftër \"as\"Ấğ倪İЂҰक्र्นั้ढूँ]", + "expectedImportSymbols": "[QUZ7S][นั้Ëxpëçtëð øñë ør mørë sÿmþøl ñæmës æftër \"import\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "expectedIn": "[9XkiC][นั้Ëxpëçtëð \"in\"Ấğ倪İЂนั้ढूँ]", + "expectedInExpr": "[RXryp][นั้Ëxpëçtëð ëxprëssïøñ æftër \"in\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "expectedIndentedBlock": "[7ZvJC][นั้Ëxpëçtëð ïñðëñtëð þløçkẤğ倪İЂҰक्र्นั้ढूँ]", + "expectedMemberName": "[VvTAF][นั้Ëxpëçtëð ættrïþµtë ñæmë æftër \".\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "expectedModuleName": "[Jky7g][นั้Ëxpëçtëð møðµlë ñæmëẤğ倪İЂҰक्นั้ढूँ]", + "expectedNameAfterAs": "[KnNbR][นั้Ëxpëçtëð sÿmþøl ñæmë æftër \"as\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "expectedNamedParameter": "[ZsE8l][นั้Këÿwørð pæræmëtër mµst 
følløw \"*\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "expectedNewline": "[bW0cY][นั้Ëxpëçtëð ñëwlïñëẤğ倪İЂҰนั้ढूँ]", + "expectedNewlineOrSemicolon": "[av2Gz][นั้§tætëmëñts mµst þë sëpærætëð þÿ ñëwlïñës ør sëmïçøløñsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "expectedOpenParen": "[CLuzo][นั้Ëxpëçtëð \"(\"Ấğ倪İЂนั้ढूँ]", + "expectedParamName": "[b0il7][นั้Ëxpëçtëð pæræmëtër ñæmëẤğ倪İЂҰक्र्นั้ढूँ]", + "expectedPatternExpr": "[76AU4][นั้Ëxpëçtëð pættërñ ëxprëssïøñẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "expectedPatternSubjectExpr": "[GUw9q][นั้Ëxpëçtëð pættërñ sµþjëçt ëxprëssïøñẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "expectedPatternValue": "[Ah06c][นั้Ëxpëçtëð pættërñ vælµë ëxprëssïøñ øf thë førm \"a.b\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "expectedReturnExpr": "[nyeYf][นั้Ëxpëçtëð ëxprëssïøñ æftër \"return\"Ấğ倪İЂҰक्र्तिृまนั้ढूँ]", + "expectedSliceIndex": "[0HjFA][นั้Ëxpëçtëð ïñðëx ør slïçë ëxprëssïøñẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "expectedTypeNotString": "[FXeAr][นั้Ëxpëçtëð tÿpë þµt rëçëïvëð æ strïñg lïtërælẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "expectedTypeParameterName": "[aHX5Q][นั้Ëxpëçtëð tÿpë pæræmëtër ñæmëẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "expectedYieldExpr": "[TrB0N][นั้Ëxpëçtëð ëxprëssïøñ ïñ yield stætëmëñtẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "finalClassIsAbstract": "[qEcDN][นั้Çlæss \"{tÿpë}\" ïs mærkëð final æñð mµst ïmplëmëñt æll æþstræçt sÿmþølsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "finalContext": "[KT2Ma][นั้\"Final\" ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "finalInLoop": "[yUnYn][นั้Æ \"Final\" værïæþlë çæññøt þë æssïgñëð wïthïñ æ løøpẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "finalMethodOverride": "[rVyi2][นั้Mëthøð \"{ñæmë}\" çæññøt øvërrïðë final mëthøð ðëfïñëð ïñ çlæss \"{çlæssÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "finalNonMethod": "[ITFlU][นั้Fµñçtïøñ \"{ñæmë}\" çæññøt þë mærkëð @final þëçæµsë ït ïs ñøt æ mëthøðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "finalReassigned": "[fgpqP][นั้\"{ñæmë}\" ïs ðëçlærëð æs Final æñð çæññøt þë rëæssïgñëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "finalRedeclaration": "[8jVSa][นั้\"{ñæmë}\" wæs 
prëvïøµslÿ ðëçlærëð æs FinalẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "finalRedeclarationBySubclass": "[0VswQ][นั้\"{ñæmë}\" çæññøt þë rëðëçlærëð þëçæµsë pærëñt çlæss \"{çlæssÑæmë}\" ðëçlærës ït æs FinalẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "finalTooManyArgs": "[9fleE][นั้Ëxpëçtëð æ sïñglë tÿpë ærgµmëñt æftër \"Final\"Ấğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "finalUnassigned": "[PmdtN][นั้\"{ñæmë}\" ïs ðëçlærëð Final, þµt vælµë ïs ñøt æssïgñëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "finallyBreak": "[yAOUV][นั้Æ \"break\" çæññøt þë µsëð tø ëxït æ \"finally\" þløçkẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "finallyContinue": "[P31Q2][นั้Æ \"continue\" çæññøt þë µsëð tø ëxït æ \"finally\" þløçkẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "finallyReturn": "[8shTC][นั้Æ \"return\" çæññøt þë µsëð tø ëxït æ \"finally\" þløçkẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "formatStringBrace": "[j606J][นั้§ïñglë çløsë þræçë ñøt ælløwëð wïthïñ f-strïñg lïtëræl; µsë ðøµþlë çløsë þræçëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "formatStringBytes": "[1Xo44][นั้Førmæt strïñg lïtëræls (f-strïñgs) çæññøt þë þïñærÿẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "formatStringDebuggingIllegal": "[mQueA][นั้F-strïñg ðëþµggïñg spëçïfïër \"=\" rëqµïrës Pÿthøñ 3.8 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "formatStringEscape": "[lcP3H][นั้Ësçæpë sëqµëñçë (þæçkslæsh) ñøt ælløwëð ïñ ëxprëssïøñ pørtïøñ øf f-strïñg prïør tø Pÿthøñ 3.12Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "formatStringExpectedConversion": "[AjO2F][นั้Ëxpëçtëð æ çøñvërsïøñ spëçïfïër æftër \"!\" ïñ f-strïñgẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "formatStringIllegal": "[HhOeX][นั้Førmæt strïñg lïtëræls (f-strïñgs) rëqµïrë Pÿthøñ 3.6 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "formatStringInPattern": "[KqV2l][นั้Førmæt strïñg ñøt ælløwëð ïñ pættërñẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "formatStringNestedFormatSpecifier": "[OFqi6][นั้Ëxprëssïøñs ñëstëð tøø ðëëplÿ wïthïñ førmæt strïñg spëçïfïërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "formatStringNestedQuote": "[buq49][นั้§trïñgs ñëstëð wïthïñ æñ f-strïñg 
çæññøt µsë thë sæmë qµøtë çhæræçtër æs thë f-strïñg prïør tø Pÿthøñ 3.12Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "formatStringTemplate": "[vSiHo][นั้Førmæt strïñg lïtëræls (f-strïñgs) çæññøt ælsø þë tëmplætë strïñgs (t-strïñgs)Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "formatStringUnicode": "[RCCfD][นั้Førmæt strïñg lïtëræls (f-strïñgs) çæññøt þë µñïçøðëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "formatStringUnterminated": "[PnOZr][นั้Üñtërmïñætëð ëxprëssïøñ ïñ f-strïñg; ëxpëçtïñg \"}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "functionDecoratorTypeUnknown": "[Gv66U][นั้Üñtÿpëð fµñçtïøñ ðëçørætør øþsçµrës tÿpë øf fµñçtïøñ; ïgñørïñg ðëçørætørẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "functionInConditionalExpression": "[9A68n][นั้Çøñðïtïøñæl ëxprëssïøñ rëfërëñçës fµñçtïøñ whïçh ælwæÿs ëvælµætës tø TrueẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "functionTypeParametersIllegal": "[0JM96][นั้Fµñçtïøñ tÿpë pæræmëtër sÿñtæx rëqµïrës Pÿthøñ 3.12 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "futureImportLocationNotAllowed": "[IdoQY][นั้Ïmpørts frøm __future__ mµst þë æt thë þëgïññïñg øf thë fïlëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "generatorAsyncReturnType": "[dYKl9][นั้Rëtµrñ tÿpë øf async gëñërætør fµñçtïøñ mµst þë çømpætïþlë wïth \"AsyncGenerator[{yieldType}, Any]\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "generatorNotParenthesized": "[WmWZM][นั้Gëñërætør ëxprëssïøñs mµst þë pærëñthësïzëð ïf ñøt sølë ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "generatorSyncReturnType": "[ASD1z][นั้Rëtµrñ tÿpë øf gëñërætør fµñçtïøñ mµst þë çømpætïþlë wïth \"Generator[{yieldType}, Any, Any]\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "genericBaseClassNotAllowed": "[fniUT][นั้\"Generic\" þæsë çlæss çæññøt þë µsëð wïth tÿpë pæræmëtër sÿñtæxẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "genericClassAssigned": "[iU1tH][นั้Gëñërïç çlæss tÿpë çæññøt þë æssïgñëðẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "genericClassDeleted": "[C942e][นั้Gëñërïç çlæss tÿpë çæññøt þë ðëlëtëðẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + 
"genericInstanceVariableAccess": "[rpanq][นั้Æççëss tø gëñërïç ïñstæñçë værïæþlë thrøµgh çlæss ïs æmþïgµøµsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "genericNotAllowed": "[vnF07][นั้\"Generic\" ïs ñøt vælïð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "genericTypeAliasBoundTypeVar": "[S1NAS][นั้Gëñërïç tÿpë ælïæs wïthïñ çlæss çæññøt µsë þøµñð tÿpë værïæþlës {ñæmës}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "genericTypeArgMissing": "[OlCEv][นั้\"Generic\" rëqµïrës æt lëæst øñë tÿpë ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "genericTypeArgTypeVar": "[09E7H][นั้Tÿpë ærgµmëñt før \"Generic\" mµst þë æ tÿpë værïæþlëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "genericTypeArgUnique": "[xHwpY][นั้Tÿpë ærgµmëñts før \"Generic\" mµst þë µñïqµëẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "globalReassignment": "[B2UyK][นั้\"{ñæmë}\" ïs æssïgñëð þëførë global ðëçlærætïøñẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "globalRedefinition": "[UZSMp][นั้\"{ñæmë}\" wæs ælrëæðÿ ðëçlærëð globalẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "implicitStringConcat": "[t0D1l][นั้Ïmplïçït strïñg çøñçætëñætïøñ ñøt ælløwëðẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "importCycleDetected": "[FFPSZ][นั้Çÿçlë ðëtëçtëð ïñ ïmpørt çhæïñẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "importDepthExceeded": "[8G4s1][นั้Ïmpørt çhæïñ ðëpth ëxçëëðëð {ðëpth}Ấğ倪İЂҰक्र्तिृまนั้ढूँ]", + "importResolveFailure": "[oBYA4][นั้Ïmpørt \"{ïmpørtÑæmë}\" çøµlð ñøt þë rësølvëðẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "importSourceResolveFailure": "[hjHFa][นั้Ïmpørt \"{ïmpørtÑæmë}\" çøµlð ñøt þë rësølvëð frøm søµrçëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "importSymbolUnknown": "[jY9ZH][นั้\"{ñæmë}\" ïs µñkñøwñ ïmpørt sÿmþølẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "incompatibleMethodOverride": "[i45Ka][นั้Mëthøð \"{ñæmë}\" øvërrïðës çlæss \"{çlæssÑæmë}\" ïñ æñ ïñçømpætïþlë mæññërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "inconsistentIndent": "[gdrcy][นั้Üñïñðëñt æmøµñt ðøës ñøt mætçh prëvïøµs ïñðëñtẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "inconsistentTabs": "[I3Z6K][นั้Ïñçøñsïstëñt µsë øf tæþs æñð spæçës ïñ ïñðëñtætïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "initMethodSelfParamTypeVar": 
"[S5RC7][นั้Tÿpë æññøtætïøñ før \"self\" pæræmëtër øf \"__init__\" mëthøð çæññøt çøñtæïñ çlæss-sçøpëð tÿpë værïæþlësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "initMustReturnNone": "[RlXyC][นั้Rëtµrñ tÿpë øf \"__init__\" mµst þë NoneẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "initSubclassCallFailed": "[w22Kh][นั้Ïñçørrëçt këÿwørð ærgµmëñts før __init_subclass__ mëthøðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "initSubclassClsParam": "[6CWuS][นั้__init_subclass__ øvërrïðë shøµlð tækë æ \"cls\" pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "initVarNotAllowed": "[Bb6V0][นั้\"InitVar\" ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "instanceMethodSelfParam": "[dPZPj][นั้Ïñstæñçë mëthøðs shøµlð tækë æ \"self\" pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "instanceVarOverridesClassVar": "[cfYeg][นั้Ïñstæñçë værïæþlë \"{ñæmë}\" øvërrïðës çlæss værïæþlë øf sæmë ñæmë ïñ çlæss \"{çlæssÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "instantiateAbstract": "[IyeLb][นั้Çæññøt ïñstæñtïætë æþstræçt çlæss \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "instantiateProtocol": "[Xa6p2][นั้Çæññøt ïñstæñtïætë Protocol çlæss \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "internalBindError": "[PnkgK][นั้Æñ ïñtërñæl ërrør øççµrrëð whïlë þïñðïñg fïlë \"{fïlë}\": {mëssægë}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "internalParseError": "[T91nL][นั้Æñ ïñtërñæl ërrør øççµrrëð whïlë pærsïñg fïlë \"{fïlë}\": {mëssægë}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "internalTypeCheckingError": "[9E5Bn][นั้Æñ ïñtërñæl ërrør øççµrrëð whïlë tÿpë çhëçkïñg fïlë \"{fïlë}\": {mëssægë}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "invalidIdentifierChar": "[Vpy5i][นั้Ïñvælïð çhæræçtër ïñ ïðëñtïfïërẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "invalidStubStatement": "[sxuu1][นั้§tætëmëñt ïs mëæñïñglëss wïthïñ æ tÿpë stub fïlëẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "invalidTokenChars": "[n9Jty][นั้Ïñvælïð çhæræçtër \"{tëxt}\" ïñ tøkëñẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "isInstanceInvalidType": "[Q3jK3][นั้§ëçøñð ærgµmëñt tø \"isinstance\" mµst þë æ çlæss ør tuple øf 
çlæssësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "isSubclassInvalidType": "[6Q7qf][นั้§ëçøñð ærgµmëñt tø \"issubclass\" mµst þë æ çlæss ør tuple øf çlæssësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "keyValueInSet": "[tmmyt][นั้Këÿ/vælµë pæïrs ærë ñøt ælløwëð wïthïñ æ setẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "keywordArgInTypeArgument": "[BzcKx][นั้Këÿwørð ærgµmëñts çæññøt þë µsëð ïñ tÿpë ærgµmëñt lïstsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "keywordOnlyAfterArgs": "[RLvT4][นั้Këÿwørð-øñlÿ ærgµmëñt sëpærætør ñøt ælløwëð æftër \"*\" pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "keywordParameterMissing": "[gTcAl][นั้Øñë ør mørë këÿwørð pæræmëtërs mµst følløw \"*\" pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "keywordSubscriptIllegal": "[khu47][นั้Këÿwørð ærgµmëñts wïthïñ sµþsçrïpts ærë ñøt sµppørtëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "lambdaReturnTypePartiallyUnknown": "[Z5ML1][นั้Rëtµrñ tÿpë øf læmþðæ, \"{rëtµrñTÿpë}\", ïs pærtïællÿ µñkñøwñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "lambdaReturnTypeUnknown": "[h4icY][นั้Rëtµrñ tÿpë øf læmþðæ ïs µñkñøwñẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "listAssignmentMismatch": "[DZh64][นั้Ëxprëssïøñ wïth tÿpë \"{tÿpë}\" çæññøt þë æssïgñëð tø tærgët lïstẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "listInAnnotation": "[i5U8t][นั้List ëxprëssïøñ ñøt ælløwëð ïñ tÿpë ëxprëssïøñẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "literalEmptyArgs": "[VkrFm][นั้Ëxpëçtëð øñë ør mørë tÿpë ærgµmëñts æftër \"Literal\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "literalNamedUnicodeEscape": "[8cbe7][นั้Ñæmëð µñïçøðë ësçæpë sëqµëñçës ærë ñøt sµppørtëð ïñ \"Literal\" strïñg æññøtætïøñsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "literalNotAllowed": "[FAk6E][นั้\"Literal\" çæññøt þë µsëð ïñ thïs çøñtëxt wïthøµt æ tÿpë ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "literalNotCallable": "[C75sx][นั้Literal tÿpë çæññøt þë ïñstæñtïætëðẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "literalUnsupportedType": "[10Yse][นั้Tÿpë ærgµmëñts før \"Literal\" mµst þë None, æ lïtëræl vælµë (int, bool, str, ør bytes), ør æñ enum 
vælµëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "matchIncompatible": "[9ljpM][นั้Match stætëmëñts rëqµïrë Pÿthøñ 3.10 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "matchIsNotExhaustive": "[BJ8EI][นั้Çæsës wïthïñ match stætëmëñt ðø ñøt ëxhæµstïvëlÿ hæñðlë æll vælµësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "maxParseDepthExceeded": "[5nAZx][นั้Mæxïmµm pærsë ðëpth ëxçëëðëð; þrëæk ëxprëssïøñ ïñtø smællër sµþ-ëxprëssïøñsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "memberAccess": "[YP5V0][นั้Çæññøt æççëss ættrïþµtë \"{ñæmë}\" før çlæss \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "memberDelete": "[o47cn][นั้Çæññøt ðëlëtë ættrïþµtë \"{ñæmë}\" før çlæss \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "memberSet": "[JCDqa][นั้Çæññøt æssïgñ tø ættrïþµtë \"{ñæmë}\" før çlæss \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "metaclassConflict": "[XgAJj][นั้Thë mëtæçlæss øf æ ðërïvëð çlæss mµst þë æ sµþçlæss øf thë mëtæçlæssës øf æll ïts þæsë çlæssësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "metaclassDuplicate": "[wrGuH][นั้Øñlÿ øñë mëtæçlæss çæñ þë prøvïðëðẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "metaclassIsGeneric": "[K5fXC][นั้Mëtæçlæss çæññøt þë gëñërïçẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "methodNotDefined": "[ZuPf7][นั้\"{ñæmë}\" mëthøð ñøt ðëfïñëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "methodNotDefinedOnType": "[sEtO5][นั้\"{ñæmë}\" mëthøð ñøt ðëfïñëð øñ tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "methodOrdering": "[0M0tn][นั้Çæññøt çrëætë çøñsïstëñt mëthøð ørðërïñgẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "methodOverridden": "[2Bu15][นั้\"{ñæmë}\" øvërrïðës mëthøð øf sæmë ñæmë ïñ çlæss \"{çlæssÑæmë}\" wïth ïñçømpætïþlë tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "methodReturnsNonObject": "[9nnVb][นั้\"{ñæmë}\" mëthøð ðøës ñøt rëtµrñ æñ øþjëçtẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "missingSuperCall": "[jNXGA][นั้Mëthøð \"{mëthøðÑæmë}\" ðøës ñøt çæll thë mëthøð øf thë sæmë ñæmë ïñ pærëñt çlæssẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "mixingBytesAndStr": "[Ng6gL][นั้Bytes æñð str vælµës çæññøt þë 
çøñçætëñætëðẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "moduleAsType": "[p9N4B][นั้Møðµlë çæññøt þë µsëð æs æ tÿpëẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "moduleNotCallable": "[YY0Jq][นั้Møðµlë ïs ñøt çællæþlëẤğ倪İЂҰक्र्นั้ढूँ]", + "moduleUnknownMember": "[tegoa][นั้\"{mëmþërÑæmë}\" ïs ñøt æ kñøwñ ættrïþµtë øf møðµlë \"{møðµlëÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "namedExceptAfterCatchAll": "[pMR1l][นั้Æ ñæmëð except çlæµsë çæññøt æppëær æftër çætçh-æll except çlæµsëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "namedParamAfterParamSpecArgs": "[sF38r][นั้Këÿwørð pæræmëtër \"{ñæmë}\" çæññøt æppëær ïñ sïgñætµrë æftër ParamSpec args pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "namedTupleEmptyName": "[vnXqF][นั้Ñæmës wïthïñ æ ñæmëð tuple çæññøt þë ëmptÿẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "namedTupleEntryRedeclared": "[0tiaC][นั้Çæññøt øvërrïðë \"{ñæmë}\" þëçæµsë pærëñt çlæss \"{çlæssÑæmë}\" ïs æ ñæmëð tupleẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "namedTupleFieldUnderscore": "[eFGk5][นั้Named tuple fïëlð ñæmës çæññøt stært wïth æñ µñðërsçørëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "namedTupleFirstArg": "[L5ZXq][นั้Ëxpëçtëð ñæmëð tuple çlæss ñæmë æs fïrst ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "namedTupleMultipleInheritance": "[KYJOA][นั้Mµltïplë ïñhërïtæñçë wïth NamedTuple ïs ñøt sµppørtëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "namedTupleNameKeyword": "[g6NTa][นั้Fïëlð ñæmës çæññøt þë æ këÿwørðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "namedTupleNameType": "[AxfdS][นั้Ëxpëçtëð twø-ëñtrÿ tuple spëçïfÿïñg ëñtrÿ ñæmë æñð tÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "namedTupleNameUnique": "[TQaej][นั้Ñæmës wïthïñ æ ñæmëð tuple mµst þë µñïqµëẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "namedTupleNoTypes": "[Fn6FF][นั้\"namedtuple\" prøvïðës ñø tÿpës før tuple ëñtrïës; µsë \"NamedTuple\" ïñstëæðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "namedTupleSecondArg": "[SqoXY][นั้Ëxpëçtëð ñæmëð tuple ëñtrÿ list æs sëçøñð ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "newClsParam": "[EUESX][นั้__new__ øvërrïðë shøµlð tækë æ \"cls\" 
pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "newTypeAnyOrUnknown": "[D4ZjA][นั้Thë sëçøñð ærgµmëñt tø NewType mµst þë æ kñøwñ çlæss, ñøt Any ør UnknownẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "newTypeBadName": "[cqWvO][นั้Thë fïrst ærgµmëñt tø NewType mµst þë æ strïñg lïtërælẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "newTypeLiteral": "[4k8om][นั้NewType çæññøt þë µsëð wïth Literal tÿpëẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "newTypeNameMismatch": "[kQgMv][นั้NewType mµst þë æssïgñëð tø æ værïæþlë wïth thë sæmë ñæmëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "newTypeNotAClass": "[ta6tZ][นั้Ëxpëçtëð çlæss æs sëçøñð ærgµmëñt tø NewTypeẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "newTypeParamCount": "[6b2ro][นั้NewType rëqµïrës twø pøsïtïøñæl ærgµmëñtsẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "newTypeProtocolClass": "[1l02t][นั้NewType çæññøt þë µsëð wïth strµçtµræl tÿpë (æ Protocol ør TypedDict çlæss)Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "noOverload": "[IcBNQ][นั้Ñø øvërløæðs før \"{ñæmë}\" mætçh thë prøvïðëð ærgµmëñtsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "noReturnContainsReturn": "[nBLDq][นั้Fµñçtïøñ wïth ðëçlærëð return tÿpë \"NoReturn\" çæññøt ïñçlµðë æ return stætëmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "noReturnContainsYield": "[V3G36][นั้Fµñçtïøñ wïth ðëçlærëð rëtµrñ tÿpë \"NoReturn\" çæññøt ïñçlµðë æ yield stætëmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "noReturnReturnsNone": "[O3XA6][นั้Fµñçtïøñ wïth ðëçlærëð rëtµrñ tÿpë \"NoReturn\" çæññøt rëtµrñ \"None\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "nonDefaultAfterDefault": "[mFFgP][นั้Ñøñ-ðëfæµlt ærgµmëñt følløws ðëfæµlt ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "nonLocalInModule": "[kmLlv][นั้Nonlocal ðëçlærætïøñ ñøt ælløwëð æt møðµlë lëvëlẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "nonLocalNoBinding": "[WTA2d][นั้Ñø þïñðïñg før nonlocal \"{ñæmë}\" føµñðẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "nonLocalReassignment": "[T1M6J][นั้\"{ñæmë}\" ïs æssïgñëð þëførë nonlocal ðëçlærætïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "nonLocalRedefinition": "[gwh1h][นั้\"{ñæmë}\" wæs ælrëæðÿ ðëçlærëð 
nonlocalẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "noneNotCallable": "[sIZ5J][นั้Øþjëçt øf tÿpë \"None\" çæññøt þë çællëðẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "noneNotIterable": "[spDD0][นั้Øþjëçt øf tÿpë \"None\" çæññøt þë µsëð æs ïtëræþlë vælµëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "noneNotSubscriptable": "[Emzwj][นั้Øþjëçt øf tÿpë \"None\" ïs ñøt sµþsçrïptæþlëẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "noneNotUsableWith": "[zlOOD][นั้Øþjëçt øf tÿpë \"None\" çæññøt þë µsëð with \"with\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "noneNotUsableWithAsync": "[2p5GE][นั้Øþjëçt øf tÿpë \"Ñøñë\" çæññøt þë µsëð wïth \"æsÿñç wïth\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "noneOperator": "[3U0d3][นั้Øpërætør \"{øpërætør}\" ñøt sµppørtëð før \"None\"Ấğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "noneUnknownMember": "[4KvEX][นั้\"{ñæmë}\" ïs ñøt æ kñøwñ ættrïþµtë øf \"None\"Ấğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "nonlocalTypeParam": "[S78yW][นั้Nonlocal þïñðïñg ïs ñøt ælløwëð før tÿpë pæræmëtër \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "notRequiredArgCount": "[uOeAb][นั้Ëxpëçtëð æ sïñglë tÿpë ærgµmëñt æftër \"NotRequired\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "notRequiredNotInTypedDict": "[Vl6XL][นั้\"NotRequired\" ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "objectNotCallable": "[bzlKk][นั้Øþjëçt øf tÿpë \"{tÿpë}\" ïs ñøt çællæþlëẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "obscuredClassDeclaration": "[ixjN9][นั้Çlæss ðëçlærætïøñ \"{ñæmë}\" ïs øþsçµrëð þÿ æ ðëçlærætïøñ øf thë sæmë ñæmëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "obscuredFunctionDeclaration": "[O71DX][นั้Fµñçtïøñ ðëçlærætïøñ \"{ñæmë}\" ïs øþsçµrëð þÿ æ ðëçlærætïøñ øf thë sæmë ñæmëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "obscuredMethodDeclaration": "[m04yf][นั้Mëthøð ðëçlærætïøñ \"{ñæmë}\" ïs øþsçµrëð þÿ æ ðëçlærætïøñ øf thë sæmë ñæmëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "obscuredParameterDeclaration": "[NaF76][นั้Pæræmëtër ðëçlærætïøñ \"{ñæmë}\" ïs øþsçµrëð þÿ æ ðëçlærætïøñ øf thë sæmë ñæmëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "obscuredTypeAliasDeclaration": "[0GZdR][นั้Tÿpë ælïæs ðëçlærætïøñ 
\"{ñæmë}\" ïs øþsçµrëð þÿ æ ðëçlærætïøñ øf thë sæmë ñæmëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "obscuredVariableDeclaration": "[HR10j][นั้Ðëçlærætïøñ \"{ñæmë}\" ïs øþsçµrëð þÿ æ ðëçlærætïøñ øf thë sæmë ñæmëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "operatorLessOrGreaterDeprecated": "[bNZp7][นั้Øpërætør \"<>\" ïs ñøt sµppørtëð ïñ Pÿthøñ 3; µsë \"!=\" ïñstëæðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "optionalExtraArgs": "[yW5W0][นั้Ëxpëçtëð øñë tÿpë ærgµmëñt æftër \"Optional\"Ấğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "orPatternIrrefutable": "[peFRW][นั้Ïrrëfµtæþlë pættërñ ælløwëð øñlÿ æs thë læst sµþpættërñ ïñ æñ \"or\" pættërñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "orPatternMissingName": "[OxMxP][นั้Æll sµþpættërñs wïthïñ æñ \"or\" pættërñ mµst tærgët thë sæmë ñæmësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "overlappingKeywordArgs": "[46dQE][นั้Tÿpëð ðïçtïøñærÿ øvërlæps wïth këÿwørð pæræmëtër: {ñæmës}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "overlappingOverload": "[SCQMv][นั้Øvërløæð {øþsçµrëð} før \"{ñæmë}\" wïll ñëvër þë µsëð þëçæµsë ïts pæræmëtërs øvërlæp øvërløæð {øþsçµrëðßÿ}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "overloadAbstractImplMismatch": "[IgMzu][นั้Øvërløæðs mµst mætçh æþstræçt stætµs øf ïmplëmëñtætïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "overloadAbstractMismatch": "[54DCM][นั้Øvërløæðs mµst æll þë æþstræçt ør ñøtẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "overloadClassMethodInconsistent": "[8y6vM][นั้Øvërløæðs før \"{ñæmë}\" µsë @classmethod ïñçøñsïstëñtlÿẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "overloadFinalImpl": "[Tj0nI][นั้@final ðëçørætør shøµlð þë æpplïëð øñlÿ tø thë ïmplëmëñtætïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "overloadFinalNoImpl": "[CReyN][นั้Øñlÿ thë fïrst øvërløæð shøµlð þë mærkëð @finalẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "overloadImplementationMismatch": "[dXlXE][นั้Øvërløæðëð ïmplëmëñtætïøñ ïs ñøt çøñsïstëñt wïth sïgñætµrë øf øvërløæð {ïñðëx}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "overloadOverrideImpl": "[vvyEk][นั้@override ðëçørætør shøµlð þë æpplïëð øñlÿ tø thë 
ïmplëmëñtætïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "overloadOverrideNoImpl": "[cMlkw][นั้Øñlÿ thë fïrst øvërløæð shøµlð þë mærkëð @overrideẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "overloadReturnTypeMismatch": "[6BN74][นั้Øvërløæð {prëvÏñðëx} før \"{ñæmë}\" øvërlæps øvërløæð {ñëwÏñðëx} æñð rëtµrñs æñ ïñçømpætïþlë tÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "overloadStaticMethodInconsistent": "[PKQvM][นั้Øvërløæðs før \"{ñæmë}\" µsë @staticmethod ïñçøñsïstëñtlÿẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "overloadWithoutImplementation": "[mn33a][นั้\"{ñæmë}\" ïs mærkëð æs overload, þµt ñø ïmplëmëñtætïøñ ïs prøvïðëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "overriddenMethodNotFound": "[YKdBy][นั้Mëthøð \"{ñæmë}\" ïs mærkëð æs override, þµt ñø þæsë mëthøð øf sæmë ñæmë ïs prësëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "overrideDecoratorMissing": "[2BnJq][นั้Mëthøð \"{ñæmë}\" ïs ñøt mærkëð æs override þµt ïs øvërrïðïñg æ mëthøð ïñ çlæss \"{çlæssÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "paramAfterKwargsParam": "[wJZkp][นั้Pæræmëtër çæññøt følløw \"**\" pæræmëtërẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "paramAlreadyAssigned": "[srzhT][นั้Pæræmëtër \"{ñæmë}\" ïs ælrëæðÿ æssïgñëðẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "paramAnnotationMissing": "[1OYGc][นั้Tÿpë æññøtætïøñ ïs mïssïñg før pæræmëtër \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "paramAssignmentMismatch": "[Q8zha][นั้Ëxprëssïøñ øf tÿpë \"{søµrçëTÿpë}\" çæññøt þë æssïgñëð tø pæræmëtër øf tÿpë \"{pæræmTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "paramNameMissing": "[ivXu4][นั้Ñø pæræmëtër ñæmëð \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "paramSpecArgsKwargsDuplicate": "[4Ie64][นั้Ærgµmëñts før ParamSpec \"{tÿpë}\" hævë ælrëæðÿ þëëñ prøvïðëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "paramSpecArgsKwargsUsage": "[oVRV0][นั้\"args\" æñð \"kwargs\" ættrïþµtës øf ParamSpec mµst þøth æppëær wïthïñ æ fµñçtïøñ sïgñætµrëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "paramSpecArgsMissing": "[rd6zO][นั้Ærgµmëñts før ParamSpec \"{tÿpë}\" ærë 
mïssïñgẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "paramSpecArgsUsage": "[2U9SN][นั้\"args\" ættrïþµtë øf ParamSpec ïs vælïð øñlÿ whëñ µsëð wïth *args pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "paramSpecAssignedName": "[ww5mM][นั้ParamSpec mµst þë æssïgñëð tø æ værïæþlë ñæmëð \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "paramSpecContext": "[y6xyK][นั้ParamSpec ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "paramSpecDefaultNotTuple": "[6Tdff][นั้Ëxpëçtëð ëllïpsïs, æ tuple ëxprëssïøñ, ør ParamSpec før ðëfæµlt vælµë øf ParamSpecẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "paramSpecFirstArg": "[W2Y3X][นั้Ëxpëçtëð ñæmë øf ParamSpec æs fïrst ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "paramSpecKwargsUsage": "[2UE71][นั้\"kwargs\" ættrïþµtë øf ParamSpec ïs vælïð øñlÿ whëñ µsëð wïth **kwargs pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "paramSpecNotUsedByOuterScope": "[5Pk7H][นั้ParamSpec \"{ñæmë}\" hæs ñø mëæñïñg ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "paramSpecUnknownArg": "[6zeYc][นั้ParamSpec ðøës ñøt sµppørt mørë thæñ øñë ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "paramSpecUnknownMember": "[GhhiY][นั้\"{ñæmë}\" ïs ñøt æ kñøwñ ættrïþµtë øf ParamSpecẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "paramSpecUnknownParam": "[YADLo][นั้\"{ñæmë}\" ïs µñkñøwñ pæræmëtër tø ParamSpecẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "paramTypeCovariant": "[USAuF][นั้Çøværïæñt tÿpë værïæþlë çæññøt þë µsëð ïñ pæræmëtër tÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "paramTypePartiallyUnknown": "[1ShLP][นั้Tÿpë øf pæræmëtër \"{pæræmÑæmë}\" ïs pærtïællÿ µñkñøwñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "paramTypeUnknown": "[fweDh][นั้Tÿpë øf pæræmëtër \"{pæræmÑæmë}\" ïs µñkñøwñẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "parenthesizedContextManagerIllegal": "[NBxCb][นั้Pærëñthësës withïñ \"with\" stætëmëñt rëqµïrës Pÿthøñ 3.9 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "patternNeverMatches": "[lyG7p][นั้Pættërñ wïll ñëvër þë mætçhëð før sµþjëçt tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "positionArgAfterNamedArg": 
"[szCz2][นั้Pøsïtïøñæl ærgµmëñt çæññøt æppëær æftër këÿwørð ærgµmëñtsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "positionArgAfterUnpackedDictArg": "[M8vOq][นั้Pøsïtïøñæl ærgµmëñt çæññøt æppëær æftër këÿwørð ærgµmëñt µñpæçkïñgẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "positionOnlyAfterArgs": "[Vqb7c][นั้Pøsïtïøñ-øñlÿ pæræmëtër sëpærætør ñøt ælløwëð æftër \"*\" pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "positionOnlyAfterKeywordOnly": "[z1FIK][นั้\"/\" pæræmëtër mµst æppëær þëførë \"*\" pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "positionOnlyAfterNon": "[iXb2r][นั้Pøsïtïøñ-øñlÿ pæræmëtër ñøt ælløwëð æftër pæræmëtër thæt ïs ñøt pøsïtïøñ-øñlÿẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "positionOnlyFirstParam": "[bmnbs][นั้Pøsïtïøñ-øñlÿ pæræmëtër sëpærætør ñøt ælløwëð æs fïrst pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "positionOnlyIncompatible": "[Ick28][นั้Pøsïtïøñ-øñlÿ pæræmëtër sëpærætør rëqµïrës Pÿthøñ 3.8 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "privateImportFromPyTypedModule": "[VRdf4][นั้\"{ñæmë}\" ïs ñøt ëxpørtëð frøm møðµlë \"{møðµlë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "privateUsedOutsideOfClass": "[3YBNL][นั้\"{ñæmë}\" ïs prïvætë æñð µsëð øµtsïðë øf thë çlæss ïñ whïçh ït ïs ðëçlærëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "privateUsedOutsideOfModule": "[TgDgt][นั้\"{ñæmë}\" ïs prïvætë æñð µsëð øµtsïðë øf thë møðµlë ïñ whïçh ït ïs ðëçlærëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "propertyOverridden": "[mwp5C][นั้\"{ñæmë}\" ïñçørrëçtlÿ øvërrïðës property øf sæmë ñæmë ïñ çlæss \"{çlæssÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "propertyStaticMethod": "[qs3pr][นั้§tætïç mëthøðs ñøt ælløwëð før property getter, setter ør deleterẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "protectedUsedOutsideOfClass": "[z2Y7X][นั้\"{ñæmë}\" ïs prøtëçtëð æñð µsëð øµtsïðë øf thë çlæss ïñ whïçh ït ïs ðëçlærëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "protocolBaseClass": "[lv3rP][นั้Protocol çlæss \"{çlæssTÿpë}\" çæññøt ðërïvë frøm ñøñ-Protocol çlæss 
\"{þæsëTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "protocolBaseClassWithTypeArgs": "[tpYEx][นั้Tÿpë ærgµmëñts ærë ñøt ælløwëð wïth Protocol çlæss whëñ µsïñg tÿpë pæræmëtër sÿñtæxẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "protocolIllegal": "[jYjYe][นั้Üsë øf \"Protocol\" rëqµïrës Pÿthøñ 3.7 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "protocolNotAllowed": "[2GEt6][นั้\"Protocol\" çæññøt þë µsëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "protocolTypeArgMustBeTypeParam": "[WTgkM][นั้Tÿpë ærgµmëñt før \"Protocol\" mµst þë æ tÿpë pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "protocolUnsafeOverlap": "[79LbC][นั้Çlæss øvërlæps \"{ñæmë}\" µñsæfëlÿ æñð çøµlð prøðµçë æ mætçh æt rµñtïmëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "protocolVarianceContravariant": "[B4htZ][นั้Tÿpë værïæþlë \"{værïæþlë}\" µsëð ïñ gëñërïç Protocol \"{çlæss}\" shøµlð þë çøñtræværïæñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "protocolVarianceCovariant": "[Hcnn5][นั้Tÿpë værïæþlë \"{værïæþlë}\" µsëð ïñ gëñërïç Protocol \"{çlæss}\" shøµlð þë çøværïæñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "protocolVarianceInvariant": "[o8oB7][นั้Tÿpë værïæþlë \"{værïæþlë}\" µsëð ïñ gëñërïç Protocol \"{çlæss}\" shøµlð þë ïñværïæñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "pyrightCommentInvalidDiagnosticBoolValue": "[eaJY0][นั้Pyright çømmëñt ðïrëçtïvë mµst þë følløwëð þÿ \"=\" æñð æ vælµë øf true ør falseẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "pyrightCommentInvalidDiagnosticSeverityValue": "[2YA7K][นั้Pyright çømmëñt ðïrëçtïvë mµst þë følløwëð þÿ \"=\" æñð æ vælµë øf true, false, error, warning, information, ør noneẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "pyrightCommentMissingDirective": "[yy6rB][นั้Pyright çømmëñt mµst þë følløwëð þÿ æ ðïrëçtïvë (basic ør strict) ør æ ðïægñøstïç rµlëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "pyrightCommentNotOnOwnLine": "[mM2bV][นั้Pyright çømmëñts µsëð tø çøñtrøl fïlë-lëvël sëttïñgs mµst æppëær øñ thëïr øwñ 
lïñëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "pyrightCommentUnknownDiagnosticRule": "[DFAZp][นั้\"{rµlë}\" ïs æñ µñkñøwñ ðïægñøstïç rµlë før pyright çømmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "pyrightCommentUnknownDiagnosticSeverityValue": "[Tgt0Y][นั้\"{vælµë}\" ïs ïñvælïð vælµë før pyright çømmëñt; ëxpëçtëð true, false, error, warning, information, ør noneẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "pyrightCommentUnknownDirective": "[HD6T4][นั้\"{ðïrëçtïvë}\" ïs æñ µñkñøwñ ðïrëçtïvë før pyright çømmëñt; ëxpëçtëð \"strict\", \"standard\", ør \"basic\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "readOnlyArgCount": "[B1Erm][นั้Ëxpëçtëð æ sïñglë tÿpë ærgµmëñt æftër \"ReadOnly\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "readOnlyNotInTypedDict": "[xJrLN][นั้\"ReadOnly\" ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "recursiveDefinition": "[G3UUN][นั้Tÿpë øf \"{ñæmë}\" çøµlð ñøt þë ðëtërmïñëð þëçæµsë ït rëfërs tø ïtsëlfẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "relativeImportNotAllowed": "[JZqjC][นั้Rëlætïvë ïmpørts çæññøt þë µsëð wïth \"import .a\" førm; µsë \"from . 
import a\" ïñstëæðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "requiredArgCount": "[aZX4z][นั้Ëxpëçtëð æ sïñglë tÿpë ærgµmëñt æftër \"Required\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "requiredNotInTypedDict": "[TArW6][นั้\"Required\" ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "returnInAsyncGenerator": "[qb5pt][นั้Rëtµrñ stætëmëñt wïth vælµë ïs ñøt ælløwëð ïñ async gëñërætørẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "returnInExceptionGroup": "[wRRjc][นั้\"return\" ïs ñøt ælløwëð ïñ æñ \"except*\" þløçkẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "returnMissing": "[kPevK][นั้Fµñçtïøñ wïth ðëçlærëð rëtµrñ tÿpë \"{rëtµrñTÿpë}\" mµst rëtµrñ vælµë øñ æll çøðë pæthsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "returnOutsideFunction": "[O4SJp][นั้\"return\" çæñ þë µsëð øñlÿ wïthïñ æ fµñçtïøñẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "returnTypeContravariant": "[KkMhh][นั้Çøñtræværïæñt tÿpë værïæþlë çæññøt þë µsëð ïñ rëtµrñ tÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "returnTypeMismatch": "[QYqHy][นั้Tÿpë \"{ëxprTÿpë}\" ïs ñøt æssïgñæþlë tø rëtµrñ tÿpë \"{rëtµrñTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "returnTypePartiallyUnknown": "[261DZ][นั้Rëtµrñ tÿpë, \"{rëtµrñTÿpë}\", ïs pærtïællÿ µñkñøwñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "returnTypeUnknown": "[II3Ix][นั้Rëtµrñ tÿpë ïs µñkñøwñẤğ倪İЂҰक्र्นั้ढूँ]", + "revealLocalsArgs": "[qKEIL][นั้Ëxpëçtëð ñø ærgµmëñts før \"reveal_locals\" çællẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "revealLocalsNone": "[xOTfI][นั้Ñø locals ïñ thïs sçøpëẤğ倪İЂҰक्र्นั้ढूँ]", + "revealTypeArgs": "[Sdo9V][นั้Ëxpëçtëð æ sïñglë pøsïtïøñæl ærgµmëñt før \"reveal_type\" çællẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "revealTypeExpectedTextArg": "[6cFBk][นั้Thë \"expected_text\" ærgµmëñt før fµñçtïøñ \"reveal_type\" mµst þë æ str lïtëræl vælµëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "revealTypeExpectedTextMismatch": "[ILnEV][นั้Tÿpë tëxt mïsmætçh; ëxpëçtëð \"{ëxpëçtëð}\" þµt rëçëïvëð \"{rëçëïvëð}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "revealTypeExpectedTypeMismatch": "[3XS8T][นั้Tÿpë mïsmætçh; 
ëxpëçtëð \"{ëxpëçtëð}\" þµt rëçëïvëð \"{rëçëïvëð}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "selfTypeContext": "[Hugyy][นั้\"Self\" ïs ñøt vælïð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "selfTypeMetaclass": "[YvoBy][นั้\"Self\" çæññøt þë µsëð wïthïñ æ mëtæçlæss (æ sµþçlæss øf \"tÿpë\")Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "selfTypeWithTypedSelfOrCls": "[sYgyY][นั้\"Self\" çæññøt þë µsëð ïñ æ fµñçtïøñ wïth æ `self` ør `cls` pæræmëtër thæt hæs æ tÿpë æññøtætïøñ øthër thæñ \"Self\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "sentinelBadName": "[dPrFn][นั้Thë fïrst ærgµmëñt tø Sentinel mµst þë æ strïñg lïtërælẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "sentinelNameMismatch": "[3VQf2][นั้Sentinel mµst þë æssïgñëð tø æ værïæþlë wïth thë sæmë ñæmëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "sentinelParamCount": "[e5Zwc][นั้Sentinel rëqµïrës øñë pøsïtïøñæl ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "setterGetterTypeMismatch": "[8ZD1z][นั้Property setter vælµë tÿpë ïs ñøt æssïgñæþlë tø thë getter rëtµrñ tÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "singleOverload": "[YQVUc][นั้\"{ñæmë}\" ïs mærkëð æs øvërløæð, þµt æððïtïøñæl øvërløæðs ærë mïssïñgẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "slotsAttributeError": "[OF4rK][นั้\"{ñæmë}\" ïs ñøt spëçïfïëð ïñ __slots__Ấğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "slotsClassVarConflict": "[tcS3q][นั้\"{ñæmë}\" çøñflïçts wïth ïñstæñçë værïæþlë ðëçlærëð ïñ __slots__Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "starPatternInAsPattern": "[ZFdWe][นั้§tær pættërñ çæññøt þë µsëð wïth \"as\" tærgëtẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "starPatternInOrPattern": "[y9LX3][นั้§tær pættërñ çæññøt þë ØRëð wïthïñ øthër pættërñsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "starStarWildcardNotAllowed": "[Ll1UV][นั้** çæññøt þë µsëð wïth wïlðçærð \"_\"Ấğ倪İЂҰक्र्तिृまนั้ढूँ]", + "staticClsSelfParam": "[mO4QU][นั้§tætïç mëthøðs shøµlð ñøt tækë æ \"self\" ør \"cls\" pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "stringNonAsciiBytes": "[dFNRn][นั้Ñøñ-ASCII çhæræçtër ñøt ælløwëð ïñ þÿtës strïñg 
lïtërælẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "stringNotSubscriptable": "[hKZT7][นั้§trïñg ëxprëssïøñ çæññøt þë sµþsçrïptëð ïñ tÿpë ëxprëssïøñ; ëñçløsë ëñtïrë ëxprëssïøñ ïñ qµøtësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "stringUnsupportedEscape": "[K2WsY][นั้Üñsµppørtëð ësçæpë sëqµëñçë ïñ strïñg lïtërælẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "stringUnterminated": "[jUKYA][นั้§trïñg lïtëræl ïs µñtërmïñætëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "stubFileMissing": "[Ua5GT][นั้Stub fïlë ñøt føµñð før \"{ïmpørtÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "stubUsesGetAttr": "[KMBwK][นั้Tÿpë stub fïlë ïs ïñçømplëtë; \"__getattr__\" øþsçµrës tÿpë ërrørs før møðµlëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "sublistParamsIncompatible": "[582LE][นั้Sublist pæræmëtërs ærë ñøt sµppørtëð ïñ Pÿthøñ 3.xẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "superCallArgCount": "[iLYq6][นั้Ëxpëçtëð ñø mørë thæñ twø ærgµmëñts tø \"super\" çællẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "superCallFirstArg": "[HSEvD][นั้Ëxpëçtëð çlæss tÿpë æs fïrst ærgµmëñt tø \"super\" çæll þµt rëçëïvëð \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "superCallSecondArg": "[dKoHi][นั้§ëçøñð ærgµmëñt tø \"super\" çæll mµst þë øþjëçt ør çlæss thæt ðërïvës frøm \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "superCallZeroArgForm": "[0XO27][นั้Zërø-ærgµmëñt førm øf \"super\" çæll ïs vælïð øñlÿ wïthïñ æ mëthøðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "superCallZeroArgFormStaticMethod": "[9hJPB][นั้Zërø-ærgµmëñt førm øf \"super\" çæll ïs ñøt vælïð wïthïñ æ stætïç mëthøðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "symbolIsPossiblyUnbound": "[cUgue][นั้\"{ñæmë}\" ïs pøssïþlÿ µñþøµñðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "symbolIsUnbound": "[zhGl5][นั้\"{ñæmë}\" ïs µñþøµñðẤğ倪İЂҰक्นั้ढूँ]", + "symbolIsUndefined": "[qCm6F][นั้\"{ñæmë}\" ïs ñøt ðëfïñëðẤğ倪İЂҰक्र्นั้ढूँ]", + "symbolOverridden": "[JwRrv][นั้\"{ñæmë}\" øvërrïðës sÿmþøl øf sæmë ñæmë ïñ çlæss \"{çlæssÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "templateStringBytes": "[eAzQf][นั้Tëmplætë strïñg lïtëræls (t-strïñgs) çæññøt þë 
þïñærÿẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "templateStringIllegal": "[lMHIH][นั้Tëmplætë strïñg lïtëræls (t-strïñgs) rëqµïrë Pÿthøñ 3.14 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "templateStringUnicode": "[Me0yb][นั้Tëmplætë strïñg lïtëræls (t-strïñgs) çæññøt þë µñïçøðëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "ternaryNotAllowed": "[5NH6C][นั้Tërñærÿ ëxprëssïøñ ñøt ælløwëð ïñ tÿpë ëxprëssïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "totalOrderingMissingMethod": "[eYfjn][นั้Çlæss mµst ðëfïñë øñë øf \"__lt__\", \"__le__\", \"__gt__\", ør \"__ge__\" tø µsë total_orderingẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "trailingCommaInFromImport": "[NcaZY][นั้Træïlïñg çømmæ ñøt ælløwëð wïthøµt sµrrøµñðïñg pærëñthësësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "tryWithoutExcept": "[6z9oA][นั้Try stætëmëñt mµst hævë æt lëæst øñë except ør finally çlæµsëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "tupleAssignmentMismatch": "[xySRW][นั้Ëxprëssïøñ wïth tÿpë \"{tÿpë}\" çæññøt þë æssïgñëð tø tærgët tupleẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "tupleInAnnotation": "[j2RAZ][นั้Tuple ëxprëssïøñ ñøt ælløwëð ïñ tÿpë ëxprëssïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "tupleIndexOutOfRange": "[aNqDv][นั้Ïñðëx {ïñðëx} ïs øµt øf ræñgë før tÿpë {tÿpë}Ấğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typeAliasIllegalExpressionForm": "[4u4ay][นั้Ïñvælïð ëxprëssïøñ førm før tÿpë ælïæs ðëfïñïtïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typeAliasIsRecursiveDirect": "[r8PyZ][นั้Tÿpë ælïæs \"{ñæmë}\" çæññøt µsë ïtsëlf ïñ ïts ðëfïñïtïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeAliasNotInModuleOrClass": "[iQpDJ][นั้Æ TypeAlias çæñ þë ðëfïñëð øñlÿ wïthïñ æ møðµlë ør çlæss sçøpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeAliasRedeclared": "[P036x][นั้\"{ñæmë}\" ïs ðëçlærëð æs æ TypeAlias æñð çæñ þë æssïgñëð øñlÿ øñçëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeAliasStatementBadScope": "[C24Up][นั้Æ type stætëmëñt çæñ þë µsëð øñlÿ wïthïñ æ møðµlë ør çlæss sçøpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeAliasStatementIllegal": "[2EW0Q][นั้Tÿpë ælïæs stætëmëñt rëqµïrës 
Pÿthøñ 3.12 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typeAliasTypeBadScope": "[kdK34][นั้Æ tÿpë ælïæs çæñ þë ðëfïñëð øñlÿ wïthïñ æ møðµlë ør çlæss sçøpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeAliasTypeBaseClass": "[RIpMs][นั้Æ tÿpë ælïæs ðëfïñëð ïñ æ \"type\" stætëmëñt çæññøt þë µsëð æs æ þæsë çlæssẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "typeAliasTypeMustBeAssigned": "[aV4Nn][นั้TypeAliasType mµst þë æssïgñëð tø æ værïæþlë wïth thë sæmë ñæmë æs thë tÿpë ælïæsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typeAliasTypeNameArg": "[dUUf1][นั้Fïrst ærgµmëñt tø TypeAliasType mµst þë æ strïñg lïtëræl rëprësëñtïñg thë ñæmë øf thë tÿpë ælïæsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typeAliasTypeNameMismatch": "[jW1bQ][นั้Ñæmë øf tÿpë ælïæs mµst mætçh thë ñæmë øf thë værïæþlë tø whïçh ït ïs æssïgñëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "typeAliasTypeParamInvalid": "[RdHRE][นั้Tÿpë pæræmëtër lïst mµst þë æ tuple çøñtæïñïñg øñlÿ TypeVar, TypeVarTuple, ør ParamSpecẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeAnnotationCall": "[7pNts][นั้Çæll ëxprëssïøñ ñøt ælløwëð ïñ tÿpë ëxprëssïøñẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typeAnnotationVariable": "[GeXWQ][นั้Værïæþlë ñøt ælløwëð ïñ tÿpë ëxprëssïøñẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "typeAnnotationWithCallable": "[JJENJ][นั้Tÿpë ærgµmëñt før \"type\" mµst þë æ çlæss; çællæþlës ærë ñøt sµppørtëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typeArgListExpected": "[2efoA][นั้Ëxpëçtëð ParamSpec, ëllïpsïs, ør list øf tÿpësẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typeArgListNotAllowed": "[oV7JF][นั้List ëxprëssïøñ ñøt ælløwëð før thïs tÿpë ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typeArgsExpectingNone": "[faycH][นั้Ëxpëçtëð ñø tÿpë ærgµmëñts før çlæss \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typeArgsMismatchOne": "[BBe1n][นั้Ëxpëçtëð øñë tÿpë ærgµmëñt þµt rëçëïvëð {rëçëïvëð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typeArgsMissingForAlias": "[hk8aw][นั้Ëxpëçtëð tÿpë ærgµmëñts før gëñërïç tÿpë ælïæs \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + 
"typeArgsMissingForClass": "[SkdfG][นั้Ëxpëçtëð tÿpë ærgµmëñts før gëñërïç çlæss \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typeArgsTooFew": "[6PAb0][นั้Tøø fëw tÿpë ærgµmëñts prøvïðëð før \"{ñæmë}\"; ëxpëçtëð {ëxpëçtëð} þµt rëçëïvëð {rëçëïvëð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeArgsTooMany": "[NKF2Z][นั้Tøø mæñÿ tÿpë ærgµmëñts prøvïðëð før \"{ñæmë}\"; ëxpëçtëð {ëxpëçtëð} þµt rëçëïvëð {rëçëïvëð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeAssignmentMismatch": "[wwjSP][นั้Tÿpë \"{søµrçëTÿpë}\" ïs ñøt æssïgñæþlë tø ðëçlærëð tÿpë \"{ðëstTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeAssignmentMismatchWildcard": "[qdgVA][นั้Ïmpørt sÿmþøl \"{ñæmë}\" hæs tÿpë \"{søµrçëTÿpë}\", whïçh ïs ñøt æssïgñæþlë tø ðëçlærëð tÿpë \"{ðëstTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeCallNotAllowed": "[OGMmG][นั้type() çæll shøµlð ñøt þë µsëð ïñ tÿpë ëxprëssïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typeCheckOnly": "[cSmKj][นั้\"{ñæmë}\" ïs mærkëð æs @type_check_only æñð çæñ þë µsëð øñlÿ ïñ tÿpë æññøtætïøñsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typeCommentDeprecated": "[SRhVz][นั้Üsë øf type çømmëñts ïs ðëprëçætëð; µsë type æññøtætïøñ ïñstëæðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeExpectedClass": "[r0pdu][นั้Ëxpëçtëð çlæss þµt rëçëïvëð \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typeFormArgs": "[ivrdh][นั้\"TypeForm\" æççëpts æ sïñglë pøsïtïøñæl ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "typeGuardArgCount": "[Zl47K][นั้Ëxpëçtëð æ sïñglë tÿpë ærgµmëñt æftër \"TypeGuard\" ør \"TypeIs\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typeGuardParamCount": "[I3HUH][นั้Üsër-ðëfïñëð tÿpë gµærð fµñçtïøñs æñð mëthøðs mµst hævë æt lëæst øñë ïñpµt pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typeIsReturnType": "[5bBbd][นั้Rëtµrñ tÿpë øf TypeIs (\"{rëtµrñTÿpë}\") ïs ñøt çøñsïstëñt wïth vælµë pæræmëtër tÿpë (\"{tÿpë}\")Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typeNotAwaitable": "[NZ9Yu][นั้\"{tÿpë}\" ïs ñøt awaitableẤğ倪İЂҰक्र्นั้ढूँ]", + 
"typeNotIntantiable": "[f3xEe][นั้\"{tÿpë}\" çæññøt þë ïñstæñtïætëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeNotIterable": "[XMDzF][นั้\"{tÿpë}\" ïs ñøt ïtëræþlëẤğ倪İЂҰक्र्นั้ढूँ]", + "typeNotSpecializable": "[ZCsyD][นั้Çøµlð ñøt spëçïælïzë tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typeNotSubscriptable": "[fpefi][นั้Øþjëçt øf tÿpë \"{tÿpë}\" ïs ñøt sµþsçrïptæþlëẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typeNotSupportBinaryOperator": "[C0OGX][นั้Øpërætør \"{øpërætør}\" ñøt sµppørtëð før tÿpës \"{lëftTÿpë}\" æñð \"{rïghtTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "typeNotSupportBinaryOperatorBidirectional": "[25nah][นั้Øpërætør \"{øpërætør}\" ñøt sµppørtëð før tÿpës \"{lëftTÿpë}\" æñð \"{rïghtTÿpë}\" whëñ ëxpëçtëð tÿpë ïs \"{ëxpëçtëðTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typeNotSupportUnaryOperator": "[f2pEG][นั้Øpërætør \"{øpërætør}\" ñøt sµppørtëð før tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeNotSupportUnaryOperatorBidirectional": "[Z51QN][นั้Øpërætør \"{øpërætør}\" ñøt sµppørtëð før tÿpë \"{tÿpë}\" whëñ ëxpëçtëð tÿpë ïs \"{ëxpëçtëðTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typeNotUsableWith": "[R7VpZ][นั้Øþjëçt øf tÿpë \"{tÿpë}\" çæññøt þë µsëð wïth \"wïth\" þëçæµsë ït ðøës ñøt çørrëçtlÿ ïmplëmëñt {mëthøð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeNotUsableWithAsync": "[VF7Rz][นั้Øþjëçt øf tÿpë \"{tÿpë}\" çæññøt þë µsëð wïth \"æsÿñç wïth\" þëçæµsë ït ðøës ñøt çørrëçtlÿ ïmplëmëñt {mëthøð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typeParameterBoundNotAllowed": "[iA0kz][นั้ßøµñð ør çøñstræïñt çæññøt þë µsëð wïth æ værïæðïç tÿpë pæræmëtër ør ParamSpecẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "typeParameterConstraintTuple": "[8wa57][นั้Tÿpë pæræmëtër çøñstræïñt mµst þë æ tµplë øf twø ør mørë tÿpësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeParameterExistingTypeParameter": "[M2QXP][นั้Tÿpë pæræmëtër \"{ñæmë}\" ïs ælrëæðÿ ïñ µsëẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "typeParameterNotDeclared": "[WD9B6][นั้Tÿpë 
pæræmëtër \"{ñæmë}\" ïs ñøt ïñçlµðëð ïñ thë tÿpë pæræmëtër lïst før \"{çøñtæïñër}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typeParametersMissing": "[7nPE2][นั้Æt lëæst øñë tÿpë pæræmëtër mµst þë spëçïfïëðẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typePartiallyUnknown": "[K72xm][นั้Tÿpë øf \"{ñæmë}\" ïs pærtïællÿ µñkñøwñẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "typeUnknown": "[flab2][นั้Tÿpë øf \"{ñæmë}\" ïs µñkñøwñẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeVarAssignedName": "[AnBke][นั้TypeVar mµst þë æssïgñëð tø æ værïæþlë ñæmëð \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeVarAssignmentMismatch": "[IYCuH][นั้Tÿpë \"{tÿpë}\" çæññøt þë æssïgñëð tø tÿpë værïæþlë \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typeVarBoundAndConstrained": "[nSFES][นั้TypeVar çæññøt þë þøth þøµñð æñð çøñstræïñëðẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typeVarBoundGeneric": "[scFkM][นั้TypeVar þøµñð tÿpë çæññøt þë gëñërïçẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typeVarConstraintGeneric": "[k7N05][นั้TypeVar çøñstræïñt tÿpë çæññøt þë gëñërïçẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "typeVarDefaultBoundMismatch": "[knxtI][นั้TypeVar ðëfæµlt tÿpë mµst þë æ sµþtÿpë øf thë þøµñð tÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeVarDefaultConstraintMismatch": "[BlQvu][นั้TypeVar ðëfæµlt tÿpë mµst þë øñë øf thë çøñstræïñëð tÿpësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typeVarDefaultIllegal": "[Z5lrX][นั้Tÿpë værïæþlë ðëfæµlt tÿpës rëqµïrë Pÿthøñ 3.13 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeVarDefaultInvalidTypeVar": "[bOQ21][นั้Tÿpë pæræmëtër \"{ñæmë}\" hæs æ ðëfæµlt tÿpë thæt rëfërs tø øñë ør mørë tÿpë værïæþlës thæt ærë øµt øf sçøpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typeVarFirstArg": "[XBVgA][นั้Ëxpëçtëð ñæmë øf TypeVar æs fïrst ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "typeVarInvalidForMemberVariable": "[m45Yw][นั้Ættrïþµtë tÿpë çæññøt µsë tÿpë værïæþlë \"{ñæmë}\" sçøpëð tø løçæl mëthøðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typeVarNoMember": "[Trelb][นั้TypeVar \"{tÿpë}\" hæs ñø ættrïþµtë \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + 
"typeVarNotSubscriptable": "[3KoEm][นั้TypeVar \"{tÿpë}\" ïs ñøt sµþsçrïptæþlëẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "typeVarNotUsedByOuterScope": "[on7uQ][นั้Tÿpë værïæþlë \"{ñæmë}\" hæs ñø mëæñïñg ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeVarPossiblyUnsolvable": "[PP5xz][นั้Tÿpë værïæþlë \"{ñæmë}\" mæÿ gø µñsølvëð ïf çællër sµpplïës ñø ærgµmëñt før pæræmëtër \"{pæræm}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typeVarSingleConstraint": "[51MwX][นั้TypeVar mµst hævë æt lëæst twø çøñstræïñëð tÿpësẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "typeVarTupleConstraints": "[ouP8u][นั้TypeVarTuple çæññøt hævë vælµë çøñstræïñtsẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "typeVarTupleContext": "[Q8vE2][นั้TypeVarTuple ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "typeVarTupleDefaultNotUnpacked": "[S2joz][นั้TypeVarTuple ðëfæµlt tÿpë mµst þë æñ µñpæçkëð tuple ør TypeVarTupleẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeVarTupleMustBeUnpacked": "[TA5HX][นั้Üñpæçk øpërætør ïs rëqµïrëð før TypeVarTuple vælµëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typeVarTupleUnknownParam": "[fOW23][นั้\"{ñæmë}\" ïs µñkñøwñ pæræmëtër tø TypeVarTupleẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typeVarUnknownParam": "[veXvU][นั้\"{ñæmë}\" ïs µñkñøwñ pæræmëtër tø TypeVarẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "typeVarUsedByOuterScope": "[GJ5N3][นั้TypeVar \"{ñæmë}\" ïs ælrëæðÿ ïñ µsë þÿ æñ øµtër sçøpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeVarUsedOnlyOnce": "[vSn0W][นั้TypeVar \"{ñæmë}\" æppëærs øñlÿ øñçë ïñ gëñërïç fµñçtïøñ sïgñætµrëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeVarVariance": "[1Dxdn][นั้TypeVar çæññøt þë þøth çøværïæñt æñð çøñtræværïæñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typeVarWithDefaultFollowsVariadic": "[h1V5a][นั้TypeVar \"{tÿpëVærÑæmë}\" hæs æ ðëfæµlt vælµë æñð çæññøt følløw TypeVarTuple \"{værïæðïçÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeVarWithoutDefault": "[w630R][นั้\"{ñæmë}\" çæññøt æppëær æftër \"{øthër}\" ïñ tÿpë pæræmëtër lïst þëçæµsë ït hæs ñø ðëfæµlt 
tÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typeVarsNotInGenericOrProtocol": "[ydmAV][นั้Generic[] ør Protocol[] mµst ïñçlµðë æll tÿpë værïæþlësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typedDictAccess": "[55CCf][นั้Çøµlð ñøt æççëss ïtëm ïñ TypedDictẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typedDictAssignedName": "[Dkf5M][นั้TypedDict mµst þë æssïgñëð tø æ værïæþlë ñæmëð \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typedDictBadVar": "[OL8Ox][นั้TypedDict çlæssës çæñ çøñtæïñ øñlÿ tÿpë æññøtætïøñsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typedDictBaseClass": "[HxyA2][นั้Æll þæsë çlæssës før TypedDict çlæssës mµst ælsø þë TypedDict çlæssësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typedDictBoolParam": "[GALOD][นั้Ëxpëçtëð \"{ñæmë}\" pæræmëtër tø hævë æ vælµë øf True ør FalseẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typedDictClosedExtras": "[mlkJO][นั้ßæsë çlæss \"{ñæmë}\" ïs æ TypedDict thæt lïmïts thë tÿpë øf ëxtræ ïtëms tø tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typedDictClosedFalseNonOpenBase": "[ifIlm][นั้ßæsë çlæss \"{ñæmë}\" ïs ñøt æñ øpëñ TypedDict; closed=Fælsë ïs ñøt ælløwëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "typedDictClosedNoExtras": "[BCyXd][นั้ßæsë çlæss \"{ñæmë}\" ïs æ closed TypedDict; ëxtræ ïtëms ærë ñøt ælløwëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typedDictDelete": "[bdBu7][นั้Çøµlð ñøt ðëlëtë ïtëm ïñ TypedDictẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typedDictEmptyName": "[h45e7][นั้Ñæmës wïthïñ æ TypedDict çæññøt þë ëmptÿẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "typedDictEntryName": "[NT4np][นั้Ëxpëçtëð strïñg lïtëræl før ðïçtïøñærÿ ëñtrÿ ñæmëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typedDictEntryUnique": "[nWy0L][นั้Ñæmës wïthïñ æ ðïçtïøñærÿ mµst þë µñïqµëẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "typedDictExtraArgs": "[0gX32][นั้Ëxtræ TypedDict ærgµmëñts ñøt sµppørtëðẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "typedDictExtraItemsClosed": "[5BDXE][นั้TypedDict çæñ µsë ëïthër \"closed\" ør \"extra_items\" þµt ñøt þøthẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typedDictFieldNotRequiredRedefinition": 
"[rNYD1][นั้TypedDict ïtëm \"{ñæmë}\" çæññøt þë rëðëfïñëð æs NotRequiredẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typedDictFieldReadOnlyRedefinition": "[8IFAz][นั้TypedDict ïtëm \"{ñæmë}\" çæññøt þë rëðëfïñëð æs ReadOnlyẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typedDictFieldRequiredRedefinition": "[lpw97][นั้TypedDict ïtëm \"{ñæmë}\" çæññøt þë rëðëfïñëð æs RequiredẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typedDictFirstArg": "[OPlNk][นั้Ëxpëçtëð TypedDict çlæss ñæmë æs fïrst ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "typedDictInClassPattern": "[IqJCA][นั้TypedDict çlæss ñøt ælløwëð ïñ çlæss pættërñẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typedDictInitsubclassParameter": "[HMpfK][นั้TypedDict ðøës ñøt sµppørt __init_subclass__ pæræmëtër \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typedDictNotAllowed": "[UWg4F][นั้\"TypedDict\" çæññøt þë µsëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "typedDictSecondArgDict": "[mwrv7][นั้Ëxpëçtëð dict ør këÿwørð pæræmëtër æs sëçøñð pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typedDictSecondArgDictEntry": "[oAT5Z][นั้Ëxpëçtëð sïmplë ðïçtïøñærÿ ëñtrÿẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typedDictSet": "[30hTC][นั้Çøµlð ñøt æssïgñ ïtëm ïñ TypedDictẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "unaccessedClass": "[dou8i][นั้Çlæss \"{ñæmë}\" ïs ñøt æççëssëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "unaccessedFunction": "[AdgDz][นั้Fµñçtïøñ \"{ñæmë}\" ïs ñøt æççëssëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "unaccessedImport": "[2a90g][นั้Ïmpørt \"{ñæmë}\" ïs ñøt æççëssëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "unaccessedSymbol": "[MmnM7][นั้\"{ñæmë}\" ïs ñøt æççëssëðẤğ倪İЂҰक्र्นั้ढूँ]", + "unaccessedVariable": "[n5l1e][นั้Værïæþlë \"{ñæmë}\" ïs ñøt æççëssëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "unannotatedFunctionSkipped": "[Ovgyl][นั้Æñælÿsïs øf fµñçtïøñ \"{ñæmë}\" ïs skïppëð þëçæµsë ït ïs µñæññøtætëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "unaryOperationNotAllowed": "[2WB31][นั้Üñærÿ øpërætør ñøt ælløwëð ïñ tÿpë ëxprëssïøñẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "unexpectedAsyncToken": "[fKSJb][นั้Ëxpëçtëð \"def\", \"with\" ør \"for\" tø følløw 
\"async\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "unexpectedEof": "[SyST0][นั้Üñëxpëçtëð ËØFẤğ倪İЂҰนั้ढूँ]", + "unexpectedExprToken": "[MtBsu][นั้Üñëxpëçtëð tøkëñ æt ëñð øf ëxprëssïøñẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "unexpectedIndent": "[uZUVS][นั้Üñëxpëçtëð ïñðëñtætïøñẤğ倪İЂҰक्र्นั้ढूँ]", + "unexpectedUnindent": "[yqwy4][นั้Üñïñðëñt ñøt ëxpëçtëðẤğ倪İЂҰक्นั้ढूँ]", + "unhashableDictKey": "[pIvHj][นั้Ðïçtïøñærÿ këÿ mµst þë hæshæþlëẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "unhashableSetEntry": "[rWf72][นั้Set ëñtrÿ mµst þë hæshæþlëẤğ倪İЂҰक्र्นั้ढूँ]", + "uninitializedAbstractVariables": "[SpCPH][นั้Værïæþlës ðëfïñëð ïñ æþstræçt þæsë çlæss ærë ñøt ïñïtïælïzëð ïñ final çlæss \"{çlæssTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "uninitializedInstanceVariable": "[5pgFw][นั้Ïñstæñçë værïæþlë \"{ñæmë}\" ïs ñøt ïñïtïælïzëð ïñ thë çlæss þøðÿ ør __init__ mëthøðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "unionForwardReferenceNotAllowed": "[MOLby][นั้Union sÿñtæx çæññøt þë µsëð wïth strïñg øpëræñð; µsë qµøtës ærøµñð ëñtïrë ëxprëssïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "unionSyntaxIllegal": "[vbTDG][นั้Æltërñætïvë sÿñtæx før µñïøñs rëqµïrës Pÿthøñ 3.10 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "unionTypeArgCount": "[vc6vA][นั้Union rëqµïrës twø ør mørë tÿpë ærgµmëñtsẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "unionUnpackedTuple": "[owRjE][นั้Union çæññøt ïñçlµðë æñ µñpæçkëð tupleẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "unionUnpackedTypeVarTuple": "[a6msY][นั้Union çæññøt ïñçlµðë æñ µñpæçkëð TypeVarTupleẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "unnecessaryCast": "[QgPoI][นั้Üññëçëssærÿ \"cast\" çæll; tÿpë ïs ælrëæðÿ \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "unnecessaryIsInstanceAlways": "[gX4s7][นั้Üññëçëssærÿ isinstance çæll; \"{tëstTÿpë}\" ïs ælwæÿs æñ ïñstæñçë øf \"{çlæssTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "unnecessaryIsInstanceNever": "[60nSE][นั้Üññëçëssærÿ isinstance çæll; \"{tëstTÿpë}\" ïs ñëvër æñ ïñstæñçë øf \"{çlæssTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "unnecessaryIsSubclassAlways": 
"[BzHtx][นั้Üññëçëssærÿ issubclass çæll; \"{tëstTÿpë}\" ïs ælwæÿs æ sµþçlæss øf \"{çlæssTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "unnecessaryIsSubclassNever": "[bOMnf][นั้Üññëçëssærÿ issubclass çæll; \"{tëstTÿpë}\" ïs ñëvër æ sµþçlæss øf \"{çlæssTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "unnecessaryPyrightIgnore": "[7QhdX][นั้Üññëçëssærÿ \"# pyright: ignore\" çømmëñtẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "unnecessaryPyrightIgnoreRule": "[0ESoQ][นั้Üññëçëssærÿ \"# pyright: ignore\" rµlë: \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "unnecessaryTypeIgnore": "[IoWr9][นั้Üññëçëssærÿ \"# type: ignore\" çømmëñtẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "unpackArgCount": "[bkAT1][นั้Ëxpëçtëð æ sïñglë tÿpë ærgµmëñt æftër \"Unpack\"Ấğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "unpackExpectedTypeVarTuple": "[CWX8o][นั้Ëxpëçtëð TypeVarTuple ør tuple æs tÿpë ærgµmëñt før UnpackẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "unpackExpectedTypedDict": "[ha9qw][นั้Ëxpëçtëð TypedDict tÿpë ærgµmëñt før UnpackẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "unpackIllegalInComprehension": "[7a4pV][นั้Üñpæçk øpërætïøñ ñøt ælløwëð ïñ çømprëhëñsïøñẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "unpackInAnnotation": "[6gqFu][นั้Üñpæçk øpërætør ñøt ælløwëð ïñ tÿpë ëxprëssïøñẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "unpackInDict": "[eKn69][นั้Üñpæçk øpërætïøñ ñøt ælløwëð ïñ ðïçtïøñærïësẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "unpackInSet": "[lKyO0][นั้Üñpæçk øpërætør ñøt ælløwëð wïthïñ æ setẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "unpackNotAllowed": "[MZq6e][นั้Unpack ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "unpackOperatorNotAllowed": "[lMq2B][นั้Üñpæçk øpërætïøñ ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "unpackTuplesIllegal": "[RJvzW][นั้Üñpæçk øpërætïøñ ñøt ælløwëð ïñ tµplës prïør tø Pÿthøñ 3.8Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "unpackedArgInTypeArgument": "[skxlo][นั้Üñpæçkëð ærgµmëñts çæññøt þë µsëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "unpackedArgWithVariadicParam": "[ZP3kP][นั้Üñpæçkëð ærgµmëñt çæññøt þë µsëð før TypeVarTuple 
pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "unpackedDictArgumentNotMapping": "[iSTnU][นั้Ærgµmëñt ëxprëssïøñ æftër ** mµst þë æ mæppïñg wïth æ \"str\" këÿ tÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "unpackedDictSubscriptIllegal": "[slATr][นั้Ðïçtïøñærÿ µñpæçk øpërætør ïñ sµþsçrïpt ïs ñøt ælløwëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "unpackedSubscriptIllegal": "[2CpZz][นั้Üñpæçk øpërætør ïñ sµþsçrïpt rëqµïrës Pÿthøñ 3.11 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "unpackedTypeVarTupleExpected": "[tgdHs][นั้Ëxpëçtëð µñpæçkëð TypeVarTuple; µsë Unpack[{name1}] ør *{name2}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "unpackedTypedDictArgument": "[iCgjR][นั้Üñæþlë tø mætçh µñpæçkëð TypedDict ærgµmëñt tø pæræmëtërsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "unreachableCodeCondition": "[fTD3C][นั้Çøðë ïs ñøt æñælÿzëð þëçæµsë çøñðïtïøñ ïs stætïçællÿ ëvælµætëð æs fælsëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "unreachableCodeStructure": "[0OIU1][นั้Çøðë ïs strµçtµrællÿ µñrëæçhæþlëẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "unreachableCodeType": "[v80nR][นั้Tÿpë æñælÿsïs ïñðïçætës çøðë ïs µñrëæçhæþlëẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "unreachableExcept": "[zFMWg][นั้Except çlæµsë ïs µñrëæçhæþlë þëçæµsë ëxçëptïøñ ïs ælrëæðÿ hæñðlëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "unsupportedDunderAllOperation": "[KsX0f][นั้Øpërætïøñ øñ \"__all__\" ïs ñøt sµppørtëð, sø ëxpørtëð sÿmþøl lïst mæÿ þë ïñçørrëçtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "unusedCallResult": "[9IsV5][นั้Rësµlt øf çæll ëxprëssïøñ ïs øf tÿpë \"{tÿpë}\" æñð ïs ñøt µsëð; æssïgñ tø værïæþlë \"_\" ïf thïs ïs ïñtëñtïøñælẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "unusedCoroutine": "[nQUJ2][นั้Rësµlt øf async fµñçtïøñ çæll ïs ñøt µsëð; µsë \"æwæït\" ør æssïgñ rësµlt tø værïæþlëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "unusedExpression": "[ghmaU][นั้Ëxprëssïøñ vælµë ïs µñµsëðẤğ倪İЂҰक्र्นั้ढूँ]", + "varAnnotationIllegal": "[v2cs9][นั้Tÿpë æññøtætïøñs før værïæþlës rëqµïrës Pÿthøñ 3.6 ør ñëwër; µsë type çømmëñt før çømpætïþïlïtÿ wïth prëvïøµs 
vërsïøñsẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "variableFinalOverride": "[LcrNS][นั้Værïæþlë \"{ñæmë}\" ïs mærkëð Final æñð øvërrïðës ñøñ-Final værïæþlë øf sæmë ñæmë ïñ çlæss \"{çlæssÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "variadicTypeArgsTooMany": "[1QX0D][นั้Tÿpë ærgµmëñt lïst çæñ hævë æt møst øñë µñpæçkëð TypeVarTuple ør tupleẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "variadicTypeParamTooManyAlias": "[43VIR][นั้Tÿpë ælïæs çæñ hævë æt møst øñë TypeVarTuple tÿpë pæræmëtër þµt rëçëïvëð mµltïplë ({ñæmës})Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "variadicTypeParamTooManyClass": "[fboqC][นั้Gëñërïç çlæss çæñ hævë æt møst øñë TypeVarTuple tÿpë pæræmëtër þµt rëçëïvëð mµltïplë ({ñæmës})Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "walrusIllegal": "[iR3y3][นั้Øpërætør \":=\" rëqµïrës Pÿthøñ 3.8 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "walrusNotAllowed": "[kdD5j][นั้Øpërætør \":=\" ïs ñøt ælløwëð ïñ thïs çøñtëxt wïthøµt sµrrøµñðïñg pærëñthësësẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "wildcardInFunction": "[NyGOv][นั้Wïlðçærð import ñøt ælløwëð wïthïñ æ çlæss ør fµñçtïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "wildcardLibraryImport": "[Yk3ai][นั้Wïlðçærð import frøm æ lïþrærÿ ñøt ælløwëðẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "wildcardPatternTypePartiallyUnknown": "[eRR5M][นั้Tÿpë çæptµrëð þÿ wïlðçærð pættërñ ïs pærtïællÿ µñkñøwñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "wildcardPatternTypeUnknown": "[Bo3gT][นั้Tÿpë çæptµrëð þÿ wïlðçærð pættërñ ïs µñkñøwñẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "yieldFromIllegal": "[DkXto][นั้Üsë øf \"yield from\" rëqµïrës Pÿthøñ 3.3 ør ñëwërẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "yieldFromOutsideAsync": "[ZONEz][นั้\"yield from\" ñøt ælløwëð ïñ æñ async fµñçtïøñẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "yieldOutsideFunction": "[2lDBQ][นั้\"yield\" ñøt ælløwëð øµtsïðë øf æ fµñçtïøñ ør læmþðæẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "yieldWithinComprehension": "[yALS5][นั้\"yield\" ñøt ælløwëð ïñsïðë æ çømprëhëñsïøñẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + 
"zeroCaseStatementsFound": "[ArU3j][นั้Match stætëmëñt mµst ïñçlµðë æt lëæst øñë case stætëmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "zeroLengthTupleNotAllowed": "[3gVpF][นั้Zërø-lëñgth tuple ïs ñøt ælløwëð ïñ thïs çøñtëxtẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]" + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "[Mws6g][นั้\"Annotated\" spëçïæl førm çæññøt þë µsëð wïth ïñstæñçë æñð çlæss çhëçksẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "argParam": "[nmvvb][นั้Ærgµmëñt çørrëspøñðs tø pæræmëtër \"{pæræmÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "argParamFunction": "[7Xwg8][นั้Ærgµmëñt çørrëspøñðs tø pæræmëtër \"{pæræmÑæmë}\" ïñ fµñçtïøñ \"{fµñçtïøñÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "argsParamMissing": "[vg3b8][นั้Pæræmëtër \"*{pæræmÑæmë}\" hæs ñø çørrëspøñðïñg pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "argsPositionOnly": "[sNlU1][นั้Pøsïtïøñ-øñlÿ pæræmëtër mïsmætçh; ëxpëçtëð {ëxpëçtëð} þµt rëçëïvëð {rëçëïvëð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "argumentType": "[JJxeD][นั้Ærgµmëñt tÿpë ïs \"{tÿpë}\"Ấğ倪İЂҰक्र्นั้ढूँ]", + "argumentTypes": "[Omlwm][นั้Ærgµmëñt tÿpës: ({tÿpës})Ấğ倪İЂҰक्र्นั้ढूँ]", + "assignToNone": "[z249G][นั้Tÿpë ïs ñøt æssïgñæþlë tø \"None\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "asyncHelp": "[2nasS][นั้Ðïð ÿøµ mëæñ \"async with\"?Ấğ倪İЂҰक्र्นั้ढूँ]", + "baseClassIncompatible": "[oW6Ip][นั้ßæsë çlæss \"{þæsëÇlæss}\" ïs ïñçømpætïþlë wïth tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "baseClassIncompatibleSubclass": "[mMUCH][นั้ßæsë çlæss \"{þæsëÇlæss}\" ðërïvës frøm \"{sµþçlæss}\" whïçh ïs ïñçømpætïþlë wïth tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "baseClassOverriddenType": "[Hp8Sl][นั้ßæsë çlæss \"{þæsëÇlæss}\" prøvïðës tÿpë \"{tÿpë}\", whïçh ïs øvërrïððëñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "baseClassOverridesType": "[P7N4Y][นั้ßæsë çlæss \"{þæsëÇlæss}\" øvërrïðës wïth tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "bytesTypePromotions": "[qIXYb][นั้§ët disableBytesTypePromotions tø false tø ëñæþlë tÿpë prømøtïøñ þëhævïør før 
\"bytearray\" æñð \"memoryview\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "conditionalRequiresBool": "[k1G9a][นั้Mëthøð __bool__ før tÿpë \"{øpëræñðTÿpë}\" rëtµrñs tÿpë \"{þøølRëtµrñTÿpë}\" ræthër thæñ \"þøøl\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "dataClassFieldLocation": "[vQxtf][นั้Fïëlð ðëçlærætïøñẤğ倪İЂҰक्นั้ढूँ]", + "dataClassFrozen": "[d4uiK][นั้\"{ñæmë}\" ïs frøzëñẤğ倪İЂҰक्นั้ढूँ]", + "dataProtocolUnsupported": "[7gIT2][นั้\"{ñæmë}\" ïs æ ðætæ prøtøçølẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "descriptorAccessBindingFailed": "[RiEhE][นั้Fæïlëð tø þïñð mëthøð \"{ñæmë}\" før ðësçrïptør çlæss \"{çlæssÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "descriptorAccessCallFailed": "[8EXvg][นั้Fæïlëð tø çæll mëthøð \"{ñæmë}\" før ðësçrïptør çlæss \"{çlæssÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "finalMethod": "[zz1yN][นั้Final mëthøðẤğ倪İЂนั้ढूँ]", + "functionParamDefaultMissing": "[yWAIy][นั้Pæræmëtër \"{ñæmë}\" ïs mïssïñg ðëfæµlt ærgµmëñtẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "functionParamName": "[NrJqx][นั้Pæræmëtër ñæmë mïsmætçh: \"{ðëstÑæmë}\" vërsµs \"{srçÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "functionParamPositionOnly": "[XOhUP][นั้Pøsïtïøñ-øñlÿ pæræmëtër mïsmætçh; pæræmëtër \"{ñæmë}\" ïs ñøt pøsïtïøñ-øñlÿẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "functionReturnTypeMismatch": "[x4qH0][นั้Fµñçtïøñ rëtµrñ tÿpë \"{søµrçëTÿpë}\" ïs ïñçømpætïþlë wïth tÿpë \"{ðëstTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "functionTooFewParams": "[575uy][นั้Fµñçtïøñ æççëpts tøø fëw pøsïtïøñæl pæræmëtërs; ëxpëçtëð {ëxpëçtëð} þµt rëçëïvëð {rëçëïvëð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "functionTooManyParams": "[zj9vw][นั้Fµñçtïøñ æççëpts tøø mæñÿ pøsïtïøñæl pæræmëtërs; ëxpëçtëð {ëxpëçtëð} þµt rëçëïvëð {rëçëïvëð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "genericClassNotAllowed": "[MDEt3][นั้Gëñërïç tÿpë wïth tÿpë ærgµmëñts ñøt ælløwëð før ïñstæñçë ør çlæss çhëçksẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "incompatibleDeleter": "[LCJuj][นั้Property deleter mëthøð 
ïs ïñçømpætïþlëẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "incompatibleGetter": "[yds2G][นั้Property getter mëthøð ïs ïñçømpætïþlëẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "incompatibleSetter": "[GDoso][นั้Property setter mëthøð ïs ïñçømpætïþlëẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "initMethodLocation": "[D4O2l][นั้Thë __init__ mëthøð ïs ðëfïñëð ïñ çlæss \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "initMethodSignature": "[EULjB][นั้§ïgñætµrë øf __init__ ïs \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "initSubclassLocation": "[eEcCS][นั้Thë __init_subclass__ mëthøð ïs ðëfïñëð ïñ çlæss \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "invariantSuggestionDict": "[OIoHo][นั้Çøñsïðër swïtçhïñg frøm \"dict\" tø \"Mapping\" whïçh ïs çøværïæñt ïñ thë vælµë tÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "invariantSuggestionList": "[irYWI][นั้Çøñsïðër swïtçhïñg frøm \"list\" tø \"Sequence\" whïçh ïs çøværïæñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "invariantSuggestionSet": "[64U47][นั้Çøñsïðër swïtçhïñg frøm \"set\" tø \"Container\" whïçh ïs çøværïæñtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "isinstanceClassNotSupported": "[uTDu4][นั้\"{tÿpë}\" ïs ñøt sµppørtëð før ïñstæñçë æñð çlæss çhëçksẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "keyNotRequired": "[K1bDP][นั้\"{ñæmë}\" ïs ñøt æ rëqµïrëð këÿ ïñ \"{tÿpë}\", sø æççëss mæÿ rësµlt ïñ rµñtïmë ëxçëptïøñẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "keyReadOnly": "[dhAH3][นั้\"{ñæmë}\" ïs æ rëæð-øñlÿ këÿ ïñ \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "keyRequiredDeleted": "[YeZa5][นั้\"{ñæmë}\" ïs æ rëqµïrëð këÿ æñð çæññøt þë ðëlëtëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "keyUndefined": "[6mQGu][นั้\"{ñæmë}\" ïs ñøt æ ðëfïñëð këÿ ïñ \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "kwargsParamMissing": "[KHgb2][นั้Pæræmëtër \"**{pæræmÑæmë}\" hæs ñø çørrëspøñðïñg pæræmëtërẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "listAssignmentMismatch": "[fERKI][นั้Tÿpë \"{tÿpë}\" ïs ïñçømpætïþlë wïth tærgët lïstẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "literalAssignmentMismatch": "[17LiQ][นั้\"{søµrçëTÿpë}\" ïs ñøt æssïgñæþlë tø tÿpë 
\"{ðëstTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "literalNotAllowed": "[kRsub][นั้\"Literal\" spëçïæl førm çæññøt þë µsëð wïth ïñstæñçë æñð çlæss çhëçksẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "matchIsNotExhaustiveHint": "[3NR39][นั้Ïf ëxhæµstïvë hæñðlïñg ïs ñøt ïñtëñðëð, æðð \"case _: pass\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "matchIsNotExhaustiveType": "[9RN1P][นั้Üñhæñðlëð tÿpë: \"{tÿpë}\"Ấğ倪İЂҰक्र्นั้ढूँ]", + "memberAssignment": "[1WFCt][นั้Ëxprëssïøñ øf tÿpë \"{tÿpë}\" çæññøt þë æssïgñëð tø ættrïþµtë \"{ñæmë}\" øf çlæss \"{çlæssTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "memberIsAbstract": "[l912U][นั้\"{tÿpë}.{ñæmë}\" ïs ñøt ïmplëmëñtëðẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "memberIsAbstractMore": "[dgfwa][นั้æñð {çøµñt} mørëẤğ倪İЂҰนั้ढूँ]...", + "memberIsClassVarInProtocol": "[ZZeb4][นั้\"{ñæmë}\" ïs ðëfïñëð æs æ ClassVar ïñ prøtøçølẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "memberIsInitVar": "[0SGIB][นั้\"{ñæmë}\" ïs æñ init-only fïëlðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "memberIsInvariant": "[rBPX6][นั้\"{ñæmë}\" ïs ïñværïæñt þëçæµsë ït ïs mµtæþlëẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "memberIsNotClassVarInClass": "[bKhkE][นั้\"{ñæmë}\" mµst þë ðëfïñëð æs æ ClassVar tø þë çømpætïþlë wïth prøtøçølẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "memberIsNotClassVarInProtocol": "[OAmE1][นั้\"{ñæmë}\" ïs ñøt ðëfïñëð æs æ ClassVar ïñ prøtøçølẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "memberIsNotReadOnlyInProtocol": "[TKk1U][นั้\"{ñæmë}\" ïs ñøt rëæð-øñlÿ ïñ prøtøçølẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "memberIsReadOnlyInProtocol": "[xOSqy][นั้\"{ñæmë}\" ïs rëæð-øñlÿ ïñ prøtøçølẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "memberIsWritableInProtocol": "[x53Kg][นั้\"{ñæmë}\" ïs wrïtæþlë ïñ prøtøçølẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "memberSetClassVar": "[2pVfQ][นั้Ættrïþµtë \"{ñæmë}\" çæññøt þë æssïgñëð thrøµgh æ çlæss ïñstæñçë þëçæµsë ït ïs æ ClassVarẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "memberTypeMismatch": "[IHN4x][นั้\"{ñæmë}\" ïs æñ ïñçømpætïþlë tÿpëẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "memberUnknown": "[7kDIF][นั้Ættrïþµtë \"{ñæmë}\" ïs 
µñkñøwñẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "metaclassConflict": "[fjWW1][นั้Mëtæçlæss \"{mëtæçlæss1}\" çøñflïçts wïth \"{mëtæçlæss2}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "missingDeleter": "[5IVNI][นั้Property deleter mëthøð ïs mïssïñgẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "missingGetter": "[Mzn4K][นั้Property getter mëthøð ïs mïssïñgẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "missingSetter": "[goeIY][นั้Property setter mëthøð ïs mïssïñgẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "namedParamMissingInDest": "[dc07X][นั้Ëxtræ pæræmëtër \"{ñæmë}\"Ấğ倪İЂҰक्र्นั้ढूँ]", + "namedParamMissingInSource": "[N59fC][นั้Mïssïñg këÿwørð pæræmëtër \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまนั้ढूँ]", + "namedParamTypeMismatch": "[9CAV6][นั้Këÿwørð pæræmëtër \"{ñæmë}\" øf tÿpë \"{søµrçëTÿpë}\" ïs ïñçømpætïþlë wïth tÿpë \"{ðëstTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "namedTupleNotAllowed": "[gAlSp][นั้NamedTuple çæññøt þë µsëð før ïñstæñçë ør çlæss çhëçksẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "newMethodLocation": "[n0dxL][นั้Thë __new__ mëthøð ïs ðëfïñëð ïñ çlæss \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "newMethodSignature": "[NeWKO][นั้§ïgñætµrë øf __new__ ïs \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "newTypeClassNotAllowed": "[JQmcY][นั้Tÿpë çrëætëð wïth NewType çæññøt þë µsëð wïth ïñstæñçë æñð çlæss çhëçksẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "noOverloadAssignable": "[FJ88c][นั้Ñø øvërløæðëð fµñçtïøñ mætçhës tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "noneNotAllowed": "[Yn8Lx][นั้None çæññøt þë µsëð før ïñstæñçë ør çlæss çhëçksẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "orPatternMissingName": "[kgiPM][นั้Mïssïñg ñæmës: {ñæmë}Ấğ倪İЂҰक्นั้ढूँ]", + "overloadIndex": "[vNPxL][นั้Øvërløæð {ïñðëx} ïs thë çløsëst mætçhẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "overloadNotAssignable": "[BA2kK][นั้Øñë ør mørë øvërløæðs øf \"{ñæmë}\" ïs ñøt æssïgñæþlëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "overloadSignature": "[NPzwf][นั้Øvërløæð sïgñætµrë ïs ðëfïñëð hërëẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "overriddenMethod": "[CcUB2][นั้Øvërrïððëñ mëthøðẤğ倪İЂҰक्นั้ढूँ]", + "overriddenSymbol": "[cvpXz][นั้Øvërrïððëñ 
sÿmþølẤğ倪İЂҰक्นั้ढूँ]", + "overrideInvariantMismatch": "[uODzM][นั้Øvërrïðë tÿpë \"{øvërrïðëTÿpë}\" ïs ñøt thë sæmë æs þæsë tÿpë \"{þæsëTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "overrideIsInvariant": "[j45KZ][นั้Værïæþlë ïs mµtæþlë sø ïts tÿpë ïs ïñværïæñtẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "overrideNoOverloadMatches": "[vG14w][นั้Ñø øvërløæð sïgñætµrë ïñ øvërrïðë ïs çømpætïþlë wïth þæsë mëthøðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "overrideNotClassMethod": "[t5OaH][นั้ßæsë mëthøð ïs ðëçlærëð æs æ classmethod þµt øvërrïðë ïs ñøtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "overrideNotInstanceMethod": "[e2Xo5][นั้ßæsë mëthøð ïs ðëçlærëð æs æñ ïñstæñçë mëthøð þµt øvërrïðë ïs ñøtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "overrideNotStaticMethod": "[Eu8Oy][นั้ßæsë mëthøð ïs ðëçlærëð æs æ staticmethod þµt øvërrïðë ïs ñøtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "overrideOverloadNoMatch": "[smVSW][นั้Øvërrïðë ðøës ñøt hæñðlë æll øvërløæðs øf þæsë mëthøðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "overrideOverloadOrder": "[HrUeN][นั้Øvërløæðs før øvërrïðë mëthøð mµst þë ïñ thë sæmë ørðër æs thë þæsë mëthøðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "overrideParamKeywordNoDefault": "[yIoa8][นั้Këÿwørð pæræmëtër \"{ñæmë}\" mïsmætçh: þæsë pæræmëtër hæs ðëfæµlt ærgµmëñt vælµë, øvërrïðë pæræmëtër ðøës ñøtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "overrideParamKeywordType": "[SAOAr][นั้Këÿwørð pæræmëtër \"{ñæmë}\" tÿpë mïsmætçh: þæsë pæræmëtër ïs tÿpë \"{þæsëTÿpë}\", øvërrïðë pæræmëtër ïs tÿpë \"{øvërrïðëTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "overrideParamName": "[fRWyy][นั้Pæræmëtër {ïñðëx} ñæmë mïsmætçh: þæsë pæræmëtër ïs ñæmëð \"{þæsëÑæmë}\", øvërrïðë pæræmëtër ïs ñæmëð \"{øvërrïðëÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "overrideParamNameExtra": "[HEG0d][นั้Pæræmëtër \"{ñæmë}\" ïs mïssïñg ïñ þæsëẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "overrideParamNameMissing": "[PN8Rl][นั้Pæræmëtër \"{ñæmë}\" ïs mïssïñg ïñ øvërrïðëẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + 
"overrideParamNamePositionOnly": "[3yVlj][นั้Pæræmëtër {ïñðëx} mïsmætçh: þæsë pæræmëtër \"{þæsëÑæmë}\" ïs këÿwørð pæræmëtër, øvërrïðë pæræmëtër ïs pøsïtïøñ-øñlÿẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "overrideParamNoDefault": "[azvZD][นั้Pæræmëtër {ïñðëx} mïsmætçh: þæsë pæræmëtër hæs ðëfæµlt ærgµmëñt vælµë, øvërrïðë pæræmëtër ðøës ñøtẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "overrideParamType": "[FXBcO][นั้Pæræmëtër {ïñðëx} tÿpë mïsmætçh: þæsë pæræmëtër ïs tÿpë \"{þæsëTÿpë}\", øvërrïðë pæræmëtër ïs tÿpë \"{øvërrïðëTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "overridePositionalParamCount": "[jJZNP][นั้Pøsïtïøñæl pæræmëtër çøµñt mïsmætçh; þæsë mëthøð hæs {þæsëÇøµñt}, þµt øvërrïðë hæs {øvërrïðëÇøµñt}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "overrideReturnType": "[mdPwX][นั้Rëtµrñ tÿpë mïsmætçh: þæsë mëthøð rëtµrñs tÿpë \"{þæsëTÿpë}\", øvërrïðë rëtµrñs tÿpë \"{øvërrïðëTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "overrideType": "[ryAgb][นั้ßæsë çlæss ðëfïñës tÿpë æs \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまนั้ढूँ]", + "paramAssignment": "[HGg7D][นั้Pæræmëtër {ïñðëx}: tÿpë \"{søµrçëTÿpë}\" ïs ïñçømpætïþlë wïth tÿpë \"{ðëstTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "paramSpecMissingInOverride": "[AUge7][นั้ParamSpec pæræmëtërs ærë mïssïñg ïñ øvërrïðë mëthøðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "paramType": "[hHLAX][นั้Pæræmëtër tÿpë ïs \"{pæræmTÿpë}\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "privateImportFromPyTypedSource": "[8gX6u][นั้Ïmpørt frøm \"{møðµlë}\" ïñstëæðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "propertyAccessFromProtocolClass": "[h75EJ][นั้Æ prøpërtÿ ðëfïñëð wïthïñ æ prøtøçøl çlæss çæññøt þë æççëssëð æs æ çlæss værïæþlëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "propertyMethodIncompatible": "[dWDwG][นั้Property mëthøð \"{ñæmë}\" ïs ïñçømpætïþlëẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "propertyMethodMissing": "[xWlRK][นั้Property mëthøð \"{ñæmë}\" ïs mïssïñg ïñ øvërrïðëẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "propertyMissingDeleter": 
"[r2oGK][นั้Property \"{ñæmë}\" hæs ñø ðëfïñëð deleterẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "propertyMissingSetter": "[Sr1R9][นั้Property \"{ñæmë}\" hæs ñø ðëfïñëð setterẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "protocolIncompatible": "[4uTqc][นั้\"{søµrçëTÿpë}\" ïs ïñçømpætïþlë wïth prøtøçøl \"{ðëstTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "protocolMemberMissing": "[Ad9PZ][นั้\"{ñæmë}\" ïs ñøt prësëñtẤğ倪İЂҰक्र्นั้ढूँ]", + "protocolRequiresRuntimeCheckable": "[c9ewn][นั้Protocol çlæss mµst þë @runtime_checkable tø þë µsëð wïth ïñstæñçë æñð çlæss çhëçksẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "protocolSourceIsNotConcrete": "[DnLrN][นั้\"{søµrçëTÿpë}\" ïs ñøt æ çøñçrëtë çlæss tÿpë æñð çæññøt þë æssïgñëð tø tÿpë \"{ðëstTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "protocolUnsafeOverlap": "[fKiUM][นั้Ættrïþµtës øf \"{ñæmë}\" hævë thë sæmë ñæmës æs thë prøtøçølẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "pyrightCommentIgnoreTip": "[raFZN][นั้Üsë \"# pyright: ignore[]\" tø sµpprëss ðïægñøstïçs før æ sïñglë lïñëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "readOnlyAttribute": "[k9waY][นั้Ættrïþµtë \"{ñæmë}\" ïs rëæð-øñlÿẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "seeClassDeclaration": "[8sx7n][นั้§ëë çlæss ðëçlærætïøñẤğ倪İЂҰक्นั้ढूँ]", + "seeDeclaration": "[K0X6p][นั้§ëë ðëçlærætïøñẤğ倪İЂҰนั้ढूँ]", + "seeFunctionDeclaration": "[1N6ig][นั้§ëë fµñçtïøñ ðëçlærætïøñẤğ倪İЂҰक्र्นั้ढूँ]", + "seeMethodDeclaration": "[eJSye][นั้§ëë mëthøð ðëçlærætïøñẤğ倪İЂҰक्र्นั้ढूँ]", + "seeParameterDeclaration": "[mBEpT][นั้§ëë pæræmëtër ðëçlærætïøñẤğ倪İЂҰक्र्นั้ढूँ]", + "seeTypeAliasDeclaration": "[Pjnb8][นั้§ëë tÿpë ælïæs ðëçlærætïøñẤğ倪İЂҰक्र्นั้ढूँ]", + "seeVariableDeclaration": "[M3EiY][นั้§ëë værïæþlë ðëçlærætïøñẤğ倪İЂҰक्र्นั้ढूँ]", + "tupleAssignmentMismatch": "[aLGep][นั้Tÿpë \"{tÿpë}\" ïs ïñçømpætïþlë wïth tærgët tupleẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "tupleEntryTypeMismatch": "[ny8Sn][นั้Tuple ëñtrÿ {ëñtrÿ} ïs ïñçørrëçt tÿpëẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "tupleSizeIndeterminateSrc": "[EnNiw][นั้Tuple sïzë mïsmætçh; ëxpëçtëð 
{ëxpëçtëð} þµt rëçëïvëð ïñðëtërmïñætëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "tupleSizeIndeterminateSrcDest": "[lrxYh][นั้Tuple sïzë mïsmætçh; ëxpëçtëð {ëxpëçtëð} ør mørë þµt rëçëïvëð ïñðëtërmïñætëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğนั้ढूँ]", + "tupleSizeMismatch": "[F2Yc7][นั้Tuple sïzë mïsmætçh; ëxpëçtëð {ëxpëçtëð} þµt rëçëïvëð {rëçëïvëð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "tupleSizeMismatchIndeterminateDest": "[6vxdi][นั้Tuple sïzë mïsmætçh; ëxpëçtëð {ëxpëçtëð} ør mørë þµt rëçëïvëð {rëçëïvëð}Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "typeAliasInstanceCheck": "[29G7K][นั้Tÿpë ælïæs çrëætëð wïth \"type\" stætëmëñt çæññøt þë µsëð wïth ïñstæñçë æñð çlæss çhëçksẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typeAssignmentMismatch": "[VF9B4][นั้Tÿpë \"{søµrçëTÿpë}\" ïs ñøt æssïgñæþlë tø tÿpë \"{ðëstTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typeBound": "[AIZri][นั้Tÿpë \"{søµrçëTÿpë}\" ïs ñøt æssïgñæþlë tø µppër þøµñð \"{ðëstTÿpë}\" før tÿpë værïæþlë \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typeConstrainedTypeVar": "[qHztb][นั้Tÿpë \"{tÿpë}\" ïs ñøt æssïgñæþlë tø çøñstræïñëð tÿpë værïæþlë \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typeIncompatible": "[L3llJ][นั้\"{søµrçëTÿpë}\" ïs ñøt æssïgñæþlë tø \"{ðëstTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "typeNotClass": "[DTm1E][นั้\"{tÿpë}\" ïs ñøt æ çlæssẤğ倪İЂҰक्र्นั้ढूँ]", + "typeNotStringLiteral": "[D7UY3][นั้\"{tÿpë}\" ïs ñøt æ strïñg lïtërælẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeOfSymbol": "[qlRHN][นั้Tÿpë øf \"{ñæmë}\" ïs \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeParamSpec": "[m23b5][นั้Tÿpë \"{tÿpë}\" ïs ïñçømpætïþlë wïth ParamSpec \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeUnsupported": "[Dx3Cx][นั้Tÿpë \"{tÿpë}\" ïs µñsµppørtëðẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typeVarDefaultOutOfScope": "[05ALy][นั้Tÿpë værïæþlë \"{ñæmë}\" ïs ñøt ïñ sçøpëẤğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "typeVarIsContravariant": "[kup2Y][นั้Tÿpë pæræmëtër \"{ñæmë}\" ïs çøñtræværïæñt, þµt \"{søµrçëTÿpë}\" ïs ñøt æ sµpërtÿpë øf 
\"{ðëstTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typeVarIsCovariant": "[EeRng][นั้Tÿpë pæræmëtër \"{ñæmë}\" ïs çøværïæñt, þµt \"{søµrçëTÿpë}\" ïs ñøt æ sµþtÿpë øf \"{ðëstTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeVarIsInvariant": "[WLZaN][นั้Tÿpë pæræmëtër \"{ñæmë}\" ïs ïñværïæñt, þµt \"{søµrçëTÿpë}\" ïs ñøt thë sæmë æs \"{ðëstTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typeVarNotAllowed": "[37OGF][นั้TypeVar ñøt ælløwëð før ïñstæñçë ør çlæss çhëçksẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "typeVarTupleRequiresKnownLength": "[GGttd][นั้TypeVarTuple çæññøt þë þøµñð tø æ tuple øf µñkñøwñ lëñgthẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "typeVarUnnecessarySuggestion": "[k0XTd][นั้Üsë {tÿpë} ïñstëæðẤğ倪İЂҰक्นั้ढूँ]", + "typeVarUnsolvableRemedy": "[PaRa7][นั้Prøvïðë æñ øvërløæð thæt spëçïfïës thë rëtµrñ tÿpë whëñ thë ærgµmëñt ïs ñøt sµpplïëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "typeVarsMissing": "[R1SEV][นั้Mïssïñg tÿpë værïæþlës: {ñæmës}Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "typedDictBaseClass": "[Zv6vP][นั้Çlæss \"{tÿpë}\" ïs ñøt æ TypedDictẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "typedDictClassNotAllowed": "[Vgl7x][นั้TypedDict çlæss ñøt ælløwëð før ïñstæñçë ør çlæss çhëçksẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typedDictClosedExtraNotAllowed": "[zT7Rm][นั้Çæññøt æðð ïtëm \"{ñæmë}\"Ấğ倪İЂҰक्र्นั้ढूँ]", + "typedDictClosedExtraTypeMismatch": "[blC1e][นั้Çæññøt æðð ïtëm \"{ñæmë}\" wïth tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪นั้ढूँ]", + "typedDictClosedFieldNotReadOnly": "[45ICT][นั้Çæññøt æðð ïtëm \"{ñæmë}\" þëçæµsë ït mµst þë ReadOnlyẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typedDictClosedFieldNotRequired": "[6rtDR][นั้Çæññøt æðð ïtëm \"{ñæmë}\" þëçæµsë ït mµst þë NotRequiredẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "typedDictExtraFieldNotAllowed": "[kFDh9][นั้\"{ñæmë}\" ïs ñøt prësëñt ïñ \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typedDictExtraFieldTypeMismatch": "[DnAhM][นั้Tÿpë øf \"{ñæmë}\" ïs ïñçømpætïþlë wïth tÿpë øf \"extra_items\" ïñ \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + 
"typedDictFieldMissing": "[rNzn7][นั้\"{ñæmë}\" ïs mïssïñg frøm \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "typedDictFieldNotReadOnly": "[BJy1V][นั้\"{ñæmë}\" ïs ñøt rëæð-øñlÿ ïñ \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤนั้ढूँ]", + "typedDictFieldNotRequired": "[eqatW][นั้\"{ñæmë}\" ïs ñøt rëqµïrëð ïñ \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまนั้ढूँ]", + "typedDictFieldRequired": "[ckyH4][นั้\"{ñæmë}\" ïs rëqµïrëð ïñ \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृนั้ढूँ]", + "typedDictFieldTypeMismatch": "[XYIBH][นั้Tÿpë \"{tÿpë}\" ïs ñøt æssïgñæþlë tø ïtëm \"{ñæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "typedDictFieldUndefined": "[UsDC9][นั้\"{ñæmë}\" ïs æñ µñðëfïñëð ïtëm ïñ tÿpë \"{tÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typedDictKeyAccess": "[67DLq][นั้Üsë [\"{ñæmë}\"] tø rëfërëñçë ïtëm ïñ TypedDictẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "typedDictNotAllowed": "[eTsPP][นั้TypedDict çæññøt þë µsëð før ïñstæñçë ør çlæss çhëçksẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "unhashableType": "[IJEeq][นั้Tÿpë \"{tÿpë}\" ïs ñøt hæshæþlëẤğ倪İЂҰक्र्तिृนั้ढूँ]", + "uninitializedAbstractVariable": "[uDuHt][นั้Ïñstæñçë værïæþlë \"{ñæmë}\" ïs ðëfïñëð ïñ æþstræçt þæsë çlæss \"{çlæssTÿpë}\" þµt ñøt ïñïtïælïzëðẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्นั้ढूँ]", + "unreachableExcept": "[3CSUL][นั้\"{ëxçëptïøñTÿpë}\" ïs æ sµþçlæss øf \"{pærëñtTÿpë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰนั้ढूँ]", + "useDictInstead": "[LReB5][นั้Üsë dict[T1, T2] tø ïñðïçætë æ ðïçtïøñærÿ tÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]", + "useListInstead": "[RPu0E][นั้Üsë list[T] tø ïñðïçætë æ list tÿpë ør T1 | T2 tø ïñðïçætë æ union tÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまนั้ढूँ]", + "useTupleInstead": "[jaFqC][นั้Üsë tuple[T1, ..., Tn] tø ïñðïçætë æ tuple tÿpë ør T1 | T2 tø ïñðïçætë æ union tÿpëẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]", + "useTypeInstead": "[Zig8D][นั้Üsë type[T] ïñstëæðẤğ倪İЂҰक्นั้ढूँ]", + "varianceMismatchForClass": "[fqhIl][นั้Værïæñçë øf tÿpë ærgµmëñt \"{tÿpëVærÑæmë}\" ïs ïñçømpætïþlë wïth þæsë çlæss \"{çlæssÑæmë}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्นั้ढूँ]", + "varianceMismatchForTypeAlias": "[YSiVx][นั้Værïæñçë øf 
tÿpë ærgµmëñt \"{tÿpëVærÑæmë}\" ïs ïñçømpætïþlë wïth \"{tÿpëÆlïæsPæræm}\"Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İนั้ढूँ]" + }, + "Service": { + "longOperation": "[Mvrp3][นั้Ëñµmërætïøñ øf wørkspæçë søµrçë fïlës ïs tækïñg æ løñg tïmë. Çøñsïðër øpëñïñg æ sµþ-følðër ïñstëæð. [£ëærñ mørë](https://ækæ.ms/wørkspæçë-tøø-mæñÿ-fïlës)Ấğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂҰक्र्तिृまẤğ倪İЂนั้ढूँ]" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.ru.json b/python-parser/packages/pyright-internal/src/localization/package.nls.ru.json new file mode 100644 index 00000000..e77519af --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.ru.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "Создать Stub типа", + "createTypeStubFor": "Создать Stub типа для \"{moduleName}\"", + "executingCommand": "Производится выполнение команды", + "filesToAnalyzeCount": "{count} файлов для анализа", + "filesToAnalyzeOne": "1 файл для анализа", + "findingReferences": "Производится поиск ссылок", + "organizeImports": "Упорядочение импорта" + }, + "Completion": { + "autoImportDetail": "Автоматический импорт", + "indexValueDetail": "Значение индекса" + }, + "Diagnostic": { + "abstractMethodInvocation": "Невозможно вызвать метод \"{method}\", так как он является абстрактным и нереализованным", + "annotatedMetadataInconsistent": "Тип аннотированных метаданных \"{metadataType}\" несовместим с типом \"{type}\"", + "annotatedParamCountMismatch": "Несоответствие числа аннотаций параметра: ожидается {expected}, но получено {received}", + "annotatedTypeArgMissing": "Для \"Annotated\" ожидается один аргумент типа и одна или несколько заметок типа", + "annotationBytesString": "Выражения типа не могут использовать строковые литералы байтов", + "annotationFormatString": "В выражениях типа не могут использоваться строковые литералы формата (f-строки)", + "annotationNotSupported": "Заметка типа не поддерживается для этой 
инструкции", + "annotationRawString": "Выражения типа не могут использовать необработанные строковые литералы", + "annotationSpansStrings": "Выражения типа не могут охватывать несколько строковых литералов", + "annotationStringEscape": "Выражения типа не могут содержать escape-символы", + "annotationTemplateString": "Выражения типов не могут использовать шаблонные строковые литералы (t-строки)", + "argAssignment": "Аргумент типа \"{argType}\" нельзя присвоить параметру типа \"{paramType}\"", + "argAssignmentFunction": "Аргумент типа \"{argType}\" нельзя присвоить параметру типа \"{paramType}\" в функции \"{functionName}\"", + "argAssignmentParam": "Аргумент типа \"{argType}\" нельзя присвоить параметру \"{paramName}\" типа \"{paramType}\"", + "argAssignmentParamFunction": "Аргумент типа \"{argType}\" нельзя присвоить параметру \"{paramName}\" типа \"{paramType}\" в функции \"{functionName}\"", + "argMissingForParam": "Отсутствует аргумент для параметра {name}", + "argMissingForParams": "Отсутствуют аргументы для параметров {names}", + "argMorePositionalExpectedCount": "Ожидаются еще позиционные аргументы: {expected}", + "argMorePositionalExpectedOne": "Ожидается еще 1 позиционный аргумент", + "argPositional": "Ожидается позиционный аргумент", + "argPositionalExpectedCount": "Ожидаемые позиционные аргументы: {expected}", + "argPositionalExpectedOne": "Ожидается 1 позиционный аргумент", + "argTypePartiallyUnknown": "Тип аргумента частично неизвестен", + "argTypeUnknown": "Тип аргумента неизвестен", + "assertAlwaysTrue": "Выражение assert всегда вычисляется как true", + "assertTypeArgs": "Для \"assert_type\" ожидается два позиционных аргумента", + "assertTypeTypeMismatch": "Несоответствие типов \"assert_type\": ожидается \"{expected}\", но получено \"{received}\"", + "assignmentExprComprehension": "Цель выражения назначения \"{name}\" не может использовать то же имя, что и понимание для целевого объекта", + "assignmentExprContext": "Выражение присваивания должно 
находиться в модуле, функции или лямбде", + "assignmentExprInSubscript": "Выражения назначения внутри оператора получения подстроки можно использовать в Python версии не ниже 3.10", + "assignmentInProtocol": "Переменные экземпляра или класса в классе Protocol должны быть явно объявлены в тексте класса", + "assignmentTargetExpr": "Выражение не может быть целевым объектом присваивания", + "asyncNotInAsyncFunction": "Использование \"async\" не разрешено вне async функции", + "awaitIllegal": "Инструкцию \"await\" можно использовать в Python версии не ранее 3.5", + "awaitNotAllowed": "Выражения типа не могут использовать \"await\"", + "awaitNotInAsync": "Ключевое слово \"await\" допускается только в async функции", + "backticksIllegal": "Выражения, окруженные одиночными обратными кавычками, не поддерживаются в Python 3.x; используйте вместо этого функцию repr", + "baseClassCircular": "Класс не может быть производным от себя", + "baseClassFinal": "Базовый класс \"{type}\" помечен как final, и от него нельзя создать производный класс", + "baseClassIncompatible": "Базовые классы {type} взаимно несовместимы", + "baseClassInvalid": "Аргумент класса должен быть базовым классом", + "baseClassMethodTypeIncompatible": "Базовые классы класса \"{classType}\" определяют метод \"{name}\" несовместимым способом", + "baseClassUnknown": "Неизвестный тип базового класса скрывает тип производного класса", + "baseClassVariableTypeIncompatible": "Базовые классы класса \"{classType}\" определяют переменную \"{name}\" несовместимым способом", + "binaryOperationNotAllowed": "Бинарный оператор нельзя использовать в выражении типа", + "bindParamMissing": "Не удалось привязать метод \"{methodName}\", так как в нем отсутствует параметр \"self\" или \"cls\"", + "bindTypeMismatch": "Не удалось привязать метод \"{methodName}\", \"{type}\" не может быть назначен параметру \"{paramName}\"", + "breakInExceptionGroup": "Запрещено наличие \"break\" в блоке \"except*\"", + "breakOutsideLoop": "\"break\" 
можно использовать только внутри цикла", + "bytesUnsupportedEscape": "Неподдерживаемая escape-последовательность в литерале bytes", + "callableExtraArgs": "В \"Callable\" ожидается только два аргумента типа", + "callableFirstArg": "Ожидается список типов параметров или \"...\"", + "callableNotInstantiable": "Невозможно создать экземпляр типа \"{type}\"", + "callableSecondArg": "В качестве аргумента второго типа для \"Callable\" ожидается возвращаемый тип", + "casePatternIsIrrefutable": "Неопровержимый шаблон допускается только для последнего оператора case", + "classAlreadySpecialized": "Тип \"{type}\" уже специализирован", + "classDecoratorTypeUnknown": "Нетипизированный декоратор класса скрывает тип класса. Игнорирование декоратора", + "classDefinitionCycle": "Определение класса для \"{name}\" зависит от себя", + "classGetItemClsParam": "Переопределение метода __class_getitem__ должно принимать параметр \"cls\"", + "classMethodClsParam": "Методы класса должны принимать параметр cls", + "classNotRuntimeSubscriptable": "Операция взятия подстроки для класса \"{name}\" создаст исключение среды выполнения; заключите выражение типа в кавычки", + "classPatternBuiltInArgPositional": "Шаблон класса принимает только позиционный вложенный шаблон", + "classPatternNewType": "Не удается использовать \"{type}\" в шаблоне класса, так как он определен с помощью NewType", + "classPatternPositionalArgCount": "Слишком много позиционных шаблонов для класса \"{type}\"; ожидается {expected}, но получено {received}", + "classPatternTypeAlias": "\"{type}\" нельзя использовать в шаблоне класса, поскольку это псевдоним специализированного типа", + "classPropertyDeprecated": "Свойства класса упразднены в Python 3.11 и не будут поддерживаться в Python 3.13", + "classTypeParametersIllegal": "Синтаксис параметра типа класса может использоваться в Python версии не ниже 3.12.", + "classVarFirstArgMissing": "Ожидается аргумент типа после \"ClassVar\"", + "classVarNotAllowed": "Использование 
\"ClassVar\" в этом контексте не допускается", + "classVarOverridesInstanceVar": "Переменная класса \"{name}\" переопределяет переменную экземпляра с тем же именем в классе \"{className}\"", + "classVarTooManyArgs": "Ожидается только один аргумент типа после \"ClassVar\"", + "classVarWithTypeVar": "Тип \"ClassVar\" не может включать переменные типа", + "clsSelfParamTypeMismatch": "Тип параметра \"{name}\" должен быть супертипом своего класса \"{classType}\"", + "codeTooComplexToAnalyze": "Код слишком сложен для анализа; уменьшите сложность, разбив его на фрагменты (вложенные процедуры) или сократите количество условных конструкций", + "collectionAliasInstantiation": "Тип \"{type}\" не может быть создан, вместо этого используйте \"{alias}\"", + "comparisonAlwaysFalse": "Условие всегда будет оцениваться как False, поскольку типы \"{leftType}\" и \"{rightType}\" не перекрываются", + "comparisonAlwaysTrue": "Выражение всегда будет возвращать значение True, поскольку типы \"{leftType}\" и \"{rightType}\" не перекрываются", + "comprehensionInDict": "Понимание не может использоваться с другими записями словаря", + "comprehensionInSet": "Понимание не может использоваться с другими записями set", + "concatenateContext": "\"Concatenate\" не допускается в этом контексте", + "concatenateParamSpecMissing": "Последний аргумент типа для \"Concatenate\" должен быть ParamSpec или \"...\"", + "concatenateTypeArgsMissing": "Для \"Concatenate\" требуется по крайней мере два аргумента типа", + "conditionalOperandInvalid": "Недопустимый условный операнд типа \"{type}\"", + "constantRedefinition": "\"{name}\" является константой (поскольку оно в верхнем регистре) и не может быть переопределено.", + "constructorParametersMismatch": "Несоответствие подписи __new__ и __init__ в классе \"{classType}\"", + "containmentAlwaysFalse": "Выражение всегда будет оцениваться как False, так как типы \"{leftType}\" и \"{rightType}\" не перекрываются", + "containmentAlwaysTrue": "Выражение всегда будет 
оцениваться как True, так как типы \"{leftType}\" и \"{rightType}\" не перекрываются", + "continueInExceptionGroup": "Запрещено наличие \"continue\" в блоке \"except*\"", + "continueOutsideLoop": "Ключевое слово \"continue\" можно использовать только внутри цикла", + "coroutineInConditionalExpression": "Условное выражение ссылается на сопрограмму, которая всегда возвращает значение True", + "dataClassBaseClassFrozen": "Незафиксированный класс не может наследоваться от зафиксированного класса", + "dataClassBaseClassNotFrozen": "Зафиксированный класс не может наследоваться от незафиксированного класса", + "dataClassConverterFunction": "Аргумент типа \"{argType}\" не является допустимым преобразователем для поля \"{fieldName}\" типа \"{fieldType}\"", + "dataClassConverterOverloads": "Ни одна перегрузка \"{funcName}\" не является допустимым преобразователем поля \"{fieldName}\" типа \"{fieldType}\"", + "dataClassFieldInheritedDefault": "\"{fieldName}\" переопределяет поле с тем же именем, но в нем отсутствует значение по умолчанию", + "dataClassFieldWithDefault": "Поля без значений по умолчанию не могут отображаться после полей со значениями по умолчанию.", + "dataClassFieldWithPrivateName": "Поле класса данных не может использовать закрытое имя", + "dataClassFieldWithoutAnnotation": "Поле dataclass без заметки с типом вызовет исключение среды выполнения", + "dataClassPostInitParamCount": "Неверное число параметров в методе __post_init__ класса данных; ожидается число, соответствующее числу полей InitVar {expected}", + "dataClassPostInitType": "Тип параметра метода __post_init__ в классе данных не соответствует типу поля \"{fieldName}\"", + "dataClassSlotsOverwrite": "__slots__ уже определен в классе", + "dataClassTransformExpectedBoolLiteral": "Ожидается выражение, статически оцениваемое как True или False", + "dataClassTransformFieldSpecifier": "Ожидается tuple классов или функций, но получен тип \"{type}\"", + "dataClassTransformPositionalParam": "Все аргументы
\"dataclass_transform\" должны быть аргументами типа \"ключевое слово\"", + "dataClassTransformUnknownArgument": "Аргумент \"{name}\" не поддерживается в dataclass_transform", + "dataProtocolInSubclassCheck": "Протоколы данных (включающие атрибуты, не связанные с методами) запрещены в вызовах issubclass", + "declaredReturnTypePartiallyUnknown": "Объявленный тип возвращаемого значения \"{returnType}\" частично неизвестен", + "declaredReturnTypeUnknown": "Объявленный тип возвращаемого значения неизвестен", + "defaultValueContainsCall": "Вызовы функций и изменяемые объекты не разрешены в выражении значения параметра по умолчанию", + "defaultValueNotAllowed": "Параметр с \"*\" или \"**\" не может иметь значение по умолчанию", + "delTargetExpr": "Не удается удалить выражение", + "deprecatedClass": "Класс \"{name}\" является нерекомендуемым", + "deprecatedConstructor": "Конструктор для класса \"{name}\" больше не рекомендуется к использованию", + "deprecatedDescriptorDeleter": "Метод \"__delete__\" для дескриптора \"{name}\" не рекомендуется", + "deprecatedDescriptorGetter": "Метод \"__get__\" для дескриптора \"{name}\" не рекомендуется", + "deprecatedDescriptorSetter": "Метод \"__set__\" для дескриптора \"{name}\" не рекомендуется", + "deprecatedFunction": "Функция \"{name}\" больше не рекомендуется к использованию", + "deprecatedMethod": "Метод \"{name}\" в классе \"{className}\" не рекомендуется к использованию", + "deprecatedPropertyDeleter": "Метод deleter для property \"{name}\" не рекомендуется", + "deprecatedPropertyGetter": "Метод getter для property \"{name}\" не рекомендуется", + "deprecatedPropertySetter": "Метод setter для property \"{name}\" не рекомендуется", + "deprecatedType": "Этот тип больше не рекомендуется к использованию начиная с версии Python {version}; используйте вместо него \"{replacement}\"", + "dictExpandIllegalInComprehension": "Расширение словаря в понимании не допускается", + "dictInAnnotation": "Словарное выражение не разрешено в 
выражении типа", + "dictKeyValuePairs": "Записи словаря должны содержать пары \"ключ-значение\"", + "dictUnpackIsNotMapping": "Ожидается сопоставление для оператора распаковки словаря", + "dunderAllSymbolNotPresent": "\"{name}\" указано в методе __all__, но отсутствует в модуле", + "duplicateArgsParam": "Разрешен только один параметр \"*\"", + "duplicateBaseClass": "Дублирование базового класса не допускается", + "duplicateCapturePatternTarget": "Целевой объект захвата \"{name}\" не может появляться более одного раза в одном шаблоне", + "duplicateCatchAll": "В блоке except допускается только одно предложение catch-all", + "duplicateEnumMember": "Элемент Enum типа \"{name}\" уже объявлен", + "duplicateGenericAndProtocolBase": "Допускается только один базовый класс Generic[...] или Protocol[...]", + "duplicateImport": "\"{importName}\" импортируется несколько раз", + "duplicateKeywordOnly": "Разрешен только один разделитель \"*\"", + "duplicateKwargsParam": "Разрешен только один параметр \"**\"", + "duplicateParam": "Параметр \"{name}\" повторяется", + "duplicatePositionOnly": "Разрешен только один параметр \"/\"", + "duplicateStarPattern": "В последовательности шаблонов допускается только один шаблон \"*\"", + "duplicateStarStarPattern": "Допускается только одна запись \"**\"", + "duplicateUnpack": "В list разрешена только одна операция распаковки", + "ellipsisAfterUnpacked": "\"...\" не может использоваться с распакованным элементом TypeVarTuple или tuple", + "ellipsisContext": "\"...\" не допускается в данном контексте", + "ellipsisSecondArg": "\"...\" разрешается только в качестве второго из двух аргументов", + "enumClassOverride": "Enum класс \"{name}\" является final и не может иметь производных классов", + "enumMemberDelete": "Не удается удалить элемент Enum \"{name}\"", + "enumMemberSet": "Не удается назначить элемент Enum \"{name}\"", + "enumMemberTypeAnnotation": "Аннотации типов не разрешены для элементов enum", + "exceptGroupMismatch": "Оператор Try не 
может одновременно включать \"except\" и \"except*\"", + "exceptGroupRequiresType": "Для синтаксиса группы исключений (\"except*\") требуется тип исключения", + "exceptRequiresParens": "Несколько типов исключений следует заключать в скобки до версии Python 3.14", + "exceptWithAsRequiresParens": "Несколько типов исключений следует заключать в скобки при использовании \"as\"", + "exceptionGroupIncompatible": "Синтаксис группы исключений (\"except*\") можно использовать в Python версии не ранее 3.11", + "exceptionGroupTypeIncorrect": "Тип исключения в except* не может быть производным от BaseGroupException", + "exceptionTypeIncorrect": "\"{type}\" не является производным от BaseException", + "exceptionTypeNotClass": "\"{type}\" не является допустимым классом исключений", + "exceptionTypeNotInstantiable": "Конструктору типа исключения \"{type}\" требуется один или несколько аргументов", + "expectedAfterDecorator": "Ожидаемая функция или объявление класса после декоратора", + "expectedArrow": "Ожидается \"->\", за которым следует заметка типа к возвращаемому типу", + "expectedAsAfterException": "Ожидается \"as\" после типа исключения", + "expectedAssignRightHandExpr": "Справа от \"=\" ожидается выражение", + "expectedBinaryRightHandExpr": "Справа от оператора ожидается выражение", + "expectedBoolLiteral": "Ожидается True или False", + "expectedCase": "Ожидается оператор case", + "expectedClassName": "Ожидается имя класса", + "expectedCloseBrace": "Для скобки \"{\" отсутствует закрывающая скобка", + "expectedCloseBracket": "Для скобки \"[\" отсутствует закрывающая скобка", + "expectedCloseParen": "Для скобки \"(\" отсутствует закрывающая скобка", + "expectedColon": "Ожидается \":\"", + "expectedComplexNumberLiteral": "Для сопоставления шаблонов ожидается сложный числовой литерал", + "expectedDecoratorExpr": "Форму выражения для декоратора можно использовать в Python версии не ниже 3.9", + "expectedDecoratorName": "Ожидается имя декоратора", + "expectedDecoratorNewline": 
"В конце декоратора ожидается символ перехода на новую строку", + "expectedDelExpr": "После \"del\" ожидается выражение", + "expectedElse": "Ожидается ключевое слово \"else\"", + "expectedEquals": "Ожидается \"=\"", + "expectedExceptionClass": "Недопустимый класс или объект исключения", + "expectedExceptionObj": "Ожидается объект исключения, класс исключения или значение None", + "expectedExpr": "Ожидается выражение", + "expectedFunctionAfterAsync": "Ожидается определение функции после \"async\"", + "expectedFunctionName": "После ключевого слова \"def\" ожидается имя функции", + "expectedIdentifier": "Ожидается идентификатор", + "expectedImport": "Ожидается \"import\"", + "expectedImportAlias": "После \"as\" ожидается символ", + "expectedImportSymbols": "После \"import\" ожидается одно или несколько имен символов", + "expectedIn": "Ожидается \"in\"", + "expectedInExpr": "После ключевого слова \"in\" ожидается выражение", + "expectedIndentedBlock": "Ожидается блок с отступом", + "expectedMemberName": "Требуется указать имя атрибута после \".\"", + "expectedModuleName": "Ожидаемое имя модуля", + "expectedNameAfterAs": "Ожидается имя символа после \"as\"", + "expectedNamedParameter": "За \"*\" должен следовать параметр типа \"ключевое слово\"", + "expectedNewline": "Ожидается новая строка", + "expectedNewlineOrSemicolon": "Операторы должны быть разделены символами перевода строки или точками с запятой", + "expectedOpenParen": "Ожидается \"(\"", + "expectedParamName": "Ожидается имя параметра", + "expectedPatternExpr": "Ожидается выражение шаблона", + "expectedPatternSubjectExpr": "Ожидаемое выражение субъекта шаблона", + "expectedPatternValue": "Ожидается выражение значения шаблона в форме \"a.b\"", + "expectedReturnExpr": "После \"return\" ожидается выражение", + "expectedSliceIndex": "Ожидается выражение индекса или среза", + "expectedTypeNotString": "Ожидался тип, но получен строковый литерал", + "expectedTypeParameterName": "Ожидаемое имя параметра типа", + 
"expectedYieldExpr": "Ожидаемое выражение в операторе yield", + "finalClassIsAbstract": "Класс \"{type}\" помечен как final (окончательный) и должен реализовать все абстрактные символы.", + "finalContext": "\"Final\" не допускается в данном контексте", + "finalInLoop": "Переменная \"Final\" не может быть назначена внутри цикла", + "finalMethodOverride": "Метод \"{name}\" не может переопределять final метод, определенный в классе \"{className}\"", + "finalNonMethod": "Функцию \"{name}\" нельзя пометить как @final, так как она не является методом", + "finalReassigned": "\"{name}\" объявляется Final и не может быть переназначено", + "finalRedeclaration": "\"{name}\" ранее объявлено как Final", + "finalRedeclarationBySubclass": "\"{name}\" невозможно повторно объявить, поскольку в родительском классе \"{className}\" он объявлен как Final", + "finalTooManyArgs": "После \"Final\" ожидается один аргумент типа", + "finalUnassigned": "\"{name}\" объявлен как Final, но ему не присвоено значение", + "finallyBreak": "Невозможно использовать \"break\" для выхода из блока \"finally\"", + "finallyContinue": "Невозможно использовать \"continue\" для выхода из блока \"finally\"", + "finallyReturn": "Невозможно использовать \"return\" для выхода из блока \"finally\"", + "formatStringBrace": "Одиночная закрывающая скобка не допускается в литерале f-строки. 
Используйте двойную закрывающую скобку", + "formatStringBytes": "Строковые литералы формата (f-строки) не могут быть двоичными", + "formatStringDebuggingIllegal": "Для описателя отладки F-строки \"=\" требуется версия Python 3.8 или более новая", + "formatStringEscape": "Escape-последовательность (обратная косая черта) не разрешена в части выражения f-строки до версии Python 3.12", + "formatStringExpectedConversion": "После \"!\" в f-строке ожидался спецификатор преобразования", + "formatStringIllegal": "Для строковых литералов формата (f-строки) требуется версия Python 3.6 или более новая", + "formatStringInPattern": "Строка формата не разрешена в шаблоне", + "formatStringNestedFormatSpecifier": "В строке спецификации формата слишком высок уровень вложенности выражений", + "formatStringNestedQuote": "Строки, вложенные в f-строку, не могут использовать тот же символ кавычка, что и f-строка до Python 3.12", + "formatStringTemplate": "Форматные строковые литералы (f-строки) не могут одновременно быть шаблонными строками (t-строками)", + "formatStringUnicode": "Строковые литералы формата (f-строки) не могут быть в кодировке Unicode", + "formatStringUnterminated": "Незавершенное выражение в f-строке; ожидается \"}\"", + "functionDecoratorTypeUnknown": "Нетипизированный декоратор функции скрывает тип функции; декоратор будет игнорирован", + "functionInConditionalExpression": "Условное выражение ссылается на функцию, которая всегда возвращает значение True", + "functionTypeParametersIllegal": "Для синтаксиса параметра типа функции требуется версия Python 3.12 или более новая", + "futureImportLocationNotAllowed": "Операторы импорта из __future__ должны находиться в начале файла", + "generatorAsyncReturnType": "Тип возвращаемого значения функции генератора async должен быть совместим с \"AsyncGenerator[{yieldType}, Any]\"", + "generatorNotParenthesized": "Выражения генератора следует взять в скобки, если аргументов больше одного", + "generatorSyncReturnType": "Возвращаемый 
тип функции генератора должен быть совместим с \"Generator[{yieldType}, Any, Any]\"", + "genericBaseClassNotAllowed": "Базовый класс \"Generic\" нельзя использовать с синтаксисом параметра типа", + "genericClassAssigned": "Универсальный тип класса невозможно присвоить", + "genericClassDeleted": "Не удается удалить универсальный тип класса", + "genericInstanceVariableAccess": "Доступ к универсальной переменной экземпляра через класс является неоднозначным", + "genericNotAllowed": "Ключевое слово \"Generic\" недопустимо в этом контексте", + "genericTypeAliasBoundTypeVar": "Псевдоним универсального типа в классе не может использовать переменные привязанного типа {names}", + "genericTypeArgMissing": "Для \"Generic\" требуется по крайней мере один аргумент типа", + "genericTypeArgTypeVar": "Аргумент типа для \"Generic\" должен быть переменной типа", + "genericTypeArgUnique": "Аргументы типа для \"Generic\" должны быть уникальными", + "globalReassignment": "Присвоение \"{name}\" происходит раньше global объявления", + "globalRedefinition": "Имя \"{name}\" уже объявлено ранее как global", + "implicitStringConcat": "Неявное объединение строк не разрешено", + "importCycleDetected": "Обнаружен цикл в цепочке импорта", + "importDepthExceeded": "Глубина цепочки импорта превысила {depth}", + "importResolveFailure": "Не удается разрешить импорт \"{importName}\"", + "importSourceResolveFailure": "Не удается разрешить импорт \"{importName}\" из источника", + "importSymbolUnknown": "\"{name}\" — неизвестный символ импорта", + "incompatibleMethodOverride": "Метод \"{name}\" переопределяет класс \"{className}\" несовместимым образом", + "inconsistentIndent": "Сумма отступа не соответствует предыдущему отступу", + "inconsistentTabs": "Непоследовательное использование вкладок и пробелов в отступах", + "initMethodSelfParamTypeVar": "Заметка типа для параметра \"self\" метода \"__init__\" не может содержать переменные типа в области класса.", + "initMustReturnNone": "Метод \"__init__\" 
должен возвращать тип None", + "initSubclassCallFailed": "Неверные аргументы ключевых слов для метода __init_subclass__", + "initSubclassClsParam": "Переопределение метода __init_subclass__ должно принимать параметр \"cls\"", + "initVarNotAllowed": "Использование \"InitVar\" в этом контексте не допускается", + "instanceMethodSelfParam": "Методы экземпляра должны принимать параметр \"self\"", + "instanceVarOverridesClassVar": "Переменная экземпляра \"{name}\" переопределяет переменную класса с тем же именем в классе \"{className}\"", + "instantiateAbstract": "Не удается создать экземпляр абстрактного класса \"{type}\"", + "instantiateProtocol": "Не удается создать экземпляр класса Protocol \"{type}\"", + "internalBindError": "При привязке файла \"{file}\" произошла внутренняя ошибка: {message}", + "internalParseError": "При разборе файла \"{file}\" произошла внутренняя ошибка: {message}", + "internalTypeCheckingError": "При проверке файла \"{file}\" произошла внутренняя ошибка: {message}", + "invalidIdentifierChar": "Недопустимый символ в идентификаторе", + "invalidStubStatement": "Инструкция не имеет смысла в файле stub типа", + "invalidTokenChars": "Недопустимый символ \"{text}\" в маркере", + "isInstanceInvalidType": "Второй аргумент \"isinstance\" должен быть классом или tuple классов", + "isSubclassInvalidType": "Второй аргумент в \"issubclass\" должен быть классом или tuple классов", + "keyValueInSet": "Пары \"ключ-значение\" не допускаются в set", + "keywordArgInTypeArgument": "Аргументы ключевых слов нельзя использовать в списках аргументов типа", + "keywordOnlyAfterArgs": "Разделитель аргументов типа \"только ключевое слово\" не разрешен после параметра \"*\"", + "keywordParameterMissing": "За параметром \"*\" должен следовать один или несколько параметров ключевого слова.", + "keywordSubscriptIllegal": "Аргументы ключевых слов в нижних индексах не поддерживаются", + "lambdaReturnTypePartiallyUnknown": "Тип возвращаемого лямбдой значения \"{returnType}\" 
частично неизвестен", + "lambdaReturnTypeUnknown": "Тип значения, возвращаемого лямбдой, неизвестен", + "listAssignmentMismatch": "Выражение с типом \"{type}\" нельзя присвоить целевому списку", + "listInAnnotation": "List выражение не разрешено в выражении типа", + "literalEmptyArgs": "Ожидается один или несколько аргументов типа после \"Literal\"", + "literalNamedUnicodeEscape": "Именованные escape-последовательности Юникода не поддерживаются в аннотациях строк Literal", + "literalNotAllowed": "Невозможно использовать \"Literal\" в этом контексте без аргумента типа", + "literalNotCallable": "Не удается создать экземпляр типа Literal", + "literalUnsupportedType": "Аргументы типа для \"Literal\" должны иметь значение None, литеральное значение (int, bool, str или bytes) или значение enum типа", + "matchIncompatible": "Операторы Match могут использоваться в Python версии не ниже 3.10", + "matchIsNotExhaustive": "Блоки case в операторе match обрабатывают не все множество возможных значений", + "maxParseDepthExceeded": "Превышена максимальная глубина разбора; разбейте выражение на составляющие выражения меньшего размера", + "memberAccess": "Не удается получить доступ к атрибуту \"{name}\" для класса \"{type}\"", + "memberDelete": "Не удается удалить атрибут \"{name}\" для класса \"{type}\"", + "memberSet": "Не удается назначить атрибуту \"{name}\" для класса \"{type}\"", + "metaclassConflict": "Метакласс производного класса должен быть производным классом метаклассов всех его базовых классов", + "metaclassDuplicate": "Можно указать только один метакласс", + "metaclassIsGeneric": "Метакласс не может быть универсальным", + "methodNotDefined": "Метод \"{name}\" не определен", + "methodNotDefinedOnType": "Метод \"{name}\" не определен для типа \"{type}\"", + "methodOrdering": "Не удается создать согласованный порядок методов", + "methodOverridden": "\"{name}\" переопределяет метод с тем же именем в классе \"{className}\" с несовместимым типом \"{type}\"", + 
"methodReturnsNonObject": "Метод \"{name}\" не возвращает объект", + "missingSuperCall": "Метод \"{methodName}\" не вызывает метод с тем же именем в родительском классе", + "mixingBytesAndStr": "Невозможно объединить bytes и str значения", + "moduleAsType": "Модуль не может использоваться в качестве типа", + "moduleNotCallable": "Модуль не является вызываемым", + "moduleUnknownMember": "\"{memberName}\" не является известным атрибутом модуля \"{moduleName}\"", + "namedExceptAfterCatchAll": "Именованное предложение except не может стоять после предложения catch-all except", + "namedParamAfterParamSpecArgs": "Параметр ключевого слова \"{name}\" не может размещаться в сигнатуре после параметра ParamSpec args", + "namedTupleEmptyName": "Имена в именованном tuple не могут быть пустыми", + "namedTupleEntryRedeclared": "Не удается переопределить \"{name}\", так как родительский класс \"{className}\" является именованным tuple", + "namedTupleFieldUnderscore": "Имена полей Named tuple не могут начинаться с символа подчеркивания", + "namedTupleFirstArg": "В качестве первого аргумента ожидалось имя именованного класса tuple", + "namedTupleMultipleInheritance": "Множественное наследование для NamedTuple не поддерживается", + "namedTupleNameKeyword": "Имена полей не могут быть ключевыми словами", + "namedTupleNameType": "Ожидается двухфакторный tuple с указанием имени и типа записи", + "namedTupleNameUnique": "Имена внутри именованного tuple должны быть уникальными", + "namedTupleNoTypes": "\"namedtuple\" не предоставляет типов для записей tuple; используйте вместо него \"NamedTuple\"", + "namedTupleSecondArg": "В качестве второго аргумента ожидается именованный list записей tuple", + "newClsParam": "Переопределение метода __new__ должно принимать параметр \"cls\"", + "newTypeAnyOrUnknown": "Второй аргумент для NewType должен быть известным классом, а не вариантом \"Any\" или \"Unknown\"", + "newTypeBadName": "Первый аргумент NewType должен быть строковым литералом", + 
"newTypeLiteral": "NewType нельзя использовать с типом Literal", + "newTypeNameMismatch": "NewType должен быть назначен переменной с тем же именем", + "newTypeNotAClass": "В NewType в качестве второго аргумента ожидается класс", + "newTypeParamCount": "Для NewType требуются два позиционных аргумента", + "newTypeProtocolClass": "NewType нельзя использовать со структурным типом (класс Protocol или TypedDict)", + "noOverload": "Не существует перегрузок для \"{name}\", соответствующих указанным аргументам", + "noReturnContainsReturn": "Функция с объявленным типом return значения \"NoReturn\" не может содержать оператор return", + "noReturnContainsYield": "Функция с объявленным типом возвращаемого значения \"NoReturn\" не может содержать инструкцию yield", + "noReturnReturnsNone": "Функция с объявленным типом возвращаемого значения \"NoReturn\" не может возвращать \"None\"", + "nonDefaultAfterDefault": "Аргумент, отличный от аргумента по умолчанию, следует за аргументом по умолчанию", + "nonLocalInModule": "Nonlocal объявления на уровне модулей не разрешены", + "nonLocalNoBinding": "Привязка для nonlocal элемента \"{name}\" не найдена", + "nonLocalReassignment": "Назначение \"{name}\" происходит раньше nonlocal объявления", + "nonLocalRedefinition": "\"{name}\" уже объявлено ранее как nonlocal", + "noneNotCallable": "Объект типа \"None\" не может быть вызван", + "noneNotIterable": "Объект типа \"None\" не может использоваться в качестве итерируемого значения", + "noneNotSubscriptable": "Объект типа \"None\" не подлежит подписке", + "noneNotUsableWith": "Объект типа \"None\" нельзя использовать с \"with\"", + "noneNotUsableWithAsync": "Объект типа \"None\" нельзя использовать с \"async with\"", + "noneOperator": "Оператор \"{operator}\" не поддерживается для \"None\"", + "noneUnknownMember": "\"{name}\" не является известным атрибутом \"None\"", + "nonlocalTypeParam": "Привязка nonlocal не допускается для параметра типа \"{name}\"", + "notRequiredArgCount": "После 
\"NotRequired\" ожидается один аргумент типа", + "notRequiredNotInTypedDict": "Использование \"NotRequired\" в этом контексте не допускается", + "objectNotCallable": "Объект типа \"{type}\" не является вызываемым", + "obscuredClassDeclaration": "Объявление класса \"{name}\" скрывается объявлением с тем же именем", + "obscuredFunctionDeclaration": "Объявление функции \"{name}\" скрывается объявлением с тем же именем", + "obscuredMethodDeclaration": "Объявление метода \"{name}\" скрывается объявлением с тем же именем", + "obscuredParameterDeclaration": "Объявление параметра \"{name}\" скрывается объявлением с тем же именем", + "obscuredTypeAliasDeclaration": "Объявление псевдонима типа \"{name}\" скрывается объявлением с тем же именем", + "obscuredVariableDeclaration": "Объявление \"{name}\" скрывается объявлением с тем же именем", + "operatorLessOrGreaterDeprecated": "Оператор \"<>\" не поддерживается в Python 3; используйте вместо него \"!=\"", + "optionalExtraArgs": "Ожидается один аргумент типа после \"Optional\"", + "orPatternIrrefutable": "Неопровержимый шаблон допускается только в качестве последнего вложенного шаблона в шаблоне \"or\"", + "orPatternMissingName": "Все вложенные шаблоны в шаблоне \"or\" должны быть привязаны к одним и тем же именам", + "overlappingKeywordArgs": "Типизированный словарь перекрывается с параметром ключевого слова: {names}", + "overlappingOverload": "Перегрузка {obscured} для \"{name}\" никогда не будет использоваться, так как ее параметры перекрывают перегрузку {obscuredBy}", + "overloadAbstractImplMismatch": "Перегрузки должны соответствовать абстрактному статусу реализации", + "overloadAbstractMismatch": "Все перегрузки должны быть абстрактными или не абстрактными", + "overloadClassMethodInconsistent": "Перегрузки для \"{name}\" используют @classmethod несогласованно", + "overloadFinalImpl": "@final декоратор должен применяться только к реализации", + "overloadFinalNoImpl": "Только первая перегрузка должна быть помечена @final", 
+ "overloadImplementationMismatch": "Перегруженная реализация не согласована с сигнатурой перегрузки {index}", + "overloadOverrideImpl": "@override декоратор должен применяться только к реализации", + "overloadOverrideNoImpl": "Только первая перегрузка должна быть помечена @override", + "overloadReturnTypeMismatch": "Перегрузка {prevIndex} для \"{name}\" перекрывает перегрузку {newIndex} и возвращает несовместимый тип", + "overloadStaticMethodInconsistent": "Перегрузки для \"{name}\" используют @staticmethod несогласованно", + "overloadWithoutImplementation": "\"{name}\" помечен как overload, но реализация не предоставлена", + "overriddenMethodNotFound": "Метод \"{name}\" помечен как override, но базового метода с таким же именем нет", + "overrideDecoratorMissing": "Метод \"{name}\" не помечен как override, но переопределяет метод в классе \"{className}\"", + "paramAfterKwargsParam": "Параметр не может следовать за параметром \"**\"", + "paramAlreadyAssigned": "Параметр \"{name}\" уже назначен.", + "paramAnnotationMissing": "Отсутствует заметка с типом для параметра \"{name}\"", + "paramAssignmentMismatch": "Выражение типа \"{sourceType}\" не может быть назначено параметру типа \"{paramType}\"", + "paramNameMissing": "Параметра с именем \"{name}\" не существует", + "paramSpecArgsKwargsDuplicate": "Аргументы для \"{type}\" ParamSpec уже предоставлены", + "paramSpecArgsKwargsUsage": "Атрибуты \"args\" и \"kwargs\" ParamSpec должны одновременно присутствовать в сигнатуре функции", + "paramSpecArgsMissing": "Отсутствуют аргументы для \"{type}\" ParamSpec", + "paramSpecArgsUsage": "Атрибут \"args\" ParamSpec допустим только при использовании с параметром *args", + "paramSpecAssignedName": "ParamSpec необходимо присвоить переменной с именем \"{name}\"", + "paramSpecContext": "ParamSpec не допускается в этом контексте", + "paramSpecDefaultNotTuple": "Для значения ParamSpec по умолчанию ожидается многоточие, выражение tuple или ParamSpec", + "paramSpecFirstArg": "Ожидается 
имя ParamSpec в качестве первого аргумента", + "paramSpecKwargsUsage": "Атрибут \"kwargs\" ParamSpec допустим только при использовании с параметром **kwargs", + "paramSpecNotUsedByOuterScope": "ParamSpec \"{name}\" не имеет смысла в этом контексте", + "paramSpecUnknownArg": "ParamSpec не поддерживает более одного аргумента", + "paramSpecUnknownMember": "\"{name}\" не является известным атрибутом ParamSpec", + "paramSpecUnknownParam": "\"{name}\" является неизвестным параметром для ParamSpec", + "paramTypeCovariant": "Переменную ковариантного типа нельзя использовать в типе параметра", + "paramTypePartiallyUnknown": "Тип параметра \"{paramName}\" частично неизвестен", + "paramTypeUnknown": "Тип параметра \"{paramName}\" неизвестен", + "parenthesizedContextManagerIllegal": "Круглые скобки в операторе \"with\" можно использовать в Python версии не ниже 3.9", + "patternNeverMatches": "Шаблон никогда не будет совпадать для типа субъекта \"{type}\"", + "positionArgAfterNamedArg": "Позиционный аргумент не может стоять после аргументов типа \"ключевое слово\"", + "positionArgAfterUnpackedDictArg": "Позиционный аргумент не может стоять после распаковки аргумента ключевого слова", + "positionOnlyAfterArgs": "Разделитель чисто позиционных параметров после параметра \"*\" не разрешен", + "positionOnlyAfterKeywordOnly": "Параметр \"/\" должен располагаться перед параметром \"*\"", + "positionOnlyAfterNon": "Чисто позиционный параметр не разрешен после параметра, который не является чисто позиционным", + "positionOnlyFirstParam": "Разделитель чисто позиционных параметров в качестве первого параметра не разрешен", + "positionOnlyIncompatible": "Разделитель чисто позиционных параметров можно использовать в Python версии не ниже 3.8", + "privateImportFromPyTypedModule": "\"{name}\" не экспортируется из модуля \"{module}\"", + "privateUsedOutsideOfClass": "Элемент \"{name}\" является закрытым, но используется вне класса, в котором объявлен", + "privateUsedOutsideOfModule": "\"{name}\" является 
закрытым, но используется вне модуля, в котором объявлен", + "propertyOverridden": "\"{name}\" неправильно переопределяет property с таким же именем в классе \"{className}\"", + "propertyStaticMethod": "Статические методы не разрешены в методах getter, setter и deleter property", + "protectedUsedOutsideOfClass": "\"{name}\" защищено и используется вне класса, в котором оно объявлено", + "protocolBaseClass": "Класс Protocol \"{classType}\" не может быть производным от класса \"{baseType}\", отличного от Protocol", + "protocolBaseClassWithTypeArgs": "Аргументы типа не допускаются с классом Protocol при использовании синтаксиса параметра типа", + "protocolIllegal": "Ключевое слово \"Protocol\" можно использовать в Python версии не ниже 3.7", + "protocolNotAllowed": "Невозможно использовать \"Protocol\" в этом контексте", + "protocolTypeArgMustBeTypeParam": "Аргумент типа для параметра \"Protocol\" должен быть параметром типа", + "protocolUnsafeOverlap": "Класс небезопасно перекрывает \"{name}\" и может вызвать совпадение во время выполнения", + "protocolVarianceContravariant": "Переменная типа \"{variable}\", используемая в универсальном Protocol \"{class}\", должна быть контравариантной.", + "protocolVarianceCovariant": "Переменная типа \"{variable}\", используемая в универсальном Protocol \"{class}\", должна быть ковариантной", + "protocolVarianceInvariant": "Переменная типа \"{variable}\", используемая в универсальном Protocol \"{class}\", должна быть инвариантной", + "pyrightCommentInvalidDiagnosticBoolValue": "За директивой комментария Pyright должно следовать \"=\" и значение true или false", + "pyrightCommentInvalidDiagnosticSeverityValue": "За директивой комментария Pyright должно следовать \"=\" и одно из следующих значений: true, false, error, warning, information или none", + "pyrightCommentMissingDirective": "После комментария Pyright должна следовать директива (basic или strict) или правило диагностики", + "pyrightCommentNotOnOwnLine": "Комментарии 
Pyright, используемые для управления параметрами на уровне файлов, должны располагаться в отдельной строке", + "pyrightCommentUnknownDiagnosticRule": "Правило диагностики \"{rule}\" для комментария pyright неизвестно", + "pyrightCommentUnknownDiagnosticSeverityValue": "Значение \"{value}\" недопустимо для комментария pyright; ожидается одно из значений true, false, error, warning, information или none", + "pyrightCommentUnknownDirective": "\"{directive}\" — это неизвестная директива для комментария pyright; ожидается \"strict\", \"standard\" или \"basic\"", + "readOnlyArgCount": "Ожидается один аргумент типа после \"ReadOnly\"", + "readOnlyNotInTypedDict": "Использование \"ReadOnly\" в этом контексте не допускается", + "recursiveDefinition": "Не удалось определить тип \"{name}\", так как он ссылается на себя", + "relativeImportNotAllowed": "Операции импорта с относительным путем нельзя использовать с формой \"import .a\"; используйте вместо этого \"from . import a\"", + "requiredArgCount": "Ожидается один аргумент типа после \"Required\"", + "requiredNotInTypedDict": "Использование \"Required\" в этом контексте не допускается", + "returnInAsyncGenerator": "Оператор return со значением не допускается в генераторе async", + "returnInExceptionGroup": "Запрещено наличие \"return\" в блоке \"except*\"", + "returnMissing": "Функция с объявленным типом возвращаемого значения \"{returnType}\" должна возвращать значение во всех путях кода", + "returnOutsideFunction": "\"return\" можно использовать только внутри функции", + "returnTypeContravariant": "Переменная контравариантного типа не может использоваться в возвращаемом типе", + "returnTypeMismatch": "Тип \"{exprType}\" не может быть присвоен для возврата типа \"{returnType}\"", + "returnTypePartiallyUnknown": "Тип возвращаемого значения \"{returnType}\" частично неизвестен", + "returnTypeUnknown": "Тип возвращаемого значения неизвестен", + "revealLocalsArgs": "Не ожидаются аргументы для вызова \"reveal_locals\"", + 
"revealLocalsNone": "В этой области нет locals", + "revealTypeArgs": "Для вызова \"reveal_type\" ожидается один позиционный аргумент", + "revealTypeExpectedTextArg": "Аргумент \"expected_text\" для функции \"reveal_type\" должен быть значением литерала str", + "revealTypeExpectedTextMismatch": "Несоответствие текста в типе; ожидалось \"{expected}\", но получено \"{received}\"", + "revealTypeExpectedTypeMismatch": "Несоответствие типов; ожидается \"{expected}\", но получено \"{received}\"", + "selfTypeContext": "В этом контексте ключевое слово \"Self\" недопустимо", + "selfTypeMetaclass": "\"Self\" нельзя использовать в метаклассе (подкласс \"type\")", + "selfTypeWithTypedSelfOrCls": "Ключевое слово \"Self\" нельзя использовать в функции с параметром \"self\" или \"cls\" с заметкой типа, отличной от \"Self\".", + "sentinelBadName": "Первый аргумент для Sentinel должен быть строковым литералом", + "sentinelNameMismatch": "Sentinel должен быть присвоен переменной с таким же именем", + "sentinelParamCount": "Sentinel требует один позиционный аргумент", + "setterGetterTypeMismatch": "Тип значения setter property нельзя присвоить типу возвращаемого значения getter", + "singleOverload": "Элемент \"{name}\" помечен как перегруженный, но дополнительные перегрузки отсутствуют", + "slotsAttributeError": "\"{name}\" не указано в __slots__", + "slotsClassVarConflict": "\"{name}\" конфликтует с переменной экземпляра, объявленной в __slots__", + "starPatternInAsPattern": "Шаблон \"звездочка\" не может использоваться с целевым объектом \"as\"", + "starPatternInOrPattern": "Шаблон \"звездочка\" не может быть аргументом операции OR в других шаблонах", + "starStarWildcardNotAllowed": "** нельзя использовать с символом подстановки \"_\"", + "staticClsSelfParam": "Статические методы не принимают в качестве параметра \"self\" и \"cls\"", + "stringNonAsciiBytes": "Символы, отличные от ASCII, не допускаются в строковом литерале байтов", + "stringNotSubscriptable": "От строкового 
выражения нельзя взять подстроку в выражении типа; заключите все выражение в кавычки", + "stringUnsupportedEscape": "Неподдерживаемая escape-последовательность в строковом литерале", + "stringUnterminated": "В строковом литерале отсутствует символ конца строки", + "stubFileMissing": "Файл stub для \"{importName}\" не найден", + "stubUsesGetAttr": "Файл stub типа неполон; \"__getattr__\" скрывает ошибки типа для модуля", + "sublistParamsIncompatible": "Параметры sublist списка не поддерживаются в Python 3.x", + "superCallArgCount": "Ожидается не более двух аргументов для вызова \"super\"", + "superCallFirstArg": "В качестве первого аргумента для вызова \"super\" ожидался тип класса, но получен \"{type}\"", + "superCallSecondArg": "Второй аргумент для вызова \"super\" должен быть объектом или классом, производным от \"{type}\"", + "superCallZeroArgForm": "Форма вызова \"super\" с нулевым аргументом допустима только внутри метода", + "superCallZeroArgFormStaticMethod": "Форма вызова \"super\" с нулевым аргументом недопустима в статическом методе", + "symbolIsPossiblyUnbound": "Элемент \"{name}\", возможно, не привязан", + "symbolIsUnbound": "Элемент \"{name}\" не привязан", + "symbolIsUndefined": "\"{name}\" не определено", + "symbolOverridden": "\"{name}\" переопределяет символ с тем же именем в классе \"{className}\"", + "templateStringBytes": "Шаблонные строковые литералы (t-строки) не могут быть двоичными", + "templateStringIllegal": "Шаблонные строковые литералы (t-строки) требуют Python 3.14 или более новой версии", + "templateStringUnicode": "Шаблонные строковые литералы (t-строки) не могут быть в формате Юникод", + "ternaryNotAllowed": "Тернарное выражение не разрешено в выражении типа", + "totalOrderingMissingMethod": "Чтобы можно было использовать total_ordering, в классе должен быть определен один из операторов: \"__lt__\", \"__le__\", \"__gt__\" или \"__ge__\"", + "trailingCommaInFromImport": "Конечные запятые можно использовать только при заключении в 
скобки", + "tryWithoutExcept": "В операторе try должно быть хотя бы одно предложение except или finally", + "tupleAssignmentMismatch": "Выражение с типом \"{type}\" не может быть назначено целевому tuple", + "tupleInAnnotation": "Выражение tuple не разрешено в выражении типа", + "tupleIndexOutOfRange": "Индекс {index} выходит за пределы допустимого диапазона для типа {type}", + "typeAliasIllegalExpressionForm": "Недопустимая форма выражения для определения псевдонима типа", + "typeAliasIsRecursiveDirect": "Псевдоним типа \"{name}\" не может использовать себя в своем определении", + "typeAliasNotInModuleOrClass": "TypeAlias можно определить только внутри области модуля или класса", + "typeAliasRedeclared": "\"{name}\" объявлен как TypeAlias и может быть присвоен только один раз", + "typeAliasStatementBadScope": "Утверждение type можно использовать только внутри области модуля или класса", + "typeAliasStatementIllegal": "Оператор псевдонима типа можно использовать в Python версии не ниже 3.12", + "typeAliasTypeBadScope": "Псевдоним типа можно определить только внутри области модуля или класса", + "typeAliasTypeBaseClass": "Псевдоним типа, определяемый в операторе \"type\", не может использоваться в качестве базового класса.", + "typeAliasTypeMustBeAssigned": "TypeAliasType должен быть назначен переменной с тем же именем, что и псевдоним типа", + "typeAliasTypeNameArg": "Первый аргумент TypeAliasType должен быть строковым литералом, представляющим имя псевдонима типа", + "typeAliasTypeNameMismatch": "Имя псевдонима типа должно соответствовать имени переменной, которой оно назначено", + "typeAliasTypeParamInvalid": "Список параметров типа должен быть tuple, содержащим только TypeVar, TypeVarTuple или ParamSpec", + "typeAnnotationCall": "Выражение вызова не разрешено в выражении типа", + "typeAnnotationVariable": "Переменная не разрешена в выражении типа", + "typeAnnotationWithCallable": "Аргумент типа для \"type\" должен быть классом. 
Вызываемые объекты не поддерживаются", + "typeArgListExpected": "Ожидается ParamSpec, многоточие или list типов", + "typeArgListNotAllowed": "Выражение list не разрешено для аргумента этого типа", + "typeArgsExpectingNone": "Для класса \"{name}\" не ожидается аргументов типа", + "typeArgsMismatchOne": "Ожидается один аргумент типа, но получено {received}", + "typeArgsMissingForAlias": "Для псевдонима универсального типа \"{name}\" ожидаются аргументы типа", + "typeArgsMissingForClass": "Ожидаются аргументы типа для универсального класса \"{name}\"", + "typeArgsTooFew": "Указано слишком мало аргументов типа для \"{name}\"; ожидалось {expected}, но получено {received}", + "typeArgsTooMany": "Для \"{name}\" предоставлено слишком много аргументов типа. Ожидается {expected}, но получено {received}", + "typeAssignmentMismatch": "Тип \"{sourceType}\" не может быть назначен объявленному типу \"{destType}\"", + "typeAssignmentMismatchWildcard": "Символ импорта \"{name}\" имеет тип \"{sourceType}\", который не может быть назначен объявленному типу \"{destType}\"", + "typeCallNotAllowed": "Вызов type() не разрешен в выражении типа", + "typeCheckOnly": "\"{name}\" помечено как @type_check_only и может использоваться только в заметках с типом", + "typeCommentDeprecated": "Комментарии type больше не рекомендуются к использованию; вместо них используйте заметки type", + "typeExpectedClass": "Ожидался класс, но получен \"{type}\"", + "typeFormArgs": "\"TypeForm\" принимает один позиционный аргумент", + "typeGuardArgCount": "После \"TypeGuard\" или \"TypeIs\" ожидается один аргумент типа", + "typeGuardParamCount": "Определяемые пользователем функции и методы защиты типов должны иметь по крайней мере один входной параметр", + "typeIsReturnType": "Тип возвращаемого значения TypeIs (\"{returnType}\") не соответствует типу параметра значения (\"{type}\")", + "typeNotAwaitable": "\"{type}\" не является awaitable", + "typeNotIntantiable": "Не удается создать экземпляр \"{type}\"", + 
"typeNotIterable": "\"{type}\" не является итерируемым", + "typeNotSpecializable": "Не удалось специализировать тип \"{type}\"", + "typeNotSubscriptable": "Объект типа \"{type}\" не поддерживает операцию получения подстроки", + "typeNotSupportBinaryOperator": "Оператор \"{operator}\" не поддерживается для типов \"{leftType}\" и \"{rightType}\"", + "typeNotSupportBinaryOperatorBidirectional": "Оператор \"{operator}\" не поддерживается для типов \"{leftType}\" и \"{rightType}\", если ожидаемый тип является \"{expectedType}\"", + "typeNotSupportUnaryOperator": "Оператор \"{operator}\" не поддерживается для типа \"{type}\"", + "typeNotSupportUnaryOperatorBidirectional": "Оператор \"{operator}\" не поддерживается для типа \"{type}\", если ожидается тип \"{expectedType}\"", + "typeNotUsableWith": "Объект типа \"{type}\" нельзя использовать с ключевым словом \"with\", так как он неправильно реализует метод {method}", + "typeNotUsableWithAsync": "Объект типа \"{type}\" нельзя использовать с ключевым словом \"async with\", так как он неправильно реализует метод {method}", + "typeParameterBoundNotAllowed": "Привязку или ограничение нельзя использовать с параметром типа variadic или ParamSpec", + "typeParameterConstraintTuple": "Ограничение параметра типа должно быть кортежем двух или более типов", + "typeParameterExistingTypeParameter": "Параметр типа \"{name}\" уже используется", + "typeParameterNotDeclared": "Параметр типа \"{name}\" не включен в список параметров типа для контейнера \"{container}\"", + "typeParametersMissing": "Необходимо указать хотя бы один параметр типа", + "typePartiallyUnknown": "Тип \"{name}\" частично неизвестен", + "typeUnknown": "Тип \"{name}\" неизвестен", + "typeVarAssignedName": "TypeVar необходимо присвоить переменной с именем \"{name}\"", + "typeVarAssignmentMismatch": "Тип \"{type}\" не может быть присвоен переменной типа \"{name}\"", + "typeVarBoundAndConstrained": "TypeVar не может быть одновременно привязанным и ограниченным", + 
"typeVarBoundGeneric": "Тип привязки TypeVar не может быть универсальным", + "typeVarConstraintGeneric": "Тип ограничения TypeVar не может быть универсальным", + "typeVarDefaultBoundMismatch": "Тип по умолчанию TypeVar должен быть подтипом привязанного типа", + "typeVarDefaultConstraintMismatch": "Тип по умолчанию TypeVar должен быть одним из ограниченных типов", + "typeVarDefaultIllegal": "Для типов переменных по умолчанию требуется версия Python 3.13 или более новая", + "typeVarDefaultInvalidTypeVar": "Параметр типа \"{name}\" имеет тип по умолчанию, который ссылается на одну или несколько переменных типа, находящихся вне области", + "typeVarFirstArg": "В качестве первого аргумента ожидалось имя TypeVar", + "typeVarInvalidForMemberVariable": "Тип атрибута не может использовать переменную \"{name}\" в локальном методе", + "typeVarNoMember": "У TypeVar \"{type}\" нет атрибута \"{name}\"", + "typeVarNotSubscriptable": "Невозможно взять подстроку от \"{type}\" TypeVar", + "typeVarNotUsedByOuterScope": "Переменная типа \"{name}\" не имеет смысла в этом контексте", + "typeVarPossiblyUnsolvable": "Переменная типа \"{name}\" может остаться нерешенной, если вызывающая сторона не указывает аргумент для параметра \"{param}\"", + "typeVarSingleConstraint": "TypeVar должен иметь не менее двух ограниченных типов", + "typeVarTupleConstraints": "TypeVarTuple не может использовать ограничения значений", + "typeVarTupleContext": "TypeVarTuple не допускается в этом контексте", + "typeVarTupleDefaultNotUnpacked": "Тип по умолчанию TypeVarTuple должен быть распакованным tuple или TypeVarTuple", + "typeVarTupleMustBeUnpacked": "Для значения TypeVarTuple требуется оператор распаковки", + "typeVarTupleUnknownParam": "\"{name}\" является неизвестным параметром для TypeVarTuple", + "typeVarUnknownParam": "\"{name}\" является неизвестным параметром для TypeVar", + "typeVarUsedByOuterScope": "TypeVar \"{name}\" уже используется внешней областью", + "typeVarUsedOnlyOnce": "TypeVar \"{name}\" 
появляется в сигнатуре универсальной функции только один раз", + "typeVarVariance": "TypeVar не может быть одновременно ковариантным и контравариантным", + "typeVarWithDefaultFollowsVariadic": "TypeVar \"{typeVarName}\" имеет значение по умолчанию и не может следовать за TypeVarTuple \"{variadicName}\"", + "typeVarWithoutDefault": "\"{name}\" не может появиться после \"{other}\" в списке параметров типа, так как у него нет типа по умолчанию", + "typeVarsNotInGenericOrProtocol": "Generic[] и Protocol[] должны включать все переменные типа", + "typedDictAccess": "Не удалось получить доступ к элементу в TypedDict", + "typedDictAssignedName": "TypedDict необходимо присвоить переменной с именем \"{name}\"", + "typedDictBadVar": "Классы TypedDict могут содержать только заметки с типом", + "typedDictBaseClass": "Все базовые классы для классов TypedDict также должны быть классами TypedDict", + "typedDictBoolParam": "От параметра \"{name}\" ожидается значение True или False", + "typedDictClosedExtras": "Базовый класс \"{name}\" является TypedDict, который ограничивает тип дополнительных элементов типом \"{type}\"", + "typedDictClosedFalseNonOpenBase": "Базовый класс \"{name}\" не является открытым TypedDict; значение closed=False не разрешено", + "typedDictClosedNoExtras": "Базовый класс \"{name}\" представляет собой closed TypedDict; дополнительные элементы не разрешены", + "typedDictDelete": "Не удалось удалить элемент в TypedDict", + "typedDictEmptyName": "Имена в TypedDict не могут быть пустыми", + "typedDictEntryName": "Для имени записи словаря ожидается строковый литерал", + "typedDictEntryUnique": "Имена в словаре должны быть уникальными", + "typedDictExtraArgs": "Дополнительные аргументы TypedDict не поддерживаются", + "typedDictExtraItemsClosed": "TypedDict может использовать \"closed\" или \"extra_items\", но не оба", + "typedDictFieldNotRequiredRedefinition": "Элемент TypedDict \"{name}\" нельзя переопределить как NotRequired", + 
"typedDictFieldReadOnlyRedefinition": "Элемент TypedDict \"{name}\" нельзя переопределить как доступный ReadOnly", + "typedDictFieldRequiredRedefinition": "Элемент TypedDict \"{name}\" нельзя переопределить как Required", + "typedDictFirstArg": "В качестве первого аргумента ожидается имя класса TypedDict", + "typedDictInClassPattern": "Класс TypedDict не разрешен в шаблоне класса", + "typedDictInitsubclassParameter": "TypedDict не поддерживает параметр __init_subclass__ \"{name}\"", + "typedDictNotAllowed": "Невозможно использовать \"TypedDict\" в этом контексте", + "typedDictSecondArgDict": "В качестве второго параметра ожидается dict или ключевое слово", + "typedDictSecondArgDictEntry": "Ожидается простая запись словаря", + "typedDictSet": "Не удалось назначить элемент в TypedDict", + "unaccessedClass": "Нет доступа к классу \"{name}\"", + "unaccessedFunction": "Доступ к функции \"{name}\" не производится", + "unaccessedImport": "Доступ к импорту \"{name}\" не производится", + "unaccessedSymbol": "Доступ к \"{name}\" не осуществляется", + "unaccessedVariable": "Доступ к переменной \"{name}\" не производится", + "unannotatedFunctionSkipped": "Анализ функции \"{name}\" пропущен, так как она не аннотирована.", + "unaryOperationNotAllowed": "Унарный оператор нельзя использовать в выражении типа", + "unexpectedAsyncToken": "После \"async\" ожидается \"def\", \"with\" или \"for\"", + "unexpectedEof": "Непредвиденный EOF", + "unexpectedExprToken": "Непредвиденный токен в конце выражения", + "unexpectedIndent": "Непредвиденный отступ", + "unexpectedUnindent": "Отступ не ожидается", + "unhashableDictKey": "Ключ словаря должен быть хэшируемым", + "unhashableSetEntry": "Запись set должна быть хэшируемой", + "uninitializedAbstractVariables": "Переменные, определенные в абстрактном базовом классе, не инициализированы в final классе \"{classType}\"", + "uninitializedInstanceVariable": "Переменная экземпляра \"{name}\" не инициализирована ни в тексте класса, ни в методе 
__init__", + "unionForwardReferenceNotAllowed": "Синтаксис Union не может использоваться со строковым операндом; заключите всё выражение в кавычки", + "unionSyntaxIllegal": "Альтернативный синтаксис объединений можно использовать в версии Python не ниже 3.10", + "unionTypeArgCount": "Для Union требуется два или более аргумента типа", + "unionUnpackedTuple": "Union не может включать распакованный tuple", + "unionUnpackedTypeVarTuple": "Union не может включать распакованный TypeVarTuple", + "unnecessaryCast": "Ненужный вызов \"cast\"; тип уже является \"{type}\"", + "unnecessaryIsInstanceAlways": "Ненужный вызов isinstance; \"{testType}\" всегда является экземпляром \"{classType}\"", + "unnecessaryIsInstanceNever": "Ненужный вызов isinstance; \"{testType}\" никогда не является экземпляром \"{classType}\"", + "unnecessaryIsSubclassAlways": "Ненужный вызов issubclass. \"{testType}\" всегда является подклассом \"{classType}\"", + "unnecessaryIsSubclassNever": "Ненужный вызов issubclass; \"{testType}\" никогда не является подклассом \"{classType}\"", + "unnecessaryPyrightIgnore": "Ненужный комментарий \"# pyright: ignore\"", + "unnecessaryPyrightIgnoreRule": "Ненужное правило \"# pyright: ignore\": \"{name}\"", + "unnecessaryTypeIgnore": "Ненужный комментарий \"# type: ignore\"", + "unpackArgCount": "Ожидается один аргумент типа после \"Unpack\"", + "unpackExpectedTypeVarTuple": "В качестве аргумента типа для Unpack ожидается элемент TypeVarTuple или tuple", + "unpackExpectedTypedDict": "Ожидается аргумент типа TypedDict для Unpack", + "unpackIllegalInComprehension": "Операция распаковки в понимании не разрешена", + "unpackInAnnotation": "Оператор распаковки нельзя использовать в выражении типа", + "unpackInDict": "Операция распаковки в словарях не разрешена", + "unpackInSet": "Оператор распаковки не разрешен в set", + "unpackNotAllowed": "Unpack не допускается в этом контексте", + "unpackOperatorNotAllowed": "Операция распаковки не допускается в этом контексте", + 
"unpackTuplesIllegal": "Операцию распаковки в кортежах можно использовать в Python версии не ниже 3.8", + "unpackedArgInTypeArgument": "В этом контексте нельзя использовать распакованные аргументы", + "unpackedArgWithVariadicParam": "Невозможно использовать распакованный аргумент для параметра TypeVarTuple", + "unpackedDictArgumentNotMapping": "Выражение аргумента после ** должно быть сопоставлением с типом ключа \"str\".", + "unpackedDictSubscriptIllegal": "Оператор распаковки словаря не допускается внутри операции взятия подстроки", + "unpackedSubscriptIllegal": "Оператор распаковки в операции взятия подстроки можно использовать в Python версии не ниже 3.11", + "unpackedTypeVarTupleExpected": "Ожидается распакованный TypeVarTuple; используйте Unpack[{name1}] или *{name2}", + "unpackedTypedDictArgument": "Не удалось сопоставить распакованный аргумент TypedDict с параметрами", + "unreachableCodeCondition": "Анализ кода не выполняется, так как условие статически оценивается как ЛОЖЬ", + "unreachableCodeStructure": "Код структурно недостижим", + "unreachableCodeType": "Анализ типа показывает, что код недоступен", + "unreachableExcept": "Предложение Except недоступно, так как исключение уже обработано", + "unsupportedDunderAllOperation": "Операция на \"__all__\" не поддерживается, поэтому список экспортируемых символов может быть неправильным", + "unusedCallResult": "Результат выражения вызова принадлежит к типу \"{type}\" и не используется. Назначьте переменной \"_\", если это сделано намеренно", + "unusedCoroutine": "Результат вызова async функции не используется; добавьте ключевое слово \"await\" или присвойте результат переменной", + "unusedExpression": "Значение выражения не используется", + "varAnnotationIllegal": "Заметки type для переменных можно использовать в Python версии не ниже 3.6. 
Для совместимости с более ранними версиями используйте комментарий к типу.", + "variableFinalOverride": "Переменная \"{name}\" помечена как Final и переопределяет non-Final переменную с тем же именем в классе \"{className}\"", + "variadicTypeArgsTooMany": "Список аргументов типа может содержать не более одного распакованного элемента TypeVarTuple или tuple", + "variadicTypeParamTooManyAlias": "Псевдоним типа может иметь не более одного параметра типа TypeVarTuple, но получил несколько ({names})", + "variadicTypeParamTooManyClass": "Универсальный класс может иметь не более одного параметра типа TypeVarTuple, но получил несколько ({names})", + "walrusIllegal": "Оператор \":=\" может использоваться в Python версии не ниже 3.8", + "walrusNotAllowed": "Оператор \":=\" не допускается в этом контексте без окружающих круглых скобок", + "wildcardInFunction": "import с подстановочными знаками в классе или функции запрещен", + "wildcardLibraryImport": "import подстановочных знаков из библиотеки запрещен", + "wildcardPatternTypePartiallyUnknown": "Тип, захваченный шаблоном подстановки, частично неизвестен", + "wildcardPatternTypeUnknown": "Тип, захваченный шаблоном подстановки, неизвестен", + "yieldFromIllegal": "\"yield from\" можно использовать в Python версии не ниже 3.3", + "yieldFromOutsideAsync": "\"yield from\" не допускается в async функции", + "yieldOutsideFunction": "\"yield\" не допускается за пределами функции или лямбда-выражения", + "yieldWithinComprehension": "\"yield\" не допускается внутри понимания", + "zeroCaseStatementsFound": "Оператор match должен включать по крайней мере один оператор case", + "zeroLengthTupleNotAllowed": "tuple нулевой длины не допускается в этом контексте" + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "Специальную форму \"Annotated\" нельзя использовать с проверками экземпляра и класса", + "argParam": "Аргумент соответствует параметру \"{paramName}\"", + "argParamFunction": "Аргумент соответствует параметру \"{paramName}\" 
функции \"{functionName}\"", + "argsParamMissing": "У параметра \"*{paramName}\" нет соответствующего параметра", + "argsPositionOnly": "Несоответствие параметров только положения. Ожидается {expected}, но получено {received}", + "argumentType": "Аргумент принадлежит к типу \"{type}\"", + "argumentTypes": "Типы аргументов: ({types})", + "assignToNone": "Для типа не может быть назначено значение \"None\"", + "asyncHelp": "Вы имели в виду \"async with\"?", + "baseClassIncompatible": "Базовый класс \"{baseClass}\" несовместим с типом \"{type}\"", + "baseClassIncompatibleSubclass": "Базовый класс \"{baseClass}\" является производным от \"{subclass}\", который несовместим с типом \"{type}\"", + "baseClassOverriddenType": "Базовый класс \"{baseClass}\" предоставляет тип \"{type}\", который переопределен", + "baseClassOverridesType": "Базовый класс \"{baseClass}\" переопределяет тип \"{type}\"", + "bytesTypePromotions": "Установите для параметра disableBytesTypePromotions значение false, чтобы включить повышение типа для \"bytearray\" и \"memoryview\"", + "conditionalRequiresBool": "Метод __bool__ для типа \"{operandType}\" возвращает тип \"{boolReturnType}\", а не \"bool\"", + "dataClassFieldLocation": "Объявление поля", + "dataClassFrozen": "Элемент \"{name}\" зафиксирован", + "dataProtocolUnsupported": "\"{name}\" является протоколом данных", + "descriptorAccessBindingFailed": "Не удалось привязать метод \"{name}\" для класса дескриптора \"{className}\"", + "descriptorAccessCallFailed": "Не удалось вызвать метод \"{name}\" для класса дескриптора \"{className}\"", + "finalMethod": "Final метод", + "functionParamDefaultMissing": "В параметре \"{name}\" отсутствует аргумент по умолчанию.", + "functionParamName": "Несоответствие имени параметра: \"{destName}\" и \"{srcName}\"", + "functionParamPositionOnly": "Несоответствие исключительно позиционных параметров; параметр \"{name}\" не является исключительно позиционным", + "functionReturnTypeMismatch": "Тип возвращаемого 
значения функции \"{sourceType}\" несовместим с типом \"{destType}\"", + "functionTooFewParams": "Функция принимает слишком мало позиционных параметров; ожидалось {expected}, но получено {received}", + "functionTooManyParams": "Функция принимает слишком много позиционных параметров; ожидалось {expected}, но получено {received}", + "genericClassNotAllowed": "Универсальный тип с аргументами типа запрещен для проверок экземпляров или классов", + "incompatibleDeleter": "Метод deleter property является несовместимым", + "incompatibleGetter": "Несовместимый метод getter property", + "incompatibleSetter": "Метод setter property является несовместимым", + "initMethodLocation": "Метод __init__ определен в классе \"{type}\"", + "initMethodSignature": "Сигнатура __init__ — \"{type}\"", + "initSubclassLocation": "Метод __init_subclass__ определен в классе \"{name}\"", + "invariantSuggestionDict": "Рассмотрите возможность перехода с \"dict\" на \"Mapping\", являющийся ковариантным по типу значения.", + "invariantSuggestionList": "Рассмотрите возможность перехода с \"list\" на \"Sequence\", являющийся ковариантным.", + "invariantSuggestionSet": "Рассмотрите возможность переключения с \"set\" на \"Container\", который является ковариантным", + "isinstanceClassNotSupported": "\"{type}\" не поддерживается для проверок экземпляров и классов", + "keyNotRequired": "\"{name}\" не является обязательным ключом в \"{type}\", поэтому доступ может вызвать исключение во время выполнения", + "keyReadOnly": "\"{name}\" является ключом только для чтения в \"{type}\"", + "keyRequiredDeleted": "\"{name}\" является обязательным ключом и не подлежит удалению", + "keyUndefined": "\"{name}\" не является определенным ключом в \"{type}\"", + "kwargsParamMissing": "У параметра \"**{paramName}\" нет соответствующего параметра", + "listAssignmentMismatch": "Тип \"{type}\" несовместим с целевым списком", + "literalAssignmentMismatch": "\"{sourceType}\" невозможно назначить тип \"{destType}\"", + 
"literalNotAllowed": "Специальную форму \"Literal\" нельзя использовать с проверками экземпляра и класса", + "matchIsNotExhaustiveHint": "Если не предполагается исчерпывающая обработка, добавьте \"case _: pass\"", + "matchIsNotExhaustiveType": "Тип \"{type}\" не обрабатывается", + "memberAssignment": "Выражение типа \"{type}\" не может быть назначено атрибуту \"{name}\" класса \"{classType}\"", + "memberIsAbstract": "Отсутствует реализация \"{type}.{name}\".", + "memberIsAbstractMore": "и еще {count}...", + "memberIsClassVarInProtocol": "\"{name}\" определено как класс ClassVar в протоколе", + "memberIsInitVar": "\"{name}\" является полем только для init-only", + "memberIsInvariant": "Элемент \"{name}\" инвариантен, поскольку помечен как mutable", + "memberIsNotClassVarInClass": "Необходимо определить \"{name}\" как ClassVar для совместимости с протоколом.", + "memberIsNotClassVarInProtocol": "\"{name}\" не определено как класс ClassVar в протоколе", + "memberIsNotReadOnlyInProtocol": "\"{name}\" не является элементом только для чтения в протоколе", + "memberIsReadOnlyInProtocol": "\"{name}\" в протоколе только для чтения", + "memberIsWritableInProtocol": "\"{name}\" доступно для записи в протоколе", + "memberSetClassVar": "Атрибут \"{name}\" не может быть назначен через экземпляр класса, так как это ClassVar", + "memberTypeMismatch": "\"{name}\" является несовместимым типом", + "memberUnknown": "Атрибут \"{name}\" неизвестен", + "metaclassConflict": "Метакласс \"{metaclass1}\" конфликтует с \"{metaclass2}\"", + "missingDeleter": "Отсутствует метод deleter property", + "missingGetter": "Отсутствует метод getter property", + "missingSetter": "Отсутствует метод setter property", + "namedParamMissingInDest": "Дополнительный параметр \"{name}\"", + "namedParamMissingInSource": "Отсутствует параметр ключевого слова \"{name}\".", + "namedParamTypeMismatch": "Параметр ключевого слова \"{name}\" типа \"{sourceType}\" несовместим с типом \"{destType}\"", + 
"namedTupleNotAllowed": "NamedTuple не может использоваться для проверок экземпляров или классов", + "newMethodLocation": "Метод __new__ определен в классе \"{type}\"", + "newMethodSignature": "Сигнатура метода __new__ требует \"{type}\"", + "newTypeClassNotAllowed": "Тип, созданный с помощью NewType, нельзя использовать для проверки экземпляра и класса", + "noOverloadAssignable": "Нет перегруженной функции, соответствующей типу \"{type}\"", + "noneNotAllowed": "None невозможно использовать для проверок экземпляров или классов", + "orPatternMissingName": "Отсутствуют имена: {name}", + "overloadIndex": "Наилучшее совпадение: {index} перегрузки", + "overloadNotAssignable": "Одна или несколько перегрузок \"{name}\" не подлежат присвоению", + "overloadSignature": "Здесь определена сигнатура перегрузки", + "overriddenMethod": "Переопределенный метод", + "overriddenSymbol": "Переопределенный символ", + "overrideInvariantMismatch": "Тип переопределения \"{overrideType}\" не совпадает с базовым типом \"{baseType}\"", + "overrideIsInvariant": "Переменная изменяема, поэтому ее тип является инвариантным", + "overrideNoOverloadMatches": "В переопределении нет сигнатуры перегрузки, совместимой с базовым методом", + "overrideNotClassMethod": "Базовый метод объявлен как classmethod, а его переопределение — нет", + "overrideNotInstanceMethod": "Базовый метод объявлен как метод экземпляра, а его переопределение — нет", + "overrideNotStaticMethod": "Базовый метод объявлен как staticmethod, а его переопределение — нет", + "overrideOverloadNoMatch": "Переопределение не обрабатывает все перегрузки базового метода", + "overrideOverloadOrder": "Перегрузки в методе переопределения должны располагаться в том же порядке, что и в базовом методе", + "overrideParamKeywordNoDefault": "Несоответствие параметра ключевого слова \"{name}\": базовый параметр содержит значение аргумента по умолчанию, параметр переопределения — нет", + "overrideParamKeywordType": "Несоответствие типа параметра 
ключевого слова \"{name}\": базовый параметр имеет тип \"{baseType}\", параметр переопределения имеет тип \"{overrideType}\"", + "overrideParamName": "Несоответствие имени параметра {index}: базовый параметр называется \"{baseName}\", параметр переопределения называется \"{overrideName}\"", + "overrideParamNameExtra": "Параметр \"{name}\" отсутствует в базовом классе", + "overrideParamNameMissing": "Параметр \"{name}\" отсутствует в переопределении", + "overrideParamNamePositionOnly": "Несоответствие параметра {index}: базовый параметр \"{baseName}\" является параметром ключевого слова, параметр переопределения предназначен только для позиции", + "overrideParamNoDefault": "Несоответствие параметра {index}: базовый параметр содержит значение аргумента по умолчанию, параметр переопределения — нет", + "overrideParamType": "Несоответствие типа параметра {index}: базовый параметр имеет тип \"{baseType}\", параметр переопределения имеет тип \"{overrideType}\"", + "overridePositionalParamCount": "Несоответствие позиционного подсчета параметров. 
Базовый метод содержит {baseCount}, но переопределение содержит {overrideCount}", + "overrideReturnType": "Несоответствие типа возвращаемого значения: базовый метод возвращает тип \"{baseType}\", а переопределение — \"{overrideType}\"", + "overrideType": "Базовый класс определяет тип как \"{type}\"", + "paramAssignment": "Параметр {index}: типа \"{sourceType}\" несовместим с типом \"{destType}\"", + "paramSpecMissingInOverride": "В методе переопределения отсутствуют параметры ParamSpec.", + "paramType": "Параметр принадлежит к типу \"{paramType}\"", + "privateImportFromPyTypedSource": "Вместо этого используйте импорт из \"{module}\"", + "propertyAccessFromProtocolClass": "Свойство, определенное в классе протокола, не может быть доступно как переменная класса.", + "propertyMethodIncompatible": "Метод property \"{name}\" несовместим", + "propertyMethodMissing": "Метод property \"{name}\" отсутствует в переопределении", + "propertyMissingDeleter": "Для property \"{name}\" не определен метод deleter", + "propertyMissingSetter": "Для property \"{name}\" не определен метод setter", + "protocolIncompatible": "\"{sourceType}\" несовместим с протоколом \"{destType}\"", + "protocolMemberMissing": "\"{name}\" отсутствует.", + "protocolRequiresRuntimeCheckable": "Класс Protocol должен быть @runtime_checkable, чтобы его можно было использовать при проверках экземпляров и классов", + "protocolSourceIsNotConcrete": "\"{sourceType}\" не является конкретным типом класса и не может быть присвоен типу \"{destType}\"", + "protocolUnsafeOverlap": "Атрибуты \"{name}\" используют те же имена, что и протокол", + "pyrightCommentIgnoreTip": "Для подавления диагностики в одной строке используйте конструкцию \"# pyright: ignore[]\"", + "readOnlyAttribute": "Атрибут \"{name}\" доступен только для чтения", + "seeClassDeclaration": "См. объявление класса", + "seeDeclaration": "См. объявление", + "seeFunctionDeclaration": "См. объявление функции", + "seeMethodDeclaration": "См. 
объявление метода", + "seeParameterDeclaration": "Просмотреть объявление параметра", + "seeTypeAliasDeclaration": "Просмотреть объявление псевдонима типа", + "seeVariableDeclaration": "Просмотреть объявление переменной", + "tupleAssignmentMismatch": "Тип \"{type}\" несовместим с целевым tuple", + "tupleEntryTypeMismatch": "Запись tuple {entry} имеет неверный тип", + "tupleSizeIndeterminateSrc": "Несоответствие размеров tuple: ожидается \"{expected}\", но получено неопределенное значение", + "tupleSizeIndeterminateSrcDest": "Несоответствие размеров tuple: ожидается {expected} или больше, но получено неопределенное значение", + "tupleSizeMismatch": "Несоответствие размеров tuple: ожидается \"{expected}\", но получено \"{received}\"", + "tupleSizeMismatchIndeterminateDest": "Несоответствие размеров tuple: ожидается {expected} или больше, но получено {received}", + "typeAliasInstanceCheck": "Псевдоним типа, создаваемый оператором \"type\", не может использоваться с проверками экземпляра и класса.", + "typeAssignmentMismatch": "\"{sourceType}\" типа невозможно назначить тип \"{destType}\"", + "typeBound": "Тип \"{sourceType}\" не может быть назначен верхней границе \"{destType}\" для переменной типа \"{name}\"", + "typeConstrainedTypeVar": "Тип \"{type}\" не может быть назначен переменной ограниченного типа \"{name}\"", + "typeIncompatible": "\"{sourceType}\" невозможно назначить \"{destType}\"", + "typeNotClass": "\"{type}\" не является классом.", + "typeNotStringLiteral": "\"{type}\" не является строковым литералом", + "typeOfSymbol": "Тип \"{name}\" является \"{type}\"", + "typeParamSpec": "Тип \"{type}\" несовместим с ParamSpec \"{name}\"", + "typeUnsupported": "Тип \"{type}\" не поддерживается", + "typeVarDefaultOutOfScope": "Переменная типа \"{name}\" лежит за пределами области", + "typeVarIsContravariant": "Параметр типа \"{name}\" является контравариантным, но \"{sourceType}\" не является супертипом \"{destType}\"", + "typeVarIsCovariant": "Параметр типа 
\"{name}\" является ковариантным, но \"{sourceType}\" не является подтипом \"{destType}\"", + "typeVarIsInvariant": "Параметр типа \"{name}\" является инвариантным, но \"{sourceType}\" не совпадает с \"{destType}\"", + "typeVarNotAllowed": "TypeVar не допускается для проверок экземпляров или классов", + "typeVarTupleRequiresKnownLength": "TypeVarTuple не может граничить с tuple неизвестной длины", + "typeVarUnnecessarySuggestion": "Вместо этого используйте {type}", + "typeVarUnsolvableRemedy": "Укажите перегрузку, которая указывает тип возвращаемого значения, если аргумент не передается", + "typeVarsMissing": "Отсутствуют переменные типа: {names}", + "typedDictBaseClass": "Класс \"{type}\" не является TypedDict", + "typedDictClassNotAllowed": "Класс TypedDict не допускается для проверок экземпляров или классов", + "typedDictClosedExtraNotAllowed": "Не удается добавить элемент \"{name}\"", + "typedDictClosedExtraTypeMismatch": "Не удается добавить элемент \"{name}\" типа \"{type}\"", + "typedDictClosedFieldNotReadOnly": "Не удается добавить элемент \"{name}\", так как он должен быть ReadOnly", + "typedDictClosedFieldNotRequired": "Не удается добавить элемент \"{name}\", так как он должен быть помечен как NotRequired", + "typedDictExtraFieldNotAllowed": "Элемент \"{name}\" отсутствует в типе \"{type}\"", + "typedDictExtraFieldTypeMismatch": "Тип \"{name}\" несовместим с типом \"extra_items\" в \"{type}\"", + "typedDictFieldMissing": "\"{name}\" отсутствует в \"{type}\"", + "typedDictFieldNotReadOnly": "\"{name}\" не является элементом \"{type}\" только для чтения", + "typedDictFieldNotRequired": "\"{name}\" не является обязательным в \"{type}\"", + "typedDictFieldRequired": "\"{name}\" является обязательным в \"{type}\"", + "typedDictFieldTypeMismatch": "Тип \"{type}\" нельзя присвоить полю \"{name}\"", + "typedDictFieldUndefined": "Элемент \"{name}\" не определен в типе \"{type}\"", + "typedDictKeyAccess": "Использовать [\"{name}\"] для ссылки на элемент в 
TypedDict", + "typedDictNotAllowed": "TypedDict не может использоваться для проверок экземпляров или классов", + "unhashableType": "Тип \"{type}\" не является хэшируемым", + "uninitializedAbstractVariable": "Переменная экземпляра \"{name}\" определена в абстрактном базовом классе \"{classType}\", но не инициализирована", + "unreachableExcept": "\"{exceptionType}\" является подклассом \"{parentType}\"", + "useDictInstead": "Используйте dict[T1, T2] для указания типа словаря", + "useListInstead": "Используйте list[T] для указания типа list и T1 | T2 для указания типа union", + "useTupleInstead": "Используйте конструкцию tuple[T1, ..., Tn], чтобы указать тип tuple, и T1 | T2, чтобы указать тип union", + "useTypeInstead": "Используйте вместо этого type[T]", + "varianceMismatchForClass": "Вариант аргумента типа \"{typeVarName}\" несовместим с базовым классом \"{className}\"", + "varianceMismatchForTypeAlias": "Отклонение аргумента типа \"{typeVarName}\" несовместимо с \"{typeAliasParam}\"" + }, + "Service": { + "longOperation": "Перечисление исходных файлов рабочей области занимает много времени. Вместо этого рассмотрите возможность открыть вложенную папку. 
[Подробнее](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.tr.json b/python-parser/packages/pyright-internal/src/localization/package.nls.tr.json new file mode 100644 index 00000000..8086b8da --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.tr.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "Create Type Stub", + "createTypeStubFor": "Create Type Stub For \"{moduleName}\"", + "executingCommand": "Komut yürütülüyor", + "filesToAnalyzeCount": "analiz edilecek {count} dosya var", + "filesToAnalyzeOne": "Analiz edilecek 1 dosya", + "findingReferences": "Başvurular bulunuyor", + "organizeImports": "İçeri Aktarmaları Düzenle" + }, + "Completion": { + "autoImportDetail": "Otomatik içeri aktarma", + "indexValueDetail": "Dizin değeri" + }, + "Diagnostic": { + "abstractMethodInvocation": "\"{method}\" metodu soyut veya uygulanmamış olduğundan çağrılamaz", + "annotatedMetadataInconsistent": "\"{metadataType}\" not eklenmiş meta veri türü \"{type}\" türüyle uyumlu değil", + "annotatedParamCountMismatch": "Parametre ek açıklama sayısı uyuşmazlığı: {expected} bekleniyordu ancak {received} alındı", + "annotatedTypeArgMissing": "\"Annotated\" için bir tür bağımsız değişkeni ve bir veya daha fazla ek açıklama bekleniyordu", + "annotationBytesString": "Tür ifadeleri bayt sabit değerli dizeleri kullanamaz", + "annotationFormatString": "Tür ifadeleri biçim dizesi sabit değerlerini (f-strings) kullanamaz", + "annotationNotSupported": "Tür ek açıklaması bu deyim için desteklenmiyor", + "annotationRawString": "Tür ifadeleri ham dize sabit değerlerini kullanamaz", + "annotationSpansStrings": "Tür ifadeleri birden çok dize sabit değerine yayılamaz", + "annotationStringEscape": "Tür ifadeleri kaçış karakterleri içeremez", + "annotationTemplateString": "Tür ifadeleri şablon dizesi sabit değerlerini (f-string) kullanamaz", + "argAssignment": "\"{argType}\" 
türünde bağımsız değişken, \"{paramType}\" türündeki parametreye atanamaz", + "argAssignmentFunction": "\"{argType}\" türünde bağımsız değişken, \"{functionName}\" işlevi içinde \"{paramType}\" türündeki parametreye atanamaz", + "argAssignmentParam": "\"{argType}\" türünde bağımsız değişken, \"{paramName}\" türündeki \"{paramType}\" parametresine atanamaz", + "argAssignmentParamFunction": "\"{argType}\" türünde bağımsız değişken, \"{functionName}\" işlevi içinde \"{paramName}\" türündeki \"{paramType}\" parametresine atanamaz", + "argMissingForParam": "{name} parametresi için bağımsız değişken eksik", + "argMissingForParams": "{names} parametrelerinin bağımsız değişkenleri eksik", + "argMorePositionalExpectedCount": "{expected} tane daha konumsal bağımsız değişken bekleniyordu", + "argMorePositionalExpectedOne": "1 tane daha konumsal bağımsız değişken bekleniyordu", + "argPositional": "Konumsal bağımsız değişken bekleniyordu", + "argPositionalExpectedCount": "{expected} konumsal bağımsız değişken bekleniyordu", + "argPositionalExpectedOne": "1 konumsal bağımsız değişken bekleniyordu", + "argTypePartiallyUnknown": "Bağımsız değişken türü kısmen bilinmiyor", + "argTypeUnknown": "Bağımsız değişken türü bilinmiyor", + "assertAlwaysTrue": "Assert ifadesi her zaman true olarak değerlendirilir", + "assertTypeArgs": "\"assert_type\" iki konumsal bağımsız değişken bekliyor", + "assertTypeTypeMismatch": "\"assert_type\" uyuşmuyor; \"{expected}\" bekleniyordu ancak \"{received}\" alındı", + "assignmentExprComprehension": "\"{name}\" atama ifadesi hedefi, hedef için anlama ile aynı adı kullanamaz", + "assignmentExprContext": "Atama ifadesi modül, işlev veya lambda içinde olmalıdır", + "assignmentExprInSubscript": "Bir alt simge içindeki atama ifadeleri yalnızca Python 3.10 ve daha yeni sürümlerinde desteklenir", + "assignmentInProtocol": "Instance or class variables within a Protocol class must be explicitly declared within the class body", + "assignmentTargetExpr": "İfade, 
atama hedefi olamaz", + "asyncNotInAsyncFunction": "Use of \"async\" not allowed outside of async function", + "awaitIllegal": "\"await\" kullanımı için Python 3.5 veya daha yeni bir sürümü gerekiyor", + "awaitNotAllowed": "Tür ifadeleri \"await\" kullanamaz", + "awaitNotInAsync": "\"await\" allowed only within async function", + "backticksIllegal": "Eski kesme işaretleri arasında yer almayan ifadeler Python3.x’de desteklenmiyor; bunun yerine repr kullanın", + "baseClassCircular": "Sınıf kendi türevi olamaz", + "baseClassFinal": "\"{type}\" temel sınıfı final olarak işaretlendi ve alt sınıf olamaz", + "baseClassIncompatible": "{type} türündeki temel sınıflar karşılıklı olarak uyumsuzdur", + "baseClassInvalid": "Sınıfın bağımsız değişkeni bir temel sınıf olmalıdır", + "baseClassMethodTypeIncompatible": "\"{classType}\" sınıfına ait temel sınıflar, \"{name}\" metodunu uyumsuz bir şekilde tanımlıyor", + "baseClassUnknown": "Temel sınıf türü bilinmiyor, türetilmiş sınıfı gizliyor", + "baseClassVariableTypeIncompatible": "\"{classType}\" sınıfı için temel sınıflar, \"{name}\" değişkenini uyumsuz bir şekilde tanımlıyor", + "binaryOperationNotAllowed": "Tür ifadesinde ikili işleç kullanılamaz", + "bindParamMissing": "\"self\" veya \"cls\" parametresi eksik olduğundan \"{methodName}\" yöntemi bağlanamadı", + "bindTypeMismatch": "\"{type}\", \"{paramName}\" parametresine atanamadığından \"{methodName}\" metodu bağlanamadı", + "breakInExceptionGroup": "\"except*\" bloğunda \"break\" kullanılamaz", + "breakOutsideLoop": "\"break\" yalnızca bir döngü içinde kullanılabilir", + "bytesUnsupportedEscape": "bytes sabit değerinde desteklenmeyen kaçış dizisi", + "callableExtraArgs": "\"Callable\" için yalnızca iki tür bağımsız değişkeni bekleniyordu", + "callableFirstArg": "Parametre türü listesi veya \"...\" bekleniyordu", + "callableNotInstantiable": "\"{type}\" türünün örneği oluşturulamıyor", + "callableSecondArg": "\"Callable\" için ikinci tür bağımsız değişkeni olarak dönüş 
türü bekleniyordu", + "casePatternIsIrrefutable": "Reddedilemez desene yalnızca son case deyimi için izin verilir", + "classAlreadySpecialized": "\"{type}\" türü zaten özelleştirilmiş", + "classDecoratorTypeUnknown": "Türü belirtilmemiş sınıf dekoratörü sınıf türünü gizliyor; dekoratör yoksayılıyor", + "classDefinitionCycle": "\"{name}\" için sınıf tanımı kendisine bağımlı", + "classGetItemClsParam": "__class_getitem__ geçersiz kılması bir \"cls\" parametresi almalı", + "classMethodClsParam": "Sınıf metotları bir \"cls\" parametresi almalıdır", + "classNotRuntimeSubscriptable": "\"{name}\" sınıfına ait alt simge çalışma zamanı özel durumunu oluşturur; tür ifadelerini tırnak içine alın", + "classPatternBuiltInArgPositional": "Sınıf deseni yalnızca konumsal alt desen kabul eder", + "classPatternNewType": "\"{type}\", NewType kullanılarak tanımlandığı için sınıf desenlerinde kullanılamaz", + "classPatternPositionalArgCount": "\"{type}\" sınıfı için çok fazla konumsal desen var; {expected} bekleniyordu ancak {received} alındı", + "classPatternTypeAlias": "\"{type}\" özel bir tür diğer adı olduğundan sınıf deseninde kullanılamaz", + "classPropertyDeprecated": "Sınıf özellikleri Python 3.11'de kullanım dışıdır ve Python 3.13'te desteklenmez", + "classTypeParametersIllegal": "Sınıf türü parametresi söz dizimi için Python 3.12 veya daha yeni bir sürümü gerekiyor", + "classVarFirstArgMissing": "\"ClassVar\" sonrasında tür bağımsız değişkeni bekleniyordu", + "classVarNotAllowed": "Bu bağlamda \"ClassVar\" kullanımına izin verilmiyor", + "classVarOverridesInstanceVar": "\"{name}\" sınıf değişkeni \"{className}\" sınıfındaki aynı ada sahip örnek değişkenini geçersiz kılıyor", + "classVarTooManyArgs": "\"ClassVar\" sonrasında yalnızca bir tür bağımsız değişkeni bekleniyordu", + "classVarWithTypeVar": "\"ClassVar\" türü tür değişkenleri içeremez", + "clsSelfParamTypeMismatch": "\"{name}\" parametre türü, \"{classType}\" sınıfının üst türü olmalıdır", + "codeTooComplexToAnalyze": 
"Kod analiz edilemeyecek kadar karmaşık; alt yordamlara ayırarak yeniden düzenleyip veya koşullu kod yollarını azaltarak karmaşıklığı azaltın", + "collectionAliasInstantiation": "\"{type}\" türündeki örnek oluşturulamıyor, bunun yerine \"{alias}\" kullanın", + "comparisonAlwaysFalse": "\"{leftType}\" türleri ve \"{rightType}\" türleri çakışmadığından koşul her zaman False olarak değerlendirilir", + "comparisonAlwaysTrue": "\"{leftType}\" ve \"{rightType}\" türleri çakışmadığından ifade her zaman True olarak değerlendirilir", + "comprehensionInDict": "Anlama diğer küme girdileri ile kullanılamaz", + "comprehensionInSet": "Comprehension cannot be used with other set entries", + "concatenateContext": "Bu bağlamda \"Concatenate\" kullanılamaz", + "concatenateParamSpecMissing": "\"Concatenate\" için son tür bağımsız değişkeni bir ParamSpec veya \"...\" olmalıdır", + "concatenateTypeArgsMissing": "\"Concatenate\" requires at least two type arguments", + "conditionalOperandInvalid": "\"{type}\" türündeki koşullu işlenen geçersiz", + "constantRedefinition": "\"{name}\" sabit (büyük harf olduğundan) ve yeniden tanımlanamaz", + "constructorParametersMismatch": "\"{classType}\" sınıfındaki __new__ ve __init__ imzaları arasında uyuşmazlık var", + "containmentAlwaysFalse": "\"{leftType}\" türleri ve \"{rightType}\" türleri çakışmadığından ifade her zaman False olarak değerlendirilir", + "containmentAlwaysTrue": "\"{leftType}\" türleri ve \"{rightType}\" türleri çakışmadığından ifade her zaman True olarak değerlendirilir", + "continueInExceptionGroup": "\"except*\" bloğunda \"continue\" kullanılamaz", + "continueOutsideLoop": "\"continue\" yalnızca bir döngü içinde kullanılabilir", + "coroutineInConditionalExpression": "Koşullu ifade, her zaman True olarak değerlendirilen eş yordama başvurur", + "dataClassBaseClassFrozen": "Dondurulmuş olmayan bir sınıf dondurulmuş bir sınıftan devralamaz", + "dataClassBaseClassNotFrozen": "Dondurulmuş sınıf, dondurulmuş olmayan bir sınıftan 
devralamaz", + "dataClassConverterFunction": "\"{argType}\" türündeki bağımsız değişken, \"{fieldName}\" türündeki \"{fieldType}\" alanı için geçerli bir dönüştürücü değil", + "dataClassConverterOverloads": "\"{funcName}\" işlevinin aşırı yüklemelerinden hiçbiri \"{fieldType}\" türündeki \"{fieldName}\" alanı için geçerli dönüştürücüler değil", + "dataClassFieldInheritedDefault": "\"{fieldName}\", aynı ada sahip bir alanı geçersiz kılıyor ancak varsayılan değeri yok", + "dataClassFieldWithDefault": "Varsayılan değerleri olmayan alanlar, varsayılan değerleri olan alanlardan sonra gelemez", + "dataClassFieldWithPrivateName": "Veri sınıfı alanı özel ad kullanamıyor", + "dataClassFieldWithoutAnnotation": "Tür ek açıklaması olmayan veri sınıfı alanı çalışma zamanı özel durumuna neden olur", + "dataClassPostInitParamCount": "Veri sınıfı __post_init__ parametre sayısı yanlış; InitVar alanlarının sayısı {expected}", + "dataClassPostInitType": "Veri sınıfı __post_init__ metodu parametre türü ile \"{fieldName}\" alanı uyuşmuyor", + "dataClassSlotsOverwrite": "__slots__ zaten sınıfta tanımlı", + "dataClassTransformExpectedBoolLiteral": "Statik olarak True veya False olarak değerlendirilen ifade bekleniyordu", + "dataClassTransformFieldSpecifier": "Expected tuple of classes or functions but received type \"{type}\"", + "dataClassTransformPositionalParam": "\"dataclass_transform\" için tüm bağımsız değişkenlerin anahtar sözcük bağımsız değişkenleri olması gerekiyor", + "dataClassTransformUnknownArgument": "\"{name}\" bağımsız değişkeni, dataclass_transform tarafından desteklenmiyor", + "dataProtocolInSubclassCheck": "issubclass çağrılarında veri protokollerine (yöntem dışı öznitelikler dahil) izin verilmez", + "declaredReturnTypePartiallyUnknown": "Bildirilen \"{returnType}\" dönüş türü kısmen bilinmiyor", + "declaredReturnTypeUnknown": "Bildirilen dönüş türü bilinmiyor", + "defaultValueContainsCall": "Parametre varsayılan değer ifadesinde işlev çağrılarına ve değiştirilebilir 
nesnelere izin verilmez", + "defaultValueNotAllowed": "\"*\" veya \"**\" olan parametre varsayılan değere sahip olamaz", + "delTargetExpr": "İfade silinemiyor", + "deprecatedClass": "\"{name}\" sınıfı kullanım dışı", + "deprecatedConstructor": "\"{name}\" sınıfının oluşturucusu kullanım dışı", + "deprecatedDescriptorDeleter": "\"{name}\" tanımlayıcısı için \"__delete__\" yöntemi kullanım dışı", + "deprecatedDescriptorGetter": "\"{name}\" tanımlayıcısı için \"__get__\" yöntemi kullanım dışı", + "deprecatedDescriptorSetter": "\"{name}\" tanımlayıcısı için \"__set__\" yöntemi kullanım dışı", + "deprecatedFunction": "\"{name}\" işlevi kullanım dışı", + "deprecatedMethod": "\"{className}\" sınıfındaki \"{name}\" yöntemi kullanım dışı", + "deprecatedPropertyDeleter": "\"{name}\" property deleter kullanım dışı", + "deprecatedPropertyGetter": "\"{name}\" property getter kullanım dışı", + "deprecatedPropertySetter": "\"{name}\" property setter kullanım dışı", + "deprecatedType": "Bu tür Python {version} sürümünden itibaren kullanım dışı; bunun yerine \"{replacement}\" kullanın", + "dictExpandIllegalInComprehension": "Sözlük genişletmeye anlamada izin verilmiyor", + "dictInAnnotation": "Tür ifadesinde sözlük ifadesi kullanılamaz", + "dictKeyValuePairs": "Sözlük girdileri anahtar/değer çiftleri içermelidir", + "dictUnpackIsNotMapping": "Sözlük açma işleci için eşleme bekleniyordu", + "dunderAllSymbolNotPresent": "\"{name}\" __all__ ile belirtildi ancak modülde yok", + "duplicateArgsParam": "Yalnızca bir \"*\" parametresi kullanılabilir", + "duplicateBaseClass": "Yinelenen temel sınıfa izin verilmiyor", + "duplicateCapturePatternTarget": "\"{name}\" yakalama hedefi, aynı desen içinde birden çok kez bulunamaz", + "duplicateCatchAll": "Yalnızca bir catch-all except yan tümcesine izin verilir", + "duplicateEnumMember": "Enum member \"{name}\" is already declared", + "duplicateGenericAndProtocolBase": "Only one Generic[...] or Protocol[...] 
base class allowed", + "duplicateImport": "\"{importName}\" birden çok kez içeri aktarıldı", + "duplicateKeywordOnly": "Yalnızca bir \"*\" ayırıcısı kullanılabilir", + "duplicateKwargsParam": "Yalnızca bir \"**\" parametresine izin verilir", + "duplicateParam": "\"{name}\" yinelenen parametresi", + "duplicatePositionOnly": "Yalnızca bir \"/\" parametresine izin verilir", + "duplicateStarPattern": "Desen dizisinde yalnızca bir \"*\" desenine izin verilir", + "duplicateStarStarPattern": "Yalnızca bir \"**\" girdisine izin verilir", + "duplicateUnpack": "Only one unpack operation allowed in list", + "ellipsisAfterUnpacked": "\"...\" paketlenmemiş TypeVarTuple veya tuple ile kullanılamaz", + "ellipsisContext": "\"...\" kullanımına bu bağlamda izin verilmiyor", + "ellipsisSecondArg": "\"...\" yalnızca iki bağımsız değişkenin ikincisi olarak kullanılabilir", + "enumClassOverride": "Enum class \"{name}\" is final and cannot be subclassed", + "enumMemberDelete": "Enum üyesi \"{name}\" silinemiyor", + "enumMemberSet": "Enum üyesi \"{name}\" atanamıyor", + "enumMemberTypeAnnotation": "Type annotations are not allowed for enum members", + "exceptGroupMismatch": "Try ifadesi hem \"except*\" hem de \"except\" içeremez", + "exceptGroupRequiresType": "Özel durum grubu söz dizimi (\"except*\") biri özel durum türü gerektirir", + "exceptRequiresParens": "Python 3.14'ten önce birden çok özel durum türü parantez içine alınmalıdır", + "exceptWithAsRequiresParens": "\"as\" kullanılırken birden çok özel durum türü parantez içine alınmalıdır", + "exceptionGroupIncompatible": "Özel durum grubu söz dizimi (\"except*\") için Python 3.11 veya daha yeni bir sürümü gerekiyor", + "exceptionGroupTypeIncorrect": "except* altındaki özel durum türü BaseGroupException değerinden türetilemiyor", + "exceptionTypeIncorrect": "\"{type}\", BaseException türevi değil", + "exceptionTypeNotClass": "\"{type}\" geçerli bir özel durum sınıfı değil", + "exceptionTypeNotInstantiable": "\"{type}\" özel durum türü 
oluşturucusu bir veya daha fazla bağımsız değişken gerektiriyor", + "expectedAfterDecorator": "Dekoratörden sonra işlev veya sınıf bildirimi bekleniyordu", + "expectedArrow": "Dönüş türü ek açıklaması sonrasında \"->\" bekleniyordu", + "expectedAsAfterException": "Özel durum türünden sonra \"as\" bekleniyordu", + "expectedAssignRightHandExpr": "\"=\" öğesinin sağ tarafında ifade bekleniyordu", + "expectedBinaryRightHandExpr": "İşlecin sağında ifade bekleniyordu", + "expectedBoolLiteral": "True veya False bekleniyor", + "expectedCase": "\"case\" deyimi bekleniyordu", + "expectedClassName": "Sınıf adı bekleniyordu", + "expectedCloseBrace": "\"{\" kapatılmadı", + "expectedCloseBracket": "\"[\" kapatılmadı", + "expectedCloseParen": "\"(\" kapatılmadı", + "expectedColon": "\":\" bekleniyordu", + "expectedComplexNumberLiteral": "Desen eşleme için karmaşık sayı sabit değeri bekleniyordu", + "expectedDecoratorExpr": "İfade formu Python 3.9'dan önceki dekoratör için desteklenmiyor", + "expectedDecoratorName": "Dekoratör adı bekleniyordu", + "expectedDecoratorNewline": "Dekoratör sonunda yeni satır bekleniyordu", + "expectedDelExpr": "\"del\" sonrasında ifade bekleniyordu", + "expectedElse": "\"else\" bekleniyordu", + "expectedEquals": "\"=\" bekleniyordu", + "expectedExceptionClass": "Geçersiz özel durum sınıfı veya nesnesi", + "expectedExceptionObj": "Özel durum nesnesi, özel durum sınıfı veya None bekleniyordu", + "expectedExpr": "İfade bekleniyor", + "expectedFunctionAfterAsync": "\"async\" sonrasında işlev tanımı bekleniyordu", + "expectedFunctionName": "\"def\" sonrasında işlev adı bekleniyordu", + "expectedIdentifier": "Tanımlayıcı bekleniyordu", + "expectedImport": "\"import\" bekleniyordu", + "expectedImportAlias": "\"as\" sonrasında sembol bekleniyordu", + "expectedImportSymbols": "\"import\" sonrasında bir veya daha fazla sembol adı bekleniyordu", + "expectedIn": "\"in\" bekleniyordu", + "expectedInExpr": "\"in\" sonrasında ifade bekleniyordu", + 
"expectedIndentedBlock": "Girintili blok bekleniyordu", + "expectedMemberName": "\".\" sonrasında bir öznitelik adı bekleniyor", + "expectedModuleName": "Modül adı bekleniyordu", + "expectedNameAfterAs": "\"as\" sonrasında sembol adı bekleniyordu", + "expectedNamedParameter": "Anahtar sözcük parametresi, \"*\" sonrasında gelmelidir", + "expectedNewline": "Yeni satır bekleniyordu", + "expectedNewlineOrSemicolon": "Deyimler yeni satırlarla veya noktalı virgüllerle ayrılarak belirtilmelidir", + "expectedOpenParen": "\"(\" bekleniyordu", + "expectedParamName": "Parametre adı bekleniyordu", + "expectedPatternExpr": "Desen ifadesi bekleniyordu", + "expectedPatternSubjectExpr": "Beklenen desen konusu ifadesi", + "expectedPatternValue": "\"a.b\" biçiminde bir desen değeri ifadesi bekleniyordu", + "expectedReturnExpr": "\"return\" sonrasında ifade bekleniyordu", + "expectedSliceIndex": "Dizin veya dilim ifadesi bekleniyordu", + "expectedTypeNotString": "Tür bekleniyordu ancak sabit değerli dize alındı", + "expectedTypeParameterName": "Beklenen tür parametresi adı", + "expectedYieldExpr": "Expected expression in yield statement", + "finalClassIsAbstract": "Class \"{type}\" is marked final and must implement all abstract symbols", + "finalContext": "Bu bağlamda \"Final\" kullanılamaz", + "finalInLoop": "Bir döngü içinde “Final” değişkeni atanamaz", + "finalMethodOverride": "\"{name}\" yöntemi \"{className}\" sınıfı içinde tanımlanan final metodu geçersiz kılamaz", + "finalNonMethod": "\"{name}\" işlevi bir yöntem olmadığından @final olarak işaretlenemez", + "finalReassigned": "\"{name}\" is declared as Final and cannot be reassigned", + "finalRedeclaration": "\"{name}\" daha önce Final olarak bildirildi", + "finalRedeclarationBySubclass": "\"{name}\", \"{className}\" sınıf adı bu adı Final olarak bildirdiğinden yeniden bildirilemez", + "finalTooManyArgs": "\"Final\" sonrasında tek bir tür bağımsız değişken bekleniyordu", + "finalUnassigned": "\"{name}\" Final olarak 
bildirildi, ancak değer atanmadı", + "finallyBreak": "Bir \"break\" ifadesi \"finally\" bloğundan çıkmak için kullanılamaz", + "finallyContinue": "Bir \"continue\" ifadesi \"finally\" bloğundan çıkmak için kullanılamaz", + "finallyReturn": "Bir \"return\" ifadesi \"finally\" bloğundan çıkmak için kullanılamaz", + "formatStringBrace": "f dizesi sabit değerinde tek bir kapatma küme ayracı kullanılamaz; çift sağ ayraç kullanın", + "formatStringBytes": "Biçim dizesi sabit değerleri (f-strings) ikili olamaz", + "formatStringDebuggingIllegal": "F dizesi hata ayıklama belirticisi \"=\", Python 3.8 veya daha yeni bir sürüm gerektirir", + "formatStringEscape": "Python 3.12 öncesinde f dizesinin ifade bölümünde kaçış sırasına (ters eğik çizgi) izin verilmez", + "formatStringExpectedConversion": "f-string'de \"!\" öğesinden sonra bir dönüştürme tanımlayıcısı bekleniyordu", + "formatStringIllegal": "Biçim dizesi sabit değerleri (f dizeleri) Python 3.6 veya daha yeni bir sürüm gerektirir", + "formatStringInPattern": "Desende biçim dizesine izin verilmiyor", + "formatStringNestedFormatSpecifier": "İfadeler biçim dizesi belirticisi içinde çok derin iç içe geçmiş", + "formatStringNestedQuote": "F dizesi içinde iç içe geçmiş dizeler Python 3.12'den önceki f dizesiyle aynı tırnak karakterini kullanamaz", + "formatStringTemplate": "Biçim dizesi sabit değerleri (f-string) aynı zamanda şablon dizeleri (t-string) olamaz", + "formatStringUnicode": "Biçim dizesi sabit değerleri (f-strings) unicode olamaz", + "formatStringUnterminated": "F dizesi içinde sonlandırılmamış ifade; \"}\" bekleniyor", + "functionDecoratorTypeUnknown": "Türü belirsiz işlev dekoratörü işlevin türünü gizler; dekoratör yoksayılıyor", + "functionInConditionalExpression": "Koşullu ifade, her zaman True olarak değerlendirilen işleve başvurur", + "functionTypeParametersIllegal": "İşlev türü parametre sözdizimi Python 3.12 veya daha yeni bir sürüm gerektirir", + "futureImportLocationNotAllowed": "__future__ içeri 
aktarmaları dosyanın başında olmalıdır", + "generatorAsyncReturnType": "Return type of async generator function must be compatible with \"AsyncGenerator[{yieldType}, Any]\"", + "generatorNotParenthesized": "Tek bağımsız değişken olmadıklarında oluşturucu ifadeleri ayraç içine alınmalıdır", + "generatorSyncReturnType": "Oluşturucu işlevinin dönüş türü \"Generator[{yieldType}, Any, Any]\" ile uyumlu olmalıdır", + "genericBaseClassNotAllowed": "\"Generic\" temel sınıfı, tür parametresi sözdizimiyle kullanılamaz", + "genericClassAssigned": "Genel sınıf türü atanamıyor", + "genericClassDeleted": "Genel sınıf türü silinemiyor", + "genericInstanceVariableAccess": "Sınıf üzerinden genel örnek değişkenine erişim belirsiz", + "genericNotAllowed": "\"Generic\" bu bağlamda geçerli değil", + "genericTypeAliasBoundTypeVar": "Sınıf içindeki genel tür diğer adı {names} bağlı tür değişkenlerini kullanamıyor", + "genericTypeArgMissing": "\"Generic\" en az bir tür bağımsız değişkeni gerektirir", + "genericTypeArgTypeVar": "\"Generic\" için tür bağımsız değişkeni bir tür değişkeni olmalıdır", + "genericTypeArgUnique": "\"Generic\" için tür bağımsız değişkenleri benzersiz olmalıdır", + "globalReassignment": "\"{name}\" is assigned before global declaration", + "globalRedefinition": "\"{name}\" was already declared global", + "implicitStringConcat": "Örtük dize birleştirmesine izin verilmiyor", + "importCycleDetected": "İçeri aktarma zincirinde döngü algılandı", + "importDepthExceeded": "İçeri aktarma zinciri derinliği {depth} sınırını aştı", + "importResolveFailure": "\"{importName}\" adlı içeri aktarma çözümlenemedi", + "importSourceResolveFailure": "\"{importName}\" adlı içeri aktarma kaynaktan çözümlenemedi", + "importSymbolUnknown": "\"{name}\" alma simgesi bilinmiyor", + "incompatibleMethodOverride": "\"{name}\" metodu \"{className}\" sınıfını uyumsuz bir şekilde geçersiz kılıyor", + "inconsistentIndent": "Girintisiz miktar önceki girintiyle eşleşmiyor", + "inconsistentTabs": 
"Girinti içinde sekmelerin ve boşlukların kullanımı tutarsız", + "initMethodSelfParamTypeVar": "\"__init__\" metodunun \"self\" parametresi için tür ek açıklaması sınıf kapsamlı tür değişkenleri içeremez", + "initMustReturnNone": "\"__init__\" dönüş türü None olmalıdır", + "initSubclassCallFailed": "__init_subclass__ yöntemi için yanlış anahtar bağımsız değişkenleri", + "initSubclassClsParam": "__init_subclass__ geçersiz kılması bir \"cls\" parametresi almalı", + "initVarNotAllowed": "\"InitVar\" öğesi bu bağlamda kullanılamaz", + "instanceMethodSelfParam": "Örnek metotları bir \"self\" parametresi almalıdır", + "instanceVarOverridesClassVar": "\"{name}\" örnek değişkeni \"{className}\" sınıfındaki aynı ada sahip sınıf değişkenini geçersiz kılıyor", + "instantiateAbstract": "\"{type}\" soyut sınıfı örneği oluşturulamıyor", + "instantiateProtocol": "\"{type}\" Protocol sınıfının örneği oluşturulamıyor", + "internalBindError": "\"{file}\" dosyası bağlanırken dahili bir hata oluştu: {message}", + "internalParseError": "\"{file}\" dosyası ayrıştırılırken dahili bir hata oluştu: {message}", + "internalTypeCheckingError": "\"{file}\" dosyası tür denetimi gerçekleştirilirken dahili bir hata oluştu: {message}", + "invalidIdentifierChar": "Tanımlayıcıda geçersiz karakter", + "invalidStubStatement": "Statement is meaningless within a type stub file", + "invalidTokenChars": "Belirteçte geçersiz \"{text}\" karakteri var", + "isInstanceInvalidType": "Second argument to \"isinstance\" must be a class or tuple of classes", + "isSubclassInvalidType": "Second argument to \"issubclass\" must be a class or tuple of classes", + "keyValueInSet": "Key/value pairs are not allowed within a set", + "keywordArgInTypeArgument": "Anahtar sözcük bağımsız değişkenleri tür bağımsız değişken listelerinde kullanılamaz", + "keywordOnlyAfterArgs": "\"*\" parametresinden sonra keyword-only bağımsız değişken ayırıcısı kullanılamaz", + "keywordParameterMissing": "Bir veya daha fazla anahtar sözcük 
parametresi \"*\" parametresini izlemeli", + "keywordSubscriptIllegal": "Alt simge içindeki anahtar sözcük bağımsız değişkenleri desteklenmiyor", + "lambdaReturnTypePartiallyUnknown": "Lambdanın \"{returnType}\" dönüş türü kısmen bilinmiyor", + "lambdaReturnTypeUnknown": "Lambdanın dönüş türü bilinmiyor", + "listAssignmentMismatch": "\"{type}\" türündeki ifade hedef listesine atanamaz", + "listInAnnotation": "List expression not allowed in type expression", + "literalEmptyArgs": "\"Literal\" sonrasında bir veya daha fazla tür bağımsız değişkeni bekleniyordu", + "literalNamedUnicodeEscape": "Adlandırılmış unicode kaçış sıraları “Literal” dize ek açıklamalarında desteklenmiyor", + "literalNotAllowed": "\"Literal\" bir tür bağımsız değişken olmadan bu bağlamda kullanılamaz", + "literalNotCallable": "Literal type cannot be instantiated", + "literalUnsupportedType": "Type arguments for \"Literal\" must be None, a literal value (int, bool, str, or bytes), or an enum value", + "matchIncompatible": "Match statements require Python 3.10 or newer", + "matchIsNotExhaustive": "Cases within match statement do not exhaustively handle all values", + "maxParseDepthExceeded": "Maksimum ayrıştırma derinliği aşıldı; ifadeyi daha küçük alt ifadelere bölün", + "memberAccess": "Sınıf \"{type}\" için \"{name}\" özniteliğine erişilemiyor", + "memberDelete": "Sınıf \"{type}\" için \"{name}\" özniteliği silinemiyor", + "memberSet": "Sınıf \"{type}\" için \"{name}\" özniteliği atanamıyor", + "metaclassConflict": "Türetilmiş sınıfın meta sınıfı, tüm temel sınıflarının meta sınıflarının bir alt sınıfı olmalıdır", + "metaclassDuplicate": "Yalnızca bir meta sınıfı sağlanmalıdır", + "metaclassIsGeneric": "Meta sınıfı genel olamaz", + "methodNotDefined": "\"{name}\" metodu tanımlanmadı", + "methodNotDefinedOnType": "\"{name}\" metodu \"{type}\" türü üzerinde tanımlanmadı", + "methodOrdering": "Tutarlı metot sıralaması oluşturulamıyor", + "methodOverridden": "\"{name}\", uyumsuz \"{type}\" türüne 
sahip \"{className}\" sınıfında aynı ad metodunu geçersiz kılar", + "methodReturnsNonObject": "\"{name}\" metodu bir nesne döndürmez", + "missingSuperCall": "\"{methodName}\" metodu üst sınıftaki aynı ada sahip metodu çağırmıyor", + "mixingBytesAndStr": "Bytes ve str değerleri birleştirilemez", + "moduleAsType": "Modül tür olarak kullanılamaz", + "moduleNotCallable": "Modül çağrılabilir değil", + "moduleUnknownMember": "\"{memberName}\", \"{moduleName}\" modülünün bilinen bir özniteliği değil", + "namedExceptAfterCatchAll": "Adlandırılmış except yan tümcesi, catch-all except yan tümcesinden sonra gelemez", + "namedParamAfterParamSpecArgs": "\"{name}\" anahtar sözcük parametresi ParamSpec args parametresinden sonra imzada yer alamaz", + "namedTupleEmptyName": "Names within a named tuple cannot be empty", + "namedTupleEntryRedeclared": "Üst sınıf \"{className}\" adlandırılmış bir tuple olduğundan \"{name}\" geçersiz kılınamıyor", + "namedTupleFieldUnderscore": "Named tuple alan adları alt çizgiyle başlayamaz", + "namedTupleFirstArg": "Expected named tuple class name as first argument", + "namedTupleMultipleInheritance": "NamedTuple bulunan birden çok devralma desteklenmiyor", + "namedTupleNameKeyword": "Alan adları anahtar sözcük olamaz", + "namedTupleNameType": "Expected two-entry tuple specifying entry name and type", + "namedTupleNameUnique": "Names within a named tuple must be unique", + "namedTupleNoTypes": "\"namedtuple\" tuple girdileri için tür sağlamaz; bunun yerine \"NamedTuple\" kullanın", + "namedTupleSecondArg": "Expected named tuple entry list as second argument", + "newClsParam": "__new__ geçersiz kılması bir \"cls\" parametresi almalı", + "newTypeAnyOrUnknown": "NewType'ın ikinci bağımsız değişkeni Any veya Unknown değil, bilinen bir sınıf olmalıdır", + "newTypeBadName": "NewType için ilk bağımsız değişken bir sabit değerli dize olmalıdır", + "newTypeLiteral": "NewType Literal türüyle kullanılamaz", + "newTypeNameMismatch": "NewType, aynı ada sahip bir 
değişkene atanmalıdır", + "newTypeNotAClass": "NewType için ikinci bağımsız değişken olarak sınıf bekleniyordu", + "newTypeParamCount": "NewType için iki konumsal bağımsız değişken gerekiyor", + "newTypeProtocolClass": "NewType yapısal türle (Protocol veya TypedDict sınıfı) kullanılamaz", + "noOverload": "\"{name}\" için aşırı yüklemelerin hiçbiri sağlanan bağımsız değişkenlerle eşleşmiyor", + "noReturnContainsReturn": "Function with declared return type \"NoReturn\" cannot include a return statement", + "noReturnContainsYield": "Bildirilen dönüş türü \"NoReturn\" olan işlev bir yield deyimi içeremez", + "noReturnReturnsNone": "Bildirilen \"NoReturn\" döndürme türüne sahip işlev \"None\" döndüremez", + "nonDefaultAfterDefault": "Varsayılan olmayan bağımsız değişken varsayılan bağımsız değişkeni izler", + "nonLocalInModule": "Nonlocal declaration not allowed at module level", + "nonLocalNoBinding": "No binding for nonlocal \"{name}\" found", + "nonLocalReassignment": "\"{name}\" is assigned before nonlocal declaration", + "nonLocalRedefinition": "\"{name}\" was already declared nonlocal", + "noneNotCallable": "\"None\" türündeki nesne çağrılamaz", + "noneNotIterable": "\"None\" türündeki nesne, yinelenebilir değer olarak kullanılamaz", + "noneNotSubscriptable": "\"None\" türündeki nesne alt simgeleştirilebilir değil", + "noneNotUsableWith": "Object of type \"None\" cannot be used with \"with\"", + "noneNotUsableWithAsync": "\"None\" türündeki nesne \"async\" ile kullanılamaz", + "noneOperator": "\"{operator}\" işleci \"None\" için desteklenmiyor", + "noneUnknownMember": "\"{name}\" bilinen bir \"None\" özniteliği değil", + "nonlocalTypeParam": "\"{name}\" tür parametresi için nonlocal bağlamaya izin verilmez", + "notRequiredArgCount": "\"NotRequired\" sonrasında tek bir tür bağımsız değişken bekleniyordu", + "notRequiredNotInTypedDict": "Bu bağlamda \"NotRequired\" kullanımına izin verilmiyor", + "objectNotCallable": "\"{type}\" türündeki nesne çağrılamaz", 
+ "obscuredClassDeclaration": "\"{name}\" sınıf bildirimi aynı ada sahip bir bildirim tarafından gizlendi", + "obscuredFunctionDeclaration": "\"{name}\" işlev bildirimi aynı ada sahip bir bildirim tarafından gizlendi", + "obscuredMethodDeclaration": "\"{name}\" metot bildirimi aynı ada sahip bir bildirim tarafından gizlendi", + "obscuredParameterDeclaration": "\"{name}\" parametre bildirimi aynı ada sahip bir bildirim tarafından gizlendi", + "obscuredTypeAliasDeclaration": "\"{name}\" tür diğer ad bildirimi aynı ada sahip bir bildirim tarafından etkisiz kılındı", + "obscuredVariableDeclaration": "\"{name}\" bildirimi aynı ada sahip bir bildirim tarafından gizlendi", + "operatorLessOrGreaterDeprecated": "\"<>\" işleci Python 3'de desteklenmiyor; bunun yerine \"!=\" kullanın", + "optionalExtraArgs": "Expected one type argument after \"Optional\"", + "orPatternIrrefutable": "Reddedilemez desene yalnızca \"or\" deseninde son alt desen olarak izin verilir", + "orPatternMissingName": "Bir \"or\" deseni içindeki tüm alt desenlerde aynı adlar hedeflenmeli", + "overlappingKeywordArgs": "Türü belirlenmiş sözlük anahtar sözcük parametresiyle çakışıyor: {names}", + "overlappingOverload": "Parametreleri {obscuredBy} aşırı yüklemesi ile çakıştığından \"{name}\" {obscured} aşırı yüklemesi hiçbir zaman kullanılmayacak", + "overloadAbstractImplMismatch": "Aşırı yüklemeler uygulamanın özet durumuyla eşleşmelidir", + "overloadAbstractMismatch": "Aşırı yüklemelerin tümü soyut olmalı veya tümü olmamalıdır", + "overloadClassMethodInconsistent": "\"{name}\" için aşırı yüklemeler, @classmethod yöntemini tutarsız kullanıyor", + "overloadFinalImpl": "@final dekoratörü yalnızca uygulamaya uygulanmalıdır", + "overloadFinalNoImpl": "Yalnızca ilk aşırı yükleme @final", + "overloadImplementationMismatch": "Aşırı yüklenmiş uygulama, {index} aşırı yükleme imzası ile tutarlı değil", + "overloadOverrideImpl": "@override dekoratörü yalnızca uygulamaya uygulanmalıdır", + "overloadOverrideNoImpl": "Yalnızca ilk 
aşırı yükleme @override", + "overloadReturnTypeMismatch": "\"{name}\" için {prevIndex} aşırı yüklemesi {newIndex} aşırı yüklemesi ile çakışıyor ve uyumsuz bir tür döndürüyor", + "overloadStaticMethodInconsistent": "\"{name}\" için aşırı yüklemeler, @staticmethod yöntemini tutarsız kullanıyor", + "overloadWithoutImplementation": "\"{name}\" is marked as overload, but no implementation is provided", + "overriddenMethodNotFound": "Method \"{name}\" is marked as override, but no base method of same name is present", + "overrideDecoratorMissing": "Method \"{name}\" is not marked as override but is overriding a method in class \"{className}\"", + "paramAfterKwargsParam": "Parametre \"**\" parametresini izleyemez", + "paramAlreadyAssigned": "\"{name}\" parametresi zaten atanmış", + "paramAnnotationMissing": "\"{name}\" parametresi için tür ek açıklaması eksik", + "paramAssignmentMismatch": "\"{sourceType}\" türündeki ifade, \"{paramType}\" türündeki parametreye atanamaz", + "paramNameMissing": "\"{name}\" adlı parametre yok", + "paramSpecArgsKwargsDuplicate": "\"{type}\" ParamSpec'i için bağımsız değişkenler zaten sağlandı", + "paramSpecArgsKwargsUsage": "ParamSpec'in \"args\" ve \"kwargs\" özniteliklerinin ikisi de işlev imzasında görünmelidir", + "paramSpecArgsMissing": "ParamSpec \"{type}\" bağımsız değişkenleri eksik", + "paramSpecArgsUsage": "ParamSpec'in \"args\" özniteliği yalnızca *args parametresiyle kullanıldığında geçerlidir", + "paramSpecAssignedName": "ParamSpec, \"{name}\" adlı bir değişkene atanmalı", + "paramSpecContext": "ParamSpec bu bağlamda kullanılamaz", + "paramSpecDefaultNotTuple": "Expected ellipsis, a tuple expression, or ParamSpec for default value of ParamSpec", + "paramSpecFirstArg": "İlk bağımsız değişken olarak ParamSpec adı bekleniyordu", + "paramSpecKwargsUsage": "ParamSpec'in \"kwargs\" özniteliği yalnızca **kwargs parametresiyle kullanıldığında geçerlidir", + "paramSpecNotUsedByOuterScope": "\"{name}\" adlı ParamSpec bu bağlamda bir anlam 
ifade etmiyor", + "paramSpecUnknownArg": "ParamSpec birden fazla bağımsız değişkeni desteklemiyor", + "paramSpecUnknownMember": "\"{name}\" bilinen bir ParamSpec özniteliği değil", + "paramSpecUnknownParam": "\"{name}\", ParamSpec için bilinmeyen bir parametre", + "paramTypeCovariant": "Kovaryant türü değişkeni parametre türünde kullanılamaz", + "paramTypePartiallyUnknown": "\"{paramName}\" parametresinin türü kısmen bilinmiyor", + "paramTypeUnknown": "\"{paramName}\" parametresinin türü bilinmiyor", + "parenthesizedContextManagerIllegal": "Parentheses within \"with\" statement requires Python 3.9 or newer", + "patternNeverMatches": "Desen, \"{type}\" konu türü ile hiçbir zaman eşleşmez", + "positionArgAfterNamedArg": "Konumsal bağımsız değişken anahtar sözcük bağımsız değişkenlerden sonra gelemez", + "positionArgAfterUnpackedDictArg": "Konumsal bağımsız değişken, anahtar sözcük bağımsız değişkenler paketinden çıkarıldıktan sonra görünemez", + "positionOnlyAfterArgs": "\"*\" parametresinden sonra yalnızca konum parametre ayırıcısı kullanılamaz", + "positionOnlyAfterKeywordOnly": "\"/\" parametresi \"*\" parametresinden önce gelmelidir", + "positionOnlyAfterNon": "Yalnızca konum parametresi olmayan parametreden sonra yalnızca konum parametresine izin verilmez", + "positionOnlyFirstParam": "Yalnızca konum parametre ayırıcısı ilk parametre olarak kullanılamaz", + "positionOnlyIncompatible": "Yalnızca konum parametre ayırıcısı için Python 3.8 veya daha yeni bir sürümü gerekiyor", + "privateImportFromPyTypedModule": "\"{name}\" \"{module}\" modülünden dışarı aktarılamadı", + "privateUsedOutsideOfClass": "\"{name}\" özeldir ve bildirildiği sınıfın dışında kullanılır", + "privateUsedOutsideOfModule": "\"{name}\" özeldir ve bildirildiği modülün dışında kullanılır", + "propertyOverridden": "\"{name}\" incorrectly overrides property of same name in class \"{className}\"", + "propertyStaticMethod": "Static methods not allowed for property getter, setter or deleter", + 
"protectedUsedOutsideOfClass": "\"{name}\" korumalıdır ve içinde bildirildiği sınıfın dışında kullanılır", + "protocolBaseClass": "\"{classType}\" Protocol sınıfı, Protocol olmayan \"{baseType}\" sınıfının türevi olamaz", + "protocolBaseClassWithTypeArgs": "Type arguments are not allowed with Protocol class when using type parameter syntax", + "protocolIllegal": "\"Protocol\" kullanımı için Python 3.7 veya daha yeni bir sürümü gerekiyor", + "protocolNotAllowed": "\"Protocol\" bu bağlamda kullanılamaz", + "protocolTypeArgMustBeTypeParam": "“Protocol” için tür bağımsız değişkeni bir tür parametresi olmalıdır", + "protocolUnsafeOverlap": "Sınıf, \"{name}\" ile güvenli olmayan bir şekilde çakışıyor ve çalışma zamanında bir eşleşme üretebilir", + "protocolVarianceContravariant": "Genel Protocol \"{class}\" için kullanılan \"{variable}\" tür değişkeni, değişken karşıtı olmalıdır", + "protocolVarianceCovariant": "Genel Protocol \"{class}\" için kullanılan \"{variable}\" tür değişkeni, birlikte değişen olmalıdır", + "protocolVarianceInvariant": "Genel Protocol \"{class}\" için kullanılan \"{variable}\" tür değişkeni sabit olmalıdır", + "pyrightCommentInvalidDiagnosticBoolValue": "Pyright açıklama yönergesinden sonra \"=\" ve true veya false değeri olmalıdır", + "pyrightCommentInvalidDiagnosticSeverityValue": "Pyright açıklama yönergesinden sonra \"=\" ve true, false, error, warning, information veya none değeri girilmelidir", + "pyrightCommentMissingDirective": "Pyright comment must be followed by a directive (basic or strict) or a diagnostic rule", + "pyrightCommentNotOnOwnLine": "Pyright comments used to control file-level settings must appear on their own line", + "pyrightCommentUnknownDiagnosticRule": "\"{rule}\", pyright açıklaması için bilinmeyen bir tanılama kuralı", + "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" pyright açıklaması için geçersiz değer; true, false, error, warning, information veya none bekleniyordu", + 
"pyrightCommentUnknownDirective": "\"{directive}\", pyright açıklaması için bilinmeyen bir yönergedir; \"strict\", \"standard\" veya \"basic\" bekleniyordu", + "readOnlyArgCount": "\"ReadOnly\" sonrasında tek bir tür bağımsız değişken bekleniyordu", + "readOnlyNotInTypedDict": "Bu bağlamda \"ReadOnly\" kullanımına izin verilmiyor", + "recursiveDefinition": "Kendine başvurduğundan \"{name}\" türü belirlenemedi", + "relativeImportNotAllowed": "Göreli içeri aktarmalar \"import .a\" formuyla kullanılamaz; bunun yerine \"from . import a\" kullanın", + "requiredArgCount": "\"Required\" sonrasında tek bir tür bağımsız değişken bekleniyordu", + "requiredNotInTypedDict": "Bu bağlamda \"Required\" kullanımına izin verilmiyor", + "returnInAsyncGenerator": "Return statement with value is not allowed in async generator", + "returnInExceptionGroup": "\"except*\" bloğunda \"return\" kullanılamaz", + "returnMissing": "Bildirilen \"{returnType}\" döndürme türüne sahip işlev, tüm kod yollarında değer döndürmelidir", + "returnOutsideFunction": "\"return\" yalnızca bir işlev içinde kullanılabilir", + "returnTypeContravariant": "Değişken karşıtı tür değişkeni dönüş türünde kullanılamaz", + "returnTypeMismatch": "\"{exprType}\" türü \"{returnType}\" dönüş türüne atanamaz", + "returnTypePartiallyUnknown": "\"{returnType}\" dönüş türü kısmen bilinmiyor", + "returnTypeUnknown": "Dönüş türü bilinmiyor", + "revealLocalsArgs": "\"reveal_locals\" çağrısı için bağımsız değişken beklenmiyordu", + "revealLocalsNone": "No locals in this scope", + "revealTypeArgs": "\"reveal_type\" çağrısı için tek bir konumsal bağımsız değişken bekleniyordu", + "revealTypeExpectedTextArg": "\"reveal_type\" bağımsız değişkeni için \"expected_text\" argümanı bir str sabit değeri olmalıdır", + "revealTypeExpectedTextMismatch": "Tür uyuşmazlığı; \"{expected}\" bekleniyordu ancak \"{received}\" alındı", + "revealTypeExpectedTypeMismatch": "Tür uyuşmazlığı; \"{expected}\" bekleniyordu ancak \"{received}\" alındı", + 
"selfTypeContext": "\"Self\" bu bağlamda geçerli değil", + "selfTypeMetaclass": "\"Self\" bir meta sınıfı (\"type\" alt sınıfı) içinde kullanılamaz", + "selfTypeWithTypedSelfOrCls": "\"Self\", \"Self\" dışında bir tür ek açıklamasına sahip bir `self` veya `cls` parametresine sahip bir işlevde kullanılamaz", + "sentinelBadName": "Sentinel için ilk bağımsız değişken bir sabit değerli dize olmalıdır", + "sentinelNameMismatch": "Sentinel, aynı ada sahip bir değişkene atanmalıdır", + "sentinelParamCount": "Sentinel bir konumsal bağımsız değişken gerektiriyor", + "setterGetterTypeMismatch": "Property setter value type is not assignable to the getter return type", + "singleOverload": "\"{name}\" aşırı yükleme olarak işaretlenmiş, ancak ek aşırı yüklemeler eksik", + "slotsAttributeError": "\"{name}\", __slots__ içinde belirtilmedi", + "slotsClassVarConflict": "\"{name}\", __slots__ içinde bildirilen örnek değişkenle çakışıyor", + "starPatternInAsPattern": "Yıldız deseni \"as\" hedefiyle kullanılamaz", + "starPatternInOrPattern": "Yıldız deseni diğer desenlerde ORed olamaz", + "starStarWildcardNotAllowed": "\"_\" joker karakteriyle ** kullanılamaz", + "staticClsSelfParam": "Static metotları \"self\" veya \"cls\" parametresi almamalıdır", + "stringNonAsciiBytes": "ASCII olmayan karaktere bayt sabit değerli dizesinde izin verilmez", + "stringNotSubscriptable": "Tür ifadesinde dize ifadesi alt simge olarak belirtilemez; ifadenin tamamını tırnak içine alın", + "stringUnsupportedEscape": "Dize sabit değerinde desteklenmeyen kaçış dizisi", + "stringUnterminated": "Sabit değerli dize sonlandırılmamış", + "stubFileMissing": "Stub file not found for \"{importName}\"", + "stubUsesGetAttr": "Type stub file is incomplete; \"__getattr__\" obscures type errors for module", + "sublistParamsIncompatible": "Sublist parameters are not supported in Python 3.x", + "superCallArgCount": "\"super\" çağrısı için ikiden fazla bağımsız değişken beklenmiyordu", + "superCallFirstArg": "\"super\" 
çağrısının ilk bağımsız değişkeni olarak sınıf türü bekleniyordu ancak \"{type}\" alındı", + "superCallSecondArg": "\"super\" çağrısının ikinci bağımsız değişkeni, \"{type}\" türünden türetilen nesne veya sınıf olmalıdır", + "superCallZeroArgForm": "\"super\" çağrısının zero-argument biçimi yalnızca bir metot içinde geçerlidir", + "superCallZeroArgFormStaticMethod": "\"super\" çağrısının zero-argument biçimi statik bir metot içinde geçerli değildir", + "symbolIsPossiblyUnbound": "\"{name}\" büyük olasılıkla bağlı değil", + "symbolIsUnbound": "\"{name}\" bağlı değil", + "symbolIsUndefined": "\"{name}\" tanımlanmadı", + "symbolOverridden": "\"{name}\", \"{className}\" sınıfında aynı ada sahip sembolü geçersiz kılar", + "templateStringBytes": "Şablon dizesi sabit değerleri (t-string) ikili olamaz", + "templateStringIllegal": "Şablon dizesi sabit değerleri (t-string) Python 3.14 veya daha yeni bir sürüm gerektirir", + "templateStringUnicode": "Şablon dizesi sabit değerleri (t-string) unicode olamaz", + "ternaryNotAllowed": "Tür ifadesinde üçlü ifade kullanılamaz", + "totalOrderingMissingMethod": "total_ordering kullanmak için sınıfta \"__lt__\", \"__le__\", \"__gt__\" veya \"__ge__\" metotlarından biri tanımlanmalıdır", + "trailingCommaInFromImport": "Çevreleyen parantezler olmadan sondaki virgüle izin verilmez", + "tryWithoutExcept": "Try deyimi en az bir except veya finally yan tümcesi içermelidir", + "tupleAssignmentMismatch": "Expression with type \"{type}\" cannot be assigned to target tuple", + "tupleInAnnotation": "Tuple expression not allowed in type expression", + "tupleIndexOutOfRange": "{index} dizini {type} türü için aralık dışında", + "typeAliasIllegalExpressionForm": "Tür diğer ad tanımı için geçersiz ifade form", + "typeAliasIsRecursiveDirect": "Tür diğer adı \"{name}\", tanımında kendisini kullanamaz", + "typeAliasNotInModuleOrClass": "TypeAlias yalnızca bir modül veya sınıf kapsamında tanımlanabilir", + "typeAliasRedeclared": "\"{name}\" bir TypeAlias 
olarak bildirilmiş ve yalnızca bir kez atanabilir", + "typeAliasStatementBadScope": "A type statement can be used only within a module or class scope", + "typeAliasStatementIllegal": "Tür diğer adı deyimi için Python 3.12 veya daha yeni bir sürümü gerekiyor", + "typeAliasTypeBadScope": "Tür diğer adı yalnızca bir modül veya sınıf kapsamında tanımlanabilir", + "typeAliasTypeBaseClass": "Bir \"type\" deyiminde tanımlanan type diğer adı temel sınıf olarak kullanılamaz", + "typeAliasTypeMustBeAssigned": "TypeAliasType, tür diğer adıyla aynı ada sahip bir değişkene atanmalıdır", + "typeAliasTypeNameArg": "TypeAliasType için ilk bağımsız değişken, tür diğer adının adını temsil eden bir sabit değerli dize olmalıdır", + "typeAliasTypeNameMismatch": "Tür diğer adının atandığı değişkenin adıyla eşleşmesi gerekiyor", + "typeAliasTypeParamInvalid": "Type parameter list must be a tuple containing only TypeVar, TypeVarTuple, or ParamSpec", + "typeAnnotationCall": "Tür ifadesinde çağrı ifadesine izin verilmiyor", + "typeAnnotationVariable": "Tür ifadesinde değişkene izin verilmiyor", + "typeAnnotationWithCallable": "Type argument for \"type\" must be a class; callables are not supported", + "typeArgListExpected": "ParamSpec, üç nokta veya tür listesi bekleniyordu", + "typeArgListNotAllowed": "Bu tür bağımsız değişkeni için liste ifadesine izin verilmiyor", + "typeArgsExpectingNone": "\"{name}\" sınıfı için tür bağımsız değişkeni beklenmiyordu", + "typeArgsMismatchOne": "Bir tane tür bağımsız değişkeni bekleniyordu ancak {received} alındı", + "typeArgsMissingForAlias": "\"{name}\" genel tür diğer adı için tür bağımsız değişkenleri bekleniyordu", + "typeArgsMissingForClass": "\"{name}\" genel sınıf adı için tür bağımsız değişkenleri bekleniyordu", + "typeArgsTooFew": "\"{name}\" için çok az tür bağımsız değişkeni sağlandı; {expected} bekleniyordu ancak {received} alındı", + "typeArgsTooMany": "\"{name}\" için çok fazla tür bağımsız değişkeni sağlandı; {expected} bekleniyordu ancak 
{received} alındı", + "typeAssignmentMismatch": "\"{sourceType}\" türü \"{destType}\" bildirilen türüne atanamaz", + "typeAssignmentMismatchWildcard": "\"{name}\" içeri aktarma sembolü \"{sourceType}\" türüne sahip ve bu tür \"{destType}\" bildirilen türüne atanamaz", + "typeCallNotAllowed": "Tür ifadesinde type() çağrısı kullanılmamalıdır", + "typeCheckOnly": "\"{name}\", @type_check_only olarak işaretlendi ve yalnızca tür ek açıklamalarında kullanılabilir", + "typeCommentDeprecated": "Use of type comments is deprecated; use type annotation instead", + "typeExpectedClass": "Sınıf bekleniyordu ancak \"{type}\" alındı", + "typeFormArgs": "\"TypeForm\" tek bir konumsal bağımsız değişkeni kabul eder", + "typeGuardArgCount": "Expected a single type argument after \"TypeGuard\" or \"TypeIs\"", + "typeGuardParamCount": "Kullanıcı tanımlı tür koruma işlevleri ve metotlarında en az bir giriş parametresi olmalıdır", + "typeIsReturnType": "TypeIs dönüş türü (\"{returnType}\"), değer parametresi türü (\"{type}\") ile tutarlı değil", + "typeNotAwaitable": "\"{type}\" is not awaitable", + "typeNotIntantiable": "\"{type}\" örneği oluşturulamıyor", + "typeNotIterable": "\"{type}\" yinelenebilir değil", + "typeNotSpecializable": "\"{type}\" türü özelleştirilemedi", + "typeNotSubscriptable": "\"{type}\" türündeki nesne alt simgeleştirilebilir değil", + "typeNotSupportBinaryOperator": "İşleç \"{operator}\", \"{leftType}\" ve \"{rightType}\" türleri için desteklenmiyor", + "typeNotSupportBinaryOperatorBidirectional": "\"{operator}\" işleci, beklenen tür \"{expectedType}\" olduğunda \"{leftType}\" ve \"{rightType}\" türleri için desteklenmez", + "typeNotSupportUnaryOperator": "\"{operator}\" işleci \"{type}\" türü için desteklenmiyor", + "typeNotSupportUnaryOperatorBidirectional": "\"{operator}\" işleci, beklenen tür \"{expectedType}\" olduğunda \"{type}\" türü için desteklenmiyor", + "typeNotUsableWith": "{method} metodunu doğru şekilde uygulamadığından, \"{type}\" türündeki nesne 
\"with\" ile kullanılamaz", + "typeNotUsableWithAsync": "{method} metodunu doğru şekilde uygulamadığından, \"{type}\" türündeki nesne \"async\" ile kullanılamaz", + "typeParameterBoundNotAllowed": "Sınır veya kısıtlama, değişen tür parametresi veya ParamSpec ile kullanılamaz", + "typeParameterConstraintTuple": "Tür parametresi kısıtlaması iki veya daha fazla türde bir demet olmalıdır", + "typeParameterExistingTypeParameter": "\"{name}\" tür parametresi zaten kullanılıyor", + "typeParameterNotDeclared": "\"{name}\" tür parametresi \"{container}\" öğesine ait tür parametreleri listesinde yok", + "typeParametersMissing": "En az bir tür parametresi belirtilmelidir", + "typePartiallyUnknown": "\"{name}\" türü kısmen bilinmiyor", + "typeUnknown": "\"{name}\" türü bilinmiyor", + "typeVarAssignedName": "TypeVar, \"{name}\" adlı bir değişkene atanmalı", + "typeVarAssignmentMismatch": "\"{type}\" türü \"{name}\" tür değişkenine atanamaz", + "typeVarBoundAndConstrained": "TypeVar hem bağlı hem de kısıtlanmış olamaz", + "typeVarBoundGeneric": "TypeVar bağlı türü genel olamaz", + "typeVarConstraintGeneric": "TypeVar kısıtlama türü genel olamaz", + "typeVarDefaultBoundMismatch": "TypeVar varsayılan türü, bağlı türün bir alt türü olmalıdır", + "typeVarDefaultConstraintMismatch": "TypeVar varsayılan türü kısıtlanmış türlerden biri olmalıdır", + "typeVarDefaultIllegal": "Tür değişkeni varsayılan türleri Python 3.13 veya daha yeni bir sürüm gerektirir", + "typeVarDefaultInvalidTypeVar": "\"{name}\" tür parametresi, kapsam dışı bir veya daha fazla tür değişkenine başvuran varsayılan bir türe sahip", + "typeVarFirstArg": "İlk bağımsız değişken olarak TypeVar adı bekleniyordu", + "typeVarInvalidForMemberVariable": "Öznitelik türü, yerel yöntem kapsamına alınan \"{name}\" tür değişkenini kullanamaz", + "typeVarNoMember": "TypeVar \"{type}\", \"{name}\" özniteliğine sahip değil", + "typeVarNotSubscriptable": "TypeVar \"{type}\" alt yazılabilir değil", + "typeVarNotUsedByOuterScope": 
"\"{name}\" tür değişkeni bu bağlamda bir anlam ifade etmiyor", + "typeVarPossiblyUnsolvable": "\"{name}\" tür değişkeni, çağıran \"{param}\" parametresi için bağımsız değişken sağlamazsa çözümlenemeyebilir", + "typeVarSingleConstraint": "TypeVar en az iki kısıtlanmış türe sahip olmalıdır", + "typeVarTupleConstraints": "TypeVarTuple değer kısıtlamalarına sahip olamaz", + "typeVarTupleContext": "TypeVarTuple bu bağlamda kullanılamaz", + "typeVarTupleDefaultNotUnpacked": "TypeVarTuple default type must be an unpacked tuple or TypeVarTuple", + "typeVarTupleMustBeUnpacked": "TypeVarTuple değeri için Paket açma işleci gereklidir", + "typeVarTupleUnknownParam": "\"{name}\" is unknown parameter to TypeVarTuple", + "typeVarUnknownParam": "\"{name}\", TypeVar için bilinmeyen bir parametre", + "typeVarUsedByOuterScope": "TypeVar \"{name}\" zaten bir dış kapsam tarafından kullanılıyor", + "typeVarUsedOnlyOnce": "TypeVar \"{name}\" genel işlev imzasında yalnızca bir kez görünür", + "typeVarVariance": "TypeVar hem birlikte değişken hem de değişken karşıtı olamaz", + "typeVarWithDefaultFollowsVariadic": "\"{typeVarName}\" TypeVar alanı bir varsayılan değer içerir ve \"{variadicName}\" TypeVarTuple alanına uyamaz", + "typeVarWithoutDefault": "\"{name}\" varsayılan türe sahip olmadığından tür parametresi listesinde \"{other}\" sonrasında bulunamaz", + "typeVarsNotInGenericOrProtocol": "Generic[] veya Protocol[] tüm tür değişkenlerini içermeli", + "typedDictAccess": "TypedDict'da öğeye erişilemedi", + "typedDictAssignedName": "TypedDict, \"{name}\" adlı bir değişkene atanmalı", + "typedDictBadVar": "TypedDict sınıfları yalnızca tür ek açıklamaları içerebilir", + "typedDictBaseClass": "TypedDict sınıfları için tüm temel sınıflar da TypedDict sınıfları olmalıdır", + "typedDictBoolParam": "True veya False değeri olması için \"{name}\" parametresi bekleniyordu", + "typedDictClosedExtras": "Temel sınıf \"{name}\", TypedDict öğe türünü türle sınırlanmış bir \"{type}\"", + 
"typedDictClosedFalseNonOpenBase": "\"{name}\" temel sınıfı açık bir TypedDict değil; closed=False değerine izin verilmez", + "typedDictClosedNoExtras": "\"{name}\" temel sınıfı closed bir TypedDict öğesidir; ek öğelere izin verilmiyor", + "typedDictDelete": "TypedDict'da öğe silinemedi", + "typedDictEmptyName": "TypedDict içindeki adlar boş olamaz", + "typedDictEntryName": "Sözlük girdisi adı için sabit değerli dize bekleniyordu", + "typedDictEntryUnique": "Sözlük içindeki adlar benzersiz olmalıdır", + "typedDictExtraArgs": "Ekstra TypedDict bağımsız değişkenleri desteklenmiyor", + "typedDictExtraItemsClosed": "TypedDict, \"closed\" ve \"extra_items\" öğelerinin her ikisini birden kullanamaz", + "typedDictFieldNotRequiredRedefinition": "\"{name}\" TypedDict öğesi NotRequired olarak yeniden tanımlanamaz", + "typedDictFieldReadOnlyRedefinition": "\"{name}\" TypedDict öğesi ReadOnly olarak yeniden tanımlanamaz", + "typedDictFieldRequiredRedefinition": "\"{name}\" TypedDict öğesi Required olarak yeniden tanımlanamaz", + "typedDictFirstArg": "Birinci bağımsız değişken olarak TypedDict sınıf adı bekleniyordu", + "typedDictInClassPattern": "TypedDict sınıfına sınıf deseninde izin verilmez", + "typedDictInitsubclassParameter": "TypedDict, \"{name}\" __init_subclass__ parametresini desteklemez", + "typedDictNotAllowed": "\"TypedDict\" bu bağlamda kullanılamaz", + "typedDictSecondArgDict": "Expected dict or keyword parameter as second parameter", + "typedDictSecondArgDictEntry": "Basit sözlük girişi bekleniyordu", + "typedDictSet": "TypedDict içinde öğe atanamadı", + "unaccessedClass": "\"{name}\" sınıfına erişilmiyor", + "unaccessedFunction": "\"{name}\" işlevine erişilmiyor", + "unaccessedImport": "\"{name}\" içeri aktarma işlemine erişilmiyor", + "unaccessedSymbol": "\"{name}\" öğesine erişilmiyor", + "unaccessedVariable": "\"{name}\" değişkenine erişilmiyor", + "unannotatedFunctionSkipped": "\"{name}\" işlevinin analizi, açıklanmadığından atlandı", + 
"unaryOperationNotAllowed": "Tür ifadesinde birli işleç kullanılamaz", + "unexpectedAsyncToken": "\"async\" öğesinin ardından \"def\", \"with\" veya \"for\" bekleniyordu", + "unexpectedEof": "Beklenmeyen EOF", + "unexpectedExprToken": "İfadenin sonunda beklenmeyen belirteç", + "unexpectedIndent": "Beklenmeyen girinti", + "unexpectedUnindent": "Girintiyi kaldırma beklenmiyordu", + "unhashableDictKey": "Sözlük anahtarı karmalanabilir olmalıdır", + "unhashableSetEntry": "Set entry must be hashable", + "uninitializedAbstractVariables": "Soyut temel sınıfta tanımlanan değişkenler \"{classType}\" final sınıfında başlatılmaz", + "uninitializedInstanceVariable": "\"{name}\" örnek değişkeni sınıf gövdesinde veya __init__ metodunda başlatılmadı", + "unionForwardReferenceNotAllowed": "Union söz dizimi dize işleneni ile kullanılamaz; ifadenin tamamını tırnak içine alın", + "unionSyntaxIllegal": "Union işlemlerinde alternatif söz dizimi kullanılabilmesi için Python 3.10 veya daha yeni bir sürümü gerekiyor", + "unionTypeArgCount": "Union için iki veya daha fazla tür bağımsız değişkeni gerekiyor", + "unionUnpackedTuple": "Union, paketlenmemiş bir tuple içeremez", + "unionUnpackedTypeVarTuple": "Union, paketlenmemiş bir TypeVarTuple içeremez", + "unnecessaryCast": "Gereksiz \"cast\" çağrısı; tür zaten \"{type}\"", + "unnecessaryIsInstanceAlways": "Gereksiz isinstance çağrısı; \"{testType}\" her zaman bir \"{classType}\" örneğidir", + "unnecessaryIsInstanceNever": "Gereksiz isinstance çağrısı; \"{testType}\" asla bir \"{classType}\" örneği değildir", + "unnecessaryIsSubclassAlways": "Gereksiz issubclass çağrısı; \"{testType}\" her zaman \"{classType}\" sınıf türünün bir alt sınıfıdır", + "unnecessaryIsSubclassNever": "Gereksiz issubclass çağrısı; \"{testType}\" asla \"{classType}\" sınıf türünün bir alt sınıfı değildir", + "unnecessaryPyrightIgnore": "Gereksiz \"# pyright: ignore\" açıklaması", + "unnecessaryPyrightIgnoreRule": "\"# pyright: ignore\" rule: \"{name}\" gereksiz", + 
"unnecessaryTypeIgnore": "Unnecessary \"# type: ignore\" comment", + "unpackArgCount": "\"Unpack\" sonrasında tek bir tür bağımsız değişken bekleniyordu", + "unpackExpectedTypeVarTuple": "Unpack için tür bağımsız değişkeni olarak TypeVarTuple veya tuple bekleniyordu", + "unpackExpectedTypedDict": "Unpack için TypedDict tür bağımsız değişkeni bekleniyordu", + "unpackIllegalInComprehension": "Anlamada paket açma işlemi kullanılamaz", + "unpackInAnnotation": "Tür ifadesinde paket açma işleci kullanılamaz", + "unpackInDict": "Sözlüklerde paket açma işlemi kullanılamaz", + "unpackInSet": "Unpack operator not allowed within a set", + "unpackNotAllowed": "Unpack is not allowed in this context", + "unpackOperatorNotAllowed": "Bu bağlamda paket açma işlemi kullanılamaz", + "unpackTuplesIllegal": "Python 3.8'den önceki demetler içinde paket açma işlemi kullanılamıyor", + "unpackedArgInTypeArgument": "Paketlenmemiş bağımsız değişkenler bu bağlamda kullanılamaz", + "unpackedArgWithVariadicParam": "Paketlenmemiş bağımsız değişken, TypeVarTuple parametresi için kullanılamaz", + "unpackedDictArgumentNotMapping": "** sonrasındaki bağımsız değişken ifadesi \"str\" anahtar türüne sahip bir eşleme olmalıdır", + "unpackedDictSubscriptIllegal": "Alt simgede sözlük açma işlecine izin verilmiyor", + "unpackedSubscriptIllegal": "Alt simgede açma işleci için Python 3.11 veya daha yeni bir sürümü gerekiyor", + "unpackedTypeVarTupleExpected": "Paketlenmemiş TypeVarTuple bekleniyordu; Unpack[{name1}] veya *{name2} kullanın", + "unpackedTypedDictArgument": "Paketlenmemiş TypedDict bağımsız değişkeni parametrelerle eşlenemiyor", + "unreachableCodeCondition": "Koşul statik şekilde false olarak değerlendirildiği için kod analiz edilmedi", + "unreachableCodeStructure": "Kod yapısal olarak ulaşılamaz durumda", + "unreachableCodeType": "Tür analizi koda erişilemediğini gösteriyor", + "unreachableExcept": "Except clause is unreachable because exception is already handled", + 
"unsupportedDunderAllOperation": "\"__all__\" üzerinde işlem desteklenmiyor, bu nedenle dışarı aktarılan sembol listesi yanlış olabilir", + "unusedCallResult": "Çağrı ifadesinin sonucu \"{type}\" türünde ve kullanılmıyor; bilerek yapıldıysa \"_\" değişkenine atayın", + "unusedCoroutine": "Result of async function call is not used; use \"await\" or assign result to variable", + "unusedExpression": "İfade değeri kullanılmadı", + "varAnnotationIllegal": "Type annotations for variables requires Python 3.6 or newer; use type comment for compatibility with previous versions", + "variableFinalOverride": "\"{name}\" değişkeni Final olarak işaretlendi ve \"{className}\" sınıfı içinde aynı ada sahip Final olmayan değişkeni geçersiz kılıyor", + "variadicTypeArgsTooMany": "Tür bağımsız değişkeni listesinde en fazla bir paketlenmemiş TypeVarTuple veya tuple olabilir", + "variadicTypeParamTooManyAlias": "Tür diğer adı en fazla bir TypeVarTuple tür parametresine sahip olabilir ancak birden fazlası ({names}) alındı", + "variadicTypeParamTooManyClass": "Genel sınıf en fazla bir TypeVarTuple tür parametresine sahip olabilir ancak birden fazlası ({names}) alındı", + "walrusIllegal": "\":=\" işleci için Python 3.8 veya daha yeni bir sürümü gerekiyor", + "walrusNotAllowed": "Çevreleyen parantezler olmadan bu bağlamda \":=\" işlecine izin verilmiyor", + "wildcardInFunction": "Wildcard import not allowed within a class or function", + "wildcardLibraryImport": "Wildcard import from a library not allowed", + "wildcardPatternTypePartiallyUnknown": "Joker karakter deseni tarafından yakalanan tür kısmen bilinmiyor", + "wildcardPatternTypeUnknown": "Joker karakter deseni tarafından yakalanan tür bilinmiyor", + "yieldFromIllegal": "\"yield from\" kullanımı için Python 3.3 veya daha yeni bir sürümü gerekiyor", + "yieldFromOutsideAsync": "\"yield from\" not allowed in an async function", + "yieldOutsideFunction": "\"yield\", işlev veya lambda dışında kullanılamaz", + "yieldWithinComprehension": 
"Bir anlama içinde “yield” kullanılamaz", + "zeroCaseStatementsFound": "Match deyimi en az bir case deyimi içermeli", + "zeroLengthTupleNotAllowed": "Zero-length tuple is not allowed in this context" + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "“Annotated” özel biçimi, örnek ve sınıf denetimleriyle kullanılamaz", + "argParam": "Bağımsız değişken \"{paramName}\" parametresine karşılık gelir", + "argParamFunction": "Bağımsız değişken, \"{functionName}\" işlevinde \"{paramName}\" parametresine karşılık gelir", + "argsParamMissing": "\"*{paramName}\" parametresine karşılık gelen bir parametre yok", + "argsPositionOnly": "Yalnızca konum parametresi uyuşmazlığı; {expected} bekleniyordu ancak {received} alındı", + "argumentType": "Bağımsız değişken türü \"{type}\"", + "argumentTypes": "Bağımsız değişken türleri: ({types})", + "assignToNone": "Tür \"None\" öğesine atanamaz", + "asyncHelp": "\"async with\" mi demek istediniz?", + "baseClassIncompatible": "\"{baseClass}\" temel sınıfı \"{type}\" türüyle uyumlu değil", + "baseClassIncompatibleSubclass": "\"{baseClass}\" temel sınıfı, \"{type}\" türüyle uyumlu olmayan \"{subclass}\" alt sınıfından türetiliyor", + "baseClassOverriddenType": "\"{baseClass}\" temel sınıfı geçersiz kılınan \"{type}\" türünü sağlar", + "baseClassOverridesType": "Temel sınıf \"{baseClass}\", \"{type}\" türünü geçersiz kılıyor", + "bytesTypePromotions": "“bytearray” ve “memoryview” için tür yükseltme davranışını etkinleştirmek için disableBytesTypePromotions seçeneğini “false” olarak ayarlayın", + "conditionalRequiresBool": "\"{operandType}\" türü için __bool__ metodu \"bool\" yerine \"{boolReturnType}\" türü döndürür", + "dataClassFieldLocation": "Alan bildirimi", + "dataClassFrozen": "\"{name}\" donduruldu", + "dataProtocolUnsupported": "\"{name}\" bir veri protokolü", + "descriptorAccessBindingFailed": "Tanımlayıcı sınıf \"{className}\" için \"{name}\" yöntemi bağlanamadı", + "descriptorAccessCallFailed": "Tanımlayıcı sınıf 
\"{className}\" için \"{name}\" yöntemi çağrılamadı", + "finalMethod": "Final metodu", + "functionParamDefaultMissing": "\"{name}\" parametresinde varsayılan bağımsız değişken eksik", + "functionParamName": "Parametre adı uyuşmazlığı: \"{destName}\" ve \"{srcName}\"", + "functionParamPositionOnly": "Yalnızca konum parametresi uyuşmazlığı; \"{name}\" parametresi yalnızca konum değil", + "functionReturnTypeMismatch": "\"{sourceType}\" işlev dönüş türü, \"{destType}\" türüyle uyumsuz", + "functionTooFewParams": "İşlev çok az konumsal parametre kabul ediyor; {expected} bekleniyordu ancak {received} alındı", + "functionTooManyParams": "İşlev çok fazla konumsal parametre kabul ediyor; {expected} bekleniyordu ancak {received} alındı", + "genericClassNotAllowed": "Örnek veya sınıf denetimleri için tür bağımsız değişkenlerine sahip genel türe izin verilmiyor", + "incompatibleDeleter": "Property deleter method is incompatible", + "incompatibleGetter": "Property getter method is incompatible", + "incompatibleSetter": "Property setter method is incompatible", + "initMethodLocation": "\"{type}\" sınıfı içinde __init__ metodu tanımlandı", + "initMethodSignature": "__init__ imzası \"{type}\"", + "initSubclassLocation": "__init_subclass__ yöntemi \"{name}\" sınıfı içinde tanımlandı", + "invariantSuggestionDict": "“dict” öğesinden değer türünde eş değişken olan “Mapping” öğesine geçmeyi deneyin", + "invariantSuggestionList": "“list” öğesinden eş değişken olan “Sequence” öğesine geçmeyi deneyin", + "invariantSuggestionSet": "Consider switching from \"set\" to \"Container\" which is covariant", + "isinstanceClassNotSupported": "\"{type}\", örnek ve sınıf denetimleri için desteklenmiyor", + "keyNotRequired": "\"{name}\", \"{type}\" türünde gerekli bir anahtar olmadığından çalışma zamanı özel durumuna neden olabilir", + "keyReadOnly": "\"{name}\", \"{type}\" içinde salt okunur", + "keyRequiredDeleted": "\"{name}\" gerekli bir anahtar olduğundan silinemez", + "keyUndefined": 
"\"{name}\", \"{type}\" içinde tanımlı bir anahtar değil", + "kwargsParamMissing": "\"**{paramName}\" parametresine karşılık gelen bir parametre yok", + "listAssignmentMismatch": "\"{type}\" türü hedef listeyle uyumsuz", + "literalAssignmentMismatch": "\"{sourceType}\" \"{destType}\" türüne atanamaz", + "literalNotAllowed": "“Literal” özel biçimi, örnek ve sınıf denetimleriyle kullanılamaz", + "matchIsNotExhaustiveHint": "Tümlemeli işleme amaçlanmadıysa \"case _: pass\" ekleyin", + "matchIsNotExhaustiveType": "\"{type}\" türü işlenmemiş", + "memberAssignment": "\"{type}\" türündeki ifade, \"{classType}\" sınıfının \"{name}\" özniteliğine atanamaz", + "memberIsAbstract": "\"{type}.{name}\" uygulanmadı", + "memberIsAbstractMore": "ve +{count} tane daha...", + "memberIsClassVarInProtocol": "\"{name}\", protokolde ClassVar olarak tanımlandı", + "memberIsInitVar": "\"{name}\" is an init-only field", + "memberIsInvariant": "\"{name}\" değiştirilebilir olduğundan sabit ayarlanır", + "memberIsNotClassVarInClass": "\"{name}\" protokolle uyumlu olması için ClassVar olarak tanımlanmalıdır", + "memberIsNotClassVarInProtocol": "\"{name}\" protokolde ClassVar olarak tanımlanmadı", + "memberIsNotReadOnlyInProtocol": "\"{name}\" protokolde salt okunur durumda değil", + "memberIsReadOnlyInProtocol": "\"{name}\" protokolde salt okunur durumda", + "memberIsWritableInProtocol": "\"{name}\" protokolde yazılabilir", + "memberSetClassVar": "\"{name}\" özniteliği bir ClassVar olduğundan sınıf örneği aracılığıyla atanamaz", + "memberTypeMismatch": "\"{name}\" uyumsuz bir tür", + "memberUnknown": "\"{name}\" özniteliği bilinmiyor", + "metaclassConflict": "Metaclass \"{metaclass1}\", \"{metaclass2}\" ile çakışıyor", + "missingDeleter": "Property deleter method is missing", + "missingGetter": "Property getter method is missing", + "missingSetter": "Property setter method is missing", + "namedParamMissingInDest": "\"{name}\" ek parametresi", + "namedParamMissingInSource": "\"{name}\" eksik 
anahtar sözcük parametresi", + "namedParamTypeMismatch": "\"{sourceType}\" türündeki \"{name}\" anahtar sözcük parametresi, \"{destType}\" türüyle uyumsuz", + "namedTupleNotAllowed": "NamedTuple örnek veya sınıf kontrolleri için kullanılamaz", + "newMethodLocation": "\"{type}\" sınıfı içinde __new__ metodu tanımlandı", + "newMethodSignature": "__new__ imzası \"{type}\"", + "newTypeClassNotAllowed": "NewType ile oluşturulan tür, örnek ve sınıf denetimleriyle kullanılamaz", + "noOverloadAssignable": "Aşırı yüklenmiş işlevlerden hiçbiri \"{type}\" türüyle uyuşmuyor", + "noneNotAllowed": "Örnek veya sınıf denetimleri için None kullanılamaz", + "orPatternMissingName": "Eksik adlar: {name}", + "overloadIndex": "Aşırı yükleme {index} en yakın eşleşmedir", + "overloadNotAssignable": "Bir veya daha fazla \"{name}\" aşırı yüklemesi atanabilir değil", + "overloadSignature": "Aşırı yükleme imzası burada tanımlı", + "overriddenMethod": "Geçersiz kılınan metot", + "overriddenSymbol": "Geçersiz kılınan simge", + "overrideInvariantMismatch": "\"{overrideType}\" geçersiz kılma türü \"{baseType}\" temel türüyle aynı değil", + "overrideIsInvariant": "Değişken değişebilir, bu nedenle türü sabit", + "overrideNoOverloadMatches": "Geçersiz kılmadaki hiçbir aşırı yükleme imzası temel metotla uyumlu değil", + "overrideNotClassMethod": "Base method is declared as a classmethod but override is not", + "overrideNotInstanceMethod": "Temel metot bir örnek metodu olarak bildirilir, ancak geçersiz kılma bu şekilde bildirilmez", + "overrideNotStaticMethod": "Temel metot bir staticmethod olarak bildirilir, ancak geçersiz kılma bu şekilde bildirilmez", + "overrideOverloadNoMatch": "Geçersiz kılma temel yöntemin tüm aşırı yüklemelerini işlemez", + "overrideOverloadOrder": "Geçersiz kılma metoduna ait aşırı yüklemeler temel metotla aynı sırada olmalıdır", + "overrideParamKeywordNoDefault": "\"{name}\" anahtar sözcük parametresi eşleşmiyor: Temel parametrede varsayılan bağımsız değişken değer var, 
geçersiz kılma parametresinde yok", + "overrideParamKeywordType": "\"{name}\" anahtar sözcük parametresi tür uyuşmazlığı: temel parametre \"{baseType}\" türünde, geçersiz kılma parametresi \"{overrideType}\" türünde", + "overrideParamName": "{index} parametresi ad uyuşmazlığı: temel parametre adı \"{baseName}\", geçersiz kılma parametresinin adı \"{overrideName}\"", + "overrideParamNameExtra": "Temelde \"{name}\" parametresi eksik", + "overrideParamNameMissing": "Geçersiz kılmada \"{name}\" parametresi eksik", + "overrideParamNamePositionOnly": "{index} parametresi eşleşmiyor: \"{baseName}\" temel parametresi, anahtar sözcük parametresidir, geçersiz kılma parametresi ise yalnızca konumdur", + "overrideParamNoDefault": "{index} parametresi eşleşmiyor: Temel parametrede varsayılan bağımsız değişken değer var, geçersiz kılma parametresinde yok", + "overrideParamType": "\"{index}\" parametresi tür uyuşmazlığı: temel parametre \"{baseType}\" türünde, geçersiz kılma parametresi \"{overrideType}\" türünde", + "overridePositionalParamCount": "Konumsal parametre sayısı uyuşmazlığı; temel metotta {baseCount} var, ancak geçersiz kılmada {overrideCount} var", + "overrideReturnType": "Dönüş türü uyuşmazlığı: temel metot tür \"{baseType}\" türü döndürür, geçersiz kılma ise \"{overrideType}\" türü döndürür", + "overrideType": "Temel sınıf, türü \"{type}\" olarak tanımlar", + "paramAssignment": "{index} parametresi: \"{sourceType}\" türü \"{destType}\" türüyle uyumsuz", + "paramSpecMissingInOverride": "Geçersiz kılma metodunda ParamSpec parametreleri eksik", + "paramType": "Parametre türü \"{paramType}\"", + "privateImportFromPyTypedSource": "Bunun yerine \"{module}\" üzerinden içeri aktarın", + "propertyAccessFromProtocolClass": "Protokol sınıfı içinde tanımlanan bir özelliğe sınıf değişkeni olarak erişilemez", + "propertyMethodIncompatible": "Property method \"{name}\" is incompatible", + "propertyMethodMissing": "Property method \"{name}\" is missing in override", + 
"propertyMissingDeleter": "Property \"{name}\" has no defined deleter", + "propertyMissingSetter": "Property \"{name}\" has no defined setter", + "protocolIncompatible": "\"{sourceType}\", \"{destType}\" protokol ayarlarıyla uyumsuz", + "protocolMemberMissing": "\"{name}\" yok", + "protocolRequiresRuntimeCheckable": "Protocol sınıfının örnekle ve sınıf denetimleriyle birlikte kullanılabilmesi için @runtime_checkable olması gerekir", + "protocolSourceIsNotConcrete": "\"{sourceType}\" somut bir sınıf türü değil ve \"{destType}\" türüne atanamaz", + "protocolUnsafeOverlap": "\"{name}\" öznitelikleri protokolle aynı adlara sahip", + "pyrightCommentIgnoreTip": "Tek bir satırda tanılamayı durdurmak için \"# pyright: ignore[]\" kullanın", + "readOnlyAttribute": "\"{name}\" özniteliği salt okunur", + "seeClassDeclaration": "Sınıf bildirimine bakın", + "seeDeclaration": "Bildirime bakın", + "seeFunctionDeclaration": "İşlev bildirimine bakın", + "seeMethodDeclaration": "Metot bildirimine bakın", + "seeParameterDeclaration": "Parametre bildirimine bakın", + "seeTypeAliasDeclaration": "Tür diğer adı bildirimine bakın", + "seeVariableDeclaration": "Değişken bildirimine bakın", + "tupleAssignmentMismatch": "Type \"{type}\" is incompatible with target tuple", + "tupleEntryTypeMismatch": "Tuple entry {entry} is incorrect type", + "tupleSizeIndeterminateSrc": "Tuple boyutu uyuşmuyor; {expected} bekleniyordu ancak indeterminate alındı", + "tupleSizeIndeterminateSrcDest": "Tuple boyutu uyuşmuyor; {expected} veya daha büyük bir değer bekleniyordu ancak belirsiz bir değer alındı", + "tupleSizeMismatch": "Tuple size mismatch; expected {expected} but received {received}", + "tupleSizeMismatchIndeterminateDest": "Tuple boyutu uyuşmuyor; {expected} veya daha büyük bir değer bekleniyordu ancak {received} alındı", + "typeAliasInstanceCheck": "“Type” deyimi ile oluşturulan type diğer adı örnek ve sınıf denetimleri kullanılamaz", + "typeAssignmentMismatch": "\"{sourceType}\" türü 
\"{destType}\" türüne atanamaz", + "typeBound": "\"{sourceType}\" türü \"{name}\" tür değişkeni için \"{destType}\" üst sınırına atanamaz", + "typeConstrainedTypeVar": "\"{type}\" türü \"{name}\" kısıtlanmış tür değişkenine atanamaz", + "typeIncompatible": "\"{sourceType}\" \"{destType}\" öğesine atanamaz", + "typeNotClass": "\"{type}\" bir sınıf değil", + "typeNotStringLiteral": "\"{type}\" bir sabit değeri dize değil", + "typeOfSymbol": "\"{name}\" türü \"{type}\"", + "typeParamSpec": "\"{type}\" türü \"{name}\" ParamSpec ile uyumlu değil", + "typeUnsupported": "\"{type}\" türü desteklenmiyor", + "typeVarDefaultOutOfScope": "\"{name}\" tür değişkeni kapsamda değil", + "typeVarIsContravariant": "\"{name}\" tür parametresi bir karşıt değişken ancak \"{sourceType}\", \"{destType}\" öğesinin bir üst türü değil", + "typeVarIsCovariant": "\"{name}\" tür parametresi bir eşdeğişken ancak \"{sourceType}\", \"{destType}\" öğesinin bir alt türü değil", + "typeVarIsInvariant": "\"{name}\" tür parametresi bir değişmez değer ancak \"{sourceType}\", \"{destType}\" ile aynı değil", + "typeVarNotAllowed": "Örnek veya sınıf denetimleri için TypeVar'a izin verilmiyor", + "typeVarTupleRequiresKnownLength": "TypeVarTuple cannot be bound to a tuple of unknown length", + "typeVarUnnecessarySuggestion": "Bunun yerine {type} kullanın", + "typeVarUnsolvableRemedy": "Bağımsız değişken sağlanmamışken dönüş türünü belirten bir aşırı yükleme belirtin", + "typeVarsMissing": "Eksik tür değişkenleri: {names}", + "typedDictBaseClass": "\"{type}\" sınıfı bir TypedDict değil", + "typedDictClassNotAllowed": "Örnek veya sınıf denetimleri için TypedDict sınıfına izin verilmiyor", + "typedDictClosedExtraNotAllowed": "\"{name}\" öğesi eklenemiyor", + "typedDictClosedExtraTypeMismatch": "\"{type}\" türündeki \"{name}\" öğesi eklenemiyor", + "typedDictClosedFieldNotReadOnly": "ReadOnly olması gerektiğinden \"{name}\" öğesi eklenemiyor", + "typedDictClosedFieldNotRequired": "NotRequired olması 
gerektiğinden \"{name}\" öğesi eklenemiyor", + "typedDictExtraFieldNotAllowed": "\"{name}\" öğesi \"{type}\" türünde mevcut değil", + "typedDictExtraFieldTypeMismatch": "\"{name}\" türü, \"{type}\" altındaki \"extra_items\" türüyle uyumlu değil", + "typedDictFieldMissing": "\"{type}\" için \"{name}\" eksik", + "typedDictFieldNotReadOnly": "\"{name}\", \"{type}\" içinde salt okunur değil", + "typedDictFieldNotRequired": "\"{type}\" içinde \"{name}\" gerekli değil", + "typedDictFieldRequired": "\"{type}\" içinde \"{name}\" gerekiyor", + "typedDictFieldTypeMismatch": "\"{type}\" türü \"{name}\" öğesine atanamaz", + "typedDictFieldUndefined": "\"{name}\", \"{type}\" türündeki tanımsız bir öğedir", + "typedDictKeyAccess": "TypedDict’te öğeye başvurmak için [\"{name}\"] değerini kullanın", + "typedDictNotAllowed": "TypedDict örnek veya sınıf kontrolleri için kullanılamaz", + "unhashableType": "\"{type}\" türü karmalanabilir değil", + "uninitializedAbstractVariable": "\"{name}\" örnek değişkeni, \"{classType}\" soyut temel sınıfında tanımlandı ancak başlatılmadı", + "unreachableExcept": "\"{exceptionType}\", \"{parentType}\" üst öğesinin bir alt sınıfı", + "useDictInstead": "Sözlük türünü belirtmek için dict[T1, T2] kullanın", + "useListInstead": "Use list[T] to indicate a list type or T1 | T2 to indicate a union type", + "useTupleInstead": "Use tuple[T1, ..., Tn] to indicate a tuple type or T1 | T2 to indicate a union type", + "useTypeInstead": "Bunun yerine type[T] kullanın", + "varianceMismatchForClass": "\"{typeVarName}\" tür bağımsız değişkeni \"{className}\" taban sınıfıyla uyumsuz", + "varianceMismatchForTypeAlias": "\"{typeVarName}\" tür bağımsız değişkeninin varyansı, \"{typeAliasParam}\" ile uyumsuz" + }, + "Service": { + "longOperation": "Çalışma alanı kaynak dosyalarının numaralandırılması uzun zaman alıyor. Bunun yerine bir alt klasör açabilirsiniz. 
[Daha fazla bilgi edinin](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.zh-cn.json b/python-parser/packages/pyright-internal/src/localization/package.nls.zh-cn.json new file mode 100644 index 00000000..059249e7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.zh-cn.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "创建类型 Stub", + "createTypeStubFor": "为 \"{moduleName}\" 创建类型 Stub", + "executingCommand": "正在执行命令", + "filesToAnalyzeCount": "要分析的 {count} 个文件", + "filesToAnalyzeOne": "1 个要分析的文件", + "findingReferences": "正在查找引用", + "organizeImports": "整理 Import" + }, + "Completion": { + "autoImportDetail": "自动导入", + "indexValueDetail": "索引值" + }, + "Diagnostic": { + "abstractMethodInvocation": "无法调用方法“{method}”,因为它是抽象的且未实施", + "annotatedMetadataInconsistent": "带批注的元数据类型“{metadataType}”与类型“{type}”不兼容", + "annotatedParamCountMismatch": "参数批注计数不匹配: 应为 {expected},但收到 {received}", + "annotatedTypeArgMissing": "“Annotated”应为一个类型参数和一个或多个批注", + "annotationBytesString": "类型表达式不能使用字节字符串文本", + "annotationFormatString": "类型表达式不能使用格式字符串文本(f-string)", + "annotationNotSupported": "此语句不支持类型批注", + "annotationRawString": "类型表达式不能使用原始字符串文本", + "annotationSpansStrings": "类型表达式不能跨越多个字符串文本", + "annotationStringEscape": "类型表达式不能包含转义字符", + "annotationTemplateString": "类型表达式不能使用模板字符串字面量(t-strings)", + "argAssignment": "无法将“{argType}”类型的参数分配给“{paramType}”类型的参数", + "argAssignmentFunction": "无法将\"{argType}\"类型的参数分配给函数\"{functionName}\"中的\"{paramType}\"类型参数", + "argAssignmentParam": "无法将“{argType}”类型的参数分配给“{paramType}”类型的参数“{paramName}”", + "argAssignmentParamFunction": "无法将“{argType}”类型的参数分配给函数“{functionName}”中类型为“{paramType}”的参数“{paramName}”", + "argMissingForParam": "参数 {name} 缺少参数", + "argMissingForParams": "参数 {names} 缺少参数", + "argMorePositionalExpectedCount": "预期还有 {expected} 个位置参数", + "argMorePositionalExpectedOne": "应为 1 个以上位置参数", + "argPositional": 
"应为位置参数", + "argPositionalExpectedCount": "应为 {expected} 个位置参数", + "argPositionalExpectedOne": "应为 1 个位置参数", + "argTypePartiallyUnknown": "参数类型部分未知", + "argTypeUnknown": "参数类型未知", + "assertAlwaysTrue": "断言表达式的计算结果始终为 true", + "assertTypeArgs": "“assert_type”需要两个位置参数", + "assertTypeTypeMismatch": "“assert_type”不匹配:应为“{expected}”,但收到“{received}”", + "assignmentExprComprehension": "赋值表达式目标“{name}”不能使用与目标推导式相同的名称", + "assignmentExprContext": "赋值表达式必须在模块、函数或 lambda 中", + "assignmentExprInSubscript": "仅在 Python 3.10 和更高版本中支持下标中的赋值表达式", + "assignmentInProtocol": "Protocol 类中的实例或类变量必须在类主体内显式声明", + "assignmentTargetExpr": "表达式不能是赋值目标", + "asyncNotInAsyncFunction": "不允许在 async 函数之外使用 \"async\"", + "awaitIllegal": "使用 “await” 需要 Python 3.5 或更高版本", + "awaitNotAllowed": "类型表达式不能使用 \"await\"", + "awaitNotInAsync": "仅允许在 async 函数内使用 \"await\"", + "backticksIllegal": "Python 3.x 中不支持由反引号环绕的表达式;请改用 repr", + "baseClassCircular": "类不能从自身派生", + "baseClassFinal": "基类 \"{type}\" 被标记为 final 类,无法子类化", + "baseClassIncompatible": "{type} 的基类相互不兼容", + "baseClassInvalid": "类的参数必须是基类", + "baseClassMethodTypeIncompatible": "类“{classType}”的基类以不兼容的方式定义方法“{name}”", + "baseClassUnknown": "基类类型未知,隐蔽派生类的类型", + "baseClassVariableTypeIncompatible": "类“{classType}”的基类以不兼容的方式定义变量“{name}”", + "binaryOperationNotAllowed": "类型表达式中不允许使用二元运算符", + "bindParamMissing": "无法绑定方法“{methodName}”,因为它缺少 “self” 或 “cls” 参数", + "bindTypeMismatch": "无法绑定方法“{methodName}”,因为“{type}”不能分配给参数“{paramName}”", + "breakInExceptionGroup": "\"except*\" 块中不允许 \"break\"", + "breakOutsideLoop": "“break”只能在循环中使用", + "bytesUnsupportedEscape": "bytes 文本中不受支持的转义序列", + "callableExtraArgs": "\"Callable\"应只有两个类型参数", + "callableFirstArg": "应为参数类型列表或“...”", + "callableNotInstantiable": "无法实例化类型“{type}”", + "callableSecondArg": "返回类型应为\"Callable\"的第二个类型参数", + "casePatternIsIrrefutable": "仅最后一个 case 语句允许使用无可辩驳的模式", + "classAlreadySpecialized": "类型\"{type}\"已专用化", + "classDecoratorTypeUnknown": "非类型化类修饰器遮盖类类型;忽略修饰器", + "classDefinitionCycle": 
"“{name}”的类定义取决于自身", + "classGetItemClsParam": "__class_getitem__替代应采用“cls”参数", + "classMethodClsParam": "类方法应采用“cls”参数", + "classNotRuntimeSubscriptable": "类 \"{name}\" 的下标将生成运行时异常; 请将类型表达式括在引号中", + "classPatternBuiltInArgPositional": "类模式仅接受位置子模式", + "classPatternNewType": "无法在类模式中使用“{type}”,因为它是使用 NewType 定义的", + "classPatternPositionalArgCount": "类“{type}”的位置模式太多; 应为 {expected},但收到了 {received}", + "classPatternTypeAlias": "无法在类模式中使用“{type}”,因为它是专用类型别名", + "classPropertyDeprecated": "类属性在 Python 3.11 中已弃用,在 Python 3.13 中将不受支持", + "classTypeParametersIllegal": "类类型参数语法需要 Python 3.12 或更高版本", + "classVarFirstArgMissing": "\"ClassVar\"后应为类型参数", + "classVarNotAllowed": "此上下文中不允许使用“ClassVar”", + "classVarOverridesInstanceVar": "类变量\"{name}\"替代类\"{className}\"中的同名实例变量", + "classVarTooManyArgs": "“ClassVar”后应只有一个类型参数", + "classVarWithTypeVar": "“ClassVar”类型不能包含类型变量", + "clsSelfParamTypeMismatch": "参数“{name}”的类型必须是类“{classType}”的超类型", + "codeTooComplexToAnalyze": "代码太复杂,无法分析;通过重构为子例程或减少条件代码路径来降低复杂性", + "collectionAliasInstantiation": "无法实例化类型“{type}”,请改用“{alias}”", + "comparisonAlwaysFalse": "条件的计算结果始终为 False,因为类型“{leftType}”和“{rightType}”没有重叠", + "comparisonAlwaysTrue": "条件的计算结果始终为 True,因为类型“{leftType}”和“{rightType}”没有重叠", + "comprehensionInDict": "推导式不能与其他字典条目一起使用", + "comprehensionInSet": "推导式不能与其他 set 条目一起使用", + "concatenateContext": "此上下文中不允许使用“Concatenate”", + "concatenateParamSpecMissing": "“Concatenate”的最后一个类型参数必须是 ParamSpec 或 \"...\"", + "concatenateTypeArgsMissing": "“Concatenate” 至少需要两个类型参数", + "conditionalOperandInvalid": "类型“{type}”的条件操作数无效", + "constantRedefinition": "“{name}”是常量 (因为它是大写)且无法重新定义", + "constructorParametersMismatch": "类“{classType}”中__new__和__init__的签名不匹配", + "containmentAlwaysFalse": "表达式的计算结果始终为 False,因为类型“{leftType}”和“{rightType}”没有重叠", + "containmentAlwaysTrue": "表达式的计算结果始终为 True,因为类型“{leftType}”和“{rightType}”没有重叠", + "continueInExceptionGroup": "\"except*\" 块中不允许 \"continue\"", + "continueOutsideLoop": "“continue”只能在循环中使用", + 
"coroutineInConditionalExpression": "始终计算结果为 True 的条件表达式引用协同例程", + "dataClassBaseClassFrozen": "非冻结类不能从已冻结类继承", + "dataClassBaseClassNotFrozen": "冻结类不能从未冻结的类继承", + "dataClassConverterFunction": "类型“{argType}”的参数不是类型为“{fieldType}”的字段“{fieldName}”的有效转换器", + "dataClassConverterOverloads": "“{funcName}”的重载不是类型为“{fieldType}”的字段“{fieldName}”的有效转换器", + "dataClassFieldInheritedDefault": "“{fieldName}”替代同名字段,但缺少默认值", + "dataClassFieldWithDefault": "没有默认值的字段不能出现在具有默认值的字段之后", + "dataClassFieldWithPrivateName": "数据类字段不能使用专用名称", + "dataClassFieldWithoutAnnotation": "不带类型批注的数据类字段将导致运行时异常", + "dataClassPostInitParamCount": "数据类__post_init__参数计数不正确;InitVar 字段数为 {expected}", + "dataClassPostInitType": "数据类__post_init__方法参数类型不匹配 \"{fieldName}\"字段", + "dataClassSlotsOverwrite": "__slots__已在类中定义", + "dataClassTransformExpectedBoolLiteral": "静态计算结果为 True 或 False 的预期表达式", + "dataClassTransformFieldSpecifier": "应为类或函数的 tuple,但收到类型 \"{type}\"", + "dataClassTransformPositionalParam": "“dataclass_transform”的所有参数都必须是关键字参数", + "dataClassTransformUnknownArgument": "dataclass_transform不支持参数“{name}”", + "dataProtocolInSubclassCheck": "issubclass 调用中不允许使用数据协议(包括非方法属性)", + "declaredReturnTypePartiallyUnknown": "声明的返回类型“{returnType}”部分未知", + "declaredReturnTypeUnknown": "声明的返回类型未知", + "defaultValueContainsCall": "参数默认值表达式中不允许函数调用和可变对象", + "defaultValueNotAllowed": "“*”或“**”的参数不能有默认值", + "delTargetExpr": "无法删除表达式", + "deprecatedClass": "类\"{name}\"已弃用", + "deprecatedConstructor": "类\"{name}\"的构造函数已弃用", + "deprecatedDescriptorDeleter": "已弃用描述符“{name}”的“__delete__”方法", + "deprecatedDescriptorGetter": "已弃用描述符“{name}”的“__get__”方法", + "deprecatedDescriptorSetter": "已弃用描述符“{name}”的“__set__”方法", + "deprecatedFunction": "函数“{name}”已弃用", + "deprecatedMethod": "类“{className}”中的“{name}”方法已弃用", + "deprecatedPropertyDeleter": "已弃用 property \"{name}\" 的 deleter", + "deprecatedPropertyGetter": "已弃用 property \"{name}\" 的 getter", + "deprecatedPropertySetter": "已弃用 property \"{name}\" 的 setter", + "deprecatedType": 
"自 Python {version} 起,此类型已弃用;请改用“{replacement}”", + "dictExpandIllegalInComprehension": "推导式中不允许使用字典扩展", + "dictInAnnotation": "类型表达式中不允许使用字典表达式", + "dictKeyValuePairs": "字典条目必须包含键/值对", + "dictUnpackIsNotMapping": "字典解包运算符的预期映射", + "dunderAllSymbolNotPresent": "\"{name}\"已在__all__中指定,但在模块中不存在", + "duplicateArgsParam": "只允许一个 \"*\" 参数", + "duplicateBaseClass": "不允许重复的基类", + "duplicateCapturePatternTarget": "捕获目标“{name}”不能在同一模式中出现多次", + "duplicateCatchAll": "仅允许一个 catch-all except 子句", + "duplicateEnumMember": "Enum 成员 \"{name}\" 已声明", + "duplicateGenericAndProtocolBase": "只允许一个 Generic[...] 或 Protocol[...] 基类", + "duplicateImport": "已多次导入“{importName}”", + "duplicateKeywordOnly": "只允许一个“*”分隔符", + "duplicateKwargsParam": "仅允许一个 “**” 参数", + "duplicateParam": "参数“{name}”重复", + "duplicatePositionOnly": "只允许一个“/”参数", + "duplicateStarPattern": "模式序列中只允许一个“*”模式", + "duplicateStarStarPattern": "只允许一个“**”条目", + "duplicateUnpack": "list 中仅允许一个解包操作", + "ellipsisAfterUnpacked": "\"...\" 不能与未打包的 TypeVarTuple 或 tuple 一起使用", + "ellipsisContext": "不允许在此上下文中使用 \"...\"", + "ellipsisSecondArg": "仅允许 \"...\" 作为两个参数中的第二个参数", + "enumClassOverride": "Enum 类 \"{name}\" 是 final 类,无法子类化", + "enumMemberDelete": "无法删除 Enum 成员 \"{name}\"", + "enumMemberSet": "无法分配 Enum 成员 \"{name}\"", + "enumMemberTypeAnnotation": "enum 成员不允许使用类型批注", + "exceptGroupMismatch": "Try 语句不能同时包含 \"except\" 和 \"except*\"", + "exceptGroupRequiresType": "异常组语法(\"except*\")需要异常类型", + "exceptRequiresParens": "在 Python 3.14 之前的版本中,多个异常类型必须用括号括起来", + "exceptWithAsRequiresParens": "使用 \"as\" 时,多个异常类型必须用括号括起来", + "exceptionGroupIncompatible": "异常组语法 (\"except*\") 需要 Python 3.11 或更高版本", + "exceptionGroupTypeIncorrect": "except* 中的异常类型不能派生自 BaseGroupException", + "exceptionTypeIncorrect": "\"{type}\" 不是派生自 BaseException", + "exceptionTypeNotClass": "“{type}”不是有效的异常类", + "exceptionTypeNotInstantiable": "异常类型\"{type}\"的构造函数需要一个或多个参数", + "expectedAfterDecorator": "修饰器后应有函数或类声明", + "expectedArrow": "应为“->”,后跟返回类型批注", + 
"expectedAsAfterException": "异常类型后应为“as”", + "expectedAssignRightHandExpr": "应为 “=” 右侧的表达式", + "expectedBinaryRightHandExpr": "应为运算符右侧的表达式", + "expectedBoolLiteral": "应为 True 或 False", + "expectedCase": "应为 “case” 语句", + "expectedClassName": "预期的类名", + "expectedCloseBrace": "\"{\" 未关闭", + "expectedCloseBracket": "“[”未关闭", + "expectedCloseParen": "\"(\" 未关闭", + "expectedColon": "应为 \":\"", + "expectedComplexNumberLiteral": "模式匹配的预期复数文本", + "expectedDecoratorExpr": "Python 3.9 之前的修饰器不支持表达式窗体", + "expectedDecoratorName": "预期的修饰器名称", + "expectedDecoratorNewline": "修饰器末尾应有新行", + "expectedDelExpr": "\"del\"后应为表达式", + "expectedElse": "应为 \"else\"", + "expectedEquals": "应为 \"=\"", + "expectedExceptionClass": "异常类或对象无效", + "expectedExceptionObj": "应为异常对象、异常类或 None", + "expectedExpr": "应为表达式", + "expectedFunctionAfterAsync": "\"async\"后面应有函数定义", + "expectedFunctionName": "“def”后应为函数名称", + "expectedIdentifier": "预期标识符", + "expectedImport": "应为 \"import\"", + "expectedImportAlias": "应为 “as” 后面的符号", + "expectedImportSymbols": "\"import\" 后应有一个或多个符号名称", + "expectedIn": "应为 \"in\"", + "expectedInExpr": "\"in\"后应为表达式", + "expectedIndentedBlock": "应为缩进块", + "expectedMemberName": "\".\" 后应为属性名称", + "expectedModuleName": "预期模块名称", + "expectedNameAfterAs": "\"as\" 后应有符号名", + "expectedNamedParameter": "关键字参数必须跟在 “*” 之后", + "expectedNewline": "需要换行符", + "expectedNewlineOrSemicolon": "语句必须用换行符或分号分隔", + "expectedOpenParen": "应为 \"(\"", + "expectedParamName": "应为参数名称", + "expectedPatternExpr": "预期的模式表达式", + "expectedPatternSubjectExpr": "预期的模式主题表达式", + "expectedPatternValue": "应为 “a.b”形式的模式值表达式", + "expectedReturnExpr": "“return” 后应有表达式", + "expectedSliceIndex": "预期索引或切片表达式", + "expectedTypeNotString": "应为类型,但收到字符串文本", + "expectedTypeParameterName": "应为类型参数名称", + "expectedYieldExpr": "yield 语句中的预期表达式", + "finalClassIsAbstract": "类“{type}”被标记为 final,并且必须实现所有抽象符号", + "finalContext": "不允许在此上下文中使用 \"Final\"", + "finalInLoop": "无法在循环中分配 \"Final\" 变量", + "finalMethodOverride": "方法 \"{name}\" 
无法替代在类 \"{className}\" 中定义的 final 方法", + "finalNonMethod": "不能将函数“{name}”标记为 @final,因为它不是方法", + "finalReassigned": "\"{name}\"声明为 Final,无法重新分配", + "finalRedeclaration": "\"{name}\"以前声明为 Final", + "finalRedeclarationBySubclass": "无法重新声明“{name}”,因为父类“{className}”将其声明为 Final", + "finalTooManyArgs": "“Final”后应为单个类型参数", + "finalUnassigned": "\"{name}\" 被声明为 Final,但未分配值", + "finallyBreak": "“break” 不能用于退出 “finally” 块", + "finallyContinue": "“continue” 不能用于退出 “finally” 块", + "finallyReturn": "“return” 不能用于退出 “finally” 块", + "formatStringBrace": "f 字符串文本中不允许使用单个右大括号;使用双右大括号", + "formatStringBytes": "格式字符串文本(f 字符串)不能为二进制", + "formatStringDebuggingIllegal": "F 字符串调试说明符“=”需要 Python 3.8 或更高版本", + "formatStringEscape": "Python 3.12 之前 f 字符串的表达式部分中不允许使用转义序列(反斜杠)", + "formatStringExpectedConversion": "在 f 字符串中应为 \"!\" 之后的转换说明符", + "formatStringIllegal": "格式化字符串文本(f 字符串)需要 Python 3.6 或更高版本", + "formatStringInPattern": "模式中不允许使用格式字符串", + "formatStringNestedFormatSpecifier": "表达式嵌套在格式字符串说明符内太深", + "formatStringNestedQuote": "嵌套在 f 字符串中的字符串不能使用与 Python 3.12 之前的 f 字符串相同的引号字符", + "formatStringTemplate": "格式字符串字面量(f-strings)也不能是模板字符串(t-strings)", + "formatStringUnicode": "格式字符串文本(f 字符串)不能为 unicode", + "formatStringUnterminated": "f 字符串中未终止的表达式;应为 \"}\"", + "functionDecoratorTypeUnknown": "非类型化函数修饰器遮盖函数类型;忽略修饰器", + "functionInConditionalExpression": "始终计算结果为 True 的条件表达式引用函数", + "functionTypeParametersIllegal": "函数类型参数语法需要 Python 3.12 或更高版本", + "futureImportLocationNotAllowed": "从__future__导入必须位于文件的开头", + "generatorAsyncReturnType": "async 生成器函数的返回类型必须与 \"AsyncGenerator[{yieldType}, Any]\" 兼容", + "generatorNotParenthesized": "如果不是唯一参数,生成器表达式必须带圆括号", + "generatorSyncReturnType": "生成器函数的返回类型必须与 \"Generator[{yieldType}, Any, Any]\"兼容", + "genericBaseClassNotAllowed": "“Generic” 基类不能与类型参数语法一起使用", + "genericClassAssigned": "无法分配泛型类类型", + "genericClassDeleted": "无法删除泛型类类型", + "genericInstanceVariableAccess": "通过类访问泛型实例变量是不明确的", + "genericNotAllowed": "\"Generic\" 在此上下文中无效", + 
"genericTypeAliasBoundTypeVar": "类中的泛型类型别名不能使用绑定类型变量 {names}", + "genericTypeArgMissing": "“Generic”至少需要一个类型参数", + "genericTypeArgTypeVar": "“Generic”的类型参数必须是类型变量", + "genericTypeArgUnique": "“Generic”的类型参数必须是唯一", + "globalReassignment": "\"{name}\" 已在 global 声明之前分配", + "globalRedefinition": "\"{name}\" 已声明为 global", + "implicitStringConcat": "不允许隐式字符串串联", + "importCycleDetected": "在导入链中检测到的周期数", + "importDepthExceeded": "导入链深度超过 {depth}", + "importResolveFailure": "无法解析导入“{importName}”", + "importSourceResolveFailure": "无法从源解析导入“{importName}”", + "importSymbolUnknown": "“{name}”是未知的导入符号", + "incompatibleMethodOverride": "方法“{name}”以不兼容的方式替代类“{className}”", + "inconsistentIndent": "取消缩进量与以前的缩进不匹配", + "inconsistentTabs": "缩进中制表符和空格的使用不一致", + "initMethodSelfParamTypeVar": "\"__init__\" 方法 “self” 参数的类型注释不能包含类范围的类型变量", + "initMustReturnNone": "“__init__”的返回类型必须为 None", + "initSubclassCallFailed": "__init_subclass__ 方法的关键字参数不正确", + "initSubclassClsParam": "__init_subclass__替代应采用“cls”参数", + "initVarNotAllowed": "此上下文中不允许使用“InitVar”", + "instanceMethodSelfParam": "实例方法应采用 “self” 参数", + "instanceVarOverridesClassVar": "实例变量\"{name}\"替代类\"{className}\"中的同名类变量", + "instantiateAbstract": "无法实例化抽象类“{type}”", + "instantiateProtocol": "无法实例化 Protocol 类 \"{type}\"", + "internalBindError": "绑定文件“{file}”时发生内部错误:{message}", + "internalParseError": "分析文件“{file}”时发生内部错误:{message}", + "internalTypeCheckingError": "类型检查文件“{file}”时发生内部错误:{message}", + "invalidIdentifierChar": "标识符中的字符无效", + "invalidStubStatement": "语句在类型 stub 文件中无意义", + "invalidTokenChars": "令牌中的字符\"{text}\"无效", + "isInstanceInvalidType": "\"isinstance\" 的第二个参数必须是类或类的 tuple", + "isSubclassInvalidType": "\"issubclass\" 的第二个参数必须是类或类的 tuple", + "keyValueInSet": "不允许在 set 内使用键/值对", + "keywordArgInTypeArgument": "关键字参数不能在类型参数列表中使用", + "keywordOnlyAfterArgs": "“*”参数后不允许使用仅限关键字的参数分隔符", + "keywordParameterMissing": "一个或多个关键字参数必须遵循 \"*\"参数", + "keywordSubscriptIllegal": "不支持下标中的关键字参数", + "lambdaReturnTypePartiallyUnknown": 
"lambda 的返回类型“{returnType}”部分未知", + "lambdaReturnTypeUnknown": "lambda 的返回类型未知", + "listAssignmentMismatch": "无法将 \"{type}\" 类型的表达式分配给目标列表", + "listInAnnotation": "类型表达式中不允许使用 List 表达式", + "literalEmptyArgs": "“Literal”后应有一个或多个类型参数", + "literalNamedUnicodeEscape": "\"Literal\" 字符串批注不支持已命名的 unicode 转义序列", + "literalNotAllowed": "如果没有类型参数,则 \"Literal\" 不能用于此上下文", + "literalNotCallable": "无法实例化 Literal 类型", + "literalUnsupportedType": "\"Literal\" 的类型参数必须是 None、文本值(int、bool、str 或 bytes)或 enum 值", + "matchIncompatible": "Match 语句需要 Python 3.10 或更高版本", + "matchIsNotExhaustive": "match 语句中的事例不会彻底处理所有值", + "maxParseDepthExceeded": "超出最大分析深度;将表达式分解为较小的子表达式", + "memberAccess": "无法访问类“{type}”的属性“{name}”", + "memberDelete": "无法删除类“{type}”的属性“{name}”", + "memberSet": "无法为类“{type}”的属性“{name}”赋值。", + "metaclassConflict": "派生类的元类必须是其所有基类的元类的子类", + "metaclassDuplicate": "只能提供一个元类", + "metaclassIsGeneric": "元类不能是泛型", + "methodNotDefined": "未定义“{name}”方法", + "methodNotDefinedOnType": "未在类型\"{type}\"上定义\"{name}\"方法", + "methodOrdering": "无法创建一致的方法排序", + "methodOverridden": "“{name}”在类型“{type}”不兼容的类“{className}”中替代同名的方法", + "methodReturnsNonObject": "“{name}”方法不返回对象", + "missingSuperCall": "方法“{methodName}”在父类中不调用同名方法", + "mixingBytesAndStr": "Bytes 和 str 值无法串联", + "moduleAsType": "模块不能用作类型", + "moduleNotCallable": "模块不可调用", + "moduleUnknownMember": "“{memberName}”不是模块“{moduleName}”的已知属性", + "namedExceptAfterCatchAll": "命名的 except 子句不能出现在 catch-all except 子句后", + "namedParamAfterParamSpecArgs": "关键字参数“{name}”不能出现在 ParamSpec args 参数之后的签名中", + "namedTupleEmptyName": "命名 tuple 中的名称不能为空", + "namedTupleEntryRedeclared": "无法替代 \"{name}\",因为父类 \"{className}\" 是命名的 tuple", + "namedTupleFieldUnderscore": "Named tuple 字段名称不能以下划线开头", + "namedTupleFirstArg": "应将命名的 tuple 类名作为第一个参数", + "namedTupleMultipleInheritance": "不支持使用 NamedTuple 进行多个继承", + "namedTupleNameKeyword": "字段名称不能是关键字", + "namedTupleNameType": "应为指定条目名称和类型的双条目 tuple", + "namedTupleNameUnique": "命名的 tuple 中的名称必须唯一", + 
"namedTupleNoTypes": "“namedtuple”不提供 tuple 条目的类型;请改用“NamedTuple”", + "namedTupleSecondArg": "应将命名的 tuple 条目 list 作为第二个参数", + "newClsParam": "__new__替代应采用“cls”参数", + "newTypeAnyOrUnknown": "NewType 的第二个参数必须是已知类,而不是 Any 或 Unknown", + "newTypeBadName": "NewType 的第一个参数必须是字符串文本", + "newTypeLiteral": "NewType 不能与 Literal 类型一起使用", + "newTypeNameMismatch": "必须将 NewType 分配给同名变量", + "newTypeNotAClass": "应为 NewType 的第二个参数的类", + "newTypeParamCount": "NewType 需要两个位置参数", + "newTypeProtocolClass": "NewType 不能与结构类型(Protocol 或 TypedDict 类)一起使用", + "noOverload": "“{name}”的重载与提供的参数不匹配", + "noReturnContainsReturn": "声明的 return 类型为 \"NoReturn\" 的函数不能包含 return 语句", + "noReturnContainsYield": "声明的返回类型为 “NoReturn” 的函数不能包含 yield 语句", + "noReturnReturnsNone": "所声明的返回类型为 \"NoReturn\" 的函数无法返回 \"None\"", + "nonDefaultAfterDefault": "非默认参数遵循默认参数", + "nonLocalInModule": "模块级不允许使用 nonlocal 声明", + "nonLocalNoBinding": "找不到 nonlocal \"{name}\" 的绑定", + "nonLocalReassignment": "\"{name}\" 已在 nonlocal 声明之前分配", + "nonLocalRedefinition": "\"{name}\" 已声明为 nonlocal", + "noneNotCallable": "无法调用类型为“None”的对象", + "noneNotIterable": "类型为“None”的对象不能用作可迭代值", + "noneNotSubscriptable": "类型为“None”的对象不可下标", + "noneNotUsableWith": "类型为 \"None\" 的对象不能与 \"with\" 一起使用", + "noneNotUsableWithAsync": "类型为 \"None\" 的对象不能与 \"async with\" 一起使用", + "noneOperator": "“None”不支持运算符\"{operator}\"", + "noneUnknownMember": "“{name}”不是 \"None\" 的已知属性", + "nonlocalTypeParam": "类型参数 \"{name}\" 不允许进行 nonlocal 绑定", + "notRequiredArgCount": "“NotRequired” 之后应为单个类型参数", + "notRequiredNotInTypedDict": "此上下文中不允许使用“NotRequired”", + "objectNotCallable": "类型为“{type}”的对象不可调用", + "obscuredClassDeclaration": "类声明\"{name}\"被同名的声明遮蔽", + "obscuredFunctionDeclaration": "函数声明“{name}”被同名声明遮盖", + "obscuredMethodDeclaration": "方法声明“{name}”被同名声明遮盖", + "obscuredParameterDeclaration": "参数声明“{name}”被同名声明遮盖", + "obscuredTypeAliasDeclaration": "类型别名声明“{name}”被同名声明遮盖", + "obscuredVariableDeclaration": "声明“{name}”被同名声明遮盖", + 
"operatorLessOrGreaterDeprecated": "Python 3 不支持运算符“<>”;请改用 \"!=\"", + "optionalExtraArgs": "\"Optional\"后面应有一个类型参数", + "orPatternIrrefutable": "无可辩驳的模式仅允许作为 \"or\"模式中的最后一个子模式", + "orPatternMissingName": "“or”模式中的所有子节点必须以相同的名称为目标", + "overlappingKeywordArgs": "类型化字典与以下关键字参数重叠: {names}", + "overlappingOverload": "永远不会使用“{name}”的重载 {obscured},因为其参数与重载 {obscuredBy} 重叠", + "overloadAbstractImplMismatch": "重载必须与实现的抽象状态匹配", + "overloadAbstractMismatch": "重载必须全是抽象重载,或者全都不是抽象重载", + "overloadClassMethodInconsistent": "“{name}”的重载使用 @classmethod 的方式不一致", + "overloadFinalImpl": "@final 修饰器应仅应用于实现", + "overloadFinalNoImpl": "只应将第一个重载标记为 @final", + "overloadImplementationMismatch": "重载实现与重载 {index} 的签名不一致", + "overloadOverrideImpl": "@override 修饰器应仅应用于实现", + "overloadOverrideNoImpl": "只应将第一个重载标记为 @override", + "overloadReturnTypeMismatch": "“{name}”的重载 {prevIndex} 与重载 {newIndex} 重叠,并返回不兼容的类型", + "overloadStaticMethodInconsistent": "“{name}”的重载使用 @staticmethod 的方式不一致", + "overloadWithoutImplementation": "\"{name}\" 被标记为 overload,但未提供实现", + "overriddenMethodNotFound": "方法 \"{name}\" 被标记为 override,但不存在同名的基方法", + "overrideDecoratorMissing": "方法 \"{name}\" 未被标记为替代,但 override 类 \"{className}\" 中的方法", + "paramAfterKwargsParam": "参数不能跟随“**”参数", + "paramAlreadyAssigned": "已分配参数“{name}”", + "paramAnnotationMissing": "参数“{name}”缺少类型批注", + "paramAssignmentMismatch": "无法将\"{sourceType}\"类型的表达式分配给\"{paramType}\"类型的参数", + "paramNameMissing": "没有名为“{name}”的参数", + "paramSpecArgsKwargsDuplicate": "已提供 ParamSpec \"{type}\" 的参数", + "paramSpecArgsKwargsUsage": "ParamSpec 的 \"args\" 和 \"kwargs\" 属性必须同时出现在函数签名中", + "paramSpecArgsMissing": "缺少 ParamSpec“{type}”的参数", + "paramSpecArgsUsage": "ParamSpec 的 \"args\" 属性仅在与 *args 参数一起使用时有效", + "paramSpecAssignedName": "必须将 ParamSpec 分配给名为“{name}”的变量", + "paramSpecContext": "此上下文中不允许使用 ParamSpec", + "paramSpecDefaultNotTuple": "ParamSpec 的默认值应为省略号、tuple 表达式或 ParamSpec", + "paramSpecFirstArg": "ParamSpec 作为第一个参数的预期名称", + "paramSpecKwargsUsage": "ParamSpec 的 
\"kwargs\" 属性仅在与 **kwargs 参数一起使用时有效", + "paramSpecNotUsedByOuterScope": "ParamSpec“{name}”在此上下文中没有意义", + "paramSpecUnknownArg": "ParamSpec 不支持多个参数", + "paramSpecUnknownMember": "“{name}”不是 ParamSpec 的已知属性", + "paramSpecUnknownParam": "“{name}”是 ParamSpec 的未知参数", + "paramTypeCovariant": "不能在参数类型中使用协变类型变量", + "paramTypePartiallyUnknown": "参数\"{paramName}\"的类型部分未知", + "paramTypeUnknown": "参数“{paramName}”的类型未知", + "parenthesizedContextManagerIllegal": "\"with\" 语句中的括号需要 Python 3.9 或更高版本", + "patternNeverMatches": "主题类型“{type}”的模式永远不会匹配", + "positionArgAfterNamedArg": "位置参数不能出现在关键字参数之后", + "positionArgAfterUnpackedDictArg": "位置参数不能出现在关键字参数解压缩之后", + "positionOnlyAfterArgs": "“*”参数后不允许使用仅位置参数分隔符", + "positionOnlyAfterKeywordOnly": "“/”参数必须出现在“*”参数之前", + "positionOnlyAfterNon": "非仅位置参数后面不允许仅位置参数", + "positionOnlyFirstParam": "不允许将仅位置参数分隔符用作第一个参数", + "positionOnlyIncompatible": "仅位置参数分隔符需要 Python 3.8 或更高版本", + "privateImportFromPyTypedModule": "未从模块“{module}”导出“{name}”", + "privateUsedOutsideOfClass": "\"{name}\"是专用的,在声明它的类之外使用", + "privateUsedOutsideOfModule": "“{name}”是专用的,在声明它的模块外部使用", + "propertyOverridden": "\"{name}\" 错误地替代了类 \"{className}\" 中同名的 property", + "propertyStaticMethod": "property getter、setter 或 deleter 不允许使用静态方法", + "protectedUsedOutsideOfClass": "“{name}”在声明它的类之外受到保护并被使用", + "protocolBaseClass": "Protocol 类 \"{classType}\" 不能派生自非 Protocol 类 \"{baseType}\"", + "protocolBaseClassWithTypeArgs": "使用类型参数语法时,Protocol 类不允许使用类型参数", + "protocolIllegal": "使用 \"Protocol\" 需要 Python 3.7 或更高版本", + "protocolNotAllowed": "\"Protocol\" 不能用于此上下文", + "protocolTypeArgMustBeTypeParam": "\"Protocol\" 的类型参数必须是类型参数", + "protocolUnsafeOverlap": "类与“{name}”不安全地重叠,并且可能在运行时生成匹配项", + "protocolVarianceContravariant": "泛型 Protocol \"{class}\" 中使用的类型变量 \"{variable}\" 应为反变量", + "protocolVarianceCovariant": "泛型 Protocol \"{class}\" 中使用的类型变量 \"{variable}\" 应为共变量", + "protocolVarianceInvariant": "泛型 Protocol \"{class}\" 中使用的类型变量 \"{variable}\" 应为固定变量", + 
"pyrightCommentInvalidDiagnosticBoolValue": "Pyright 注释指令后面必须跟有“=”和 true 或 false 值", + "pyrightCommentInvalidDiagnosticSeverityValue": "Pyright 注释指令后面必须跟有“=”,并且值为 true、false、error、warning、information 或 none", + "pyrightCommentMissingDirective": "Pyright 注释后面必须是指令(basic 或 strict)或诊断规则", + "pyrightCommentNotOnOwnLine": "用于控制文件级设置的 Pyright 注释必须显示在其自己的行上", + "pyrightCommentUnknownDiagnosticRule": "“{rule}”是 pyright 注释的未知诊断规则", + "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\"是 pyright 注释的无效值;应为 true、false、error、warning、information 或 none", + "pyrightCommentUnknownDirective": "“{directive}”是 pyright 注释的未知指令;应为 \"strict\"、\"standard\" 或 \"basic\"", + "readOnlyArgCount": "“ReadOnly”后应为单个类型参数", + "readOnlyNotInTypedDict": "此上下文中不允许使用“ReadOnly”", + "recursiveDefinition": "无法确定“{name}”的类型,因为它引用其自身", + "relativeImportNotAllowed": "相对导入不能与“import .a”窗体一起使用;改用 \"from . import a\"", + "requiredArgCount": "\"Required\"后应为单个类型参数", + "requiredNotInTypedDict": "此上下文中不允许使用\"Required\"", + "returnInAsyncGenerator": "async 生成器中不允许使用具有值的 return 语句", + "returnInExceptionGroup": "\"except*\" 块中不允许 \"return\"", + "returnMissing": "所声明的返回类型为“{returnType}”的函数必须在所有代码路径上返回值", + "returnOutsideFunction": "“return”只能在函数中使用", + "returnTypeContravariant": "逆变类型变量不能用于返回类型", + "returnTypeMismatch": "类型“{exprType}”不可分配给返回类型“{returnType}”", + "returnTypePartiallyUnknown": "返回类型“{returnType}”部分未知", + "returnTypeUnknown": "返回类型未知", + "revealLocalsArgs": "“reveal_locals”调用应没有参数", + "revealLocalsNone": "此范围内没有 locals 变量", + "revealTypeArgs": "“reveal_type”调用应为单个位置参数", + "revealTypeExpectedTextArg": "函数“reveal_type”的“expected_text”参数必须是 str 文本值", + "revealTypeExpectedTextMismatch": "类型文本不匹配;应为\"{expected}\"但收到\"{received}\"", + "revealTypeExpectedTypeMismatch": "类型不匹配;应为“{expected}”,但收到“{received}”", + "selfTypeContext": "“Self”在此上下文中无效", + "selfTypeMetaclass": "“Self”不能在元类(“type”的子类)中使用", + "selfTypeWithTypedSelfOrCls": "“Self”不能在具有“self”或“cls”参数的函数中使用,该参数的类型批注不是“Self”", + "sentinelBadName": 
"Sentinel 的第一个自变量必须是字符串字面量", + "sentinelNameMismatch": "必须将 Sentinel 分配给同名变量", + "sentinelParamCount": "Sentinel 需要一个位置参数", + "setterGetterTypeMismatch": "Property setter 值类型不可分配给 getter 返回类型", + "singleOverload": "“{name}”被标记为重载,但缺少其他重载", + "slotsAttributeError": "未在__slots__中指定“{name}”", + "slotsClassVarConflict": "\"{name}\"与__slots__中声明的实例变量冲突", + "starPatternInAsPattern": "星形模式不能与“as”目标一起使用", + "starPatternInOrPattern": "在其他模式中,星形图案不能为 ORed", + "starStarWildcardNotAllowed": "** 不能与通配符“_”一起使用", + "staticClsSelfParam": "静态方法不应采用“self”或“cls”参数", + "stringNonAsciiBytes": "不允许使用非 ASCII 字符(以字节为单位)字符串文本", + "stringNotSubscriptable": "字符串表达式不能在类型表达式中使用下标; 请将整个表达式括在引号中", + "stringUnsupportedEscape": "字符串文本中不受支持的转义序列", + "stringUnterminated": "字符串文本未终止", + "stubFileMissing": "找不到 \"{importName}\" 的 Stub 文件", + "stubUsesGetAttr": "类型 stub 文件不完整; \"__getattr__\" 遮盖了模块的类型错误", + "sublistParamsIncompatible": "Python 3.x 不支持 Sublist 参数", + "superCallArgCount": "“super” 调用应不超过两个参数", + "superCallFirstArg": "应将类类型作为“super”调用的第一个参数,但收到“{type}”", + "superCallSecondArg": "“super”调用的第二个参数必须是派生自“{type}”的对象或类", + "superCallZeroArgForm": "\"super\" 调用的零参数形式仅在方法中有效", + "superCallZeroArgFormStaticMethod": "“super”调用的零参数形式在静态方法中无效", + "symbolIsPossiblyUnbound": "“{name}”可能未绑定", + "symbolIsUnbound": "“{name}”未绑定", + "symbolIsUndefined": "未定义“{name}”", + "symbolOverridden": "“{name}”替代类“{className}”中的同名符号", + "templateStringBytes": "模板字符串字面量(t-strings)不能为二进制", + "templateStringIllegal": "模板字符串字面量(t-strings)需要 Python 3.14 或更高版本", + "templateStringUnicode": "模板字符串字面量(t-strings)不能为 unicode", + "ternaryNotAllowed": "类型表达式中不允许使用三元表达式", + "totalOrderingMissingMethod": "类必须定义“__lt__”、“__le__”、“__gt__”或“__ge__”之一才能使用total_ordering", + "trailingCommaInFromImport": "不允许使用尾随逗号,没有括号", + "tryWithoutExcept": "Try 语句必须至少有一个 except 或 finally 子句", + "tupleAssignmentMismatch": "无法将类型为 \"{type}\" 的表达式分配给目标 tuple", + "tupleInAnnotation": "类型表达式中不允许使用 tuple 表达式", + "tupleIndexOutOfRange": "类型 {type} 的索引 {index} 
超出范围", + "typeAliasIllegalExpressionForm": "类型别名定义的表达式形式无效", + "typeAliasIsRecursiveDirect": "类型别名“{name}”不能在其定义中使用自身", + "typeAliasNotInModuleOrClass": "TypeAlias 只能在模块或类范围内定义", + "typeAliasRedeclared": "“{name}”声明为 TypeAlias,只能分配一次", + "typeAliasStatementBadScope": "type 语句只能在模块或类范围内使用", + "typeAliasStatementIllegal": "类型别名语句需要 Python 3.12 或更高版本", + "typeAliasTypeBadScope": "只能在模块或类范围内定义类型别名", + "typeAliasTypeBaseClass": "\"type\" 语句中定义的类型别名不能用作基类", + "typeAliasTypeMustBeAssigned": "必须将 TypeAliasType 分配给与类型别名同名的变量", + "typeAliasTypeNameArg": "TypeAliasType 的第一个参数必须是表示类型别名名称的字符串文本", + "typeAliasTypeNameMismatch": "类型别名的名称必须与分配到的变量的名称匹配", + "typeAliasTypeParamInvalid": "类型参数列表必须是仅包含 TypeVar、TypeVarTuple 或 ParamSpec 的 tuple", + "typeAnnotationCall": "类型表达式中不允许使用调用表达式", + "typeAnnotationVariable": "类型表达式中不允许使用变量", + "typeAnnotationWithCallable": "“type”的类型参数必须为类; 不支持可调用项", + "typeArgListExpected": "应为 ParamSpec、省略号或类型 list", + "typeArgListNotAllowed": "此类型参数不允许使用 list 表达式", + "typeArgsExpectingNone": "类“{name}”不应有类型参数", + "typeArgsMismatchOne": "应为一个类型参数,但收到 {received}", + "typeArgsMissingForAlias": "泛型类型别名“{name}”的预期类型参数", + "typeArgsMissingForClass": "泛型类“{name}”的预期类型参数", + "typeArgsTooFew": "为“{name}”提供的类型参数太少;应为 {expected},但收到 {received}", + "typeArgsTooMany": "为“{name}”提供的类型参数太多;应为 {expected},但收到 {received}", + "typeAssignmentMismatch": "类型“{sourceType}”不可分配给声明的类型“{destType}”", + "typeAssignmentMismatchWildcard": "导入符号“{name}”的类型为“{sourceType}”,该类型不可分配给声明的类型“{destType}”", + "typeCallNotAllowed": "不应在类型表达式中使用 type() 调用", + "typeCheckOnly": "“{name}”标记为 @type_check_only,并且只能在类型注释中使用", + "typeCommentDeprecated": "已弃用 type 注释; 请改用 type 批注", + "typeExpectedClass": "应为类,但收到“{type}”", + "typeFormArgs": "\"TypeForm\" 接受单个位置参数", + "typeGuardArgCount": "“TypeGuard”或“TypeIs”后应为单个类型参数", + "typeGuardParamCount": "用户定义的类型防护函数和方法必须至少有一个输入参数", + "typeIsReturnType": "TypeIs 的返回类型(“{returnType}”)与值参数类型(“{type}”)不一致", + "typeNotAwaitable": "\"{type}\" 并非 awaitable", + 
"typeNotIntantiable": "无法实例化\"{type}\"", + "typeNotIterable": "“{type}”不可迭代", + "typeNotSpecializable": "无法专用化类型“{type}”", + "typeNotSubscriptable": "类型为“{type}”的对象不可下标", + "typeNotSupportBinaryOperator": "类型“{leftType}”和“{rightType}”不支持运算符“{operator}”", + "typeNotSupportBinaryOperatorBidirectional": "预期类型为“{expectedType}”时,类型“{leftType}”和“{rightType}”不支持运算符“{operator}”", + "typeNotSupportUnaryOperator": "类型“{type}”不支持运算符“{operator}”", + "typeNotSupportUnaryOperatorBidirectional": "预期类型为 \"{expectedType}\"时,类型\"{type}\"不支持运算符\"{operator}\"", + "typeNotUsableWith": "\"{type}\" 类型的对象不能与 \"with\" 一起使用,因为它未实现 {method}", + "typeNotUsableWithAsync": "\"{type}\" 类型的对象不能与 \"async with\" 一起使用,因为它未实现 {method}", + "typeParameterBoundNotAllowed": "绑定或约束不能与 variadic 类型参数或 ParamSpec 一起使用", + "typeParameterConstraintTuple": "类型参数约束必须是两个或更多类型的元组", + "typeParameterExistingTypeParameter": "类型参数\"{name}\"已在使用中", + "typeParameterNotDeclared": "类型参数“{name}”未包含在“{container}”的类型参数列表中", + "typeParametersMissing": "必须至少指定一个类型参数", + "typePartiallyUnknown": "“{name}”的类型部分未知", + "typeUnknown": "\"{name}\"类型未知", + "typeVarAssignedName": "必须将 TypeVar 分配给名为“{name}”的变量", + "typeVarAssignmentMismatch": "无法将类型\"{type}\"分配给类型变量\"{name}\"", + "typeVarBoundAndConstrained": "TypeVar 不能同时绑定和约束", + "typeVarBoundGeneric": "TypeVar 绑定类型不能是泛型", + "typeVarConstraintGeneric": "TypeVar 约束类型不能是泛型", + "typeVarDefaultBoundMismatch": "TypeVar 默认类型必须是绑定类型的子类型", + "typeVarDefaultConstraintMismatch": "TypeVar 默认类型必须是受约束类型之一", + "typeVarDefaultIllegal": "类型变量默认类型需要 Python 3.13 或更高版本", + "typeVarDefaultInvalidTypeVar": "类型参数“{name}”的默认类型是指超出范围的一个或多个类型变量", + "typeVarFirstArg": "TypeVar 作为第一个参数的预期名称", + "typeVarInvalidForMemberVariable": "属性类型不能使用作用域为本地方法的类型变量\"{name}\"", + "typeVarNoMember": "TypeVar“{type}”没有属性“{name}”", + "typeVarNotSubscriptable": "TypeVar“{type}”不可下标", + "typeVarNotUsedByOuterScope": "类型变量“{name}”在此上下文中没有意义", + "typeVarPossiblyUnsolvable": "如果调用方没有为参数提供参数\"{param}\",则类型变量\"{name}\"可能无法解析", + 
"typeVarSingleConstraint": "TypeVar 必须至少有两种约束类型", + "typeVarTupleConstraints": "TypeVarTuple 不能有值约束", + "typeVarTupleContext": "此上下文中不允许使用 TypeVarTuple", + "typeVarTupleDefaultNotUnpacked": "TypeVarTuple 默认类型必须是未打包的 tuple 或 TypeVarTuple", + "typeVarTupleMustBeUnpacked": "TypeVarTuple 值需要解包运算符", + "typeVarTupleUnknownParam": "“{name}”是 TypeVarTuple 的未知参数", + "typeVarUnknownParam": "\"{name}\" 是 TypeVar 的未知参数", + "typeVarUsedByOuterScope": "TypeVar“{name}”已被外部作用域使用", + "typeVarUsedOnlyOnce": "TypeVar \"{name}\" 在泛型函数签名中仅显示一次", + "typeVarVariance": "TypeVar 不能同时为协变和逆变", + "typeVarWithDefaultFollowsVariadic": "TypeVar“{typeVarName}”具有默认值,并且不能遵循 TypeVarTuple“{variadicName}”", + "typeVarWithoutDefault": "“{name}”不能出现在类型参数列表的“{other}”后面,因为它没有默认类型", + "typeVarsNotInGenericOrProtocol": "Generic[] 或 Protocol[] 必须包括所有类型变量", + "typedDictAccess": "无法存取 TypedDict 中的项", + "typedDictAssignedName": "必须将 TypedDict 分配给名为“{name}”的变量", + "typedDictBadVar": "TypedDict 类只能包含类型批注", + "typedDictBaseClass": "TypedDict 类的所有基类也必须是 TypedDict 类", + "typedDictBoolParam": "预期“{name}”参数的值为 True 或 False", + "typedDictClosedExtras": "基类 \"{name}\" 是限制额外项类型 \"{type}\" 的 TypedDict", + "typedDictClosedFalseNonOpenBase": "基类“{name}”不是开放的 TypedDict;不允许 closed=False", + "typedDictClosedNoExtras": "基类 \"{name}\" 是 closed TypedDict; 不允许使用额外的项", + "typedDictDelete": "无法删除 TypedDict 中的项", + "typedDictEmptyName": "TypedDict 中的名称不能为空", + "typedDictEntryName": "字典条目名称的预期字符串文本", + "typedDictEntryUnique": "字典中的名称必须唯一", + "typedDictExtraArgs": "不支持额外的 TypedDict 参数", + "typedDictExtraItemsClosed": "TypedDict 可以使用 \"closed\" 或 \"extra_items\",但不能同时使用", + "typedDictFieldNotRequiredRedefinition": "无法将 TypedDict 项“{name}”重新定义为 NotRequired", + "typedDictFieldReadOnlyRedefinition": "无法将 TypedDict 项“{name}”重新定义为 ReadOnly", + "typedDictFieldRequiredRedefinition": "无法将 TypedDict 项“{name}”重新定义为 Required", + "typedDictFirstArg": "应将 TypedDict 类名作为第一个参数", + "typedDictInClassPattern": "类模式中不允许使用 TypedDict 类", + 
"typedDictInitsubclassParameter": "TypedDict 不支持 __init_subclass__ parameter“{name}”", + "typedDictNotAllowed": "\"TypedDict\" 不能用于此上下文", + "typedDictSecondArgDict": "预期的 dict 或关键字参数作为第二个参数", + "typedDictSecondArgDictEntry": "应为简单字典条目", + "typedDictSet": "无法在 TypedDict 中分配项", + "unaccessedClass": "未存取类“{name}”", + "unaccessedFunction": "无法存取函数\"{name}\"", + "unaccessedImport": "未存取导入“{name}”", + "unaccessedSymbol": "未存取“{name}”", + "unaccessedVariable": "无法存取变量“{name}”", + "unannotatedFunctionSkipped": "已跳过对函数“{name}”的分析,因为它未被批注", + "unaryOperationNotAllowed": "类型表达式中不允许使用一元运算符", + "unexpectedAsyncToken": "“def”、“with” 或 “for” 应跟随 “async”", + "unexpectedEof": "意外的 EOF", + "unexpectedExprToken": "表达式末尾出现意外标记", + "unexpectedIndent": "意外缩进", + "unexpectedUnindent": "不应取消缩进", + "unhashableDictKey": "字典密钥必须可哈希", + "unhashableSetEntry": "Set 条目必须是可哈希的", + "uninitializedAbstractVariables": "抽象基类中定义的变量未在 final 类 \"{classType}\" 中初始化", + "uninitializedInstanceVariable": "未在类体或__init__方法中初始化实例变量“{name}”", + "unionForwardReferenceNotAllowed": "Union 语法不能与字符串操作数一起使用; 请在整个表达式周围使用引号", + "unionSyntaxIllegal": "联合的替代语法需要 Python 3.10 或更高版本", + "unionTypeArgCount": "Union 需要两个或更多类型参数", + "unionUnpackedTuple": "Union 不能包含未打包的 tuple", + "unionUnpackedTypeVarTuple": "Union 不能包含未打包的 TypeVarTuple", + "unnecessaryCast": "不必要的 \"cast\" 调用;类型已为“{type}”", + "unnecessaryIsInstanceAlways": "不必要的 isinstance 调用;“{testType}”始终是“{classType}”的实例", + "unnecessaryIsInstanceNever": "不必要的 isinstance 调用;“{testType}”始终不是“{classType}”的实例", + "unnecessaryIsSubclassAlways": "不必要的 issubclass 调用;“{testType}”始终是“{classType}”的子类", + "unnecessaryIsSubclassNever": "不必要的 issubclass 调用;“{testType}”始终不是“{classType}”的子类", + "unnecessaryPyrightIgnore": "不必要的 \"# pyright: ignore\" 注释", + "unnecessaryPyrightIgnoreRule": "不必要的 \"# pyright: ignore\"规则: \"{name}\"", + "unnecessaryTypeIgnore": "不必要的 \"# type: ignore\" 注释", + "unpackArgCount": "\"Unpack\"后应为单个类型参数", + "unpackExpectedTypeVarTuple": "Unpack 预期接收 TypeVarTuple 或 
tuple 作为类型参数", + "unpackExpectedTypedDict": "Unpack 预期接收 TypedDict 类型参数", + "unpackIllegalInComprehension": "在推导式中不允许使用解包操作", + "unpackInAnnotation": "类型表达式中不允许使用解包运算符", + "unpackInDict": "字典中不允许解压缩操作", + "unpackInSet": "set 内不允许使用解包运算符", + "unpackNotAllowed": "此上下文中不允许 Unpack", + "unpackOperatorNotAllowed": "此上下文中不允许解压缩操作", + "unpackTuplesIllegal": "Python 3.8 之前的元组中不允许解包操作", + "unpackedArgInTypeArgument": "未打包的参数不能用于此上下文", + "unpackedArgWithVariadicParam": "未打包的参数不能用于 TypeVarTuple 参数", + "unpackedDictArgumentNotMapping": "** 后面的参数表达式必须是具有“str”键类型的映射", + "unpackedDictSubscriptIllegal": "不允许在下标中使用字典解包运算符", + "unpackedSubscriptIllegal": "下标中的解包运算符需要 Python 3.11 或更高版本", + "unpackedTypeVarTupleExpected": "应为未打包的 TypeVarTuple;使用 Unpack[{name1}] 或 *{name2}", + "unpackedTypedDictArgument": "无法将解压缩的 TypedDict 参数与参数匹配", + "unreachableCodeCondition": "由于条件静态评估结果为 false,因此未对代码进行分析", + "unreachableCodeStructure": "代码在结构上无法访问", + "unreachableCodeType": "类型分析指示代码不可访问", + "unreachableExcept": "无法访问 Except 子句,因为已处理异常", + "unsupportedDunderAllOperation": "不支持对“__all__”执行操作,因此导出的符号列表可能不正确", + "unusedCallResult": "调用表达式的结果类型为 \"{type}\" 且未使用;如果这是有意为之,则分配给变量 “_”", + "unusedCoroutine": "未使用 async 函数调用的结果; 请使用 \"await\" 或将结果分配给变量", + "unusedExpression": "表达式值未使用", + "varAnnotationIllegal": "变量的 Type 批注需要 Python 3.6 或更高版本; 请使用 type 注释以与以前的版本兼容", + "variableFinalOverride": "变量\"{name}\"被标记为 Final,并替代类\"{className}\"中同名的非 Final 变量", + "variadicTypeArgsTooMany": "类型参数列表最多可以有一个未打包的 TypeVarTuple 或 tuple", + "variadicTypeParamTooManyAlias": "类型别名最多可以有一个 TypeVarTuple 类型参数,但收到多个 ({names})", + "variadicTypeParamTooManyClass": "泛型类最多可以有一个 TypeVarTuple 类型参数,但收到多个 ({names})", + "walrusIllegal": "运算符 \":=\" 需要 Python 3.8 或更高版本", + "walrusNotAllowed": "此上下文中不允许使用运算符 \":=\",且不带括号", + "wildcardInFunction": "类或函数中不允许使用通配符 import", + "wildcardLibraryImport": "不允许从库中 import 通配符", + "wildcardPatternTypePartiallyUnknown": "通配符模式捕获的类型部分未知", + "wildcardPatternTypeUnknown": "通配符模式捕获的类型未知", + 
"yieldFromIllegal": "使用“yield from”需要 Python 3.3 或更高版本", + "yieldFromOutsideAsync": "async 函数中不允许使用 \"yield from\"", + "yieldOutsideFunction": "不允许在函数或 lambda 之外使用“yield”", + "yieldWithinComprehension": "不允许在推导式中使用 \"yield\"", + "zeroCaseStatementsFound": "Match 语句必须至少包含一个 case 语句", + "zeroLengthTupleNotAllowed": "此上下文中不允许使用零长度 tuple" + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "\"Annotated\" 特殊形式不能与实例和类检查一起使用", + "argParam": "参数对应于参数“{paramName}”", + "argParamFunction": "参数对应于函数“{functionName}”中的参数“{paramName}”", + "argsParamMissing": "参数“*{paramName}”没有相应的参数", + "argsPositionOnly": "仅位置参数不匹配;应为 {expected},但收到 {received}", + "argumentType": "参数类型为“{type}”", + "argumentTypes": "参数类型:({types})", + "assignToNone": "类型不可分配给“None”", + "asyncHelp": "是否表示“async with”?", + "baseClassIncompatible": "基类“{baseClass}”与类型“{type}”不兼容", + "baseClassIncompatibleSubclass": "基类“{baseClass}”派生自与类型“{type}”不兼容的“{subclass}”", + "baseClassOverriddenType": "基类\"{baseClass}\"提供被替代的类型\"{type}\"", + "baseClassOverridesType": "基类“{baseClass}”替代类型{type}”", + "bytesTypePromotions": "将 disableBytesTypePromotions 设置为 false,以启用“bytearray”和“memoryview”的类型提升行为", + "conditionalRequiresBool": "类型“{operandType}”的方法__bool__返回类型“{boolReturnType}”而不是“bool”", + "dataClassFieldLocation": "字段声明", + "dataClassFrozen": "\"{name}\"已冻结", + "dataProtocolUnsupported": "“{name}”是数据协议", + "descriptorAccessBindingFailed": "无法绑定描述符类“{className}”的方法“{name}”", + "descriptorAccessCallFailed": "无法为描述符类“{className}”调用方法“{name}”", + "finalMethod": "Final 方法", + "functionParamDefaultMissing": "参数“{name}”缺少默认参数", + "functionParamName": "参数名称不匹配: \"{destName}\" versus \"{srcName}\"", + "functionParamPositionOnly": "仅位置参数不匹配; 参数“{name}”并非仅限位置", + "functionReturnTypeMismatch": "函数返回类型\"{sourceType}\"与类型\"{destType}\"不兼容", + "functionTooFewParams": "函数接受的位置参数太少;应为 {expected},但收到 {received}", + "functionTooManyParams": "函数接受太多位置参数;应为 {expected},但收到 {received}", + "genericClassNotAllowed": "不允许对实例或类检查使用具有类型参数的泛型类型", + 
"incompatibleDeleter": "Property deleter 方法不兼容", + "incompatibleGetter": "Property getter 方法不兼容", + "incompatibleSetter": "Property setter 方法不兼容", + "initMethodLocation": "__init__方法已在类“{type}”中定义", + "initMethodSignature": "__init__的签名为“{type}”", + "initSubclassLocation": "__init_subclass__ 方法在类“{name}”中定义", + "invariantSuggestionDict": "请考虑从 “dict” 切换到 “Mapping”(在值类型中为协变)", + "invariantSuggestionList": "考虑从“list”切换到“Sequence”(协变)", + "invariantSuggestionSet": "请考虑从“set”切换到“Container”,后者是协变的", + "isinstanceClassNotSupported": "实例和类检查不支持“{type}”", + "keyNotRequired": "“{type}”中“{name}”不是必需的密钥,因此访问可能会导致运行时异常", + "keyReadOnly": "“{name}”是“{type}”中的只读密钥", + "keyRequiredDeleted": "“{name}”是必需的密钥,无法删除", + "keyUndefined": "“{name}”不是“{type}”中定义的密钥", + "kwargsParamMissing": "参数“**{paramName}”没有相应的参数", + "listAssignmentMismatch": "类型“{type}”与目标列表不兼容", + "literalAssignmentMismatch": "“{sourceType}”不可分配给类型“{destType}”", + "literalNotAllowed": "“Literal” 特殊形式不能与实例和类检查一起使用", + "matchIsNotExhaustiveHint": "如果未进行详尽处理,请添加\"case _: pass\"", + "matchIsNotExhaustiveType": "未处理的类型: \"{type}\"", + "memberAssignment": "无法将类型“{type}”的表达式分配给类“{classType}”的属性“{name}”", + "memberIsAbstract": "未实现“{type}.{name}”", + "memberIsAbstractMore": "还有 {count} 个...", + "memberIsClassVarInProtocol": "“{name}”在协议中定义为 ClassVar", + "memberIsInitVar": "\"{name}\" 是 init-only 的字段", + "memberIsInvariant": "“{name}”是固定的,因为它是可变的", + "memberIsNotClassVarInClass": "“{name}”必须定义为 ClassVar 才能与协议兼容", + "memberIsNotClassVarInProtocol": "“{name}”未在协议中定义为 ClassVar", + "memberIsNotReadOnlyInProtocol": "“{name}”在协议中不是只读的", + "memberIsReadOnlyInProtocol": "“{name}”在协议中是只读的", + "memberIsWritableInProtocol": "“{name}”在协议中是可写入的", + "memberSetClassVar": "无法通过类实例分配属性“{name}”,因为它是 ClassVar", + "memberTypeMismatch": "\"{name}\"是不兼容的类型", + "memberUnknown": "属性“{name}”未知", + "metaclassConflict": "元类“{metaclass1}”与“{metaclass2}”存在冲突", + "missingDeleter": "缺少 Property deleter 方法", + "missingGetter": "缺少 Property getter 方法", + 
"missingSetter": "缺少 Property setter 方法", + "namedParamMissingInDest": "额外参数“{name}”", + "namedParamMissingInSource": "缺少关键字参数“{name}”", + "namedParamTypeMismatch": "类型为“{sourceType}”的关键字参数“{name}”与类型“{destType}”不兼容", + "namedTupleNotAllowed": "不能对实例或类检查使用 NamedTuple", + "newMethodLocation": "__new__方法已在类“{type}”中定义", + "newMethodSignature": "__new__的签名为“{type}”", + "newTypeClassNotAllowed": "不能将使用 NewType 创建的类型用于实例和类检查", + "noOverloadAssignable": "没有重载函数与类型“{type}”匹配", + "noneNotAllowed": "不能对实例或类检查使用 None", + "orPatternMissingName": "缺少名称: {name}", + "overloadIndex": "重载 {index} 是最接近的匹配项", + "overloadNotAssignable": "无法分配“{name}”的一个或多个重载", + "overloadSignature": "此处定义了重载签名", + "overriddenMethod": "替代的方法", + "overriddenSymbol": "替代符号", + "overrideInvariantMismatch": "替代类型“{overrideType}”与基类型“{baseType}”不同", + "overrideIsInvariant": "变量是可变的,因此其类型是固定的", + "overrideNoOverloadMatches": "替代中没有与基本方法兼容的重载签名", + "overrideNotClassMethod": "基方法声明为 classmethod,但替代不是", + "overrideNotInstanceMethod": "基方法声明为实例方法,但替代不是", + "overrideNotStaticMethod": "基方法声明为 staticmethod,但替代不是", + "overrideOverloadNoMatch": "替代不处理基方法的所有重载", + "overrideOverloadOrder": "替代方法的重载必须与基方法的顺序相同", + "overrideParamKeywordNoDefault": "关键字参数\"{name}\"不匹配: 基参数具有默认参数值,替代参数不匹配", + "overrideParamKeywordType": "关键字参数“{name}”类型不匹配: 基参数是类型“{baseType}”,替代参数为类型“{overrideType}”", + "overrideParamName": "参数{index}名称不匹配: 基参数命名为 \"{baseName}\",替代参数命名为 \"{overrideName}\"", + "overrideParamNameExtra": "基数中缺少参数“{name}”", + "overrideParamNameMissing": "替代中缺少参数“{name}”", + "overrideParamNamePositionOnly": "参数 {index} 不匹配: 基参数“{baseName}”是关键字参数,替代参数为仅位置参数", + "overrideParamNoDefault": "参数 {index} 不匹配: 基参数具有默认参数值,替代参数不匹配", + "overrideParamType": "参数 {index} 类型不匹配: 基参数为类型“{baseType}”,替代参数为类型“{overrideType}”", + "overridePositionalParamCount": "位置参数计数不匹配;基方法具有 {baseCount},但替代具有 {overrideCount}", + "overrideReturnType": "返回类型不匹配:基方法返回类型\"{baseType}\",替代返回类型\"{overrideType}\"", + "overrideType": "基类将类型定义为\"{type}\"", + 
"paramAssignment": "参数 {index}: 类型“{sourceType}”与类型“{destType}”不兼容", + "paramSpecMissingInOverride": "替代方法中缺少 ParamSpec 参数", + "paramType": "参数类型为“{paramType}”", + "privateImportFromPyTypedSource": "改为从\"{module}\"导入", + "propertyAccessFromProtocolClass": "不能以类变量形式存取协议类中定义的属性", + "propertyMethodIncompatible": "Property 方法 \"{name}\" 不兼容", + "propertyMethodMissing": "替代中缺少 Property 方法 \"{name}\"", + "propertyMissingDeleter": "Property \"{name}\" 没有定义的 deleter", + "propertyMissingSetter": "Property \"{name}\" 没有定义的 setter", + "protocolIncompatible": "“{sourceType}”与协议“{destType}”不兼容", + "protocolMemberMissing": "“{name}”不存在", + "protocolRequiresRuntimeCheckable": "Protocol 类必须为 @runtime_checkable 才能用于实例和类检查", + "protocolSourceIsNotConcrete": "“{sourceType}”不是具体类类型,无法分配给类型“{destType}”", + "protocolUnsafeOverlap": "“{name}”的属性与协议具有相同的名称", + "pyrightCommentIgnoreTip": "使用 \"# pyright: ignore[]\" 抑制单行诊断", + "readOnlyAttribute": "属性“{name}”为只读", + "seeClassDeclaration": "查看类声明", + "seeDeclaration": "参见声明", + "seeFunctionDeclaration": "请参阅函数声明", + "seeMethodDeclaration": "请参阅方法声明", + "seeParameterDeclaration": "请参阅参数声明", + "seeTypeAliasDeclaration": "请参阅类型别名声明", + "seeVariableDeclaration": "查看变量声明", + "tupleAssignmentMismatch": "类型 \"{type}\" 与目标 tuple 不兼容", + "tupleEntryTypeMismatch": "Tuple 条目 {entry} 的类型不正确", + "tupleSizeIndeterminateSrc": "Tuple 大小不匹配; 应为 {expected},但收到不确定的值", + "tupleSizeIndeterminateSrcDest": "Tuple 大小不匹配; 应为 {expected} 或更多,但收到不确定的值", + "tupleSizeMismatch": "Tuple 大小不匹配; 应为 {expected},但收到 {received}", + "tupleSizeMismatchIndeterminateDest": "Tuple 大小不匹配; 应为 {expected} 或更多,但收到 {received}", + "typeAliasInstanceCheck": "使用 \"type\" 语句创建的类型别名不能与实例和类检查一起使用", + "typeAssignmentMismatch": "类型“{sourceType}”不可分配给类型“{destType}”", + "typeBound": "类型“{sourceType}”不可分配给类型变量“{name}”的上限“{destType}”", + "typeConstrainedTypeVar": "类型“{type}”不可分配给受约束的类型变量“{name}”", + "typeIncompatible": "“{sourceType}”不可分配给“{destType}”", + "typeNotClass": "“{type}”不是类", + 
"typeNotStringLiteral": "“{type}”不是字符串文本", + "typeOfSymbol": "“{name}”的类型为“{type}”", + "typeParamSpec": "类型“{type}”与 ParamSpec“{name}”不兼容", + "typeUnsupported": "类型“{type}”不受支持", + "typeVarDefaultOutOfScope": "类型变量“{name}”不在范围内", + "typeVarIsContravariant": "类型参数 \"{name}\" 是逆变的,但 \"{sourceType}\" 不是 \"{destType}\" 的超类型", + "typeVarIsCovariant": "类型参数 \"{name}\" 是协变的,但 \"{sourceType}\" 不是 \"{destType}\" 的子类型", + "typeVarIsInvariant": "类型参数 \"{name}\" 是固定的,但 \"{sourceType}\" 与 \"{destType}\" 不同", + "typeVarNotAllowed": "不允许对实例或类检查使用 TypeVar", + "typeVarTupleRequiresKnownLength": "TypeVarTuple 不能绑定到长度未知的 tuple", + "typeVarUnnecessarySuggestion": "请改用 {type}", + "typeVarUnsolvableRemedy": "提供一个重载,该重载指定未提供参数时的返回类型", + "typeVarsMissing": "缺少类型变量: {names}", + "typedDictBaseClass": "类“{type}”不是 TypedDict", + "typedDictClassNotAllowed": "不允许对实例或类检查使用 TypedDict 类", + "typedDictClosedExtraNotAllowed": "无法添加项“{name}”", + "typedDictClosedExtraTypeMismatch": "无法添加类型为“{type}”的项“{name}”", + "typedDictClosedFieldNotReadOnly": "无法添加项“{name}”,因为它必须为 ReadOnly", + "typedDictClosedFieldNotRequired": "无法添加项“{name}”,因为它必须是 NotRequired", + "typedDictExtraFieldNotAllowed": "“{type}”中不存在“{name}”", + "typedDictExtraFieldTypeMismatch": "“{name}”的类型与“{type}”中的 “extra_items” 类型不兼容", + "typedDictFieldMissing": "\"{type}\"中缺少\"{name}\"", + "typedDictFieldNotReadOnly": "\"{name}\"在\"{type}\"中不是只读的", + "typedDictFieldNotRequired": "“{type}”中不需要“{name}”", + "typedDictFieldRequired": "\"{type}\"中需要\"{name}\"", + "typedDictFieldTypeMismatch": "类型“{type}”不可分配给项“{name}”", + "typedDictFieldUndefined": "“{name}”是类型“{type}”中的未定义项", + "typedDictKeyAccess": "使用 [\"{name}\"] 引用 TypedDict 中的项", + "typedDictNotAllowed": "不能对实例或类检查使用 TypedDict", + "unhashableType": "类型“{type}”不可哈希", + "uninitializedAbstractVariable": "实例变量“{name}”在抽象基类“{classType}”中定义,但未初始化", + "unreachableExcept": "“{exceptionType}”是“{parentType}”的子类", + "useDictInstead": "使用 dict[T1, T2] 指示字典类型", + "useListInstead": "使用 list[T] 指示 list 类型或使用 
T1 | T2 指示 union 类型", + "useTupleInstead": "使用 tuple[T1, ..., Tn] 指示 tuple 类型或使用 T1 | T2 指示 union 类型", + "useTypeInstead": "改用 type[T]", + "varianceMismatchForClass": "类型参数\"{typeVarName}\"的差异与基类\"{className}\"不兼容", + "varianceMismatchForTypeAlias": "类型参数\"{typeVarName}\"的差异与\"{typeAliasParam}\"不兼容" + }, + "Service": { + "longOperation": "枚举工作区源文件需要很长时间。请考虑打开子文件夹。[了解详细信息](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/localization/package.nls.zh-tw.json b/python-parser/packages/pyright-internal/src/localization/package.nls.zh-tw.json new file mode 100644 index 00000000..79db17a3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/localization/package.nls.zh-tw.json @@ -0,0 +1,852 @@ +{ + "CodeAction": { + "createTypeStub": "建立類型 Stub", + "createTypeStubFor": "建立 \"{moduleName}\" 的類型 Stub", + "executingCommand": "執行命令", + "filesToAnalyzeCount": "要分析的 {count} 個檔案", + "filesToAnalyzeOne": "1 個要分析的檔案", + "findingReferences": "尋找參考", + "organizeImports": "整理匯入" + }, + "Completion": { + "autoImportDetail": "自動匯入", + "indexValueDetail": "索引值" + }, + "Diagnostic": { + "abstractMethodInvocation": "無法呼叫方法 \"{method}\",因為它是抽象且未執行", + "annotatedMetadataInconsistent": "標註的中繼資料類型 \"{metadataType}\" 與類型 \"{type}\" 不相容", + "annotatedParamCountMismatch": "參數註釋計數不符: 應為 {expected},但收到 {received}", + "annotatedTypeArgMissing": "預期 \"Annotated\" 有一個類型引數和一或多個註釋", + "annotationBytesString": "類型運算式無法使用位元組字串常值", + "annotationFormatString": "類型運算式不能使用格式字串常值 (f-strings)", + "annotationNotSupported": "此陳述式不支援類型註釋", + "annotationRawString": "類型運算式無法使用原始字串常值", + "annotationSpansStrings": "型別運算式無法跨越多個字串常值", + "annotationStringEscape": "型別運算式不可包含逸出字元", + "annotationTemplateString": "類型運算式無法使用範本字串常值 (t-strings)", + "argAssignment": "類型 \"{argType}\" 的引數不能指派至類型 \"{paramType}\" 的參數", + "argAssignmentFunction": "類型 \"{argType}\" 的引數不能指派至函式 \"{functionName}\" 中類型 \"{paramType}\" 的參數", + "argAssignmentParam": "類型 \"{argType}\" 
的引數不能指派至類型 \"{paramType}\" 的參數 \"{paramName}\"", + "argAssignmentParamFunction": "類型 \"{argType}\" 的引數不能指派至函式 \"{functionName}\" 中類型 \"{paramType}\" 的參數 \"{paramName}\"", + "argMissingForParam": "參數 {name} 遺漏引數", + "argMissingForParams": "參數 {names} 的引數遺失", + "argMorePositionalExpectedCount": "預期有另 {expected} 個位置引數", + "argMorePositionalExpectedOne": "預期有另 1 個位置引數", + "argPositional": "預期為位置引數", + "argPositionalExpectedCount": "預期有 {expected} 個位置引數", + "argPositionalExpectedOne": "預期為 1 個位置引數", + "argTypePartiallyUnknown": "引數型別為部分未知", + "argTypeUnknown": "引數類型未知", + "assertAlwaysTrue": "Assert 運算式一律會評估為 true", + "assertTypeArgs": "\"assert_type\" 需要兩個位置引數", + "assertTypeTypeMismatch": "\"assert_type\" 不符: 預期為 \"{expected}\",但收到 \"{received}\"", + "assignmentExprComprehension": "指派運算式目標 \"{name}\" 不能使用與目標 Comprehension 相同的名稱", + "assignmentExprContext": "Assignment 運算式必須在模組、函式或 Lambda 內", + "assignmentExprInSubscript": "下標內的 Assignment 運算式僅在 Python 3.10 和更新版本中支援", + "assignmentInProtocol": "必須在類別主體內明確宣告 Protocol 類別內的執行個體或類別變數", + "assignmentTargetExpr": "運算式不能是指派目標", + "asyncNotInAsyncFunction": "不允許在非 async 之外使用 \"async\"", + "awaitIllegal": "使用 \"await\" 需要 Python 3.5 或更新版本", + "awaitNotAllowed": "類型運算式不能使用 \"await\"", + "awaitNotInAsync": "只在 async 函式內允許 \"await\"", + "backticksIllegal": "Python 3.x 中不支援以反引號括住的運算式; 請改為使用 repr", + "baseClassCircular": "類別無法從本身衍生", + "baseClassFinal": "基底類別 \"{type}\" 標示為 final,且不能設為子類別", + "baseClassIncompatible": "{type} 的基底類別互不相容", + "baseClassInvalid": "類別的引數必須是基底類別", + "baseClassMethodTypeIncompatible": "類別 \"{classType}\" 的基底類別以不相容的方式定義方法 \"{name}\"", + "baseClassUnknown": "基底類別類型未知,遮蔽衍生類別的類型", + "baseClassVariableTypeIncompatible": "類別 \"{classType}\" 的基底類別以不相容的方式定義變數 \"{name}\"", + "binaryOperationNotAllowed": "類型運算式中不允許二元運算子", + "bindParamMissing": "無法繫結方法 \"{methodName}\",因為缺少 \"self\" 或 \"cls\" 參數", + "bindTypeMismatch": "無法繫結方法 \"{methodName}\",因為 \"{type}\" 無法指派給參數 \"{paramName}\"", + "breakInExceptionGroup": 
"\"except*\" 區塊中不允許 \"break\"", + "breakOutsideLoop": "\"break\" 只能在迴圈內使用", + "bytesUnsupportedEscape": "bytes 常值中不支援的逸出序列", + "callableExtraArgs": "預期 \"Callable\" 只有兩個類型引數", + "callableFirstArg": "預期為參數類型清單或 \"...\"", + "callableNotInstantiable": "不能具現化類型 \"{type}\"", + "callableSecondArg": "預期為傳回類型作為 \"Callable\" 的第二個類型引數", + "casePatternIsIrrefutable": "僅允許最後一個案例陳述式使用無法推翻的模式", + "classAlreadySpecialized": "類型 \"{type}\" 已特殊化", + "classDecoratorTypeUnknown": "不具型別的類別裝飾項目會掩蓋類別的型別; 略過裝飾項目", + "classDefinitionCycle": "\"{name}\" 的類別定義視其本身而定", + "classGetItemClsParam": "__class_getitem__ 覆寫應接受 \"cls\" 參數", + "classMethodClsParam": "類別方法應採用 \"cls\" 參數", + "classNotRuntimeSubscriptable": "類別 \"{name}\" 的下標會產生執行階段例外; 以引號括住類型運算式", + "classPatternBuiltInArgPositional": "類別模式僅接受位置子模式", + "classPatternNewType": "\"{type}\" 無法用於類別模式,因為它是使用 NewType 所定義", + "classPatternPositionalArgCount": "類別 \"{type}\" 的位置模式太多;預期 {expected} 但收到 {received}", + "classPatternTypeAlias": "無法在類別模式中使用 \"{type}\",因為它是特殊的型別別名", + "classPropertyDeprecated": "類別屬性在 Python 3.11 中已取代,在 Python 3.13 中將不受支援", + "classTypeParametersIllegal": "類別類型參數語法需要 Python 3.12 或更新版本", + "classVarFirstArgMissing": "\"ClassVar\" 後面應有一個型別引數", + "classVarNotAllowed": "此內容中不允許 \"ClassVar\"", + "classVarOverridesInstanceVar": "類別變數 \"{name}\" 會覆寫類別 \"{className}\" 中相同名稱的執行個體變數", + "classVarTooManyArgs": "\"ClassVar\" 後面應只能有一個型別引數", + "classVarWithTypeVar": "\"ClassVar\" 型別不能包含型別變數", + "clsSelfParamTypeMismatch": "參數 \"{name}\" 的型別必須是其類別 \"{classType}\" 的超級型別", + "codeTooComplexToAnalyze": "程式碼太複雜而無法分析; 重構為副常式或減少條件式程式碼路徑,以降低複雜性", + "collectionAliasInstantiation": "無法將型別 \"{type}\" 具現化,請改用 \"{alias}\"", + "comparisonAlwaysFalse": "條件一律會評估為 False,因為類型 \"{leftType}\" 和 \"{rightType}\" 沒有重疊", + "comparisonAlwaysTrue": "條件一律會評估為 True,因為類型 \"{leftType}\" 和 \"{rightType}\" 沒有重疊", + "comprehensionInDict": "Comprehension 不能與其他字典項目搭配使用", + "comprehensionInSet": "Comprehension 無法與其他 set 輸入項目一起使用", + "concatenateContext": "此內容中不允許 
\"Concatenate\"", + "concatenateParamSpecMissing": "\"Concatenate\" 的最後一個類型引數必須是 ParamSpec 或 \"...\"", + "concatenateTypeArgsMissing": "\"Concatenate\" 至少需要兩個型別引數", + "conditionalOperandInvalid": "型別 \"{type}\" 的條件運算元無效", + "constantRedefinition": "\"{name}\" 是常數 (因為它是大寫) 且無法重新定義", + "constructorParametersMismatch": "類別 \"{classType}\" 中__new__與__init__的簽章不相符", + "containmentAlwaysFalse": "運算式一律會評估為 False,因為型別 \"{leftType}\" 和 \"{rightType}\" 沒有重疊", + "containmentAlwaysTrue": "運算式一律會評估為 True,因為型別 \"{leftType}\" 和 \"{rightType}\" 沒有重疊", + "continueInExceptionGroup": "\"except*\" 區塊中不允許 \"continue\"", + "continueOutsideLoop": "\"continue\" 只能在 loop 內使用", + "coroutineInConditionalExpression": "條件運算式參考協同程式,一律評估為 True", + "dataClassBaseClassFrozen": "未凍結的類別無法繼承已凍結的類別", + "dataClassBaseClassNotFrozen": "凍結的類別不能從未凍結的類別繼承", + "dataClassConverterFunction": "類型 \"{argType}\" 的引數不是類型 \"{fieldType}\" 欄位 \"{fieldName}\" 的有效轉換程式", + "dataClassConverterOverloads": "\"{funcName}\" 沒有任何多載是類型 \"{fieldType}\" 欄位 \"{fieldName}\" 的有效轉換程式", + "dataClassFieldInheritedDefault": "\"{fieldName}\" 覆寫相同名稱的欄位,但缺少預設值", + "dataClassFieldWithDefault": "沒有預設值的欄位無法出現在具有預設值的欄位後面", + "dataClassFieldWithPrivateName": "Dataclass 欄位不能使用私人名稱", + "dataClassFieldWithoutAnnotation": "沒有型別註釋的 Dataclass 欄位會造成執行階段例外狀況", + "dataClassPostInitParamCount": "Dataclass __post_init__ 不正確的參數計數; InitVar 欄位數目為 {expected}", + "dataClassPostInitType": "欄位 \"{fieldName}\" 的 Dataclass __post_init__ 方法參數類型不符", + "dataClassSlotsOverwrite": "__slots__已定義在類別中", + "dataClassTransformExpectedBoolLiteral": "應為靜態評估為 True 或 False 的運算式", + "dataClassTransformFieldSpecifier": "應為類別或函式的 tuple,但收到的類別為 \"{type}\"", + "dataClassTransformPositionalParam": "\"dataclass_transform\" 的所有引數都必須是關鍵字引數", + "dataClassTransformUnknownArgument": "dataclass_transform 不支援引數 \"{name}\"", + "dataProtocolInSubclassCheck": "issubclass 呼叫中不允許資料通訊協定 (包含非方法屬性)", + "declaredReturnTypePartiallyUnknown": "宣告的傳回類型 \"{returnType}\" 部分未知", + 
"declaredReturnTypeUnknown": "宣告的傳回類型未知", + "defaultValueContainsCall": "參數預設值運算式內不允許函式呼叫和可變物件", + "defaultValueNotAllowed": "具有 \"*\" 或 \"**\" 的參數不能有預設值", + "delTargetExpr": "無法刪除運算式", + "deprecatedClass": "類別 \"{name}\" 已淘汰", + "deprecatedConstructor": "類別 \"{name}\" 的建構函式已取代", + "deprecatedDescriptorDeleter": "描述項 \"{name}\" 的 \"__delete__\" 方法已被取代", + "deprecatedDescriptorGetter": "描述項 \"{name}\" 的 \"__get__\" 方法已被取代", + "deprecatedDescriptorSetter": "描述項 \"{name}\" 的 \"__set__\" 方法已被取代", + "deprecatedFunction": "函式 \"{name}\" 已取代", + "deprecatedMethod": "類別 \"{className}\" 中的方法 \"{name}\" 已取代", + "deprecatedPropertyDeleter": "The deleter for property \"{name}\" is deprecated", + "deprecatedPropertyGetter": "The getter for property \"{name}\" is deprecated", + "deprecatedPropertySetter": "The setter for property \"{name}\" is deprecated", + "deprecatedType": "此類型已隨著 Python {version} 取代; 請改為使用 \"{replacement}\"", + "dictExpandIllegalInComprehension": "Comprehension 中不允許字典擴充", + "dictInAnnotation": "類型運算式中不允許字典運算式", + "dictKeyValuePairs": "字典項目必須包含金鑰/值組", + "dictUnpackIsNotMapping": "預期為字典解壓縮運算子的對應", + "dunderAllSymbolNotPresent": "\"{name}\" 已在 __all__ 中指定,但在模組中不存在", + "duplicateArgsParam": "只允許一個 \"*\" 參數", + "duplicateBaseClass": "不允許重複的基底類別", + "duplicateCapturePatternTarget": "擷取目標 \"{name}\" 不能在相同模式中出現一次以上", + "duplicateCatchAll": "只允許一個 catch-all except 子句", + "duplicateEnumMember": "已宣告 Enum 成員 \"{name}\"", + "duplicateGenericAndProtocolBase": "只允許一個 Generic[...] 或 Protocol[...] 
基底類別", + "duplicateImport": "\"{importName}\" 已匯入多次", + "duplicateKeywordOnly": "只允許一個 \"*\" 分隔符號", + "duplicateKwargsParam": "只允許一個 \"**\" 參數", + "duplicateParam": "重複的參數 \"{name}\"", + "duplicatePositionOnly": "僅允許一個 \"/\" 參數", + "duplicateStarPattern": "模式序列中僅允許一個 \"*\" 模式", + "duplicateStarStarPattern": "僅允許輸入一個 \"**\"", + "duplicateUnpack": "list 中僅允許一個解除封裝作業", + "ellipsisAfterUnpacked": "\"...\" 不能與解壓縮的 TypeVarTuple 或 tuple 一起使用", + "ellipsisContext": "此內容中不允許 \"...\"", + "ellipsisSecondArg": "\"...\" 只允許做為兩個引數的第二個", + "enumClassOverride": "Enum 類別 \"{name}\" 為 final,且不能設為子類別", + "enumMemberDelete": "Enum member \"{name}\" cannot be deleted", + "enumMemberSet": "Enum member \"{name}\" cannot be assigned", + "enumMemberTypeAnnotation": "Type annotations are not allowed for enum members", + "exceptGroupMismatch": "Try 陳述式不能同時包含 \"except\" 與 “except*”", + "exceptGroupRequiresType": "例外狀況群組語法 (\"except*\") 需要例外狀況類型", + "exceptRequiresParens": "在 Python 3.14 之前,必須以小括號括住多個例外類型", + "exceptWithAsRequiresParens": "使用 \"as\" 時,必須以小括號括住多個例外類型", + "exceptionGroupIncompatible": "例外群組語法 (\"except*\") 需要 Python 3.11 或更新版本", + "exceptionGroupTypeIncorrect": "except* 中的例外狀況類型不能衍生自 BaseGroupException", + "exceptionTypeIncorrect": "\"{type}\" 不是衍生自 BaseException", + "exceptionTypeNotClass": "\"{type}\" 不是有效的例外類別", + "exceptionTypeNotInstantiable": "例外類型 \"{type}\" 的建構函式需要一或多個引數", + "expectedAfterDecorator": "裝飾項目後面應有函式或類別宣告", + "expectedArrow": "預期為 \"->\",後面接著傳回類型註釋", + "expectedAsAfterException": "例外狀況型別後面應有 \"as\"", + "expectedAssignRightHandExpr": "\"=\" 右側預期為運算式", + "expectedBinaryRightHandExpr": "運算子右側預期為運算式", + "expectedBoolLiteral": "應為 True 或 False", + "expectedCase": "預期為 \"case\" 陳述式", + "expectedClassName": "預期為類別名稱", + "expectedCloseBrace": "\"{\" 未關閉", + "expectedCloseBracket": "\"[\" 未關閉", + "expectedCloseParen": "\"(\" 未關閉", + "expectedColon": "預期為 \":\"", + "expectedComplexNumberLiteral": "模式比對預期為複數常值", + "expectedDecoratorExpr": "Python 3.9 之前的裝飾項目不支援運算式格式", + 
"expectedDecoratorName": "預期為裝飾項目名稱", + "expectedDecoratorNewline": "預期裝飾項目結尾為換行符號", + "expectedDelExpr": "\"del\" 後預期為運算式", + "expectedElse": "預期為 \"else\"", + "expectedEquals": "預期為 \"=\"", + "expectedExceptionClass": "不正確的例外狀況類別或物件", + "expectedExceptionObj": "預期為例外物件、例外類別或 None", + "expectedExpr": "應為運算式", + "expectedFunctionAfterAsync": "\"async\" 後面應有函式定義", + "expectedFunctionName": "\"def\" 後預期為函式名稱", + "expectedIdentifier": "應為識別碼", + "expectedImport": "預期為 \"import\"", + "expectedImportAlias": "\"as\" 之後預期為符號", + "expectedImportSymbols": "預期 \"import\" 後為一或多個符號名稱", + "expectedIn": "預期為 \"in\"", + "expectedInExpr": "\"in\" 後預期為運算式", + "expectedIndentedBlock": "預期為縮排區塊", + "expectedMemberName": "\".\" 後面應該接著屬性名稱。", + "expectedModuleName": "應為模組名稱", + "expectedNameAfterAs": "\"as\" 後面應有符號名稱", + "expectedNamedParameter": "關鍵字參數必須接著 \"*\"", + "expectedNewline": "預期為換行符號", + "expectedNewlineOrSemicolon": "陳述式必須以換行符號或分號分隔", + "expectedOpenParen": "應為 \"(\"", + "expectedParamName": "預期為參數名稱", + "expectedPatternExpr": "預期為模式運算式", + "expectedPatternSubjectExpr": "應為模式主體運算式", + "expectedPatternValue": "預期為格式 \"a.b\" 的模式值運算式", + "expectedReturnExpr": "\"return\" 後預期為運算式", + "expectedSliceIndex": "應為索引或切片運算式", + "expectedTypeNotString": "預期為類型,但收到字串常值", + "expectedTypeParameterName": "應為型別參數名稱", + "expectedYieldExpr": "yield 陳述式中應有運算式", + "finalClassIsAbstract": "類別 \"{type}\" 標示為 final,且必須實作所有抽象符號", + "finalContext": "此內容中不允許 \"Final\"", + "finalInLoop": "無法在迴圈內指派 \"Final\" 變數", + "finalMethodOverride": "方法 \"{name}\" 不能覆寫類別 \"{className}\" 中定義的 final 方法", + "finalNonMethod": "無法將函式 \"{name}\" 標示為 @final,因為它不是方法", + "finalReassigned": "\"{name}\" 已宣告為 Final,因此無法重新指派", + "finalRedeclaration": "\"{name}\" 先前已宣告為 Final", + "finalRedeclarationBySubclass": "不能重新宣告 \"{name}\",因為父類別 \"{className}\" 將其宣告為 Final", + "finalTooManyArgs": "預期 \"Final\" 之後為單一類型引數", + "finalUnassigned": "\"{name}\" 宣告為 Final,但未指派值", + "finallyBreak": "\"break\" 不能用來結束 “finally” 區塊", + 
"finallyContinue": "\"continue\" 不能用來結束 “finally” 區塊", + "finallyReturn": "\"return\" 不能用來結束 “finally” 區塊", + "formatStringBrace": "F 字串常值內不允許單一右大括弧; 請使用雙右大括弧", + "formatStringBytes": "格式字串常值 (f-strings) 不可為二進位", + "formatStringDebuggingIllegal": "F 字串偵錯指定名稱 \"=\" 需要 Python 3.8 或較新的版本", + "formatStringEscape": "Python 3.12 之前的 f 字串運算式部分不允許逸出序列 (反斜線)", + "formatStringExpectedConversion": "預期 f-string 中的 \"!\" 後為轉換指定元", + "formatStringIllegal": "格式字串常值 (f-strings) 需要 Python 3.6 或較新的版本", + "formatStringInPattern": "模式中不允許格式字串", + "formatStringNestedFormatSpecifier": "運算式在格式字串指定元內巢狀太深", + "formatStringNestedQuote": "f 字串內的巢狀字串無法使用與 Python 3.12 之前的 f 字串相同的引號字元", + "formatStringTemplate": "格式字串常值 (f-strings) 不能也是範本字串 (t-strings)", + "formatStringUnicode": "格式字串常值 (f-strings) 不能是 Unicode", + "formatStringUnterminated": "f 字串中有未結束的運算式; 應有 \"}\"", + "functionDecoratorTypeUnknown": "非類型化函式修飾項目會遮蔽函式的類型; 忽略裝飾項目", + "functionInConditionalExpression": "條件運算式參考函式,一律評估為 True", + "functionTypeParametersIllegal": "函式型別參數語法需要 Python 3.12 或較新的版本", + "futureImportLocationNotAllowed": "來自 __future__ 的匯入必須位於檔案的開頭", + "generatorAsyncReturnType": "Return type of async generator function must be compatible with \"AsyncGenerator[{yieldType}, Any]\"", + "generatorNotParenthesized": "如果不是唯一引數,則必須將產生器運算式用括弧括住", + "generatorSyncReturnType": "產生器函式的傳回類型必須與 \"Generator[{yieldType}, Any, Any]\" 相容", + "genericBaseClassNotAllowed": "\"Generic\" 基底類別不能與型別參數語法一起使用", + "genericClassAssigned": "不能指派一般類別類型", + "genericClassDeleted": "無法刪除一般類別型別", + "genericInstanceVariableAccess": "透過類別存取泛型執行個體變數不明確", + "genericNotAllowed": "\"Generic\" 在此內容中無效", + "genericTypeAliasBoundTypeVar": "類別內的一般類型別名不能使用繫結類型變數 {names}", + "genericTypeArgMissing": "\"Generic\" 至少需要一個類型引數", + "genericTypeArgTypeVar": "\"Generic\" 的類型引數必須是類型變數", + "genericTypeArgUnique": "\"Generic\" 的類型引數必須是唯一的", + "globalReassignment": "\"{name}\" 在 global 宣告之前指派", + "globalRedefinition": "\"{name}\" 已宣告為 global", + "implicitStringConcat": 
"不允許隱含字串串連", + "importCycleDetected": "在匯入鏈結中偵測到迴圈", + "importDepthExceeded": "匯入鏈結深度超過 {depth}", + "importResolveFailure": "無法解析匯入 \"{importName}\"", + "importSourceResolveFailure": "無法從來源解析匯入 \"{importName}\"", + "importSymbolUnknown": "\"{name}\" 是未知的匯入符號", + "incompatibleMethodOverride": "方法 \"{name}\" 會以不相容的方式覆寫類別 \"{className}\"", + "inconsistentIndent": "取消縮排量與先前縮排不符", + "inconsistentTabs": "在縮排中使用 Tab 字元和空格不一致", + "initMethodSelfParamTypeVar": "\"__init__\" 方法之 \"self\" 參數的類型註釋不得包含類別範圍的類型變數", + "initMustReturnNone": "\"__init__\" 的傳回類型必須為 None", + "initSubclassCallFailed": "__init_subclass__ 方法不正確的關鍵字引數", + "initSubclassClsParam": "__init_subclass__ 覆寫應接受 \"cls\" 參數", + "initVarNotAllowed": "此內容中不允許 \"InitVar\"", + "instanceMethodSelfParam": "執行個體方法應該採用 \"self\" 參數", + "instanceVarOverridesClassVar": "執行個體變數 \"{name}\" 覆寫類別 \"{className}\" 中相同名稱的類別變數", + "instantiateAbstract": "無法將抽象類別 \"{type}\" 具現化", + "instantiateProtocol": "無法將 Protocol 類別 \"{type}\" 具現化", + "internalBindError": "繫結檔案 \"{file}\" 時發生內部錯誤: {message}", + "internalParseError": "剖析檔案 \"{file}\" 時發生內部錯誤: {message}", + "internalTypeCheckingError": "類型檢查檔案 \"{file}\" 時發生內部錯誤: {message}", + "invalidIdentifierChar": "識別碼中的字元無效", + "invalidStubStatement": "陳述式在類型 stub 檔案內沒有意義", + "invalidTokenChars": "權杖中的字元 \"{text}\" 無效", + "isInstanceInvalidType": "\"isinstance\" 的第二個引數必須是類別或類別的tuple", + "isSubclassInvalidType": "\"issubclass\" 的第二個引數必須是類別的類別或 tuple", + "keyValueInSet": "set 內不允許金鑰/值組", + "keywordArgInTypeArgument": "關鍵字引數無法用於型別引數清單", + "keywordOnlyAfterArgs": "\"*\" 參數之後不允許僅限關鍵字的引數分隔符號", + "keywordParameterMissing": "一或多個關鍵字參數必須接在 \"*\" 參數後面", + "keywordSubscriptIllegal": "不支援下標內的關鍵字引數", + "lambdaReturnTypePartiallyUnknown": "Lambda 的傳回類型 \"{returnType}\" 部分未知", + "lambdaReturnTypeUnknown": "Lambda 的傳回類型未知", + "listAssignmentMismatch": "類型 \"{type}\" 的運算式不能指派至目標清單", + "listInAnnotation": "型別運算式中不允許 List 運算式", + "literalEmptyArgs": "\"Literal\" 後面應有一或多個型別引數", + "literalNamedUnicodeEscape": 
"\"Literal\" 字串常值中不支援具名 Unicode 逸出序列", + "literalNotAllowed": "沒有類型參數,\"Literal\" 不能在此內容中使用", + "literalNotCallable": "Literal 類型不能具現化", + "literalUnsupportedType": "\"Literal\" 的類型引數必須是 None、literal (int、bool、str 或 bytes) 或 enum 值", + "matchIncompatible": "Match 陳述式需要 Python 3.10 或更新版本", + "matchIsNotExhaustive": "match 陳述式內的案例並未完整處理所有值", + "maxParseDepthExceeded": "超過剖析深度上限; 將運算式分成較小的子運算式", + "memberAccess": "無法存取類別 \"{type}\" 的屬性 \"{name}\"", + "memberDelete": "無法刪除類別 \"{type}\" 的屬性 \"{name}\"", + "memberSet": "無法指派至類別 \"{type}\" 的屬性 \"{name}\"", + "metaclassConflict": "衍生類別的變換類型必須是其所有基底類別的變換類型的子類別", + "metaclassDuplicate": "只能提供一個變換類型", + "metaclassIsGeneric": "變換類型不能是一般", + "methodNotDefined": "\"{name}\" 方法未定義", + "methodNotDefinedOnType": "類型 \"{type}\" 上未定義 \"{name}\" 方法", + "methodOrdering": "不能建立一致的方法順序", + "methodOverridden": "\"{name}\" 以不相容型別 \"{type}\" 覆寫類別 \"{className}\" 中具有相同名稱的方法", + "methodReturnsNonObject": "\"{name}\" 方法未傳回物件", + "missingSuperCall": "方法 \"{methodName}\" 未呼叫父類別中相同名稱的方法", + "mixingBytesAndStr": "無法串連 Bytes 和 str 值", + "moduleAsType": "模組不能當作型別來使用", + "moduleNotCallable": "模組無法呼叫", + "moduleUnknownMember": "\"{memberName}\" 不是模組 \"{moduleName}\" 的已知屬性", + "namedExceptAfterCatchAll": "catch-all except 子句後面不能出現具名 except 子句", + "namedParamAfterParamSpecArgs": "關鍵字參數 \"{name}\" 不能在簽章中出現在 ParamSpec args 參數之後", + "namedTupleEmptyName": "具名 tuple 內的名稱不可為空白", + "namedTupleEntryRedeclared": "無法覆寫 \"{name}\",因為父代類別 \"{className}\" 是具名的 tuple", + "namedTupleFieldUnderscore": "Named tuple 欄位名稱不能以底線開頭", + "namedTupleFirstArg": "預期為具名 tuple 類別名稱作為第一個引數", + "namedTupleMultipleInheritance": "不支援使用 NamedTuple 的多重繼承", + "namedTupleNameKeyword": "欄位名稱不能是關鍵字", + "namedTupleNameType": "指定輸入項目名稱和類型預期有兩個輸入項目 tuple", + "namedTupleNameUnique": "具名 tuple 內的名稱必須是唯一的", + "namedTupleNoTypes": "\"namedtuple\" 未提供 tuple 項目的類型; 請改為使用 \"NamedTuple\"", + "namedTupleSecondArg": "預期為具名 tuple 項目 list 作為第二個引數", + "newClsParam": "__new__ 覆寫應接受 \"cls\" 參數", + 
"newTypeAnyOrUnknown": "NewType 的第二個引數必須是已知的類別,不能是 Any 或 Unknown", + "newTypeBadName": "NewType 的第一個引數必須是字串常值", + "newTypeLiteral": "NewType 不能與 Literal 類型搭配使用", + "newTypeNameMismatch": "NewType 必須指派給名稱相同的變數", + "newTypeNotAClass": "預期類別為 NewType 的第二個引數", + "newTypeParamCount": "NewType 需要兩個位置引數", + "newTypeProtocolClass": "NewType 無法與結構類型 (Protocol 或 TypedDict 類別) 搭配使用", + "noOverload": "\"{name}\" 沒有任何多載符合提供的引數", + "noReturnContainsReturn": "宣告 return 類型為 \"NoReturn\" 的函式不能包含 return 陳述式", + "noReturnContainsYield": "宣告傳回類型為 \"NoReturn\" 的函式不能包含 yield 陳述式", + "noReturnReturnsNone": "宣告類型為 \"NoReturn\" 的函式不能傳回 \"None\"", + "nonDefaultAfterDefault": "非預設引數遵循預設引數", + "nonLocalInModule": "模組層級不允許 nonlocal 宣告", + "nonLocalNoBinding": "找不到 nonlocal \"{name}\" 的繫結", + "nonLocalReassignment": "\"{name}\" 在 nonlocal 宣告之前指派", + "nonLocalRedefinition": "\"{name}\" 已宣告為 nonlocal", + "noneNotCallable": "無法呼叫型別 \"None\" 的物件", + "noneNotIterable": "類型 \"None\" 的物件不能作為可疊代的值", + "noneNotSubscriptable": "型別 \"None\" 的物件不能下標", + "noneNotUsableWith": "Object of type \"None\" cannot be used with \"with\"", + "noneNotUsableWithAsync": "類型 \"None\" 的物件不能與 \"async with\" 搭配使用", + "noneOperator": "\"None\" 不支援運算子 \"{operator}\"", + "noneUnknownMember": "\"{name}\" 不是 \"None\" 的已知屬性", + "nonlocalTypeParam": "類型參數 \"{name}\" 不允許使用非 Nonlocal 繫結", + "notRequiredArgCount": "預期 \"NotRequired\" 之後為單一類型引數", + "notRequiredNotInTypedDict": "此內容中不允許 \"NotRequired\"", + "objectNotCallable": "類型 \"{type}\" 的物件無法呼叫", + "obscuredClassDeclaration": "類別宣告 \"{name}\" 已被相同名稱的宣告遮蔽", + "obscuredFunctionDeclaration": "函式宣告 \"{name}\" 被相同名稱的宣告遮蔽", + "obscuredMethodDeclaration": "方法宣告 \"{name}\" 被相同名稱的宣告遮蔽", + "obscuredParameterDeclaration": "參數宣告 \"{name}\" 被相同名稱的宣告遮蔽", + "obscuredTypeAliasDeclaration": "類型別名宣告 \"{name}\" 被相同名稱的宣告遮蔽", + "obscuredVariableDeclaration": "宣告 \"{name}\" 被相同名稱的宣告遮蔽", + "operatorLessOrGreaterDeprecated": "Python 3 中不支援運算子 \"<>\"; 請改為使用 \"!=\"", + "optionalExtraArgs": "\"Optional\" 
後面應有一個型別引數", + "orPatternIrrefutable": "無法推翻的模式僅允許作為 \"or\" 模式中的最後一個子模式", + "orPatternMissingName": "\"or\" 模式內的所有子模式都必須以相同的名稱為目標", + "overlappingKeywordArgs": "輸入的字典與關鍵字參數重疊:: {names}", + "overlappingOverload": "\"{name}\" 的多載 {obscured} 將永遠不會使用,因為它的參數與多載 {obscuredBy} 重疊", + "overloadAbstractImplMismatch": "多載必須符合實作的抽象狀態", + "overloadAbstractMismatch": "多載必須全為抽象或不抽象", + "overloadClassMethodInconsistent": "\"{name}\" 的多載不一致地使用 @classmethod", + "overloadFinalImpl": "@final 裝飾應該只套用到實作", + "overloadFinalNoImpl": "只應將第一個多載標示為 @final", + "overloadImplementationMismatch": "多載的實作與多載 {index} 的簽章不一致", + "overloadOverrideImpl": "@override 裝飾應該只套用到實作", + "overloadOverrideNoImpl": "只應將第一個多載標示為 @override", + "overloadReturnTypeMismatch": "\"{name}\" 的多載 {prevIndex} 與多載 {newIndex} 重疊,並傳回不相容的類型", + "overloadStaticMethodInconsistent": "\"{name}\" 的多載不一致地使用 @staticmethod", + "overloadWithoutImplementation": "\"{name}\" 標示為 overload,但未提供實作", + "overriddenMethodNotFound": "方法 \"{name}\" 已標示為 override,但不存在相同名稱的基底方法", + "overrideDecoratorMissing": "方法 \"{name}\" 未標示為 override,但正在覆寫類別 \"{className}\" 中的方法", + "paramAfterKwargsParam": "參數無法接在 \"**\" 參數後面", + "paramAlreadyAssigned": "已指派參數 \"{name}\"", + "paramAnnotationMissing": "參數 \"{name}\" 遺漏了型別註釋", + "paramAssignmentMismatch": "無法將型別 \"{sourceType}\" 的運算式指派給型別 \"{paramType}\" 的參數", + "paramNameMissing": "沒有名為 \"{name}\" 的參數", + "paramSpecArgsKwargsDuplicate": "ParamSpec \"{type}\" 的引數已提供", + "paramSpecArgsKwargsUsage": "ParamSpec 的 \"args\" 和 \"kwargs\" 屬性都必須出現在函式簽章內", + "paramSpecArgsMissing": "ParamSpec \"{type}\" 的引數遺失", + "paramSpecArgsUsage": "只有搭配 *args 參數使用時,ParamSpec 的 \"args\" 屬性才有效", + "paramSpecAssignedName": "ParamSpec 必須指派至名為 \"{name}\" 的變數", + "paramSpecContext": "此內容中不允許 ParamSpec", + "paramSpecDefaultNotTuple": "ParamSpec 的預設值必須是省略符號、tuple 運算式或 ParamSpec", + "paramSpecFirstArg": "應以 ParamSpec 的名稱作為第一個引數", + "paramSpecKwargsUsage": "只有搭配 **kwargs 參數使用時,ParamSpec 的 \"kwargs\" 屬性才有效", + "paramSpecNotUsedByOuterScope": 
"ParamSpec \"{name}\" 在此內容中沒有意義", + "paramSpecUnknownArg": "ParamSpec 不支援一個以上的引數", + "paramSpecUnknownMember": "\"{name}\" 不是 ParamSpec 的已知屬性", + "paramSpecUnknownParam": "\"{name}\" 對 ParamSpec 是未知的參數", + "paramTypeCovariant": "不能在參數類型中使用共變數類型變數", + "paramTypePartiallyUnknown": "參數 \"{paramName}\" 的類型部分未知", + "paramTypeUnknown": "參數 \"{paramName}\" 的類型未知", + "parenthesizedContextManagerIllegal": "Parentheses within \"with\" statement requires Python 3.9 or newer", + "patternNeverMatches": "模式永遠不會符合主體類型 \"{type}\"", + "positionArgAfterNamedArg": "位置引數不能出現在關鍵字引數之後", + "positionArgAfterUnpackedDictArg": "位置引數不能在關鍵字引數解壓縮後出現", + "positionOnlyAfterArgs": "\"*\" 參數之後不允許 Position-only 參數分隔符號", + "positionOnlyAfterKeywordOnly": "\"/\" 參數必須出現在 \"*\" 參數之前", + "positionOnlyAfterNon": "非 Position-only 參數之後不允許 Position-only 參數", + "positionOnlyFirstParam": "不允許 Position-only 參數分隔符號作為第一個參數", + "positionOnlyIncompatible": "Position-only 參數分隔符號需要 Python 3.8 或更新版本", + "privateImportFromPyTypedModule": "\"{name}\" 未從模組 \"{module}\" 匯出", + "privateUsedOutsideOfClass": "\"{name}\" 為私人,並用於宣告其所在的類別之外", + "privateUsedOutsideOfModule": "\"{name}\" 為私人,並用於宣告其所在的模組之外", + "propertyOverridden": "\"{name}\" 不正確地覆寫了類別 \"{className}\" 中相同名稱的 property", + "propertyStaticMethod": "Static methods not allowed for property getter, setter or deleter", + "protectedUsedOutsideOfClass": "\"{name}\" 受到保護,並用於其宣告所在的類別之外", + "protocolBaseClass": "Protocol 類別 \"{classType}\" 不能衍生自非 Protocol 類別 \"{baseType}\"", + "protocolBaseClassWithTypeArgs": "使用型別參數語法時,Protocol 類別不允許使用型別引數", + "protocolIllegal": "使用 \"Protocol\" 需要 Python 3.7 或更新版本", + "protocolNotAllowed": "\"Protocol\" 不能用在此內容中", + "protocolTypeArgMustBeTypeParam": "“Protocol” 的型別引數必須是型別參數", + "protocolUnsafeOverlap": "類別以不安全方式重疊 \"{name}\",且可能會在運行時間產生相符專案", + "protocolVarianceContravariant": "一般 Protocol \"{class}\" 中使用的類別變數 \"{variable}\" 必須為逆變數", + "protocolVarianceCovariant": "一般 Protocol \"{class}\" 中使用的類型變數 \"{variable}\" 必須為共變數", + 
"protocolVarianceInvariant": "一般 Protocol \"{class}\" 中使用的類別變數 \"{variable}\" 必須為不變數", + "pyrightCommentInvalidDiagnosticBoolValue": "Pyright 註解指示詞後面必須接著 \"=\",且值為 true 或 false", + "pyrightCommentInvalidDiagnosticSeverityValue": "Pyright 註解指示詞後面必須接著 \"=\",且值為 true、false、error、warning、information 或 none", + "pyrightCommentMissingDirective": "Pyright 註解後面必須接著指示詞 (basic 或 strict) 或診斷規則", + "pyrightCommentNotOnOwnLine": "用來控制檔案層級設定的 Pyright 註解必須出現在自己的行上", + "pyrightCommentUnknownDiagnosticRule": "\"{rule}\" 是 pyright 註解未知的診斷規則", + "pyrightCommentUnknownDiagnosticSeverityValue": "\"{value}\" 是 pyright 註解無效的值; 預期為 true、false、error、warning、information 或 none", + "pyrightCommentUnknownDirective": "\"{directive}\" 是 pyright 註解未知的指示詞; 預期為 \"strict\"、\"standard\" 或 \"basic\"", + "readOnlyArgCount": "\"ReadOnly\" 後面應有單一型別引數", + "readOnlyNotInTypedDict": "此內容中不允許 \"ReadOnly\"", + "recursiveDefinition": "無法判斷 \"{name}\" 型別,因為它參照了自己", + "relativeImportNotAllowed": "相對匯入不能與 \"import .a\" 格式搭配使用; 請改為使用 \"from . 
import a\"", + "requiredArgCount": "\"Required\" 後面應有單一型別引數", + "requiredNotInTypedDict": "此內容中不允許 \"Required\"", + "returnInAsyncGenerator": "Return statement with value is not allowed in async generator", + "returnInExceptionGroup": "\"except*\" 區塊中不允許 \"return\"", + "returnMissing": "宣告類型為 \"{returnType}\" 的函式必須在所有程式碼路徑上傳回值", + "returnOutsideFunction": "\"return\" 只能在函式內使用", + "returnTypeContravariant": "逆變數型別變數不能用在傳回型別中", + "returnTypeMismatch": "型別 \"{exprType}\" 無法指派給傳回型別 \"{returnType}\"", + "returnTypePartiallyUnknown": "傳回類型 \"{returnType}\" 部分未知", + "returnTypeUnknown": "傳回類型未知", + "revealLocalsArgs": "\"reveal_locals\" 呼叫不應有任何引數", + "revealLocalsNone": "此範圍中沒有任何 locals", + "revealTypeArgs": "\"reveal_type\" 呼叫預期為單一位置引數", + "revealTypeExpectedTextArg": "函式 \"reveal_type\" 的 \"expected_text\" 引數必須是 str 常值", + "revealTypeExpectedTextMismatch": "類型文字不符; 預期為 \"{expected}\",但收到 \"{received}\"", + "revealTypeExpectedTypeMismatch": "型別不符; 應為 \"{expected}\",但收到 \"{received}\"", + "selfTypeContext": "\"Self\" 在此內容中無效", + "selfTypeMetaclass": "“Self” 不能用於 Metaclass 内 (“type” 的子類別)", + "selfTypeWithTypedSelfOrCls": "\"Self\" 不能用在具有 `self` 或 `cls` 參數的函式中,其類型註釋不是 \"Self\"", + "sentinelBadName": "Sentinel 的第一個引數必須是字串常值", + "sentinelNameMismatch": "Sentinel 必須指派給名稱相同的變數", + "sentinelParamCount": "Sentinel 需要一個位置引數", + "setterGetterTypeMismatch": "Property setter 數值類型不能指派至 getter 傳回類型", + "singleOverload": "\"{name}\" 標示為多載,但遺失其他多載", + "slotsAttributeError": "未在__slots__中指定 \"{name}\"", + "slotsClassVarConflict": "\"{name}\" 與在 __slots__ 中宣告的執行個體變數衝突", + "starPatternInAsPattern": "星形模式不能與 \"as\" 目標搭配使用", + "starPatternInOrPattern": "星形模式不能在其他模式內使用 OR", + "starStarWildcardNotAllowed": "** 不能與萬用字元 \"_\" 搭配使用", + "staticClsSelfParam": "靜態方法不應採用 \"self\" 或 \"cls\" 參數", + "stringNonAsciiBytes": "位元組字串常值中不允許非 ASCII 字元", + "stringNotSubscriptable": "字串運算式不能在類型運算式中下標; 以引號括住整個運算式", + "stringUnsupportedEscape": "字串常值中不支援的逸出序列", + "stringUnterminated": "字串常值未結束", + 
"stubFileMissing": "找不到 \"{importName}\" 的 stub 檔案", + "stubUsesGetAttr": "類型 stub 檔案不完整; \"__getattr__\" 會遮蔽模組的類型錯誤", + "sublistParamsIncompatible": "Python 3.x 不支援 sublist 參數", + "superCallArgCount": "\"super\" 呼叫不應有兩個以上的引數", + "superCallFirstArg": "預期的類別類型為 \"super\" 呼叫的第一個引數,但收到 \"{type}\"", + "superCallSecondArg": "\"super\" 呼叫的第二個引數必須是衍生自 \"{type}\" 的物件或類別", + "superCallZeroArgForm": "\"super\" 呼叫的零引數形式只在方法內有效", + "superCallZeroArgFormStaticMethod": "\"super\" 呼叫的零引數形式在靜態方法內無效", + "symbolIsPossiblyUnbound": "\"{name}\" 可能未繫結", + "symbolIsUnbound": "\"{name}\" 未繫結", + "symbolIsUndefined": "\"{name}\" 未定義", + "symbolOverridden": "\"{name}\" 會覆寫類別 \"{className}\" 中相同名稱的符號", + "templateStringBytes": "範本字串常值 (t-strings) 不能是二進位", + "templateStringIllegal": "範本字串常值 (t-strings) 需要 Python 3.14 或較新的版本", + "templateStringUnicode": "範本字串常值 (t-strings) 不能是 Unicode", + "ternaryNotAllowed": "類型運算式中不允許三元運算式", + "totalOrderingMissingMethod": "類別必須定義 \"__lt__\"、\"__le__\"、\"__gt__\" 或 \"__ge__\" 其中一個,才能使用 total_ordering", + "trailingCommaInFromImport": "後置逗號不允許未使用括弧", + "tryWithoutExcept": "Try 陳述式必須至少有一個 except 或 finally 子句", + "tupleAssignmentMismatch": "無法將型別 \"{type}\" 的運算式指派至目標 tuple", + "tupleInAnnotation": "型別運算式中不允許 Tuple 運算式", + "tupleIndexOutOfRange": "索引 {index} 超過類型 {type} 的範圍", + "typeAliasIllegalExpressionForm": "類型別名定義無效的運算式格式", + "typeAliasIsRecursiveDirect": "型別別名 \"{name}\" 無法在其定義中使用它自己", + "typeAliasNotInModuleOrClass": "TypeAlias 只能在模組或類別範圍內定義", + "typeAliasRedeclared": "\"{name}\" 宣告為 TypeAlias,且只能指派一次", + "typeAliasStatementBadScope": "A type statement can be used only within a module or class scope", + "typeAliasStatementIllegal": "類型別名陳述式需要 Python 3.12 或更新版本", + "typeAliasTypeBadScope": "類型別名只能在模組或類別範圍內定義", + "typeAliasTypeBaseClass": "\"type\" 陳述式中定義的類型別名不能做為基底類別使用", + "typeAliasTypeMustBeAssigned": "TypeAliasType 必須指派給與型別別名相同的變數", + "typeAliasTypeNameArg": "TypeAliasType 的第一個引數必須是代表型別別名名稱的字串常值", + "typeAliasTypeNameMismatch": "類型別名的名稱必須與指派它的變數名稱相符", + 
"typeAliasTypeParamInvalid": "型別參數清單必須是只包含 TypeVar、TypeVarTuple 或 ParamSpec 的 tuple", + "typeAnnotationCall": "型別運算式中不允許呼叫運算式", + "typeAnnotationVariable": "型別運算式中不允許變數", + "typeAnnotationWithCallable": "\"type\" 的類型引數必須是類別; 不支援可呼叫項目", + "typeArgListExpected": "預期為 ParamSpec、省略符號或類型 list", + "typeArgListNotAllowed": "此型別引數不允許 list 運算式", + "typeArgsExpectingNone": "預期類別 \"{name}\" 沒有類型引數", + "typeArgsMismatchOne": "預期為一個類型引數,但收到 {received}", + "typeArgsMissingForAlias": "預期為一般類型別名 \"{name}\" 的類型引數", + "typeArgsMissingForClass": "應為一般類別 \"{name}\" 的型別引數", + "typeArgsTooFew": "為 \"{name}\" 提供太少類型引數; 預期為 {expected} 但收到 {received}", + "typeArgsTooMany": "已為 \"{name}\" 提供太多型別引數; 應為 {expected} 但收到 {received}", + "typeAssignmentMismatch": "型別 \"{sourceType}\" 無法指派給宣告的型別 \"{destType}\"", + "typeAssignmentMismatchWildcard": "匯入符號 \"{name}\" 具有型別 \"{sourceType}\",該型別無法指派給宣告的型別 \"{destType}\"", + "typeCallNotAllowed": "不應在類型運算式中使用 type() 呼叫", + "typeCheckOnly": "\"{name}\" 已標示為 @type_check_only,只能在型別註釋中使用", + "typeCommentDeprecated": "使用 type 註解已取代; 請改為使用 type 註釋", + "typeExpectedClass": "預期的類別,但已收到 \"{type}\"", + "typeFormArgs": "\"TypeForm\" 接受單一位置引數", + "typeGuardArgCount": "預期 \"TypeGuard\" 或 \"TypeIs\" 之後為單一類型引數", + "typeGuardParamCount": "使用者定義的類型防護函式和方法至少必須有一個輸入參數", + "typeIsReturnType": "TypeIs 的傳回類型 (\"{returnType}\") 與值參數類型 (\"{type}\") 不一致", + "typeNotAwaitable": "\"{type}\" 不可 awaitable", + "typeNotIntantiable": "\"{type}\" 不能具現化", + "typeNotIterable": "\"{type}\" 無法疊代", + "typeNotSpecializable": "無法將型別 \"{type}\" 特殊化", + "typeNotSubscriptable": "類型 \"{type}\" 的物件不能下標", + "typeNotSupportBinaryOperator": "型別 \"{leftType}\" 和 \"{rightType}\" 不支援運算子 \"{operator}\"", + "typeNotSupportBinaryOperatorBidirectional": "當預期的型別為 \"{expectedType}\" 時,型別 \"{leftType}\" 和 \"{rightType}\" 不支援運算子 \"{operator}\"", + "typeNotSupportUnaryOperator": "型別 \"{type}\" 不支援運算子 \"{operator}\"", + "typeNotSupportUnaryOperatorBidirectional": "預期的類型為 \"{expectedType}\" 時,類型 \"{type}\" 不支援運算子 
\"{operator}\"", + "typeNotUsableWith": "類型 \"{type}\" 的物件不能與 \"with\" 搭配使用,因為它未正確實作 {method}", + "typeNotUsableWithAsync": "類型 \"{type}\" 的物件不能與 \"async with\" 搭配使用,因為它未正確實作 {method}", + "typeParameterBoundNotAllowed": "界限或條件約束不能與 variadic 型別參數或 ParamSpec 一起使用", + "typeParameterConstraintTuple": "類型參數限制式必須是兩個或兩個以上類型的 Tuple", + "typeParameterExistingTypeParameter": "類型參數 \"{name}\" 已在使用中", + "typeParameterNotDeclared": "類型參數 \"{name}\" 未包含在 \"{container}\" 的類型參數清單中", + "typeParametersMissing": "至少必須指定一個類型參數", + "typePartiallyUnknown": "\"{name}\" 的類型部分未知", + "typeUnknown": "\"{name}\" 的類型未知", + "typeVarAssignedName": "TypeVar 必須指派至名為 \"{name}\" 的變數", + "typeVarAssignmentMismatch": "無法將型別 \"{type}\" 指派給型別變數 \"{name}\"", + "typeVarBoundAndConstrained": "TypeVar 不能同時繫結和限制", + "typeVarBoundGeneric": "TypeVar 繫結類型不能是一般", + "typeVarConstraintGeneric": "TypeVar 限制式類型不能是一般", + "typeVarDefaultBoundMismatch": "TypeVar 預設型別必須是繫結型別的子型別", + "typeVarDefaultConstraintMismatch": "TypeVar 預設型別必須是其中一個限制型別", + "typeVarDefaultIllegal": "型別變數預設型別需要 Python 3.13 或較新的版本", + "typeVarDefaultInvalidTypeVar": "類型參數 \"{name}\" 的預設類型參考不在範圍的一或多個類型變數", + "typeVarFirstArg": "預期為 TypeVar 的名稱作為第一個引數", + "typeVarInvalidForMemberVariable": "屬性類型不能使用範圍為 local 方法的類型變數 \"{name}\"", + "typeVarNoMember": "TypeVar \"{type}\" 沒有屬性 \"{name}\"", + "typeVarNotSubscriptable": "TypeVar \"{type}\" 不能下標", + "typeVarNotUsedByOuterScope": "類型變數 \"{name}\" 在此內容中沒有意義", + "typeVarPossiblyUnsolvable": "如果呼叫者未提供參數 \"{param}\" 的引數,則型別變數 \"{name}\" 可能無法解析", + "typeVarSingleConstraint": "TypeVar 至少必須有兩個限制類型", + "typeVarTupleConstraints": "TypeVarTuple 不能有值條件約束", + "typeVarTupleContext": "此內容中不允許 TypeVarTuple", + "typeVarTupleDefaultNotUnpacked": "TypeVarTuple 預設型別必須是未封裝的 tuple 或 TypeVarTuple", + "typeVarTupleMustBeUnpacked": "TypeVarTuple 值需要解除封裝運算子", + "typeVarTupleUnknownParam": "\"{name}\" 是 TypeVarTuple 的未知參數", + "typeVarUnknownParam": "\"{name}\" 對 TypeVar 是未知的參數", + "typeVarUsedByOuterScope": "外部領域已在使用 TypeVar 
\"{name}\"", + "typeVarUsedOnlyOnce": "TypeVar \"{name}\" 只會在一般函式簽章中出現一次", + "typeVarVariance": "TypeVar 不能同時為共變數和逆變數", + "typeVarWithDefaultFollowsVariadic": "TypeVar \"{typeVarName}\" 具有預設值,無法遵循 TypeVarTuple \"{variadicName}\"", + "typeVarWithoutDefault": "\"{name}\" 無法出現在型別參數清單中的 \"{other}\" 後面,因為它沒有預設型別", + "typeVarsNotInGenericOrProtocol": "Generic[] 或 Protocol[] 必須包含所有類型變數", + "typedDictAccess": "無法存取 TypedDict 中的項目", + "typedDictAssignedName": "TypedDict 必須指派至名為 \"{name}\" 的變數", + "typedDictBadVar": "TypedDict 類別只能包含型別註釋", + "typedDictBaseClass": "TypedDict 類別的所有基底類別也必須是 TypedDict 類別", + "typedDictBoolParam": "預期 \"{name}\" 參數的值為 True 或 False", + "typedDictClosedExtras": "基類 \"{name}\" 是限制額外專案類型的 TypedDict \"{type}\"", + "typedDictClosedFalseNonOpenBase": "基底類別 \"{name}\" 不是開頭的 TypedDict; 不允許 closed=False", + "typedDictClosedNoExtras": "基底類別 \"{name}\" 是 closed 的 TypedDict; 不允許額外項目", + "typedDictDelete": "無法刪除 TypedDict 中的項目", + "typedDictEmptyName": "TypedDict 內的名稱不可為空白", + "typedDictEntryName": "字典項目名稱預期為字串常值", + "typedDictEntryUnique": "字典內的名稱必須是唯一的", + "typedDictExtraArgs": "不支援額外的 TypedDict 引數", + "typedDictExtraItemsClosed": "TypedDict 可以使用 \"closed\" 或 \"extra_items\",但不能同時使用兩者", + "typedDictFieldNotRequiredRedefinition": "TypedDict 項目 \"{name}\" 不能重新定義為 NotRequired", + "typedDictFieldReadOnlyRedefinition": "TypedDict 項目 \"{name}\" 不能重新定義為 ReadOnly", + "typedDictFieldRequiredRedefinition": "TypedDict 項目 \"{name}\" 不能重新定義為 Required", + "typedDictFirstArg": "預期 TypedDict 類別名稱作為第一個引數", + "typedDictInClassPattern": "類別模式中不允許 TypedDict 類別", + "typedDictInitsubclassParameter": "TypedDict 不支援__init_subclass__參數 \"{name}\"", + "typedDictNotAllowed": "\"TypedDict\" 不能用在此內容中", + "typedDictSecondArgDict": "預期為 dict 或關鍵字參數作為第二個參數", + "typedDictSecondArgDictEntry": "應為簡單字典輸入項目", + "typedDictSet": "無法在 TypedDict 中指派項目", + "unaccessedClass": "未存取類別 \"{name}\"", + "unaccessedFunction": "未存取函式 \"{name}\"", + "unaccessedImport": "未存取匯入 \"{name}\"", + 
"unaccessedSymbol": "未存取 \"{name}\"", + "unaccessedVariable": "未存取變數 \"{name}\"", + "unannotatedFunctionSkipped": "因為未標註函式 \"{name}\",所以略過其分析", + "unaryOperationNotAllowed": "類型運算式中不允許一元運算子", + "unexpectedAsyncToken": "預期為 \"def\"、\"with\" 或 \"for\" 來追蹤 \"async\"", + "unexpectedEof": "未預期的 EOF", + "unexpectedExprToken": "運算式結尾未預期的權杖", + "unexpectedIndent": "未預期的縮排", + "unexpectedUnindent": "取消縮排未預期", + "unhashableDictKey": "字典索引鍵必須是可雜湊的", + "unhashableSetEntry": "Set 項目必須是可雜湊", + "uninitializedAbstractVariables": "抽象基底類別中定義的變數未在 final 類別 \"{classType}\" 中初始化", + "uninitializedInstanceVariable": "執行個體變數 \"{name}\" 未在類別本文或 __init__ 方法上初始化", + "unionForwardReferenceNotAllowed": "Union 語法不能與字串運算元搭配使用; 使用引號括住整個運算式", + "unionSyntaxIllegal": "聯集的替代語法需要 Python 3.10 或更新版本", + "unionTypeArgCount": "Union 需要兩個或多個類型引數", + "unionUnpackedTuple": "Union 不能包含未封裝的 tuple", + "unionUnpackedTypeVarTuple": "Union 不能包含未封裝的 TypeVarTuple", + "unnecessaryCast": "不必要的 \"cast\" 呼叫; 型別已是 \"{type}\"", + "unnecessaryIsInstanceAlways": "不必要的 isinstance 呼叫; \"{testType}\" 一律是 \"{classType}\" 的執行個體", + "unnecessaryIsInstanceNever": "不必要的 isinstance 呼叫; \"{testType}\" 永遠不是 \"{classType}\" 的執行個體", + "unnecessaryIsSubclassAlways": "不必要的 issubclass 呼叫; \"{testType}\"永遠是 \"{classType}\" 的子類別", + "unnecessaryIsSubclassNever": "不必要的 issubclass 呼叫; \"{testType}\"永遠不是 \"{classType}\" 的子類別", + "unnecessaryPyrightIgnore": "不必要的 \"# pyright: ignore\" 註解", + "unnecessaryPyrightIgnoreRule": "不必要的 \"# pyright: ignore\" 規則: \"{name}\"", + "unnecessaryTypeIgnore": "不必要的 \"# type: ignore\" 註解", + "unpackArgCount": "\"Unpack\" 後面應為單一型別引數", + "unpackExpectedTypeVarTuple": "預期 TypeVarTuple 或 tuple 作為 Unpack 的類型引數", + "unpackExpectedTypedDict": "應為 Unpack 的 TypedDict 型別引數", + "unpackIllegalInComprehension": "Comprehension 中不允許解壓縮作業", + "unpackInAnnotation": "類型運算式中不允許解壓縮運算子", + "unpackInDict": "字典中不允許解壓縮作業", + "unpackInSet": "Unpack operator not allowed within a set", + "unpackNotAllowed": "此內容中不允許 Unpack", + 
"unpackOperatorNotAllowed": "此內容中不允許解壓縮作業", + "unpackTuplesIllegal": "Python 3.8 之前的 Tuple 中不允許解壓縮作業", + "unpackedArgInTypeArgument": "無法在此內容中使用未封裝的引數", + "unpackedArgWithVariadicParam": "未封裝的引數無法用於 TypeVarTuple 參數", + "unpackedDictArgumentNotMapping": "** 後的引數運算式必須是具有 \"str\" 金鑰類型的對應", + "unpackedDictSubscriptIllegal": "字典解壓縮運算子在下標中不被允許", + "unpackedSubscriptIllegal": "下標中的解壓縮運算子需要 Python 3.11 或更新版本", + "unpackedTypeVarTupleExpected": "預期為解壓縮的 TypeVarTuple; 使用 Unpack[{name1}] 或 *{name2}", + "unpackedTypedDictArgument": "無法比對解壓縮的 TypedDict 引數與參數", + "unreachableCodeCondition": "未分析程式碼,因為以靜態方式將條件評估為 False", + "unreachableCodeStructure": "程式碼在結構上無法觸及", + "unreachableCodeType": "類型分析指出程式碼無法連線", + "unreachableExcept": "無法連接 Except 子句,因為例外已處理", + "unsupportedDunderAllOperation": "不支援 \"__all__\" 上的作業,因此匯出的符號清單可能不正確", + "unusedCallResult": "呼叫運算式的結果是 \"{type}\" 型別,而且未使用; 如果這是刻意的,則指派給變數 \"_\"", + "unusedCoroutine": "未使用 async 函式呼叫的結果; 使用 \"await\" 或指派結果至變數", + "unusedExpression": "未使用運算式值", + "varAnnotationIllegal": "變數的 type 註釋需要 Python 3.6 或更新版本; 使用類型註解以獲得與先前版本的相容性", + "variableFinalOverride": "變數 \"{name}\" 標示為 Final,且會覆寫類別 \"{className}\" 中相同名稱的非 Final 變數", + "variadicTypeArgsTooMany": "類型引數清單最多只能有一個解壓縮的 TypeVarTuple 或 tuple", + "variadicTypeParamTooManyAlias": "類型別名最多只能有一個 TypeVarTuple 類型參數,但收到多個 ({names})", + "variadicTypeParamTooManyClass": "一般類別最多只能有一個 TypeVarTuple 類型參數,但收到多個 ({names})", + "walrusIllegal": "運算子 \":=\" 需要 Python 3.8 或更新版本", + "walrusNotAllowed": "此內容中不允許未使用括弧括住的運算子 \":=\"", + "wildcardInFunction": "類別或函式內不允許萬用字元 import", + "wildcardLibraryImport": "不允許從程式庫 import 萬用字元", + "wildcardPatternTypePartiallyUnknown": "萬用字元模式擷取的類型部分未知", + "wildcardPatternTypeUnknown": "萬用字元模式擷取的型別不明", + "yieldFromIllegal": "使用 \"yield from\" 需要 Python 3.3 或更新版本", + "yieldFromOutsideAsync": "\"yield from\" not allowed in an async function", + "yieldOutsideFunction": "在函式或 lambda 外部不允許 \"yield\"", + "yieldWithinComprehension": "Comprehension 內不允許 \"yield\"", + 
"zeroCaseStatementsFound": "Match 陳述式必須至少包含一個 case 陳述式", + "zeroLengthTupleNotAllowed": "此內容中不允許零長度 tuple" + }, + "DiagnosticAddendum": { + "annotatedNotAllowed": "[Annotated] 特殊表單不可與執行個體和類別檢查一起使用", + "argParam": "引數對應至參數 \"{paramName}\"", + "argParamFunction": "引數對應至函式 \"{functionName}\" 中的參數 \"{paramName}\"", + "argsParamMissing": "參數 \"*{paramName}\" 沒有對應的參數", + "argsPositionOnly": "僅限位置的參數不符; 應為 {expected},但收到 {received}", + "argumentType": "引數類型為 \"{type}\"", + "argumentTypes": "引數型別: ({types})", + "assignToNone": "型別無法指派給「None」", + "asyncHelp": "您是指 \"async with\" 嗎?", + "baseClassIncompatible": "基底類別 \"{baseClass}\" 與類型 \"{type}\" 不相容", + "baseClassIncompatibleSubclass": "基底類別 \"{baseClass}\" 衍生自與類型 \"{type}\" 不相容的 \"{subclass}\"", + "baseClassOverriddenType": "基底類別 \"{baseClass}\" 提供類型 \"{type}\",其已覆寫", + "baseClassOverridesType": "基底類別 \"{baseClass}\" 以型別 \"{type}\" 來覆寫", + "bytesTypePromotions": "將 disableBytesTypePromotions 設定為 false,以啟用「bytearray」和「memoryview」的類型升級行為", + "conditionalRequiresBool": "類型 \"{operandType}\" 的方法 __bool__ 傳回類型 \"{boolReturnType}\" 而非 \"bool\"", + "dataClassFieldLocation": "欄位宣告", + "dataClassFrozen": "\"{name}\" 已凍結", + "dataProtocolUnsupported": "\"{name}\" 是個資料通訊協定", + "descriptorAccessBindingFailed": "無法為描述項類別 \"{className}\" 繫結方法 \"{name}\"", + "descriptorAccessCallFailed": "無法呼叫描述項類別 \"{className}\" 的方法 \"{name}\"", + "finalMethod": "Final 方法", + "functionParamDefaultMissing": "參數 \"{name}\" 遺漏了預設引數", + "functionParamName": "參數名稱不符: \"{destName}\" 與 \"{srcName}\"", + "functionParamPositionOnly": "僅位置參數不符;參數 \"{name}\" 不是僅限位置", + "functionReturnTypeMismatch": "函式傳回型別 \"{sourceType}\" 與型別 \"{destType}\" 不相容", + "functionTooFewParams": "函式接受太少位置參數; 預期 {expected},但收到 {received}", + "functionTooManyParams": "函式接受太多位置參數; 預期 {expected},但收到 {received}", + "genericClassNotAllowed": "執行個體或類別檢查不允許具有類型引數的泛型類型", + "incompatibleDeleter": "Property deleter 方法不相容", + "incompatibleGetter": "Property getter 方法不相容", + "incompatibleSetter": 
"Property setter 方法不相容", + "initMethodLocation": "__init__ 方法於類別 \"{type}\" 中定義", + "initMethodSignature": "__init__ 的簽章為 \"{type}\"", + "initSubclassLocation": "__init_subclass__ 方法已於類別 \"{name}\" 中定義", + "invariantSuggestionDict": "請考慮從「dict」切換到實值型別中共變數的「Mapping」", + "invariantSuggestionList": "請考慮從共變數的「list」切換到「Sequence」", + "invariantSuggestionSet": "考慮從 “set”切換到共同變數 “Container”", + "isinstanceClassNotSupported": "執行個體和類別檢查不支援 \"{type}\"", + "keyNotRequired": "\"{name}\" 在 \"{type}\" 中不是必要的索引鍵,因此存取可能會導致執行階段例外狀況", + "keyReadOnly": "\"{name}\" 是 \"{type}\" 中的唯讀金鑰", + "keyRequiredDeleted": "\"{name}\" 是必要的金鑰,因此不能刪除", + "keyUndefined": "\"{name}\" 不是 \"{type}\" 中定義的金鑰", + "kwargsParamMissing": "參數 \"**{paramName}\" 沒有對應的參數", + "listAssignmentMismatch": "類型 \"{type}\" 與目標清單不相容", + "literalAssignmentMismatch": "\"{sourceType}\" 無法指派給型別 \"{destType}\"", + "literalNotAllowed": "「Literal」特殊表單不可與執行個體和類別檢查一起使用", + "matchIsNotExhaustiveHint": "如果不需要徹底處理,請新增 \"case _: pass\"", + "matchIsNotExhaustiveType": "未處理的類型: \"{type}\"", + "memberAssignment": "無法將型別 \"{type}\" 的運算式指派給類別 \"{classType}\" 的屬性 \"{name}\"", + "memberIsAbstract": "\"{type}.{name}\" 未實作", + "memberIsAbstractMore": "和其他 {count} 人...", + "memberIsClassVarInProtocol": "\"{name}\" 定義為通訊協定中的 ClassVar", + "memberIsInitVar": "\"{name}\" 是 init-only 的欄位", + "memberIsInvariant": "\"{name}\" 為不變數,因為它可變動", + "memberIsNotClassVarInClass": "\"{name}\" 必須定義為 ClassVar,才能與通訊協定相容", + "memberIsNotClassVarInProtocol": "\"{name}\" 未定義為通訊協定中的 ClassVar", + "memberIsNotReadOnlyInProtocol": "通訊協定中的 “{name}” 不是唯讀的", + "memberIsReadOnlyInProtocol": "通訊協定中的 “{name}” 是唯讀的", + "memberIsWritableInProtocol": "\"{name}\" 在通訊協定中是可寫入的", + "memberSetClassVar": "無法透過類別執行個體指派屬性 \"{name}\",因為它是 ClassVar", + "memberTypeMismatch": "\"{name}\" 是不相容的類型", + "memberUnknown": "屬性 \"{name}\" 不明", + "metaclassConflict": "Metaclass「{metaclass1}」與「{metaclass2}」衝突", + "missingDeleter": "Property deleter 方法遺失", + "missingGetter": "Property getter 方法遺失", + 
"missingSetter": "遺漏了 property setter 方法", + "namedParamMissingInDest": "額外參數 \"{name}\"", + "namedParamMissingInSource": "遺失關鍵詞參數 \"{name}\"", + "namedParamTypeMismatch": "類型 \"{sourceType}\" 的關鍵字參數 \"{name}\" 與類型 \"{destType}\" 不相容", + "namedTupleNotAllowed": "執行個體或類別檢查無法使用 NamedTuple", + "newMethodLocation": "__new__ 方法於類別 \"{type}\" 中定義", + "newMethodSignature": "__new__ 的簽章為 \"{type}\"", + "newTypeClassNotAllowed": "使用 NewType 建立的類型不能與執行個體和類別檢查一起使用", + "noOverloadAssignable": "沒有任何多載函式符合類型 \"{type}\"", + "noneNotAllowed": "無法對執行個體或類別檢查使用 None", + "orPatternMissingName": "遺失名稱: {name}", + "overloadIndex": "多載 {index} 是最接近的相符項目", + "overloadNotAssignable": "\"{name}\" 的一或多個多載無法指派", + "overloadSignature": "多載簽章在這裡定義", + "overriddenMethod": "覆寫方法", + "overriddenSymbol": "覆寫的符號", + "overrideInvariantMismatch": "覆寫類型 \"{overrideType}\" 與基底類型 \"{baseType}\" 不同", + "overrideIsInvariant": "變數是可變動的,所以其類型是不變的", + "overrideNoOverloadMatches": "覆寫中沒有任何多載簽章與基底方法相容", + "overrideNotClassMethod": "基底方法宣告為 classmethod,但無法覆寫", + "overrideNotInstanceMethod": "基底方法已宣告為執行個體方法,但無法覆寫", + "overrideNotStaticMethod": "基底方法宣告為 staticmethod,但無法覆寫", + "overrideOverloadNoMatch": "覆寫未處理基底方法的所有多載", + "overrideOverloadOrder": "覆寫方法的多載必須與基底方法的順序相同", + "overrideParamKeywordNoDefault": "關鍵字參數 \"{name}\" 不相符: 基底參數有預設引數值,覆寫參數則否", + "overrideParamKeywordType": "關鍵字參數 \"{name}\" 型別不符: 基底參數為型別 \"{baseType}\",覆寫參數為型別 \"{overrideType}\"", + "overrideParamName": "參數 {index} 名稱不符: 基底參數名為 \"{baseName}\",覆寫參數名為 \"{overrideName}\"", + "overrideParamNameExtra": "基底中遺失參數 \"{name}\"", + "overrideParamNameMissing": "覆寫中遺漏了參數 \"{name}\"", + "overrideParamNamePositionOnly": "參數 {index} 不符: 基底參數 \"{baseName}\" 為關鍵字參數,覆寫參數為 \"position-only\"", + "overrideParamNoDefault": "參數 {index} 不相符: 基底參數有預設引數值,覆寫參數則否", + "overrideParamType": "參數 \"{index}\" 型別不符: 基底參數為型別 \"{baseType}\",覆寫參數為型別 \"{overrideType}\"", + "overridePositionalParamCount": "位置參數計數不相符; 基底方法有 {baseCount} 個,但覆寫有 {overrideCount} 個", + "overrideReturnType": 
"傳回類型不符: 基底方法會傳回類型 \"{baseType}\",覆寫會傳回類型 \"{overrideType}\"", + "overrideType": "基底類別會將類型定義為 \"{type}\"", + "paramAssignment": "參數 {index}: 類型 \"{sourceType}\" 與類型 \"{destType}\" 不相容", + "paramSpecMissingInOverride": "覆寫方法中遺漏了 ParamSpec 參數", + "paramType": "參數類型為 \"{paramType}\"", + "privateImportFromPyTypedSource": "改為從 \"{module}\" 匯入", + "propertyAccessFromProtocolClass": "通訊協定類別中定義的屬性無法存取為類別變數", + "propertyMethodIncompatible": "Property 方法 \"{name}\" 不相容", + "propertyMethodMissing": "覆寫中遺漏了 property 方法 \"{name}\"", + "propertyMissingDeleter": "Property \"{name}\" 沒有定義的 deleter", + "propertyMissingSetter": "Property \"{name}\" 沒有定義的 setter", + "protocolIncompatible": "\"{sourceType}\" 與通訊協定 \"{destType}\" 不相容", + "protocolMemberMissing": "\"{name}\" 不存在", + "protocolRequiresRuntimeCheckable": "Protocol 類別必須為 @runtime_checkable,才能搭配執行個體和類別檢查使用", + "protocolSourceIsNotConcrete": "\"{sourceType}\" 不是實體類別型別,因此無法指派給型別 \"{destType}\"", + "protocolUnsafeOverlap": "\"{name}\" 的屬性與通訊協定的名稱相同", + "pyrightCommentIgnoreTip": "使用 \"# pyright: ignore[]\" 來隱藏單行的診斷", + "readOnlyAttribute": "屬性 \"{name}\" 為唯讀", + "seeClassDeclaration": "請參閱類別宣告", + "seeDeclaration": "請參閱宣告", + "seeFunctionDeclaration": "請參閱函式宣告", + "seeMethodDeclaration": "請參閱方法宣告", + "seeParameterDeclaration": "請參閱參數宣告", + "seeTypeAliasDeclaration": "請參閱類型別名宣告", + "seeVariableDeclaration": "請參閱變數宣告", + "tupleAssignmentMismatch": "型別 \"{type}\" 與目標 tuple 不相容", + "tupleEntryTypeMismatch": "Tuple 項目 {entry} 的類型不正確", + "tupleSizeIndeterminateSrc": "Tuple 大小不符; 預期為 {expected},但收到不確定的大小", + "tupleSizeIndeterminateSrcDest": "Tuple 大小不符; 預期為 {expected} 或其他,但收到不確定的大小", + "tupleSizeMismatch": "Tuple 大小不符; 預期為 {expected},但收到 {received}", + "tupleSizeMismatchIndeterminateDest": "Tuple 大小不符; 預期為 {expected} 或其他,但收到 {received}", + "typeAliasInstanceCheck": "使用 \"type\" 陳述式建立的類型別名不能搭配執行個體和類別檢查使用", + "typeAssignmentMismatch": "型別 \"{sourceType}\" 無法指派給型別 \"{destType}\"", + "typeBound": "型別 \"{sourceType}\" 無法指派給型別變數 \"{name}\" 
的上限 \"{destType}\"", + "typeConstrainedTypeVar": "型別 \"{type}\" 無法指派給限制型別變數 \"{name}\"", + "typeIncompatible": "\"{sourceType}\" 無法指派給 \"{destType}\"", + "typeNotClass": "\"{type}\" 不是類別", + "typeNotStringLiteral": "\"{type}\" 不是字串常值", + "typeOfSymbol": "\"{name}\" 的型別為 \"{type}\"", + "typeParamSpec": "類型 \"{type}\" 與 ParamSpec \"{name}\" 不相容", + "typeUnsupported": "類型 \"{type}\" 不受支援", + "typeVarDefaultOutOfScope": "類型變數 \"{name}\" 不在範圍內", + "typeVarIsContravariant": "型別參數 \"{name}\" 具有反變性,但 \"{sourceType}\" 不是 \"{destType}\" 的超型別", + "typeVarIsCovariant": "型別參數 \"{name}\" 具有共變性,但 \"{sourceType}\" 不是 \"{destType}\" 的子型別", + "typeVarIsInvariant": "型別參數 \"{name}\" 具有不變性,但 \"{sourceType}\" 與 \"{destType}\" 不同", + "typeVarNotAllowed": "執行個體或類別檢查不允許 TypeVar", + "typeVarTupleRequiresKnownLength": "TypeVarTuple 無法繫結至長度不明的 tuple", + "typeVarUnnecessarySuggestion": "改用 {type}", + "typeVarUnsolvableRemedy": "提供多載,其指定未提供引數時的傳回類型", + "typeVarsMissing": "遺失類型變數: {names}", + "typedDictBaseClass": "類別 \"{type}\" 不是 TypedDict", + "typedDictClassNotAllowed": "執行個體或類別檢查不允許 TypedDict 類別", + "typedDictClosedExtraNotAllowed": "無法新增項目 \"{name}\"", + "typedDictClosedExtraTypeMismatch": "無法新增型別為 \"{type}\" 的項目 \"{name}\"", + "typedDictClosedFieldNotReadOnly": "無法新增項目 \"{name}\",因為它必須是 ReadOnly", + "typedDictClosedFieldNotRequired": "無法新增項目 \"{name}\",因為它必須是 NotRequired", + "typedDictExtraFieldNotAllowed": "\"{name}\" 不存在於 \"{type}\"", + "typedDictExtraFieldTypeMismatch": "\"{name}\" 的類型與 \"{type}\" 中 \"extra_items\" 的類型不相容", + "typedDictFieldMissing": "\"{type}\" 遺失 \"{name}\"", + "typedDictFieldNotReadOnly": "\"{name}\" 在 \"{type}\" 中不是唯讀", + "typedDictFieldNotRequired": "\"{type}\" 中不需要 \"{name}\"", + "typedDictFieldRequired": "\"{type}\" 中需要 \"{name}\"", + "typedDictFieldTypeMismatch": "型別 \"{type}\" 無法指派給項目 \"{name}\"", + "typedDictFieldUndefined": "\"{name}\" 是型別 \"{type}\" 中未定義的項目", + "typedDictKeyAccess": "使用 [\"{name}\"] 參考 TypedDict 中的項目", + "typedDictNotAllowed": "執行個體或類別檢查無法使用 
TypedDict", + "unhashableType": "型別 \"{type}\" 無法雜湊", + "uninitializedAbstractVariable": "執行個體變數 \"{name}\" 在抽象基底類別 \"{classType}\" 中定義,但未初始化", + "unreachableExcept": "\"{exceptionType}\" 是 \"{parentType}\" 的子類別", + "useDictInstead": "使用 dict[T1, T2] 來表示字典類型", + "useListInstead": "使用 list[T] 來指出 list 類型,或使用 T1 | T2 來指出 union 類型", + "useTupleInstead": "使用 tuple[T1, ..., Tn] 來指出 tuple 類型,或使用 T1 | T2 來指出 union 類型", + "useTypeInstead": "改為使用 type[T]", + "varianceMismatchForClass": "型別引數 \"{typeVarName}\" 的變異數與基底類別 \"{className}\" 不相容", + "varianceMismatchForTypeAlias": "型別引數 \"{typeVarName}\" 的變異數與 \"{typeAliasParam}\" 不相容" + }, + "Service": { + "longOperation": "列舉工作區來源檔案需要很長的時間。請考慮改為開啟子資料夾。[深入了解](https://aka.ms/workspace-too-many-files)" + } +} diff --git a/python-parser/packages/pyright-internal/src/nodeMain.ts b/python-parser/packages/pyright-internal/src/nodeMain.ts new file mode 100644 index 00000000..9c680d08 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/nodeMain.ts @@ -0,0 +1,22 @@ +/* + * nodeMain.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Provides the main entrypoint to the server when running in Node. + */ + +import { BackgroundAnalysisRunner } from './backgroundAnalysis'; +import { ServiceProvider } from './common/serviceProvider'; +import { run } from './nodeServer'; +import { PyrightServer } from './server'; + +export async function main(maxWorkers: number) { + await run( + (conn) => new PyrightServer(conn, maxWorkers), + () => { + const runner = new BackgroundAnalysisRunner(new ServiceProvider()); + runner.start(); + } + ); +} diff --git a/python-parser/packages/pyright-internal/src/nodeServer.ts b/python-parser/packages/pyright-internal/src/nodeServer.ts new file mode 100644 index 00000000..859c74b3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/nodeServer.ts @@ -0,0 +1,28 @@ +/* + * nodeServer.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Implements utilities for starting the language server in a node environment. + */ + +import { Connection, ConnectionOptions } from 'vscode-languageserver'; +import { createConnection } from 'vscode-languageserver/node'; +import { isMainThread } from 'worker_threads'; + +import { initializeDependencies } from './common/asyncInitialization'; +import { getCancellationStrategyFromArgv } from './common/fileBasedCancellationUtils'; + +export async function run(runServer: (connection: Connection) => void, runBackgroundThread: () => void) { + await initializeDependencies(); + + if (isMainThread) { + runServer(createConnection(getConnectionOptions())); + } else { + runBackgroundThread(); + } +} + +export function getConnectionOptions(): ConnectionOptions { + return { cancellationStrategy: getCancellationStrategyFromArgv(process.argv) }; +} diff --git a/python-parser/packages/pyright-internal/src/parser/characterStream.ts b/python-parser/packages/pyright-internal/src/parser/characterStream.ts new file mode 100644 index 00000000..a7065bff --- /dev/null +++ b/python-parser/packages/pyright-internal/src/parser/characterStream.ts @@ -0,0 +1,144 @@ +/* + * characterStream.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Based on code from vscode-python repository: + * https://github.com/Microsoft/vscode-python + * + * Class that represents a stream of characters. + */ + +import { Char } from '../common/charCodes'; +import { isLineBreak, isWhiteSpace } from './characters'; + +export class CharacterStream { + private _text: string; + private _position: number; + private _currentChar: number; + private _isEndOfStream: boolean; + + constructor(text: string) { + this._text = text; + this._position = 0; + this._currentChar = text.length > 0 ? 
text.charCodeAt(0) : 0; + this._isEndOfStream = text.length === 0; + } + + get position(): number { + return this._position; + } + + set position(value: number) { + this._position = value; + this._checkBounds(); + } + + get currentChar(): number { + return this._currentChar; + } + + get nextChar(): number { + return this.position + 1 < this._text.length ? this._text.charCodeAt(this.position + 1) : 0; + } + + get prevChar(): number { + return this.position - 1 >= 0 ? this._text.charCodeAt(this.position - 1) : 0; + } + + get length(): number { + return this._text.length; + } + + getText(): string { + return this._text; + } + + // We also expose a (non-property) method that is + // the equivalent of currentChar above. This allows + // us to work around assumptions in the TypeScript + // compiler that method calls (e.g. moveNext()) don't + // modify properties. + getCurrentChar(): number { + return this._currentChar; + } + + isEndOfStream(): boolean { + return this._isEndOfStream; + } + + lookAhead(offset: number): number { + const pos = this._position + offset; + return pos < 0 || pos >= this._text.length ? 
0 : this._text.charCodeAt(pos); + } + + advance(offset: number) { + this.position += offset; + } + + moveNext(): boolean { + if (this._position < this._text.length - 1) { + // Most common case, no need to check bounds extensively + this._position += 1; + this._currentChar = this._text.charCodeAt(this._position); + return true; + } + this.advance(1); + return !this.isEndOfStream(); + } + + isAtWhiteSpace(): boolean { + return isWhiteSpace(this.currentChar); + } + + isAtLineBreak(): boolean { + return isLineBreak(this.currentChar); + } + + skipLineBreak(): void { + if (this._currentChar === Char.CarriageReturn) { + this.moveNext(); + if (this.currentChar === Char.LineFeed) { + this.moveNext(); + } + } else if (this._currentChar === Char.LineFeed) { + this.moveNext(); + } + } + + skipWhitespace(): void { + while (!this.isEndOfStream() && this.isAtWhiteSpace()) { + this.moveNext(); + } + } + + skipToEol(): void { + while (!this.isEndOfStream() && !this.isAtLineBreak()) { + this.moveNext(); + } + } + + skipToWhitespace(): void { + while (!this.isEndOfStream() && !this.isAtWhiteSpace()) { + this.moveNext(); + } + } + + charCodeAt(index: number): number { + return this._text.charCodeAt(index); + } + + private _checkBounds(): void { + if (this._position < 0) { + this._position = 0; + } + + this._isEndOfStream = this._position >= this._text.length; + if (this._isEndOfStream) { + this._position = this._text.length; + } + + this._currentChar = this._isEndOfStream ? 0 : this._text.charCodeAt(this._position); + } +} diff --git a/python-parser/packages/pyright-internal/src/parser/characters.ts b/python-parser/packages/pyright-internal/src/parser/characters.ts new file mode 100644 index 00000000..1ed6b3df --- /dev/null +++ b/python-parser/packages/pyright-internal/src/parser/characters.ts @@ -0,0 +1,284 @@ +/* + * characters.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * Author: Eric Traut + * + * Based on code from vscode-python repository: + * https://github.com/Microsoft/vscode-python + * + * Utility routines used by tokenizer. + */ + +import { Char } from '../common/charCodes'; +import * as unicode from './unicode'; + +enum CharCategory { + // Character cannot appear in identifier + NotIdentifierChar = 0, + + // Character can appear at beginning or within identifier + StartIdentifierChar = 1, + + // Character can appear only within identifier, not at beginning + IdentifierChar = 2, + + // Character is a surrogate, meaning that additional character + // needs to be consulted. + SurrogateChar = 3, +} + +// Table of first 256 character codes (the most common cases). +const _identifierCharFastTableSize = 256; +const _identifierCharFastTable: CharCategory[] = new Array(_identifierCharFastTableSize); + +// Map of remaining characters that can appear within identifier. +type CharCategoryMap = { [code: number]: CharCategory }; +const _identifierCharMap: CharCategoryMap = {}; + +// Secondary character map based on the primary (surrogate) character. +const _surrogateCharMap: { [code: number]: CharCategoryMap } = {}; + +// We do lazy initialization of this map because it's rarely used. +let _identifierCharMapInitialized = false; + +export function isIdentifierStartChar(char: number, nextChar?: number): boolean { + if (char < _identifierCharFastTableSize) { + return _identifierCharFastTable[char] === CharCategory.StartIdentifierChar; + } + + // Lazy initialize the char map. We'll rarely get here. 
+ if (!_identifierCharMapInitialized) { + _buildIdentifierLookupTable(false); + _identifierCharMapInitialized = true; + } + + let charCategory: CharCategory; + if (nextChar !== undefined) { + charCategory = _lookUpSurrogate(char, nextChar); + } else { + charCategory = _identifierCharMap[char]; + } + + return charCategory === CharCategory.StartIdentifierChar; +} + +export function isIdentifierChar(char: number, nextChar?: number): boolean { + if (char < _identifierCharFastTableSize) { + return ( + _identifierCharFastTable[char] === CharCategory.StartIdentifierChar || + _identifierCharFastTable[char] === CharCategory.IdentifierChar + ); + } + + // Lazy initialize the char map. We'll rarely get here. + if (!_identifierCharMapInitialized) { + _buildIdentifierLookupTable(false); + _identifierCharMapInitialized = true; + } + + let charCategory: CharCategory; + if (nextChar !== undefined) { + charCategory = _lookUpSurrogate(char, nextChar); + } else { + charCategory = _identifierCharMap[char]; + } + + return charCategory === CharCategory.StartIdentifierChar || charCategory === CharCategory.IdentifierChar; +} + +export function isSurrogateChar(char: number): boolean { + if (char < _identifierCharFastTableSize) { + return false; + } + + // Lazy initialize the char map. We'll rarely get here. 
+ if (!_identifierCharMapInitialized) { + _buildIdentifierLookupTable(false); + _identifierCharMapInitialized = true; + } + + return _identifierCharMap[char] === CharCategory.SurrogateChar; +} + +export function isWhiteSpace(ch: number): boolean { + return ch === Char.Space || ch === Char.Tab || ch === Char.FormFeed; +} + +export function isLineBreak(ch: number): boolean { + return ch === Char.CarriageReturn || ch === Char.LineFeed; +} + +export function isNumber(ch: number): boolean { + return (ch >= Char._0 && ch <= Char._9) || ch === Char.Underscore; +} + +export function isDecimal(ch: number): boolean { + return (ch >= Char._0 && ch <= Char._9) || ch === Char.Underscore; +} + +export function isHex(ch: number): boolean { + return isDecimal(ch) || (ch >= Char.a && ch <= Char.f) || (ch >= Char.A && ch <= Char.F) || ch === Char.Underscore; +} + +export function isOctal(ch: number): boolean { + return (ch >= Char._0 && ch <= Char._7) || ch === Char.Underscore; +} + +export function isBinary(ch: number): boolean { + return ch === Char._0 || ch === Char._1 || ch === Char.Underscore; +} + +function _lookUpSurrogate(char: number, nextChar: number): CharCategory { + if (_identifierCharMap[char] !== CharCategory.SurrogateChar) { + return CharCategory.NotIdentifierChar; + } + + const surrogateTable = _surrogateCharMap[char]; + if (!surrogateTable) { + return CharCategory.NotIdentifierChar; + } + + return surrogateTable[nextChar]; +} + +// Underscore is explicitly allowed to start an identifier. +// Characters with the Other_ID_Start property. 
+const _specialStartIdentifierChars: unicode.UnicodeRangeTable = [ + Char.Underscore, + 0x1885, + 0x1886, + 0x2118, + 0x212e, + 0x309b, + 0x309c, +]; + +const _startIdentifierCharRanges = [ + _specialStartIdentifierChars, + unicode.unicodeLu, + unicode.unicodeLl, + unicode.unicodeLt, + unicode.unicodeLo, + unicode.unicodeLm, + unicode.unicodeNl, +]; + +const _startCharSurrogateRanges = [ + unicode.unicodeLuSurrogate, + unicode.unicodeLlSurrogate, + unicode.unicodeLoSurrogate, + unicode.unicodeLmSurrogate, + unicode.unicodeNlSurrogate, +]; + +// Characters with the Other_ID_Start property. +const _specialIdentifierChars: unicode.UnicodeRangeTable = [ + 0x00b7, 0x0387, 0x1369, 0x136a, 0x136b, 0x136c, 0x136d, 0x136e, 0x136f, 0x1370, 0x1371, 0x19da, +]; + +const _identifierCharRanges = [ + _specialIdentifierChars, + unicode.unicodeMn, + unicode.unicodeMc, + unicode.unicodeNd, + unicode.unicodePc, +]; + +const _identifierCharSurrogateRanges = [ + unicode.unicodeMnSurrogate, + unicode.unicodeMcSurrogate, + unicode.unicodeNdSurrogate, +]; + +function _buildIdentifierLookupTableFromUnicodeRangeTable( + table: unicode.UnicodeRangeTable, + category: CharCategory, + fastTableOnly: boolean, + fastTable: CharCategoryMap, + fullTable: CharCategoryMap +): void { + for (let entryIndex = 0; entryIndex < table.length; entryIndex++) { + const entry = table[entryIndex]; + let rangeStart: number; + let rangeEnd: number; + + if (Array.isArray(entry)) { + rangeStart = entry[0]; + rangeEnd = entry[1]; + } else { + rangeStart = rangeEnd = entry; + } + + for (let i = rangeStart; i <= rangeEnd; i++) { + if (i < _identifierCharFastTableSize) { + fastTable[i] = category; + } else { + fullTable[i] = category; + } + } + + if (fastTableOnly && rangeStart >= _identifierCharFastTableSize) { + break; + } + } +} + +function _buildIdentifierLookupTableFromSurrogateRangeTable( + surrogateTable: unicode.UnicodeSurrogateRangeTable, + category: CharCategory +): void { + for (const surrogateChar in 
surrogateTable) { + if (!_surrogateCharMap[surrogateChar]) { + _surrogateCharMap[surrogateChar] = {}; + _identifierCharMap[surrogateChar] = CharCategory.SurrogateChar; + } + + _buildIdentifierLookupTableFromUnicodeRangeTable( + surrogateTable[surrogateChar], + category, + /* fastTableOnly */ false, + _surrogateCharMap[surrogateChar], + _surrogateCharMap[surrogateChar] + ); + } +} + +// Build a lookup table for to speed up tokenization of identifiers. +function _buildIdentifierLookupTable(fastTableOnly: boolean): void { + _identifierCharFastTable.fill(CharCategory.NotIdentifierChar); + + _identifierCharRanges.forEach((table) => { + _buildIdentifierLookupTableFromUnicodeRangeTable( + table, + CharCategory.IdentifierChar, + fastTableOnly, + _identifierCharFastTable, + _identifierCharMap + ); + }); + + _startIdentifierCharRanges.forEach((table) => { + _buildIdentifierLookupTableFromUnicodeRangeTable( + table, + CharCategory.StartIdentifierChar, + fastTableOnly, + _identifierCharFastTable, + _identifierCharMap + ); + }); + + // Populate the surrogate tables for characters that require two + // character codes. + if (!fastTableOnly) { + for (const surrogateTable of _identifierCharSurrogateRanges) { + _buildIdentifierLookupTableFromSurrogateRangeTable(surrogateTable, CharCategory.IdentifierChar); + } + + for (const surrogateTable of _startCharSurrogateRanges) { + _buildIdentifierLookupTableFromSurrogateRangeTable(surrogateTable, CharCategory.StartIdentifierChar); + } + } +} + +_buildIdentifierLookupTable(true); diff --git a/python-parser/packages/pyright-internal/src/parser/parseNodeUtils.ts b/python-parser/packages/pyright-internal/src/parser/parseNodeUtils.ts new file mode 100644 index 00000000..82c89115 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/parser/parseNodeUtils.ts @@ -0,0 +1,161 @@ +/* + * parserNodeUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * Author: Eric Traut + * + * + * ParseNodeType is a const enum which strips out the string keys + * This file is used to map the string keys to the const enum values. + */ +import { ParseNodeType } from './parseNodes'; +import { OperatorType } from './tokenizerTypes'; + +type ParseNodeEnumStringKeys = Exclude; +type ParseNodeTypeMapType = Record; + +export const ParseNodeTypeMap: ParseNodeTypeMapType = { + Error: ParseNodeType.Error, + Argument: ParseNodeType.Argument, + Assert: ParseNodeType.Assert, + Assignment: ParseNodeType.Assignment, + AssignmentExpression: ParseNodeType.AssignmentExpression, + AugmentedAssignment: ParseNodeType.AugmentedAssignment, + Await: ParseNodeType.Await, + BinaryOperation: ParseNodeType.BinaryOperation, + Break: ParseNodeType.Break, + Call: ParseNodeType.Call, + Class: ParseNodeType.Class, + Comprehension: ParseNodeType.Comprehension, + ComprehensionFor: ParseNodeType.ComprehensionFor, + ComprehensionIf: ParseNodeType.ComprehensionIf, + Constant: ParseNodeType.Constant, + Continue: ParseNodeType.Continue, + Decorator: ParseNodeType.Decorator, + Del: ParseNodeType.Del, + Dictionary: ParseNodeType.Dictionary, + DictionaryExpandEntry: ParseNodeType.DictionaryExpandEntry, + DictionaryKeyEntry: ParseNodeType.DictionaryKeyEntry, + Ellipsis: ParseNodeType.Ellipsis, + If: ParseNodeType.If, + Import: ParseNodeType.Import, + ImportAs: ParseNodeType.ImportAs, + ImportFrom: ParseNodeType.ImportFrom, + ImportFromAs: ParseNodeType.ImportFromAs, + Index: ParseNodeType.Index, + Except: ParseNodeType.Except, + For: ParseNodeType.For, + FormatString: ParseNodeType.FormatString, + Function: ParseNodeType.Function, + Global: ParseNodeType.Global, + Lambda: ParseNodeType.Lambda, + List: ParseNodeType.List, + MemberAccess: ParseNodeType.MemberAccess, + Module: ParseNodeType.Module, + ModuleName: ParseNodeType.ModuleName, + Name: ParseNodeType.Name, + Nonlocal: ParseNodeType.Nonlocal, + Number: ParseNodeType.Number, + Parameter: ParseNodeType.Parameter, + 
Pass: ParseNodeType.Pass, + Raise: ParseNodeType.Raise, + Return: ParseNodeType.Return, + Set: ParseNodeType.Set, + Slice: ParseNodeType.Slice, + StatementList: ParseNodeType.StatementList, + StringList: ParseNodeType.StringList, + String: ParseNodeType.String, + Suite: ParseNodeType.Suite, + Ternary: ParseNodeType.Ternary, + Tuple: ParseNodeType.Tuple, + Try: ParseNodeType.Try, + TypeAnnotation: ParseNodeType.TypeAnnotation, + UnaryOperation: ParseNodeType.UnaryOperation, + Unpack: ParseNodeType.Unpack, + While: ParseNodeType.While, + With: ParseNodeType.With, + WithItem: ParseNodeType.WithItem, + Yield: ParseNodeType.Yield, + YieldFrom: ParseNodeType.YieldFrom, + FunctionAnnotation: ParseNodeType.FunctionAnnotation, + Match: ParseNodeType.Match, + Case: ParseNodeType.Case, + PatternSequence: ParseNodeType.PatternSequence, + PatternAs: ParseNodeType.PatternAs, + PatternLiteral: ParseNodeType.PatternLiteral, + PatternClass: ParseNodeType.PatternClass, + PatternCapture: ParseNodeType.PatternCapture, + PatternMapping: ParseNodeType.PatternMapping, + PatternMappingKeyEntry: ParseNodeType.PatternMappingKeyEntry, + PatternMappingExpandEntry: ParseNodeType.PatternMappingExpandEntry, + PatternValue: ParseNodeType.PatternValue, + PatternClassArgument: ParseNodeType.PatternClassArgument, + TypeParameter: ParseNodeType.TypeParameter, + TypeParameterList: ParseNodeType.TypeParameterList, + TypeAlias: ParseNodeType.TypeAlias, +}; + +export type ParseNodeTypeMapKey = keyof typeof ParseNodeTypeMap; + +export const ParseNodeTypeNameMap: Record = Object.entries( + ParseNodeTypeMap +).reduce((acc, [name, value]) => { + acc[value] = name as ParseNodeEnumStringKeys; + return acc; +}, {} as Record); + +type OperatorTypeMapType = Record; + +export const OperatorTypeMap: OperatorTypeMapType = { + '+': OperatorType.Add, + '+=': OperatorType.AddEqual, + '=': OperatorType.Assign, + '&': OperatorType.BitwiseAnd, + '&=': OperatorType.BitwiseAndEqual, + '~': OperatorType.BitwiseInvert, + '|': 
OperatorType.BitwiseOr, + '|=': OperatorType.BitwiseOrEqual, + '^': OperatorType.BitwiseXor, + '^=': OperatorType.BitwiseXorEqual, + '/': OperatorType.Divide, + '/=': OperatorType.DivideEqual, + '==': OperatorType.Equals, + '//': OperatorType.FloorDivide, + '//=': OperatorType.FloorDivideEqual, + '>': OperatorType.GreaterThan, + '>=': OperatorType.GreaterThanOrEqual, + '<<': OperatorType.LeftShift, + '<<=': OperatorType.LeftShiftEqual, + '<>': OperatorType.LessOrGreaterThan, + '<': OperatorType.LessThan, + '<=': OperatorType.LessThanOrEqual, + '@': OperatorType.MatrixMultiply, + '@=': OperatorType.MatrixMultiplyEqual, + '%': OperatorType.Mod, + '%=': OperatorType.ModEqual, + '*': OperatorType.Multiply, + '*=': OperatorType.MultiplyEqual, + '!=': OperatorType.NotEquals, + '**': OperatorType.Power, + '**=': OperatorType.PowerEqual, + '>>': OperatorType.RightShift, + '>>=': OperatorType.RightShiftEqual, + '-': OperatorType.Subtract, + '-=': OperatorType.SubtractEqual, + and: OperatorType.And, + or: OperatorType.Or, + 'not ': OperatorType.Not, + is: OperatorType.Is, + 'is not': OperatorType.IsNot, + in: OperatorType.In, + 'not in': OperatorType.NotIn, +}; + +export const OperatorTypeNameMap: Record = Object.entries( + OperatorTypeMap +).reduce((acc, [name, value]) => { + acc[value] = name as ParseNodeEnumStringKeys; + return acc; +}, {} as Record); + +export type OperatorTypeMapKey = keyof typeof OperatorTypeMap; diff --git a/python-parser/packages/pyright-internal/src/parser/parseNodes.ts b/python-parser/packages/pyright-internal/src/parser/parseNodes.ts new file mode 100644 index 00000000..bb334e53 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/parser/parseNodes.ts @@ -0,0 +1,2859 @@ +/* + * parseNodes.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Definition of parse nodes that make up the Python abstract + * syntax tree (AST). 
+ */ + +import { TextRange } from '../common/textRange'; +import { + FStringEndToken, + FStringMiddleToken, + FStringStartToken, + IdentifierToken, + KeywordToken, + KeywordType, + NumberToken, + OperatorType, + StringToken, + Token, + TokenType, +} from './tokenizerTypes'; + +export const enum ParseNodeType { + Error, // 0 + + Argument, + Assert, + Assignment, + AssignmentExpression, + AugmentedAssignment, + Await, + BinaryOperation, + Break, + Call, + + Class, // 10 + Comprehension, + ComprehensionFor, + ComprehensionIf, + Constant, + Continue, + Decorator, + Del, + Dictionary, + DictionaryExpandEntry, + + DictionaryKeyEntry, // 20 + Ellipsis, + If, + Import, + ImportAs, + ImportFrom, + ImportFromAs, + Index, + Except, + For, + + FormatString, // 30 + Function, + Global, + Lambda, + List, + MemberAccess, + Module, + ModuleName, + Name, + Nonlocal, + + Number, // 40 + Parameter, + Pass, + Raise, + Return, + Set, + Slice, + StatementList, + StringList, + String, + + Suite, // 50 + Ternary, + Tuple, + Try, + TypeAnnotation, + UnaryOperation, + Unpack, + While, + With, + WithItem, + + Yield, // 60 + YieldFrom, + FunctionAnnotation, + Match, + Case, + PatternSequence, + PatternAs, + PatternLiteral, + PatternClass, + PatternCapture, + + PatternMapping, // 70 + PatternMappingKeyEntry, + PatternMappingExpandEntry, + PatternValue, + PatternClassArgument, + TypeParameter, + TypeParameterList, + TypeAlias, +} + +export const enum ErrorExpressionCategory { + MissingIn, + MissingElse, + MissingExpression, + MissingIndexOrSlice, + MissingDecoratorCallName, + MissingCallCloseParen, + MissingIndexCloseBracket, + MissingMemberAccessName, + MissingTupleCloseParen, + MissingListCloseBracket, + MissingFunctionParameterList, + MissingPattern, + MissingPatternSubject, + MissingDictValue, + MissingKeywordArgValue, + MaxDepthExceeded, +} + +export interface ParseNodeBase { + readonly nodeType: T; + readonly start: number; + readonly length: number; + + // A unique ID given to each parse 
node. + id: number; + + parent: ParseNode | undefined; + + // A reference to information computed in later passes. + a: object | undefined; + + // Additional details that are specific to the parse node type. + d: object; +} + +let _nextNodeId = 1; +export function getNextNodeId() { + return _nextNodeId++; +} + +export function extendRange(node: ParseNodeBase, newRange: TextRange) { + const extendedRange = TextRange.extend(node, newRange); + + // Temporarily allow writes to the range fields. + (node as any).start = extendedRange.start; + (node as any).length = extendedRange.length; +} + +export type ParseNodeArray = (ParseNode | undefined)[]; + +export interface ModuleNode extends ParseNodeBase { + d: { + statements: StatementNode[]; + }; +} + +export namespace ModuleNode { + export function create(range: TextRange) { + const node: ModuleNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.Module, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { statements: [] }, + }; + + return node; + } +} + +export interface SuiteNode extends ParseNodeBase { + d: { + statements: StatementNode[]; + typeComment: StringToken | undefined; + }; +} + +export namespace SuiteNode { + export function create(range: TextRange) { + const node: SuiteNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.Suite, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + statements: [], + typeComment: undefined, + }, + }; + + return node; + } +} + +export interface IfNode extends ParseNodeBase { + d: { + firstToken: Token; + testExpr: ExpressionNode; + ifSuite: SuiteNode; + elseSuite: SuiteNode | IfNode | undefined; + }; +} + +export namespace IfNode { + export function create(ifOrElifToken: Token, testExpr: ExpressionNode, ifSuite: SuiteNode, elseSuite?: SuiteNode) { + const node: IfNode = { + start: ifOrElifToken.start, + length: ifOrElifToken.length, + nodeType: ParseNodeType.If, + id: _nextNodeId++, + parent: 
undefined, + a: undefined, + d: { + firstToken: ifOrElifToken, + testExpr, + ifSuite: ifSuite, + elseSuite: elseSuite, + }, + }; + + testExpr.parent = node; + ifSuite.parent = node; + + extendRange(node, testExpr); + extendRange(node, ifSuite); + if (elseSuite) { + extendRange(node, elseSuite); + elseSuite.parent = node; + } + + return node; + } +} + +export interface WhileNode extends ParseNodeBase { + d: { + firstToken: Token; + testExpr: ExpressionNode; + whileSuite: SuiteNode; + elseSuite?: SuiteNode | undefined; + }; +} + +export namespace WhileNode { + export function create(whileToken: Token, testExpr: ExpressionNode, whileSuite: SuiteNode) { + const node: WhileNode = { + start: whileToken.start, + length: whileToken.length, + nodeType: ParseNodeType.While, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + firstToken: whileToken, + testExpr, + whileSuite, + }, + }; + + testExpr.parent = node; + whileSuite.parent = node; + + extendRange(node, whileSuite); + + return node; + } +} + +export interface ForNode extends ParseNodeBase { + d: { + firstToken: Token; + isAsync?: boolean; + asyncToken?: Token; + targetExpr: ExpressionNode; + iterableExpr: ExpressionNode; + forSuite: SuiteNode; + elseSuite?: SuiteNode | undefined; + typeComment?: StringToken; + }; +} + +export namespace ForNode { + export function create( + forToken: Token, + targetExpr: ExpressionNode, + iterableExpr: ExpressionNode, + forSuite: SuiteNode + ) { + const node: ForNode = { + start: forToken.start, + length: forToken.length, + nodeType: ParseNodeType.For, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + firstToken: forToken, + targetExpr, + iterableExpr, + forSuite, + }, + }; + + targetExpr.parent = node; + iterableExpr.parent = node; + forSuite.parent = node; + + extendRange(node, forSuite); + + return node; + } +} + +export type ComprehensionForIfNode = ComprehensionForNode | ComprehensionIfNode; + +export interface ComprehensionForNode extends 
ParseNodeBase { + d: { + isAsync?: boolean; + asyncToken?: Token; + targetExpr: ExpressionNode; + iterableExpr: ExpressionNode; + }; +} + +export namespace ComprehensionForNode { + export function create(startToken: Token, targetExpr: ExpressionNode, iterableExpr: ExpressionNode) { + const node: ComprehensionForNode = { + start: startToken.start, + length: startToken.length, + nodeType: ParseNodeType.ComprehensionFor, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + targetExpr, + iterableExpr, + }, + }; + + targetExpr.parent = node; + iterableExpr.parent = node; + + extendRange(node, targetExpr); + extendRange(node, iterableExpr); + + return node; + } +} + +export interface ComprehensionIfNode extends ParseNodeBase { + d: { + testExpr: ExpressionNode; + }; +} + +export namespace ComprehensionIfNode { + export function create(ifToken: Token, testExpr: ExpressionNode) { + const node: ComprehensionIfNode = { + start: ifToken.start, + length: ifToken.length, + nodeType: ParseNodeType.ComprehensionIf, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { testExpr }, + }; + + testExpr.parent = node; + + extendRange(node, testExpr); + + return node; + } +} + +export interface TryNode extends ParseNodeBase { + d: { + firstToken: Token; + trySuite: SuiteNode; + exceptClauses: ExceptNode[]; + elseSuite?: SuiteNode | undefined; + finallySuite?: SuiteNode | undefined; + }; +} + +export namespace TryNode { + export function create(tryToken: Token, trySuite: SuiteNode) { + const node: TryNode = { + start: tryToken.start, + length: tryToken.length, + nodeType: ParseNodeType.Try, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + firstToken: tryToken, + trySuite: trySuite, + exceptClauses: [], + }, + }; + + trySuite.parent = node; + + extendRange(node, trySuite); + + return node; + } +} + +export interface ExceptNode extends ParseNodeBase { + d: { + typeExpr?: ExpressionNode | undefined; + name?: NameNode | undefined; + exceptSuite: 
SuiteNode; + isExceptGroup: boolean; + exceptToken: Token; + }; +} + +export namespace ExceptNode { + export function create(exceptToken: Token, exceptSuite: SuiteNode, isExceptGroup: boolean) { + const node: ExceptNode = { + start: exceptToken.start, + length: exceptToken.length, + nodeType: ParseNodeType.Except, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + exceptSuite: exceptSuite, + isExceptGroup: isExceptGroup, + exceptToken, + }, + }; + + exceptSuite.parent = node; + + extendRange(node, exceptSuite); + + return node; + } +} + +export interface FunctionNode extends ParseNodeBase { + d: { + firstToken: Token; + decorators: DecoratorNode[]; + isAsync: boolean; + name: NameNode; + typeParams: TypeParameterListNode | undefined; + params: ParameterNode[]; + returnAnnotation: ExpressionNode | undefined; + funcAnnotationComment: FunctionAnnotationNode | undefined; + suite: SuiteNode; + }; +} + +export namespace FunctionNode { + export function create(defToken: Token, name: NameNode, suite: SuiteNode, typeParams?: TypeParameterListNode) { + const node: FunctionNode = { + start: defToken.start, + length: defToken.length, + nodeType: ParseNodeType.Function, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + firstToken: defToken, + decorators: [], + isAsync: false, + name: name, + typeParams, + params: [], + returnAnnotation: undefined, + funcAnnotationComment: undefined, + suite: suite, + }, + }; + + name.parent = node; + suite.parent = node; + + if (typeParams) { + typeParams.parent = node; + } + + extendRange(node, suite); + + return node; + } +} + +export const enum ParamCategory { + Simple, + ArgsList, + KwargsDict, +} + +export interface ParameterNode extends ParseNodeBase { + d: { + category: ParamCategory; + name: NameNode | undefined; + annotation: ExpressionNode | undefined; + annotationComment: ExpressionNode | undefined; + defaultValue: ExpressionNode | undefined; + }; +} + +export namespace ParameterNode { + export 
function create(startToken: Token, paramCategory: ParamCategory) { + const node: ParameterNode = { + start: startToken.start, + length: startToken.length, + nodeType: ParseNodeType.Parameter, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + category: paramCategory, + name: undefined, + annotation: undefined, + annotationComment: undefined, + defaultValue: undefined, + }, + }; + + return node; + } +} + +export interface ClassNode extends ParseNodeBase { + d: { + firstToken: Token; + decorators: DecoratorNode[]; + name: NameNode; + typeParams: TypeParameterListNode | undefined; + arguments: ArgumentNode[]; + suite: SuiteNode; + }; +} + +export namespace ClassNode { + export function create(classToken: Token, name: NameNode, suite: SuiteNode, typeParams?: TypeParameterListNode) { + const node: ClassNode = { + start: classToken.start, + length: classToken.length, + nodeType: ParseNodeType.Class, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + firstToken: classToken, + decorators: [], + name: name, + typeParams, + arguments: [], + suite: suite, + }, + }; + + name.parent = node; + suite.parent = node; + + if (typeParams) { + typeParams.parent = node; + } + + extendRange(node, suite); + + return node; + } + + // This variant is used to create a dummy class + // when the parser encounters decorators with no + // function or class declaration. 
+ export function createDummyForDecorators(decorators: DecoratorNode[]) { + const node: ClassNode = { + start: decorators[0].start, + length: 0, + nodeType: ParseNodeType.Class, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + firstToken: { + type: TokenType.Keyword, + start: 0, + length: 0, + comments: [], + }, + decorators, + name: { + start: decorators[0].start, + length: 0, + nodeType: ParseNodeType.Name, + id: 0, + parent: undefined, + a: undefined, + d: { + token: { + type: TokenType.Identifier, + start: 0, + length: 0, + comments: [], + value: '', + }, + value: '', + }, + }, + typeParams: undefined, + arguments: [], + suite: { + start: decorators[0].start, + length: 0, + nodeType: ParseNodeType.Suite, + id: 0, + parent: undefined, + a: undefined, + d: { statements: [], typeComment: undefined }, + }, + }, + }; + + decorators.forEach((decorator) => { + decorator.parent = node; + extendRange(node, decorator); + }); + + node.d.name.parent = node; + node.d.suite.parent = node; + + return node; + } +} + +export interface WithNode extends ParseNodeBase { + d: { + firstToken: Token; + isAsync?: boolean; + asyncToken?: Token; + withItems: WithItemNode[]; + suite: SuiteNode; + typeComment?: StringToken; + }; +} + +export namespace WithNode { + export function create(withToken: Token, suite: SuiteNode) { + const node: WithNode = { + start: withToken.start, + length: withToken.length, + nodeType: ParseNodeType.With, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + firstToken: withToken, + withItems: [], + suite: suite, + }, + }; + + suite.parent = node; + + extendRange(node, suite); + + return node; + } +} + +export interface WithItemNode extends ParseNodeBase { + d: { + expr: ExpressionNode; + target?: ExpressionNode | undefined; + }; +} + +export namespace WithItemNode { + export function create(expr: ExpressionNode) { + const node: WithItemNode = { + start: expr.start, + length: expr.length, + nodeType: ParseNodeType.WithItem, + 
id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { expr }, + }; + + expr.parent = node; + + return node; + } +} + +export interface DecoratorNode extends ParseNodeBase { + d: { + expr: ExpressionNode; + }; +} + +export namespace DecoratorNode { + export function create(atToken: Token, expr: ExpressionNode) { + const node: DecoratorNode = { + start: atToken.start, + length: atToken.length, + nodeType: ParseNodeType.Decorator, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { expr }, + }; + + expr.parent = node; + + extendRange(node, expr); + + return node; + } +} + +export interface StatementListNode extends ParseNodeBase { + d: { + firstToken: Token; + statements: ParseNode[]; + }; +} + +export namespace StatementListNode { + export function create(atToken: Token) { + const node: StatementListNode = { + start: atToken.start, + length: atToken.length, + nodeType: ParseNodeType.StatementList, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { firstToken: atToken, statements: [] }, + }; + + return node; + } +} + +export type StatementNode = + | IfNode + | WhileNode + | ForNode + | TryNode + | FunctionNode + | ClassNode + | WithNode + | StatementListNode + | MatchNode + | TypeAliasNode + | ErrorNode; + +export type SmallStatementNode = + | ExpressionNode + | DelNode + | PassNode + | ImportNode + | GlobalNode + | NonlocalNode + | AssertNode; + +export type ExpressionNode = + | ErrorNode + | UnaryOperationNode + | BinaryOperationNode + | AssignmentNode + | TypeAnnotationNode + | AssignmentExpressionNode + | AugmentedAssignmentNode + | AwaitNode + | TernaryNode + | UnpackNode + | TupleNode + | CallNode + | ComprehensionNode + | IndexNode + | SliceNode + | YieldNode + | YieldFromNode + | MemberAccessNode + | LambdaNode + | NameNode + | ConstantNode + | EllipsisNode + | NumberNode + | StringNode + | FormatStringNode + | StringListNode + | DictionaryNode + | ListNode + | SetNode; + +export function isExpressionNode(node: ParseNode): 
node is ExpressionNode { + switch (node.nodeType) { + case ParseNodeType.Error: + case ParseNodeType.UnaryOperation: + case ParseNodeType.BinaryOperation: + case ParseNodeType.AssignmentExpression: + case ParseNodeType.TypeAnnotation: + case ParseNodeType.Await: + case ParseNodeType.Ternary: + case ParseNodeType.Unpack: + case ParseNodeType.Tuple: + case ParseNodeType.Call: + case ParseNodeType.Comprehension: + case ParseNodeType.Index: + case ParseNodeType.Slice: + case ParseNodeType.Yield: + case ParseNodeType.YieldFrom: + case ParseNodeType.MemberAccess: + case ParseNodeType.Lambda: + case ParseNodeType.Name: + case ParseNodeType.Constant: + case ParseNodeType.Ellipsis: + case ParseNodeType.Number: + case ParseNodeType.String: + case ParseNodeType.FormatString: + case ParseNodeType.StringList: + case ParseNodeType.Dictionary: + case ParseNodeType.List: + case ParseNodeType.Set: + return true; + + default: + return false; + } +} + +export interface ErrorNode extends ParseNodeBase { + d: { + readonly category: ErrorExpressionCategory; + readonly child: ExpressionNode | undefined; + readonly decorators?: DecoratorNode[] | undefined; + }; +} + +export namespace ErrorNode { + export function create( + initialRange: TextRange, + category: ErrorExpressionCategory, + child?: ExpressionNode, + decorators?: DecoratorNode[] + ) { + const node: ErrorNode = { + start: initialRange.start, + length: initialRange.length, + nodeType: ParseNodeType.Error, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + category, + child, + decorators, + }, + }; + + if (child) { + child.parent = node; + extendRange(node, child); + } + + if (decorators) { + decorators.forEach((decorator) => { + decorator.parent = node; + }); + + if (decorators.length > 0) { + extendRange(node, decorators[0]); + } + } + + return node; + } +} + +export interface UnaryOperationNode extends ParseNodeBase { + d: { + expr: ExpressionNode; + operatorToken: Token; + operator: OperatorType; + hasParens: 
boolean; + }; +} + +export namespace UnaryOperationNode { + export function create(operatorToken: Token, expr: ExpressionNode, operator: OperatorType) { + const node: UnaryOperationNode = { + start: operatorToken.start, + length: operatorToken.length, + nodeType: ParseNodeType.UnaryOperation, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + operator, + operatorToken, + expr, + hasParens: false, + }, + }; + + expr.parent = node; + + extendRange(node, expr); + + return node; + } +} + +export interface BinaryOperationNode extends ParseNodeBase { + d: { + leftExpr: ExpressionNode; + operatorToken: Token; + operator: OperatorType; + rightExpr: ExpressionNode; + hasParens: boolean; + }; +} + +export namespace BinaryOperationNode { + export function create( + leftExpr: ExpressionNode, + rightExpr: ExpressionNode, + operatorToken: Token, + operator: OperatorType + ) { + const node: BinaryOperationNode = { + start: leftExpr.start, + length: leftExpr.length, + nodeType: ParseNodeType.BinaryOperation, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + leftExpr, + operatorToken, + operator, + rightExpr, + hasParens: false, + }, + }; + + leftExpr.parent = node; + rightExpr.parent = node; + + extendRange(node, rightExpr); + + return node; + } +} + +export interface AssignmentExpressionNode extends ParseNodeBase { + d: { + name: NameNode; + walrusToken: Token; + rightExpr: ExpressionNode; + hasParens: boolean; + }; +} + +export namespace AssignmentExpressionNode { + export function create(name: NameNode, walrusToken: Token, rightExpr: ExpressionNode) { + const node: AssignmentExpressionNode = { + start: name.start, + length: name.length, + nodeType: ParseNodeType.AssignmentExpression, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + name, + walrusToken, + rightExpr, + hasParens: false, + }, + }; + + name.parent = node; + rightExpr.parent = node; + + extendRange(node, rightExpr); + + return node; + } +} + +export interface 
AssignmentNode extends ParseNodeBase { + d: { + leftExpr: ExpressionNode; + rightExpr: ExpressionNode; + annotationComment?: ExpressionNode | undefined; + chainedAnnotationComment?: ExpressionNode | undefined; + }; +} + +export namespace AssignmentNode { + export function create(leftExpr: ExpressionNode, rightExpr: ExpressionNode) { + const node: AssignmentNode = { + start: leftExpr.start, + length: leftExpr.length, + nodeType: ParseNodeType.Assignment, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + leftExpr, + rightExpr, + }, + }; + + leftExpr.parent = node; + rightExpr.parent = node; + + extendRange(node, rightExpr); + + return node; + } +} + +export enum TypeParamKind { + TypeVar, + TypeVarTuple, + ParamSpec, +} + +export interface TypeParameterNode extends ParseNodeBase { + d: { + name: NameNode; + typeParamKind: TypeParamKind; + boundExpr?: ExpressionNode; + defaultExpr?: ExpressionNode; + }; +} + +export namespace TypeParameterNode { + export function create( + name: NameNode, + typeParamKind: TypeParamKind, + boundExpr?: ExpressionNode, + defaultExpr?: ExpressionNode + ) { + const node: TypeParameterNode = { + start: name.start, + length: name.length, + nodeType: ParseNodeType.TypeParameter, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + name, + typeParamKind, + boundExpr, + defaultExpr, + }, + }; + + name.parent = node; + + if (boundExpr) { + boundExpr.parent = node; + extendRange(node, boundExpr); + } + + if (defaultExpr) { + defaultExpr.parent = node; + extendRange(node, defaultExpr); + } + + return node; + } +} + +export interface TypeParameterListNode extends ParseNodeBase { + d: { + params: TypeParameterNode[]; + }; +} + +export namespace TypeParameterListNode { + export function create(startToken: Token, endToken: Token, params: TypeParameterNode[]) { + const node: TypeParameterListNode = { + start: startToken.start, + length: startToken.length, + nodeType: ParseNodeType.TypeParameterList, + id: _nextNodeId++, + 
parent: undefined, + a: undefined, + d: { params }, + }; + + extendRange(node, endToken); + + params.forEach((param) => { + extendRange(node, param); + param.parent = node; + }); + + return node; + } +} + +export interface TypeAliasNode extends ParseNodeBase { + d: { + firstToken: Token; + name: NameNode; + typeParams?: TypeParameterListNode; + expr: ExpressionNode; + }; +} + +export namespace TypeAliasNode { + export function create( + typeToken: KeywordToken, + name: NameNode, + expr: ExpressionNode, + typeParams?: TypeParameterListNode + ) { + const node: TypeAliasNode = { + start: typeToken.start, + length: typeToken.length, + nodeType: ParseNodeType.TypeAlias, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + firstToken: typeToken, + name, + typeParams, + expr, + }, + }; + + name.parent = node; + expr.parent = node; + + if (typeParams) { + typeParams.parent = node; + } + + extendRange(node, expr); + + return node; + } +} + +export interface TypeAnnotationNode extends ParseNodeBase { + d: { + valueExpr: ExpressionNode; + annotation: ExpressionNode; + }; +} + +export namespace TypeAnnotationNode { + export function create(valueExpr: ExpressionNode, annotation: ExpressionNode) { + const node: TypeAnnotationNode = { + start: valueExpr.start, + length: valueExpr.length, + nodeType: ParseNodeType.TypeAnnotation, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + valueExpr, + annotation, + }, + }; + + valueExpr.parent = node; + annotation.parent = node; + + extendRange(node, annotation); + + return node; + } +} + +export interface FunctionAnnotationNode extends ParseNodeBase { + d: { + isEllipsis: boolean; + paramAnnotations: ExpressionNode[]; + returnAnnotation: ExpressionNode; + }; +} + +export namespace FunctionAnnotationNode { + export function create( + openParenToken: Token, + isEllipsis: boolean, + paramAnnotations: ExpressionNode[], + returnAnnotation: ExpressionNode + ) { + const node: FunctionAnnotationNode = { + start: 
openParenToken.start, + length: openParenToken.length, + nodeType: ParseNodeType.FunctionAnnotation, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + isEllipsis, + paramAnnotations, + returnAnnotation, + }, + }; + + paramAnnotations.forEach((p) => { + p.parent = node; + }); + returnAnnotation.parent = node; + + extendRange(node, returnAnnotation); + + return node; + } +} + +export interface AugmentedAssignmentNode extends ParseNodeBase { + d: { + leftExpr: ExpressionNode; + operator: OperatorType; + rightExpr: ExpressionNode; + + // The destExpression is a copy of the leftExpression + // node. We use it as a place to hang the result type, + // as opposed to the source type. + destExpr: ExpressionNode; + }; +} + +export namespace AugmentedAssignmentNode { + export function create( + leftExpr: ExpressionNode, + rightExpr: ExpressionNode, + operator: OperatorType, + destExpr: ExpressionNode + ) { + const node: AugmentedAssignmentNode = { + start: leftExpr.start, + length: leftExpr.length, + nodeType: ParseNodeType.AugmentedAssignment, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + leftExpr, + operator, + rightExpr, + destExpr, + }, + }; + + leftExpr.parent = node; + rightExpr.parent = node; + destExpr.parent = node; + + extendRange(node, rightExpr); + + return node; + } +} + +export interface AwaitNode extends ParseNodeBase { + d: { + expr: ExpressionNode; + awaitToken: Token; + hasParens: boolean; + }; +} + +export namespace AwaitNode { + export function create(awaitToken: Token, expr: ExpressionNode) { + const node: AwaitNode = { + start: awaitToken.start, + length: awaitToken.length, + nodeType: ParseNodeType.Await, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { expr, awaitToken, hasParens: false }, + }; + + expr.parent = node; + + extendRange(node, expr); + + return node; + } +} + +export interface TernaryNode extends ParseNodeBase { + d: { + ifExpr: ExpressionNode; + testExpr: ExpressionNode; + elseExpr: 
ExpressionNode; + }; +} + +export namespace TernaryNode { + export function create(ifExpr: ExpressionNode, testExpr: ExpressionNode, elseExpr: ExpressionNode) { + const node: TernaryNode = { + start: ifExpr.start, + length: ifExpr.length, + nodeType: ParseNodeType.Ternary, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + ifExpr, + testExpr, + elseExpr, + }, + }; + + ifExpr.parent = node; + testExpr.parent = node; + elseExpr.parent = node; + + extendRange(node, elseExpr); + + return node; + } +} + +export interface UnpackNode extends ParseNodeBase { + d: { + expr: ExpressionNode; + starToken: Token; + }; +} + +export namespace UnpackNode { + export function create(starToken: Token, expr: ExpressionNode) { + const node: UnpackNode = { + start: starToken.start, + length: starToken.length, + nodeType: ParseNodeType.Unpack, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + expr, + starToken, + }, + }; + + expr.parent = node; + + extendRange(node, expr); + + return node; + } +} + +export interface TupleNode extends ParseNodeBase { + d: { + items: ExpressionNode[]; + hasParens: boolean; + }; +} + +export namespace TupleNode { + export function create(range: TextRange, hasParens: boolean) { + const node: TupleNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.Tuple, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + items: [], + hasParens, + }, + }; + + return node; + } +} + +export interface CallNode extends ParseNodeBase { + d: { + leftExpr: ExpressionNode; + args: ArgumentNode[]; + trailingComma: boolean; + }; +} + +export namespace CallNode { + export function create(leftExpr: ExpressionNode, args: ArgumentNode[], trailingComma: boolean) { + const node: CallNode = { + start: leftExpr.start, + length: leftExpr.length, + nodeType: ParseNodeType.Call, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + leftExpr, + args, + trailingComma, + }, + }; + + leftExpr.parent = node; + + 
if (args.length > 0) { + args.forEach((arg) => { + arg.parent = node; + }); + extendRange(node, args[args.length - 1]); + } + + return node; + } +} + +export interface ComprehensionNode extends ParseNodeBase { + d: { + expr: ParseNode; + forIfNodes: ComprehensionForIfNode[]; + isGenerator: boolean; + hasParens: boolean; + }; +} + +export namespace ComprehensionNode { + export function create(expr: ParseNode, isGenerator: boolean) { + const node: ComprehensionNode = { + start: expr.start, + length: expr.length, + nodeType: ParseNodeType.Comprehension, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + expr, + forIfNodes: [], + isGenerator, + hasParens: false, + }, + }; + + expr.parent = node; + + return node; + } +} + +export interface IndexNode extends ParseNodeBase { + d: { + leftExpr: ExpressionNode; + items: ArgumentNode[]; + trailingComma: boolean; + }; +} + +export namespace IndexNode { + export function create( + leftExpr: ExpressionNode, + items: ArgumentNode[], + trailingComma: boolean, + closeBracketToken: Token + ) { + const node: IndexNode = { + start: leftExpr.start, + length: leftExpr.length, + nodeType: ParseNodeType.Index, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + leftExpr, + items, + trailingComma, + }, + }; + + leftExpr.parent = node; + items.forEach((item) => { + item.parent = node; + }); + + extendRange(node, closeBracketToken); + + return node; + } +} + +export interface SliceNode extends ParseNodeBase { + d: { + startValue?: ExpressionNode | undefined; + endValue?: ExpressionNode | undefined; + stepValue?: ExpressionNode | undefined; + }; +} + +export namespace SliceNode { + export function create(range: TextRange) { + const node: SliceNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.Slice, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: {}, + }; + + return node; + } +} + +export interface YieldNode extends ParseNodeBase { + d: { + expr?: ExpressionNode | 
undefined; + }; +} + +export namespace YieldNode { + export function create(yieldToken: Token, expr?: ExpressionNode) { + const node: YieldNode = { + start: yieldToken.start, + length: yieldToken.length, + nodeType: ParseNodeType.Yield, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { expr }, + }; + + if (expr) { + expr.parent = node; + extendRange(node, expr); + } + + return node; + } +} + +export interface YieldFromNode extends ParseNodeBase { + d: { + expr: ExpressionNode; + }; +} + +export namespace YieldFromNode { + export function create(yieldToken: Token, expr: ExpressionNode) { + const node: YieldFromNode = { + start: yieldToken.start, + length: yieldToken.length, + nodeType: ParseNodeType.YieldFrom, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { expr }, + }; + + expr.parent = node; + + extendRange(node, expr); + + return node; + } +} + +export interface MemberAccessNode extends ParseNodeBase { + d: { + leftExpr: ExpressionNode; + member: NameNode; + }; +} + +export namespace MemberAccessNode { + export function create(leftExpr: ExpressionNode, member: NameNode) { + const node: MemberAccessNode = { + start: leftExpr.start, + length: leftExpr.length, + nodeType: ParseNodeType.MemberAccess, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + leftExpr: leftExpr, + member: member, + }, + }; + + leftExpr.parent = node; + member.parent = node; + + extendRange(node, member); + + return node; + } +} + +export interface LambdaNode extends ParseNodeBase { + d: { + params: ParameterNode[]; + expr: ExpressionNode; + }; +} + +export namespace LambdaNode { + export function create(lambdaToken: Token, expr: ExpressionNode) { + const node: LambdaNode = { + start: lambdaToken.start, + length: lambdaToken.length, + nodeType: ParseNodeType.Lambda, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + params: [], + expr, + }, + }; + + expr.parent = node; + + extendRange(node, expr); + + return node; + } +} + +export 
interface NameNode extends ParseNodeBase { + d: { + token: IdentifierToken; + value: string; + }; +} + +export namespace NameNode { + export function create(nameToken: IdentifierToken) { + const node: NameNode = { + start: nameToken.start, + length: nameToken.length, + nodeType: ParseNodeType.Name, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + token: nameToken, + value: nameToken.value, + }, + }; + + return node; + } +} + +export interface ConstantNode extends ParseNodeBase { + d: { + constType: KeywordType; + }; +} + +export namespace ConstantNode { + export function create(token: KeywordToken) { + const node: ConstantNode = { + start: token.start, + length: token.length, + nodeType: ParseNodeType.Constant, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { constType: token.keywordType }, + }; + + return node; + } +} + +export interface EllipsisNode extends ParseNodeBase {} + +export namespace EllipsisNode { + export function create(range: TextRange) { + const node: EllipsisNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.Ellipsis, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: {}, + }; + + return node; + } +} + +export interface NumberNode extends ParseNodeBase { + d: { + value: number | bigint; + isInteger: boolean; + isImaginary: boolean; + }; +} + +export namespace NumberNode { + export function create(token: NumberToken) { + const node: NumberNode = { + start: token.start, + length: token.length, + nodeType: ParseNodeType.Number, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + value: token.value, + isInteger: token.isInteger, + isImaginary: token.isImaginary, + }, + }; + + return node; + } +} + +export interface StringNode extends ParseNodeBase { + d: { + token: StringToken; + value: string; + }; +} + +export namespace StringNode { + export function create(token: StringToken, value: string) { + const node: StringNode = { + start: token.start, + length: 
token.length, + nodeType: ParseNodeType.String, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + token, + value, + }, + }; + + return node; + } +} + +// Used for both f-strings and t-strings +export interface FormatStringNode extends ParseNodeBase { + d: { + token: FStringStartToken; + middleTokens: FStringMiddleToken[]; + fieldExprs: ExpressionNode[]; + formatExprs: ExpressionNode[]; + + // Include a dummy "value" to simplify other code. + value: ''; + }; +} + +export namespace FormatStringNode { + export function create( + startToken: FStringStartToken, + endToken: FStringEndToken | undefined, + middleTokens: FStringMiddleToken[], + fieldExprs: ExpressionNode[], + formatExprs: ExpressionNode[] + ) { + const node: FormatStringNode = { + start: startToken.start, + length: startToken.length, + nodeType: ParseNodeType.FormatString, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + token: startToken, + middleTokens, + fieldExprs, + formatExprs, + value: '', + }, + }; + + fieldExprs.forEach((expr) => { + expr.parent = node; + extendRange(node, expr); + }); + + if (formatExprs) { + formatExprs.forEach((expr) => { + expr.parent = node; + extendRange(node, expr); + }); + } + + if (endToken) { + extendRange(node, endToken); + } + + return node; + } +} + +export interface StringListNode extends ParseNodeBase { + d: { + strings: (StringNode | FormatStringNode)[]; + + // If strings are found within the context of + // a type annotation, they are further parsed + // into an expression. + annotation: ExpressionNode | undefined; + + // Indicates that the string list is enclosed in parens. 
+ hasParens: boolean; + }; +} + +export namespace StringListNode { + export function create(strings: (StringNode | FormatStringNode)[]) { + const node: StringListNode = { + start: strings[0].start, + length: strings[0].length, + nodeType: ParseNodeType.StringList, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + strings, + annotation: undefined, + hasParens: false, + }, + }; + + if (strings.length > 0) { + strings.forEach((str) => { + str.parent = node; + }); + extendRange(node, strings[strings.length - 1]); + } + + return node; + } +} + +export interface DictionaryNode extends ParseNodeBase { + d: { + items: DictionaryEntryNode[]; + trailingCommaToken: Token | undefined; + }; +} + +export namespace DictionaryNode { + export function create(range: TextRange) { + const node: DictionaryNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.Dictionary, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + items: [], + trailingCommaToken: undefined, + }, + }; + + return node; + } +} + +export interface DictionaryKeyEntryNode extends ParseNodeBase { + d: { + keyExpr: ExpressionNode; + valueExpr: ExpressionNode; + }; +} + +export namespace DictionaryKeyEntryNode { + export function create(keyExpr: ExpressionNode, valueExpr: ExpressionNode) { + const node: DictionaryKeyEntryNode = { + start: keyExpr.start, + length: keyExpr.length, + nodeType: ParseNodeType.DictionaryKeyEntry, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + keyExpr, + valueExpr, + }, + }; + + keyExpr.parent = node; + valueExpr.parent = node; + + extendRange(node, valueExpr); + + return node; + } +} + +export interface DictionaryExpandEntryNode extends ParseNodeBase { + d: { + expr: ExpressionNode; + }; +} + +export namespace DictionaryExpandEntryNode { + export function create(expr: ExpressionNode) { + const node: DictionaryExpandEntryNode = { + start: expr.start, + length: expr.length, + nodeType: 
ParseNodeType.DictionaryExpandEntry, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { expr }, + }; + + expr.parent = node; + + return node; + } +} + +export type DictionaryEntryNode = DictionaryKeyEntryNode | DictionaryExpandEntryNode | ComprehensionNode; + +export interface SetNode extends ParseNodeBase { + d: { items: ExpressionNode[] }; +} + +export namespace SetNode { + export function create(range: TextRange) { + const node: SetNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.Set, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { items: [] }, + }; + + return node; + } +} + +export interface ListNode extends ParseNodeBase { + d: { + items: ExpressionNode[]; + }; +} + +export namespace ListNode { + export function create(range: TextRange) { + const node: ListNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.List, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { items: [] }, + }; + + return node; + } +} + +export const enum ArgCategory { + Simple, + UnpackedList, + UnpackedDictionary, +} + +export interface ArgumentNode extends ParseNodeBase { + d: { + argCategory: ArgCategory; + name: NameNode | undefined; + valueExpr: ExpressionNode; + }; +} + +export namespace ArgumentNode { + export function create(startToken: Token | undefined, valueExpr: ExpressionNode, argCategory: ArgCategory) { + const node: ArgumentNode = { + start: startToken ? startToken.start : valueExpr.start, + length: startToken ? 
startToken.length : valueExpr.length, + nodeType: ParseNodeType.Argument, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + argCategory, + name: undefined, + valueExpr, + }, + }; + + valueExpr.parent = node; + + extendRange(node, valueExpr); + + return node; + } +} + +export interface DelNode extends ParseNodeBase { + d: { + targets: ExpressionNode[]; + }; +} + +export namespace DelNode { + export function create(delToken: Token) { + const node: DelNode = { + start: delToken.start, + length: delToken.length, + nodeType: ParseNodeType.Del, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { targets: [] }, + }; + + return node; + } +} + +export interface PassNode extends ParseNodeBase {} + +export namespace PassNode { + export function create(passToken: TextRange) { + const node: PassNode = { + start: passToken.start, + length: passToken.length, + nodeType: ParseNodeType.Pass, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: {}, + }; + + return node; + } +} + +export interface ImportNode extends ParseNodeBase { + d: { + list: ImportAsNode[]; + }; +} + +export namespace ImportNode { + export function create(importToken: TextRange) { + const node: ImportNode = { + start: importToken.start, + length: importToken.length, + nodeType: ParseNodeType.Import, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { list: [] }, + }; + + return node; + } +} + +export interface ModuleNameNode extends ParseNodeBase { + d: { + leadingDots: number; + nameParts: NameNode[]; + + // This is an error condition used only for type completion. 
+ hasTrailingDot?: boolean; + }; +} + +export namespace ModuleNameNode { + export function create(range: TextRange) { + const node: ModuleNameNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.ModuleName, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + leadingDots: 0, + nameParts: [], + }, + }; + + return node; + } +} + +export interface ImportAsNode extends ParseNodeBase { + d: { + module: ModuleNameNode; + alias?: NameNode | undefined; + }; +} + +export namespace ImportAsNode { + export function create(module: ModuleNameNode) { + const node: ImportAsNode = { + start: module.start, + length: module.length, + nodeType: ParseNodeType.ImportAs, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { module: module }, + }; + + module.parent = node; + + return node; + } +} + +export interface ImportFromNode extends ParseNodeBase { + d: { + module: ModuleNameNode; + imports: ImportFromAsNode[]; + isWildcardImport: boolean; + usesParens: boolean; + wildcardToken?: Token; + missingImport?: boolean; + }; +} + +export namespace ImportFromNode { + export function create(fromToken: Token, module: ModuleNameNode) { + const node: ImportFromNode = { + start: fromToken.start, + length: fromToken.length, + nodeType: ParseNodeType.ImportFrom, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + module, + imports: [], + isWildcardImport: false, + usesParens: false, + }, + }; + + module.parent = node; + + extendRange(node, module); + + return node; + } +} + +export interface ImportFromAsNode extends ParseNodeBase { + d: { + name: NameNode; + alias?: NameNode | undefined; + }; +} + +export namespace ImportFromAsNode { + export function create(name: NameNode) { + const node: ImportFromAsNode = { + start: name.start, + length: name.length, + nodeType: ParseNodeType.ImportFromAs, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { name: name }, + }; + + name.parent = node; + + return node; + } +} + 
+export interface GlobalNode extends ParseNodeBase { + d: { + targets: NameNode[]; + }; +} + +export namespace GlobalNode { + export function create(range: TextRange) { + const node: GlobalNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.Global, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { targets: [] }, + }; + + return node; + } +} + +export interface NonlocalNode extends ParseNodeBase { + d: { + targets: NameNode[]; + }; +} + +export namespace NonlocalNode { + export function create(range: TextRange) { + const node: NonlocalNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.Nonlocal, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { targets: [] }, + }; + + return node; + } +} + +export interface AssertNode extends ParseNodeBase { + d: { + testExpr: ExpressionNode; + exceptionExpr?: ExpressionNode | undefined; + }; +} + +export namespace AssertNode { + export function create(assertToken: Token, testExpr: ExpressionNode) { + const node: AssertNode = { + start: assertToken.start, + length: assertToken.length, + nodeType: ParseNodeType.Assert, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { testExpr }, + }; + + testExpr.parent = node; + + extendRange(node, testExpr); + + return node; + } +} + +export interface BreakNode extends ParseNodeBase {} + +export namespace BreakNode { + export function create(range: TextRange) { + const node: BreakNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.Break, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: {}, + }; + + return node; + } +} + +export interface ContinueNode extends ParseNodeBase {} + +export namespace ContinueNode { + export function create(range: TextRange) { + const node: ContinueNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.Continue, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: {}, + }; + + return node; + 
} +} + +export interface ReturnNode extends ParseNodeBase { + d: { + expr?: ExpressionNode | undefined; + }; +} + +export namespace ReturnNode { + export function create(range: TextRange) { + const node: ReturnNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.Return, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: {}, + }; + + return node; + } +} + +export interface RaiseNode extends ParseNodeBase { + d: { + expr?: ExpressionNode | undefined; + fromExpr?: ExpressionNode | undefined; + }; +} + +export namespace RaiseNode { + export function create(range: TextRange) { + const node: RaiseNode = { + start: range.start, + length: range.length, + nodeType: ParseNodeType.Raise, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: {}, + }; + + return node; + } +} + +export interface MatchNode extends ParseNodeBase { + d: { + firstToken: Token; + expr: ExpressionNode; + cases: CaseNode[]; + }; +} + +export namespace MatchNode { + export function create(matchToken: Token, expr: ExpressionNode) { + const node: MatchNode = { + start: matchToken.start, + length: matchToken.length, + nodeType: ParseNodeType.Match, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + firstToken: matchToken, + expr, + cases: [], + }, + }; + + expr.parent = node; + + extendRange(node, expr); + + return node; + } +} + +export interface CaseNode extends ParseNodeBase { + d: { + pattern: PatternAtomNode; + isIrrefutable: boolean; + guardExpr?: ExpressionNode | undefined; + suite: SuiteNode; + }; +} + +export namespace CaseNode { + export function create( + caseToken: TextRange, + pattern: PatternAtomNode, + isIrrefutable: boolean, + guardExpr: ExpressionNode | undefined, + suite: SuiteNode + ) { + const node: CaseNode = { + start: caseToken.start, + length: caseToken.length, + nodeType: ParseNodeType.Case, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + pattern, + isIrrefutable, + guardExpr, + suite, + }, + }; + + 
extendRange(node, suite); + + pattern.parent = node; + suite.parent = node; + + if (guardExpr) { + guardExpr.parent = node; + } + + return node; + } +} + +export interface PatternSequenceNode extends ParseNodeBase { + d: { + entries: PatternAsNode[]; + starEntryIndex: number | undefined; + }; +} + +export namespace PatternSequenceNode { + export function create(firstToken: TextRange, entries: PatternAsNode[]) { + const starEntryIndex = entries.findIndex( + (entry) => + entry.d.orPatterns.length === 1 && + entry.d.orPatterns[0].nodeType === ParseNodeType.PatternCapture && + entry.d.orPatterns[0].d.isStar + ); + + const node: PatternSequenceNode = { + start: firstToken.start, + length: firstToken.length, + nodeType: ParseNodeType.PatternSequence, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + entries, + starEntryIndex: starEntryIndex >= 0 ? starEntryIndex : undefined, + }, + }; + + if (entries.length > 0) { + extendRange(node, entries[entries.length - 1]); + } + + entries.forEach((entry) => { + entry.parent = node; + }); + + return node; + } +} + +export interface PatternAsNode extends ParseNodeBase { + d: { + orPatterns: PatternAtomNode[]; + target?: NameNode | undefined; + }; +} + +export namespace PatternAsNode { + export function create(orPatterns: PatternAtomNode[], target?: NameNode) { + const node: PatternAsNode = { + start: orPatterns[0].start, + length: orPatterns[0].length, + nodeType: ParseNodeType.PatternAs, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + orPatterns, + target, + }, + }; + + if (orPatterns.length > 1) { + extendRange(node, orPatterns[orPatterns.length - 1]); + } + + orPatterns.forEach((pattern) => { + pattern.parent = node; + }); + + if (target) { + extendRange(node, target); + target.parent = node; + } + + return node; + } +} + +export interface PatternLiteralNode extends ParseNodeBase { + d: { + expr: ExpressionNode; + }; +} + +export namespace PatternLiteralNode { + export function create(expr: 
ExpressionNode) { + const node: PatternLiteralNode = { + start: expr.start, + length: expr.length, + nodeType: ParseNodeType.PatternLiteral, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { expr }, + }; + + expr.parent = node; + + return node; + } +} + +export interface PatternClassNode extends ParseNodeBase { + d: { + className: NameNode | MemberAccessNode; + args: PatternClassArgumentNode[]; + }; +} + +export namespace PatternClassNode { + export function create(className: NameNode | MemberAccessNode, args: PatternClassArgumentNode[]) { + const node: PatternClassNode = { + start: className.start, + length: className.length, + nodeType: ParseNodeType.PatternClass, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + className, + args, + }, + }; + + className.parent = node; + args.forEach((arg) => { + arg.parent = node; + }); + + if (args.length > 0) { + extendRange(node, args[args.length - 1]); + } + + return node; + } +} + +export interface PatternClassArgumentNode extends ParseNodeBase { + d: { + name?: NameNode | undefined; + pattern: PatternAsNode; + }; +} + +export namespace PatternClassArgumentNode { + export function create(pattern: PatternAsNode, name?: NameNode) { + const node: PatternClassArgumentNode = { + start: pattern.start, + length: pattern.length, + nodeType: ParseNodeType.PatternClassArgument, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + pattern, + name, + }, + }; + + pattern.parent = node; + + if (name) { + extendRange(node, name); + name.parent = node; + } + + return node; + } +} + +export interface PatternCaptureNode extends ParseNodeBase { + d: { + target: NameNode; + isStar: boolean; + isWildcard: boolean; + }; +} + +export namespace PatternCaptureNode { + export function create(target: NameNode, starToken?: TextRange) { + const node: PatternCaptureNode = { + start: target.start, + length: target.length, + nodeType: ParseNodeType.PatternCapture, + id: _nextNodeId++, + parent: undefined, + 
a: undefined, + d: { + target, + isStar: starToken !== undefined, + isWildcard: target.d.value === '_', + }, + }; + + target.parent = node; + + if (starToken) { + extendRange(node, starToken); + } + + return node; + } +} + +export interface PatternMappingNode extends ParseNodeBase { + d: { + entries: PatternMappingEntryNode[]; + }; +} + +export namespace PatternMappingNode { + export function create(startToken: TextRange, entries: PatternMappingEntryNode[]) { + const node: PatternMappingNode = { + start: startToken.start, + length: startToken.length, + nodeType: ParseNodeType.PatternMapping, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { entries }, + }; + + if (entries.length > 0) { + extendRange(node, entries[entries.length - 1]); + } + + entries.forEach((entry) => { + entry.parent = node; + }); + + return node; + } +} + +export type PatternMappingEntryNode = PatternMappingKeyEntryNode | PatternMappingExpandEntryNode; + +export interface PatternMappingKeyEntryNode extends ParseNodeBase { + d: { + keyPattern: PatternLiteralNode | PatternValueNode | ErrorNode; + valuePattern: PatternAsNode | ErrorNode; + }; +} + +export namespace PatternMappingKeyEntryNode { + export function create( + keyPattern: PatternLiteralNode | PatternValueNode | ErrorNode, + valuePattern: PatternAsNode | ErrorNode + ) { + const node: PatternMappingKeyEntryNode = { + start: keyPattern.start, + length: keyPattern.length, + nodeType: ParseNodeType.PatternMappingKeyEntry, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { + keyPattern, + valuePattern, + }, + }; + + keyPattern.parent = node; + valuePattern.parent = node; + + extendRange(node, valuePattern); + + return node; + } +} + +export interface PatternMappingExpandEntryNode extends ParseNodeBase { + d: { + target: NameNode; + }; +} + +export namespace PatternMappingExpandEntryNode { + export function create(starStarToken: TextRange, target: NameNode) { + const node: PatternMappingExpandEntryNode = { + start: 
starStarToken.start, + length: starStarToken.length, + nodeType: ParseNodeType.PatternMappingExpandEntry, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { target }, + }; + + target.parent = node; + + extendRange(node, target); + + return node; + } +} + +export interface PatternValueNode extends ParseNodeBase { + d: { + expr: MemberAccessNode; + }; +} + +export namespace PatternValueNode { + export function create(expr: MemberAccessNode) { + const node: PatternValueNode = { + start: expr.start, + length: expr.length, + nodeType: ParseNodeType.PatternValue, + id: _nextNodeId++, + parent: undefined, + a: undefined, + d: { expr }, + }; + + expr.parent = node; + + return node; + } +} + +export type PatternAtomNode = + | PatternSequenceNode + | PatternLiteralNode + | PatternClassNode + | PatternAsNode + | PatternCaptureNode + | PatternMappingNode + | PatternValueNode + | ErrorNode; + +export type ParseNode = + | ErrorNode + | ArgumentNode + | AssertNode + | AssignmentExpressionNode + | AssignmentNode + | AugmentedAssignmentNode + | AwaitNode + | BinaryOperationNode + | BreakNode + | CallNode + | CaseNode + | ClassNode + | ComprehensionNode + | ComprehensionForNode + | ComprehensionIfNode + | ConstantNode + | ContinueNode + | DecoratorNode + | DelNode + | DictionaryNode + | DictionaryExpandEntryNode + | DictionaryKeyEntryNode + | EllipsisNode + | IfNode + | ImportNode + | ImportAsNode + | ImportFromNode + | ImportFromAsNode + | IndexNode + | ExceptNode + | ForNode + | FormatStringNode + | FunctionNode + | FunctionAnnotationNode + | GlobalNode + | LambdaNode + | ListNode + | MatchNode + | MemberAccessNode + | ModuleNameNode + | ModuleNode + | NameNode + | NonlocalNode + | NumberNode + | ParameterNode + | PassNode + | PatternAsNode + | PatternClassNode + | PatternClassArgumentNode + | PatternCaptureNode + | PatternLiteralNode + | PatternMappingExpandEntryNode + | PatternMappingKeyEntryNode + | PatternMappingNode + | PatternSequenceNode + | PatternValueNode + 
| RaiseNode + | ReturnNode + | SetNode + | SliceNode + | StatementListNode + | StringListNode + | StringNode + | SuiteNode + | TernaryNode + | TupleNode + | TryNode + | TypeAliasNode + | TypeAnnotationNode + | TypeParameterNode + | TypeParameterListNode + | UnaryOperationNode + | UnpackNode + | WhileNode + | WithNode + | WithItemNode + | YieldNode + | YieldFromNode; + +export type EvaluationScopeNode = + | LambdaNode + | FunctionNode + | ModuleNode + | ClassNode + | ComprehensionNode + | TypeParameterListNode; +export type ExecutionScopeNode = LambdaNode | FunctionNode | ModuleNode | TypeParameterListNode; +export type TypeParameterScopeNode = FunctionNode | ClassNode | TypeAliasNode; diff --git a/python-parser/packages/pyright-internal/src/parser/parser.ts b/python-parser/packages/pyright-internal/src/parser/parser.ts new file mode 100644 index 00000000..4e6e86f6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/parser/parser.ts @@ -0,0 +1,5415 @@ +/* + * parser.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Based on code from python-language-server repository: + * https://github.com/Microsoft/python-language-server + * + * Parser for the Python language. Converts a stream of tokens + * into an abstract syntax tree (AST). 
+ */ + +import { Char } from '../common/charCodes'; +import { appendArray } from '../common/collectionUtils'; +import { assert } from '../common/debug'; +import { Diagnostic, DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticSink } from '../common/diagnosticSink'; +import { convertOffsetsToRange } from '../common/positionUtils'; +import { + PythonVersion, + latestStablePythonVersion, + pythonVersion3_10, + pythonVersion3_11, + pythonVersion3_12, + pythonVersion3_13, + pythonVersion3_14, + pythonVersion3_3, + pythonVersion3_5, + pythonVersion3_6, + pythonVersion3_8, + pythonVersion3_9, +} from '../common/pythonVersion'; +import { hashString } from '../common/stringUtils'; +import { TextRange } from '../common/textRange'; +import { TextRangeCollection } from '../common/textRangeCollection'; +import { timingStats } from '../common/timing'; +import { LocAddendum, LocMessage } from '../localization/localize'; +import { + ArgCategory, + ArgumentNode, + AssertNode, + AssignmentExpressionNode, + AssignmentNode, + AugmentedAssignmentNode, + AwaitNode, + BinaryOperationNode, + BreakNode, + CallNode, + CaseNode, + ClassNode, + ComprehensionForIfNode, + ComprehensionForNode, + ComprehensionIfNode, + ComprehensionNode, + ConstantNode, + ContinueNode, + DecoratorNode, + DelNode, + DictionaryEntryNode, + DictionaryExpandEntryNode, + DictionaryKeyEntryNode, + DictionaryNode, + EllipsisNode, + ErrorExpressionCategory, + ErrorNode, + ExceptNode, + ExpressionNode, + ForNode, + FormatStringNode, + FunctionAnnotationNode, + FunctionNode, + GlobalNode, + IfNode, + ImportAsNode, + ImportFromAsNode, + ImportFromNode, + ImportNode, + IndexNode, + LambdaNode, + ListNode, + MatchNode, + MemberAccessNode, + ModuleNameNode, + ModuleNode, + NameNode, + NonlocalNode, + NumberNode, + ParamCategory, + ParameterNode, + ParseNode, + ParseNodeType, + PassNode, + PatternAsNode, + PatternAtomNode, + PatternCaptureNode, + PatternClassArgumentNode, + PatternClassNode, + 
PatternLiteralNode, + PatternMappingEntryNode, + PatternMappingExpandEntryNode, + PatternMappingKeyEntryNode, + PatternMappingNode, + PatternSequenceNode, + PatternValueNode, + RaiseNode, + ReturnNode, + SetNode, + SliceNode, + StatementListNode, + StatementNode, + StringListNode, + StringNode, + SuiteNode, + TernaryNode, + TryNode, + TupleNode, + TypeAliasNode, + TypeAnnotationNode, + TypeParamKind, + TypeParameterListNode, + TypeParameterNode, + UnaryOperationNode, + UnpackNode, + WhileNode, + WithItemNode, + WithNode, + YieldFromNode, + YieldNode, + extendRange, + getNextNodeId, +} from './parseNodes'; +import * as StringTokenUtils from './stringTokenUtils'; +import { Tokenizer, TokenizerOutput } from './tokenizer'; +import { + DedentToken, + FStringEndToken, + FStringMiddleToken, + FStringStartToken, + IdentifierToken, + IndentToken, + KeywordToken, + KeywordType, + NumberToken, + OperatorToken, + OperatorType, + StringToken, + StringTokenFlags, + Token, + TokenType, +} from './tokenizerTypes'; + +interface ListResult { + list: T[]; + trailingComma: boolean; + parseError?: ErrorNode | undefined; +} + +interface SubscriptListResult { + list: ArgumentNode[]; + trailingComma: boolean; +} + +const commentRegEx = /^(\s*#\s*type:\s*)([^\r\n]*)/; +const ignoreCommentRegEx = /^ignore(\s|\[|$)/; + +export class ParseOptions { + isStubFile: boolean; + pythonVersion: PythonVersion; + reportInvalidStringEscapeSequence: boolean; + skipFunctionAndClassBody: boolean; + useNotebookMode: boolean; + reportErrorsForParsedStringContents: boolean; + + constructor() { + this.isStubFile = false; + this.pythonVersion = latestStablePythonVersion; + this.reportInvalidStringEscapeSequence = false; + this.skipFunctionAndClassBody = false; + this.useNotebookMode = false; + this.reportErrorsForParsedStringContents = false; + } +} + +export interface ParserOutput { + parseTree: ModuleNode; + importedModules: ModuleImport[]; + futureImports: Set; + containsWildcardImport: boolean; + 
typingSymbolAliases: Map; + hasTypeAnnotations: boolean; + lines: TextRangeCollection; +} + +export interface ParseFileResults { + text: string; + contentHash: number; + parserOutput: ParserOutput; + tokenizerOutput: TokenizerOutput; +} + +export interface ParseExpressionTextResults { + parseTree?: T | undefined; + lines: TextRangeCollection; + diagnostics: Diagnostic[]; +} + +export interface ModuleImport { + nameNode: ModuleNameNode; + leadingDots: number; + nameParts: string[]; + + // Used for "from X import Y" pattern. An empty + // array implies "from X import *". + importedSymbols: Set | undefined; +} + +export interface ArgListResult { + args: ArgumentNode[]; + trailingComma: boolean; +} + +export const enum ParseTextMode { + Expression, + VariableAnnotation, + FunctionAnnotation, +} + +// Limit the max child node depth to prevent stack overflows. +const maxChildNodeDepth = 256; + +export class Parser { + private _fileContents?: string; + private _tokenizerOutput?: TokenizerOutput; + private _tokenIndex = 0; + private _areErrorsSuppressed = false; + private _parseOptions: ParseOptions = new ParseOptions(); + private _diagSink: DiagnosticSink = new DiagnosticSink(); + private _isInLoop = false; + private _isInFunction = false; + private _isInExceptionGroup = false; + private _isParsingTypeAnnotation = false; + private _isParsingIndexTrailer = false; + private _isParsingQuotedText = false; + private _isInFinallyBlock = false; + private _isInFinallyLoop = false; + private _futureImports = new Set(); + private _importedModules: ModuleImport[] = []; + private _containsWildcardImport = false; + private _assignmentExpressionsAllowed = true; + private _typingImportAliases: string[] = []; + private _typingSymbolAliases: Map = new Map(); + private _maxChildDepthMap = new Map(); + private _hasTypeAnnotations = false; + + parseSourceFile(fileContents: string, parseOptions: ParseOptions, diagSink: DiagnosticSink): ParseFileResults { + this._hasTypeAnnotations = false; + 
timingStats.tokenizeFileTime.timeOperation(() => { + this._startNewParse(fileContents, 0, fileContents.length, parseOptions, diagSink); + }); + + const moduleNode = ModuleNode.create({ start: 0, length: fileContents.length }); + + timingStats.parseFileTime.timeOperation(() => { + while (!this._atEof()) { + if (!this._consumeTokenIfType(TokenType.NewLine)) { + // Handle a common error case and try to recover. + const nextToken = this._peekToken(); + if (nextToken.type === TokenType.Indent) { + this._getNextToken(); + const indentToken = nextToken as IndentToken; + if (indentToken.isIndentAmbiguous) { + this._addSyntaxError(LocMessage.inconsistentTabs(), indentToken); + } else { + this._addSyntaxError(LocMessage.unexpectedIndent(), nextToken); + } + } + + const statement = this._parseStatement(); + if (!statement) { + // Perform basic error recovery to get to the next line. + this._consumeTokensUntilType([TokenType.NewLine]); + } else { + statement.parent = moduleNode; + moduleNode.d.statements.push(statement); + } + } + } + }); + + assert(this._tokenizerOutput !== undefined); + return { + text: fileContents, + contentHash: hashString(fileContents), + parserOutput: { + parseTree: moduleNode, + importedModules: this._importedModules, + futureImports: this._futureImports, + containsWildcardImport: this._containsWildcardImport, + typingSymbolAliases: this._typingSymbolAliases, + hasTypeAnnotations: this._hasTypeAnnotations, + lines: this._tokenizerOutput!.lines, + }, + tokenizerOutput: this._tokenizerOutput!, + }; + } + + parseTextExpression( + fileContents: string, + textOffset: number, + textLength: number, + parseOptions: ParseOptions, + parseTextMode: ParseTextMode.Expression, + initialParenDepth?: number, + typingSymbolAliases?: Map + ): ParseExpressionTextResults; + parseTextExpression( + fileContents: string, + textOffset: number, + textLength: number, + parseOptions: ParseOptions, + parseTextMode: ParseTextMode.VariableAnnotation, + initialParenDepth?: number, + 
typingSymbolAliases?: Map + ): ParseExpressionTextResults; + parseTextExpression( + fileContents: string, + textOffset: number, + textLength: number, + parseOptions: ParseOptions, + parseTextMode: ParseTextMode.FunctionAnnotation, + initialParenDepth?: number, + typingSymbolAliases?: Map + ): ParseExpressionTextResults; + parseTextExpression( + fileContents: string, + textOffset: number, + textLength: number, + parseOptions: ParseOptions, + parseTextMode = ParseTextMode.Expression, + initialParenDepth = 0, + typingSymbolAliases?: Map + ): ParseExpressionTextResults { + const diagSink = new DiagnosticSink(); + this._startNewParse(fileContents, textOffset, textLength, parseOptions, diagSink, initialParenDepth); + + if (typingSymbolAliases) { + this._typingSymbolAliases = new Map(typingSymbolAliases); + } + + let parseTree: ExpressionNode | FunctionAnnotationNode | undefined; + if (parseTextMode === ParseTextMode.VariableAnnotation) { + this._isParsingQuotedText = true; + parseTree = this._parseTypeAnnotation(); + } else if (parseTextMode === ParseTextMode.FunctionAnnotation) { + this._isParsingQuotedText = true; + parseTree = this._parseFunctionTypeAnnotation(); + } else { + const exprListResult = this._parseTestOrStarExpressionList( + /* allowAssignmentExpression */ false, + /* allowMultipleUnpack */ true + ); + if (exprListResult.parseError) { + parseTree = exprListResult.parseError; + } else { + if (exprListResult.list.length === 0) { + this._addSyntaxError(LocMessage.expectedExpr(), this._peekToken()); + } + parseTree = this._makeExpressionOrTuple(exprListResult, /* enclosedInParens */ false); + } + } + + if (this._peekTokenType() === TokenType.NewLine) { + this._getNextToken(); + } + + if (!this._atEof()) { + this._addSyntaxError(LocMessage.unexpectedExprToken(), this._peekToken()); + } + + return { + parseTree, + lines: this._tokenizerOutput!.lines, + diagnostics: diagSink.fetchAndClear(), + }; + } + + private _startNewParse( + fileContents: string, + 
textOffset: number, + textLength: number, + parseOptions: ParseOptions, + diagSink: DiagnosticSink, + initialParenDepth = 0 + ) { + this._fileContents = fileContents; + this._parseOptions = parseOptions; + this._diagSink = diagSink; + + // Tokenize the file contents. + const tokenizer = new Tokenizer(); + this._tokenizerOutput = tokenizer.tokenize( + fileContents, + textOffset, + textLength, + initialParenDepth, + this._parseOptions.useNotebookMode + ); + this._tokenIndex = 0; + } + + // stmt: simple_stmt | compound_stmt + // compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt + // | funcdef | classdef | decorated | async_stmt + private _parseStatement(): StatementNode | ErrorNode | undefined { + // Handle the errant condition of a dedent token here to provide + // better recovery. + if (this._consumeTokenIfType(TokenType.Dedent)) { + this._addSyntaxError(LocMessage.unexpectedUnindent(), this._peekToken()); + } + + switch (this._peekKeywordType()) { + case KeywordType.If: + return this._parseIfStatement(); + + case KeywordType.While: + return this._parseWhileStatement(); + + case KeywordType.For: + return this._parseForStatement(); + + case KeywordType.Try: + return this._parseTryStatement(); + + case KeywordType.With: + return this._parseWithStatement(); + + case KeywordType.Def: + return this._parseFunctionDef(); + + case KeywordType.Class: + return this._parseClassDef(); + + case KeywordType.Async: + return this._parseAsyncStatement(); + + case KeywordType.Match: { + // Match is considered a "soft" keyword, so we will treat + // it as an identifier if it is followed by an unexpected + // token. 
+ const peekToken = this._peekToken(1); + let isInvalidMatchToken = false; + + if ( + peekToken.type === TokenType.Colon || + peekToken.type === TokenType.Semicolon || + peekToken.type === TokenType.Comma || + peekToken.type === TokenType.Dot || + peekToken.type === TokenType.NewLine || + peekToken.type === TokenType.EndOfStream + ) { + isInvalidMatchToken = true; + } else if (peekToken.type === TokenType.Operator) { + const operatorToken = peekToken as OperatorToken; + if ( + operatorToken.operatorType !== OperatorType.Multiply && + operatorToken.operatorType !== OperatorType.Add && + operatorToken.operatorType !== OperatorType.BitwiseInvert && + operatorToken.operatorType !== OperatorType.Subtract + ) { + isInvalidMatchToken = true; + } + } + + if (!isInvalidMatchToken) { + // Try to parse the match statement. If it doesn't appear to + // be a match statement, treat as a non-keyword and reparse. + const matchStatement = this._parseMatchStatement(); + if (matchStatement) { + return matchStatement; + } + } + break; + } + } + + if (this._peekOperatorType() === OperatorType.MatrixMultiply) { + return this._parseDecorated(); + } + + return this._parseSimpleStatement(); + } + + // async_stmt: 'async' (funcdef | with_stmt | for_stmt) + private _parseAsyncStatement(): StatementNode | undefined { + const asyncToken = this._getKeywordToken(KeywordType.Async); + + switch (this._peekKeywordType()) { + case KeywordType.Def: + return this._parseFunctionDef(asyncToken); + + case KeywordType.With: + return this._parseWithStatement(asyncToken); + + case KeywordType.For: + return this._parseForStatement(asyncToken); + } + + this._addSyntaxError(LocMessage.unexpectedAsyncToken(), asyncToken); + + return undefined; + } + + // type_alias_stmt: "type" name [type_param_seq] = expr + private _parseTypeAliasStatement(): TypeAliasNode { + const typeToken = this._getKeywordToken(KeywordType.Type); + + if (!this._parseOptions.isStubFile && 
PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_12)) { + this._addSyntaxError(LocMessage.typeAliasStatementIllegal(), typeToken); + } + + const nameToken = this._getTokenIfIdentifier(); + assert(nameToken !== undefined); + const name = NameNode.create(nameToken); + + let typeParameters: TypeParameterListNode | undefined; + if (this._peekToken().type === TokenType.OpenBracket) { + typeParameters = this._parseTypeParameterList(); + } + + const assignToken = this._peekToken(); + if ( + assignToken.type !== TokenType.Operator || + (assignToken as OperatorToken).operatorType !== OperatorType.Assign + ) { + this._addSyntaxError(LocMessage.expectedEquals(), assignToken); + } else { + this._getNextToken(); + } + + const wasParsingTypeAnnotation = this._isParsingTypeAnnotation; + this._isParsingTypeAnnotation = true; + const expression = this._parseTestExpression(/* allowAssignmentExpression */ false); + this._isParsingTypeAnnotation = wasParsingTypeAnnotation; + + return TypeAliasNode.create(typeToken, name, expression, typeParameters); + } + + // type_param_seq: '[' (type_param ',')+ ']' + private _parseTypeParameterList(): TypeParameterListNode { + const typeVariableNodes: TypeParameterNode[] = []; + + const openBracketToken = this._getNextToken(); + assert(openBracketToken.type === TokenType.OpenBracket); + + while (true) { + const firstToken = this._peekToken(); + + if (firstToken.type === TokenType.CloseBracket) { + if (typeVariableNodes.length === 0) { + this._addSyntaxError(LocMessage.typeParametersMissing(), this._peekToken()); + } + break; + } + + const typeVarNode = this._parseTypeParameter(); + if (!typeVarNode) { + break; + } + + typeVariableNodes.push(typeVarNode); + + if (!this._consumeTokenIfType(TokenType.Comma)) { + break; + } + } + + const closingToken = this._peekToken(); + if (closingToken.type !== TokenType.CloseBracket) { + this._addSyntaxError(LocMessage.expectedCloseBracket(), this._peekToken()); + 
this._consumeTokensUntilType([TokenType.NewLine, TokenType.CloseBracket, TokenType.Colon]); + } else { + this._getNextToken(); + } + + return TypeParameterListNode.create(openBracketToken, closingToken, typeVariableNodes); + } + + // type_param: ['*' | '**'] NAME [':' bound_expr] ['=' default_expr] + private _parseTypeParameter(): TypeParameterNode | undefined { + let typeParamCategory = TypeParamKind.TypeVar; + if (this._consumeTokenIfOperator(OperatorType.Multiply)) { + typeParamCategory = TypeParamKind.TypeVarTuple; + } else if (this._consumeTokenIfOperator(OperatorType.Power)) { + typeParamCategory = TypeParamKind.ParamSpec; + } + + const nameToken = this._getTokenIfIdentifier(); + if (!nameToken) { + this._addSyntaxError(LocMessage.expectedTypeParameterName(), this._peekToken()); + return undefined; + } + + const name = NameNode.create(nameToken); + + let boundExpression: ExpressionNode | undefined; + if (this._consumeTokenIfType(TokenType.Colon)) { + boundExpression = this._parseExpression(/* allowUnpack */ false); + + if (typeParamCategory !== TypeParamKind.TypeVar) { + this._addSyntaxError(LocMessage.typeParameterBoundNotAllowed(), boundExpression); + } + } + + let defaultExpression: ExpressionNode | undefined; + if (this._consumeTokenIfOperator(OperatorType.Assign)) { + defaultExpression = this._parseExpression( + /* allowUnpack */ typeParamCategory === TypeParamKind.TypeVarTuple + ); + + if ( + !this._parseOptions.isStubFile && + PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_13) + ) { + this._addSyntaxError(LocMessage.typeVarDefaultIllegal(), defaultExpression); + } + } + + return TypeParameterNode.create(name, typeParamCategory, boundExpression, defaultExpression); + } + + // match_stmt: "match" subject_expr ':' NEWLINE INDENT case_block+ DEDENT + // subject_expr: + // | star_named_expression ',' star_named_expressions? 
+ // | named_expression + private _parseMatchStatement(): MatchNode | undefined { + // Parse the subject expression with errors suppressed. If it's not + // followed by a colon, we'll assume this is not a match statement. + // We need to do this because "match" is considered a soft keyword, + // and we need to distinguish between "match(2)" and "match (2):" + // and between "match[2]" and "match [2]:" + let smellsLikeMatchStatement = false; + this._suppressErrors(() => { + const curTokenIndex = this._tokenIndex; + + this._getKeywordToken(KeywordType.Match); + const expression = this._parseTestOrStarListAsExpression( + /* allowAssignmentExpression */ true, + /* allowMultipleUnpack */ true, + ErrorExpressionCategory.MissingPatternSubject, + () => LocMessage.expectedReturnExpr() + ); + smellsLikeMatchStatement = + expression.nodeType !== ParseNodeType.Error && this._peekToken().type === TokenType.Colon; + + // Set the token index back to the start. + this._tokenIndex = curTokenIndex; + }); + + if (!smellsLikeMatchStatement) { + return undefined; + } + + const matchToken = this._getKeywordToken(KeywordType.Match); + + const subjectExpression = this._parseTestOrStarListAsExpression( + /* allowAssignmentExpression */ true, + /* allowMultipleUnpack */ true, + ErrorExpressionCategory.MissingPatternSubject, + () => LocMessage.expectedReturnExpr() + ); + const matchNode = MatchNode.create(matchToken, subjectExpression); + + const nextToken = this._peekToken(); + + if (!this._consumeTokenIfType(TokenType.Colon)) { + this._addSyntaxError(LocMessage.expectedColon(), nextToken); + + // Try to perform parse recovery by consuming tokens until + // we find the end of the line. 
+ if (this._consumeTokensUntilType([TokenType.NewLine, TokenType.Colon])) { + this._getNextToken(); + } + } else { + extendRange(matchNode, nextToken); + + if (!this._consumeTokenIfType(TokenType.NewLine)) { + this._addSyntaxError(LocMessage.expectedNewline(), nextToken); + } else { + const possibleIndent = this._peekToken(); + if (!this._consumeTokenIfType(TokenType.Indent)) { + this._addSyntaxError(LocMessage.expectedIndentedBlock(), this._peekToken()); + } else { + const indentToken = possibleIndent as IndentToken; + if (indentToken.isIndentAmbiguous) { + this._addSyntaxError(LocMessage.inconsistentTabs(), indentToken); + } + } + + while (true) { + // Handle a common error here and see if we can recover. + const possibleUnexpectedIndent = this._peekToken(); + if (possibleUnexpectedIndent.type === TokenType.Indent) { + this._getNextToken(); + const indentToken = possibleUnexpectedIndent as IndentToken; + if (indentToken.isIndentAmbiguous) { + this._addSyntaxError(LocMessage.inconsistentTabs(), indentToken); + } else { + this._addSyntaxError(LocMessage.unexpectedIndent(), possibleUnexpectedIndent); + } + } + + const caseStatement = this._parseCaseStatement(); + if (!caseStatement) { + // Perform basic error recovery to get to the next line. 
+ if (this._consumeTokensUntilType([TokenType.NewLine, TokenType.Colon])) { + this._getNextToken(); + } + } else { + caseStatement.parent = matchNode; + matchNode.d.cases.push(caseStatement); + } + + const dedentToken = this._peekToken() as DedentToken; + if (this._consumeTokenIfType(TokenType.Dedent)) { + if (!dedentToken.matchesIndent) { + this._addSyntaxError(LocMessage.inconsistentIndent(), dedentToken); + } + if (dedentToken.isDedentAmbiguous) { + this._addSyntaxError(LocMessage.inconsistentTabs(), dedentToken); + } + break; + } + + if (this._peekTokenType() === TokenType.EndOfStream) { + break; + } + } + } + + if (matchNode.d.cases.length > 0) { + extendRange(matchNode, matchNode.d.cases[matchNode.d.cases.length - 1]); + } else { + this._addSyntaxError(LocMessage.zeroCaseStatementsFound(), matchToken); + } + } + + // This feature requires Python 3.10. + if (PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_10)) { + this._addSyntaxError(LocMessage.matchIncompatible(), matchToken); + } + + // Validate that only the last entry uses an irrefutable pattern. 
+ for (let i = 0; i < matchNode.d.cases.length - 1; i++) { + const caseNode = matchNode.d.cases[i]; + if (!caseNode.d.guardExpr && caseNode.d.isIrrefutable) { + this._addSyntaxError(LocMessage.casePatternIsIrrefutable(), caseNode.d.pattern); + } + } + + return matchNode; + } + + // case_block: "case" patterns [guard] ':' block + // patterns: sequence_pattern | as_pattern + // guard: 'if' named_expression + private _parseCaseStatement(): CaseNode | undefined { + const caseToken = this._peekToken(); + + if (!this._consumeTokenIfKeyword(KeywordType.Case)) { + this._addSyntaxError(LocMessage.expectedCase(), caseToken); + return undefined; + } + + const patternList = this._parsePatternSequence(); + let casePattern: PatternAtomNode; + + if (patternList.parseError) { + casePattern = patternList.parseError; + } else if (patternList.list.length === 0) { + this._addSyntaxError(LocMessage.expectedPatternExpr(), this._peekToken()); + casePattern = ErrorNode.create(caseToken, ErrorExpressionCategory.MissingPattern); + } else if (patternList.list.length === 1 && !patternList.trailingComma) { + const pattern = patternList.list[0].d.orPatterns[0]; + + if (pattern.nodeType === ParseNodeType.PatternCapture && pattern.d.isStar) { + casePattern = PatternSequenceNode.create(patternList.list[0], patternList.list); + } else { + casePattern = patternList.list[0]; + } + } else { + casePattern = PatternSequenceNode.create(patternList.list[0], patternList.list); + } + + if (casePattern.nodeType !== ParseNodeType.Error) { + const globalNameMap = new Map(); + const localNameMap = new Map(); + this._reportDuplicatePatternCaptureTargets(casePattern, globalNameMap, localNameMap); + } + + let guardExpression: ExpressionNode | undefined; + if (this._consumeTokenIfKeyword(KeywordType.If)) { + guardExpression = this._parseTestExpression(/* allowAssignmentExpression */ true); + } + + const suite = this._parseSuite(this._isInFunction); + return CaseNode.create(caseToken, casePattern, 
this._isPatternIrrefutable(casePattern), guardExpression, suite); + } + + // PEP 634 defines the concept of an "irrefutable" pattern - a pattern that + // will always be matched. + private _isPatternIrrefutable(node: PatternAtomNode): boolean { + if (node.nodeType === ParseNodeType.PatternCapture) { + return true; + } + + if (node.nodeType === ParseNodeType.PatternAs) { + return node.d.orPatterns.some((pattern) => this._isPatternIrrefutable(pattern)); + } + + return false; + } + + // Reports any situations where a capture target (a variable that receives part of a pattern) + // appears twice within the same pattern. This is complicated by the fact that duplicate targets + // are allowed in separate "or" clauses, so we need to track the targets we've seen globally + // as well as the targets we've seen locally within the current "or" clause. + private _reportDuplicatePatternCaptureTargets( + node: PatternAtomNode, + globalNameMap: Map, + localNameMap: Map + ) { + const reportTargetIfDuplicate = (nameNode: NameNode) => { + if (globalNameMap.has(nameNode.d.value) || localNameMap.has(nameNode.d.value)) { + this._addSyntaxError( + LocMessage.duplicateCapturePatternTarget().format({ + name: nameNode.d.value, + }), + nameNode + ); + } else { + localNameMap.set(nameNode.d.value, nameNode); + } + }; + + switch (node.nodeType) { + case ParseNodeType.PatternSequence: { + node.d.entries.forEach((subpattern) => { + this._reportDuplicatePatternCaptureTargets(subpattern, globalNameMap, localNameMap); + }); + break; + } + + case ParseNodeType.PatternClass: { + node.d.args.forEach((arg) => { + this._reportDuplicatePatternCaptureTargets(arg.d.pattern, globalNameMap, localNameMap); + }); + break; + } + + case ParseNodeType.PatternAs: { + if (node.d.target) { + reportTargetIfDuplicate(node.d.target); + } + + const orLocalNameMaps = node.d.orPatterns.map((subpattern) => { + const orLocalNameMap = new Map(); + this._reportDuplicatePatternCaptureTargets(subpattern, localNameMap, 
orLocalNameMap); + return orLocalNameMap; + }); + + const combinedLocalOrNameMap = new Map(); + orLocalNameMaps.forEach((orLocalNameMap) => { + orLocalNameMap.forEach((node) => { + if (!combinedLocalOrNameMap.has(node.d.value)) { + combinedLocalOrNameMap.set(node.d.value, node); + reportTargetIfDuplicate(node); + } + }); + }); + break; + } + + case ParseNodeType.PatternCapture: { + if (!node.d.isWildcard) { + reportTargetIfDuplicate(node.d.target); + } + break; + } + + case ParseNodeType.PatternMapping: { + node.d.entries.forEach((mapEntry) => { + if (mapEntry.nodeType === ParseNodeType.PatternMappingExpandEntry) { + reportTargetIfDuplicate(mapEntry.d.target); + } else { + this._reportDuplicatePatternCaptureTargets(mapEntry.d.keyPattern, globalNameMap, localNameMap); + this._reportDuplicatePatternCaptureTargets( + mapEntry.d.valuePattern, + globalNameMap, + localNameMap + ); + } + }); + break; + } + + case ParseNodeType.PatternLiteral: + case ParseNodeType.PatternValue: + case ParseNodeType.Error: { + break; + } + } + } + + private _getPatternTargetNames(node: PatternAtomNode, nameSet: Set): void { + switch (node.nodeType) { + case ParseNodeType.PatternSequence: { + node.d.entries.forEach((subpattern) => { + this._getPatternTargetNames(subpattern, nameSet); + }); + break; + } + + case ParseNodeType.PatternClass: { + node.d.args.forEach((arg) => { + this._getPatternTargetNames(arg.d.pattern, nameSet); + }); + break; + } + + case ParseNodeType.PatternAs: { + if (node.d.target) { + nameSet.add(node.d.target.d.value); + } + node.d.orPatterns.forEach((subpattern) => { + this._getPatternTargetNames(subpattern, nameSet); + }); + break; + } + + case ParseNodeType.PatternCapture: { + if (!node.d.isWildcard) { + nameSet.add(node.d.target.d.value); + } + break; + } + + case ParseNodeType.PatternMapping: { + node.d.entries.forEach((mapEntry) => { + if (mapEntry.nodeType === ParseNodeType.PatternMappingExpandEntry) { + nameSet.add(mapEntry.d.target.d.value); + } else { + 
this._getPatternTargetNames(mapEntry.d.keyPattern, nameSet); + this._getPatternTargetNames(mapEntry.d.valuePattern, nameSet); + } + }); + break; + } + + case ParseNodeType.PatternLiteral: + case ParseNodeType.PatternValue: + case ParseNodeType.Error: { + break; + } + } + } + + private _parsePatternSequence() { + const patternList = this._parseExpressionListGeneric(() => this._parsePatternAs()); + + // Check for more than one star entry. + const starEntries = patternList.list.filter( + (entry) => + entry.d.orPatterns.length === 1 && + entry.d.orPatterns[0].nodeType === ParseNodeType.PatternCapture && + entry.d.orPatterns[0].d.isStar + ); + + if (starEntries.length > 1) { + this._addSyntaxError(LocMessage.duplicateStarPattern(), starEntries[1].d.orPatterns[0]); + } + + return patternList; + } + + // as_pattern: or_pattern ['as' NAME] + // or_pattern: '|'.pattern_atom+ + private _parsePatternAs(): PatternAsNode { + const orPatterns: PatternAtomNode[] = []; + + while (true) { + const patternAtom = this._parsePatternAtom(); + orPatterns.push(patternAtom); + + if (!this._consumeTokenIfOperator(OperatorType.BitwiseOr)) { + break; + } + } + + if (orPatterns.length > 1) { + // Star patterns cannot be ORed with other patterns. + orPatterns.forEach((patternAtom) => { + if (patternAtom.nodeType === ParseNodeType.PatternCapture && patternAtom.d.isStar) { + this._addSyntaxError(LocMessage.starPatternInOrPattern(), patternAtom); + } + }); + } + + let target: NameNode | undefined; + if (this._consumeTokenIfKeyword(KeywordType.As)) { + const nameToken = this._getTokenIfIdentifier(); + if (nameToken) { + target = NameNode.create(nameToken); + } else { + this._addSyntaxError(LocMessage.expectedNameAfterAs(), this._peekToken()); + } + } + + // Star patterns cannot be used with AS pattern. 
+ if ( + target && + orPatterns.length === 1 && + orPatterns[0].nodeType === ParseNodeType.PatternCapture && + orPatterns[0].d.isStar + ) { + this._addSyntaxError(LocMessage.starPatternInAsPattern(), orPatterns[0]); + } + + // Validate that irrefutable patterns are not in any entries other than the last. + orPatterns.forEach((orPattern, index) => { + if (index < orPatterns.length - 1 && this._isPatternIrrefutable(orPattern)) { + this._addSyntaxError(LocMessage.orPatternIrrefutable(), orPattern); + } + }); + + // Validate that all bound variables are the same within all or patterns. + const fullNameSet = new Set(); + orPatterns.forEach((orPattern) => { + this._getPatternTargetNames(orPattern, fullNameSet); + }); + + orPatterns.forEach((orPattern) => { + const localNameSet = new Set(); + this._getPatternTargetNames(orPattern, localNameSet); + + if (localNameSet.size < fullNameSet.size) { + const missingNames = Array.from(fullNameSet.keys()).filter((name) => !localNameSet.has(name)); + const diag = new DiagnosticAddendum(); + diag.addMessage( + LocAddendum.orPatternMissingName().format({ + name: missingNames.map((name) => `"${name}"`).join(', '), + }) + ); + this._addSyntaxError(LocMessage.orPatternMissingName() + diag.getString(), orPattern); + } + }); + + return PatternAsNode.create(orPatterns, target); + } + + // pattern_atom: + // | literal_pattern + // | name_or_attr + // | '(' as_pattern ')' + // | '[' [sequence_pattern] ']' + // | '(' [sequence_pattern] ')' + // | '{' [items_pattern] '}' + // | name_or_attr '(' [pattern_arguments ','?] ')' + // name_or_attr: attr | NAME + // attr: name_or_attr '.' NAME + // sequence_pattern: ','.maybe_star_pattern+ ','? + // maybe_star_pattern: '*' NAME | pattern + // items_pattern: ','.key_value_pattern+ ','? 
+ private _parsePatternAtom(): PatternAtomNode { + const patternLiteral = this._parsePatternLiteral(); + if (patternLiteral) { + return patternLiteral; + } + + const patternCaptureOrValue = this._parsePatternCaptureOrValue(); + if (patternCaptureOrValue) { + const openParenToken = this._peekToken(); + if ( + patternCaptureOrValue.nodeType === ParseNodeType.Error || + !this._consumeTokenIfType(TokenType.OpenParenthesis) + ) { + return patternCaptureOrValue; + } + + const args = this._parseClassPatternArgList(); + + const classNameExpr = + patternCaptureOrValue.nodeType === ParseNodeType.PatternCapture + ? patternCaptureOrValue.d.target + : patternCaptureOrValue.d.expr; + const classPattern = PatternClassNode.create(classNameExpr, args); + + if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { + this._addSyntaxError(LocMessage.expectedCloseParen(), openParenToken); + + // Consume the remainder of tokens on the line for error + // recovery. + this._consumeTokensUntilType([TokenType.NewLine]); + + // Extend the node's range to include the rest of the line. + // This helps the signatureHelpProvider. 
+ extendRange(classPattern, this._peekToken()); + } + + return classPattern; + } + + const nextToken = this._peekToken(); + const nextOperator = this._peekOperatorType(); + + if (nextOperator === OperatorType.Multiply) { + const starToken = this._getNextToken(); + const identifierToken = this._getTokenIfIdentifier(); + if (!identifierToken) { + this._addSyntaxError(LocMessage.expectedIdentifier(), this._peekToken()); + return ErrorNode.create(starToken, ErrorExpressionCategory.MissingExpression); + } else { + return PatternCaptureNode.create(NameNode.create(identifierToken), starToken); + } + } + + if (nextToken.type === TokenType.OpenParenthesis || nextToken.type === TokenType.OpenBracket) { + const startToken = this._getNextToken(); + const patternList = this._parsePatternSequence(); + let casePattern: PatternAtomNode; + + if (patternList.parseError) { + casePattern = patternList.parseError; + } else if ( + patternList.list.length === 1 && + !patternList.trailingComma && + startToken.type === TokenType.OpenParenthesis + ) { + const pattern = patternList.list[0].d.orPatterns[0]; + + if (pattern.nodeType === ParseNodeType.PatternCapture && pattern.d.isStar) { + casePattern = PatternSequenceNode.create(startToken, patternList.list); + } else { + casePattern = patternList.list[0]; + } + + extendRange(casePattern, nextToken); + } else { + casePattern = PatternSequenceNode.create(startToken, patternList.list); + } + + const endToken = this._peekToken(); + if ( + this._consumeTokenIfType( + nextToken.type === TokenType.OpenParenthesis ? TokenType.CloseParenthesis : TokenType.CloseBracket + ) + ) { + extendRange(casePattern, endToken); + } else { + this._addSyntaxError( + nextToken.type === TokenType.OpenParenthesis + ? LocMessage.expectedCloseParen() + : LocMessage.expectedCloseBracket(), + nextToken + ); + this._consumeTokensUntilType([ + TokenType.Colon, + nextToken.type === TokenType.OpenParenthesis ? 
TokenType.CloseParenthesis : TokenType.CloseBracket, + ]); + } + + return casePattern; + } else if (nextToken.type === TokenType.OpenCurlyBrace) { + const firstToken = this._getNextToken(); + const mappingPattern = this._parsePatternMapping(firstToken); + const lastToken = this._peekToken(); + + if (this._consumeTokenIfType(TokenType.CloseCurlyBrace)) { + extendRange(mappingPattern, lastToken); + } else { + this._addSyntaxError(LocMessage.expectedCloseBrace(), nextToken); + this._consumeTokensUntilType([TokenType.Colon, TokenType.CloseCurlyBrace]); + } + + return mappingPattern; + } + + return this._handleExpressionParseError( + ErrorExpressionCategory.MissingPattern, + LocMessage.expectedPatternExpr() + ); + } + + // pattern_arguments: + // | positional_patterns [',' keyword_patterns] + // | keyword_patterns + // positional_patterns: ','.as_pattern+ + // keyword_patterns: ','.keyword_pattern+ + private _parseClassPatternArgList(): PatternClassArgumentNode[] { + const argList: PatternClassArgumentNode[] = []; + let sawKeywordArg = false; + + while (true) { + const nextTokenType = this._peekTokenType(); + if ( + nextTokenType === TokenType.CloseParenthesis || + nextTokenType === TokenType.NewLine || + nextTokenType === TokenType.EndOfStream + ) { + break; + } + + const arg = this._parseClassPatternArgument(); + if (arg.d.name) { + sawKeywordArg = true; + } else if (sawKeywordArg && !arg.d.name) { + this._addSyntaxError(LocMessage.positionArgAfterNamedArg(), arg); + } + argList.push(arg); + + if (!this._consumeTokenIfType(TokenType.Comma)) { + break; + } + } + + return argList; + } + + // keyword_pattern: NAME '=' as_pattern + private _parseClassPatternArgument(): PatternClassArgumentNode { + const firstToken = this._peekToken(); + const secondToken = this._peekToken(1); + + let keywordName: NameNode | undefined; + + if ( + (firstToken.type === TokenType.Identifier || firstToken.type === TokenType.Keyword) && + secondToken.type === TokenType.Operator && + 
(secondToken as OperatorToken).operatorType === OperatorType.Assign + ) { + const classNameToken = this._getTokenIfIdentifier(); + if (classNameToken !== undefined) { + keywordName = NameNode.create(classNameToken); + this._getNextToken(); + } + } + + const pattern = this._parsePatternAs(); + + return PatternClassArgumentNode.create(pattern, keywordName); + } + + // literal_pattern: + // | signed_number + // | signed_number '+' NUMBER + // | signed_number '-' NUMBER + // | strings + // | 'None' + // | 'True' + // | 'False' + private _parsePatternLiteral(): PatternLiteralNode | undefined { + const nextToken = this._peekToken(); + const nextOperator = this._peekOperatorType(); + + if (nextToken.type === TokenType.Number || nextOperator === OperatorType.Subtract) { + return this._parsePatternLiteralNumber(); + } + + if (nextToken.type === TokenType.String) { + const stringList = this._parseAtom() as StringListNode; + assert(stringList.nodeType === ParseNodeType.StringList); + + // Check for f-strings, which are not allowed. 
+ stringList.d.strings.forEach((stringAtom) => { + if (stringAtom.nodeType === ParseNodeType.FormatString) { + this._addSyntaxError(LocMessage.formatStringInPattern(), stringAtom); + } + }); + + return PatternLiteralNode.create(stringList); + } + + if (nextToken.type === TokenType.Keyword) { + const keywordToken = nextToken as KeywordToken; + if ( + keywordToken.keywordType === KeywordType.False || + keywordToken.keywordType === KeywordType.True || + keywordToken.keywordType === KeywordType.None + ) { + return PatternLiteralNode.create(this._parseAtom()); + } + } + + return undefined; + } + + // signed_number: NUMBER | '-' NUMBER + private _parsePatternLiteralNumber(): PatternLiteralNode { + const expression = this._parseArithmeticExpression(); + let realValue: ExpressionNode | undefined; + let imagValue: ExpressionNode | undefined; + + if (expression.nodeType === ParseNodeType.BinaryOperation) { + if (expression.d.operator === OperatorType.Subtract || expression.d.operator === OperatorType.Add) { + realValue = expression.d.leftExpr; + imagValue = expression.d.rightExpr; + } + } else { + realValue = expression; + } + + if (realValue) { + if (realValue.nodeType === ParseNodeType.UnaryOperation && realValue.d.operator === OperatorType.Subtract) { + realValue = realValue.d.expr; + } + + if (realValue.nodeType !== ParseNodeType.Number || (imagValue !== undefined && realValue.d.isImaginary)) { + this._addSyntaxError(LocMessage.expectedComplexNumberLiteral(), expression); + imagValue = undefined; + } + } + + if (imagValue) { + if (imagValue.nodeType === ParseNodeType.UnaryOperation && imagValue.d.operator === OperatorType.Subtract) { + imagValue = imagValue.d.expr; + } + + if (imagValue.nodeType !== ParseNodeType.Number || !imagValue.d.isImaginary) { + this._addSyntaxError(LocMessage.expectedComplexNumberLiteral(), expression); + } + } + + return PatternLiteralNode.create(expression); + } + + private _parsePatternMapping(firstToken: Token): PatternMappingNode | ErrorNode 
{ + const itemList = this._parseExpressionListGeneric(() => this._parsePatternMappingItem()); + + if (itemList.list.length > 0) { + // Verify there's at most one ** entry. + const starStarEntries = itemList.list.filter( + (entry) => entry.nodeType === ParseNodeType.PatternMappingExpandEntry + ); + if (starStarEntries.length > 1) { + this._addSyntaxError(LocMessage.duplicateStarStarPattern(), starStarEntries[1]); + } + + return PatternMappingNode.create(firstToken, itemList.list); + } + + return itemList.parseError || ErrorNode.create(this._peekToken(), ErrorExpressionCategory.MissingPattern); + } + + // key_value_pattern: + // | (literal_pattern | attr) ':' as_pattern + // | '**' NAME + private _parsePatternMappingItem(): PatternMappingEntryNode | ErrorNode { + let keyExpression: PatternLiteralNode | PatternValueNode | ErrorNode | undefined; + const doubleStar = this._peekToken(); + + if (this._consumeTokenIfOperator(OperatorType.Power)) { + const identifierToken = this._getTokenIfIdentifier(); + if (!identifierToken) { + this._addSyntaxError(LocMessage.expectedIdentifier(), this._peekToken()); + return ErrorNode.create(this._peekToken(), ErrorExpressionCategory.MissingPattern); + } + + const nameNode = NameNode.create(identifierToken); + if (identifierToken.value === '_') { + this._addSyntaxError(LocMessage.starStarWildcardNotAllowed(), nameNode); + } + + return PatternMappingExpandEntryNode.create(doubleStar, nameNode); + } + + const patternLiteral = this._parsePatternLiteral(); + if (patternLiteral) { + keyExpression = patternLiteral; + } else { + const patternCaptureOrValue = this._parsePatternCaptureOrValue(); + if (patternCaptureOrValue) { + if (patternCaptureOrValue.nodeType === ParseNodeType.PatternValue) { + keyExpression = patternCaptureOrValue; + } else { + this._addSyntaxError(LocMessage.expectedPatternValue(), patternCaptureOrValue); + keyExpression = ErrorNode.create(this._peekToken(), ErrorExpressionCategory.MissingPattern); + } + } + } + + if 
(!keyExpression) { + this._addSyntaxError(LocMessage.expectedPatternExpr(), this._peekToken()); + keyExpression = ErrorNode.create(this._peekToken(), ErrorExpressionCategory.MissingPattern); + } + + let valuePattern: PatternAtomNode | undefined; + if (!this._consumeTokenIfType(TokenType.Colon)) { + this._addSyntaxError(LocMessage.expectedColon(), this._peekToken()); + valuePattern = ErrorNode.create(this._peekToken(), ErrorExpressionCategory.MissingPattern); + } else { + valuePattern = this._parsePatternAs(); + } + + return PatternMappingKeyEntryNode.create(keyExpression, valuePattern); + } + + private _parsePatternCaptureOrValue(): PatternCaptureNode | PatternValueNode | ErrorNode | undefined { + const nextToken = this._peekToken(); + + if (nextToken.type === TokenType.Identifier || nextToken.type === TokenType.Keyword) { + let nameOrMember: NameNode | MemberAccessNode | undefined; + + while (true) { + const identifierToken = this._getTokenIfIdentifier(); + if (identifierToken) { + const nameNode = NameNode.create(identifierToken); + nameOrMember = nameOrMember ? 
MemberAccessNode.create(nameOrMember, nameNode) : nameNode; + } else { + this._addSyntaxError(LocMessage.expectedIdentifier(), this._peekToken()); + break; + } + + if (!this._consumeTokenIfType(TokenType.Dot)) { + break; + } + } + + if (!nameOrMember) { + this._addSyntaxError(LocMessage.expectedIdentifier(), this._peekToken()); + return ErrorNode.create(this._peekToken(), ErrorExpressionCategory.MissingPattern); + } + + if (nameOrMember.nodeType === ParseNodeType.MemberAccess) { + return PatternValueNode.create(nameOrMember); + } + + return PatternCaptureNode.create(nameOrMember); + } + + return undefined; + } + + // if_stmt: 'if' test_suite ('elif' test_suite)* ['else' suite] + // test_suite: test suite + // test: or_test ['if' or_test 'else' test] | lambdef + private _parseIfStatement(keywordType: KeywordType.If | KeywordType.Elif = KeywordType.If): IfNode { + const ifOrElifToken = this._getKeywordToken(keywordType); + + const test = this._parseTestExpression(/* allowAssignmentExpression */ true); + const suite = this._parseSuite(this._isInFunction); + const ifNode = IfNode.create(ifOrElifToken, test, suite); + + if (this._consumeTokenIfKeyword(KeywordType.Else)) { + ifNode.d.elseSuite = this._parseSuite(this._isInFunction); + ifNode.d.elseSuite.parent = ifNode; + extendRange(ifNode, ifNode.d.elseSuite); + } else if (this._peekKeywordType() === KeywordType.Elif) { + // Recursively handle an "elif" statement. 
+ ifNode.d.elseSuite = this._parseIfStatement(KeywordType.Elif); + ifNode.d.elseSuite.parent = ifNode; + extendRange(ifNode, ifNode.d.elseSuite); + } + + return ifNode; + } + + private _parseExceptSuite(isExceptionGroup: boolean, callback: () => T): T { + const wasInExceptionGroup = this._isInExceptionGroup; + + if (isExceptionGroup) { + this._isInExceptionGroup = true; + } + const result = callback(); + + this._isInExceptionGroup = wasInExceptionGroup; + + return result; + } + + private _parseLoopSuite(): SuiteNode { + const wasInLoop = this._isInLoop; + const wasInExceptionGroup = this._isInExceptionGroup; + this._isInExceptionGroup = false; + this._isInLoop = true; + + // Record the fact that we are no longer in a finally block + // that is contained within a loop. A loop within the finally + // block resets this. See PEP 765 for details. + const wasInFinallyLoop = this._isInFinallyLoop; + this._isInFinallyLoop = false; + + let typeComment: StringToken | undefined; + const suite = this._parseSuite(this._isInFunction, /* skipBody */ false, () => { + const comment = this._getTypeAnnotationCommentText(); + if (comment) { + typeComment = comment; + } + }); + + this._isInLoop = wasInLoop; + this._isInFinallyLoop = wasInFinallyLoop; + this._isInExceptionGroup = wasInExceptionGroup; + + if (typeComment) { + suite.d.typeComment = typeComment; + } + + return suite; + } + + // suite: ':' (simple_stmt | NEWLINE INDENT stmt+ DEDENT) + private _parseSuite(isFunction = false, skipBody = false, postColonCallback?: () => void): SuiteNode { + const nextToken = this._peekToken(); + const suite = SuiteNode.create(nextToken); + + if (!this._consumeTokenIfType(TokenType.Colon)) { + this._addSyntaxError(LocMessage.expectedColon(), nextToken); + + // Try to perform parse recovery by consuming tokens. 
+ if (this._consumeTokensUntilType([TokenType.NewLine, TokenType.Colon])) { + if (this._peekTokenType() === TokenType.Colon) { + this._getNextToken(); + } else if (this._peekToken(1).type !== TokenType.Indent) { + // Bail so we resume the at the next statement. + // We can't parse as a simple statement as we've skipped all but the newline. + this._getNextToken(); + return suite; + } + } + } + + if (skipBody) { + if (this._consumeTokenIfType(TokenType.NewLine)) { + let indent = 0; + while (true) { + const nextToken = this._getNextToken(); + if (nextToken.type === TokenType.Indent) { + indent++; + } + + if (nextToken.type === TokenType.Dedent) { + if ((nextToken as DedentToken).isDedentAmbiguous) { + this._addSyntaxError(LocMessage.inconsistentTabs(), nextToken); + } + + indent--; + + if (indent === 0) { + break; + } + } + + if (nextToken.type === TokenType.EndOfStream) { + break; + } + } + } else { + // consume tokens + this._parseSimpleStatement(); + } + + if (this._tokenIndex > 0) { + extendRange(suite, this._tokenizerOutput!.tokens.getItemAt(this._tokenIndex - 1)); + } + + return suite; + } + + if (postColonCallback) { + postColonCallback(); + } + + const wasFunction = this._isInFunction; + this._isInFunction = isFunction; + + if (this._consumeTokenIfType(TokenType.NewLine)) { + if (postColonCallback) { + postColonCallback(); + } + + const possibleIndent = this._peekToken(); + if (!this._consumeTokenIfType(TokenType.Indent)) { + this._addSyntaxError(LocMessage.expectedIndentedBlock(), this._peekToken()); + return suite; + } + + const bodyIndentToken = possibleIndent as IndentToken; + if (bodyIndentToken.isIndentAmbiguous) { + this._addSyntaxError(LocMessage.inconsistentTabs(), bodyIndentToken); + } + + while (true) { + // Handle a common error here and see if we can recover. 
+ const nextToken = this._peekToken(); + if (nextToken.type === TokenType.Indent) { + this._getNextToken(); + const indentToken = nextToken as IndentToken; + if (indentToken.isIndentAmbiguous) { + this._addSyntaxError(LocMessage.inconsistentTabs(), indentToken); + } else { + this._addSyntaxError(LocMessage.unexpectedIndent(), nextToken); + } + } else if (nextToken.type === TokenType.Dedent) { + // When we see a dedent, stop before parsing the dedented statement. + const dedentToken = nextToken as DedentToken; + if (!dedentToken.matchesIndent) { + this._addSyntaxError(LocMessage.inconsistentIndent(), dedentToken); + } + if (dedentToken.isDedentAmbiguous) { + this._addSyntaxError(LocMessage.inconsistentTabs(), dedentToken); + } + + // When the suite is incomplete (no statements), leave the dedent token for + // recovery. This allows a single dedent token to cause us to break out of + // multiple levels of nested suites. Also extend the suite's range in this + // case so it is multi-line as this works better with indentationUtils. + if (suite.d.statements.length > 0) { + this._consumeTokenIfType(TokenType.Dedent); + } else { + extendRange(suite, dedentToken); + } + + // Did this dedent take us to an indent amount that is less than the + // initial indent of the suite body? + if (!bodyIndentToken || dedentToken.indentAmount < bodyIndentToken.indentAmount) { + break; + } else if (dedentToken.indentAmount === bodyIndentToken.indentAmount) { + // If the next token is also a dedent that reduces the indent + // level to a less than the initial indent of the suite body, swallow + // the extra dedent to help recover the parse. + const nextToken = this._peekToken(); + if (this._consumeTokenIfType(TokenType.Dedent)) { + extendRange(suite, nextToken); + break; + } + } + } + + const statement = this._parseStatement(); + if (!statement) { + // Perform basic error recovery to get to the next line. 
+ this._consumeTokensUntilType([TokenType.NewLine]); + } else { + statement.parent = suite; + suite.d.statements.push(statement); + } + + if (this._peekTokenType() === TokenType.EndOfStream) { + break; + } + } + } else { + const simpleStatement = this._parseSimpleStatement(); + suite.d.statements.push(simpleStatement); + simpleStatement.parent = suite; + } + + if (suite.d.statements.length > 0) { + extendRange(suite, suite.d.statements[suite.d.statements.length - 1]); + } + + this._isInFunction = wasFunction; + + return suite; + } + + // for_stmt: [async] 'for' exprlist 'in' testlist suite ['else' suite] + private _parseForStatement(asyncToken?: KeywordToken): ForNode { + const forToken = this._getKeywordToken(KeywordType.For); + + const targetExpr = this._parseExpressionListAsPossibleTuple( + ErrorExpressionCategory.MissingExpression, + () => LocMessage.expectedExpr(), + forToken + ); + + let seqExpr: ExpressionNode; + let forSuite: SuiteNode; + let elseSuite: SuiteNode | undefined; + + if (!this._consumeTokenIfKeyword(KeywordType.In)) { + seqExpr = this._handleExpressionParseError(ErrorExpressionCategory.MissingIn, LocMessage.expectedIn()); + forSuite = SuiteNode.create(this._peekToken()); + } else { + seqExpr = this._parseTestOrStarListAsExpression( + /* allowAssignmentExpression */ false, + /* allowMultipleUnpack */ true, + ErrorExpressionCategory.MissingExpression, + () => LocMessage.expectedInExpr() + ); + + forSuite = this._parseLoopSuite(); + + // Versions of Python earlier than 3.9 didn't allow unpack operators if the + // tuple wasn't enclosed in parentheses. 
+ if ( + PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_9) && + !this._parseOptions.isStubFile + ) { + if (seqExpr.nodeType === ParseNodeType.Tuple && !seqExpr.d.hasParens) { + let sawStar = false; + seqExpr.d.items.forEach((expr) => { + if (expr.nodeType === ParseNodeType.Unpack && !sawStar) { + this._addSyntaxError(LocMessage.unpackOperatorNotAllowed(), expr); + sawStar = true; + } + }); + } + } + + if (this._consumeTokenIfKeyword(KeywordType.Else)) { + elseSuite = this._parseSuite(this._isInFunction); + } + } + + const forNode = ForNode.create(forToken, targetExpr, seqExpr, forSuite); + forNode.d.elseSuite = elseSuite; + if (elseSuite) { + extendRange(forNode, elseSuite); + elseSuite.parent = forNode; + } + + if (asyncToken) { + forNode.d.isAsync = true; + forNode.d.asyncToken = asyncToken; + extendRange(forNode, asyncToken); + } + + if (forSuite.d.typeComment) { + forNode.d.typeComment = forSuite.d.typeComment; + } + + return forNode; + } + + // comp_iter: comp_for | comp_if + private _tryParseComprehension(target: ParseNode, isGenerator: boolean): ComprehensionNode | undefined { + const compFor = this._tryParseCompForStatement(); + + if (!compFor) { + return undefined; + } + + if (target.nodeType === ParseNodeType.Unpack) { + this._addSyntaxError(LocMessage.unpackIllegalInComprehension(), target); + } else if (target.nodeType === ParseNodeType.DictionaryExpandEntry) { + this._addSyntaxError(LocMessage.dictExpandIllegalInComprehension(), target); + } + + const compNode = ComprehensionNode.create(target, isGenerator); + + const forIfList: ComprehensionForIfNode[] = [compFor]; + while (true) { + const compIter = this._tryParseCompForStatement() || this._tryParseCompIfStatement(); + if (!compIter) { + break; + } + compIter.parent = compNode; + forIfList.push(compIter); + } + + compNode.d.forIfNodes = forIfList; + if (forIfList.length > 0) { + forIfList.forEach((comp) => { + comp.parent = compNode; + }); + extendRange(compNode, 
forIfList[forIfList.length - 1]); + } + return compNode; + } + + // comp_for: ['async'] 'for' exprlist 'in' or_test [comp_iter] + private _tryParseCompForStatement(): ComprehensionForNode | undefined { + const startTokenKeywordType = this._peekKeywordType(); + + if (startTokenKeywordType === KeywordType.Async) { + const nextToken = this._peekToken(1) as KeywordToken; + if (nextToken.type !== TokenType.Keyword || nextToken.keywordType !== KeywordType.For) { + return undefined; + } + } else if (startTokenKeywordType !== KeywordType.For) { + return undefined; + } + + let asyncToken: KeywordToken | undefined; + if (this._peekKeywordType() === KeywordType.Async) { + asyncToken = this._getKeywordToken(KeywordType.Async); + } + + const forToken = this._getKeywordToken(KeywordType.For); + + const targetExpr = this._parseExpressionListAsPossibleTuple( + ErrorExpressionCategory.MissingExpression, + () => LocMessage.expectedExpr(), + forToken + ); + let seqExpr: ExpressionNode | undefined; + + if (!this._consumeTokenIfKeyword(KeywordType.In)) { + seqExpr = this._handleExpressionParseError(ErrorExpressionCategory.MissingIn, LocMessage.expectedIn()); + } else { + this._disallowAssignmentExpression(() => { + seqExpr = this._parseOrTest(); + }); + } + + const compForNode = ComprehensionForNode.create(asyncToken || forToken, targetExpr, seqExpr!); + + if (asyncToken) { + compForNode.d.isAsync = true; + compForNode.d.asyncToken = asyncToken; + } + + return compForNode; + } + + // comp_if: 'if' test_nocond [comp_iter] + // comp_iter: comp_for | comp_if + private _tryParseCompIfStatement(): ComprehensionIfNode | undefined { + if (this._peekKeywordType() !== KeywordType.If) { + return undefined; + } + + const ifToken = this._getKeywordToken(KeywordType.If); + const ifExpr = + this._tryParseLambdaExpression() || + this._parseAssignmentExpression(/* disallowAssignmentExpression */ true); + + const compIfNode = ComprehensionIfNode.create(ifToken, ifExpr); + + return compIfNode; + } + + 
// while_stmt: 'while' test suite ['else' suite] + private _parseWhileStatement(): WhileNode { + const whileToken = this._getKeywordToken(KeywordType.While); + + const whileNode = WhileNode.create( + whileToken, + this._parseTestExpression(/* allowAssignmentExpression */ true), + this._parseLoopSuite() + ); + + if (this._consumeTokenIfKeyword(KeywordType.Else)) { + whileNode.d.elseSuite = this._parseSuite(this._isInFunction); + whileNode.d.elseSuite.parent = whileNode; + extendRange(whileNode, whileNode.d.elseSuite); + } + + return whileNode; + } + + // try_stmt: ('try' suite + // ((except_clause suite)+ + // ['else' suite] + // ['finally' suite] | + // 'finally' suite)) + // except_clause: 'except' [test ['as' NAME]] + private _parseTryStatement(): TryNode { + const tryToken = this._getKeywordToken(KeywordType.Try); + const trySuite = this._parseSuite(this._isInFunction); + const tryNode = TryNode.create(tryToken, trySuite); + let sawCatchAllExcept = false; + let reportedExceptGroupMismatch = false; + + while (true) { + const exceptToken = this._peekToken(); + if (!this._consumeTokenIfKeyword(KeywordType.Except)) { + break; + } + + // See if this is a Python 3.11 exception group. 
+ const possibleStarToken = this._peekToken(); + let isExceptGroup = false; + if (this._consumeTokenIfOperator(OperatorType.Multiply)) { + if ( + PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_11) && + !this._parseOptions.isStubFile + ) { + this._addSyntaxError(LocMessage.exceptionGroupIncompatible(), possibleStarToken); + } + + isExceptGroup = true; + + if (!reportedExceptGroupMismatch && tryNode.d.exceptClauses.some((clause) => !clause.d.isExceptGroup)) { + this._addSyntaxError(LocMessage.exceptGroupMismatch(), possibleStarToken); + reportedExceptGroupMismatch = true; + } + } else { + if (!reportedExceptGroupMismatch && tryNode.d.exceptClauses.some((clause) => clause.d.isExceptGroup)) { + this._addSyntaxError(LocMessage.exceptGroupMismatch(), possibleStarToken); + reportedExceptGroupMismatch = true; + } + } + + let typeExpr: ExpressionNode | undefined; + let symbolName: IdentifierToken | undefined; + let isAsKeywordAllowed = true; + + if (this._peekTokenType() !== TokenType.Colon) { + const listResult = this._parseExpressionListGeneric(() => + this._parseTestExpression(/* allowAssignmentExpression */ true) + ); + if (listResult.parseError) { + typeExpr = listResult.parseError; + } else { + typeExpr = this._makeExpressionOrTuple(listResult, /* enclosedInParens */ false); + + // Python 3.14 allows more than one exception type to be provided in + // an except clause. 
+ if (listResult.list.length > 1) { + if (PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_14)) { + this._addSyntaxError(LocMessage.exceptRequiresParens(), typeExpr); + } + + isAsKeywordAllowed = false; + } + } + + if (this._consumeTokenIfKeyword(KeywordType.As)) { + if (!isAsKeywordAllowed) { + this._addSyntaxError(LocMessage.exceptWithAsRequiresParens(), typeExpr); + } + + symbolName = this._getTokenIfIdentifier(); + if (!symbolName) { + this._addSyntaxError(LocMessage.expectedNameAfterAs(), this._peekToken()); + } + } + } else if (isExceptGroup) { + this._addSyntaxError(LocMessage.exceptGroupRequiresType(), this._peekToken()); + } + + if (!typeExpr) { + if (sawCatchAllExcept) { + this._addSyntaxError(LocMessage.duplicateCatchAll(), exceptToken); + } + sawCatchAllExcept = true; + } else { + if (sawCatchAllExcept) { + this._addSyntaxError(LocMessage.namedExceptAfterCatchAll(), typeExpr); + } + } + + const exceptSuite = this._parseExceptSuite(isExceptGroup, () => this._parseSuite(this._isInFunction)); + const exceptNode = ExceptNode.create(exceptToken, exceptSuite, isExceptGroup); + if (typeExpr) { + exceptNode.d.typeExpr = typeExpr; + exceptNode.d.typeExpr.parent = exceptNode; + } + + if (symbolName) { + exceptNode.d.name = NameNode.create(symbolName); + exceptNode.d.name.parent = exceptNode; + } + + tryNode.d.exceptClauses.push(exceptNode); + exceptNode.parent = tryNode; + } + + if (tryNode.d.exceptClauses.length > 0) { + extendRange(tryNode, tryNode.d.exceptClauses[tryNode.d.exceptClauses.length - 1]); + + if (this._consumeTokenIfKeyword(KeywordType.Else)) { + tryNode.d.elseSuite = this._parseSuite(this._isInFunction); + tryNode.d.elseSuite.parent = tryNode; + extendRange(tryNode, tryNode.d.elseSuite); + } + } + + if (this._consumeTokenIfKeyword(KeywordType.Finally)) { + const wasInFinallyBlock = this._isInFinallyBlock; + const wasInFinallyLoop = this._isInFinallyLoop; + this._isInFinallyBlock = true; + this._isInFinallyLoop = this._isInLoop; 
+ + tryNode.d.finallySuite = this._parseSuite(this._isInFunction); + + this._isInFinallyBlock = wasInFinallyBlock; + this._isInFinallyLoop = wasInFinallyLoop; + + tryNode.d.finallySuite.parent = tryNode; + extendRange(tryNode, tryNode.d.finallySuite); + } + + if (!tryNode.d.finallySuite && tryNode.d.exceptClauses.length === 0) { + this._addSyntaxError(LocMessage.tryWithoutExcept(), tryToken); + } + + return tryNode; + } + + // funcdef: 'def' NAME parameters ['->' test] ':' suite + // parameters: '(' [typedargslist] ')' + private _parseFunctionDef(asyncToken?: KeywordToken, decorators?: DecoratorNode[]): FunctionNode | ErrorNode { + const defToken = this._getKeywordToken(KeywordType.Def); + + const nameToken = this._getTokenIfIdentifier(); + if (!nameToken) { + this._addSyntaxError(LocMessage.expectedFunctionName(), defToken); + return ErrorNode.create( + defToken, + ErrorExpressionCategory.MissingFunctionParameterList, + undefined, + decorators + ); + } + + let typeParameters: TypeParameterListNode | undefined; + const possibleOpenBracket = this._peekToken(); + if (possibleOpenBracket.type === TokenType.OpenBracket) { + typeParameters = this._parseTypeParameterList(); + + if ( + !this._parseOptions.isStubFile && + PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_12) + ) { + this._addSyntaxError(LocMessage.functionTypeParametersIllegal(), typeParameters); + } + } + const openParenToken = this._peekToken(); + if (!this._consumeTokenIfType(TokenType.OpenParenthesis)) { + this._addSyntaxError(LocMessage.expectedOpenParen(), this._peekToken()); + return ErrorNode.create( + nameToken, + ErrorExpressionCategory.MissingFunctionParameterList, + NameNode.create(nameToken), + decorators + ); + } + + const paramList = this._parseVarArgsList(TokenType.CloseParenthesis, /* allowAnnotations */ true); + + if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { + this._addSyntaxError(LocMessage.expectedCloseParen(), openParenToken); + 
this._consumeTokensUntilType([TokenType.Colon]); + } + + let returnType: ExpressionNode | undefined; + if (this._consumeTokenIfType(TokenType.Arrow)) { + returnType = this._parseTypeAnnotation(); + } + + let functionTypeAnnotationToken: StringToken | undefined; + const wasInExceptionGroup = this._isInExceptionGroup; + this._isInExceptionGroup = false; + + const wasInFinallyBlock = this._isInFinallyBlock; + const wasInFinallyLoop = this._isInFinallyLoop; + this._isInFinallyBlock = false; + this._isInFinallyLoop = false; + + const suite = this._parseSuite(/* isFunction */ true, this._parseOptions.skipFunctionAndClassBody, () => { + if (!functionTypeAnnotationToken) { + functionTypeAnnotationToken = this._getTypeAnnotationCommentText(); + } + }); + + this._isInExceptionGroup = wasInExceptionGroup; + this._isInFinallyBlock = wasInFinallyBlock; + this._isInFinallyLoop = wasInFinallyLoop; + + const functionNode = FunctionNode.create(defToken, NameNode.create(nameToken), suite, typeParameters); + if (asyncToken) { + functionNode.d.isAsync = true; + extendRange(functionNode, asyncToken); + } + + functionNode.d.params = paramList; + paramList.forEach((param) => { + param.parent = functionNode; + }); + + if (decorators) { + functionNode.d.decorators = decorators; + decorators.forEach((decorator) => { + decorator.parent = functionNode; + }); + + if (decorators.length > 0) { + extendRange(functionNode, decorators[0]); + } + } + + if (returnType) { + functionNode.d.returnAnnotation = returnType; + functionNode.d.returnAnnotation.parent = functionNode; + extendRange(functionNode, returnType); + } + + // If there was a type annotation comment for the function, + // parse it now. 
+ if (functionTypeAnnotationToken) { + this._parseFunctionTypeAnnotationComment(functionTypeAnnotationToken, functionNode); + } + + return functionNode; + } + + // typedargslist: ( + // tfpdef ['=' test] (',' tfpdef ['=' test])* + // [ ',' + // [ + // '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + // | '**' tfpdef [','] + // ] + // ] + // | '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + // | '**' tfpdef [',']) + // tfpdef: NAME [':' test] + // vfpdef: NAME; + private _parseVarArgsList(terminator: TokenType, allowAnnotations: boolean): ParameterNode[] { + const paramMap = new Map(); + const paramList: ParameterNode[] = []; + let sawDefaultParam = false; + let reportedNonDefaultParamErr = false; + let sawKeywordOnlySeparator = false; + let sawPositionOnlySeparator = false; + let sawKeywordOnlyParamAfterSeparator = false; + let sawArgs = false; + let sawKwArgs = false; + + while (true) { + if (this._peekTokenType() === terminator) { + break; + } + + const param = this._parseParameter(allowAnnotations); + if (!param) { + this._consumeTokensUntilType([terminator]); + break; + } + + if (param.d.name) { + const name = param.d.name.d.value; + if (paramMap.has(name)) { + this._addSyntaxError(LocMessage.duplicateParam().format({ name }), param.d.name); + } else { + paramMap.set(name, name); + } + } else if (param.d.category === ParamCategory.Simple) { + if (paramList.length === 0) { + this._addSyntaxError(LocMessage.positionOnlyFirstParam(), param); + } + } + + if (param.d.category === ParamCategory.Simple) { + if (!param.d.name) { + if (sawPositionOnlySeparator) { + this._addSyntaxError(LocMessage.duplicatePositionOnly(), param); + } else if (sawKeywordOnlySeparator) { + this._addSyntaxError(LocMessage.positionOnlyAfterKeywordOnly(), param); + } else if (sawArgs) { + this._addSyntaxError(LocMessage.positionOnlyAfterArgs(), param); + } + sawPositionOnlySeparator = true; + } else { + if (sawKeywordOnlySeparator) { + 
sawKeywordOnlyParamAfterSeparator = true; + } + + if (param.d.defaultValue) { + sawDefaultParam = true; + } else if (sawDefaultParam && !sawKeywordOnlySeparator && !sawArgs) { + // Report this error only once. + if (!reportedNonDefaultParamErr) { + this._addSyntaxError(LocMessage.nonDefaultAfterDefault(), param); + reportedNonDefaultParamErr = true; + } + } + } + } + + paramList.push(param); + + if (param.d.category === ParamCategory.ArgsList) { + if (!param.d.name) { + if (sawKeywordOnlySeparator) { + this._addSyntaxError(LocMessage.duplicateKeywordOnly(), param); + } else if (sawArgs) { + this._addSyntaxError(LocMessage.keywordOnlyAfterArgs(), param); + } + sawKeywordOnlySeparator = true; + } else { + if (sawKeywordOnlySeparator || sawArgs) { + this._addSyntaxError(LocMessage.duplicateArgsParam(), param); + } + sawArgs = true; + } + } + + if (param.d.category === ParamCategory.KwargsDict) { + if (sawKwArgs) { + this._addSyntaxError(LocMessage.duplicateKwargsParam(), param); + } + sawKwArgs = true; + + // A **kwargs cannot immediately follow a keyword-only separator ("*"). + if (sawKeywordOnlySeparator && !sawKeywordOnlyParamAfterSeparator) { + this._addSyntaxError(LocMessage.keywordParameterMissing(), param); + } + } else if (sawKwArgs) { + this._addSyntaxError(LocMessage.paramAfterKwargsParam(), param); + } + + const foundComma = this._consumeTokenIfType(TokenType.Comma); + + if (allowAnnotations && !param.d.annotation) { + // Look for a type annotation comment at the end of the line. 
+ const typeAnnotationComment = this._parseVariableTypeAnnotationComment(); + if (typeAnnotationComment) { + param.d.annotationComment = typeAnnotationComment; + param.d.annotationComment.parent = param; + extendRange(param, param.d.annotationComment); + } + } + + if (!foundComma) { + break; + } + } + + if (paramList.length > 0) { + const lastParam = paramList[paramList.length - 1]; + if (lastParam.d.category === ParamCategory.ArgsList && !lastParam.d.name) { + this._addSyntaxError(LocMessage.expectedNamedParameter(), lastParam); + } + } + + return paramList; + } + + private _parseParameter(allowAnnotations: boolean): ParameterNode { + let starCount = 0; + let slashCount = 0; + const firstToken = this._peekToken(); + + if (this._consumeTokenIfOperator(OperatorType.Multiply)) { + starCount = 1; + } else if (this._consumeTokenIfOperator(OperatorType.Power)) { + starCount = 2; + } else if (this._consumeTokenIfOperator(OperatorType.Divide)) { + if ( + PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_8) && + !this._parseOptions.isStubFile + ) { + this._addSyntaxError(LocMessage.positionOnlyIncompatible(), firstToken); + } + slashCount = 1; + } + + const paramName = this._getTokenIfIdentifier(); + if (!paramName) { + if (starCount === 1) { + const paramNode = ParameterNode.create(firstToken, ParamCategory.ArgsList); + return paramNode; + } else if (slashCount === 1) { + const paramNode = ParameterNode.create(firstToken, ParamCategory.Simple); + return paramNode; + } + + // Check for the Python 2.x parameter sublist syntax and handle it gracefully. 
+ if (this._peekTokenType() === TokenType.OpenParenthesis) { + const sublistStart = this._getNextToken(); + if (this._consumeTokensUntilType([TokenType.CloseParenthesis])) { + this._getNextToken(); + } + this._addSyntaxError(LocMessage.sublistParamsIncompatible(), sublistStart); + } else { + this._addSyntaxError(LocMessage.expectedParamName(), this._peekToken()); + } + } + + let paramType = ParamCategory.Simple; + if (starCount === 1) { + paramType = ParamCategory.ArgsList; + } else if (starCount === 2) { + paramType = ParamCategory.KwargsDict; + } + const paramNode = ParameterNode.create(firstToken, paramType); + if (paramName) { + paramNode.d.name = NameNode.create(paramName); + paramNode.d.name.parent = paramNode; + extendRange(paramNode, paramName); + } + + if (allowAnnotations && this._consumeTokenIfType(TokenType.Colon)) { + paramNode.d.annotation = this._parseTypeAnnotation(paramType === ParamCategory.ArgsList); + paramNode.d.annotation.parent = paramNode; + extendRange(paramNode, paramNode.d.annotation); + } + + if (this._consumeTokenIfOperator(OperatorType.Assign)) { + paramNode.d.defaultValue = this._parseTestExpression(/* allowAssignmentExpression */ false); + paramNode.d.defaultValue.parent = paramNode; + extendRange(paramNode, paramNode.d.defaultValue); + + if (starCount > 0) { + this._addSyntaxError(LocMessage.defaultValueNotAllowed(), paramNode.d.defaultValue); + } + } + + return paramNode; + } + + // with_stmt: 'with' with_item (',' with_item)* ':' suite + // Python 3.10 adds support for optional parentheses around + // with_item list. + private _parseWithStatement(asyncToken?: KeywordToken): WithNode { + const withToken = this._getKeywordToken(KeywordType.With); + let withItemList: WithItemNode[] = []; + + const possibleParen = this._peekToken(); + + // If the expression starts with a paren, parse it as though the + // paren is enclosing the list of "with items". 
This is done as a + // "dry run" to determine whether the entire list of "with items" + // is enclosed in parentheses. + let isParenthesizedWithItemList = false; + let isParenthesizedDisallowed = false; + + if (possibleParen.type === TokenType.OpenParenthesis) { + const openParenTokenIndex = this._tokenIndex; + + this._suppressErrors(() => { + this._getNextToken(); + while (true) { + withItemList.push(this._parseWithItem()); + if (!this._consumeTokenIfType(TokenType.Comma)) { + break; + } + + if (this._peekToken().type === TokenType.CloseParenthesis) { + break; + } + } + + if ( + this._peekToken().type === TokenType.CloseParenthesis && + this._peekToken(1).type === TokenType.Colon + ) { + isParenthesizedWithItemList = true; + + // Some forms of parenthesized context with statements were not + // allowed prior to Python 3.9. Is this such a form? + isParenthesizedDisallowed = withItemList.length !== 1 || withItemList[0].d.target !== undefined; + } + + this._tokenIndex = openParenTokenIndex; + withItemList = []; + }); + } + + if (isParenthesizedWithItemList) { + this._consumeTokenIfType(TokenType.OpenParenthesis); + if (isParenthesizedDisallowed && PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_9)) { + this._addSyntaxError(LocMessage.parenthesizedContextManagerIllegal(), possibleParen); + } + } + + while (true) { + withItemList.push(this._parseWithItem()); + + if (!this._consumeTokenIfType(TokenType.Comma)) { + break; + } + + if (this._peekToken().type === TokenType.CloseParenthesis) { + break; + } + } + + if (isParenthesizedWithItemList) { + if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { + this._addSyntaxError(LocMessage.expectedCloseParen(), possibleParen); + } + } + + let typeComment: StringToken | undefined; + const withSuite = this._parseSuite(this._isInFunction, /* skipBody */ false, () => { + const comment = this._getTypeAnnotationCommentText(); + if (comment) { + typeComment = comment; + } + }); + const withNode = 
WithNode.create(withToken, withSuite); + if (asyncToken) { + withNode.d.isAsync = true; + withNode.d.asyncToken = asyncToken; + extendRange(withNode, asyncToken); + } + + if (typeComment) { + withNode.d.typeComment = typeComment; + } + + withNode.d.withItems = withItemList; + withItemList.forEach((withItem) => { + withItem.parent = withNode; + }); + + return withNode; + } + + // with_item: test ['as' expr] + private _parseWithItem(): WithItemNode { + const expr = this._parseTestExpression(/* allowAssignmentExpression */ true); + const itemNode = WithItemNode.create(expr); + + if (this._consumeTokenIfKeyword(KeywordType.As)) { + itemNode.d.target = this._parseExpression(/* allowUnpack */ false); + itemNode.d.target.parent = itemNode; + extendRange(itemNode, itemNode.d.target); + } + + return itemNode; + } + + // decorators: decorator+ + // decorated: decorators (classdef | funcdef | async_funcdef) + private _parseDecorated(): StatementNode | undefined { + const decoratorList: DecoratorNode[] = []; + + while (true) { + if (this._peekOperatorType() === OperatorType.MatrixMultiply) { + decoratorList.push(this._parseDecorator()); + } else { + break; + } + } + + const nextToken = this._peekToken() as KeywordToken; + if (nextToken.type === TokenType.Keyword) { + if (nextToken.keywordType === KeywordType.Async) { + this._getNextToken(); + + if (this._peekKeywordType() !== KeywordType.Def) { + this._addSyntaxError(LocMessage.expectedFunctionAfterAsync(), this._peekToken()); + } else { + return this._parseFunctionDef(nextToken, decoratorList); + } + } else if (nextToken.keywordType === KeywordType.Def) { + return this._parseFunctionDef(undefined, decoratorList); + } else if (nextToken.keywordType === KeywordType.Class) { + return this._parseClassDef(decoratorList); + } + } + + this._addSyntaxError(LocMessage.expectedAfterDecorator(), this._peekToken()); + + // Return a dummy class declaration so the completion provider has + // some parse nodes to work with. 
+ return ClassNode.createDummyForDecorators(decoratorList); + } + + // decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE + private _parseDecorator(): DecoratorNode { + const atOperator = this._getNextToken() as OperatorToken; + assert(atOperator.operatorType === OperatorType.MatrixMultiply); + + const expression = this._parseTestExpression(/* allowAssignmentExpression */ true); + + // Versions of Python prior to 3.9 support a limited set of + // expression forms. + if (PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_9)) { + let isSupportedExpressionForm = false; + if (this._isNameOrMemberAccessExpression(expression)) { + isSupportedExpressionForm = true; + } else if ( + expression.nodeType === ParseNodeType.Call && + this._isNameOrMemberAccessExpression(expression.d.leftExpr) + ) { + isSupportedExpressionForm = true; + } + + if (!isSupportedExpressionForm) { + this._addSyntaxError(LocMessage.expectedDecoratorExpr(), expression); + } + } + + const decoratorNode = DecoratorNode.create(atOperator, expression); + + if (!this._consumeTokenIfType(TokenType.NewLine)) { + this._addSyntaxError(LocMessage.expectedDecoratorNewline(), this._peekToken()); + this._consumeTokensUntilType([TokenType.NewLine]); + } + + return decoratorNode; + } + + private _isNameOrMemberAccessExpression(expression: ExpressionNode): boolean { + if (expression.nodeType === ParseNodeType.Name) { + return true; + } else if (expression.nodeType === ParseNodeType.MemberAccess) { + return this._isNameOrMemberAccessExpression(expression.d.leftExpr); + } + + return false; + } + + // classdef: 'class' NAME ['(' [arglist] ')'] suite + private _parseClassDef(decorators?: DecoratorNode[]): ClassNode { + const classToken = this._getKeywordToken(KeywordType.Class); + + let nameToken = this._getTokenIfIdentifier(); + if (!nameToken) { + this._addSyntaxError(LocMessage.expectedClassName(), this._peekToken()); + nameToken = IdentifierToken.create(0, 0, '', /* comments */ undefined); + } + 
+ let typeParameters: TypeParameterListNode | undefined; + const possibleOpenBracket = this._peekToken(); + if (possibleOpenBracket.type === TokenType.OpenBracket) { + typeParameters = this._parseTypeParameterList(); + + if ( + !this._parseOptions.isStubFile && + PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_12) + ) { + this._addSyntaxError(LocMessage.classTypeParametersIllegal(), typeParameters); + } + } + + let argList: ArgumentNode[] = []; + const openParenToken = this._peekToken(); + if (this._consumeTokenIfType(TokenType.OpenParenthesis)) { + argList = this._parseArgList().args; + + if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { + this._addSyntaxError(LocMessage.expectedCloseParen(), openParenToken); + } + } + + const suite = this._parseSuite(/* isFunction */ false, this._parseOptions.skipFunctionAndClassBody); + + const classNode = ClassNode.create(classToken, NameNode.create(nameToken), suite, typeParameters); + classNode.d.arguments = argList; + argList.forEach((arg) => { + arg.parent = classNode; + }); + + if (decorators) { + classNode.d.decorators = decorators; + if (decorators.length > 0) { + decorators.forEach((decorator) => { + decorator.parent = classNode; + }); + extendRange(classNode, decorators[0]); + } + } + + return classNode; + } + + private _parsePassStatement(): PassNode { + return PassNode.create(this._getKeywordToken(KeywordType.Pass)); + } + + private _parseBreakStatement(): BreakNode { + const breakToken = this._getKeywordToken(KeywordType.Break); + + if (!this._isInLoop) { + this._addSyntaxError(LocMessage.breakOutsideLoop(), breakToken); + } else if (this._isInExceptionGroup) { + this._addSyntaxError(LocMessage.breakInExceptionGroup(), breakToken); + } + + if (this._isInFinallyLoop && PythonVersion.isGreaterOrEqualTo(this._getLanguageVersion(), pythonVersion3_14)) { + this._addSyntaxError(LocMessage.finallyBreak(), breakToken); + } + + return BreakNode.create(breakToken); + } + + private 
_parseContinueStatement(): ContinueNode { + const continueToken = this._getKeywordToken(KeywordType.Continue); + + if (!this._isInLoop) { + this._addSyntaxError(LocMessage.continueOutsideLoop(), continueToken); + } else if (this._isInExceptionGroup) { + this._addSyntaxError(LocMessage.continueInExceptionGroup(), continueToken); + } + + if (this._isInFinallyLoop && PythonVersion.isGreaterOrEqualTo(this._getLanguageVersion(), pythonVersion3_14)) { + this._addSyntaxError(LocMessage.finallyContinue(), continueToken); + } + + return ContinueNode.create(continueToken); + } + + // return_stmt: 'return' [testlist] + private _parseReturnStatement(): ReturnNode { + const returnToken = this._getKeywordToken(KeywordType.Return); + + const returnNode = ReturnNode.create(returnToken); + + if (!this._isInFunction) { + this._addSyntaxError(LocMessage.returnOutsideFunction(), returnToken); + } else if (this._isInExceptionGroup) { + this._addSyntaxError(LocMessage.returnInExceptionGroup(), returnToken); + } + + if (this._isInFinallyBlock && PythonVersion.isGreaterOrEqualTo(this._getLanguageVersion(), pythonVersion3_14)) { + this._addSyntaxError(LocMessage.finallyReturn(), returnToken); + } + + if (!this._isNextTokenNeverExpression()) { + const returnExpr = this._parseTestOrStarListAsExpression( + /* allowAssignmentExpression */ true, + /* allowMultipleUnpack */ true, + ErrorExpressionCategory.MissingExpression, + () => LocMessage.expectedReturnExpr() + ); + this._reportConditionalErrorForStarTupleElement(returnExpr); + returnNode.d.expr = returnExpr; + returnNode.d.expr.parent = returnNode; + extendRange(returnNode, returnExpr); + } + + return returnNode; + } + + // import_from: ('from' (('.' | '...')* dotted_name | ('.' 
| '...')+) + // 'import' ('*' | '(' import_as_names ')' | import_as_names)) + // import_as_names: import_as_name (',' import_as_name)* [','] + // import_as_name: NAME ['as' NAME] + private _parseFromStatement(): ImportFromNode { + const fromToken = this._getKeywordToken(KeywordType.From); + + const modName = this._parseDottedModuleName(/* allowJustDots */ true); + const importFromNode = ImportFromNode.create(fromToken, modName); + + // Handle imports from __future__ specially because they can + // change the way we interpret the rest of the file. + const isFutureImport = + modName.d.leadingDots === 0 && + modName.d.nameParts.length === 1 && + modName.d.nameParts[0].d.value === '__future__'; + + const possibleInputToken = this._peekToken(); + if (!this._consumeTokenIfKeyword(KeywordType.Import)) { + this._addSyntaxError(LocMessage.expectedImport(), this._peekToken()); + if (!modName.d.hasTrailingDot) { + importFromNode.d.missingImport = true; + } + } else { + extendRange(importFromNode, possibleInputToken); + + // Look for "*" token. 
+ const possibleStarToken = this._peekToken(); + if (this._consumeTokenIfOperator(OperatorType.Multiply)) { + extendRange(importFromNode, possibleStarToken); + importFromNode.d.isWildcardImport = true; + importFromNode.d.wildcardToken = possibleStarToken; + this._containsWildcardImport = true; + } else { + const openParenToken = this._peekToken(); + const inParen = this._consumeTokenIfType(TokenType.OpenParenthesis); + let trailingCommaToken: Token | undefined; + + while (true) { + const importName = this._getTokenIfIdentifier(); + if (!importName) { + break; + } + + trailingCommaToken = undefined; + + const importFromAsNode = ImportFromAsNode.create(NameNode.create(importName)); + + if (this._consumeTokenIfKeyword(KeywordType.As)) { + const aliasName = this._getTokenIfIdentifier(); + if (!aliasName) { + this._addSyntaxError(LocMessage.expectedImportAlias(), this._peekToken()); + } else { + importFromAsNode.d.alias = NameNode.create(aliasName); + importFromAsNode.d.alias.parent = importFromAsNode; + extendRange(importFromAsNode, aliasName); + } + } + + importFromNode.d.imports.push(importFromAsNode); + importFromAsNode.parent = importFromNode; + extendRange(importFromNode, importFromAsNode); + + if (isFutureImport) { + // Add the future import by name. 
+ this._futureImports.add(importName.value); + } + + const nextToken = this._peekToken(); + if (!this._consumeTokenIfType(TokenType.Comma)) { + break; + } + trailingCommaToken = nextToken; + } + + if (importFromNode.d.imports.length === 0) { + this._addSyntaxError(LocMessage.expectedImportSymbols(), this._peekToken()); + } + + if (inParen) { + importFromNode.d.usesParens = true; + + const nextToken = this._peekToken(); + if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { + this._addSyntaxError(LocMessage.expectedCloseParen(), openParenToken); + } else { + extendRange(importFromNode, nextToken); + } + } else if (trailingCommaToken) { + this._addSyntaxError(LocMessage.trailingCommaInFromImport(), trailingCommaToken); + } + } + } + + this._importedModules.push({ + nameNode: importFromNode.d.module, + leadingDots: importFromNode.d.module.d.leadingDots, + nameParts: importFromNode.d.module.d.nameParts.map((p) => p.d.value), + importedSymbols: new Set(importFromNode.d.imports.map((imp) => imp.d.name.d.value)), + }); + + let isTypingImport = false; + if (importFromNode.d.module.d.nameParts.length === 1) { + const firstNamePartValue = importFromNode.d.module.d.nameParts[0].d.value; + if (firstNamePartValue === 'typing' || firstNamePartValue === 'typing_extensions') { + isTypingImport = true; + } + } + + if (isTypingImport) { + const typingSymbolsOfInterest = ['Literal', 'TypeAlias', 'Annotated']; + + if (importFromNode.d.isWildcardImport) { + typingSymbolsOfInterest.forEach((s) => { + this._typingSymbolAliases.set(s, s); + }); + } else { + importFromNode.d.imports.forEach((imp) => { + if (typingSymbolsOfInterest.some((s) => s === imp.d.name.d.value)) { + this._typingSymbolAliases.set(imp.d.alias?.d.value || imp.d.name.d.value, imp.d.name.d.value); + } + }); + } + } + + return importFromNode; + } + + // import_name: 'import' dotted_as_names + // dotted_as_names: dotted_as_name (',' dotted_as_name)* + // dotted_as_name: dotted_name ['as' NAME] + private 
_parseImportStatement(): ImportNode { + const importToken = this._getKeywordToken(KeywordType.Import); + + const importNode = ImportNode.create(importToken); + + while (true) { + const modName = this._parseDottedModuleName(); + + const importAsNode = ImportAsNode.create(modName); + + if (this._consumeTokenIfKeyword(KeywordType.As)) { + const aliasToken = this._getTokenIfIdentifier(); + if (aliasToken) { + importAsNode.d.alias = NameNode.create(aliasToken); + importAsNode.d.alias.parent = importAsNode; + extendRange(importAsNode, importAsNode.d.alias); + } else { + this._addSyntaxError(LocMessage.expectedImportAlias(), this._peekToken()); + } + } + + if (importAsNode.d.module.d.leadingDots > 0) { + this._addSyntaxError(LocMessage.relativeImportNotAllowed(), importAsNode.d.module); + } + + importNode.d.list.push(importAsNode); + importAsNode.parent = importNode; + + const nameParts = importAsNode.d.module.d.nameParts.map((p) => p.d.value); + + if ( + importAsNode.d.alias || + importAsNode.d.module.d.leadingDots > 0 || + importAsNode.d.module.d.nameParts.length === 0 + ) { + this._importedModules.push({ + nameNode: importAsNode.d.module, + leadingDots: importAsNode.d.module.d.leadingDots, + nameParts, + importedSymbols: undefined, + }); + } else { + // Implicitly import all modules in the multi-part name if we + // are not assigning the final module to an alias. 
+ importAsNode.d.module.d.nameParts.forEach((_, index) => { + this._importedModules.push({ + nameNode: importAsNode.d.module, + leadingDots: importAsNode.d.module.d.leadingDots, + nameParts: nameParts.slice(0, index + 1), + importedSymbols: undefined, + }); + }); + } + + if (modName.d.nameParts.length === 1) { + const firstNamePartValue = modName.d.nameParts[0].d.value; + if (firstNamePartValue === 'typing' || firstNamePartValue === 'typing_extensions') { + this._typingImportAliases.push(importAsNode.d.alias?.d.value || firstNamePartValue); + } + } + + if (!this._consumeTokenIfType(TokenType.Comma)) { + break; + } + } + + if (importNode.d.list.length > 0) { + extendRange(importNode, importNode.d.list[importNode.d.list.length - 1]); + } + + return importNode; + } + + // ('.' | '...')* dotted_name | ('.' | '...')+ + // dotted_name: NAME ('.' NAME)* + private _parseDottedModuleName(allowJustDots = false): ModuleNameNode { + const moduleNameNode = ModuleNameNode.create(this._peekToken()); + + while (true) { + const token = this._getTokenIfType(TokenType.Ellipsis) ?? this._getTokenIfType(TokenType.Dot); + if (token) { + if (token.type === TokenType.Ellipsis) { + moduleNameNode.d.leadingDots += 3; + } else { + moduleNameNode.d.leadingDots++; + } + + extendRange(moduleNameNode, token); + } else { + break; + } + } + + while (true) { + const identifier = this._getTokenIfIdentifier(); + if (!identifier) { + if (!allowJustDots || moduleNameNode.d.leadingDots === 0 || moduleNameNode.d.nameParts.length > 0) { + this._addSyntaxError(LocMessage.expectedModuleName(), this._peekToken()); + moduleNameNode.d.hasTrailingDot = true; + } + break; + } + + const namePart = NameNode.create(identifier); + moduleNameNode.d.nameParts.push(namePart); + namePart.parent = moduleNameNode; + extendRange(moduleNameNode, namePart); + + const nextToken = this._peekToken(); + if (!this._consumeTokenIfType(TokenType.Dot)) { + break; + } + + // Extend the module name to include the dot. 
+ extendRange(moduleNameNode, nextToken); + } + + return moduleNameNode; + } + + private _parseGlobalStatement(): GlobalNode { + const globalToken = this._getKeywordToken(KeywordType.Global); + + const globalNode = GlobalNode.create(globalToken); + globalNode.d.targets = this._parseNameList(); + if (globalNode.d.targets.length > 0) { + globalNode.d.targets.forEach((name) => { + name.parent = globalNode; + }); + extendRange(globalNode, globalNode.d.targets[globalNode.d.targets.length - 1]); + } + return globalNode; + } + + private _parseNonlocalStatement(): NonlocalNode { + const nonlocalToken = this._getKeywordToken(KeywordType.Nonlocal); + + const nonlocalNode = NonlocalNode.create(nonlocalToken); + nonlocalNode.d.targets = this._parseNameList(); + if (nonlocalNode.d.targets.length > 0) { + nonlocalNode.d.targets.forEach((name) => { + name.parent = nonlocalNode; + }); + extendRange(nonlocalNode, nonlocalNode.d.targets[nonlocalNode.d.targets.length - 1]); + } + return nonlocalNode; + } + + private _parseNameList(): NameNode[] { + const nameList: NameNode[] = []; + + while (true) { + const name = this._getTokenIfIdentifier(); + if (!name) { + this._addSyntaxError(LocMessage.expectedIdentifier(), this._peekToken()); + break; + } + + nameList.push(NameNode.create(name)); + + if (!this._consumeTokenIfType(TokenType.Comma)) { + break; + } + } + + return nameList; + } + + // raise_stmt: 'raise' [test ['from' test]] + // (old) raise_stmt: 'raise' [test [',' test [',' test]]] + private _parseRaiseStatement(): RaiseNode { + const raiseToken = this._getKeywordToken(KeywordType.Raise); + + const raiseNode = RaiseNode.create(raiseToken); + if (!this._isNextTokenNeverExpression()) { + raiseNode.d.expr = this._parseTestExpression(/* allowAssignmentExpression */ true); + raiseNode.d.expr.parent = raiseNode; + extendRange(raiseNode, raiseNode.d.expr); + + if (this._consumeTokenIfKeyword(KeywordType.From)) { + raiseNode.d.fromExpr = this._parseTestExpression(/* 
allowAssignmentExpression */ true); + raiseNode.d.fromExpr.parent = raiseNode; + extendRange(raiseNode, raiseNode.d.fromExpr); + } + } + + return raiseNode; + } + + // assert_stmt: 'assert' test [',' test] + private _parseAssertStatement(): AssertNode { + const assertToken = this._getKeywordToken(KeywordType.Assert); + + const expr = this._parseTestExpression(/* allowAssignmentExpression */ false); + const assertNode = AssertNode.create(assertToken, expr); + + if (this._consumeTokenIfType(TokenType.Comma)) { + const exceptionExpr = this._parseTestExpression(/* allowAssignmentExpression */ false); + assertNode.d.exceptionExpr = exceptionExpr; + assertNode.d.exceptionExpr.parent = assertNode; + extendRange(assertNode, exceptionExpr); + } + + return assertNode; + } + + // del_stmt: 'del' exprlist + private _parseDelStatement(): DelNode { + const delToken = this._getKeywordToken(KeywordType.Del); + + const exprListResult = this._parseExpressionList(/* allowStar */ true); + if (!exprListResult.parseError && exprListResult.list.length === 0) { + this._addSyntaxError(LocMessage.expectedDelExpr(), this._peekToken()); + } + const delNode = DelNode.create(delToken); + delNode.d.targets = exprListResult.list; + if (delNode.d.targets.length > 0) { + delNode.d.targets.forEach((expr) => { + expr.parent = delNode; + }); + extendRange(delNode, delNode.d.targets[delNode.d.targets.length - 1]); + } + return delNode; + } + + // yield_expr: 'yield' [yield_arg] + // yield_arg: 'from' test | testlist + private _parseYieldExpression(): YieldNode | YieldFromNode { + const yieldToken = this._getKeywordToken(KeywordType.Yield); + + const nextToken = this._peekToken(); + if (this._consumeTokenIfKeyword(KeywordType.From)) { + if (PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_3)) { + this._addSyntaxError(LocMessage.yieldFromIllegal(), nextToken); + } + return YieldFromNode.create(yieldToken, this._parseTestExpression(/* allowAssignmentExpression */ false)); + } + + let 
exprList: ExpressionNode | undefined; + if (!this._isNextTokenNeverExpression()) { + exprList = this._parseTestOrStarListAsExpression( + /* allowAssignmentExpression */ false, + /* allowMultipleUnpack */ true, + ErrorExpressionCategory.MissingExpression, + () => LocMessage.expectedYieldExpr() + ); + this._reportConditionalErrorForStarTupleElement(exprList); + } + + return YieldNode.create(yieldToken, exprList); + } + + private _tryParseYieldExpression(): YieldNode | YieldFromNode | undefined { + if (this._peekKeywordType() !== KeywordType.Yield) { + return undefined; + } + + return this._parseYieldExpression(); + } + + // simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE + private _parseSimpleStatement(): StatementListNode { + const statement = StatementListNode.create(this._peekToken()); + + while (true) { + // Swallow invalid tokens to make sure we make forward progress. + if (this._peekTokenType() === TokenType.Invalid) { + const invalidToken = this._getNextToken(); + const text = this._fileContents!.substr(invalidToken.start, invalidToken.length); + + const firstCharCode = text.charCodeAt(0); + + // If the invalid token is a line-continuation backslash at the end of the file, + // report a clearer error message consistent with Python: "Unexpected EOF". + const nextTok = this._peekToken(); + const nextNextTok = this._peekToken(1); + const isBackslash = firstCharCode === Char.Backslash; + const isAtEofLineContinuation = + isBackslash && nextTok.type === TokenType.NewLine && nextNextTok.type === TokenType.EndOfStream; + + if (isAtEofLineContinuation) { + this._addSyntaxError(LocMessage.unexpectedEof(), invalidToken); + } else { + // Remove any non-printable characters. 
+ this._addSyntaxError( + LocMessage.invalidTokenChars().format({ text: `\\u${firstCharCode.toString(16)}` }), + invalidToken + ); + } + + this._consumeTokensUntilType([TokenType.NewLine]); + break; + } + + const smallStatement = this._parseSmallStatement(); + statement.d.statements.push(smallStatement); + smallStatement.parent = statement; + extendRange(statement, smallStatement); + + if (smallStatement.nodeType === ParseNodeType.Error) { + // No need to log an error here. We assume that + // it was already logged by _parseSmallStatement. + break; + } + + // Consume the semicolon if present. + if (!this._consumeTokenIfType(TokenType.Semicolon)) { + break; + } + + const nextTokenType = this._peekTokenType(); + if (nextTokenType === TokenType.NewLine || nextTokenType === TokenType.EndOfStream) { + break; + } + } + + if (!this._consumeTokenIfType(TokenType.NewLine)) { + this._addSyntaxError(LocMessage.expectedNewlineOrSemicolon(), this._peekToken()); + } + + return statement; + } + + // small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt | + // import_stmt | global_stmt | nonlocal_stmt | assert_stmt) + // flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt + // import_stmt: import_name | import_from + private _parseSmallStatement(): ParseNode { + switch (this._peekKeywordType()) { + case KeywordType.Pass: + return this._parsePassStatement(); + + case KeywordType.Break: + return this._parseBreakStatement(); + + case KeywordType.Continue: + return this._parseContinueStatement(); + + case KeywordType.Return: + return this._parseReturnStatement(); + + case KeywordType.From: + return this._parseFromStatement(); + + case KeywordType.Import: + return this._parseImportStatement(); + + case KeywordType.Global: + return this._parseGlobalStatement(); + + case KeywordType.Nonlocal: + return this._parseNonlocalStatement(); + + case KeywordType.Raise: + return this._parseRaiseStatement(); + + case KeywordType.Assert: + return 
this._parseAssertStatement(); + + case KeywordType.Del: + return this._parseDelStatement(); + + case KeywordType.Yield: + return this._parseYieldExpression(); + + case KeywordType.Type: { + // Type is considered a "soft" keyword, so we will treat it + // as an identifier if it is followed by an unexpected token. + + const peekToken1 = this._peekToken(1); + const peekToken2 = this._peekToken(2); + let isInvalidTypeToken = true; + + if ( + peekToken1.type === TokenType.Identifier || + (peekToken1.type === TokenType.Keyword && KeywordToken.isSoftKeyword(peekToken1 as KeywordToken)) + ) { + if (peekToken2.type === TokenType.OpenBracket) { + isInvalidTypeToken = false; + } else if ( + peekToken2.type === TokenType.Operator && + (peekToken2 as OperatorToken).operatorType === OperatorType.Assign + ) { + isInvalidTypeToken = false; + } + } + + if (!isInvalidTypeToken) { + return this._parseTypeAliasStatement(); + } + break; + } + } + + return this._parseExpressionStatement(); + } + + private _makeExpressionOrTuple( + exprListResult: ListResult, + enclosedInParens: boolean + ): ExpressionNode { + // A single-element tuple with no trailing comma is simply an expression + // that's surrounded by parens. + if (exprListResult.list.length === 1 && !exprListResult.trailingComma) { + if (exprListResult.list[0].nodeType === ParseNodeType.Unpack) { + this._addSyntaxError(LocMessage.unpackOperatorNotAllowed(), exprListResult.list[0]); + } + return exprListResult.list[0]; + } + + // To accommodate empty tuples ("()"), we will reach back to get + // the opening parenthesis as the opening token. + + const tupleStartRange: TextRange = + exprListResult.list.length > 0 ? 
exprListResult.list[0] : this._peekToken(-1); + + const tupleNode = TupleNode.create(tupleStartRange, enclosedInParens); + tupleNode.d.items = exprListResult.list; + if (exprListResult.list.length > 0) { + exprListResult.list.forEach((expr) => { + expr.parent = tupleNode; + }); + extendRange(tupleNode, exprListResult.list[exprListResult.list.length - 1]); + } + + return tupleNode; + } + + private _parseExpressionListAsPossibleTuple( + errorCategory: ErrorExpressionCategory, + getErrorString: () => string, + errorToken: Token + ): ExpressionNode { + if (this._isNextTokenNeverExpression()) { + this._addSyntaxError(getErrorString(), errorToken); + return ErrorNode.create(errorToken, errorCategory); + } + + const exprListResult = this._parseExpressionList(/* allowStar */ true); + if (exprListResult.parseError) { + return exprListResult.parseError; + } + return this._makeExpressionOrTuple(exprListResult, /* enclosedInParens */ false); + } + + private _parseTestListAsExpression( + errorCategory: ErrorExpressionCategory, + getErrorString: () => string + ): ExpressionNode { + if (this._isNextTokenNeverExpression()) { + return this._handleExpressionParseError(errorCategory, getErrorString()); + } + + const exprListResult = this._parseTestExpressionList(); + if (exprListResult.parseError) { + return exprListResult.parseError; + } + return this._makeExpressionOrTuple(exprListResult, /* enclosedInParens */ false); + } + + private _parseTestOrStarListAsExpression( + allowAssignmentExpression: boolean, + allowMultipleUnpack: boolean, + errorCategory: ErrorExpressionCategory, + getErrorString: () => string + ): ExpressionNode { + if (this._isNextTokenNeverExpression()) { + return this._handleExpressionParseError(errorCategory, getErrorString()); + } + + const exprListResult = this._parseTestOrStarExpressionList(allowAssignmentExpression, allowMultipleUnpack); + if (exprListResult.parseError) { + return exprListResult.parseError; + } + return 
this._makeExpressionOrTuple(exprListResult, /* enclosedInParens */ false); + } + + private _parseExpressionList(allowStar: boolean): ListResult { + return this._parseExpressionListGeneric(() => this._parseExpression(allowStar)); + } + + // testlist: test (',' test)* [','] + private _parseTestExpressionList(): ListResult { + return this._parseExpressionListGeneric(() => this._parseTestExpression(/* allowAssignmentExpression */ false)); + } + + private _parseTestOrStarExpressionList( + allowAssignmentExpression: boolean, + allowMultipleUnpack: boolean + ): ListResult { + const exprListResult = this._parseExpressionListGeneric(() => + this._parseTestOrStarExpression(allowAssignmentExpression) + ); + + if (!allowMultipleUnpack && !exprListResult.parseError) { + let sawStar = false; + for (const expr of exprListResult.list) { + if (expr.nodeType === ParseNodeType.Unpack) { + if (sawStar) { + this._addSyntaxError(LocMessage.duplicateUnpack(), expr); + break; + } + sawStar = true; + } + } + } + + return exprListResult; + } + + // exp_or_star: expr | star_expr + // expr: xor_expr ('|' xor_expr)* + // star_expr: '*' expr + private _parseExpression(allowUnpack: boolean): ExpressionNode { + const startToken = this._peekToken(); + + if (allowUnpack && this._consumeTokenIfOperator(OperatorType.Multiply)) { + return UnpackNode.create(startToken, this._parseExpression(/* allowUnpack */ false)); + } + + return this._parseBitwiseOrExpression(); + } + + // test_or_star: test | star_expr + private _parseTestOrStarExpression(allowAssignmentExpression: boolean): ExpressionNode { + if (this._peekOperatorType() === OperatorType.Multiply) { + return this._parseExpression(/* allowUnpack */ true); + } + + return this._parseTestExpression(allowAssignmentExpression); + } + + // test: or_test ['if' or_test 'else' test] | lambdef + private _parseTestExpression(allowAssignmentExpression: boolean): ExpressionNode { + if (this._peekKeywordType() === KeywordType.Lambda) { + return 
this._parseLambdaExpression(); + } + + const ifExpr = this._parseAssignmentExpression(!allowAssignmentExpression); + if (ifExpr.nodeType === ParseNodeType.Error) { + return ifExpr; + } + + if (!this._consumeTokenIfKeyword(KeywordType.If)) { + return ifExpr; + } + + const testExpr = this._parseOrTest(); + if (testExpr.nodeType === ParseNodeType.Error) { + return testExpr; + } + + if (!this._consumeTokenIfKeyword(KeywordType.Else)) { + return TernaryNode.create( + ifExpr, + testExpr, + this._handleExpressionParseError(ErrorExpressionCategory.MissingElse, LocMessage.expectedElse()) + ); + } + + const elseExpr = this._parseTestExpression(/* allowAssignmentExpression */ true); + + return TernaryNode.create(ifExpr, testExpr, elseExpr); + } + + // assign_expr: NAME := test + private _parseAssignmentExpression(disallowAssignmentExpression = false) { + const leftExpr = this._parseOrTest(); + if (leftExpr.nodeType === ParseNodeType.Error) { + return leftExpr; + } + + if (leftExpr.nodeType !== ParseNodeType.Name) { + return leftExpr; + } + + const walrusToken = this._peekToken(); + if (!this._consumeTokenIfOperator(OperatorType.Walrus)) { + return leftExpr; + } + + if (!this._assignmentExpressionsAllowed || disallowAssignmentExpression) { + this._addSyntaxError(LocMessage.walrusNotAllowed(), walrusToken); + } + + if (PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_8)) { + this._addSyntaxError(LocMessage.walrusIllegal(), walrusToken); + } + + const rightExpr = this._parseTestExpression(/* allowAssignmentExpression */ false); + + return AssignmentExpressionNode.create(leftExpr, walrusToken, rightExpr); + } + + // or_test: and_test ('or' and_test)* + private _parseOrTest(): ExpressionNode { + let leftExpr = this._parseAndTest(); + if (leftExpr.nodeType === ParseNodeType.Error) { + return leftExpr; + } + + while (true) { + const peekToken = this._peekToken(); + if (!this._consumeTokenIfKeyword(KeywordType.Or)) { + break; + } + const rightExpr = 
this._parseAndTest(); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, OperatorType.Or); + } + + return leftExpr; + } + + // and_test: not_test ('and' not_test)* + private _parseAndTest(): ExpressionNode { + let leftExpr = this._parseNotTest(); + if (leftExpr.nodeType === ParseNodeType.Error) { + return leftExpr; + } + + while (true) { + const peekToken = this._peekToken(); + if (!this._consumeTokenIfKeyword(KeywordType.And)) { + break; + } + const rightExpr = this._parseNotTest(); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, OperatorType.And); + } + + return leftExpr; + } + + // not_test: 'not' not_test | comparison + private _parseNotTest(): ExpressionNode { + const notToken = this._peekToken(); + if (this._consumeTokenIfKeyword(KeywordType.Not)) { + const notExpr = this._parseNotTest(); + return this._createUnaryOperationNode(notToken, notExpr, OperatorType.Not); + } + + return this._parseComparison(); + } + + // comparison: expr (comp_op expr)* + // comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' + private _parseComparison(): ExpressionNode { + let leftExpr = this._parseBitwiseOrExpression(); + if (leftExpr.nodeType === ParseNodeType.Error) { + return leftExpr; + } + + while (true) { + let comparisonOperator: OperatorType | undefined; + const peekToken = this._peekToken(); + + if (Tokenizer.isOperatorComparison(this._peekOperatorType())) { + comparisonOperator = this._peekOperatorType(); + if (comparisonOperator === OperatorType.LessOrGreaterThan) { + this._addSyntaxError(LocMessage.operatorLessOrGreaterDeprecated(), peekToken); + comparisonOperator = OperatorType.NotEquals; + } + this._getNextToken(); + } else if (this._consumeTokenIfKeyword(KeywordType.In)) { + comparisonOperator = OperatorType.In; + } else if (this._consumeTokenIfKeyword(KeywordType.Is)) { + if (this._consumeTokenIfKeyword(KeywordType.Not)) { + comparisonOperator = OperatorType.IsNot; + } else { + 
comparisonOperator = OperatorType.Is; + } + } else if (this._peekKeywordType() === KeywordType.Not) { + const tokenAfterNot = this._peekToken(1); + if ( + tokenAfterNot.type === TokenType.Keyword && + (tokenAfterNot as KeywordToken).keywordType === KeywordType.In + ) { + this._getNextToken(); + this._getNextToken(); + comparisonOperator = OperatorType.NotIn; + } + } + + if (comparisonOperator === undefined) { + break; + } + + const rightExpr = this._parseComparison(); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, comparisonOperator); + } + + return leftExpr; + } + + // expr: xor_expr ('|' xor_expr)* + private _parseBitwiseOrExpression(): ExpressionNode { + let leftExpr = this._parseBitwiseXorExpression(); + if (leftExpr.nodeType === ParseNodeType.Error) { + return leftExpr; + } + + while (true) { + const peekToken = this._peekToken(); + if (!this._consumeTokenIfOperator(OperatorType.BitwiseOr)) { + break; + } + const rightExpr = this._parseBitwiseXorExpression(); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, OperatorType.BitwiseOr); + } + + return leftExpr; + } + + // xor_expr: and_expr ('^' and_expr)* + private _parseBitwiseXorExpression(): ExpressionNode { + let leftExpr = this._parseBitwiseAndExpression(); + if (leftExpr.nodeType === ParseNodeType.Error) { + return leftExpr; + } + + while (true) { + const peekToken = this._peekToken(); + if (!this._consumeTokenIfOperator(OperatorType.BitwiseXor)) { + break; + } + const rightExpr = this._parseBitwiseAndExpression(); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, OperatorType.BitwiseXor); + } + + return leftExpr; + } + + // and_expr: shift_expr ('&' shift_expr)* + private _parseBitwiseAndExpression(): ExpressionNode { + let leftExpr = this._parseShiftExpression(); + if (leftExpr.nodeType === ParseNodeType.Error) { + return leftExpr; + } + + while (true) { + const peekToken = this._peekToken(); + if 
(!this._consumeTokenIfOperator(OperatorType.BitwiseAnd)) { + break; + } + const rightExpr = this._parseShiftExpression(); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, OperatorType.BitwiseAnd); + } + + return leftExpr; + } + + // shift_expr: arith_expr (('<<'|'>>') arith_expr)* + private _parseShiftExpression(): ExpressionNode { + let leftExpr = this._parseArithmeticExpression(); + if (leftExpr.nodeType === ParseNodeType.Error) { + return leftExpr; + } + + let peekToken = this._peekToken(); + let nextOperator = this._peekOperatorType(); + while (nextOperator === OperatorType.LeftShift || nextOperator === OperatorType.RightShift) { + this._getNextToken(); + const rightExpr = this._parseArithmeticExpression(); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, nextOperator); + peekToken = this._peekToken(); + nextOperator = this._peekOperatorType(); + } + + return leftExpr; + } + + // arith_expr: term (('+'|'-') term)* + private _parseArithmeticExpression(): ExpressionNode { + let leftExpr = this._parseArithmeticTerm(); + if (leftExpr.nodeType === ParseNodeType.Error) { + return leftExpr; + } + + let peekToken = this._peekToken(); + let nextOperator = this._peekOperatorType(); + while (nextOperator === OperatorType.Add || nextOperator === OperatorType.Subtract) { + this._getNextToken(); + const rightExpr = this._parseArithmeticTerm(); + if (rightExpr.nodeType === ParseNodeType.Error) { + return rightExpr; + } + + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, nextOperator); + peekToken = this._peekToken(); + nextOperator = this._peekOperatorType(); + } + + return leftExpr; + } + + // term: factor (('*'|'@'|'/'|'%'|'//') factor)* + private _parseArithmeticTerm(): ExpressionNode { + let leftExpr = this._parseArithmeticFactor(); + if (leftExpr.nodeType === ParseNodeType.Error) { + return leftExpr; + } + + let peekToken = this._peekToken(); + let nextOperator = this._peekOperatorType(); 
+ while ( + nextOperator === OperatorType.Multiply || + nextOperator === OperatorType.MatrixMultiply || + nextOperator === OperatorType.Divide || + nextOperator === OperatorType.Mod || + nextOperator === OperatorType.FloorDivide + ) { + this._getNextToken(); + const rightExpr = this._parseArithmeticFactor(); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, nextOperator); + peekToken = this._peekToken(); + nextOperator = this._peekOperatorType(); + } + + return leftExpr; + } + + // factor: ('+'|'-'|'~') factor | power + // power: atom_expr ['**' factor] + private _parseArithmeticFactor(): ExpressionNode { + const nextToken = this._peekToken(); + const nextOperator = this._peekOperatorType(); + if ( + nextOperator === OperatorType.Add || + nextOperator === OperatorType.Subtract || + nextOperator === OperatorType.BitwiseInvert + ) { + this._getNextToken(); + const expression = this._parseArithmeticFactor(); + return this._createUnaryOperationNode(nextToken, expression, nextOperator); + } + + const leftExpr = this._parseAtomExpression(); + if (leftExpr.nodeType === ParseNodeType.Error) { + return leftExpr; + } + + const peekToken = this._peekToken(); + if (this._consumeTokenIfOperator(OperatorType.Power)) { + const rightExpr = this._parseArithmeticFactor(); + return this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, OperatorType.Power); + } + + return leftExpr; + } + + // Determines whether the expression refers to a type exported by the typing + // or typing_extensions modules. We can directly evaluate the types at binding + // time. We assume here that the code isn't making use of some custom type alias + // to refer to the typing types. 
+ private _isTypingAnnotation(typeAnnotation: ExpressionNode, name: string): boolean { + if (typeAnnotation.nodeType === ParseNodeType.Name) { + const alias = this._typingSymbolAliases.get(typeAnnotation.d.value); + if (alias === name) { + return true; + } + } else if (typeAnnotation.nodeType === ParseNodeType.MemberAccess) { + if (typeAnnotation.d.leftExpr.nodeType === ParseNodeType.Name && typeAnnotation.d.member.d.value === name) { + const baseName = typeAnnotation.d.leftExpr.d.value; + return this._typingImportAliases.some((alias) => alias === baseName); + } + } + + return false; + } + + // atom_expr: ['await'] atom trailer* + // trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME + private _parseAtomExpression(): ExpressionNode { + let awaitToken: KeywordToken | undefined; + if (this._peekKeywordType() === KeywordType.Await) { + awaitToken = this._getKeywordToken(KeywordType.Await); + if (PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_5)) { + this._addSyntaxError(LocMessage.awaitIllegal(), awaitToken); + } + } + + let atomExpression = this._parseAtom(); + if (atomExpression.nodeType === ParseNodeType.Error) { + return atomExpression; + } + + // Consume trailers. + while (true) { + // Is it a function call? + const startOfTrailerToken = this._peekToken(); + if (this._consumeTokenIfType(TokenType.OpenParenthesis)) { + // Generally, function calls are not allowed within type annotations, + // but they are permitted in "Annotated" annotations. 
+ const wasParsingTypeAnnotation = this._isParsingTypeAnnotation; + this._isParsingTypeAnnotation = false; + + const argListResult = this._parseArgList(); + const callNode = CallNode.create(atomExpression, argListResult.args, argListResult.trailingComma); + + if (argListResult.args.length > 1 || argListResult.trailingComma) { + argListResult.args.forEach((arg) => { + if (arg.d.valueExpr.nodeType === ParseNodeType.Comprehension) { + if (!arg.d.valueExpr.d.hasParens) { + this._addSyntaxError(LocMessage.generatorNotParenthesized(), arg.d.valueExpr); + } + } + }); + } + + const nextToken = this._peekToken(); + let isArgListTerminated = false; + if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { + this._addSyntaxError(LocMessage.expectedCloseParen(), startOfTrailerToken); + + // Consume the remainder of tokens on the line for error + // recovery. + this._consumeTokensUntilType([TokenType.NewLine]); + + // Extend the node's range to include the rest of the line. + // This helps the signatureHelpProvider. + extendRange(callNode, this._peekToken()); + } else { + extendRange(callNode, nextToken); + isArgListTerminated = true; + } + + this._isParsingTypeAnnotation = wasParsingTypeAnnotation; + + const maxDepth = this._maxChildDepthMap.get(atomExpression.id) ?? 0; + if (maxDepth >= maxChildNodeDepth) { + atomExpression = ErrorNode.create(callNode, ErrorExpressionCategory.MaxDepthExceeded); + this._addSyntaxError(LocMessage.maxParseDepthExceeded(), atomExpression); + } else { + atomExpression = callNode; + this._maxChildDepthMap.set(callNode.id, maxDepth + 1); + } + + // If the argument list wasn't terminated, break out of the loop + if (!isArgListTerminated) { + break; + } + } else if (this._consumeTokenIfType(TokenType.OpenBracket)) { + // Is it an index operator? + + // This is an unfortunate hack that's necessary to accommodate 'Literal' + // and 'Annotated' type annotations properly. 
We need to suspend treating + // strings as type annotations within a Literal or Annotated subscript. + const wasParsingIndexTrailer = this._isParsingIndexTrailer; + const wasParsingTypeAnnotation = this._isParsingTypeAnnotation; + + if ( + this._isTypingAnnotation(atomExpression, 'Literal') || + this._isTypingAnnotation(atomExpression, 'Annotated') + ) { + this._isParsingTypeAnnotation = false; + } + + this._isParsingIndexTrailer = true; + const subscriptList = this._parseSubscriptList(); + this._isParsingTypeAnnotation = wasParsingTypeAnnotation; + this._isParsingIndexTrailer = wasParsingIndexTrailer; + + const closingToken = this._peekToken(); + + const indexNode = IndexNode.create( + atomExpression, + subscriptList.list, + subscriptList.trailingComma, + closingToken + ); + extendRange(indexNode, indexNode); + + if (!this._consumeTokenIfType(TokenType.CloseBracket)) { + // Handle the error case, but don't use the error node in this + // case because it creates problems for the completion provider. + this._handleExpressionParseError( + ErrorExpressionCategory.MissingIndexCloseBracket, + LocMessage.expectedCloseBracket(), + startOfTrailerToken, + indexNode + ); + } + + const maxDepth = this._maxChildDepthMap.get(atomExpression.id) ?? 0; + if (maxDepth >= maxChildNodeDepth) { + atomExpression = ErrorNode.create(indexNode, ErrorExpressionCategory.MaxDepthExceeded); + this._addSyntaxError(LocMessage.maxParseDepthExceeded(), atomExpression); + } else { + atomExpression = indexNode; + this._maxChildDepthMap.set(indexNode.id, maxDepth + 1); + } + } else if (this._consumeTokenIfType(TokenType.Dot)) { + // Is it a member access? 
+ const memberName = this._getTokenIfIdentifier(); + if (!memberName) { + return this._handleExpressionParseError( + ErrorExpressionCategory.MissingMemberAccessName, + LocMessage.expectedMemberName(), + startOfTrailerToken, + atomExpression, + [TokenType.Keyword] + ); + } + + const memberAccessNode = MemberAccessNode.create(atomExpression, NameNode.create(memberName)); + + const maxDepth = this._maxChildDepthMap.get(atomExpression.id) ?? 0; + if (maxDepth >= maxChildNodeDepth) { + atomExpression = ErrorNode.create(memberAccessNode, ErrorExpressionCategory.MaxDepthExceeded); + this._addSyntaxError(LocMessage.maxParseDepthExceeded(), atomExpression); + } else { + atomExpression = memberAccessNode; + this._maxChildDepthMap.set(memberAccessNode.id, maxDepth + 1); + } + } else { + break; + } + } + + if (awaitToken) { + return AwaitNode.create(awaitToken, atomExpression); + } + + return atomExpression; + } + + // subscriptlist: subscript (',' subscript)* [','] + private _parseSubscriptList(): SubscriptListResult { + const argList: ArgumentNode[] = []; + let sawKeywordArg = false; + let trailingComma = false; + + while (true) { + const firstToken = this._peekToken(); + + if (firstToken.type !== TokenType.Colon && this._isNextTokenNeverExpression()) { + break; + } + + let argType = ArgCategory.Simple; + if (this._consumeTokenIfOperator(OperatorType.Multiply)) { + argType = ArgCategory.UnpackedList; + } else if (this._consumeTokenIfOperator(OperatorType.Power)) { + argType = ArgCategory.UnpackedDictionary; + } + + const startOfSubscriptIndex = this._tokenIndex; + let valueExpr = this._parsePossibleSlice(); + let nameIdentifier: IdentifierToken | undefined; + + // Is this a keyword argument? 
+ if (argType === ArgCategory.Simple) { + if (this._consumeTokenIfOperator(OperatorType.Assign)) { + const nameExpr = valueExpr; + valueExpr = this._parsePossibleSlice(); + + if (nameExpr.nodeType === ParseNodeType.Name) { + nameIdentifier = nameExpr.d.token; + } else { + this._addSyntaxError(LocMessage.expectedParamName(), nameExpr); + } + } else if ( + valueExpr.nodeType === ParseNodeType.Name && + this._peekOperatorType() === OperatorType.Walrus + ) { + this._tokenIndex = startOfSubscriptIndex; + valueExpr = this._parseTestExpression(/* allowAssignmentExpression */ true); + + // Python 3.10 and newer allow assignment expressions to be used inside of a subscript. + if ( + !this._parseOptions.isStubFile && + PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_10) + ) { + this._addSyntaxError(LocMessage.assignmentExprInSubscript(), valueExpr); + } + } + } + + const argNode = ArgumentNode.create(firstToken, valueExpr, argType); + if (nameIdentifier) { + argNode.d.name = NameNode.create(nameIdentifier); + argNode.d.name.parent = argNode; + } + + if (argNode.d.name) { + sawKeywordArg = true; + } else if (sawKeywordArg && argNode.d.argCategory === ArgCategory.Simple) { + this._addSyntaxError(LocMessage.positionArgAfterNamedArg(), argNode); + } + argList.push(argNode); + + if (argNode.d.name) { + this._addSyntaxError(LocMessage.keywordSubscriptIllegal(), argNode.d.name); + } + + if (argType !== ArgCategory.Simple) { + const unpackListAllowed = + this._parseOptions.isStubFile || + this._isParsingQuotedText || + PythonVersion.isGreaterOrEqualTo(this._getLanguageVersion(), pythonVersion3_11); + + if (argType === ArgCategory.UnpackedList && !unpackListAllowed) { + this._addSyntaxError(LocMessage.unpackedSubscriptIllegal(), argNode); + } + + if (argType === ArgCategory.UnpackedDictionary) { + this._addSyntaxError(LocMessage.unpackedDictSubscriptIllegal(), argNode); + } + } + + if (!this._consumeTokenIfType(TokenType.Comma)) { + trailingComma = false; + break; 
+ } + + trailingComma = true; + } + + // An empty subscript list is illegal. + if (argList.length === 0) { + const errorNode = this._handleExpressionParseError( + ErrorExpressionCategory.MissingIndexOrSlice, + LocMessage.expectedSliceIndex(), + /* targetToken */ undefined, + /* childNode */ undefined, + [TokenType.CloseBracket] + ); + argList.push(ArgumentNode.create(this._peekToken(), errorNode, ArgCategory.Simple)); + } + + return { + list: argList, + trailingComma, + }; + } + + // subscript: test | [test] ':' [test] [sliceop] + // sliceop: ':' [test] + private _parsePossibleSlice(): ExpressionNode { + const firstToken = this._peekToken(); + const sliceExpressions: (ExpressionNode | undefined)[] = [undefined, undefined, undefined]; + let sliceIndex = 0; + let sawColon = false; + + while (true) { + const nextTokenType = this._peekTokenType(); + if (nextTokenType === TokenType.CloseBracket || nextTokenType === TokenType.Comma) { + break; + } + + if (nextTokenType !== TokenType.Colon) { + // Python 3.10 and newer allow assignment expressions to be used inside of a subscript. + const allowAssignmentExpression = + this._parseOptions.isStubFile || + PythonVersion.isGreaterOrEqualTo(this._getLanguageVersion(), pythonVersion3_10); + sliceExpressions[sliceIndex] = this._parseTestExpression(allowAssignmentExpression); + } + sliceIndex++; + + if (sliceIndex >= 3 || !this._consumeTokenIfType(TokenType.Colon)) { + break; + } + sawColon = true; + } + + // If this was a simple expression with no colons return it. 
+ if (!sawColon) { + if (sliceExpressions[0]) { + return sliceExpressions[0]; + } + + return ErrorNode.create(this._peekToken(), ErrorExpressionCategory.MissingIndexOrSlice); + } + + const sliceNode = SliceNode.create(firstToken); + sliceNode.d.startValue = sliceExpressions[0]; + if (sliceNode.d.startValue) { + sliceNode.d.startValue.parent = sliceNode; + } + sliceNode.d.endValue = sliceExpressions[1]; + if (sliceNode.d.endValue) { + sliceNode.d.endValue.parent = sliceNode; + } + sliceNode.d.stepValue = sliceExpressions[2]; + if (sliceNode.d.stepValue) { + sliceNode.d.stepValue.parent = sliceNode; + } + const extension = sliceExpressions[2] || sliceExpressions[1] || sliceExpressions[0]; + if (extension) { + extendRange(sliceNode, extension); + } + + return sliceNode; + } + + // arglist: argument (',' argument)* [','] + private _parseArgList(): ArgListResult { + const argList: ArgumentNode[] = []; + let sawKeywordArg = false; + let sawUnpackedKeywordArg = false; + let trailingComma = false; + + while (true) { + const nextTokenType = this._peekTokenType(); + if ( + nextTokenType === TokenType.CloseParenthesis || + nextTokenType === TokenType.NewLine || + nextTokenType === TokenType.EndOfStream + ) { + break; + } + + trailingComma = false; + const arg = this._parseArgument(); + if (arg.d.name) { + sawKeywordArg = true; + } else { + if (sawKeywordArg && arg.d.argCategory === ArgCategory.Simple) { + this._addSyntaxError(LocMessage.positionArgAfterNamedArg(), arg); + } + + if (sawUnpackedKeywordArg && arg.d.argCategory !== ArgCategory.UnpackedDictionary) { + this._addSyntaxError(LocMessage.positionArgAfterUnpackedDictArg(), arg); + } + } + if (arg.d.argCategory === ArgCategory.UnpackedDictionary) { + sawUnpackedKeywordArg = true; + } + argList.push(arg); + + if (!this._consumeTokenIfType(TokenType.Comma)) { + break; + } + + trailingComma = true; + } + + return { args: argList, trailingComma }; + } + + // argument: ( test [comp_for] | + // test '=' test | + // '**' test | 
+ // '*' test ) + private _parseArgument(): ArgumentNode { + const firstToken = this._peekToken(); + + let argType = ArgCategory.Simple; + if (this._consumeTokenIfOperator(OperatorType.Multiply)) { + argType = ArgCategory.UnpackedList; + } else if (this._consumeTokenIfOperator(OperatorType.Power)) { + argType = ArgCategory.UnpackedDictionary; + } + + let valueExpr = this._parseTestExpression(/* allowAssignmentExpression */ true); + let nameIdentifier: IdentifierToken | undefined; + + if (argType === ArgCategory.Simple) { + if (this._consumeTokenIfOperator(OperatorType.Assign)) { + const nameExpr = valueExpr; + valueExpr = this._parseTestExpression(/* allowAssignmentExpression */ false); + + if (nameExpr.nodeType === ParseNodeType.Name) { + nameIdentifier = nameExpr.d.token; + } else { + this._addSyntaxError(LocMessage.expectedParamName(), nameExpr); + } + } else { + const comprehension = this._tryParseComprehension(valueExpr, /* isGenerator */ true); + if (comprehension) { + valueExpr = comprehension; + } + } + } + + const argNode = ArgumentNode.create(firstToken, valueExpr, argType); + if (nameIdentifier) { + argNode.d.name = NameNode.create(nameIdentifier); + argNode.d.name.parent = argNode; + } + + return argNode; + } + + // atom: ('(' [yield_expr | testlist_comp] ')' | + // '[' [testlist_comp] ']' | + // '{' [dictorsetmaker] '}' | + // NAME | NUMBER | STRING+ | '...' 
| 'None' | 'True' | 'False' | '__debug__') + private _parseAtom(): ExpressionNode { + const nextToken = this._peekToken(); + + if (nextToken.type === TokenType.Ellipsis) { + return EllipsisNode.create(this._getNextToken()); + } + + if (nextToken.type === TokenType.Number) { + return NumberNode.create(this._getNextToken() as NumberToken); + } + + if (nextToken.type === TokenType.Identifier) { + return NameNode.create(this._getNextToken() as IdentifierToken); + } + + if (nextToken.type === TokenType.String || nextToken.type === TokenType.FStringStart) { + return this._parseStringList(); + } + + if (nextToken.type === TokenType.Backtick) { + this._getNextToken(); + + // Atoms with backticks are no longer allowed in Python 3.x, but they + // were a thing in Python 2.x. We'll parse them to improve parse recovery + // and emit an error. + this._addSyntaxError(LocMessage.backticksIllegal(), nextToken); + + const expressionNode = this._parseTestListAsExpression(ErrorExpressionCategory.MissingExpression, () => + LocMessage.expectedExpr() + ); + + this._consumeTokenIfType(TokenType.Backtick); + return expressionNode; + } + + if (nextToken.type === TokenType.OpenParenthesis) { + const possibleTupleNode = this._parseTupleAtom(); + + if ( + possibleTupleNode.nodeType === ParseNodeType.UnaryOperation || + possibleTupleNode.nodeType === ParseNodeType.Await || + possibleTupleNode.nodeType === ParseNodeType.BinaryOperation + ) { + // Mark binary expressions as parenthesized so we don't attempt + // to use comparison chaining, which isn't appropriate when the + // expression is parenthesized. Unary and await expressions + // are also marked to be able to display them unambiguously. 
+ possibleTupleNode.d.hasParens = true; + } + + if ( + possibleTupleNode.nodeType === ParseNodeType.StringList || + possibleTupleNode.nodeType === ParseNodeType.Comprehension || + possibleTupleNode.nodeType === ParseNodeType.AssignmentExpression + ) { + possibleTupleNode.d.hasParens = true; + } + + return possibleTupleNode; + } else if (nextToken.type === TokenType.OpenBracket) { + return this._parseListAtom(); + } else if (nextToken.type === TokenType.OpenCurlyBrace) { + return this._parseDictionaryOrSetAtom(); + } + + if (nextToken.type === TokenType.Keyword) { + const keywordToken = nextToken as KeywordToken; + if ( + keywordToken.keywordType === KeywordType.False || + keywordToken.keywordType === KeywordType.True || + keywordToken.keywordType === KeywordType.Debug || + keywordToken.keywordType === KeywordType.None + ) { + return ConstantNode.create(this._getNextToken() as KeywordToken); + } + + // Make an identifier out of the keyword. + const keywordAsIdentifier = this._getTokenIfIdentifier(); + if (keywordAsIdentifier) { + return NameNode.create(keywordAsIdentifier); + } + } + + return this._handleExpressionParseError(ErrorExpressionCategory.MissingExpression, LocMessage.expectedExpr()); + } + + // Allocates a dummy "error expression" and consumes the remainder + // of the tokens on the line for error recovery. A partially-completed + // child node can be passed to help the completion provider determine + // what to do. + private _handleExpressionParseError( + category: ErrorExpressionCategory, + errorMsg: string, + targetToken?: Token, + childNode?: ExpressionNode, + additionalStopTokens?: TokenType[] + ): ErrorNode { + this._addSyntaxError(errorMsg, targetToken ?? this._peekToken()); + + const stopTokens = [TokenType.NewLine]; + if (additionalStopTokens) { + appendArray(stopTokens, additionalStopTokens); + } + + // Using a token that is not included in the error node creates problems. + // Sibling nodes in parse tree shouldn't overlap each other. 
+ const nextToken = this._peekToken(); + const initialRange: TextRange = stopTokens.some((k) => nextToken.type === k) + ? targetToken ?? childNode ?? TextRange.create(nextToken.start, /* length */ 0) + : nextToken; + const expr = ErrorNode.create(initialRange, category, childNode); + this._consumeTokensUntilType(stopTokens); + + return expr; + } + + // lambdef: 'lambda' [varargslist] ':' test + private _parseLambdaExpression(allowConditional = true): LambdaNode { + const lambdaToken = this._getKeywordToken(KeywordType.Lambda); + + const argList = this._parseVarArgsList(TokenType.Colon, /* allowAnnotations */ false); + + if (!this._consumeTokenIfType(TokenType.Colon)) { + this._addSyntaxError(LocMessage.expectedColon(), this._peekToken()); + } + + let testExpr: ExpressionNode; + if (allowConditional) { + testExpr = this._parseTestExpression(/* allowAssignmentExpression */ false); + } else { + testExpr = this._tryParseLambdaExpression(/* allowConditional */ false) || this._parseOrTest(); + } + + const lambdaNode = LambdaNode.create(lambdaToken, testExpr); + lambdaNode.d.params = argList; + argList.forEach((arg) => { + arg.parent = lambdaNode; + }); + return lambdaNode; + } + + private _tryParseLambdaExpression(allowConditional = true): LambdaNode | undefined { + if (this._peekKeywordType() !== KeywordType.Lambda) { + return undefined; + } + + return this._parseLambdaExpression(allowConditional); + } + + // ('(' [yield_expr | testlist_comp] ')' + // testlist_comp: (test | star_expr) (comp_for | (',' (test | star_expr))* [',']) + private _parseTupleAtom(): ExpressionNode { + const startParen = this._getNextToken(); + assert(startParen.type === TokenType.OpenParenthesis); + + const yieldExpr = this._tryParseYieldExpression(); + if (yieldExpr) { + if (this._peekTokenType() !== TokenType.CloseParenthesis) { + return this._handleExpressionParseError( + ErrorExpressionCategory.MissingTupleCloseParen, + LocMessage.expectedCloseParen(), + startParen, + yieldExpr + ); + } else 
{ + extendRange(yieldExpr, this._getNextToken()); + } + + return yieldExpr; + } + + const exprListResult = this._parseTestListWithComprehension(/* isGenerator */ true); + const tupleOrExpression = this._makeExpressionOrTuple(exprListResult, /* enclosedInParens */ true); + + extendRange(tupleOrExpression, startParen); + + if (this._peekTokenType() !== TokenType.CloseParenthesis) { + return this._handleExpressionParseError( + ErrorExpressionCategory.MissingTupleCloseParen, + LocMessage.expectedCloseParen(), + startParen, + exprListResult.parseError ?? tupleOrExpression + ); + } else { + extendRange(tupleOrExpression, this._getNextToken()); + } + + return tupleOrExpression; + } + + // '[' [testlist_comp] ']' + // testlist_comp: (test | star_expr) (comp_for | (',' (test | star_expr))* [',']) + private _parseListAtom(): ListNode | ErrorNode { + const startBracket = this._getNextToken(); + assert(startBracket.type === TokenType.OpenBracket); + + const exprListResult = this._parseTestListWithComprehension(/* isGenerator */ false); + const closeBracket: Token | undefined = this._peekToken(); + if (!this._consumeTokenIfType(TokenType.CloseBracket)) { + return this._handleExpressionParseError( + ErrorExpressionCategory.MissingListCloseBracket, + LocMessage.expectedCloseBracket(), + startBracket, + exprListResult.parseError ?? 
_createList() + ); + } + + return _createList(); + + function _createList() { + const listAtom = ListNode.create(startBracket); + + if (closeBracket) { + extendRange(listAtom, closeBracket); + } + + if (exprListResult.list.length > 0) { + exprListResult.list.forEach((expr) => { + expr.parent = listAtom; + }); + extendRange(listAtom, exprListResult.list[exprListResult.list.length - 1]); + } + + listAtom.d.items = exprListResult.list; + return listAtom; + } + } + + private _parseTestListWithComprehension(isGenerator: boolean): ListResult { + let sawComprehension = false; + + return this._parseExpressionListGeneric( + () => { + let expr = this._parseTestOrStarExpression(/* allowAssignmentExpression */ true); + const comprehension = this._tryParseComprehension(expr, isGenerator); + if (comprehension) { + expr = comprehension; + sawComprehension = true; + } + return expr; + }, + () => this._isNextTokenNeverExpression(), + () => sawComprehension + ); + } + + // '{' [dictorsetmaker] '}' + // dictorsetmaker: ( + // (dictentry (comp_for | (',' dictentry)* [','])) + // | (setentry (comp_for | (',' setentry)* [','])) + // ) + // dictentry: (test ':' test | '**' expr) + // setentry: test | star_expr + private _parseDictionaryOrSetAtom(): DictionaryNode | SetNode { + const startBrace = this._getNextToken(); + assert(startBrace.type === TokenType.OpenCurlyBrace); + + const dictionaryEntries: DictionaryEntryNode[] = []; + const setEntries: ExpressionNode[] = []; + let isDictionary = false; + let isSet = false; + let sawComprehension = false; + let isFirstEntry = true; + let trailingCommaToken: Token | undefined; + + while (true) { + if (this._peekTokenType() === TokenType.CloseCurlyBrace) { + break; + } + + trailingCommaToken = undefined; + + let doubleStarExpression: ExpressionNode | undefined; + let keyExpression: ExpressionNode | undefined; + let valueExpression: ExpressionNode | undefined; + const doubleStar = this._peekToken(); + + if 
(this._consumeTokenIfOperator(OperatorType.Power)) { + doubleStarExpression = this._parseExpression(/* allowUnpack */ false); + } else { + keyExpression = this._parseTestOrStarExpression(/* allowAssignmentExpression */ true); + + // Allow walrus operators in this context only for Python 3.10 and newer. + // Older versions of Python generated a syntax error in this context. + let isWalrusAllowed = PythonVersion.isGreaterOrEqualTo(this._getLanguageVersion(), pythonVersion3_10); + + if (this._consumeTokenIfType(TokenType.Colon)) { + valueExpression = this._parseTestExpression(/* allowAssignmentExpression */ false); + isWalrusAllowed = false; + } + + if ( + !isWalrusAllowed && + keyExpression.nodeType === ParseNodeType.AssignmentExpression && + !keyExpression.d.hasParens + ) { + this._addSyntaxError(LocMessage.walrusNotAllowed(), keyExpression.d.walrusToken); + } + } + + if (keyExpression && valueExpression) { + if (keyExpression.nodeType === ParseNodeType.Unpack) { + this._addSyntaxError(LocMessage.unpackInDict(), keyExpression); + } + + if (isSet) { + this._addSyntaxError(LocMessage.keyValueInSet(), valueExpression); + } else { + const keyEntryNode = DictionaryKeyEntryNode.create(keyExpression, valueExpression); + let dictEntry: DictionaryEntryNode = keyEntryNode; + const comprehension = this._tryParseComprehension(keyEntryNode, /* isGenerator */ false); + if (comprehension) { + dictEntry = comprehension; + sawComprehension = true; + + if (!isFirstEntry) { + this._addSyntaxError(LocMessage.comprehensionInDict(), dictEntry); + } + } + dictionaryEntries.push(dictEntry); + isDictionary = true; + } + } else if (doubleStarExpression) { + if (isSet) { + this._addSyntaxError(LocMessage.unpackInSet(), doubleStarExpression); + } else { + const listEntryNode = DictionaryExpandEntryNode.create(doubleStarExpression); + extendRange(listEntryNode, doubleStar); + let expandEntryNode: DictionaryEntryNode = listEntryNode; + const comprehension = 
this._tryParseComprehension(listEntryNode, /* isGenerator */ false); + if (comprehension) { + expandEntryNode = comprehension; + sawComprehension = true; + + if (!isFirstEntry) { + this._addSyntaxError(LocMessage.comprehensionInDict(), doubleStarExpression); + } + } + dictionaryEntries.push(expandEntryNode); + isDictionary = true; + } + } else { + assert(keyExpression !== undefined); + if (keyExpression) { + if (isDictionary) { + const missingValueErrorNode = ErrorNode.create( + this._peekToken(), + ErrorExpressionCategory.MissingDictValue + ); + const keyEntryNode = DictionaryKeyEntryNode.create(keyExpression, missingValueErrorNode); + dictionaryEntries.push(keyEntryNode); + this._addSyntaxError(LocMessage.dictKeyValuePairs(), keyExpression); + } else { + const comprehension = this._tryParseComprehension(keyExpression, /* isGenerator */ false); + if (comprehension) { + keyExpression = comprehension; + sawComprehension = true; + + if (!isFirstEntry) { + this._addSyntaxError(LocMessage.comprehensionInSet(), keyExpression); + } + } + setEntries.push(keyExpression); + isSet = true; + } + } + } + + // List comprehension statements always end the list. 
+ if (sawComprehension) { + break; + } + + if (this._peekTokenType() !== TokenType.Comma) { + break; + } + + trailingCommaToken = this._getNextToken(); + + isFirstEntry = false; + } + + let closeCurlyBrace: Token | undefined = this._peekToken(); + if (!this._consumeTokenIfType(TokenType.CloseCurlyBrace)) { + this._addSyntaxError(LocMessage.expectedCloseBrace(), startBrace); + closeCurlyBrace = undefined; + } + + if (isSet) { + const setAtom = SetNode.create(startBrace); + if (closeCurlyBrace) { + extendRange(setAtom, closeCurlyBrace); + } + + if (setEntries.length > 0) { + extendRange(setAtom, setEntries[setEntries.length - 1]); + } + + setEntries.forEach((entry) => { + entry.parent = setAtom; + }); + + setAtom.d.items = setEntries; + return setAtom; + } + + const dictionaryAtom = DictionaryNode.create(startBrace); + + if (trailingCommaToken) { + dictionaryAtom.d.trailingCommaToken = trailingCommaToken; + extendRange(dictionaryAtom, trailingCommaToken); + } + + if (closeCurlyBrace) { + extendRange(dictionaryAtom, closeCurlyBrace); + } + + if (dictionaryEntries.length > 0) { + dictionaryEntries.forEach((entry) => { + entry.parent = dictionaryAtom; + }); + extendRange(dictionaryAtom, dictionaryEntries[dictionaryEntries.length - 1]); + } + dictionaryAtom.d.items = dictionaryEntries; + return dictionaryAtom; + } + + private _parseExpressionListGeneric( + parser: () => T | ErrorNode, + terminalCheck: () => boolean = () => this._isNextTokenNeverExpression(), + finalEntryCheck: () => boolean = () => false + ): ListResult { + let trailingComma = false; + const list: T[] = []; + let parseError: ErrorNode | undefined; + + while (true) { + if (terminalCheck()) { + break; + } + + const expr = parser(); + if (expr.nodeType === ParseNodeType.Error) { + parseError = expr as ErrorNode; + break; + } + list.push(expr); + + // Should we stop without checking for a trailing comma? 
+ if (finalEntryCheck()) { + break; + } + + if (!this._consumeTokenIfType(TokenType.Comma)) { + trailingComma = false; + break; + } + + trailingComma = true; + } + + return { trailingComma, list, parseError }; + } + + // expr_stmt: testlist_star_expr (annassign | augassign (yield_expr | testlist) | + // ('=' (yield_expr | testlist_star_expr))*) + // testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] + // annassign: ':' test ['=' (yield_expr | testlist_star_expr)] + // augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | + // '<<=' | '>>=' | '**=' | '//=') + private _parseExpressionStatement(): ExpressionNode { + let leftExpr = this._parseTestOrStarListAsExpression( + /* allowAssignmentExpression */ false, + /* allowMultipleUnpack */ false, + ErrorExpressionCategory.MissingExpression, + () => LocMessage.expectedExpr() + ); + let annotationExpr: ExpressionNode | undefined; + + if (leftExpr.nodeType === ParseNodeType.Error) { + return leftExpr; + } + + // Is this a type annotation assignment? + if (this._consumeTokenIfType(TokenType.Colon)) { + annotationExpr = this._parseTypeAnnotation(); + leftExpr = TypeAnnotationNode.create(leftExpr, annotationExpr); + + if ( + !this._parseOptions.isStubFile && + PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_6) + ) { + this._addSyntaxError(LocMessage.varAnnotationIllegal(), annotationExpr); + } + + if (!this._consumeTokenIfOperator(OperatorType.Assign)) { + return leftExpr; + } + + // This is an unfortunate hack that's necessary to accommodate 'TypeAlias' + // declarations properly. We need to treat this assignment differently than + // most because the expression on the right side is treated like a type + // annotation and therefore allows string-literal forward declarations. 
+ const isTypeAliasDeclaration = this._isTypingAnnotation(annotationExpr, 'TypeAlias'); + + const wasParsingTypeAnnotation = this._isParsingTypeAnnotation; + if (isTypeAliasDeclaration) { + this._isParsingTypeAnnotation = true; + } + + const rightExpr = + this._tryParseYieldExpression() ?? + this._parseTestOrStarListAsExpression( + /* allowAssignmentExpression */ false, + /* allowMultipleUnpack */ true, + ErrorExpressionCategory.MissingExpression, + () => LocMessage.expectedAssignRightHandExpr() + ); + + this._isParsingTypeAnnotation = wasParsingTypeAnnotation; + + return AssignmentNode.create(leftExpr, rightExpr); + } + + // Is this a simple assignment? + if (this._consumeTokenIfOperator(OperatorType.Assign)) { + return this._parseChainAssignments(leftExpr); + } + + if (Tokenizer.isOperatorAssignment(this._peekOperatorType())) { + const operatorToken = this._getNextToken() as OperatorToken; + + const rightExpr = + this._tryParseYieldExpression() ?? + this._parseTestOrStarListAsExpression( + /* allowAssignmentExpression */ false, + /* allowMultipleUnpack */ true, + ErrorExpressionCategory.MissingExpression, + () => LocMessage.expectedBinaryRightHandExpr() + ); + this._reportConditionalErrorForStarTupleElement(rightExpr, pythonVersion3_9); + + // Make a shallow copy of the dest expression but give it a new ID. + const destExpr = Object.assign({}, leftExpr); + destExpr.id = getNextNodeId(); + + return AugmentedAssignmentNode.create(leftExpr, rightExpr, operatorToken.operatorType, destExpr); + } + + return leftExpr; + } + + private _parseChainAssignments(leftExpr: ExpressionNode): ExpressionNode { + // Make a list of assignment targets. + const assignmentTargets = [leftExpr]; + let rightExpr: ExpressionNode; + + while (true) { + rightExpr = + this._tryParseYieldExpression() ?? 
+ this._parseTestOrStarListAsExpression( + /* allowAssignmentExpression */ false, + /* allowMultipleUnpack */ true, + ErrorExpressionCategory.MissingExpression, + () => LocMessage.expectedAssignRightHandExpr() + ); + + if (rightExpr.nodeType === ParseNodeType.Error) { + break; + } + + // Continue until we've consumed the entire chain. + if (!this._consumeTokenIfOperator(OperatorType.Assign)) { + break; + } + + assignmentTargets.push(rightExpr); + } + + // Create a tree of assignment expressions starting with the first one. + // The final RHS value is assigned to the targets left to right in Python. + let assignmentNode = AssignmentNode.create(assignmentTargets[0], rightExpr); + + // Look for a type annotation comment at the end of the line. + const typeAnnotationComment = this._parseVariableTypeAnnotationComment(); + if (typeAnnotationComment) { + if (assignmentTargets.length > 1) { + // Type comments are not allowed for chained assignments for the + // same reason that variable type annotations don't support + // chained assignments. Note that a type comment was used here + // so it can be later reported as an error by the binder. 
+ assignmentNode.d.chainedAnnotationComment = typeAnnotationComment; + } else { + assignmentNode.d.annotationComment = typeAnnotationComment; + assignmentNode.d.annotationComment.parent = assignmentNode; + extendRange(assignmentNode, assignmentNode.d.annotationComment); + } + } + + assignmentTargets.forEach((target, index) => { + if (index > 0) { + assignmentNode = AssignmentNode.create(target, assignmentNode); + } + }); + + return assignmentNode; + } + + private _parseFunctionTypeAnnotation(): FunctionAnnotationNode | undefined { + const openParenToken = this._peekToken(); + if (!this._consumeTokenIfType(TokenType.OpenParenthesis)) { + this._addSyntaxError(LocMessage.expectedOpenParen(), this._peekToken()); + return undefined; + } + + let paramAnnotations: ExpressionNode[] = []; + + while (true) { + const nextTokenType = this._peekTokenType(); + if ( + nextTokenType === TokenType.CloseParenthesis || + nextTokenType === TokenType.NewLine || + nextTokenType === TokenType.EndOfStream + ) { + break; + } + + // Consume "*" or "**" indicators but don't do anything with them. + // (We don't enforce that these are present, absent, or match + // the corresponding parameter types.) 
+ this._consumeTokenIfOperator(OperatorType.Multiply) || this._consumeTokenIfOperator(OperatorType.Power); + + const paramAnnotation = this._parseTypeAnnotation(); + paramAnnotations.push(paramAnnotation); + + if (!this._consumeTokenIfType(TokenType.Comma)) { + break; + } + } + + if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { + this._addSyntaxError(LocMessage.expectedCloseParen(), openParenToken); + this._consumeTokensUntilType([TokenType.Colon]); + } + + if (!this._consumeTokenIfType(TokenType.Arrow)) { + this._addSyntaxError(LocMessage.expectedArrow(), this._peekToken()); + return undefined; + } + + const returnType = this._parseTypeAnnotation(); + + let isParamListEllipsis = false; + if (paramAnnotations.length === 1 && paramAnnotations[0].nodeType === ParseNodeType.Ellipsis) { + paramAnnotations = []; + isParamListEllipsis = true; + } + + return FunctionAnnotationNode.create(openParenToken, isParamListEllipsis, paramAnnotations, returnType); + } + + private _parseTypeAnnotation(allowUnpack = false): ExpressionNode { + // Temporary set a flag that indicates we're parsing a type annotation. + const wasParsingTypeAnnotation = this._isParsingTypeAnnotation; + this._isParsingTypeAnnotation = true; + + // Allow unpack operators. 
+ const startToken = this._peekToken(); + const isUnpack = this._consumeTokenIfOperator(OperatorType.Multiply); + + if ( + isUnpack && + allowUnpack && + !this._parseOptions.isStubFile && + !this._isParsingQuotedText && + PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_11) + ) { + this._addSyntaxError(LocMessage.unpackedSubscriptIllegal(), startToken); + } + + let result = this._parseTestExpression(/* allowAssignmentExpression */ false); + if (isUnpack) { + result = UnpackNode.create(startToken, result); + } + + this._isParsingTypeAnnotation = wasParsingTypeAnnotation; + this._hasTypeAnnotations = true; + + return result; + } + + private _reportStringTokenErrors( + stringToken: StringToken | FStringStartToken, + unescapedResult?: StringTokenUtils.UnescapedString + ) { + if (stringToken.flags & StringTokenFlags.Unterminated) { + this._addSyntaxError(LocMessage.stringUnterminated(), stringToken); + } + + if (unescapedResult?.nonAsciiInBytes) { + this._addSyntaxError(LocMessage.stringNonAsciiBytes(), stringToken); + } + + if (stringToken.flags & StringTokenFlags.Format) { + if (PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_6)) { + this._addSyntaxError(LocMessage.formatStringIllegal(), stringToken); + } + + if (stringToken.flags & StringTokenFlags.Bytes) { + this._addSyntaxError(LocMessage.formatStringBytes(), stringToken); + } + + if (stringToken.flags & StringTokenFlags.Unicode) { + this._addSyntaxError(LocMessage.formatStringUnicode(), stringToken); + } + + if (stringToken.flags & StringTokenFlags.Template) { + this._addSyntaxError(LocMessage.formatStringTemplate(), stringToken); + } + } + + if (stringToken.flags & StringTokenFlags.Template) { + if (PythonVersion.isLessThan(this._getLanguageVersion(), pythonVersion3_14)) { + this._addSyntaxError(LocMessage.templateStringIllegal(), stringToken); + } + + if (stringToken.flags & StringTokenFlags.Bytes) { + this._addSyntaxError(LocMessage.templateStringBytes(), stringToken); + } 
+ + if (stringToken.flags & StringTokenFlags.Unicode) { + this._addSyntaxError(LocMessage.templateStringUnicode(), stringToken); + } + } + } + + private _makeStringNode(stringToken: StringToken): StringNode { + const unescapedResult = StringTokenUtils.getUnescapedString(stringToken); + this._reportStringTokenErrors(stringToken, unescapedResult); + return StringNode.create(stringToken, unescapedResult.value); + } + + private _getTypeAnnotationCommentText(): StringToken | undefined { + if (this._tokenIndex === 0) { + return undefined; + } + + const curToken = this._tokenizerOutput!.tokens.getItemAt(this._tokenIndex - 1); + const nextToken = this._tokenizerOutput!.tokens.getItemAt(this._tokenIndex); + + if (curToken.start + curToken.length === nextToken.start) { + return undefined; + } + + const interTokenContents = this._fileContents!.slice(curToken.start + curToken.length, nextToken.start); + const match = interTokenContents.match(commentRegEx); + if (!match) { + return undefined; + } + + // Synthesize a string token and StringNode. + const typeString = match[2]; + + // Ignore all "ignore" comments. Include "[" in the regular + // expression because mypy supports ignore comments of the + // form ignore[errorCode, ...]. We'll treat these as regular + // ignore statements (as though no errorCodes were included). 
+ if (typeString.trim().match(ignoreCommentRegEx)) { + return undefined; + } + + const tokenOffset = curToken.start + curToken.length + match[1].length; + return StringToken.create( + tokenOffset, + typeString.length, + StringTokenFlags.None, + typeString, + 0, + /* comments */ undefined + ); + } + + private _parseVariableTypeAnnotationComment(): ExpressionNode | undefined { + const stringToken = this._getTypeAnnotationCommentText(); + if (!stringToken) { + return undefined; + } + + const stringNode = this._makeStringNode(stringToken); + const stringListNode = StringListNode.create([stringNode]); + const parser = new Parser(); + const parseResults = parser.parseTextExpression( + this._fileContents!, + stringToken.start, + stringToken.length, + this._parseOptions, + ParseTextMode.VariableAnnotation, + /* initialParenDepth */ undefined, + this._typingSymbolAliases + ); + + parseResults.diagnostics.forEach((diag) => { + this._addSyntaxError(diag.message, stringListNode); + }); + + if (!parseResults.parseTree) { + return undefined; + } + + return parseResults.parseTree; + } + + private _parseFunctionTypeAnnotationComment(stringToken: StringToken, functionNode: FunctionNode): void { + const stringNode = this._makeStringNode(stringToken); + const stringListNode = StringListNode.create([stringNode]); + const parser = new Parser(); + const parseResults = parser.parseTextExpression( + this._fileContents!, + stringToken.start, + stringToken.length, + this._parseOptions, + ParseTextMode.FunctionAnnotation, + /* initialParenDepth */ undefined, + this._typingSymbolAliases + ); + + parseResults.diagnostics.forEach((diag) => { + this._addSyntaxError(diag.message, stringListNode); + }); + + if (!parseResults.parseTree) { + return; + } + + const functionAnnotation = parseResults.parseTree; + + functionNode.d.funcAnnotationComment = functionAnnotation; + functionAnnotation.parent = functionNode; + extendRange(functionNode, functionAnnotation); + } + + private 
_parseFStringReplacementField( + fieldExpressions: ExpressionNode[], + middleTokens: FStringMiddleToken[], + formatExpressions: ExpressionNode[], + nestingDepth = 0 + ): boolean { + let nextToken = this._getNextToken(); + + // The caller should have already confirmed that the next token is an open brace. + assert(nextToken.type === TokenType.OpenCurlyBrace); + + // Consume the expression. + const expr = + this._tryParseYieldExpression() ?? + this._parseTestOrStarListAsExpression( + /* allowAssignmentExpression */ true, + /* allowMultipleUnpack */ true, + ErrorExpressionCategory.MissingExpression, + () => LocMessage.expectedExpr() + ); + + fieldExpressions.push(expr); + + if (expr.nodeType === ParseNodeType.Error) { + return false; + } + + // Consume an optional "=" token after the expression. + nextToken = this._peekToken(); + if ( + nextToken.type === TokenType.Operator && + (nextToken as OperatorToken).operatorType === OperatorType.Assign + ) { + // This feature requires Python 3.8 or newer. + if (PythonVersion.isLessThan(this._parseOptions.pythonVersion, pythonVersion3_8)) { + this._addSyntaxError(LocMessage.formatStringDebuggingIllegal(), nextToken); + } + + this._getNextToken(); + nextToken = this._peekToken(); + } + + // Consume an optional !r, !s, or !a token. 
+ if (nextToken.type === TokenType.ExclamationMark) { + this._getNextToken(); + nextToken = this._peekToken(); + + if (nextToken.type !== TokenType.Identifier) { + this._addSyntaxError(LocMessage.formatStringExpectedConversion(), nextToken); + } else { + this._getNextToken(); + nextToken = this._peekToken(); + } + } + + if (nextToken.type === TokenType.Colon) { + this._getNextToken(); + this._parseFStringFormatString(fieldExpressions, middleTokens, formatExpressions, nestingDepth); + nextToken = this._peekToken(); + } + + if (nextToken.type !== TokenType.CloseCurlyBrace) { + this._addSyntaxError(LocMessage.formatStringUnterminated(), nextToken); + return false; + } else { + this._getNextToken(); + } + + // Indicate success. + return true; + } + + private _parseFStringFormatString( + fieldExpressions: ExpressionNode[], + middleTokens: FStringMiddleToken[], + formatExpressions: ExpressionNode[], + nestingDepth: number + ) { + while (true) { + const nextToken = this._peekToken(); + + if (nextToken.type === TokenType.CloseCurlyBrace || nextToken.type === TokenType.FStringEnd) { + break; + } + + if (nextToken.type === TokenType.FStringMiddle) { + this._getNextToken(); + continue; + } + + if (nextToken.type === TokenType.OpenCurlyBrace) { + // The Python interpreter reports an error at the point where the + // nesting level exceeds 1. Don't report the error again for deeper nestings. 
+ if (nestingDepth === 2) { + this._addSyntaxError(LocMessage.formatStringNestedFormatSpecifier(), nextToken); + } + + this._parseFStringReplacementField(fieldExpressions, middleTokens, formatExpressions, nestingDepth + 1); + continue; + } + + break; + } + } + + private _parseFormatString(startToken: FStringStartToken): FormatStringNode { + const middleTokens: FStringMiddleToken[] = []; + const fieldExpressions: ExpressionNode[] = []; + const formatExpressions: ExpressionNode[] = []; + let endToken: FStringEndToken | undefined = undefined; + + // Consume middle tokens and expressions until we hit a "{" or "}" token. + while (true) { + const nextToken = this._peekToken(); + + if (nextToken.type === TokenType.FStringEnd) { + endToken = nextToken as FStringEndToken; + + if ((endToken.flags & StringTokenFlags.Unterminated) !== 0) { + this._addSyntaxError(LocMessage.stringUnterminated(), startToken); + } + this._getNextToken(); + break; + } + + if (nextToken.type === TokenType.FStringMiddle) { + middleTokens.push(nextToken as FStringMiddleToken); + this._getNextToken(); + continue; + } + + if (nextToken.type === TokenType.OpenCurlyBrace) { + if (!this._parseFStringReplacementField(fieldExpressions, middleTokens, formatExpressions)) { + // An error was reported. Try to recover the parse. + if (this._consumeTokensUntilType([TokenType.FStringEnd, TokenType.NewLine])) { + if (this._peekToken().type === TokenType.FStringEnd) { + this._getNextToken(); + } + } + break; + } + continue; + } + + // We've hit an error. Try to recover as gracefully as possible. + if (nextToken.type !== TokenType.NewLine) { + // Consume tokens until we find the end. + if (this._consumeTokensUntilType([TokenType.FStringEnd])) { + this._getNextToken(); + } + } + + this._addSyntaxError( + nextToken.type === TokenType.CloseCurlyBrace + ? 
LocMessage.formatStringBrace() + : LocMessage.stringUnterminated(), + nextToken + ); + break; + } + + this._reportStringTokenErrors(startToken); + + return FormatStringNode.create(startToken, endToken, middleTokens, fieldExpressions, formatExpressions); + } + + private _createBinaryOperationNode( + leftExpression: ExpressionNode, + rightExpression: ExpressionNode, + operatorToken: Token, + operator: OperatorType + ) { + const binaryNode = BinaryOperationNode.create(leftExpression, rightExpression, operatorToken, operator); + + // Determine if we're exceeding the max parse depth. If so, replace + // the subnode with an error node. Otherwise we risk crashing in the binder + // or type evaluator. + const leftMaxDepth = this._maxChildDepthMap.get(leftExpression.id) ?? 0; + const rightMaxDepth = this._maxChildDepthMap.get(rightExpression.id) ?? 0; + + if (leftMaxDepth >= maxChildNodeDepth || rightMaxDepth >= maxChildNodeDepth) { + this._addSyntaxError(LocMessage.maxParseDepthExceeded(), binaryNode); + return ErrorNode.create(binaryNode, ErrorExpressionCategory.MaxDepthExceeded); + } + + this._maxChildDepthMap.set(binaryNode.id, Math.max(leftMaxDepth, rightMaxDepth) + 1); + return binaryNode; + } + + private _createUnaryOperationNode(operatorToken: Token, expression: ExpressionNode, operator: OperatorType) { + const unaryNode = UnaryOperationNode.create(operatorToken, expression, operator); + + // Determine if we're exceeding the max parse depth. If so, replace + // the subnode with an error node. Otherwise we risk crashing in the binder + // or type evaluator. + + const maxDepth = this._maxChildDepthMap.get(expression.id) ?? 
0; + if (maxDepth >= maxChildNodeDepth) { + this._addSyntaxError(LocMessage.maxParseDepthExceeded(), unaryNode); + return ErrorNode.create(unaryNode, ErrorExpressionCategory.MaxDepthExceeded); + } + + this._maxChildDepthMap.set(unaryNode.id, maxDepth + 1); + return unaryNode; + } + + private _parseStringList(): StringListNode { + const stringList: (StringNode | FormatStringNode)[] = []; + + while (true) { + const nextToken = this._peekToken(); + if (nextToken.type === TokenType.String) { + stringList.push(this._makeStringNode(this._getNextToken() as StringToken)); + } else if (nextToken.type === TokenType.FStringStart) { + stringList.push(this._parseFormatString(this._getNextToken() as FStringStartToken)); + } else { + break; + } + } + + const stringNode = StringListNode.create(stringList); + + // If we're parsing a type annotation, parse the contents of the string. + if (this._isParsingTypeAnnotation) { + // Don't allow multiple strings because we have no way of reporting + // parse errors that span strings. + if (stringNode.d.strings.length > 1) { + if (this._isParsingQuotedText) { + this._addSyntaxError(LocMessage.annotationSpansStrings(), stringNode); + } + } else if (stringNode.d.strings[0].nodeType === ParseNodeType.FormatString) { + if (this._isParsingQuotedText) { + this._addSyntaxError(LocMessage.annotationFormatString(), stringNode); + } + } else { + const stringToken = stringNode.d.strings[0].d.token; + const stringValue = StringTokenUtils.getUnescapedString( + stringNode.d.strings[0].d.token, + /* elideCrlf */ false + ); + const unescapedString = stringValue.value; + const tokenOffset = stringToken.start; + const prefixLength = stringToken.prefixLength + stringToken.quoteMarkLength; + + // Don't allow escape characters because we have no way of mapping + // error ranges back to the escaped text. 
+ if (unescapedString.length !== stringToken.length - prefixLength - stringToken.quoteMarkLength) { + if (this._isParsingQuotedText) { + this._addSyntaxError(LocMessage.annotationStringEscape(), stringNode); + } + } else if ( + (stringToken.flags & + (StringTokenFlags.Raw | + StringTokenFlags.Bytes | + StringTokenFlags.Format | + StringTokenFlags.Template)) === + 0 + ) { + const parser = new Parser(); + const parseResults = parser.parseTextExpression( + this._fileContents!, + tokenOffset + prefixLength, + unescapedString.length, + this._parseOptions, + ParseTextMode.VariableAnnotation, + (stringNode.d.strings[0].d.token.flags & StringTokenFlags.Triplicate) !== 0 ? 1 : 0, + this._typingSymbolAliases + ); + + if ( + parseResults.diagnostics.length === 0 || + this._parseOptions.reportErrorsForParsedStringContents + ) { + parseResults.diagnostics.forEach((diag) => { + this._addSyntaxError(diag.message, stringNode); + }); + + if (parseResults.parseTree) { + stringNode.d.annotation = parseResults.parseTree; + stringNode.d.annotation.parent = stringNode; + } + } + } + } + } + + return stringNode; + } + + // Python 3.8 added support for star (unpack) expressions in tuples + // following a return or yield statement in cases where the tuple + // wasn't surrounded in parentheses. + private _reportConditionalErrorForStarTupleElement( + possibleTupleExpr: ExpressionNode, + pythonVersion = pythonVersion3_8 + ) { + if (possibleTupleExpr.nodeType !== ParseNodeType.Tuple) { + return; + } + + if (possibleTupleExpr.d.hasParens) { + return; + } + + if (PythonVersion.isGreaterOrEqualTo(this._parseOptions.pythonVersion, pythonVersion)) { + return; + } + + for (const expr of possibleTupleExpr.d.items) { + if (expr.nodeType === ParseNodeType.Unpack) { + this._addSyntaxError(LocMessage.unpackTuplesIllegal(), expr); + return; + } + } + } + + // Peeks at the next token and returns true if it can never + // represent the start of an expression. 
+ private _isNextTokenNeverExpression(): boolean { + const nextToken = this._peekToken(); + switch (nextToken.type) { + case TokenType.Keyword: { + switch (this._peekKeywordType()) { + case KeywordType.For: + case KeywordType.In: + case KeywordType.If: + return true; + } + break; + } + + case TokenType.Operator: { + switch (this._peekOperatorType()) { + case OperatorType.AddEqual: + case OperatorType.SubtractEqual: + case OperatorType.MultiplyEqual: + case OperatorType.DivideEqual: + case OperatorType.ModEqual: + case OperatorType.BitwiseAndEqual: + case OperatorType.BitwiseOrEqual: + case OperatorType.BitwiseXorEqual: + case OperatorType.LeftShiftEqual: + case OperatorType.RightShiftEqual: + case OperatorType.PowerEqual: + case OperatorType.FloorDivideEqual: + case OperatorType.Assign: + return true; + } + break; + } + + case TokenType.Indent: + case TokenType.Dedent: + case TokenType.NewLine: + case TokenType.EndOfStream: + case TokenType.Semicolon: + case TokenType.CloseParenthesis: + case TokenType.CloseBracket: + case TokenType.CloseCurlyBrace: + case TokenType.Comma: + case TokenType.Colon: + case TokenType.ExclamationMark: + case TokenType.FStringMiddle: + case TokenType.FStringEnd: + return true; + } + + return false; + } + + private _disallowAssignmentExpression(callback: () => void) { + const wasAllowed = this._assignmentExpressionsAllowed; + this._assignmentExpressionsAllowed = false; + + callback(); + + this._assignmentExpressionsAllowed = wasAllowed; + } + + private _getNextToken(): Token { + const token = this._tokenizerOutput!.tokens.getItemAt(this._tokenIndex); + if (!this._atEof()) { + this._tokenIndex++; + } + + return token; + } + + private _atEof(): boolean { + // Are we pointing at the last token in the stream (which is + // assumed to be an end-of-stream token)? 
+ return this._tokenIndex >= this._tokenizerOutput!.tokens.count - 1; + } + + private _peekToken(count = 0): Token { + if (this._tokenIndex + count < 0) { + return this._tokenizerOutput!.tokens.getItemAt(0); + } + + if (this._tokenIndex + count >= this._tokenizerOutput!.tokens.count) { + return this._tokenizerOutput!.tokens.getItemAt(this._tokenizerOutput!.tokens.count - 1); + } + + return this._tokenizerOutput!.tokens.getItemAt(this._tokenIndex + count); + } + + private _peekTokenType(): TokenType { + return this._peekToken().type; + } + + private _peekKeywordType(): KeywordType | undefined { + const nextToken = this._peekToken(); + if (nextToken.type !== TokenType.Keyword) { + return undefined; + } + + return (nextToken as KeywordToken).keywordType; + } + + private _peekOperatorType(): OperatorType | undefined { + const nextToken = this._peekToken(); + if (nextToken.type !== TokenType.Operator) { + return undefined; + } + + return (nextToken as OperatorToken).operatorType; + } + + private _getTokenIfIdentifier(): IdentifierToken | undefined { + const nextToken = this._peekToken(); + if (nextToken.type === TokenType.Identifier) { + return this._getNextToken() as IdentifierToken; + } + + // If the next token is invalid, treat it as an identifier. + if (nextToken.type === TokenType.Invalid) { + this._getNextToken(); + this._addSyntaxError(LocMessage.invalidIdentifierChar(), nextToken); + return IdentifierToken.create(nextToken.start, nextToken.length, '', nextToken.comments); + } + + // If this is a "soft keyword", it can be converted into an identifier. 
+ if (nextToken.type === TokenType.Keyword) { + const keywordToken = nextToken as KeywordToken; + if (KeywordToken.isSoftKeyword(keywordToken)) { + const keywordText = this._fileContents!.substr(nextToken.start, nextToken.length); + this._getNextToken(); + return IdentifierToken.create(nextToken.start, nextToken.length, keywordText, nextToken.comments); + } + } + + return undefined; + } + + // Consumes tokens until the next one in the stream is + // either a specified terminator or the end-of-stream + // token. + private _consumeTokensUntilType(terminators: TokenType[]): boolean { + while (true) { + const token = this._peekToken(); + if (terminators.some((term) => term === token.type)) { + return true; + } + + if (token.type === TokenType.EndOfStream) { + return false; + } + + this._getNextToken(); + } + } + + private _getTokenIfType(tokenType: TokenType): Token | undefined { + if (this._peekTokenType() === tokenType) { + return this._getNextToken(); + } + + return undefined; + } + + private _consumeTokenIfType(tokenType: TokenType): boolean { + return !!this._getTokenIfType(tokenType); + } + + private _consumeTokenIfKeyword(keywordType: KeywordType): boolean { + if (this._peekKeywordType() === keywordType) { + this._getNextToken(); + return true; + } + + return false; + } + + private _consumeTokenIfOperator(operatorType: OperatorType): boolean { + if (this._peekOperatorType() === operatorType) { + this._getNextToken(); + return true; + } + + return false; + } + + private _getKeywordToken(keywordType: KeywordType): KeywordToken { + const keywordToken = this._getNextToken() as KeywordToken; + assert(keywordToken.type === TokenType.Keyword); + assert(keywordToken.keywordType === keywordType); + return keywordToken; + } + + private _getLanguageVersion() { + return this._parseOptions.pythonVersion; + } + + private _suppressErrors(callback: () => void) { + const errorsWereSuppressed = this._areErrorsSuppressed; + try { + this._areErrorsSuppressed = true; + callback(); + 
} finally { + this._areErrorsSuppressed = errorsWereSuppressed; + } + } + + private _addSyntaxError(message: string, range: TextRange) { + assert(range !== undefined); + + if (!this._areErrorsSuppressed) { + this._diagSink.addError( + message, + convertOffsetsToRange(range.start, range.start + range.length, this._tokenizerOutput!.lines) + ); + } + } +} diff --git a/python-parser/packages/pyright-internal/src/parser/stringTokenUtils.ts b/python-parser/packages/pyright-internal/src/parser/stringTokenUtils.ts new file mode 100644 index 00000000..975d5c5b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/parser/stringTokenUtils.ts @@ -0,0 +1,383 @@ +/* + * stringTokenUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Methods that handle unescaping of escaped string token + * literal values. + */ + +import { Char } from '../common/charCodes'; +import { FStringMiddleToken, StringToken, StringTokenFlags } from './tokenizerTypes'; + +export const enum UnescapeErrorType { + InvalidEscapeSequence, +} + +export interface UnescapeError { + // Offset within the unescaped string where + // this error begins. + offset: number; + + // Length of section associated with error. + length: number; + + // Type of error. + errorType: UnescapeErrorType; +} + +export interface UnescapedString { + value: string; + unescapeErrors: UnescapeError[]; + nonAsciiInBytes: boolean; +} + +interface IncompleteUnescapedString { + valueParts: string[]; + unescapeErrors: UnescapeError[]; + nonAsciiInBytes: boolean; +} + +function completeUnescapedString(incomplete: IncompleteUnescapedString, originalString: string): UnescapedString { + const newValue = incomplete.valueParts.join(''); + // Use the original string if it's identical. This prevents us from allocating + // memory to hold a copy. A copy is made because the original string is a + // 'slice' of another, so it doesn't exist in the cache yet. 
+ const value = originalString !== newValue ? newValue : originalString; + return { + ...incomplete, + value, + }; +} + +export function getUnescapedString(stringToken: StringToken | FStringMiddleToken, elideCrlf = true): UnescapedString { + const escapedString = stringToken.escapedValue; + const isRaw = (stringToken.flags & StringTokenFlags.Raw) !== 0; + + if (isRaw) { + return { + value: escapedString, + unescapeErrors: [], + nonAsciiInBytes: false, + }; + } + + const charCodes: number[] = []; + for (let index = 0; index < escapedString.length; index++) { + charCodes.push(escapedString.charCodeAt(index)); + } + + const isBytes = (stringToken.flags & StringTokenFlags.Bytes) !== 0; + + // Handle the common case in an expedited manner. + if ( + !charCodes.some( + (curChar) => curChar === Char.CarriageReturn || curChar === Char.LineFeed || curChar === Char.Backslash + ) + ) { + return { + value: escapedString, + unescapeErrors: [], + nonAsciiInBytes: isBytes && charCodes.some((curChar) => curChar >= 128), + }; + } + + let strOffset = 0; + const output: IncompleteUnescapedString = { + valueParts: [], + unescapeErrors: [], + nonAsciiInBytes: false, + }; + + const addInvalidEscapeOffset = () => { + // Invalid escapes are not reported for raw strings. 
+ if (!isRaw) { + output.unescapeErrors.push({ + offset: strOffset - 1, + length: 2, + errorType: UnescapeErrorType.InvalidEscapeSequence, + }); + } + }; + + const getEscapedCharacter = (offset = 0) => { + if (strOffset + offset >= charCodes.length) { + return Char.EndOfText; + } + + return charCodes[strOffset + offset]; + }; + + const scanHexEscape = (digitCount: number) => { + let foundIllegalHexDigit = false; + let hexValue = 0; + let localValue = ''; + + for (let i = 0; i < digitCount; i++) { + const charCode = getEscapedCharacter(1 + i); + if (!_isHexCharCode(charCode)) { + foundIllegalHexDigit = true; + break; + } + hexValue = 16 * hexValue + _getHexDigitValue(charCode); + } + + if (foundIllegalHexDigit) { + addInvalidEscapeOffset(); + localValue = '\\' + String.fromCharCode(getEscapedCharacter()); + strOffset++; + } else { + localValue = String.fromCharCode(hexValue); + strOffset += 1 + digitCount; + } + + return localValue; + }; + + const appendOutputChar = (charCode: number) => { + const char = String.fromCharCode(charCode); + output.valueParts.push(char); + }; + + while (true) { + let curChar = getEscapedCharacter(); + if (curChar === Char.EndOfText) { + return completeUnescapedString(output, escapedString); + } + + if (curChar === Char.Backslash) { + // Move past the escape (backslash) character. 
+ strOffset++; + + if (isRaw) { + appendOutputChar(curChar); + continue; + } + + curChar = getEscapedCharacter(); + let localValue = ''; + + if (curChar === Char.CarriageReturn || curChar === Char.LineFeed) { + if (curChar === Char.CarriageReturn && getEscapedCharacter(1) === Char.LineFeed) { + if (isRaw) { + localValue += String.fromCharCode(curChar); + } + strOffset++; + curChar = getEscapedCharacter(); + } + if (isRaw) { + localValue = '\\' + localValue + String.fromCharCode(curChar); + } + strOffset++; + } else { + if (isRaw) { + localValue = '\\' + String.fromCharCode(curChar); + strOffset++; + } else { + switch (curChar) { + case Char.Backslash: + case Char.SingleQuote: + case Char.DoubleQuote: + localValue = String.fromCharCode(curChar); + strOffset++; + break; + + case Char.a: + localValue = '\u0007'; + strOffset++; + break; + + case Char.b: + localValue = '\b'; + strOffset++; + break; + + case Char.f: + localValue = '\f'; + strOffset++; + break; + + case Char.n: + localValue = '\n'; + strOffset++; + break; + + case Char.r: + localValue = '\r'; + strOffset++; + break; + + case Char.t: + localValue = '\t'; + strOffset++; + break; + + case Char.v: + localValue = '\v'; + strOffset++; + break; + + case Char.x: + localValue = scanHexEscape(2); + break; + + case Char.N: { + let foundIllegalChar = false; + let charCount = 1; + + // This type of escape isn't allowed for bytes. 
+ if (isBytes) { + foundIllegalChar = true; + } + + if (getEscapedCharacter(charCount) !== Char.OpenBrace) { + foundIllegalChar = true; + } else { + charCount++; + while (true) { + const lookaheadChar = getEscapedCharacter(charCount); + if (lookaheadChar === Char.CloseBrace) { + break; + } else if ( + !_isAlphaNumericChar(lookaheadChar) && + lookaheadChar !== Char.Hyphen && + !_isWhitespaceChar(lookaheadChar) + ) { + foundIllegalChar = true; + break; + } else { + charCount++; + } + } + } + + if (foundIllegalChar) { + addInvalidEscapeOffset(); + localValue = '\\' + String.fromCharCode(curChar); + strOffset++; + } else { + // We don't have the Unicode name database handy, so + // assume that the name is valid and use a '-' as a + // replacement character. + localValue = '-'; + strOffset += 1 + charCount; + } + break; + } + + case Char.u: + case Char.U: + // This type of escape isn't allowed for bytes. + if (isBytes) { + addInvalidEscapeOffset(); + } + localValue = scanHexEscape(curChar === Char.u ? 4 : 8); + break; + + default: + if (_isOctalCharCode(curChar)) { + let octalCode = curChar - Char._0; + strOffset++; + curChar = getEscapedCharacter(); + if (_isOctalCharCode(curChar)) { + octalCode = octalCode * 8 + curChar - Char._0; + strOffset++; + curChar = getEscapedCharacter(); + + if (_isOctalCharCode(curChar)) { + octalCode = octalCode * 8 + curChar - Char._0; + strOffset++; + } + } + + localValue = String.fromCharCode(octalCode); + } else { + localValue = '\\'; + addInvalidEscapeOffset(); + } + break; + } + } + } + + output.valueParts.push(localValue); + } else if (curChar === Char.LineFeed || curChar === Char.CarriageReturn) { + // Skip over the escaped new line (either one or two characters). 
+ if (curChar === Char.CarriageReturn && getEscapedCharacter(1) === Char.LineFeed) { + if (!elideCrlf) { + appendOutputChar(curChar); + } + strOffset++; + curChar = getEscapedCharacter(); + } + + appendOutputChar(curChar); + strOffset++; + } else { + // There's nothing to unescape, so output the escaped character directly. + if (isBytes && curChar >= 128) { + output.nonAsciiInBytes = true; + } + + appendOutputChar(curChar); + strOffset++; + } + } +} + +function _isWhitespaceChar(charCode: number): boolean { + return charCode === Char.Space || charCode === Char.Tab; +} + +function _isAlphaNumericChar(charCode: number): boolean { + if (charCode >= Char._0 && charCode <= Char._9) { + return true; + } + + if (charCode >= Char.a && charCode <= Char.z) { + return true; + } + + if (charCode >= Char.A && charCode <= Char.Z) { + return true; + } + + return false; +} + +function _isOctalCharCode(charCode: number): boolean { + return charCode >= Char._0 && charCode <= Char._7; +} + +function _isHexCharCode(charCode: number): boolean { + if (charCode >= Char._0 && charCode <= Char._9) { + return true; + } + + if (charCode >= Char.a && charCode <= Char.f) { + return true; + } + + if (charCode >= Char.A && charCode <= Char.F) { + return true; + } + + return false; +} + +function _getHexDigitValue(charCode: number): number { + if (charCode >= Char._0 && charCode <= Char._9) { + return charCode - Char._0; + } + + if (charCode >= Char.a && charCode <= Char.f) { + return charCode - Char.a + 10; + } + + if (charCode >= Char.A && charCode <= Char.F) { + return charCode - Char.A + 10; + } + + return 0; +} diff --git a/python-parser/packages/pyright-internal/src/parser/tokenizer.ts b/python-parser/packages/pyright-internal/src/parser/tokenizer.ts new file mode 100644 index 00000000..56232858 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/parser/tokenizer.ts @@ -0,0 +1,1800 @@ +/* + * tokenizer.ts + * Copyright (c) Microsoft Corporation. 
+ * Licensed under the MIT license. + * Author: Eric Traut + * + * Based on code from vscode-python repository: + * https://github.com/Microsoft/vscode-python + * + * Converts a Python program text stream into a stream of tokens. + */ + +import { Char } from '../common/charCodes'; +import { cloneStr } from '../common/core'; +import { TextRange } from '../common/textRange'; +import { TextRangeCollection } from '../common/textRangeCollection'; +import { + isBinary, + isDecimal, + isHex, + isIdentifierChar, + isIdentifierStartChar, + isOctal, + isSurrogateChar, +} from './characters'; +import { CharacterStream } from './characterStream'; +import { + Comment, + CommentType, + DedentToken, + FStringEndToken, + FStringMiddleToken, + FStringStartToken, + IdentifierToken, + IndentToken, + KeywordToken, + KeywordType, + NewLineToken, + NewLineType, + NumberToken, + OperatorFlags, + OperatorToken, + OperatorType, + StringToken, + StringTokenFlags, + Token, + TokenType, +} from './tokenizerTypes'; + +// This must be a Map, as operations like {}["constructor"] succeed. 
+const _keywords: Map = new Map([ + ['and', KeywordType.And], + ['as', KeywordType.As], + ['assert', KeywordType.Assert], + ['async', KeywordType.Async], + ['await', KeywordType.Await], + ['break', KeywordType.Break], + ['case', KeywordType.Case], + ['class', KeywordType.Class], + ['continue', KeywordType.Continue], + ['__debug__', KeywordType.Debug], + ['def', KeywordType.Def], + ['del', KeywordType.Del], + ['elif', KeywordType.Elif], + ['else', KeywordType.Else], + ['except', KeywordType.Except], + ['finally', KeywordType.Finally], + ['for', KeywordType.For], + ['from', KeywordType.From], + ['global', KeywordType.Global], + ['if', KeywordType.If], + ['import', KeywordType.Import], + ['in', KeywordType.In], + ['is', KeywordType.Is], + ['lambda', KeywordType.Lambda], + ['match', KeywordType.Match], + ['nonlocal', KeywordType.Nonlocal], + ['not', KeywordType.Not], + ['or', KeywordType.Or], + ['pass', KeywordType.Pass], + ['raise', KeywordType.Raise], + ['return', KeywordType.Return], + ['try', KeywordType.Try], + ['type', KeywordType.Type], + ['while', KeywordType.While], + ['with', KeywordType.With], + ['yield', KeywordType.Yield], + ['False', KeywordType.False], + ['None', KeywordType.None], + ['True', KeywordType.True], +]); + +const _softKeywords = new Set(['match', 'case', 'type']); + +const _operatorInfo: { [key: number]: OperatorFlags } = { + [OperatorType.Add]: OperatorFlags.Unary | OperatorFlags.Binary, + [OperatorType.AddEqual]: OperatorFlags.Assignment, + [OperatorType.Assign]: OperatorFlags.Assignment, + [OperatorType.BitwiseAnd]: OperatorFlags.Binary, + [OperatorType.BitwiseAndEqual]: OperatorFlags.Assignment, + [OperatorType.BitwiseInvert]: OperatorFlags.Unary, + [OperatorType.BitwiseOr]: OperatorFlags.Binary, + [OperatorType.BitwiseOrEqual]: OperatorFlags.Assignment, + [OperatorType.BitwiseXor]: OperatorFlags.Binary, + [OperatorType.BitwiseXorEqual]: OperatorFlags.Assignment, + [OperatorType.Divide]: OperatorFlags.Binary, + [OperatorType.DivideEqual]: 
OperatorFlags.Assignment, + [OperatorType.Equals]: OperatorFlags.Binary | OperatorFlags.Comparison, + [OperatorType.FloorDivide]: OperatorFlags.Binary, + [OperatorType.FloorDivideEqual]: OperatorFlags.Assignment, + [OperatorType.GreaterThan]: OperatorFlags.Binary | OperatorFlags.Comparison, + [OperatorType.GreaterThanOrEqual]: OperatorFlags.Binary | OperatorFlags.Comparison, + [OperatorType.LeftShift]: OperatorFlags.Binary, + [OperatorType.LeftShiftEqual]: OperatorFlags.Assignment, + [OperatorType.LessOrGreaterThan]: OperatorFlags.Binary | OperatorFlags.Comparison | OperatorFlags.Deprecated, + [OperatorType.LessThan]: OperatorFlags.Binary | OperatorFlags.Comparison, + [OperatorType.LessThanOrEqual]: OperatorFlags.Binary | OperatorFlags.Comparison, + [OperatorType.MatrixMultiply]: OperatorFlags.Binary, + [OperatorType.MatrixMultiplyEqual]: OperatorFlags.Assignment, + [OperatorType.Mod]: OperatorFlags.Binary, + [OperatorType.ModEqual]: OperatorFlags.Assignment, + [OperatorType.Multiply]: OperatorFlags.Binary, + [OperatorType.MultiplyEqual]: OperatorFlags.Assignment, + [OperatorType.NotEquals]: OperatorFlags.Binary | OperatorFlags.Comparison, + [OperatorType.Power]: OperatorFlags.Binary, + [OperatorType.PowerEqual]: OperatorFlags.Assignment, + [OperatorType.RightShift]: OperatorFlags.Binary, + [OperatorType.RightShiftEqual]: OperatorFlags.Assignment, + [OperatorType.Subtract]: OperatorFlags.Binary, + [OperatorType.SubtractEqual]: OperatorFlags.Assignment, + + [OperatorType.And]: OperatorFlags.Binary, + [OperatorType.Or]: OperatorFlags.Binary, + [OperatorType.Not]: OperatorFlags.Unary, + [OperatorType.Is]: OperatorFlags.Binary, + [OperatorType.IsNot]: OperatorFlags.Binary, + [OperatorType.In]: OperatorFlags.Binary, + [OperatorType.NotIn]: OperatorFlags.Binary, +}; + +const _byteOrderMarker = 0xfeff; + +const defaultTabSize = 8; +const magicsRegEx = /\\\s*$/; +const typeIgnoreCommentRegEx = /((^|#)\s*)type:\s*ignore(\s*\[([\s\w-,]*)\]|\s|$)/; +const 
pyrightIgnoreCommentRegEx = /((^|#)\s*)pyright:\s*ignore(\s*\[([\s\w-,]*)\]|\s|$)/; +const underscoreRegEx = /_/g; + +export interface TokenizerOutput { + // List of all tokens. + tokens: TextRangeCollection; + + // List of ranges that comprise the lines. + lines: TextRangeCollection; + + // Map of all line numbers that end in a "type: ignore" comment. + typeIgnoreLines: Map; + + // Map of all line numbers that end in a "pyright: ignore" comment. + pyrightIgnoreLines: Map; + + // Program starts with a "type: ignore" comment. + typeIgnoreAll: IgnoreComment | undefined; + + // Line-end sequence ('/n', '/r', or '/r/n'). + predominantEndOfLineSequence: string; + + // True if the tokenizer was able to identify the file's predominant + // tab sequence. False if predominantTabSequence is set to our default. + hasPredominantTabSequence: boolean; + + // Tab sequence ('/t or consecutive spaces). + predominantTabSequence: string; + + // Does the code mostly use single or double quote + // characters for string literals? 
+ predominantSingleQuoteCharacter: string; +} + +interface StringScannerOutput { + escapedValue: string; + flags: StringTokenFlags; +} + +interface IndentInfo { + tab1Spaces: number; + tab8Spaces: number; + isSpacePresent: boolean; + isTabPresent: boolean; +} + +export interface IgnoreCommentRule { + text: string; + range: TextRange; +} + +export interface IgnoreComment { + range: TextRange; + rulesList: IgnoreCommentRule[] | undefined; +} + +interface FStringReplacementFieldContext { + inFormatSpecifier: boolean; + parenDepth: number; +} + +interface FStringContext { + startToken: FStringStartToken; + replacementFieldStack: FStringReplacementFieldContext[]; + activeReplacementField?: FStringReplacementFieldContext; +} + +enum MagicsKind { + None, + Line, + Cell, +} + +export class Tokenizer { + private _cs = new CharacterStream(''); + private _tokens: Token[] = []; + private _prevLineStart = 0; + private _parenDepth = 0; + private _lineRanges: TextRange[] = []; + private _indentAmounts: IndentInfo[] = []; + private _typeIgnoreAll: IgnoreComment | undefined; + private _typeIgnoreLines = new Map(); + private _pyrightIgnoreLines = new Map(); + private _comments: Comment[] | undefined; + private _fStringStack: FStringContext[] = []; + private _activeFString: FStringContext | undefined; + + // Total times CR, CR/LF, and LF are used to terminate + // lines. Used to determine the predominant line ending. + private _crCount = 0; + private _crLfCount = 0; + private _lfCount = 0; + + // Number of times an indent token is emitted. + private _indentCount = 0; + + // Number of times an indent token is emitted and a tab character + // is present (used to determine predominant tab sequence). + private _indentTabCount = 0; + + // Number of spaces that are added for an indent token + // (used to determine predominant tab sequence). + private _indentSpacesTotal = 0; + + // Number of single or double quote string literals found + // in the code. 
+ private _singleQuoteCount = 0; + private _doubleQuoteCount = 0; + + // Assume Jupyter notebook tokenization rules? + private _useNotebookMode = false; + + // Intern identifier strings within a single tokenization pass. This reduces + // per-identifier allocations while still ensuring we don't retain substrings + // that reference the original source text. + private readonly _identifierInternedStrings = new Map(); + + tokenize( + text: string, + start?: number, + length?: number, + initialParenDepth = 0, + useNotebookMode = false + ): TokenizerOutput { + if (start === undefined) { + start = 0; + } else if (start < 0 || start > text.length) { + throw new Error(`Invalid range start (start=${start}, text.length=${text.length})`); + } + + if (length === undefined) { + length = text.length; + } else if (length < 0 || start + length > text.length) { + throw new Error(`Invalid range length (start=${start}, length=${length}, text.length=${text.length})`); + } else if (start + length < text.length) { + text = text.slice(0, start + length); + } + + this._cs = new CharacterStream(text); + this._cs.position = start; + this._tokens = []; + this._prevLineStart = 0; + this._parenDepth = initialParenDepth; + this._lineRanges = []; + this._indentAmounts = []; + this._useNotebookMode = useNotebookMode; + this._identifierInternedStrings.clear(); + + const end = start + length; + + if (start === 0) { + this._readIndentationAfterNewLine(); + } + + while (!this._cs.isEndOfStream()) { + this._addNextToken(); + + if (this._cs.position >= end) { + break; + } + } + + // Insert any implied FStringEnd tokens. + while (this._activeFString) { + this._tokens.push( + FStringEndToken.create( + this._cs.position, + 0, + this._activeFString.startToken.flags | StringTokenFlags.Unterminated + ) + ); + this._activeFString = this._fStringStack.pop(); + } + + // Insert an implied new line to make parsing easier. 
+ if (this._tokens.length === 0 || this._tokens[this._tokens.length - 1].type !== TokenType.NewLine) { + if (this._parenDepth === 0) { + this._tokens.push(NewLineToken.create(this._cs.position, 0, NewLineType.Implied, this._getComments())); + } + } + + // Insert any implied dedent tokens. + this._setIndent(this._cs.position, 0, 0, /* isSpacePresent */ false, /* isTabPresent */ false); + + // Add a final end-of-stream token to make parsing easier. + this._tokens.push(Token.create(TokenType.EndOfStream, this._cs.position, 0, this._getComments())); + + // Add the final line range. + this._addLineRange(); + + // If the last line ended in a line-end character, add an empty line. + if (this._lineRanges.length > 0) { + const lastLine = this._lineRanges[this._lineRanges.length - 1]; + const lastCharOfLastLine = text.charCodeAt(lastLine.start + lastLine.length - 1); + if (lastCharOfLastLine === Char.CarriageReturn || lastCharOfLastLine === Char.LineFeed) { + this._lineRanges.push({ start: this._cs.position, length: 0 }); + } + } + + let predominantEndOfLineSequence = '\n'; + if (this._crCount > this._crLfCount && this._crCount > this._lfCount) { + predominantEndOfLineSequence = '\r'; + } else if (this._crLfCount > this._crCount && this._crLfCount > this._lfCount) { + predominantEndOfLineSequence = '\r\n'; + } + + let predominantTabSequence = ' '; + let hasPredominantTabSequence = false; + // If more than half of the indents use tab sequences, + // assume we're using tabs rather than spaces. + if (this._indentTabCount > this._indentCount / 2) { + hasPredominantTabSequence = true; + predominantTabSequence = '\t'; + } else if (this._indentCount > 0) { + hasPredominantTabSequence = true; + // Compute the average number of spaces per indent + // to estimate the predominant tab value. 
+ let averageSpacePerIndent = Math.round(this._indentSpacesTotal / this._indentCount); + if (averageSpacePerIndent < 1) { + averageSpacePerIndent = 1; + } else if (averageSpacePerIndent > defaultTabSize) { + averageSpacePerIndent = defaultTabSize; + } + predominantTabSequence = ''; + for (let i = 0; i < averageSpacePerIndent; i++) { + predominantTabSequence += ' '; + } + } + + return { + tokens: new TextRangeCollection(this._tokens), + lines: new TextRangeCollection(this._lineRanges), + typeIgnoreLines: this._typeIgnoreLines, + typeIgnoreAll: this._typeIgnoreAll, + pyrightIgnoreLines: this._pyrightIgnoreLines, + predominantEndOfLineSequence, + hasPredominantTabSequence, + predominantTabSequence, + predominantSingleQuoteCharacter: this._singleQuoteCount >= this._doubleQuoteCount ? "'" : '"', + }; + } + + static getOperatorInfo(operatorType: OperatorType): OperatorFlags { + return _operatorInfo[operatorType]; + } + + static isWhitespace(token: Token) { + return token.type === TokenType.NewLine || token.type === TokenType.Indent || token.type === TokenType.Dedent; + } + + static isPythonKeyword(name: string, includeSoftKeywords = false): boolean { + const keyword = _keywords.get(name); + if (!keyword) { + return false; + } + + if (includeSoftKeywords) { + return true; + } + + return !_softKeywords.has(name); + } + + static isPythonIdentifier(value: string) { + for (let i = 0; i < value.length; i++) { + if (i === 0 ? 
!isIdentifierStartChar(value.charCodeAt(i)) : !isIdentifierChar(value.charCodeAt(i))) { + return false; + } + } + + return true; + } + + static isOperatorAssignment(operatorType?: OperatorType): boolean { + if (operatorType === undefined || _operatorInfo[operatorType] === undefined) { + return false; + } + return (_operatorInfo[operatorType] & OperatorFlags.Assignment) !== 0; + } + + static isOperatorComparison(operatorType?: OperatorType): boolean { + if (operatorType === undefined || _operatorInfo[operatorType] === undefined) { + return false; + } + return (_operatorInfo[operatorType] & OperatorFlags.Comparison) !== 0; + } + + private _addNextToken(): void { + // Are we in the middle of an f-string but not in a replacement field? + if ( + this._activeFString && + (!this._activeFString.activeReplacementField || + this._activeFString.activeReplacementField.inFormatSpecifier) + ) { + this._handleFStringMiddle(); + } else { + this._cs.skipWhitespace(); + } + + if (this._cs.isEndOfStream()) { + return; + } + + if (!this._handleCharacter()) { + this._cs.moveNext(); + } + } + + // Consumes one or more characters from the character stream and pushes + // tokens onto the token list. Returns true if the caller should advance + // to the next character. 
+ private _handleCharacter(): boolean { + // f-strings, b-strings, etc + const stringPrefixLength = this._getStringPrefixLength(); + + if (stringPrefixLength >= 0) { + let stringPrefix = ''; + if (stringPrefixLength > 0) { + stringPrefix = this._cs.getText().slice(this._cs.position, this._cs.position + stringPrefixLength); + // Indeed a string + this._cs.advance(stringPrefixLength); + } + + const quoteTypeFlags = this._getQuoteTypeFlags(stringPrefix); + if (quoteTypeFlags !== StringTokenFlags.None) { + this._handleString(quoteTypeFlags, stringPrefixLength); + return true; + } + } + + if (this._cs.currentChar === Char.Hash) { + this._handleComment(); + return true; + } + + if (this._useNotebookMode) { + const kind = this._getIPythonMagicsKind(); + if (kind === MagicsKind.Line) { + this._handleIPythonMagics( + this._cs.currentChar === Char.Percent ? CommentType.IPythonMagic : CommentType.IPythonShellEscape + ); + return true; + } + + if (kind === MagicsKind.Cell) { + this._handleIPythonMagics( + this._cs.currentChar === Char.Percent + ? CommentType.IPythonCellMagic + : CommentType.IPythonCellShellEscape + ); + return true; + } + } + + switch (this._cs.currentChar) { + case _byteOrderMarker: { + // Skip the BOM if it's at the start of the file. + if (this._cs.position === 0) { + return false; + } + return this._handleInvalid(); + } + + case Char.CarriageReturn: { + const length = this._cs.nextChar === Char.LineFeed ? 2 : 1; + const newLineType = length === 2 ? NewLineType.CarriageReturnLineFeed : NewLineType.CarriageReturn; + this._handleNewLine(length, newLineType); + return true; + } + + case Char.LineFeed: { + this._handleNewLine(1, NewLineType.LineFeed); + return true; + } + + case Char.Backslash: { + if (this._cs.nextChar === Char.CarriageReturn) { + const advance = this._cs.lookAhead(2) === Char.LineFeed ? 3 : 2; + + // If a line continuation (\\ + CR[LF]) appears at EOF, it's an error. 
+ if (this._cs.position + advance >= this._cs.length) { + return this._handleInvalid(); + } + + this._cs.advance(advance); + this._addLineRange(); + + if (this._tokens.length > 0 && this._tokens[this._tokens.length - 1].type === TokenType.NewLine) { + this._readIndentationAfterNewLine(); + } + return true; + } + + if (this._cs.nextChar === Char.LineFeed) { + const advance = 2; + + // If a line continuation (\\ + LF) appears at EOF, it's an error. + if (this._cs.position + advance >= this._cs.length) { + return this._handleInvalid(); + } + + this._cs.advance(advance); + this._addLineRange(); + + if (this._tokens.length > 0 && this._tokens[this._tokens.length - 1].type === TokenType.NewLine) { + this._readIndentationAfterNewLine(); + } + return true; + } + + return this._handleInvalid(); + } + + case Char.OpenParenthesis: { + this._parenDepth++; + this._tokens.push(Token.create(TokenType.OpenParenthesis, this._cs.position, 1, this._getComments())); + break; + } + + case Char.CloseParenthesis: { + if (this._parenDepth > 0) { + this._parenDepth--; + } + this._tokens.push(Token.create(TokenType.CloseParenthesis, this._cs.position, 1, this._getComments())); + break; + } + + case Char.OpenBracket: { + this._parenDepth++; + this._tokens.push(Token.create(TokenType.OpenBracket, this._cs.position, 1, this._getComments())); + break; + } + + case Char.CloseBracket: { + if (this._parenDepth > 0) { + this._parenDepth--; + } + this._tokens.push(Token.create(TokenType.CloseBracket, this._cs.position, 1, this._getComments())); + break; + } + + case Char.OpenBrace: { + this._parenDepth++; + this._tokens.push(Token.create(TokenType.OpenCurlyBrace, this._cs.position, 1, this._getComments())); + + if (this._activeFString) { + // Are we starting a new replacement field? 
+ if ( + !this._activeFString.activeReplacementField || + this._activeFString.activeReplacementField.inFormatSpecifier + ) { + // If there is already an active replacement field, push it + // on the stack so we can pop it later. + if (this._activeFString.activeReplacementField) { + this._activeFString.replacementFieldStack.push(this._activeFString.activeReplacementField); + } + + // Create a new active replacement field context. + this._activeFString.activeReplacementField = { + inFormatSpecifier: false, + parenDepth: this._parenDepth, + }; + } + } + break; + } + + case Char.CloseBrace: { + if ( + this._activeFString && + this._activeFString.activeReplacementField?.parenDepth === this._parenDepth + ) { + this._activeFString.activeReplacementField = this._activeFString.replacementFieldStack.pop(); + } + + if (this._parenDepth > 0) { + this._parenDepth--; + } + this._tokens.push(Token.create(TokenType.CloseCurlyBrace, this._cs.position, 1, this._getComments())); + break; + } + + case Char.Comma: { + this._tokens.push(Token.create(TokenType.Comma, this._cs.position, 1, this._getComments())); + break; + } + + case Char.Backtick: { + this._tokens.push(Token.create(TokenType.Backtick, this._cs.position, 1, this._getComments())); + break; + } + + case Char.Semicolon: { + this._tokens.push(Token.create(TokenType.Semicolon, this._cs.position, 1, this._getComments())); + break; + } + + case Char.Colon: { + if (this._cs.nextChar === Char.Equal) { + if ( + !this._activeFString || + !this._activeFString.activeReplacementField || + this._activeFString.activeReplacementField.parenDepth !== this._parenDepth + ) { + this._tokens.push( + OperatorToken.create(this._cs.position, 2, OperatorType.Walrus, this._getComments()) + ); + this._cs.advance(1); + break; + } + } + + this._tokens.push(Token.create(TokenType.Colon, this._cs.position, 1, this._getComments())); + + if ( + this._activeFString?.activeReplacementField && + this._parenDepth === 
this._activeFString.activeReplacementField.parenDepth + ) { + this._activeFString.activeReplacementField.inFormatSpecifier = true; + } + break; + } + + default: { + if (this._isPossibleNumber()) { + if (this._tryNumber()) { + return true; + } + } + + if (this._cs.currentChar === Char.Period) { + if (this._cs.nextChar === Char.Period && this._cs.lookAhead(2) === Char.Period) { + this._tokens.push(Token.create(TokenType.Ellipsis, this._cs.position, 3, this._getComments())); + this._cs.advance(3); + return true; + } + this._tokens.push(Token.create(TokenType.Dot, this._cs.position, 1, this._getComments())); + break; + } + + if (!this._tryIdentifier()) { + if (!this._tryOperator()) { + return this._handleInvalid(); + } + } + return true; + } + } + return false; + } + + private _addLineRange() { + const lineLength = this._cs.position - this._prevLineStart; + if (lineLength > 0) { + this._lineRanges.push({ start: this._prevLineStart, length: lineLength }); + } + + this._prevLineStart = this._cs.position; + } + + private _handleNewLine(length: number, newLineType: NewLineType) { + if (this._parenDepth === 0 && newLineType !== NewLineType.Implied) { + // New lines are ignored within parentheses. + // We'll also avoid adding multiple newlines in a row to simplify parsing. 
+ if (this._tokens.length === 0 || this._tokens[this._tokens.length - 1].type !== TokenType.NewLine) { + this._tokens.push(NewLineToken.create(this._cs.position, length, newLineType, this._getComments())); + } + } + if (newLineType === NewLineType.CarriageReturn) { + this._crCount++; + } else if (newLineType === NewLineType.CarriageReturnLineFeed) { + this._crLfCount++; + } else { + this._lfCount++; + } + this._cs.advance(length); + this._addLineRange(); + this._readIndentationAfterNewLine(); + } + + private _readIndentationAfterNewLine() { + let tab1Spaces = 0; + let tab8Spaces = 0; + let isTabPresent = false; + let isSpacePresent = false; + + const startOffset = this._cs.position; + + while (!this._cs.isEndOfStream()) { + switch (this._cs.currentChar) { + case Char.Space: + tab1Spaces++; + tab8Spaces++; + isSpacePresent = true; + this._cs.moveNext(); + break; + + case Char.Tab: + // Translate tabs into spaces assuming both 1-space + // and 8-space tab stops. + tab1Spaces++; + tab8Spaces += defaultTabSize - (tab8Spaces % defaultTabSize); + isTabPresent = true; + this._cs.moveNext(); + break; + + case Char.FormFeed: + tab1Spaces = 0; + tab8Spaces = 0; + isTabPresent = false; + isSpacePresent = false; + this._cs.moveNext(); + break; + + case Char.Hash: + case Char.LineFeed: + case Char.CarriageReturn: + // Blank line -- no need to adjust indentation. + return; + + default: + // Non-blank line. Set the current indent level. + this._setIndent(startOffset, tab1Spaces, tab8Spaces, isSpacePresent, isTabPresent); + return; + } + } + } + + // The caller must specify two space count values. The first assumes + // that tabs are translated into one-space tab stops. The second assumes + // that tabs are translated into eight-space tab stops. + private _setIndent( + startOffset: number, + tab1Spaces: number, + tab8Spaces: number, + isSpacePresent: boolean, + isTabPresent: boolean + ) { + // Indentations are ignored within a parenthesized clause. 
+ if (this._parenDepth > 0) { + return; + } + + // Insert indent or dedent tokens as necessary. + if (this._indentAmounts.length === 0) { + if (tab8Spaces > 0) { + this._indentCount++; + if (isTabPresent) { + this._indentTabCount++; + } + this._indentSpacesTotal += tab8Spaces; + + this._indentAmounts.push({ + tab1Spaces, + tab8Spaces, + isSpacePresent, + isTabPresent, + }); + this._tokens.push(IndentToken.create(startOffset, tab1Spaces, tab8Spaces, false, this._getComments())); + } + } else { + const prevTabInfo = this._indentAmounts[this._indentAmounts.length - 1]; + if (prevTabInfo.tab8Spaces < tab8Spaces) { + // The Python spec says that if there is ambiguity about how tabs should + // be translated into spaces because the user has intermixed tabs and + // spaces, it should be an error. We'll record this condition in the token + // so the parser can later report it. + const isIndentAmbiguous = + ((prevTabInfo.isSpacePresent && isTabPresent) || (prevTabInfo.isTabPresent && isSpacePresent)) && + prevTabInfo.tab1Spaces >= tab1Spaces; + + this._indentCount++; + if (isTabPresent) { + this._indentTabCount++; + } + this._indentSpacesTotal += tab8Spaces - this._indentAmounts[this._indentAmounts.length - 1].tab8Spaces; + + this._indentAmounts.push({ + tab1Spaces, + tab8Spaces, + isSpacePresent, + isTabPresent, + }); + + this._tokens.push( + IndentToken.create(startOffset, tab1Spaces, tab8Spaces, isIndentAmbiguous, this._getComments()) + ); + } else if (prevTabInfo.tab8Spaces === tab8Spaces) { + // The Python spec says that if there is ambiguity about how tabs should + // be translated into spaces because the user has intermixed tabs and + // spaces, it should be an error. We'll record this condition in the token + // so the parser can later report it. 
+ if ((prevTabInfo.isSpacePresent && isTabPresent) || (prevTabInfo.isTabPresent && isSpacePresent)) { + this._tokens.push( + IndentToken.create(startOffset, tab1Spaces, tab8Spaces, true, this._getComments()) + ); + } + } else { + // The Python spec says that if there is ambiguity about how tabs should + // be translated into spaces because the user has intermixed tabs and + // spaces, it should be an error. We'll record this condition in the token + // so the parser can later report it. + let isDedentAmbiguous = + (prevTabInfo.isSpacePresent && isTabPresent) || (prevTabInfo.isTabPresent && isSpacePresent); + + // The Python spec says that dedent amounts need to match the indent + // amount exactly. An error is generated at runtime if it doesn't. + // We'll record that error condition within the token, allowing the + // parser to report it later. + const dedentPoints: number[] = []; + while ( + this._indentAmounts.length > 0 && + this._indentAmounts[this._indentAmounts.length - 1].tab8Spaces > tab8Spaces + ) { + dedentPoints.push( + this._indentAmounts.length > 1 + ? this._indentAmounts[this._indentAmounts.length - 2].tab8Spaces + : 0 + ); + this._indentAmounts.pop(); + } + + dedentPoints.forEach((dedentAmount, index) => { + const matchesIndent = index < dedentPoints.length - 1 || dedentAmount === tab8Spaces; + const actualDedentAmount = index < dedentPoints.length - 1 ? 
dedentAmount : tab8Spaces; + this._tokens.push( + DedentToken.create( + this._cs.position, + 0, + actualDedentAmount, + matchesIndent, + isDedentAmbiguous, + this._getComments() + ) + ); + + isDedentAmbiguous = false; + }); + } + } + } + + private _tryIdentifier(): boolean { + const swallowRemainingChars = () => { + while (true) { + if (isIdentifierChar(this._cs.currentChar)) { + this._cs.moveNext(); + } else if (isIdentifierChar(this._cs.currentChar, this._cs.nextChar)) { + this._cs.moveNext(); + this._cs.moveNext(); + } else { + break; + } + } + }; + + const start = this._cs.position; + if (isIdentifierStartChar(this._cs.currentChar)) { + this._cs.moveNext(); + swallowRemainingChars(); + } else if (isIdentifierStartChar(this._cs.currentChar, this._cs.nextChar)) { + this._cs.moveNext(); + this._cs.moveNext(); + swallowRemainingChars(); + } + + if (this._cs.position > start) { + const value = this._cs.getText().slice(start, this._cs.position); + const keywordType = _keywords.get(value); + if (keywordType !== undefined) { + this._tokens.push( + KeywordToken.create(start, this._cs.position - start, keywordType, this._getComments()) + ); + } else { + const internedValue = this._identifierInternedStrings.get(value) ?? 
this._internIdentifierString(value); + this._tokens.push( + IdentifierToken.create(start, this._cs.position - start, internedValue, this._getComments()) + ); + } + return true; + } + return false; + } + + private _internIdentifierString(value: string) { + const clonedValue = cloneStr(value); + this._identifierInternedStrings.set(clonedValue, clonedValue); + return clonedValue; + } + + private _isPossibleNumber(): boolean { + if (isDecimal(this._cs.currentChar)) { + return true; + } + + if (this._cs.currentChar === Char.Period && isDecimal(this._cs.nextChar)) { + return true; + } + + return false; + } + + private _tryNumber(): boolean { + const start = this._cs.position; + + if (this._cs.currentChar === Char._0) { + let radix = 0; + let leadingChars = 0; + + // Try hex => hexinteger: "0" ("x" | "X") (["_"] hexdigit)+ + if ((this._cs.nextChar === Char.x || this._cs.nextChar === Char.X) && isHex(this._cs.lookAhead(2))) { + this._cs.advance(2); + leadingChars = 2; + while (isHex(this._cs.currentChar)) { + this._cs.moveNext(); + } + radix = 16; + } + + // Try binary => bininteger: "0" ("b" | "B") (["_"] bindigit)+ + else if ( + (this._cs.nextChar === Char.b || this._cs.nextChar === Char.B) && + isBinary(this._cs.lookAhead(2)) + ) { + this._cs.advance(2); + leadingChars = 2; + while (isBinary(this._cs.currentChar)) { + this._cs.moveNext(); + } + radix = 2; + } + + // Try octal => octinteger: "0" ("o" | "O") (["_"] octdigit)+ + else if ((this._cs.nextChar === Char.o || this._cs.nextChar === Char.O) && isOctal(this._cs.lookAhead(2))) { + this._cs.advance(2); + leadingChars = 2; + while (isOctal(this._cs.currentChar)) { + this._cs.moveNext(); + } + radix = 8; + } + + if (radix > 0) { + const text = this._cs.getText().slice(start, this._cs.position); + const simpleIntText = text.replace(underscoreRegEx, ''); + let intValue: number | bigint = parseInt(simpleIntText.slice(leadingChars), radix); + + if (!isNaN(intValue)) { + const bigIntValue = BigInt(simpleIntText); + if ( + 
!isFinite(intValue) || + intValue < Number.MIN_SAFE_INTEGER || + intValue > Number.MAX_SAFE_INTEGER + ) { + intValue = bigIntValue; + } + + this._tokens.push( + NumberToken.create(start, text.length, intValue, true, false, this._getComments()) + ); + return true; + } + } + } + + let isDecimalInteger = false; + let mightBeFloatingPoint = false; + // Try decimal int => + // decinteger: nonzerodigit (["_"] digit)* | "0" (["_"] "0")* + // nonzerodigit: "1"..."9" + // digit: "0"..."9" + if (this._cs.currentChar >= Char._1 && this._cs.currentChar <= Char._9) { + while (isDecimal(this._cs.currentChar)) { + mightBeFloatingPoint = true; + this._cs.moveNext(); + } + isDecimalInteger = + this._cs.currentChar !== Char.Period && + this._cs.currentChar !== Char.e && + this._cs.currentChar !== Char.E; + } + + // "0" (["_"] "0")* + if (this._cs.currentChar === Char._0) { + mightBeFloatingPoint = true; + while (this._cs.currentChar === Char._0 || this._cs.currentChar === Char.Underscore) { + this._cs.moveNext(); + } + isDecimalInteger = + this._cs.currentChar !== Char.Period && + this._cs.currentChar !== Char.e && + this._cs.currentChar !== Char.E && + (this._cs.currentChar < Char._1 || this._cs.currentChar > Char._9); + } + + if (isDecimalInteger) { + let text = this._cs.getText().slice(start, this._cs.position); + const simpleIntText = text.replace(underscoreRegEx, ''); + let intValue: number | bigint = parseInt(simpleIntText, 10); + + if (!isNaN(intValue)) { + let isImaginary = false; + + const bigIntValue = BigInt(simpleIntText); + if ( + !isFinite(intValue) || + bigIntValue < Number.MIN_SAFE_INTEGER || + bigIntValue > Number.MAX_SAFE_INTEGER + ) { + intValue = bigIntValue; + } + + if (this._cs.currentChar === Char.j || this._cs.currentChar === Char.J) { + isImaginary = true; + text += String.fromCharCode(this._cs.currentChar); + this._cs.moveNext(); + } + + this._tokens.push( + NumberToken.create(start, text.length, intValue, true, isImaginary, this._getComments()) + ); + 
return true; + } + } + + // Floating point. Sign and leading digits were already skipped over. + this._cs.position = start; + if ( + mightBeFloatingPoint || + (this._cs.currentChar === Char.Period && this._cs.nextChar >= Char._0 && this._cs.nextChar <= Char._9) + ) { + if (this._skipFloatingPointCandidate()) { + let text = this._cs.getText().slice(start, this._cs.position); + const value = parseFloat(text); + if (!isNaN(value)) { + let isImaginary = false; + if (this._cs.currentChar === Char.j || this._cs.currentChar === Char.J) { + isImaginary = true; + text += String.fromCharCode(this._cs.currentChar); + this._cs.moveNext(); + } + this._tokens.push( + NumberToken.create( + start, + this._cs.position - start, + value, + false, + isImaginary, + this._getComments() + ) + ); + return true; + } + } + } + + this._cs.position = start; + return false; + } + + private _tryOperator(): boolean { + let length = 0; + const nextChar = this._cs.nextChar; + let operatorType: OperatorType; + + switch (this._cs.currentChar) { + case Char.Plus: + length = nextChar === Char.Equal ? 2 : 1; + operatorType = length === 2 ? OperatorType.AddEqual : OperatorType.Add; + break; + + case Char.Ampersand: + length = nextChar === Char.Equal ? 2 : 1; + operatorType = length === 2 ? OperatorType.BitwiseAndEqual : OperatorType.BitwiseAnd; + break; + + case Char.Bar: + length = nextChar === Char.Equal ? 2 : 1; + operatorType = length === 2 ? OperatorType.BitwiseOrEqual : OperatorType.BitwiseOr; + break; + + case Char.Caret: + length = nextChar === Char.Equal ? 2 : 1; + operatorType = length === 2 ? 
OperatorType.BitwiseXorEqual : OperatorType.BitwiseXor; + break; + + case Char.Equal: + if ( + this._activeFString?.activeReplacementField && + this._activeFString?.activeReplacementField.parenDepth === this._parenDepth && + !this._activeFString.activeReplacementField.inFormatSpecifier && + nextChar !== Char.Equal + ) { + length = 1; + operatorType = OperatorType.Assign; + break; + } + + length = nextChar === Char.Equal ? 2 : 1; + operatorType = length === 2 ? OperatorType.Equals : OperatorType.Assign; + break; + + case Char.ExclamationMark: + if (nextChar !== Char.Equal) { + if (this._activeFString) { + // Handle the conversion separator (!) within an f-string. + this._tokens.push( + Token.create(TokenType.ExclamationMark, this._cs.position, 1, this._getComments()) + ); + this._cs.advance(1); + return true; + } + + return false; + } + length = 2; + operatorType = OperatorType.NotEquals; + break; + + case Char.Percent: + length = nextChar === Char.Equal ? 2 : 1; + operatorType = length === 2 ? OperatorType.ModEqual : OperatorType.Mod; + break; + + case Char.Tilde: + length = 1; + operatorType = OperatorType.BitwiseInvert; + break; + + case Char.Hyphen: + if (nextChar === Char.Greater) { + this._tokens.push(Token.create(TokenType.Arrow, this._cs.position, 2, this._getComments())); + this._cs.advance(2); + return true; + } + + length = nextChar === Char.Equal ? 2 : 1; + operatorType = length === 2 ? OperatorType.SubtractEqual : OperatorType.Subtract; + break; + + case Char.Asterisk: + if (nextChar === Char.Asterisk) { + length = this._cs.lookAhead(2) === Char.Equal ? 3 : 2; + operatorType = length === 3 ? OperatorType.PowerEqual : OperatorType.Power; + } else { + length = nextChar === Char.Equal ? 2 : 1; + operatorType = length === 2 ? OperatorType.MultiplyEqual : OperatorType.Multiply; + } + break; + + case Char.Slash: + if (nextChar === Char.Slash) { + length = this._cs.lookAhead(2) === Char.Equal ? 3 : 2; + operatorType = length === 3 ? 
OperatorType.FloorDivideEqual : OperatorType.FloorDivide; + } else { + length = nextChar === Char.Equal ? 2 : 1; + operatorType = length === 2 ? OperatorType.DivideEqual : OperatorType.Divide; + } + break; + + case Char.Less: + if (nextChar === Char.Less) { + length = this._cs.lookAhead(2) === Char.Equal ? 3 : 2; + operatorType = length === 3 ? OperatorType.LeftShiftEqual : OperatorType.LeftShift; + } else if (nextChar === Char.Greater) { + length = 2; + operatorType = OperatorType.LessOrGreaterThan; + } else { + length = nextChar === Char.Equal ? 2 : 1; + operatorType = length === 2 ? OperatorType.LessThanOrEqual : OperatorType.LessThan; + } + break; + + case Char.Greater: + if (nextChar === Char.Greater) { + length = this._cs.lookAhead(2) === Char.Equal ? 3 : 2; + operatorType = length === 3 ? OperatorType.RightShiftEqual : OperatorType.RightShift; + } else { + length = nextChar === Char.Equal ? 2 : 1; + operatorType = length === 2 ? OperatorType.GreaterThanOrEqual : OperatorType.GreaterThan; + } + break; + + case Char.At: + length = nextChar === Char.Equal ? 2 : 1; + operatorType = length === 2 ? 
OperatorType.MatrixMultiplyEqual : OperatorType.MatrixMultiply; + break; + + default: + return false; + } + this._tokens.push(OperatorToken.create(this._cs.position, length, operatorType, this._getComments())); + this._cs.advance(length); + return length > 0; + } + + private _handleInvalid(): boolean { + const start = this._cs.position; + while (true) { + if ( + this._cs.currentChar === Char.LineFeed || + this._cs.currentChar === Char.CarriageReturn || + this._cs.isAtWhiteSpace() || + this._cs.isEndOfStream() + ) { + break; + } + + if (isSurrogateChar(this._cs.currentChar)) { + this._cs.moveNext(); + this._cs.moveNext(); + } else { + this._cs.moveNext(); + } + } + const length = this._cs.position - start; + if (length > 0) { + this._tokens.push(Token.create(TokenType.Invalid, start, length, this._getComments())); + return true; + } + return false; + } + + private _getComments(): Comment[] | undefined { + const prevComments = this._comments; + this._comments = undefined; + return prevComments; + } + + private _getIPythonMagicsKind(): MagicsKind { + const curChar = this._cs.currentChar; + if (curChar !== Char.Percent && curChar !== Char.ExclamationMark) { + return MagicsKind.None; + } + + const prevToken = this._tokens.length > 0 ? this._tokens[this._tokens.length - 1] : undefined; + if (prevToken !== undefined && !Tokenizer.isWhitespace(prevToken)) { + return MagicsKind.None; + } + + if (this._cs.nextChar === curChar) { + // Eat up next magic char. + this._cs.moveNext(); + return MagicsKind.Cell; + } + + return MagicsKind.Line; + } + + private _handleIPythonMagics(type: CommentType): void { + const start = this._cs.position + 1; + + let begin = start; + while (true) { + this._cs.skipToEol(); + + if (type === CommentType.IPythonMagic || type === CommentType.IPythonShellEscape) { + const length = this._cs.position - begin; + const value = this._cs.getText().slice(begin, begin + length); + + // is it multiline magics? 
+ // %magic command \ + // next arguments + if (!value.match(magicsRegEx)) { + break; + } + } + + this._cs.moveNext(); + begin = this._cs.position + 1; + + if (this._cs.isEndOfStream()) { + break; + } + } + + const length = this._cs.position - start; + const comment = Comment.create(start, length, this._cs.getText().slice(start, start + length), type); + this._addComments(comment); + } + + private _handleComment(): void { + const start = this._cs.position + 1; + this._cs.skipToEol(); + + const length = this._cs.position - start; + const comment = Comment.create(start, length, this._cs.getText().slice(start, start + length)); + + const typeIgnoreRegexMatch = comment.value.match(typeIgnoreCommentRegEx); + if (typeIgnoreRegexMatch) { + const commentStart = start + (typeIgnoreRegexMatch.index ?? 0); + const textRange: TextRange = { + start: commentStart + typeIgnoreRegexMatch[1].length, + length: typeIgnoreRegexMatch[0].length - typeIgnoreRegexMatch[1].length, + }; + const ignoreComment: IgnoreComment = { + range: textRange, + rulesList: this._getIgnoreCommentRulesList(commentStart, typeIgnoreRegexMatch), + }; + + if (this._tokens.findIndex((t) => t.type !== TokenType.NewLine && t && t.type !== TokenType.Indent) < 0) { + this._typeIgnoreAll = ignoreComment; + } else { + this._typeIgnoreLines.set(this._lineRanges.length, ignoreComment); + } + } + + const pyrightIgnoreRegexMatch = comment.value.match(pyrightIgnoreCommentRegEx); + if (pyrightIgnoreRegexMatch) { + const commentStart = start + (pyrightIgnoreRegexMatch.index ?? 
0); + const textRange: TextRange = { + start: commentStart + pyrightIgnoreRegexMatch[1].length, + length: pyrightIgnoreRegexMatch[0].length - pyrightIgnoreRegexMatch[1].length, + }; + const ignoreComment: IgnoreComment = { + range: textRange, + rulesList: this._getIgnoreCommentRulesList(commentStart, pyrightIgnoreRegexMatch), + }; + this._pyrightIgnoreLines.set(this._lineRanges.length, ignoreComment); + } + + this._addComments(comment); + } + + // Extracts the individual rules within a "type: ignore [x, y, z]" comment. + private _getIgnoreCommentRulesList(start: number, match: RegExpMatchArray): IgnoreCommentRule[] | undefined { + if (match.length < 5 || match[4] === undefined) { + return undefined; + } + + const splitElements = match[4].split(','); + const commentRules: IgnoreCommentRule[] = []; + let currentOffset = start + match[0].indexOf('[') + 1; + + for (const element of splitElements) { + const frontTrimmed = element.trimStart(); + currentOffset += element.length - frontTrimmed.length; + const endTrimmed = frontTrimmed.trimEnd(); + + if (endTrimmed.length > 0) { + commentRules.push({ + range: { start: currentOffset, length: endTrimmed.length }, + text: cloneStr(endTrimmed), + }); + } + + currentOffset += frontTrimmed.length + 1; + } + + return commentRules; + } + + private _addComments(comment: Comment) { + if (this._comments) { + this._comments.push(comment); + } else { + this._comments = [comment]; + } + } + + private _getStringPrefixLength(): number { + if (this._cs.currentChar === Char.SingleQuote || this._cs.currentChar === Char.DoubleQuote) { + // Simple string, no prefix + return 0; + } + + if (this._cs.nextChar === Char.SingleQuote || this._cs.nextChar === Char.DoubleQuote) { + switch (this._cs.currentChar) { + case Char.f: + case Char.F: + case Char.r: + case Char.R: + case Char.b: + case Char.B: + case Char.u: + case Char.U: + case Char.t: + case Char.T: + // Single-char prefix like u"" or r"" + return 1; + default: + break; + } + } + + if 
(this._cs.lookAhead(2) === Char.SingleQuote || this._cs.lookAhead(2) === Char.DoubleQuote) { + const prefix = this._cs + .getText() + .slice(this._cs.position, this._cs.position + 2) + .toLowerCase(); + switch (prefix) { + case 'rf': + case 'fr': + case 'rt': + case 'tr': + case 'br': + case 'rb': + return 2; + default: + break; + } + } + return -1; + } + + private _getQuoteTypeFlags(prefix: string): StringTokenFlags { + let flags = StringTokenFlags.None; + + prefix = prefix.toLowerCase(); + for (let i = 0; i < prefix.length; i++) { + switch (prefix[i]) { + case 'u': + flags |= StringTokenFlags.Unicode; + break; + + case 'b': + flags |= StringTokenFlags.Bytes; + break; + + case 'r': + flags |= StringTokenFlags.Raw; + break; + + case 'f': + flags |= StringTokenFlags.Format; + break; + + case 't': + flags |= StringTokenFlags.Template; + break; + } + } + + if (this._cs.currentChar === Char.SingleQuote) { + flags |= StringTokenFlags.SingleQuote; + if (this._cs.nextChar === Char.SingleQuote && this._cs.lookAhead(2) === Char.SingleQuote) { + flags |= StringTokenFlags.Triplicate; + } + } else if (this._cs.currentChar === Char.DoubleQuote) { + flags |= StringTokenFlags.DoubleQuote; + if (this._cs.nextChar === Char.DoubleQuote && this._cs.lookAhead(2) === Char.DoubleQuote) { + flags |= StringTokenFlags.Triplicate; + } + } + + return flags; + } + + private _handleString(flags: StringTokenFlags, stringPrefixLength: number): void { + const start = this._cs.position - stringPrefixLength; + + if (flags & (StringTokenFlags.Format | StringTokenFlags.Template)) { + if (flags & StringTokenFlags.Triplicate) { + this._cs.advance(3); + } else { + this._cs.moveNext(); + } + + const end = this._cs.position; + + const fStringStartToken = FStringStartToken.create( + start, + end - start, + flags, + stringPrefixLength, + this._getComments() + ); + + // Create a new f-string context and push it on the stack. 
+ const fStringContext: FStringContext = { + startToken: fStringStartToken, + replacementFieldStack: [], + }; + + if (this._activeFString) { + this._fStringStack.push(this._activeFString); + } + this._activeFString = fStringContext; + + this._tokens.push(fStringStartToken); + } else { + if (flags & StringTokenFlags.Triplicate) { + this._cs.advance(3); + } else { + this._cs.moveNext(); + + if (flags & StringTokenFlags.SingleQuote) { + this._singleQuoteCount++; + } else { + this._doubleQuoteCount++; + } + } + + const stringLiteralInfo = this._skipToEndOfStringLiteral(flags); + const end = this._cs.position; + + // If this is an unterminated string, see if it matches the string type + // of an active f-string. If so, we'll treat it as an f-string end + // token rather than an unterminated regular string. This helps with + // parse error recovery if a closing bracket is missing in an f-string. + if ( + (stringLiteralInfo.flags & StringTokenFlags.Unterminated) !== 0 && + this._activeFString?.activeReplacementField + ) { + if ( + (flags & + (StringTokenFlags.Bytes | + StringTokenFlags.Unicode | + StringTokenFlags.Raw | + StringTokenFlags.Format | + StringTokenFlags.Template)) === + 0 + ) { + const quoteTypeMask = + StringTokenFlags.Triplicate | StringTokenFlags.DoubleQuote | StringTokenFlags.SingleQuote; + if ((this._activeFString.startToken.flags & quoteTypeMask) === (flags & quoteTypeMask)) { + // Unwind to the start of this string token and terminate any replacement fields + // that are active. This will cause the tokenizer to re-process the quote as an + // FStringEnd token. 
+ this._cs.position = start; + while (this._activeFString.replacementFieldStack.length > 0) { + this._activeFString.activeReplacementField = + this._activeFString.replacementFieldStack.pop(); + } + this._parenDepth = this._activeFString.activeReplacementField!.parenDepth - 1; + this._activeFString.activeReplacementField = undefined; + return; + } + } + } + + this._tokens.push( + StringToken.create( + start, + end - start, + stringLiteralInfo.flags, + stringLiteralInfo.escapedValue, + stringPrefixLength, + this._getComments() + ) + ); + } + } + + // Scans for either the FString end token or a replacement field. + private _handleFStringMiddle(): void { + const activeFString = this._activeFString!; + const inFormatSpecifier = !!this._activeFString!.activeReplacementField?.inFormatSpecifier; + const start = this._cs.position; + const flags = activeFString.startToken.flags; + const stringLiteralInfo = this._skipToEndOfStringLiteral(flags, inFormatSpecifier); + const end = this._cs.position; + + const isUnterminated = (stringLiteralInfo.flags & StringTokenFlags.Unterminated) !== 0; + const sawReplacementFieldStart = (stringLiteralInfo.flags & StringTokenFlags.ReplacementFieldStart) !== 0; + const sawReplacementFieldEnd = (stringLiteralInfo.flags & StringTokenFlags.ReplacementFieldEnd) !== 0; + const sawEndQuote = !isUnterminated && !sawReplacementFieldStart && !sawReplacementFieldEnd; + + let middleTokenLength = end - start; + if (sawEndQuote) { + middleTokenLength -= activeFString.startToken.quoteMarkLength; + } + + if (middleTokenLength > 0 || isUnterminated) { + this._tokens.push( + FStringMiddleToken.create( + start, + middleTokenLength, + stringLiteralInfo.flags, + stringLiteralInfo.escapedValue + ) + ); + } + + if (sawEndQuote) { + this._tokens.push( + FStringEndToken.create( + start + middleTokenLength, + activeFString.startToken.quoteMarkLength, + stringLiteralInfo.flags + ) + ); + + this._activeFString = this._fStringStack.pop(); + } else if (isUnterminated) { + 
this._activeFString = this._fStringStack.pop(); + } + } + + private _skipToEndOfStringLiteral(flags: StringTokenFlags, inFormatSpecifier = false): StringScannerOutput { + const quoteChar = flags & StringTokenFlags.SingleQuote ? Char.SingleQuote : Char.DoubleQuote; + const isTriplicate = (flags & StringTokenFlags.Triplicate) !== 0; + const isFString = (flags & (StringTokenFlags.Format | StringTokenFlags.Template)) !== 0; + let isInNamedUnicodeEscape = false; + const start = this._cs.position; + let escapedValueLength = 0; + const getEscapedValue = () => cloneStr(this._cs.getText().slice(start, start + escapedValueLength)); + + while (true) { + if (this._cs.isEndOfStream()) { + // Hit the end of file without a termination. + flags |= StringTokenFlags.Unterminated; + return { + escapedValue: getEscapedValue(), + flags, + }; + } + + if (this._cs.currentChar === Char.Backslash) { + escapedValueLength++; + + // Move past the escape (backslash) character. + this._cs.moveNext(); + + // Handle the special escape sequence /N{name} for unicode characters. + if ( + !isInNamedUnicodeEscape && + this._cs.getCurrentChar() === Char.N && + this._cs.nextChar === Char.OpenBrace + ) { + flags |= StringTokenFlags.NamedUnicodeEscape; + isInNamedUnicodeEscape = true; + } else { + // If this is an f-string, the only escapes that are allowed is for + // a single or double quote symbol or a newline/carriage return. 
+ const isEscapedQuote = + this._cs.getCurrentChar() === Char.SingleQuote || + this._cs.getCurrentChar() === Char.DoubleQuote; + const isEscapedNewLine = + this._cs.getCurrentChar() === Char.CarriageReturn || + this._cs.getCurrentChar() === Char.LineFeed; + const isEscapedBackslash = this._cs.getCurrentChar() === Char.Backslash; + + if (!isFString || isEscapedBackslash || isEscapedQuote || isEscapedNewLine) { + if (isEscapedNewLine) { + if ( + this._cs.getCurrentChar() === Char.CarriageReturn && + this._cs.nextChar === Char.LineFeed + ) { + escapedValueLength++; + this._cs.moveNext(); + } + escapedValueLength++; + this._cs.moveNext(); + this._addLineRange(); + } else { + escapedValueLength++; + this._cs.moveNext(); + } + } + } + } else if (this._cs.currentChar === Char.LineFeed || this._cs.currentChar === Char.CarriageReturn) { + if (!isTriplicate) { + if (!isFString || !this._activeFString?.activeReplacementField) { + // Unterminated single-line string + flags |= StringTokenFlags.Unterminated; + return { + escapedValue: getEscapedValue(), + flags, + }; + } + } + + // Skip over the new line (either one or two characters). 
+ if (this._cs.currentChar === Char.CarriageReturn && this._cs.nextChar === Char.LineFeed) { + escapedValueLength++; + this._cs.moveNext(); + } + + escapedValueLength++; + this._cs.moveNext(); + this._addLineRange(); + } else if (!isTriplicate && this._cs.currentChar === quoteChar) { + this._cs.moveNext(); + break; + } else if ( + isTriplicate && + this._cs.currentChar === quoteChar && + this._cs.nextChar === quoteChar && + this._cs.lookAhead(2) === quoteChar + ) { + this._cs.advance(3); + break; + } else if (!isInNamedUnicodeEscape && isFString && this._cs.currentChar === Char.OpenBrace) { + if (inFormatSpecifier || this._cs.nextChar !== Char.OpenBrace) { + flags |= StringTokenFlags.ReplacementFieldStart; + break; + } else { + escapedValueLength++; + this._cs.moveNext(); + escapedValueLength++; + this._cs.moveNext(); + } + } else if (isInNamedUnicodeEscape && this._cs.currentChar === Char.CloseBrace) { + isInNamedUnicodeEscape = false; + escapedValueLength++; + this._cs.moveNext(); + } else if (isFString && this._cs.currentChar === Char.CloseBrace) { + if (inFormatSpecifier || this._cs.nextChar !== Char.CloseBrace) { + flags |= StringTokenFlags.ReplacementFieldEnd; + break; + } else { + escapedValueLength++; + this._cs.moveNext(); + escapedValueLength++; + this._cs.moveNext(); + } + } else { + escapedValueLength++; + this._cs.moveNext(); + } + } + + return { + escapedValue: getEscapedValue(), + flags, + }; + } + + private _skipFloatingPointCandidate(): boolean { + // Determine end of the potential floating point number + const start = this._cs.position; + this._skipFractionalNumber(); + if (this._cs.position > start) { + // Optional exponent sign + if (this._cs.currentChar === Char.e || this._cs.currentChar === Char.E) { + this._cs.moveNext(); + + // Skip exponent value + this._skipDecimalNumber(/* allowSign */ true); + } + } + return this._cs.position > start; + } + + private _skipFractionalNumber(): void { + this._skipDecimalNumber(false); + if 
(this._cs.currentChar === Char.Period) { + // Optional period + this._cs.moveNext(); + } + this._skipDecimalNumber(false); + } + + private _skipDecimalNumber(allowSign: boolean): void { + if (allowSign && (this._cs.currentChar === Char.Hyphen || this._cs.currentChar === Char.Plus)) { + // Optional sign + this._cs.moveNext(); + } + while (isDecimal(this._cs.currentChar)) { + // Skip integer part + this._cs.moveNext(); + } + } +} diff --git a/python-parser/packages/pyright-internal/src/parser/tokenizerTypes.ts b/python-parser/packages/pyright-internal/src/parser/tokenizerTypes.ts new file mode 100644 index 00000000..19dcae59 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/parser/tokenizerTypes.ts @@ -0,0 +1,513 @@ +/* + * tokenizerTypes.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Based on code from vscode-python repository: + * https://github.com/Microsoft/vscode-python + * + * Interface, enumeration and class definitions used within + * the Python tokenizer. + */ + +import { TextRange } from '../common/textRange'; + +export const enum TokenType { + Invalid, + EndOfStream, + NewLine, + Indent, + Dedent, + String, + Number, + Identifier, + Keyword, + Operator, + Colon, + Semicolon, + Comma, + OpenParenthesis, + CloseParenthesis, + OpenBracket, + CloseBracket, + OpenCurlyBrace, + CloseCurlyBrace, + Ellipsis, + Dot, + Arrow, + Backtick, + ExclamationMark, + FStringStart, + FStringMiddle, + FStringEnd, +} + +export const enum NewLineType { + CarriageReturn, + LineFeed, + CarriageReturnLineFeed, + Implied, +} + +export const enum OperatorType { + // These operators are used with tokens + // of type TokenType.Operator. 
+ Add, + AddEqual, + Assign, + BitwiseAnd, + BitwiseAndEqual, + BitwiseInvert, + BitwiseOr, + BitwiseOrEqual, + BitwiseXor, + BitwiseXorEqual, + Divide, + DivideEqual, + Equals, + FloorDivide, + FloorDivideEqual, + GreaterThan, + GreaterThanOrEqual, + LeftShift, + LeftShiftEqual, + LessOrGreaterThan, + LessThan, + LessThanOrEqual, + MatrixMultiply, + MatrixMultiplyEqual, + Mod, + ModEqual, + Multiply, + MultiplyEqual, + NotEquals, + Power, + PowerEqual, + RightShift, + RightShiftEqual, + Subtract, + SubtractEqual, + Walrus, + + // These operators are used with tokens + // of type TokenType.Keyword. + And, + Or, + Not, + Is, + IsNot, + In, + NotIn, +} + +export const enum OperatorFlags { + Unary = 1 << 0, + Binary = 1 << 1, + Assignment = 1 << 2, + Comparison = 1 << 3, + Deprecated = 1 << 4, +} + +export const enum KeywordType { + And, + As, + Assert, + Async, + Await, + Break, + Case, + Class, + Continue, + Debug, + Def, + Del, + Elif, + Else, + Except, + False, + Finally, + For, + From, + Global, + If, + Import, + In, + Is, + Lambda, + Match, + None, + Nonlocal, + Not, + Or, + Pass, + Raise, + Return, + True, + Try, + Type, + While, + With, + Yield, +} + +export const softKeywords = [KeywordType.Debug, KeywordType.Match, KeywordType.Case, KeywordType.Type]; + +export const enum StringTokenFlags { + None = 0, + + // Quote types + SingleQuote = 1 << 0, + DoubleQuote = 1 << 1, + Triplicate = 1 << 2, + + // String content format + Raw = 1 << 3, + Unicode = 1 << 4, + Bytes = 1 << 5, + Format = 1 << 6, + Template = 1 << 7, + + // Other conditions + ReplacementFieldStart = 1 << 8, + ReplacementFieldEnd = 1 << 9, + NamedUnicodeEscape = 1 << 10, + + // Error conditions + Unterminated = 1 << 16, +} + +export const enum CommentType { + Regular, + IPythonMagic, + IPythonShellEscape, + IPythonCellMagic, + IPythonCellShellEscape, +} + +export interface Comment extends TextRange { + readonly type: CommentType; + readonly value: string; + readonly start: number; + readonly 
length: number; +} + +export namespace Comment { + export function create(start: number, length: number, value: string, type = CommentType.Regular) { + const comment: Comment = { + type, + start, + length, + value, + }; + + return comment; + } +} + +export interface TokenBase extends TextRange { + readonly type: TokenType; + + // Comments prior to the token. + readonly comments?: Comment[] | undefined; +} + +export interface Token extends TokenBase {} + +export namespace Token { + export function create(type: TokenType, start: number, length: number, comments: Comment[] | undefined) { + const token: Token = { + start, + length, + type, + comments, + }; + + return token; + } +} + +export interface IndentToken extends Token { + readonly type: TokenType.Indent; + readonly indentAmount: number; + readonly isIndentAmbiguous: boolean; +} + +export namespace IndentToken { + export function create( + start: number, + length: number, + indentAmount: number, + isIndentAmbiguous: boolean, + comments: Comment[] | undefined + ) { + const token: IndentToken = { + start, + length, + type: TokenType.Indent, + isIndentAmbiguous, + comments, + indentAmount, + }; + + return token; + } +} + +export interface DedentToken extends Token { + readonly type: TokenType.Dedent; + readonly indentAmount: number; + readonly matchesIndent: boolean; + readonly isDedentAmbiguous: boolean; +} + +export namespace DedentToken { + export function create( + start: number, + length: number, + indentAmount: number, + matchesIndent: boolean, + isDedentAmbiguous: boolean, + comments: Comment[] | undefined + ) { + const token: DedentToken = { + start, + length, + type: TokenType.Dedent, + comments, + indentAmount, + matchesIndent, + isDedentAmbiguous, + }; + + return token; + } +} + +export interface NewLineToken extends Token { + readonly type: TokenType.NewLine; + readonly newLineType: NewLineType; +} + +export namespace NewLineToken { + export function create(start: number, length: number, newLineType: 
NewLineType, comments: Comment[] | undefined) { + const token: NewLineToken = { + start, + length, + type: TokenType.NewLine, + comments, + newLineType, + }; + + return token; + } +} + +export interface KeywordToken extends Token { + readonly type: TokenType.Keyword; + readonly keywordType: KeywordType; +} + +export namespace KeywordToken { + export function create(start: number, length: number, keywordType: KeywordType, comments: Comment[] | undefined) { + const token: KeywordToken = { + start, + length, + type: TokenType.Keyword, + comments, + keywordType, + }; + + return token; + } + + export function isSoftKeyword(token: KeywordToken) { + return softKeywords.some((t) => token.keywordType === t); + } +} + +export interface StringToken extends Token { + readonly type: TokenType.String; + readonly flags: StringTokenFlags; + + // Use StringTokenUtils to convert escaped value to unescaped value. + readonly escapedValue: string; + + // Number of characters in token that appear before + // the quote marks (e.g. "r" or "UR"). + readonly prefixLength: number; + + // Number of characters in token that make up the quote + // (either 1 or 3). + readonly quoteMarkLength: number; +} + +export namespace StringToken { + export function create( + start: number, + length: number, + flags: StringTokenFlags, + escapedValue: string, + prefixLength: number, + comments: Comment[] | undefined + ) { + const token: StringToken = { + start, + length, + type: TokenType.String, + flags, + escapedValue, + prefixLength, + quoteMarkLength: flags & StringTokenFlags.Triplicate ? 3 : 1, + comments, + }; + + return token; + } +} + +export interface FStringStartToken extends Token { + readonly type: TokenType.FStringStart; + readonly flags: StringTokenFlags; + + // Number of characters in token that appear before + // the quote marks (e.g. "r" or "UR"). + readonly prefixLength: number; + + // Number of characters in token that make up the quote + // (either 1 or 3). 
+ readonly quoteMarkLength: number; +} + +export namespace FStringStartToken { + export function create( + start: number, + length: number, + flags: StringTokenFlags, + prefixLength: number, + comments: Comment[] | undefined + ) { + const token: FStringStartToken = { + start, + length, + type: TokenType.FStringStart, + flags, + prefixLength, + quoteMarkLength: flags & StringTokenFlags.Triplicate ? 3 : 1, + comments, + }; + + return token; + } +} + +export interface FStringMiddleToken extends Token { + readonly type: TokenType.FStringMiddle; + readonly flags: StringTokenFlags; + + // Use StringTokenUtils to convert escaped value to unescaped value. + readonly escapedValue: string; +} + +export namespace FStringMiddleToken { + export function create(start: number, length: number, flags: StringTokenFlags, escapedValue: string) { + const token: FStringMiddleToken = { + start, + length, + type: TokenType.FStringMiddle, + flags, + escapedValue, + }; + + return token; + } +} + +export interface FStringEndToken extends Token { + readonly type: TokenType.FStringEnd; + readonly flags: StringTokenFlags; +} + +export namespace FStringEndToken { + export function create(start: number, length: number, flags: StringTokenFlags) { + const token: FStringEndToken = { + start, + length, + type: TokenType.FStringEnd, + flags, + }; + + return token; + } +} + +export interface NumberToken extends Token { + readonly type: TokenType.Number; + readonly value: number | bigint; + readonly isInteger: boolean; + readonly isImaginary: boolean; +} + +export namespace NumberToken { + export function create( + start: number, + length: number, + value: number | bigint, + isInteger: boolean, + isImaginary: boolean, + comments: Comment[] | undefined + ) { + const token: NumberToken = { + start, + length, + type: TokenType.Number, + isInteger, + isImaginary, + value, + comments, + }; + + return token; + } +} + +export interface OperatorToken extends Token { + readonly type: TokenType.Operator; + 
readonly operatorType: OperatorType; +} + +export namespace OperatorToken { + export function create(start: number, length: number, operatorType: OperatorType, comments: Comment[] | undefined) { + const token: OperatorToken = { + start, + length, + type: TokenType.Operator, + operatorType, + comments, + }; + + return token; + } +} + +export interface IdentifierToken extends Token { + readonly type: TokenType.Identifier; + readonly value: string; +} + +export namespace IdentifierToken { + export function create(start: number, length: number, value: string, comments: Comment[] | undefined) { + // Perform "NFKC normalization", as per the Python lexical spec. + const normalizedValue = value.normalize('NFKC'); + + const token: IdentifierToken = { + start, + length, + type: TokenType.Identifier, + value: normalizedValue, + comments, + }; + + return token; + } +} diff --git a/python-parser/packages/pyright-internal/src/parser/unicode.ts b/python-parser/packages/pyright-internal/src/parser/unicode.ts new file mode 100644 index 00000000..8a1ac111 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/parser/unicode.ts @@ -0,0 +1,3648 @@ +/* + * unicode.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Tables that encode Unicode character codes for various Unicode- + * defined categories used in the Python spec. + * + * Generated by build/generateUnicodeTables.py from the UnicodeData.txt + * metadata file for Unicode 16.0. 
+ */ + +export type UnicodeRange = [number, number] | number; +export type UnicodeRangeTable = UnicodeRange[]; +export type UnicodeSurrogateRangeTable = { [surrogate: number]: UnicodeRange[] }; + +export const unicodeLu: UnicodeRangeTable = [ + [0x0041, 0x005a], + [0x00c0, 0x00d6], + [0x00d8, 0x00de], + 0x0100, + 0x0102, + 0x0104, + 0x0106, + 0x0108, + 0x010a, + 0x010c, + 0x010e, + 0x0110, + 0x0112, + 0x0114, + 0x0116, + 0x0118, + 0x011a, + 0x011c, + 0x011e, + 0x0120, + 0x0122, + 0x0124, + 0x0126, + 0x0128, + 0x012a, + 0x012c, + 0x012e, + 0x0130, + 0x0132, + 0x0134, + 0x0136, + 0x0139, + 0x013b, + 0x013d, + 0x013f, + 0x0141, + 0x0143, + 0x0145, + 0x0147, + 0x014a, + 0x014c, + 0x014e, + 0x0150, + 0x0152, + 0x0154, + 0x0156, + 0x0158, + 0x015a, + 0x015c, + 0x015e, + 0x0160, + 0x0162, + 0x0164, + 0x0166, + 0x0168, + 0x016a, + 0x016c, + 0x016e, + 0x0170, + 0x0172, + 0x0174, + 0x0176, + [0x0178, 0x0179], + 0x017b, + 0x017d, + [0x0181, 0x0182], + 0x0184, + [0x0186, 0x0187], + [0x0189, 0x018b], + [0x018e, 0x0191], + [0x0193, 0x0194], + [0x0196, 0x0198], + [0x019c, 0x019d], + [0x019f, 0x01a0], + 0x01a2, + 0x01a4, + [0x01a6, 0x01a7], + 0x01a9, + 0x01ac, + [0x01ae, 0x01af], + [0x01b1, 0x01b3], + 0x01b5, + [0x01b7, 0x01b8], + 0x01bc, + 0x01c4, + 0x01c7, + 0x01ca, + 0x01cd, + 0x01cf, + 0x01d1, + 0x01d3, + 0x01d5, + 0x01d7, + 0x01d9, + 0x01db, + 0x01de, + 0x01e0, + 0x01e2, + 0x01e4, + 0x01e6, + 0x01e8, + 0x01ea, + 0x01ec, + 0x01ee, + 0x01f1, + 0x01f4, + [0x01f6, 0x01f8], + 0x01fa, + 0x01fc, + 0x01fe, + 0x0200, + 0x0202, + 0x0204, + 0x0206, + 0x0208, + 0x020a, + 0x020c, + 0x020e, + 0x0210, + 0x0212, + 0x0214, + 0x0216, + 0x0218, + 0x021a, + 0x021c, + 0x021e, + 0x0220, + 0x0222, + 0x0224, + 0x0226, + 0x0228, + 0x022a, + 0x022c, + 0x022e, + 0x0230, + 0x0232, + [0x023a, 0x023b], + [0x023d, 0x023e], + 0x0241, + [0x0243, 0x0246], + 0x0248, + 0x024a, + 0x024c, + 0x024e, + 0x0370, + 0x0372, + 0x0376, + 0x037f, + 0x0386, + [0x0388, 0x038a], + 0x038c, + [0x038e, 0x038f], + [0x0391, 
0x03a1], + [0x03a3, 0x03ab], + 0x03cf, + [0x03d2, 0x03d4], + 0x03d8, + 0x03da, + 0x03dc, + 0x03de, + 0x03e0, + 0x03e2, + 0x03e4, + 0x03e6, + 0x03e8, + 0x03ea, + 0x03ec, + 0x03ee, + 0x03f4, + 0x03f7, + [0x03f9, 0x03fa], + [0x03fd, 0x042f], + 0x0460, + 0x0462, + 0x0464, + 0x0466, + 0x0468, + 0x046a, + 0x046c, + 0x046e, + 0x0470, + 0x0472, + 0x0474, + 0x0476, + 0x0478, + 0x047a, + 0x047c, + 0x047e, + 0x0480, + 0x048a, + 0x048c, + 0x048e, + 0x0490, + 0x0492, + 0x0494, + 0x0496, + 0x0498, + 0x049a, + 0x049c, + 0x049e, + 0x04a0, + 0x04a2, + 0x04a4, + 0x04a6, + 0x04a8, + 0x04aa, + 0x04ac, + 0x04ae, + 0x04b0, + 0x04b2, + 0x04b4, + 0x04b6, + 0x04b8, + 0x04ba, + 0x04bc, + 0x04be, + [0x04c0, 0x04c1], + 0x04c3, + 0x04c5, + 0x04c7, + 0x04c9, + 0x04cb, + 0x04cd, + 0x04d0, + 0x04d2, + 0x04d4, + 0x04d6, + 0x04d8, + 0x04da, + 0x04dc, + 0x04de, + 0x04e0, + 0x04e2, + 0x04e4, + 0x04e6, + 0x04e8, + 0x04ea, + 0x04ec, + 0x04ee, + 0x04f0, + 0x04f2, + 0x04f4, + 0x04f6, + 0x04f8, + 0x04fa, + 0x04fc, + 0x04fe, + 0x0500, + 0x0502, + 0x0504, + 0x0506, + 0x0508, + 0x050a, + 0x050c, + 0x050e, + 0x0510, + 0x0512, + 0x0514, + 0x0516, + 0x0518, + 0x051a, + 0x051c, + 0x051e, + 0x0520, + 0x0522, + 0x0524, + 0x0526, + 0x0528, + 0x052a, + 0x052c, + 0x052e, + [0x0531, 0x0556], + [0x10a0, 0x10c5], + 0x10c7, + 0x10cd, + [0x13a0, 0x13f5], + 0x1c89, + [0x1c90, 0x1cba], + [0x1cbd, 0x1cbf], + 0x1e00, + 0x1e02, + 0x1e04, + 0x1e06, + 0x1e08, + 0x1e0a, + 0x1e0c, + 0x1e0e, + 0x1e10, + 0x1e12, + 0x1e14, + 0x1e16, + 0x1e18, + 0x1e1a, + 0x1e1c, + 0x1e1e, + 0x1e20, + 0x1e22, + 0x1e24, + 0x1e26, + 0x1e28, + 0x1e2a, + 0x1e2c, + 0x1e2e, + 0x1e30, + 0x1e32, + 0x1e34, + 0x1e36, + 0x1e38, + 0x1e3a, + 0x1e3c, + 0x1e3e, + 0x1e40, + 0x1e42, + 0x1e44, + 0x1e46, + 0x1e48, + 0x1e4a, + 0x1e4c, + 0x1e4e, + 0x1e50, + 0x1e52, + 0x1e54, + 0x1e56, + 0x1e58, + 0x1e5a, + 0x1e5c, + 0x1e5e, + 0x1e60, + 0x1e62, + 0x1e64, + 0x1e66, + 0x1e68, + 0x1e6a, + 0x1e6c, + 0x1e6e, + 0x1e70, + 0x1e72, + 0x1e74, + 0x1e76, + 0x1e78, + 0x1e7a, + 0x1e7c, 
+ 0x1e7e, + 0x1e80, + 0x1e82, + 0x1e84, + 0x1e86, + 0x1e88, + 0x1e8a, + 0x1e8c, + 0x1e8e, + 0x1e90, + 0x1e92, + 0x1e94, + 0x1e9e, + 0x1ea0, + 0x1ea2, + 0x1ea4, + 0x1ea6, + 0x1ea8, + 0x1eaa, + 0x1eac, + 0x1eae, + 0x1eb0, + 0x1eb2, + 0x1eb4, + 0x1eb6, + 0x1eb8, + 0x1eba, + 0x1ebc, + 0x1ebe, + 0x1ec0, + 0x1ec2, + 0x1ec4, + 0x1ec6, + 0x1ec8, + 0x1eca, + 0x1ecc, + 0x1ece, + 0x1ed0, + 0x1ed2, + 0x1ed4, + 0x1ed6, + 0x1ed8, + 0x1eda, + 0x1edc, + 0x1ede, + 0x1ee0, + 0x1ee2, + 0x1ee4, + 0x1ee6, + 0x1ee8, + 0x1eea, + 0x1eec, + 0x1eee, + 0x1ef0, + 0x1ef2, + 0x1ef4, + 0x1ef6, + 0x1ef8, + 0x1efa, + 0x1efc, + 0x1efe, + [0x1f08, 0x1f0f], + [0x1f18, 0x1f1d], + [0x1f28, 0x1f2f], + [0x1f38, 0x1f3f], + [0x1f48, 0x1f4d], + 0x1f59, + 0x1f5b, + 0x1f5d, + 0x1f5f, + [0x1f68, 0x1f6f], + [0x1fb8, 0x1fbb], + [0x1fc8, 0x1fcb], + [0x1fd8, 0x1fdb], + [0x1fe8, 0x1fec], + [0x1ff8, 0x1ffb], + 0x2102, + 0x2107, + [0x210b, 0x210d], + [0x2110, 0x2112], + 0x2115, + [0x2119, 0x211d], + 0x2124, + 0x2126, + 0x2128, + [0x212a, 0x212d], + [0x2130, 0x2133], + [0x213e, 0x213f], + 0x2145, + 0x2183, + [0x2c00, 0x2c2f], + 0x2c60, + [0x2c62, 0x2c64], + 0x2c67, + 0x2c69, + 0x2c6b, + [0x2c6d, 0x2c70], + 0x2c72, + 0x2c75, + [0x2c7e, 0x2c80], + 0x2c82, + 0x2c84, + 0x2c86, + 0x2c88, + 0x2c8a, + 0x2c8c, + 0x2c8e, + 0x2c90, + 0x2c92, + 0x2c94, + 0x2c96, + 0x2c98, + 0x2c9a, + 0x2c9c, + 0x2c9e, + 0x2ca0, + 0x2ca2, + 0x2ca4, + 0x2ca6, + 0x2ca8, + 0x2caa, + 0x2cac, + 0x2cae, + 0x2cb0, + 0x2cb2, + 0x2cb4, + 0x2cb6, + 0x2cb8, + 0x2cba, + 0x2cbc, + 0x2cbe, + 0x2cc0, + 0x2cc2, + 0x2cc4, + 0x2cc6, + 0x2cc8, + 0x2cca, + 0x2ccc, + 0x2cce, + 0x2cd0, + 0x2cd2, + 0x2cd4, + 0x2cd6, + 0x2cd8, + 0x2cda, + 0x2cdc, + 0x2cde, + 0x2ce0, + 0x2ce2, + 0x2ceb, + 0x2ced, + 0x2cf2, + 0xa640, + 0xa642, + 0xa644, + 0xa646, + 0xa648, + 0xa64a, + 0xa64c, + 0xa64e, + 0xa650, + 0xa652, + 0xa654, + 0xa656, + 0xa658, + 0xa65a, + 0xa65c, + 0xa65e, + 0xa660, + 0xa662, + 0xa664, + 0xa666, + 0xa668, + 0xa66a, + 0xa66c, + 0xa680, + 0xa682, + 0xa684, + 0xa686, 
+ 0xa688, + 0xa68a, + 0xa68c, + 0xa68e, + 0xa690, + 0xa692, + 0xa694, + 0xa696, + 0xa698, + 0xa69a, + 0xa722, + 0xa724, + 0xa726, + 0xa728, + 0xa72a, + 0xa72c, + 0xa72e, + 0xa732, + 0xa734, + 0xa736, + 0xa738, + 0xa73a, + 0xa73c, + 0xa73e, + 0xa740, + 0xa742, + 0xa744, + 0xa746, + 0xa748, + 0xa74a, + 0xa74c, + 0xa74e, + 0xa750, + 0xa752, + 0xa754, + 0xa756, + 0xa758, + 0xa75a, + 0xa75c, + 0xa75e, + 0xa760, + 0xa762, + 0xa764, + 0xa766, + 0xa768, + 0xa76a, + 0xa76c, + 0xa76e, + 0xa779, + 0xa77b, + [0xa77d, 0xa77e], + 0xa780, + 0xa782, + 0xa784, + 0xa786, + 0xa78b, + 0xa78d, + 0xa790, + 0xa792, + 0xa796, + 0xa798, + 0xa79a, + 0xa79c, + 0xa79e, + 0xa7a0, + 0xa7a2, + 0xa7a4, + 0xa7a6, + 0xa7a8, + [0xa7aa, 0xa7ae], + [0xa7b0, 0xa7b4], + 0xa7b6, + 0xa7b8, + 0xa7ba, + 0xa7bc, + 0xa7be, + 0xa7c0, + 0xa7c2, + [0xa7c4, 0xa7c7], + 0xa7c9, + [0xa7cb, 0xa7cc], + 0xa7d0, + 0xa7d6, + 0xa7d8, + 0xa7da, + 0xa7dc, + 0xa7f5, + [0xff21, 0xff3a], + [0x10400, 0x10427], + [0x104b0, 0x104d3], + [0x10570, 0x1057a], + [0x1057c, 0x1058a], + [0x1058c, 0x10592], + [0x10594, 0x10595], + [0x10c80, 0x10cb2], + [0x10d50, 0x10d65], + [0x118a0, 0x118bf], + [0x16e40, 0x16e5f], + [0x1d400, 0x1d419], + [0x1d434, 0x1d44d], + [0x1d468, 0x1d481], + 0x1d49c, + [0x1d49e, 0x1d49f], + 0x1d4a2, + [0x1d4a5, 0x1d4a6], + [0x1d4a9, 0x1d4ac], + [0x1d4ae, 0x1d4b5], + [0x1d4d0, 0x1d4e9], + [0x1d504, 0x1d505], + [0x1d507, 0x1d50a], + [0x1d50d, 0x1d514], + [0x1d516, 0x1d51c], + [0x1d538, 0x1d539], + [0x1d53b, 0x1d53e], + [0x1d540, 0x1d544], + 0x1d546, + [0x1d54a, 0x1d550], + [0x1d56c, 0x1d585], + [0x1d5a0, 0x1d5b9], + [0x1d5d4, 0x1d5ed], + [0x1d608, 0x1d621], + [0x1d63c, 0x1d655], + [0x1d670, 0x1d689], + [0x1d6a8, 0x1d6c0], + [0x1d6e2, 0x1d6fa], + [0x1d71c, 0x1d734], + [0x1d756, 0x1d76e], + [0x1d790, 0x1d7a8], + 0x1d7ca, + [0x1e900, 0x1e921], +]; + +export const unicodeLuSurrogate: UnicodeSurrogateRangeTable = { + 0xd801: [ + [0xdc00, 0xdc27], // 0x10400..0x10427 + [0xdcb0, 0xdcd3], // 0x104B0..0x104D3 + [0xdd70, 
0xdd7a], // 0x10570..0x1057A + [0xdd7c, 0xdd8a], // 0x1057C..0x1058A + [0xdd8c, 0xdd92], // 0x1058C..0x10592 + [0xdd94, 0xdd95], // 0x10594..0x10595 + ], + 0xd803: [ + [0xdc80, 0xdcb2], // 0x10C80..0x10CB2 + [0xdd50, 0xdd65], // 0x10D50..0x10D65 + ], + 0xd806: [ + [0xdca0, 0xdcbf], // 0x118A0..0x118BF + ], + 0xd81b: [ + [0xde40, 0xde5f], // 0x16E40..0x16E5F + ], + 0xd835: [ + [0xdc00, 0xdc19], // 0x1D400..0x1D419 + [0xdc34, 0xdc4d], // 0x1D434..0x1D44D + [0xdc68, 0xdc81], // 0x1D468..0x1D481 + 0xdc9c, // 0x1D49C + [0xdc9e, 0xdc9f], // 0x1D49E..0x1D49F + 0xdca2, // 0x1D4A2 + [0xdca5, 0xdca6], // 0x1D4A5..0x1D4A6 + [0xdca9, 0xdcac], // 0x1D4A9..0x1D4AC + [0xdcae, 0xdcb5], // 0x1D4AE..0x1D4B5 + [0xdcd0, 0xdce9], // 0x1D4D0..0x1D4E9 + [0xdd04, 0xdd05], // 0x1D504..0x1D505 + [0xdd07, 0xdd0a], // 0x1D507..0x1D50A + [0xdd0d, 0xdd14], // 0x1D50D..0x1D514 + [0xdd16, 0xdd1c], // 0x1D516..0x1D51C + [0xdd38, 0xdd39], // 0x1D538..0x1D539 + [0xdd3b, 0xdd3e], // 0x1D53B..0x1D53E + [0xdd40, 0xdd44], // 0x1D540..0x1D544 + 0xdd46, // 0x1D546 + [0xdd4a, 0xdd50], // 0x1D54A..0x1D550 + [0xdd6c, 0xdd85], // 0x1D56C..0x1D585 + [0xdda0, 0xddb9], // 0x1D5A0..0x1D5B9 + [0xddd4, 0xdded], // 0x1D5D4..0x1D5ED + [0xde08, 0xde21], // 0x1D608..0x1D621 + [0xde3c, 0xde55], // 0x1D63C..0x1D655 + [0xde70, 0xde89], // 0x1D670..0x1D689 + [0xdea8, 0xdec0], // 0x1D6A8..0x1D6C0 + [0xdee2, 0xdefa], // 0x1D6E2..0x1D6FA + [0xdf1c, 0xdf34], // 0x1D71C..0x1D734 + [0xdf56, 0xdf6e], // 0x1D756..0x1D76E + [0xdf90, 0xdfa8], // 0x1D790..0x1D7A8 + 0xdfca, // 0x1D7CA + ], + 0xd83a: [ + [0xdd00, 0xdd21], // 0x1E900..0x1E921 + ], +}; + +export const unicodeLl: UnicodeRangeTable = [ + [0x0061, 0x007a], + 0x00b5, + [0x00df, 0x00f6], + [0x00f8, 0x00ff], + 0x0101, + 0x0103, + 0x0105, + 0x0107, + 0x0109, + 0x010b, + 0x010d, + 0x010f, + 0x0111, + 0x0113, + 0x0115, + 0x0117, + 0x0119, + 0x011b, + 0x011d, + 0x011f, + 0x0121, + 0x0123, + 0x0125, + 0x0127, + 0x0129, + 0x012b, + 0x012d, + 0x012f, + 0x0131, + 0x0133, + 0x0135, + 
[0x0137, 0x0138], + 0x013a, + 0x013c, + 0x013e, + 0x0140, + 0x0142, + 0x0144, + 0x0146, + [0x0148, 0x0149], + 0x014b, + 0x014d, + 0x014f, + 0x0151, + 0x0153, + 0x0155, + 0x0157, + 0x0159, + 0x015b, + 0x015d, + 0x015f, + 0x0161, + 0x0163, + 0x0165, + 0x0167, + 0x0169, + 0x016b, + 0x016d, + 0x016f, + 0x0171, + 0x0173, + 0x0175, + 0x0177, + 0x017a, + 0x017c, + [0x017e, 0x0180], + 0x0183, + 0x0185, + 0x0188, + [0x018c, 0x018d], + 0x0192, + 0x0195, + [0x0199, 0x019b], + 0x019e, + 0x01a1, + 0x01a3, + 0x01a5, + 0x01a8, + [0x01aa, 0x01ab], + 0x01ad, + 0x01b0, + 0x01b4, + 0x01b6, + [0x01b9, 0x01ba], + [0x01bd, 0x01bf], + 0x01c6, + 0x01c9, + 0x01cc, + 0x01ce, + 0x01d0, + 0x01d2, + 0x01d4, + 0x01d6, + 0x01d8, + 0x01da, + [0x01dc, 0x01dd], + 0x01df, + 0x01e1, + 0x01e3, + 0x01e5, + 0x01e7, + 0x01e9, + 0x01eb, + 0x01ed, + [0x01ef, 0x01f0], + 0x01f3, + 0x01f5, + 0x01f9, + 0x01fb, + 0x01fd, + 0x01ff, + 0x0201, + 0x0203, + 0x0205, + 0x0207, + 0x0209, + 0x020b, + 0x020d, + 0x020f, + 0x0211, + 0x0213, + 0x0215, + 0x0217, + 0x0219, + 0x021b, + 0x021d, + 0x021f, + 0x0221, + 0x0223, + 0x0225, + 0x0227, + 0x0229, + 0x022b, + 0x022d, + 0x022f, + 0x0231, + [0x0233, 0x0239], + 0x023c, + [0x023f, 0x0240], + 0x0242, + 0x0247, + 0x0249, + 0x024b, + 0x024d, + [0x024f, 0x0293], + [0x0295, 0x02af], + 0x0371, + 0x0373, + 0x0377, + [0x037b, 0x037d], + 0x0390, + [0x03ac, 0x03ce], + [0x03d0, 0x03d1], + [0x03d5, 0x03d7], + 0x03d9, + 0x03db, + 0x03dd, + 0x03df, + 0x03e1, + 0x03e3, + 0x03e5, + 0x03e7, + 0x03e9, + 0x03eb, + 0x03ed, + [0x03ef, 0x03f3], + 0x03f5, + 0x03f8, + [0x03fb, 0x03fc], + [0x0430, 0x045f], + 0x0461, + 0x0463, + 0x0465, + 0x0467, + 0x0469, + 0x046b, + 0x046d, + 0x046f, + 0x0471, + 0x0473, + 0x0475, + 0x0477, + 0x0479, + 0x047b, + 0x047d, + 0x047f, + 0x0481, + 0x048b, + 0x048d, + 0x048f, + 0x0491, + 0x0493, + 0x0495, + 0x0497, + 0x0499, + 0x049b, + 0x049d, + 0x049f, + 0x04a1, + 0x04a3, + 0x04a5, + 0x04a7, + 0x04a9, + 0x04ab, + 0x04ad, + 0x04af, + 0x04b1, + 0x04b3, + 0x04b5, + 0x04b7, + 
0x04b9, + 0x04bb, + 0x04bd, + 0x04bf, + 0x04c2, + 0x04c4, + 0x04c6, + 0x04c8, + 0x04ca, + 0x04cc, + [0x04ce, 0x04cf], + 0x04d1, + 0x04d3, + 0x04d5, + 0x04d7, + 0x04d9, + 0x04db, + 0x04dd, + 0x04df, + 0x04e1, + 0x04e3, + 0x04e5, + 0x04e7, + 0x04e9, + 0x04eb, + 0x04ed, + 0x04ef, + 0x04f1, + 0x04f3, + 0x04f5, + 0x04f7, + 0x04f9, + 0x04fb, + 0x04fd, + 0x04ff, + 0x0501, + 0x0503, + 0x0505, + 0x0507, + 0x0509, + 0x050b, + 0x050d, + 0x050f, + 0x0511, + 0x0513, + 0x0515, + 0x0517, + 0x0519, + 0x051b, + 0x051d, + 0x051f, + 0x0521, + 0x0523, + 0x0525, + 0x0527, + 0x0529, + 0x052b, + 0x052d, + 0x052f, + [0x0560, 0x0588], + [0x10d0, 0x10fa], + [0x10fd, 0x10ff], + [0x13f8, 0x13fd], + [0x1c80, 0x1c88], + 0x1c8a, + [0x1d00, 0x1d2b], + [0x1d6b, 0x1d77], + [0x1d79, 0x1d9a], + 0x1e01, + 0x1e03, + 0x1e05, + 0x1e07, + 0x1e09, + 0x1e0b, + 0x1e0d, + 0x1e0f, + 0x1e11, + 0x1e13, + 0x1e15, + 0x1e17, + 0x1e19, + 0x1e1b, + 0x1e1d, + 0x1e1f, + 0x1e21, + 0x1e23, + 0x1e25, + 0x1e27, + 0x1e29, + 0x1e2b, + 0x1e2d, + 0x1e2f, + 0x1e31, + 0x1e33, + 0x1e35, + 0x1e37, + 0x1e39, + 0x1e3b, + 0x1e3d, + 0x1e3f, + 0x1e41, + 0x1e43, + 0x1e45, + 0x1e47, + 0x1e49, + 0x1e4b, + 0x1e4d, + 0x1e4f, + 0x1e51, + 0x1e53, + 0x1e55, + 0x1e57, + 0x1e59, + 0x1e5b, + 0x1e5d, + 0x1e5f, + 0x1e61, + 0x1e63, + 0x1e65, + 0x1e67, + 0x1e69, + 0x1e6b, + 0x1e6d, + 0x1e6f, + 0x1e71, + 0x1e73, + 0x1e75, + 0x1e77, + 0x1e79, + 0x1e7b, + 0x1e7d, + 0x1e7f, + 0x1e81, + 0x1e83, + 0x1e85, + 0x1e87, + 0x1e89, + 0x1e8b, + 0x1e8d, + 0x1e8f, + 0x1e91, + 0x1e93, + [0x1e95, 0x1e9d], + 0x1e9f, + 0x1ea1, + 0x1ea3, + 0x1ea5, + 0x1ea7, + 0x1ea9, + 0x1eab, + 0x1ead, + 0x1eaf, + 0x1eb1, + 0x1eb3, + 0x1eb5, + 0x1eb7, + 0x1eb9, + 0x1ebb, + 0x1ebd, + 0x1ebf, + 0x1ec1, + 0x1ec3, + 0x1ec5, + 0x1ec7, + 0x1ec9, + 0x1ecb, + 0x1ecd, + 0x1ecf, + 0x1ed1, + 0x1ed3, + 0x1ed5, + 0x1ed7, + 0x1ed9, + 0x1edb, + 0x1edd, + 0x1edf, + 0x1ee1, + 0x1ee3, + 0x1ee5, + 0x1ee7, + 0x1ee9, + 0x1eeb, + 0x1eed, + 0x1eef, + 0x1ef1, + 0x1ef3, + 0x1ef5, + 0x1ef7, + 0x1ef9, + 0x1efb, + 
0x1efd, + [0x1eff, 0x1f07], + [0x1f10, 0x1f15], + [0x1f20, 0x1f27], + [0x1f30, 0x1f37], + [0x1f40, 0x1f45], + [0x1f50, 0x1f57], + [0x1f60, 0x1f67], + [0x1f70, 0x1f7d], + [0x1f80, 0x1f87], + [0x1f90, 0x1f97], + [0x1fa0, 0x1fa7], + [0x1fb0, 0x1fb4], + [0x1fb6, 0x1fb7], + 0x1fbe, + [0x1fc2, 0x1fc4], + [0x1fc6, 0x1fc7], + [0x1fd0, 0x1fd3], + [0x1fd6, 0x1fd7], + [0x1fe0, 0x1fe7], + [0x1ff2, 0x1ff4], + [0x1ff6, 0x1ff7], + 0x210a, + [0x210e, 0x210f], + 0x2113, + 0x212f, + 0x2134, + 0x2139, + [0x213c, 0x213d], + [0x2146, 0x2149], + 0x214e, + 0x2184, + [0x2c30, 0x2c5f], + 0x2c61, + [0x2c65, 0x2c66], + 0x2c68, + 0x2c6a, + 0x2c6c, + 0x2c71, + [0x2c73, 0x2c74], + [0x2c76, 0x2c7b], + 0x2c81, + 0x2c83, + 0x2c85, + 0x2c87, + 0x2c89, + 0x2c8b, + 0x2c8d, + 0x2c8f, + 0x2c91, + 0x2c93, + 0x2c95, + 0x2c97, + 0x2c99, + 0x2c9b, + 0x2c9d, + 0x2c9f, + 0x2ca1, + 0x2ca3, + 0x2ca5, + 0x2ca7, + 0x2ca9, + 0x2cab, + 0x2cad, + 0x2caf, + 0x2cb1, + 0x2cb3, + 0x2cb5, + 0x2cb7, + 0x2cb9, + 0x2cbb, + 0x2cbd, + 0x2cbf, + 0x2cc1, + 0x2cc3, + 0x2cc5, + 0x2cc7, + 0x2cc9, + 0x2ccb, + 0x2ccd, + 0x2ccf, + 0x2cd1, + 0x2cd3, + 0x2cd5, + 0x2cd7, + 0x2cd9, + 0x2cdb, + 0x2cdd, + 0x2cdf, + 0x2ce1, + [0x2ce3, 0x2ce4], + 0x2cec, + 0x2cee, + 0x2cf3, + [0x2d00, 0x2d25], + 0x2d27, + 0x2d2d, + 0xa641, + 0xa643, + 0xa645, + 0xa647, + 0xa649, + 0xa64b, + 0xa64d, + 0xa64f, + 0xa651, + 0xa653, + 0xa655, + 0xa657, + 0xa659, + 0xa65b, + 0xa65d, + 0xa65f, + 0xa661, + 0xa663, + 0xa665, + 0xa667, + 0xa669, + 0xa66b, + 0xa66d, + 0xa681, + 0xa683, + 0xa685, + 0xa687, + 0xa689, + 0xa68b, + 0xa68d, + 0xa68f, + 0xa691, + 0xa693, + 0xa695, + 0xa697, + 0xa699, + 0xa69b, + 0xa723, + 0xa725, + 0xa727, + 0xa729, + 0xa72b, + 0xa72d, + [0xa72f, 0xa731], + 0xa733, + 0xa735, + 0xa737, + 0xa739, + 0xa73b, + 0xa73d, + 0xa73f, + 0xa741, + 0xa743, + 0xa745, + 0xa747, + 0xa749, + 0xa74b, + 0xa74d, + 0xa74f, + 0xa751, + 0xa753, + 0xa755, + 0xa757, + 0xa759, + 0xa75b, + 0xa75d, + 0xa75f, + 0xa761, + 0xa763, + 0xa765, + 0xa767, + 0xa769, + 0xa76b, + 
0xa76d, + 0xa76f, + [0xa771, 0xa778], + 0xa77a, + 0xa77c, + 0xa77f, + 0xa781, + 0xa783, + 0xa785, + 0xa787, + 0xa78c, + 0xa78e, + 0xa791, + [0xa793, 0xa795], + 0xa797, + 0xa799, + 0xa79b, + 0xa79d, + 0xa79f, + 0xa7a1, + 0xa7a3, + 0xa7a5, + 0xa7a7, + 0xa7a9, + 0xa7af, + 0xa7b5, + 0xa7b7, + 0xa7b9, + 0xa7bb, + 0xa7bd, + 0xa7bf, + 0xa7c1, + 0xa7c3, + 0xa7c8, + 0xa7ca, + 0xa7cd, + 0xa7d1, + 0xa7d3, + 0xa7d5, + 0xa7d7, + 0xa7d9, + 0xa7db, + 0xa7f6, + 0xa7fa, + [0xab30, 0xab5a], + [0xab60, 0xab68], + [0xab70, 0xabbf], + [0xfb00, 0xfb06], + [0xfb13, 0xfb17], + [0xff41, 0xff5a], + [0x10428, 0x1044f], + [0x104d8, 0x104fb], + [0x10597, 0x105a1], + [0x105a3, 0x105b1], + [0x105b3, 0x105b9], + [0x105bb, 0x105bc], + [0x10cc0, 0x10cf2], + [0x10d70, 0x10d85], + [0x118c0, 0x118df], + [0x16e60, 0x16e7f], + [0x1d41a, 0x1d433], + [0x1d44e, 0x1d454], + [0x1d456, 0x1d467], + [0x1d482, 0x1d49b], + [0x1d4b6, 0x1d4b9], + 0x1d4bb, + [0x1d4bd, 0x1d4c3], + [0x1d4c5, 0x1d4cf], + [0x1d4ea, 0x1d503], + [0x1d51e, 0x1d537], + [0x1d552, 0x1d56b], + [0x1d586, 0x1d59f], + [0x1d5ba, 0x1d5d3], + [0x1d5ee, 0x1d607], + [0x1d622, 0x1d63b], + [0x1d656, 0x1d66f], + [0x1d68a, 0x1d6a5], + [0x1d6c2, 0x1d6da], + [0x1d6dc, 0x1d6e1], + [0x1d6fc, 0x1d714], + [0x1d716, 0x1d71b], + [0x1d736, 0x1d74e], + [0x1d750, 0x1d755], + [0x1d770, 0x1d788], + [0x1d78a, 0x1d78f], + [0x1d7aa, 0x1d7c2], + [0x1d7c4, 0x1d7c9], + 0x1d7cb, + [0x1df00, 0x1df09], + [0x1df0b, 0x1df1e], + [0x1df25, 0x1df2a], + [0x1e922, 0x1e943], +]; + +export const unicodeLlSurrogate: UnicodeSurrogateRangeTable = { + 0xd801: [ + [0xdc28, 0xdc4f], // 0x10428..0x1044F + [0xdcd8, 0xdcfb], // 0x104D8..0x104FB + [0xdd97, 0xdda1], // 0x10597..0x105A1 + [0xdda3, 0xddb1], // 0x105A3..0x105B1 + [0xddb3, 0xddb9], // 0x105B3..0x105B9 + [0xddbb, 0xddbc], // 0x105BB..0x105BC + ], + 0xd803: [ + [0xdcc0, 0xdcf2], // 0x10CC0..0x10CF2 + [0xdd70, 0xdd85], // 0x10D70..0x10D85 + ], + 0xd806: [ + [0xdcc0, 0xdcdf], // 0x118C0..0x118DF + ], + 0xd81b: [ + [0xde60, 0xde7f], // 
0x16E60..0x16E7F + ], + 0xd835: [ + [0xdc1a, 0xdc33], // 0x1D41A..0x1D433 + [0xdc4e, 0xdc54], // 0x1D44E..0x1D454 + [0xdc56, 0xdc67], // 0x1D456..0x1D467 + [0xdc82, 0xdc9b], // 0x1D482..0x1D49B + [0xdcb6, 0xdcb9], // 0x1D4B6..0x1D4B9 + 0xdcbb, // 0x1D4BB + [0xdcbd, 0xdcc3], // 0x1D4BD..0x1D4C3 + [0xdcc5, 0xdccf], // 0x1D4C5..0x1D4CF + [0xdcea, 0xdd03], // 0x1D4EA..0x1D503 + [0xdd1e, 0xdd37], // 0x1D51E..0x1D537 + [0xdd52, 0xdd6b], // 0x1D552..0x1D56B + [0xdd86, 0xdd9f], // 0x1D586..0x1D59F + [0xddba, 0xddd3], // 0x1D5BA..0x1D5D3 + [0xddee, 0xde07], // 0x1D5EE..0x1D607 + [0xde22, 0xde3b], // 0x1D622..0x1D63B + [0xde56, 0xde6f], // 0x1D656..0x1D66F + [0xde8a, 0xdea5], // 0x1D68A..0x1D6A5 + [0xdec2, 0xdeda], // 0x1D6C2..0x1D6DA + [0xdedc, 0xdee1], // 0x1D6DC..0x1D6E1 + [0xdefc, 0xdf14], // 0x1D6FC..0x1D714 + [0xdf16, 0xdf1b], // 0x1D716..0x1D71B + [0xdf36, 0xdf4e], // 0x1D736..0x1D74E + [0xdf50, 0xdf55], // 0x1D750..0x1D755 + [0xdf70, 0xdf88], // 0x1D770..0x1D788 + [0xdf8a, 0xdf8f], // 0x1D78A..0x1D78F + [0xdfaa, 0xdfc2], // 0x1D7AA..0x1D7C2 + [0xdfc4, 0xdfc9], // 0x1D7C4..0x1D7C9 + 0xdfcb, // 0x1D7CB + ], + 0xd837: [ + [0xdf00, 0xdf09], // 0x1DF00..0x1DF09 + [0xdf0b, 0xdf1e], // 0x1DF0B..0x1DF1E + [0xdf25, 0xdf2a], // 0x1DF25..0x1DF2A + ], + 0xd83a: [ + [0xdd22, 0xdd43], // 0x1E922..0x1E943 + ], +}; + +export const unicodeLt: UnicodeRangeTable = [ + 0x01c5, + 0x01c8, + 0x01cb, + 0x01f2, + [0x1f88, 0x1f8f], + [0x1f98, 0x1f9f], + [0x1fa8, 0x1faf], + 0x1fbc, + 0x1fcc, + 0x1ffc, +]; + +export const unicodeLo: UnicodeRangeTable = [ + 0x00aa, + 0x00ba, + 0x01bb, + [0x01c0, 0x01c3], + 0x0294, + [0x05d0, 0x05ea], + [0x05ef, 0x05f2], + [0x0620, 0x063f], + [0x0641, 0x064a], + [0x066e, 0x066f], + [0x0671, 0x06d3], + 0x06d5, + [0x06ee, 0x06ef], + [0x06fa, 0x06fc], + 0x06ff, + 0x0710, + [0x0712, 0x072f], + [0x074d, 0x07a5], + 0x07b1, + [0x07ca, 0x07ea], + [0x0800, 0x0815], + [0x0840, 0x0858], + [0x0860, 0x086a], + [0x0870, 0x0887], + [0x0889, 0x088e], + [0x08a0, 0x08c8], + 
[0x0904, 0x0939], + 0x093d, + 0x0950, + [0x0958, 0x0961], + [0x0972, 0x0980], + [0x0985, 0x098c], + [0x098f, 0x0990], + [0x0993, 0x09a8], + [0x09aa, 0x09b0], + 0x09b2, + [0x09b6, 0x09b9], + 0x09bd, + 0x09ce, + [0x09dc, 0x09dd], + [0x09df, 0x09e1], + [0x09f0, 0x09f1], + 0x09fc, + [0x0a05, 0x0a0a], + [0x0a0f, 0x0a10], + [0x0a13, 0x0a28], + [0x0a2a, 0x0a30], + [0x0a32, 0x0a33], + [0x0a35, 0x0a36], + [0x0a38, 0x0a39], + [0x0a59, 0x0a5c], + 0x0a5e, + [0x0a72, 0x0a74], + [0x0a85, 0x0a8d], + [0x0a8f, 0x0a91], + [0x0a93, 0x0aa8], + [0x0aaa, 0x0ab0], + [0x0ab2, 0x0ab3], + [0x0ab5, 0x0ab9], + 0x0abd, + 0x0ad0, + [0x0ae0, 0x0ae1], + 0x0af9, + [0x0b05, 0x0b0c], + [0x0b0f, 0x0b10], + [0x0b13, 0x0b28], + [0x0b2a, 0x0b30], + [0x0b32, 0x0b33], + [0x0b35, 0x0b39], + 0x0b3d, + [0x0b5c, 0x0b5d], + [0x0b5f, 0x0b61], + 0x0b71, + 0x0b83, + [0x0b85, 0x0b8a], + [0x0b8e, 0x0b90], + [0x0b92, 0x0b95], + [0x0b99, 0x0b9a], + 0x0b9c, + [0x0b9e, 0x0b9f], + [0x0ba3, 0x0ba4], + [0x0ba8, 0x0baa], + [0x0bae, 0x0bb9], + 0x0bd0, + [0x0c05, 0x0c0c], + [0x0c0e, 0x0c10], + [0x0c12, 0x0c28], + [0x0c2a, 0x0c39], + 0x0c3d, + [0x0c58, 0x0c5a], + 0x0c5d, + [0x0c60, 0x0c61], + 0x0c80, + [0x0c85, 0x0c8c], + [0x0c8e, 0x0c90], + [0x0c92, 0x0ca8], + [0x0caa, 0x0cb3], + [0x0cb5, 0x0cb9], + 0x0cbd, + [0x0cdd, 0x0cde], + [0x0ce0, 0x0ce1], + [0x0cf1, 0x0cf2], + [0x0d04, 0x0d0c], + [0x0d0e, 0x0d10], + [0x0d12, 0x0d3a], + 0x0d3d, + 0x0d4e, + [0x0d54, 0x0d56], + [0x0d5f, 0x0d61], + [0x0d7a, 0x0d7f], + [0x0d85, 0x0d96], + [0x0d9a, 0x0db1], + [0x0db3, 0x0dbb], + 0x0dbd, + [0x0dc0, 0x0dc6], + [0x0e01, 0x0e30], + [0x0e32, 0x0e33], + [0x0e40, 0x0e45], + [0x0e81, 0x0e82], + 0x0e84, + [0x0e86, 0x0e8a], + [0x0e8c, 0x0ea3], + 0x0ea5, + [0x0ea7, 0x0eb0], + [0x0eb2, 0x0eb3], + 0x0ebd, + [0x0ec0, 0x0ec4], + [0x0edc, 0x0edf], + 0x0f00, + [0x0f40, 0x0f47], + [0x0f49, 0x0f6c], + [0x0f88, 0x0f8c], + [0x1000, 0x102a], + 0x103f, + [0x1050, 0x1055], + [0x105a, 0x105d], + 0x1061, + [0x1065, 0x1066], + [0x106e, 0x1070], + [0x1075, 0x1081], + 
0x108e, + [0x1100, 0x1248], + [0x124a, 0x124d], + [0x1250, 0x1256], + 0x1258, + [0x125a, 0x125d], + [0x1260, 0x1288], + [0x128a, 0x128d], + [0x1290, 0x12b0], + [0x12b2, 0x12b5], + [0x12b8, 0x12be], + 0x12c0, + [0x12c2, 0x12c5], + [0x12c8, 0x12d6], + [0x12d8, 0x1310], + [0x1312, 0x1315], + [0x1318, 0x135a], + [0x1380, 0x138f], + [0x1401, 0x166c], + [0x166f, 0x167f], + [0x1681, 0x169a], + [0x16a0, 0x16ea], + [0x16f1, 0x16f8], + [0x1700, 0x1711], + [0x171f, 0x1731], + [0x1740, 0x1751], + [0x1760, 0x176c], + [0x176e, 0x1770], + [0x1780, 0x17b3], + 0x17dc, + [0x1820, 0x1842], + [0x1844, 0x1878], + [0x1880, 0x1884], + [0x1887, 0x18a8], + 0x18aa, + [0x18b0, 0x18f5], + [0x1900, 0x191e], + [0x1950, 0x196d], + [0x1970, 0x1974], + [0x1980, 0x19ab], + [0x19b0, 0x19c9], + [0x1a00, 0x1a16], + [0x1a20, 0x1a54], + [0x1b05, 0x1b33], + [0x1b45, 0x1b4c], + [0x1b83, 0x1ba0], + [0x1bae, 0x1baf], + [0x1bba, 0x1be5], + [0x1c00, 0x1c23], + [0x1c4d, 0x1c4f], + [0x1c5a, 0x1c77], + [0x1ce9, 0x1cec], + [0x1cee, 0x1cf3], + [0x1cf5, 0x1cf6], + 0x1cfa, + [0x2135, 0x2138], + [0x2d30, 0x2d67], + [0x2d80, 0x2d96], + [0x2da0, 0x2da6], + [0x2da8, 0x2dae], + [0x2db0, 0x2db6], + [0x2db8, 0x2dbe], + [0x2dc0, 0x2dc6], + [0x2dc8, 0x2dce], + [0x2dd0, 0x2dd6], + [0x2dd8, 0x2dde], + 0x3006, + 0x303c, + [0x3041, 0x3096], + 0x309f, + [0x30a1, 0x30fa], + 0x30ff, + [0x3105, 0x312f], + [0x3131, 0x318e], + [0x31a0, 0x31bf], + [0x31f0, 0x31ff], + [0x3400, 0x4dbf], + [0x4e00, 0xa014], + [0xa016, 0xa48c], + [0xa4d0, 0xa4f7], + [0xa500, 0xa60b], + [0xa610, 0xa61f], + [0xa62a, 0xa62b], + 0xa66e, + [0xa6a0, 0xa6e5], + 0xa78f, + 0xa7f7, + [0xa7fb, 0xa801], + [0xa803, 0xa805], + [0xa807, 0xa80a], + [0xa80c, 0xa822], + [0xa840, 0xa873], + [0xa882, 0xa8b3], + [0xa8f2, 0xa8f7], + 0xa8fb, + [0xa8fd, 0xa8fe], + [0xa90a, 0xa925], + [0xa930, 0xa946], + [0xa960, 0xa97c], + [0xa984, 0xa9b2], + [0xa9e0, 0xa9e4], + [0xa9e7, 0xa9ef], + [0xa9fa, 0xa9fe], + [0xaa00, 0xaa28], + [0xaa40, 0xaa42], + [0xaa44, 0xaa4b], + [0xaa60, 0xaa6f], + 
[0xaa71, 0xaa76], + 0xaa7a, + [0xaa7e, 0xaaaf], + 0xaab1, + [0xaab5, 0xaab6], + [0xaab9, 0xaabd], + 0xaac0, + 0xaac2, + [0xaadb, 0xaadc], + [0xaae0, 0xaaea], + 0xaaf2, + [0xab01, 0xab06], + [0xab09, 0xab0e], + [0xab11, 0xab16], + [0xab20, 0xab26], + [0xab28, 0xab2e], + [0xabc0, 0xabe2], + [0xac00, 0xd7a3], + [0xd7b0, 0xd7c6], + [0xd7cb, 0xd7fb], + [0xf900, 0xfa6d], + [0xfa70, 0xfad9], + 0xfb1d, + [0xfb1f, 0xfb28], + [0xfb2a, 0xfb36], + [0xfb38, 0xfb3c], + 0xfb3e, + [0xfb40, 0xfb41], + [0xfb43, 0xfb44], + [0xfb46, 0xfbb1], + [0xfbd3, 0xfd3d], + [0xfd50, 0xfd8f], + [0xfd92, 0xfdc7], + [0xfdf0, 0xfdfb], + [0xfe70, 0xfe74], + [0xfe76, 0xfefc], + [0xff66, 0xff6f], + [0xff71, 0xff9d], + [0xffa0, 0xffbe], + [0xffc2, 0xffc7], + [0xffca, 0xffcf], + [0xffd2, 0xffd7], + [0xffda, 0xffdc], + [0x10000, 0x1000b], + [0x1000d, 0x10026], + [0x10028, 0x1003a], + [0x1003c, 0x1003d], + [0x1003f, 0x1004d], + [0x10050, 0x1005d], + [0x10080, 0x100fa], + [0x10280, 0x1029c], + [0x102a0, 0x102d0], + [0x10300, 0x1031f], + [0x1032d, 0x10340], + [0x10342, 0x10349], + [0x10350, 0x10375], + [0x10380, 0x1039d], + [0x103a0, 0x103c3], + [0x103c8, 0x103cf], + [0x10450, 0x1049d], + [0x10500, 0x10527], + [0x10530, 0x10563], + [0x105c0, 0x105f3], + [0x10600, 0x10736], + [0x10740, 0x10755], + [0x10760, 0x10767], + [0x10800, 0x10805], + 0x10808, + [0x1080a, 0x10835], + [0x10837, 0x10838], + 0x1083c, + [0x1083f, 0x10855], + [0x10860, 0x10876], + [0x10880, 0x1089e], + [0x108e0, 0x108f2], + [0x108f4, 0x108f5], + [0x10900, 0x10915], + [0x10920, 0x10939], + [0x10980, 0x109b7], + [0x109be, 0x109bf], + 0x10a00, + [0x10a10, 0x10a13], + [0x10a15, 0x10a17], + [0x10a19, 0x10a35], + [0x10a60, 0x10a7c], + [0x10a80, 0x10a9c], + [0x10ac0, 0x10ac7], + [0x10ac9, 0x10ae4], + [0x10b00, 0x10b35], + [0x10b40, 0x10b55], + [0x10b60, 0x10b72], + [0x10b80, 0x10b91], + [0x10c00, 0x10c48], + [0x10d00, 0x10d23], + [0x10d4a, 0x10d4d], + 0x10d4f, + [0x10e80, 0x10ea9], + [0x10eb0, 0x10eb1], + [0x10ec2, 0x10ec4], + [0x10f00, 0x10f1c], + 
0x10f27, + [0x10f30, 0x10f45], + [0x10f70, 0x10f81], + [0x10fb0, 0x10fc4], + [0x10fe0, 0x10ff6], + [0x11003, 0x11037], + [0x11071, 0x11072], + 0x11075, + [0x11083, 0x110af], + [0x110d0, 0x110e8], + [0x11103, 0x11126], + 0x11144, + 0x11147, + [0x11150, 0x11172], + 0x11176, + [0x11183, 0x111b2], + [0x111c1, 0x111c4], + 0x111da, + 0x111dc, + [0x11200, 0x11211], + [0x11213, 0x1122b], + [0x1123f, 0x11240], + [0x11280, 0x11286], + 0x11288, + [0x1128a, 0x1128d], + [0x1128f, 0x1129d], + [0x1129f, 0x112a8], + [0x112b0, 0x112de], + [0x11305, 0x1130c], + [0x1130f, 0x11310], + [0x11313, 0x11328], + [0x1132a, 0x11330], + [0x11332, 0x11333], + [0x11335, 0x11339], + 0x1133d, + 0x11350, + [0x1135d, 0x11361], + [0x11380, 0x11389], + 0x1138b, + 0x1138e, + [0x11390, 0x113b5], + 0x113b7, + 0x113d1, + 0x113d3, + [0x11400, 0x11434], + [0x11447, 0x1144a], + [0x1145f, 0x11461], + [0x11480, 0x114af], + [0x114c4, 0x114c5], + 0x114c7, + [0x11580, 0x115ae], + [0x115d8, 0x115db], + [0x11600, 0x1162f], + 0x11644, + [0x11680, 0x116aa], + 0x116b8, + [0x11700, 0x1171a], + [0x11740, 0x11746], + [0x11800, 0x1182b], + [0x118ff, 0x11906], + 0x11909, + [0x1190c, 0x11913], + [0x11915, 0x11916], + [0x11918, 0x1192f], + 0x1193f, + 0x11941, + [0x119a0, 0x119a7], + [0x119aa, 0x119d0], + 0x119e1, + 0x119e3, + 0x11a00, + [0x11a0b, 0x11a32], + 0x11a3a, + 0x11a50, + [0x11a5c, 0x11a89], + 0x11a9d, + [0x11ab0, 0x11af8], + [0x11bc0, 0x11be0], + [0x11c00, 0x11c08], + [0x11c0a, 0x11c2e], + 0x11c40, + [0x11c72, 0x11c8f], + [0x11d00, 0x11d06], + [0x11d08, 0x11d09], + [0x11d0b, 0x11d30], + 0x11d46, + [0x11d60, 0x11d65], + [0x11d67, 0x11d68], + [0x11d6a, 0x11d89], + 0x11d98, + [0x11ee0, 0x11ef2], + 0x11f02, + [0x11f04, 0x11f10], + [0x11f12, 0x11f33], + 0x11fb0, + [0x12000, 0x12399], + [0x12480, 0x12543], + [0x12f90, 0x12ff0], + [0x13000, 0x1342f], + [0x13441, 0x13446], + [0x13460, 0x143fa], + [0x14400, 0x14646], + [0x16100, 0x1611d], + [0x16800, 0x16a38], + [0x16a40, 0x16a5e], + [0x16a70, 0x16abe], + [0x16ad0, 0x16aed], 
+ [0x16b00, 0x16b2f], + [0x16b63, 0x16b77], + [0x16b7d, 0x16b8f], + [0x16d43, 0x16d6a], + [0x16f00, 0x16f4a], + 0x16f50, + [0x17000, 0x187f7], + [0x18800, 0x18cd5], + [0x18cff, 0x18d08], + [0x1b000, 0x1b122], + 0x1b132, + [0x1b150, 0x1b152], + 0x1b155, + [0x1b164, 0x1b167], + [0x1b170, 0x1b2fb], + [0x1bc00, 0x1bc6a], + [0x1bc70, 0x1bc7c], + [0x1bc80, 0x1bc88], + [0x1bc90, 0x1bc99], + 0x1df0a, + [0x1e100, 0x1e12c], + 0x1e14e, + [0x1e290, 0x1e2ad], + [0x1e2c0, 0x1e2eb], + [0x1e4d0, 0x1e4ea], + [0x1e5d0, 0x1e5ed], + 0x1e5f0, + [0x1e7e0, 0x1e7e6], + [0x1e7e8, 0x1e7eb], + [0x1e7ed, 0x1e7ee], + [0x1e7f0, 0x1e7fe], + [0x1e800, 0x1e8c4], + [0x1ee00, 0x1ee03], + [0x1ee05, 0x1ee1f], + [0x1ee21, 0x1ee22], + 0x1ee24, + 0x1ee27, + [0x1ee29, 0x1ee32], + [0x1ee34, 0x1ee37], + 0x1ee39, + 0x1ee3b, + 0x1ee42, + 0x1ee47, + 0x1ee49, + 0x1ee4b, + [0x1ee4d, 0x1ee4f], + [0x1ee51, 0x1ee52], + 0x1ee54, + 0x1ee57, + 0x1ee59, + 0x1ee5b, + 0x1ee5d, + 0x1ee5f, + [0x1ee61, 0x1ee62], + 0x1ee64, + [0x1ee67, 0x1ee6a], + [0x1ee6c, 0x1ee72], + [0x1ee74, 0x1ee77], + [0x1ee79, 0x1ee7c], + 0x1ee7e, + [0x1ee80, 0x1ee89], + [0x1ee8b, 0x1ee9b], + [0x1eea1, 0x1eea3], + [0x1eea5, 0x1eea9], + [0x1eeab, 0x1eebb], + [0x20000, 0x2a6df], + [0x2a700, 0x2b739], + [0x2b740, 0x2b81d], + [0x2b820, 0x2cea1], + [0x2ceb0, 0x2ebe0], + [0x2ebf0, 0x2ee5d], + [0x2f800, 0x2fa1d], + [0x30000, 0x3134a], + [0x31350, 0x323af], +]; + +export const unicodeLoSurrogate: UnicodeSurrogateRangeTable = { + 0xd800: [ + [0xdc00, 0xdc0b], // 0x10000..0x1000B + [0xdc0d, 0xdc26], // 0x1000D..0x10026 + [0xdc28, 0xdc3a], // 0x10028..0x1003A + [0xdc3c, 0xdc3d], // 0x1003C..0x1003D + [0xdc3f, 0xdc4d], // 0x1003F..0x1004D + [0xdc50, 0xdc5d], // 0x10050..0x1005D + [0xdc80, 0xdcfa], // 0x10080..0x100FA + [0xde80, 0xde9c], // 0x10280..0x1029C + [0xdea0, 0xded0], // 0x102A0..0x102D0 + [0xdf00, 0xdf1f], // 0x10300..0x1031F + [0xdf2d, 0xdf40], // 0x1032D..0x10340 + [0xdf42, 0xdf49], // 0x10342..0x10349 + [0xdf50, 0xdf75], // 0x10350..0x10375 + [0xdf80, 
0xdf9d], // 0x10380..0x1039D + [0xdfa0, 0xdfc3], // 0x103A0..0x103C3 + [0xdfc8, 0xdfcf], // 0x103C8..0x103CF + ], + 0xd801: [ + [0xdc50, 0xdc9d], // 0x10450..0x1049D + [0xdd00, 0xdd27], // 0x10500..0x10527 + [0xdd30, 0xdd63], // 0x10530..0x10563 + [0xddc0, 0xddf3], // 0x105C0..0x105F3 + [0xde00, 0xdf36], // 0x10600..0x10736 + [0xdf40, 0xdf55], // 0x10740..0x10755 + [0xdf60, 0xdf67], // 0x10760..0x10767 + ], + 0xd802: [ + [0xdc00, 0xdc05], // 0x10800..0x10805 + 0xdc08, // 0x10808 + [0xdc0a, 0xdc35], // 0x1080A..0x10835 + [0xdc37, 0xdc38], // 0x10837..0x10838 + 0xdc3c, // 0x1083C + [0xdc3f, 0xdc55], // 0x1083F..0x10855 + [0xdc60, 0xdc76], // 0x10860..0x10876 + [0xdc80, 0xdc9e], // 0x10880..0x1089E + [0xdce0, 0xdcf2], // 0x108E0..0x108F2 + [0xdcf4, 0xdcf5], // 0x108F4..0x108F5 + [0xdd00, 0xdd15], // 0x10900..0x10915 + [0xdd20, 0xdd39], // 0x10920..0x10939 + [0xdd80, 0xddb7], // 0x10980..0x109B7 + [0xddbe, 0xddbf], // 0x109BE..0x109BF + 0xde00, // 0x10A00 + [0xde10, 0xde13], // 0x10A10..0x10A13 + [0xde15, 0xde17], // 0x10A15..0x10A17 + [0xde19, 0xde35], // 0x10A19..0x10A35 + [0xde60, 0xde7c], // 0x10A60..0x10A7C + [0xde80, 0xde9c], // 0x10A80..0x10A9C + [0xdec0, 0xdec7], // 0x10AC0..0x10AC7 + [0xdec9, 0xdee4], // 0x10AC9..0x10AE4 + [0xdf00, 0xdf35], // 0x10B00..0x10B35 + [0xdf40, 0xdf55], // 0x10B40..0x10B55 + [0xdf60, 0xdf72], // 0x10B60..0x10B72 + [0xdf80, 0xdf91], // 0x10B80..0x10B91 + ], + 0xd803: [ + [0xdc00, 0xdc48], // 0x10C00..0x10C48 + [0xdd00, 0xdd23], // 0x10D00..0x10D23 + [0xdd4a, 0xdd4d], // 0x10D4A..0x10D4D + 0xdd4f, // 0x10D4F + [0xde80, 0xdea9], // 0x10E80..0x10EA9 + [0xdeb0, 0xdeb1], // 0x10EB0..0x10EB1 + [0xdec2, 0xdec4], // 0x10EC2..0x10EC4 + [0xdf00, 0xdf1c], // 0x10F00..0x10F1C + 0xdf27, // 0x10F27 + [0xdf30, 0xdf45], // 0x10F30..0x10F45 + [0xdf70, 0xdf81], // 0x10F70..0x10F81 + [0xdfb0, 0xdfc4], // 0x10FB0..0x10FC4 + [0xdfe0, 0xdff6], // 0x10FE0..0x10FF6 + ], + 0xd804: [ + [0xdc03, 0xdc37], // 0x11003..0x11037 + [0xdc71, 0xdc72], // 
0x11071..0x11072 + 0xdc75, // 0x11075 + [0xdc83, 0xdcaf], // 0x11083..0x110AF + [0xdcd0, 0xdce8], // 0x110D0..0x110E8 + [0xdd03, 0xdd26], // 0x11103..0x11126 + 0xdd44, // 0x11144 + 0xdd47, // 0x11147 + [0xdd50, 0xdd72], // 0x11150..0x11172 + 0xdd76, // 0x11176 + [0xdd83, 0xddb2], // 0x11183..0x111B2 + [0xddc1, 0xddc4], // 0x111C1..0x111C4 + 0xddda, // 0x111DA + 0xdddc, // 0x111DC + [0xde00, 0xde11], // 0x11200..0x11211 + [0xde13, 0xde2b], // 0x11213..0x1122B + [0xde3f, 0xde40], // 0x1123F..0x11240 + [0xde80, 0xde86], // 0x11280..0x11286 + 0xde88, // 0x11288 + [0xde8a, 0xde8d], // 0x1128A..0x1128D + [0xde8f, 0xde9d], // 0x1128F..0x1129D + [0xde9f, 0xdea8], // 0x1129F..0x112A8 + [0xdeb0, 0xdede], // 0x112B0..0x112DE + [0xdf05, 0xdf0c], // 0x11305..0x1130C + [0xdf0f, 0xdf10], // 0x1130F..0x11310 + [0xdf13, 0xdf28], // 0x11313..0x11328 + [0xdf2a, 0xdf30], // 0x1132A..0x11330 + [0xdf32, 0xdf33], // 0x11332..0x11333 + [0xdf35, 0xdf39], // 0x11335..0x11339 + 0xdf3d, // 0x1133D + 0xdf50, // 0x11350 + [0xdf5d, 0xdf61], // 0x1135D..0x11361 + [0xdf80, 0xdf89], // 0x11380..0x11389 + 0xdf8b, // 0x1138B + 0xdf8e, // 0x1138E + [0xdf90, 0xdfb5], // 0x11390..0x113B5 + 0xdfb7, // 0x113B7 + 0xdfd1, // 0x113D1 + 0xdfd3, // 0x113D3 + ], + 0xd805: [ + [0xdc00, 0xdc34], // 0x11400..0x11434 + [0xdc47, 0xdc4a], // 0x11447..0x1144A + [0xdc5f, 0xdc61], // 0x1145F..0x11461 + [0xdc80, 0xdcaf], // 0x11480..0x114AF + [0xdcc4, 0xdcc5], // 0x114C4..0x114C5 + 0xdcc7, // 0x114C7 + [0xdd80, 0xddae], // 0x11580..0x115AE + [0xddd8, 0xdddb], // 0x115D8..0x115DB + [0xde00, 0xde2f], // 0x11600..0x1162F + 0xde44, // 0x11644 + [0xde80, 0xdeaa], // 0x11680..0x116AA + 0xdeb8, // 0x116B8 + [0xdf00, 0xdf1a], // 0x11700..0x1171A + [0xdf40, 0xdf46], // 0x11740..0x11746 + ], + 0xd806: [ + [0xdc00, 0xdc2b], // 0x11800..0x1182B + [0xdcff, 0xdd06], // 0x118FF..0x11906 + 0xdd09, // 0x11909 + [0xdd0c, 0xdd13], // 0x1190C..0x11913 + [0xdd15, 0xdd16], // 0x11915..0x11916 + [0xdd18, 0xdd2f], // 0x11918..0x1192F + 0xdd3f, 
// 0x1193F + 0xdd41, // 0x11941 + [0xdda0, 0xdda7], // 0x119A0..0x119A7 + [0xddaa, 0xddd0], // 0x119AA..0x119D0 + 0xdde1, // 0x119E1 + 0xdde3, // 0x119E3 + 0xde00, // 0x11A00 + [0xde0b, 0xde32], // 0x11A0B..0x11A32 + 0xde3a, // 0x11A3A + 0xde50, // 0x11A50 + [0xde5c, 0xde89], // 0x11A5C..0x11A89 + 0xde9d, // 0x11A9D + [0xdeb0, 0xdef8], // 0x11AB0..0x11AF8 + [0xdfc0, 0xdfe0], // 0x11BC0..0x11BE0 + ], + 0xd807: [ + [0xdc00, 0xdc08], // 0x11C00..0x11C08 + [0xdc0a, 0xdc2e], // 0x11C0A..0x11C2E + 0xdc40, // 0x11C40 + [0xdc72, 0xdc8f], // 0x11C72..0x11C8F + [0xdd00, 0xdd06], // 0x11D00..0x11D06 + [0xdd08, 0xdd09], // 0x11D08..0x11D09 + [0xdd0b, 0xdd30], // 0x11D0B..0x11D30 + 0xdd46, // 0x11D46 + [0xdd60, 0xdd65], // 0x11D60..0x11D65 + [0xdd67, 0xdd68], // 0x11D67..0x11D68 + [0xdd6a, 0xdd89], // 0x11D6A..0x11D89 + 0xdd98, // 0x11D98 + [0xdee0, 0xdef2], // 0x11EE0..0x11EF2 + 0xdf02, // 0x11F02 + [0xdf04, 0xdf10], // 0x11F04..0x11F10 + [0xdf12, 0xdf33], // 0x11F12..0x11F33 + 0xdfb0, // 0x11FB0 + ], + 0xd808: [ + [0xdc00, 0xdf99], // 0x12000..0x12399 + ], + 0xd809: [ + [0xdc80, 0xdd43], // 0x12480..0x12543 + ], + 0xd80b: [ + [0xdf90, 0xdff0], // 0x12F90..0x12FF0 + ], + 0xd80c: [ + [0xdc00, 0xdfff], // 0x13000..0x133FF + ], + 0xd80d: [ + [0xdc00, 0xdc2f], // 0x13400..0x1342F + [0xdc41, 0xdc46], // 0x13441..0x13446 + [0xdc60, 0xdfff], // 0x13460..0x137FF + ], + 0xd80e: [ + [0xdc00, 0xdfff], // 0x13800..0x13BFF + ], + 0xd80f: [ + [0xdc00, 0xdfff], // 0x13C00..0x13FFF + ], + 0xd810: [ + [0xdc00, 0xdffa], // 0x14000..0x143FA + ], + 0xd811: [ + [0xdc00, 0xde46], // 0x14400..0x14646 + ], + 0xd818: [ + [0xdd00, 0xdd1d], // 0x16100..0x1611D + ], + 0xd81a: [ + [0xdc00, 0xde38], // 0x16800..0x16A38 + [0xde40, 0xde5e], // 0x16A40..0x16A5E + [0xde70, 0xdebe], // 0x16A70..0x16ABE + [0xded0, 0xdeed], // 0x16AD0..0x16AED + [0xdf00, 0xdf2f], // 0x16B00..0x16B2F + [0xdf63, 0xdf77], // 0x16B63..0x16B77 + [0xdf7d, 0xdf8f], // 0x16B7D..0x16B8F + ], + 0xd81b: [ + [0xdd43, 0xdd6a], // 
0x16D43..0x16D6A + [0xdf00, 0xdf4a], // 0x16F00..0x16F4A + 0xdf50, // 0x16F50 + ], + 0xd81c: [ + [0xdc00, 0xdfff], // 0x17000..0x173FF + ], + 0xd81d: [ + [0xdc00, 0xdfff], // 0x17400..0x177FF + ], + 0xd81e: [ + [0xdc00, 0xdfff], // 0x17800..0x17BFF + ], + 0xd81f: [ + [0xdc00, 0xdfff], // 0x17C00..0x17FFF + ], + 0xd820: [ + [0xdc00, 0xdfff], // 0x18000..0x183FF + ], + 0xd821: [ + [0xdc00, 0xdff7], // 0x18400..0x187F7 + ], + 0xd822: [ + [0xdc00, 0xdfff], // 0x18800..0x18BFF + ], + 0xd823: [ + [0xdc00, 0xdcd5], // 0x18C00..0x18CD5 + [0xdcff, 0xdd08], // 0x18CFF..0x18D08 + ], + 0xd82c: [ + [0xdc00, 0xdd22], // 0x1B000..0x1B122 + 0xdd32, // 0x1B132 + [0xdd50, 0xdd52], // 0x1B150..0x1B152 + 0xdd55, // 0x1B155 + [0xdd64, 0xdd67], // 0x1B164..0x1B167 + [0xdd70, 0xdefb], // 0x1B170..0x1B2FB + ], + 0xd82f: [ + [0xdc00, 0xdc6a], // 0x1BC00..0x1BC6A + [0xdc70, 0xdc7c], // 0x1BC70..0x1BC7C + [0xdc80, 0xdc88], // 0x1BC80..0x1BC88 + [0xdc90, 0xdc99], // 0x1BC90..0x1BC99 + ], + 0xd837: [ + 0xdf0a, // 0x1DF0A + ], + 0xd838: [ + [0xdd00, 0xdd2c], // 0x1E100..0x1E12C + 0xdd4e, // 0x1E14E + [0xde90, 0xdead], // 0x1E290..0x1E2AD + [0xdec0, 0xdeeb], // 0x1E2C0..0x1E2EB + ], + 0xd839: [ + [0xdcd0, 0xdcea], // 0x1E4D0..0x1E4EA + [0xddd0, 0xdded], // 0x1E5D0..0x1E5ED + 0xddf0, // 0x1E5F0 + [0xdfe0, 0xdfe6], // 0x1E7E0..0x1E7E6 + [0xdfe8, 0xdfeb], // 0x1E7E8..0x1E7EB + [0xdfed, 0xdfee], // 0x1E7ED..0x1E7EE + [0xdff0, 0xdffe], // 0x1E7F0..0x1E7FE + ], + 0xd83a: [ + [0xdc00, 0xdcc4], // 0x1E800..0x1E8C4 + ], + 0xd83b: [ + [0xde00, 0xde03], // 0x1EE00..0x1EE03 + [0xde05, 0xde1f], // 0x1EE05..0x1EE1F + [0xde21, 0xde22], // 0x1EE21..0x1EE22 + 0xde24, // 0x1EE24 + 0xde27, // 0x1EE27 + [0xde29, 0xde32], // 0x1EE29..0x1EE32 + [0xde34, 0xde37], // 0x1EE34..0x1EE37 + 0xde39, // 0x1EE39 + 0xde3b, // 0x1EE3B + 0xde42, // 0x1EE42 + 0xde47, // 0x1EE47 + 0xde49, // 0x1EE49 + 0xde4b, // 0x1EE4B + [0xde4d, 0xde4f], // 0x1EE4D..0x1EE4F + [0xde51, 0xde52], // 0x1EE51..0x1EE52 + 0xde54, // 0x1EE54 + 0xde57, // 
0x1EE57 + 0xde59, // 0x1EE59 + 0xde5b, // 0x1EE5B + 0xde5d, // 0x1EE5D + 0xde5f, // 0x1EE5F + [0xde61, 0xde62], // 0x1EE61..0x1EE62 + 0xde64, // 0x1EE64 + [0xde67, 0xde6a], // 0x1EE67..0x1EE6A + [0xde6c, 0xde72], // 0x1EE6C..0x1EE72 + [0xde74, 0xde77], // 0x1EE74..0x1EE77 + [0xde79, 0xde7c], // 0x1EE79..0x1EE7C + 0xde7e, // 0x1EE7E + [0xde80, 0xde89], // 0x1EE80..0x1EE89 + [0xde8b, 0xde9b], // 0x1EE8B..0x1EE9B + [0xdea1, 0xdea3], // 0x1EEA1..0x1EEA3 + [0xdea5, 0xdea9], // 0x1EEA5..0x1EEA9 + [0xdeab, 0xdebb], // 0x1EEAB..0x1EEBB + ], + 0xd840: [ + [0xdc00, 0xdfff], // 0x20000..0x203FF + ], + 0xd841: [ + [0xdc00, 0xdfff], // 0x20400..0x207FF + ], + 0xd842: [ + [0xdc00, 0xdfff], // 0x20800..0x20BFF + ], + 0xd843: [ + [0xdc00, 0xdfff], // 0x20C00..0x20FFF + ], + 0xd844: [ + [0xdc00, 0xdfff], // 0x21000..0x213FF + ], + 0xd845: [ + [0xdc00, 0xdfff], // 0x21400..0x217FF + ], + 0xd846: [ + [0xdc00, 0xdfff], // 0x21800..0x21BFF + ], + 0xd847: [ + [0xdc00, 0xdfff], // 0x21C00..0x21FFF + ], + 0xd848: [ + [0xdc00, 0xdfff], // 0x22000..0x223FF + ], + 0xd849: [ + [0xdc00, 0xdfff], // 0x22400..0x227FF + ], + 0xd84a: [ + [0xdc00, 0xdfff], // 0x22800..0x22BFF + ], + 0xd84b: [ + [0xdc00, 0xdfff], // 0x22C00..0x22FFF + ], + 0xd84c: [ + [0xdc00, 0xdfff], // 0x23000..0x233FF + ], + 0xd84d: [ + [0xdc00, 0xdfff], // 0x23400..0x237FF + ], + 0xd84e: [ + [0xdc00, 0xdfff], // 0x23800..0x23BFF + ], + 0xd84f: [ + [0xdc00, 0xdfff], // 0x23C00..0x23FFF + ], + 0xd850: [ + [0xdc00, 0xdfff], // 0x24000..0x243FF + ], + 0xd851: [ + [0xdc00, 0xdfff], // 0x24400..0x247FF + ], + 0xd852: [ + [0xdc00, 0xdfff], // 0x24800..0x24BFF + ], + 0xd853: [ + [0xdc00, 0xdfff], // 0x24C00..0x24FFF + ], + 0xd854: [ + [0xdc00, 0xdfff], // 0x25000..0x253FF + ], + 0xd855: [ + [0xdc00, 0xdfff], // 0x25400..0x257FF + ], + 0xd856: [ + [0xdc00, 0xdfff], // 0x25800..0x25BFF + ], + 0xd857: [ + [0xdc00, 0xdfff], // 0x25C00..0x25FFF + ], + 0xd858: [ + [0xdc00, 0xdfff], // 0x26000..0x263FF + ], + 0xd859: [ + [0xdc00, 0xdfff], // 
0x26400..0x267FF + ], + 0xd85a: [ + [0xdc00, 0xdfff], // 0x26800..0x26BFF + ], + 0xd85b: [ + [0xdc00, 0xdfff], // 0x26C00..0x26FFF + ], + 0xd85c: [ + [0xdc00, 0xdfff], // 0x27000..0x273FF + ], + 0xd85d: [ + [0xdc00, 0xdfff], // 0x27400..0x277FF + ], + 0xd85e: [ + [0xdc00, 0xdfff], // 0x27800..0x27BFF + ], + 0xd85f: [ + [0xdc00, 0xdfff], // 0x27C00..0x27FFF + ], + 0xd860: [ + [0xdc00, 0xdfff], // 0x28000..0x283FF + ], + 0xd861: [ + [0xdc00, 0xdfff], // 0x28400..0x287FF + ], + 0xd862: [ + [0xdc00, 0xdfff], // 0x28800..0x28BFF + ], + 0xd863: [ + [0xdc00, 0xdfff], // 0x28C00..0x28FFF + ], + 0xd864: [ + [0xdc00, 0xdfff], // 0x29000..0x293FF + ], + 0xd865: [ + [0xdc00, 0xdfff], // 0x29400..0x297FF + ], + 0xd866: [ + [0xdc00, 0xdfff], // 0x29800..0x29BFF + ], + 0xd867: [ + [0xdc00, 0xdfff], // 0x29C00..0x29FFF + ], + 0xd868: [ + [0xdc00, 0xdfff], // 0x2A000..0x2A3FF + ], + 0xd869: [ + [0xdc00, 0xdedf], // 0x2A400..0x2A6DF + [0xdf00, 0xdfff], // 0x2A700..0x2A7FF + ], + 0xd86a: [ + [0xdc00, 0xdfff], // 0x2A800..0x2ABFF + ], + 0xd86b: [ + [0xdc00, 0xdfff], // 0x2AC00..0x2AFFF + ], + 0xd86c: [ + [0xdc00, 0xdfff], // 0x2B000..0x2B3FF + ], + 0xd86d: [ + [0xdc00, 0xdf39], // 0x2B400..0x2B739 + [0xdf40, 0xdfff], // 0x2B740..0x2B7FF + ], + 0xd86e: [ + [0xdc00, 0xdc1d], // 0x2B800..0x2B81D + [0xdc20, 0xdfff], // 0x2B820..0x2BBFF + ], + 0xd86f: [ + [0xdc00, 0xdfff], // 0x2BC00..0x2BFFF + ], + 0xd870: [ + [0xdc00, 0xdfff], // 0x2C000..0x2C3FF + ], + 0xd871: [ + [0xdc00, 0xdfff], // 0x2C400..0x2C7FF + ], + 0xd872: [ + [0xdc00, 0xdfff], // 0x2C800..0x2CBFF + ], + 0xd873: [ + [0xdc00, 0xdea1], // 0x2CC00..0x2CEA1 + [0xdeb0, 0xdfff], // 0x2CEB0..0x2CFFF + ], + 0xd874: [ + [0xdc00, 0xdfff], // 0x2D000..0x2D3FF + ], + 0xd875: [ + [0xdc00, 0xdfff], // 0x2D400..0x2D7FF + ], + 0xd876: [ + [0xdc00, 0xdfff], // 0x2D800..0x2DBFF + ], + 0xd877: [ + [0xdc00, 0xdfff], // 0x2DC00..0x2DFFF + ], + 0xd878: [ + [0xdc00, 0xdfff], // 0x2E000..0x2E3FF + ], + 0xd879: [ + [0xdc00, 0xdfff], // 
0x2E400..0x2E7FF + ], + 0xd87a: [ + [0xdc00, 0xdfe0], // 0x2E800..0x2EBE0 + [0xdff0, 0xdfff], // 0x2EBF0..0x2EBFF + ], + 0xd87b: [ + [0xdc00, 0xde5d], // 0x2EC00..0x2EE5D + ], + 0xd87e: [ + [0xdc00, 0xde1d], // 0x2F800..0x2FA1D + ], + 0xd880: [ + [0xdc00, 0xdfff], // 0x30000..0x303FF + ], + 0xd881: [ + [0xdc00, 0xdfff], // 0x30400..0x307FF + ], + 0xd882: [ + [0xdc00, 0xdfff], // 0x30800..0x30BFF + ], + 0xd883: [ + [0xdc00, 0xdfff], // 0x30C00..0x30FFF + ], + 0xd884: [ + [0xdc00, 0xdf4a], // 0x31000..0x3134A + [0xdf50, 0xdfff], // 0x31350..0x313FF + ], + 0xd885: [ + [0xdc00, 0xdfff], // 0x31400..0x317FF + ], + 0xd886: [ + [0xdc00, 0xdfff], // 0x31800..0x31BFF + ], + 0xd887: [ + [0xdc00, 0xdfff], // 0x31C00..0x31FFF + ], + 0xd888: [ + [0xdc00, 0xdfaf], // 0x32000..0x323AF + ], +}; + +export const unicodeLm: UnicodeRangeTable = [ + [0x02b0, 0x02c1], + [0x02c6, 0x02d1], + [0x02e0, 0x02e4], + 0x02ec, + 0x02ee, + 0x0374, + 0x037a, + 0x0559, + 0x0640, + [0x06e5, 0x06e6], + [0x07f4, 0x07f5], + 0x07fa, + 0x081a, + 0x0824, + 0x0828, + 0x08c9, + 0x0971, + 0x0e46, + 0x0ec6, + 0x10fc, + 0x17d7, + 0x1843, + 0x1aa7, + [0x1c78, 0x1c7d], + [0x1d2c, 0x1d6a], + 0x1d78, + [0x1d9b, 0x1dbf], + 0x2071, + 0x207f, + [0x2090, 0x209c], + [0x2c7c, 0x2c7d], + 0x2d6f, + 0x2e2f, + 0x3005, + [0x3031, 0x3035], + 0x303b, + [0x309d, 0x309e], + [0x30fc, 0x30fe], + 0xa015, + [0xa4f8, 0xa4fd], + 0xa60c, + 0xa67f, + [0xa69c, 0xa69d], + [0xa717, 0xa71f], + 0xa770, + 0xa788, + [0xa7f2, 0xa7f4], + [0xa7f8, 0xa7f9], + 0xa9cf, + 0xa9e6, + 0xaa70, + 0xaadd, + [0xaaf3, 0xaaf4], + [0xab5c, 0xab5f], + 0xab69, + 0xff70, + [0xff9e, 0xff9f], + [0x10780, 0x10785], + [0x10787, 0x107b0], + [0x107b2, 0x107ba], + 0x10d4e, + 0x10d6f, + [0x16b40, 0x16b43], + [0x16d40, 0x16d42], + [0x16d6b, 0x16d6c], + [0x16f93, 0x16f9f], + [0x16fe0, 0x16fe1], + 0x16fe3, + [0x1aff0, 0x1aff3], + [0x1aff5, 0x1affb], + [0x1affd, 0x1affe], + [0x1e030, 0x1e06d], + [0x1e137, 0x1e13d], + 0x1e4eb, + 0x1e94b, +]; + +export const unicodeLmSurrogate: 
UnicodeSurrogateRangeTable = { + 0xd801: [ + [0xdf80, 0xdf85], // 0x10780..0x10785 + [0xdf87, 0xdfb0], // 0x10787..0x107B0 + [0xdfb2, 0xdfba], // 0x107B2..0x107BA + ], + 0xd803: [ + 0xdd4e, // 0x10D4E + 0xdd6f, // 0x10D6F + ], + 0xd81a: [ + [0xdf40, 0xdf43], // 0x16B40..0x16B43 + ], + 0xd81b: [ + [0xdd40, 0xdd42], // 0x16D40..0x16D42 + [0xdd6b, 0xdd6c], // 0x16D6B..0x16D6C + [0xdf93, 0xdf9f], // 0x16F93..0x16F9F + [0xdfe0, 0xdfe1], // 0x16FE0..0x16FE1 + 0xdfe3, // 0x16FE3 + ], + 0xd82b: [ + [0xdff0, 0xdff3], // 0x1AFF0..0x1AFF3 + [0xdff5, 0xdffb], // 0x1AFF5..0x1AFFB + [0xdffd, 0xdffe], // 0x1AFFD..0x1AFFE + ], + 0xd838: [ + [0xdc30, 0xdc6d], // 0x1E030..0x1E06D + [0xdd37, 0xdd3d], // 0x1E137..0x1E13D + ], + 0xd839: [ + 0xdceb, // 0x1E4EB + ], + 0xd83a: [ + 0xdd4b, // 0x1E94B + ], +}; + +export const unicodeNl: UnicodeRangeTable = [ + [0x16ee, 0x16f0], + [0x2160, 0x2182], + [0x2185, 0x2188], + 0x3007, + [0x3021, 0x3029], + [0x3038, 0x303a], + [0xa6e6, 0xa6ef], + [0x10140, 0x10174], + 0x10341, + 0x1034a, + [0x103d1, 0x103d5], + [0x12400, 0x1246e], +]; + +export const unicodeNlSurrogate: UnicodeSurrogateRangeTable = { + 0xd800: [ + [0xdd40, 0xdd74], // 0x10140..0x10174 + 0xdf41, // 0x10341 + 0xdf4a, // 0x1034A + [0xdfd1, 0xdfd5], // 0x103D1..0x103D5 + ], + 0xd809: [ + [0xdc00, 0xdc6e], // 0x12400..0x1246E + ], +}; + +export const unicodeMn: UnicodeRangeTable = [ + [0x0300, 0x036f], + [0x0483, 0x0487], + [0x0591, 0x05bd], + 0x05bf, + [0x05c1, 0x05c2], + [0x05c4, 0x05c5], + 0x05c7, + [0x0610, 0x061a], + [0x064b, 0x065f], + 0x0670, + [0x06d6, 0x06dc], + [0x06df, 0x06e4], + [0x06e7, 0x06e8], + [0x06ea, 0x06ed], + 0x0711, + [0x0730, 0x074a], + [0x07a6, 0x07b0], + [0x07eb, 0x07f3], + 0x07fd, + [0x0816, 0x0819], + [0x081b, 0x0823], + [0x0825, 0x0827], + [0x0829, 0x082d], + [0x0859, 0x085b], + [0x0897, 0x089f], + [0x08ca, 0x08e1], + [0x08e3, 0x0902], + 0x093a, + 0x093c, + [0x0941, 0x0948], + 0x094d, + [0x0951, 0x0957], + [0x0962, 0x0963], + 0x0981, + 0x09bc, + [0x09c1, 
0x09c4], + 0x09cd, + [0x09e2, 0x09e3], + 0x09fe, + [0x0a01, 0x0a02], + 0x0a3c, + [0x0a41, 0x0a42], + [0x0a47, 0x0a48], + [0x0a4b, 0x0a4d], + 0x0a51, + [0x0a70, 0x0a71], + 0x0a75, + [0x0a81, 0x0a82], + 0x0abc, + [0x0ac1, 0x0ac5], + [0x0ac7, 0x0ac8], + 0x0acd, + [0x0ae2, 0x0ae3], + [0x0afa, 0x0aff], + 0x0b01, + 0x0b3c, + 0x0b3f, + [0x0b41, 0x0b44], + 0x0b4d, + [0x0b55, 0x0b56], + [0x0b62, 0x0b63], + 0x0b82, + 0x0bc0, + 0x0bcd, + 0x0c00, + 0x0c04, + 0x0c3c, + [0x0c3e, 0x0c40], + [0x0c46, 0x0c48], + [0x0c4a, 0x0c4d], + [0x0c55, 0x0c56], + [0x0c62, 0x0c63], + 0x0c81, + 0x0cbc, + 0x0cbf, + 0x0cc6, + [0x0ccc, 0x0ccd], + [0x0ce2, 0x0ce3], + [0x0d00, 0x0d01], + [0x0d3b, 0x0d3c], + [0x0d41, 0x0d44], + 0x0d4d, + [0x0d62, 0x0d63], + 0x0d81, + 0x0dca, + [0x0dd2, 0x0dd4], + 0x0dd6, + 0x0e31, + [0x0e34, 0x0e3a], + [0x0e47, 0x0e4e], + 0x0eb1, + [0x0eb4, 0x0ebc], + [0x0ec8, 0x0ece], + [0x0f18, 0x0f19], + 0x0f35, + 0x0f37, + 0x0f39, + [0x0f71, 0x0f7e], + [0x0f80, 0x0f84], + [0x0f86, 0x0f87], + [0x0f8d, 0x0f97], + [0x0f99, 0x0fbc], + 0x0fc6, + [0x102d, 0x1030], + [0x1032, 0x1037], + [0x1039, 0x103a], + [0x103d, 0x103e], + [0x1058, 0x1059], + [0x105e, 0x1060], + [0x1071, 0x1074], + 0x1082, + [0x1085, 0x1086], + 0x108d, + 0x109d, + [0x135d, 0x135f], + [0x1712, 0x1714], + [0x1732, 0x1733], + [0x1752, 0x1753], + [0x1772, 0x1773], + [0x17b4, 0x17b5], + [0x17b7, 0x17bd], + 0x17c6, + [0x17c9, 0x17d3], + 0x17dd, + [0x180b, 0x180d], + 0x180f, + [0x1885, 0x1886], + 0x18a9, + [0x1920, 0x1922], + [0x1927, 0x1928], + 0x1932, + [0x1939, 0x193b], + [0x1a17, 0x1a18], + 0x1a1b, + 0x1a56, + [0x1a58, 0x1a5e], + 0x1a60, + 0x1a62, + [0x1a65, 0x1a6c], + [0x1a73, 0x1a7c], + 0x1a7f, + [0x1ab0, 0x1abd], + [0x1abf, 0x1ace], + [0x1b00, 0x1b03], + 0x1b34, + [0x1b36, 0x1b3a], + 0x1b3c, + 0x1b42, + [0x1b6b, 0x1b73], + [0x1b80, 0x1b81], + [0x1ba2, 0x1ba5], + [0x1ba8, 0x1ba9], + [0x1bab, 0x1bad], + 0x1be6, + [0x1be8, 0x1be9], + 0x1bed, + [0x1bef, 0x1bf1], + [0x1c2c, 0x1c33], + [0x1c36, 0x1c37], + [0x1cd0, 0x1cd2], 
+ [0x1cd4, 0x1ce0], + [0x1ce2, 0x1ce8], + 0x1ced, + 0x1cf4, + [0x1cf8, 0x1cf9], + [0x1dc0, 0x1dff], + [0x20d0, 0x20dc], + 0x20e1, + [0x20e5, 0x20f0], + [0x2cef, 0x2cf1], + 0x2d7f, + [0x2de0, 0x2dff], + [0x302a, 0x302d], + [0x3099, 0x309a], + 0xa66f, + [0xa674, 0xa67d], + [0xa69e, 0xa69f], + [0xa6f0, 0xa6f1], + 0xa802, + 0xa806, + 0xa80b, + [0xa825, 0xa826], + 0xa82c, + [0xa8c4, 0xa8c5], + [0xa8e0, 0xa8f1], + 0xa8ff, + [0xa926, 0xa92d], + [0xa947, 0xa951], + [0xa980, 0xa982], + 0xa9b3, + [0xa9b6, 0xa9b9], + [0xa9bc, 0xa9bd], + 0xa9e5, + [0xaa29, 0xaa2e], + [0xaa31, 0xaa32], + [0xaa35, 0xaa36], + 0xaa43, + 0xaa4c, + 0xaa7c, + 0xaab0, + [0xaab2, 0xaab4], + [0xaab7, 0xaab8], + [0xaabe, 0xaabf], + 0xaac1, + [0xaaec, 0xaaed], + 0xaaf6, + 0xabe5, + 0xabe8, + 0xabed, + 0xfb1e, + [0xfe00, 0xfe0f], + [0xfe20, 0xfe2f], + 0x101fd, + 0x102e0, + [0x10376, 0x1037a], + [0x10a01, 0x10a03], + [0x10a05, 0x10a06], + [0x10a0c, 0x10a0f], + [0x10a38, 0x10a3a], + 0x10a3f, + [0x10ae5, 0x10ae6], + [0x10d24, 0x10d27], + [0x10d69, 0x10d6d], + [0x10eab, 0x10eac], + [0x10efc, 0x10eff], + [0x10f46, 0x10f50], + [0x10f82, 0x10f85], + 0x11001, + [0x11038, 0x11046], + 0x11070, + [0x11073, 0x11074], + [0x1107f, 0x11081], + [0x110b3, 0x110b6], + [0x110b9, 0x110ba], + 0x110c2, + [0x11100, 0x11102], + [0x11127, 0x1112b], + [0x1112d, 0x11134], + 0x11173, + [0x11180, 0x11181], + [0x111b6, 0x111be], + [0x111c9, 0x111cc], + 0x111cf, + [0x1122f, 0x11231], + 0x11234, + [0x11236, 0x11237], + 0x1123e, + 0x11241, + 0x112df, + [0x112e3, 0x112ea], + [0x11300, 0x11301], + [0x1133b, 0x1133c], + 0x11340, + [0x11366, 0x1136c], + [0x11370, 0x11374], + [0x113bb, 0x113c0], + 0x113ce, + 0x113d0, + 0x113d2, + [0x113e1, 0x113e2], + [0x11438, 0x1143f], + [0x11442, 0x11444], + 0x11446, + 0x1145e, + [0x114b3, 0x114b8], + 0x114ba, + [0x114bf, 0x114c0], + [0x114c2, 0x114c3], + [0x115b2, 0x115b5], + [0x115bc, 0x115bd], + [0x115bf, 0x115c0], + [0x115dc, 0x115dd], + [0x11633, 0x1163a], + 0x1163d, + [0x1163f, 0x11640], + 0x116ab, + 
0x116ad, + [0x116b0, 0x116b5], + 0x116b7, + 0x1171d, + 0x1171f, + [0x11722, 0x11725], + [0x11727, 0x1172b], + [0x1182f, 0x11837], + [0x11839, 0x1183a], + [0x1193b, 0x1193c], + 0x1193e, + 0x11943, + [0x119d4, 0x119d7], + [0x119da, 0x119db], + 0x119e0, + [0x11a01, 0x11a0a], + [0x11a33, 0x11a38], + [0x11a3b, 0x11a3e], + 0x11a47, + [0x11a51, 0x11a56], + [0x11a59, 0x11a5b], + [0x11a8a, 0x11a96], + [0x11a98, 0x11a99], + [0x11c30, 0x11c36], + [0x11c38, 0x11c3d], + 0x11c3f, + [0x11c92, 0x11ca7], + [0x11caa, 0x11cb0], + [0x11cb2, 0x11cb3], + [0x11cb5, 0x11cb6], + [0x11d31, 0x11d36], + 0x11d3a, + [0x11d3c, 0x11d3d], + [0x11d3f, 0x11d45], + 0x11d47, + [0x11d90, 0x11d91], + 0x11d95, + 0x11d97, + [0x11ef3, 0x11ef4], + [0x11f00, 0x11f01], + [0x11f36, 0x11f3a], + 0x11f40, + 0x11f42, + 0x11f5a, + 0x13440, + [0x13447, 0x13455], + [0x1611e, 0x16129], + [0x1612d, 0x1612f], + [0x16af0, 0x16af4], + [0x16b30, 0x16b36], + 0x16f4f, + [0x16f8f, 0x16f92], + 0x16fe4, + [0x1bc9d, 0x1bc9e], + [0x1cf00, 0x1cf2d], + [0x1cf30, 0x1cf46], + [0x1d167, 0x1d169], + [0x1d17b, 0x1d182], + [0x1d185, 0x1d18b], + [0x1d1aa, 0x1d1ad], + [0x1d242, 0x1d244], + [0x1da00, 0x1da36], + [0x1da3b, 0x1da6c], + 0x1da75, + 0x1da84, + [0x1da9b, 0x1da9f], + [0x1daa1, 0x1daaf], + [0x1e000, 0x1e006], + [0x1e008, 0x1e018], + [0x1e01b, 0x1e021], + [0x1e023, 0x1e024], + [0x1e026, 0x1e02a], + 0x1e08f, + [0x1e130, 0x1e136], + 0x1e2ae, + [0x1e2ec, 0x1e2ef], + [0x1e4ec, 0x1e4ef], + [0x1e5ee, 0x1e5ef], + [0x1e8d0, 0x1e8d6], + [0x1e944, 0x1e94a], + [0xe0100, 0xe01ef], +]; + +export const unicodeMnSurrogate: UnicodeSurrogateRangeTable = { + 0xd800: [ + 0xddfd, // 0x101FD + 0xdee0, // 0x102E0 + [0xdf76, 0xdf7a], // 0x10376..0x1037A + ], + 0xd802: [ + [0xde01, 0xde03], // 0x10A01..0x10A03 + [0xde05, 0xde06], // 0x10A05..0x10A06 + [0xde0c, 0xde0f], // 0x10A0C..0x10A0F + [0xde38, 0xde3a], // 0x10A38..0x10A3A + 0xde3f, // 0x10A3F + [0xdee5, 0xdee6], // 0x10AE5..0x10AE6 + ], + 0xd803: [ + [0xdd24, 0xdd27], // 0x10D24..0x10D27 + [0xdd69, 
0xdd6d], // 0x10D69..0x10D6D + [0xdeab, 0xdeac], // 0x10EAB..0x10EAC + [0xdefc, 0xdeff], // 0x10EFC..0x10EFF + [0xdf46, 0xdf50], // 0x10F46..0x10F50 + [0xdf82, 0xdf85], // 0x10F82..0x10F85 + ], + 0xd804: [ + 0xdc01, // 0x11001 + [0xdc38, 0xdc46], // 0x11038..0x11046 + 0xdc70, // 0x11070 + [0xdc73, 0xdc74], // 0x11073..0x11074 + [0xdc7f, 0xdc81], // 0x1107F..0x11081 + [0xdcb3, 0xdcb6], // 0x110B3..0x110B6 + [0xdcb9, 0xdcba], // 0x110B9..0x110BA + 0xdcc2, // 0x110C2 + [0xdd00, 0xdd02], // 0x11100..0x11102 + [0xdd27, 0xdd2b], // 0x11127..0x1112B + [0xdd2d, 0xdd34], // 0x1112D..0x11134 + 0xdd73, // 0x11173 + [0xdd80, 0xdd81], // 0x11180..0x11181 + [0xddb6, 0xddbe], // 0x111B6..0x111BE + [0xddc9, 0xddcc], // 0x111C9..0x111CC + 0xddcf, // 0x111CF + [0xde2f, 0xde31], // 0x1122F..0x11231 + 0xde34, // 0x11234 + [0xde36, 0xde37], // 0x11236..0x11237 + 0xde3e, // 0x1123E + 0xde41, // 0x11241 + 0xdedf, // 0x112DF + [0xdee3, 0xdeea], // 0x112E3..0x112EA + [0xdf00, 0xdf01], // 0x11300..0x11301 + [0xdf3b, 0xdf3c], // 0x1133B..0x1133C + 0xdf40, // 0x11340 + [0xdf66, 0xdf6c], // 0x11366..0x1136C + [0xdf70, 0xdf74], // 0x11370..0x11374 + [0xdfbb, 0xdfc0], // 0x113BB..0x113C0 + 0xdfce, // 0x113CE + 0xdfd0, // 0x113D0 + 0xdfd2, // 0x113D2 + [0xdfe1, 0xdfe2], // 0x113E1..0x113E2 + ], + 0xd805: [ + [0xdc38, 0xdc3f], // 0x11438..0x1143F + [0xdc42, 0xdc44], // 0x11442..0x11444 + 0xdc46, // 0x11446 + 0xdc5e, // 0x1145E + [0xdcb3, 0xdcb8], // 0x114B3..0x114B8 + 0xdcba, // 0x114BA + [0xdcbf, 0xdcc0], // 0x114BF..0x114C0 + [0xdcc2, 0xdcc3], // 0x114C2..0x114C3 + [0xddb2, 0xddb5], // 0x115B2..0x115B5 + [0xddbc, 0xddbd], // 0x115BC..0x115BD + [0xddbf, 0xddc0], // 0x115BF..0x115C0 + [0xdddc, 0xdddd], // 0x115DC..0x115DD + [0xde33, 0xde3a], // 0x11633..0x1163A + 0xde3d, // 0x1163D + [0xde3f, 0xde40], // 0x1163F..0x11640 + 0xdeab, // 0x116AB + 0xdead, // 0x116AD + [0xdeb0, 0xdeb5], // 0x116B0..0x116B5 + 0xdeb7, // 0x116B7 + 0xdf1d, // 0x1171D + 0xdf1f, // 0x1171F + [0xdf22, 0xdf25], // 
0x11722..0x11725 + [0xdf27, 0xdf2b], // 0x11727..0x1172B + ], + 0xd806: [ + [0xdc2f, 0xdc37], // 0x1182F..0x11837 + [0xdc39, 0xdc3a], // 0x11839..0x1183A + [0xdd3b, 0xdd3c], // 0x1193B..0x1193C + 0xdd3e, // 0x1193E + 0xdd43, // 0x11943 + [0xddd4, 0xddd7], // 0x119D4..0x119D7 + [0xddda, 0xdddb], // 0x119DA..0x119DB + 0xdde0, // 0x119E0 + [0xde01, 0xde0a], // 0x11A01..0x11A0A + [0xde33, 0xde38], // 0x11A33..0x11A38 + [0xde3b, 0xde3e], // 0x11A3B..0x11A3E + 0xde47, // 0x11A47 + [0xde51, 0xde56], // 0x11A51..0x11A56 + [0xde59, 0xde5b], // 0x11A59..0x11A5B + [0xde8a, 0xde96], // 0x11A8A..0x11A96 + [0xde98, 0xde99], // 0x11A98..0x11A99 + ], + 0xd807: [ + [0xdc30, 0xdc36], // 0x11C30..0x11C36 + [0xdc38, 0xdc3d], // 0x11C38..0x11C3D + 0xdc3f, // 0x11C3F + [0xdc92, 0xdca7], // 0x11C92..0x11CA7 + [0xdcaa, 0xdcb0], // 0x11CAA..0x11CB0 + [0xdcb2, 0xdcb3], // 0x11CB2..0x11CB3 + [0xdcb5, 0xdcb6], // 0x11CB5..0x11CB6 + [0xdd31, 0xdd36], // 0x11D31..0x11D36 + 0xdd3a, // 0x11D3A + [0xdd3c, 0xdd3d], // 0x11D3C..0x11D3D + [0xdd3f, 0xdd45], // 0x11D3F..0x11D45 + 0xdd47, // 0x11D47 + [0xdd90, 0xdd91], // 0x11D90..0x11D91 + 0xdd95, // 0x11D95 + 0xdd97, // 0x11D97 + [0xdef3, 0xdef4], // 0x11EF3..0x11EF4 + [0xdf00, 0xdf01], // 0x11F00..0x11F01 + [0xdf36, 0xdf3a], // 0x11F36..0x11F3A + 0xdf40, // 0x11F40 + 0xdf42, // 0x11F42 + 0xdf5a, // 0x11F5A + ], + 0xd80d: [ + 0xdc40, // 0x13440 + [0xdc47, 0xdc55], // 0x13447..0x13455 + ], + 0xd818: [ + [0xdd1e, 0xdd29], // 0x1611E..0x16129 + [0xdd2d, 0xdd2f], // 0x1612D..0x1612F + ], + 0xd81a: [ + [0xdef0, 0xdef4], // 0x16AF0..0x16AF4 + [0xdf30, 0xdf36], // 0x16B30..0x16B36 + ], + 0xd81b: [ + 0xdf4f, // 0x16F4F + [0xdf8f, 0xdf92], // 0x16F8F..0x16F92 + 0xdfe4, // 0x16FE4 + ], + 0xd82f: [ + [0xdc9d, 0xdc9e], // 0x1BC9D..0x1BC9E + ], + 0xd833: [ + [0xdf00, 0xdf2d], // 0x1CF00..0x1CF2D + [0xdf30, 0xdf46], // 0x1CF30..0x1CF46 + ], + 0xd834: [ + [0xdd67, 0xdd69], // 0x1D167..0x1D169 + [0xdd7b, 0xdd82], // 0x1D17B..0x1D182 + [0xdd85, 0xdd8b], // 
0x1D185..0x1D18B + [0xddaa, 0xddad], // 0x1D1AA..0x1D1AD + [0xde42, 0xde44], // 0x1D242..0x1D244 + ], + 0xd836: [ + [0xde00, 0xde36], // 0x1DA00..0x1DA36 + [0xde3b, 0xde6c], // 0x1DA3B..0x1DA6C + 0xde75, // 0x1DA75 + 0xde84, // 0x1DA84 + [0xde9b, 0xde9f], // 0x1DA9B..0x1DA9F + [0xdea1, 0xdeaf], // 0x1DAA1..0x1DAAF + ], + 0xd838: [ + [0xdc00, 0xdc06], // 0x1E000..0x1E006 + [0xdc08, 0xdc18], // 0x1E008..0x1E018 + [0xdc1b, 0xdc21], // 0x1E01B..0x1E021 + [0xdc23, 0xdc24], // 0x1E023..0x1E024 + [0xdc26, 0xdc2a], // 0x1E026..0x1E02A + 0xdc8f, // 0x1E08F + [0xdd30, 0xdd36], // 0x1E130..0x1E136 + 0xdeae, // 0x1E2AE + [0xdeec, 0xdeef], // 0x1E2EC..0x1E2EF + ], + 0xd839: [ + [0xdcec, 0xdcef], // 0x1E4EC..0x1E4EF + [0xddee, 0xddef], // 0x1E5EE..0x1E5EF + ], + 0xd83a: [ + [0xdcd0, 0xdcd6], // 0x1E8D0..0x1E8D6 + [0xdd44, 0xdd4a], // 0x1E944..0x1E94A + ], + 0xdb40: [ + [0xdd00, 0xddef], // 0xE0100..0xE01EF + ], +}; + +export const unicodeMc: UnicodeRangeTable = [ + 0x0903, + 0x093b, + [0x093e, 0x0940], + [0x0949, 0x094c], + [0x094e, 0x094f], + [0x0982, 0x0983], + [0x09be, 0x09c0], + [0x09c7, 0x09c8], + [0x09cb, 0x09cc], + 0x09d7, + 0x0a03, + [0x0a3e, 0x0a40], + 0x0a83, + [0x0abe, 0x0ac0], + 0x0ac9, + [0x0acb, 0x0acc], + [0x0b02, 0x0b03], + 0x0b3e, + 0x0b40, + [0x0b47, 0x0b48], + [0x0b4b, 0x0b4c], + 0x0b57, + [0x0bbe, 0x0bbf], + [0x0bc1, 0x0bc2], + [0x0bc6, 0x0bc8], + [0x0bca, 0x0bcc], + 0x0bd7, + [0x0c01, 0x0c03], + [0x0c41, 0x0c44], + [0x0c82, 0x0c83], + 0x0cbe, + [0x0cc0, 0x0cc4], + [0x0cc7, 0x0cc8], + [0x0cca, 0x0ccb], + [0x0cd5, 0x0cd6], + 0x0cf3, + [0x0d02, 0x0d03], + [0x0d3e, 0x0d40], + [0x0d46, 0x0d48], + [0x0d4a, 0x0d4c], + 0x0d57, + [0x0d82, 0x0d83], + [0x0dcf, 0x0dd1], + [0x0dd8, 0x0ddf], + [0x0df2, 0x0df3], + [0x0f3e, 0x0f3f], + 0x0f7f, + [0x102b, 0x102c], + 0x1031, + 0x1038, + [0x103b, 0x103c], + [0x1056, 0x1057], + [0x1062, 0x1064], + [0x1067, 0x106d], + [0x1083, 0x1084], + [0x1087, 0x108c], + 0x108f, + [0x109a, 0x109c], + 0x1715, + 0x1734, + 0x17b6, + [0x17be, 
0x17c5], + [0x17c7, 0x17c8], + [0x1923, 0x1926], + [0x1929, 0x192b], + [0x1930, 0x1931], + [0x1933, 0x1938], + [0x1a19, 0x1a1a], + 0x1a55, + 0x1a57, + 0x1a61, + [0x1a63, 0x1a64], + [0x1a6d, 0x1a72], + 0x1b04, + 0x1b35, + 0x1b3b, + [0x1b3d, 0x1b41], + [0x1b43, 0x1b44], + 0x1b82, + 0x1ba1, + [0x1ba6, 0x1ba7], + 0x1baa, + 0x1be7, + [0x1bea, 0x1bec], + 0x1bee, + [0x1bf2, 0x1bf3], + [0x1c24, 0x1c2b], + [0x1c34, 0x1c35], + 0x1ce1, + 0x1cf7, + [0x302e, 0x302f], + [0xa823, 0xa824], + 0xa827, + [0xa880, 0xa881], + [0xa8b4, 0xa8c3], + [0xa952, 0xa953], + 0xa983, + [0xa9b4, 0xa9b5], + [0xa9ba, 0xa9bb], + [0xa9be, 0xa9c0], + [0xaa2f, 0xaa30], + [0xaa33, 0xaa34], + 0xaa4d, + 0xaa7b, + 0xaa7d, + 0xaaeb, + [0xaaee, 0xaaef], + 0xaaf5, + [0xabe3, 0xabe4], + [0xabe6, 0xabe7], + [0xabe9, 0xabea], + 0xabec, + 0x11000, + 0x11002, + 0x11082, + [0x110b0, 0x110b2], + [0x110b7, 0x110b8], + 0x1112c, + [0x11145, 0x11146], + 0x11182, + [0x111b3, 0x111b5], + [0x111bf, 0x111c0], + 0x111ce, + [0x1122c, 0x1122e], + [0x11232, 0x11233], + 0x11235, + [0x112e0, 0x112e2], + [0x11302, 0x11303], + [0x1133e, 0x1133f], + [0x11341, 0x11344], + [0x11347, 0x11348], + [0x1134b, 0x1134d], + 0x11357, + [0x11362, 0x11363], + [0x113b8, 0x113ba], + 0x113c2, + 0x113c5, + [0x113c7, 0x113ca], + [0x113cc, 0x113cd], + 0x113cf, + [0x11435, 0x11437], + [0x11440, 0x11441], + 0x11445, + [0x114b0, 0x114b2], + 0x114b9, + [0x114bb, 0x114be], + 0x114c1, + [0x115af, 0x115b1], + [0x115b8, 0x115bb], + 0x115be, + [0x11630, 0x11632], + [0x1163b, 0x1163c], + 0x1163e, + 0x116ac, + [0x116ae, 0x116af], + 0x116b6, + 0x1171e, + [0x11720, 0x11721], + 0x11726, + [0x1182c, 0x1182e], + 0x11838, + [0x11930, 0x11935], + [0x11937, 0x11938], + 0x1193d, + 0x11940, + 0x11942, + [0x119d1, 0x119d3], + [0x119dc, 0x119df], + 0x119e4, + 0x11a39, + [0x11a57, 0x11a58], + 0x11a97, + 0x11c2f, + 0x11c3e, + 0x11ca9, + 0x11cb1, + 0x11cb4, + [0x11d8a, 0x11d8e], + [0x11d93, 0x11d94], + 0x11d96, + [0x11ef5, 0x11ef6], + 0x11f03, + [0x11f34, 0x11f35], + [0x11f3e, 
0x11f3f], + 0x11f41, + [0x1612a, 0x1612c], + [0x16f51, 0x16f87], + [0x16ff0, 0x16ff1], + [0x1d165, 0x1d166], + [0x1d16d, 0x1d172], +]; + +export const unicodeMcSurrogate: UnicodeSurrogateRangeTable = { + 0xd804: [ + 0xdc00, // 0x11000 + 0xdc02, // 0x11002 + 0xdc82, // 0x11082 + [0xdcb0, 0xdcb2], // 0x110B0..0x110B2 + [0xdcb7, 0xdcb8], // 0x110B7..0x110B8 + 0xdd2c, // 0x1112C + [0xdd45, 0xdd46], // 0x11145..0x11146 + 0xdd82, // 0x11182 + [0xddb3, 0xddb5], // 0x111B3..0x111B5 + [0xddbf, 0xddc0], // 0x111BF..0x111C0 + 0xddce, // 0x111CE + [0xde2c, 0xde2e], // 0x1122C..0x1122E + [0xde32, 0xde33], // 0x11232..0x11233 + 0xde35, // 0x11235 + [0xdee0, 0xdee2], // 0x112E0..0x112E2 + [0xdf02, 0xdf03], // 0x11302..0x11303 + [0xdf3e, 0xdf3f], // 0x1133E..0x1133F + [0xdf41, 0xdf44], // 0x11341..0x11344 + [0xdf47, 0xdf48], // 0x11347..0x11348 + [0xdf4b, 0xdf4d], // 0x1134B..0x1134D + 0xdf57, // 0x11357 + [0xdf62, 0xdf63], // 0x11362..0x11363 + [0xdfb8, 0xdfba], // 0x113B8..0x113BA + 0xdfc2, // 0x113C2 + 0xdfc5, // 0x113C5 + [0xdfc7, 0xdfca], // 0x113C7..0x113CA + [0xdfcc, 0xdfcd], // 0x113CC..0x113CD + 0xdfcf, // 0x113CF + ], + 0xd805: [ + [0xdc35, 0xdc37], // 0x11435..0x11437 + [0xdc40, 0xdc41], // 0x11440..0x11441 + 0xdc45, // 0x11445 + [0xdcb0, 0xdcb2], // 0x114B0..0x114B2 + 0xdcb9, // 0x114B9 + [0xdcbb, 0xdcbe], // 0x114BB..0x114BE + 0xdcc1, // 0x114C1 + [0xddaf, 0xddb1], // 0x115AF..0x115B1 + [0xddb8, 0xddbb], // 0x115B8..0x115BB + 0xddbe, // 0x115BE + [0xde30, 0xde32], // 0x11630..0x11632 + [0xde3b, 0xde3c], // 0x1163B..0x1163C + 0xde3e, // 0x1163E + 0xdeac, // 0x116AC + [0xdeae, 0xdeaf], // 0x116AE..0x116AF + 0xdeb6, // 0x116B6 + 0xdf1e, // 0x1171E + [0xdf20, 0xdf21], // 0x11720..0x11721 + 0xdf26, // 0x11726 + ], + 0xd806: [ + [0xdc2c, 0xdc2e], // 0x1182C..0x1182E + 0xdc38, // 0x11838 + [0xdd30, 0xdd35], // 0x11930..0x11935 + [0xdd37, 0xdd38], // 0x11937..0x11938 + 0xdd3d, // 0x1193D + 0xdd40, // 0x11940 + 0xdd42, // 0x11942 + [0xddd1, 0xddd3], // 0x119D1..0x119D3 + 
[0xdddc, 0xdddf], // 0x119DC..0x119DF + 0xdde4, // 0x119E4 + 0xde39, // 0x11A39 + [0xde57, 0xde58], // 0x11A57..0x11A58 + 0xde97, // 0x11A97 + ], + 0xd807: [ + 0xdc2f, // 0x11C2F + 0xdc3e, // 0x11C3E + 0xdca9, // 0x11CA9 + 0xdcb1, // 0x11CB1 + 0xdcb4, // 0x11CB4 + [0xdd8a, 0xdd8e], // 0x11D8A..0x11D8E + [0xdd93, 0xdd94], // 0x11D93..0x11D94 + 0xdd96, // 0x11D96 + [0xdef5, 0xdef6], // 0x11EF5..0x11EF6 + 0xdf03, // 0x11F03 + [0xdf34, 0xdf35], // 0x11F34..0x11F35 + [0xdf3e, 0xdf3f], // 0x11F3E..0x11F3F + 0xdf41, // 0x11F41 + ], + 0xd818: [ + [0xdd2a, 0xdd2c], // 0x1612A..0x1612C + ], + 0xd81b: [ + [0xdf51, 0xdf87], // 0x16F51..0x16F87 + [0xdff0, 0xdff1], // 0x16FF0..0x16FF1 + ], + 0xd834: [ + [0xdd65, 0xdd66], // 0x1D165..0x1D166 + [0xdd6d, 0xdd72], // 0x1D16D..0x1D172 + ], +}; + +export const unicodeNd: UnicodeRangeTable = [ + [0x0030, 0x0039], + [0x0660, 0x0669], + [0x06f0, 0x06f9], + [0x07c0, 0x07c9], + [0x0966, 0x096f], + [0x09e6, 0x09ef], + [0x0a66, 0x0a6f], + [0x0ae6, 0x0aef], + [0x0b66, 0x0b6f], + [0x0be6, 0x0bef], + [0x0c66, 0x0c6f], + [0x0ce6, 0x0cef], + [0x0d66, 0x0d6f], + [0x0de6, 0x0def], + [0x0e50, 0x0e59], + [0x0ed0, 0x0ed9], + [0x0f20, 0x0f29], + [0x1040, 0x1049], + [0x1090, 0x1099], + [0x17e0, 0x17e9], + [0x1810, 0x1819], + [0x1946, 0x194f], + [0x19d0, 0x19d9], + [0x1a80, 0x1a89], + [0x1a90, 0x1a99], + [0x1b50, 0x1b59], + [0x1bb0, 0x1bb9], + [0x1c40, 0x1c49], + [0x1c50, 0x1c59], + [0xa620, 0xa629], + [0xa8d0, 0xa8d9], + [0xa900, 0xa909], + [0xa9d0, 0xa9d9], + [0xa9f0, 0xa9f9], + [0xaa50, 0xaa59], + [0xabf0, 0xabf9], + [0xff10, 0xff19], + [0x104a0, 0x104a9], + [0x10d30, 0x10d39], + [0x10d40, 0x10d49], + [0x11066, 0x1106f], + [0x110f0, 0x110f9], + [0x11136, 0x1113f], + [0x111d0, 0x111d9], + [0x112f0, 0x112f9], + [0x11450, 0x11459], + [0x114d0, 0x114d9], + [0x11650, 0x11659], + [0x116c0, 0x116c9], + [0x116d0, 0x116e3], + [0x11730, 0x11739], + [0x118e0, 0x118e9], + [0x11950, 0x11959], + [0x11bf0, 0x11bf9], + [0x11c50, 0x11c59], + [0x11d50, 0x11d59], + 
[0x11da0, 0x11da9], + [0x11f50, 0x11f59], + [0x16130, 0x16139], + [0x16a60, 0x16a69], + [0x16ac0, 0x16ac9], + [0x16b50, 0x16b59], + [0x16d70, 0x16d79], + [0x1ccf0, 0x1ccf9], + [0x1d7ce, 0x1d7ff], + [0x1e140, 0x1e149], + [0x1e2f0, 0x1e2f9], + [0x1e4f0, 0x1e4f9], + [0x1e5f1, 0x1e5fa], + [0x1e950, 0x1e959], + [0x1fbf0, 0x1fbf9], +]; + +export const unicodeNdSurrogate: UnicodeSurrogateRangeTable = { + 0xd801: [ + [0xdca0, 0xdca9], // 0x104A0..0x104A9 + ], + 0xd803: [ + [0xdd30, 0xdd39], // 0x10D30..0x10D39 + [0xdd40, 0xdd49], // 0x10D40..0x10D49 + ], + 0xd804: [ + [0xdc66, 0xdc6f], // 0x11066..0x1106F + [0xdcf0, 0xdcf9], // 0x110F0..0x110F9 + [0xdd36, 0xdd3f], // 0x11136..0x1113F + [0xddd0, 0xddd9], // 0x111D0..0x111D9 + [0xdef0, 0xdef9], // 0x112F0..0x112F9 + ], + 0xd805: [ + [0xdc50, 0xdc59], // 0x11450..0x11459 + [0xdcd0, 0xdcd9], // 0x114D0..0x114D9 + [0xde50, 0xde59], // 0x11650..0x11659 + [0xdec0, 0xdec9], // 0x116C0..0x116C9 + [0xded0, 0xdee3], // 0x116D0..0x116E3 + [0xdf30, 0xdf39], // 0x11730..0x11739 + ], + 0xd806: [ + [0xdce0, 0xdce9], // 0x118E0..0x118E9 + [0xdd50, 0xdd59], // 0x11950..0x11959 + [0xdff0, 0xdff9], // 0x11BF0..0x11BF9 + ], + 0xd807: [ + [0xdc50, 0xdc59], // 0x11C50..0x11C59 + [0xdd50, 0xdd59], // 0x11D50..0x11D59 + [0xdda0, 0xdda9], // 0x11DA0..0x11DA9 + [0xdf50, 0xdf59], // 0x11F50..0x11F59 + ], + 0xd818: [ + [0xdd30, 0xdd39], // 0x16130..0x16139 + ], + 0xd81a: [ + [0xde60, 0xde69], // 0x16A60..0x16A69 + [0xdec0, 0xdec9], // 0x16AC0..0x16AC9 + [0xdf50, 0xdf59], // 0x16B50..0x16B59 + ], + 0xd81b: [ + [0xdd70, 0xdd79], // 0x16D70..0x16D79 + ], + 0xd833: [ + [0xdcf0, 0xdcf9], // 0x1CCF0..0x1CCF9 + ], + 0xd835: [ + [0xdfce, 0xdfff], // 0x1D7CE..0x1D7FF + ], + 0xd838: [ + [0xdd40, 0xdd49], // 0x1E140..0x1E149 + [0xdef0, 0xdef9], // 0x1E2F0..0x1E2F9 + ], + 0xd839: [ + [0xdcf0, 0xdcf9], // 0x1E4F0..0x1E4F9 + [0xddf1, 0xddfa], // 0x1E5F1..0x1E5FA + ], + 0xd83a: [ + [0xdd50, 0xdd59], // 0x1E950..0x1E959 + ], + 0xd83e: [ + [0xdff0, 0xdff9], // 
0x1FBF0..0x1FBF9 + ], +}; + +export const unicodePc: UnicodeRangeTable = [ + 0x005f, + [0x203f, 0x2040], + 0x2054, + [0xfe33, 0xfe34], + [0xfe4d, 0xfe4f], + 0xff3f, +]; diff --git a/python-parser/packages/pyright-internal/src/partialStubService.ts b/python-parser/packages/pyright-internal/src/partialStubService.ts new file mode 100644 index 00000000..53524f83 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/partialStubService.ts @@ -0,0 +1,154 @@ +/* + * partialStubService.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * A service that maps partial stub packages into the original directory of the installed library. + */ + +import type * as fs from 'fs'; + +import { Disposable } from 'vscode-jsonrpc'; +import { getPyTypedInfo, PyTypedInfo } from './analyzer/pyTypedUtils'; +import { ExecutionEnvironment } from './common/configOptions'; +import { FileSystem } from './common/fileSystem'; +import { stubsSuffix } from './common/pathConsts'; +import { Uri } from './common/uri/uri'; +import { isDirectory, tryStat } from './common/uri/uriUtils'; + +export interface SupportPartialStubs { + isPartialStubPackagesScanned(execEnv: ExecutionEnvironment): boolean; + isPathScanned(path: Uri): boolean; + processPartialStubPackages(paths: Uri[], roots: Uri[], bundledStubPath?: Uri): void; + clearPartialStubs(): void; +} + +export namespace SupportPartialStubs { + export function is(value: any): value is SupportPartialStubs { + return ( + value.isPartialStubPackagesScanned && + value.isPathScanned && + value.processPartialStubPackages && + value.clearPartialStubs + ); + } +} + +export class PartialStubService implements SupportPartialStubs { + // Root paths processed + private readonly _rootSearched = new Set(); + + // Partial stub package paths processed + private readonly _partialStubPackagePaths = new Set(); + + // Disposables to cleanup moved directories + private _movedDirectories: Disposable[] = []; + + constructor(private 
_realFs: FileSystem) {} + + isPartialStubPackagesScanned(execEnv: ExecutionEnvironment): boolean { + return execEnv.root ? this.isPathScanned(execEnv.root) : false; + } + + isPathScanned(uri: Uri): boolean { + return this._rootSearched.has(uri.key); + } + + processPartialStubPackages( + paths: Uri[], + roots: Uri[], + bundledStubPath?: Uri, + allowMoving?: ( + isBundled: boolean, + packagePyTyped: PyTypedInfo | undefined, + _stubPyTyped: PyTypedInfo + ) => boolean + ): void { + const allowMovingFn = allowMoving ?? this._allowMoving.bind(this); + for (const path of paths) { + this._rootSearched.add(path.key); + + if (!this._realFs.existsSync(path) || !isDirectory(this._realFs, path)) { + continue; + } + + let dirEntries: fs.Dirent[] = []; + + try { + dirEntries = this._realFs.readdirEntriesSync(path); + } catch { + // Leave empty set of dir entries to process. + } + + const isBundledStub = path.equals(bundledStubPath); + for (const entry of dirEntries) { + const partialStubPackagePath = path.combinePaths(entry.name); + const isDirectory = !entry.isSymbolicLink() + ? entry.isDirectory() + : !!tryStat(this._realFs, partialStubPackagePath)?.isDirectory(); + + if (!isDirectory || !entry.name.endsWith(stubsSuffix)) { + continue; + } + + const pyTypedInfo = getPyTypedInfo(this._realFs, partialStubPackagePath); + if (!pyTypedInfo || !pyTypedInfo.isPartiallyTyped) { + // Stub-Package is fully typed. + continue; + } + + // We found partially typed stub-packages. + this._partialStubPackagePaths.add(partialStubPackagePath.key); + + // Search the root to see whether we have matching package installed. 
+ const packageName = entry.name.substr(0, entry.name.length - stubsSuffix.length); + for (const root of roots) { + const packagePath = root.combinePaths(packageName); + try { + const stat = tryStat(this._realFs, packagePath); + if (!stat?.isDirectory()) { + continue; + } + + // If partial stub we found is from bundled stub and library installed is marked as py.typed + // ignore bundled partial stub. + if (!allowMovingFn(isBundledStub, getPyTypedInfo(this._realFs, packagePath), pyTypedInfo)) { + continue; + } + // Merge partial stub packages to the library. + this._movedDirectories.push( + this._realFs.mapDirectory( + packagePath, + partialStubPackagePath, + (u, fs) => u.hasExtension('.pyi') || (fs.existsSync(u) && fs.statSync(u).isDirectory()) + ) + ); + } catch { + // ignore + } + } + } + } + } + + clearPartialStubs(): void { + this._rootSearched.clear(); + this._partialStubPackagePaths.clear(); + this._movedDirectories.forEach((d) => d.dispose()); + this._movedDirectories = []; + } + + private _allowMoving( + isBundled: boolean, + packagePyTyped: PyTypedInfo | undefined, + _stubPyTyped: PyTypedInfo + ): boolean { + if (!isBundled) { + return true; + } + + // If partial stub we found is from bundled stub and library installed is marked as py.typed + // allow moving only if the package is marked as partially typed. + return !packagePyTyped || packagePyTyped.isPartiallyTyped; + } +} diff --git a/python-parser/packages/pyright-internal/src/pprof/profiler.ts b/python-parser/packages/pyright-internal/src/pprof/profiler.ts new file mode 100644 index 00000000..941b547b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/pprof/profiler.ts @@ -0,0 +1,63 @@ +/** + * Functions used for running pyright with pprof. 
+ * + * Steps taken to get this to work: + * - Install VC++ Desktop C++ workload with at least one Windows SDK + - Git clone DataDog/pprof-nodejs: pprof support for Node.js (github.com) + ○ Going to use this to generate an electron.node file for loading the profiler + - Switch to packages\vscode-pylance + - Npm install --save-dev node-abi@latest + ○ this is so electron-rebuild can find the right ABI + - Npm install --save-dev @electron/rebuild + - Electron rebuild the git cloned datadog/pprof-nodejs based on the version in VS code + ○ .\node_modules\.bin\electron-rebuild -v -m + - Npm install --save-dev @datadog/pprof + - Copy the build output from the electron-rebuild of the datadog git repository to the node_modules datadog + ○ It should be named something like bin\win32-x64-110\pprof-nodejs.node + ○ Copy it to the node_modules\@datadog\pprof\prebuilds\win32-x64 + ○ Rename it to electron-110.node (or whatever ABI version it is using) + - Modify pylance to use pprof around problem location using the pyright\packages\pyright-internal\pprof\profiler.ts + ○ startProfile before + ○ finishProfile after, passing it a file name + - Rebuild Pylance + - Make sure to turn off background analysis + - Launch the CPU profiling profile + - Reproduce the problem + - Install Go (Get Started - The Go Programming Language) + - Install Graphviz + ○ Choco install graphviz + - Install the pprof cli + ○ go install github.com/google/pprof@latest + - Run pprof -http to look at results. + ○ Profile should be in same directory as vscode-pylance output. + ○ Example pprof -http=: + */ + +declare const __webpack_require__: typeof require; +declare const __non_webpack_require__: typeof require; + +function getRequire(path: string) { + const r = typeof __webpack_require__ === 'function' ? 
__non_webpack_require__ : require; + try { + return r(`../node_modules/${path}`); + } catch (err) { + console.log(err); + } +} + +let counter = 1; + +export function startProfile(): void { + const pprof = getRequire('@datadog/pprof'); + pprof?.time.start({}); + console.log(`Starting profile : ${counter}`); +} +export function finishProfile(outputFile: string): void { + const pprof = getRequire('@datadog/pprof'); + const profile = pprof?.time.stop(); + if (profile) { + const fs = getRequire('fs-extra') as typeof import('fs-extra'); + const buffer = pprof?.encodeSync(profile); + fs.writeFileSync(`${counter++}${outputFile}`, buffer); + } +} diff --git a/python-parser/packages/pyright-internal/src/pyright.ts b/python-parser/packages/pyright-internal/src/pyright.ts new file mode 100644 index 00000000..257dd65c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/pyright.ts @@ -0,0 +1,1386 @@ +/* + * pyright.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Command-line entry point for pyright type checker. + */ + +// Add the start timer at the very top of the file, before we import other modules. 
+ +/* eslint-disable */ +import { timingStats } from './common/timing'; +/* eslint-enable */ + +import chalk from 'chalk'; +import commandLineArgs, { CommandLineOptions, OptionDefinition } from 'command-line-args'; +import * as os from 'os'; + +import { ChildProcess, fork } from 'child_process'; +import { AnalysisResults } from './analyzer/analysis'; +import { PackageTypeReport, TypeKnownStatus } from './analyzer/packageTypeReport'; +import { PackageTypeVerifier } from './analyzer/packageTypeVerifier'; +import { AnalyzerService } from './analyzer/service'; +import { maxSourceFileSize } from './analyzer/sourceFile'; +import { SourceFileInfo } from './analyzer/sourceFileInfo'; +import { initializeDependencies } from './common/asyncInitialization'; +import { ChokidarFileWatcherProvider } from './common/chokidarFileWatcherProvider'; +import { CommandLineOptions as PyrightCommandLineOptions } from './common/commandLineOptions'; +import { ConsoleInterface, LogLevel, StandardConsole, StderrConsole } from './common/console'; +import { fail } from './common/debug'; +import { createDeferred } from './common/deferred'; +import { Diagnostic, DiagnosticCategory, compareDiagnostics } from './common/diagnostic'; +import { FileDiagnostics } from './common/diagnosticSink'; +import { FullAccessHost } from './common/fullAccessHost'; +import { combinePaths, normalizePath } from './common/pathUtils'; +import { PythonVersion } from './common/pythonVersion'; +import { RealTempFile, createFromRealFileSystem } from './common/realFileSystem'; +import { ServiceKeys } from './common/serviceKeys'; +import { ServiceProvider } from './common/serviceProvider'; +import { createServiceProvider } from './common/serviceProviderExtensions'; +import { getStdin } from './common/streamUtils'; +import { Range, isEmptyRange } from './common/textRange'; +import { Uri } from './common/uri/uri'; +import { getFileSpec, tryStat } from './common/uri/uriUtils'; +import { PyrightFileSystem } from 
'./pyrightFileSystem'; + +const toolName = 'pyright'; + +type SeverityLevel = 'error' | 'warning' | 'information'; + +// These values are publicly documented. Do not change them. +enum ExitStatus { + NoErrors = 0, + ErrorsReported = 1, + FatalError = 2, + ConfigFileParseError = 3, + ParameterError = 4, +} + +// The schema for this object is publicly documented. Do not change it. +interface PyrightJsonResults { + version: string; + time: string; + generalDiagnostics: PyrightJsonDiagnostic[]; + summary: PyrightJsonSummary; + typeCompleteness?: PyrightTypeCompletenessReport; +} + +// The schema for this object is publicly documented. Do not change it. +interface PyrightSymbolCount { + withKnownType: number; + withAmbiguousType: number; + withUnknownType: number; +} + +// The schema for this object is publicly documented. Do not change it. +interface PyrightTypeCompletenessReport { + packageName: string; + packageRootDirectory?: string | undefined; + moduleName: string; + moduleRootDirectory?: string | undefined; + ignoreUnknownTypesFromImports: boolean; + pyTypedPath?: string | undefined; + exportedSymbolCounts: PyrightSymbolCount; + otherSymbolCounts: PyrightSymbolCount; + missingFunctionDocStringCount: number; + missingClassDocStringCount: number; + missingDefaultParamCount: number; + completenessScore: number; + modules: PyrightPublicModuleReport[]; + symbols: PyrightPublicSymbolReport[]; +} + +// The schema for this object is publicly documented. Do not change it. +interface PyrightPublicModuleReport { + name: string; +} + +// The schema for this object is publicly documented. Do not change it. +interface PyrightPublicSymbolReport { + category: string; + name: string; + referenceCount: number; + isTypeKnown: boolean; + isTypeAmbiguous: boolean; + isExported: boolean; + diagnostics: PyrightJsonDiagnostic[]; + alternateNames?: string[] | undefined; +} + +// The schema for this object is publicly documented. Do not change it. 
+interface PyrightJsonDiagnostic { + file: string; + severity: SeverityLevel; + message: string; + range?: Range | undefined; + rule?: string | undefined; +} + +// The schema for this object is publicly documented. Do not change it. +interface PyrightJsonSummary { + filesAnalyzed: number; + errorCount: number; + warningCount: number; + informationCount: number; + timeInSec: number; +} + +// The schema for this object is publicly documented. Do not change it. +interface DiagnosticResult { + errorCount: number; + warningCount: number; + informationCount: number; + diagnosticCount: number; +} + +const cancellationNone = Object.freeze({ + isCancellationRequested: false, + onCancellationRequested: function () { + return { + dispose() { + /* empty */ + }, + }; + }, +}); + +async function processArgs(): Promise { + const optionDefinitions: OptionDefinition[] = [ + { name: 'createstub', type: String }, + { name: 'dependencies', type: Boolean }, + { name: 'files', type: String, multiple: true, defaultOption: true }, + { name: 'help', alias: 'h', type: Boolean }, + { name: 'ignoreexternal', type: Boolean }, + { name: 'lib', type: Boolean }, + { name: 'level', type: String }, + { name: 'outputjson', type: Boolean }, + { name: 'project', alias: 'p', type: String }, + { name: 'pythonpath', type: String }, + { name: 'pythonplatform', type: String }, + { name: 'pythonversion', type: String }, + { name: 'skipunannotated', type: Boolean }, + { name: 'stats', type: Boolean }, + { name: 'threads', type: parseThreadsArgValue }, + { name: 'typeshed-path', type: String }, + { name: 'typeshedpath', alias: 't', type: String }, + { name: 'venv-path', type: String }, + { name: 'venvpath', alias: 'v', type: String }, + { name: 'verifytypes', type: String }, + { name: 'verbose', type: Boolean }, + { name: 'version', type: Boolean }, + { name: 'warnings', type: Boolean }, + { name: 'watch', alias: 'w', type: Boolean }, + ]; + + let args: CommandLineOptions; + + try { + args = 
commandLineArgs(optionDefinitions); + } catch (e: any) { + const argErr: { name: string; optionName: string } = e; + if (argErr && argErr.optionName) { + console.error(`Unexpected option ${argErr.optionName}.\n${toolName} --help for usage`); + return ExitStatus.ParameterError; + } + + console.error(`Unexpected error\n${toolName} --help for usage`); + return ExitStatus.ParameterError; + } + + if (args.help !== undefined) { + printUsage(); + return ExitStatus.NoErrors; + } + + if (args.version !== undefined) { + printVersion(console); + return ExitStatus.NoErrors; + } + + for (const [arg, value] of Object.entries(args)) { + if (value === null && arg !== 'threads') { + console.error(`'${arg}' option requires a value`); + return ExitStatus.ParameterError; + } + } + + if (args.outputjson) { + const incompatibleArgs = ['stats', 'verbose', 'createstub', 'dependencies']; + for (const arg of incompatibleArgs) { + if (args[arg] !== undefined) { + console.error(`'outputjson' option cannot be used with '${arg}' option`); + return ExitStatus.ParameterError; + } + } + } + + if (args.verifytypes !== undefined) { + const incompatibleArgs = ['watch', 'stats', 'createstub', 'dependencies', 'skipunannotated', 'threads']; + for (const arg of incompatibleArgs) { + if (args[arg] !== undefined) { + console.error(`'verifytypes' option cannot be used with '${arg}' option`); + return ExitStatus.ParameterError; + } + } + } + + if (args.createstub) { + const incompatibleArgs = ['watch', 'stats', 'verifytypes', 'dependencies', 'skipunannotated', 'threads']; + for (const arg of incompatibleArgs) { + if (args[arg] !== undefined) { + console.error(`'createstub' option cannot be used with '${arg}' option`); + return ExitStatus.ParameterError; + } + } + } + + if (args.threads) { + const incompatibleArgs = ['watch', 'stats', 'dependencies']; + for (const arg of incompatibleArgs) { + if (args[arg] !== undefined) { + console.error(`'threads' option cannot be used with '${arg}' option`); + return 
ExitStatus.ParameterError; + } + } + } + + const options = new PyrightCommandLineOptions(process.cwd(), false); + const tempFile = new RealTempFile(); + + // Assume any relative paths are relative to the working directory. + if (args.files && Array.isArray(args.files)) { + let fileSpecList = args.files; + + // Has the caller indicated that the file list will be supplied by stdin? + if (args.files.length === 1 && args.files[0] === '-') { + try { + const stdText = await getStdin(); + fileSpecList = stdText + .replace(/[\r\n]/g, ' ') + .trim() + .split(' ') + .map((s) => s.trim()) + .filter((s) => !!s); + } catch (e) { + console.error('Invalid file list specified by stdin input'); + return ExitStatus.ParameterError; + } + } + + options.configSettings.includeFileSpecsOverride = fileSpecList; + options.configSettings.includeFileSpecsOverride = options.configSettings.includeFileSpecsOverride.map((f) => + combinePaths(process.cwd(), f) + ); + + // Verify the specified file specs to make sure their wildcard roots exist. + const tempFileSystem = new PyrightFileSystem(createFromRealFileSystem(tempFile)); + + for (const fileDesc of options.configSettings.includeFileSpecsOverride) { + const includeSpec = getFileSpec(Uri.file(process.cwd(), tempFile), fileDesc); + try { + const stat = tryStat(tempFileSystem, includeSpec.wildcardRoot); + if (!stat) { + console.error(`File or directory "${includeSpec.wildcardRoot}" does not exist`); + return ExitStatus.ParameterError; + } + } catch { + // Ignore exception in this case. 
+ } + } + } + + if (args.project) { + options.configFilePath = combinePaths(process.cwd(), normalizePath(args.project)); + } + + if (args.pythonplatform) { + if ( + args.pythonplatform === 'Darwin' || + args.pythonplatform === 'Linux' || + args.pythonplatform === 'Windows' || + args.pythonplatform === 'iOS' || + args.pythonplatform === 'Android' + ) { + options.configSettings.pythonPlatform = args.pythonplatform; + } else { + console.error( + `'${args.pythonplatform}' is not a supported Python platform; specify Darwin, Linux, Windows, iOS, or Android.` + ); + return ExitStatus.ParameterError; + } + } + + if (args.pythonversion) { + const version = PythonVersion.fromString(args.pythonversion); + if (version) { + options.configSettings.pythonVersion = version; + } else { + console.error(`'${args.pythonversion}' is not a supported Python version; specify 3.3, 3.4, etc.`); + return ExitStatus.ParameterError; + } + } + + if (args.pythonpath !== undefined) { + const incompatibleArgs = ['venv-path', 'venvpath']; + for (const arg of incompatibleArgs) { + if (args[arg] !== undefined) { + console.error(`'pythonpath' option cannot be used with '${arg}' option`); + return ExitStatus.ParameterError; + } + } + + options.configSettings.pythonPath = combinePaths(process.cwd(), normalizePath(args['pythonpath'])); + } + + if (args['venv-path']) { + console.warn(`'venv-path' option is deprecated; use 'venvpath' instead`); + options.configSettings.venvPath = combinePaths(process.cwd(), normalizePath(args['venv-path'])); + } + + if (args['venvpath']) { + options.configSettings.venvPath = combinePaths(process.cwd(), normalizePath(args['venvpath'])); + } + + if (args['typeshed-path']) { + console.warn(`'typeshed-path' option is deprecated; use 'typeshedpath' instead`); + options.configSettings.typeshedPath = combinePaths(process.cwd(), normalizePath(args['typeshed-path'])); + } + + if (args['typeshedpath']) { + options.configSettings.typeshedPath = combinePaths(process.cwd(), 
normalizePath(args['typeshedpath'])); + } + + if (args.createstub) { + options.languageServerSettings.typeStubTargetImportName = args.createstub; + } + + if (args.skipunannotated) { + options.configSettings.analyzeUnannotatedFunctions = false; + } + + if (args.verbose) { + options.configSettings.verboseOutput = true; + } + + // Always enable autoSearchPaths when using the command line. + options.configSettings.autoSearchPaths = true; + + if (args.lib) { + console.warn(`The --lib option is deprecated. Pyright now defaults to using library code to infer types.`); + } + + let minSeverityLevel: SeverityLevel = 'information'; + if (args.level && typeof args.level === 'string') { + const levelValue = args.level.toLowerCase(); + if (levelValue === 'error' || levelValue === 'warning') { + minSeverityLevel = levelValue; + } else { + console.error(`'${args.level}' is not a valid value for --level; specify error or warning.`); + return ExitStatus.ParameterError; + } + } + + options.languageServerSettings.checkOnlyOpenFiles = false; + + if (!!args.stats && !!args.verbose) { + options.languageServerSettings.logTypeEvaluationTime = true; + } + + let logLevel = LogLevel.Error; + if (args.stats || args.verbose) { + logLevel = LogLevel.Info; + } + + // If using outputjson, redirect all console output to stderr so it doesn't mess + // up the JSON output, which goes to stdout. + const output = args.outputjson ? new StderrConsole(logLevel) : new StandardConsole(logLevel); + const fileSystem = new PyrightFileSystem( + createFromRealFileSystem(tempFile, output, new ChokidarFileWatcherProvider(output)) + ); + + const serviceProvider = createServiceProvider(fileSystem, output, tempFile); + + // The package type verification uses a different path. 
+ if (args['verifytypes'] !== undefined) { + return verifyPackageTypes( + serviceProvider, + args['verifytypes'] || '', + options, + !!args.outputjson, + minSeverityLevel, + args['ignoreexternal'] + ); + } else if (args['ignoreexternal'] !== undefined) { + console.error(`'--ignoreexternal' is valid only when used with '--verifytypes'`); + return ExitStatus.ParameterError; + } + + const watch = args.watch !== undefined; + options.languageServerSettings.watchForSourceChanges = watch; + options.languageServerSettings.watchForConfigChanges = watch; + + const service = new AnalyzerService('', serviceProvider, { + console: output, + hostFactory: () => new FullAccessHost(serviceProvider), + // Refresh service 2 seconds after the last library file change is detected. + libraryReanalysisTimeProvider: () => 2 * 1000, + shouldRunAnalysis: () => true, + }); + + if ('threads' in args) { + let threadCount = args['threads']; + + // If the thread count was unspecified, use the number of + // logical CPUs (i.e. hardware threads). We find empirically + // that going below 4 threads usually doesn't help. 
+ if (threadCount === null) { + threadCount = os.cpus().length; + if (threadCount < 4) { + threadCount = 1; + } + } + + if (threadCount > 1) { + return runMultiThreaded(args, options, threadCount, service, minSeverityLevel, output); + } + } + + return runSingleThreaded(args, options, service, minSeverityLevel, output); +} + +async function runSingleThreaded( + args: CommandLineOptions, + options: PyrightCommandLineOptions, + service: AnalyzerService, + minSeverityLevel: SeverityLevel, + output: ConsoleInterface +) { + const watch = args.watch !== undefined; + const treatWarningsAsErrors = !!args.warnings; + + const exitStatus = createDeferred(); + + service.setCompletionCallback((results) => { + if (results.fatalErrorOccurred) { + exitStatus.resolve(ExitStatus.FatalError); + return; + } + + if (results.configParseErrorOccurred) { + exitStatus.resolve(ExitStatus.ConfigFileParseError); + return; + } + + let errorCount = 0; + if (!args.createstub && !args.verifytypes) { + // Sort all file diagnostics by the file URI so + // we have a deterministic ordering. + const fileDiagnostics = results.diagnostics.sort((a, b) => + a.fileUri.toString() < b.fileUri.toString() ? 
-1 : 1 + ); + + if (args.outputjson) { + const report = reportDiagnosticsAsJson( + fileDiagnostics, + minSeverityLevel, + results.filesInProgram, + results.elapsedTime + ); + errorCount += report.errorCount; + if (treatWarningsAsErrors) { + errorCount += report.warningCount; + } + } else { + printVersion(output); + const report = reportDiagnosticsAsText(fileDiagnostics, minSeverityLevel); + errorCount += report.errorCount; + if (treatWarningsAsErrors) { + errorCount += report.warningCount; + } + } + } + + if (args.createstub) { + try { + service.writeTypeStub(cancellationNone); + service.dispose(); + console.info(`Type stub was created for '${args.createstub}'`); + } catch (err) { + let errMessage = ''; + if (err instanceof Error) { + errMessage = err.message; + } + + console.error(`Error occurred when creating type stub: ${errMessage}`); + exitStatus.resolve(ExitStatus.FatalError); + return; + } + exitStatus.resolve(ExitStatus.NoErrors); + return; + } + + if (!args.outputjson) { + if (!watch) { + // Print the total time. + timingStats.printSummary(output); + } + + if (args.stats) { + // Print the stats details. + service.printStats(); + timingStats.printDetails(console); + + if (args.verbose) { + service.printDetailedAnalysisTimes(); + } + } + + if (args.dependencies) { + service.printDependencies(!!args.verbose); + } + } + + if (!watch) { + exitStatus.resolve(errorCount > 0 ? ExitStatus.ErrorsReported : ExitStatus.NoErrors); + return; + } else if (!args.outputjson) { + console.info('Watching for file changes...'); + } + }); + + // This will trigger the analyzer. 
+ service.setOptions(options); + service.enumerateSourceFiles(0); + + return await exitStatus.promise; +} + +async function runMultiThreaded( + args: CommandLineOptions, + options: PyrightCommandLineOptions, + maxThreadCount: number, + service: AnalyzerService, + minSeverityLevel: SeverityLevel, + output: ConsoleInterface +) { + const workers: ChildProcess[] = []; + const workersShutdown = new Set(); + const startTime = Date.now(); + const treatWarningsAsErrors = !!args.warnings; + const exitStatus = createDeferred(); + + // Specify that only open files should be checked. This will allow us + // to control which files are checked by which workers. + options.languageServerSettings.checkOnlyOpenFiles = true; + + // This will trigger discovery of files in the project. + service.setOptions(options); + service.enumerateSourceFiles(0); + const program = service.backgroundAnalysisProgram.program; + + // Get the list of "tracked" source files -- those that will be type checked. + const sourceFilesToAnalyze = program.getSourceFileInfoList().filter((info) => info.isTracked); + + // Don't create more workers than there are files. + const workerCount = Math.min(maxThreadCount, sourceFilesToAnalyze.length); + + // Split the source files into affinity queues, one for each worker. We assume + // that files that are next to each other in the directory hierarchy probably + // have more common imports, so we want to analyze them with the same worker + // if possible to maximize type cache hits. 
+ const affinityQueues: SourceFileInfo[][] = new Array(workerCount); + const filesPerAffinityQueue = sourceFilesToAnalyze.length / workerCount; + + for (let i = 0; i < sourceFilesToAnalyze.length; i++) { + const affinityIndex = Math.floor(i / filesPerAffinityQueue); + if (affinityQueues[affinityIndex] === undefined) { + affinityQueues[affinityIndex] = []; + } + + affinityQueues[affinityIndex].push(sourceFilesToAnalyze[i]); + } + + output.info(`Found ${sourceFilesToAnalyze.length} files to analyze`); + output.info(`Using ${workerCount} threads`); + + let fileDiagnostics: FileDiagnostics[] = []; + let pendingAnalysisCount = 0; + + const sendMessageToWorker = (worker: ChildProcess, message: string, data: any) => { + worker.send(JSON.stringify({ action: message, data: data })); + }; + + const analyzeNextFile = (workerIndex: number) => { + const worker = workers[workerIndex]; + let nextFileToAnalyze: SourceFileInfo | undefined; + + // Determine the next file to analyze for this worker. + for (let i = 0; i < affinityQueues.length; i++) { + const affinityIndex = (workerIndex + i) % affinityQueues.length; + if (affinityQueues[affinityIndex].length > 0) { + nextFileToAnalyze = affinityQueues[affinityIndex].shift()!; + break; + } + } + + if (nextFileToAnalyze) { + // Tell the worker to analyze the next file. + const fileUri = nextFileToAnalyze.uri.toString(); + + sendMessageToWorker(worker, 'analyzeFile', fileUri); + + pendingAnalysisCount++; + } else { + // Kill the worker since there's nothing left to do. + workersShutdown.add(worker); + worker.kill(); + + if (pendingAnalysisCount === 0) { + // If there are no more files to analyze and all pending analysis + // is complete, report the results and exit. + if (!exitStatus.resolved) { + const elapsedTime = (Date.now() - startTime) / 1000; + let errorCount = 0; + + // Sort all file diagnostics by the file URI so + // we have a deterministic ordering. 
+ fileDiagnostics = fileDiagnostics.sort((a, b) => + a.fileUri.toString() < b.fileUri.toString() ? -1 : 1 + ); + + if (args.outputjson) { + const report = reportDiagnosticsAsJson( + fileDiagnostics, + minSeverityLevel, + sourceFilesToAnalyze.length, + elapsedTime + ); + errorCount += report.errorCount; + if (treatWarningsAsErrors) { + errorCount += report.warningCount; + } + } else { + printVersion(output); + const report = reportDiagnosticsAsText(fileDiagnostics, minSeverityLevel); + errorCount += report.errorCount; + if (treatWarningsAsErrors) { + errorCount += report.warningCount; + } + + // Print the total time. + output.info(`Completed in ${elapsedTime}sec`); + } + + exitStatus.resolve(errorCount > 0 ? ExitStatus.ErrorsReported : ExitStatus.NoErrors); + } + } + } + }; + + // Launch worker processes. + for (let i = 0; i < workerCount; i++) { + const mainModulePath = process.mainModule!.filename; + + // Ensure forked processes use the temp folder owned by the main process. + // This allows for automatic deletion when the main process exits. + const worker = fork(mainModulePath, [ + 'worker', + i.toString(), + service.serviceProvider.get(ServiceKeys.tempFile).tmpdir().getFilePath(), + ]); + + worker.on('message', (message) => { + let messageObj: any; + + try { + messageObj = JSON.parse(message as string); + } catch { + output.error(`Invalid message from worker: ${message}`); + exitStatus.resolve(ExitStatus.FatalError); + } + + // If the exit status has already been resolved, another thread + // generated a fatal error, so we shouldn't continue. 
+ if (exitStatus.resolved) { + return; + } + + switch (messageObj.action) { + case 'analysisResults': { + pendingAnalysisCount--; + const results = messageObj.data as AnalysisResults; + + if (results.fatalErrorOccurred) { + output.error(`Fatal error from worker`); + exitStatus.resolve(ExitStatus.FatalError); + return; + } + + if (results.configParseErrorOccurred) { + exitStatus.resolve(ExitStatus.ConfigFileParseError); + return; + } + + for (const fileDiag of results.diagnostics) { + fileDiagnostics.push(FileDiagnostics.fromJsonObj(fileDiag)); + } + + analyzeNextFile(i); + break; + } + + default: { + output.error(`Unknown message from worker: ${message}`); + exitStatus.resolve(ExitStatus.FatalError); + break; + } + } + }); + + worker.on('error', (err) => { + output.error(`Failed to start child process: ${err}`); + exitStatus.resolve(ExitStatus.FatalError); + }); + + worker.on('exit', (code, signal) => { + if (workersShutdown.has(worker)) { + return; + } + + output.error(`Worker process exited unexpectedly: exit code=${code}, signal=${signal}`); + exitStatus.resolve(ExitStatus.FatalError); + }); + + sendMessageToWorker(worker, 'setOptions', options); + workers.push(worker); + + // Tell the worker to analyze the next file. + analyzeNextFile(i); + } + + return await exitStatus.promise; +} + +// This is the message loop for a worker process used used for +// multi-threaded analysis. 
+function runWorkerMessageLoop(workerNum: number, tempFolderName: string) { + let serviceProvider: ServiceProvider | undefined; + let service: AnalyzerService | undefined; + let fileSystem: PyrightFileSystem | undefined; + let lastOpenFileUri: Uri | undefined; + + const sendMessageToParent = (message: string, data: any) => { + process.send?.(JSON.stringify({ action: message, data: data })); + }; + + process.on('message', (message) => { + let messageObj: any; + + try { + messageObj = JSON.parse(message as string); + } catch { + console.error(`Invalid message from parent: ${message}`); + return; + } + + switch (messageObj.action) { + case 'setOptions': { + const options = new PyrightCommandLineOptions(process.cwd(), false); + + Object.keys(messageObj.data).forEach((key) => { + (options as any)[key] = messageObj.data[key]; + }); + + let logLevel = LogLevel.Error; + if (options.configSettings.verboseOutput) { + logLevel = LogLevel.Info; + } + + const output = new StderrConsole(logLevel); + const tempFile = new RealTempFile(tempFolderName); + fileSystem = new PyrightFileSystem( + createFromRealFileSystem(tempFile, output, new ChokidarFileWatcherProvider(output)) + ); + + serviceProvider = createServiceProvider(fileSystem, output, tempFile); + service = new AnalyzerService('', serviceProvider, { + console: output, + hostFactory: () => new FullAccessHost(serviceProvider!), + // Refresh service 2 seconds after the last library file change is detected. + libraryReanalysisTimeProvider: () => 2 * 1000, + shouldRunAnalysis: () => true, + }); + + service.setCompletionCallback((results) => { + // We're interested only in diagnostics for the last open file. + const fileDiags = results.diagnostics.filter((fileDiag) => + fileDiag.fileUri.equals(lastOpenFileUri) + ); + + // Convert JSON-compatible format. 
+ const resultsObj = { + ...results, + diagnostics: fileDiags.map((fileDiag) => FileDiagnostics.toJsonObj(fileDiag)), + }; + + sendMessageToParent('analysisResults', resultsObj); + }); + + service.setOptions(options); + break; + } + + case 'analyzeFile': { + if (serviceProvider && fileSystem && service) { + const uri = Uri.parse(messageObj.data as string, serviceProvider); + + // Check the file's length before attempting to read its full contents. + const fileStat = fileSystem.statSync(uri); + if (fileStat.size > maxSourceFileSize) { + console.error( + `File length of "${uri}" is ${fileStat.size} ` + + `which exceeds the maximum supported file size of ${maxSourceFileSize}` + ); + throw new Error('File larger than max'); + } + + const fileContents = fileSystem.readFileSync(uri, 'utf8'); + + lastOpenFileUri = uri; + service?.setFileOpened(uri, /* version */ 1, fileContents); + } + break; + } + } + }); +} + +function verifyPackageTypes( + serviceProvider: ServiceProvider, + packageName: string, + options: PyrightCommandLineOptions, + outputJson: boolean, + minSeverityLevel: SeverityLevel, + ignoreUnknownTypesFromImports: boolean +): ExitStatus { + try { + const host = new FullAccessHost(serviceProvider); + const verifier = new PackageTypeVerifier( + serviceProvider, + host, + options, + packageName, + ignoreUnknownTypesFromImports + ); + const report = verifier.verify(); + const jsonReport = buildTypeCompletenessReport(packageName, report, minSeverityLevel); + + if (outputJson) { + console.info(JSON.stringify(jsonReport, /* replacer */ undefined, 4)); + } else { + printTypeCompletenessReportText(jsonReport, !!options.configSettings.verboseOutput); + } + + return jsonReport.typeCompleteness!.completenessScore < 1 ? 
ExitStatus.ErrorsReported : ExitStatus.NoErrors; + } catch (err) { + let errMessage = ''; + if (err instanceof Error) { + errMessage = ': ' + err.message; + } + + console.error(`Error occurred when verifying types: ` + errMessage); + return ExitStatus.FatalError; + } +} + +function accumulateReportDiagnosticStats(diag: PyrightJsonDiagnostic, report: PyrightJsonResults) { + if (diag.severity === 'error') { + report.summary.errorCount++; + } else if (diag.severity === 'warning') { + report.summary.warningCount++; + } else if (diag.severity === 'information') { + report.summary.informationCount++; + } +} + +function buildTypeCompletenessReport( + packageName: string, + completenessReport: PackageTypeReport, + minSeverityLevel: SeverityLevel +): PyrightJsonResults { + const report: PyrightJsonResults = { + version: getVersionString(), + time: Date.now().toString(), + generalDiagnostics: [], + summary: { + filesAnalyzed: completenessReport.modules.size, + errorCount: 0, + warningCount: 0, + informationCount: 0, + timeInSec: timingStats.getTotalDuration(), + }, + }; + + // Add the general diagnostics. 
+ completenessReport.generalDiagnostics.forEach((diag) => { + const jsonDiag = convertDiagnosticToJson('', diag); + if (isDiagnosticIncluded(jsonDiag.severity, minSeverityLevel)) { + report.generalDiagnostics.push(jsonDiag); + } + accumulateReportDiagnosticStats(jsonDiag, report); + }); + + report.typeCompleteness = { + packageName, + packageRootDirectory: completenessReport.packageRootDirectoryUri?.getFilePath(), + moduleName: completenessReport.moduleName, + moduleRootDirectory: completenessReport.moduleRootDirectoryUri?.getFilePath(), + ignoreUnknownTypesFromImports: completenessReport.ignoreExternal, + pyTypedPath: completenessReport.pyTypedPathUri?.getFilePath(), + exportedSymbolCounts: { + withKnownType: 0, + withAmbiguousType: 0, + withUnknownType: 0, + }, + otherSymbolCounts: { + withKnownType: 0, + withAmbiguousType: 0, + withUnknownType: 0, + }, + missingFunctionDocStringCount: completenessReport.missingFunctionDocStringCount, + missingClassDocStringCount: completenessReport.missingClassDocStringCount, + missingDefaultParamCount: completenessReport.missingDefaultParamCount, + completenessScore: 0, + modules: [], + symbols: [], + }; + + // Add the modules. + completenessReport.modules.forEach((module) => { + const jsonModule: PyrightPublicModuleReport = { + name: module.name, + }; + + report.typeCompleteness!.modules.push(jsonModule); + }); + + // Add the symbols. + completenessReport.symbols.forEach((symbol) => { + const diagnostics: PyrightJsonDiagnostic[] = []; + + // Convert and filter the diagnostics. 
+ symbol.diagnostics.forEach((diag) => { + const jsonDiag = convertDiagnosticToJson(diag.uri.getFilePath(), diag.diagnostic); + if (isDiagnosticIncluded(jsonDiag.severity, minSeverityLevel)) { + diagnostics.push(jsonDiag); + } + }); + + const jsonSymbol: PyrightPublicSymbolReport = { + category: PackageTypeVerifier.getSymbolCategoryString(symbol.category), + name: symbol.fullName, + referenceCount: symbol.referenceCount, + isExported: symbol.isExported, + isTypeKnown: symbol.typeKnownStatus === TypeKnownStatus.Known, + isTypeAmbiguous: symbol.typeKnownStatus === TypeKnownStatus.Ambiguous, + diagnostics, + }; + + const alternateNames = completenessReport.alternateSymbolNames.get(symbol.fullName); + if (alternateNames) { + jsonSymbol.alternateNames = alternateNames; + } + + report.typeCompleteness!.symbols.push(jsonSymbol); + + // Accumulate counts for report. + if (symbol.typeKnownStatus === TypeKnownStatus.Known) { + if (symbol.isExported) { + report.typeCompleteness!.exportedSymbolCounts.withKnownType++; + } else { + report.typeCompleteness!.otherSymbolCounts.withKnownType++; + } + } else if (symbol.typeKnownStatus === TypeKnownStatus.Ambiguous) { + if (symbol.isExported) { + report.typeCompleteness!.exportedSymbolCounts.withAmbiguousType++; + } else { + report.typeCompleteness!.otherSymbolCounts.withAmbiguousType++; + } + } else { + if (symbol.isExported) { + report.typeCompleteness!.exportedSymbolCounts.withUnknownType++; + } else { + report.typeCompleteness!.otherSymbolCounts.withUnknownType++; + } + } + }); + + const unknownSymbolCount = report.typeCompleteness.exportedSymbolCounts.withUnknownType; + const ambiguousSymbolCount = report.typeCompleteness.exportedSymbolCounts.withAmbiguousType; + const knownSymbolCount = report.typeCompleteness.exportedSymbolCounts.withKnownType; + const totalSymbolCount = unknownSymbolCount + ambiguousSymbolCount + knownSymbolCount; + + if (totalSymbolCount > 0) { + report.typeCompleteness!.completenessScore = knownSymbolCount / 
totalSymbolCount; + } + + return report; +} + +function printTypeCompletenessReportText(results: PyrightJsonResults, verboseOutput: boolean) { + const completenessReport = results.typeCompleteness!; + + console.info(`Module name: "${completenessReport.moduleName}"`); + if (completenessReport.packageRootDirectory !== undefined) { + console.info(`Package directory: "${completenessReport.packageRootDirectory}"`); + } + if (completenessReport.moduleRootDirectory !== undefined) { + console.info(`Module directory: "${completenessReport.moduleRootDirectory}"`); + } + + if (completenessReport.pyTypedPath !== undefined) { + console.info(`Path of py.typed file: "${completenessReport.pyTypedPath}"`); + } + + // Print list of public modules. + if (completenessReport.modules.length > 0) { + console.info(''); + console.info(`Public modules: ${completenessReport.modules.length}`); + completenessReport.modules.forEach((module) => { + console.info(` ${module.name}`); + }); + } + + // Print list of all symbols. + if (completenessReport.symbols.length > 0 && verboseOutput) { + console.info(''); + console.info(`Exported symbols: ${completenessReport.symbols.filter((sym) => sym.isExported).length}`); + completenessReport.symbols.forEach((symbol) => { + if (symbol.isExported) { + const refCount = symbol.referenceCount > 1 ? ` (${symbol.referenceCount} references)` : ''; + console.info(` ${symbol.name}${refCount}`); + } + }); + + console.info(''); + console.info(`Other referenced symbols: ${completenessReport.symbols.filter((sym) => !sym.isExported).length}`); + completenessReport.symbols.forEach((symbol) => { + if (!symbol.isExported) { + const refCount = symbol.referenceCount > 1 ? ` (${symbol.referenceCount} references)` : ''; + console.info(` ${symbol.name}${refCount}`); + } + }); + } + + // Print all the general diagnostics. + results.generalDiagnostics.forEach((diag) => { + logDiagnosticToConsole(diag); + }); + + // Print all the symbol-specific diagnostics. 
+ console.info(''); + console.info(`Symbols used in public interface:`); + results.typeCompleteness!.symbols.forEach((symbol) => { + let diagnostics = symbol.diagnostics; + if (!verboseOutput) { + diagnostics = diagnostics.filter((diag) => diag.severity === 'error'); + } + if (diagnostics.length > 0) { + console.info(`${symbol.name}`); + diagnostics.forEach((diag) => { + logDiagnosticToConsole(diag); + }); + } + }); + + // Print other stats. + console.info(''); + console.info( + `Symbols exported by "${completenessReport.packageName}": ${ + completenessReport.exportedSymbolCounts.withKnownType + + completenessReport.exportedSymbolCounts.withAmbiguousType + + completenessReport.exportedSymbolCounts.withUnknownType + }` + ); + console.info(` With known type: ${completenessReport.exportedSymbolCounts.withKnownType}`); + console.info(` With ambiguous type: ${completenessReport.exportedSymbolCounts.withAmbiguousType}`); + console.info(` With unknown type: ${completenessReport.exportedSymbolCounts.withUnknownType}`); + if (completenessReport.ignoreUnknownTypesFromImports) { + console.info(` (Ignoring unknown types imported from other packages)`); + } + console.info(''); + console.info( + `Other symbols referenced but not exported by "${completenessReport.packageName}": ${ + completenessReport.otherSymbolCounts.withKnownType + + completenessReport.otherSymbolCounts.withAmbiguousType + + completenessReport.otherSymbolCounts.withUnknownType + }` + ); + console.info(` With known type: ${completenessReport.otherSymbolCounts.withKnownType}`); + console.info(` With ambiguous type: ${completenessReport.otherSymbolCounts.withAmbiguousType}`); + console.info(` With unknown type: ${completenessReport.otherSymbolCounts.withUnknownType}`); + console.info(''); + console.info(`Symbols without documentation:`); + console.info(` Functions without docstring: ${completenessReport.missingFunctionDocStringCount}`); + console.info(` Functions without default param: 
${completenessReport.missingDefaultParamCount}`); + console.info(` Classes without docstring: ${completenessReport.missingClassDocStringCount}`); + console.info(''); + console.info(`Type completeness score: ${Math.round(completenessReport.completenessScore * 1000) / 10}%`); + console.info(''); + console.info(`Completed in ${results.summary.timeInSec}sec`); + console.info(''); +} + +function printUsage() { + console.info( + 'Usage: ' + + toolName + + ' [options] files...\n' + + ' Options:\n' + + ' --createstub Create type stub file(s) for import\n' + + ' --dependencies Emit import dependency information\n' + + ' -h,--help Show this help message\n' + + ' --ignoreexternal Ignore external imports for --verifytypes\n' + + ' --level Minimum diagnostic level (error or warning)\n' + + ' --outputjson Output results in JSON format\n' + + ' -p,--project Use the configuration file at this location\n' + + ' --pythonplatform Analyze for a specific platform (Darwin, Linux, Windows, iOS, Android)\n' + + ' --pythonpath Path to the Python interpreter\n' + + ' --pythonversion Analyze for a specific version (3.3, 3.4, etc.)\n' + + ' --skipunannotated Skip analysis of functions with no type annotations\n' + + ' --stats Print detailed performance stats\n' + + ' -t,--typeshedpath Use typeshed type stubs at this location\n' + + ' --threads Use separate threads to parallelize type checking \n' + + ' -v,--venvpath Directory that contains virtual environments\n' + + ' --verbose Emit verbose diagnostics\n' + + ' --verifytypes Verify type completeness of a py.typed package\n' + + ' --version Print Pyright version and exit\n' + + ' --warnings Use exit code of 1 if warnings are reported\n' + + ' -w,--watch Continue to run and watch for changes\n' + + ' - Read files from stdin\n' + ); +} + +function getVersionString() { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const version = require('../package.json').version; + return version.toString(); +} + +function 
printVersion(console: ConsoleInterface) { + console.info(`${toolName} ${getVersionString()}`); +} + +function reportDiagnosticsAsJson( + fileDiagnostics: FileDiagnostics[], + minSeverityLevel: SeverityLevel, + filesInProgram: number, + timeInSec: number +): DiagnosticResult { + const report: PyrightJsonResults = { + version: getVersionString(), + time: Date.now().toString(), + generalDiagnostics: [], + summary: { + filesAnalyzed: filesInProgram, + errorCount: 0, + warningCount: 0, + informationCount: 0, + timeInSec, + }, + }; + + fileDiagnostics.forEach((fileDiag) => { + fileDiag.diagnostics.sort(compareDiagnostics).forEach((diag) => { + if ( + diag.category === DiagnosticCategory.Error || + diag.category === DiagnosticCategory.Warning || + diag.category === DiagnosticCategory.Information + ) { + const jsonDiag = convertDiagnosticToJson(fileDiag.fileUri.getFilePath(), diag); + if (isDiagnosticIncluded(jsonDiag.severity, minSeverityLevel)) { + report.generalDiagnostics.push(jsonDiag); + } + + accumulateReportDiagnosticStats(jsonDiag, report); + } + }); + }); + + console.info(JSON.stringify(report, /* replacer */ undefined, 4)); + + // Output a blank line to help tools that are attempting to parse the + // JSON output when used in watch mode. + console.info(''); + + return { + errorCount: report.summary.errorCount, + warningCount: report.summary.warningCount, + informationCount: report.summary.informationCount, + diagnosticCount: report.summary.errorCount + report.summary.warningCount + report.summary.informationCount, + }; +} + +function isDiagnosticIncluded(diagSeverity: SeverityLevel, minSeverityLevel: SeverityLevel) { + // Errors are always included. + if (diagSeverity === 'error') { + return true; + } + + // Warnings are included only if the min severity level is below error. + if (diagSeverity === 'warning') { + return minSeverityLevel !== 'error'; + } + + // Informations are included only if the min severity level is 'information'. 
+ return minSeverityLevel === 'information'; +} + +function convertDiagnosticCategoryToSeverity(category: DiagnosticCategory): SeverityLevel { + switch (category) { + case DiagnosticCategory.Error: + return 'error'; + + case DiagnosticCategory.Warning: + return 'warning'; + + case DiagnosticCategory.Information: + return 'information'; + + default: + fail('Unexpected diagnostic category'); + } +} + +function convertDiagnosticToJson(filePath: string, diag: Diagnostic): PyrightJsonDiagnostic { + return { + file: filePath, + severity: convertDiagnosticCategoryToSeverity(diag.category), + message: diag.message, + range: isEmptyRange(diag.range) ? undefined : diag.range, + rule: diag.getRule(), + }; +} + +function reportDiagnosticsAsText( + fileDiagnostics: FileDiagnostics[], + minSeverityLevel: SeverityLevel +): DiagnosticResult { + let errorCount = 0; + let warningCount = 0; + let informationCount = 0; + + fileDiagnostics.forEach((fileDiagnostics) => { + // Don't report unused code or deprecated diagnostics. + const fileErrorsAndWarnings = fileDiagnostics.diagnostics + .filter( + (diag) => + diag.category !== DiagnosticCategory.UnusedCode && + diag.category !== DiagnosticCategory.UnreachableCode && + diag.category !== DiagnosticCategory.Deprecated && + isDiagnosticIncluded(convertDiagnosticCategoryToSeverity(diag.category), minSeverityLevel) + ) + .sort(compareDiagnostics); + + if (fileErrorsAndWarnings.length > 0) { + console.info(`${fileDiagnostics.fileUri.toUserVisibleString()}`); + fileErrorsAndWarnings.forEach((diag) => { + const jsonDiag = convertDiagnosticToJson(fileDiagnostics.fileUri.getFilePath(), diag); + logDiagnosticToConsole(jsonDiag); + + if (diag.category === DiagnosticCategory.Error) { + errorCount++; + } else if (diag.category === DiagnosticCategory.Warning) { + warningCount++; + } else if (diag.category === DiagnosticCategory.Information) { + informationCount++; + } + }); + } + }); + + console.info( + `${errorCount.toString()} ${errorCount === 1 ? 
'error' : 'errors'}, ` + + `${warningCount.toString()} ${warningCount === 1 ? 'warning' : 'warnings'}, ` + + `${informationCount.toString()} ${informationCount === 1 ? 'information' : 'informations'}` + ); + + return { + errorCount, + warningCount, + informationCount, + diagnosticCount: errorCount + warningCount + informationCount, + }; +} + +function logDiagnosticToConsole(diag: PyrightJsonDiagnostic, prefix = ' ') { + let message = prefix; + if (diag.file) { + message += `${diag.file}:`; + } + if (diag.range && !isEmptyRange(diag.range)) { + message += + chalk.yellow(`${diag.range.start.line + 1}`) + + ':' + + chalk.yellow(`${diag.range.start.character + 1}`) + + ' - '; + } else { + message += ' '; + } + + const [firstLine, ...remainingLines] = diag.message.split('\n'); + + message += + diag.severity === 'error' + ? chalk.red('error') + : diag.severity === 'warning' + ? chalk.cyan('warning') + : chalk.blue('information'); + message += `: ${firstLine}`; + if (remainingLines.length > 0) { + message += '\n' + prefix + remainingLines.join('\n' + prefix); + } + + if (diag.rule) { + message += chalk.gray(` (${diag.rule})`); + } + + console.info(message); +} + +function parseThreadsArgValue(input: string | null): any { + if (input === null || input === 'auto') { + return null; + } + + const value = parseInt(input, 10); + if (isNaN(value) || value < 1) { + return null; + } + + return value; +} + +// Increase the default stack trace limit from 16 to 64 to help diagnose +// crashes with deep stack traces. +Error.stackTraceLimit = 64; + +export async function main() { + await initializeDependencies(); + + // Is this a worker process for multi-threaded analysis? + if (process.argv[2] === 'worker') { + const workerNumber = parseInt(process.argv[3]); + runWorkerMessageLoop(workerNumber, process.argv[4]); + return; + } + + const exitCode = await processArgs(); + process.exitCode = exitCode; + // Don't call process.exit; stdout may not have been flushed which can break readers. 
+ // https://github.com/nodejs/node/issues/6379 + // https://github.com/nodejs/node/issues/6456 + // https://github.com/nodejs/node/issues/19218 +} diff --git a/python-parser/packages/pyright-internal/src/pyrightFileSystem.ts b/python-parser/packages/pyright-internal/src/pyrightFileSystem.ts new file mode 100644 index 00000000..25c9e542 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/pyrightFileSystem.ts @@ -0,0 +1,49 @@ +/* + * pyrightFileSystem.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * A file system that knows how to deal with remapping files from one folder to another. + */ + +import type * as fs from 'fs'; + +import { FileSystem, MkDirOptions } from './common/fileSystem'; +import { Uri } from './common/uri/uri'; +import { ReadOnlyAugmentedFileSystem } from './readonlyAugmentedFileSystem'; + +export interface IPyrightFileSystem extends FileSystem {} + +export class PyrightFileSystem extends ReadOnlyAugmentedFileSystem implements IPyrightFileSystem { + constructor(realFS: FileSystem) { + super(realFS); + } + + override mkdirSync(uri: Uri, options?: MkDirOptions): void { + this.realFS.mkdirSync(uri, options); + } + + override chdir(uri: Uri): void { + this.realFS.chdir(uri); + } + + override writeFileSync(uri: Uri, data: string | Buffer, encoding: BufferEncoding | null): void { + this.realFS.writeFileSync(this.getOriginalUri(uri), data, encoding); + } + + override rmdirSync(uri: Uri): void { + this.realFS.rmdirSync(this.getOriginalUri(uri)); + } + + override unlinkSync(uri: Uri): void { + this.realFS.unlinkSync(this.getOriginalUri(uri)); + } + + override createWriteStream(uri: Uri): fs.WriteStream { + return this.realFS.createWriteStream(this.getOriginalUri(uri)); + } + + override copyFileSync(src: Uri, dst: Uri): void { + this.realFS.copyFileSync(this.getOriginalUri(src), this.getOriginalUri(dst)); + } +} diff --git a/python-parser/packages/pyright-internal/src/pythonParser/index.ts 
b/python-parser/packages/pyright-internal/src/pythonParser/index.ts new file mode 100644 index 00000000..173bcba0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/pythonParser/index.ts @@ -0,0 +1,979 @@ +#!/usr/bin/env node +// 入口文件 - 使用 pyright-internal 解析 Python 仓库 +import * as path from 'path'; +import * as fs from 'fs'; + +// 导出 parseRepository 函数 +export { parseRepository }; + +// 导入 UniAST 类型定义 +import { Repository, Module, Package, Function, Type, Var, Node, Dependency, Identity, Relation } from './types/uniast'; + +// 导入 pyright-internal 核心模块 (内部相对导入) +import { AnalyzerService } from '../analyzer/service'; +import { Program } from '../analyzer/program'; +import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { ConfigOptions } from '../common/configOptions'; +import { Uri } from '../common/uri/uri'; +import { createServiceProvider } from '../common/serviceProviderExtensions'; +import { RealFileSystem, createFromRealFileSystem } from '../common/realFileSystem'; +import { RealTempFile } from '../common/realFileSystem'; +import { StandardConsole, LogLevel } from '../common/console'; +import { FullAccessHost } from '../common/fullAccessHost'; +import { ParseTreeWalker } from '../analyzer/parseTreeWalker'; +import { + ParseNode, + NameNode, + CallNode, + MemberAccessNode, + ParseNodeType, +} from '../parser/parseNodes'; +import { DeclarationType } from '../analyzer/declaration'; +import { ClassType, TypeCategory } from '../analyzer/types'; + +// ================================================================ +// 辅助函数:使用 pyright API 提取符号信息 +// ================================================================ + +/** + * 检查声明是否为类型别名 + * 基于 pyright 的 isExplicitTypeAliasDeclaration 逻辑 + * 使用 pyright API: decl.typeAnnotationNode + */ +function isTypeAliasDecl(decl: any): boolean { + // 1. 必须是 Variable 类型 + if (decl.type !== DeclarationType.Variable) { + return false; + } + + // 2. 
必须有类型注解节点 + if (!decl.typeAnnotationNode) { + return false; + } + + try { + // 3. 检查 typeAnnotationNode 的值 + const annotationNode = decl.typeAnnotationNode; + const annotationData = annotationNode.d || annotationNode; + + // 方法 1: 检查名称是否为 TypeAlias + if (annotationData.value === 'TypeAlias') { + return true; + } + + // 方法 2: 检查成员访问 (typing.TypeAlias) + if (annotationNode.nodeType === 34) { // MemberAccess + const member = annotationData.member; + if (member && member.d && member.d.value === 'TypeAlias') { + return true; + } + } + } catch (e) { + // 忽略错误 + } + + return false; +} + +/** + * 提取类成员(方法、属性) + * 使用 pyright API: evaluator.getTypeOfClass() + ClassType.getSymbolTable() + */ +function extractClassMembers( + classDecl: any, + evaluator: TypeEvaluator +): { methods: string[]; vars: string[] } { + const members = { methods: [] as string[], vars: [] as string[] }; + + if (!classDecl.node) { + return members; + } + + try { + // 使用 pyright API 获取类类型 + const classType = evaluator.getTypeOfClass(classDecl.node); + + if (classType && classType.classType) { + // 使用 pyright API 获取成员符号表 + const symbolTable = ClassType.getSymbolTable(classType.classType); + + if (symbolTable) { + const iter = symbolTable.entries(); + while (true) { + const result = iter.next(); + if (result.done) break; + + const memberName = result.value[0]; + const memberSymbol = result.value[1]; + + // 跳过内置属性 + if (memberName.startsWith('__') && memberName.endsWith('__')) { + continue; + } + + const memberDecls = memberSymbol.getDeclarations(); + if (memberDecls.length > 0) { + const memberDecl = memberDecls[0]; + + if (memberDecl.type === DeclarationType.Function) { + members.methods.push(memberName); + } else if (memberDecl.type === DeclarationType.Variable) { + members.vars.push(memberName); + } + } + } + } + } + } catch (e) { + // 忽略错误 + } + + return members; +} + +// 扫描 Python 文件 - 支持目录和单个文件 +function scanPythonFiles(targetPath: string): string[] { + const files: string[] = []; + + try { + const 
stats = fs.statSync(targetPath); + + if (stats.isFile()) { + // 单个文件 + if (targetPath.endsWith('.py')) { + files.push(targetPath); + } + } else if (stats.isDirectory()) { + // 目录,递归扫描 + function walk(dir: string) { + try { + for (const entry of fs.readdirSync(dir, { withFileTypes: true })) { + if (entry.name.startsWith('.') || entry.name === '__pycache__') { + continue; + } + const fullPath = path.join(dir, entry.name); + if (entry.isDirectory()) { + walk(fullPath); + } else if (entry.isFile() && entry.name.endsWith('.py')) { + files.push(fullPath); + } + } + } catch (e) { + // 忽略权限错误 + } + } + walk(targetPath); + } + } catch (e) { + // 忽略错误 + } + + return files; +} + +// 收集符号引用的 Walker - 支持函数调用、变量引用、类型注解 +class SymbolReferenceWalker extends ParseTreeWalker { + private references: Array<{ + name: string; + file: string; + Line: number; + nodeLine: number; // 引用出现的位置 + kind: 'call' | 'variable' | 'type'; + }> = []; + private resolvedCache = new Map(); + + // 记录每个 NameNode 的定义位置,用于排除定义点 + private definitionLocations = new Set(); + + // 记录函数定义的 (startOffset, endOffset) 范围,用于过滤依赖 + private functionScopes = new Map(); + + constructor( + private parseRoot: ParseNode, + private evaluator: TypeEvaluator, + private filePath: string, + ) { + super(); + } + + // 预扫描定义点 + scanDefinitions() { + // 扫描函数定义 + const scanNode = (node: ParseNode) => { + if (!node) return; + + // 函数定义 + if (node.nodeType === ParseNodeType.Function) { + const funcNode = node as any; + if (funcNode.d.name) { + const nameNode = funcNode.d.name; + this.definitionLocations.add(`${nameNode.start}:${nameNode.d.value}`); + // 记录函数范围: key = "函数名:起始行" + const funcName = nameNode.d.value; + const startLine = nameNode.start; + this.functionScopes.set(`${funcName}:${startLine}`, { + start: funcNode.start, + end: funcNode.start + funcNode.length, + }); + } + } + // 类定义 + if (node.nodeType === ParseNodeType.Class) { + const classNode = node as any; + if (classNode.d.name) { + const nameNode = classNode.d.name; + 
this.definitionLocations.add(`${nameNode.start}:${nameNode.d.value}`); + } + } + // 变量赋值 + if (node.nodeType === ParseNodeType.Assignment) { + const assignNode = node as any; + if (assignNode.d.leftExpr?.nodeType === ParseNodeType.Name) { + const nameNode = assignNode.d.leftExpr; + this.definitionLocations.add(`${nameNode.start}:${nameNode.d.value}`); + } + } + // 类型注解变量 x: int = 1 + if (node.nodeType === ParseNodeType.TypeAnnotation) { + const typeAnnNode = node as any; + if (typeAnnNode.d.valueExpr?.nodeType === ParseNodeType.Name) { + const nameNode = typeAnnNode.d.valueExpr; + this.definitionLocations.add(`${nameNode.start}:${nameNode.d.value}`); + } + } + + // 递归遍历子节点 + for (const key of Object.keys(node.d)) { + const child = (node.d as any)[key]; + if (child) { + if (Array.isArray(child)) { + child.forEach(c => { + if (c && typeof c === 'object' && 'nodeType' in c) { + scanNode(c as ParseNode); + } + }); + } else if (typeof child === 'object' && 'nodeType' in child) { + scanNode(child as ParseNode); + } + } + } + }; + + scanNode(this.parseRoot); + } + + // 获取指定函数的范围 + getFunctionScope(funcName: string, startLine: number): { start: number; end: number } | undefined { + return this.functionScopes.get(`${funcName}:${startLine}`); + } + + // 处理函数/方法调用 + override visitCall(node: CallNode): boolean { + let nameNode: NameNode | undefined; + + if (node.d.leftExpr.nodeType === ParseNodeType.Name) { + nameNode = node.d.leftExpr; + } else if (node.d.leftExpr.nodeType === ParseNodeType.MemberAccess) { + nameNode = node.d.leftExpr.d.member; + } + + if (nameNode) { + this._resolveNameNode(nameNode, 'call', node.start); + } + return true; + } + + // 处理变量引用 - 排除定义点 + override visitName(node: NameNode): boolean { + const parent = node.parent; + if (!parent) return true; + + // 排除函数/类定义本身 + if (parent.nodeType === ParseNodeType.Function || + parent.nodeType === ParseNodeType.Class || + parent.nodeType === ParseNodeType.Decorator) { + return true; + } + + // 排除 import 语句中的名称 + 
if (parent.nodeType === ParseNodeType.ImportAs || + parent.nodeType === ParseNodeType.ImportFromAs || + parent.nodeType === ParseNodeType.ModuleName) { + return true; + } + + // 排除参数 (Parameter) + if (parent.nodeType === ParseNodeType.Parameter) { + return true; + } + + // 排除属性访问 (obj.attr 中的 attr) + if (parent.nodeType === ParseNodeType.MemberAccess) { + return true; + } + + // 排除已记录的赋值左侧 + const key = `${node.start}:${node.d.value}`; + if (this.definitionLocations.has(key)) { + return true; + } + + this._resolveNameNode(node, 'variable', node.start); + return true; + } + + // 处理类型注解 + override visitTypeAnnotation(node: any): boolean { + const annotation = node.d.annotation; + if (!annotation) return true; + + if (annotation.nodeType === ParseNodeType.Name) { + this._resolveNameNode(annotation, 'type', annotation.start); + } else if (annotation.nodeType === ParseNodeType.MemberAccess) { + // 处理 typing.List[int] 等 + if (annotation.d.member) { + this._resolveNameNode(annotation.d.member, 'type', annotation.d.member.start); + } + } + return true; + } + + // 处理成员访问 (obj.attr) + override visitMemberAccess(node: MemberAccessNode): boolean { + const member = node.d.member; + const leftExpr = node.d.leftExpr; + + // 获取成员的类型 + try { + const leftType = this.evaluator.getType(leftExpr); + if (leftType) { + const { doForEachSubtype } = require('../analyzer/typeUtils') as any; + const { isClassInstance } = require('../analyzer/typeGuards') as any; + const { lookUpObjectMember } = require('../analyzer/typeUtils') as any; + + doForEachSubtype(leftType, (subtype: any) => { + if (subtype && isClassInstance(subtype)) { + const memberInfo = lookUpObjectMember(subtype, member.d.value); + if (memberInfo) { + const decls = memberInfo.symbol.getDeclarations(); + if (decls.length > 0) { + const decl = this.evaluator.resolveAliasDeclaration(decls[0], true); + if (decl) { + this.references.push({ + name: member.d.value, + file: decl.uri.getFilePath(), + Line: decl.range.start.line, + 
nodeLine: member.start, + kind: 'variable', + }); + } + } + } + } + }); + } + } catch (e) { + // 忽略类型解析错误 + } + + return true; + } + + private _resolveNameNode(nameNode: NameNode, kind: 'call' | 'variable' | 'type', nodeLine: number): void { + const nameValue = nameNode.d.value; + if (!nameValue || nameValue === '_') return; + + const cacheKey = `${nameNode.start}:${nameValue}`; + if (this.resolvedCache.has(cacheKey)) { + const cached = this.resolvedCache.get(cacheKey); + if (cached) { + this.references.push({ + name: nameValue, + file: cached.file, + Line: cached.Line, + nodeLine, + kind, + }); + } + return; + } + + try { + const declInfo = this.evaluator.getDeclInfoForNameNode(nameNode); + if (declInfo?.decls && declInfo.decls.length > 0) { + for (const decl of declInfo.decls) { + const resolvedDecl = this.evaluator.resolveAliasDeclaration(decl, true); + if (resolvedDecl && + (resolvedDecl.type === DeclarationType.Function || + resolvedDecl.type === DeclarationType.Class || + resolvedDecl.type === DeclarationType.Variable)) { + const filePath = resolvedDecl.uri.getFilePath(); + const Line = resolvedDecl.range.start.line; + + this.resolvedCache.set(cacheKey, { file: filePath, Line }); + this.references.push({ + name: nameValue, + file: filePath, + Line, + nodeLine, + kind, + }); + break; + } + } + } else { + this.resolvedCache.set(cacheKey, null); + } + } catch (e) { + this.resolvedCache.set(cacheKey, null); + } + } + + collect() { + this.walk(this.parseRoot); + return this.references; + } +} + +// 解析仓库 +async function parseRepository(repoPath: string, verbose: boolean = false) { + // 1. 初始化 ServiceProvider (按照 pyright 的初始化方式) + // 始终使用 NullConsole 保证有 level 属性 + const output = verbose ? 
new StandardConsole(LogLevel.Log) : { + log: () => {}, + error: () => {}, + warn: () => {}, + info: () => {}, + level: LogLevel.Error, // 关键:需要 level 属性 + } as any; + const tempFile = new RealTempFile(); + const fileSystem = createFromRealFileSystem(tempFile, output); + const serviceProvider = createServiceProvider(fileSystem, output, tempFile); + + // 2. 创建配置 + const repoUri = Uri.file(repoPath, serviceProvider); + const config = new ConfigOptions(repoUri); + + // 3. 创建 AnalyzerService + const service = new AnalyzerService('python-parser', serviceProvider, { + console: output, + hostFactory: () => new FullAccessHost(serviceProvider), + libraryReanalysisTimeProvider: () => 2 * 1000, + configOptions: config, + shouldRunAnalysis: () => true, + } as any); + + // 4. 扫描 Python 文件 + const pythonFiles = scanPythonFiles(repoPath); + + if (verbose) console.error('Python files found:', pythonFiles.length); + + if (pythonFiles.length === 0) { + console.error('No Python files found in:', repoPath); + return; + } + + // 5. 添加到 Service + const fileUris = pythonFiles.map(f => Uri.file(f, serviceProvider)); + + for (const uri of fileUris) { + service.setFileOpened(uri, null, '', { type: 0 } as any); + } + + // 6. 执行分析 + const program = service.test_program; + await program.analyze(); + + // 7. 
收集结果 + const modName = path.basename(repoPath); + const result: Repository = { + id: repoPath, + ASTVersion: 'v0.1.5', + ToolVersion: 'v0.1.0', + Path: repoPath, + RepoVersion: { + CommitHash: 'mock123', + ParseTime: new Date().toISOString(), + }, + Modules: {}, + Graph: {}, + } as any; + + const modules = result.Modules; + const graph = result.Graph; + + // 预先收集 evaluator + const evaluator = program.evaluator; + + // 第一遍: 收集所有文件的引用 (只遍历一次 AST) + const fileDependencyMap = new Map>(); + + for (const fileUri of fileUris) { + const parseResults = program.getParseResults(fileUri); + if (!parseResults || !evaluator) continue; + + const collector = new SymbolReferenceWalker( + parseResults.parserOutput.parseTree, + evaluator, + fileUri.getFilePath(), + ); + collector.scanDefinitions(); // 预扫描定义点 + const refs = collector.collect(); + fileDependencyMap.set(fileUri.getFilePath(), refs); + } + + // 预加载源码,避免重复读取 + const sourceFileContents = new Map(); + for (const fileUri of fileUris) { + try { + sourceFileContents.set(fileUri.getFilePath(), fs.readFileSync(fileUri.getFilePath(), 'utf-8')); + } catch (e) { + sourceFileContents.set(fileUri.getFilePath(), ''); + } + } + + // 第二遍: 遍历 symbolTable 收集符号 + for (const fileUri of fileUris) { + const sourceFileInfo = program.getSourceFileInfo(fileUri); + if (!sourceFileInfo) continue; + + const boundSourceFile = sourceFileInfo.sourceFile; + const symbolTable = boundSourceFile.getModuleSymbolTable(); + if (!symbolTable) continue; + + const relativePath = fileUri.getFilePath().replace(repoPath + '/', ''); + const packageName = path.basename(fileUri.getFilePath(), '.py'); + const relPkgPath = path.dirname(relativePath); + // 绝对 pkgPath = modName/relPkgPath,例如 myproject/src/utils + const absPkgPath = relPkgPath === '.' ? 
modName : `${modName}/${relPkgPath}`; + const pkgPathKey = absPkgPath; + + // 获取文件依赖 + const fileDeps = fileDependencyMap.get(fileUri.getFilePath()) || []; + + // 确保 Module 存在 + if (!modules[modName]) { + modules[modName] = { + Language: 'python', + Version: '0.1.0', + Name: modName, + Dir: '.', + Packages: {}, + Dependencies: {}, + Files: {}, + LoadErrors: [], + } as any; + } + + const module = modules[modName]; + + // 确保 Package 存在 + if (!module.Packages[pkgPathKey]) { + module.Packages[pkgPathKey] = { + IsMain: packageName === '__main__', + IsTest: packageName.startsWith('test_') || packageName.endsWith('_test'), + PkgPath: absPkgPath, + Functions: {}, + Types: {}, + Vars: {}, + }; + } + + const pkg = module.Packages[pkgPathKey]; + + // 添加文件信息 + if (!module.Files) module.Files = {}; + module.Files[relativePath] = { + Path: relativePath, + Package: absPkgPath, + Imports: [], + }; + + // 遍历所有 symbols + for (const [name, symbol] of symbolTable) { + const declarations = symbol.getDeclarations(); + + for (const decl of declarations) { + if (decl.type === DeclarationType.Function) { + const funcNode = decl.node; + const startLine = decl.range.start.line; + const startOffset = funcNode?.start ?? 0; + const endOffset = funcNode ? 
funcNode.start + funcNode.length : 0; + + // 获取函数源代码 (使用预加载的源码) + let content = ''; + try { + if (funcNode) { + const fileContent = sourceFileContents.get(fileUri.getFilePath()) || ''; + content = fileContent.substring(startOffset, endOffset); + } + } catch (e) { + // 忽略错误 + } + + // 获取函数签名 + let signature = ''; + if (evaluator && funcNode) { + try { + const funcType = evaluator.getTypeOfFunction(funcNode); + if (funcType) { + signature = evaluator.printType(funcType.functionType); + } + } catch (e) { + // 忽略错误 + } + } + + // 收集该函数的依赖 (从预收集过滤的依赖中) + // 使用 nodeLine (字符偏移) 是否落在函数体内 [startOffset, endOffset) 来过滤 + const funcDeps = fileDeps.filter(d => + d.nodeLine >= startOffset && d.nodeLine < endOffset + ); + + pkg.Functions![name] = { + Exported: true, + IsMethod: false, + IsInterfaceMethod: false, + ModPath: modName, + PkgPath: pkg.PkgPath, + Name: name, + File: relativePath, + Line: startLine + 1, + Content: content, + Signature: signature, + } as any; + + // 添加到 Graph (收集依赖信息) + const funcKey = `${modName}?${pkg.PkgPath}#${name}`; + const funcDependencies = funcDeps.map(fc => { + // 从 fc.file 推导 PkgPath + const depRelativePath = fc.file.replace(repoPath + '/', ''); + const depRelPkgPath = path.dirname(depRelativePath); + const depAbsPkgPath = depRelPkgPath === '.' ? 
modName : `${modName}/${depRelPkgPath}`; + + return { + Kind: 'Dependency', + ModPath: modName, + PkgPath: depAbsPkgPath, + Name: fc.name, + Line: fc.Line, + }; + }); + + graph[funcKey] = { + ModPath: modName, + PkgPath: pkg.PkgPath, + Name: name, + Type: 'FUNC', + References: [], + Dependencies: funcDependencies, + } as any; + + } else if (decl.type === DeclarationType.Class) { + const classNode = decl.node; + + // 获取类源代码 (使用预加载的源码) + let content = ''; + try { + if (classNode) { + const fileContent = sourceFileContents.get(fileUri.getFilePath()) || ''; + const startOffset = classNode.start; + const endOffset = classNode.start + classNode.length; + content = fileContent.substring(startOffset, endOffset); + } + } catch (e) { + // 忽略错误 + } + + // 使用 pyright API 提取类成员 + const classMembers = evaluator ? extractClassMembers(decl, evaluator) : { methods: [], vars: [] }; + + // 提取继承关系依赖 + const classDeps: Relation[] = []; + if (evaluator && classNode) { + try { + const classType = evaluator.getTypeOfClass(classNode); + if (classType && classType.classType && classType.classType.shared.baseClasses) { + // 遍历所有父类 + classType.classType.shared.baseClasses.forEach(baseClass => { + // 解析父类的声明位置 + if (baseClass && baseClass.category === TypeCategory.Class) { + const baseClassType = baseClass as ClassType; + const baseDecl = baseClassType.shared.declaration; + if (baseDecl) { + const baseFilePath = baseDecl.uri.getFilePath(); + const baseLine = baseDecl.range.start.line; + const baseName = baseClassType.shared.name; + + // 添加到依赖 + const baseRelativePath = baseFilePath.replace(repoPath + '/', ''); + const baseRelPkgPath = path.dirname(baseRelativePath); + const baseAbsPkgPath = baseRelPkgPath === '.' ? 
modName : `${modName}/${baseRelPkgPath}`; + + classDeps.push({ + Kind: 'Inherit', + ModPath: modName, + PkgPath: baseAbsPkgPath, + Name: baseName, + Line: baseLine, + }); + } + } + }); + } + } catch (e) { + // 忽略类型解析错误 + } + } + + pkg.Types![name] = { + Exported: true, + TypeKind: 'class', + ModPath: modName, + PkgPath: pkg.PkgPath, + Name: name, + File: relativePath, + Line: decl.range.start.line + 1, + Content: content, + // 添加类成员信息 + Methods: classMembers.methods, + Vars: classMembers.vars, + } as any; + + // 添加到 Graph + const classKey = `${modName}?${pkg.PkgPath}#${name}`; + graph[classKey] = { + ModPath: modName, + PkgPath: pkg.PkgPath, + Name: name, + Type: 'TYPE', + References: [], + Dependencies: classDeps, + } as any; + + } else if (decl.type === DeclarationType.Variable) { + // 使用 pyright API 区分类型别名和普通变量 + const isTypeAlias = isTypeAliasDecl(decl); + + if (isTypeAlias) { + // 类型别名添加到 Types + const varNode = decl.node; + let content = ''; + try { + if (varNode) { + const fileContent = sourceFileContents.get(fileUri.getFilePath()) || ''; + const startOffset = varNode.start; + const endOffset = varNode.start + varNode.length; + content = fileContent.substring(startOffset, endOffset); + } + } catch (e) { + // 忽略错误 + } + + pkg.Types![name] = { + Exported: true, + TypeKind: 'typedef', + ModPath: modName, + PkgPath: pkg.PkgPath, + Name: name, + File: relativePath, + Line: decl.range.start.line + 1, + Content: content, + } as any; + + // 添加到 Graph + const typeKey = `${modName}?${pkg.PkgPath}#${name}`; + graph[typeKey] = { + ModPath: modName, + PkgPath: pkg.PkgPath, + Name: name, + Type: 'TYPE', + References: [], + Dependencies: [], + } as any; + } else { + // 普通变量添加到 Vars + const varNode = decl.node; + + // 获取变量源代码 (使用预加载的源码) + let content = ''; + try { + if (varNode) { + const fileContent = sourceFileContents.get(fileUri.getFilePath()) || ''; + const startOffset = varNode.start; + const endOffset = varNode.start + varNode.length; + content = 
fileContent.substring(startOffset, endOffset); + } + } catch (e) { + // 忽略错误 + } + + // 注: 变量依赖通过反向构建 References 时填充 + + pkg.Vars![name] = { + IsExported: true, + IsConst: decl.isConstant || decl.isFinal, + IsPointer: false, + ModPath: modName, + PkgPath: pkg.PkgPath, + Name: name, + File: relativePath, + Line: decl.range.start.line + 1, + Content: content, + } as any; + + // 添加到 Graph + const varKey = `${modName}?${pkg.PkgPath}#${name}`; + graph[varKey] = { + ModPath: modName, + PkgPath: pkg.PkgPath, + Name: name, + Type: 'VAR', + References: [], + Dependencies: [], + } as any; + } + } + } + } + } + + // 第三遍: 从 Dependencies 反向构建 References (Incoming) + // 同时过滤 Dependencies,只保留存在于 Graph 中的节点(精确匹配) + const allPkgPaths = new Set(['']); + for (const pkgKey of Object.keys(modules[modName]?.Packages || {})) { + allPkgPaths.add(pkgKey); + } + + for (const [key, node] of Object.entries(graph)) { + if (!node.Dependencies || node.Dependencies.length === 0) continue; + + const validDeps: typeof node.Dependencies = []; + + for (const dep of node.Dependencies) { + // 精确匹配:使用 dep 中的 PkgPath 直接查找 + const depAbsPkgPath = dep.PkgPath || modName; + const exactKey = `${dep.ModPath || modName}?${depAbsPkgPath}#${dep.Name}`; + + // 首先尝试精确匹配 + let depNode = graph[exactKey]; + + // 如果精确匹配找不到,才尝试其他 PkgPath 组合 + if (!depNode) { + const possibleKeys = [ + `${dep.ModPath || modName}?${modName}#${dep.Name}`, // 根目录 + ]; + for (const pkgPath of allPkgPaths) { + possibleKeys.push(`${modName}?${pkgPath}#${dep.Name}`); + } + for (const depKey of possibleKeys) { + if (graph[depKey]) { + depNode = graph[depKey]; + break; + } + } + } + + // 只有当目标节点存在于 Graph 中时,才保留这个依赖 + if (depNode) { + validDeps.push(dep); + + // 同时构建 References(反向) + if (depNode !== node) { // 避免自引用 + if (!depNode.References) depNode.References = []; + depNode.References.push({ + Kind: 'Dependency', + ModPath: node.ModPath, + PkgPath: node.PkgPath, + Name: node.Name, + Line: dep.Line, + }); + } + } + } + + // 更新 
Dependencies,过滤掉不存在的节点 + node.Dependencies = validDeps; + } + + return result; +} + +// 主函数 +async function main() { + const args = process.argv.slice(2); + + // 检测 verbose 模式 + const verbose = args.includes('-v') || args.includes('--verbose'); + const filteredArgs = args.filter(a => a !== '-v' && a !== '--verbose'); + + let repoPath: string; + if (filteredArgs[0] === 'parse') { + repoPath = filteredArgs[1]; + } else { + repoPath = filteredArgs[0]; + } + + if (!repoPath) { + repoPath = path.join(__dirname, '../../e2e/mock-python'); + } + + // 转换为绝对路径 + let absoluteRepoPath = repoPath; + if (!path.isAbsolute(repoPath)) { + absoluteRepoPath = path.resolve(process.cwd(), repoPath); + } + + // 如果是文件,获取其所在目录 + let targetPath = absoluteRepoPath; + try { + const stats = fs.statSync(absoluteRepoPath); + if (stats.isFile() && absoluteRepoPath.endsWith('.py')) { + targetPath = path.dirname(absoluteRepoPath); + if (verbose) console.error('File detected, using directory:', targetPath); + } + } catch (e) { + // 忽略错误 + } + + if (verbose) console.error('Resolved path:', targetPath); + + try { + const result = await parseRepository(absoluteRepoPath, verbose); + + if (!result) { + console.error('Result is undefined!'); + return; + } + + if (verbose) console.error('Result modules:', Object.keys(result.Modules)); + + // 写入文件: ~/.asts/-path-to-repo.json + const homeDir = process.env.HOME || process.env.USERPROFILE || '/tmp'; + const astsDir = path.join(homeDir, '.asts'); + + if (!fs.existsSync(astsDir)) { + fs.mkdirSync(astsDir, { recursive: true }); + } + + const fileName = absoluteRepoPath.split('/').join('-').replace(/^-/, '') + '.json'; + const outputPath = path.join(astsDir, fileName); + const tempPath = outputPath + '.tmp'; + + fs.writeFileSync(tempPath, JSON.stringify(result, null, 2)); + fs.renameSync(tempPath, outputPath); + + console.log(outputPath); + + } catch (error) { + console.error('Error:', error); + } +} + +main(); diff --git 
a/python-parser/packages/pyright-internal/src/pythonParser/types/uniast.ts b/python-parser/packages/pyright-internal/src/pythonParser/types/uniast.ts new file mode 100644 index 00000000..e4927158 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/pythonParser/types/uniast.ts @@ -0,0 +1,224 @@ +// UNIAST v0.1.3 TypeScript interfaces +// Based on the specification in spec.md + +// ================================================================ +// Core Structures +// ================================================================ + +/** + * Root object representing an entire code repository. + */ +export interface Repository { + /** Unique identifier for the repository. Field name in JSON is "id". */ + id: string; + /** UNIAST specification version. Fixed to "v0.1.3". */ + ASTVersion: string; + /** abcoder version used to parse the repository. Field name in JSON is "ToolVersion". */ + ToolVersion: string; + /** File directory of the repository, usually should be an absolute path. */ + Path: string; + /** Map of all modules in the repository. Keys are unique path identifiers. */ + Modules: Record; + /** + * Global symbol graph. Keys are fully-qualified unique symbol strings, values are the corresponding Node objects. + */ + Graph: Record; +} + +/** + * A compilation unit, e.g., a Go module or an npm package. + */ +export interface Module { + Language: 'go' | 'rust' | 'cxx' | 'python' | 'typescript' | ''; + Version: string; + Name: string; + /** Path relative to the repository root. Empty string "" denotes an external dependency module. */ + Dir: string; + /** Map of all packages in the module. Keys are package import paths. */ + Packages: Record; + /** (Optional) Map of module dependencies. */ + Dependencies?: Record; + /** (Optional) Map of metadata for all files in the module. */ + Files?: Record; +} + +/** + * A namespace containing a group of code symbols. 
+ */ +export interface Package { + IsMain: boolean; + IsTest: boolean; + /** Unique import path for this package. Located at the top level of the Package object. */ + PkgPath: string; + /** Map of all functions and methods. Keys are symbol names. */ + Functions: Record; + /** Map of all type definitions. Keys are type names. */ + Types: Record; + /** Map of all global variables and constants. Keys are variable names. */ + Vars: Record; +} + +// ================================================================ +// Core Definitions +// ================================================================ + +/** + * Globally unique identifier for any code symbol. + */ +export interface Identity { + ModPath: string; + PkgPath: string; + Name: string; +} + +/** + * Exact location of a symbol definition or reference in a source file. + */ +export interface FileLine { + File: string; + Line: number; // 1-based line number. + StartOffset: number; + EndOffset: number; +} + +/** + * Reference to another code symbol. Contains the target symbol's Identity and the location of the reference. + * Note: the FileLine part is optional. + */ +export type Dependency = Identity & Partial; + +// ================================================================ +// Concrete Symbol Structures +// ================================================================ + +export interface Function { + // --- Identity & FileLine (inline fields) --- + ModPath: string; + PkgPath: string; + Name: string; + File: string; + Line: number; + StartOffset: number; + EndOffset: number; + + // --- Function-specific Fields --- + Exported: boolean; + IsMethod: boolean; + IsInterfaceMethod: boolean; + /** Complete source code of the function, including signature and body. 
*/ + Content: string; + Signature?: string; + Receiver?: Receiver; + Params?: Dependency[]; + Results?: Dependency[]; + FunctionCalls?: Dependency[]; + MethodCalls?: Dependency[]; + Types?: Dependency[]; + /** References to package-level variables, exported or not. */ + GlobalVars?: Dependency[]; +} + +export interface Type { + // --- Identity & FileLine (inline fields) --- + ModPath: string; + PkgPath: string; + Name: string; + File: string; + Line: number; + StartOffset: number; + EndOffset: number; + + // --- Type-specific Fields --- + Exported: boolean; + TypeKind: 'struct' | 'interface' | 'typedef' | 'enum' | 'class'; + Content: string; + SubStruct?: Dependency[]; + InlineStruct?: Dependency[]; + Methods?: Record; + Implements?: Identity[]; +} + +export interface Var { + // --- Identity & FileLine (inline fields) --- + ModPath: string; + PkgPath: string; + Name: string; + File: string; + Line: number; + StartOffset: number; + EndOffset: number; + + // --- Var-specific Fields --- + IsExported: boolean; + IsConst: boolean; + IsPointer: boolean; + Content: string; + Type?: Identity; + Dependencies?: Dependency[]; + Groups?: Identity[]; +} + +// ================================================================ +// Auxiliary & Graph Structures +// ================================================================ + +/** + * Represents a node (symbol entity) in the code. + */ +export interface Node { + // --- Identity (inline fields) --- + ModPath: string; + PkgPath: string; + Name: string; + + // --- Node-specific Fields --- + Type: 'FUNC' | 'TYPE' | 'VAR' | 'UNKNOWN'; + /** (Optional) List of other nodes this node depends on (outgoing edges). */ + Dependencies?: Relation[]; + /** (Optional) List of nodes that reference this node (incoming edges). */ + References?: Relation[]; + /** (Optional) List of interface nodes this node implements. */ + Implements?: Relation[]; + /** (Optional) List of parent nodes this node inherits from. 
*/ + Inherits?: Relation[]; + /** (Optional) List of other nodes in the same definition group. */ + Groups?: Relation[]; +} + +/** + * Describes a relationship between two nodes. + */ +export interface Relation { + // --- Identity (inline fields) --- + ModPath: string; + PkgPath: string; + Name: string; + + // --- Relation-specific Fields --- + Kind: 'Dependency' | 'Implement' | 'Inherit' | 'Group'; + /** Line number (1-based) */ + Line?: number; + /** Description of this relation */ + desc?: string; + /** Related code snippet */ + codes?: string; +} + +export interface File { + Path: string; + Imports?: Import[]; + Package?: string; +} + +/** + * Represents an import declaration. Can be a simple string or an object containing alias and path. + */ +export type Import = string | { + Alias?: string; + Path: string; +}; + +export interface Receiver { + IsPointer: boolean; + Type: Identity; +} \ No newline at end of file diff --git a/python-parser/packages/pyright-internal/src/readonlyAugmentedFileSystem.ts b/python-parser/packages/pyright-internal/src/readonlyAugmentedFileSystem.ts new file mode 100644 index 00000000..9811a7ac --- /dev/null +++ b/python-parser/packages/pyright-internal/src/readonlyAugmentedFileSystem.ts @@ -0,0 +1,270 @@ +/* + * readonlyAugmentedFileSystem.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * A file system that lets one to augment backing file system but not allow + * modifying the backing file system. 
+ */ + +import type * as fs from 'fs'; + +import { FileSystem, MkDirOptions, Stats, VirtualDirent } from './common/fileSystem'; +import { FileWatcher, FileWatcherEventHandler } from './common/fileWatcher'; +import { Uri } from './common/uri/uri'; +import { UriMap } from './common/uri/uriMap'; +import { Disposable } from 'vscode-jsonrpc'; + +interface MappedEntry { + mappedUri: Uri; + originalUri: Uri; + filter: (uri: Uri, fs: FileSystem) => boolean; +} + +export class ReadOnlyAugmentedFileSystem implements FileSystem { + // Mapped (fake location) directory to original directory map + private readonly _entryMap = new UriMap(); + + // Original directory to mapped (fake location) directory map + private readonly _reverseEntryMap = new UriMap(); + + constructor(protected realFS: FileSystem) {} + + existsSync(uri: Uri): boolean { + if (this._isOriginalPath(uri)) { + // Pretend original files don't exist anymore. They are only in their mapped location. + return false; + } + + return this.realFS.existsSync(this._getInternalOriginalUri(uri)); + } + + mkdirSync(uri: Uri, options?: MkDirOptions): void { + throw new Error('Operation is not allowed.'); + } + + chdir(uri: Uri): void { + throw new Error('Operation is not allowed.'); + } + + readdirEntriesSync(uri: Uri): fs.Dirent[] { + // Stick all entries in a map by name to make sure we don't have duplicates. + const entries = new Map(); + + // Handle the case where the directory has children that are remappings. + // Example: + // uri: /lib/site-packages + // mapping: /lib/site-packages/foo -> /lib/site-packages/foo-stubs + // We should show 'foo' as a directory in this case. + for (const [key] of this._entryMap.entries()) { + if (key.isChild(uri) && key.getRelativePathComponents(uri).length === 1) { + entries.set(key.fileName, new VirtualDirent(key.fileName, false, uri.getFilePath())); + } + } + + // Handle the case where we're looking at a mapped directory (or a child). 
+ // Example: + // uri: /lib/site-packages/foo/module + // mapping: /lib/site-packages/foo -> /lib/site-packages/foo-stubs + // We should list all of the children of /lib/site-packages/foo-stubs/module. + const mappedEntry = this._getOriginalEntry(uri); + if (mappedEntry) { + const originalUri = this._getInternalOriginalUri(uri); + const filteredEntries = this.realFS + .readdirEntriesSync(originalUri) + .filter((e) => mappedEntry.filter(originalUri.combinePaths(e.name), this.realFS)) + .map((e) => new VirtualDirent(e.name, e.isFile(), uri.getFilePath())); + for (const entry of filteredEntries) { + entries.set(entry.name, entry); + } + } + + if (this.realFS.existsSync(uri)) { + // Get our real entries, but filter out entries that are mapped to a different location. + // Example: + // uri: /lib/site-packages/foo-stubs + // mapping: /lib/site-packages/foo -> /lib/site-packages/foo-stubs + // We should list all of the children of /lib/site-packages/foo-stubs but only if they don't match the filter + const filteredEntries = this.realFS + .readdirEntriesSync(uri) + .filter((e) => !this._isOriginalPath(uri.combinePaths(e.name))); + for (const entry of filteredEntries) { + entries.set(entry.name, entry); + } + } + + return [...entries.values()]; + } + + readdirSync(uri: Uri): string[] { + return this.readdirEntriesSync(uri).map((p) => p.name); + } + + readFileSync(uri: Uri, encoding?: null): Buffer; + readFileSync(uri: Uri, encoding: BufferEncoding): string; + readFileSync(uri: Uri, encoding?: BufferEncoding | null): string | Buffer { + return this.realFS.readFileSync(this._getInternalOriginalUri(uri), encoding); + } + + writeFileSync(uri: Uri, data: string | Buffer, encoding: BufferEncoding | null): void { + throw new Error('Operation is not allowed.'); + } + + statSync(uri: Uri): Stats { + if (this._isOriginalPath(uri)) { + // Pretend original files don't exist anymore. They are only in their mapped location. 
+ throw new Error('ENOENT: path does not exist'); + } + return this.realFS.statSync(this._getInternalOriginalUri(uri)); + } + + rmdirSync(uri: Uri): void { + throw new Error('Operation is not allowed.'); + } + + unlinkSync(uri: Uri): void { + throw new Error('Operation is not allowed.'); + } + + realpathSync(uri: Uri): Uri { + if (this._entryMap.has(uri)) { + return uri; + } + + return this.realFS.realpathSync(uri); + } + + getModulePath(): Uri { + return this.realFS.getModulePath(); + } + + createFileSystemWatcher(paths: Uri[], listener: FileWatcherEventHandler): FileWatcher { + return this.realFS.createFileSystemWatcher(paths, listener); + } + + createReadStream(uri: Uri): fs.ReadStream { + return this.realFS.createReadStream(this._getInternalOriginalUri(uri)); + } + + createWriteStream(uri: Uri): fs.WriteStream { + throw new Error('Operation is not allowed.'); + } + + copyFileSync(src: Uri, dst: Uri): void { + throw new Error('Operation is not allowed.'); + } + + // Async I/O + readFile(uri: Uri): Promise { + return this.realFS.readFile(this._getInternalOriginalUri(uri)); + } + + readFileText(uri: Uri, encoding?: BufferEncoding): Promise { + return this.realFS.readFileText(this._getInternalOriginalUri(uri), encoding); + } + + realCasePath(uri: Uri): Uri { + return this.realFS.realCasePath(uri); + } + + // See whether the file is mapped to another location. + isMappedUri(fileUri: Uri): boolean { + if (this._getOriginalEntry(fileUri) !== undefined) { + return true; + } + return this.realFS.isMappedUri(fileUri); + } + + // Get original filepath if the given filepath is mapped. + getOriginalUri(mappedFileUri: Uri) { + const internalUri = this._getInternalOriginalUri(mappedFileUri); + return this.realFS.getOriginalUri(internalUri); + } + + // Get mapped filepath if the given filepath is mapped. 
+ getMappedUri(originalFileUri: Uri) { + const entry = this._getMappedEntry(originalFileUri); + if (!entry) { + return this.realFS.getMappedUri(originalFileUri); + } + const relative = entry.originalUri.getRelativePathComponents(originalFileUri); + return entry.mappedUri.combinePaths(...relative); + } + + isInZip(uri: Uri): boolean { + return this.realFS.isInZip(uri); + } + + mapDirectory(mappedUri: Uri, originalUri: Uri, filter?: (originalUri: Uri, fs: FileSystem) => boolean): Disposable { + const entry: MappedEntry = { originalUri, mappedUri, filter: filter ?? (() => true) }; + this._entryMap.set(mappedUri, entry); + this._reverseEntryMap.set(originalUri, entry); + return { + dispose: () => { + this._entryMap.delete(mappedUri); + this._reverseEntryMap.delete(originalUri); + }, + }; + } + + protected clear() { + this._entryMap.clear(); + this._reverseEntryMap.clear(); + } + + private _findClosestMatch(uri: Uri, map: UriMap): MappedEntry | undefined { + // Search through the map of directories to find the closest match. The + // closest match is the longest path that is a parent of the uri. + while (true) { + const entry = map.get(uri); + if (entry) { + return entry; + } + + const parent = uri.getDirectory(); + if (parent.equals(uri)) { + return undefined; + } + + uri = parent; + } + } + + private _getOriginalEntry(uri: Uri): MappedEntry | undefined { + return this._findClosestMatch(uri, this._entryMap); + } + + // Returns the original uri if the given uri is a mapped uri in this file system's + // internal mapping. getOriginalUri is different in that it will also ask the realFS + // if it has a mapping too. + private _getInternalOriginalUri(uri: Uri): Uri { + const entry = this._getOriginalEntry(uri); + if (!entry) { + return uri; + } + const relative = entry.mappedUri.getRelativePathComponents(uri); + const original = entry.originalUri.combinePaths(...relative); + + // Make sure this original URI passes the filter too. 
+ if (entry.filter(original, this.realFS)) { + return original; + } + + return uri; + } + + private _getMappedEntry(uri: Uri): MappedEntry | undefined { + const reverseMatch = this._findClosestMatch(uri, this._reverseEntryMap); + + // Uri in this case is an original Uri. It should also match the filter. + if (reverseMatch && reverseMatch.filter(uri, this.realFS)) { + return reverseMatch; + } + return undefined; + } + + private _isOriginalPath(uri: Uri): boolean { + // If the uri is a child of any reverse entry or equals a reversed entry, then it is an original entry. + return this._getMappedEntry(uri) !== undefined; + } +} diff --git a/python-parser/packages/pyright-internal/src/server.ts b/python-parser/packages/pyright-internal/src/server.ts new file mode 100644 index 00000000..3611999b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/server.ts @@ -0,0 +1,332 @@ +/* + * server.ts + * + * Implements pyright language server. + */ + +import { + CancellationToken, + CodeAction, + CodeActionKind, + CodeActionParams, + Command, + Connection, + ExecuteCommandParams, + WorkDoneProgressServerReporter, +} from 'vscode-languageserver'; + +import { AnalysisResults } from './analyzer/analysis'; +import { CacheManager } from './analyzer/cacheManager'; +import { ImportResolver } from './analyzer/importResolver'; +import { isPythonBinary } from './analyzer/pythonPathUtils'; +import { BackgroundAnalysis } from './backgroundAnalysis'; +import { IBackgroundAnalysis } from './backgroundAnalysisBase'; +import { CommandController } from './commands/commandController'; +import { getCancellationFolderName } from './common/cancellationUtils'; +import { ConfigOptions, SignatureDisplayType } from './common/configOptions'; +import { ConsoleWithLogLevel, LogLevel, convertLogLevel } from './common/console'; +import { isDebugMode, isDefined, isString } from './common/core'; +import { resolvePathWithEnvVariables } from './common/envVarUtils'; +import { 
FileBasedCancellationProvider } from './common/fileBasedCancellationUtils'; +import { FileSystem } from './common/fileSystem'; +import { FullAccessHost } from './common/fullAccessHost'; +import { Host } from './common/host'; +import { ServerSettings } from './common/languageServerInterface'; +import { ProgressReporter } from './common/progressReporter'; +import { RealTempFile, WorkspaceFileWatcherProvider, createFromRealFileSystem } from './common/realFileSystem'; +import { ServiceProvider } from './common/serviceProvider'; +import { createServiceProvider } from './common/serviceProviderExtensions'; +import { Uri } from './common/uri/uri'; +import { getRootUri } from './common/uri/uriUtils'; +import { LanguageServerBase } from './languageServerBase'; +import { CodeActionProvider } from './languageService/codeActionProvider'; +import { PyrightFileSystem } from './pyrightFileSystem'; +import { WellKnownWorkspaceKinds, Workspace } from './workspaceFactory'; +import { PartialStubService } from './partialStubService'; + +const maxAnalysisTimeInForeground = { openFilesTimeInMs: 50, noOpenFilesTimeInMs: 200 }; + +export class PyrightServer extends LanguageServerBase { + private _controller: CommandController; + + constructor(connection: Connection, maxWorkers: number, realFileSystem?: FileSystem) { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const version = require('../package.json').version || ''; + + const tempFile = new RealTempFile(); + const console = new ConsoleWithLogLevel(connection.console); + const fileWatcherProvider = new WorkspaceFileWatcherProvider(); + const fileSystem = realFileSystem ?? 
createFromRealFileSystem(tempFile, console, fileWatcherProvider); + const pyrightFs = new PyrightFileSystem(fileSystem); + const cacheManager = new CacheManager(maxWorkers); + const partialStubService = new PartialStubService(pyrightFs); + + const serviceProvider = createServiceProvider( + pyrightFs, + tempFile, + console, + cacheManager, + partialStubService, + new FileBasedCancellationProvider('bg') + ); + + // When executed from CLI command (pyright-langserver), __rootDirectory is + // already defined. When executed from VSCode extension, rootDirectory should + // be __dirname. + const rootDirectory: Uri = getRootUri(serviceProvider) || Uri.file(__dirname, serviceProvider); + const realPathRoot = pyrightFs.realCasePath(rootDirectory); + + super( + { + productName: 'Pyright', + rootDirectory: realPathRoot, + version, + serviceProvider, + fileWatcherHandler: fileWatcherProvider, + maxAnalysisTimeInForeground, + supportedCodeActions: [CodeActionKind.QuickFix, CodeActionKind.SourceOrganizeImports], + }, + connection + ); + + this._controller = new CommandController(this); + } + + async getSettings(workspace: Workspace): Promise { + const serverSettings: ServerSettings = { + watchForSourceChanges: true, + watchForLibraryChanges: true, + watchForConfigChanges: true, + openFilesOnly: true, + useLibraryCodeForTypes: true, + disableLanguageServices: false, + disableTaggedHints: false, + disableOrganizeImports: false, + typeCheckingMode: 'standard', + diagnosticSeverityOverrides: {}, + logLevel: LogLevel.Info, + autoImportCompletions: true, + functionSignatureDisplay: SignatureDisplayType.formatted, + }; + + try { + const workspaces = this.workspaceFactory.getNonDefaultWorkspaces(WellKnownWorkspaceKinds.Regular); + + const pythonSection = await this.getConfiguration(workspace.rootUri, 'python'); + if (pythonSection) { + const pythonPath = pythonSection.pythonPath; + if (pythonPath && isString(pythonPath) && !isPythonBinary(pythonPath)) { + serverSettings.pythonPath = 
resolvePathWithEnvVariables(workspace, pythonPath, workspaces); + } + + const venvPath = pythonSection.venvPath; + if (venvPath && isString(venvPath)) { + serverSettings.venvPath = resolvePathWithEnvVariables(workspace, venvPath, workspaces); + } + } + + const pythonAnalysisSection = await this.getConfiguration(workspace.rootUri, 'python.analysis'); + if (pythonAnalysisSection) { + const typeshedPaths = pythonAnalysisSection.typeshedPaths; + if (typeshedPaths && Array.isArray(typeshedPaths) && typeshedPaths.length > 0) { + const typeshedPath = typeshedPaths[0]; + if (typeshedPath && isString(typeshedPath)) { + serverSettings.typeshedPath = resolvePathWithEnvVariables(workspace, typeshedPath, workspaces); + } + } + + const stubPath = pythonAnalysisSection.stubPath; + if (stubPath && isString(stubPath)) { + serverSettings.stubPath = resolvePathWithEnvVariables(workspace, stubPath, workspaces); + } + + const diagnosticSeverityOverrides = pythonAnalysisSection.diagnosticSeverityOverrides; + if (diagnosticSeverityOverrides) { + for (const [name, value] of Object.entries(diagnosticSeverityOverrides)) { + const ruleName = this.getDiagnosticRuleName(name); + const severity = this.getSeverityOverrides(value as string | boolean); + if (ruleName && severity) { + serverSettings.diagnosticSeverityOverrides![ruleName] = severity!; + } + } + } + + if (pythonAnalysisSection.diagnosticMode !== undefined) { + serverSettings.openFilesOnly = this.isOpenFilesOnly(pythonAnalysisSection.diagnosticMode); + } else if (pythonAnalysisSection.openFilesOnly !== undefined) { + serverSettings.openFilesOnly = !!pythonAnalysisSection.openFilesOnly; + } + + if (pythonAnalysisSection.useLibraryCodeForTypes !== undefined) { + serverSettings.useLibraryCodeForTypes = !!pythonAnalysisSection.useLibraryCodeForTypes; + } + + serverSettings.logLevel = convertLogLevel(pythonAnalysisSection.logLevel); + serverSettings.autoSearchPaths = !!pythonAnalysisSection.autoSearchPaths; + + const extraPaths = 
pythonAnalysisSection.extraPaths; + if (extraPaths && Array.isArray(extraPaths) && extraPaths.length > 0) { + serverSettings.extraPaths = extraPaths + .filter((p) => p && isString(p)) + .map((p) => resolvePathWithEnvVariables(workspace, p, workspaces)) + .filter(isDefined); + } + + serverSettings.includeFileSpecs = this._getStringValues(pythonAnalysisSection.include); + serverSettings.excludeFileSpecs = this._getStringValues(pythonAnalysisSection.exclude); + serverSettings.ignoreFileSpecs = this._getStringValues(pythonAnalysisSection.ignore); + + if (pythonAnalysisSection.typeCheckingMode !== undefined) { + serverSettings.typeCheckingMode = pythonAnalysisSection.typeCheckingMode; + } + + if (pythonAnalysisSection.autoImportCompletions !== undefined) { + serverSettings.autoImportCompletions = pythonAnalysisSection.autoImportCompletions; + } + + if ( + serverSettings.logLevel === LogLevel.Log && + pythonAnalysisSection.logTypeEvaluationTime !== undefined + ) { + serverSettings.logTypeEvaluationTime = pythonAnalysisSection.logTypeEvaluationTime; + } + + if (pythonAnalysisSection.typeEvaluationTimeThreshold !== undefined) { + serverSettings.typeEvaluationTimeThreshold = pythonAnalysisSection.typeEvaluationTimeThreshold; + } + } else { + serverSettings.autoSearchPaths = true; + } + + const pyrightSection = await this.getConfiguration(workspace.rootUri, 'pyright'); + if (pyrightSection) { + if (pyrightSection.openFilesOnly !== undefined) { + serverSettings.openFilesOnly = !!pyrightSection.openFilesOnly; + } + + if (pyrightSection.useLibraryCodeForTypes !== undefined) { + serverSettings.useLibraryCodeForTypes = !!pyrightSection.useLibraryCodeForTypes; + } + + serverSettings.disableLanguageServices = !!pyrightSection.disableLanguageServices; + serverSettings.disableTaggedHints = !!pyrightSection.disableTaggedHints; + serverSettings.disableOrganizeImports = !!pyrightSection.disableOrganizeImports; + + const typeCheckingMode = pyrightSection.typeCheckingMode; + if 
(typeCheckingMode && isString(typeCheckingMode)) { + serverSettings.typeCheckingMode = typeCheckingMode; + } + } + } catch (error) { + this.console.error(`Error reading settings: ${error}`); + } + return serverSettings; + } + + createBackgroundAnalysis(serviceId: string, workspaceRoot: Uri): IBackgroundAnalysis | undefined { + if (isDebugMode() || !getCancellationFolderName()) { + // Don't do background analysis if we're in debug mode or an old client + // is used where cancellation is not supported. + return undefined; + } + + return new BackgroundAnalysis(workspaceRoot, this.serverOptions.serviceProvider); + } + + protected override createHost(): Host { + return new FullAccessHost(this.serverOptions.serviceProvider); + } + + protected override createImportResolver( + serviceProvider: ServiceProvider, + options: ConfigOptions, + host: Host + ): ImportResolver { + const importResolver = new ImportResolver(serviceProvider, options, host); + + // In case there was cached information in the file system related to + // import resolution, invalidate it now. 
+ importResolver.invalidateCache(); + + return importResolver; + } + + protected executeCommand(params: ExecuteCommandParams, token: CancellationToken): Promise { + return this._controller.execute(params, token); + } + + protected isLongRunningCommand(command: string): boolean { + return this._controller.isLongRunningCommand(command); + } + + protected isRefactoringCommand(command: string): boolean { + return this._controller.isRefactoringCommand(command); + } + + protected async executeCodeAction( + params: CodeActionParams, + token: CancellationToken + ): Promise<(Command | CodeAction)[] | undefined | null> { + this.recordUserInteractionTime(); + + const uri = Uri.parse(params.textDocument.uri, this.serverOptions.serviceProvider); + const workspace = await this.getWorkspaceForFile(uri); + return CodeActionProvider.getCodeActionsForPosition(workspace, uri, params.range, params.context.only, token); + } + + protected createProgressReporter(): ProgressReporter { + // The old progress notifications are kept for backwards compatibility with + // clients that do not support work done progress. 
+ let displayingProgress = false; + let workDoneProgress: Promise | undefined; + return { + isDisplayingProgress: () => displayingProgress, + isEnabled: (data: AnalysisResults) => true, + begin: () => { + displayingProgress = true; + if (this.client.hasWindowProgressCapability) { + workDoneProgress = this.connection.window.createWorkDoneProgress(); + workDoneProgress + .then((progress) => { + progress.begin(''); + }) + .catch(() => {}); + } else { + this.connection.sendNotification('pyright/beginProgress'); + } + }, + report: (message: string) => { + if (workDoneProgress) { + workDoneProgress + .then((progress) => { + progress.report(message); + }) + .catch(() => {}); + } else { + this.connection.sendNotification('pyright/reportProgress', message); + } + }, + end: () => { + displayingProgress = false; + if (workDoneProgress) { + workDoneProgress + .then((progress) => { + progress.done(); + }) + .catch(() => {}); + workDoneProgress = undefined; + } else { + this.connection.sendNotification('pyright/endProgress'); + } + }, + }; + } + + private _getStringValues(values: any) { + if (!values || !Array.isArray(values) || values.length === 0) { + return []; + } + + return values.filter((p) => p && isString(p)) as string[]; + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/cacheManager.test.ts b/python-parser/packages/pyright-internal/src/tests/cacheManager.test.ts new file mode 100644 index 00000000..01113f1b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/cacheManager.test.ts @@ -0,0 +1,123 @@ +/* + * cacheManager.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Unit tests for cache manager + */ + +import assert from 'assert'; + +import { Worker } from 'worker_threads'; +import { CacheManager, CacheOwner } from '../analyzer/cacheManager'; + +test('basic', () => { + const manager = new CacheManager(); + const mock = new MockCacheOwner(10); + + manager.registerCacheOwner(mock); + assert.strictEqual(manager.getCacheUsage(), 10); + + manager.unregisterCacheOwner(mock); + assert.strictEqual(manager.getCacheUsage(), 0); +}); + +test('nested stopTracking', () => { + const manager = new CacheManager(); + const mock = new MockCacheOwner(10); + + manager.registerCacheOwner(mock); + assert.strictEqual(manager.getCacheUsage(), 10); + + const handle1 = manager.pauseTracking(); + assert.strictEqual(manager.getCacheUsage(), -1); + + // nested + const handle2 = manager.pauseTracking(); + assert.strictEqual(manager.getCacheUsage(), -1); + + handle2.dispose(); + assert.strictEqual(manager.getCacheUsage(), -1); + + handle1.dispose(); + assert.strictEqual(manager.getCacheUsage(), 10); + + manager.unregisterCacheOwner(mock); + assert.strictEqual(manager.getCacheUsage(), 0); +}); + +test('multiple owners', () => { + const manager = new CacheManager(); + const mock1 = new MockCacheOwner(10); + const mock2 = new MockCacheOwner(20); + + manager.registerCacheOwner(mock1); + assert.strictEqual(manager.getCacheUsage(), 10); + + manager.registerCacheOwner(mock2); + assert.strictEqual(manager.getCacheUsage(), 30); + + const handle = manager.pauseTracking(); + assert.strictEqual(manager.getCacheUsage(), -1); + + manager.unregisterCacheOwner(mock1); + assert.strictEqual(manager.getCacheUsage(), -1); + + handle.dispose(); + assert.strictEqual(manager.getCacheUsage(), 20); + + manager.unregisterCacheOwner(mock2); + assert.strictEqual(manager.getCacheUsage(), 0); +}); + +test('Shared memory', async () => { + const manager = new CacheManager(/* maxWorkers */ 1); + + // Without the .js output from Jest, we need to generate a non module worker. 
Use a string + // to do so. This means the worker can't use the CacheManager, but it just needs to + // listen for the sharedArrayBuffer message. + const workerSource = ` +const { parentPort } = require('worker_threads'); +parentPort.on('message', (msg) => { +if (msg.requestType === 'cacheUsageBuffer') { + const buffer = msg.sharedUsageBuffer; + const view = new Float64Array(buffer); + view[1] = 50 * 1024 * 1024 * 1024; // Make this super huge, 50GB to make sure usage is over 100% + parentPort.postMessage('done'); + } +}); +`; + const worker = new Worker(workerSource, { eval: true }); + worker.on('error', (err) => { + throw err; + }); + manager.addWorker(1, worker); + + // Wait for the worker to post a message back to us. + await new Promise((resolve, reject) => { + worker.on('message', (msg: string) => { + if (msg === 'done') { + resolve(); + } + }); + }); + + // Get the heap usage and verify it's more than 100% + const usage = manager.getUsedHeapRatio(); + worker.terminate(); + assert(usage > 1); +}); + +class MockCacheOwner implements CacheOwner { + constructor(private _used: number) { + // empty + } + + getCacheUsage(): number { + return this._used; + } + + emptyCache(): void { + this._used = 0; + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/chainedSourceFiles.test.ts b/python-parser/packages/pyright-internal/src/tests/chainedSourceFiles.test.ts new file mode 100644 index 00000000..9d681969 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/chainedSourceFiles.test.ts @@ -0,0 +1,290 @@ +/* + * chainedSourceFiles.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Unit tests for tokenizer ipython mode + */ + +import assert from 'assert'; +import { CancellationToken } from 'vscode-jsonrpc'; +import { MarkupKind } from 'vscode-languageserver-types'; + +import { Program } from '../analyzer/program'; +import { AnalyzerService } from '../analyzer/service'; +import { IPythonMode } from '../analyzer/sourceFile'; +import { ConfigOptions } from '../common/configOptions'; +import { NullConsole } from '../common/console'; +import { normalizeSlashes } from '../common/pathUtils'; +import { convertOffsetsToRange, convertOffsetToPosition } from '../common/positionUtils'; +import { ServiceProvider } from '../common/serviceProvider'; +import { Uri } from '../common/uri/uri'; +import { UriEx } from '../common/uri/uriUtils'; +import { CompletionProvider } from '../languageService/completionProvider'; +import { parseTestData } from './harness/fourslash/fourSlashParser'; +import { TestAccessHost } from './harness/testAccessHost'; +import * as host from './harness/testHost'; +import { createFromFileSystem, distlibFolder, libFolder } from './harness/vfs/factory'; +import * as vfs from './harness/vfs/filesystem'; + +test('check chained files', () => { + const code = ` +// @filename: test1.py +//// def foo1(): pass + +// @filename: test2.py +//// def foo2(): pass + +// @filename: test3.py +//// def foo3(): pass + +// @filename: test4.py +//// [|foo/*marker*/|] + `; + + const basePath = UriEx.file(normalizeSlashes('/')); + const { data, service } = createServiceWithChainedSourceFiles(basePath, code); + + const marker = data.markerPositions.get('marker')!; + const markerUri = marker.fileUri; + + const parseResult = service.getParseResults(markerUri)!; + const result = new CompletionProvider( + service.test_program, + markerUri, + convertOffsetToPosition(marker.position, parseResult.tokenizerOutput.lines), + { + format: MarkupKind.Markdown, + lazyEdit: false, + snippet: false, + }, + CancellationToken.None + ).getCompletions(); + + 
assert(result?.items.some((i) => i.label === 'foo1')); + assert(result?.items.some((i) => i.label === 'foo2')); + assert(result?.items.some((i) => i.label === 'foo3')); +}); + +test('modify chained files', () => { + const code = ` +// @filename: test1.py +//// def foo1(): pass + +// @filename: test2.py +//// [|/*delete*/|] +//// def foo2(): pass + +// @filename: test3.py +//// def foo3(): pass + +// @filename: test4.py +//// [|foo/*marker*/|] + `; + + const basePath = UriEx.file(normalizeSlashes('/')); + const { data, service } = createServiceWithChainedSourceFiles(basePath, code); + + // Make sure files are all realized. + const marker = data.markerPositions.get('marker')!; + const markerUri = marker.fileUri; + const parseResult = service.getParseResults(markerUri)!; + + // Close file in the middle of the chain + service.setFileClosed(data.markerPositions.get('delete')!.fileUri); + + // Make sure we don't get suggestion from auto import but from chained files. + service.test_program.configOptions.autoImportCompletions = false; + + const result = new CompletionProvider( + service.test_program, + markerUri, + convertOffsetToPosition(marker.position, parseResult.tokenizerOutput.lines), + { + format: MarkupKind.Markdown, + lazyEdit: false, + snippet: false, + }, + CancellationToken.None + ).getCompletions(); + + assert(result); + + assert(!result.items.some((i) => i.label === 'foo1')); + assert(!result.items.some((i) => i.label === 'foo2')); + assert(result.items.some((i) => i.label === 'foo3')); +}); + +test('modify chained files', async () => { + const code = ` +// @filename: test1.py +//// [|/*changed*/|] +//// def foo1(): pass + +// @filename: test2.py +//// def foo2(): pass + +// @filename: test3.py +//// def foo3(): pass + +// @filename: test4.py +//// [|/*marker*/foo1()|] + `; + + const basePath = UriEx.file(normalizeSlashes('/')); + const { data, service } = createServiceWithChainedSourceFiles(basePath, code); + + const marker = 
data.markerPositions.get('marker')!; + const markerUri = marker.fileUri; + const range = data.ranges.find((r) => r.marker === marker)!; + + const parseResults = service.getParseResults(markerUri)!; + analyze(service.test_program); + + // Initially, there should be no error. + const initialDiags = await service.getDiagnosticsForRange( + markerUri, + convertOffsetsToRange(range.pos, range.end, parseResults.tokenizerOutput.lines), + CancellationToken.None + ); + + assert.strictEqual(initialDiags.length, 0); + + // Change test1 content + service.updateOpenFileContents(data.markerPositions.get('changed')!.fileUri, 2, 'def foo5(): pass'); + analyze(service.test_program); + + const finalDiags = await service.getDiagnosticsForRange( + markerUri, + convertOffsetsToRange(range.pos, range.end, parseResults.tokenizerOutput.lines), + CancellationToken.None + ); + + assert.strictEqual(finalDiags.length, 1); +}); + +function generateChainedFiles(count: number, lastFile: string) { + let code = ''; + for (let i = 0; i < count; i++) { + code += ` +// @filename: test${i + 1}.py +//// def foo${i + 1}(): pass +`; + } + code += lastFile; + return code; +} + +test('chained files with 1000s of files', async () => { + const lastFile = ` +// @filename: testFinal.py +//// [|/*marker*/foo1()|] + `; + const code = generateChainedFiles(1000, lastFile); + const basePath = UriEx.file(normalizeSlashes('/')); + const { data, service } = createServiceWithChainedSourceFiles(basePath, code); + const marker = data.markerPositions.get('marker')!; + const markerUri = marker.fileUri; + const range = data.ranges.find((r) => r.marker === marker)!; + + const parseResults = service.getParseResults(markerUri)!; + analyze(service.test_program); + + // There should be no error as it should find the foo1 in the first chained file. 
+ const initialDiags = await service.getDiagnosticsForRange( + markerUri, + convertOffsetsToRange(range.pos, range.end, parseResults.tokenizerOutput.lines), + CancellationToken.None + ); + + assert.strictEqual(initialDiags.length, 0); +}); + +test('imported by files', async () => { + const code = ` +// @filename: test1.py +//// import [|/*marker*/os|] + +// @filename: test2.py +//// os.path.join() + `; + + const basePath = UriEx.file(normalizeSlashes('/')); + const { data, service } = createServiceWithChainedSourceFiles(basePath, code); + analyze(service.test_program); + + const marker = data.markerPositions.get('marker')!; + const markerUri = marker.fileUri; + const range = data.ranges.find((r) => r.marker === marker)!; + + const parseResults = service.getParseResults(markerUri)!; + const diagnostics = await service.getDiagnosticsForRange( + markerUri, + convertOffsetsToRange(range.pos, range.end, parseResults.tokenizerOutput.lines), + CancellationToken.None + ); + + assert.strictEqual(diagnostics.length, 0); +}); + +test('re ordering cells', async () => { + const code = ` +// @filename: test1.py +//// import [|/*marker*/os|] + +// @filename: test2.py +//// /*bottom*/os.path.join() + `; + + const basePath = UriEx.file(normalizeSlashes('/')); + const { data, service } = createServiceWithChainedSourceFiles(basePath, code); + analyze(service.test_program); + + const marker = data.markerPositions.get('marker')!; + const markerUri = marker.fileUri; + const range = data.ranges.find((r) => r.marker === marker)!; + + const bottom = data.markerPositions.get('bottom')!; + const bottomUri = bottom.fileUri; + + service.updateChainedUri(bottomUri, undefined); + service.updateChainedUri(markerUri, bottomUri); + analyze(service.test_program); + + const parseResults = service.getParseResults(markerUri)!; + const diagnostics = await service.getDiagnosticsForRange( + markerUri, + convertOffsetsToRange(range.pos, range.end, parseResults.tokenizerOutput.lines), + 
CancellationToken.None + ); + + assert.strictEqual(diagnostics.length, 1); +}); + +function createServiceWithChainedSourceFiles(basePath: Uri, code: string) { + const fs = createFromFileSystem(host.HOST, /*ignoreCase*/ false, { cwd: basePath.getFilePath() }); + const service = new AnalyzerService('test service', new ServiceProvider(), { + console: new NullConsole(), + hostFactory: () => new TestAccessHost(UriEx.file(vfs.MODULE_PATH), [libFolder, distlibFolder]), + importResolverFactory: AnalyzerService.createImportResolver, + configOptions: new ConfigOptions(basePath), + fileSystem: fs, + shouldRunAnalysis: () => true, + }); + + const data = parseTestData(basePath.getFilePath(), code, ''); + + let chainedFilePath: Uri | undefined; + for (const file of data.files) { + const uri = file.fileUri; + service.setFileOpened(uri, 1, file.content, IPythonMode.CellDocs, chainedFilePath); + chainedFilePath = uri; + } + return { data, service }; +} + +function analyze(program: Program) { + while (program.analyze()) { + // Process all queued items + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/checker.test.ts b/python-parser/packages/pyright-internal/src/tests/checker.test.ts new file mode 100644 index 00000000..cdd31a03 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/checker.test.ts @@ -0,0 +1,703 @@ +/* + * checker.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for pyright type checker. These tests also + * exercise the type evaluator (which the checker relies + * heavily upon). 
+ */ + +import { ConfigOptions } from '../common/configOptions'; +import { pythonVersion3_10, pythonVersion3_8, pythonVersion3_9 } from '../common/pythonVersion'; +import { Uri } from '../common/uri/uri'; +import * as TestUtils from './testUtils'; + +test('BadToken1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['badToken1.py']); + + // We include this in the checker test rather than the tokenizer or + // parser test suite because it has cascading effects that potentially + // affect the type checker logic. + TestUtils.validateResults(analysisResults, 1); +}); + +test('Unicode1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unicode1.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('CircularBaseClass', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['circularBaseClass.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Private1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // By default, optional diagnostics are ignored. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['private1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Turn on errors. + configOptions.diagnosticRuleSet.reportPrivateUsage = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['private1.py'], configOptions); + TestUtils.validateResults(analysisResults, 4); +}); + +test('Constant1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // By default, optional diagnostics are ignored. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['constant1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Turn on errors. 
+ configOptions.diagnosticRuleSet.reportConstantRedefinition = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['constant1.py'], configOptions); + TestUtils.validateResults(analysisResults, 5); +}); + +test('AbstractClass1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['abstractClass1.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('AbstractClass2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['abstractClass2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('AbstractClass3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['abstractClass3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('AbstractClass4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['abstractClass4.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('AbstractClass5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['abstractClass5.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('AbstractClass6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['abstractClass6.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('AbstractClass7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['abstractClass7.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('AbstractClass8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['abstractClass8.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('AbstractClass9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['abstractClass9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('AbstractClass10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['abstractClass10.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('AbstractClass11', () => { + const 
analysisResults = TestUtils.typeAnalyzeSampleFiles(['abstractClass11.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Constants1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constants1.py']); + + TestUtils.validateResults(analysisResults, 20); +}); + +test('NoReturn1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['noreturn1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('NoReturn2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['noreturn2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('NoReturn3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['noreturn3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('NoReturn4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['noreturn4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('With1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['with1.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('With2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['with2.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('With3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['with3.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('With4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['with4.py'], configOptions); + TestUtils.validateResults(analysisResults1, 4); + + configOptions.defaultPythonVersion = pythonVersion3_9; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['with4.py'], configOptions); + TestUtils.validateResults(analysisResults2, 0); +}); + +test('With5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['with5.py']); + 
+ TestUtils.validateResults(analysisResults, 0); +}); + +test('With6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['with6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Mro1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['mro1.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Mro2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['mro2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Mro3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['mro3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Mro4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['mro4.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('DefaultInitializer1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // By default, the reportCallInDefaultInitializer is disabled. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['defaultInitializer1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Turn on errors. + configOptions.diagnosticRuleSet.reportCallInDefaultInitializer = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['defaultInitializer1.py'], configOptions); + TestUtils.validateResults(analysisResults, 5); +}); + +test('UnnecessaryIsInstance1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['unnecessaryIsInstance1.py'], configOptions); + TestUtils.validateResults(analysisResults, 1); + + // Turn on errors. 
+ configOptions.diagnosticRuleSet.reportUnnecessaryIsInstance = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['unnecessaryIsInstance1.py'], configOptions); + TestUtils.validateResults(analysisResults, 5); +}); + +test('UnnecessaryIsInstance2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['unnecessaryIsInstance2.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Turn on errors. + configOptions.diagnosticRuleSet.reportUnnecessaryIsInstance = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['unnecessaryIsInstance2.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('UnnecessaryIsSubclass1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['unnecessaryIsSubclass1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Turn on errors. + configOptions.diagnosticRuleSet.reportUnnecessaryIsInstance = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['unnecessaryIsSubclass1.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('UnnecessaryCast1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['unnecessaryCast1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Turn on errors. + configOptions.diagnosticRuleSet.reportUnnecessaryCast = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['unnecessaryCast1.py'], configOptions); + TestUtils.validateResults(analysisResults, 6); +}); + +test('UnnecessaryContains1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['unnecessaryContains1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Turn on errors. 
+ configOptions.diagnosticRuleSet.reportUnnecessaryContains = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['unnecessaryContains1.py'], configOptions); + TestUtils.validateResults(analysisResults, 5); +}); + +test('TypeIgnore1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIgnore1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Disable type ignore + configOptions.diagnosticRuleSet.enableTypeIgnoreComments = false; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIgnore1.py'], configOptions); + TestUtils.validateResults(analysisResults, 3); +}); + +test('TypeIgnore2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIgnore2.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Disable type ignore + configOptions.diagnosticRuleSet.enableTypeIgnoreComments = false; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIgnore2.py'], configOptions); + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypeIgnore3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIgnore3.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Disable type ignore + configOptions.diagnosticRuleSet.enableTypeIgnoreComments = false; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIgnore3.py'], configOptions); + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypeIgnore4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIgnore4.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + configOptions.diagnosticRuleSet.reportUnnecessaryTypeIgnoreComment = 'error'; + analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['typeIgnore4.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypeIgnore5', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIgnore5.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + configOptions.diagnosticRuleSet.reportUnnecessaryTypeIgnoreComment = 'warning'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIgnore5.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 1); +}); + +test('PyrightIgnore1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['pyrightIgnore1.py'], configOptions); + TestUtils.validateResults(analysisResults, 1); +}); + +test('PyrightIgnore2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['pyrightIgnore2.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); + + configOptions.diagnosticRuleSet.reportUnnecessaryTypeIgnoreComment = 'warning'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['pyrightIgnore2.py'], configOptions); + TestUtils.validateResults(analysisResults, 2, 3); +}); + +test('PyrightComment1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['pyrightComment1.py'], configOptions); + TestUtils.validateResults(analysisResults, 9); +}); + +test('DuplicateImports1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // By default, optional diagnostics are ignored. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['duplicateImports1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Turn on errors. 
+ configOptions.diagnosticRuleSet.reportDuplicateImport = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['duplicateImports1.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('ParamNames1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['paramNames1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 11); + + configOptions.diagnosticRuleSet.reportSelfClsParameterName = 'none'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['paramNames1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 0); + + configOptions.diagnosticRuleSet.reportSelfClsParameterName = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['paramNames1.py'], configOptions); + TestUtils.validateResults(analysisResults, 11, 0); +}); + +test('ParamType1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['paramType1.py']); + TestUtils.validateResults(analysisResults, 9); +}); + +test('Python2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['python2.py']); + + TestUtils.validateResults(analysisResults, 7); +}); + +test('InconsistentSpaceTab1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['inconsistentSpaceTab1.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('InconsistentSpaceTab2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['inconsistentSpaceTab2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('DuplicateDeclaration1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['duplicateDeclaration1.py']); + + TestUtils.validateResults(analysisResults, 10); +}); + +test('DuplicateDeclaration2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['duplicateDeclaration2.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Strings1', () => { + const 
configOptions = new ConfigOptions(Uri.empty()); + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['strings1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportImplicitStringConcatenation = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['strings1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 2); +}); + +test('UnusedExpression1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // By default, this is a warning. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['unusedExpression1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 14); + + // Disable it. + configOptions.diagnosticRuleSet.reportUnusedExpression = 'none'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['unusedExpression1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Enable it as an error. + configOptions.diagnosticRuleSet.reportUnusedExpression = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['unusedExpression1.py'], configOptions); + TestUtils.validateResults(analysisResults, 14); +}); + +test('UnusedImport1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Enabled it + configOptions.diagnosticRuleSet.reportUnusedImport = 'warning'; + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['unusedImport1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 2); + + // Disable it. + configOptions.diagnosticRuleSet.reportUnusedImport = 'none'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['unusedImport1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Enable it as an error. 
+ configOptions.diagnosticRuleSet.reportUnusedImport = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['unusedImport1.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('UnusedImport2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Disable it. + configOptions.diagnosticRuleSet.reportUnusedImport = 'none'; + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['unusedImport2.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Enable it as an error. + configOptions.diagnosticRuleSet.reportUnusedImport = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['unusedImport2.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('UninitializedVariable1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // By default, this is off. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['uninitializedVariable1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Enable it as an error. + configOptions.diagnosticRuleSet.reportUninitializedInstanceVariable = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['uninitializedVariable1.py'], configOptions); + TestUtils.validateResults(analysisResults, 3); +}); + +test('UninitializedVariable2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // By default, this is off. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['uninitializedVariable2.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Enable it as an error. 
+ configOptions.diagnosticRuleSet.reportUninitializedInstanceVariable = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['uninitializedVariable2.py'], configOptions); + TestUtils.validateResults(analysisResults, 3); +}); + +test('UninitializedVariable3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['uninitializedVariable3.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('DeprecatedAlias1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0, 0, 0, undefined, undefined, 0); + + configOptions.defaultPythonVersion = pythonVersion3_9; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 0, 0, 0, undefined, undefined, 0); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults3 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias1.py'], configOptions); + TestUtils.validateResults(analysisResults3, 0, 0, 0, undefined, undefined, 0); + + // Now enable the deprecateTypingAliases setting. 
+ configOptions.diagnosticRuleSet.deprecateTypingAliases = true; + + configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults4 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias1.py'], configOptions); + TestUtils.validateResults(analysisResults4, 0, 0, 0, undefined, undefined, 0); + + configOptions.defaultPythonVersion = pythonVersion3_9; + const analysisResults5 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias1.py'], configOptions); + TestUtils.validateResults(analysisResults5, 0, 0, 0, undefined, undefined, 45); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults6 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias1.py'], configOptions); + TestUtils.validateResults(analysisResults6, 0, 0, 0, undefined, undefined, 49); + + // Now change reportDeprecated to emit an error. + configOptions.diagnosticRuleSet.reportDeprecated = 'error'; + + configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults7 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias1.py'], configOptions); + TestUtils.validateResults(analysisResults7, 0, 0, 0, undefined, undefined, 0); + + configOptions.defaultPythonVersion = pythonVersion3_9; + const analysisResults8 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias1.py'], configOptions); + TestUtils.validateResults(analysisResults8, 45, 0, 0, undefined, undefined, 0); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults9 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias1.py'], configOptions); + TestUtils.validateResults(analysisResults9, 49, 0, 0, undefined, undefined, 0); +}); + +test('DeprecatedAlias2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias2.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0, 0, 0, undefined, undefined, 0); + + 
configOptions.defaultPythonVersion = pythonVersion3_9; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias2.py'], configOptions); + TestUtils.validateResults(analysisResults2, 0, 0, 0, undefined, undefined, 0); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults3 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias2.py'], configOptions); + TestUtils.validateResults(analysisResults3, 0, 0, 0, undefined, undefined, 0); + + // Now enable the deprecateTypingAliases setting. + configOptions.diagnosticRuleSet.deprecateTypingAliases = true; + + configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults4 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias2.py'], configOptions); + TestUtils.validateResults(analysisResults4, 0, 0, 0, undefined, undefined, 0); + + configOptions.defaultPythonVersion = pythonVersion3_9; + const analysisResults5 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias2.py'], configOptions); + TestUtils.validateResults(analysisResults5, 0, 0, 0, undefined, undefined, 42); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults6 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias2.py'], configOptions); + TestUtils.validateResults(analysisResults6, 0, 0, 0, undefined, undefined, 46); + + // Now change reportDeprecated to emit an error. 
+ configOptions.diagnosticRuleSet.reportDeprecated = 'error'; + + configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults7 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias2.py'], configOptions); + TestUtils.validateResults(analysisResults7, 0, 0, 0, undefined, undefined, 0); + + configOptions.defaultPythonVersion = pythonVersion3_9; + const analysisResults8 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias2.py'], configOptions); + TestUtils.validateResults(analysisResults8, 42, 0, 0, undefined, undefined, 0); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults9 = TestUtils.typeAnalyzeSampleFiles(['deprecatedAlias2.py'], configOptions); + TestUtils.validateResults(analysisResults9, 46, 0, 0, undefined, undefined, 0); +}); + +test('Deprecated2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['deprecated2.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0, 0, 0, undefined, undefined, 14); + + configOptions.diagnosticRuleSet.reportDeprecated = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['deprecated2.py'], configOptions); + TestUtils.validateResults(analysisResults2, 14); +}); + +test('Deprecated3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['deprecated3.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0, 0, 0, undefined, undefined, 5); + + configOptions.diagnosticRuleSet.reportDeprecated = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['deprecated3.py'], configOptions); + TestUtils.validateResults(analysisResults2, 5); +}); + +test('Deprecated4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['deprecated4.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0, 0, 0, 
undefined, undefined, 7); + + configOptions.diagnosticRuleSet.reportDeprecated = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['deprecated4.py'], configOptions); + TestUtils.validateResults(analysisResults2, 7); +}); + +test('Deprecated5', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['deprecated5.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0, 0, 0, undefined, undefined, 2); + + configOptions.diagnosticRuleSet.reportDeprecated = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['deprecated5.py'], configOptions); + TestUtils.validateResults(analysisResults2, 2); +}); + +test('Deprecated6', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['deprecated6.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0, 0, 0, undefined, undefined, 3); + + configOptions.diagnosticRuleSet.reportDeprecated = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['deprecated6.py'], configOptions); + TestUtils.validateResults(analysisResults2, 3); +}); + +test('Deprecated7', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['deprecated7.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0, 0, 0, undefined, undefined, 2); + + configOptions.diagnosticRuleSet.reportDeprecated = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['deprecated7.py'], configOptions); + TestUtils.validateResults(analysisResults2, 2); +}); + +test('Deprecated8', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['deprecated8.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0, 0, 0, undefined, undefined, 4); + + 
configOptions.diagnosticRuleSet.reportDeprecated = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['deprecated8.py'], configOptions); + TestUtils.validateResults(analysisResults2, 4); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/classDeclaration.test.ts b/python-parser/packages/pyright-internal/src/tests/classDeclaration.test.ts new file mode 100644 index 00000000..9b2d39a4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/classDeclaration.test.ts @@ -0,0 +1,198 @@ +/* + * classDeclaration.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Test class detail's declaration + */ + +import assert from 'assert'; + +import { isClassDeclaration, isSpecialBuiltInClassDeclaration } from '../analyzer/declaration'; +import { getEnclosingFunction } from '../analyzer/parseTreeUtils'; +import { isProperty } from '../analyzer/typeUtils'; +import { TypeCategory, isClassInstance } from '../analyzer/types'; +import { TextRange } from '../common/textRange'; +import { ParseNodeType } from '../parser/parseNodes'; +import { Range } from './harness/fourslash/fourSlashTypes'; +import { TestState, getNodeAtMarker, parseAndGetTestState } from './harness/fourslash/testState'; + +test('regular class', () => { + const code = ` +// @filename: test.py +//// [|class /*marker*/A: +//// pass|] + `; + + checkClassDetail(code); +}); + +test('Meta class', () => { + const code = ` +// @filename: test.py +//// [|class /*range*/MyMeta(type): +//// def __new__(cls, name, bases, dct): +//// return super().__new__(cls, name, bases, dct)|] +//// +//// class MyClass(metaclass=MyMeta): +//// pass +//// +//// /*marker*/E = MyMeta() + `; + + checkClassDetail(code, '__class_MyMeta'); +}); + +test('special built in class', () => { + const code = ` +// @filename: test.py +//// from typing import TypedDict +//// def foo(t: /*marker*/TypedDict): ... 
+ `; + + checkSpecialBuiltInClassDetail(code); +}); + +test('dynamic enum', () => { + const code = ` +// @filename: test.py +//// from enum import Enum +//// /*marker*/E = Enum('E', { 'One': 1 }) + `; + + checkNoDeclarationInClassDetail(code); +}); + +test('dynamic named tuple', () => { + const code = ` +// @filename: test.py +//// from typing import NamedTuple +//// /*marker*/N = NamedTuple("N", [('name', str)]) + `; + + checkNoDeclarationInClassDetail(code); +}); + +test('dynamic typed dict', () => { + const code = ` +// @filename: test.py +//// from typing import TypedDict +//// /*marker*/T = TypedDict("T", { "one": str }) + `; + + checkNoDeclarationInClassDetail(code); +}); + +test('dynamic new type', () => { + const code = ` +// @filename: test.py +//// from typing import NewType +//// /*marker*/I = NewType('I', int) + `; + + checkNoDeclarationInClassDetail(code); +}); + +test('dynamic type', () => { + const code = ` +// @filename: test.py +//// /*marker*/D = type('D', (object,), {}) + `; + + checkNoDeclarationInClassDetail(code); +}); + +test('property', () => { + const code = ` +// @filename: test.py +//// class MyClass: +//// def __init__(self): +//// self._v = None +//// +//// @property +//// def /*getter*/value(self): +//// return self._v +//// +//// @value.setter +//// def /*setter*/value(self, value): +//// self._v = value +//// +//// @value.deleter +//// def /*deleter*/value(self): +//// del self._v + `; + + const state = parseAndGetTestState(code).state; + + ['getter', 'setter', 'deleter'].forEach((marker) => { + const node = getNodeAtMarker(state, marker); + assert(node.nodeType === ParseNodeType.Name); + + const functionNode = getEnclosingFunction(node); + assert(functionNode?.nodeType === ParseNodeType.Function); + + const result = state.program.evaluator!.getTypeOfFunction(functionNode); + assert(result?.decoratedType); + + assert(isProperty(result.decoratedType)); + assert(isClassInstance(result.decoratedType)); + + 
assert(result.decoratedType.shared.declaration); + assert(isClassDeclaration(result.decoratedType.shared.declaration)); + + assert(result.decoratedType.shared.declaration.moduleName === 'builtins'); + assert(result.decoratedType.shared.declaration.node.d.name.d.value === 'property'); + }); +}); + +function checkSpecialBuiltInClassDetail(code: string) { + const state = parseAndGetTestState(code).state; + + const node = getNodeAtMarker(state); + assert(node.nodeType === ParseNodeType.Name); + + const type = state.program.evaluator!.getType(node); + assert(type?.category === TypeCategory.Class); + + assert.strictEqual(node.d.value, type.priv.aliasName ?? type.shared.name); + + assert(type.shared.declaration); + if (type.priv.aliasName) { + assert(isClassDeclaration(type.shared.declaration)); + } else { + assert(isSpecialBuiltInClassDeclaration(type.shared.declaration)); + } +} + +function checkNoDeclarationInClassDetail(code: string) { + const state = parseAndGetTestState(code).state; + _checkClassDetail(state, undefined); +} + +function checkClassDetail(code: string, name?: string) { + const state = parseAndGetTestState(code).state; + _checkClassDetail(state, state.getRangeByMarkerName('marker') ?? state.getRangeByMarkerName('range'), name); +} + +function _checkClassDetail(state: TestState, range: Range | undefined, name?: string) { + const node = getNodeAtMarker(state); + assert(node.nodeType === ParseNodeType.Name); + + const type = state.program.evaluator!.getType(node); + assert(type?.category === TypeCategory.Class); + + assert.strictEqual(name ?? node.d.value, type.priv.aliasName ?? type.shared.name); + + if (range) { + assert(type.shared.declaration); + assert(isClassDeclaration(type.shared.declaration)); + + assert.deepStrictEqual( + TextRange.create(type.shared.declaration.node.start, type.shared.declaration.node.length), + TextRange.fromBounds(range.pos, range.end) + ); + } else { + // There should be no decl. 
+ assert(!type.shared.declaration); + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/collectionUtils.test.ts b/python-parser/packages/pyright-internal/src/tests/collectionUtils.test.ts new file mode 100644 index 00000000..0308aba0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/collectionUtils.test.ts @@ -0,0 +1,178 @@ +/* + * collectionUtils.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + */ + +import assert from 'assert'; + +import * as utils from '../common/collectionUtils'; +import { compareValues, isArray } from '../common/core'; + +test('UtilsContainsDefault', () => { + const data = [1, 2, 3, 4, 5]; + assert(utils.contains(data, 2)); +}); + +test('UtilsContainsComparer', () => { + const data = [new D(1, 'A'), new D(2, 'B'), new D(3, 'C'), new D(4, 'D')]; + assert(utils.contains(data, new D(1, 'D'), (a, b) => a.value === b.value)); +}); + +test('UtilsAppend', () => { + const data: number[] = []; + assert.deepEqual(utils.append(data, 1), [1]); +}); + +test('UtilsAppendUndefined', () => { + const data = undefined; + assert.deepEqual(utils.append(data, 1), [1]); +}); + +test('UtilsAppendUndefinedValue', () => { + const data = [1]; + assert.equal(utils.append(data, undefined), data); +}); + +test('UtilsFindEmpty', () => { + const data: number[] = []; + assert.equal( + utils.find(data, (e) => true), + undefined + ); +}); + +test('UtilsFindNoMatch', () => { + const data = [1]; + assert.equal( + utils.find(data, (e) => false), + undefined + ); +}); + +test('UtilsFindMatchSimple', () => { + const data = [1]; + assert.equal( + utils.find(data, (e) => e === 1), + 1 + ); +}); + +test('UtilsFindMatch', () => { + const data = [new D(1, 'Hello')]; + assert.equal( + utils.find(data, (e) => e.value === 1), + data[0] + ); +}); + +test('UtilsFindMatchCovariant', () => { + const item1 = new D(1, 'Hello'); + const item2 = new D(2, 'Hello2'); + const data: B[] = [new B(0), item1, item2, new B(3)]; + 
assert.equal( + utils.find(data, (e: B) => e.value === 2), + item2 + ); +}); + +test('UtilsStableSort', () => { + const data = [new D(2, 'Hello3'), new D(1, 'Hello1'), new D(2, 'Hello4'), new D(1, 'Hello2')]; + const sorted = utils.stableSort(data, (a, b) => compareValues(a.value, b.value)); + + const result: string[] = []; + sorted.forEach((e) => result.push(e.name)); + + assert.deepEqual(result, ['Hello1', 'Hello2', 'Hello3', 'Hello4']); +}); + +test('UtilsBinarySearch', () => { + const data = [new D(1, 'Hello3'), new D(2, 'Hello1'), new D(3, 'Hello4'), new D(4, 'Hello2')]; + const index = utils.binarySearch(data, new D(3, 'Unused'), (v) => v.value, compareValues, 0); + + assert.equal(index, 2); +}); + +test('UtilsBinarySearchMiss', () => { + const data = [new D(1, 'Hello3'), new D(2, 'Hello1'), new D(4, 'Hello4'), new D(5, 'Hello2')]; + const index = utils.binarySearch(data, new D(3, 'Unused'), (v) => v.value, compareValues, 0); + + assert.equal(~index, 2); +}); + +test('isArray1', () => { + const data = [new D(1, 'Hello3')]; + assert(isArray(data)); +}); + +test('isArray2', () => { + const data = {}; + assert(!isArray(data)); +}); + +test('addRange1', () => { + const data: number[] = []; + assert.deepEqual(utils.addRange(data, [1, 2, 3]), [1, 2, 3]); +}); + +test('addRange2', () => { + const data: number[] = [1, 2, 3]; + assert.deepEqual(utils.addRange(data, [1, 2, 3, 4], 3, 4), [1, 2, 3, 4]); +}); + +test('insertAt1', () => { + const data: number[] = [2, 3, 4]; + assert.deepEqual(utils.insertAt(data, 0, 1), [1, 2, 3, 4]); +}); + +test('insertAt2', () => { + const data: number[] = [1, 2, 4]; + assert.deepEqual(utils.insertAt(data, 2, 3), [1, 2, 3, 4]); +}); + +test('insertAt3', () => { + const data: number[] = [1, 2, 3]; + assert.deepEqual(utils.insertAt(data, 3, 4), [1, 2, 3, 4]); +}); + +test('cloneAndSort', () => { + const data: number[] = [3, 2, 1]; + assert.deepEqual(utils.cloneAndSort(data), [1, 2, 3]); +}); + +test('flatten', () => { + const data: 
number[][] = [ + [1, 2], + [3, 4], + [5, 6], + ]; + assert.deepEqual(utils.flatten(data), [1, 2, 3, 4, 5, 6]); +}); + +test('getNestedProperty', () => { + const data = { a: { b: { c: 3 } } }; + assert.deepEqual(utils.getNestedProperty(data, 'a'), { b: { c: 3 } }); + assert.deepEqual(utils.getNestedProperty(data, 'a.b'), { c: 3 }); + assert.deepEqual(utils.getNestedProperty(data, 'a.b.c'), 3); + assert.deepEqual(utils.getNestedProperty(data, 'x'), undefined); + assert.deepEqual(utils.getNestedProperty(data, 'a.x'), undefined); + assert.deepEqual(utils.getNestedProperty(data, ''), undefined); + assert.deepEqual(utils.getNestedProperty(undefined, ''), undefined); +}); + +class B { + value: number; + + constructor(value: number) { + this.value = value; + } +} + +class D extends B { + name: string; + + constructor(value: number, name: string) { + super(value); + this.name = name; + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/common.test.ts b/python-parser/packages/pyright-internal/src/tests/common.test.ts new file mode 100644 index 00000000..8c0b89c5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/common.test.ts @@ -0,0 +1,117 @@ +/* + * common.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ */ + +import * as assert from 'assert'; + +import { doRangesOverlap, isPositionInRange, Range, rangesAreEqual, TextRange } from '../common/textRange'; + +test('textRange create', () => { + assert.throws(() => TextRange.create(-1, 1), Error); + assert.throws(() => TextRange.create(1, -1), Error); +}); + +test('textRange from bounds', () => { + assert.throws(() => TextRange.fromBounds(-1, 1), Error); + assert.throws(() => TextRange.fromBounds(1, -1), Error); +}); + +test('textRange overlap', () => { + const textRangeOne: Range = { + start: { + line: 0, + character: 0, + }, + end: { + line: 10, + character: 0, + }, + }; + + const textRangeTwo: Range = { + start: { + line: 11, + character: 0, + }, + end: { + line: 20, + character: 0, + }, + }; + + const textRangeThree: Range = { + start: { + line: 5, + character: 0, + }, + end: { + line: 15, + character: 0, + }, + }; + + assert.equal(doRangesOverlap(textRangeOne, textRangeTwo), false); + assert.equal(doRangesOverlap(textRangeTwo, textRangeOne), false); + assert.equal(doRangesOverlap(textRangeOne, textRangeThree), true); +}); + +test('textRange contain', () => { + const textRangeOne: Range = { + start: { + line: 0, + character: 5, + }, + end: { + line: 10, + character: 1, + }, + }; + + assert.equal(isPositionInRange(textRangeOne, { line: 0, character: 0 }), false); + assert.equal(isPositionInRange(textRangeOne, { line: 0, character: 5 }), true); + assert.equal(isPositionInRange(textRangeOne, { line: 5, character: 0 }), true); + assert.equal(isPositionInRange(textRangeOne, { line: 10, character: 0 }), true); + assert.equal(isPositionInRange(textRangeOne, { line: 10, character: 1 }), true); + assert.equal(isPositionInRange(textRangeOne, { line: 10, character: 2 }), false); +}); + +test('textRange equal', () => { + const textRangeOne: Range = { + start: { + line: 0, + character: 0, + }, + end: { + line: 10, + character: 0, + }, + }; + + const textRangeTwo: Range = { + start: { + line: 0, + character: 0, + }, + end: { + 
line: 10, + character: 0, + }, + }; + + const textRangeThree: Range = { + start: { + line: 5, + character: 0, + }, + end: { + line: 15, + character: 0, + }, + }; + + assert.equal(rangesAreEqual(textRangeOne, textRangeTwo), true); + assert.equal(rangesAreEqual(textRangeTwo, textRangeOne), true); + assert.equal(rangesAreEqual(textRangeOne, textRangeThree), false); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/completions.test.ts b/python-parser/packages/pyright-internal/src/tests/completions.test.ts new file mode 100644 index 00000000..019b492f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/completions.test.ts @@ -0,0 +1,1663 @@ +/* + * completions.test.ts + * + * completions tests. + */ + +import assert from 'assert'; +import { CancellationToken } from 'vscode-languageserver'; +import { CompletionItemKind, MarkupKind } from 'vscode-languageserver-types'; + +import { Uri } from '../common/uri/uri'; +import { CompletionOptions, CompletionProvider } from '../languageService/completionProvider'; +import { parseAndGetTestState } from './harness/fourslash/testState'; + +test('completion import statement tooltip', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import [|/*marker*/m|] + +// @filename: matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Module, + label: 'matplotlib', + documentation: 'matplotlib', + }, + ], + }, + }); +}); + +test('completion import statement tooltip - stub file', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import 
[|/*marker*/m|] + +// @filename: matplotlib/__init__.pyi +// @library: true +//// # empty + +// @filename: matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Module, + label: 'matplotlib', + documentation: 'matplotlib', + }, + ], + }, + }); +}); + +test('completion import statement tooltip - doc in stub file', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import [|/*marker*/m|] + +// @filename: matplotlib/__init__.pyi +// @library: true +//// """ matplotlib """ + +// @filename: matplotlib/__init__.py +// @library: true +//// # empty + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Module, + label: 'matplotlib', + documentation: 'matplotlib', + }, + ], + }, + }); +}); + +test('completion import statement tooltip - sub modules', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import matplotlib.[|/*marker*/p|] + +// @filename: matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + +// @filename: matplotlib/pyplot.py +// @library: true +//// """ pyplot """ + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Module, + label: 'pyplot', + 
documentation: 'pyplot', + }, + ], + }, + }); +}); + +test('completion import reference tooltip', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import matplotlib +//// [|/*marker*/m|] + +// @filename: matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Module, + label: 'matplotlib', + documentation: '```python\nmatplotlib\n```\n---\nmatplotlib', + }, + ], + }, + }); +}); + +test('completion import reference tooltip - first module', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import matplotlib.pyplot +//// [|/*marker*/m|] + +// @filename: matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + +// @filename: matplotlib/pyplot.py +// @library: true +//// """ pyplot """ + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Module, + label: 'matplotlib', + documentation: '```python\nmatplotlib\n```\n---\nmatplotlib', + }, + ], + }, + }); +}); + +test('completion import reference tooltip - child module', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import matplotlib.pyplot +//// matplotlib.[|/*marker*/p|] + +// @filename: matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + +// @filename: matplotlib/pyplot.py +// @library: true +//// """ pyplot 
""" + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Module, + label: 'pyplot', + documentation: '```python\npyplot\n```\n---\npyplot', + }, + ], + }, + }); +}); + +test('completion from import statement tooltip - first module', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// from [|/*marker*/m|] + +// @filename: matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Module, + label: 'matplotlib', + documentation: 'matplotlib', + }, + ], + }, + }); +}); + +test('completion from import statement tooltip - child module', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// from matplotlib.[|/*marker*/p|] + +// @filename: matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + +// @filename: matplotlib/pyplot.py +// @library: true +//// """ pyplot """ + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Module, + label: 'pyplot', + documentation: 'pyplot', + }, + ], + }, + }); +}); + +test('completion from import statement tooltip - implicit module', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": 
true +//// } + +// @filename: test.py +//// from matplotlib import [|/*marker*/p|] + +// @filename: matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + +// @filename: matplotlib/pyplot.py +// @library: true +//// """ pyplot """ + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Module, + label: 'pyplot', + documentation: 'pyplot', + }, + ], + }, + }); +}); + +test('include literals in expression completion', async () => { + const code = ` +// @filename: test.py +//// from typing import TypedDict +//// +//// class TestType(TypedDict): +//// A: str +//// B: int +//// +//// var: TestType = {} +//// +//// var[[|A/*marker*/|]] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: "'A'", + textEdit: { range: state.getPositionRange('marker'), newText: "'A'" }, + }, + ], + }, + }); +}); + +test('include literals in set key', async () => { + const code = ` +// @filename: test.py +//// from typing import TypedDict +//// +//// class TestType(TypedDict): +//// A: str +//// B: int +//// +//// var: TestType = { [|A/*marker*/|] } + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: "'A'", + textEdit: { range: state.getPositionRange('marker'), newText: "'A'" }, + }, + ], + }, + }); +}); + +test('include literals in dict key', async () => { + const code = ` +// @filename: test.py +//// 
from typing import TypedDict +//// +//// class TestType(TypedDict): +//// A: str +//// B: int +//// +//// var: TestType = { [|A/*marker*/|] : "hello" } + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"A"', + textEdit: { range: state.getPositionRange('marker'), newText: '"A"' }, + }, + ], + }, + }); +}); + +test('literals support for binary operators - equals', async () => { + const code = ` +// @filename: test.py +//// from typing import Literal +//// +//// Currency = Literal["USD", "EUR"] +//// +//// def foo(c: Currency): +//// if c == [|"/*marker*/"|] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"USD"', + textEdit: { range: state.getPositionRange('marker'), newText: '"USD"' }, + }, + { + kind: CompletionItemKind.Constant, + label: '"EUR"', + textEdit: { range: state.getPositionRange('marker'), newText: '"EUR"' }, + }, + ], + }, + }); +}); + +test('literals support for binary operators - not equals', async () => { + const code = ` +// @filename: test.py +//// from typing import Literal +//// +//// Currency = Literal["USD", "EUR"] +//// +//// def foo(c: Currency): +//// if c != [|"/*marker*/"|] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"USD"', + textEdit: { range: state.getPositionRange('marker'), newText: '"USD"' }, + }, + { + kind: 
CompletionItemKind.Constant, + label: '"EUR"', + textEdit: { range: state.getPositionRange('marker'), newText: '"EUR"' }, + }, + ], + }, + }); +}); + +test('literals support for binary operators without string node', async () => { + const code = ` +// @filename: test.py +//// from typing import Literal +//// +//// Currency = Literal["USD", "EUR"] +//// +//// def foo(c: Currency): +//// if c != [|/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"USD"', + }, + { + kind: CompletionItemKind.Constant, + label: '"EUR"', + }, + ], + }, + }); +}); + +test('literals support for binary operators with prior word', async () => { + const code = ` +// @filename: test.py +//// from typing import Literal +//// +//// Currency = Literal["USD", "EUR"] +//// +//// def foo(c: Currency): +//// if c != [|US/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"USD"', + }, + ], + }, + }); +}); + +test('literals support for binary operators - assignment expression', async () => { + const code = ` +// @filename: test.py +//// from typing import Literal +//// +//// Currency = Literal["USD", "EUR"] +//// +//// def foo(c: Currency): +//// if c := [|"/*marker*/"|] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"USD"', + textEdit: { range: 
state.getPositionRange('marker'), newText: '"USD"' }, + }, + { + kind: CompletionItemKind.Constant, + label: '"EUR"', + textEdit: { range: state.getPositionRange('marker'), newText: '"EUR"' }, + }, + ], + }, + }); +}); + +test('literals support for call', async () => { + const code = ` +// @filename: test.py +//// from typing import Literal +//// +//// Currency = Literal["USD", "EUR"] +//// +//// def foo(c: Currency) -> Currency: +//// return c +//// +//// if foo([|"/*marker1*/"|]) == [|"/*marker2*/"|] + `; + + const state = parseAndGetTestState(code).state; + const marker1 = state.getMarkerByName('marker1'); + state.openFile(marker1.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker1: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"USD"', + textEdit: { range: state.getPositionRange('marker1'), newText: '"USD"' }, + }, + { + kind: CompletionItemKind.Constant, + label: '"EUR"', + textEdit: { range: state.getPositionRange('marker1'), newText: '"EUR"' }, + }, + ], + }, + marker2: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"USD"', + textEdit: { range: state.getPositionRange('marker2'), newText: '"USD"' }, + }, + { + kind: CompletionItemKind.Constant, + label: '"EUR"', + textEdit: { range: state.getPositionRange('marker2'), newText: '"EUR"' }, + }, + ], + }, + }); +}); + +test('list with literal types', async () => { + const code = ` +// @filename: test.py +//// from typing import Literal +//// +//// Currency = Literal["USD", "EUR"] +//// +//// a: list[Currency] = [[|"/*marker*/"|]] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"USD"', + textEdit: { range: state.getPositionRange('marker'), newText: '"USD"' }, + }, + { + kind: 
CompletionItemKind.Constant, + label: '"EUR"', + textEdit: { range: state.getPositionRange('marker'), newText: '"EUR"' }, + }, + ], + }, + }); +}); + +test('literals support for match - error case', async () => { + const code = ` +// @filename: test.py +//// from typing import Literal +//// +//// Currency = Literal["USD", "EUR"] +//// +//// def foo(c: Currency): +//// match c: +//// case [|/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"USD"', + }, + { + kind: CompletionItemKind.Constant, + label: '"EUR"', + }, + ], + }, + }); +}); + +test('literals support for match - simple case', async () => { + const code = ` +// @filename: test.py +//// from typing import Literal +//// +//// Currency = Literal["USD", "EUR"] +//// +//// def foo(c: Currency): +//// match c: +//// case [|"/*marker*/"|] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"USD"', + textEdit: { range: state.getPositionRange('marker'), newText: '"USD"' }, + }, + { + kind: CompletionItemKind.Constant, + label: '"EUR"', + textEdit: { range: state.getPositionRange('marker'), newText: '"EUR"' }, + }, + ], + }, + }); +}); + +test('literals support for match - simple case without string', async () => { + const code = ` +// @filename: test.py +//// from typing import Literal +//// +//// Currency = Literal["USD", "EUR"] +//// +//// def foo(c: Currency): +//// match c: +//// case [|US/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + 
state.openFile(marker.fileName); + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"USD"', + }, + ], + }, + }); +}); + +test('completion quote trigger', async () => { + const code = ` +// @filename: test.py +//// from typing import Literal +//// +//// Currency = Literal["USD", "EUR"] +//// +//// def foo(c: Currency): +//// match c: +//// case [|"/*marker*/"|] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + const filePath = marker.fileName; + const uri = Uri.file(filePath, state.serviceProvider); + const position = state.convertOffsetToPosition(filePath, marker.position); + + const options: CompletionOptions = { + format: 'markdown', + snippet: false, + lazyEdit: false, + triggerCharacter: '"', + }; + + const result = new CompletionProvider( + state.program, + uri, + position, + options, + CancellationToken.None + ).getCompletions(); + + assert(result); + const item = result.items.find((a) => a.label === '"USD"'); + assert(item); +}); + +test('completion quote trigger - middle', async () => { + const code = ` +// @filename: test.py +//// from typing import Literal +//// +//// Currency = Literal["Quote'Middle"] +//// +//// def foo(c: Currency): +//// match c: +//// case [|"Quote'/*marker*/"|] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + const filePath = marker.fileName; + const uri = Uri.file(filePath, state.serviceProvider); + const position = state.convertOffsetToPosition(filePath, marker.position); + + const options: CompletionOptions = { + format: 'markdown', + snippet: false, + lazyEdit: false, + triggerCharacter: "'", + }; + + const result = new CompletionProvider( + state.program, + uri, + position, + options, + CancellationToken.None + ).getCompletions(); + + assert.strictEqual(result?.items.length, 0); +}); + +test('auto import sort text', 
async () => { + const code = ` +// @filename: test.py +//// [|os/*marker*/|] + +// @filename: unused.py +//// import os +//// p = os.path + +// @filename: vendored/__init__.py +// @library: true +//// # empty + +// @filename: vendored/os.py +// @library: true +//// def foo(): pass + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFiles(state.testData.files.map((f) => f.fileName)); + + while (state.workspace.service.test_program.analyze()); + + const filePath = marker.fileName; + const uri = Uri.file(filePath, state.serviceProvider); + const position = state.convertOffsetToPosition(filePath, marker.position); + + const options: CompletionOptions = { + format: 'markdown', + snippet: false, + lazyEdit: false, + }; + + const result = new CompletionProvider( + state.program, + uri, + position, + options, + CancellationToken.None + ).getCompletions(); + + const items = result?.items.filter((i) => i.label === 'os'); + assert.strictEqual(items?.length, 2); + + items.sort((a, b) => a.sortText!.localeCompare(b.sortText!)); + + assert(!items[0].labelDetails); + assert.strictEqual(items[1].labelDetails!.description, 'vendored'); +}); + +test('completion MRU affects sort order', async () => { + type RecentCompletionInfo = { + label: string; + autoImportText: string; + }; + + const completionProviderTestAccess = CompletionProvider as unknown as { + mostRecentCompletions: RecentCompletionInfo[]; + }; + + // Reset MRU list to keep the test deterministic. 
+ completionProviderTestAccess.mostRecentCompletions = []; + + const code = ` +// @filename: test.py +//// true_divide = 0 +//// truly = 0 +//// tru/*marker*/ + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFiles(state.testData.files.map((f) => f.fileName)); + + while (state.workspace.service.test_program.analyze()); + + const filePath = marker.fileName; + const uri = Uri.file(filePath, state.serviceProvider); + const position = state.convertOffsetToPosition(filePath, marker.position); + + const options: CompletionOptions = { + format: 'markdown', + snippet: false, + lazyEdit: false, + }; + + const provider1 = new CompletionProvider(state.program, uri, position, options, CancellationToken.None); + const result1 = provider1.getCompletions(); + assert(result1); + + const truly1 = result1.items.find((i) => i.label === 'truly'); + const trueDivide1 = result1.items.find((i) => i.label === 'true_divide'); + assert(truly1?.sortText); + assert(trueDivide1?.sortText); + + // Not in MRU list yet: normal symbol category (09) and recent index (9999). + assert(truly1.sortText.startsWith('09.9999.')); + assert(trueDivide1.sortText.startsWith('09.9999.')); + + provider1.resolveCompletionItem(truly1); + + const provider2 = new CompletionProvider(state.program, uri, position, options, CancellationToken.None); + const result2 = provider2.getCompletions(); + assert(result2); + + const truly2 = result2.items.find((i) => i.label === 'truly'); + const trueDivide2 = result2.items.find((i) => i.label === 'true_divide'); + assert(truly2?.sortText); + assert(trueDivide2?.sortText); + + // Now the selected item is in MRU: promoted to RecentKeywordOrSymbol category (05) with index (0000). + assert(truly2.sortText.startsWith('05.0000.')); + assert(trueDivide2.sortText.startsWith('09.9999.')); + + // Reset MRU list so it doesn't affect other tests. 
+ completionProviderTestAccess.mostRecentCompletions = []; +}); + +test('override generic', async () => { + const code = ` +// @filename: test.py +//// from typing import Generic, TypeVar +//// from typing_extensions import override +//// +//// T = TypeVar('T') +//// class A(Generic[T]): +//// def foo(self, x: list[T]) -> T: +//// return x +//// +//// class B(A[int]): +//// @override +//// def [|foo/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + ['marker']: { + completions: [ + { + label: 'foo', + kind: CompletionItemKind.Method, + textEdit: { + range: state.getPositionRange('marker'), + newText: 'foo(self, x: list[T]) -> T:\n return super().foo(x)', + }, + }, + ], + }, + }); +}); + +test('override generic nested', async () => { + const code = ` +// @filename: test.py +//// from typing import Generic, TypeVar +//// from typing_extensions import override +//// +//// T = TypeVar('T') +//// T2 = TypeVar('T2') +//// class A(Generic[T, T2]): +//// def foo(self, x: tuple[T, T2]) -> T: +//// return x +//// +//// +//// T3 = TypeVar('T3') +//// class B(A[int, T3]): +//// @override +//// def [|foo/*marker1*/|] +//// +//// class C(B[int]): +//// @override +//// def [|foo/*marker2*/|] + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + ['marker1']: { + completions: [ + { + label: 'foo', + kind: CompletionItemKind.Method, + textEdit: { + range: state.getPositionRange('marker1'), + newText: 'foo(self, x: tuple[T, T2]) -> T:\n return super().foo(x)', + }, + }, + ], + }, + ['marker2']: { + completions: [ + { + label: 'foo', + kind: CompletionItemKind.Method, + textEdit: { + range: state.getPositionRange('marker2'), + newText: 'foo(self, x: tuple[T, T2]) -> T:\n return super().foo(x)', + }, + }, + ], + }, + }); +}); + +test('override __call__', async () => { + const code = ` +// @filename: test.py +//// from argparse import Action 
+//// +//// class MyAction(Action): +//// def [|__call__/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + ['marker']: { + completions: [ + { + label: '__call__', + kind: CompletionItemKind.Method, + textEdit: { + range: state.getPositionRange('marker'), + newText: + '__call__(self, parser: ArgumentParser, namespace: Namespace, values: str | Sequence[Any] | None, option_string: str | None = None) -> None:\n return super().__call__(parser, namespace, values, option_string)', + }, + }, + ], + }, + }); +}); + +test('override ParamSpec', async () => { + const code = ` +// @filename: test.py +//// from typing import Callable, ParamSpec +//// +//// P = ParamSpec("P") +//// +//// class A: +//// def foo(self, func: Callable[P, None], *args: P.args, **kwargs: P.kwargs): +//// pass +//// +//// class B(A): +//// def [|foo/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + ['marker']: { + completions: [ + { + label: 'foo', + kind: CompletionItemKind.Method, + textEdit: { + range: state.getPositionRange('marker'), + newText: + 'foo(self, func: Callable[P, None], *args: P.args, **kwargs: P.kwargs):\n return super().foo(func, *args, **kwargs)', + }, + }, + ], + }, + }); +}); + +test('annotation using comment', async () => { + const code = ` +// @filename: test.py +//// class A: +//// def foo(self, a): # type: (int) -> None +//// pass +//// +//// class B(A): +//// def [|foo/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + ['marker']: { + completions: [ + { + label: 'foo', + kind: CompletionItemKind.Method, + textEdit: { + range: state.getPositionRange('marker'), + newText: 'foo(self, a: int) -> None:\n return super().foo(a)', + }, + }, + ], + }, + }); +}); + +test('Complex type arguments', async () => { + const code = ` +// @filename: test.py 
+//// from typing import Generic, TypeVar, Any, List, Dict, Tuple, Mapping, Union +//// +//// T = TypeVar("T") +//// +//// class A(Generic[T]): +//// def foo(self, a: T) -> T: +//// return a +//// +//// class B(A[Union[Tuple[list, dict], tuple[Mapping[List[A[int]], Dict[str, Any]], float]]]): +//// pass + +// @filename: test1.py +//// from test import B +//// +//// class U(B): +//// def [|foo/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + + state.openFiles(state.testData.files.map((f) => f.fileName)); + + await state.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'foo', + kind: CompletionItemKind.Method, + textEdit: { + range: state.getPositionRange('marker'), + newText: 'foo(self, a: T) -> T:\n return super().foo(a)', + }, + }, + ], + }, + }); +}); + +test('Enum member', async () => { + const code = ` +// @filename: test.py +//// from enum import Enum +//// +//// class MyEnum(Enum): +//// this = 1 +//// that = 2 +//// +//// print(MyEnum.[|/*marker*/|]) + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + ['marker']: { + completions: [ + { + label: 'this', + kind: CompletionItemKind.EnumMember, + documentation: '```python\nthis: int\n```', + }, + ], + }, + }); +}); + +test('no member of Enum member', async () => { + const code = ` +// @filename: test.py +//// from enum import Enum +//// +//// class MyEnum(Enum): +//// this = 1 +//// that = 2 +//// +//// print(MyEnum.this.[|/*marker*/|]) + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('excluded', 'markdown', { + ['marker']: { + completions: [ + { + label: 'this', + kind: undefined, + }, + { + label: 'that', + kind: undefined, + }, + ], + }, + }); +}); + +test('default Enum member', async () => { + const code = ` +// @filename: test.py +//// from enum import Enum +//// +//// class MyEnum(Enum): +//// MemberOne = [] +//// +//// 
MyEnum.MemberOne.[|/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + ['marker']: { + completions: [ + { + label: 'name', + kind: CompletionItemKind.Property, + }, + { + label: 'value', + kind: CompletionItemKind.Property, + }, + ], + }, + }); +}); + +test('TypeDict literal values', async () => { + const code = ` +// @filename: test.py +//// from typing import TypedDict, Literal +//// +//// class DataA(TypedDict): +//// name: Literal["a", "b"] | None +//// +//// data_a: DataA = { +//// "name": [|"/*marker*/"|] +//// } + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + ['marker']: { + completions: [ + { + label: '"a"', + kind: CompletionItemKind.Constant, + textEdit: { range: state.getPositionRange('marker'), newText: '"a"' }, + }, + { + label: '"b"', + kind: CompletionItemKind.Constant, + textEdit: { range: state.getPositionRange('marker'), newText: '"b"' }, + }, + ], + }, + }); +}); + +test('typed dict key constructor completion', async () => { + const code = ` +// @filename: test.py +//// from typing import TypedDict +//// +//// class Movie(TypedDict): +//// key1: str +//// +//// a = Movie(k[|"/*marker*/"|]) +//// + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + kind: CompletionItemKind.Variable, + label: 'key1=', + }, + ], + }, + }); +}); + +test('import from completion for namespace package', async () => { + const code = ` +// @filename: test.py +//// from nest1 import [|/*marker*/|] + +// @filename: nest1/nest2/__init__.py +//// # empty + +// @filename: nest1/module.py +//// # empty + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + ['marker']: { + completions: [ + { + label: 'nest2', + kind: CompletionItemKind.Module, + }, + { + label: 
'module', + kind: CompletionItemKind.Module, + }, + ], + }, + }); +}); + +test('members off enum member', async () => { + const code = ` +// @filename: test.py +//// from enum import Enum +//// class Planet(Enum): +//// MERCURY = (3.303e+23, 2.4397e6) +//// EARTH = (5.976e+24, 6.37814e6) +//// +//// def __init__(self, mass, radius): +//// self.mass = mass # in kilograms +//// self.radius = radius # in meters +//// +//// @property +//// def surface_gravity(self): +//// # universal gravitational constant (m3 kg-1 s-2) +//// G = 6.67300E-11 +//// return G * self.mass / (self.radius * self.radius) +//// +//// Planet.EARTH.[|/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('excluded', 'markdown', { + ['marker']: { + completions: [ + { + label: 'MERCURY', + kind: CompletionItemKind.EnumMember, + }, + { + label: 'EARTH', + kind: CompletionItemKind.EnumMember, + }, + ], + }, + }); + + await state.verifyCompletion('included', 'markdown', { + ['marker']: { + completions: [ + { + label: 'mass', + kind: CompletionItemKind.Variable, + }, + { + label: 'radius', + kind: CompletionItemKind.Variable, + }, + { + label: 'surface_gravity', + kind: CompletionItemKind.Property, + }, + ], + }, + }); +}); + +test('handle missing close paren case', async () => { + const code = ` +// @filename: test.py +//// count=100 +//// while count <= (c[|/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + ['marker']: { + completions: [ + { + label: 'count', + kind: CompletionItemKind.Variable, + }, + ], + }, + }); +}); + +test('enum with regular base type', async () => { + const code = ` +// @filename: test.py +//// from enum import Enum +//// from datetime import timedelta +//// class Period(timedelta, Enum): +//// Today = -1 +//// +//// Period.Today.[|/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 
'markdown', { + ['marker']: { + completions: [ + { + label: 'days', + kind: CompletionItemKind.Property, + }, + { + label: 'seconds', + kind: CompletionItemKind.Property, + }, + ], + }, + }); +}); + +test('import statements with implicit import', async () => { + const code = ` +// @filename: test.py +//// from lib import /*marker*/ + +// @filename: lib/__init__.py +//// from . import api as api + +// @filename: lib/api.py +//// # Empty + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + ['marker']: { + completions: [ + { + label: 'api', + kind: CompletionItemKind.Module, + }, + ], + }, + }); +}); + +test('overloaded Literal[...] suggestions in call arguments', async () => { + const code = ` +// @filename: test.py +//// from typing import overload, Literal +//// +//// @overload +//// def example(p: Literal["A"]): ... +//// @overload +//// def example(p: Literal["B"]): ... +//// @overload +//// def example(p: Literal["C"]): ... 
+//// def example(p): +//// pass +//// +//// example([|"/*marker*/"|]) + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + await state.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"A"', + textEdit: { range: state.getPositionRange('marker'), newText: '"A"' }, + }, + { + kind: CompletionItemKind.Constant, + label: '"B"', + textEdit: { range: state.getPositionRange('marker'), newText: '"B"' }, + }, + { + kind: CompletionItemKind.Constant, + label: '"C"', + textEdit: { range: state.getPositionRange('marker'), newText: '"C"' }, + }, + ], + }, + }); +}); + +test('nested TypedDict completion with Unpack - without other fields', async () => { + const code = ` +// @filename: test.py +//// from typing import Unpack, TypedDict +//// +//// class InnerDict(TypedDict): +//// a: int +//// b: str +//// +//// class OuterDict(TypedDict): +//// inner: InnerDict +//// field_1: str +//// +//// def test_inner_dict(**kwargs: Unpack[OuterDict]): +//// pass +//// +//// test_inner_dict(inner={[|/*marker*/|]}) + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: "'a'", + textEdit: { range: state.getPositionRange('marker'), newText: "'a'" }, + }, + { + kind: CompletionItemKind.Constant, + label: "'b'", + textEdit: { range: state.getPositionRange('marker'), newText: "'b'" }, + }, + ], + }, + }); +}); + +test('nested TypedDict completion with Unpack - with other fields', async () => { + const code = ` +// @filename: test.py +//// from typing import Unpack, TypedDict +//// +//// class InnerDict(TypedDict): +//// a: int +//// b: str +//// +//// class OuterDict(TypedDict): +//// inner: InnerDict +//// field_1: str +//// +//// def test_inner_dict(**kwargs: Unpack[OuterDict]): +//// pass 
+//// +//// test_inner_dict(field_1="test", inner={[|/*marker*/|]}) + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: '"a"', + textEdit: { range: state.getPositionRange('marker'), newText: '"a"' }, + }, + { + kind: CompletionItemKind.Constant, + label: '"b"', + textEdit: { range: state.getPositionRange('marker'), newText: '"b"' }, + }, + ], + }, + }); +}); + +test('simple nested TypedDict completion - no Unpack', async () => { + const code = ` +// @filename: test.py +//// from typing import TypedDict +//// +//// class InnerDict(TypedDict): +//// a: int +//// b: str +//// +//// def test_func(inner: InnerDict): +//// pass +//// +//// test_func(inner={[|/*marker*/|]}) + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + kind: CompletionItemKind.Constant, + label: "'a'", + textEdit: { range: state.getPositionRange('marker'), newText: "'a'" }, + }, + { + kind: CompletionItemKind.Constant, + label: "'b'", + textEdit: { range: state.getPositionRange('marker'), newText: "'b'" }, + }, + ], + }, + }); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/config.test.ts b/python-parser/packages/pyright-internal/src/tests/config.test.ts new file mode 100644 index 00000000..ba3e5d20 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/config.test.ts @@ -0,0 +1,654 @@ +/* + * config.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for parsing of pyrightconfig.json files. 
+ */ + +import assert from 'assert'; + +import { AnalyzerService } from '../analyzer/service'; +import { deserialize, serialize } from '../backgroundThreadBase'; +import { CommandLineOptions, DiagnosticSeverityOverrides } from '../common/commandLineOptions'; +import { + ConfigOptions, + ExecutionEnvironment, + getBasicDiagnosticRuleSet, + getStandardDiagnosticRuleSet, +} from '../common/configOptions'; +import { ConsoleInterface, NullConsole } from '../common/console'; +import { TaskListPriority } from '../common/diagnostic'; +import { combinePaths, normalizePath, normalizeSlashes } from '../common/pathUtils'; +import { pythonVersion3_13, pythonVersion3_9 } from '../common/pythonVersion'; +import { RealTempFile, createFromRealFileSystem } from '../common/realFileSystem'; +import { createServiceProvider } from '../common/serviceProviderExtensions'; +import { Uri } from '../common/uri/uri'; +import { UriEx } from '../common/uri/uriUtils'; +import { TestAccessHost } from './harness/testAccessHost'; +import { TestFileSystem } from './harness/vfs/filesystem'; + +describe(`config test'}`, () => { + const tempFile = new RealTempFile(); + + afterAll(() => tempFile.dispose()); + + test('FindFilesWithConfigFile', () => { + const cwd = normalizePath(process.cwd()); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ true); + commandLineOptions.configFilePath = 'src/tests/samples/project1'; + + const configOptions = service.test_getConfigOptions(commandLineOptions); + service.setOptions(commandLineOptions); + + // The config file specifies a single file spec (a directory). 
+ assert.strictEqual(configOptions.include.length, 1, `failed creating options from ${cwd}`); + assert.strictEqual( + configOptions.projectRoot.key, + service.fs.realCasePath( + Uri.file(combinePaths(cwd, commandLineOptions.configFilePath), service.serviceProvider) + ).key + ); + + const fileList = service.test_getFileNamesFromFileSpecs(); + + // The config file specifies a subdirectory, so we should find + // only two of the three "*.py" files present in the project + // directory. + assert.strictEqual(fileList.length, 2); + }); + + test('FindFilesVirtualEnvAutoDetectExclude', () => { + const cwd = normalizePath(process.cwd()); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ true); + commandLineOptions.configFilePath = 'src/tests/samples/project_with_venv_auto_detect_exclude'; + + service.setOptions(commandLineOptions); + + // The config file is empty, so no 'exclude' are specified + // The myVenv directory is detected as a venv and will be automatically excluded + const fileList = service.test_getFileNamesFromFileSpecs(); + + // There are 3 python files in the workspace, outside of myVenv + // There is 1 python file in myVenv, which should be excluded + const fileNames = fileList.map((p) => p.fileName).sort(); + assert.deepStrictEqual(fileNames, ['sample1.py', 'sample2.py', 'sample3.py']); + }); + + test('FindFilesVirtualEnvAutoDetectInclude', () => { + const cwd = normalizePath(process.cwd()); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ true); + commandLineOptions.configFilePath = 'src/tests/samples/project_with_venv_auto_detect_include'; + + service.setOptions(commandLineOptions); + + // Config file defines 'exclude' folder so virtual env will be included + const fileList = service.test_getFileNamesFromFileSpecs(); + + // There are 3 python files in the workspace, outside of myVenv + // There is 1 more python file in 
excluded folder + // There is 1 python file in myVenv, which should be included + const fileNames = fileList.map((p) => p.fileName).sort(); + assert.deepStrictEqual(fileNames, ['library1.py', 'sample1.py', 'sample2.py', 'sample3.py']); + }); + + test('FileSpecNotAnArray', () => { + const cwd = normalizePath(process.cwd()); + const nullConsole = new NullConsole(); + const service = createAnalyzer(nullConsole); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + commandLineOptions.configFilePath = 'src/tests/samples/project2'; + service.setOptions(commandLineOptions); + + service.test_getConfigOptions(commandLineOptions); + + // The method should return a default config and log an error. + assert(nullConsole.infoCount > 0); + }); + + test('FileSpecNotAString', () => { + const cwd = normalizePath(process.cwd()); + const nullConsole = new NullConsole(); + const service = createAnalyzer(nullConsole); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + commandLineOptions.configFilePath = 'src/tests/samples/project3'; + service.setOptions(commandLineOptions); + + service.test_getConfigOptions(commandLineOptions); + + // The method should return a default config and log an error. + assert(nullConsole.infoCount > 0); + }); + + test('SomeFileSpecsAreInvalid', () => { + const cwd = normalizePath(process.cwd()); + const nullConsole = new NullConsole(); + const service = createAnalyzer(nullConsole); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + commandLineOptions.configFilePath = 'src/tests/samples/project4'; + service.setOptions(commandLineOptions); + + const configOptions = service.test_getConfigOptions(commandLineOptions); + + // The config file specifies four file specs in the include array + // and one in the exclude array. 
+ assert.strictEqual(configOptions.include.length, 4, `failed creating options from ${cwd}`); + assert.strictEqual(configOptions.exclude.length, 1); + assert.strictEqual( + configOptions.projectRoot.getFilePath(), + service.fs + .realCasePath(Uri.file(combinePaths(cwd, commandLineOptions.configFilePath), service.serviceProvider)) + .getFilePath() + ); + + const fileList = service.test_getFileNamesFromFileSpecs(); + + // We should receive two final files that match the include/exclude rules. + assert.strictEqual(fileList.length, 2); + }); + + test('ConfigBadJson', () => { + const cwd = normalizePath(process.cwd()); + const nullConsole = new NullConsole(); + const service = createAnalyzer(nullConsole); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + commandLineOptions.configFilePath = 'src/tests/samples/project5'; + service.setOptions(commandLineOptions); + + service.test_getConfigOptions(commandLineOptions); + + // The method should return a default config and log an error. + assert(nullConsole.infoCount > 0); + }); + + test('FindExecEnv1', () => { + const cwd = UriEx.file(normalizePath(process.cwd())); + const configOptions = new ConfigOptions(cwd); + + // Build a config option with three execution environments. 
+ const execEnv1 = new ExecutionEnvironment( + 'python', + cwd.resolvePaths('src/foo'), + getStandardDiagnosticRuleSet(), + /* defaultPythonVersion */ undefined, + /* defaultPythonPlatform */ undefined, + /* defaultExtraPaths */ undefined + ); + configOptions.executionEnvironments.push(execEnv1); + const execEnv2 = new ExecutionEnvironment( + 'python', + cwd.resolvePaths('src'), + getStandardDiagnosticRuleSet(), + /* defaultPythonVersion */ undefined, + /* defaultPythonPlatform */ undefined, + /* defaultExtraPaths */ undefined + ); + configOptions.executionEnvironments.push(execEnv2); + + const file1 = cwd.resolvePaths('src/foo/bar.py'); + assert.strictEqual(configOptions.findExecEnvironment(file1), execEnv1); + const file2 = cwd.resolvePaths('src/foo2/bar.py'); + assert.strictEqual(configOptions.findExecEnvironment(file2), execEnv2); + + // If none of the execution environments matched, we should get + // a default environment with the root equal to that of the config. + const file4 = UriEx.file('/nothing/bar.py'); + const defaultExecEnv = configOptions.findExecEnvironment(file4); + assert(defaultExecEnv.root); + const rootFilePath = Uri.is(defaultExecEnv.root) ? 
defaultExecEnv.root.getFilePath() : defaultExecEnv.root; + assert.strictEqual(normalizeSlashes(rootFilePath), normalizeSlashes(configOptions.projectRoot.getFilePath())); + }); + + test('PythonPlatform', () => { + const nullConsole = new NullConsole(); + const service = createAnalyzer(nullConsole); + const cwd = Uri.file( + normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_with_pyproject_toml_platform')), + service.serviceProvider + ); + const commandLineOptions = new CommandLineOptions(cwd.getFilePath(), /* fromLanguageServer */ false); + service.setOptions(commandLineOptions); + + const configOptions = service.test_getConfigOptions(commandLineOptions); + assert.ok(configOptions.executionEnvironments[0]); + assert.equal(configOptions.executionEnvironments[0].pythonPlatform, 'platform'); + }); + + test('AutoSearchPathsOn', () => { + const nullConsole = new NullConsole(); + const service = createAnalyzer(nullConsole); + const cwd = Uri.file( + normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_src')), + service.serviceProvider + ); + const commandLineOptions = new CommandLineOptions(cwd.getFilePath(), /* fromLanguageServer */ false); + commandLineOptions.configSettings.autoSearchPaths = true; + service.setOptions(commandLineOptions); + + const configOptions = service.test_getConfigOptions(commandLineOptions); + + const expectedExtraPaths = [service.fs.realCasePath(cwd.combinePaths('src'))]; + assert.deepStrictEqual(configOptions.defaultExtraPaths, expectedExtraPaths); + }); + + test('AutoSearchPathsOff', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_src')); + const nullConsole = new NullConsole(); + const service = createAnalyzer(nullConsole); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + commandLineOptions.configSettings.autoSearchPaths = false; + service.setOptions(commandLineOptions); + + const configOptions = 
service.test_getConfigOptions(commandLineOptions); + + assert.deepStrictEqual(configOptions.executionEnvironments, []); + }); + + test('AutoSearchPathsOnSrcIsPkg', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_src_is_pkg')); + const nullConsole = new NullConsole(); + const service = createAnalyzer(nullConsole); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + commandLineOptions.configSettings.autoSearchPaths = true; + service.setOptions(commandLineOptions); + + const configOptions = service.test_getConfigOptions(commandLineOptions); + + // The src folder is a package (has __init__.py) and so should not be automatically added as extra path + assert.deepStrictEqual(configOptions.executionEnvironments, []); + }); + + test('AutoSearchPathsOnWithConfigExecEnv', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_src_with_config_extra_paths')); + const nullConsole = new NullConsole(); + const service = createAnalyzer(nullConsole); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + commandLineOptions.configFilePath = combinePaths(cwd, 'pyrightconfig.json'); + commandLineOptions.configSettings.autoSearchPaths = true; + service.setOptions(commandLineOptions); + + const configOptions = service.test_getConfigOptions(commandLineOptions); + + // The extraPaths in the config file should override the setting. 
+ const expectedExtraPaths: string[] = []; + + assert.deepStrictEqual(configOptions.defaultExtraPaths, expectedExtraPaths); + }); + + test('AutoSearchPathsOnAndExtraPaths', () => { + const nullConsole = new NullConsole(); + const service = createAnalyzer(nullConsole); + const cwd = Uri.file( + normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_src_with_config_no_extra_paths')), + service.serviceProvider + ); + const commandLineOptions = new CommandLineOptions(cwd.getFilePath(), /* fromLanguageServer */ false); + commandLineOptions.configSettings.autoSearchPaths = true; + commandLineOptions.configSettings.extraPaths = ['src/_vendored']; + service.setOptions(commandLineOptions); + + const configOptions = service.test_getConfigOptions(commandLineOptions); + + const expectedExtraPaths: Uri[] = [ + service.fs.realCasePath(cwd.combinePaths('src')), + service.fs.realCasePath(cwd.combinePaths('src', '_vendored')), + ]; + + assert.deepStrictEqual(configOptions.defaultExtraPaths, expectedExtraPaths); + }); + + test('BasicPyprojectTomlParsing', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_with_pyproject_toml')); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ true); + + service.setOptions(commandLineOptions); + + const configOptions = service.test_getConfigOptions(commandLineOptions); + assert.strictEqual(configOptions.defaultPythonVersion!.toString(), pythonVersion3_9.toString()); + assert.strictEqual(configOptions.diagnosticRuleSet.reportMissingImports, 'error'); + assert.strictEqual(configOptions.diagnosticRuleSet.reportUnusedClass, 'warning'); + }); + + test('FindFilesInMemoryOnly', () => { + const cwd = normalizePath(process.cwd()); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(undefined, /* fromLanguageServer */ true); + // Force a lookup of the typeshed path. 
This causes us to try and generate a module path for the untitled file. + commandLineOptions.configSettings.typeshedPath = combinePaths(cwd, 'src', 'tests', 'samples'); + service.setOptions(commandLineOptions); + + // Open a file that is not backed by the file system. + const untitled = Uri.parse('untitled:Untitled-1.py', service.serviceProvider); + service.setFileOpened(untitled, 1, '# empty'); + + const fileList = service.test_getFileNamesFromFileSpecs(); + assert(fileList.filter((f) => f.equals(untitled))); + }); + + test('verify config fileSpecs after cloning', () => { + const fs = new TestFileSystem(/* ignoreCase */ true); + const configFile = { + ignore: ['**/node_modules/**'], + }; + + const rootUri = Uri.file(process.cwd(), fs); + const config = new ConfigOptions(rootUri); + const sp = createServiceProvider(fs, new NullConsole()); + config.initializeFromJson(configFile, rootUri, sp, new TestAccessHost()); + const cloned = deserialize(serialize(config)); + + assert.deepEqual(config.ignore, cloned.ignore); + }); + + test('verify can serialize config options', () => { + const config = new ConfigOptions(UriEx.file(process.cwd())); + const serialized = serialize(config); + const deserialized = deserialize(serialized); + assert.deepEqual(config, deserialized); + assert.ok(deserialized.findExecEnvironment(UriEx.file('foo/bar.py'))); + }); + + test('extra paths on undefined execution root/default workspace', () => { + const nullConsole = new NullConsole(); + const service = createAnalyzer(nullConsole); + const commandLineOptions = new CommandLineOptions(undefined, /* fromLanguageServer */ false); + commandLineOptions.configSettings.extraPaths = ['/extraPaths']; + + service.setOptions(commandLineOptions); + const configOptions = service.test_getConfigOptions(commandLineOptions); + + const expectedExtraPaths = [Uri.file('/extraPaths', service.serviceProvider)]; + assert.deepStrictEqual( + configOptions.defaultExtraPaths?.map((u) => u.getFilePath()), + 
expectedExtraPaths.map((u) => u.getFilePath()) + ); + }); + + test('Extended config files', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_with_extended_config')); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ true); + + service.setOptions(commandLineOptions); + + const fileList = service.test_getFileNamesFromFileSpecs(); + const fileNames = fileList.map((p) => p.fileName).sort(); + assert.deepStrictEqual(fileNames, ['sample.pyi', 'test.py']); + + const configOptions = service.test_getConfigOptions(commandLineOptions); + assert.equal(configOptions.diagnosticRuleSet.strictListInference, true); + }); + + test('Typechecking mode is standard when just config file is present', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_with_pyproject_toml')); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ true); + commandLineOptions.configSettings.typeCheckingMode = 'off'; + + service.setOptions(commandLineOptions); + + const configOptions = service.test_getConfigOptions(commandLineOptions); + assert.equal(configOptions.diagnosticRuleSet.reportPossiblyUnboundVariable, 'error'); + }); + + test('Typechecking mode depends upon if vscode extension or not', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/package1')); + let service = createAnalyzer(); + let commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ true); + + service.setOptions(commandLineOptions); + + let configOptions = service.test_getConfigOptions(commandLineOptions); + assert.equal(configOptions.diagnosticRuleSet.reportPossiblyUnboundVariable, 'none'); + + service = createAnalyzer(); + commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + + service.setOptions(commandLineOptions); + + configOptions = 
service.test_getConfigOptions(commandLineOptions); + assert.equal(configOptions.diagnosticRuleSet.reportPossiblyUnboundVariable, 'error'); + + commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ true); + commandLineOptions.configSettings.typeCheckingMode = 'strict'; + service = createAnalyzer(); + service.setOptions(commandLineOptions); + + configOptions = service.test_getConfigOptions(commandLineOptions); + assert.equal(configOptions.diagnosticRuleSet.reportPossiblyUnboundVariable, 'error'); + }); + + test('Include file paths are only set in the config file when using extension', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project1')); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ true); + commandLineOptions.configSettings.includeFileSpecs = ['test']; + service.setOptions(commandLineOptions); + + const configOptions = service.test_getConfigOptions(commandLineOptions); + assert.equal(configOptions.include.length, 1); + assert.ok(configOptions.include[0].regExp.source.includes('/subfolder1)')); + }); + + test('Include file paths can be added to on the command line with a config', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project1')); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + commandLineOptions.configSettings.includeFileSpecs = ['test']; + service.setOptions(commandLineOptions); + + const configOptions = service.test_getConfigOptions(commandLineOptions); + assert.equal(configOptions.include.length, 2); + assert.ok(configOptions.include[1].regExp.source.includes('/test)')); + }); + + test('Include file paths can be added to by an extension without a config', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/package1')); + const service = createAnalyzer(); + const 
commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ true); + commandLineOptions.configSettings.includeFileSpecs = ['test']; + service.setOptions(commandLineOptions); + + const configOptions = service.test_getConfigOptions(commandLineOptions); + assert.equal(configOptions.include.length, 1); + assert.ok(configOptions.include[0].regExp.source.includes('/test)')); + }); + + test('Command line options can override config but only when not using extension', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_with_all_config')); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + service.setOptions(commandLineOptions); + + // First get the default. + const defaultOptions = service.test_getConfigOptions(commandLineOptions); + + // Now set all of the different options and make sure the command line options override. + commandLineOptions.configSettings.typeCheckingMode = 'strict'; + commandLineOptions.configSettings.venvPath = 'test2'; + commandLineOptions.configSettings.typeshedPath = 'test2'; + commandLineOptions.configSettings.stubPath = 'test2'; + commandLineOptions.configSettings.useLibraryCodeForTypes = true; + commandLineOptions.configSettings.includeFileSpecs = ['test2']; + commandLineOptions.configSettings.excludeFileSpecs = ['test2']; + commandLineOptions.configSettings.diagnosticSeverityOverrides = { + reportMissingImports: DiagnosticSeverityOverrides.Error, + }; + commandLineOptions.configSettings.ignoreFileSpecs = ['test2']; + + service.setOptions(commandLineOptions); + const overriddenOptions = service.test_getConfigOptions(commandLineOptions); + assert.notDeepStrictEqual(defaultOptions.include, overriddenOptions.include); + assert.notDeepStrictEqual(defaultOptions.exclude, overriddenOptions.exclude); + assert.notDeepStrictEqual(defaultOptions.ignore, overriddenOptions.ignore); + 
assert.notDeepStrictEqual(defaultOptions.diagnosticRuleSet, overriddenOptions.diagnosticRuleSet); + assert.notDeepStrictEqual(defaultOptions.venvPath, overriddenOptions.venvPath); + // Typeshed and stub path are an exception, it should just be reported as a dupe. + assert.deepStrictEqual(defaultOptions.typeshedPath, overriddenOptions.typeshedPath); + assert.deepStrictEqual(defaultOptions.stubPath, overriddenOptions.stubPath); + + // Do the same with an extension based config, but make sure we get the default back. + const commandLineOptions2 = new CommandLineOptions(cwd, /* fromLanguageServer */ true); + service.setOptions(commandLineOptions2); + const overriddenOptions2 = service.test_getConfigOptions(commandLineOptions2); + assert.deepStrictEqual(defaultOptions, overriddenOptions2); + }); + + test('Config venvPath take precedences over language server settings', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_with_all_config')); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ true); + commandLineOptions.languageServerSettings.venvPath = 'test_from_language_server'; + service.setOptions(commandLineOptions); + + // Verify language server options don't override + const options = service.test_getConfigOptions(commandLineOptions); + assert.equal(options.venvPath?.pathIncludes('from_language_server'), false); + }); + + test('Command line venvPath take precedences over everything else', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_with_all_config')); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + commandLineOptions.configSettings.venvPath = 'test_from_command_line'; + commandLineOptions.languageServerSettings.venvPath = 'test_from_language_server'; + service.setOptions(commandLineOptions); + + // Verify command line overrides 
everything + const options = service.test_getConfigOptions(commandLineOptions); + assert.ok(options.venvPath?.pathIncludes('test_from_command_line')); + }); + + test('Config empty venvPath does not take precedences over language server settings', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_src_with_config_extra_paths')); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ true); + commandLineOptions.languageServerSettings.venvPath = 'test_from_language_server'; + service.setOptions(commandLineOptions); + + // Verify language server options don't override + const options = service.test_getConfigOptions(commandLineOptions); + assert.ok(options.venvPath?.pathIncludes('from_language_server')); + }); + + test('Language server specific settings are set whether or not there is a pyproject.toml', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_with_all_config')); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + commandLineOptions.languageServerSettings.autoImportCompletions = true; + commandLineOptions.languageServerSettings.indexing = true; + commandLineOptions.languageServerSettings.taskListTokens = [{ priority: TaskListPriority.High, text: 'test' }]; + commandLineOptions.languageServerSettings.logTypeEvaluationTime = true; + commandLineOptions.languageServerSettings.typeEvaluationTimeThreshold = 1; + commandLineOptions.languageServerSettings.enableAmbientAnalysis = false; + commandLineOptions.languageServerSettings.disableTaggedHints = true; + commandLineOptions.languageServerSettings.watchForSourceChanges = true; + commandLineOptions.languageServerSettings.watchForLibraryChanges = true; + commandLineOptions.languageServerSettings.watchForConfigChanges = true; + commandLineOptions.languageServerSettings.typeStubTargetImportName = 'test'; + 
commandLineOptions.languageServerSettings.checkOnlyOpenFiles = true; + commandLineOptions.languageServerSettings.disableTaggedHints = true; + commandLineOptions.languageServerSettings.pythonPath = 'test_python_path'; + + service.setOptions(commandLineOptions); + let options = service.test_getConfigOptions(commandLineOptions); + assert.strictEqual(options.autoImportCompletions, true); + assert.strictEqual(options.indexing, true); + assert.strictEqual(options.taskListTokens?.length, 1); + assert.strictEqual(options.logTypeEvaluationTime, true); + assert.strictEqual(options.typeEvaluationTimeThreshold, 1); + assert.strictEqual(options.disableTaggedHints, true); + assert.ok(options.pythonPath?.pathIncludes('test_python_path')); + + // Test with language server set to true to make sure they are still set. + commandLineOptions.fromLanguageServer = true; + commandLineOptions.languageServerSettings.venvPath = 'test_venv_path'; + service.setOptions(commandLineOptions); + options = service.test_getConfigOptions(commandLineOptions); + assert.strictEqual(options.autoImportCompletions, true); + assert.strictEqual(options.indexing, true); + assert.strictEqual(options.taskListTokens?.length, 1); + assert.strictEqual(options.logTypeEvaluationTime, true); + assert.strictEqual(options.typeEvaluationTimeThreshold, 1); + assert.strictEqual(options.disableTaggedHints, true); + assert.ok(options.pythonPath?.pathIncludes('test_python_path')); + + // Verify language server options don't override the config setting. 
Only command line should + assert.equal(options.venvPath?.pathIncludes('test_venv_path'), false); + }); + + test('DefaultPythonVersion no config', () => { + const cwd = normalizePath(process.cwd()); + const nullConsole = new NullConsole(); + const service = createAnalyzer(nullConsole); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + commandLineOptions.configFilePath = 'src/tests/samples/package1'; + service.setOptions(commandLineOptions); + + const config = service.test_getConfigOptions(commandLineOptions); + assert.deepStrictEqual(config.defaultPythonVersion, pythonVersion3_13); + }); + + test('DefaultPythonVersion with config', () => { + const cwd = normalizePath(process.cwd()); + const nullConsole = new NullConsole(); + const service = createAnalyzer(nullConsole); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + commandLineOptions.configFilePath = 'src/tests/samples/project1'; + service.setOptions(commandLineOptions); + + const config = service.test_getConfigOptions(commandLineOptions); + assert.deepStrictEqual(config.defaultPythonVersion, pythonVersion3_13); + }); + + test('Diagnostic rule overrides are preserved when positional args override include', () => { + const cwd = normalizePath(combinePaths(process.cwd(), 'src/tests/samples/project_with_diag_overrides')); + const service = createAnalyzer(); + const commandLineOptions = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + service.setOptions(commandLineOptions); + + // Get config without include override - should have reportPrivateImportUsage: 'none' + // because the config sets it to false. + const configWithoutOverride = service.test_getConfigOptions(commandLineOptions); + assert.equal(configWithoutOverride.diagnosticRuleSet.reportPrivateImportUsage, 'none'); + + // The basic default would be 'error', verify our config overrides it. 
+ const basicDefaults = getBasicDiagnosticRuleSet(); + assert.equal(basicDefaults.reportPrivateImportUsage, 'error'); + + // Now simulate positional args overriding include (like `pyright --project config.json subdir`). + const commandLineOptionsWithOverride = new CommandLineOptions(cwd, /* fromLanguageServer */ false); + commandLineOptionsWithOverride.configSettings.includeFileSpecsOverride = [combinePaths(cwd, 'subdir')]; + service.setOptions(commandLineOptionsWithOverride); + + const configWithOverride = service.test_getConfigOptions(commandLineOptionsWithOverride); + + // The diagnostic rule overrides from the config file should still be applied + // even when positional args replace the include paths. + assert.equal(configWithOverride.diagnosticRuleSet.reportPrivateImportUsage, 'none'); + + // The execution environment for a file in the override path should also + // have the config's diagnostic rule overrides. + const fileUri = Uri.file(combinePaths(cwd, 'subdir', 'sample.py'), service.serviceProvider); + const execEnv = configWithOverride.findExecEnvironment(fileUri); + assert.equal(execEnv.diagnosticRuleSet.reportPrivateImportUsage, 'none'); + }); + + function createAnalyzer(console?: ConsoleInterface) { + const cons = console ?? new NullConsole(); + const fs = createFromRealFileSystem(tempFile, cons); + const serviceProvider = createServiceProvider(fs, cons, tempFile); + const host = new TestAccessHost(); + host.getPythonVersion = () => pythonVersion3_13; + return new AnalyzerService('', serviceProvider, { + console: cons, + hostFactory: () => host, + shouldRunAnalysis: () => true, + }); + } +}); diff --git a/python-parser/packages/pyright-internal/src/tests/debug.test.ts b/python-parser/packages/pyright-internal/src/tests/debug.test.ts new file mode 100644 index 00000000..0d39aa5e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/debug.test.ts @@ -0,0 +1,120 @@ +/* + * debug.test.ts + * Copyright (c) Microsoft Corporation. 
+ * Licensed under the MIT license. + */ + +import assert from 'assert'; + +import * as debug from '../common/debug'; + +test('DebugAssertTrue', () => { + assert.doesNotThrow(() => { + debug.assert(true, "doesn't throw"); + }); +}); + +test('DebugAssertFalse', () => { + assert.throws( + () => { + debug.assert(false, 'should throw'); + }, + (err: any) => err instanceof Error, + 'unexpected' + ); +}); + +test('DebugAssertDetailInfo', () => { + // let assert to show more detail info which will get collected when + // assert raised + const detailInfo = 'Detail Info'; + assert.throws( + () => { + debug.assert(false, 'should throw', () => detailInfo); + }, + (err: any) => err instanceof Error && err.message.includes(detailInfo), + 'unexpected' + ); +}); + +test('DebugAssertStackTrace', () => { + // let assert to control what call stack to put in exception stack + assert.throws( + () => { + debug.assert(false, 'should throw', undefined, assert.throws); + }, + (err: any) => err instanceof Error && !err.message.includes('assert.throws'), + 'unexpected' + ); +}); + +test('DebugAssertUndefined', () => { + const unused = undefined; + assert.throws( + () => debug.assertDefined(unused), + (err: any) => err instanceof Error, + 'unexpected' + ); +}); + +test('DebugAssertDefined', () => { + const unused = 1; + assert.doesNotThrow(() => debug.assertDefined(unused)); +}); + +test('DebugAssertEachUndefined', () => { + type T = number | undefined; + const unused: T[] = [1, 2, 3, undefined]; + assert.throws( + () => debug.assertEachDefined(unused), + (err: any) => err instanceof Error, + 'unexpected' + ); +}); + +test('DebugAssertEachDefined', () => { + const unused: number[] = [1, 2, 3]; + assert.doesNotThrow(() => debug.assertEachDefined(unused)); +}); + +test('DebugAssertNever', () => { + const enum MyEnum { + A, + B, + C, + } + const unused = 5 as MyEnum; + + // prevent one from adding new values and forget to add + // handlers some places + assert.throws( + () => { + switch 
(unused) { + case MyEnum.A: + case MyEnum.B: + case MyEnum.C: + break; + default: + debug.assertNever(unused); + } + }, + (err: any) => err instanceof Error, + 'unexpected' + ); +}); + +test('DebugGetFunctionName', () => { + // helper method to add better message in exception + assert(debug.getFunctionName(assert.throws) === 'throws'); +}); + +test('DebugFormatEnum', () => { + // helper method to add better message in exception around enum + // const enum require --preserveConstEnums flag to work properly + enum MyEnum { + A, + B, + C, + } + assert(debug.formatEnum(MyEnum.A, MyEnum, false) === 'A'); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/deferred.test.ts b/python-parser/packages/pyright-internal/src/tests/deferred.test.ts new file mode 100644 index 00000000..b89c3a8e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/deferred.test.ts @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +'use strict'; + +import * as assert from 'assert'; + +import { createDeferred } from '../common/deferred'; + +test('Deferred - resolve', (done) => { + const valueToSent = new Date().getTime(); + const def = createDeferred(); + def.promise + .then((value) => { + assert.equal(value, valueToSent); + assert.equal(def.resolved, true, 'resolved property value is not `true`'); + }) + .then(done) + .catch(done); + + assert.equal(def.resolved, false, 'Promise is resolved even when it should not be'); + assert.equal(def.rejected, false, 'Promise is rejected even when it should not be'); + assert.equal(def.completed, false, 'Promise is completed even when it should not be'); + + def.resolve(valueToSent); + + assert.equal(def.resolved, true, 'Promise is not resolved even when it should not be'); + assert.equal(def.rejected, false, 'Promise is rejected even when it should not be'); + assert.equal(def.completed, true, 'Promise is not completed even when it should not be'); +}); +test('Deferred - reject', (done) => { + const errorToSend = new Error('Something'); + const def = createDeferred(); + def.promise + .then((value) => { + assert.fail(value, 'Error', 'Was expecting promise to get rejected, however it was resolved', ''); + done(); + }) + .catch((reason) => { + assert.equal(reason, errorToSend, 'Error received is not the same'); + done(); + }) + .catch(done); + + assert.equal(def.resolved, false, 'Promise is resolved even when it should not be'); + assert.equal(def.rejected, false, 'Promise is rejected even when it should not be'); + assert.equal(def.completed, false, 'Promise is completed even when it should not be'); + + def.reject(errorToSend); + + assert.equal(def.resolved, false, 'Promise is resolved even when it should not be'); + assert.equal(def.rejected, true, 'Promise is not rejected even when it should not be'); + assert.equal(def.completed, true, 'Promise is not completed even when it should not be'); +}); diff --git 
a/python-parser/packages/pyright-internal/src/tests/diagnosticOverrides.test.ts b/python-parser/packages/pyright-internal/src/tests/diagnosticOverrides.test.ts new file mode 100644 index 00000000..7e5a2938 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/diagnosticOverrides.test.ts @@ -0,0 +1,108 @@ +/* + * diagnosticOverrides.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Tests to verify consistency between declarations of diagnostic + * overrides in code and in the configuration schema. + */ + +import * as fs from 'fs'; +import * as path from 'path'; + +import { DiagnosticRule } from '../common/diagnosticRules'; + +describe('Diagnostic overrides', () => { + test('Compare DiagnosticRule to pyrightconfig.schema.json', () => { + const schemasFolder = path.resolve(__dirname, '..', '..', '..', 'vscode-pyright', 'schemas'); + const schemaJson = path.join(schemasFolder, 'pyrightconfig.schema.json'); + const jsonString = fs.readFileSync(schemaJson, { encoding: 'utf-8' }); + const json = JSON.parse(jsonString); + + expect(json.definitions?.diagnostic?.anyOf).toBeDefined(); + const anyOf = json.definitions?.diagnostic?.anyOf; + + expect(Array.isArray(anyOf)); + expect(anyOf).toHaveLength(2); + + expect(anyOf[0].type).toEqual('boolean'); + expect(anyOf[1].type).toEqual('string'); + + const enumValues = anyOf[1].enum; + expect(Array.isArray(enumValues)); + expect(enumValues).toHaveLength(4); + expect(enumValues[0]).toEqual('none'); + expect(enumValues[1]).toEqual('information'); + expect(enumValues[2]).toEqual('warning'); + expect(enumValues[3]).toEqual('error'); + + expect(json.properties).toBeDefined(); + const overrideNamesInJson = Object.keys(json.properties).filter((n) => n.startsWith('report')); + + for (const propName of overrideNamesInJson) { + const p = json.properties[propName]; + + const ref = p['$ref']; + const def = json.definitions[ref.substring(ref.lastIndexOf('/') + 1)]; + + 
expect(def['$ref']).toEqual(`#/definitions/diagnostic`); + expect(def.title).toBeDefined(); + expect(def.title.length).toBeGreaterThan(0); + expect(def.default).toBeDefined(); + expect(enumValues).toContain(def.default); + } + + const overrideNamesInCode: string[] = Object.values(DiagnosticRule).filter((x) => x.startsWith('report')); + + for (const n of overrideNamesInJson) { + expect(overrideNamesInCode).toContain(n); + } + for (const n of overrideNamesInCode) { + expect(overrideNamesInJson).toContain(n); + } + }); + test('Compare DiagnosticRule to package.json', () => { + const extensionRoot = path.resolve(__dirname, '..', '..', '..', 'vscode-pyright'); + const packageJson = path.join(extensionRoot, 'package.json'); + const jsonString = fs.readFileSync(packageJson, { encoding: 'utf-8' }); + const json = JSON.parse(jsonString); + + expect(json.contributes?.configuration?.properties).toBeDefined(); + const overrides = json.contributes?.configuration?.properties['python.analysis.diagnosticSeverityOverrides']; + expect(overrides).toBeDefined(); + const props = overrides.properties; + expect(props).toBeDefined(); + + const overrideNamesInJson = Object.keys(props); + for (const propName of overrideNamesInJson) { + const p = props[propName]; + + expect(p.type).toEqual(['string', 'boolean']); + expect(p.description).toBeDefined(); + expect(p.description.length).toBeGreaterThan(0); + expect(p.default).toBeDefined(); + + expect(p.enum).toBeDefined(); + expect(Array.isArray(p.enum)); + expect(p.enum).toHaveLength(6); + + expect(p.enum[0]).toEqual('none'); + expect(p.enum[1]).toEqual('information'); + expect(p.enum[2]).toEqual('warning'); + expect(p.enum[3]).toEqual('error'); + expect(p.enum[4]).toEqual(true); + expect(p.enum[5]).toEqual(false); + + expect(p.enum).toContain(p.default); + } + + const overrideNamesInCode: string[] = Object.values(DiagnosticRule).filter((x) => x.startsWith('report')); + + for (const n of overrideNamesInJson) { + 
expect(overrideNamesInCode).toContain(n); + } + for (const n of overrideNamesInCode) { + expect(overrideNamesInJson).toContain(n); + } + }); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/diagnostics.test.ts b/python-parser/packages/pyright-internal/src/tests/diagnostics.test.ts new file mode 100644 index 00000000..2c99916b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/diagnostics.test.ts @@ -0,0 +1,41 @@ +/* + * diagnostics.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for diagnostics + */ + +import { parseAndGetTestState } from './harness/fourslash/testState'; + +test('unused import', async () => { + const code = ` +// @filename: test1.py +//// from test2 import [|/*marker*/foo|] + +// @filename: test2.py +//// def foo(): pass + `; + + const state = parseAndGetTestState(code).state; + + state.verifyDiagnostics({ + marker: { category: 'unused', message: '"foo" is not accessed' }, + }); +}); + +test('pyright ignore unused import', async () => { + const code = ` +// @filename: test1.py +//// from test2 import [|/*marker*/foo|] # pyright: ignore + +// @filename: test2.py +//// def foo(): pass + `; + + const state = parseAndGetTestState(code).state; + + state.verifyDiagnostics({ + marker: { category: 'none', message: '' }, + }); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/docStringConversion.test.ts b/python-parser/packages/pyright-internal/src/tests/docStringConversion.test.ts new file mode 100644 index 00000000..0854d461 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/docStringConversion.test.ts @@ -0,0 +1,1123 @@ +/* + * docStringConversion.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Tests for the Python doc string to markdown converter. 
+ */ + +import assert = require('assert'); +import { PyrightDocStringService } from '../common/docStringService'; + +// For substitution in the test data strings +// Produces more readable test data than escaping the back ticks +const singleTick = '`'; +const doubleTick = '``'; +const tripleTick = '```'; +const tripleTilda = '~~~'; + +export function docStringTests(docStringService = new PyrightDocStringService()) { + test('PlaintextIndention', () => { + const all: string[][] = [ + ['A\nB', 'A\nB'], + ['A\n\nB', 'A\n\nB'], + ['A\n B', 'A\nB'], + [' A\n B', 'A\nB'], + ['\nA\n B', 'A\n B'], + ['\n A\n B', 'A\nB'], + ['\nA\nB\n', 'A\nB'], + [' \n\nA \n \nB \n ', 'A\n\nB'], + ]; + + all.forEach((v) => _testConvertToPlainText(v[0], v[1])); + }); + + test('MarkdownIndention', () => { + const all: string[][] = [ + ['A\nB', 'A\nB'], + ['A\n\nB', 'A\n\nB'], + ['A\n B', 'A\nB'], + [' A\n B', 'A\nB'], + ['\nA\n B', 'A \n    B'], + ['\n A\n B', 'A\nB'], + ['\nA\nB\n', 'A\nB'], + [' \n\nA \n \nB \n ', 'A\n\nB'], + ]; + + all.forEach((v) => _testConvertToMarkdown(v[0], v[1])); + }); + + test('NormalText', () => { + const docstring = `This is just some normal text +that extends over multiple lines. This will appear +as-is without modification. +`; + + const markdown = `This is just some normal text +that extends over multiple lines. This will appear +as-is without modification. 
+`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('InlineLiterals', () => { + const docstring = + 'This paragraph talks about ``foo``\n' + + 'which is related to :something:`bar`, and probably `qux`:something_else:.\n'; + + const markdown = 'This paragraph talks about `foo` \n' + 'which is related to `bar`, and probably `qux`.\n'; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('Headings', () => { + const docstring = `Heading 1 +========= + +Heading 2 +--------- + +Heading 3 +~~~~~~~~~ + +Heading 4 ++++++++++ +`; + + const markdown = `Heading 1 +========= + +Heading 2 +--------- + +Heading 3 +--------- + +Heading 4 +--------- +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('AsterisksAtStartOfArgs', () => { + const docstring = `Foo: + + Args: + foo (Foo): Foo! + *args: These are positional args. + **kwargs: These are named args. +`; + + const markdown = `Foo: + +Args: +    foo (Foo): Foo! +    \\*args: These are positional args. +    \\*\\*kwargs: These are named args. +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('CopyrightAndLicense', () => { + const docstring = `This is a test. + +:copyright: Fake Name +:license: ABCv123 +`; + + const markdown = `This is a test. + +:copyright: Fake Name +:license: ABCv123 +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('CommonRestFieldLists', () => { + const docstring = `This function does something. + +:param foo: This is a description of the foo parameter + which does something interesting. +:type foo: Foo +:param bar: This is a description of bar. +:type bar: Bar +:return: Something else. +:rtype: Something +:raises ValueError: If something goes wrong. +`; + + const markdown = `This function does something. + +:param foo: This is a description of the foo parameter +    which does something interesting. +:type foo: Foo +:param bar: This is a description of bar. +:type bar: Bar +:return: Something else. 
+:rtype: Something +:raises ValueError: If something goes wrong. +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('Doctest', () => { + const docstring = `This is a doctest: + +>>> print('foo') +foo +`; + + const markdown = `This is a doctest: + +${tripleTick} +>>> print('foo') +foo +${tripleTick} +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('DoctestIndented', () => { + const docstring = `This is a doctest: + + >>> print('foo') + foo +`; + + const markdown = `This is a doctest: + +${tripleTick} +>>> print('foo') +foo +${tripleTick} +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('DoctestTextAfter', () => { + const docstring = `This is a doctest: + +>>> print('foo') +foo + +This text comes after. +`; + + const markdown = `This is a doctest: + +${tripleTick} +>>> print('foo') +foo +${tripleTick} + +This text comes after. +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('DoctestIndentedTextAfter', () => { + const docstring = `This is a doctest: + + >>> print('foo') + foo + This line has a different indent. +`; + + const markdown = `This is a doctest: + +${tripleTick} +>>> print('foo') +foo +${tripleTick} + +This line has a different indent. +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('MarkdownStyleBacktickBlock', () => { + const docstring = `Backtick block: + +${tripleTick} +print(foo_bar) + +if True: + print(bar_foo) +${tripleTick} + +And some text after. +`; + + const markdown = `Backtick block: + +${tripleTick} +print(foo_bar) + +if True: + print(bar_foo) +${tripleTick} + +And some text after. +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('MarkdownStyleTildaBlock', () => { + const docstring = `Backtick block: + +${tripleTilda} +print(foo_bar) + +if True: + print(bar_foo) +${tripleTilda} + +And some text after. 
+`; + + const markdown = `Backtick block: + +${tripleTilda} +print(foo_bar) + +if True: + print(bar_foo) +${tripleTilda} + +And some text after. +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('RestLiteralBlock', () => { + const docstring = ` +Take a look at this code:: + + if foo: + print(foo) + else: + print('not foo!') + +This text comes after. +`; + + const markdown = `Take a look at this code: + +${tripleTick} + if foo: + print(foo) + else: + print('not foo!') +${tripleTick} + +This text comes after. +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('RestLiteralBlockEmptyDoubleColonLine', () => { + const docstring = ` +:: + + if foo: + print(foo) + else: + print('not foo!') +`; + + const markdown = `${tripleTick} + if foo: + print(foo) + else: + print('not foo!') +${tripleTick} +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('RestLiteralBlockExtraSpace', () => { + const docstring = ` +Take a look at this code:: + + + + + if foo: + print(foo) + else: + print('not foo!') + +This text comes after. +`; + + const markdown = `Take a look at this code: + +${tripleTick} + if foo: + print(foo) + else: + print('not foo!') +${tripleTick} + +This text comes after. +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('RestLiteralBlockNoIndentOneLiner', () => { + const docstring = ` +The next code is a one-liner:: + +print(a + foo + 123) + +And now it's text. +`; + + const markdown = `The next code is a one-liner: + +${tripleTick} +print(a + foo + 123) +${tripleTick} + +And now it's text. +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('DirectiveRemoval', () => { + const docstring = `This is a test. + +.. ignoreme:: example + +This text is in-between. + +.. versionadded:: 1.0 + Foo was added to Bar. + +.. admonition:: Note + + This paragraph appears inside the admonition + and spans multiple lines. + +This text comes after. +`; + + const markdown = `This is a test. 
+ +This text is in-between. + +This text comes after. +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('ClassDirective', () => { + const docstring = ` +.. class:: FooBar() + This is a description of ${doubleTick}FooBar${doubleTick}. + +${doubleTick}FooBar${doubleTick} is interesting. +`; + + const markdown = `${tripleTick} +FooBar() +${tripleTick} + +This is a description of ${singleTick}FooBar${singleTick}. + +${singleTick}FooBar${singleTick} is interesting. +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('CodeBlockDirective', () => { + const docstring = `Take a look at this + .. code-block:: Python + + if foo: + print(foo) + else: + print('not foo!') + +This text comes after. +`; + + const markdown = `Take a look at this +${tripleTick} + + if foo: + print(foo) + else: + print('not foo!') +${tripleTick} + +This text comes after.`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('UnfinishedBacktickBlock', () => { + const docstring = '```\nsomething\n'; + + const markdown = '```\nsomething\n```\n'; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('UnfinishedTildaBlock', () => { + const docstring = '~~~\nsomething\n'; + + const markdown = '~~~\nsomething\n~~~\n'; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('UnfinishedInlineLiteral', () => { + const docstring = '`oops\n'; + + const markdown = '`oops`'; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('DashList', () => { + const docstring = ` +This is a list: + - Item 1 + - Item 2 +`; + + const markdown = `This is a list: + - Item 1 + - Item 2 +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('AsteriskList', () => { + const docstring = ` +This is a list: + * Item 1 + * Item 2 +`; + + const markdown = `This is a list: + * Item 1 + * Item 2 +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('SquareBrackets', () => { + const docstring = 'Optional[List[str]]'; + const markdown = 
'Optional\\[List\\[str\\]\\]'; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('ListDashMultiline', () => { + const docstring = `Keyword Arguments: + + - option_strings -- A list of command-line option strings which + should be associated with this action. + + - dest -- The name of the attribute to hold the created object(s) +`; + + const markdown = `Keyword Arguments: + +- option\\_strings -- A list of command-line option strings which +should be associated with this action. + +- dest -- The name of the attribute to hold the created object(s)`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('HalfIndentOnLeadingDash', () => { + const docstring = `Dash List +- foo + - foo +- bar + - baz +- qux + - aaa + `; + + const markdown = `Dash List +- foo + - foo +- bar + - baz +- qux + - aaa`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('AsteriskMultilineList', () => { + const docstring = ` +This is a list: + * this is a long, multi-line paragraph. It + seems to go on and on. + + * this is a long, multi-line paragraph. It + seems to go on and on. +`; + + const markdown = `This is a list: + * this is a long, multi-line paragraph. It +seems to go on and on. + + * this is a long, multi-line paragraph. It +seems to go on and on. +`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('ListAsteriskAddLeadingSpace', () => { + const docstring = `Title +* First line bullet, no leading space + with second line. +* Second line bullet, no leading space + with second line.`; + + const markdown = `Title + * First line bullet, no leading space +with second line. + * Second line bullet, no leading space +with second line.`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('PandasReadCsvListIndent', () => { + const docstring = `Title +keep_default_na : bool, default True + Whether or not to include the default NaN values when parsing the data. 
+ + * If \`keep_default_na\` is True, and \`na_values\` are specified, \`na_values\` + is appended to the default NaN values used for parsing. + +na_filter : bool, default True`; + + const markdown = `Title +keep\\_default\\_na : bool, default True +    Whether or not to include the default NaN values when parsing the data. + + * If \`keep_default_na\` is True, and \`na_values\` are specified, \`na_values\` +is appended to the default NaN values used for parsing. + +na\\_filter : bool, default True`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('FieldListEpyText', () => { + const docstring = ` + 1. Epytext: + @param param1: description`; + + const markdown = ` +1. Epytext: +     @param param1: description`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('FieldListRest', () => { + const docstring = ` + 2. reST: + :param param1: description`; + + const markdown = ` +2. reST: +     :param param1: description`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('FieldListGoogleV1', () => { + const docstring = ` + 3. Google (variant 1): + Args: + param1: description`; + + const markdown = ` +3. Google (variant 1): +     Args: +         param1: description`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('FieldListGoogleV2', () => { + const docstring = ` + 4. Google (variant 2): + Args: + param1 (type): description + param2 (type): description`; + + const markdown = ` +4. Google (variant 2): +     Args: +         param1 (type): description +         param2 (type): description`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('Googlewithreturntypes', () => { + const docstring = ` + Example function with types documented in the docstring. + + \`PEP 484\`_ type annotations are supported. If attribute, parameter, and + return types are annotated according to \`PEP 484\`_, they do not need to be + included in the docstring: + + Args: + param1 (int): The first parameter. 
+ param2 (str): The second parameter. + + Returns: + bool: The return value. True for success, False otherwise. + + .. _PEP 484: + https://www.python.org/dev/peps/pep-0484/`; + + const markdown = ` +Example function with types documented in the docstring. + +\`PEP 484\`\\_ type annotations are supported. If attribute, parameter, and +return types are annotated according to \`PEP 484\`\\_, they do not need to be +included in the docstring: + +Args: +    param1 (int): The first parameter. +    param2 (str): The second parameter. + +Returns: +    bool: The return value. True for success, False otherwise.`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('GoogleWithComplexTypes', () => { + const docstring = ` + Example function with types documented in the docstring. + + Args: + param1 (int|bool): The first parameter. + param2 (list[str] with others): The second parameter. + + Returns: + bool: The return value. True for success, False otherwise. +`; + + const markdown = ` +Example function with types documented in the docstring. + +Args: +    param1 (int|bool): The first parameter. +    param2 (list\\[str\\] with others): The second parameter. + +Returns: +    bool: The return value. True for success, False otherwise.`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('FieldListDontAddLineBreaksToHeaders', () => { + const docstring = ` + Parameters + ---------- + ThisIsAFieldAfterAHeader : str`; + + const markdown = ` +Parameters +---------- +ThisIsAFieldAfterAHeader : str`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('EpyDocCv2Imread', () => { + const docstring = `imread(filename[, flags]) -> retval +. @brief Loads an image from a file. +. @anchor imread +. +. The function imread loads an image from the specified file and returns it. If the image cannot be +. read (because of missing file, improper permissions, unsupported or invalid format), the function +. +. Currently, the following file formats are supported: +. 
+. - Windows bitmaps - \\*.bmp, \\*.dib (always supported) +. - JPEG files - \\*.jpeg, \\*.jpg, \\*.jpe (see the *Note* section)`; + + const markdown = `imread(filename\\[, flags\\]) -> retval + +@brief Loads an image from a file. + +@anchor imread + +The function imread loads an image from the specified file and returns it. If the image cannot be +read (because of missing file, improper permissions, unsupported or invalid format), the function + +Currently, the following file formats are supported: + +- Windows bitmaps - \\*.bmp, \\*.dib (always supported) +- JPEG files - \\*.jpeg, \\*.jpg, \\*.jpe (see the \\*Note\\* section)`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('Non EpyDocCv2Imread', () => { + const docstring = `imread(filename[, flags]) -> retval + . @brief Loads an image from a file. + . @anchor imread + . + . The function imread loads an image from the specified file and returns it. If the image cannot be + . read (because of missing file, improper permissions, unsupported or invalid format), the function + . + . Currently, the following file formats are supported: + . + . - Windows bitmaps - \\*.bmp, \\*.dib (always supported) + . - JPEG files - \\*.jpeg, \\*.jpg, \\*.jpe (see the *Note* section)`; + + const markdown = `imread(filename\\[, flags\\]) -> retval +. @brief Loads an image from a file. +. @anchor imread +. +. The function imread loads an image from the specified file and returns it. If the image cannot be +. read (because of missing file, improper permissions, unsupported or invalid format), the function +. +. Currently, the following file formats are supported: +. +. - Windows bitmaps - \\*.bmp, \\*.dib (always supported) +. - JPEG files - \\*.jpeg, \\*.jpg, \\*.jpe (see the \\*Note\\* section)`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('EpyDocTest', () => { + const docstring = `Return the x intercept of the line M{y=m*x+b}. 
The X{x intercept} +of a line is the point at which it crosses the x axis (M{y=0}). + +This function can be used in conjuction with L{z_transform} to +find an arbitrary function's zeros. + +@type m: number +@param m: The slope of the line. +@type b: number +@param b: The y intercept of the line. The X{y intercept} of a + line is the point at which it crosses the y axis (M{x=0}). +@rtype: number +@return: the x intercept of the line M{y=m*x+b}.`; + + const markdown = `Return the x intercept of the line M{y=m\\*x+b}. The X{x intercept} +of a line is the point at which it crosses the x axis (M{y=0}). + +This function can be used in conjuction with L{z\\_transform} to +find an arbitrary function's zeros. + +@type m: number + +@param m: The slope of the line. + +@type b: number + +@param b: The y intercept of the line. The X{y intercept} of a +            line is the point at which it crosses the y axis (M{x=0}). + +@rtype: number + +@return: the x intercept of the line M{y=m\\*x+b}.`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('DontEscapeHtmlTagsInsideCodeBlocks', () => { + const docstring = 'hello ``'; + + const markdown = 'hello ``'; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('EscapeHtmlTagsOutsideCodeBlocks', () => { + const docstring = 'hello '; + + const markdown = 'hello <noncode>'; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('IndentedCodeBlock', () => { + const docstring = ` +Expected: + ${tripleTick}python + def some_fn(): + """ + Backticks on a different indentation level don't close the code block. + ${tripleTick} + """ + ${tripleTick} +`; + + const markdown = ` +Expected: +${tripleTick}python + def some_fn(): + """ + Backticks on a different indentation level don't close the code block. 
+ ${tripleTick} + """ +${tripleTick} +`; + _testConvertToMarkdown(docstring, markdown); + }); + + test('IndentedCodeBlockTilda', () => { + const docstring = ` +Expected: + ${tripleTilda}python + def some_fn(): + """ + Backticks on a different indentation level don't close the code block. + ${tripleTilda} + """ + ${tripleTilda} +`; + + const markdown = ` +Expected: +${tripleTilda}python + def some_fn(): + """ + Backticks on a different indentation level don't close the code block. + ${tripleTilda} + """ +${tripleTilda} +`; + _testConvertToMarkdown(docstring, markdown); + }); + + test('MixedCodeBlockBacktick', () => { + const docstring = ` +Expected: + ${tripleTick}python + def some_fn(): + """ + Backticks on a different indentation level don't close the code block. + ${tripleTick} + """ + ${tripleTick} +Expected: + ${tripleTilda}python + def some_fn(): + """ + Backticks on a different indentation level don't close the code block. + ${tripleTick} + """ + ${tripleTilda} +`; + + const markdown = ` +Expected: +${tripleTick}python + def some_fn(): + """ + Backticks on a different indentation level don't close the code block. + ${tripleTick} + """ +${tripleTick} + +Expected: +${tripleTilda}python + def some_fn(): + """ + Backticks on a different indentation level don't close the code block. 
+ ${tripleTick} + """ +${tripleTilda} +`; + _testConvertToMarkdown(docstring, markdown); + }); + + test('RestTableWithHeader', () => { + const docstring = ` +=============== ========================================================= +Generator +--------------- --------------------------------------------------------- +Generator Class implementing all of the random number distributions +default_rng Default constructor for \`\`Generator\`\` +=============== =========================================================`; + + const markdown = ` +|Generator | | +|---------------|---------------------------------------------------------| +|Generator |Class implementing all of the random number distributions| +|default_rng |Default constructor for \`\`Generator\`\`| + +
`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('RestTablesMultilineHeader', () => { + const docstring = ` +==================== ========================================================= +Compatibility +functions - removed +in the new API +-------------------- --------------------------------------------------------- +rand Uniformly distributed values. +==================== =========================================================`; + + const markdown = ` +|Compatibility
functions - removed
in the new API |

| +|--------------------|---------------------------------------------------------| +|rand |Uniformly distributed values.| + +
`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('RestTableSimple', () => { + const docstring = ` +============================== ===================================== +Scalar Type Array Type +============================== ===================================== +:class:\`pandas.Interval\` :class:\`pandas.arrays.IntervalArray\` +:class:\`pandas.Period\` :class:\`pandas.arrays.PeriodArray\` +============================== ===================================== + `; + + const markdown = `|Scalar Type |Array Type | +|------------------------------|-------------------------------------| +|:class:\`pandas.Interval\` |:class:\`pandas.arrays.IntervalArray\`| +|:class:\`pandas.Period\` |:class:\`pandas.arrays.PeriodArray\`| + +
`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test('ReSTTableIndented', () => { + const docstring = ` + data : + + dtype : str, np.dtype, or ExtensionDtype, optional + + ============================== ===================================== + Scalar Type Array Type + ============================== ===================================== + :class:\`pandas.Interval\` :class:\`pandas.arrays.IntervalArray\` + :class:\`pandas.Period\` :class:\`pandas.arrays.PeriodArray\` + ============================== =====================================`; + + const markdown = ` +data : + +dtype : str, np.dtype, or ExtensionDtype, optional + +| Scalar Type | Array Type | +|------------------------------|-------------------------------------| +| :class:\`pandas.Interval\` | :class:\`pandas.arrays.IntervalArray\`| +| :class:\`pandas.Period\` | :class:\`pandas.arrays.PeriodArray\`| + +
`; + + _testConvertToMarkdown(docstring, markdown); + }); + + test(`OddnumberOfColons`, () => { + const docstring = ` + @param 'original:str' or 'original:list': original string to compare + @param 'new:str': the new string to compare + @return 'int': levenshtein difference + @return 'list': levenshtein difference if list + `; + const markdown = `@param 'original:str' or 'original:list': original string to compare\n\n@param 'new:str': the new string to compare\n\n@return 'int': levenshtein difference\n\n@return 'list': levenshtein difference if list`; + + _testConvertToMarkdown(docstring, markdown); + }); + + function _testConvertToMarkdown(docstring: string, expectedMarkdown: string) { + const actualMarkdown = docStringService.convertDocStringToMarkdown(docstring); + + assert.equal(_normalizeLineEndings(actualMarkdown).trim(), _normalizeLineEndings(expectedMarkdown).trim()); + } + + function _testConvertToPlainText(docstring: string, expectedPlainText: string) { + const actualMarkdown = docStringService.convertDocStringToPlainText(docstring); + + assert.equal(_normalizeLineEndings(actualMarkdown).trim(), _normalizeLineEndings(expectedPlainText).trim()); + } + + function _normalizeLineEndings(text: string): string { + return text.split(/\r?\n/).join('\n'); + } + + test('RPYCLiteralBlockTransition', () => { + const docstring = ` +:: + + ##### ##### #### + ## ## ## ## ## #### + ## ## ## ## ## # + ##### ##### ## ## ## ## + ## ## ## ## ## ## # + ## ## ## ### ## ### + ## ## ## ## ##### + -------------------- ## ------------------------------------------ + ## + +Remote Python Call (RPyC) +`; + + const markdown = ` + +${tripleTick} + ##### ##### #### + ## ## ## ## ## #### + ## ## ## ## ## # + ##### ##### ## ## ## ## + ## ## ## ## ## ## # + ## ## ## ### ## ### + ## ## ## ## ##### + -------------------- ## ------------------------------------------ + ## +${tripleTick} + +Remote Python Call (RPyC) +`; + + _testConvertToMarkdown(docstring, markdown); + }); +} + +describe('Doc 
String Conversion', () => { + docStringTests(); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/documentSymbolCollector.test.ts b/python-parser/packages/pyright-internal/src/tests/documentSymbolCollector.test.ts new file mode 100644 index 00000000..ecb474f7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/documentSymbolCollector.test.ts @@ -0,0 +1,399 @@ +/* + * documentSymbolCollector.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Tests documentSymbolCollector + */ + +import { parseAndGetTestState } from './harness/fourslash/testState'; +import { verifyReferencesAtPosition } from './testStateUtils'; + +test('folder reference', () => { + const code = ` +// @filename: common/__init__.py +//// from [|io2|] import tools as tools +//// from [|io2|].tools import pathUtils as pathUtils + +// @filename: io2/empty.py +//// # empty + +// @filename: io2/tools/__init__.py +//// def combine(a, b): +//// pass + +// @filename: io2/tools/pathUtils.py +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from common import * +//// +//// tools.combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test2.py +//// from .[|io2|] import tools as t +//// +//// t.combine(1, 1) + +// @filename: test3.py +//// from .[|io2|].tools import pathUtils as p +//// +//// p.getFilename("c") + +// @filename: test4.py +//// from common import tools, pathUtils +//// +//// tools.combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test5.py +//// from [|io2|] import tools as tools +//// from [|io2|].tools import pathUtils as pathUtils +//// +//// tools.combine(1, 1) +//// pathUtils.getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + + const ranges = state.getRangesByText().get('io2')!; + for (const range of ranges) { + verifyReferencesAtPosition(state.program, state.configOptions, 'io2', range.fileName, range.pos, ranges); + } +}); + +test('__init__ 
wildcard import', () => { + const code = ` +// @filename: common/__init__.py +//// from io2 import [|tools|] as [|tools|] +//// from io2.[|tools|] import pathUtils as pathUtils + +// @filename: io2/empty.py +//// # empty + +// @filename: io2/tools/__init__.py +//// def combine(a, b): +//// pass + +// @filename: io2/tools/pathUtils.py +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from common import * +//// +//// [|tools|].combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test2.py +//// from .io2 import [|tools|] as t +//// +//// t.combine(1, 1) + +// @filename: test3.py +//// from .io2.[|tools|] import pathUtils as p +//// +//// p.getFilename("c") + +// @filename: test4.py +//// from common import [|tools|], pathUtils +//// +//// [|tools|].combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test5.py +//// from io2 import [|tools|] as [|tools|] +//// from io2.[|tools|] import pathUtils as pathUtils +//// +//// [|tools|].combine(1, 1) +//// pathUtils.getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + + const ranges = state.getRangesByText().get('tools')!; + for (const range of ranges) { + verifyReferencesAtPosition(state.program, state.configOptions, 'tools', range.fileName, range.pos, ranges); + } +}); + +test('submodule wildcard import', () => { + const code = ` +// @filename: common/__init__.py +//// from io2 import tools as tools +//// from io2.tools import [|pathUtils|] as [|pathUtils|] + +// @filename: io2/empty.py +//// # empty + +// @filename: io2/tools/__init__.py +//// def combine(a, b): +//// pass + +// @filename: io2/tools/pathUtils.py +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from common import * +//// +//// tools.combine(1, 1) +//// [|pathUtils|].getFilename("c") + +// @filename: test2.py +//// from .io2 import tools as t +//// +//// t.combine(1, 1) + +// @filename: test3.py +//// from .io2.tools import [|pathUtils|] as p +//// +//// 
p.getFilename("c") + +// @filename: test4.py +//// from common import tools, [|pathUtils|] +//// +//// tools.combine(1, 1) +//// [|pathUtils|].getFilename("c") + +// @filename: test5.py +//// from io2 import tools as tools +//// from io2.tools import [|pathUtils|] as [|pathUtils|] +//// +//// tools.combine(1, 1) +//// [|pathUtils|].getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + + const ranges = state.getRangesByText().get('pathUtils')!; + for (const range of ranges) { + verifyReferencesAtPosition(state.program, state.configOptions, 'pathUtils', range.fileName, range.pos, ranges); + } +}); + +test('use localName import alias', () => { + const code = ` +// @filename: common/__init__.py +//// from io2 import tools as [|/*marker1*/tools|] +//// from io2.tools import pathUtils as pathUtils + +// @filename: io2/empty.py +//// # empty + +// @filename: io2/tools/__init__.py +//// def combine(a, b): +//// pass + +// @filename: io2/tools/pathUtils.py +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from common import * +//// +//// [|/*marker2*/tools|].combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test2.py +//// from .io2 import tools as t +//// +//// t.combine(1, 1) + +// @filename: test3.py +//// from .io2.tools import pathUtils as p +//// +//// p.getFilename("c") + +// @filename: test4.py +//// from common import [|/*marker3*/tools|], pathUtils +//// +//// [|/*marker4*/tools|].combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test5.py +//// from io2 import tools as [|/*marker5*/tools|] +//// from io2.tools import pathUtils as pathUtils +//// +//// [|/*marker6*/tools|].combine(1, 1) +//// pathUtils.getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + const references = state + .getRangesByText() + .get('tools')! 
+ .map((r) => ({ uri: r.fileUri, range: state.convertPositionRange(r) })); + + state.verifyFindAllReferences({ + marker1: { references }, + marker2: { references }, + marker3: { references }, + marker4: { references }, + marker5: { references }, + marker6: { references }, + }); +}); + +test('use localName import module', () => { + const code = ` +// @filename: common/__init__.py +//// from io2 import [|/*marker1*/tools|] as [|tools|] +//// from io2.[|/*marker2*/tools|] import pathUtils as pathUtils + +// @filename: io2/empty.py +//// # empty + +// @filename: io2/tools/__init__.py +//// def combine(a, b): +//// pass + +// @filename: io2/tools/pathUtils.py +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from common import * +//// +//// [|tools|].combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test2.py +//// from .io2 import [|/*marker3*/tools|] as t +//// +//// t.combine(1, 1) + +// @filename: test3.py +//// from .io2.[|/*marker4*/tools|] import pathUtils as p +//// +//// p.getFilename("c") + +// @filename: test4.py +//// from common import [|tools|], pathUtils +//// +//// [|tools|].combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test5.py +//// from io2 import [|/*marker5*/tools|] as [|tools|] +//// from io2.[|/*marker6*/tools|] import pathUtils as pathUtils +//// +//// [|tools|].combine(1, 1) +//// pathUtils.getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + const references = state + .getRangesByText() + .get('tools')! 
+ .map((r) => ({ uri: r.fileUri, range: state.convertPositionRange(r) })); + + state.verifyFindAllReferences({ + marker1: { references }, + marker2: { references }, + marker3: { references }, + marker4: { references }, + marker5: { references }, + marker6: { references }, + }); +}); + +test('import dotted name', () => { + const code = ` +// @filename: nest1/__init__.py +//// # empty + +// @filename: nest1/nest2/__init__.py +//// # empty + +// @filename: nest1/nest2/module.py +//// def foo(): +//// pass + +// @filename: test1.py +//// import [|nest1|].[|nest2|].[|module|] +//// +//// [|nest1|].[|nest2|].[|module|] + +// @filename: nest1/test2.py +//// import [|nest1|].[|nest2|].[|module|] +//// +//// [|nest1|].[|nest2|].[|module|] + `; + + const state = parseAndGetTestState(code).state; + + function verify(name: string) { + const ranges = state.getRangesByText().get(name)!; + for (const range of ranges) { + verifyReferencesAtPosition(state.program, state.configOptions, name, range.fileName, range.pos, ranges); + } + } + + verify('nest1'); + verify('nest2'); + verify('module'); +}); + +test('import alias', () => { + const code = ` +// @filename: nest/__init__.py +//// # empty + +// @filename: nest/module2.py +//// # empty + +// @filename: module1.py +//// # empty + +// @filename: test1.py +//// import [|/*marker1*/module1|] as [|module1|] + +// @filename: test2.py +//// import nest.[|/*marker2*/module2|] as [|module2|] + `; + + const state = parseAndGetTestState(code).state; + + const marker1 = state.getMarkerByName('marker1'); + const ranges1 = state.getRangesByText().get('module1')!; + verifyReferencesAtPosition( + state.program, + state.configOptions, + 'module1', + marker1.fileName, + marker1.position, + ranges1 + ); + + const marker2 = state.getMarkerByName('marker2'); + const ranges2 = state.getRangesByText().get('module2')!; + verifyReferencesAtPosition( + state.program, + state.configOptions, + 'module2', + marker2.fileName, + marker2.position, + ranges2 + ); 
+}); + +test('string in __all__', () => { + const code = ` +// @filename: test1.py +//// class [|/*marker1*/A|]: +//// pass +//// +//// a: "[|A|]" = "A" +//// +//// __all__ = [ "[|A|]" ] + `; + + const state = parseAndGetTestState(code).state; + + const marker1 = state.getMarkerByName('marker1'); + const ranges1 = state.getRangesByText().get('A')!; + verifyReferencesAtPosition(state.program, state.configOptions, 'A', marker1.fileName, marker1.position, ranges1); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/envVarUtils.test.ts b/python-parser/packages/pyright-internal/src/tests/envVarUtils.test.ts new file mode 100644 index 00000000..d5ac1419 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/envVarUtils.test.ts @@ -0,0 +1,228 @@ +/* + * envVarUtils.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for functions in envVarUtils. + */ + +import * as os from 'os'; + +import assert from 'assert'; + +import { expandPathVariables, resolvePathWithEnvVariables } from '../common/envVarUtils'; +import { WellKnownWorkspaceKinds, Workspace, createInitStatus } from '../workspaceFactory'; +import { UriEx } from '../common/uri/uriUtils'; +import { Uri } from '../common/uri/uri'; +import { AnalyzerService } from '../analyzer/service'; +import { NullConsole } from '../common/console'; +import { TestAccessHost } from './harness/testAccessHost'; +import { ConfigOptions } from '../common/configOptions'; +import { TestFileSystem } from './harness/vfs/filesystem'; +import { createServiceProvider } from '../common/serviceProviderExtensions'; + +jest.mock('os', () => ({ __esModule: true, ...jest.requireActual('os') })); + +const defaultWorkspace = createWorkspace(undefined); +const normalworkspace = createWorkspace(UriEx.file('/')); + +test('expands ${workspaceFolder}', () => { + const workspaceFolderUri = UriEx.parse('/src'); + const test_path = '${workspaceFolder}/foo'; + const path = 
`${workspaceFolderUri.getPath()}/foo`; + assert.equal(expandPathVariables(test_path, workspaceFolderUri, []), path); +}); + +test('expands ${workspaceFolder:sibling}', () => { + const workspaceFolderUri = UriEx.parse('/src'); + const workspace = { workspaceName: 'sibling', rootUri: workspaceFolderUri } as Workspace; + const test_path = `\${workspaceFolder:${workspace.workspaceName}}/foo`; + const path = `${workspaceFolderUri.getPath()}/foo`; + assert.equal(expandPathVariables(test_path, workspaceFolderUri, [workspace]), path); +}); + +test('resolvePathWithEnvVariables ${workspaceFolder}', () => { + const workspaceFolderUri = UriEx.parse('mem-fs:/hello/there'); + const test_path = `\${workspaceFolder}/foo`; + const path = `${workspaceFolderUri.toString()}/foo`; + + assert.equal(resolvePathWithEnvVariables(defaultWorkspace, test_path, []), undefined); + + const workspace = createWorkspace(workspaceFolderUri); + assert.equal(resolvePathWithEnvVariables(workspace, test_path, [])?.toString(), path); +}); + +test('test resolvePathWithEnvVariables', () => { + assert(!resolvePathWithEnvVariables(defaultWorkspace, '', [])); + assert(!resolvePathWithEnvVariables(defaultWorkspace, '${workspaceFolder}', [])); +}); + +describe('expandPathVariables', () => { + const OLD_ENV = process.env; + + beforeEach(() => { + jest.resetModules(); + process.env = { ...OLD_ENV }; + }); + + afterAll(() => { + process.env = OLD_ENV; + }); + + test('expands ${env:HOME}', () => { + process.env.HOME = 'file:///home/foo'; + const test_path = '${env:HOME}/bar'; + const path = `${process.env.HOME}/bar`; + assert.equal(expandPathVariables(test_path, Uri.empty(), []), path); + }); + + test('resolvePathWithEnvVariables ${env:HOME}', () => { + process.env.HOME = '/home/foo'; + const test_path = '${env:HOME}/bar'; + const path = `file://${process.env.HOME}/bar`; + + assert.equal(resolvePathWithEnvVariables(defaultWorkspace, test_path, [])?.toString(), path); + 
assert.equal(resolvePathWithEnvVariables(normalworkspace, test_path, [])?.toString(), path); + }); + + test('expands ${env:USERNAME}', () => { + process.env.USERNAME = 'foo'; + const test_path = 'file:///home/${env:USERNAME}/bar'; + const path = `file:///home/${process.env.USERNAME}/bar`; + assert.equal(expandPathVariables(test_path, Uri.empty(), []), path); + }); + + test('expands ${env:VIRTUAL_ENV}', () => { + process.env.VIRTUAL_ENV = 'file:///home/foo/.venv/path'; + const test_path = '${env:VIRTUAL_ENV}/bar'; + const path = `${process.env.VIRTUAL_ENV}/bar`; + assert.equal(expandPathVariables(test_path, Uri.empty(), []), path); + }); + + test('resolvePathWithEnvVariables ${env:VIRTUAL_ENV}', () => { + process.env.VIRTUAL_ENV = 'https://server/home/foo/.venv/path'; + const test_path = '${env:VIRTUAL_ENV}/bar'; + const path = `${process.env.VIRTUAL_ENV}/bar`; + + assert.equal(resolvePathWithEnvVariables(defaultWorkspace, test_path, [])?.toString(), path); + assert.equal(resolvePathWithEnvVariables(normalworkspace, test_path, [])?.toString(), path); + }); + + test('expands ~ with os.homedir()', () => { + jest.spyOn(os, 'homedir').mockReturnValue('file:///home/foo'); + process.env.HOME = ''; + process.env.USERPROFILE = ''; + const test_path = '~/bar'; + const path = `${os.homedir()}/bar`; + assert.equal(expandPathVariables(test_path, Uri.empty(), []), path); + }); + + test('resolvePathWithEnvVariables ~ with os.homedir()', () => { + jest.spyOn(os, 'homedir').mockReturnValue('c:\\home\\foo'); + + process.env.HOME = ''; + process.env.USERPROFILE = ''; + const test_path = '~/bar'; + const fileUri = UriEx.file(`${os.homedir()}/bar`); + + const defaultResult = resolvePathWithEnvVariables(defaultWorkspace, test_path, []); + const normalResult = resolvePathWithEnvVariables(normalworkspace, test_path, []); + + assert.equal(defaultResult?.scheme, fileUri.scheme); + assert.equal(normalResult?.scheme, fileUri.scheme); + + assert.equal(defaultResult?.getFilePath(), 
fileUri.getFilePath()); + assert.equal(normalResult?.getFilePath(), fileUri.getFilePath()); + }); + + test('expands ~ with env:HOME', () => { + jest.spyOn(os, 'homedir').mockReturnValue(''); + process.env.HOME = 'file:///home/foo'; + process.env.USERPROFILE = ''; + const test_path = '~/bar'; + const path = `${process.env.HOME}/bar`; + assert.equal(expandPathVariables(test_path, Uri.empty(), []), path); + }); + + test('expands ~ with env:USERPROFILE', () => { + jest.spyOn(os, 'homedir').mockReturnValue(''); + process.env.HOME = ''; + process.env.USERPROFILE = 'file:///home/foo'; + const test_path = '~/bar'; + const path = `${process.env.USERPROFILE}/bar`; + assert.equal(expandPathVariables(test_path, Uri.empty(), []), path); + }); + + test('expands /~ with os.homedir()', () => { + jest.spyOn(os, 'homedir').mockReturnValue('file:///home/foo'); + process.env.HOME = ''; + process.env.USERPROFILE = ''; + const test_path = '/~/bar'; + const path = `${os.homedir()}/bar`; + assert.equal(expandPathVariables(test_path, Uri.empty(), []), path); + }); + + test('expands /~ with env:HOME', () => { + jest.spyOn(os, 'homedir').mockReturnValue(''); + process.env.HOME = 'file:///home/foo'; + process.env.USERPROFILE = ''; + const test_path = '/~/bar'; + const path = `${process.env.HOME}/bar`; + assert.equal(expandPathVariables(test_path, Uri.empty(), []), path); + }); + + test('expands /~ with env:USERPROFILE', () => { + jest.spyOn(os, 'homedir').mockReturnValue(''); + process.env.HOME = ''; + process.env.USERPROFILE = 'file:///home/foo'; + const test_path = '/~/bar'; + const path = `${process.env.USERPROFILE}/bar`; + assert.equal(expandPathVariables(test_path, Uri.empty(), []), path); + }); + + test('dont expands ~ when it is used as normal char 1', () => { + jest.spyOn(os, 'homedir').mockReturnValue('file:///home/foo'); + const test_path = '/home/user/~testfolder/testapp'; + assert.equal(expandPathVariables(test_path, Uri.empty(), []), test_path); + }); + + test('dont expands ~ 
when it is used as normal char 2', () => { + jest.spyOn(os, 'homedir').mockReturnValue('file:///home/foo'); + const test_path = '/home/user/testfolder~'; + assert.equal(expandPathVariables(test_path, Uri.empty(), []), test_path); + }); + + test('dont expands ~ when it is used as normal char 3', () => { + jest.spyOn(os, 'homedir').mockReturnValue('file:///home/foo'); + const test_path = '/home/user/test~folder'; + assert.equal(expandPathVariables(test_path, Uri.empty(), []), test_path); + }); + + test('dont expands ~ when it is used as normal char 4', () => { + jest.spyOn(os, 'homedir').mockReturnValue('file:///home/foo'); + const test_path = '/home/user/testfolder~/testapp'; + assert.equal(expandPathVariables(test_path, Uri.empty(), []), test_path); + }); +}); + +function createWorkspace(rootUri: Uri | undefined) { + const fs = new TestFileSystem(false); + return { + workspaceName: '', + rootUri, + kinds: [WellKnownWorkspaceKinds.Test], + service: new AnalyzerService('test service', createServiceProvider(fs), { + console: new NullConsole(), + hostFactory: () => new TestAccessHost(), + importResolverFactory: AnalyzerService.createImportResolver, + configOptions: new ConfigOptions(Uri.empty()), + shouldRunAnalysis: () => true, + }), + disableLanguageServices: false, + disableTaggedHints: false, + disableOrganizeImports: false, + disableWorkspaceSymbol: false, + isInitialized: createInitStatus(), + searchPathsToWatch: [], + }; +} diff --git a/python-parser/packages/pyright-internal/src/tests/filesystem.test.ts b/python-parser/packages/pyright-internal/src/tests/filesystem.test.ts new file mode 100644 index 00000000..eef40a9d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/filesystem.test.ts @@ -0,0 +1,232 @@ +/* + * filesystem.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Test and show how to use virtual file system + */ + +import assert from 'assert'; + +import { combinePaths, normalizeSlashes } from '../common/pathUtils'; +import * as host from './harness/testHost'; +import * as factory from './harness/vfs/factory'; +import * as vfs from './harness/vfs/filesystem'; +import { UriEx } from '../common/uri/uriUtils'; + +test('CreateVFS', () => { + const cwd = normalizeSlashes('/'); + const fs = new vfs.TestFileSystem(/*ignoreCase*/ true, { cwd }); + assert.equal(fs.cwd(), cwd); +}); + +test('Folders', () => { + const cwd = UriEx.file(normalizeSlashes('/')); + const fs = new vfs.TestFileSystem(/*ignoreCase*/ true, { cwd: cwd.getFilePath() }); + + // no such dir exist + assert.throws(() => { + fs.chdir(cwd.combinePaths('a')); + }); + + fs.mkdirSync(cwd.combinePaths('a')); + fs.chdir(cwd.combinePaths('a')); + assert.equal(fs.cwd(), normalizeSlashes('/a')); + + fs.chdir(cwd.resolvePaths('..')); + fs.rmdirSync(cwd.combinePaths('a')); + + // no such dir exist + assert.throws(() => { + fs.chdir(cwd.combinePaths('a')); + }); +}); + +test('Folders Recursive', () => { + const cwd = UriEx.file(normalizeSlashes('/')); + const fs = new vfs.TestFileSystem(/*ignoreCase*/ true, { cwd: cwd.getFilePath() }); + + // no such dir exist + assert.throws(() => { + fs.chdir(cwd.combinePaths('a')); + }); + + const path = cwd.combinePaths('a', 'b', 'c'); + fs.mkdirSync(path, { recursive: true }); + + assert(fs.existsSync(path)); +}); + +test('Files', () => { + const cwd = UriEx.file(normalizeSlashes('/')); + const fs = new vfs.TestFileSystem(/*ignoreCase*/ true, { cwd: cwd.getFilePath() }); + + const uri = cwd.combinePaths('1.txt'); + fs.writeFileSync(uri, 'hello', 'utf8'); + const buffer1 = fs.readFileSync(uri); + assert.equal(buffer1.toString(), 'hello'); + + const p = cwd.resolvePaths('a/b/c'); + fs.mkdirpSync(p.getFilePath()); + + const f = p.combinePaths('2.txt'); + fs.writeFileSync(f, 'hi'); + + const str = fs.readFileSync(f, 'utf8'); + 
assert.equal(str, 'hi'); +}); + +test('CreateRich', () => { + const cwd = normalizeSlashes('/'); + const files: vfs.FileSet = { + [normalizeSlashes('/a/b/c/1.txt')]: new vfs.File('hello1'), + [normalizeSlashes('/a/b/2.txt')]: new vfs.File('hello2'), + [normalizeSlashes('/a/3.txt')]: new vfs.File('hello3'), + [normalizeSlashes('/4.txt')]: new vfs.File('hello4', { encoding: 'utf16le' }), + [normalizeSlashes('/a/b/../c/./5.txt')]: new vfs.File('hello5', { encoding: 'ucs2' }), + }; + + const fs = new vfs.TestFileSystem(/*ignoreCase*/ true, { cwd, files }); + const entries = fs.scanSync(cwd, 'descendants-or-self', {}); + + // files + directory + root + assert.equal(entries.length, 10); + + assert.equal(fs.readFileSync(UriEx.file(normalizeSlashes('/a/b/c/1.txt')), 'ascii'), 'hello1'); + assert.equal(fs.readFileSync(UriEx.file(normalizeSlashes('/a/b/2.txt')), 'utf8'), 'hello2'); + assert.equal(fs.readFileSync(UriEx.file(normalizeSlashes('/a/3.txt')), 'utf-8'), 'hello3'); + assert.equal(fs.readFileSync(UriEx.file(normalizeSlashes('/4.txt')), 'utf16le'), 'hello4'); + assert.equal(fs.readFileSync(UriEx.file(normalizeSlashes('/a/c/5.txt')), 'ucs2'), 'hello5'); +}); + +test('Shadow', () => { + const cwd = normalizeSlashes('/'); + const fs = new vfs.TestFileSystem(/*ignoreCase*/ true, { cwd }); + + // only readonly fs can be shadowed + assert.throws(() => fs.shadow()); + + // one way to create shadow is making itself snapshot + fs.snapshot(); + assert(!fs.isReadonly); + assert(fs.shadowRoot!.isReadonly); + + // another way is creating one off existing readonly snapshot + const shadow1 = fs.shadowRoot!.shadow(); + assert(!shadow1.isReadonly); + assert(shadow1.shadowRoot === fs.shadowRoot); + + // make itself readonly and then shawdow + shadow1.makeReadonly(); + assert(shadow1.isReadonly); + + const shadow2 = shadow1.shadow(); + assert(!shadow2.isReadonly); + assert(shadow2.shadowRoot === shadow1); +}); + +test('Diffing', () => { + const cwd = UriEx.file(normalizeSlashes('/')); + 
const fs = new vfs.TestFileSystem(/*ignoreCase*/ true, { cwd: cwd.getFilePath() }); + + // first snapshot + fs.snapshot(); + fs.writeFileSync(cwd.combinePaths('test1.txt'), 'hello1'); + + // compared with original + assert.equal(countFile(fs.diff()!), 1); + + // second snapshot + fs.snapshot(); + fs.writeFileSync(cwd.combinePaths('test2.txt'), 'hello2'); + + // compared with first snapshot + assert.equal(countFile(fs.diff()!), 1); + + // compare with original snapshot + assert.equal(countFile(fs.diff(fs.shadowRoot!.shadowRoot)!), 2); + + // branch out from first snapshot + const s = fs.shadowRoot!.shadow(); + + // "test2.txt" only exist in first snapshot + assert(!s.existsSync(cwd.combinePaths('test2.txt'))); + + // create parallel universe where it has another version of test2.txt with different content + // compared to second snapshot which forked from same first snapshot + s.writeFileSync(cwd.combinePaths('test2.txt'), 'hello3'); + + // diff between non direct snapshots + // diff gives test2.txt even though it exist in both snapshot + assert.equal(countFile(s.diff(fs)!), 1); +}); + +test('createFromFileSystem1', () => { + const filepath = normalizeSlashes(combinePaths(factory.srcFolder, 'test.py')); + const content = '# test'; + + // file system will map physical file system to virtual one + const fs = factory.createFromFileSystem(host.HOST, false, { + documents: [new factory.TextDocument(filepath, content)], + cwd: factory.srcFolder, + }); + + // check existing typeshed folder on virtual path inherited from base snapshot from physical file system + const entries = fs.readdirSync(factory.typeshedFolder); + assert(entries.length > 0); + + // confirm file + assert.equal(fs.readFileSync(UriEx.file(filepath), 'utf8'), content); +}); + +test('createFromFileSystem2', () => { + const fs = factory.createFromFileSystem(host.HOST, /* ignoreCase */ true, { cwd: factory.srcFolder }); + const entries = 
fs.readdirSync(UriEx.file(factory.typeshedFolder.getFilePath().toUpperCase())); + assert(entries.length > 0); +}); + +test('createFromFileSystemWithCustomTypeshedPath', () => { + const invalidpath = normalizeSlashes(combinePaths(host.HOST.getWorkspaceRoot(), '../docs')); + const fs = factory.createFromFileSystem(host.HOST, /* ignoreCase */ false, { + cwd: factory.srcFolder, + meta: { [factory.typeshedFolder.getFilePath()]: invalidpath }, + }); + + const entries = fs.readdirSync(factory.typeshedFolder); + assert(entries.filter((e) => e.endsWith('.md')).length > 0); +}); + +test('createFromFileSystemWithMetadata', () => { + const fs = factory.createFromFileSystem(host.HOST, /* ignoreCase */ false, { + cwd: factory.srcFolder, + meta: { unused: 'unused' }, + }); + + assert(fs.existsSync(UriEx.file(factory.srcFolder))); +}); + +function countFile(files: vfs.FileSet): number { + let count = 0; + for (const value of Object.values(flatten(files))) { + if (value instanceof vfs.File) { + count++; + } + } + + return count; +} + +function flatten(files: vfs.FileSet): vfs.FileSet { + const result: vfs.FileSet = {}; + _flatten(files, result); + return result; +} + +function _flatten(files: vfs.FileSet, result: vfs.FileSet): void { + for (const [key, value] of Object.entries(files)) { + result[key] = value; + if (value instanceof vfs.Directory) { + _flatten(value.files, result); + } + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourSlashParser.test.ts b/python-parser/packages/pyright-internal/src/tests/fourSlashParser.test.ts new file mode 100644 index 00000000..97b4145a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourSlashParser.test.ts @@ -0,0 +1,677 @@ +/* + * fourSlashParser.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Tests and show how to use fourslash markup languages + * and how to use parseTestData API itself for other unit tests + */ + +import assert from 'assert'; + +import { getBaseFileName, normalizeSlashes } from '../common/pathUtils'; +import { compareStringsCaseSensitive } from '../common/stringUtils'; +import { parseTestData } from './harness/fourslash/fourSlashParser'; +import { + getFileAtRawOffset, + tryConvertContentOffsetToRawOffset, + tryConvertRawOffsetToContentOffset, +} from './harness/fourslash/fourSlashRawUtils'; +import { + CompilerSettings, + Marker, + Range, + RawToken, + RawTokenKind, + RawTokenRange, +} from './harness/fourslash/fourSlashTypes'; +import * as host from './harness/testHost'; +import * as factory from './harness/vfs/factory'; +import { UriEx } from '../common/uri/uriUtils'; + +test('GlobalOptions', () => { + const code = ` +// global options +// @libpath: ../dist/lib +// @pythonversion: 3.7 + +////class A: +//// pass + `; + + const content = `class A: + pass`; + + const data = parseTestData('.', code, 'test.py'); + assertOptions(data.globalOptions, [ + ['libpath', '../dist/lib'], + ['pythonversion', '3.7'], + ]); + + assert.equal(data.files.length, 1); + assert.equal(data.files[0].fileName, 'test.py'); + assert.equal(data.files[0].content, content); +}); + +test('Filename', () => { + const code = ` +// @filename: file1.py +////class A: +//// pass + `; + + const content = `class A: + pass`; + + const data = parseTestData('.', code, 'test.py'); + assertOptions(data.globalOptions, []); + + assert.equal(data.files.length, 1); + assert.equal(data.files[0].fileName, normalizeSlashes('./file1.py')); + assert.equal(data.files[0].content, content); +}); + +test('Extra file options', () => { + // filename must be the first file options + const code = ` +// @filename: file1.py +// @library: false +////class A: +//// pass + `; + + const data = parseTestData('.', code, 'test.py'); + + assert.equal(data.files[0].fileName, 
normalizeSlashes('./file1.py')); + + assertOptions(data.globalOptions, []); + assertOptions(data.files[0].fileOptions, [ + ['filename', 'file1.py'], + ['library', 'false'], + ]); +}); + +test('Library options', () => { + // filename must be the first file options + const code = ` +// @filename: file1.py +// @library: true +////class A: +//// pass + `; + + const data = parseTestData('.', code, 'test.py'); + + assert.equal(data.files[0].fileName, factory.libFolder.combinePaths('file1.py').getFilePath()); +}); + +test('Range', () => { + const code = ` +////class A: +//// [|pass|] + `; + + const content = `class A: + pass`; + + const data = parseTestData('.', code, 'test.py'); + assert.equal(data.files[0].content, content); + + assert.deepEqual(stripRanges(data.ranges), [ + { fileName: 'test.py', fileUri: UriEx.file('test.py'), pos: 13, end: 17, marker: undefined }, + ]); +}); + +test('Marker', () => { + const code = ` +////class A: +//// /*marker1*/pass + `; + + const content = `class A: + pass`; + + const data = parseTestData('.', code, 'test.py'); + assert.equal(data.files[0].content, content); + + const marker = { fileName: 'test.py', fileUri: UriEx.file('test.py'), position: 13 }; + assert.deepEqual(stripMarkers(data.markers), [marker]); + assert.deepEqual(stripMarker(data.markerPositions.get('marker1')!), marker); +}); + +test('MarkerWithData', () => { + // embedded json data + const code = ` +////class A: +//// {| "data1":"1", "data2":"2" |}pass + `; + + const content = `class A: + pass`; + + const data = parseTestData('.', code, 'test.py'); + assert.equal(data.files[0].content, content); + + assert.deepEqual(stripMarkers(data.markers), [ + { fileName: 'test.py', fileUri: UriEx.file('test.py'), position: 13, data: { data1: '1', data2: '2' } }, + ]); + assert.equal(data.markerPositions.size, 0); +}); + +test('MarkerWithDataAndName', () => { + // embedded json data with "name" + const code = ` +////class A: +//// {| "name": "marker1", "data1":"1", "data2":"2" 
|}pass + `; + + const content = `class A: + pass`; + + const data = parseTestData('.', code, 'test.py'); + assert.equal(data.files[0].content, content); + + const marker = { + fileName: 'test.py', + fileUri: UriEx.file('test.py'), + position: 13, + data: { name: 'marker1', data1: '1', data2: '2' }, + }; + assert.deepEqual(stripMarkers(data.markers), [marker]); + assert.deepEqual(stripMarker(data.markerPositions.get(marker.data.name)!), marker); +}); + +test('RangeWithMarker', () => { + // range can have 1 marker in it + const code = ` +////class A: +//// [|/*marker1*/pass|] + `; + + const content = `class A: + pass`; + + const data = parseTestData('.', code, 'test.py'); + assert.equal(data.files[0].content, content); + + const marker = { fileName: 'test.py', fileUri: UriEx.file('test.py'), position: 13 }; + assert.deepEqual(stripMarkers(data.markers), [marker]); + assert.deepEqual(stripMarker(data.markerPositions.get('marker1')!), marker); + + assert.deepEqual(stripRanges(data.ranges), [ + { fileName: 'test.py', fileUri: UriEx.file('test.py'), pos: 13, end: 17, marker }, + ]); +}); + +test('RangeWithMarkerAndJsonData', () => { + // range can have 1 marker in it + const code = ` +////class A: +//// [|{| "name": "marker1", "data1":"1", "data2":"2" |}pass|] + `; + + const content = `class A: + pass`; + + const data = parseTestData('.', code, 'test.py'); + assert.equal(data.files[0].content, content); + + const marker = { + fileName: 'test.py', + fileUri: UriEx.file('test.py'), + position: 13, + data: { name: 'marker1', data1: '1', data2: '2' }, + }; + assert.deepEqual(stripMarkers(data.markers), [marker]); + assert.deepEqual(stripMarker(data.markerPositions.get(marker.data.name)!), marker); + + assert.deepEqual(stripRanges(data.ranges), [ + { fileName: 'test.py', fileUri: UriEx.file('test.py'), pos: 13, end: 17, marker }, + ]); +}); + +test('Multiple Files', () => { + // range can have 1 marker in it + const code = ` +// @filename: src/A.py +// @library: false 
+////class A: +//// pass + +// @filename: src/B.py +// @library: true +////class B: +//// pass + +// @filename: src/C.py +////class C: +//// pass + `; + + const data = parseTestData('.', code, 'test.py'); + assert.equal(data.files.length, 3); + + assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/A.py'))[0].content, getContent('A')); + assert.equal( + data.files.filter((f) => f.fileName === factory.libFolder.resolvePaths('src/B.py').getFilePath())[0].content, + getContent('B') + ); + assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/C.py'))[0].content, getContent('C')); +}); + +test('Multiple Files with default name', () => { + // only very first one can omit filename + const code = ` +////class A: +//// pass + +// @filename: src/B.py +////class B: +//// pass + +// @filename: src/C.py +////class C: +//// pass + `; + + const data = parseTestData('.', code, './src/test.py'); + assert.equal(data.files.length, 3); + + assert.equal( + data.files.filter((f) => f.fileName === normalizeSlashes('./src/test.py'))[0].content, + getContent('A') + ); + assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/B.py'))[0].content, getContent('B')); + assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/C.py'))[0].content, getContent('C')); +}); + +test('Multiple Files with markers', () => { + // range can have 1 marker in it + const code = ` +// @filename: src/A.py +////class A: +//// [|pass|] + +// @filename: src/B.py +////class B: +//// [|/*marker1*/pass|] + +// @filename: src/C.py +////class C: +//// [|{|"name":"marker2", "data":"2"|}pass|] + `; + + const data = parseTestData('.', code, 'test.py'); + assert.equal(data.files.length, 3); + + assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/A.py'))[0].content, getContent('A')); + assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/B.py'))[0].content, getContent('B')); + 
assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/C.py'))[0].content, getContent('C')); + + assert.equal(data.ranges.length, 3); + + assert(data.markerPositions.get('marker1')); + assert(data.markerPositions.get('marker2')); + + assert.equal(data.ranges.filter((r) => r.marker).length, 2); +}); + +test('fourSlashWithFileSystem', () => { + const code = ` +// @filename: src/A.py +////class A: +//// pass + +// @filename: src/B.py +////class B: +//// pass + +// @filename: src/C.py +////class C: +//// pass + `; + + const data = parseTestData('.', code, 'unused'); + const documents = data.files.map( + (f) => new factory.TextDocument(f.fileName, f.content, new Map(Object.entries(f.fileOptions))) + ); + + const fs = factory.createFromFileSystem(host.HOST, /* ignoreCase */ false, { + documents, + cwd: normalizeSlashes('/'), + }); + + for (const file of data.files) { + assert.equal(fs.readFileSync(file.fileUri, 'utf8'), getContent(getBaseFileName(file.fileName, '.py', false))); + } +}); + +test('RawTokensLossless', () => { + const code = '// @filename: a.py\r\n////a/*m*/b\r\n'; + const data = parseTestData('.', code, 'test.py'); + + assert.equal(data.rawText, code); + assert(data.rawText); + assert(data.rawTokens); + + const reconstructed = data.rawTokens.map((t) => data.rawText!.slice(t.start, t.end)).join(''); + assert.equal(reconstructed, code); + + assert.equal(data.rawTokens[0].start, 0); + assert.equal(data.rawTokens[data.rawTokens.length - 1].end, code.length); + for (let i = 0; i < data.rawTokens.length - 1; i++) { + assert.equal(data.rawTokens[i].end, data.rawTokens[i + 1].start); + } + + const twoSlash = data.rawTokens.find((t) => t.kind === RawTokenKind.TwoSlashPrefix); + assert(twoSlash); + assert.equal(data.rawText.slice(twoSlash.start, twoSlash.end), '//'); + + const fourSlash = data.rawTokens.find((t) => t.kind === RawTokenKind.FourSlashPrefix); + assert(fourSlash); + assert.equal(data.rawText.slice(fourSlash.start, fourSlash.end), 
'////'); + + const cr = data.rawTokens.find((t) => t.kind === RawTokenKind.NewLineCR); + const lf = data.rawTokens.find((t) => t.kind === RawTokenKind.NewLineLF); + assert(cr); + assert(lf); + assert.equal(data.rawText.slice(cr.start, cr.end), '\r'); + assert.equal(data.rawText.slice(lf.start, lf.end), '\n'); +}); + +test('RawTokensTraceCoversAllKinds', () => { + const code = '// @pythonversion:\t3.12 \r\n' + '////\t[|ab/*m*/cd{| "x": 1 |}ef|]\r\n'; + + const data = parseTestData('.', code, 'test.py'); + assert.equal(data.rawText, code); + assert(data.rawText); + assert(data.rawTokens); + + const trace = data.rawTokens.map((t) => [t.kind, data.rawText!.slice(t.start, t.end)] as const); + + const expectedTrace = [ + // Directive line. + [RawTokenKind.TwoSlashPrefix, '//'], + [RawTokenKind.Whitespace, ' '], + [RawTokenKind.DirectiveAt, '@'], + [RawTokenKind.DirectiveName, 'pythonversion'], + [RawTokenKind.DirectiveColon, ':'], + [RawTokenKind.Whitespace, '\t'], + [RawTokenKind.DirectiveValue, '3.12'], + [RawTokenKind.Whitespace, ' '], + [RawTokenKind.NewLineCR, '\r'], + [RawTokenKind.NewLineLF, '\n'], + + // Four-slash content line. 
+ [RawTokenKind.FourSlashPrefix, '////'], + [RawTokenKind.Whitespace, '\t'], + [RawTokenKind.RangeStart, '[|'], + [RawTokenKind.Text, 'ab'], + [RawTokenKind.MarkerStart, '/*'], + [RawTokenKind.MarkerName, 'm'], + [RawTokenKind.MarkerEnd, '*/'], + [RawTokenKind.Text, 'cd'], + [RawTokenKind.ObjectMarkerStart, '{|'], + [RawTokenKind.ObjectMarkerText, ' "x": 1 '], + [RawTokenKind.ObjectMarkerEnd, '|}'], + [RawTokenKind.Text, 'ef'], + [RawTokenKind.RangeEnd, '|]'], + [RawTokenKind.NewLineCR, '\r'], + [RawTokenKind.NewLineLF, '\n'], + ] as const; + + assert.deepEqual(trace, expectedTrace); + + const allKinds: RawToken['kind'][] = [ + RawTokenKind.Whitespace, + RawTokenKind.NewLineCR, + RawTokenKind.NewLineLF, + RawTokenKind.Text, + RawTokenKind.TwoSlashPrefix, + RawTokenKind.FourSlashPrefix, + RawTokenKind.DirectiveAt, + RawTokenKind.DirectiveName, + RawTokenKind.DirectiveColon, + RawTokenKind.DirectiveValue, + RawTokenKind.RangeStart, + RawTokenKind.RangeEnd, + RawTokenKind.MarkerStart, + RawTokenKind.MarkerName, + RawTokenKind.MarkerEnd, + RawTokenKind.ObjectMarkerStart, + RawTokenKind.ObjectMarkerText, + RawTokenKind.ObjectMarkerEnd, + ]; + + const seen = new Set(data.rawTokens.map((t) => t.kind)); + const missing = allKinds.filter((k) => !seen.has(k)); + assert.deepEqual( + missing, + [], + `Missing raw token kinds: ${missing.join(', ')}\n\nTrace:\n${trace + .map(([k, s]) => `${k}: ${JSON.stringify(s)}`) + .join('\n')}` + ); +}); + +test('RawDataTokenRangesForMarkerRangeAndDirective', () => { + const code = '// @pythonversion: 3.12\n' + '////class A:\n' + '//// [|/*marker1*/pass|]\n'; + + const data = parseTestData('.', code, 'test.py'); + assert(data.rawText); + assert(data.rawTokens); + + const directive = data.globalOptionsRawData?.pythonversion; + assert(directive); + assert.equal(sliceByTokenRange(data.rawText, data.rawTokens, directive.prefix), '//'); + assert.equal(sliceByTokenRange(data.rawText, data.rawTokens, directive.name), '@pythonversion'); + 
assert.equal(sliceByTokenRange(data.rawText, data.rawTokens, directive.colon!), ':'); + assert.equal(sliceByTokenRange(data.rawText, data.rawTokens, directive.value), '3.12'); + + assert.equal(data.ranges.length, 1); + const range = data.ranges[0]; + assert(range.rawData); + assert.equal(sliceByTokenRange(data.rawText, data.rawTokens, range.rawData.open), '[|'); + assert.equal(sliceByTokenRange(data.rawText, data.rawTokens, range.rawData.close), '|]'); + assert.equal(sliceByTokenRange(data.rawText, data.rawTokens, range.rawData.full), '[|/*marker1*/pass|]'); + assert.equal(sliceByTokenRange(data.rawText, data.rawTokens, range.rawData.selected), '/*marker1*/pass'); + + assert.equal(data.markers.length, 1); + const marker = data.markers[0]; + assert(marker.rawData); + assert.equal(marker.rawData.kind, 'slashStar'); + assert.equal(sliceByTokenRange(data.rawText, data.rawTokens, marker.rawData.full), '/*marker1*/'); + assert.equal(sliceByTokenRange(data.rawText, data.rawTokens, marker.rawData.name!), 'marker1'); +}); + +test('StrictRawToContentMapping', () => { + const code = '////class A:\r\n//// /*marker1*/pass\r\n'; + const data = parseTestData('.', code, 'test.py'); + const file = data.files[0]; + + const rawOffsetPass = code.indexOf('pass'); + assert(rawOffsetPass >= 0); + assert.equal(tryConvertRawOffsetToContentOffset(file, rawOffsetPass), file.content.indexOf('pass')); + + const rawOffsetMarker = code.indexOf('/*marker1*/'); + assert(rawOffsetMarker >= 0); + assert.equal(tryConvertRawOffsetToContentOffset(file, rawOffsetMarker), undefined); + + const rawOffsetCr = code.indexOf('\r'); + assert(rawOffsetCr >= 0); + assert.equal(tryConvertRawOffsetToContentOffset(file, rawOffsetCr), undefined); + + const passContentOffset = file.content.indexOf('pass'); + assert.equal(tryConvertContentOffsetToRawOffset(file, passContentOffset), rawOffsetPass); + + const rawOffsetAfterPass = rawOffsetPass + 'pass'.length; + assert.equal(tryConvertRawOffsetToContentOffset(file, 
rawOffsetAfterPass), file.content.length); + assert.equal(tryConvertContentOffsetToRawOffset(file, file.content.length), rawOffsetAfterPass); +}); + +test('StrictRawToContentMapping_UnmappedPrefixesAndRangeDelimiters', () => { + const code = '////[|a|]\n'; + const data = parseTestData('.', code, 'test.py'); + const file = data.files[0]; + + // Raw offsets inside the four-slash prefix are unmapped. + const rawOffsetInPrefix = code.indexOf('////') + 2; + assert.equal(tryConvertRawOffsetToContentOffset(file, rawOffsetInPrefix), undefined); + + // Range delimiter tokens are metacharacters and are unmapped. + const rawOffsetRangeStart = code.indexOf('[|'); + assert(rawOffsetRangeStart >= 0); + // The start of a stripped delimiter can coincide with a mapped segment boundary. + // Strictness means the bytes *inside* the delimiter are unmapped. + assert.equal(tryConvertRawOffsetToContentOffset(file, rawOffsetRangeStart + 1), undefined); + + const rawOffsetRangeEnd = code.indexOf('|]'); + assert(rawOffsetRangeEnd >= 0); + assert.equal(tryConvertRawOffsetToContentOffset(file, rawOffsetRangeEnd), file.content.length); + assert.equal(tryConvertRawOffsetToContentOffset(file, rawOffsetRangeEnd + 1), undefined); + + // Content inside the range should map. 
+ const rawOffsetA = code.indexOf('a'); + assert(rawOffsetA >= 0); + assert.equal(tryConvertRawOffsetToContentOffset(file, rawOffsetA), file.content.indexOf('a')); +}); + +test('StrictRawToContentMapping_LfMapsButCrDoesNot', () => { + const code = '////a\r\n////b\r\n'; + const data = parseTestData('.', code, 'test.py'); + const file = data.files[0]; + + assert.equal(file.content, 'a\nb'); + + const rawOffsetCr = code.indexOf('\r'); + const rawOffsetLf = code.indexOf('\n'); + assert(rawOffsetCr >= 0); + assert(rawOffsetLf >= 0); + + assert.equal(tryConvertRawOffsetToContentOffset(file, rawOffsetCr), undefined); + assert.equal(tryConvertRawOffsetToContentOffset(file, rawOffsetLf), file.content.indexOf('\n')); + assert.equal(tryConvertContentOffsetToRawOffset(file, file.content.indexOf('\n')), rawOffsetLf); +}); + +test('InvalidSlashStarMarkerIsTreatedAsComment', () => { + const code = '////a/*bad-marker*/b\n'; + const data = parseTestData('.', code, 'test.py'); + const file = data.files[0]; + + assert.equal(data.markers.length, 0); + assert.equal(file.content, 'a/*bad-marker*/b'); + + const rawOffsetCommentStart = code.indexOf('/*'); + assert(rawOffsetCommentStart >= 0); + assert.equal(tryConvertRawOffsetToContentOffset(file, rawOffsetCommentStart), file.content.indexOf('/*')); +}); + +test('UnterminatedRangeThrows', () => { + const code = '////[|a\n'; + assert.throws( + () => parseTestData('.', code, 'test.py'), + (e: unknown) => e instanceof Error && e.message.includes('Unterminated range.') + ); +}); + +test('GetOwningFileAtRawOffset', () => { + const code = '// @filename: A.py\n' + '////class A: pass\n' + '\n' + '// @filename: B.py\n' + '////class B: pass\n'; + + const data = parseTestData('.', code, 'test.py'); + assert.equal(data.files.length, 2); + + const bRawOffset = code.indexOf('class B'); + assert(bRawOffset >= 0); + const owningFile = getFileAtRawOffset(data, bRawOffset); + assert(owningFile); + assert.equal(getBaseFileName(owningFile.fileName, '.py', 
false), 'B'); +}); + +test('GetOwningFileAtRawOffset_Boundaries', () => { + const code = '// @filename: A.py\n' + '////class A: pass\n' + '// @filename: B.py\n' + '////class B: pass\n'; + + const data = parseTestData('.', code, 'test.py'); + assert.equal(data.files.length, 2); + + // The newline token at the end of A's four-slash line is still owned by file A. + const aLineEndLf = code.indexOf('////class A: pass') + '////class A: pass'.length; + assert.equal(code[aLineEndLf], '\n'); + const fileAtALineLf = getFileAtRawOffset(data, aLineEndLf); + assert(fileAtALineLf); + assert.equal(getBaseFileName(fileAtALineLf.fileName, '.py', false), 'A'); + + // The @filename directive line is not part of any file's tokenRanges. + const bDirectiveOffset = code.indexOf('// @filename: B.py'); + assert(bDirectiveOffset >= 0); + assert.equal(getFileAtRawOffset(data, bDirectiveOffset), undefined); + + // The first four-slash line after the directive is owned by file B. + const bContentOffset = code.indexOf('////class B'); + assert(bContentOffset >= 0); + const fileAtBLine = getFileAtRawOffset(data, bContentOffset); + assert(fileAtBLine); + assert.equal(getBaseFileName(fileAtBLine.fileName, '.py', false), 'B'); +}); + +function getContent(className: string) { + return `class ${className}: + pass`; +} + +function assertOptions(actual: CompilerSettings, expected: [string, string][], message?: string | Error): void { + assert.deepEqual( + Object.entries(actual).sort((x, y) => compareStringsCaseSensitive(x[0], y[0])), + expected, + message + ); +} + +type LegacyMarker = { + fileName: string; + fileUri: unknown; + position: number; + data?: {}; +}; + +type LegacyRange = { + fileName: string; + fileUri: unknown; + pos: number; + end: number; + marker: LegacyMarker | undefined; +}; + +function stripMarker(marker: Marker): LegacyMarker { + const base: Omit = { + fileName: marker.fileName, + fileUri: marker.fileUri, + position: marker.position, + }; + + // Preserve legacy shape: omit `data` 
when undefined. + return marker.data !== undefined ? { ...base, data: marker.data } : base; +} + +function stripMarkers(markers: Marker[]): LegacyMarker[] { + return markers.map((m) => stripMarker(m)); +} + +function stripRanges(ranges: Range[]): LegacyRange[] { + return ranges.map((r) => ({ + fileName: r.fileName, + fileUri: r.fileUri, + pos: r.pos, + end: r.end, + marker: r.marker ? stripMarker(r.marker) : undefined, + })); +} + +function sliceByTokenRange(rawText: string, rawTokens: RawToken[], tokenRange: RawTokenRange) { + if (tokenRange.startToken === tokenRange.endToken) { + return ''; + } + + const start = rawTokens[tokenRange.startToken].start; + const end = rawTokens[tokenRange.endToken - 1].end; + return rawText.slice(start, end); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourSlashRunner.test.ts b/python-parser/packages/pyright-internal/src/tests/fourSlashRunner.test.ts new file mode 100644 index 00000000..7b174f8e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourSlashRunner.test.ts @@ -0,0 +1,36 @@ +/* + * fourSlashRunner.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Entry point that will read all *.fourslash.ts files and + * register jest tests for them and run + */ + +import * as path from 'path'; + +import { normalizeSlashes } from '../common/pathUtils'; +import { runFourSlashTest } from './harness/fourslash/runner'; +import * as host from './harness/testHost'; +import { MODULE_PATH } from './harness/vfs/filesystem'; + +describe('fourslash tests', () => { + const testFiles: string[] = []; + + const basePath = path.resolve(path.dirname(module.filename), 'fourslash/'); + for (const file of host.HOST.listFiles(basePath, /.*\.fourslash\.ts$/i, { recursive: true })) { + testFiles.push(file); + } + + testFiles.forEach((file) => { + describe(file, () => { + const fn = normalizeSlashes(file); + const justName = fn.replace(/^.*[\\/]/, ''); + + // TODO: make these to use promise/async rather than callback token + it('fourslash test ' + justName + ' run', (cb) => { + runFourSlashTest(MODULE_PATH, fn, cb); + }); + }); + }); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.Lib.Found.Type.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.Lib.Found.Type.fourslash.ts new file mode 100644 index 00000000..157f6238 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.Lib.Found.Type.fourslash.ts @@ -0,0 +1,33 @@ +/// + +// @filename: test1.py +//// [|/*import*/|][|Test/*marker*/|] + +// @filename: test2.py +//// import testLib + +// @filename: testLib/__init__.pyi +// @library: true +//// class Test: +//// pass + +{ + const importRange = helper.getPositionRange('import'); + const markerRange = helper.getPositionRange('marker'); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'Test', + kind: Consts.CompletionItemKind.Class, + documentation: '```\nfrom testLib import Test\n```', + detail: 'Auto-import', + textEdit: { 
range: markerRange, newText: 'Test' }, + additionalTextEdits: [{ range: importRange, newText: 'from testLib import Test\n\n\n' }], + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.Lib.Found.duplication.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.Lib.Found.duplication.fourslash.ts new file mode 100644 index 00000000..c202bea1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.Lib.Found.duplication.fourslash.ts @@ -0,0 +1,47 @@ +/// + +// @filename: test1.py +//// [|/*import*/|][|test/*marker*/|] + +// @filename: test2.py +//// import testLib +//// import testLib.test1 +//// import testLib.test2 +//// a = testLib.test1.Test1() +//// b = testLib.test2.Test2() + +// @filename: testLib/__init__.pyi +// @library: true +//// class Test: +//// pass + +// @filename: testLib/test1.pyi +// @library: true +//// class Test1: +//// pass + +// @filename: testLib/test2.pyi +// @library: true +//// class Test2: +//// pass + +{ + const importRange = helper.getPositionRange('import'); + const markerRange = helper.getPositionRange('marker'); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'test1', + kind: Consts.CompletionItemKind.Module, + documentation: '```\nfrom testLib import test1\n```', + detail: 'Auto-import', + textEdit: { range: markerRange, newText: 'test1' }, + additionalTextEdits: [{ range: importRange, newText: 'from testLib import test1\n\n\n' }], + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.Lib.NotFound.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.Lib.NotFound.fourslash.ts new file mode 100644 index 00000000..317c3f62 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.Lib.NotFound.fourslash.ts @@ -0,0 +1,14 @@ +/// + +// @filename: test1.py +//// Test[|/*marker*/|] + +// @filename: testLib/__init__.pyi +// @library: true +//// class Test: +//// pass + +// @ts-ignore +await helper.verifyCompletion('excluded', 'markdown', { + marker: { completions: [{ label: 'Test', kind: undefined }] }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.disabled.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.disabled.fourslash.ts new file mode 100644 index 00000000..8d8a775b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.disabled.fourslash.ts @@ -0,0 +1,25 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "autoImportCompletions": false +//// } + +// @filename: test1.py +//// Test[|/*marker*/|] + +// @filename: test2.py +//// class Test: +//// pass + +// @ts-ignore +await helper.verifyCompletion('excluded', 'markdown', { + marker: { + completions: [ + { + label: 'Test', + kind: Consts.CompletionItemKind.Class, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.duplicates.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.duplicates.fourslash.ts new file mode 100644 index 00000000..e1b326df --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.duplicates.fourslash.ts @@ -0,0 +1,36 @@ +/// +// @indexer: true + +// @filename: test1.py +//// import math +//// import testLib +//// [|ata/*marker*/|] + +// @filename: testLib/__init__.pyi +// @library: true +//// def atan(x: float) -> float: ... 
+{ + const markerRange = helper.getPositionRange('marker'); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'atan', + kind: Consts.CompletionItemKind.Function, + documentation: '```\nfrom math import atan\n```', + detail: 'Auto-import', + textEdit: { range: markerRange, newText: 'math.atan' }, + }, + { + label: 'atan', + kind: Consts.CompletionItemKind.Function, + documentation: '```\nfrom testLib import atan\n```', + detail: 'Auto-import', + textEdit: { range: markerRange, newText: 'testLib.atan' }, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.fourslash.ts new file mode 100644 index 00000000..c7a73efd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.fourslash.ts @@ -0,0 +1,29 @@ +/// + +// @filename: test1.py +//// [|/*import*/|][|Test/*marker*/|] + +// @filename: test2.py +//// class Test: +//// pass + +{ + const importRange = helper.getPositionRange('import'); + const markerRange = helper.getPositionRange('marker'); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'Test', + kind: Consts.CompletionItemKind.Class, + documentation: '```\nfrom test2 import Test\n```', + detail: 'Auto-import', + textEdit: { range: markerRange, newText: 'Test' }, + additionalTextEdits: [{ range: importRange, newText: 'from test2 import Test\n\n\n' }], + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.fromImport.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.fromImport.fourslash.ts new file mode 100644 index 00000000..ca7d44d0 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.fromImport.fourslash.ts @@ -0,0 +1,130 @@ +/// + +// @filename: test.py +//// from lib import ( +//// [|/*import1*/|]MY_CONST_VAR[|/*import2*/|], +//// ClsOther[|/*import3*/|], +//// my_afunc[|/*import4*/|], +//// ) +//// +//// [|A_CONST/*marker1*/|] +//// [|MY_CONST_VAR2/*marker2*/|] +//// [|Cls/*marker3*/|] +//// [|ClsList/*marker4*/|] +//// [|my_func/*marker5*/|] +//// [|MYA_CONST_VAR/*marker6*/|] +//// [|MY_CONSTA_VAR/*marker7*/|] + +// @filename: lib.py +//// A_CONST = 1 +//// MY_CONST_VAR = 2 +//// MY_CONST_VAR2 = 3 +//// MY_CONSTA_VAR = 4 +//// MYA_CONST_VAR = 4 +//// class Cls: ... +//// class ClsOther: ... +//// ClsOtherList = list[ClsOther] +//// def my_afunc(): ... +//// def my_func(): ... + +{ + const import1Range = helper.getPositionRange('import1'); + const import2Range = helper.getPositionRange('import2'); + const import3Range = helper.getPositionRange('import3'); + const import4Range = helper.getPositionRange('import4'); + const marker1Range = helper.getPositionRange('marker1'); + const marker2Range = helper.getPositionRange('marker2'); + const marker3Range = helper.getPositionRange('marker3'); + const marker4Range = helper.getPositionRange('marker4'); + const marker5Range = helper.getPositionRange('marker5'); + const marker6Range = helper.getPositionRange('marker6'); + const marker7Range = helper.getPositionRange('marker7'); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'A_CONST', + kind: Consts.CompletionItemKind.Constant, + documentation: '```\nfrom lib import A_CONST\n```', + detail: 'Auto-import', + textEdit: { range: marker1Range, newText: 'A_CONST' }, + additionalTextEdits: [{ range: import1Range, newText: 'A_CONST,\n ' }], + }, + ], + }, + marker2: { + completions: [ + { + label: 'MY_CONST_VAR2', + kind: Consts.CompletionItemKind.Constant, + documentation: '```\nfrom lib import 
MY_CONST_VAR2\n```', + detail: 'Auto-import', + textEdit: { range: marker2Range, newText: 'MY_CONST_VAR2' }, + additionalTextEdits: [{ range: import2Range, newText: ',\n MY_CONST_VAR2' }], + }, + ], + }, + marker3: { + completions: [ + { + label: 'Cls', + kind: Consts.CompletionItemKind.Class, + documentation: '```\nfrom lib import Cls\n```', + detail: 'Auto-import', + textEdit: { range: marker3Range, newText: 'Cls' }, + additionalTextEdits: [{ range: import2Range, newText: ',\n Cls' }], + }, + ], + }, + marker4: { + completions: [ + { + label: 'ClsOtherList', + kind: Consts.CompletionItemKind.Variable, + documentation: '```\nfrom lib import ClsOtherList\n```', + detail: 'Auto-import', + textEdit: { range: marker4Range, newText: 'ClsOtherList' }, + additionalTextEdits: [{ range: import3Range, newText: ',\n ClsOtherList' }], + }, + ], + }, + marker5: { + completions: [ + { + label: 'my_func', + kind: Consts.CompletionItemKind.Function, + documentation: '```\nfrom lib import my_func\n```', + detail: 'Auto-import', + textEdit: { range: marker5Range, newText: 'my_func' }, + additionalTextEdits: [{ range: import4Range, newText: ',\n my_func' }], + }, + ], + }, + marker6: { + completions: [ + { + label: 'MYA_CONST_VAR', + kind: Consts.CompletionItemKind.Constant, + documentation: '```\nfrom lib import MYA_CONST_VAR\n```', + detail: 'Auto-import', + textEdit: { range: marker6Range, newText: 'MYA_CONST_VAR' }, + additionalTextEdits: [{ range: import2Range, newText: ',\n MYA_CONST_VAR' }], + }, + ], + }, + marker7: { + completions: [ + { + label: 'MY_CONSTA_VAR', + kind: Consts.CompletionItemKind.Constant, + documentation: '```\nfrom lib import MY_CONSTA_VAR\n```', + detail: 'Auto-import', + textEdit: { range: marker7Range, newText: 'MY_CONSTA_VAR' }, + additionalTextEdits: [{ range: import2Range, newText: ',\n MY_CONSTA_VAR' }], + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.lib.alias.fourslash.ts 
b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.lib.alias.fourslash.ts new file mode 100644 index 00000000..45b69272 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.lib.alias.fourslash.ts @@ -0,0 +1,61 @@ +/// + +// @filename: test1.py +//// [|/*import*/|][|job/*marker*/|] + +// @filename: test2.py +//// import dagster + +// @filename: dagster/py.typed +// @library: true +//// partial + +// @filename: dagster/__init__.py +// @library: true +//// from dagster.core.definitions import ( +//// job +//// ) +//// +//// __all__ = [ +//// "job" +//// ] + +// @filename: dagster/core/__init__.py +// @library: true +//// from builtins import * + +// @filename: dagster/core/definitions/__init__.py +// @library: true +//// from .decorators import ( +//// job +//// ) + +// @filename: dagster/core/definitions/decorators/__init__.py +// @library: true +//// from .job_decorator import job + +// @filename: dagster/core/definitions/decorators/job_decorator.py +// @library: true +//// def job(): +//// ... 
+ +{ + const importRange = helper.getPositionRange('import'); + const markerRange = helper.getPositionRange('marker'); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'job', + kind: Consts.CompletionItemKind.Function, + documentation: '```\nfrom dagster import job\n```', + detail: 'Auto-import', + textEdit: { range: markerRange, newText: 'job' }, + additionalTextEdits: [{ range: importRange, newText: 'from dagster import job\n\n\n' }], + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.plainText.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.plainText.fourslash.ts new file mode 100644 index 00000000..726d4998 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.plainText.fourslash.ts @@ -0,0 +1,29 @@ +/// + +// @filename: test1.py +//// [|/*import*/|][|Test/*marker*/|] + +// @filename: test2.py +//// class Test: +//// pass + +{ + const importRange = helper.getPositionRange('import'); + const markerRange = helper.getPositionRange('marker'); + + // @ts-ignore + await helper.verifyCompletion('included', 'plaintext', { + marker: { + completions: [ + { + label: 'Test', + kind: Consts.CompletionItemKind.Class, + documentation: 'from test2 import Test', + detail: 'Auto-import', + textEdit: { range: markerRange, newText: 'Test' }, + additionalTextEdits: [{ range: importRange, newText: 'from test2 import Test\n\n\n' }], + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.shadow.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.shadow.fourslash.ts new file mode 100644 index 00000000..ef9c6924 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.shadow.fourslash.ts @@ -0,0 
+1,48 @@ +/// + +// @filename: test1.py +//// [|/*import*/|][|MyShadow/*marker*/|] + +// @filename: test2.py +//// import testLib +//// a = testLib.MyShadow() +//// a.[|/*hover*/method|]() + +// @filename: testLib/__init__.pyi +// @library: true +//// class MyShadow: +//// def method(self): ... + +// @filename: testLib/__init__.py +// @library: true +//// class MyShadow: +//// def method(self): +//// 'doc string' +//// pass + +{ + // This will cause shadow file to be injected. + helper.openFile(helper.getMarkerByName('hover').fileName); + helper.verifyHover('markdown', { + hover: '```python\n(method) def method() -> Unknown\n```\n---\ndoc string', + }); + + const importRange = helper.getPositionRange('import'); + const markerRange = helper.getPositionRange('marker'); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker: { + completions: [ + { + label: 'MyShadow', + kind: Consts.CompletionItemKind.Class, + documentation: '```\nfrom testLib import MyShadow\n```', + detail: 'Auto-import', + textEdit: { range: markerRange, newText: 'MyShadow' }, + additionalTextEdits: [{ range: importRange, newText: 'from testLib import MyShadow\n\n\n' }], + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.submodule.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.submodule.fourslash.ts new file mode 100644 index 00000000..a8a65d93 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.submodule.fourslash.ts @@ -0,0 +1,31 @@ +/// + +// @filename: test1.py +//// [|/*import1*/|]from os.path import dirname +//// [|path/*marker1*/|] + +// @filename: test2.py +//// import os +//// a = os.path + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'path', + kind: 
Consts.CompletionItemKind.Variable, + documentation: '```\nfrom os import path\n```', + detail: 'Auto-import', + textEdit: { range: helper.getPositionRange('marker1'), newText: 'path' }, + additionalTextEdits: [ + { range: helper.getPositionRange('import1'), newText: 'from os import path\n' }, + ], + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.topLevel.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.topLevel.fourslash.ts new file mode 100644 index 00000000..e6bdf22b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.topLevel.fourslash.ts @@ -0,0 +1,57 @@ +/// + +// @filename: test1.py +//// [|/*import1*/|][|os/*marker1*/|] + +// @filename: test2.py +//// [|/*import2*/|][|sys/*marker2*/|] + +// @filename: test3.py +//// import os +//// import sys +//// a = os.path +//// b = sys.path + +{ + helper.openFile('/test1.py'); + + const import1Range = helper.getPositionRange('import1'); + const marker1Range = helper.getPositionRange('marker1'); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'os', + kind: Consts.CompletionItemKind.Module, + documentation: '```\nimport os\n```', + detail: 'Auto-import', + textEdit: { range: marker1Range, newText: 'os' }, + additionalTextEdits: [{ range: import1Range, newText: 'import os\n\n\n' }], + }, + ], + }, + }); + + helper.openFile('/test2.py'); + + const import2Range = helper.getPositionRange('import2'); + const marker2Range = helper.getPositionRange('marker2'); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker2: { + completions: [ + { + label: 'sys', + kind: Consts.CompletionItemKind.Module, + documentation: '```\nimport sys\n```', + detail: 'Auto-import', + textEdit: { range: marker2Range, newText: 'sys' }, + additionalTextEdits: [{ range: import2Range, 
newText: 'import sys\n\n\n' }], + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.unicode.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.unicode.fourslash.ts new file mode 100644 index 00000000..618acc29 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.autoimport.unicode.fourslash.ts @@ -0,0 +1,30 @@ +/// + +// @filename: consume.py +//// [|/*import1*/|][|Ät/*marker1*/|] + +// @filename: declare.py +//// class Äther: ... +//// class Ether: ... + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'Äther', + kind: Consts.CompletionItemKind.Class, + documentation: '```\nfrom declare import Äther\n```', + detail: 'Auto-import', + textEdit: { range: helper.getPositionRange('marker1'), newText: 'Äther' }, + additionalTextEdits: [ + { range: helper.getPositionRange('import1'), newText: 'from declare import Äther\n\n\n' }, + ], + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.builtinDocstrings.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.builtinDocstrings.fourslash.ts new file mode 100644 index 00000000..e2fbf35b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.builtinDocstrings.fourslash.ts @@ -0,0 +1,117 @@ +/// + +// @filename: docstrings.py +//// class A: ... 
+//// +//// class B: +//// """This is the class doc for B.""" +//// def __init__(self): +//// """This is the __init__ doc for B.""" +//// +//// class C: +//// """This is the class doc for C.""" +//// def __init__(self): +//// pass +//// +//// class D: +//// def __init__(self): +//// """This is the __init__ doc for D.""" +//// pass +//// +//// [|/*global*/|] +//// object().[|/*object*/|] +//// A().[|/*a*/|] +//// B().[|/*b*/|] +//// C().[|/*c*/|] +//// D().[|/*d*/|] + +// @filename: typeshed-fallback/stdlib/builtins.py +//// class object(): +//// """This is the class doc for object.""" +//// def __init__(self): +//// """This is the __init__ doc for object.""" +//// pass +//// +//// def __dir__(self): +//// """This is the __dir__ doc for object.""" +//// pass + +{ + // @ts-ignore + await helper.verifyCompletion('included', 'plaintext', { + global: { + completions: [ + { + label: 'object', + kind: Consts.CompletionItemKind.Class, + documentation: 'class object()\n\nThis is the class doc for object.', + }, + { + label: 'A', + kind: Consts.CompletionItemKind.Class, + documentation: 'class A()', + }, + { + label: 'B', + kind: Consts.CompletionItemKind.Class, + documentation: 'class B()\n\nThis is the class doc for B.', + }, + { + label: 'C', + kind: Consts.CompletionItemKind.Class, + documentation: 'class C()\n\nThis is the class doc for C.', + }, + { + label: 'D', + kind: Consts.CompletionItemKind.Class, + documentation: 'class D()', + }, + ], + }, + object: { + completions: [ + { + label: '__init__', + kind: Consts.CompletionItemKind.Method, + documentation: 'def __init__() -> None\n\nThis is the __init__ doc for object.', + }, + ], + }, + a: { + completions: [ + { + label: '__init__', + kind: Consts.CompletionItemKind.Method, + documentation: 'def __init__() -> None', + }, + ], + }, + b: { + completions: [ + { + label: '__init__', + kind: Consts.CompletionItemKind.Method, + documentation: 'def __init__() -> None\n\nThis is the __init__ doc for B.', + }, + ], + }, + c: 
{ + completions: [ + { + label: '__init__', + kind: Consts.CompletionItemKind.Method, + documentation: 'def __init__() -> None', + }, + ], + }, + d: { + completions: [ + { + label: '__init__', + kind: Consts.CompletionItemKind.Method, + documentation: 'def __init__() -> None\n\nThis is the __init__ doc for D.', + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.builtinOverride.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.builtinOverride.fourslash.ts new file mode 100644 index 00000000..32cf9e81 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.builtinOverride.fourslash.ts @@ -0,0 +1,22 @@ +/// + +// @filename: test.py +//// Cust[|/*marker1*/|] +//// my_v[|/*marker2*/|] + +// @filename: __builtins__.pyi +//// class CustomClass: ... +//// my_var: int = ... + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: 'CustomClass', + kind: Consts.CompletionItemKind.Class, + }, + ], + }, + marker2: { completions: [{ label: 'my_var', kind: Consts.CompletionItemKind.Variable }] }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.call.stringLiteral.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.call.stringLiteral.fourslash.ts new file mode 100644 index 00000000..e7a7efc5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.call.stringLiteral.fourslash.ts @@ -0,0 +1,55 @@ +/// + +// @filename: test.py +//// from typing import Literal +//// +//// def thing(foo: Literal["hello", "world"]): +//// pass +//// +//// thing([|/*marker1*/|]) +//// thing(hel[|/*marker2*/|]) +//// thing([|"/*marker3*/"|]) + +{ + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: '"hello"', + kind: 
Consts.CompletionItemKind.Constant, + }, + { + label: '"world"', + kind: Consts.CompletionItemKind.Constant, + }, + ], + }, + marker2: { + completions: [ + { + label: '"hello"', + kind: Consts.CompletionItemKind.Constant, + }, + ], + }, + }); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker3: { + completions: [ + { + label: '"hello"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker3'), newText: '"hello"' }, + }, + { + label: '"world"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker3'), newText: '"world"' }, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.fourslash.ts new file mode 100644 index 00000000..c1ddc679 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.fourslash.ts @@ -0,0 +1,247 @@ +/// + +// @filename: test.py +//// from typing import TypedDict, Optional, Union, List, Dict, Any +//// +//// class Movie(TypedDict): +//// name: str +//// age: int +//// +//// def thing(movie: Movie): +//// pass +//// +//// thing({'[|/*marker1*/|]'}) +//// thing({'name': '[|/*marker2*/|]'}) +//// thing({'name': 'Robert','[|/*marker3*/|]'}) +//// thing({'name': 'Robert', '[|/*marker4*/|]'}) +//// thing('[|/*marker5*/|]') +//// thing({'na[|/*marker6*/|]'}) +//// thing({[|/*marker7*/|]}) +//// thing({'a', '[|/*marker8*/|]'}) +//// +//// class Episode(TypedDict): +//// title: str +//// score: int +//// +//// def thing2(item: Union[Episode, Movie]): +//// pass +//// +//// thing2({'[|/*marker9*/|]'}) +//// thing2({'unknown': 'a', '[|/*marker10*/|]': ''}) +//// thing2({'title': 'Episode 01', '[|/*marker11*/|]': ''}) +//// +//// class Wrapper(TypedDict): +//// age: int +//// wrapped: Union[bool, Movie] +//// data: Dict[str, 
Any] +//// +//// def thing3(wrapper: Optional[Wrapper]): +//// pass +//// +//// thing3({'data': {'[|/*marker12*/|]'}}) +//// thing3({'wrapped': {'[|/*marker13*/|]'}}) +//// thing3({'age': 1, 'wrapped': {'[|/*marker14*/|]'}}) +//// thing3({'unknown': {'[|/*marker15*/|]'}}) +//// thing3({'age': {'[|/*marker16*/|]'}}) +//// thing3({'wrapped': {'name': 'ET', '[|/*marker17*/|]'}}) + +{ + const marker1Range = helper.expandPositionRange(helper.getPositionRange('marker1'), 1, 1); + const marker3Range = helper.expandPositionRange(helper.getPositionRange('marker3'), 1, 1); + const marker4Range = helper.expandPositionRange(helper.getPositionRange('marker4'), 1, 1); + const marker6Range = helper.expandPositionRange(helper.getPositionRange('marker6'), 3, 1); + const marker7Range = helper.getPositionRange('marker7'); + const marker8Range = helper.expandPositionRange(helper.getPositionRange('marker8'), 1, 1); + const marker9Range = helper.expandPositionRange(helper.getPositionRange('marker9'), 1, 1); + const marker10Range = helper.expandPositionRange(helper.getPositionRange('marker10'), 1, 1); + const marker11Range = helper.expandPositionRange(helper.getPositionRange('marker11'), 1, 1); + const marker13Range = helper.expandPositionRange(helper.getPositionRange('marker13'), 1, 1); + const marker14Range = helper.expandPositionRange(helper.getPositionRange('marker14'), 1, 1); + const marker17Range = helper.expandPositionRange(helper.getPositionRange('marker17'), 1, 1); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker1Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker1Range, newText: "'age'" }, + }, + ], + }, + marker2: { + completions: [], + }, + marker3: { + completions: [ + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: 
marker3Range, newText: "'age'" }, + }, + ], + }, + marker4: { + completions: [ + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker4Range, newText: "'age'" }, + }, + ], + }, + marker5: { + completions: [], + }, + marker6: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker6Range, newText: "'name'" }, + }, + ], + }, + marker8: { + completions: [ + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker8Range, newText: "'age'" }, + }, + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker8Range, newText: "'name'" }, + }, + ], + }, + marker9: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker9Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker9Range, newText: "'age'" }, + }, + { + label: "'title'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker9Range, newText: "'title'" }, + }, + { + label: "'score'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker9Range, newText: "'score'" }, + }, + ], + }, + marker10: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker10Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker10Range, newText: "'age'" }, + }, + { + label: "'title'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker10Range, newText: "'title'" }, + }, + { + label: "'score'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker10Range, newText: "'score'" }, + }, + ], + }, + marker11: { + completions: [ + { + label: "'score'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker11Range, newText: "'score'" }, + }, + ], + }, 
+ marker12: { + completions: [], + }, + marker13: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker13Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker13Range, newText: "'age'" }, + }, + ], + }, + marker14: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker14Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker14Range, newText: "'age'" }, + }, + ], + }, + marker15: { + completions: [], + }, + marker16: { + completions: [], + }, + marker17: { + completions: [ + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker17Range, newText: "'age'" }, + }, + ], + }, + }); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker7: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker7Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker7Range, newText: "'age'" }, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.list.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.list.fourslash.ts new file mode 100644 index 00000000..b6e7131e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.list.fourslash.ts @@ -0,0 +1,163 @@ +/// + +// @filename: test.py +//// from typing import TypedDict, Union, List +//// +//// class Movie(TypedDict): +//// name: str +//// age: int +//// +//// class MultipleInputs(TypedDict): +//// items: List[Movie] +//// union: Union[bool, List[Movie]] +//// unions: Union[Movie, Union[bool, List[Movie]]] +//// +//// def 
thing(inputs: MultipleInputs): +//// pass +//// +//// thing({'items': ['[|/*marker1*/|]']}) +//// thing({'items': {'[|/*marker2*/|]'}}) +//// thing({'items': [{'[|/*marker3*/|]'}]}) +//// thing({'union': [{'[|/*marker4*/|]'}]}) +//// thing({'unions': {'[|/*marker5*/|]'}}) +//// thing({'unions': [{'[|/*marker6*/|]'}]}) +//// +//// def thing2(movies: List[Movie]): +//// pass +//// +//// thing2([{'[|/*marker7*/|]'}]) +//// thing2({'[|/*marker8*/|]'}) +//// +//// class Wrapper(TypedDict): +//// wrapped: MultipleInputs +//// +//// def thing3(wrapper: Wrapper): +//// pass +//// +//// thing3({'wrapped': {'items': [{'[|/*marker9*/|]'}]}}) +//// thing3({'wrapped': {'items': {'[|/*marker10*/|]'}}}) +//// thing3({'wrapped': {'items': [{'a': 'b'}, {'[|/*marker11*/|]'}]}}) + +{ + const marker3Range = helper.expandPositionRange(helper.getPositionRange('marker3'), 1, 1); + const marker4Range = helper.expandPositionRange(helper.getPositionRange('marker4'), 1, 1); + const marker5Range = helper.expandPositionRange(helper.getPositionRange('marker5'), 1, 1); + const marker6Range = helper.expandPositionRange(helper.getPositionRange('marker6'), 1, 1); + const marker7Range = helper.expandPositionRange(helper.getPositionRange('marker7'), 1, 1); + const marker9Range = helper.expandPositionRange(helper.getPositionRange('marker9'), 1, 1); + const marker11Range = helper.expandPositionRange(helper.getPositionRange('marker11'), 1, 1); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [], + }, + marker2: { + completions: [], + }, + marker3: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker3Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker3Range, newText: "'age'" }, + }, + ], + }, + marker4: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: 
marker4Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker4Range, newText: "'age'" }, + }, + ], + }, + marker5: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker5Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker5Range, newText: "'age'" }, + }, + ], + }, + marker6: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker6Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker6Range, newText: "'age'" }, + }, + ], + }, + marker7: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker7Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker7Range, newText: "'age'" }, + }, + ], + }, + marker8: { + completions: [], + }, + marker9: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker9Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker9Range, newText: "'age'" }, + }, + ], + }, + marker10: { + completions: [], + }, + marker11: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker11Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker11Range, newText: "'age'" }, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.states.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.states.fourslash.ts new file mode 100644 index 
00000000..e16d2187 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.states.fourslash.ts @@ -0,0 +1,135 @@ +/// + +// @filename: test.py +//// from typing import TypedDict +//// +//// class Movie(TypedDict): +//// name: str +//// age: int +//// +//// def thing(movie: Movie): +//// pass +//// +//// thing(movie={'foo': 'a', '[|/*marker1*/|]'}) +//// thing(movie={'foo': 'a', 'a[|/*marker2*/|]'}) +//// thing( +//// movie={ +//// 'name': 'Parasite', +//// '[|/*marker3*/|] +//// } +//// ) +//// thing( +//// movie={ +//// 'name': 'Parasite', +//// '[|/*marker4*/|]' +//// } +//// ) +//// thing({ +//// 'name': 'Parasite', +//// # hello world +//// '[|/*marker5*/|]' +//// }) +//// thing({'foo': '[|/*marker6*/|]'}) + +{ + // completions that rely on token parsing instead of node parsing + const marker1Range = helper.expandPositionRange(helper.getPositionRange('marker1'), 1, 1); + const marker2Range = helper.expandPositionRange(helper.getPositionRange('marker2'), 2, 1); + const marker3Range = helper.expandPositionRange(helper.getPositionRange('marker3'), 1, 0); + const marker4Range = helper.expandPositionRange(helper.getPositionRange('marker4'), 1, 1); + const marker5Range = helper.expandPositionRange(helper.getPositionRange('marker5'), 1, 1); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker1Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker1Range, newText: "'age'" }, + }, + ], + }, + marker2: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker2Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker2Range, newText: "'age'" }, + }, + ], + }, + marker6: { + completions: 
[], + }, + }); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker3: { + completions: [ + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker3Range, newText: "'age'" }, + }, + ], + }, + marker4: { + completions: [ + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker4Range, newText: "'age'" }, + }, + ], + }, + marker5: { + completions: [ + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker5Range, newText: "'age'" }, + }, + ], + }, + }); + + // @ts-ignore + await helper.verifyCompletion('excluded', 'markdown', { + marker3: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + }, + ], + }, + marker4: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + }, + ], + }, + marker5: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.classVariable.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.classVariable.fourslash.ts new file mode 100644 index 00000000..695faa8d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.classVariable.fourslash.ts @@ -0,0 +1,135 @@ +/// +// @filename: pyrightconfig.json +//// { +//// "pythonVersion": "3.11" +//// } + +// @filename: test.py +//// class MyType: pass +//// +//// class B: +//// var1 = 1 +//// var2: MyType +//// var3: list[str] = ["hello"] +//// __var4 = 4 +//// +//// def __init__(self): +//// self.var6 = 1 +//// +//// class T(B): +//// var5: bool +//// [|va/*marker1*/|] +//// +//// class T1(B): +//// var2: [|/*marker2*/|] +//// +//// class T2(B): +//// var3: [|/*marker3*/|] + +// @filename: test2.py +//// from typing import Generic, Sequence, TypeVar +//// +//// +//// T = TypeVar("T") +//// 
+//// class A(Generic[T]): +//// var: Sequence[T] +//// +//// class B(A[int]): +//// var: [|/*marker4*/|] +//// +//// T2 = TypeVar("T2") +//// +//// class C(A[T2]): +//// var: [|/*marker5*/|] + +// @filename: test3.py +//// from typing import Generic, TypeVarTuple +//// +//// T = TypeVarTuple('T') +//// +//// class MyType(Generic[*T]): +//// pass +//// +//// class A(Generic[*T]): +//// var: MyType[*T] +//// +//// class B(A[int, str, float]): +//// var: [|/*marker6*/|] +//// +//// T2 = TypeVarTuple('T2') +//// +//// class C(A[int, *T2]): +//// var: [|/*marker7*/|] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'var1', + kind: Consts.CompletionItemKind.Variable, + }, + { + label: 'var2', + kind: Consts.CompletionItemKind.Variable, + }, + { + label: 'var3', + kind: Consts.CompletionItemKind.Variable, + }, + ], + }, + marker2: { + completions: [ + { + label: 'MyType', + kind: Consts.CompletionItemKind.Reference, + }, + ], + }, + marker3: { + completions: [ + { + label: 'list[str]', + kind: Consts.CompletionItemKind.Reference, + }, + ], + }, + marker4: { + completions: [ + { + label: 'Sequence[int]', + kind: Consts.CompletionItemKind.Reference, + }, + ], + }, + marker5: { + completions: [ + { + label: 'Sequence[T2]', + kind: Consts.CompletionItemKind.Reference, + }, + ], + }, + marker6: { + completions: [ + { + label: 'MyType[int, str, float]', + kind: Consts.CompletionItemKind.Reference, + }, + ], + }, + marker7: { + completions: [ + { + label: 'MyType[int, *T2]', + kind: Consts.CompletionItemKind.Reference, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.comment.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.comment.fourslash.ts new file mode 100644 index 00000000..31809ff4 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.comment.fourslash.ts @@ -0,0 +1,28 @@ +/// + +// @filename: test.py +//// msg = 'hello' +//// [|/*marker1*/|] +//// # msg.[|/*marker2*/|] +//// [|/*marker3*/|] +//// print('upper: ' + msg.up[|/*marker4*/|]per()) +//// print('#upper: ' + msg.up[|/*marker5*/|]per()) +//// +//// # msg.[|/*marker6*/|] +//// [|/*marker7*/|] +//// + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker2: { completions: [] }, + marker6: { completions: [] }, +}); + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { completions: [{ label: 'msg', kind: Consts.CompletionItemKind.Variable }] }, + marker3: { completions: [{ label: 'msg', kind: Consts.CompletionItemKind.Variable }] }, + marker4: { completions: [{ label: 'upper', kind: Consts.CompletionItemKind.Method }] }, + marker5: { completions: [{ label: 'upper', kind: Consts.CompletionItemKind.Method }] }, + marker7: { completions: [{ label: 'msg', kind: Consts.CompletionItemKind.Variable }] }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.class.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.class.fourslash.ts new file mode 100644 index 00000000..d51d4d2f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.class.fourslash.ts @@ -0,0 +1,35 @@ +/// + +// @filename: test.py +//// class [|/*marker1*/|] +//// + +// @filename: test1.py +//// class c[|/*marker2*/|] +//// + +// @filename: test2.py +//// class c1[|/*marker3*/|](): +//// pass +//// + +// @filename: test3.py +//// class c1([|/*marker4*/|]): +//// pass +//// + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { completions: [] }, + marker2: { completions: [] }, + marker3: { completions: [] }, + }); + + // 
@ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker4: { completions: [{ label: 'Exception', kind: Consts.CompletionItemKind.Class }] }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.exception.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.exception.fourslash.ts new file mode 100644 index 00000000..af878d28 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.exception.fourslash.ts @@ -0,0 +1,18 @@ +/// + +// @filename: test.py +//// try: +//// pass +//// except ZeroDivisionError as d[|/*marker1*/|]: +//// pass +//// +//// try: +//// pass +//// except ZeroDivisionError as [|/*marker2*/|]: +//// pass + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker1: { completions: [] }, + marker2: { completions: [] }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.for.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.for.fourslash.ts new file mode 100644 index 00000000..790542b2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.for.fourslash.ts @@ -0,0 +1,30 @@ +/// + +// @filename: test1.py +//// for [|/*marker1*/|] +//// + +// @filename: test2.py +//// for c[|/*marker2*/|] +//// + +// @filename: test3.py +//// for c1[|/*marker3*/|] in [1, 2]: +//// pass +//// + +// @filename: test4.py +//// [c for c[|/*marker4*/|] in [1, 2]] +//// + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { completions: [{ label: 'in', kind: Consts.CompletionItemKind.Keyword }] }, + marker2: { completions: [] }, + marker3: { completions: [] }, + marker4: { completions: [] }, + }); +} diff --git 
a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.importAlias.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.importAlias.fourslash.ts new file mode 100644 index 00000000..38d5b8e7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.importAlias.fourslash.ts @@ -0,0 +1,15 @@ +/// + +// @filename: test.py +//// import os as o[|/*marker1*/|] +//// import os as [|/*marker2*/|] +//// from os import path as p[|/*marker3*/|] +//// from os import path as [|/*marker4*/|] + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker1: { completions: [] }, + marker2: { completions: [] }, + marker3: { completions: [] }, + marker4: { completions: [] }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.lambda.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.lambda.fourslash.ts new file mode 100644 index 00000000..489c8146 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.lambda.fourslash.ts @@ -0,0 +1,45 @@ +/// + +// @filename: test.py +//// lambda [|/*marker1*/|] + +// @filename: test1.py +//// lambda x[|/*marker2*/|] + +// @filename: test2.py +//// lambda x[|/*marker3*/|]: + +// @filename: test3.py +//// lambda x, [|/*marker4*/|] + +// @filename: test4.py +//// lambda x, [|/*marker5*/|]: + +// @filename: test5.py +//// lambda x, y[|/*marker6*/|] + +// @filename: test6.py +//// lambda x, y[|/*marker7*/|]: + +// @filename: test7.py +//// lambda x: [|/*marker8*/|] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { completions: [] }, + marker2: { completions: [] }, + marker3: { completions: [] }, + marker4: { completions: [] }, + marker5: { completions: [] }, + marker6: { 
completions: [] }, + marker7: { completions: [] }, + }); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker8: { completions: [{ label: 'str', kind: Consts.CompletionItemKind.Class }] }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.method.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.method.fourslash.ts new file mode 100644 index 00000000..7cbacdeb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.method.fourslash.ts @@ -0,0 +1,71 @@ +/// + +// @filename: test.py +//// def [|/*marker1*/|] +//// +//// def d[|/*marker2*/|] +//// +//// def d1[|/*marker3*/|](): +//// pass +//// +//// async def [|/*marker4*/|] +//// +//// async def a[|/*marker5*/|] +//// +//// async def a1[|/*marker6*/|](): +//// pass +//// +//// def method(x[|/*marker7*/|]): +//// pass +//// def method(x:[|/*marker8*/|]): +//// pass +//// +//// def method(x, x2[|/*marker9*/|]): +//// pass +//// def method(x, x2:[|/*marker10*/|]): +//// pass + +// @filename: test1.py +//// class A: +//// def a1[|/*marker11*/|] +//// +//// def a2[|/*marker12*/|](): +//// pass +//// +//// def method(x[|/*marker13*/|]): +//// pass +//// def method(x:[|/*marker14*/|]): +//// pass +//// +//// def method(x, x2[|/*marker15*/|]): +//// pass +//// def method(x, x2:[|/*marker16*/|]): +//// pass + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { completions: [] }, + marker2: { completions: [] }, + marker3: { completions: [] }, + marker4: { completions: [] }, + marker5: { completions: [] }, + marker6: { completions: [] }, + marker7: { completions: [] }, + marker9: { completions: [] }, + marker11: { completions: [] }, + marker12: { completions: [] }, + marker13: { completions: [] }, + marker15: { completions: [] }, + }); + + // @ts-ignore 
+ await helper.verifyCompletion('included', 'markdown', { + marker8: { completions: [{ label: 'str', kind: Consts.CompletionItemKind.Class }] }, + marker10: { completions: [{ label: 'str', kind: Consts.CompletionItemKind.Class }] }, + marker14: { completions: [{ label: 'str', kind: Consts.CompletionItemKind.Class }] }, + marker16: { completions: [{ label: 'str', kind: Consts.CompletionItemKind.Class }] }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.overload.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.overload.fourslash.ts new file mode 100644 index 00000000..8ae90e0c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.overload.fourslash.ts @@ -0,0 +1,156 @@ +/// + +// @filename: test1.py +//// from typing import overload +//// +//// class A: +//// @overload +//// def [|met/*marker1*/|] + +// @filename: test2.py +//// from typing import overload +//// +//// class A: +//// @overload +//// def [|met/*marker2*/|]() + +// @filename: test3.py +//// from typing import overload +//// +//// class A: +//// @overload +//// def [|met/*marker3*/|](): +//// pass + +// @filename: test4.py +//// from typing import overload +//// +//// class A: +//// @overload +//// def method(self): +//// pass +//// @overload +//// def [|met/*marker4*/|] + +// @filename: test5.py +//// from typing import overload +//// +//// class A: +//// @overload +//// def method(self): +//// pass +//// @overload +//// def method2(self): +//// pass +//// @overload +//// def [|met/*marker5*/|] + +// @filename: test6.py +//// from typing import overload +//// +//// class A: +//// @overload +//// def method(self): +//// pass +//// @overload +//// def method2(self): +//// pass +//// @overload +//// def [|diff/*marker6*/|] + +// @filename: test7.py +//// from typing import overload +//// +//// class A: +//// @overload +//// def method(self): +//// pass 
+//// @overload +//// def method2(self): +//// pass +//// +//// class B(A): +//// @overload +//// def method3(self): +//// pass +//// @overload +//// def [|met/*marker7*/|] + +// @filename: test8.py +//// from typing import overload +//// +//// class A: +//// @overload +//// def method(self): +//// pass +//// @overload +//// def [|method/*marker8*/|](self, a): +//// pass + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + const marker4Range = helper.getPositionRange('marker4'); + const marker5Range = helper.getPositionRange('marker5'); + const marker7Range = helper.getPositionRange('marker7'); + const marker8Range = helper.getPositionRange('marker8'); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { completions: [] }, + marker2: { completions: [] }, + marker3: { completions: [] }, + marker4: { + completions: [ + { + label: 'method', + kind: Consts.CompletionItemKind.Method, + textEdit: { range: marker4Range, newText: 'method' }, + }, + ], + }, + marker5: { + completions: [ + { + label: 'method', + kind: Consts.CompletionItemKind.Method, + textEdit: { range: marker5Range, newText: 'method' }, + }, + { + label: 'method2', + kind: Consts.CompletionItemKind.Method, + textEdit: { range: marker5Range, newText: 'method2' }, + }, + ], + }, + marker6: { completions: [] }, + marker7: { + completions: [ + { + label: 'method', + kind: Consts.CompletionItemKind.Method, + textEdit: { range: marker7Range, newText: 'method' }, + }, + { + label: 'method2', + kind: Consts.CompletionItemKind.Method, + textEdit: { range: marker7Range, newText: 'method2' }, + }, + { + label: 'method3', + kind: Consts.CompletionItemKind.Method, + textEdit: { range: marker7Range, newText: 'method3' }, + }, + ], + }, + marker8: { + completions: [ + { + label: 'method', + kind: Consts.CompletionItemKind.Method, + textEdit: { range: marker8Range, newText: 'method' }, + }, + ], + }, + }); +} diff --git 
a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.topLevelOverload.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.topLevelOverload.fourslash.ts new file mode 100644 index 00000000..cb941b6e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.declNames.topLevelOverload.fourslash.ts @@ -0,0 +1,141 @@ +/// + +// @filename: test1.py +//// from typing import overload +//// +//// @overload +//// def [|met/*marker1*/|] + +// @filename: test2.py +//// from typing import overload +//// +//// @overload +//// def [|met/*marker2*/|]() + +// @filename: test3.py +//// from typing import overload +//// +//// @overload +//// def [|met/*marker3*/|](): +//// pass + +// @filename: test4.py +//// from typing import overload +//// +//// @overload +//// def method(self): +//// pass +//// @overload +//// def [|met/*marker4*/|] + +// @filename: test5.py +//// from typing import overload +//// +//// @overload +//// def method(self): +//// pass +//// @overload +//// def method2(self): +//// pass +//// @overload +//// def [|met/*marker5*/|] + +// @filename: test6.py +//// from typing import overload +//// +//// @overload +//// def method(self): +//// pass +//// @overload +//// def method2(self): +//// pass +//// @overload +//// def [|diff/*marker6*/|] + +// @filename: test7.py +//// from typing import overload +//// +//// @overload +//// def method(self): +//// pass +//// @overload +//// def method2(self): +//// pass + +// @filename: test7-1.py +//// from typing import overload +//// from test7 import method, method2 +//// +//// @overload +//// def method3(self): +//// pass +//// @overload +//// def [|met/*marker7*/|] + +// @filename: test8.py +//// from typing import overload +//// +//// @overload +//// def method(self): +//// pass +//// @overload +//// def [|method/*marker8*/|](self, a): +//// pass + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + 
+ const marker4Range = helper.getPositionRange('marker4'); + const marker5Range = helper.getPositionRange('marker5'); + const marker7Range = helper.getPositionRange('marker7'); + const marker8Range = helper.getPositionRange('marker8'); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { completions: [] }, + marker2: { completions: [] }, + marker3: { completions: [] }, + marker4: { + completions: [ + { + label: 'method', + kind: Consts.CompletionItemKind.Function, + textEdit: { range: marker4Range, newText: 'method' }, + }, + ], + }, + marker5: { + completions: [ + { + label: 'method', + kind: Consts.CompletionItemKind.Function, + textEdit: { range: marker5Range, newText: 'method' }, + }, + { + label: 'method2', + kind: Consts.CompletionItemKind.Function, + textEdit: { range: marker5Range, newText: 'method2' }, + }, + ], + }, + marker6: { completions: [] }, + marker7: { + completions: [ + { + label: 'method3', + kind: Consts.CompletionItemKind.Function, + textEdit: { range: marker7Range, newText: 'method3' }, + }, + ], + }, + marker8: { + completions: [ + { + label: 'method', + kind: Consts.CompletionItemKind.Function, + textEdit: { range: marker8Range, newText: 'method' }, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.complex.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.complex.fourslash.ts new file mode 100644 index 00000000..508a7dad --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.complex.fourslash.ts @@ -0,0 +1,76 @@ +/// + +// @filename: dict_key_complex.py +//// d = { "key" : 1 } +//// +//// if (len(d) > 0): +//// d["ifKey"] = 2 +//// +//// def foo(): +//// d["capturedKey"] = 3 +//// +//// class C: +//// def method(self): +//// d["capturedInsideOfMethod"] = 4 +//// +//// d = dict(reassignedKey=5) +//// +//// d[[|/*marker1*/|]] + +// 
@filename: dict_expression_symbol.py +//// keyString = "key" +//// d = { keyString : 1 } +//// d[k/*marker2*/] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"ifKey"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: '"ifKey"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"capturedKey"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: '"capturedKey"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"capturedInsideOfMethod"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: '"capturedInsideOfMethod"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"reassignedKey"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: '"reassignedKey"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker2: { + completions: [{ label: 'keyString', kind: Consts.CompletionItemKind.Variable }], + }, + }); + + // @ts-ignore + await helper.verifyCompletion('excluded', 'markdown', { + marker2: { + // Regular symbol should over take dict key. 
+ completions: [{ label: 'keyString', kind: Consts.CompletionItemKind.Constant }], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.expression.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.expression.fourslash.ts new file mode 100644 index 00000000..eb577dba --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.expression.fourslash.ts @@ -0,0 +1,75 @@ +/// + +// @filename: dict_expression_number.py +//// d = { 1: 1 } +//// d[2] = 1 +//// d[/*marker1*/] + +// @filename: dict_expression_tuple.py +//// d = { (1, 2): 1 } +//// d[(2, 3)] = 1 +//// d[/*marker2*/] + +// @filename: dict_expression_key_expression.py +//// d = { 1 + 2: 1 } +//// d[2 + 3] = 1 +//// d[/*marker3*/] + +// @filename: dict_expression_partial_expression.py +//// d = { "key" : 1 } +//// d["key2"] = 1 +//// d[[|key/*marker4*/|]] + +// @filename: dict_expression_complex_key.py +//// class C: +//// key = "name" +//// +//// d = { C.key : 1 } +//// d[key/*marker5*/] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { label: '1', kind: Consts.CompletionItemKind.Constant, detail: Consts.IndexValueDetail }, + { label: '2', kind: Consts.CompletionItemKind.Constant, detail: Consts.IndexValueDetail }, + ], + }, + marker2: { + completions: [ + { label: '(1, 2)', kind: Consts.CompletionItemKind.Constant, detail: Consts.IndexValueDetail }, + { label: '(2, 3)', kind: Consts.CompletionItemKind.Constant, detail: Consts.IndexValueDetail }, + ], + }, + marker3: { + completions: [ + { label: '1 + 2', kind: Consts.CompletionItemKind.Constant, detail: Consts.IndexValueDetail }, + { label: '2 + 3', kind: Consts.CompletionItemKind.Constant, detail: Consts.IndexValueDetail }, + ], + }, + marker4: { + completions: [ + { + 
label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker4'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"key2"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker4'), newText: '"key2"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker5: { + completions: [ + { label: 'C.key', kind: Consts.CompletionItemKind.Constant, detail: Consts.IndexValueDetail }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.literalTypes.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.literalTypes.fourslash.ts new file mode 100644 index 00000000..0ef1af1e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.literalTypes.fourslash.ts @@ -0,0 +1,161 @@ +/// + +// @filename: literal_types.py +//// from typing import Mapping, Literal +//// +//// d: Mapping[Literal["key", "key2"], int] = { "key" : 1 } +//// d[[|/*marker1*/|]] + +// @filename: parameter_mapping.py +//// from typing import Mapping, Literal +//// +//// def foo(d: Mapping[Literal["key", "key2"], int]): +//// d[[|/*marker2*/|]] + +// @filename: literal_types_mixed.py +//// from typing import Mapping, Literal +//// +//// d: Mapping[Literal["key", 1], int] = { "key" : 1 } +//// d[[|/*marker3*/|]] + +// @filename: parameter_dict.py +//// from typing import Dict, Literal +//// +//// def foo(d: Dict[Literal["key", "key2"], int]): +//// d[[|/*marker4*/|]] + +// @filename: literal_types_boolean.py +//// from typing import Dict, Literal +//// +//// d: Dict[Literal[True, False], int] = { True: 1, False: 2 } +//// d[[|/*marker5*/|]] + +// @filename: literal_types_enum.py +//// from typing import Dict, Literal +//// from enum import Enum +//// +//// class MyEnum(Enum): +//// red = 1 +//// blue = 2 +//// +//// def foo(d: 
Dict[Literal[MyEnum.red, MyEum.blue], int]): +//// d[[|/*marker6/|]] + +// @filename: literal_bytes.py +//// from typing import Mapping, Literal +//// +//// d: Mapping[Literal[b"key", b"key2"], int] = { b"key" : 1 } +//// d[[|/*marker7*/|]] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"key2"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: '"key2"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker2: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"key2"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: '"key2"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker3: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker3'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '1', + kind: Consts.CompletionItemKind.Constant, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker4: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker4'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"key2"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker4'), newText: '"key2"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker5: { + completions: [ + { + label: 'True', + kind: 
Consts.CompletionItemKind.Constant, + detail: Consts.IndexValueDetail, + }, + { + label: 'False', + kind: Consts.CompletionItemKind.Constant, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker6: { + completions: [ + { + label: 'MyEnum.red', + kind: Consts.CompletionItemKind.Constant, + detail: Consts.IndexValueDetail, + }, + { + label: 'MyEnum.blue', + kind: Consts.CompletionItemKind.Constant, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker7: { + completions: [ + { + label: 'b"key"', + kind: Consts.CompletionItemKind.Constant, + detail: Consts.IndexValueDetail, + }, + { + label: 'b"key2"', + kind: Consts.CompletionItemKind.Constant, + detail: Consts.IndexValueDetail, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.simple.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.simple.fourslash.ts new file mode 100644 index 00000000..41c00a06 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.simple.fourslash.ts @@ -0,0 +1,151 @@ +/// + +// @filename: simple_dict_expression.py +//// d = { "key" : 1 } +//// d[[|/*marker1*/|]] + +// @filename: simple_dict_constructor.py +//// d = dict(key=1) +//// d[[|/*marker2*/|]] + +// @filename: dict_expression_index.py +//// d = {} +//// d["key"] = 1 +//// d[[|/*marker3*/|]] + +// @filename: dict_constructor_index.py +//// d = dict() +//// d["key"] = 1 +//// d[[|/*marker4*/|]] + +// @filename: dict_expression_multiple_keys.py +//// d = { "key": 1, "key2": 2 } +//// d["key3"] = 3 +//// d[[|/*marker5*/|]] + +// @filename: dict_constructor_multiple_keys.py +//// d = dict(key=1, key2=2) +//// d["key3"] = 3 +//// d[[|/*marker6*/|]] + +// @filename: dict_expression_typeAnnotation.py +//// from typing import Mapping +//// d: Mapping[str, int] = { "key": 1} +//// d[[|/*marker7*/|]] + +// @filename: dict_constructor_typeAnnotation.py +//// 
from typing import Mapping +//// d: Mapping[str, int] = dict(key=1) +//// d[[|/*marker8*/|]] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker2: { + completions: [ + { + label: "'key'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: "'key'" }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker3: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker3'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker4: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker4'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker5: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker5'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"key2"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker5'), newText: '"key2"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"key3"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker5'), newText: '"key3"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker6: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker6'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"key2"', + kind: Consts.CompletionItemKind.Constant, 
+ textEdit: { range: helper.getPositionRange('marker6'), newText: '"key2"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"key3"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker6'), newText: '"key3"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker7: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker7'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker8: { + completions: [ + { + label: "'key'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker8'), newText: "'key'" }, + detail: Consts.IndexValueDetail, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.stringLiterals.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.stringLiterals.fourslash.ts new file mode 100644 index 00000000..0cb2ae77 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.stringLiterals.fourslash.ts @@ -0,0 +1,121 @@ +/// + +// @filename: string_literals.py +//// d = { "key" : 1 } +//// d[[|"/*marker1*/"|]] + +// @filename: dict_constructor.py +//// d = dict(key=1) +//// d[[|"/*marker2*/"|]] + +// @filename: dict_key_no_end.py +//// d = { "key": 1 } +//// d[[|"/*marker3*/|]] + +// @filename: dict_key_partial.py +//// d = dict(key=1) +//// d[[|"k/*marker4*/"|]] + +// @filename: dict_key_stringLiteralsOnly.py +//// name = "key" +//// d = { name: 1 } +//// d["key2"] = 2 +//// d[[|/*marker5*/|]] + +// @filename: dict_key_name_conflicts.py +//// keyString = "key" +//// d = dict(keyString=1) +//// d[[|keyStr/*marker6*/|]] + +// @filename: dict_key_mixed_literals.py +//// d = { "key": 1, 1 + 2: 1 } +//// d[[|/*marker7*/|]] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + 
+ // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker2: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker3: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker3'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker4: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker4'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker5: { + completions: [ + { + label: 'name', + kind: Consts.CompletionItemKind.Constant, + detail: Consts.IndexValueDetail, + }, + { + label: '"key2"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker5'), newText: '"key2"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker6: { + completions: [ + { label: 'keyString', kind: Consts.CompletionItemKind.Variable }, + { + label: '"keyString"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker6'), newText: '"keyString"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker7: { + completions: [ + { + label: '"key"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker7'), newText: '"key"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '1 + 2', + kind: Consts.CompletionItemKind.Constant, + detail: Consts.IndexValueDetail, + }, + ], + }, + }); +} diff --git 
a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.symbols.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.symbols.fourslash.ts new file mode 100644 index 00000000..0498ac20 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dictionary.keys.symbols.fourslash.ts @@ -0,0 +1,53 @@ +/// + +// @filename: string_literals_with_symbols.py +//// d = { "key-1" : 1 } +//// d[[|"/*marker1*/"|]] + +// @filename: string_literals_with_symbols2.py +//// d = { "key\"yo\"" : 1 } +//// d[[|"/*marker2*/"|]] + +// @filename: string_literals_duplicates.py +//// d = { "hello" : 1 } +//// d["hello"] = 2 +//// +//// d[[|"/*marker3*/"|]] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: '"key-1"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: '"key-1"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker2: { + completions: [ + { + label: '"key\\"yo\\""', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: '"key\\"yo\\""' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker3: { + completions: [ + { + label: '"hello"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker3'), newText: '"hello"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dunderNew.Inheritance.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dunderNew.Inheritance.fourslash.ts new file mode 100644 index 00000000..9378129f --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dunderNew.Inheritance.fourslash.ts @@ -0,0 +1,30 @@ +/// + +// @filename: test.py +//// class Parent: +//// def __init__(self, *args: Any, **kwargs: Any): +//// pass +//// +//// def __new__(cls, *args: Any, **kwargs: Any): +//// pass +//// +//// class Child(Parent): +//// def __new__(cls, name:str): +//// return super().__new__(cls, name) +//// +//// class GrandChild(Child): +//// pass + +//// x = GrandChild([|/*marker1*/|]) + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'name=', + kind: Consts.CompletionItemKind.Variable, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dunderNew.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dunderNew.fourslash.ts new file mode 100644 index 00000000..4a7dbb16 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.dunderNew.fourslash.ts @@ -0,0 +1,20 @@ +/// + +// @filename: test.py +//// class Foo: +//// def __new__(cls, name: str): +//// return super().__new__(cls) +//// +//// x = Foo([|/*marker1*/|]) + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'name=', + kind: Consts.CompletionItemKind.Variable, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.enums.members.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.enums.members.fourslash.ts new file mode 100644 index 00000000..bf2e6f06 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.enums.members.fourslash.ts @@ -0,0 +1,44 @@ +/// + +// @filename: test.py +//// from enum import Enum +//// class Color(Enum): +//// RED = 1 +//// GREEN = 2 +//// BLUE = 3 +//// +//// NotAMember: int = 3 +//// +//// @property 
+//// def a_prop(self): +//// pass +//// +//// Color./*marker*/ + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'BLUE', + kind: Consts.CompletionItemKind.EnumMember, + }, + { + label: 'GREEN', + kind: Consts.CompletionItemKind.EnumMember, + }, + { + label: 'RED', + kind: Consts.CompletionItemKind.EnumMember, + }, + { + label: 'a_prop', + kind: Consts.CompletionItemKind.Property, + }, + { + label: 'NotAMember', + kind: Consts.CompletionItemKind.Variable, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.errorNodes.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.errorNodes.fourslash.ts new file mode 100644 index 00000000..74f6c8c3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.errorNodes.fourslash.ts @@ -0,0 +1,27 @@ +/// + +// @filename: test.py +//// import os + +//// class App(): +//// def __init(self): +//// self.instance_path = "\\foo" + +//// app = App() +//// try: +//// os.makedirs(app.in[|/*marker*/|]) + +//// except: +//// pass + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'instance_path', + kind: Consts.CompletionItemKind.Variable, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.excluded.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.excluded.fourslash.ts new file mode 100644 index 00000000..b295ef52 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.excluded.fourslash.ts @@ -0,0 +1,12 @@ +/// + +// @filename: test.py +//// a = 42 +//// a.n[|/*marker1*/|] + +// @ts-ignore +await helper.verifyCompletion('excluded', 'markdown', { + marker1: { + completions: [{ label: 'capitalize', kind: undefined }], + }, +}); diff --git 
a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.fourslash.ts new file mode 100644 index 00000000..bfa63d8e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.fourslash.ts @@ -0,0 +1,51 @@ +/// + +// @filename: test.py +//// import time +//// time.gm[|/*marker1*/|] +//// aaaaaa = 100 +//// aaaaa[|/*marker2*/|] +//// def some_func1(a): +//// '''some function docs''' +//// pass +//// def some_func2(a): +//// '''another function docs''' +//// pass +//// some_fun[|/*marker3*/|] +//// unknownVariable.[|/*marker4*/|] + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: 'gmtime', + kind: Consts.CompletionItemKind.Function, + }, + { + label: 'clock_gettime', + kind: Consts.CompletionItemKind.Function, + }, + { + label: 'clock_gettime_ns', + kind: Consts.CompletionItemKind.Function, + }, + ], + }, + marker2: { completions: [{ label: 'aaaaaa', kind: Consts.CompletionItemKind.Variable }] }, + marker3: { + completions: [ + { + label: 'some_func1', + kind: Consts.CompletionItemKind.Function, + documentation: '```python\ndef some_func1(a: Unknown) -> None\n```\n---\nsome function docs', + }, + { + label: 'some_func2', + kind: Consts.CompletionItemKind.Function, + documentation: '```python\ndef some_func2(a: Unknown) -> None\n```\n---\nanother function docs', + }, + ], + }, + marker4: { completions: [] }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.fstring.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.fstring.fourslash.ts new file mode 100644 index 00000000..d21cc9d7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.fstring.fourslash.ts @@ -0,0 +1,33 @@ +/// + +// @filename: test.py +//// msg = "gekki" +//// +//// a = f"{[|/*marker1*/|]}" 
+//// b = f"{msg.c[|/*marker2*/|]}" +//// c = f"{msg.[|/*marker3*/|]}" +//// d = f"msg.[|/*marker4*/|]{msg}" +//// e = f"{msg}msg.[|/*marker5*/|]" + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [{ label: 'msg', kind: Consts.CompletionItemKind.Variable }], + }, + marker2: { + completions: [{ label: 'count', kind: Consts.CompletionItemKind.Method }], + }, + marker3: { + completions: [{ label: 'capitalize', kind: Consts.CompletionItemKind.Method }], + }, +}); + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker4: { + completions: [], + }, + marker5: { + completions: [], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.fstring.stringLiteral.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.fstring.stringLiteral.fourslash.ts new file mode 100644 index 00000000..b1d353da --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.fstring.stringLiteral.fourslash.ts @@ -0,0 +1,96 @@ +/// + +// @filename: test.py +//// from typing import TypedDict +//// +//// class Movie(TypedDict): +//// name: str +//// age: int +//// +//// m = Movie(name="hello", age=10) +//// +//// a = f"{m[[|/*marker1*/|]]}" +//// b = f'{m[[|/*marker2*/|]]}' +//// c = f'{m[[|"ag/*marker3*/"|]]}' +//// +//// +//// +//// m2 = { 'name' : "hello" } +//// +//// d = f"{m2[[|/*marker4*/|]]}" +//// e = f'{m2[[|/*marker5*/|]]}' +//// f = f'{m2[[|"na/*marker6*/"|]]}' +//// + +{ + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: "'age'" }, + }, + ], + }, + marker2: { + completions: [ + { + 
label: '"name"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: '"name"' }, + }, + { + label: '"age"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: '"age"' }, + }, + ], + }, + marker3: { + completions: [ + { + label: '"age"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker3'), newText: '"age"' }, + }, + ], + }, + marker4: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + detail: Consts.IndexValueDetail, + textEdit: { range: helper.getPositionRange('marker4'), newText: "'name'" }, + }, + ], + }, + marker5: { + completions: [ + { + label: '"name"', + kind: Consts.CompletionItemKind.Constant, + detail: Consts.IndexValueDetail, + textEdit: { range: helper.getPositionRange('marker5'), newText: '"name"' }, + }, + ], + }, + marker6: { + completions: [ + { + label: '"name"', + kind: Consts.CompletionItemKind.Constant, + detail: Consts.IndexValueDetail, + textEdit: { range: helper.getPositionRange('marker6'), newText: '"name"' }, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.fuzzyMatching.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.fuzzyMatching.fourslash.ts new file mode 100644 index 00000000..0ccf7365 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.fuzzyMatching.fourslash.ts @@ -0,0 +1,30 @@ +/// + +// @filename: test.py +//// from unittest.mock import MagicMock +//// mock = MagicMock() +//// mock.call[|/*marker1*/|] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'call_args', + kind: Consts.CompletionItemKind.Variable, + }, + { + label: 'called', + kind: Consts.CompletionItemKind.Variable, + }, + { + label: '__call__', + kind: 
Consts.CompletionItemKind.Method, + }, + { + label: 'assert_called', + kind: Consts.CompletionItemKind.Method, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.dunderNames.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.dunderNames.fourslash.ts new file mode 100644 index 00000000..771734c2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.dunderNames.fourslash.ts @@ -0,0 +1,31 @@ +/// + +// @filename: test.py +//// import _[|/*marker1*/|] +//// import __pycache__[|/*marker2*/|] +//// from test2 import _[|/*marker3*/|] +//// from test2 import [|/*marker4*/|] + +// @filename: test2.py +//// def foo(): +//// pass + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [{ label: '__future__', kind: Consts.CompletionItemKind.Module }], + }, + marker4: { + completions: [{ label: 'foo', kind: Consts.CompletionItemKind.Function }], + }, +}); + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker2: { + completions: [], + }, + marker3: { + completions: [], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.duplicates.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.duplicates.fourslash.ts new file mode 100644 index 00000000..8191766f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.duplicates.fourslash.ts @@ -0,0 +1,23 @@ +// @filename: test_no_duplicate_tseries_completions.py +//// from testLib import [|t/*marker*/|] + +// @filename: testLib/__init__.pyi +// @library: true +//// import tseries +//// __all__ = ['tseries'] + +// @filename: testLib/tseries/__init__.pyi +// @library: true +// + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker: { + completions: [ + { + label: 
'tseries', + kind: Consts.CompletionItemKind.Module, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.exactMatch.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.exactMatch.fourslash.ts new file mode 100644 index 00000000..5aae9dcd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.exactMatch.fourslash.ts @@ -0,0 +1,30 @@ +/// + +// @filename: test.py +//// from testLib import [|/*marker1*/|]En[|/*marker2*/|]um[|/*marker3*/|] + +// @filename: testLib.py +//// class Enum: pass +//// class EnumCheck: pass + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { label: 'Enum', kind: Consts.CompletionItemKind.Class }, + { label: 'EnumCheck', kind: Consts.CompletionItemKind.Class }, + ], + }, + marker2: { + completions: [ + { label: 'Enum', kind: Consts.CompletionItemKind.Class }, + { label: 'EnumCheck', kind: Consts.CompletionItemKind.Class }, + ], + }, + marker3: { + completions: [ + { label: 'Enum', kind: Consts.CompletionItemKind.Class }, + { label: 'EnumCheck', kind: Consts.CompletionItemKind.Class }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.excludeAlreadyImported.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.excludeAlreadyImported.fourslash.ts new file mode 100644 index 00000000..13d1fc54 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.excludeAlreadyImported.fourslash.ts @@ -0,0 +1,15 @@ +/// + +// @filename: test.py +//// from testLib import Enum, Enu[|/*marker1*/|] + +// @filename: testLib.py +//// class Enum: pass +//// class EnumCheck: pass + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [{ label: 'EnumCheck', kind: Consts.CompletionItemKind.Class 
}], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.interimFile.fourslash.disabled.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.interimFile.fourslash.disabled.ts new file mode 100644 index 00000000..4a73d159 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.interimFile.fourslash.disabled.ts @@ -0,0 +1,52 @@ +/// + +// @filename: test1.py +//// import al[|/*marker1*/|] + +// @filename: altair/__init__.py +// @library: true +//// """module docstring""" +//// +//// __all__ = [ "selection_interval" ] +//// +//// from .vegalite import ( +//// selection, +//// selection_interval +//// ) + +// @filename: altair/vegalite/__init__.py +// @library: true +//// def selection(): pass +//// def selection_interval(): pass + +// @filename: altair/py.typed +// @library: true +//// # has to contain something for file to be written + +{ + // Force interim file to be created + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { label: 'altair', kind: Consts.CompletionItemKind.Module, documentation: 'module docstring' }, + ], + }, + }); + + helper.replace(helper.BOF, helper.getMarkerByName('marker1').position, 'import altair as alt\n\nalt.'); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [{ label: 'selection_interval', kind: Consts.CompletionItemKind.Function }], + }, + }); + + // @ts-ignore + await helper.verifyCompletion('excluded', 'markdown', { + marker1: { + completions: [{ label: 'selection', kind: Consts.CompletionItemKind.Function }], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.privateNoPytyped.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.privateNoPytyped.fourslash.ts new file mode 100644 index 00000000..727b8aec --- 
/dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.privateNoPytyped.fourslash.ts @@ -0,0 +1,32 @@ +// @filename: test_private_completions.py +//// import testLib.[|/*marker*/|] + +// @filename: testLib/__init__.py +// @library: true +//// # empty + +// @filename: testLib/__privateclass.py +// @library: true +//// class PrivateClass(): +//// pass + +// @filename: testLib/publicclass.py +// @library: true +//// class PublicClass(): +//// pass + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker: { + completions: [ + { + label: 'publicclass', + kind: Consts.CompletionItemKind.Module, + }, + { + label: '__privateclass', + kind: Consts.CompletionItemKind.Module, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.pytyped.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.pytyped.fourslash.ts new file mode 100644 index 00000000..cf0c1b5b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.pytyped.fourslash.ts @@ -0,0 +1,32 @@ +// @filename: test_no_private_completions.py +//// import testLib.[|/*marker*/|] + +// @filename: testLib/__init__.py +// @library: true +//// # empty + +// @filename: testLib/__privateclass.py +// @library: true +//// class PrivateClass(): +//// pass + +// @filename: testLib/publicclass.py +// @library: true +//// class PublicClass(): +//// pass + +// @filename: testLib/py.typed +// @library: true +//// # has to contain something for file to be written + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker: { + completions: [ + { + label: 'publicclass', + kind: Consts.CompletionItemKind.Module, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.pytypedLocal.fourslash.ts 
b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.pytypedLocal.fourslash.ts new file mode 100644 index 00000000..2df02231 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.pytypedLocal.fourslash.ts @@ -0,0 +1,32 @@ +// @filename: test_no_private_completions.py +//// import testLib.[|/*marker*/|] + +// @filename: testLib/__init__.py +//// # empty + +// @filename: testLib/__privateclass.py +//// class PrivateClass(): +//// pass + +// @filename: testLib/publicclass.py +//// class PublicClass(): +//// pass + +// @filename: testLib/py.typed +//// # has to contain something for file to be written + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker: { + completions: [ + { + label: 'publicclass', + kind: Consts.CompletionItemKind.Module, + }, + { + label: '__privateclass', + kind: Consts.CompletionItemKind.Module, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.submodule.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.submodule.fourslash.ts new file mode 100644 index 00000000..686b3ed5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.import.submodule.fourslash.ts @@ -0,0 +1,26 @@ +/// + +// @filename: test.py +//// from submodule.[|/*marker1*/|] + +// @filename: pyrightconfig.json +//// { +//// "extraPaths": ["submodule"] +//// } + +// @filename: submodule/submodule/__init__.py +//// + +// @filename: submodule/submodule/submodule1.py +//// def test_function(): +//// pass + +// @filename: submodule/setup.py +//// + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [{ label: 'submodule1', kind: Consts.CompletionItemKind.Module }], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inList.fourslash.ts 
b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inList.fourslash.ts new file mode 100644 index 00000000..70ecd6f9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inList.fourslash.ts @@ -0,0 +1,33 @@ +/// + +// @filename: testList.py +//// a = 42 +//// x = [ +//// a.[|/*marker1*/|] +//// ] + +// @filename: testListWithCall.py +//// b = 42 +//// y = [ +//// print(b.[|/*marker2*/|]) +//// ] + +// @filename: testListWithCallMissingClosedParens.py +//// b = 42 +//// y = [ +//// print(b.[|/*marker3*/|] +//// ] +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [{ label: 'numerator', kind: Consts.CompletionItemKind.Property }], + }, + marker2: { + completions: [{ label: 'numerator', kind: Consts.CompletionItemKind.Property }], + }, + marker3: { + completions: [{ label: 'numerator', kind: Consts.CompletionItemKind.Property }], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.included.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.included.fourslash.ts new file mode 100644 index 00000000..bec83ca6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.included.fourslash.ts @@ -0,0 +1,12 @@ +/// + +// @filename: test.py +//// a = 42 +//// a.n[|/*marker1*/|] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [{ label: 'numerator', kind: Consts.CompletionItemKind.Property }], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.indexer.keys.getitem.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.indexer.keys.getitem.fourslash.ts new file mode 100644 index 00000000..0903833b --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.indexer.keys.getitem.fourslash.ts @@ -0,0 +1,103 @@ +/// + +// @filename: getitem.py +//// from typing import Literal, Generic, TypeVar, overload +//// class ClassA: +//// def __getitem__(self, key: Literal['a', 'b']): +//// pass +//// T = TypeVar("T") +//// class ClassB(Generic[T]): +//// @overload +//// def __getitem__(self, key: T): +//// pass +//// @overload +//// def __getitem__(self, key: Literal['foo']): +//// pass + +// @filename: test1.py +//// from typing import Literal +//// from getitem import ClassA, ClassB +//// a = ClassA() +//// a[[|/*marker1*/|]] +//// b = ClassB[Literal['x']]() +//// b[[|/*marker2*/|]] + +// @filename: test2.py +//// from typing import Literal +//// from getitem import ClassA, ClassB +//// a = ClassA() +//// a[[|"/*marker3*/"|]] +//// b = ClassB[Literal['x']]() +//// b[[|"/*marker4*/"|]] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: "'a'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: "'a'" }, + detail: Consts.IndexValueDetail, + }, + { + label: "'b'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: "'b'" }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker2: { + completions: [ + { + label: "'x'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: "'x'" }, + detail: Consts.IndexValueDetail, + }, + { + label: "'foo'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: "'foo'" }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker3: { + completions: [ + { + label: '"a"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: 
helper.getPositionRange('marker3'), newText: '"a"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"b"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker3'), newText: '"b"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + marker4: { + completions: [ + { + label: '"x"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker4'), newText: '"x"' }, + detail: Consts.IndexValueDetail, + }, + { + label: '"foo"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker4'), newText: '"foo"' }, + detail: Consts.IndexValueDetail, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.function.docFromStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.function.docFromStub.fourslash.ts new file mode 100644 index 00000000..aa67af9e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.function.docFromStub.fourslash.ts @@ -0,0 +1,28 @@ +/// + +// @filename: testFunctionWithVariableStub.py +//// import module1 +//// +//// module1.[|/*marker1*/displayhook|] + +// @filename: module1.py +//// def displayhook() -> None: +//// '''displayhook docs''' +//// ... 
+ +// @filename: module1.pyi +//// from typing import Callable +//// displayhook: Callable[[],Any] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'displayhook', + kind: Consts.CompletionItemKind.Variable, + documentation: '```python\ndef displayhook() -> Unknown\n```', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.overload.docFromScrWithStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.overload.docFromScrWithStub.fourslash.ts new file mode 100644 index 00000000..23a5b6a9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.overload.docFromScrWithStub.fourslash.ts @@ -0,0 +1,52 @@ +/// + +// @filename: overloads_client.py +//// from typing import overload +//// import moduleA +//// +//// class ChildA(moduleA.A): +//// @overload +//// def func(self, x: str) -> str: +//// pass +//// +//// @overload +//// def func(self, x: int) -> int: +//// pass +//// +//// +//// ChildA.f[|/*marker1*/|] + +// @filename: typings/moduleA.pyi +//// from typing import overload +//// class A: +//// @overload +//// def func(self, x: str) -> str: ... +//// +//// @overload +//// def func(self, x: int) -> int: ... 
+ +// @filename: typings/moduleA.py +//// from typing import overload +//// class A: +//// @overload +//// def func(self, x: str) -> str: +//// pass +//// +//// @overload +//// def func(self, x: int) -> int: +//// '''func docs''' +//// pass + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'func', + kind: Consts.CompletionItemKind.Method, + documentation: + '```python\ndef func(self: ChildA, x: str) -> str: ...\ndef func(self: ChildA, x: int) -> int: ...\n```\n---\nfunc docs', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.overload.docFromStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.overload.docFromStub.fourslash.ts new file mode 100644 index 00000000..14217e80 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.overload.docFromStub.fourslash.ts @@ -0,0 +1,53 @@ +/// + +// @filename: overloads_client.py +//// from typing import overload +//// import moduleA +//// +//// class ChildA(moduleA.A): +//// @overload +//// def func(self, x: str) -> str: +//// pass +//// +//// @overload +//// def func(self, x: int) -> int: +//// pass +//// +//// +//// ChildA.f[|/*marker1*/|] + +// @filename: typings/moduleA.pyi +//// from typing import overload +//// class A: +//// @overload +//// def func(self, x: str) -> str: ... +//// +//// @overload +//// def func(self, x: int) -> int: +//// '''func docs''' +//// ... 
+ +// @filename: typings/moduleA.py +//// from typing import overload +//// class A: +//// @overload +//// def func(self, x: str) -> str: +//// pass +//// +//// @overload +//// def func(self, x: int) -> int: +//// pass + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'func', + kind: Consts.CompletionItemKind.Method, + documentation: + '```python\ndef func(self: ChildA, x: str) -> str: ...\ndef func(self: ChildA, x: int) -> int: ...\n```\n---\nfunc docs', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.property.docFromSrc.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.property.docFromSrc.fourslash.ts new file mode 100644 index 00000000..c35a5345 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.property.docFromSrc.fourslash.ts @@ -0,0 +1,104 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import testLib +//// class ChildGetterDocs(testLib.ClassWithGetterDocs): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// +//// class ChildSetterDocs(testLib.ClassWithSetterDocs): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// +//// one = ChildGetterDocs(3) +//// one.lengt[|/*marker1*/|] +//// two = ChildSetterDocs(3) +//// two.lengt[|/*marker2*/|] + +// @filename: testLib/__init__.py +//// class ClassWithGetterDocs(object): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// """ +//// read 
property doc +//// """ +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// +//// class ClassWithSetterDocs(object): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// """ +//// setter property doc +//// """ +//// pass +//// + +// @filename: testLib/__init__.pyi +//// class ClassWithGetterDocs(object): +//// @property +//// def length(self) -> int: ... +//// @length.setter +//// def length(self, value) -> None: ... +//// +//// class ClassWithSetterDocs(object): +//// @property +//// def length(self) -> int: ... +//// @length.setter +//// def length(self, value) -> None: ... + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'length', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nlength: Unknown (property)\n```\n---\nread property doc', + }, + ], + }, + marker2: { + completions: [ + { + label: 'length', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nlength: Unknown (property)\n```\n---\nsetter property doc', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.property.docFromStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.property.docFromStub.fourslash.ts new file mode 100644 index 00000000..3723841d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.inherited.property.docFromStub.fourslash.ts @@ -0,0 +1,108 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import testLib +//// class ChildGetterDocs(testLib.ClassWithGetterDocs): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def 
length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// +//// class ChildSetterDocs(testLib.ClassWithSetterDocs): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// +//// one = ChildGetterDocs(3) +//// one.lengt[|/*marker1*/|] +//// two = ChildSetterDocs(3) +//// two.lengt[|/*marker2*/|] + +// @filename: testLib/__init__.py +// @library: true +//// class ClassWithGetterDocs(object): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// +//// class ClassWithSetterDocs(object): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// + +// @filename: testLib/__init__.pyi +// @library: true +//// class ClassWithGetterDocs(object): +//// @property +//// def length(self) -> int: +//// """ +//// read property doc +//// """ +//// ... +//// @length.setter +//// def length(self, value) -> None: ... +//// +//// class ClassWithSetterDocs(object): +//// @property +//// def length(self) -> int: ... +//// @length.setter +//// def length(self, value) -> None: +//// """ +//// setter property doc +//// """ +//// ... 
+ +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'length', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nlength: Unknown (property)\n```\n---\nread property doc', + }, + ], + }, + marker2: { + completions: [ + { + label: 'length', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nlength: Unknown (property)\n```\n---\nsetter property doc', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.keywords.pythonVersion.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.keywords.pythonVersion.fourslash.ts new file mode 100644 index 00000000..4cd300cf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.keywords.pythonVersion.fourslash.ts @@ -0,0 +1,72 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "executionEnvironments": [ +//// { "root": "python33", "pythonVersion": "3.3" }, +//// { "root": "python35", "pythonVersion": "3.5" }, +//// { "root": "python310", "pythonVersion": "3.10" }, +//// ] +//// } + +// @filename: python33/test.py +//// def foo(): +//// [|/*python33*/|] + +// @filename: python35/test.py +//// def foo(): +//// [|/*python35*/|] + +// @filename: python310/test.py +//// def foo(): +//// [|/*python310*/|] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + python33: { + completions: [ + { label: 'def', kind: Consts.CompletionItemKind.Keyword }, + { label: 'import', kind: Consts.CompletionItemKind.Keyword }, + ], + }, + python35: { + completions: [ + { label: 'def', kind: Consts.CompletionItemKind.Keyword }, + { label: 'import', kind: Consts.CompletionItemKind.Keyword }, + { label: 'async', kind: Consts.CompletionItemKind.Keyword }, + { label: 'await', kind: Consts.CompletionItemKind.Keyword }, + ], + }, + 
python310: { + completions: [ + { label: 'def', kind: Consts.CompletionItemKind.Keyword }, + { label: 'import', kind: Consts.CompletionItemKind.Keyword }, + { label: 'async', kind: Consts.CompletionItemKind.Keyword }, + { label: 'await', kind: Consts.CompletionItemKind.Keyword }, + { label: 'case', kind: Consts.CompletionItemKind.Keyword }, + { label: 'match', kind: Consts.CompletionItemKind.Keyword }, + ], + }, + }); + + // @ts-ignore + await helper.verifyCompletion('excluded', 'markdown', { + python33: { + completions: [ + { label: 'async', kind: Consts.CompletionItemKind.Keyword }, + { label: 'await', kind: Consts.CompletionItemKind.Keyword }, + { label: 'case', kind: Consts.CompletionItemKind.Keyword }, + { label: 'match', kind: Consts.CompletionItemKind.Keyword }, + ], + }, + python35: { + completions: [ + { label: 'case', kind: Consts.CompletionItemKind.Keyword }, + { label: 'match', kind: Consts.CompletionItemKind.Keyword }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.libCodeAndStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.libCodeAndStub.fourslash.ts new file mode 100644 index 00000000..11d6d332 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.libCodeAndStub.fourslash.ts @@ -0,0 +1,79 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import testLib +//// obj = testLib.[|/*marker1*/Validator|]() +//// obj.is[|/*marker2*/|] +//// obj.read[|/*marker3*/|] + +// @filename: testLib/__init__.py +// @library: true +//// class Validator: +//// '''The validator class''' +//// def is_valid(self, text): +//// '''Checks if the input string is valid.''' +//// return True +//// @property +//// def read_only_prop(self): +//// '''The read-only property.''' +//// return True +//// @property +//// def read_write_prop(self): +//// '''The read-write 
property.''' +//// return True +//// @read_write_prop.setter +//// def read_write_prop(self, val): +//// pass + +// @filename: testLib/__init__.pyi +// @library: true +//// class Validator: +//// def is_valid(self, text: str) -> bool: ... +//// @property +//// def read_only_prop(self) -> bool: ... +//// @property +//// def read_write_prop(self) -> bool: ... +//// @read_write_prop.setter +//// def read_write_prop(self, val: bool): ... + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'Validator', + kind: Consts.CompletionItemKind.Class, + documentation: '```python\nclass Validator()\n```\n---\nThe validator class', + }, + ], + }, + marker2: { + completions: [ + { + label: 'is_valid', + kind: Consts.CompletionItemKind.Method, + documentation: + '```python\ndef is_valid(text: str) -> bool\n```\n---\nChecks if the input string is valid.', + }, + ], + }, + marker3: { + completions: [ + { + label: 'read_only_prop', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nread_only_prop: bool (property)\n```\n---\nThe read-only property.', + }, + { + label: 'read_write_prop', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nread_write_prop: bool (property)\n```\n---\nThe read-write property.', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.libCodeNoStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.libCodeNoStub.fourslash.ts new file mode 100644 index 00000000..3b5c74f9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.libCodeNoStub.fourslash.ts @@ -0,0 +1,69 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import testLib +//// obj = testLib.[|/*marker1*/Validator|]() +//// obj.is[|/*marker2*/|] +//// obj.read[|/*marker3*/|] + +// @filename: 
testLib/__init__.py +// @library: true +//// class Validator: +//// '''The validator class''' +//// def is_valid(self, text: str) -> bool: +//// '''Checks if the input string is valid.''' +//// return True +//// @property +//// def read_only_prop(self) -> bool: +//// '''The read-only property.''' +//// return True +//// @property +//// def read_write_prop(self) -> bool: +//// '''The read-write property.''' +//// return True +//// @read_write_prop.setter +//// def read_write_prop(self, val: bool): +//// '''The read-write property.''' +//// pass + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'Validator', + kind: Consts.CompletionItemKind.Class, + documentation: '```python\nclass Validator()\n```\n---\nThe validator class', + }, + ], + }, + marker2: { + completions: [ + { + label: 'is_valid', + kind: Consts.CompletionItemKind.Method, + documentation: + '```python\ndef is_valid(text: str) -> bool\n```\n---\nChecks if the input string is valid.', + }, + ], + }, + marker3: { + completions: [ + { + label: 'read_only_prop', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nread_only_prop: bool (property)\n```\n---\nThe read-only property.', + }, + { + label: 'read_write_prop', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nread_write_prop: bool (property)\n```\n---\nThe read-write property.', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.libStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.libStub.fourslash.ts new file mode 100644 index 00000000..50ed4320 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.libStub.fourslash.ts @@ -0,0 +1,69 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import testLib +//// obj = 
testLib.[|/*marker1*/Validator|]() +//// obj.is[|/*marker2*/|] +//// obj.read[|/*marker3*/|] + +// @filename: testLib/__init__.pyi +// @library: true +//// class Validator: +//// '''The validator class''' +//// def is_valid(self, text: str) -> bool: +//// '''Checks if the input string is valid.''' +//// pass +//// @property +//// def read_only_prop(self) -> bool: +//// '''The read-only property.''' +//// pass +//// @property +//// def read_write_prop(self) -> bool: +//// '''The read-write property.''' +//// pass +//// @read_write_prop.setter +//// def read_write_prop(self, val: bool): +//// '''The read-write property.''' +//// pass + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'Validator', + kind: Consts.CompletionItemKind.Class, + documentation: '```python\nclass Validator()\n```\n---\nThe validator class', + }, + ], + }, + marker2: { + completions: [ + { + label: 'is_valid', + kind: Consts.CompletionItemKind.Method, + documentation: + '```python\ndef is_valid(text: str) -> bool\n```\n---\nChecks if the input string is valid.', + }, + ], + }, + marker3: { + completions: [ + { + label: 'read_only_prop', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nread_only_prop: bool (property)\n```\n---\nThe read-only property.', + }, + { + label: 'read_write_prop', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nread_write_prop: bool (property)\n```\n---\nThe read-write property.', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.literals.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.literals.fourslash.ts new file mode 100644 index 00000000..9e066b9d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.literals.fourslash.ts @@ -0,0 +1,33 @@ +/// + +// @filename: test1.py +//// from typing import Literal +//// a: 
Literal["Hello"] = "He[|/*marker1*/|] + +// @filename: test2.py +//// from typing import Literal +//// a: Literal["Hello"] = [|/*marker2*/|] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: '"Hello"', + kind: Consts.CompletionItemKind.Constant, + }, + ], + }, + marker2: { + completions: [ + { + label: '"Hello"', + kind: Consts.CompletionItemKind.Constant, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.localCode.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.localCode.fourslash.ts new file mode 100644 index 00000000..aac61c6a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.localCode.fourslash.ts @@ -0,0 +1,74 @@ +/// + +// @filename: test.py +//// import testLib +//// obj = testLib.[|/*marker1*/Validator|]() +//// obj.is[|/*marker2*/|] +//// obj.read[|/*marker3*/|] +//// testLib.Validator.is[|/*marker4*/|] + +// @filename: testLib/__init__.py +//// class Validator: +//// '''The validator class''' +//// def is_valid(self, text: str) -> bool: +//// '''Checks if the input string is valid.''' +//// return True +//// @property +//// def read_only_prop(self) -> bool: +//// '''The read-only property.''' +//// return True +//// @property +//// def read_write_prop(self) -> bool: +//// '''The read-write property.''' +//// return True +//// @read_write_prop.setter +//// def read_write_prop(self, val: bool): +//// '''The read-write property.''' +//// pass + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'Validator', + kind: Consts.CompletionItemKind.Class, + documentation: '```python\nclass Validator()\n```\n---\nThe validator class', + }, + ], + }, + marker2: { + completions: [ + { + label: 'is_valid', + kind: 
Consts.CompletionItemKind.Method, + documentation: + '```python\ndef is_valid(text: str) -> bool\n```\n---\nChecks if the input string is valid.', + }, + ], + }, + marker3: { + completions: [ + { + label: 'read_only_prop', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nread_only_prop: bool (property)\n```\n---\nThe read-only property.', + }, + { + label: 'read_write_prop', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nread_write_prop: bool (property)\n```\n---\nThe read-write property.', + }, + ], + }, + marker4: { + completions: [ + { + label: 'is_valid', + kind: Consts.CompletionItemKind.Method, + documentation: + '```python\ndef is_valid(self: Validator, text: str) -> bool\n```\n---\nChecks if the input string is valid.', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.moduleContext.UnknownMemberOnInstance.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.moduleContext.UnknownMemberOnInstance.fourslash.ts new file mode 100644 index 00000000..8758eda6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.moduleContext.UnknownMemberOnInstance.fourslash.ts @@ -0,0 +1,17 @@ +/// + +// @filename: test.py +//// class Model: +//// pass +//// +//// def some_func1(a: Model): +//// x = a.unknownName.[|/*marker1*/|] +//// pass + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [], + memberAccessInfo: { lastKnownModule: 'test', lastKnownMemberName: 'Model', unknownMemberName: 'unknownName' }, + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.moduleContext.UnknownStaticFunctionOnClass.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.moduleContext.UnknownStaticFunctionOnClass.fourslash.ts new file mode 100644 index 00000000..6545229c --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.moduleContext.UnknownStaticFunctionOnClass.fourslash.ts @@ -0,0 +1,57 @@ +/// + +// @filename: test.py +//// class Model: +//// @staticmethod +//// def foo( value ): +//// return value +//// +//// +//// x = Model.foo(unknownValue).[|/*marker1*/|] +//// pass +//// +//// y = Model.unknownMember.[|/*marker2*/|] +//// pass +//// +//// def some_func1(a: Model): +//// x = a.unknownMember.[|/*marker3*/|] +//// pass +//// +//// Model.unknownValue.[|/*marker4*/|] +//// +//// UnkownModel.unknownValue.[|/*marker5*/|] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + // tests: _getLastKnownModule(): if (curNode.nodeType === ParseNodeType.MemberAccess && curNode.memberName) + marker1: { + completions: [], + memberAccessInfo: { lastKnownModule: 'test', lastKnownMemberName: 'foo', unknownMemberName: 'foo' }, + }, + // tests: _getLastKnownModule(): else if (curNode.nodeType === ParseNodeType.Name && isClassAndInstantiable(curType)) + marker2: { + completions: [], + memberAccessInfo: { + lastKnownModule: 'test', + lastKnownMemberName: 'Model', + unknownMemberName: 'unknownMember', + }, + }, + // tests: _getLastKnownModule(): else if (curNode.nodeType === ParseNodeType.Name && isObject(curType)) + marker3: { + completions: [], + memberAccessInfo: { + lastKnownModule: 'test', + lastKnownMemberName: 'Model', + unknownMemberName: 'unknownMember', + }, + }, + marker4: { + completions: [], + memberAccessInfo: { lastKnownModule: 'test', lastKnownMemberName: 'Model', unknownMemberName: 'unknownValue' }, + }, + marker5: { + completions: [], + memberAccessInfo: {}, + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.moduleContext.libCodeNoStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.moduleContext.libCodeNoStub.fourslash.ts new file mode 100644 index 00000000..07b315b0 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.moduleContext.libCodeNoStub.fourslash.ts @@ -0,0 +1,30 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import testnumpy +//// obj = testnumpy.random.randint("foo").[|/*marker1*/|] + +// @filename: testnumpy/__init__.py +// @library: true +//// from . import random + +// @filename: testnumpy/random/__init__.py +// @library: true +//// __all__ = ['randint'] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [], + memberAccessInfo: { + lastKnownModule: 'testnumpy.random', + lastKnownMemberName: 'random', + unknownMemberName: 'randint', + }, + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.overloads.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.overloads.fourslash.ts new file mode 100644 index 00000000..a2e95ede --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.overloads.fourslash.ts @@ -0,0 +1,40 @@ +/// + +// @filename: overloads_client.py +//// import overloads +//// overloads.f[|/*marker1*/|] + +// @filename: typings/overloads.pyi +//// from typing import overload +//// +//// @overload +//// def func(x: str) -> str: ...[|/*marker2*/|] +//// +//// @overload +//// def func(x: bytes) -> bytes: +//// ...[|/*marker3*/|] +//// +//// @overload +//// def func(x: int) -> int: +//// '''func docs''' +//// pass + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'func', + kind: Consts.CompletionItemKind.Function, + documentation: + '```python\ndef func(x: str) -> str: ...\ndef func(x: bytes) -> bytes: ...\ndef func(x: int) -> int: ...\n```\n---\nfunc docs', + }, + ], + }, +}); + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker2: { completions: [] }, + 
marker3: { completions: [] }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.default.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.default.fourslash.ts new file mode 100644 index 00000000..dc9fb1fb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.default.fourslash.ts @@ -0,0 +1,47 @@ +/// + +// @filename: test.py +//// class B: +//// def method1(self, a: str = 'hello', b: int = 1234): +//// pass +//// +//// def method2(self, a=None): +//// pass +//// +//// def method3(self, a=1234, b=object()): +//// pass +//// +//// class C(B): +//// def [|method/*marker*/|] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'method1', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: "method1(self, a: str = 'hello', b: int = 1234):\n return super().method1(a, b)", + }, + }, + { + label: 'method2', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method2(self, a=None):\n return super().method2(a)', + }, + }, + { + label: 'method3', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method3(self, a=1234, b=object()):\n return super().method3(a, b)', + }, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.default.importStub.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.default.importStub.ts new file mode 100644 index 00000000..149520ca --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.default.importStub.ts @@ -0,0 +1,50 @@ +/// + +// @filename: main.py +//// from test import B +//// +//// class C(B): +//// def [|method/*marker*/|] + 
+// @filename: test.pyi +//// class B: +//// def method1(self, a: str = 'hello', b: int = 1234): +//// pass +//// +//// def method2(self, a=None): +//// pass +//// +//// def method3(self, a=1234, b=object()): +//// pass + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'method1', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method1(self, a: str = ..., b: int = ...):\n return super().method1(a=a, b=b)', + }, + }, + { + label: 'method2', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method2(self, a=...):\n return super().method2(a=a)', + }, + }, + { + label: 'method3', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method3(self, a=..., b=...):\n return super().method3(a=a, b=b)', + }, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.default.imported.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.default.imported.fourslash.ts new file mode 100644 index 00000000..b79d11c7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.default.imported.fourslash.ts @@ -0,0 +1,61 @@ +/// + +// @filename: test.py +//// from foo import B +//// +//// class C(B): +//// def [|method/*marker*/|] + +// @filename: foo.py +//// class B: +//// def method1(self, a: str = 'hello', b: int = 1234): +//// pass +//// +//// def method2(self, a=None): +//// pass +//// +//// def method3(self, a=1234, b=object()): +//// pass +//// +//// def method4(self, a=+1234, b=-1.23j, c=1+2j): +//// pass + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'method1', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: 
helper.getPositionRange('marker'), + newText: "method1(self, a: str = 'hello', b: int = 1234):\n return super().method1(a, b)", + }, + }, + { + label: 'method2', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method2(self, a=None):\n return super().method2(a)', + }, + }, + { + label: 'method3', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method3(self, a=1234, b=...):\n return super().method3(a, b)', + }, + }, + { + label: 'method4', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method4(self, a=+1234, b=-1.23j, c=1 + 2j):\n return super().method4(a, b, c)', + }, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.default.stub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.default.stub.fourslash.ts new file mode 100644 index 00000000..227dee7a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.default.stub.fourslash.ts @@ -0,0 +1,47 @@ +/// + +// @filename: test.pyi +//// class B: +//// def method1(self, a: str = 'hello', b: int = 1234): +//// pass +//// +//// def method2(self, a=None): +//// pass +//// +//// def method3(self, a=1234, b=object()): +//// pass +//// +//// class C(B): +//// def [|method/*marker*/|] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'method1', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method1(self, a: str = ..., b: int = ...): ...', + }, + }, + { + label: 'method2', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method2(self, a=...): ...', + }, + }, + { + label: 'method3', + kind: 
Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method3(self, a=..., b=...): ...', + }, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.fourslash.ts new file mode 100644 index 00000000..a7347ca6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.fourslash.ts @@ -0,0 +1,47 @@ +/// + +// @filename: test.py +//// class B: +//// def method1(self, a: str, *args, **kwargs): +//// pass +//// +//// def method2(self, b, /, *args): +//// pass +//// +//// def method3(self, b, *, c: str): +//// pass +//// +//// class C(B): +//// def [|method/*marker*/|] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'method1', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method1(self, a: str, *args, **kwargs):\n return super().method1(a, *args, **kwargs)', + }, + }, + { + label: 'method2', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method2(self, b, /, *args):\n return super().method2(b, *args)', + }, + }, + { + label: 'method3', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method3(self, b, *, c: str):\n return super().method3(b, c=c)', + }, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.property.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.property.fourslash.ts new file mode 100644 index 00000000..09b92c93 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.property.fourslash.ts @@ -0,0 +1,31 @@ +/// 
+ +// @filename: test.py +//// class B: +//// @property +//// def prop(self): +//// return 1 +//// +//// @prop.setter +//// def prop(self, value): +//// pass +//// +//// class C(B): +//// @property +//// def [|pr/*marker*/|] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'prop', + kind: Consts.CompletionItemKind.Property, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'prop(self):\n return super().prop', + }, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.property.stub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.property.stub.fourslash.ts new file mode 100644 index 00000000..c8370e8a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.property.stub.fourslash.ts @@ -0,0 +1,31 @@ +/// + +// @filename: test.pyi +//// class B: +//// @property +//// def prop(self): +//// return 1 +//// +//// @prop.setter +//// def prop(self, value): +//// pass +//// +//// class C(B): +//// @property +//// def [|pr/*marker*/|] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'prop', + kind: Consts.CompletionItemKind.Property, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'prop(self): ...', + }, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.staticAndClassmethod.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.staticAndClassmethod.fourslash.ts new file mode 100644 index 00000000..33f75d78 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.staticAndClassmethod.fourslash.ts @@ -0,0 +1,64 @@ +/// + +// @filename: test.py +//// class A: +//// @staticmethod +//// def smethod(a, b): +//// 
pass +//// +//// @classmethod +//// def cmethod(cls, a): +//// pass +//// +//// class B1(A): +//// def [|m/*marker1*/|] +//// +//// class B2(A): +//// @staticmethod +//// def [|m/*marker2*/|] +//// +//// class B3(A): +//// @classmethod +//// def [|m/*marker3*/|] + +{ + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker2: { + completions: [ + { + label: 'smethod', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker2'), + newText: 'smethod(a, b):\n return super().smethod(a, b)', + }, + }, + ], + }, + marker3: { + completions: [ + { + label: 'cmethod', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker3'), + newText: 'cmethod(cls, a):\n return super().cmethod(a)', + }, + }, + ], + }, + }); + + // @ts-ignore + await helper.verifyCompletion('excluded', 'markdown', { + marker1: { + completions: [ + { label: 'smethod', kind: undefined }, + { label: 'cmethod', kind: undefined }, + ], + }, + marker2: { completions: [{ label: 'cmethod', kind: undefined }] }, + marker3: { completions: [{ label: 'smethod', kind: undefined }] }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.stub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.stub.fourslash.ts new file mode 100644 index 00000000..63818a78 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override.stub.fourslash.ts @@ -0,0 +1,47 @@ +/// + +// @filename: test.pyi +//// class B: +//// def method1(self, a: str, *args, **kwargs): +//// pass +//// +//// def method2(self, b, /, *args): +//// pass +//// +//// def method3(self, b, *, c: str): +//// pass +//// +//// class C(B): +//// def [|method/*marker*/|] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'method1', + kind: Consts.CompletionItemKind.Method, 
+ textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method1(self, a: str, *args, **kwargs): ...', + }, + }, + { + label: 'method2', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method2(self, b, /, *args): ...', + }, + }, + { + label: 'method3', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'method3(self, b, *, c: str): ...', + }, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override2.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override2.fourslash.ts new file mode 100644 index 00000000..9014b960 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.override2.fourslash.ts @@ -0,0 +1,71 @@ +/// + +// @filename: test.py +//// class B(list): +//// def [|append/*marker*/|] + +// @filename: test1.py +//// class A: +//// def __init__(self, *args, **kwargs): +//// pass +//// +//// class B(A): +//// def [|__init__/*marker1*/|] + +// @filename: test2.py +//// class A: +//// def [|__class__/*marker2*/|] + +// @filename: test3.py +//// class A: +//// def [|__call__/*marker3*/|] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'append', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker'), + newText: 'append(self, object: _T, /) -> None:\n return super().append(object)', + }, + }, + ], + }, + marker1: { + completions: [ + { + label: '__init__', + kind: Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker1'), + newText: '__init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)', + }, + }, + ], + }, + marker3: { + completions: [ + { + label: '__call__', + kind: 
Consts.CompletionItemKind.Method, + textEdit: { + range: helper.getPositionRange('marker3'), + newText: '__call__(self, *args: Any, **kwds: Any) -> Any:\n ${0:pass}', + }, + }, + ], + }, + }); + + // @ts-ignore + await helper.verifyCompletion('excluded', 'markdown', { + // Only method shows up. __class__ is property + marker2: { completions: [{ label: '__class__', kind: undefined }] }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.parameters.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.parameters.fourslash.ts new file mode 100644 index 00000000..21a910f3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.parameters.fourslash.ts @@ -0,0 +1,23 @@ +/// + +// @filename: test.py +//// def Method(a, b, c): +//// pass +//// +//// Method([|/*marker1*/|]"[|/*marker2*/|]hello[|/*marker3*/|]"[|/*marker4*/|]) + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [{ label: 'a=', kind: Consts.CompletionItemKind.Variable }], + }, + marker4: { + completions: [{ label: 'b=', kind: Consts.CompletionItemKind.Variable }], + }, +}); + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker2: { completions: [] }, + marker3: { completions: [] }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.params.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.params.fourslash.ts new file mode 100644 index 00000000..db77a8a0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.params.fourslash.ts @@ -0,0 +1,72 @@ +/// + +// @filename: test.py +//// from typing import TypedDict, Unpack, Any +//// +//// class Movie(TypedDict): +//// key1: str +//// key2: int +//// +//// def method(param1=None, param2='active', param3=None): +//// pass +//// +//// met/*marker1*/hod /*marker2*/ ( 
/*marker3*/ param2 = 'test') +//// +//// def method2(param1: int, **kwargs: Unpack[Movie]): +//// pass +//// +//// method2(p/*marker4*/, k/*marker5*/) +//// +//// def method3(param1: int, **kwargs: Any): +//// pass +//// +//// method3(p/*marker6*/, k/*marker7*/) +//// +//// def method4(param1: int, /, param2: int): +//// pass +//// +//// method4(p/*marker8*/) + +// @ts-ignore +await helper.verifyCompletion('excluded', 'markdown', { + marker1: { + completions: [{ label: 'param1', kind: undefined }], + }, + marker2: { + completions: [{ label: 'param1', kind: undefined }], + }, + marker4: { + completions: [{ label: 'key1', kind: undefined }], + }, + marker5: { + completions: [{ label: 'param1', kind: undefined }], + }, + marker7: { + completions: [{ label: 'key1', kind: undefined }], + }, + marker8: { + completions: [{ label: 'param1', kind: undefined }], + }, +}); + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker3: { + completions: [{ label: 'param1=', kind: Consts.CompletionItemKind.Variable }], + }, + marker4: { + completions: [{ label: 'param1=', kind: Consts.CompletionItemKind.Variable }], + }, + marker5: { + completions: [ + { label: 'key2=', kind: Consts.CompletionItemKind.Variable }, + { label: 'key1=', kind: Consts.CompletionItemKind.Variable }, + ], + }, + marker6: { + completions: [{ label: 'param1=', kind: Consts.CompletionItemKind.Variable }], + }, + marker8: { + completions: [{ label: 'param2=', kind: Consts.CompletionItemKind.Variable }], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.parentFolder.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.parentFolder.fourslash.ts new file mode 100644 index 00000000..53ca2dcf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.parentFolder.fourslash.ts @@ -0,0 +1,17 @@ +/// + +// @filename: python/test.py +//// from d/*marker*/ + +// @filename: 
python/data_processing/__init__.py +//// #empty + +// @filename: python/data_processing/create_fullname.py +//// #empty + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [{ label: 'data_processing', kind: Consts.CompletionItemKind.Module }], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.parentFolders.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.parentFolders.fourslash.ts new file mode 100644 index 00000000..546bf6fe --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.parentFolders.fourslash.ts @@ -0,0 +1,77 @@ +/// + +// @filename: module.py +//// # empty + +// @filename: nested1/__init__.py +//// # empty + +// @filename: nested1/module.py +//// # empty + +// @filename: nested1/nested2/__init__.py +//// # empty + +// @filename: nested1/nested2/test1.py +//// from .[|/*marker1*/|] + +// @filename: nested1/nested2/test2.py +//// from ..[|/*marker2*/|] + +// @filename: nested1/nested2/test3.py +//// from ..nested2.[|/*marker3*/|] + +// @filename: nested1/nested2/test4.py +//// from ...nested1.[|/*marker4*/|] + +// @filename: nested1/nested2/test5.py +//// from ...nested1.nested2.[|/*marker5*/|] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { label: 'import', kind: Consts.CompletionItemKind.Keyword }, + { label: 'test1', kind: Consts.CompletionItemKind.Module }, + { label: 'test2', kind: Consts.CompletionItemKind.Module }, + { label: 'test3', kind: Consts.CompletionItemKind.Module }, + { label: 'test4', kind: Consts.CompletionItemKind.Module }, + { label: 'test5', kind: Consts.CompletionItemKind.Module }, + ], + }, + marker2: { + completions: [ + { label: 'import', kind: Consts.CompletionItemKind.Keyword }, + { label: 'nested2', kind: 
Consts.CompletionItemKind.Module }, + { label: 'module', kind: Consts.CompletionItemKind.Module }, + ], + }, + marker3: { + completions: [ + { label: 'test1', kind: Consts.CompletionItemKind.Module }, + { label: 'test2', kind: Consts.CompletionItemKind.Module }, + { label: 'test3', kind: Consts.CompletionItemKind.Module }, + { label: 'test4', kind: Consts.CompletionItemKind.Module }, + { label: 'test5', kind: Consts.CompletionItemKind.Module }, + ], + }, + marker4: { + completions: [ + { label: 'nested2', kind: Consts.CompletionItemKind.Module }, + { label: 'module', kind: Consts.CompletionItemKind.Module }, + ], + }, + marker5: { + completions: [ + { label: 'test1', kind: Consts.CompletionItemKind.Module }, + { label: 'test2', kind: Consts.CompletionItemKind.Module }, + { label: 'test3', kind: Consts.CompletionItemKind.Module }, + { label: 'test4', kind: Consts.CompletionItemKind.Module }, + { label: 'test5', kind: Consts.CompletionItemKind.Module }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.plainText.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.plainText.fourslash.ts new file mode 100644 index 00000000..57c810a4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.plainText.fourslash.ts @@ -0,0 +1,34 @@ +/// + +// @filename: test.py +//// import time +//// time.gmt[|/*marker1*/|] +//// aaaaaa = 100 +//// aaaaa[|/*marker2*/|] +//// def some_func1(a): +//// '''some function docs''' +//// pass +//// def some_func2(a): +//// '''another function docs''' +//// pass +//// some_fun[|/*marker3*/|] + +// @ts-ignore +await helper.verifyCompletion('exact', 'plaintext', { + marker1: { completions: [{ label: 'gmtime', kind: Consts.CompletionItemKind.Function }] }, + marker2: { completions: [{ label: 'aaaaaa', kind: Consts.CompletionItemKind.Variable }] }, + marker3: { + completions: [ + { + label: 'some_func1', + kind: 
Consts.CompletionItemKind.Function, + documentation: 'def some_func1(a: Unknown) -> None\n\nsome function docs', + }, + { + label: 'some_func2', + kind: Consts.CompletionItemKind.Function, + documentation: 'def some_func2(a: Unknown) -> None\n\nanother function docs', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.private.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.private.fourslash.ts new file mode 100644 index 00000000..fed9c62b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.private.fourslash.ts @@ -0,0 +1,64 @@ +/// + +// @filename: test1.py +//// def __hello(): +//// pass +//// +//// __hello[|/*marker1*/|] + +// @filename: test2.pyi +//// from typing import Union +//// +//// Union[|/*marker2*/|] +//// +//// def __hello1(): +//// pass +//// +//// __hello1[|/*marker3*/|] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + // Private symbol in same file suggested. + { + label: '__hello', + kind: Consts.CompletionItemKind.Function, + }, + ], + }, + marker2: { + completions: [ + // No Auto-import on Union exists. + { + label: 'Union', + kind: Consts.CompletionItemKind.Class, + }, + ], + }, + marker3: { + completions: [ + { + label: '__hello1', + kind: Consts.CompletionItemKind.Function, + }, + ], + }, + }); + + // @ts-ignore + await helper.verifyCompletion('excluded', 'markdown', { + marker3: { + completions: [ + // Private symbol from other file not suggested. 
+ { + label: '__hello', + kind: Consts.CompletionItemKind.Function, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.property.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.property.fourslash.ts new file mode 100644 index 00000000..b317ba27 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.property.fourslash.ts @@ -0,0 +1,28 @@ +/// + +// @filename: test.py +//// class C: +//// def __init__(self): +//// self._x = None +//// +//// @property +//// def prop(self): +//// pass +//// +//// @prop.setter +//// def prop(self, value): +//// pass +//// +//// C()./*marker*/prop + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'prop', + kind: Consts.CompletionItemKind.Property, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.propertyDocStrings.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.propertyDocStrings.fourslash.ts new file mode 100644 index 00000000..e1591c66 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.propertyDocStrings.fourslash.ts @@ -0,0 +1,64 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// class ClassWithGetterDocs(object): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// """ +//// read property doc +//// """ +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// +//// one = ClassWithGetterDocs(3) +//// one.lengt[|/*marker1*/|] +//// +//// class ClassWithSetterDocs(object): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter 
+//// def length(self, value): +//// """ +//// setter property doc +//// """ +//// pass +//// +//// two = ClassWithSetterDocs(3) +//// two.lengt[|/*marker2*/|] +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'length', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nlength: Unknown (property)\n```\n---\nread property doc', + }, + ], + }, + marker2: { + completions: [ + { + label: 'length', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nlength: Unknown (property)\n```\n---\nsetter property doc', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.self.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.self.fourslash.ts new file mode 100644 index 00000000..935b7665 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.self.fourslash.ts @@ -0,0 +1,43 @@ +/// + +// @filename: test.py +//// class Foo: +//// def __init__(self): +//// self.var1 = 3 +//// def method1(self): +//// '''Method 1.''' +//// pass +//// @property +//// def prop1(self): +//// '''Property 1.''' +//// return 2 +//// def new_method(self): +//// self.[|/*marker1*/|] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'method1', + kind: Consts.CompletionItemKind.Method, + documentation: '```python\ndef method1() -> None\n```\n---\nMethod 1.', + }, + { + label: 'new_method', + kind: Consts.CompletionItemKind.Method, + documentation: '```python\ndef new_method() -> None\n```', + }, + { + label: 'prop1', + kind: Consts.CompletionItemKind.Property, + documentation: '```python\nprop1: Literal[2] (property)\n```\n---\nProperty 1.', + }, + { + label: 'var1', + kind: Consts.CompletionItemKind.Variable, + documentation: '```python\nvar1: int\n```', + }, + ], + }, +}); diff --git 
a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.stringLiteral.escape.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.stringLiteral.escape.fourslash.ts new file mode 100644 index 00000000..64f33dca --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.stringLiteral.escape.fourslash.ts @@ -0,0 +1,71 @@ +/// + +// @filename: test.py +//// from typing import Literal +//// +//// def method(foo: Literal["'\"", '"\'', "'mixed'"]): +//// pass +//// +//// method([|/*marker1*/|]) +//// method([|"/*marker2*/"|]) +//// method([|'/*marker3*/'|]) + +{ + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: '"\'\\""', + kind: Consts.CompletionItemKind.Constant, + }, + { + label: '"\\"\'"', + kind: Consts.CompletionItemKind.Constant, + }, + { + label: '"\'mixed\'"', + kind: Consts.CompletionItemKind.Constant, + }, + ], + }, + marker2: { + completions: [ + { + label: '"\'\\""', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: '"\'\\""' }, + }, + { + label: '"\\"\'"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: '"\\"\'"' }, + }, + { + label: '"\'mixed\'"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: '"\'mixed\'"' }, + }, + ], + }, + marker3: { + completions: [ + { + label: "'\\'\"'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker3'), newText: "'\\'\"'" }, + }, + { + label: "'\"\\''", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker3'), newText: "'\"\\''" }, + }, + { + label: "'\\'mixed\\''", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker3'), newText: "'\\'mixed\\''" }, + }, + ], + }, + 
}); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.stringLiteral.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.stringLiteral.fourslash.ts new file mode 100644 index 00000000..183a7256 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.stringLiteral.fourslash.ts @@ -0,0 +1,79 @@ +/// + +// @filename: test.py +//// import os +//// from typing import Literal, TypedDict, Union +//// +//// def method(a, b, c): +//// pass +//// +//// method("os.[|/*marker1*/|]") +//// +//// class Movie(TypedDict): +//// name: str +//// age: int +//// +//// m = Movie(name="hello", age=10) +//// m[[|"/*marker2*/"|]] +//// +//// a: Union[Literal["hello"], Literal["hallo"]] +//// a = [|"/*marker3*/"|] +//// +//// class Nested(TypedDict): +//// movie: Movie +//// +//// n: Nested = {"movie": {"name": "hello", "age": 10}} +//// n["movie"][[|/*marker4*/|]] + +{ + const marker2Range = helper.getPositionRange('marker2'); + const marker3Range = helper.getPositionRange('marker3'); + const marker4Range = helper.getPositionRange('marker4'); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { completions: [] }, + marker2: { + completions: [ + { + label: '"name"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker2Range, newText: '"name"' }, + }, + { + label: '"age"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker2Range, newText: '"age"' }, + }, + ], + }, + marker3: { + completions: [ + { + label: '"hello"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker3Range, newText: '"hello"' }, + }, + { + label: '"hallo"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker3Range, newText: '"hallo"' }, + }, + ], + }, + marker4: { + completions: [ + { + label: '"name"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker4Range, newText: '"name"' }, + }, 
+ { + label: '"age"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker4Range, newText: '"age"' }, + }, + ], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.triggers.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.triggers.fourslash.ts new file mode 100644 index 00000000..ddf05e1b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.triggers.fourslash.ts @@ -0,0 +1,35 @@ +/// + +// @filename: test1.py +//// .[|/*marker1*/|] + +// @filename: test2.py +//// ..[|/*marker2*/|] + +// @filename: test3.py +//// ...[|/*marker3*/|] + +// @filename: test4.py +//// ....[|/*marker4*/|] + +// @filename: test5.py +//// dict = { "test" : "value" } +//// dict[.[|/*marker5*/|]] + +// @filename: test6.py +//// a = 1 +//// a..[|/*marker6*/|] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { completions: [] }, + marker2: { completions: [] }, + marker3: { completions: [] }, + marker4: { completions: [] }, + marker5: { completions: [] }, + marker6: { completions: [] }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.typeAlias.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.typeAlias.fourslash.ts new file mode 100644 index 00000000..219750fd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.typeAlias.fourslash.ts @@ -0,0 +1,29 @@ +/// + +// @filename: test.py +//// AliasT = list[int] +//// x: AliasT[|/*marker1*/|] +//// y: AliasT = [] +//// y[|/*marker2*/|] + +// @ts-ignore +await helper.verifyCompletion('includes', 'markdown', { + marker1: { + completions: [ + { + label: 'AliasT', + kind: Consts.CompletionItemKind.Variable, + documentation: '```python\nAliasT: type[list[int]]\n```', + }, + ], + }, + marker2: { 
+ completions: [ + { + label: 'y', + kind: Consts.CompletionItemKind.Variable, + documentation: '```python\ny: AliasT\n```', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.typeshed.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.typeshed.fourslash.ts new file mode 100644 index 00000000..24c34c5d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.typeshed.fourslash.ts @@ -0,0 +1,9 @@ +/// + +// @filename: test.py +//// from r/*marker*/ + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { completions: [{ label: 'requests', kind: Consts.CompletionItemKind.Module }] }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.vardecls.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.vardecls.fourslash.ts new file mode 100644 index 00000000..62ae93da --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.vardecls.fourslash.ts @@ -0,0 +1,41 @@ +/// + +// @filename: test1.py +//// a/*marker1*/ = 1 + +// @filename: test2.py +//// a = 1 +//// a/*marker2*/ = 1 + +// @filename: test3.py +//// if (a/*marker3*/:= 1): pass + +// @filename: test4.py +//// a = 1 +//// if (a/*marker4*/:= 1): pass + +// @filename: test5.py +//// a = 1 +//// a/*marker5*/ *= 1 + +// @filename: test6.py +//// a = 1 +//// a/*marker6*/ *= 1 + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('excluded', 'markdown', { + marker1: { completions: [{ label: 'a', kind: Consts.CompletionItemKind.Variable }] }, + marker3: { completions: [{ label: 'a', kind: Consts.CompletionItemKind.Variable }] }, + }); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker2: { completions: [{ label: 'a', kind: Consts.CompletionItemKind.Variable }] }, + 
marker4: { completions: [{ label: 'a', kind: Consts.CompletionItemKind.Variable }] }, + marker5: { completions: [{ label: 'a', kind: Consts.CompletionItemKind.Variable }] }, + marker6: { completions: [{ label: 'a', kind: Consts.CompletionItemKind.Variable }] }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.variableDocStrings.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.variableDocStrings.fourslash.ts new file mode 100644 index 00000000..e3ee2891 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.variableDocStrings.fourslash.ts @@ -0,0 +1,82 @@ +/// + +// @filename: test.py +//// from typing import List, Union, Callable +//// +//// class Foo: +//// """ This is a docstring """ +//// +//// aaa = 4 +//// """ aaa is an int """ +//// +//// def __init__(self, func : Callable[[float], float]) -> None: +//// self.bbb = "hi" +//// " bbb is a str " +//// self.func = func +//// """A given function""" +//// +//// ccc = Foo() +//// """ ccc is a Foo """ +//// +//// SomeType = List[Union[int, str]] +//// """Here's some documentation about SomeType""" +//// +//// foo1.b[|/*marker1*/|] +//// +//// foo1.a[|/*marker2*/|] +//// +//// cc[|/*marker3*/|] +//// +//// SomeType[|/*marker4*/|] +//// +//// ccc.fun[|/*marker5*/|] + +// @ts-ignore +await helper.verifyCompletion('includes', 'markdown', { + marker1: { + completions: [ + { + label: 'bbb', + kind: Consts.CompletionItemKind.Variable, + documentation: '```python\nbbb: str\n```\n---\nbbb is a str', + }, + ], + }, + marker2: { + completions: [ + { + label: 'aaa', + kind: Consts.CompletionItemKind.Variable, + documentation: '```python\naaa: int\n```\n---\naaa is an int', + }, + ], + }, + marker3: { + completions: [ + { + label: 'ccc', + kind: Consts.CompletionItemKind.Variable, + documentation: '```python\nccc: Foo\n```\n---\nccc is a Foo', + }, + ], + }, + marker4: { + completions: [ + { + label: 'SomeType', 
+ kind: Consts.CompletionItemKind.Variable, + documentation: + "```python\nSomeType: type[List[int | str]]\n```\n---\nHere's some documentation about SomeType", + }, + ], + }, + marker5: { + completions: [ + { + label: 'func', + kind: Consts.CompletionItemKind.Variable, + documentation: '```python\ndef func(float) -> float\n```\n---\nA given function', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.wildcardimports.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.wildcardimports.fourslash.ts new file mode 100644 index 00000000..e9a58a1f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.wildcardimports.fourslash.ts @@ -0,0 +1,121 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: lib1/definition.py +// @library: true +//// def func(): +//// '''func docs''' +//// pass +//// +//// class MyType: +//// def func2(self): +//// '''func2 docs''' +//// pass + +// @filename: lib1/alias.py +// @library: true +//// def func3(): +//// '''func3 docs''' +//// pass + +// @filename: lib1/withall.py +// @library: true +//// def func4(): +//// '''func4 docs''' +//// pass +//// +//// def func5(): +//// '''func5 docs''' +//// pass +//// +//// __all__ = ['func5'] + +// @filename: lib1/redirect.py +// @library: true +//// from . import withall +//// from .withall import * +//// +//// __all__ += withall.__all__ + +// @filename: lib1/wildcard.py +// @library: true +//// from .definition import * +//// from .redirect import * +//// from .alias import func3 + +// @filename: lib1/__init__.py +// @library: true +//// from .wildcard import * + +// @filename: lib1/__init__.pyi +// @library: true +//// class ufunc: +//// def __call__(self): ... +//// +//// func: ufunc +//// class MyType: +//// def func2(self) -> None : ... 
+//// func3: ufunc +//// func4: ufunc +//// func5: ufunc + +// @filename: test.py +//// import lib1 +//// lib1.[|/*marker1*/func|]() +//// lib1.MyType().[|/*marker2*/func2|]() +//// lib1.[|/*marker3*/func3|]() +//// lib1.[|/*marker4*/func4|]() +//// lib1.[|/*marker5*/func5|]() + +// @ts-ignore +await helper.verifyCompletion('includes', 'markdown', { + marker1: { + completions: [ + { + label: 'func', + kind: Consts.CompletionItemKind.Variable, + documentation: '```python\nfunc: ufunc\n```\n---\nfunc docs', + }, + ], + }, + marker2: { + completions: [ + { + label: 'func2', + kind: Consts.CompletionItemKind.Method, + documentation: '```python\ndef func2() -> None\n```\n---\nfunc2 docs', + }, + ], + }, + marker3: { + completions: [ + { + label: 'func3', + kind: Consts.CompletionItemKind.Variable, + documentation: '```python\nfunc3: ufunc\n```\n---\nfunc3 docs', + }, + ], + }, + marker4: { + completions: [ + { + label: 'func4', + kind: Consts.CompletionItemKind.Variable, + documentation: '```python\nfunc4: ufunc\n```\n---\nfunc4 docs', + }, + ], + }, + marker5: { + completions: [ + { + label: 'func5', + kind: Consts.CompletionItemKind.Variable, + documentation: '```python\nfunc5: ufunc\n```\n---\nfunc5 docs', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/completions.with.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.with.fourslash.ts new file mode 100644 index 00000000..c0acd488 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/completions.with.fourslash.ts @@ -0,0 +1,26 @@ +/// + +// @filename: test.py +//// from unittest.mock import patch +//// def some_func(): +//// pass +//// with patch('some_func') as[|/*marker1*/|] a1: +//// pass +//// with patch('some_func') as [|/*marker2*/|] a1: +//// pass +//// with patch('some_func') as a[|/*marker3*/|]2: +//// pass +//// with patch[|/*marker4*/|]('some_func'): +//// pass + +// @ts-ignore +await 
helper.verifyCompletion('exact', 'markdown', { + marker1: { completions: [] }, + marker2: { completions: [] }, + marker3: { completions: [] }, +}); + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker4: { completions: [{ label: 'patch', kind: Consts.CompletionItemKind.Variable }] }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/diagnostics.missingModuleSource.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/diagnostics.missingModuleSource.fourslash.ts new file mode 100644 index 00000000..803bc704 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/diagnostics.missingModuleSource.fourslash.ts @@ -0,0 +1,69 @@ +/// + +// @filename: test.py +//// +//// import [|/*marker1*/myLib.module|] +//// import myLib.module1 +//// +//// from [|/*marker2*/myLib.module|] import foo +//// from myLib import [|/*marker3*/module|] +//// +//// from [|/*marker4*/.conflict.module2|] import foo2 +//// from .conflict import [|/*marker5*/module2|] +//// +//// import [|/*marker6*/myLib.module|] as m1 +//// from myLib import [|/*marker7*/module|] as m2 +//// from .conflict import [|/*marker8*/module2|] as m3 + +// @filename: myLib/module.pyi +//// def foo(): ... + +// @filename: myLib/module1.pyi +//// + +// @filename: myLib/module1.py +//// + +// @filename: conflict/module2.pyi +//// def foo2(): ... 
+ +// @filename: conflict/module2.py +// @library: true +//// + +{ + helper.verifyDiagnostics({ + marker1: { + category: 'warning', + message: 'Import "myLib.module" could not be resolved from source', + }, + marker2: { + category: 'warning', + message: 'Import "myLib.module" could not be resolved from source', + }, + marker3: { + category: 'warning', + message: 'Import "myLib.module" could not be resolved from source', + }, + marker4: { + category: 'warning', + message: 'Import ".conflict.module2" could not be resolved from source', + }, + marker5: { + category: 'warning', + message: 'Import ".conflict.module2" could not be resolved from source', + }, + marker6: { + category: 'warning', + message: 'Import "myLib.module" could not be resolved from source', + }, + marker7: { + category: 'warning', + message: 'Import "myLib.module" could not be resolved from source', + }, + marker8: { + category: 'warning', + message: 'Import ".conflict.module2" could not be resolved from source', + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.builtinClass.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.builtinClass.fourslash.ts new file mode 100644 index 00000000..dd4f8616 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.builtinClass.fourslash.ts @@ -0,0 +1,31 @@ +/// + +// @filename: test.py +//// from operator import itemgetter +//// x = 4 +//// itemgetter().[|/*marker*/__call__|](x) + +// @filename: operator.py +// @library: true +//// class itemgetter: +//// def [|__call__|](self, obj): +//// pass +//// + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindDefinitions( + { + marker: { + definitions: rangeMap + .get('__call__')! 
+ .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.classes.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.classes.fourslash.ts new file mode 100644 index 00000000..09d749a4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.classes.fourslash.ts @@ -0,0 +1,106 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.pyi +// @library: true +//// class C: ... +//// +//// class C2: ... +//// +//// class C3: ... +//// +//// class C4: ... +//// +//// def C5(a, b): ... + +// @filename: testLib1/__init__.py +// @library: true +//// from .M import C2 +//// from . import D +//// +//// class [|C|]: +//// pass +//// +//// [|C3|] = D.C3 +//// [|C4|] = D.N.C4 +//// +//// class [|C5|]: +//// def __init__(self, a, b): +//// pass + +// @filename: testLib1/M.py +// @library: true +//// class [|C2|]: +//// pass + +// @filename: testLib1/D.py +// @library: true +//// class [|C3|]: +//// pass +//// +//// class N: +//// class [|C4|]: +//// pass + +// @filename: test.py +//// import testLib1 +//// +//// a = testLib1.[|/*marker1*/C|]() +//// a = testLib1.[|/*marker2*/C2|]() +//// a = testLib1.[|/*marker3*/C3|]() +//// a = testLib1.[|/*marker4*/C4|]() +//// a = testLib1.[|/*marker5*/C5|](1, 2) + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindDefinitions( + { + marker1: { + definitions: rangeMap + .get('C')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker2: { + definitions: rangeMap + .get('C2')! 
+ .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker3: { + definitions: rangeMap + .get('C3')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker4: { + definitions: rangeMap + .get('C4')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker5: { + definitions: rangeMap + .get('C5')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.dataclasses.converter.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.dataclasses.converter.fourslash.ts new file mode 100644 index 00000000..fbce7c13 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.dataclasses.converter.fourslash.ts @@ -0,0 +1,40 @@ +/// + +// @filename: test.py +//// from typing import Any, Callable, dataclass_transform +//// +//// +//// def converter_simple(s: str) -> int: +//// ... +//// +//// +//// def model_field(*, converter: Callable[..., Any]) -> Any: +//// ... +//// +//// +//// @dataclass_transform(field_specifiers=(model_field,)) +//// class ModelBase: +//// ... +//// +//// +//// class A(ModelBase): +//// [|converted_attribute|]: int = model_field(converter=converter_simple) +//// +//// +//// a = A("1") +//// print(a.[|/*marker*/converted_attribute|]) + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindDefinitions({ + marker: { + definitions: rangeMap + .get('converted_attribute')! 
+ .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.definitionFilter.preferSource.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.definitionFilter.preferSource.fourslash.ts new file mode 100644 index 00000000..54c4c62a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.definitionFilter.preferSource.fourslash.ts @@ -0,0 +1,29 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// def [|func1|](a): +//// pass + +// @filename: typings/testLib1/__init__.pyi +//// def [|/*ignore*/func1|](a: str): ... + +// @filename: test.py +//// from testLib1 import func1 +//// +//// [|/*marker*/func1|]('') + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions( + { + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.definitionFilter.preferSource.onlyStubs.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.definitionFilter.preferSource.onlyStubs.ts new file mode 100644 index 00000000..52f81a9c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.definitionFilter.preferSource.onlyStubs.ts @@ -0,0 +1,24 @@ +/// + +// @filename: typings/testLib1/__init__.pyi +//// def [|func1|](a: str): ... 
+ +// @filename: test.py +//// from testLib1 import func1 +//// +//// [|/*marker*/func1|]('') + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions( + { + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.definitionFilter.preferStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.definitionFilter.preferStub.fourslash.ts new file mode 100644 index 00000000..0557d838 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.definitionFilter.preferStub.fourslash.ts @@ -0,0 +1,29 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// def [|/*ignore*/func1|](a): +//// pass + +// @filename: typings/testLib1/__init__.pyi +//// def [|func1|](a: str): ... + +// @filename: test.py +//// from testLib1 import func1 +//// +//// [|/*marker*/func1|]('') + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions( + { + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferStubs' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.definitionFilter.preferStub.onlySource.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.definitionFilter.preferStub.onlySource.fourslash.ts new file mode 100644 index 00000000..3e9a3479 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.definitionFilter.preferStub.onlySource.fourslash.ts @@ -0,0 +1,25 @@ +/// + +// @filename: testLib1/__init__.py +//// def [|func1|](a): +//// pass + +// @filename: test.py +//// from testLib1 import func1 +//// +//// 
[|/*marker*/func1|]('') + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions( + { + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferStubs' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.fields.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.fields.fourslash.ts new file mode 100644 index 00000000..aa0947f2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.fields.fourslash.ts @@ -0,0 +1,136 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.pyi +// @library: true +//// from typing import ClassVar +//// +//// class C: +//// V = ... +//// +//// class C2: +//// V2 = ... +//// +//// class C3: +//// V3 = ... +//// +//// class C4: +//// V4: ClassVar[int] = ... +//// +//// class C5: +//// V5: ClassVar[int] = ... +//// +//// class C6: +//// V6 = ... + +// @filename: testLib1/__init__.py +// @library: true +//// from .M import C2 +//// from . 
import D +//// +//// class C: +//// def __init__(self): +//// self.[|V|] = 1 +//// +//// C3 = D.C3 +//// C4 = D.N.C4 +//// +//// class B: +//// [|V5|] = 1 +//// +//// def __init__(self): +//// self.[|V6|] = 1 +//// +//// class C5(B): +//// pass +//// +//// class C6(B): +//// pass + +// @filename: testLib1/M.py +// @library: true +//// class C2: +//// def __init__(self): +//// self.[|V2|] = 1 + +// @filename: testLib1/D.py +// @library: true +//// class C3: +//// def [|__init__|](self): +//// self.[|V3|] = 1 +//// +//// class N: +//// class C4: +//// [|V4|] = 1 + +// @filename: test.py +//// import testLib1 +//// +//// a = testLib1.C().[|/*marker1*/V|]() +//// a = testLib1.C2().[|/*marker2*/V2|]() +//// a = testLib1.C3().[|/*marker3*/V3|]() +//// a = testLib1.C4().[|/*marker4*/V4|]() +//// a = testLib1.C5().[|/*marker5*/V5|]() +//// a = testLib1.C6().[|/*marker6*/V6|]() + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindDefinitions( + { + marker1: { + definitions: rangeMap + .get('V')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker2: { + definitions: rangeMap + .get('V2')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker3: { + definitions: rangeMap + .get('V3')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker4: { + definitions: rangeMap + .get('V4')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker5: { + definitions: rangeMap + .get('V5')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker6: { + definitions: rangeMap + .get('V6')! 
+ .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.functions.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.functions.fourslash.ts new file mode 100644 index 00000000..17569288 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.functions.fourslash.ts @@ -0,0 +1,92 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.pyi +// @library: true +//// def C(): ... +//// +//// def C2(): ... +//// +//// def C3(): ... +//// +//// def C4(): ... + +// @filename: testLib1/__init__.py +// @library: true +//// from .M import C2 +//// from . import D +//// +//// def [|C|](): +//// pass +//// +//// [|C3|] = D.C3 +//// [|C4|] = D.Generate() + +// @filename: testLib1/M.py +// @library: true +//// def [|C2|](): +//// pass + +// @filename: testLib1/D.py +// @library: true +//// def [|C3|](): +//// pass +//// +//// def Generate(): +//// def [|C4|](): +//// pass +//// return C4; + +// @filename: test.py +//// import testLib1 +//// +//// a = testLib1.[|/*marker1*/C|]() +//// a = testLib1.[|/*marker2*/C2|]() +//// a = testLib1.[|/*marker3*/C3|]() +//// a = testLib1.[|/*marker4*/C4|]() + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindDefinitions( + { + marker1: { + definitions: rangeMap + .get('C')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker2: { + definitions: rangeMap + .get('C2')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker3: { + definitions: rangeMap + .get('C3')! 
+ .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker4: { + definitions: rangeMap + .get('C4')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.methods.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.methods.fourslash.ts new file mode 100644 index 00000000..8dc14248 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.methods.fourslash.ts @@ -0,0 +1,167 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.pyi +// @library: true +//// from typing import overload +//// +//// class C: +//// def method(self): ... +//// +//// class C2: +//// def method2(self): ... +//// +//// class C3: +//// def method3(self): ... +//// +//// class C4: +//// def method4(self): ... +//// +//// class C5: +//// def method5(self): ... +//// +//// class C6: +//// def method6(self): ... +//// +//// class C7: +//// @overload +//// def method7(self): ... +//// @overload +//// def method7(self, a): ... + +// @filename: testLib1/__init__.py +// @library: true +//// from .M import C2 +//// from . 
import D +//// +//// class C: +//// def [|method|](self): +//// pass +//// +//// C3 = D.C3 +//// C4 = D.N.C4 +//// +//// class B: +//// def [|method5|](self): +//// pass +//// +//// def method6(self): +//// pass +//// +//// class C5(B): +//// pass +//// +//// class C6(B): +//// def [|method6|](self): +//// pass +//// +//// class C7: +//// def [|method7|](self, a): +//// pass + +// @filename: testLib1/M.py +// @library: true +//// class C2: +//// def [|method2|](self): +//// pass + +// @filename: testLib1/D.py +// @library: true +//// class C3: +//// def [|method3|](self): +//// pass +//// +//// class N: +//// class C4: +//// def [|method4|](self): +//// pass + +// @filename: test.py +//// import testLib1 +//// +//// testLib1.C().[|/*marker1*/method|]() +//// testLib1.C2().[|/*marker2*/method2|]() +//// testLib1.C3().[|/*marker3*/method3|]() +//// testLib1.C4().[|/*marker4*/method4|]() +//// testLib1.C5().[|/*marker5*/method5|]() +//// testLib1.C6().[|/*marker6*/method6|]() +//// testLib1.C7().[|/*marker7*/method7|]() +//// testLib1.C7().[|/*marker7_1*/method7|](1) + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindDefinitions( + { + marker1: { + definitions: rangeMap + .get('method')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker2: { + definitions: rangeMap + .get('method2')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker3: { + definitions: rangeMap + .get('method3')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker4: { + definitions: rangeMap + .get('method4')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker5: { + definitions: rangeMap + .get('method5')! 
+ .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker6: { + definitions: rangeMap + .get('method6')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker7: { + definitions: rangeMap + .get('method7')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker7_1: { + definitions: rangeMap + .get('method7')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.modules.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.modules.fourslash.ts new file mode 100644 index 00000000..7e366f22 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.modules.fourslash.ts @@ -0,0 +1,50 @@ +/// + +// @filename: testLib1/__init__.pyi +// @library: true +//// from . import M as M +//// from . 
import D as D + +// @filename: testLib1/D.pyi +// @library: true +//// # empty + +// @filename: testLib1/__init__.py +// @library: true +//// [|/*def1*/|]# empty + +// @filename: testLib1/M.py +// @library: true +//// [|/*def2*/|]# empty + +// @filename: testLib1/D.py +// @library: true +//// [|/*def3*/|]# empty + +// @filename: test.py +//// import [|/*marker1*/testLib1|] +//// import testLib1.[|/*marker2*/M|] +//// import testLib1.[|/*marker3*/D|] + +{ + helper.verifyFindDefinitions( + { + marker1: { + definitions: [ + { path: helper.getMarkerByName('def1').fileName, range: helper.getPositionRange('def1') }, + ], + }, + marker2: { + definitions: [ + { path: helper.getMarkerByName('def2').fileName, range: helper.getPositionRange('def2') }, + ], + }, + marker3: { + definitions: [ + { path: helper.getMarkerByName('def3').fileName, range: helper.getPositionRange('def3') }, + ], + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.namespaceImport.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.namespaceImport.fourslash.ts new file mode 100644 index 00000000..291b9713 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.namespaceImport.fourslash.ts @@ -0,0 +1,40 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "extraPaths": ["subproj"] +//// } + +// @filename: subproj/foo/bar1.py +//// [|/*def1*/x|] = 1 + +// @filename: foo/bar2.py +//// [|/*def2*/x|] = 1 + +// @filename: test.py +//// from foo import [|/*marker1*/bar1|] +//// from foo import [|/*marker2*/bar2|] + +{ + helper.verifyFindDefinitions( + { + marker1: { + definitions: [ + { + path: helper.getMarkerByName('def1').fileName, + range: { start: { line: 0, character: 0 }, end: { line: 0, character: 0 } }, + }, + ], + }, + marker2: { + definitions: [ + { + path: helper.getMarkerByName('def2').fileName, + range: { start: { line: 0, character: 0 }, end: 
{ line: 0, character: 0 } }, + }, + ], + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.namespaceImportWithInit.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.namespaceImportWithInit.fourslash.ts new file mode 100644 index 00000000..fa3e7044 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.namespaceImportWithInit.fourslash.ts @@ -0,0 +1,40 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true, +//// "executionEnvironments": [{ "root": "."}], +//// "venv": ".venv", +//// "venvPath": ".", +//// } + +// @filename: .venv/lib/site-packages/lib1.pth +//// lib1 + +// @filename: .venv/lib/site-packages/lib2.pth +//// lib2 + +// @filename: .venv/lib/site-packages/lib1/a/b/main.py +//// + +// @filename: .venv/lib/site-packages/lib2/a/b/__init__.py +//// [|/*def1*/x|] = 1 + +// @filename: test.py +//// from a.b import [|/*marker1*/x|] + +{ + helper.verifyFindDefinitions( + { + marker1: { + definitions: [ + { + path: helper.getMarkerByName('def1').fileName, + range: { start: { line: 0, character: 0 }, end: { line: 0, character: 1 } }, + }, + ], + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.overloads.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.overloads.fourslash.ts new file mode 100644 index 00000000..42e1ac1b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.overloads.fourslash.ts @@ -0,0 +1,223 @@ +/// + +// @filename: testLib1/__init__.pyi +// @library: true +//// from typing import overload +//// +//// class C: +//// @overload +//// def method(self): ... +//// @overload +//// def method(self, a): ... +//// +//// class C2: +//// @overload +//// def method2(self): ... 
+//// @overload +//// def method2(self, a): ... +//// +//// class C3(C2): +//// @overload +//// def method2(self): ... +//// @overload +//// def method2(self, a): ... +//// +//// class C4: +//// @overload +//// def method4(self): ... +//// @overload +//// def method4(self, a): ... +//// +//// class C5: +//// @overload +//// def method5(self): ... +//// @overload +//// def method5(self, a): ... +//// +//// class C6: +//// @overload +//// def method6(self): ... +//// @overload +//// def method6(self, a): ... +//// +//// @overload +//// def method7(): ... +//// @overload +//// def method7(a): ... + +// @filename: testLib1/__init__.py +// @library: true +//// from .M import C2, C3 as MC3, method7 as m7 +//// from . import D +//// +//// class C: +//// @overload +//// def [|method|](self): +//// pass +//// +//// @overload +//// def [|method|](self, a): +//// pass +//// +//// C3 = MC3 +//// C4 = D.N.C4 +//// +//// class B: +//// @overload +//// def [|method5|](self): +//// pass +//// +//// def [|method5|](self, a): +//// pass +//// +//// @overload +//// def method6(self): +//// pass +//// +//// @overload +//// def method6(self, a): +//// pass +//// +//// class C5(B): +//// pass +//// +//// class C6(B): +//// @overload +//// def [|method6|](self): +//// pass +//// +//// @overload +//// def [|method6|](self, a): +//// pass +//// +//// [|method7|] = m7 + +// @filename: testLib1/M.pyi +// @library: true +//// from . import D +//// C2 = D.C2 +//// C3 = D.C3 +//// method7 = D.method7 + +// @filename: testLib1/M.py +// @library: true +//// from . import D +//// C2 = D.C2 +//// C3 = D.C3 +//// method7 = D.method7 + +// @filename: testLib1/D.pyi +// @library: true +//// class C2: +//// @overload +//// def method2(self): ... +//// @overload +//// def method2(self, a): ... +//// +//// class C3(C2): ... +//// +//// class N: +//// class C4: +//// @overload +//// def method4(self): ... +//// @overload +//// def method4(self, a): ... +//// +//// @overload +//// def method7(): ... 
+//// @overload +//// def method7(a): ... + +// @filename: testLib1/D.py +// @library: true +//// class C2: +//// def [|method2|](self, a): +//// pass +//// +//// class C3(C2): +//// pass +//// +//// class N: +//// class C4: +//// def [|method4|](self, a): +//// pass +//// +//// def [|method7|](a): +//// pass + +// @filename: test.py +//// import testLib1 +//// +//// testLib1.C().[|/*marker1*/method|]() +//// testLib1.C2().[|/*marker2*/method2|]() +//// testLib1.C3().[|/*marker3*/method2|]() +//// testLib1.C4().[|/*marker4*/method4|](1) +//// testLib1.C5().[|/*marker5*/method5|]() +//// testLib1.C6().[|/*marker6*/method6|](1) +//// testLib1.[|/*marker7*/method7|](1) + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindDefinitions( + { + marker1: { + definitions: rangeMap + .get('method')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker2: { + definitions: rangeMap + .get('method2')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker3: { + definitions: rangeMap + .get('method2')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker4: { + definitions: rangeMap + .get('method4')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker5: { + definitions: rangeMap + .get('method5')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker6: { + definitions: rangeMap + .get('method6')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker7: { + definitions: rangeMap + .get('method7')! 
+ .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.parameters.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.parameters.fourslash.ts new file mode 100644 index 00000000..ac188827 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.parameters.fourslash.ts @@ -0,0 +1,167 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.pyi +// @library: true +//// from typing import overload +//// +//// class C: +//// def method(self, a): ... +//// +//// class C2: +//// def method2(self, a2): ... +//// +//// class C3: +//// def method3(self, a3): ... +//// +//// class C4: +//// def method4(self, a4): ... +//// +//// class C5: +//// def method5(self, a5): ... +//// +//// class C6: +//// def method6(self, a6): ... +//// +//// class C7: +//// @overload +//// def method7(self, a7): ... +//// @overload +//// def method7(self, a7, b7): ... + +// @filename: testLib1/__init__.py +// @library: true +//// from .M import C2 +//// from . 
import D +//// +//// class C: +//// def method(self, [|a|]): +//// pass +//// +//// C3 = D.C3 +//// C4 = D.N.C4 +//// +//// class B: +//// def method5(self, [|a5|]): +//// pass +//// +//// def method6(self, a6): +//// pass +//// +//// class C5(B): +//// pass +//// +//// class C6(B): +//// def method6(self, [|a6|]): +//// pass +//// +//// class C7: +//// def method7(self, [|a7|], [|b7|]): +//// pass + +// @filename: testLib1/M.py +// @library: true +//// class C2: +//// def method2(self, [|a2|]): +//// pass + +// @filename: testLib1/D.py +// @library: true +//// class C3: +//// def method3(self, [|a3|]): +//// pass +//// +//// class N: +//// class C4: +//// def method4(self, [|a4|]): +//// pass + +// @filename: test.py +//// import testLib1 +//// +//// testLib1.C().method([|/*marker1*/a|] = 1) +//// testLib1.C2().method2([|/*marker2*/a2|] = 1) +//// testLib1.C3().method3([|/*marker3*/a3|] = 1) +//// testLib1.C4().method4([|/*marker4*/a4|] = 1) +//// testLib1.C5().method5([|/*marker5*/a5|] = 1) +//// testLib1.C6().method6([|/*marker6*/a6|] = 1) +//// testLib1.C7().method7([|/*marker7*/a7|] = 1) +//// testLib1.C7().method7(a7 = 1, [|/*marker7_1*/b7|] = 1) + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindDefinitions( + { + marker1: { + definitions: rangeMap + .get('a')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker2: { + definitions: rangeMap + .get('a2')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker3: { + definitions: rangeMap + .get('a3')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker4: { + definitions: rangeMap + .get('a4')! 
+ .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker5: { + definitions: rangeMap + .get('a5')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker6: { + definitions: rangeMap + .get('a6')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker7: { + definitions: rangeMap + .get('a7')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker7_1: { + definitions: rangeMap + .get('b7')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.function.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.function.fourslash.ts new file mode 100644 index 00000000..b4b5da70 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.function.fourslash.ts @@ -0,0 +1,26 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// def [|func1|](a): +//// pass + +// @filename: typings/testLib1/__init__.pyi +//// def [|func1|](a: str): ... 
+ +// @filename: test.py +//// from testLib1 import func1 +//// +//// [|/*marker*/func1|]('') + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions({ + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.innerClass.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.innerClass.fourslash.ts new file mode 100644 index 00000000..36bd5c95 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.innerClass.fourslash.ts @@ -0,0 +1,32 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// class Outer: +//// class [|Middle|]: +//// class Inner: +//// def M(self, a): +//// pass + +// @filename: typings/testLib1/__init__.pyi +//// class Outer: +//// class [|Middle|]: +//// class Inner: +//// def M(self, a: str): ... 
+ +// @filename: test.py +//// import testLib1 +//// +//// testLib1.Outer.[|/*marker*/Middle|].Inner() + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions({ + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.innerClassMethod.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.innerClassMethod.fourslash.ts new file mode 100644 index 00000000..a71e7fb2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.innerClassMethod.fourslash.ts @@ -0,0 +1,33 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// class Outer: +//// class Middle: +//// class Inner: +//// def [|M|](self, a): +//// pass + +// @filename: typings/testLib1/__init__.pyi +//// class Outer: +//// class Middle: +//// class Inner: +//// def [|M|](self, a: str): ... 
+ +// @filename: test.py +//// import testLib1 +//// +//// a = testLib1.Outer.Middle.Inner() +//// a.[|/*marker*/M|]('') + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions({ + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.outerClass.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.outerClass.fourslash.ts new file mode 100644 index 00000000..10ec408a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.outerClass.fourslash.ts @@ -0,0 +1,28 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// class [|Test1|]: +//// def M(self, a): +//// pass + +// @filename: typings/testLib1/__init__.pyi +//// class [|Test1|]: +//// def M(self, a: str): ... 
+ +// @filename: test.py +//// import testLib1 +//// +//// a = testLib1.[|/*marker*/Test1|]() + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions({ + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.outerClassMethod.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.outerClassMethod.fourslash.ts new file mode 100644 index 00000000..91c4259a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.outerClassMethod.fourslash.ts @@ -0,0 +1,29 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// class Test1: +//// def [|M|](self, a): +//// pass + +// @filename: typings/testLib1/__init__.pyi +//// class Test1: +//// def [|M|](self, a: str): ... 
+ +// @filename: test.py +//// import testLib1 +//// +//// a = testLib1.Test1() +//// a.[|/*marker*/M|]('') + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions({ + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.outerClassPropertyReadOnly.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.outerClassPropertyReadOnly.fourslash.ts new file mode 100644 index 00000000..36a63cc5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.outerClassPropertyReadOnly.fourslash.ts @@ -0,0 +1,31 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// class Test1: +//// @property +//// def [|P|](self): +//// return '' + +// @filename: typings/testLib1/__init__.pyi +//// class Test1: +//// @property +//// def [|P|](self) -> str: ... 
+ +// @filename: test.py +//// import testLib1 +//// +//// a = testLib1.Test1() +//// a.[|/*marker*/P|] + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions({ + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.outerClassPropertyReadWrite.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.outerClassPropertyReadWrite.fourslash.ts new file mode 100644 index 00000000..cc064206 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceAndStub.outerClassPropertyReadWrite.fourslash.ts @@ -0,0 +1,36 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// class Test1: +//// @property +//// def [|P|](self): +//// return '' +//// @P.setter +//// def [|P|](self, a): +//// pass + +// @filename: typings/testLib1/__init__.pyi +//// class Test1: +//// @property +//// def [|P|](self) -> str: ... +//// @P.setter +//// def [|P|](self, a: str): ... 
+ +// @filename: test.py +//// import testLib1 +//// +//// a = testLib1.Test1() +//// a.[|/*marker*/P|] + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions({ + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.class.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.class.fourslash.ts new file mode 100644 index 00000000..e08f0555 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.class.fourslash.ts @@ -0,0 +1,29 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.py +// @library: true +//// class [|Test1|]: +//// def M(self, a: str): +//// pass + +// @filename: test.py +//// import testLib1 +//// +//// a = testLib1.[|/*marker*/Test1|]() + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions({ + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.function1.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.function1.fourslash.ts new file mode 100644 index 00000000..62b6e395 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.function1.fourslash.ts @@ -0,0 +1,28 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.py +// @library: true +//// def [|func1|](a): +//// pass + +// @filename: test.py +//// from testLib1 import func1 +//// +//// 
[|/*marker*/func1|]('') + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions({ + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.function2.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.function2.fourslash.ts new file mode 100644 index 00000000..bc048363 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.function2.fourslash.ts @@ -0,0 +1,28 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.py +// @library: true +//// def [|func1|](a): +//// pass + +// @filename: test.py +//// from testLib1 import func1 as test_func1 +//// +//// [|/*marker*/test_func1|]('') + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions({ + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.relativeImport1.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.relativeImport1.fourslash.ts new file mode 100644 index 00000000..d57410a3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.relativeImport1.fourslash.ts @@ -0,0 +1,22 @@ +/// + +// @filename: testLib1/__init__.py +//// def [|func1|](a): +//// pass + +// @filename: test.py +//// from .testLib1 import func1 +//// +//// [|/*marker*/func1|]('') + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions({ + marker: { + definitions: ranges.map((r) => { + 
return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.relativeImport2.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.relativeImport2.fourslash.ts new file mode 100644 index 00000000..727ce91d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.sourceOnly.relativeImport2.fourslash.ts @@ -0,0 +1,22 @@ +/// + +// @filename: testLib1/__init__.py +//// def [|func1|](a): +//// pass + +// @filename: test.py +//// from . import testLib1 +//// +//// testLib1.[|/*marker*/func1|]('') + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindDefinitions({ + marker: { + definitions: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.stubOnly.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.stubOnly.fourslash.ts new file mode 100644 index 00000000..66c88437 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.stubOnly.fourslash.ts @@ -0,0 +1,61 @@ +/// + +// @filename: typings/testLib1/__init__.pyi +//// from typing import overload +//// +//// class [|Test1|]: +//// def M(self, a: str): +//// pass +//// @overload +//// def [|OL|](self, [|a|]): +//// pass +//// @overload +//// def [|OL|](self, [|a|], [|b|]): +//// pass + +// @filename: test.py +//// import testLib1 +//// +//// a = testLib1.[|/*marker*/Test1|]() +//// testLib1.Test1().[|/*marker2*/OL|]("hello") +//// testLib1.Test1().OL([|/*marker3*/a|] = "hello") +//// testLib1.Test1().OL(a = "hello", [|/*marker4*/b|] = 1) + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindDefinitions({ + marker: { + definitions: rangeMap + 
.get('Test1')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker2: { + definitions: rangeMap + .get('OL')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker3: { + definitions: rangeMap + .get('a')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker4: { + definitions: rangeMap + .get('b')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.stubPackages.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.stubPackages.fourslash.ts new file mode 100644 index 00000000..bcf3b3a1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.stubPackages.fourslash.ts @@ -0,0 +1,71 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1-stubs/py.typed +// @library: true +//// partial +//// + +// @filename: testLib1-stubs/__init__.pyi +// @library: true +//// from .core import C as C +//// from .base import C2 as C2 + +// @filename: testLib1/__init__.py +// @library: true +//// from .core import C +//// from .base import C2 as C2 + +// @filename: testLib1-stubs/core/__init__.pyi +// @library: true +//// class [|C|]: ... 
+ +// @filename: testLib1/core/__init__.py +// @library: true +//// class C: +//// pass + +// @filename: testLib1/base/__init__.py +// @library: true +//// from ..main import C2 as C2 + +// @filename: testLib1/main.py +// @library: true +//// class [|C2|]: +//// pass + +// @filename: test.py +//// import testLib1 +//// +//// a = testLib1.[|/*marker1*/C|]() +//// a = testLib1.[|/*marker2*/C2|]() + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindDefinitions( + { + marker1: { + definitions: rangeMap + .get('C')! + .filter((r) => !r.marker) + .map((r) => { + return { path: helper.getMappedFilePath(r.fileName), range: helper.convertPositionRange(r) }; + }), + }, + marker2: { + definitions: rangeMap + .get('C2')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferStubs' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.typedDict.keys.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.typedDict.keys.fourslash.ts new file mode 100644 index 00000000..f786d8cd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.typedDict.keys.fourslash.ts @@ -0,0 +1,108 @@ +/// + +// @filename: a.py +//// from typing import TypedDict +//// +//// class Post(TypedDict, total=False): +//// [|title|] : str +//// author: 'Author' +//// +//// class AuthorOptionalData(TypedDict, total=False): +//// [|age|]: int +//// +//// class Author(AuthorOptionalData): +//// [|name|] : str +//// +//// Profile = TypedDict( +//// 'Profile', +//// { +//// 'bio': str, +//// [|'views'|]: int, +//// }, +//// total=False, +//// ) + +// @filename: test.py +//// from a import Post, Author, Profile +//// +//// author: Author = {[|/*marker1*/'name'|]: 'Robert'} +//// post: Post = {'author': {[|/*marker2*/'name'|]}} +//// profile: Profile = {[|/*marker3*/'views'|]: 100} +//// author: 
Author = {'name': 'Robert', [|/*marker4*/'age'|]: 67} +//// author2: Author = Author([|/*marker8*/name|]='Robert', age=67) +//// +//// def foo(item: Post | Author) -> None: +//// ... +//// +//// foo(item={[|/*marker5*/'title'|]}) +//// foo(item={'title': [|/*marker6*/'title'|]}) +//// foo(item={[|/*marker7*/'name'|]}) + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindDefinitions( + { + marker1: { + definitions: rangeMap + .get('name')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker2: { + definitions: rangeMap + .get('name')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker3: { + definitions: rangeMap + .get("'views'")! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker4: { + definitions: rangeMap + .get('age')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker5: { + definitions: rangeMap + .get('title')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker6: { + definitions: [], + }, + marker7: { + definitions: rangeMap + .get('name')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker8: { + definitions: rangeMap + .get('name')! 
+ .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.variables.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.variables.fourslash.ts new file mode 100644 index 00000000..99b682d4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.variables.fourslash.ts @@ -0,0 +1,93 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.pyi +// @library: true +//// C = ... +//// +//// C2 = ... +//// +//// C3 = ... +//// +//// C4 = ... + +// @filename: testLib1/__init__.py +// @library: true +//// from .M import C2 +//// from . import D +//// +//// [|C|] = 1 +//// +//// [|C3|] = D.C3 +//// [|C4|] = D.Generate() + +// @filename: testLib1/M.py +// @library: true +//// [|C2|] = 1 + +// @filename: testLib1/D.pyi +// @library: true +//// C3 = ... +//// +//// def Generate(): ... + +// @filename: testLib1/D.py +// @library: true +//// C3 = 1 +//// +//// def Generate(): +//// return 1; + +// @filename: test.py +//// import testLib1 +//// +//// a = testLib1.[|/*marker1*/C|] +//// a = testLib1.[|/*marker2*/C2|] +//// a = testLib1.[|/*marker3*/C3|] +//// a = testLib1.[|/*marker4*/C4|] + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindDefinitions( + { + marker1: { + definitions: rangeMap + .get('C')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker2: { + definitions: rangeMap + .get('C2')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker3: { + definitions: rangeMap + .get('C3')! 
+ .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker4: { + definitions: rangeMap + .get('C4')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.wildcardimports.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.wildcardimports.fourslash.ts new file mode 100644 index 00000000..18b80139 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findDefinitions.wildcardimports.fourslash.ts @@ -0,0 +1,122 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: lib1/definition.py +// @library: true +//// def [|func|](): +//// '''func docs''' +//// pass +//// +//// class MyType: +//// def [|func2|](): +//// '''func2 docs''' +//// pass + +// @filename: lib1/alias.py +// @library: true +//// def [|func3|](): +//// '''func3 docs''' +//// pass + +// @filename: lib1/withall.py +// @library: true +//// def [|func4|](): +//// '''func4 docs''' +//// pass +//// +//// def [|func5|](): +//// '''func5 docs''' +//// pass +//// +//// __all__ = ['func5'] + +// @filename: lib1/redirect.py +// @library: true +//// from . import withall +//// from .withall import * +//// +//// __all__ += withall.__all__ + +// @filename: lib1/wildcard.py +// @library: true +//// from .definition import * +//// from .redirect import * +//// from .alias import func3 + +// @filename: lib1/__init__.py +// @library: true +//// from .wildcard import * + +// @filename: lib1/__init__.pyi +// @library: true +//// class ufunc: +//// def __call__(self) -> None : ... +//// +//// func: ufunc +//// class MyType: +//// def func2() -> None : ... 
+//// func3: ufunc +//// func4: ufunc +//// func5: ufunc + +// @filename: test.py +//// import lib1 +//// lib1.[|/*marker1*/func|]() +//// lib1.MyType().[|/*marker2*/func2|]() +//// lib1.[|/*marker3*/func3|]() +//// lib1.[|/*marker4*/func4|]() +//// lib1.[|/*marker5*/func5|]() + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindDefinitions( + { + marker1: { + definitions: rangeMap + .get('func')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker2: { + definitions: rangeMap + .get('func2')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker3: { + definitions: rangeMap + .get('func3')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker4: { + definitions: rangeMap + .get('func4')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker5: { + definitions: rangeMap + .get('func5')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }, + 'preferSource' + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.builtinClass.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.builtinClass.fourslash.ts new file mode 100644 index 00000000..ce9ec6f9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.builtinClass.fourslash.ts @@ -0,0 +1,26 @@ +/// + +// @filename: test.py +//// [|/*marker1*/a|] = 1 + +// @filename: typeshed-fallback/stdlib/builtins.pyi +//// class [|int|]: +//// @overload +//// def __new__(cls: Type[_T], x: str | bytes | SupportsInt | SupportsIndex | _SupportsTrunc = ...) -> _T: ... 
+//// @overload +//// def __new__(cls: Type[_T], x: str | bytes | bytearray, base: SupportsIndex) -> _T: ... + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindTypeDefinitions({ + marker1: { + definitions: rangeMap + .get('int')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.classes.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.classes.fourslash.ts new file mode 100644 index 00000000..7c319e75 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.classes.fourslash.ts @@ -0,0 +1,77 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.pyi +// @library: true +//// class [|C|]: ... +//// +//// class [|C2|]: ... +//// +//// class [|C3|]: ... +//// +//// class [|C4|]: ... +//// +//// class [|C5|]: ... + +// @filename: testLib1/__init__.py +// @library: true +//// from .M import C2 +//// from . import D +//// +//// class [|C|]: +//// pass +//// +//// [|C3|] = D.C3 +//// [|C4|] = D.N.C4 +//// +//// class [|C5|]: +//// def __init__(self, a, b): +//// pass + +// @filename: testLib1/M.py +// @library: true +//// class [|C2|]: +//// pass + +// @filename: testLib1/D.py +// @library: true +//// class [|C3|]: +//// pass +//// +//// class N: +//// class [|C4|]: +//// pass + +// @filename: test.py +//// import testLib1 +//// +//// [|/*marker1*/a|] = testLib1.C() +//// [|/*marker2*/a|] = testLib1.C2() +//// [|/*marker3*/a|] = testLib1.C3() +//// [|/*marker4*/a|] = testLib1.C4() +//// [|/*marker5*/a|] = testLib1.C5(1, 2) + +{ + const rangeMap = helper.getRangesByText(); + + var _getRanges = function (rangeName: string): _.DocumentRange[] { + return rangeMap + .get(rangeName)! 
+ .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + }; + + helper.verifyFindTypeDefinitions({ + marker1: { definitions: _getRanges('C') }, + marker2: { definitions: _getRanges('C2') }, + marker3: { definitions: _getRanges('C3') }, + marker4: { definitions: _getRanges('C4') }, + marker5: { definitions: _getRanges('C5') }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.unions.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.unions.fourslash.ts new file mode 100644 index 00000000..f6965e57 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.unions.fourslash.ts @@ -0,0 +1,26 @@ +/// + +// @filename: test.py +//// from typing import Union +//// +//// class [|C1|]: +//// pass +//// +//// class N: +//// class [|C2|]: +//// pass +//// +//// def foo([|/*marker1*/a|]: Union[C1, N.C2]): +//// pass + +{ + helper.verifyFindTypeDefinitions({ + marker1: { + definitions: helper + .getFilteredRanges<{ target?: string }>((m, d, t) => t === 'C1' || t === 'C2') + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.classPropertyReadWrite.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.classPropertyReadWrite.ts new file mode 100644 index 00000000..958b8d5f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.classPropertyReadWrite.ts @@ -0,0 +1,39 @@ +/// + +// @filename: testLib1/__init__.py +//// class Test1: +//// @property +//// def [|P|](self): +//// return '' +//// @[|P|].setter +//// def [|P|](self, a): +//// pass + +// @filename: test.py +//// from testLib1 import Test1 +//// +//// a = Test1() +//// a.[|/*marker*/P|] = '' +//// val = a.[|P|] + 
+// @filename: test2.py +//// from testLib1 import Test1 +//// +//// b = Test1() +//// func(b) +//// +//// def func(t: Test1): +//// t.[|P|] = '' +//// print(t.[|P|]) + +{ + const ranges = helper.getRanges(); + + helper.verifyFindAllReferences({ + marker: { + references: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.fourslash.ts new file mode 100644 index 00000000..d862ca4b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.fourslash.ts @@ -0,0 +1,34 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.py +// @library: true +//// class [|Test1|]: +//// def M(self, a: Test1): +//// pass + +// @filename: test.py +//// from testLib1 import [|Test1|] +//// +//// a = [|/*marker*/Test1|]() + +// @filename: test2.py +//// from testLib1 import [|Test1|] +//// +//// b = [|Test1|]() + +{ + const ranges = helper.getRanges(); + + helper.verifyFindAllReferences({ + marker: { + references: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.importalias.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.importalias.fourslash.ts new file mode 100644 index 00000000..21fd8d94 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.importalias.fourslash.ts @@ -0,0 +1,34 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.py +// @library: true +//// class Test1: +//// def M(self, a: Test1): +//// pass + +// 
@filename: test.py +//// from testLib1 import Test1 as [|t1|] +//// +//// a = [|[|/*marker*/t1|]|]() + +// @filename: test2.py +//// from testLib1 import Test1 +//// +//// b = Test1() + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyFindAllReferences({ + marker: { + references: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.invokedFromLibrary.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.invokedFromLibrary.fourslash.ts new file mode 100644 index 00000000..c53ee5f8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.invokedFromLibrary.fourslash.ts @@ -0,0 +1,52 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.py +// @library: true +//// class [|/*marker*/Test1|]: +//// def M(self, a: '[|Test1|]'): +//// pass + +// @filename: testLib2/__init__.py +// @library: true +//// # We need an indexer to discover references in closed files +//// # that are not referenced in open files (or workspace depends on diagnostic mode) +//// from testLib1 import Test1 +//// +//// a = Test1() + +// @filename: testLib3/__init__.py +// @library: true +//// from testLib1 import [|Test1|] +//// +//// class Test3: +//// def M(self, a: [|Test1|]): +//// pass + +// @filename: test.py +//// from testLib1 import [|Test1|] +//// +//// a = [|Test1|]() + +// @filename: test2.py +//// from testLib1 import [|Test1|] +//// from testLib3 import Test3 +//// +//// a = Test3() +//// b = [|Test1|]() + +{ + const ranges = helper.getRanges(); + + helper.verifyFindAllReferences({ + marker: { + references: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git 
a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.module.nested.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.module.nested.fourslash.ts new file mode 100644 index 00000000..75fb6be7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.module.nested.fourslash.ts @@ -0,0 +1,67 @@ +/// + +// @filename: nested/__init__.py +//// from .[|/*module1*/module1|] import module1Func as module1Func + +// @filename: nested/module1.py +//// def module1Func(): +//// pass + +// @filename: test1.py +//// import [|/*nest1*/nested|].[|/*module2*/module1|] +//// import [|/*nest2*/nested|].[|/*module3*/module1|] as m +//// +//// [|/*nest3*/nested|].[|/*module4*/module1|].module1Func() + +// @filename: test2.py +//// from [|/*nest4*/nested|].[|/*module5*/module1|] import module1Func +//// from .[|/*nest5*/nested|].[|/*module6*/module1|] import module1Func as f + +// @filename: test3.py +//// from .[|/*nest6*/nested|] import [|/*module7*/module1|] +//// from .[|/*nest7*/nested|] import [|/*module8*/module1|] as m + +// @filename: code/test4.py +//// from ..[|/*nest8*/nested|] import [|/*module9*/module1|] +//// from ..[|/*nest9*/nested|] import [|/*module10*/module1|] as m +//// from ..[|/*nest10*/nested|].[|/*module11*/module1|] import module1Func + +{ + const nestedReferences = helper + .getRangesByText() + .get('nested')! + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + const moduleReferences = helper + .getRangesByText() + .get('module1')! 
+ .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + helper.verifyFindAllReferences({ + nest1: { references: nestedReferences }, + nest2: { references: nestedReferences }, + nest3: { references: nestedReferences }, + nest4: { references: nestedReferences }, + nest5: { references: nestedReferences }, + nest6: { references: nestedReferences }, + nest7: { references: nestedReferences }, + nest8: { references: nestedReferences }, + nest9: { references: nestedReferences }, + nest10: { references: nestedReferences }, + module1: { references: moduleReferences }, + module2: { references: moduleReferences }, + module3: { references: moduleReferences }, + module4: { references: moduleReferences }, + module5: { references: moduleReferences }, + module6: { references: moduleReferences }, + module7: { references: moduleReferences }, + module8: { references: moduleReferences }, + module9: { references: moduleReferences }, + module10: { references: moduleReferences }, + module11: { references: moduleReferences }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.duplicated.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.duplicated.fourslash.ts new file mode 100644 index 00000000..6a6a1aeb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.duplicated.fourslash.ts @@ -0,0 +1,59 @@ +/// + +// @filename: module1.py +//// def module1Func(): +//// pass + +// @filename: nest/__init__.py +//// # empty + +// @filename: nest/module1.py +//// def nestModule1Func(): +//// pass + +// @filename: test1.py +//// from [|/*marker1*/nest|] import [|/*marker2*/module1|] +//// +//// from [|/*marker3*/nest|].[|/*marker4*/module1|] import module1Func +//// +//// import [|/*marker5*/nest|].[|/*marker6*/module1|] +//// import [|/*marker7*/module1|] +//// +//// 
[|/*marker8*/nest|].[|/*marker9*/module1|] + +{ + const nestReferences = helper + .getRangesByText() + .get('nest')! + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + const marker7 = helper.getMarkerByName('marker7'); + const module1References = helper + .getRangesByText() + .get('module1')! + .filter((r) => r.marker !== marker7) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + helper.verifyFindAllReferences({ + marker1: { references: nestReferences }, + marker2: { references: module1References }, + marker3: { references: nestReferences }, + marker4: { references: module1References }, + marker5: { references: nestReferences }, + marker6: { references: module1References }, + marker7: { + references: helper + .getRanges() + .filter((r) => r.marker === marker7) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker8: { references: nestReferences }, + marker9: { references: module1References }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.fourslash.ts new file mode 100644 index 00000000..a2202144 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.fourslash.ts @@ -0,0 +1,46 @@ +/// + +// @filename: module1.py +//// def module1Func(): +//// pass + +// @filename: test1.py +//// import [|/*marker1*/module1|] +//// import [|/*marker2*/module1|] as m +//// +//// [|/*marker3*/module1|].module1Func() + +// @filename: test2.py +//// from [|/*marker4*/module1|] import module1Func +//// from .[|/*marker5*/module1|] import module1Func as f + +// @filename: test3.py +//// from . import [|/*marker6*/module1|] +//// from . import [|/*marker7*/module1|] as m + +// @filename: nested/test4.py +//// from .. 
import [|/*marker8*/module1|] +//// from .. import [|/*marker9*/module1|] as m +//// from ..[|/*marker10*/module1|] import module1Func + +{ + const references = helper + .getRangesByText() + .get('module1')! + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + helper.verifyFindAllReferences({ + marker1: { references }, + marker2: { references }, + marker3: { references }, + marker4: { references }, + marker5: { references }, + marker6: { references }, + marker7: { references }, + marker8: { references }, + marker9: { references }, + marker10: { references }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.shadow.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.shadow.fourslash.ts new file mode 100644 index 00000000..6da52c54 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.shadow.fourslash.ts @@ -0,0 +1,95 @@ +/// + +// @filename: module1.py +//// def module1Func(): +//// pass + +// @filename: nest1/__init__.py +//// # empty + +// @filename: nest1/module1.py +//// def nest1Module1Func(): +//// pass + +// @filename: nest1/nest2/__init__.py +//// # empty + +// @filename: nest1/nest2/module1.py +//// def nest2Module1Func(): +//// pass + +// @filename: test1.py +//// from [|/*nest1_1*/nest1|] import [|{| "name":"nest1_module1", "target":"nest1" |}module1|] +//// from [|/*nest1_2*/nest1|].[|/*nest2_1*/nest2|] import [|{| "name":"nest2_module1", "target":"nest2" |}module1|] +//// +//// import [|/*nest1_3*/nest1|] +//// import [|/*nest1_4*/nest1|].[|/*nest2_2*/nest2|] +//// import [|/*nest1_5*/nest1|].[|/*nest2_3*/nest2|].[|{| "name":"nest2_module2", "target":"nest2" |}module1|] +//// +//// from [|/*nest1_6*/nest1|] import [|/*nest2_4*/nest2|] +//// +//// [|{| "name":"module4" |}module1|] +//// [|/*nest1_7*/nest1|] +//// [|/*nest1_8*/nest1|].[|/*nest2_5*/nest2|] 
+//// [|/*nest1_9*/nest1|].[|{| "name":"module5", "target":"none" |}module1|] + +{ + const nest1References = helper + .getRangesByText() + .get('nest1')! + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + const nest2References = helper + .getRangesByText() + .get('nest2')! + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + const nest2ModuleReferences = helper + .getFilteredRanges<{ target?: string }>( + (m, d, t) => t === 'module1' && !!d && (!d.target || d.target === 'nest2') + ) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + helper.verifyFindAllReferences({ + nest1_1: { references: nest1References }, + nest1_2: { references: nest1References }, + nest1_3: { references: nest1References }, + nest1_4: { references: nest1References }, + nest1_5: { references: nest1References }, + nest1_6: { references: nest1References }, + nest1_8: { references: nest1References }, + nest1_9: { references: nest1References }, + nest2_1: { references: nest2References }, + nest2_2: { references: nest2References }, + nest2_3: { references: nest2References }, + nest2_4: { references: nest2References }, + nest2_5: { references: nest2References }, + nest2_module1: { references: nest2ModuleReferences }, + nest2_module2: { references: nest2ModuleReferences }, + nest1_module1: { + references: helper + .getFilteredRanges<{ target?: string }>( + (m, d, t) => t === 'module1' && !!d && (!d.target || d.target === 'nest1') + ) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + module4: { + references: helper + .getFilteredRanges<{ target?: string }>((m, d, t) => t === 'module1' && !!d && d.target !== 'none') + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + module5: { + references: [], + }, + }); +} diff --git 
a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.openFiles.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.openFiles.fourslash.ts new file mode 100644 index 00000000..2ce3364e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.openFiles.fourslash.ts @@ -0,0 +1,37 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.py +// @library: true +//// class [|/*lib*/Test1|]: +//// def M(self, a: '[|Test1|]'): +//// pass + +// @filename: test.py +//// from testLib1 import [|Test1|] +//// +//// a = [|/*marker*/Test1|]() + +// @filename: test2.py +//// from testLib1 import [|Test1|] +//// +//// b = [|Test1|]() + +{ + const ranges = helper.getRanges(); + + const libMarker = helper.getMarkerByName('lib'); + helper.openFile(libMarker.fileName); + + helper.verifyFindAllReferences({ + marker: { + references: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.parameter.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.parameter.fourslash.ts new file mode 100644 index 00000000..27257beb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.parameter.fourslash.ts @@ -0,0 +1,24 @@ +/// + +// @filename: test.py +//// def func([|/*marker*/a|]): +//// print([|a|]) +//// +//// a = 40 +//// func(a) + +// @filename: test2.py +//// a = 50 +//// print(a) + +{ + const ranges = helper.getRanges(); + + helper.verifyFindAllReferences({ + marker: { + references: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git 
a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.class.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.class.fourslash.ts new file mode 100644 index 00000000..d4ad5469 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.class.fourslash.ts @@ -0,0 +1,33 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// class [|Test1|]: +//// def M(self, a): +//// pass + +// @filename: typings/testLib1/__init__.pyi +//// class [|Test1|]: +//// def M(self, a: str): ... + +// @filename: test.py +//// from testLib1 import [|Test1|] +//// +//// a = [|/*marker*/Test1|]() + +// @filename: test2.py +//// from testLib1 import [|Test1|] +//// +//// b = [|Test1|]() + +{ + const ranges = helper.getRanges(); + + helper.verifyFindAllReferences({ + marker: { + references: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.classMethod.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.classMethod.fourslash.ts new file mode 100644 index 00000000..f1d37c08 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.classMethod.fourslash.ts @@ -0,0 +1,37 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// class Test1: +//// def [|M|](self, a): +//// pass + +// @filename: typings/testLib1/__init__.pyi +//// class Test1: +//// def [|M|](self, a: str): ... 
+ +// @filename: test.py +//// from testLib1 import Test1 +//// +//// Test1().[|/*marker*/M|]('') + +// @filename: test2.py +//// from testLib1 import Test1 +//// +//// b = Test1() +//// func(b) +//// +//// def func(t: Test1): +//// t.[|M|]('') + +{ + const ranges = helper.getRanges(); + + helper.verifyFindAllReferences({ + marker: { + references: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.classPropertyReadOnly.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.classPropertyReadOnly.fourslash.ts new file mode 100644 index 00000000..2c329775 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.classPropertyReadOnly.fourslash.ts @@ -0,0 +1,40 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// class Test1: +//// @property +//// def [|P|](self): +//// return '' + +// @filename: typings/testLib1/__init__.pyi +//// class Test1: +//// @property +//// def [|P|](self) -> str: ... 
+ +// @filename: test.py +//// from testLib1 import Test1 +//// +//// a = Test1() +//// val = a.[|/*marker*/P|] + +// @filename: test2.py +//// from testLib1 import Test1 +//// +//// b = Test1() +//// func(b) +//// +//// def func(t: Test1): +//// print(t.[|P|]) + +{ + const ranges = helper.getRanges(); + + helper.verifyFindAllReferences({ + marker: { + references: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.classPropertyReadWrite.fourslash.skip.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.classPropertyReadWrite.fourslash.skip.ts new file mode 100644 index 00000000..771744c5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.classPropertyReadWrite.fourslash.skip.ts @@ -0,0 +1,48 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// class Test1: +//// @property +//// def [|P|](self): +//// return '' +//// # bug: these next 2 missing from results - disabling test until this is fixed, it actually works in the product +//// @[|P|].setter +//// def [|P|](self, a): +//// pass + +// @filename: typings/testLib1/__init__.pyi +//// class Test1: +//// @property +//// def [|P|](self) -> str: ... +//// @[|P|].setter +//// def [|P|](self, a: str): ... 
+ +// @filename: test.py +//// from testLib1 import Test1 +//// +//// a = Test1() +//// a.[|/*marker*/P|] = '' +//// val = a.[|P|] + +// @filename: test2.py +//// from testLib1 import Test1 +//// +//// b = Test1() +//// func(b) +//// +//// def func(t: Test1): +//// t.[|P|] = '' +//// print(t.[|P|]) + +{ + const ranges = helper.getRanges(); + + helper.verifyFindAllReferences({ + marker: { + references: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.function.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.function.fourslash.ts new file mode 100644 index 00000000..4752293c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.sourceAndStub.function.fourslash.ts @@ -0,0 +1,35 @@ +/// + +// @filename: testLib1/__init__.py +// @library: true +//// def [|func1|](a): +//// pass + +// @filename: typings/testLib1/__init__.pyi +//// def [|func1|](a: str): ... 
+ +// @filename: test.py +//// from testLib1 import [|func1|] +//// +//// [|/*marker*/func1|]('') + +// @filename: test2.py +//// import testLib1 +//// +//// def func1(t: str): +//// pass +//// +//// func1('') +//// testLib1.[|func1|]('') + +{ + const ranges = helper.getRanges(); + + helper.verifyFindAllReferences({ + marker: { + references: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.variable.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.variable.fourslash.ts new file mode 100644 index 00000000..d3c62f78 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/findallreferences.variable.fourslash.ts @@ -0,0 +1,24 @@ +/// + +// @filename: test.py +//// def func(a): +//// print(a) +//// +//// [|/*marker*/a|] = 40 +//// func([|a|]) + +// @filename: test2.py +//// a = 50 +//// print(a) + +{ + const ranges = helper.getRanges(); + + helper.verifyFindAllReferences({ + marker: { + references: ranges.map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/highlightreferences.attributes.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/highlightreferences.attributes.fourslash.ts new file mode 100644 index 00000000..62828217 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/highlightreferences.attributes.fourslash.ts @@ -0,0 +1,31 @@ +/// + +// @filename: test.py +//// class DummyClass: +//// def __init__(self): +//// self.[|{| "kind":"write" |}var|] = 1 +//// self.[|{| "kind":"write" |}var|] += 1 +//// +//// def method_1(self): +//// self.[|{| "kind":"write" |}var|] += 2 +//// self.[|{| "kind":"write" |}var|] = None +//// +//// def method_2(self): +//// self.[|{| "kind":"write" 
|}var|] += 3 +//// self.[|{| "kind":"write" |}var|] = None +//// self.[|{| "name":"marker", "kind":"write" |}var|] = 1 +//// +//// x = DummyClass() +//// print(x.[|{| "kind":"read" |}var|]) + +{ + const ranges = helper.getRanges(); + + helper.verifyHighlightReferences({ + marker: { + references: ranges.map((r) => { + return { range: helper.convertPositionRange(r), kind: helper.getDocumentHighlightKind(r.marker) }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.async.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.async.fourslash.ts new file mode 100644 index 00000000..9aa5b904 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.async.fourslash.ts @@ -0,0 +1,11 @@ +/// + +// @filename: test.py +//// async def [|/*marker1*/test|](): +//// pass +//// +//// y = [|/*marker2*/test|] +helper.verifyHover('markdown', { + marker1: '```python\n(function) async def test() -> None\n```', + marker2: '```python\n(function) def test() -> CoroutineType[Any, Any, None]\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.builtinDocstrings.builtinInheritedByBuiltin.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.builtinDocstrings.builtinInheritedByBuiltin.fourslash.ts new file mode 100644 index 00000000..dc1b5808 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.builtinDocstrings.builtinInheritedByBuiltin.fourslash.ts @@ -0,0 +1,24 @@ +/// + +// @filename: typeshed-fallback/stdlib/builtins.pyi +//// class baseClass: +//// def method(self) -> None: ... +//// +//// class derivedClass(baseClass): ... 
+ +// @filename: typeshed-fallback/stdlib/builtins.py +//// class baseClass: +//// def method(self) -> None: +//// """baseClass doc string""" +//// pass +//// +//// class derivedClass(baseClass): +//// pass + +// @filename: test.py +//// x = derivedClass() +//// x.[|/*marker*/method|]() + +helper.verifyHover('markdown', { + marker: '```python\n(method) def method() -> None\n```\n---\nbaseClass doc string', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.builtinDocstrings.builtinInheritedByUserCode.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.builtinDocstrings.builtinInheritedByUserCode.fourslash.ts new file mode 100644 index 00000000..74fd309d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.builtinDocstrings.builtinInheritedByUserCode.fourslash.ts @@ -0,0 +1,22 @@ +/// + +// @filename: typeshed-fallback/stdlib/builtins.pyi +//// class baseClass: +//// def method(self) -> None: ... + +// @filename: typeshed-fallback/stdlib/builtins.py +//// class baseClass: +//// def method(self) -> None: +//// """baseClass doc string""" +//// pass + +// @filename: test.py +//// class derivedClass(baseClass): +//// pass +//// +//// x = derivedClass() +//// x.[|/*marker*/method|]() + +helper.verifyHover('markdown', { + marker: '```python\n(method) def method() -> None\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.builtinDocstrings.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.builtinDocstrings.fourslash.ts new file mode 100644 index 00000000..5a11188e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.builtinDocstrings.fourslash.ts @@ -0,0 +1,64 @@ +/// + +// @filename: docstrings.py +//// [|/*object*/object|] +//// [|/*objectInit*/object|]() +//// object().[|/*objectDir*/__dir__|] +//// +//// class A: ... 
+//// +//// [|/*a*/A|] +//// [|/*aInit*/A|]() +//// A().[|/*aDir*/__dir__|] +//// +//// class B: +//// """This is the class doc for B.""" +//// def __init__(self): +//// """This is the __init__ doc for B.""" +//// +//// [|/*b*/B|] +//// [|/*bInit*/B|]() +//// +//// class C: +//// """This is the class doc for C.""" +//// def __init__(self): +//// pass +//// +//// [|/*c*/C|] +//// [|/*cInit*/C|]() +//// +//// class D: +//// def __init__(self): +//// """This is the __init__ doc for D.""" +//// pass +//// +//// [|/*d*/D|] +//// [|/*dInit*/D|]() + +// @filename: typeshed-fallback/stdlib/builtins.py +//// class object(): +//// """This is the class doc for object.""" +//// def __init__(self): +//// """This is the __init__ doc for object.""" +//// pass +//// +//// def __dir__(self): +//// """This is the __dir__ doc for object.""" +//// pass + +{ + helper.verifyHover('plaintext', { + object: '(class) object\n\nThis is the class doc for object.', + objectInit: 'class object()\n\nThis is the __init__ doc for object.', + objectDir: '(method) def __dir__() -> Iterable[str]\n\nThis is the __dir__ doc for object.', + a: '(class) A', + aInit: 'class A()', + aDir: '(method) def __dir__() -> Iterable[str]', + b: '(class) B\n\nThis is the class doc for B.', + bInit: 'class B()\n\nThis is the __init__ doc for B.', + c: '(class) C\n\nThis is the class doc for C.', + cInit: 'class C()\n\nThis is the class doc for C.', + d: '(class) D', + dInit: 'class D()\n\nThis is the __init__ doc for D.', + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.class.docString.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.class.docString.fourslash.ts new file mode 100644 index 00000000..d6cdfa68 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.class.docString.fourslash.ts @@ -0,0 +1,33 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: 
test.py +//// import lib +//// +//// lib.[|/*marker1*/A|]() + +// @filename: lib/__init__.pyi +// @library: true +//// class A(): ... + +// @filename: lib/__init__.py +// @library: true +//// from ._lib import A + +// @filename: lib/_lib.py +// @library: true +//// from ._type import A as mod_A +//// A = mod_A +//// "doc string for A" + +// @filename: lib/_type.py +// @library: true +//// class A(): pass + +helper.verifyHover('markdown', { + marker1: '```python\nclass A()\n```\n---\ndoc string for A', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.classNoInit.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.classNoInit.fourslash.ts new file mode 100644 index 00000000..172ee711 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.classNoInit.fourslash.ts @@ -0,0 +1,14 @@ +/// + +// @filename: test.py +//// class Something: +//// '''This is a test.''' +//// +//// def __init__(self, text: str) -> None: +//// self.text = text +//// +//// [|/*marker1*/Something|]() + +helper.verifyHover('markdown', { + marker1: '```python\nclass Something(text: str)\n```\n---\nThis is a test.', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.fourslash.ts new file mode 100644 index 00000000..e41be4ec --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.fourslash.ts @@ -0,0 +1,90 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: module1.py +//// '''module1 docs''' +//// +//// def func1(): +//// '''func1 docs''' +//// return True +//// +//// class A: +//// '''A docs''' +//// def method1(self): +//// '''A.method1 docs''' +//// return True +//// class Inner: +//// '''A.Inner docs''' +//// def method1(self): +//// '''A.Inner.method1 docs''' +//// 
return True +//// +//// class B: +//// '''B docs''' +//// def __init__(self): +//// '''B init docs''' +//// pass + +// @filename: module1.pyi +//// def func1() -> bool: ... +//// +//// class A: +//// def method1(self) -> bool: ... +//// class Inner: +//// def method1(self) -> bool: ... +//// +//// class B: +//// def __init__(self): ... + +// @filename: module2/__init__.py +// @library: true +//// '''module2 docs''' +//// +//// from ._internal import func2 + +// @filename: module2/_internal.py +// @library: true +//// from ._more_internal import func2 + +// @filename: module2/_more_internal.py +// @library: true +//// def func2(): +//// '''func2 docs''' +//// return True + +// @filename: typings/module2.pyi +//// def func2() -> bool: ... + +// @filename: test.py +//// import [|/*module1_docs*/module1|] as m1 +//// import [|/*module2_docs*/module2|] as m2 +//// +//// print([|/*m1_docs*/m1|].[|/*func1_docs*/func1|]()) +//// +//// a = m1.[|/*a_docs*/A|]() +//// print(a.[|/*method1_docs*/method1|]()) +//// +//// b = m1.[|/*b_docs*/B|]() +//// +//// print([|/*m2_docs*/m2|].[|/*func2_docs*/func2|]()) +//// +//// inner = m1.A.[|/*a_inner_docs*/Inner|]() +//// print(inner.[|/*inner_method1_docs*/method1|]()) + +helper.verifyHover('markdown', { + a_docs: '```python\nclass A()\n```\n---\nA docs', + b_docs: '```python\nclass B()\n```\n---\nB init docs', + a_inner_docs: '```python\nclass Inner()\n```\n---\nA.Inner docs', + func1_docs: '```python\n(function) def func1() -> bool\n```\n---\nfunc1 docs', + func2_docs: '```python\n(function) def func2() -> bool\n```\n---\nfunc2 docs', + inner_method1_docs: '```python\n(method) def method1() -> bool\n```\n---\nA.Inner.method1 docs', + method1_docs: '```python\n(method) def method1() -> bool\n```\n---\nA.method1 docs', + module1_docs: '```python\n(module) module1\n```\n---\nmodule1 docs', + module2_docs: '```python\n(module) module2\n```\n---\nmodule2 docs', + m1_docs: '```python\n(module) m1\n```\n---\nmodule1 docs', + m2_docs: 
'```python\n(module) m2\n```\n---\nmodule2 docs', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.pkg-vs-module1.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.pkg-vs-module1.fourslash.ts new file mode 100644 index 00000000..2a46e058 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.pkg-vs-module1.fourslash.ts @@ -0,0 +1,26 @@ +/// + +// @filename: package1/__init__.py +// @library: true +//// from .subpackage import func1 + +// @filename: package1/subpackage.py +// @library: true +//// def func1(): +//// '''func1 docs''' +//// return True + +// @filename: typings/package1/__init__.pyi +//// from .subpackage import func1 as func1 + +// @filename: typings/package1/subpackage/__init__.pyi +//// def func1() -> bool: ... + +// @filename: test.py +//// from package1 import func1 +//// +//// print([|/*func1_docs*/func1|]()) + +helper.verifyHover('markdown', { + func1_docs: '```python\n(function) def func1() -> bool\n```\n---\nfunc1 docs', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.pkg-vs-module2.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.pkg-vs-module2.fourslash.ts new file mode 100644 index 00000000..495e3ef2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.pkg-vs-module2.fourslash.ts @@ -0,0 +1,29 @@ +/// + +// @filename: package1/__init__.py +// @library: true +//// '''package1 docs''' +//// from .subpackage import func1 + +// @filename: package1/subpackage/__init__.py +// @library: true +//// '''subpackage docs''' +//// def func1(): +//// '''func1 docs''' +//// return True + +// @filename: typings/package1/__init__.pyi +//// from .subpackage import func1 as func1 + +// @filename: typings/package1/subpackage.pyi +//// def func1() -> bool: ... 
+ +// @filename: test.py +//// from [|/*package_docs*/package1|] import func1 +//// +//// print([|/*func1_docs*/func1|]()) + +helper.verifyHover('markdown', { + func1_docs: '```python\n(function) def func1() -> bool\n```\n---\nfunc1 docs', + package_docs: '```python\n(module) package1\n```\n---\npackage1 docs', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.relativeImport1.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.relativeImport1.fourslash.ts new file mode 100644 index 00000000..1263c9ee --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.relativeImport1.fourslash.ts @@ -0,0 +1,23 @@ +/// + +// @filename: module1.py +//// '''module1 docs''' +//// +//// def func1(): +//// '''func1 docs''' +//// return True +//// + +// @filename: module1.pyi +//// def func1() -> bool: ... +//// + +// @filename: test.py +//// from . import module1 +//// +//// print([|/*module1_docs*/module1|].[|/*func1_docs*/func1|]()) + +helper.verifyHover('markdown', { + func1_docs: '```python\n(function) def func1() -> bool\n```\n---\nfunc1 docs', + module1_docs: '```python\n(module) module1\n```\n---\nmodule1 docs', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.relativeImport2.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.relativeImport2.fourslash.ts new file mode 100644 index 00000000..aa8ce2d6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.relativeImport2.fourslash.ts @@ -0,0 +1,23 @@ +/// + +// @filename: module1.py +//// '''module1 docs''' +//// +//// def func1(): +//// '''func1 docs''' +//// return True +//// + +// @filename: module1.pyi +//// def func1() -> bool: ... 
+//// + +// @filename: test.py +//// from .[|/*module_docs*/module1|] import func1 +//// +//// print([|/*func1_docs*/func1|]()) + +helper.verifyHover('markdown', { + func1_docs: '```python\n(function) def func1() -> bool\n```\n---\nfunc1 docs', + module_docs: '```python\n(module) module1\n```\n---\nmodule1 docs', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.relativeImport3.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.relativeImport3.fourslash.ts new file mode 100644 index 00000000..718c9361 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.relativeImport3.fourslash.ts @@ -0,0 +1,53 @@ +/// + +// @filename: dj/__init__.py +// @library: true +//// '''dj doc string''' +//// # empty + +// @filename: dj/db/__init__.py +// @library: true +//// '''db doc string''' +//// # empty + +// @filename: dj/db/models/__init__.py +// @library: true +//// '''models doc string''' +//// from dj.db.models.base import Model + +// @filename: dj/db/models/base.py +// @library: true +//// class Model: +//// def clean_fields(self): +//// '''clean_fields docs''' +//// pass + +// @filename: typings/dj/__init__.pyi +//// # empty + +// @filename: typings/dj/db/__init__.pyi +//// # empty + +// @filename: typings/dj/db/models/__init__.pyi +//// '''models doc string''' +//// from .base import Model as Model + +// @filename: typings/dj/db/models/base.pyi +//// class Model: +//// def clean_fields(self) -> None: ... 
+ +// @filename: test.py +//// from [|/*djmarker*/dj|].[|/*dbmarker*/db|] import [|/*modelsmarker*/models|] +//// +//// class Person(models.Model): +//// pass +//// +//// p = Person() +//// p.[|/*marker*/clean_fields|]() + +helper.verifyHover('markdown', { + marker: '```python\n(method) def clean_fields() -> None\n```\n---\nclean\\_fields docs', + djmarker: '```python\n(module) dj\n```\n---\ndj doc string', + dbmarker: '```python\n(module) db\n```\n---\ndb doc string', + modelsmarker: '```python\n(module) models\n```\n---\nmodels doc string', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.stringFormat.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.stringFormat.fourslash.ts new file mode 100644 index 00000000..dfabeefe --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.stringFormat.fourslash.ts @@ -0,0 +1,52 @@ +/// + +// @filename: test.py + +//// # empty string +//// [|/*marker1*/emptySingleQuotes|]= '' +//// [|/*marker2*/emptyDoubleQuotes|]= "" +//// [|/*marker3*/emptyTripleQuotes|]= '''''' +//// [|/*marker4*/emptyTripleDoubleQuotes|]= """""" + +//// # simple string +//// [|/*marker5*/simpleSingleQuotes|]= 'a' +//// [|/*marker6*/simpleDoubleQuotes|]= "b" +//// [|/*marker7*/simpleTripleQuotes|]= '''foo\nbar''' +//// [|/*marker8*/simpleTripleDoubleQuotes|]= """foo\nbar""" + +//// # escaped quotes +//// [|/*marker9*/singleQuotesWithEscapedQuote|]= '\'' +//// [|/*marker10*/doubleQuotesWithEscapedQuote|]= "\"" +//// [|/*marker11*/tripleQuotesWithEscapedQuote|]= '''\n\'\'\'''' +//// [|/*marker12*/tripleDoubleQuotesWithEscapedQuote|]= """\n\"\"\"""" + +//// # mixing quotes +//// [|/*marker13*/singleQuotesWithDouble|]= '"' +//// [|/*marker14*/singleQuotesWithTripleDouble|]= '"""' +//// [|/*marker15*/singleTripleQuoteWithSingleAndDoubleQuote|]= ''' '"' ''' + +//// # multiline (truncated) +//// const [|/*marker16*/html|] = '''\nTitle''' 
+//// const [|/*marker17*/htmlWithSingleQuotes|] = '''\nTitle's''' +//// const [|/*marker18*/htmlWithTripleEscapedQuotes|] = '''\nTitle\'\'\'s''' + +helper.verifyHover('markdown', { + marker1: `\`\`\`python\n(variable) emptySingleQuotes: Literal['']\n\`\`\``, + marker2: `\`\`\`python\n(variable) emptyDoubleQuotes: Literal['']\n\`\`\``, + marker3: `\`\`\`python\n(variable) emptyTripleQuotes: Literal['']\n\`\`\``, + marker4: `\`\`\`python\n(variable) emptyTripleDoubleQuotes: Literal['']\n\`\`\``, + marker5: `\`\`\`python\n(variable) simpleSingleQuotes: Literal['a']\n\`\`\``, + marker6: `\`\`\`python\n(variable) simpleDoubleQuotes: Literal['b']\n\`\`\``, + marker7: `\`\`\`python\n(variable) simpleTripleQuotes: Literal['foo\\nbar']\n\`\`\``, + marker8: `\`\`\`python\n(variable) simpleTripleDoubleQuotes: Literal['foo\\nbar']\n\`\`\``, + marker9: `\`\`\`python\n(variable) singleQuotesWithEscapedQuote: Literal['\\\'']\n\`\`\``, + marker10: `\`\`\`python\n(variable) doubleQuotesWithEscapedQuote: Literal['"']\n\`\`\``, + marker11: `\`\`\`python\n(variable) tripleQuotesWithEscapedQuote: Literal['\\n\\'\\'\\'']\n\`\`\``, + marker12: `\`\`\`python\n(variable) tripleDoubleQuotesWithEscapedQuote: Literal['\\n"""']\n\`\`\``, + marker13: `\`\`\`python\n(variable) singleQuotesWithDouble: Literal['"']\n\`\`\``, + marker14: `\`\`\`python\n(variable) singleQuotesWithTripleDouble: Literal['"""']\n\`\`\``, + marker15: `\`\`\`python\n(variable) singleTripleQuoteWithSingleAndDoubleQuote: Literal[' \\'"\\' ']\n\`\`\``, + marker16: `\`\`\`python\n(variable) html: Literal['\\nTitle…']\n\`\`\``, + marker17: `\`\`\`python\n(variable) htmlWithSingleQuotes: Literal['<!DOCTYPE html><html lang="en">\\n<head><title>Title…']\n\`\`\``, + marker18: `\`\`\`python\n(variable) htmlWithTripleEscapedQuotes: Literal['<!DOCTYPE html><html lang="en">\\n<head><title>Title…']\n\`\`\``, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.stubs-package.fourslash.ts 
b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.stubs-package.fourslash.ts new file mode 100644 index 00000000..bc71e650 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.stubs-package.fourslash.ts @@ -0,0 +1,29 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: package1-stubs/__init__.pyi +// @library: true +//// from .api import func1 as func1 + +// @filename: package1-stubs/api.pyi +// @library: true +//// def func1() -> bool: ... + +// @filename: package1/__init__.py +// @library: true +//// from .api import func1 as func1 + +// @filename: package1/api.py +// @library: true +//// def func1(): +//// '''func1 docs''' +//// return True + +// @filename: test.py +//// import package1 +//// +//// print(package1.[|/*marker*/func1|]()) + +helper.verifyHover('markdown', { + marker: '```python\n(function) def func1() -> bool\n```\n---\nfunc1 docs', + marker2: '```python\n(function) def func2() -> bool\n```\n---\nfunc2 docs', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.typeshed.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.typeshed.fourslash.ts new file mode 100644 index 00000000..eae24f8d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docFromSrc.typeshed.fourslash.ts @@ -0,0 +1,20 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: requests/__init__.pyi +// @library: true +//// from .api import head as head + +// @filename: requests/api.pyi +// @library: true +//// def head(url, **kwargs) -> None: +//// r"""Sends a <HEAD> request.""" +//// pass + +// @filename: test.py +//// import requests +//// +//// print(requests.[|/*marker*/head|]('')) + +helper.verifyHover('markdown', { + marker: '```python\n(function) def head(url: Unknown, **kwargs: Unknown) -> None\n```\n---\nSends a <HEAD> request.', +}); diff --git 
a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.alias.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.alias.fourslash.ts new file mode 100644 index 00000000..5d7c78ee --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.alias.fourslash.ts @@ -0,0 +1,40 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// class ClassA: +//// ''' ClassA doc string ''' +//// pass +//// +//// [|/*marker1*/AliasA|] = ClassA +//// ''' AliasA doc string ''' +//// +//// def func1(x: [|/*marker2*/AliasA|]): +//// pass +//// +//// class ClassB: +//// pass +//// +//// [|/*marker3*/AliasB|] = ClassB +//// ''' AliasB alone doc string ''' +//// +//// class ClassC: +//// """ ClassC doc string """ +//// pass +//// +//// [|/*marker4*/AliasC|] = ClassC +//// ''' AliasC doc string ''' +//// +//// class ClassD: +//// pass +//// +//// [|/*marker5*/AliasD|] = ClassD +//// ''' AliasD alone doc string ''' +//// + +helper.verifyHover('markdown', { + marker1: '```python\n(type) AliasA = ClassA\n```\n---\nAliasA doc string\n\nClassA doc string', + marker2: '```python\n(type) AliasA = ClassA\n```\n---\nAliasA doc string\n\nClassA doc string', + marker3: '```python\n(type) AliasB = ClassB\n```\n---\nAliasB alone doc string', + marker4: '```python\n(type) AliasC = ClassC\n```\n---\nAliasC doc string\n\nClassC doc string', + marker5: '```python\n(type) AliasD = ClassD\n```\n---\nAliasD alone doc string', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.links.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.links.fourslash.ts new file mode 100644 index 00000000..bcd49d73 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.links.fourslash.ts @@ -0,0 +1,12 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// def 
func(): +//// '''something [link](http://microsoft.com) something''' +//// pass +//// +//// [|/*marker1*/func|]() + +helper.verifyHover('markdown', { + marker1: '```python\n(function) def func() -> None\n```\n---\nsomething [link](http://microsoft.com) something', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.overloads.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.overloads.fourslash.ts new file mode 100644 index 00000000..c2994037 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.overloads.fourslash.ts @@ -0,0 +1,41 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// import mylib +//// +//// mylib.[|/*marker1*/dontwork|] +//// mylib.[|/*marker2*/works|] + +// @filename: mylib/__init__.pyi +//// from typing import overload +//// +//// class RandomState: +//// @overload +//// def dontwork(self, x:int) -> None: ... +//// @overload +//// def dontwork(self, x:float) -> None: ... +//// def works(self) -> None: ... +//// +//// _rand = RandomState +//// +//// dontwork = _rand.dontwork +//// works = _rand.works + +// @filename: mylib/__init__.py +//// from typing import Union, overload +//// +//// class RandomState: +//// @overload +//// def dontwork(self, x:int) -> None: ... +//// def dontwork(self, x:Union[int, float]) -> None: +//// 'dontwork docstring' +//// ... +//// def works(self) -> None: +//// 'works docstring' +//// ... 
+ +helper.verifyHover('markdown', { + marker1: + '```python\n(variable)\ndef dontwork(self: _rand, x: int) -> None: ...\ndef dontwork(self: _rand, x: float) -> None: ...\n```\n---\ndontwork docstring', + marker2: '```python\n(variable) def works(self: _rand) -> None\n```\n---\nworks docstring', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.parameter.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.parameter.fourslash.ts new file mode 100644 index 00000000..b5b1aa3c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.parameter.fourslash.ts @@ -0,0 +1,57 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// def foo1([|/*marker1*/bar|]: str) -> None: +//// """ +//// Foo1 does something +//// +//// @param bar: The bar is in town +//// """ +//// baz = [|/*marker2*/bar|] +//// ... +//// +//// def foo2([|/*marker3*/bar|]: str) -> None: +//// """ +//// Foo2 does something +//// +//// :param bar: The bar is in town +//// """ +//// baz = [|/*marker4*/bar|] +//// ... +//// +//// def foo3([|/*marker5*/bar|]: str, [|/*marker6*/bar2|]: str) -> None: +//// """ +//// Foo3 does something +//// +//// Args: +//// bar: The bar is in town +//// bar2 The bar is 2 far +//// """ +//// baz = [|/*marker7*/bar|] +//// [|/*marker8*/bar|] = "reassign" +//// ... +//// +//// def foo4([|/*marker9*/bar|]: str, [|/*marker10*/bar2|]: str) -> None: +//// """ +//// Foo4 does something +//// +//// Args: +//// bar (str): The bar is in town +//// bar2 str: The bar is 2 far +//// """ +//// baz = [|/*marker11*/bar|] +//// ... 
+ +helper.verifyHover('markdown', { + marker1: '```python\n(parameter) bar: str\n```\nbar: The bar is in town', + marker2: '```python\n(parameter) bar: str\n```\nbar: The bar is in town', + marker3: '```python\n(parameter) bar: str\n```\nbar: The bar is in town', + marker4: '```python\n(parameter) bar: str\n```\nbar: The bar is in town', + marker5: '```python\n(parameter) bar: str\n```\nbar: The bar is in town', + marker6: '```python\n(parameter) bar2: str\n```', + marker7: '```python\n(parameter) bar: str\n```\nbar: The bar is in town', + marker8: "```python\n(parameter) bar: Literal['reassign']\n```\nbar: The bar is in town", + marker9: '```python\n(parameter) bar: str\n```\nbar (str): The bar is in town', + marker10: '```python\n(parameter) bar2: str\n```', + marker11: '```python\n(parameter) bar: str\n```\nbar (str): The bar is in town', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.split.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.split.fourslash.ts new file mode 100644 index 00000000..50257511 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.docstring.split.fourslash.ts @@ -0,0 +1,35 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// def func(): +//// '''This docstring ''' '''is split.''' +//// pass +//// +//// def func2(): +//// f'''This docstring ''' '''is split.''' +//// pass +//// +//// def func3(): +//// '''This docstring ''' f'''is split.''' +//// pass +//// +//// def func4(a:int, b:int, c:int): +//// """ +//// Args: +//// a (int): description +//// b (int|bool): 한국어 +//// c (int): description +//// """ +//// +//// [|/*marker1*/func|]() +//// [|/*marker2*/func2|]() +//// [|/*marker3*/func3|]() +//// [|/*marker4*/func4|]() + +helper.verifyHover('markdown', { + marker1: '```python\n(function) def func() -> None\n```\n---\nThis docstring is split.', + marker2: '```python\n(function) def func2() 
-> None\n```', + marker3: '```python\n(function) def func3() -> None\n```', + marker4: + '```python\n(function) def func4(a: int, b: int, c: int) -> None\n```\n---\nArgs: \n    a (int): description \n    b (int|bool): 한국어 \n    c (int): description', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.basic.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.basic.fourslash.ts new file mode 100644 index 00000000..18439e85 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.basic.fourslash.ts @@ -0,0 +1,13 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// class Foo: +//// def __new__(cls, name:str): +//// '''doc for __new__.''' +//// return super().__new__(cls) +//// +//// x = [|/*marker1*/Foo|]() + +helper.verifyHover('markdown', { + marker1: '```python\nclass Foo(name: str)\n```\n---\ndoc for \\_\\_new\\_\\_.', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.inheritance.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.inheritance.fourslash.ts new file mode 100644 index 00000000..845cc46e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.inheritance.fourslash.ts @@ -0,0 +1,16 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// class Parent: +//// def __init__(self, *args: Any, **kwargs: Any): +//// pass +//// +//// class Child(Parent): +//// def __new__(cls, name: str): +//// return super().__new__(cls) + +//// x = [|/*marker1*/Child|]() + +helper.verifyHover('markdown', { + marker1: '```python\nclass Child(name: str)\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.inheritance2.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.inheritance2.fourslash.ts new 
file mode 100644 index 00000000..d37c4818 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.inheritance2.fourslash.ts @@ -0,0 +1,22 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// class Parent: +//// def __init__(self, *args: Any, **kwargs: Any): +//// pass +//// +//// def __new__(cls, *args: Any, **kwargs: Any): +//// return super().__new__(cls) +//// +//// class Child(Parent): +//// def __new__(cls, name:str): +//// return super().__new__(cls, name) +//// +//// class GrandChild(Child): +//// pass + +//// x = [|/*marker1*/GrandChild|]() + +helper.verifyHover('markdown', { + marker1: '```python\nclass GrandChild(name: str)\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.overloads.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.overloads.fourslash.ts new file mode 100644 index 00000000..e226d26e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.overloads.fourslash.ts @@ -0,0 +1,17 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// from typing import overload +//// class Foo: +//// @overload +//// def __new__(cls, name:str, last:str) -> "Foo": +//// return super().__new__(cls) +//// @overload +//// def __new__(cls, age:int, height:float) -> "Foo": +//// return super().__new__(cls) +//// +//// x = [|/*marker1*/Foo|]() + +helper.verifyHover('markdown', { + marker1: '```python\nclass Foo(name: str, last: str): ...\n\nclass Foo(age: int, height: float): ...\n\n\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.withInit.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.withInit.fourslash.ts new file mode 100644 index 00000000..56e13eea --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.dunderNew.withInit.fourslash.ts @@ -0,0 +1,15 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// class Foo: +//// def __init__(self, *args: Any, **kwargs: Any): +//// pass +//// def __new__(cls, name:str): +//// '''doc for __new__.''' +//// return super().__new__(cls) +//// +//// x = [|/*marker1*/Foo|]() + +helper.verifyHover('markdown', { + marker1: '```python\nclass Foo(name: str)\n```\n---\ndoc for \\_\\_new\\_\\_.', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.formatted.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.formatted.fourslash.ts new file mode 100644 index 00000000..45b75099 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.formatted.fourslash.ts @@ -0,0 +1,46 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "functionSignatureDisplay": "formatted" +//// } + +// @filename: test.py +//// from typing import overload +//// class A: +//// def __init__(self, x:int, y:int): +//// pass +//// +//// class B: +//// @overload +//// def __init__(self): +//// pass +//// @overload +//// def __init__(self, x:int, y:int): +//// pass +//// +//// a = [|/*a_constructor*/A|](1,2) +//// +//// b = [|/*b_constructorOverloads*/B|](1,2) +//// def [|/*paramFunc0*/foo|](): +//// pass +//// def [|/*paramFunc1*/foo1|](x:int): +//// pass +//// def [|/*paramFunc2*/foo2|](x:int, y:int): +//// pass +//// +//// @overload +//// def bar() -> int: ... +//// @overload +//// def bar(x:str, y:int) -> int: ... 
+//// +//// [|/*overload*/bar|] + +helper.verifyHover('markdown', { + a_constructor: '```python\nclass A(\n x: int,\n y: int\n)\n```', + b_constructorOverloads: '```python\nclass B(\n x: int,\n y: int\n)\n```', + paramFunc0: '```python\n(function) def foo() -> None\n```', + paramFunc1: '```python\n(function) def foo1(x: int) -> None\n```', + paramFunc2: '```python\n(function) def foo2(\n x: int,\n y: int\n) -> None\n```', + overload: '```python\n(function)\ndef bar() -> int: ...\ndef bar(\n x: str,\n y: int\n) -> int: ...\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.fourslash.ts new file mode 100644 index 00000000..12ae1327 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.fourslash.ts @@ -0,0 +1,20 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// class [|/*marker1*/Validator|]: +//// '''The validator class +//// +//// .. versionadded:: 2.0 +//// This directive does not show in hover. 
+//// ''' +//// def is_valid(self, text: str) -> bool: +//// '''Checks if the input string is valid.''' +//// return true +//// +//// validator = Validator() +//// validator.[|/*marker2*/is_valid|]('hello') + +helper.verifyHover('markdown', { + marker1: '```python\n(class) Validator\n```\n---\nThe validator class', + marker2: '```python\n(method) def is_valid(text: str) -> bool\n```\n---\nChecks if the input string is valid.', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.import.django.view.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.import.django.view.fourslash.ts new file mode 100644 index 00000000..8f8274aa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.import.django.view.fourslash.ts @@ -0,0 +1,27 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// from django.view import generic +//// generic.[|/*marker*/TemplateView|] + +// @filename: django/__init__.py +//// '''documentation for library''' + +// @filename: django/view/__init__.py +//// from .generic.base import View +//// __all__ = ['View'] + +// @filename: django/view/generic/__init__.py +//// from .base import (View, TemplateView) +//// __all__ = ['View', 'TemplateView'] + +// @filename: django/view/generic/base.py +//// class View(): +//// pass +//// +//// class TemplateView(): +//// pass + +helper.verifyHover('markdown', { + marker: '```python\n(class) TemplateView\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.import.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.import.fourslash.ts new file mode 100644 index 00000000..76697fe7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.import.fourslash.ts @@ -0,0 +1,11 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// import [|/*marker*/library|] + +// @filename: 
library/__init__.py +//// '''documentation for library''' + +helper.verifyHover('markdown', { + marker: '```python\n(module) library\n```\n---\ndocumentation for library', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inferred.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inferred.fourslash.ts new file mode 100644 index 00000000..7501e799 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inferred.fourslash.ts @@ -0,0 +1,23 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// import third_party_module # type: ignore +//// +//// def return_one(): +//// one = third_party_module.one() +//// if one is None: +//// return +//// return one +//// +//// def return_two() -> int: +//// [|on/*marker1*/e|]: int | None = return_one() +//// assert one is not None +//// two = [|on/*marker2*/e|] + 1 +//// return two +//// +//// [|tw/*marker3*/o|] = return_two() +helper.verifyHover('markdown', { + marker1: '```python\n(variable) one: Unknown | None\n```', + marker2: '```python\n(variable) one: Unknown\n```', + marker3: '```python\n(variable) two: int\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.docFromSrc.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.docFromSrc.fourslash.ts new file mode 100644 index 00000000..92e1c80d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.docFromSrc.fourslash.ts @@ -0,0 +1,63 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: module1.py +//// '''module1 docs''' +//// +//// def func1(): +//// '''func1 docs''' +//// return True +//// +//// class A: +//// '''A docs''' +//// def method1(self) -> bool: +//// '''A.method1 docs''' +//// return True +//// +//// class B: +//// '''B docs''' +//// def __init__(self): +//// '''B init docs''' +//// pass + +// 
@filename: testBasicInheritance.py +//// import module1 +//// +//// class ChildA(module1.A): +//// def method1(self) -> bool: +//// return True +//// +//// class ChildB(module1.B): +//// def __init__(self): +//// pass +//// +//// childA =[|/*child_a_docs*/ChildA|]() +//// childA.[|/*child_a_method1_docs*/method1|]() +//// +//// childB =[|/*child_b_docs*/ChildB|]() +//// childB.[|/*child_b_init_docs*/__init__|]() + +// @filename: testMultiLevelInheritance.py +//// class Base: +//// """Base docs""" +//// def method(self): +//// """Base.method docs""" +//// +//// class Derived1(Base): +//// def method(self): +//// pass +//// +//// class Derived2(Derived1): +//// def method(self): +//// pass +//// +//// d2 = [|/*secondDerived_docs*/Derived2|]() +//// d2.[|/*secondDerived_method_docs*/method|]() + +helper.verifyHover('markdown', { + child_a_method1_docs: '```python\n(method) def method1() -> bool\n```\n---\nA.method1 docs', + child_a_docs: '```python\nclass ChildA()\n```', + child_b_docs: '```python\nclass ChildB()\n```\n---\nB init docs', + child_b_init_docs: '```python\n(method) def __init__() -> None\n```\n---\nB init docs', + secondDerived_docs: '```python\nclass Derived2()\n```', + secondDerived_method_docs: '```python\n(method) def method() -> None\n```\n---\nBase.method docs', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.docFromSrcWithStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.docFromSrcWithStub.fourslash.ts new file mode 100644 index 00000000..14a60a87 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.docFromSrcWithStub.fourslash.ts @@ -0,0 +1,54 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: module1.py +//// class A: +//// '''A docs''' +//// def method1(self): +//// '''A.method1 docs''' +//// return True +//// class Inner: +//// '''A.Inner docs''' +//// def method1(self): +//// 
'''A.Inner.method1 docs''' +//// return True +//// +//// class NoFields: +//// '''NoFields docs''' + +// @filename: module1.pyi +//// class A: +//// def method1(self) -> bool:... +//// class Inner: +//// def method1(self) -> bool: ... +//// +//// class NoFields:... + +// @filename: testInheritedDocsInSource.py +//// import module1 +//// class ChildA(module1.A): +//// def method1(self) -> bool: +//// return True +//// class ChildInner(module1.A.Inner): +//// def method1(self) -> bool: +//// return True +//// +//// childA =[|/*child_a_docs*/ChildA|]() +//// childA.[|/*child_a_method1_docs*/method1|]() +//// +//// inner =ChildA.[|/*child_a_inner_docs*/ChildInner|]() +//// inner.[|/*child_a_inner_method1_docs*/method1|]() + +// @filename: testInheritedClassNoFieldsDocsInSource.py +//// import module1 +//// class ChildB(module1.NoFields): +//// pass +//// +//// childB =[|/*child_b_docs*/ChildB|]() + +helper.verifyHover('markdown', { + child_a_method1_docs: '```python\n(method) def method1() -> bool\n```\n---\nA.method1 docs', + child_a_docs: '```python\nclass ChildA()\n```', + child_a_inner_docs: '```python\nclass ChildInner()\n```', + child_a_inner_method1_docs: '```python\n(method) def method1() -> bool\n```\n---\nA.Inner.method1 docs', + child_b_docs: '```python\nclass ChildB()\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.docFromStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.docFromStub.fourslash.ts new file mode 100644 index 00000000..ae7a355f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.docFromStub.fourslash.ts @@ -0,0 +1,42 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: module1.py +//// class A: +//// def method1(self) -> bool: +//// return True +//// class Inner: +//// def method1(self): +//// return True + +// @filename: module1.pyi +//// class A: +//// '''A docs''' +//// def 
method1(self) -> bool: +//// '''A.method1 docs''' +//// ... +//// class Inner: +//// '''A.Inner docs''' +//// def method1(self) -> bool: +//// '''A.Inner.method1 docs''' +//// ... + +// @filename: testInheritedDocsInStubs.py +//// import module1 +//// class ChildA(module1.A): +//// def method1(self) -> bool: +//// return True +//// class ChildInner(module1.A.Inner): +//// def method1(self) -> bool: +//// return True +//// +//// childA =[|/*child_a_docs*/ChildA|]() +//// childA.[|/*child_a_method1_docs*/method1|]() +//// inner =ChildA.[|/*child_a_inner_docs*/ChildInner|]() +//// inner.[|/*child_a_inner_method1_docs*/method1|]() + +helper.verifyHover('markdown', { + child_a_method1_docs: '```python\n(method) def method1() -> bool\n```\n---\nA.method1 docs', + child_a_docs: '```python\nclass ChildA()\n```', + child_a_inner_docs: '```python\nclass ChildInner()\n```', + child_a_inner_method1_docs: '```python\n(method) def method1() -> bool\n```\n---\nA.Inner.method1 docs', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.overload.docFromSrcWithStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.overload.docFromSrcWithStub.fourslash.ts new file mode 100644 index 00000000..127a0c70 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.overload.docFromSrcWithStub.fourslash.ts @@ -0,0 +1,47 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: overloads_client.py +//// from typing import overload +//// import moduleA +//// +//// class ChildA(moduleA.A): +//// @overload +//// def func(self, x: str) -> str: +//// pass +//// +//// @overload +//// def func(self, x: int) -> int: +//// pass +//// +//// +//// ChildA.[|/*child_a_func_doc*/func|] +//// a = ChildA() +//// a.[|/*child_a_instance_func_doc*/func|] + +// @filename: typings/moduleA.pyi +//// from typing import overload +//// class A: +//// @overload +//// def func(self, x: str) 
-> str: ... +//// +//// @overload +//// def func(self, x: int) -> int: ... + +// @filename: typings/moduleA.py +//// from typing import overload +//// class A: +//// @overload +//// def func(self, x: str) -> str: +//// pass +//// +//// @overload +//// def func(self, x: int) -> int: +//// '''func docs''' +//// pass + +helper.verifyHover('markdown', { + child_a_func_doc: + '```python\n(method)\ndef func(self: ChildA, x: str) -> str: ...\ndef func(self: ChildA, x: int) -> int: ...\n```\n---\nfunc docs', + child_a_instance_func_doc: + '```python\n(method)\ndef func(x: str) -> str: ...\ndef func(x: int) -> int: ...\n```\n---\nfunc docs', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.overload.docFromStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.overload.docFromStub.fourslash.ts new file mode 100644 index 00000000..952e8062 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.overload.docFromStub.fourslash.ts @@ -0,0 +1,48 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: overloads_client.py +//// from typing import overload +//// import moduleA +//// +//// class ChildA(moduleA.A): +//// @overload +//// def func(self, x: str) -> str: +//// pass +//// +//// @overload +//// def func(self, x: int) -> int: +//// pass +//// +//// +//// ChildA.[|/*child_a_func_doc*/func|] +//// a = ChildA() +//// a.[|/*child_a_instance_func_doc*/func|] + +// @filename: typings/moduleA.pyi +//// from typing import overload +//// class A: +//// @overload +//// def func(self, x: str) -> str: ... +//// +//// @overload +//// def func(self, x: int) -> int: +//// '''func docs''' +//// ... 
+ +// @filename: typings/moduleA.py +//// from typing import overload +//// class A: +//// @overload +//// def func(self, x: str) -> str: +//// pass +//// +//// @overload +//// def func(self, x: int) -> int: +//// pass + +helper.verifyHover('markdown', { + child_a_func_doc: + '```python\n(method)\ndef func(self: ChildA, x: str) -> str: ...\ndef func(self: ChildA, x: int) -> int: ...\n```\n---\nfunc docs', + child_a_instance_func_doc: + '```python\n(method)\ndef func(x: str) -> str: ...\ndef func(x: int) -> int: ...\n```\n---\nfunc docs', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.property.docFromSrcWithStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.property.docFromSrcWithStub.fourslash.ts new file mode 100644 index 00000000..51e089e8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.property.docFromSrcWithStub.fourslash.ts @@ -0,0 +1,87 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import testLib +//// class ChildGetterDocs(testLib.ClassWithGetterDocs): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// +//// class ChildSetterDocs(testLib.ClassWithSetterDocs): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// +//// one = ChildGetterDocs(3) +//// one.[|/*getter_docs*/length|] +//// two = ChildSetterDocs(3) +//// two.[|/*setter_docs*/length|] + +// @filename: testLib/__init__.py +//// class ClassWithGetterDocs(object): +//// def __init__(self, length): +//// self._length = length +//// 
+//// @property +//// def length(self): +//// """ +//// read property doc +//// """ +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// +//// class ClassWithSetterDocs(object): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// """ +//// setter property doc +//// """ +//// pass +//// + +// @filename: testLib/__init__.pyi +//// class ClassWithGetterDocs(object): +//// @property +//// def length(self) -> int: ... +//// @length.setter +//// def length(self, value) -> None: ... +//// +//// class ClassWithSetterDocs(object): +//// @property +//// def length(self) -> int: ... +//// @length.setter +//// def length(self, value) -> None: ... + +helper.verifyHover('markdown', { + getter_docs: '```python\n(property) length: int\n```\n---\nread property doc', + setter_docs: '```python\n(property) length: int\n```\n---\nsetter property doc', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.property.docFromStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.property.docFromStub.fourslash.ts new file mode 100644 index 00000000..f6411280 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.inherited.property.docFromStub.fourslash.ts @@ -0,0 +1,91 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import testLib +//// class ChildGetterDocs(testLib.ClassWithGetterDocs): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// +//// class ChildSetterDocs(testLib.ClassWithSetterDocs): +//// def 
__init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// +//// one = ChildGetterDocs(3) +//// one.[|/*getter_docs*/length|] +//// two = ChildSetterDocs(3) +//// two.[|/*setter_docs*/length|] + +// @filename: testLib/__init__.py +// @library: true +//// class ClassWithGetterDocs(object): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// +//// class ClassWithSetterDocs(object): +//// def __init__(self, length): +//// self._length = length +//// +//// @property +//// def length(self): +//// return self._length +//// +//// @length.setter +//// def length(self, value): +//// pass +//// + +// @filename: testLib/__init__.pyi +// @library: true +//// class ClassWithGetterDocs(object): +//// @property +//// def length(self) -> int: +//// """ +//// read property doc +//// """ +//// ... +//// @length.setter +//// def length(self, value) -> None: ... +//// +//// class ClassWithSetterDocs(object): +//// @property +//// def length(self) -> int: ... +//// @length.setter +//// def length(self, value) -> None: +//// """ +//// setter property doc +//// """ +//// ... 
+ +helper.verifyHover('markdown', { + getter_docs: '```python\n(property) length: int\n```\n---\nread property doc', + setter_docs: '```python\n(property) length: int\n```\n---\nsetter property doc', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.init.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.init.fourslash.ts new file mode 100644 index 00000000..f225c23b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.init.fourslash.ts @@ -0,0 +1,39 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// from typing import Generic, TypeVar, Union +//// +//// class C1: +//// def __init__(self, name="hello"): +//// '''__init__ docs''' +//// pass +//// +//// class C2: +//// def __init__(self, name="hello"): +//// pass +//// +//// c1 = [|/*marker1*/C1|]() +//// +//// unionType = Union[C1, C2] +//// c2 = [|/*marker2*/unionType|] +//// +//// T = TypeVar("T") +//// class G(Generic[T]): +//// def __init__(self, value: T): +//// pass +//// +//// g1 = [|/*marker3*/G|](10) +//// g2 = [|/*marker4*/G|][int](10) + +// @filename: test1.py +//// import test +//// +//// c = test.[|/*marker5*/C1|]() + +helper.verifyHover('markdown', { + marker1: '```python\nclass C1(name: str = "hello")\n```\n---\n\\_\\_init\\_\\_ docs', + marker2: '```python\n(type) unionType = C1 | C2\n```', + marker3: '```python\nclass G(value: int)\n```', + marker4: '```python\nclass G(value: int)\n```', + marker5: '```python\nclass C1(name: str = "hello")\n```\n---\n\\_\\_init\\_\\_ docs', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.libCodeAndStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.libCodeAndStub.fourslash.ts new file mode 100644 index 00000000..8d2551e4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.libCodeAndStub.fourslash.ts @@ -0,0 +1,52 @@ +/// <reference 
path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib/__init__.py +// @library: true +//// class Validator: +//// '''The validator class''' +//// def is_valid(self, text): +//// '''Checks if the input string is valid.''' +//// return True +//// @property +//// def read_only_prop(self): +//// '''The read-only property.''' +//// return True +//// @property +//// def read_write_prop(self): +//// '''The read-write property.''' +//// return True +//// @read_write_prop.setter +//// def read_write_prop(self, val): +//// pass + +// @filename: testLib/__init__.pyi +// @library: true +//// class Validator: +//// def is_valid(self, text: str) -> bool: ... +//// @property +//// def read_only_prop(self) -> bool: ... +//// @property +//// def read_write_prop(self) -> bool: ... +//// @read_write_prop.setter +//// def read_write_prop(self, val: bool): ... + +// @filename: test.py +//// import testLib +//// obj = testLib.[|/*marker1*/Validator|]() +//// obj.[|/*marker2*/is_valid|]('') +//// obj.[|/*marker3*/read_only_prop|] +//// r = obj.[|/*marker4*/read_write_prop|] +//// obj.[|/*marker5*/read_write_prop|] = r + +helper.verifyHover('markdown', { + marker1: '```python\nclass Validator()\n```\n---\nThe validator class', + marker2: '```python\n(method) def is_valid(text: str) -> bool\n```\n---\nChecks if the input string is valid.', + marker3: '```python\n(property) read_only_prop: bool\n```\n---\nThe read-only property.', + marker4: '```python\n(property) read_write_prop: bool\n```\n---\nThe read-write property.', + marker5: '```python\n(property) read_write_prop: bool\n```\n---\nThe read-write property.', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.libCodeNoStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.libCodeNoStub.fourslash.ts new file mode 100644 index 00000000..25d35343 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.libCodeNoStub.fourslash.ts @@ -0,0 +1,41 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib/__init__.py +// @library: true +//// class Validator: +//// '''The validator class''' +//// def is_valid(self, text: str) -> bool: +//// '''Checks if the input string is valid.''' +//// return True +//// @property +//// def read_only_prop(self) -> bool: +//// '''The read-only property.''' +//// return True +//// @property +//// def read_write_prop(self) -> bool: +//// '''The read-write property.''' +//// return True +//// @read_write_prop.setter +//// def read_write_prop(self, val: bool): +//// pass + +// @filename: test.py +//// import testLib +//// obj = testLib.[|/*marker1*/Validator|]() +//// obj.[|/*marker2*/is_valid|]('') +//// obj.[|/*marker3*/read_only_prop|] +//// r = obj.[|/*marker4*/read_write_prop|] +//// obj.[|/*marker5*/read_write_prop|] = r + +helper.verifyHover('markdown', { + marker1: '```python\nclass Validator()\n```\n---\nThe validator class', + marker2: '```python\n(method) def is_valid(text: str) -> bool\n```\n---\nChecks if the input string is valid.', + marker3: '```python\n(property) read_only_prop: bool\n```\n---\nThe read-only property.', + marker4: '```python\n(property) read_write_prop: bool\n```\n---\nThe read-write property.', + marker5: '```python\n(property) read_write_prop: bool\n```\n---\nThe read-write property.', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.libStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.libStub.fourslash.ts new file mode 100644 index 00000000..3b8968e4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.libStub.fourslash.ts @@ -0,0 +1,41 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// 
{ +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib/__init__.pyi +// @library: true +//// class Validator: +//// '''The validator class''' +//// def is_valid(self, text: str) -> bool: +//// '''Checks if the input string is valid.''' +//// pass +//// @property +//// def read_only_prop(self) -> bool: +//// '''The read-only property.''' +//// pass +//// @property +//// def read_write_prop(self) -> bool: +//// '''The read-write property.''' +//// pass +//// @read_write_prop.setter +//// def read_write_prop(self, val: bool): +//// pass + +// @filename: test.py +//// import testLib +//// obj = testLib.[|/*marker1*/Validator|]() +//// obj.[|/*marker2*/is_valid|]('') +//// obj.[|/*marker3*/read_only_prop|] +//// r = obj.[|/*marker4*/read_write_prop|] +//// obj.[|/*marker5*/read_write_prop|] = r + +helper.verifyHover('markdown', { + marker1: '```python\nclass Validator()\n```\n---\nThe validator class', + marker2: '```python\n(method) def is_valid(text: str) -> bool\n```\n---\nChecks if the input string is valid.', + marker3: '```python\n(property) read_only_prop: bool\n```\n---\nThe read-only property.', + marker4: '```python\n(property) read_write_prop: bool\n```\n---\nThe read-write property.', + marker5: '```python\n(property) read_write_prop: bool\n```\n---\nThe read-write property.', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.optionalAliasParameter.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.optionalAliasParameter.fourslash.ts new file mode 100644 index 00000000..445558a7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.optionalAliasParameter.fourslash.ts @@ -0,0 +1,14 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// from typing import Literal, Union +//// +//// A = Union[int, str, None] +//// +//// def func([|/*marker1*/param|]: A = None) -> None: +//// print([|/*marker2*/param|]) + 
+helper.verifyHover('markdown', { + marker1: '```python\n(parameter) param: A\n```', + marker2: '```python\n(parameter) param: A\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.overloadedFunction.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.overloadedFunction.fourslash.ts new file mode 100644 index 00000000..51586349 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.overloadedFunction.fourslash.ts @@ -0,0 +1,23 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// from typing import overload +//// +//// @overload +//// def func(a: int) -> int: +//// ... +//// +//// @overload +//// def func(a: str) -> str: +//// ... +//// +//// def func(a: int | str) -> int | str: +//// return a +//// +//// [|/*marker1*/func|](1) +//// [|/*marker2*/func|]("hi") + +helper.verifyHover('markdown', { + marker1: '```python\n(function) def func(a: int) -> int\n```', + marker2: '```python\n(function) def func(a: str) -> str\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.plainText.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.plainText.fourslash.ts new file mode 100644 index 00000000..e8387454 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.plainText.fourslash.ts @@ -0,0 +1,21 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// class [|/*marker1*/Validator|]: +//// '''The validator class +//// +//// .. versionadded:: 2.0 +//// This directive shows in plaintext. +//// ''' +//// def is_valid(self, text: str) -> bool: +//// '''Checks if the input string is valid.''' +//// return true +//// +//// validator = Validator() +//// validator.[|/*marker2*/is_valid|]('hello') + +helper.verifyHover('plaintext', { + marker1: + '(class) Validator\n\nThe validator class\n\n.. 
versionadded:: 2.0\n This directive shows in plaintext.', + marker2: '(method) def is_valid(text: str) -> bool\n\nChecks if the input string is valid.', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.slots.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.slots.fourslash.ts new file mode 100644 index 00000000..08659db2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.slots.fourslash.ts @@ -0,0 +1,15 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// class Chat: +//// __slots__ = ("id",) +//// +//// def __init__(self): +//// self.id = 1234 +//// """The ID of the channel.""" +//// +//// y = Chat() +//// y.[|/*marker*/id|] +helper.verifyHover('markdown', { + marker: '```python\n(variable) id: int\n```\n---\nThe ID of the channel.', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.typedDict.get.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.typedDict.get.fourslash.ts new file mode 100644 index 00000000..7314cd60 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.typedDict.get.fourslash.ts @@ -0,0 +1,15 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// from typing_extensions import TypedDict +//// +//// class Cls(TypedDict): +//// a: int +//// b: str +//// +//// dct: Cls = {"a": 1, "b": "2"} +//// dct.[|/*marker1*/get|]("a") + +helper.verifyHover('markdown', { + marker1: "```python\n(variable) def get(k: Literal['a']) -> int\n```", +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.typedDict.key.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.typedDict.key.fourslash.ts new file mode 100644 index 00000000..a567ef79 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.typedDict.key.fourslash.ts @@ -0,0 
+1,51 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// from typing import TypedDict +//// +//// class User(TypedDict): +//// name: str +//// """The fullname of the User""" +//// +//// age: int +//// """The age of the User, will not be over 200""" +//// +//// views: float +//// +//// user: User = {[|/*marker1*/'name'|]: 'Robert'} +//// +//// def foo(user: User) -> None: +//// ... +//// +//// foo({[|/*marker2*/'name'|]}) +//// foo({[|/*marker3*/'views'|]}) +//// foo({[|/*marker4*/'points'|]}) +//// foo({'name': 'Robert', [|/*marker5*/'age'|]}) +//// foo({'name': 'Robert', [|/*marker6*/'age'|]: 100}) +//// foo({'name': [|/*marker7*/'Robert'|], 'age': 100}) +//// foo({'name': [|/*marker8*/'name'|]}) +//// +//// class Post(TypedDict): +//// title: str +//// age: int +//// """The age of the Post""" +//// +//// def bar(item: Post | User) -> None: +//// ... +//// +//// bar({[|/*marker9*/'title'|]}) +//// bar({[|/*marker10*/'age'|]}) + +helper.verifyHover('markdown', { + marker1: '```python\n(key) name: str\n```\n---\nThe fullname of the User', + marker2: '```python\n(key) name: str\n```\n---\nThe fullname of the User', + marker3: '```python\n(key) views: float\n```', + marker4: null, + marker5: '```python\n(key) age: int\n```\n---\nThe age of the User, will not be over 200', + marker6: '```python\n(key) age: int\n```\n---\nThe age of the User, will not be over 200', + marker7: null, + marker8: null, + marker9: '```python\n(key) title: str\n```', + marker10: + '```python\n(key) age: int\n```\n---\nThe age of the Post\n\n---\n```python\n(key) age: int\n```\n---\nThe age of the User, will not be over 200', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.unpackedTypedDict.key.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.unpackedTypedDict.key.fourslash.ts new file mode 100644 index 00000000..eda8ed99 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.unpackedTypedDict.key.fourslash.ts @@ -0,0 +1,24 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// from typing import TypedDict, Unpack +//// +//// class User(TypedDict): +//// name: str +//// """The fullname of the User""" +//// +//// age: int +//// """The age of the User, will not be over 200""" +//// +//// def foo(**user: Unpack[User]) -> None: +//// ... +//// +//// foo(name='Robert', [|/*marker1*/age|]=100) +//// foo(name='Robert', [|/*marker2*/age|]=) +//// foo([|/*marker3*/name|]='Robert') + +helper.verifyHover('markdown', { + marker1: '```python\n(variable) age: int\n```\n---\nThe age of the User, will not be over 200', + marker2: '```python\n(variable) age: int\n```\n---\nThe age of the User, will not be over 200', + marker3: '```python\n(variable) name: str\n```\n---\nThe fullname of the User', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.variable.docString.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.variable.docString.fourslash.ts new file mode 100644 index 00000000..695bbea7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.variable.docString.fourslash.ts @@ -0,0 +1,49 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// from typing import Callable +//// class A: +//// def __init__(self, func : Callable[[float], float]) -> None: +//// self.x = 1 +//// """ test x """ +//// self.func = func +//// """A given function""" +//// +//// a = A() +//// a.[|/*marker1*/x|] +//// a.[|/*marker2*/func|] + +// @filename: test2.py +//// y = 2 +//// """ test y """ +//// +//// [|/*marker3*/y|] + +// @filename: test3.py +//// from stubs import z +//// +//// [|/*marker4*/z|] + +// @filename: stubs.py +//// z = 3 +//// """ test z """ + +// @filename: stubs.pyi +//// z: int = ... 
+ +// @filename: test4.py +//// from typing import List, Union +//// [|/*marker5*/SomeType|] = List[Union[int, str]] +//// """Here's some documentation about SomeType""" + +// @filename: testBigInt.py +//// [|/*marker6*/x|] = 123670029844611072 + +helper.verifyHover('markdown', { + marker1: '```python\n(variable) x: int\n```\n---\ntest x', + marker2: '```python\n(variable) def func(float) -> float\n```\n---\nA given function', + marker3: '```python\n(variable) y: Literal[2]\n```\n---\ntest y', + marker4: '```python\n(variable) z: int\n```\n---\ntest z', + marker5: "```python\n(type) SomeType = List[int | str]\n```\n---\nHere's some documentation about SomeType", + marker6: '```python\n(variable) x: Literal[123670029844611072]\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/hover.wildcardimports.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.wildcardimports.fourslash.ts new file mode 100644 index 00000000..273f0e9a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/hover.wildcardimports.fourslash.ts @@ -0,0 +1,85 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: lib1/definition.py +// @library: true +//// def func(): +//// '''func docs''' +//// pass +//// +//// class MyType: +//// '''MyType docs''' +//// pass +//// +//// class MyType2: +//// def func2(self): +//// '''func2 docs''' +//// pass + +// @filename: lib1/alias.py +// @library: true +//// def func3(): +//// '''func3 docs''' +//// pass + +// @filename: lib1/withall.py +// @library: true +//// def func4(): +//// '''func4 docs''' +//// pass +//// +//// def func5(): +//// '''func5 docs''' +//// pass +//// +//// __all__ = ['func5'] + +// @filename: lib1/redirect.py +// @library: true +//// from . 
import withall +//// from .withall import * +//// +//// __all__ += withall.__all__ + +// @filename: lib1/wildcard.py +// @library: true +//// from .definition import * +//// from .redirect import * +//// from .alias import func3 + +// @filename: lib1/__init__.py +// @library: true +//// from .wildcard import * + +// @filename: lib1/__init__.pyi +// @library: true +//// from typing import Any +//// func: Any +//// MyType: Any +//// class MyType2: +//// def func2(self) -> None : ... +//// func3: Any +//// func4: Any +//// func5: Any + +// @filename: test.py +//// import lib1 +//// lib1.[|/*marker1*/func|]() +//// c = lib1.[|/*marker2*/MyType|]() +//// lib1.MyType2().[|/*marker3*/func2|]() +//// lib1.[|/*marker4*/func3|]() +//// lib1.[|/*marker5*/func4|]() +//// lib1.[|/*marker6*/func5|]() + +helper.verifyHover('markdown', { + marker1: '```python\n(variable) func: Any\n```\n---\nfunc docs', + marker2: '```python\n(variable) MyType: Any\n```\n---\nMyType docs', + marker3: '```python\n(method) def func2() -> None\n```\n---\nfunc2 docs', + marker4: '```python\n(variable) func3: Any\n```\n---\nfunc3 docs', + marker5: '```python\n(variable) func4: Any\n```\n---\nfunc4 docs', + marker6: '```python\n(variable) func5: Any\n```\n---\nfunc5 docs', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/import.multipart.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/import.multipart.fourslash.ts new file mode 100644 index 00000000..4aaf8820 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/import.multipart.fourslash.ts @@ -0,0 +1,21 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pkg/__init__.py +//// from . 
import util + +// @filename: pkg/util/__init__.py +//// + +// @filename: pkg/util/foo.py +//// class Foo: +//// pass + +// @filename: test.py +//// import pkg +//// import pkg.util.foo +//// pkg.util.foo.[|/*marker*/Foo|]() + +// @ts-ignore +helper.verifyHover('markdown', { + marker: '```python\nclass Foo()\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/import.multipart2.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/import.multipart2.fourslash.ts new file mode 100644 index 00000000..91d32d6b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/import.multipart2.fourslash.ts @@ -0,0 +1,22 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pkg/__init__.py +//// from . import sub1 +//// from .sub2 import * + +// @filename: pkg/sub1.py +//// a = 1 +//// b = 2 + +// @filename: pkg/sub2.py +//// from . import sub1 +//// from .sub1 import a + +// @filename: test.py +//// import pkg.sub1 +//// pkg.sub1.[|/*marker*/b|]() + +// @ts-ignore +helper.verifyHover('markdown', { + marker: '```python\n(variable) b: int\n```', +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/import.nameconflict.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/import.nameconflict.fourslash.ts new file mode 100644 index 00000000..f41d4ebe --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/import.nameconflict.fourslash.ts @@ -0,0 +1,20 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: project/__init__.py +//// from .a import * + +// @filename: project/a/__init__.py +//// from .b import b + +// @filename: project/a/b.py +//// def b() -> None: +//// pass + +// @filename: project/a/test.py +//// from project import a +//// x: a.[|/*marker*/b|] + +// @ts-ignore +helper.verifyHover('markdown', { + marker: '```python\n(function) def b() -> None\n```', +}); diff --git 
a/python-parser/packages/pyright-internal/src/tests/fourslash/import.publicSymbols.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/import.publicSymbols.fourslash.ts new file mode 100644 index 00000000..c13ab975 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/import.publicSymbols.fourslash.ts @@ -0,0 +1,65 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// [|/*import*/|][|MY_CONSTANT_VAR/*marker1*/|] +//// [|MyAliasList/*marker2*/|] +//// [|normal_variable/*marker3*/|] +//// [|MY_PROTECTED/*marker4*/|] +//// [|__MyAliasList/*marker5*/|] + +// @filename: lib.py +//// MY_CONSTANT_VAR = 42 +//// MyAliasList = list[int] +//// normal_variable = 1 +//// _MY_PROTECTED2 = False +//// __MyAliasList = int + +{ + const importRange = helper.getPositionRange('import'); + const marker1Range = helper.getPositionRange('marker1'); + const marker2Range = helper.getPositionRange('marker2'); + const marker4Range = helper.getPositionRange('marker4'); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: 'MY_CONSTANT_VAR', + kind: Consts.CompletionItemKind.Constant, + documentation: '```\nfrom lib import MY_CONSTANT_VAR\n```', + detail: 'Auto-import', + textEdit: { range: marker1Range, newText: 'MY_CONSTANT_VAR' }, + additionalTextEdits: [{ range: importRange, newText: 'from lib import MY_CONSTANT_VAR\n\n\n' }], + }, + ], + }, + marker2: { + completions: [ + { + label: 'MyAliasList', + kind: Consts.CompletionItemKind.Variable, + documentation: '```\nfrom lib import MyAliasList\n```', + detail: 'Auto-import', + textEdit: { range: marker2Range, newText: 'MyAliasList' }, + additionalTextEdits: [{ range: importRange, newText: 'from lib import MyAliasList\n\n\n' }], + }, + ], + }, + marker3: { completions: [] }, + marker4: { + // Protected variables SHOULD be added + completions: [ + { + label: '_MY_PROTECTED2', + kind: 
Consts.CompletionItemKind.Constant, + documentation: '```\nfrom lib import _MY_PROTECTED2\n```', + detail: 'Auto-import', + textEdit: { range: marker4Range, newText: '_MY_PROTECTED2' }, + additionalTextEdits: [{ range: importRange, newText: 'from lib import _MY_PROTECTED2\n\n\n' }], + }, + ], + }, + marker5: { completions: [] }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/import.pytyped.dunderAll.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/import.pytyped.dunderAll.fourslash.ts new file mode 100644 index 00000000..e45b7a56 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/import.pytyped.dunderAll.fourslash.ts @@ -0,0 +1,77 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: testpkg/py.typed +// @library: true +//// + +// @filename: testpkg/__init__.py +// @library: true +//// from . import submod +//// from .submod2 import * +//// from submod import foofoofoo5, foofoofoo6, foofoofoo7, foofoofoo8 +//// foofoofoo0: int = 0 +//// foofoofoo1: int = 1 +//// foofoofoo2: int = 2 +//// foofoofoo3: int = 3 +//// foofoofoo4: int = 4 +//// __all__ = ["foofoofoo1"] +//// __all__ += ["foofoofoo2"] +//// __all__.extend(["foofoofoo3"]) +//// __all__.extend(submod.__all__) +//// __all__.remove("foofoofoo1") +//// __all__.remove("foofoofoo6") +//// __all__.append("foofoofoo0") +//// __all__ += submod2.__all__ + +// @filename: testpkg/submod.py +// @library: true +//// foofoofoo5: int = 5 +//// foofoofoo6: int = 6 +//// foofoofoo7: int = 7 +//// foofoofoo8: int = 8 +//// __all__ = ["foofoofoo5"] +//// __all__ += ["foofoofoo6"] +//// __all__.extend(["foofoofoo7"]) + +// @filename: testpkg/submod2.py +// @library: true +//// foofoofoo9: int = 9 +//// __all__ = ["foofoofoo9"] + +// @filename: .src/test.py +//// from testpkg import * +//// foofoofoo[|/*marker1*/|] + +// Ensure that only the __all__ items appear in the list. 
+ +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: 'foofoofoo0', + kind: Consts.CompletionItemKind.Variable, + }, + { + label: 'foofoofoo2', + kind: Consts.CompletionItemKind.Variable, + }, + { + label: 'foofoofoo3', + kind: Consts.CompletionItemKind.Variable, + }, + { + label: 'foofoofoo5', + kind: Consts.CompletionItemKind.Variable, + }, + { + label: 'foofoofoo7', + kind: Consts.CompletionItemKind.Variable, + }, + { + label: 'foofoofoo9', + kind: Consts.CompletionItemKind.Variable, + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/import.pytyped.privateSymbols.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/import.pytyped.privateSymbols.fourslash.ts new file mode 100644 index 00000000..84081fb0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/import.pytyped.privateSymbols.fourslash.ts @@ -0,0 +1,65 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "typeCheckingMode": "basic" +//// } + +// @filename: testLib/py.typed +// @library: true +//// + +// @filename: testLib/__init__.py +// @library: true +//// from .module1 import one as one, two, three +//// four: int = two * two +//// _five: int = two + three +//// _six: int = 6 +//// __all__ = ["_six"] + +// @filename: testLib/module1.py +// @library: true +//// one: int = 1 +//// two: int = 2 +//// three: int = 3 + +// @filename: .src/test1.py +//// # pyright: reportPrivateUsage=true, reportPrivateImportUsage=true +//// from testLib import one +//// from testLib import [|/*marker1*/two|] as two_alias +//// from testLib import [|/*marker2*/three|] +//// from testLib import four +//// from testLib import [|/*marker3*/_five|] +//// from testLib import _six +//// import testLib +//// testLib.one +//// testLib.[|/*marker4*/two|] +//// testLib.[|/*marker5*/three|] +//// testLib.four +//// 
testLib.[|/*marker6*/_five|] +//// testLib._six + +// @ts-ignore +await helper.verifyDiagnostics({ + marker1: { + category: 'error', + message: `"two" is not exported from module "testLib"\n  Import from \"testLib.module1\" instead`, + }, + marker2: { + category: 'error', + message: `"three" is not exported from module "testLib"\n  Import from \"testLib.module1\" instead`, + }, + marker3: { + category: 'error', + message: `"_five" is private and used outside of the module in which it is declared`, + }, + marker4: { category: 'error', message: `"two" is not exported from module "testLib"` }, + marker5: { + category: 'error', + message: `"three" is not exported from module "testLib"`, + }, + marker6: { + category: 'error', + message: `"_five" is private and used outside of the module in which it is declared`, + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/import.pytyped.typeCheckingBasic.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/import.pytyped.typeCheckingBasic.fourslash.ts new file mode 100644 index 00000000..f681a2e7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/import.pytyped.typeCheckingBasic.fourslash.ts @@ -0,0 +1,38 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "typeCheckingMode": "basic" +//// } + +// @filename: testLib/py.typed +// @library: true +//// + +// @filename: testLib/__init__.py +// @library: true +//// class Foo: +//// def method1(self): +//// '''Method docs''' +//// return None +//// +//// # This method has no annotation +//// def foo(a): +//// return Foo() + +// @filename: .src/test.py +//// from testLib import foo +//// foo(1).me[|/*marker1*/|] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'method1', + kind: Consts.CompletionItemKind.Method, + documentation: '```python\ndef method1() -> None\n```\n---\nMethod docs', + 
}, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/import.pytyped.typeCheckingOff.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/import.pytyped.typeCheckingOff.fourslash.ts new file mode 100644 index 00000000..7865af7c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/import.pytyped.typeCheckingOff.fourslash.ts @@ -0,0 +1,38 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "typeCheckingMode": "off" +//// } + +// @filename: testLib/py.typed +// @library: true +//// + +// @filename: testLib/__init__.py +// @library: true +//// class Foo: +//// def method1(self): +//// '''Method docs''' +//// return None +//// +//// # This method has no annotation +//// def foo(a): +//// return Foo() + +// @filename: .src/test.py +//// from testLib import foo +//// foo(1).me[|/*marker1*/|] + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'method1', + kind: Consts.CompletionItemKind.Method, + documentation: '```python\ndef method1() -> None\n```\n---\nMethod docs', + }, + ], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/import.wildcard.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/import.wildcard.fourslash.ts new file mode 100644 index 00000000..5aff5107 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/import.wildcard.fourslash.ts @@ -0,0 +1,27 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: testpkg/py.typed +// @library: true +//// + +// @filename: testpkg/__init__.py +// @library: true +//// __all__ = ["submod"] +//// def foo(): +//// return + +// @filename: testpkg/submod.py +// @library: true +//// def test_func(): +//// print("hi") + +// @filename: .src/test.py +//// # pyright: reportWildcardImportFromLibrary=false +//// from testpkg import * +//// 
submod.test_func() +//// [|/*marker*/foo|]() + +// @ts-ignore +await helper.verifyDiagnostics({ + marker: { category: 'error', message: `"foo" is not defined` }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/importnotresolved.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/importnotresolved.fourslash.ts new file mode 100644 index 00000000..74ed9531 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/importnotresolved.fourslash.ts @@ -0,0 +1,13 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: importnotresolved.py +//// # these will not be resolve, no typestubs for django in typeshed +//// +//// import [|/*marker1*/notexistant|] +//// import [|/*marker2*/django|] +//// + +helper.verifyDiagnostics({ + marker1: { category: 'error', message: `Import "notexistant" could not be resolved` }, + marker2: { category: 'error', message: `Import "django" could not be resolved` }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/missingModuleSource.disablingInStrictMode.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/missingModuleSource.disablingInStrictMode.fourslash.ts new file mode 100644 index 00000000..58fa93da --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/missingModuleSource.disablingInStrictMode.fourslash.ts @@ -0,0 +1,20 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: typings/pkg1234/__init__.pyi +//// __version__: str + +// @filename: importnotresolved.py +//// #pyright: strict +//// +//// # verify that reportMissingModuleSource can be disabled via config +//// # even when in strict mode +//// +//// import pkg1234 +//// print(pkg1234.__version__) + +// @filename: pyrightconfig.json +//// { +//// "reportMissingModuleSource": false +//// } + +helper.verifyDiagnostics({}); diff --git 
a/python-parser/packages/pyright-internal/src/tests/fourslash/missingModuleSource.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/missingModuleSource.fourslash.ts new file mode 100644 index 00000000..c354e96f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/missingModuleSource.fourslash.ts @@ -0,0 +1,17 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: typings/pkg1234/__init__.pyi +//// __version__: str + +// @filename: importnotresolved.py +//// # will not resolve, stub found but source not found +//// +//// import [|/*marker1*/pkg1234|] +//// print(pkg1234.__version__) + +helper.verifyDiagnostics({ + marker1: { + category: 'warning', + message: 'Import "pkg1234" could not be resolved from source', + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.codeAction.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.codeAction.fourslash.ts new file mode 100644 index 00000000..101eeb56 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.codeAction.fourslash.ts @@ -0,0 +1,33 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "reportMissingTypeStubs": "warning" +//// } + +// @filename: testLib/__init__.py +// @library: true +//// # This is a library file +//// class MyLibrary: +//// def DoEveryThing(self, code: str): +//// pass + +// @filename: .src/test.py +//// import [|/*marker*/testLi|]b + +// @ts-ignore +await helper.verifyCodeActions('included', { + marker: { + codeActions: [ + { + title: `Create Type Stub For "testLib"`, + kind: Consts.CodeActionKind.QuickFix, + command: { + title: 'Create Type Stub', + command: Consts.Commands.createTypeStub, + arguments: ['\\', 'testLib', '\\.src\\test.py'], + }, + }, + ], + }, +}); diff --git 
a/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.multipart.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.multipart.fourslash.ts new file mode 100644 index 00000000..3251fde6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.multipart.fourslash.ts @@ -0,0 +1,48 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "reportMissingTypeStubs": "warning" +//// } + +// @filename: testLib/aa/bb/__init__.py +// @library: true +//// # This is a library file +//// class MyLibrary: +//// def DoEveryThing(self, code: str): +//// ... + +// @filename: testLib/aa/bb/cc.py +// @library: true +//// MyAlias = int + +// @filename: test.py +//// import [|/*marker*/testLib.aa.b|]b + +const filename2 = helper.getMarkerByName('marker').fileName; +const command2 = { + title: 'Create Type Stub', + command: Consts.Commands.createTypeStub, + arguments: ['/', 'testLib.aa.bb', filename2], +}; + +// @ts-ignore +await helper.verifyCommand(command2, { + ['/typings/testLib/aa/bb/__init__.pyi']: `""" +This type stub file was generated by pyright. +""" + +class MyLibrary: + def DoEveryThing(self, code: str): # -> None: + ... + + + +`, + ['/typings/testLib/aa/bb/cc.pyi']: `""" +This type stub file was generated by pyright. 
+""" + +MyAlias = int +`, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.singlefile.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.singlefile.fourslash.ts new file mode 100644 index 00000000..eef78dd2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.singlefile.fourslash.ts @@ -0,0 +1,38 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "reportMissingTypeStubs": "warning" +//// } + +// @filename: testLib.py +// @library: true +//// # This is a library file +//// class MyLibrary: +//// def DoEveryThing(self, code: str): +//// ... + +// @filename: test.py +//// import [|/*marker*/testLi|]b + +const filename3 = helper.getMarkerByName('marker').fileName; +const command3 = { + title: 'Create Type Stub', + command: Consts.Commands.createTypeStub, + arguments: ['/', 'testLib', filename3], +}; + +// @ts-ignore +await helper.verifyCommand(command3, { + ['/typings/testLib/__init__.pyi']: `""" +This type stub file was generated by pyright. +""" + +class MyLibrary: + def DoEveryThing(self, code: str): # -> None: + ... + + + +`, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.singlepart.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.singlepart.fourslash.ts new file mode 100644 index 00000000..77f37090 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.singlepart.fourslash.ts @@ -0,0 +1,38 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "reportMissingTypeStubs": "warning" +//// } + +// @filename: testLib/__init__.py +// @library: true +//// # This is a library file +//// class MyLibrary: +//// def DoEveryThing(self, code: str): +//// ... 
+ +// @filename: test.py +//// import [|/*marker*/testLi|]b + +const filename1 = helper.getMarkerByName('marker').fileName; +const command1 = { + title: 'Create Type Stub', + command: Consts.Commands.createTypeStub, + arguments: ['/', 'testLib', filename1], +}; + +// @ts-ignore +await helper.verifyCommand(command1, { + ['/typings/testLib/__init__.pyi']: `""" +This type stub file was generated by pyright. +""" + +class MyLibrary: + def DoEveryThing(self, code: str): # -> None: + ... + + + +`, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.trycatchImport.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.trycatchImport.fourslash.ts new file mode 100644 index 00000000..b2d23b60 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.trycatchImport.fourslash.ts @@ -0,0 +1,49 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "reportMissingTypeStubs": "warning" +//// } + +// @filename: testLib/mylibrary.py +// @library: true +//// # This is a library file +//// class MyLibrary: +//// def DoEveryThing(self, code: str): +//// ... +//// class ExceptLibrary: +//// def DoEveryThing(self, code: str): +//// ... +//// class ElseLibrary: +//// def DoEveryThing(self, code: str): +//// ... 
+ +// @filename: testLib/__init__.py +// @library: true +//// try: +//// from .mylibrary import MyLibrary +//// except: +//// from .mylibrary import ExceptLibrary +//// else: +//// from .mylibrary import ElseLibrary + +// @filename: test.py +//// import [|/*marker*/testLi|]b + +const filename4 = helper.getMarkerByName('marker').fileName; +const command4 = { + title: 'Create Type Stub', + command: Consts.Commands.createTypeStub, + arguments: ['/', 'testLib', filename4], +}; + +// @ts-ignore +await helper.verifyCommand(command4, { + ['/typings/testLib/__init__.pyi']: `""" +This type stub file was generated by pyright. +""" + +from .mylibrary import MyLibrary + +`, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.fourslash.ts new file mode 100644 index 00000000..7dac991e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.fourslash.ts @@ -0,0 +1,20 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "reportMissingTypeStubs": "warning" +//// } + +// @filename: testLib/__init__.py +// @library: true +//// # This is a library file +//// class MyLibrary: +//// def DoEveryThing(self, code: str): +//// pass + +// @filename: test.py +//// import [|/*marker*/testLib|] + +helper.verifyDiagnostics({ + marker: { category: 'warning', message: `Stub file not found for "testLib"` }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.invokeCodeAction.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.invokeCodeAction.fourslash.ts new file mode 100644 index 00000000..79fc2f4a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/missingTypeStub.invokeCodeAction.fourslash.ts @@ -0,0 +1,36 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// 
@filename: pyrightconfig.json +//// { +//// "reportMissingTypeStubs": "warning" +//// } + +// @filename: testLib/__init__.py +// @library: true +//// # This is a library file +//// class MyLibrary: +//// def DoEveryThing(self, code: str): +//// pass + +// @filename: test.py +//// import [|/*marker*/testLi|]b + +// @ts-ignore +await helper.verifyInvokeCodeAction({ + marker: { + title: `Create Type Stub For "testLib"`, + files: { + ['/typings/testLib/__init__.pyi']: `""" +This type stub file was generated by pyright. +""" + +class MyLibrary: + def DoEveryThing(self, code: str): # -> None: + ... + + + +`, + }, + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/noerrors.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/noerrors.fourslash.ts new file mode 100644 index 00000000..bb09358c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/noerrors.fourslash.ts @@ -0,0 +1,9 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// # make sure test works with no diagnostics +//// +//// class C: +//// pass + +helper.verifyDiagnostics(); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/orderImports1.command.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/orderImports1.command.fourslash.ts new file mode 100644 index 00000000..5ac9128c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/orderImports1.command.fourslash.ts @@ -0,0 +1,20 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: quickActionOrganizeImportTest1.py +//// import time +//// import os +//// import sys + +// @ts-ignore +await helper.verifyCommand( + { + title: 'Quick action order imports 1', + command: Consts.Commands.orderImports, + arguments: ['quickActionOrganizeImportTest1.py'], + }, + { + ['quickActionOrganizeImportTest1.py']: `import os +import sys +import time`, + } +); diff --git 
a/python-parser/packages/pyright-internal/src/tests/fourslash/orderImports2.command.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/orderImports2.command.fourslash.ts new file mode 100644 index 00000000..6c089099 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/orderImports2.command.fourslash.ts @@ -0,0 +1,24 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: quickActionOrganizeImportTest2.py +//// import time +//// import sys +//// a = 100 +//// print(a) +//// import math +//// import os + +// @ts-ignore +await helper.verifyCommand( + { + title: 'Quick action order imports', + command: Consts.Commands.orderImports, + arguments: ['quickActionOrganizeImportTest2.py'], + }, + { + ['quickActionOrganizeImportTest2.py']: `import math +import os +import sys +import time`, + } +); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/rename.externallyHidden.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.externallyHidden.fourslash.ts new file mode 100644 index 00000000..6e9da7ca --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.externallyHidden.fourslash.ts @@ -0,0 +1,24 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test1.py +//// def [|__foo|](): +//// pass +//// +//// [|__foo/*marker*/|]() + +// @filename: test2.py +//// from test1 import [|__foo|] +//// +//// [|__foo|]() + +helper.verifyRename({ + marker: { + newName: '__foo1', + changes: helper + .getRangesByText() + .get('__foo')! 
+ .map((r) => { + return { filePath: r.fileName, range: helper.convertPositionRange(r), replacementText: '__foo1' }; + }), + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/rename.externallyHidden.params.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.externallyHidden.params.fourslash.ts new file mode 100644 index 00000000..ee917a9b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.externallyHidden.params.fourslash.ts @@ -0,0 +1,24 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test1.py +//// def __foo([|param/*marker*/|]: int): +//// pass +//// +//// __foo([|param|]=1) + +// @filename: test2.py +//// from test1 import __foo +//// +//// __foo([|param|]=1) + +helper.verifyRename({ + marker: { + newName: 'param1', + changes: helper + .getRangesByText() + .get('param')! + .map((r) => { + return { filePath: r.fileName, range: helper.convertPositionRange(r), replacementText: 'param1' }; + }), + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/rename.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.fourslash.ts new file mode 100644 index 00000000..3a4a9e29 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.fourslash.ts @@ -0,0 +1,29 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// class [|Test1|]: +//// def M(self, a: '[|Test1|]'): +//// pass + +// @filename: test2.py +//// from test import [|Test1|] +//// +//// b = [|[|/*marker*/Test1|]|]() + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyRename({ + marker: { + newName: 'NewTest1', + changes: ranges.map((r) => { + return { filePath: r.fileName, range: helper.convertPositionRange(r), replacementText: 'NewTest1' }; + }), + }, + }); +} diff 
--git a/python-parser/packages/pyright-internal/src/tests/fourslash/rename.function.untitledFile.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.function.untitledFile.fourslash.ts new file mode 100644 index 00000000..667f6bbd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.function.untitledFile.fourslash.ts @@ -0,0 +1,21 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: declare.py +//// def func(): +//// pass + +// @filename: Untitled-1.py +//// from declare import func +//// /*marker*/func() + +{ + helper.verifyRename( + { + marker: { + newName: 'func1', + changes: [], + }, + }, + true + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/rename.init.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.init.fourslash.ts new file mode 100644 index 00000000..ea157f66 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.init.fourslash.ts @@ -0,0 +1,25 @@ +/// <reference path="typings/fourslash.d.ts" /> +// Verify rename doesn't use the same logic as find all references (which would find the constructor calls) + +// @filename: test.py +//// class Test1: +//// def [|/*marker*/__init__|](self): +//// pass + +// @filename: test2.py +//// from test import Test1 +//// +//// b = Test1() + +{ + const ranges = helper.getRanges().filter((r) => r.marker); + + helper.verifyRename({ + marker: { + newName: 'foo', + changes: ranges.map((r) => { + return { filePath: r.fileName, range: helper.convertPositionRange(r), replacementText: 'foo' }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/rename.library.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.library.fourslash.ts new file mode 100644 index 00000000..865b21aa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.library.fourslash.ts @@ 
-0,0 +1,29 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.py +// @library: true +//// class Test1: +//// def M(self, a: Test1): +//// pass + +// @filename: test.py +//// from testLib1 import Test1 +//// +//// a = [|/*marker*/Test1|]() + +// @filename: test2.py +//// from testLib1 import Test1 +//// +//// b = Test1() + +helper.verifyRename({ + marker: { + newName: 'NewTest1', + changes: [], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/rename.library.sourceAndStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.library.sourceAndStub.fourslash.ts new file mode 100644 index 00000000..59d4f0a1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.library.sourceAndStub.fourslash.ts @@ -0,0 +1,33 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: testLib1/__init__.py +// @library: true +//// class Test1: +//// def M(self, a): +//// pass + +// @filename: typings/testLib1/__init__.pyi +//// class Test1: +//// def M(self, a: Test1): ... 
+ +// @filename: test.py +//// from testLib1 import Test1 +//// +//// a = [|/*marker*/Test1|]() + +// @filename: test2.py +//// from testLib1 import Test1 +//// +//// b = Test1() + +helper.verifyRename({ + marker: { + newName: 'NewTest1', + changes: [], + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/rename.multipleDecl.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.multipleDecl.fourslash.ts new file mode 100644 index 00000000..68fac2d4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.multipleDecl.fourslash.ts @@ -0,0 +1,21 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: foo/__init__.py +//// class Foo: +//// pass + +// @filename: test.py +//// import foo +//// [|/*marker*/foo|] = 3 +//// def [|foo|](): pass + +const ranges = helper.getRanges(); + +helper.verifyRename({ + marker: { + newName: 'foo1', + changes: ranges.map((r) => { + return { filePath: r.fileName, range: helper.convertPositionRange(r), replacementText: 'foo1' }; + }), + }, +}); diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/rename.parens.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.parens.fourslash.ts new file mode 100644 index 00000000..04f75959 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.parens.fourslash.ts @@ -0,0 +1,20 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// [|/*marker*/A|] = True +//// if([|A|]): +//// pass + +{ + helper.verifyRename({ + marker: { + newName: 'RenamedA', + changes: helper + .getRangesByText() + .get('A')! 
+ .map((r) => { + return { filePath: r.fileName, range: helper.convertPositionRange(r), replacementText: 'RenamedA' }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/rename.sourceAndStub.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.sourceAndStub.fourslash.ts new file mode 100644 index 00000000..64b0beb4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.sourceAndStub.fourslash.ts @@ -0,0 +1,37 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: testLib1/__init__.py +//// class Test1: +//// def [|M|](self, a): +//// pass + +// @filename: testLib1/__init__.pyi +//// class Test1: +//// def [|M|](self, a: str): ... + +// @filename: test.py +//// from testLib1 import Test1 +//// +//// Test1().[|[|/*marker*/M|]|]('') + +// @filename: test2.py +//// from testLib1 import Test1 +//// +//// b = Test1() +//// func(b) +//// +//// def func(t: Test1): +//// t.[|M|]('') + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyRename({ + marker: { + newName: 'M2', + changes: ranges.map((r) => { + return { filePath: r.fileName, range: helper.convertPositionRange(r), replacementText: 'M2' }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/rename.string.excluded.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.string.excluded.fourslash.ts new file mode 100644 index 00000000..4194888e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.string.excluded.fourslash.ts @@ -0,0 +1,43 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "exclude": ["**/exclude/**"] +//// } + +// @filename: exclude/test.py +//// class [|/*marker1*/A|]: +//// pass +//// +//// a = [|A|]() + +// @filename: exclude/test2.py +//// class [|/*marker2*/B|]: +//// pass +//// +//// b = [|B|]() + +{ + 
helper.openFile(helper.getMarkerByName('marker1').fileName); + + // excluded file opened + helper.verifyRename({ + marker1: { + newName: 'RenamedA', + changes: helper + .getRangesByText() + .get('A')! + .map((r) => { + return { filePath: r.fileName, range: helper.convertPositionRange(r), replacementText: 'RenamedA' }; + }), + }, + }); + + // excluded file closed + helper.verifyRename({ + marker2: { + newName: 'RenamedB', + changes: [], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/rename.string.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.string.fourslash.ts new file mode 100644 index 00000000..284e5b7d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.string.fourslash.ts @@ -0,0 +1,26 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// class [|/*marker*/A|]: +//// pass +//// +//// __all__ = ["[|A|]"] + +// @filename: test2.py +//// from test import [|A|] +//// +//// a: "[|A|]" = [|A|]() + +{ + helper.verifyRename({ + marker: { + newName: 'RenamedA', + changes: helper + .getRangesByText() + .get('A')! 
+ .map((r) => { + return { filePath: r.fileName, range: helper.convertPositionRange(r), replacementText: 'RenamedA' }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/rename.typeParams.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.typeParams.fourslash.ts new file mode 100644 index 00000000..19e5d14e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/rename.typeParams.fourslash.ts @@ -0,0 +1,29 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// class [|Test1|][T]: +//// def M(self, a: '[|Test1|]'): +//// pass + +// @filename: test2.py +//// from test import [|Test1|] +//// +//// b = [|[|/*marker*/Test1|]|]() + +{ + const ranges = helper.getRanges().filter((r) => !r.marker); + + helper.verifyRename({ + marker: { + newName: 'NewTest1', + changes: ranges.map((r) => { + return { filePath: r.fileName, range: helper.convertPositionRange(r), replacementText: 'NewTest1' }; + }), + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.incomingCalls.aliasedFunction.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.incomingCalls.aliasedFunction.fourslash.ts new file mode 100644 index 00000000..2cf73f55 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.incomingCalls.aliasedFunction.fourslash.ts @@ -0,0 +1,36 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: declare.py +//// def func(): +//// return 1 + +// @filename: consume.py +//// from declare import func +//// from declare import func as /*marker1*/foobar +//// +//// def callByName(): +//// func() +//// def [|callByAlias|](): +//// /*marker2*/foobar() + +// @filename: consume2.py +//// from declare import func as foobar +//// +//// def 
callByAlias2(): +//// func() + +{ + const ranges = helper.getRanges(); + const itemList = ranges.map((range) => { + return { filePath: range.fileName, range: helper.convertPositionRange(range), name: 'callByAlias' }; + }); + + helper.verifyShowCallHierarchyGetIncomingCalls({ + marker1: { + items: itemList, + }, + marker2: { + items: itemList, + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.incomingCalls.aliasedVariable.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.incomingCalls.aliasedVariable.fourslash.ts new file mode 100644 index 00000000..3ce2449d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.incomingCalls.aliasedVariable.fourslash.ts @@ -0,0 +1,17 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: declare.py +//// my_variable = "Hello, world!" + +// @filename: consume.py +//// from my_module import my_variable as /*marker*/greeting +//// +//// print(greeting) + +{ + helper.verifyShowCallHierarchyGetIncomingCalls({ + marker: { + items: [], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.incomingCalls.function.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.incomingCalls.function.fourslash.ts new file mode 100644 index 00000000..89663e94 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.incomingCalls.function.fourslash.ts @@ -0,0 +1,43 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: declare.py +//// def /*marker1*/func(): +//// return 1 + +// @filename: consume.py +//// from declare import func +//// from declare import /*marker2*/func as foobar +//// +//// def [|callByName|](): +//// /*marker3*/func() +//// def callByAlias(): +//// foobar() + +// @filename: consume2.py +//// from declare import func +//// +//// def 
[|callByName2|](): +//// func() + +{ + const ranges = helper.getRanges(); + const references = ranges.map((range) => { + return { path: range.fileName, range: helper.convertPositionRange(range) }; + }); + const itemList = [ + { filePath: references[0].path, range: references[0].range, name: 'callByName' }, + { filePath: references[1].path, range: references[1].range, name: 'callByName2' }, + ]; + + helper.verifyShowCallHierarchyGetIncomingCalls({ + marker1: { + items: itemList, + }, + marker2: { + items: itemList, + }, + marker3: { + items: itemList, + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.outgoingCalls.aliasedFunction1.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.outgoingCalls.aliasedFunction1.fourslash.ts new file mode 100644 index 00000000..e738e15f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.outgoingCalls.aliasedFunction1.fourslash.ts @@ -0,0 +1,57 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: declare.py +//// def func(): +//// func2() +//// return func3() +//// +//// def [|func2|](): +//// print(1) +//// +//// def [|func3|](): +//// return 1 + +// @filename: consume.py +//// +//// from declare import func as /*marker1*/foobar +//// +//// def callByAlias(): +//// /*marker2*/foobar() + +// @filename: consume2.py +//// from declare import func as foobar +//// from declare import func +//// +//// def callByBoth1(): +//// func() +//// /*marker3*/foobar() +//// +//// def callByBoth2(): +//// /*marker4*/foobar() +//// func() + +{ + const ranges = helper.getRanges(); + const references = ranges.map((range) => { + return { path: range.fileName, range: helper.convertPositionRange(range) }; + }); + const itemList = [ + { filePath: references[0].path, range: references[0].range, name: 'func2' }, + { filePath: references[1].path, range: references[1].range, name: 'func3' }, + ]; + + 
helper.verifyShowCallHierarchyGetOutgoingCalls({ + marker1: { + items: itemList, + }, + marker2: { + items: itemList, + }, + marker3: { + items: itemList, + }, + marker4: { + items: itemList, + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.outgoingCalls.aliasedFunction2.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.outgoingCalls.aliasedFunction2.fourslash.ts new file mode 100644 index 00000000..4b175afa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.outgoingCalls.aliasedFunction2.fourslash.ts @@ -0,0 +1,47 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: declare.py +//// def [|func|](): +//// func2() +//// +//// def func2(): +//// print(1) + +// @filename: consume.py +//// from declare import func as [|foobar|] +//// from declare import func +//// +//// def /*marker1*/callByAlias(): +//// foobar() +//// +//// def /*marker2*/callByName(): +//// func() +//// +//// def /*marker3*/callByBoth1(): +//// func() +//// foobar() +//// +//// def /*marker4*/callByBoth2(): +//// foobar() +//// func() +{ + const ranges = helper.getRanges(); + const references = ranges.map((range) => { + return { path: range.fileName, range: helper.convertPositionRange(range) }; + }); + + helper.verifyShowCallHierarchyGetOutgoingCalls({ + marker1: { + items: [{ filePath: references[0].path, range: references[0].range, name: 'foobar' }], + }, + marker2: { + items: [{ filePath: references[0].path, range: references[0].range, name: 'func' }], + }, + marker3: { + items: [{ filePath: references[0].path, range: references[0].range, name: 'func' }], + }, + marker4: { + items: [{ filePath: references[0].path, range: references[0].range, name: 'func' }], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.outgoingCalls.function.fourslash.ts 
b/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.outgoingCalls.function.fourslash.ts new file mode 100644 index 00000000..1282b76f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/showcallhierarchy.outgoingCalls.function.fourslash.ts @@ -0,0 +1,41 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: declare.py +//// def /*marker1*/func(): +//// func2() +//// return func3() +//// +//// def [|func2|](): +//// print(1) +//// +//// def [|func3|](): +//// return 1 + +// @filename: consume.py +//// from declare import /*marker2*/func +//// +//// def callByName(): +//// /*marker3*/func() + +{ + const ranges = helper.getRanges(); + const references = ranges.map((range) => { + return { path: range.fileName, range: helper.convertPositionRange(range) }; + }); + const itemList = [ + { filePath: references[0].path, range: references[0].range, name: 'func2' }, + { filePath: references[1].path, range: references[1].range, name: 'func3' }, + ]; + + helper.verifyShowCallHierarchyGetOutgoingCalls({ + marker1: { + items: itemList, + }, + marker2: { + items: itemList, + }, + marker3: { + items: itemList, + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/signature.builtinDocstrings.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.builtinDocstrings.fourslash.ts new file mode 100644 index 00000000..e65fafb6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.builtinDocstrings.fourslash.ts @@ -0,0 +1,45 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: docstrings.py +//// class A: ... 
+//// +//// class B: +//// """This is the class doc for B.""" +//// def __init__(self): +//// """This is the __init__ doc for B.""" +//// +//// class C: +//// """This is the class doc for C.""" +//// def __init__(self): +//// pass +//// +//// class D: +//// def __init__(self): +//// """This is the __init__ doc for D.""" +//// pass +//// +//// A([|/*a*/|]) +//// B([|/*b*/|]) +//// C([|/*c*/|]) +//// D([|/*d*/|]) + +{ + helper.verifySignature('plaintext', { + a: { + signatures: [{ label: '() -> A', parameters: [] }], + activeParameters: [undefined], + }, + b: { + signatures: [{ label: '() -> B', parameters: [], documentation: 'This is the __init__ doc for B.' }], + activeParameters: [undefined], + }, + c: { + signatures: [{ label: '() -> C', parameters: [], documentation: 'This is the class doc for C.' }], + activeParameters: [undefined], + }, + d: { + signatures: [{ label: '() -> D', parameters: [], documentation: 'This is the __init__ doc for D.' }], + activeParameters: [undefined], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/signature.complicated.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.complicated.fourslash.ts new file mode 100644 index 00000000..39c3e8fa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.complicated.fourslash.ts @@ -0,0 +1,131 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: complicated.py +//// from typing import Any, Optional, Type, Union, TypedDict, Unpack, NotRequired +//// +//// class Movie(TypedDict, extra_items=int): +//// key1: str +//// key2: NotRequired[int] +//// +//// class A: +//// def __init__(self, x: bool): ... +//// +//// def __call__(self, z: float) -> complex: ... +//// +//// def complicated(self, a: int, b: int, c: int = 1234, d: Optional[str] = None, **kwargs: Any) -> Union[int, str]: ... +//// +//// def typeddict(self, a: int, b: int, **kwargs: Unpack[Movie]) -> None: ... 
+//// +//// x = A(True[|/*init1*/|]) +//// +//// x.complicated([|/*c1*/|]) +//// +//// x.complicated(1, [|/*c2*/|]) +//// +//// x.complicated(1, [|/*c3/|], 3) +//// +//// x.complicated(1[|/*cA*/|],[|/*cB*/|] 2, 3, x=[|/*cX*/|]123, d="wo[|/*cD*/|]w", z[|/*cZ*/|]=1234) +//// +//// x.typeddict(1[|/*tdA*/|], [|/*tdB*/|]2, key1=[|/*tdkey1*/|]'r', key2=[|/*tdkey2*/|]4) +//// +//// x([|/*call*/|]) +//// +//// def get_cls() -> Type[A]: +//// return A +//// +//// y = get_cls() +//// +//// y(True[|/*init2*/|]) + +{ + const xInitSignatures = [ + { + label: '(x: bool) -> A', + parameters: ['x: bool'], + }, + ]; + + const xComplicatedSignatures = [ + { + label: '(a: int, b: int, c: int = 1234, d: str | None = None, **kwargs: Any) -> (int | str)', + parameters: ['a: int', 'b: int', 'c: int = 1234', 'd: str | None = None', '**kwargs: Any'], + }, + ]; + + const xTypedDictSignatures = [ + { + label: '(a: int, b: int, *, key1: str, key2: int = ..., **kwargs: int) -> None', + parameters: ['a: int', 'b: int', '*', 'key1: str', 'key2: int = ...', '**kwargs: int'], + }, + ]; + + const xCallSignatures = [ + { + label: '(z: float) -> complex', + parameters: ['z: float'], + }, + ]; + + helper.verifySignature('plaintext', { + init1: { + signatures: xInitSignatures, + activeParameters: [0], + }, + init2: { + signatures: xInitSignatures, + activeParameters: [0], + }, + c1: { + signatures: xComplicatedSignatures, + activeParameters: [0], + }, + c2: { + signatures: xComplicatedSignatures, + activeParameters: [1], + }, + c3: { + signatures: xComplicatedSignatures, + activeParameters: [1], + }, + cA: { + signatures: xComplicatedSignatures, + activeParameters: [0], + }, + cB: { + signatures: xComplicatedSignatures, + activeParameters: [1], + }, + cX: { + signatures: xComplicatedSignatures, + activeParameters: [4], + }, + cD: { + signatures: xComplicatedSignatures, + activeParameters: [3], + }, + cZ: { + signatures: xComplicatedSignatures, + activeParameters: [4], + }, + call: { + signatures: 
xCallSignatures, + activeParameters: [0], + }, + tdA: { + signatures: xTypedDictSignatures, + activeParameters: [0], + }, + tdB: { + signatures: xTypedDictSignatures, + activeParameters: [1], + }, + tdkey1: { + signatures: xTypedDictSignatures, + activeParameters: [3], + }, + tdkey2: { + signatures: xTypedDictSignatures, + activeParameters: [4], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/signature.cornercases.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.cornercases.fourslash.ts new file mode 100644 index 00000000..ac1e5281 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.cornercases.fourslash.ts @@ -0,0 +1,23 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: simple.py +//// +//// def simple(x: int, y: int) -> int: ... +//// +//// simple([|/*s1*/|] + +{ + const simpleSignatures = [ + { + label: '(x: int, y: int) -> int', + parameters: ['x: int', 'y: int'], + }, + ]; + + helper.verifySignature('plaintext', { + s1: { + signatures: simpleSignatures, + activeParameters: [0], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/signature.dataclassAlias.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.dataclassAlias.fourslash.ts new file mode 100644 index 00000000..abc1d7db --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.dataclassAlias.fourslash.ts @@ -0,0 +1,72 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test.py +//// from typing import Any, dataclass_transform +//// +//// def model_field(*, kw_only: bool = False, alias: str = "") -> Any: +//// ... +//// +//// @dataclass_transform(field_specifiers=(model_field,)) +//// class ModelBase: +//// ... 
+//// +//// class DC1(ModelBase): +//// before: int = model_field() +//// env: int = model_field(alias='Invalid Identifier') +//// +//// DC1([|/*dc1*/|]) +//// +//// class DC2(ModelBase): +//// before: int = model_field(kw_only=True) +//// env: int = model_field(kw_only=True, alias='Invalid Identifier') +//// +//// DC2([|/*dc2*/|]) +//// +//// class DC3(ModelBase): +//// before: int = model_field(kw_only=True) +//// env: int = model_field(kw_only=True, alias='Invalid Identifier') +//// after: int = model_field(kw_only=True) +//// +//// DC3([|/*dc3*/|]) +//// DC3(after=[|/*dc3_with_after*/|]) + +{ + helper.verifySignature('plaintext', { + dc1: { + signatures: [ + { + label: '(before: int, Invalid Identifier: int) -> DC1', + parameters: ['before: int', 'Invalid Identifier: int'], + }, + ], + activeParameters: [0], + }, + dc2: { + signatures: [ + { + label: '(*, before: int) -> DC2', + parameters: ['*', 'before: int'], + }, + ], + activeParameters: [undefined], + }, + dc3: { + signatures: [ + { + label: '(*, before: int, after: int) -> DC3', + parameters: ['*', 'before: int', 'after: int'], + }, + ], + activeParameters: [undefined], + }, + dc3_with_after: { + signatures: [ + { + label: '(*, before: int, after: int) -> DC3', + parameters: ['*', 'before: int', 'after: int'], + }, + ], + activeParameters: [2], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/signature.docstrings.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.docstrings.fourslash.ts new file mode 100644 index 00000000..7703d981 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.docstrings.fourslash.ts @@ -0,0 +1,43 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: docstrings.py +//// from typing import overload +//// +//// def repeat(a: str, b: int) -> str: +//// """Repeat the string ``a`` ``b`` times. 
+//// +//// >>> repeat('foo', 3) +//// 'foofoofoo' +//// """ +//// +//// return a * b +//// +//// repeat([|/*marker1*/|]) + +{ + helper.verifySignature('plaintext', { + marker1: { + signatures: [ + { + label: '(a: str, b: int) -> str', + parameters: ['a: str', 'b: int'], + documentation: "Repeat the string ``a`` ``b`` times.\n\n>>> repeat('foo', 3)\n'foofoofoo'", + }, + ], + activeParameters: [0], + }, + }); + + helper.verifySignature('markdown', { + marker1: { + signatures: [ + { + label: '(a: str, b: int) -> str', + parameters: ['a: str', 'b: int'], + documentation: "Repeat the string `a` `b` times.\n\n```\n>>> repeat('foo', 3)\n'foofoofoo'\n```", + }, + ], + activeParameters: [0], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/signature.docstrings.overloaded.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.docstrings.overloaded.fourslash.ts new file mode 100644 index 00000000..f082e399 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.docstrings.overloaded.fourslash.ts @@ -0,0 +1,50 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: docstrings.pyi +//// from typing import overload +//// +//// @overload +//// def repeat() -> str: +//// """This is a docstring on the first overload.""" +//// @overload +//// def repeat(x: int) -> int: ... 
+//// +//// repeat([|/*marker1*/|]) + +{ + helper.verifySignature('plaintext', { + marker1: { + signatures: [ + { + label: '() -> str', + parameters: [], + documentation: 'This is a docstring on the first overload.', + }, + { + label: '(x: int) -> int', + parameters: ['x: int'], + documentation: 'This is a docstring on the first overload.', + }, + ], + activeParameters: [undefined, 0], + }, + }); + + helper.verifySignature('markdown', { + marker1: { + signatures: [ + { + label: '() -> str', + parameters: [], + documentation: 'This is a docstring on the first overload.', + }, + { + label: '(x: int) -> int', + parameters: ['x: int'], + documentation: 'This is a docstring on the first overload.', + }, + ], + activeParameters: [undefined, 0], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/signature.docstrings.wildcardimports.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.docstrings.wildcardimports.fourslash.ts new file mode 100644 index 00000000..c2ca97f4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.docstrings.wildcardimports.fourslash.ts @@ -0,0 +1,127 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: lib1/definition.py +// @library: true +//// def func(): +//// '''func docs''' +//// pass +//// +//// class MyType: +//// def func2(self): +//// '''func2 docs''' +//// pass + +// @filename: lib1/alias.py +// @library: true +//// def func3(): +//// '''func3 docs''' +//// pass + +// @filename: lib1/withall.py +// @library: true +//// def func4(): +//// '''func4 docs''' +//// pass +//// +//// def func5(): +//// '''func5 docs''' +//// pass +//// +//// __all__ = ['func5'] + +// @filename: lib1/redirect.py +// @library: true +//// from . 
import withall +//// from .withall import * +//// +//// __all__ += withall.__all__ + +// @filename: lib1/wildcard.py +// @library: true +//// from .definition import * +//// from .redirect import * +//// from .alias import func3 + +// @filename: lib1/__init__.py +// @library: true +//// from .wildcard import * + +// @filename: lib1/__init__.pyi +// @library: true +//// class ufunc: +//// def __call__(self) -> None : ... +//// +//// func: ufunc +//// class MyType: +//// def func2(self) -> None : ... +//// func3: ufunc +//// func4: ufunc +//// func5: ufunc + +// @filename: test.py +//// import lib1 +//// lib1.func([|/*marker1*/|]) +//// lib1.MyType().func2([|/*marker2*/|]) +//// lib1.func3([|/*marker3*/|]) +//// lib1.func4([|/*marker4*/|]) +//// lib1.func5([|/*marker5*/|]) + +{ + helper.verifySignature('markdown', { + marker1: { + signatures: [ + { + label: '() -> None', + parameters: [], + documentation: 'func docs', + }, + ], + activeParameters: [undefined], + }, + marker2: { + signatures: [ + { + label: '() -> None', + parameters: [], + documentation: 'func2 docs', + }, + ], + activeParameters: [undefined], + }, + marker3: { + signatures: [ + { + label: '() -> None', + parameters: [], + documentation: 'func3 docs', + }, + ], + activeParameters: [undefined], + }, + marker4: { + signatures: [ + { + label: '() -> None', + parameters: [], + documentation: 'func4 docs', + }, + ], + activeParameters: [undefined], + }, + marker5: { + signatures: [ + { + label: '() -> None', + parameters: [], + documentation: 'func5 docs', + }, + ], + activeParameters: [undefined], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/signature.dunderNew.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.dunderNew.fourslash.ts new file mode 100644 index 00000000..68c08ec3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.dunderNew.fourslash.ts @@ -0,0 +1,26 @@ +/// <reference 
path="typings/fourslash.d.ts" /> + +// @filename: dunderNew.py +//// +//// class Foo: +//// def __new__(cls, x:int, y:int): +//// return super().__new__(cls) + +//// +//// Foo([|/*s1*/|] + +{ + const simpleSignatures = [ + { + label: '(x: int, y: int) -> Foo', + parameters: ['x: int', 'y: int'], + }, + ]; + + helper.verifySignature('plaintext', { + s1: { + signatures: simpleSignatures, + activeParameters: [0], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/signature.overload.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.overload.fourslash.ts new file mode 100644 index 00000000..4447f2a0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.overload.fourslash.ts @@ -0,0 +1,64 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: overloaded.py +//// from typing import overload +//// +//// @overload +//// def foo() -> int: ... +//// +//// @overload +//// def foo(x: int) -> int: ... +//// +//// @overload +//// def foo(x: int, y: int) -> str: ... +//// +//// def foo(*args): ... 
+//// +//// foo(1[|/*o1*/|], 2[|/*o2*/|]) +//// +//// foo(1, 2, [|/*o3*/|]) +//// +//// foo(1, 2, someVar[|/*o4*/|] , 4, 5, 6, 7, 8) +//// +//// foo([|/*o5*/|]) + +{ + const overloadedSignatures = [ + { + label: '() -> int', + parameters: [], + }, + { + label: '(x: int) -> int', + parameters: ['x: int'], + }, + { + label: '(x: int, y: int) -> str', + parameters: ['x: int', 'y: int'], + }, + ]; + + helper.verifySignature('plaintext', { + o1: { + signatures: overloadedSignatures, + activeParameters: [undefined, 0, 0], + }, + o2: { + signatures: overloadedSignatures, + activeParameters: [undefined, undefined, 1], + }, + o3: { + signatures: overloadedSignatures, + activeParameters: [undefined, undefined, undefined], + }, + o4: { + signatures: overloadedSignatures, + activeParameters: [undefined, undefined, undefined], + }, + o5: { + signatures: overloadedSignatures, + activeParameters: [undefined, 0, 0], + callHasParameters: false, + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/signature.simple.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.simple.fourslash.ts new file mode 100644 index 00000000..8c93a02a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/signature.simple.fourslash.ts @@ -0,0 +1,120 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: simple.py +//// +//// def simple(x: int, y: int) -> int: ... +//// +//// simple([|/*s1*/|])[|/*sOutside*/|] +//// +//// simple(1, [|/*s2*/|]) +//// +//// simple( [|/*s3*/|] 1 [|/*s4*/|] , [|/*s5*/|] 2 [|/*s6*/|] +//// [|/*s7*/|] ) +//// +//// x = 1234[|/*sNoCall*/|] +//// +//// 'string([|/*sNoCallInString*/|]'.capitalize() +//// +//// f'format string([|/*sNoCallInFormatString*/|]'.capitalize() +//// +//// f'format string {int.as_integer_ratio([|/*s8*/|])} '.capitalize() +//// +//// def foo(f:str): ... +//// +//// def bar(b:str): ... 
+//// +//// bar([|/*nestedString1*/|]foo([|/*nestedString2*/|])) +//// +//// def nested(x:int): +//// def inside(y:int): +//// return x + y +//// return inside +//// +//// nested(1)([|/*nestedString3*/|]) +{ + const simpleSignatures = [ + { + label: '(x: int, y: int) -> int', + parameters: ['x: int', 'y: int'], + }, + ]; + + helper.verifySignature('plaintext', { + s1: { + signatures: simpleSignatures, + activeParameters: [0], + }, + s2: { + signatures: simpleSignatures, + activeParameters: [1], + }, + s3: { + signatures: simpleSignatures, + activeParameters: [0], + }, + s4: { + signatures: simpleSignatures, + activeParameters: [0], + }, + s5: { + signatures: simpleSignatures, + activeParameters: [1], + }, + s6: { + signatures: simpleSignatures, + activeParameters: [1], + }, + s7: { + signatures: simpleSignatures, + activeParameters: [1], + }, + sOutside: { + noSig: true, + }, + sNoCall: { + noSig: true, + }, + sNoCallInString: { + noSig: true, + }, + sNoCallInFormatString: { + noSig: true, + }, + s8: { + signatures: [ + { + label: '(self: int) -> tuple[int, Literal[1]]', + parameters: ['self: int'], + }, + ], + activeParameters: [0], + }, + nestedString1: { + signatures: [ + { + label: '(b: str) -> None', + parameters: ['b: str'], + }, + ], + activeParameters: [0], + }, + nestedString2: { + signatures: [ + { + label: '(f: str) -> None', + parameters: ['f: str'], + }, + ], + activeParameters: [0], + }, + nestedString3: { + signatures: [ + { + label: '(y: int) -> int', + parameters: ['y: int'], + }, + ], + activeParameters: [0], + }, + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/tsconfig.json b/python-parser/packages/pyright-internal/src/tests/fourslash/tsconfig.json new file mode 100644 index 00000000..16731384 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "typeRoots": ["./node_modules/@types", 
"./typings"] + } +} \ No newline at end of file diff --git a/python-parser/packages/pyright-internal/src/tests/fourslash/typeVerifier.fourslash.ts b/python-parser/packages/pyright-internal/src/tests/fourslash/typeVerifier.fourslash.ts new file mode 100644 index 00000000..05a90947 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/fourslash/typeVerifier.fourslash.ts @@ -0,0 +1,50 @@ +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: test_pkg/py.typed +// @library: true +//// + +// @filename: test_pkg/__init__.py +// @library: true +//// +//// from .submodule1 import A as A +//// from ._submodule2 import B as B, func1 as func1 +//// + +// @filename: test_pkg/submodule1.py +// @library: true +//// +//// class A: +//// ... + +// @filename: test_pkg/_submodule2.py +// @library: true +//// +//// class B: +//// ... +//// +//// def func1(a: int = ...) -> None: +//// ... + +{ + helper.verifyTypeVerifierResults('test_pkg', /* ignoreUnknownTypesFromImports */ false, /* verboseOutput */ false, { + generalDiagnostics: [], + missingClassDocStringCount: 4, + missingDefaultParamCount: 1, + missingFunctionDocStringCount: 1, + moduleName: 'test_pkg', + packageName: 'test_pkg', + modules: new Map<string, object>([ + ['/lib/site-packages/test_pkg/__init__.py', {}], + ['/lib/site-packages/test_pkg/submodule1.py', {}], + ]), + symbols: new Map<string, object>([ + ['test_pkg.submodule1', {}], + ['test_pkg.submodule1.A', {}], + ['test_pkg.A', {}], + ['test_pkg.B', {}], + ['test_pkg._submodule2.B', {}], + ['test_pkg.func1', {}], + ]), + }); +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/fourslash/fourSlashParser.ts b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/fourSlashParser.ts new file mode 100644 index 00000000..d8978700 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/fourSlashParser.ts @@ -0,0 +1,1104 @@ +/* + * fourSlashParser.ts + * Copyright (c) Microsoft 
Corporation. + * Licensed under the MIT license. + * + * Parse fourslash markup code and return parsed content with marker/range data + */ + +import { contains } from '../../../common/collectionUtils'; +import { toBoolean } from '../../../common/core'; +import { + combinePaths, + getRelativePath, + isRootedDiskPath, + normalizePath, + normalizeSlashes, +} from '../../../common/pathUtils'; +import { UriEx } from '../../../common/uri/uriUtils'; +import { distlibFolder, libFolder } from '../vfs/factory'; +import { + CompilerSettingRawData, + CompilerSettings, + FourSlashData, + FourSlashFile, + GlobalMetadataOptionNames, + Marker, + MetadataOptionNames, + RawContentMapping, + RawContentMappingSegment, + RawToken, + RawTokenKind, + RawTokenRange, + Range, + fileMetadataNames, +} from './fourSlashTypes'; +import { findItemContainingOffset, findTokenIndexAtOrAfter } from './fourSlashRawUtils'; + +/** + * Parse given fourslash markup code and return content with markup/range data + * + * @param basePath this will be combined with given `fileName` to form filepath to this content + * @param contents content with fourslash markups. + * @param fileName this will be a default filename for the first no named content in `contents`. + * if content is marked with `@filename`, that will override this given `filename` + */ +export function parseTestData(basePath: string, contents: string, fileName: string): FourSlashData { + const normalizedBasePath = normalizeSlashes(basePath); + + // Historically, many fourslash strings ended with a trailing newline (often from a closing backtick on its own + // line). Some parsing logic assumes line feeds exist for line-to-offset mapping. Normalize to ensure the + // input always ends with an LF, so callers don't need to add a trailing empty line. + const rawText = contents.endsWith('\n') ? 
contents : `${contents}\n`; + const rawTokens: RawToken[] = []; + + // List of all the subfiles we've parsed out + const files: FourSlashFile[] = []; + // Global options + const globalOptions: CompilerSettings = {}; + const globalOptionsRawData: CompilerSettings<CompilerSettingRawData> = {}; + // Marker positions + + // Split up the input file by line + // Note: IE JS engine incorrectly handles consecutive delimiters here when using RegExp split, so + // we have to string-based splitting instead and try to figure out the delimiting chars + const lines = rawText.split('\n'); + let i = 0; + + const markerPositions = new Map<string, Marker>(); + const markers: Marker[] = []; + const ranges: Range[] = []; + + // Stuff related to the subfile we're parsing + let currentFileContent: string | undefined; + let currentFileName = normalizeSlashes(fileName); + let currentFileOptions: CompilerSettings = {}; + let currentFileOptionsRawData: CompilerSettings<CompilerSettingRawData> = {}; + let currentFileTokenRanges: RawTokenRange[] = []; + let currentFileContentToRawSegments: ContentToRawSegment[] = []; + let lastFourSlashLineLfOffset: number | undefined; + + let normalizedProjectRoot = normalizedBasePath; + + function nextFile() { + if (currentFileContent === undefined) { + return; + } + + if (toBoolean(currentFileOptions[MetadataOptionNames.library])) { + currentFileName = normalizePath( + combinePaths(libFolder.getFilePath(), getRelativePath(currentFileName, normalizedBasePath)) + ); + } + + if (toBoolean(currentFileOptions[MetadataOptionNames.distLibrary])) { + currentFileName = normalizePath( + combinePaths(distlibFolder.getFilePath(), getRelativePath(currentFileName, normalizedBasePath)) + ); + } + + const ignoreCase = toBoolean(globalOptions[GlobalMetadataOptionNames.ignoreCase]); + const file = parseFileContent( + currentFileContent, + currentFileContentToRawSegments, + rawTokens, + currentFileName, + ignoreCase, + markerPositions, + markers, + ranges + ); + 
file.fileOptions = currentFileOptions; + + const mappingRawData = file.rawData; + file.rawData = { + tokenRanges: currentFileTokenRanges, + fileOptionsRawData: currentFileOptionsRawData, + rawToContent: mappingRawData?.rawToContent, + contentToRaw: mappingRawData?.contentToRaw, + }; + + // Store result file + files.push(file); + + currentFileContent = undefined; + currentFileOptions = {}; + currentFileOptionsRawData = {}; + currentFileTokenRanges = []; + currentFileContentToRawSegments = []; + lastFourSlashLineLfOffset = undefined; + currentFileName = fileName; + } + + let rawOffset = 0; + for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) { + const lineWithPotentialCr = lines[lineIndex]; + i++; + + const lineStartRawOffset = rawOffset; + const hasLf = lineIndex < lines.length - 1; + + const lineTokenStart = rawTokens.length; + const tokenizeResult = tokenizeRawLine(rawText, rawTokens, lineStartRawOffset, lineWithPotentialCr, hasLf); + const lineTokenEnd = rawTokens.length; + + // Maintain legacy parsing behavior: treat CRLF as LF-delimited lines with a trailing '\r' in the line text. 
+ let line = lineWithPotentialCr; + if (line.length > 0 && line.charAt(line.length - 1) === '\r') { + line = line.substr(0, line.length - 1); + } + + if (line.substr(0, 4) === '////') { + const text = line.substr(4); + currentFileTokenRanges.push({ startToken: lineTokenStart, endToken: lineTokenEnd }); + + if (currentFileContent === undefined) { + currentFileContent = text; + currentFileContentToRawSegments = [ + { + contentStart: 0, + contentEnd: text.length, + rawStart: lineStartRawOffset + 4, + }, + ]; + } else { + const newlineContentOffset = currentFileContent.length; + currentFileContent = currentFileContent + '\n' + text; + + if (lastFourSlashLineLfOffset === undefined) { + throw new Error(`Missing line feed mapping for four-slash line ending at line ${i - 1}`); + } + + currentFileContentToRawSegments.push({ + contentStart: newlineContentOffset, + contentEnd: newlineContentOffset + 1, + rawStart: lastFourSlashLineLfOffset, + }); + + const textContentStart = newlineContentOffset + 1; + currentFileContentToRawSegments.push({ + contentStart: textContentStart, + contentEnd: textContentStart + text.length, + rawStart: lineStartRawOffset + 4, + }); + } + + // Record the raw offset of the '\n' for this line (used if another four-slash line follows). + lastFourSlashLineLfOffset = hasLf ? 
lineStartRawOffset + lineWithPotentialCr.length : undefined; + } else if (line.substr(0, 3) === '///' && currentFileContent !== undefined) { + throw new Error(`Three-slash line in the middle of four-slash region at line ${i}`); + } else if (line.substr(0, 2) === '//') { + // Comment line, check for global/file @options and record them + const directive = tryParseOptionDirective(line.substr(2)); + if (directive) { + const key = directive.key.toLowerCase(); + const value = directive.value; + const directiveRawData = tokenizeResult.directiveRawData; + + if (!contains(fileMetadataNames, key)) { + // Check if the match is already existed in the global options + if (globalOptions[key] !== undefined) { + throw new Error(`Global option '${key}' already exists`); + } + globalOptions[key] = value; + + if (directiveRawData) { + globalOptionsRawData[key] = directiveRawData; + } + + if (key === GlobalMetadataOptionNames.projectRoot) { + normalizedProjectRoot = combinePaths(normalizedBasePath, value); + } + } else { + switch (key) { + case MetadataOptionNames.fileName: { + // Found an @FileName directive, if this is not the first then create a new subfile + nextFile(); + const normalizedPath = normalizeSlashes(value); + currentFileName = isRootedDiskPath(normalizedPath) + ? normalizedPath + : combinePaths(normalizedProjectRoot, normalizedPath); + currentFileOptions[key] = value; + if (directiveRawData) { + currentFileOptionsRawData[key] = directiveRawData; + } + break; + } + default: + // Add other fileMetadata flag + currentFileOptions[key] = value; + if (directiveRawData) { + currentFileOptionsRawData[key] = directiveRawData; + } + } + } + } + } else if (line !== '' || i === lines.length) { + // Previously blank lines between fourslash content caused it to be considered as 2 files, + // Remove this behavior since it just causes errors now + // + // Code line, terminate current subfile if there is one + nextFile(); + } + + rawOffset += lineWithPotentialCr.length + (hasLf ? 
1 : 0); + } + + return { + markerPositions, + markers, + globalOptions, + globalOptionsRawData, + files, + ranges, + rawText, + rawTokens, + }; +} + +interface ContentToRawSegment { + contentStart: number; + contentEnd: number; + rawStart: number; +} + +interface ParsedOptionDirective { + key: string; + value: string; +} + +function tryParseOptionDirective(textAfterTwoSlash: string): ParsedOptionDirective | undefined { + // Matches the legacy behavior of: /^\s*@(\w+):\s*(.*)\s*/ + let i = 0; + while (i < textAfterTwoSlash.length && /\s/.test(textAfterTwoSlash[i])) { + i++; + } + + if (i >= textAfterTwoSlash.length || textAfterTwoSlash[i] !== '@') { + return undefined; + } + + i++; + const nameStart = i; + while (i < textAfterTwoSlash.length && /\w/.test(textAfterTwoSlash[i])) { + i++; + } + + if (i === nameStart) { + return undefined; + } + + const key = textAfterTwoSlash.substring(nameStart, i); + while (i < textAfterTwoSlash.length && /\s/.test(textAfterTwoSlash[i])) { + i++; + } + + if (i >= textAfterTwoSlash.length || textAfterTwoSlash[i] !== ':') { + return undefined; + } + + i++; + while (i < textAfterTwoSlash.length && /\s/.test(textAfterTwoSlash[i])) { + i++; + } + + const valueStart = i; + let valueEnd = textAfterTwoSlash.length; + while (valueEnd > valueStart && /\s/.test(textAfterTwoSlash[valueEnd - 1])) { + valueEnd--; + } + + const value = textAfterTwoSlash.substring(valueStart, valueEnd); + return { key, value }; +} + +interface TokenizeRawLineResult { + directiveRawData?: CompilerSettingRawData | undefined; +} + +function tokenizeRawLine( + rawText: string, + rawTokens: RawToken[], + lineStartRawOffset: number, + lineTextIncludingOptionalCR: string, + hasLf: boolean +): TokenizeRawLineResult { + const lineTokenStart = rawTokens.length; + + const hasCr = lineTextIncludingOptionalCR.length > 0 && lineTextIncludingOptionalCR.endsWith('\r'); + const lineBody = hasCr + ? 
lineTextIncludingOptionalCR.substring(0, lineTextIncludingOptionalCR.length - 1) + : lineTextIncludingOptionalCR; + + const lineBodyStart = lineStartRawOffset; + const lineBodyEnd = lineStartRawOffset + lineBody.length; + let directiveRawData: CompilerSettingRawData | undefined; + + const push = (kind: RawTokenKind, start: number, end: number) => { + if (start < end) { + rawTokens.push({ kind, start, end }); + } + }; + + const tokenizePlain = (start: number, end: number) => { + let pos = start; + while (pos < end) { + const ch = rawText[pos]; + if (ch === ' ' || ch === '\t') { + const wsStart = pos; + pos++; + while (pos < end && (rawText[pos] === ' ' || rawText[pos] === '\t')) { + pos++; + } + push(RawTokenKind.Whitespace, wsStart, pos); + } else { + const textStart = pos; + pos++; + while (pos < end && rawText[pos] !== ' ' && rawText[pos] !== '\t') { + pos++; + } + push(RawTokenKind.Text, textStart, pos); + } + } + }; + + if (lineBody.startsWith('////')) { + push(RawTokenKind.FourSlashPrefix, lineBodyStart, lineBodyStart + 4); + tokenizeFourSlashRemainder(rawText, rawTokens, lineBodyStart + 4, lineBodyEnd); + } else if (lineBody.startsWith('//')) { + const prefixStartToken = rawTokens.length; + push(RawTokenKind.TwoSlashPrefix, lineBodyStart, lineBodyStart + 2); + + const afterPrefixStart = lineBodyStart + 2; + const afterPrefixText = rawText.substring(afterPrefixStart, lineBodyEnd); + const directive = tryParseOptionDirective(afterPrefixText); + if (!directive) { + tokenizePlain(afterPrefixStart, lineBodyEnd); + } else { + const isWhitespaceNotNewline = (ch: string) => /\s/.test(ch) && ch !== '\r' && ch !== '\n'; + + const consumeWhitespaceTokens = (pos: number, stopChar?: string): number => { + while ( + pos < lineBodyEnd && + isWhitespaceNotNewline(rawText[pos]) && + (stopChar === undefined || rawText[pos] !== stopChar) + ) { + const wsStart = pos; + pos++; + while ( + pos < lineBodyEnd && + isWhitespaceNotNewline(rawText[pos]) && + (stopChar === undefined || 
rawText[pos] !== stopChar) + ) { + pos++; + } + push(RawTokenKind.Whitespace, wsStart, pos); + } + + return pos; + }; + + // Tokenize with directive structure. This keeps token spans aligned to the stored value. + let pos = afterPrefixStart; + + // Leading whitespace. + pos = consumeWhitespaceTokens(pos); + + const atStartToken = rawTokens.length; + push(RawTokenKind.DirectiveAt, pos, pos + 1); + pos++; + + const nameStart = pos; + while (pos < lineBodyEnd && /\w/.test(rawText[pos])) { + pos++; + } + const nameTokenIndex = rawTokens.length; + push(RawTokenKind.DirectiveName, nameStart, pos); + + // Whitespace before ':' + pos = consumeWhitespaceTokens(pos, ':'); + + const colonTokenIndex = rawTokens.length; + push(RawTokenKind.DirectiveColon, pos, pos + 1); + pos++; + + // Whitespace after ':' + pos = consumeWhitespaceTokens(pos); + + const valueStart = pos; + let valueEnd = lineBodyEnd; + while (valueEnd > valueStart && /\s/.test(rawText[valueEnd - 1])) { + valueEnd--; + } + + const valueTokenStart = rawTokens.length; + push(RawTokenKind.DirectiveValue, valueStart, valueEnd); + const valueTokenEnd = rawTokens.length; + + // Trailing whitespace. + tokenizePlain(valueEnd, lineBodyEnd); + + directiveRawData = { + directiveLine: { startToken: lineTokenStart, endToken: -1 }, + prefix: { startToken: prefixStartToken, endToken: prefixStartToken + 1 }, + name: { startToken: atStartToken, endToken: nameTokenIndex + 1 }, + colon: { startToken: colonTokenIndex, endToken: colonTokenIndex + 1 }, + value: { startToken: valueTokenStart, endToken: valueTokenEnd }, + }; + } + } else { + tokenizePlain(lineBodyStart, lineBodyEnd); + } + + // CR and LF are tokenized separately. 
+ if (hasCr) { + push(RawTokenKind.NewLineCR, lineBodyEnd, lineBodyEnd + 1); + } + if (hasLf) { + const lfStart = lineStartRawOffset + lineTextIncludingOptionalCR.length; + push(RawTokenKind.NewLineLF, lfStart, lfStart + 1); + } + + const lineTokenEnd = rawTokens.length; + if (directiveRawData) { + directiveRawData.directiveLine.endToken = lineTokenEnd; + } + + return { directiveRawData }; +} + +function tokenizeFourSlashRemainder(rawText: string, rawTokens: RawToken[], start: number, end: number): void { + const push = (kind: RawTokenKind, s: number, e: number) => { + if (s < e) { + rawTokens.push({ kind, start: s, end: e }); + } + }; + + const validMarkerChars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz$1234567890_'; + + let pos = start; + while (pos < end) { + const ch = rawText[pos]; + if (ch === ' ' || ch === '\t') { + const wsStart = pos; + pos++; + while (pos < end && (rawText[pos] === ' ' || rawText[pos] === '\t')) { + pos++; + } + push(RawTokenKind.Whitespace, wsStart, pos); + continue; + } + + // Range delimiters. + if (pos + 1 < end && rawText[pos] === '[' && rawText[pos + 1] === '|') { + push(RawTokenKind.RangeStart, pos, pos + 2); + pos += 2; + continue; + } + if (pos + 1 < end && rawText[pos] === '|' && rawText[pos + 1] === ']') { + push(RawTokenKind.RangeEnd, pos, pos + 2); + pos += 2; + continue; + } + + // Object markers. + if (pos + 1 < end && rawText[pos] === '{' && rawText[pos + 1] === '|') { + const closeIndex = rawText.indexOf('|}', pos + 2); + if (closeIndex >= 0 && closeIndex + 2 <= end) { + push(RawTokenKind.ObjectMarkerStart, pos, pos + 2); + if (pos + 2 < closeIndex) { + push(RawTokenKind.ObjectMarkerText, pos + 2, closeIndex); + } + push(RawTokenKind.ObjectMarkerEnd, closeIndex, closeIndex + 2); + pos = closeIndex + 2; + continue; + } + } + + // Slash-star markers. 
+ if (pos + 1 < end && rawText[pos] === '/' && rawText[pos + 1] === '*') { + const closeIndex = rawText.indexOf('*/', pos + 2); + if (closeIndex >= 0 && closeIndex + 2 <= end) { + let isValidMarker = true; + for (let j = pos + 2; j < closeIndex; j++) { + if (validMarkerChars.indexOf(rawText[j]) < 0) { + isValidMarker = false; + break; + } + } + + if (isValidMarker) { + push(RawTokenKind.MarkerStart, pos, pos + 2); + if (pos + 2 < closeIndex) { + push(RawTokenKind.MarkerName, pos + 2, closeIndex); + } + push(RawTokenKind.MarkerEnd, closeIndex, closeIndex + 2); + pos = closeIndex + 2; + continue; + } + } + } + + // Plain text chunk until whitespace or a known delimiter. + const textStart = pos; + pos++; + while (pos < end) { + const c = rawText[pos]; + if (c === ' ' || c === '\t') { + break; + } + if (pos + 1 < end) { + const c2 = rawText[pos + 1]; + if ( + (c === '[' && c2 === '|') || + (c === '|' && c2 === ']') || + (c === '{' && c2 === '|') || + (c === '|' && c2 === '}') || + (c === '/' && c2 === '*') || + (c === '*' && c2 === '/') + ) { + break; + } + } + pos++; + } + push(RawTokenKind.Text, textStart, pos); + } +} + +interface LocationInformation { + position: number; + sourcePosition: number; + sourceLine: number; + sourceColumn: number; +} + +interface RangeLocationInformation extends LocationInformation { + marker?: Marker | undefined; + rawOpen?: RawSpan | undefined; +} + +const enum State { + none, + inSlashStarMarker, + inObjectMarker, +} + +function reportError(fileName: string, line: number, col: number, message: string) { + const errorMessage = `${fileName}(${line},${col}): ${message}`; + throw new Error(errorMessage); +} + +function recordObjectMarker( + fileName: string, + ignoreCase: boolean, + location: LocationInformation, + text: string, + markerMap: Map<string, Marker>, + markers: Marker[], + rawData?: Marker['rawData'] +): Marker | undefined { + let markerValue: unknown; + try { + // Attempt to parse the marker value as JSON + markerValue = 
JSON.parse('{ ' + text + ' }') as unknown; + } catch (e: unknown) { + const message = e instanceof Error ? e.message : String(e); + reportError(fileName, location.sourceLine, location.sourceColumn, `Unable to parse marker text ${message}`); + } + + if (markerValue === undefined || markerValue === null || typeof markerValue !== 'object') { + reportError(fileName, location.sourceLine, location.sourceColumn, 'Object markers can not be empty'); + return undefined; + } + + const markerData = markerValue as {}; + + const marker: Marker = { + fileName, + fileUri: UriEx.file(fileName, !ignoreCase), + position: location.position, + data: markerData, + rawData, + }; + + // Object markers can be anonymous + const markerNameValue = (markerValue as Record<string, unknown>).name; + if (markerNameValue) { + // Preserve legacy behavior: this may not be a string at runtime. + markerMap.set(markerNameValue as unknown as string, marker); + } + + markers.push(marker); + + return marker; +} + +function recordMarker( + fileName: string, + ignoreCase: boolean, + location: LocationInformation, + name: string, + markerMap: Map<string, Marker>, + markers: Marker[], + rawData?: Marker['rawData'] +): Marker | undefined { + const marker: Marker = { + fileName, + fileUri: UriEx.file(fileName, !ignoreCase), + position: location.position, + rawData, + }; + + // Verify markers for uniqueness + if (markerMap.has(name)) { + const message = "Marker '" + name + "' is duplicated in the source file contents."; + reportError(marker.fileName, location.sourceLine, location.sourceColumn, message); + return undefined; + } else { + markerMap.set(name, marker); + markers.push(marker); + return marker; + } +} + +function parseFileContent( + content: string, + contentToRawSegments: ContentToRawSegment[], + rawTokens: RawToken[], + fileName: string, + ignoreCase: boolean, + markerMap: Map<string, Marker>, + markers: Marker[], + ranges: Range[] +): FourSlashFile { + ({ content, segments: contentToRawSegments } = 
chompLeadingSpaceWithMapping(content, contentToRawSegments)); + + // Any slash-star comment with a character not in this string is not a marker. + const validMarkerChars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz$1234567890_'; + + /// The file content (minus metacharacters) so far + let output = ''; + + // Mapping segments for the final FourSlashFile.content -> rawText offsets. + const contentToRawOutputSegments: RawContentMappingSegment[] = []; + + /// The current marker (or maybe multi-line comment?) we're parsing, possibly + let openMarker: LocationInformation | undefined; + + /// A stack of the open range markers that are still unclosed + const openRanges: RangeLocationInformation[] = []; + + /// A list of ranges we've collected so far */ + let localRanges: Range[] = []; + + /// The latest position of the start of an unflushed plain text area + let lastNormalCharPosition = 0; + + /// The total number of metacharacters removed from the file (so far) + let difference = 0; + + /// The fourslash file state object we are generating + let state: State = State.none; + + /// Current position data + let line = 1; + let column = 1; + + const flush = (lastSafeCharIndex: number | undefined) => { + const safeIndex = lastSafeCharIndex ?? 
content.length; + if (safeIndex <= lastNormalCharPosition) { + return; + } + + const outputStart = output.length; + output = output + content.substring(lastNormalCharPosition, safeIndex); + appendOutputMappingSegments( + contentToRawSegments, + lastNormalCharPosition, + safeIndex, + outputStart, + contentToRawOutputSegments + ); + }; + + if (content.length > 0) { + let previousChar = content.charAt(0); + for (let i = 1; i < content.length; i++) { + const currentChar = content.charAt(i); + switch (state) { + case State.none: + if (previousChar === '[' && currentChar === '|') { + // found a range start + const rawOpen = getRawSpanFromContentSpan(contentToRawSegments, i - 1, i + 1); + openRanges.push({ + position: i - 1 - difference, + sourcePosition: i - 1, + sourceLine: line, + sourceColumn: column, + rawOpen, + }); + // copy all text up to marker position + flush(i - 1); + lastNormalCharPosition = i + 1; + difference += 2; + } else if (previousChar === '|' && currentChar === ']') { + // found a range end + const rangeStart = openRanges.pop(); + if (!rangeStart) { + reportError(fileName, line, column, 'Found range end with no matching start.'); + } + + const rawClose = getRawSpanFromContentSpan(contentToRawSegments, i - 1, i + 1); + const rawSelectedStart = rangeStart!.rawOpen?.rawEnd ?? rawClose.rawStart; + const rawSelectedEnd = rawClose.rawStart; + const rawFullStart = rangeStart!.rawOpen?.rawStart ?? rawClose.rawStart; + const rawFullEnd = rawClose.rawEnd; + + const range: Range = { + fileName, + fileUri: UriEx.file(fileName, !ignoreCase), + pos: rangeStart!.position, + end: i - 1 - difference, + marker: rangeStart!.marker, + rawData: { + full: getTokenRangeCoveringRawSpan(rawTokens, rawFullStart, rawFullEnd), + open: getTokenRangeCoveringRawSpan( + rawTokens, + rangeStart!.rawOpen?.rawStart ?? rawClose.rawStart, + rangeStart!.rawOpen?.rawEnd ?? 
rawClose.rawStart + ), + selected: getTokenRangeCoveringRawSpan(rawTokens, rawSelectedStart, rawSelectedEnd), + close: getTokenRangeCoveringRawSpan(rawTokens, rawClose.rawStart, rawClose.rawEnd), + }, + }; + localRanges.push(range); + + // copy all text up to range marker position + flush(i - 1); + lastNormalCharPosition = i + 1; + difference += 2; + } else if (previousChar === '/' && currentChar === '*') { + // found a possible marker start + state = State.inSlashStarMarker; + openMarker = { + position: i - 1 - difference, + sourcePosition: i - 1, + sourceLine: line, + sourceColumn: column, + }; + } else if (previousChar === '{' && currentChar === '|') { + // found an object marker start + state = State.inObjectMarker; + openMarker = { + position: i - 1 - difference, + sourcePosition: i - 1, + sourceLine: line, + sourceColumn: column, + }; + flush(i - 1); + } + break; + + case State.inObjectMarker: + // Object markers are only ever terminated by |} and have no content restrictions + if (previousChar === '|' && currentChar === '}') { + const rawFull = getRawSpanFromContentSpan( + contentToRawSegments, + openMarker!.sourcePosition, + i + 1 + ); + const rawStart = getRawSpanFromContentSpan( + contentToRawSegments, + openMarker!.sourcePosition, + openMarker!.sourcePosition + 2 + ); + const rawEnd = getRawSpanFromContentSpan(contentToRawSegments, i - 1, i + 1); + const rawTextSpan = getRawSpanFromContentSpan( + contentToRawSegments, + openMarker!.sourcePosition + 2, + i - 1 + ); + + // Record the marker + const objectMarkerNameText = content.substring(openMarker!.sourcePosition + 2, i - 1).trim(); + const marker = recordObjectMarker( + fileName, + ignoreCase, + openMarker!, + objectMarkerNameText, + markerMap, + markers, + { + kind: 'object', + full: getTokenRangeCoveringRawSpan(rawTokens, rawFull.rawStart, rawFull.rawEnd), + start: getTokenRangeCoveringRawSpan(rawTokens, rawStart.rawStart, rawStart.rawEnd), + text: getTokenRangeCoveringRawSpan(rawTokens, 
rawTextSpan.rawStart, rawTextSpan.rawEnd), + end: getTokenRangeCoveringRawSpan(rawTokens, rawEnd.rawStart, rawEnd.rawEnd), + } + ); + + if (openRanges.length > 0) { + openRanges[openRanges.length - 1].marker = marker; + } + + // Set the current start to point to the end of the current marker to ignore its text + lastNormalCharPosition = i + 1; + difference += i + 1 - openMarker!.sourcePosition; + + // Reset the state + openMarker = undefined; + state = State.none; + } + break; + + case State.inSlashStarMarker: + if (previousChar === '*' && currentChar === '/') { + const rawFull = getRawSpanFromContentSpan( + contentToRawSegments, + openMarker!.sourcePosition, + i + 1 + ); + const rawStart = getRawSpanFromContentSpan( + contentToRawSegments, + openMarker!.sourcePosition, + openMarker!.sourcePosition + 2 + ); + const rawEnd = getRawSpanFromContentSpan(contentToRawSegments, i - 1, i + 1); + const rawNameSpan = getRawSpanFromContentSpan( + contentToRawSegments, + openMarker!.sourcePosition + 2, + i - 1 + ); + + // Record the marker + // start + 2 to ignore the */, -1 on the end to ignore the * (/ is next) + const markerNameText = content.substring(openMarker!.sourcePosition + 2, i - 1).trim(); + const marker = recordMarker( + fileName, + ignoreCase, + openMarker!, + markerNameText, + markerMap, + markers, + { + kind: 'slashStar', + full: getTokenRangeCoveringRawSpan(rawTokens, rawFull.rawStart, rawFull.rawEnd), + start: getTokenRangeCoveringRawSpan(rawTokens, rawStart.rawStart, rawStart.rawEnd), + name: getTokenRangeCoveringRawSpan(rawTokens, rawNameSpan.rawStart, rawNameSpan.rawEnd), + end: getTokenRangeCoveringRawSpan(rawTokens, rawEnd.rawStart, rawEnd.rawEnd), + } + ); + + if (openRanges.length > 0) { + openRanges[openRanges.length - 1].marker = marker; + } + + // Set the current start to point to the end of the current marker to ignore its text + flush(openMarker!.sourcePosition); + lastNormalCharPosition = i + 1; + difference += i + 1 - openMarker!.sourcePosition; 
+ + // Reset the state + openMarker = undefined; + state = State.none; + } else if (validMarkerChars.indexOf(currentChar) < 0) { + if (currentChar === '*' && i < content.length - 1 && content.charAt(i + 1) === '/') { + // The marker is about to be closed, ignore the 'invalid' char + } else { + // We've hit a non-valid marker character, so we were actually in a block comment + // Bail out the text we've gathered so far back into the output + flush(i); + lastNormalCharPosition = i; + openMarker = undefined; + + state = State.none; + } + } + break; + } + + if (currentChar === '\n' && previousChar === '\r') { + // Ignore trailing \n after a \r + continue; + } else if (currentChar === '\n' || currentChar === '\r') { + line++; + column = 1; + continue; + } + + column++; + previousChar = currentChar; + } + } + + // Add the remaining text + flush(/* lastSafeCharIndex */ undefined); + + if (openRanges.length > 0) { + const openRange = openRanges[0]; + reportError(fileName, openRange.sourceLine, openRange.sourceColumn, 'Unterminated range.'); + } + + if (openMarker) { + reportError(fileName, openMarker.sourceLine, openMarker.sourceColumn, 'Unterminated marker.'); + } + + // put ranges in the correct order + localRanges = localRanges.sort((a, b) => (a.pos < b.pos ? -1 : a.pos === b.pos && a.end > b.end ? 
-1 : 1)); + localRanges.forEach((r) => { + ranges.push(r); + }); + + const contentToRaw: RawContentMapping = { segments: contentToRawOutputSegments }; + const rawToContent: RawContentMapping = { segments: contentToRawOutputSegments }; + + return { + content: output, + fileOptions: {}, + version: 0, + fileName, + fileUri: UriEx.file(fileName, !ignoreCase), + rawData: { + tokenRanges: [], + rawToContent, + contentToRaw, + }, + }; +} + +interface RawSpan { + rawStart: number; + rawEnd: number; +} + +function getRawSpanFromContentSpan(segments: ContentToRawSegment[], contentStart: number, contentEnd: number): RawSpan { + if (contentStart === contentEnd) { + const rawOffset = tryGetRawOffsetFromContentIndex(segments, contentStart) ?? 0; + return { rawStart: rawOffset, rawEnd: rawOffset }; + } + + const rawStart = tryGetRawOffsetFromContentIndex(segments, contentStart) ?? 0; + const rawLast = tryGetRawOffsetFromContentIndex(segments, contentEnd - 1) ?? rawStart; + return { rawStart, rawEnd: rawLast + 1 }; +} + +function tryGetRawOffsetFromContentIndex(segments: ContentToRawSegment[], contentIndex: number): number | undefined { + const seg = findItemContainingOffset( + segments, + contentIndex, + (s) => s.contentStart, + (s) => s.contentEnd + ); + if (!seg) { + return undefined; + } + + return seg.rawStart + (contentIndex - seg.contentStart); +} + +function getTokenRangeCoveringRawSpan(rawTokens: RawToken[], rawStart: number, rawEnd: number): RawTokenRange { + if (rawStart === rawEnd) { + const tokenIndex = findTokenIndexAtOrAfter(rawTokens, rawStart); + return { startToken: tokenIndex, endToken: tokenIndex }; + } + + const startToken = findTokenIndexAtOrAfter(rawTokens, rawStart); + const endToken = findTokenIndexAtOrAfter(rawTokens, rawEnd); + return { startToken, endToken }; +} + +function appendOutputMappingSegments( + sourceSegments: ContentToRawSegment[], + sourceStart: number, + sourceEnd: number, + outputStart: number, + out: RawContentMappingSegment[] +): void { 
+ for (const seg of sourceSegments) { + const overlapStart = Math.max(sourceStart, seg.contentStart); + const overlapEnd = Math.min(sourceEnd, seg.contentEnd); + if (overlapStart >= overlapEnd) { + continue; + } + + const overlapLen = overlapEnd - overlapStart; + const rawStart = seg.rawStart + (overlapStart - seg.contentStart); + const contentStart = outputStart + (overlapStart - sourceStart); + out.push({ + rawStart, + rawEnd: rawStart + overlapLen, + contentStart, + contentEnd: contentStart + overlapLen, + }); + } +} + +function chompLeadingSpaceWithMapping( + content: string, + contentToRawSegments: ContentToRawSegment[] +): { content: string; segments: ContentToRawSegment[] } { + const lines = content.split('\n'); + for (const line of lines) { + if (line.length !== 0 && line.charAt(0) !== ' ') { + return { content, segments: contentToRawSegments }; + } + } + + // Remove one leading space from each line. + const newContent = lines.map((s) => s.substr(1)).join('\n'); + + // Rebuild mapping segments by walking line-by-line over the original content. + const newSegments: ContentToRawSegment[] = []; + let sourcePos = 0; + let outPos = 0; + for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) { + const line = lines[lineIndex]; + const lineStart = sourcePos; + const lineEnd = lineStart + line.length; + + // Keep everything after the removed leading space. + if (line.length > 0) { + const keepStart = lineStart + 1; + const keepEnd = lineEnd; + appendChompedSegments(contentToRawSegments, keepStart, keepEnd, outPos, newSegments); + outPos += keepEnd - keepStart; + } + + sourcePos = lineEnd; + + // Keep newline except for the final line. 
+ if (lineIndex < lines.length - 1) { + appendChompedSegments(contentToRawSegments, sourcePos, sourcePos + 1, outPos, newSegments); + sourcePos += 1; + outPos += 1; + } + } + + return { content: newContent, segments: newSegments }; +} + +function appendChompedSegments( + sourceSegments: ContentToRawSegment[], + sourceStart: number, + sourceEnd: number, + outputStart: number, + out: ContentToRawSegment[] +): void { + for (const seg of sourceSegments) { + const overlapStart = Math.max(sourceStart, seg.contentStart); + const overlapEnd = Math.min(sourceEnd, seg.contentEnd); + if (overlapStart >= overlapEnd) { + continue; + } + + const overlapLen = overlapEnd - overlapStart; + const rawStart = seg.rawStart + (overlapStart - seg.contentStart); + const contentStart = outputStart + (overlapStart - sourceStart); + out.push({ + contentStart, + contentEnd: contentStart + overlapLen, + rawStart, + }); + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/fourslash/fourSlashRawUtils.ts b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/fourSlashRawUtils.ts new file mode 100644 index 00000000..7681bf57 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/fourSlashRawUtils.ts @@ -0,0 +1,189 @@ +import { FourSlashData, FourSlashFile, RawContentMapping, RawToken } from './fourSlashTypes'; + +export function getRawTokenText(rawText: string, token: RawToken): string { + return rawText.slice(token.start, token.end); +} + +export function reconstructRawTextFromTokens(data: FourSlashData): string { + if (!data.rawText || !data.rawTokens) { + return ''; + } + + return data.rawTokens.map((t) => data.rawText!.slice(t.start, t.end)).join(''); +} + +export function getFileAtRawOffset(data: FourSlashData, rawOffset: number): FourSlashFile | undefined { + const tokenIndex = findTokenIndexAtOrAfter(data.rawTokens ?? 
[], rawOffset); + + for (const file of data.files) { + const ranges = file.rawData?.tokenRanges; + if (!ranges) { + continue; + } + + for (const r of ranges) { + if (tokenIndex >= r.startToken && tokenIndex < r.endToken) { + return file; + } + } + } + + return undefined; +} + +export function tryConvertRawOffsetToContentOffset(file: FourSlashFile, rawOffset: number): number | undefined { + const mapping = file.rawData?.rawToContent; + if (!mapping) { + return undefined; + } + + return tryConvertRawOffsetToContentOffsetWithMapping(mapping, rawOffset); +} + +export function tryConvertContentOffsetToRawOffset(file: FourSlashFile, contentOffset: number): number | undefined { + const mapping = file.rawData?.contentToRaw; + if (!mapping) { + return undefined; + } + + return tryConvertContentOffsetToRawOffsetWithMapping(mapping, contentOffset); +} + +export function tryConvertRawOffsetToContentOffsetWithMapping( + mapping: RawContentMapping, + rawOffset: number +): number | undefined { + const seg = findItemContainingOffset( + mapping.segments, + rawOffset, + (s) => s.rawStart, + (s) => s.rawEnd + ); + if (!seg) { + const last = mapping.segments[mapping.segments.length - 1]; + if (last && rawOffset === last.rawEnd) { + // Allow EOF raw offsets for the mapped content (end-exclusive). + return last.contentEnd; + } + + return undefined; + } + + return seg.contentStart + (rawOffset - seg.rawStart); +} + +export function tryConvertContentOffsetToRawOffsetWithMapping( + mapping: RawContentMapping, + contentOffset: number +): number | undefined { + const seg = findItemContainingOffset( + mapping.segments, + contentOffset, + (s) => s.contentStart, + (s) => s.contentEnd + ); + if (!seg) { + const segments = mapping.segments; + if (segments.length === 0) { + return undefined; + } + + const last = segments[segments.length - 1]; + if (contentOffset === last.contentEnd) { + // Allow EOF content offsets. 
+ return last.rawEnd; + } + + const insertionIndex = upperBoundIndex(segments, contentOffset, (s) => s.contentStart); + const prev = insertionIndex - 1; + if (prev >= 0 && segments[prev].contentEnd === contentOffset) { + // Allow mapping at a segment boundary (use the left segment). + return segments[prev].rawEnd; + } + + return undefined; + } + + return seg.rawStart + (contentOffset - seg.contentStart); +} + +export function findTokenIndexAtOrAfter(rawTokens: RawToken[], rawOffset: number): number { + if (rawOffset <= 0) { + return 0; + } + if (rawTokens.length === 0) { + return 0; + } + if (rawOffset >= rawTokens[rawTokens.length - 1].end) { + return rawTokens.length; + } + + const insertionIndex = lowerBoundIndex(rawTokens, rawOffset, (t) => t.start); + + // If the token at insertionIndex starts exactly at the offset, it's a direct hit. + if (insertionIndex < rawTokens.length) { + const token = rawTokens[insertionIndex]; + if (rawOffset >= token.start && rawOffset < token.end) { + return insertionIndex; + } + } + + // Otherwise, the token immediately before may still contain the offset. + const prev = insertionIndex - 1; + if (prev >= 0 && rawOffset < rawTokens[prev].end) { + return prev; + } + + // No containing token; return insertion index. 
+ return insertionIndex; +} + +export function lowerBoundIndex<T>(items: readonly T[], value: number, keySelector: (item: T) => number): number { + let low = 0; + let high = items.length; + + while (low < high) { + const mid = (low + high) >> 1; + if (value <= keySelector(items[mid])) { + high = mid; + } else { + low = mid + 1; + } + } + + return low; +} + +export function upperBoundIndex<T>(items: readonly T[], value: number, keySelector: (item: T) => number): number { + let low = 0; + let high = items.length; + + while (low < high) { + const mid = (low + high) >> 1; + if (value < keySelector(items[mid])) { + high = mid; + } else { + low = mid + 1; + } + } + + return low; +} + +export function findItemContainingOffset<T>( + items: readonly T[], + offset: number, + getStart: (item: T) => number, + getEnd: (item: T) => number +): T | undefined { + // Find the last item whose start is <= offset (i.e. upperBound(start) - 1), + // then validate the offset is strictly before its end. + const insertionIndex = upperBoundIndex(items, offset, getStart); + const candidateIndex = insertionIndex - 1; + if (candidateIndex < 0) { + return undefined; + } + + const candidate = items[candidateIndex]; + return offset < getEnd(candidate) ? candidate : undefined; +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/fourslash/fourSlashTypes.ts b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/fourSlashTypes.ts new file mode 100644 index 00000000..39ea74fd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/fourSlashTypes.ts @@ -0,0 +1,261 @@ +/* + * fourSlashTypes.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Various common types for fourslash test framework + */ +import * as debug from '../../../common/debug'; +import { Uri } from '../../../common/uri/uri'; + +/** well known global option names */ +export const enum GlobalMetadataOptionNames { + projectRoot = 'projectroot', + ignoreCase = 'ignorecase', + typeshed = 'typeshed', + indexer = 'indexer', + indexerWithoutStdLib = 'indexerwithoutstdlib', + indexerOptions = 'indexeroptions', +} + +/** Any option name not belong to this will become global option */ +export const enum MetadataOptionNames { + fileName = 'filename', + library = 'library', + distLibrary = 'distlibrary', + ipythonMode = 'ipythonmode', + chainedTo = 'chainedto', +} + +/** List of allowed file metadata names */ +export const fileMetadataNames = [ + MetadataOptionNames.fileName, + MetadataOptionNames.library, + MetadataOptionNames.distLibrary, + MetadataOptionNames.ipythonMode, + MetadataOptionNames.chainedTo, +]; + +/** all the necessary information to set the right compiler settings */ +export interface CompilerSettings<T = string> { + [name: string]: T; +} + +export const enum RawTokenKind { + Whitespace = 'whitespace', + NewLineCR = 'newlineCR', + NewLineLF = 'newlineLF', + Text = 'text', + + // Line prefixes + TwoSlashPrefix = 'twoSlashPrefix', + FourSlashPrefix = 'fourSlashPrefix', + + // Directive grammar (only when syntactically active) + DirectiveAt = 'directiveAt', + DirectiveName = 'directiveName', + DirectiveColon = 'directiveColon', + DirectiveValue = 'directiveValue', + + // Range grammar (only when syntactically active inside a four-slash content line) + RangeStart = 'rangeStart', + RangeEnd = 'rangeEnd', + + // Marker grammar (only when syntactically active inside a four-slash content line) + MarkerStart = 'markerStart', + MarkerName = 'markerName', + MarkerEnd = 'markerEnd', + + // Object marker grammar (only when syntactically active inside a four-slash content line) + ObjectMarkerStart = 'objectMarkerStart', + 
ObjectMarkerText = 'objectMarkerText', + ObjectMarkerEnd = 'objectMarkerEnd', +} + +export interface RawToken { + kind: RawTokenKind; + // Raw offsets into the original fourslash test string. End is exclusive. + start: number; + end: number; +} + +export interface RawTokenRange { + // Token indices into FourSlashData.rawTokens. End is exclusive. + startToken: number; + endToken: number; +} + +export interface RawContentMappingSegment { + // Raw offsets into the original fourslash test string. End is exclusive. + rawStart: number; + rawEnd: number; + // Offsets into FourSlashFile.content. End is exclusive. + contentStart: number; + contentEnd: number; +} + +export interface RawContentMapping { + // Piecewise-linear mapping segments. Any offset outside all segments is unmapped. + segments: RawContentMappingSegment[]; +} + +export interface FourSlashFileRawData { + // Token ranges for the four-slash content lines that contributed to this file. + // Multiple ranges are used to keep consumption straightforward. + tokenRanges: RawTokenRange[]; + + // Mapping between raw offsets (original test string) and content offsets (FourSlashFile.content). + // Mapping is strict: offsets in stripped syntax (prefixes, directives, marker/range tokens, chomped spaces) are unmapped. + rawToContent?: RawContentMapping; + contentToRaw?: RawContentMapping; + + // RawData for file options directives, keyed by option name. + fileOptionsRawData?: CompilerSettings<CompilerSettingRawData>; +} + +export interface CompilerSettingRawData { + // Token range covering the full directive line (including // and any whitespace/newline tokens on that line). + directiveLine: RawTokenRange; + // Token range for the // prefix. + prefix: RawTokenRange; + // Token range for '@' + directive name. + name: RawTokenRange; + // Token range for ':' if present. + colon?: RawTokenRange | undefined; + // Token range for the directive value (may be empty). 
+ value: RawTokenRange; +} + +/** Represents a parsed source file with metadata */ +export interface FourSlashFile { + // The contents of the file (with markers, etc stripped out) + content: string; + fileName: string; + fileUri: Uri; + version: number; + // File-specific options (name/value pairs) + fileOptions: CompilerSettings; + + // Optional raw parsing metadata used for semantic tokenization of the original test string. + rawData?: FourSlashFileRawData; +} + +/** Represents a set of parsed source files and options */ +export interface FourSlashData { + // Global options (name/value pairs) + globalOptions: CompilerSettings; + files: FourSlashFile[]; + + // The original, unmodified fourslash test string. + rawText?: string; + // Lossless raw token stream that tiles rawText exactly. + rawTokens?: RawToken[]; + // RawData for global options directives, keyed by option name. + globalOptionsRawData?: CompilerSettings<CompilerSettingRawData>; + + // A mapping from marker names to name/position pairs + markerPositions: Map<string, Marker>; + markers: Marker[]; + + /** + * Inserted in source files by surrounding desired text + * in a range with `[|` and `|]`. For example, + * + * [|text in range|] + * + * is a range with `text in range` "selected". + */ + ranges: Range[]; + rangesByText?: MultiMap<Range> | undefined; +} + +export interface Marker { + fileName: string; + fileUri: Uri; + position: number; + data?: {}; + + // Optional raw token references for this marker in the original test string. + rawData?: MarkerRawData; +} + +export interface MarkerRawData { + kind: 'slashStar' | 'object'; + + full: RawTokenRange; + start: RawTokenRange; + end: RawTokenRange; + + // Present when kind === 'slashStar'. + name?: RawTokenRange | undefined; + // Present when kind === 'object'. 
+ text?: RawTokenRange | undefined; +} + +export interface Range { + fileName: string; + fileUri: Uri; + marker?: Marker | undefined; + pos: number; + end: number; + + // Optional raw token references for this range in the original test string. + rawData?: RangeRawData; +} + +export interface RangeRawData { + full: RawTokenRange; + open: RawTokenRange; + selected: RawTokenRange; + close: RawTokenRange; +} + +export interface MultiMap<T> extends Map<string, T[]> { + /** + * Adds the value to an array of values associated with the key, and returns the array. + * Creates the array if it does not already exist. + */ + add(key: string, value: T): T[]; + + /** + * Removes a value from an array of values associated with the key. + * Does not preserve the order of those values. + * Does nothing if `key` is not in `map`, or `value` is not in `map[key]`. + */ + remove(key: string, value: T): void; +} + +/** Review: is this needed? we might just use one from vscode */ +export interface HostCancellationToken { + isCancellationRequested(): boolean; +} + +export class TestCancellationToken implements HostCancellationToken { + // 0 - cancelled + // >0 - not cancelled + // <0 - not cancelled and value denotes number of isCancellationRequested after which token become cancelled + private static readonly _notCanceled = -1; + private _numberOfCallsBeforeCancellation = TestCancellationToken._notCanceled; + + isCancellationRequested(): boolean { + if (this._numberOfCallsBeforeCancellation < 0) { + return false; + } + + if (this._numberOfCallsBeforeCancellation > 0) { + this._numberOfCallsBeforeCancellation--; + return false; + } + + return true; + } + + setCancelled(numberOfCalls = 0): void { + debug.assert(numberOfCalls >= 0); + this._numberOfCallsBeforeCancellation = numberOfCalls; + } + + resetCancelled(): void { + this._numberOfCallsBeforeCancellation = TestCancellationToken._notCanceled; + } +} diff --git 
a/python-parser/packages/pyright-internal/src/tests/harness/fourslash/runner.ts b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/runner.ts new file mode 100644 index 00000000..2e57338b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/runner.ts @@ -0,0 +1,100 @@ +/* + * runner.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Provide APIs to run fourslash tests from provided fourslash markup contents + */ + +import * as ts from 'typescript'; + +import { combinePaths } from '../../../common/pathUtils'; +import * as host from '../testHost'; +import { parseTestData } from './fourSlashParser'; +import { FourSlashData } from './fourSlashTypes'; +import { HostSpecificFeatures, TestState } from './testState'; +import { Consts } from './testState.Consts'; + +export type TestStateFactory = ( + basePath: string, + testData: FourSlashData, + mountPaths?: Map<string, string>, + hostSpecificFeatures?: HostSpecificFeatures +) => TestState; + +/** + * run given fourslash test file + * + * @param basePath this is used as a base path of the virtual file system the test will run upon + * @param fileName this is the file path where fourslash test file will be read from + */ +export function runFourSlashTest( + basePath: string, + fileName: string, + cb?: jest.DoneCallback, + mountPaths?: Map<string, string>, + hostSpecificFeatures?: HostSpecificFeatures, + testStateFactory?: TestStateFactory +) { + const content = host.HOST.readFile(fileName)!; + runFourSlashTestContent(basePath, fileName, content, cb, mountPaths, hostSpecificFeatures, testStateFactory); +} + +/** + * run given fourslash markup content + * + * @param basePath this is used as a base path of the virtual file system the test will run upon + * @param fileName this will be used as a filename of the given `content` in the virtual file system + * if fourslash markup `content` doesn't have explicit `@filename` option + * @param 
content this is fourslash markup string + */ +export function runFourSlashTestContent( + basePath: string, + fileName: string, + content: string, + cb?: jest.DoneCallback, + mountPaths?: Map<string, string>, + hostSpecificFeatures?: HostSpecificFeatures, + testStateFactory?: TestStateFactory +) { + // give file paths an absolute path for the virtual file system + const absoluteBasePath = combinePaths('/', basePath); + const absoluteFileName = combinePaths('/', fileName); + + // parse out the files and their metadata + const testData = parseTestData(absoluteBasePath, content, absoluteFileName); + const state = + testStateFactory !== undefined + ? testStateFactory(absoluteBasePath, testData, mountPaths, hostSpecificFeatures) + : new TestState(absoluteBasePath, testData, mountPaths, hostSpecificFeatures); + const output = ts.transpileModule(content, { + reportDiagnostics: true, + compilerOptions: { target: ts.ScriptTarget.ES2019 }, + }); + if (output.diagnostics!.length > 0) { + throw new Error(`Syntax error in ${absoluteBasePath}: ${output.diagnostics![0].messageText}`); + } + + runCode(output.outputText, state, cb); +} + +async function runCode(code: string, state: TestState, cb?: jest.DoneCallback) { + // Compile and execute the test + try { + const wrappedCode = `(async function(helper, Consts) { +${code} +})`; + const f = eval(wrappedCode); // CodeQL [SM01632] test code that doesn't need to be secure. 
+ await f(state, Consts); + markDone(); + } catch (ex) { + markDone(ex); + } + + function markDone(...args: any[]) { + if (cb) { + cb(...args); + } + state.dispose(); + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/fourslash/testLanguageService.ts b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/testLanguageService.ts new file mode 100644 index 00000000..027809a7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/testLanguageService.ts @@ -0,0 +1,175 @@ +/* + * testLanguageService.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Test mock that implements LanguageServiceInterface + */ + +import { CancellationToken, CodeAction, ExecuteCommandParams } from 'vscode-languageserver'; + +import { + BackgroundAnalysisProgram, + BackgroundAnalysisProgramFactory, +} from '../../../analyzer/backgroundAnalysisProgram'; +import { ImportResolver, ImportResolverFactory } from '../../../analyzer/importResolver'; +import { MaxAnalysisTime } from '../../../analyzer/program'; +import { AnalyzerService, AnalyzerServiceOptions } from '../../../analyzer/service'; +import { IBackgroundAnalysis } from '../../../backgroundAnalysisBase'; +import { CommandController } from '../../../commands/commandController'; +import { ConfigOptions } from '../../../common/configOptions'; +import { ConsoleInterface } from '../../../common/console'; +import * as debug from '../../../common/debug'; +import { FileSystem } from '../../../common/fileSystem'; +import { + LanguageServerInterface, + MessageAction, + ServerSettings, + WindowInterface, +} from '../../../common/languageServerInterface'; +import { ServiceProvider } from '../../../common/serviceProvider'; +import { Range } from '../../../common/textRange'; +import { Uri } from '../../../common/uri/uri'; +import { CodeActionProvider } from '../../../languageService/codeActionProvider'; +import { WellKnownWorkspaceKinds, 
Workspace, createInitStatus } from '../../../workspaceFactory'; +import { TestAccessHost } from '../testAccessHost'; +import { HostSpecificFeatures } from './testState'; + +export class TestFeatures implements HostSpecificFeatures { + importResolverFactory: ImportResolverFactory = AnalyzerService.createImportResolver; + backgroundAnalysisProgramFactory: BackgroundAnalysisProgramFactory = ( + serviceId: string, + serviceProvider: ServiceProvider, + configOptions: ConfigOptions, + importResolver: ImportResolver, + backgroundAnalysis?: IBackgroundAnalysis, + maxAnalysisTime?: MaxAnalysisTime + ) => + new BackgroundAnalysisProgram( + serviceId, + serviceProvider, + configOptions, + importResolver, + backgroundAnalysis, + maxAnalysisTime, + /* disableChecker */ undefined + ); + + getCodeActionsForPosition( + workspace: Workspace, + fileUri: Uri, + range: Range, + token: CancellationToken + ): Promise<CodeAction[]> { + return CodeActionProvider.getCodeActionsForPosition(workspace, fileUri, range, undefined, token); + } + execute(ls: LanguageServerInterface, params: ExecuteCommandParams, token: CancellationToken): Promise<any> { + const controller = new CommandController(ls); + return controller.execute(params, token); + } +} + +export class TestLanguageService implements LanguageServerInterface { + readonly window = new TestWindow(); + readonly supportAdvancedEdits = true; + readonly serviceProvider: ServiceProvider; + + private readonly _workspace: Workspace; + private readonly _defaultWorkspace: Workspace; + + constructor( + workspace: Workspace, + readonly console: ConsoleInterface, + readonly fs: FileSystem, + options?: AnalyzerServiceOptions + ) { + this._workspace = workspace; + this.serviceProvider = this._workspace.service.serviceProvider; + + this._defaultWorkspace = { + workspaceName: '', + rootUri: undefined, + kinds: [WellKnownWorkspaceKinds.Test], + service: new AnalyzerService( + 'test service', + new ServiceProvider(), + options ?? 
{ + console: this.console, + hostFactory: () => new TestAccessHost(), + importResolverFactory: AnalyzerService.createImportResolver, + configOptions: new ConfigOptions(Uri.empty()), + fileSystem: this.fs, + shouldRunAnalysis: () => true, + } + ), + disableLanguageServices: false, + disableTaggedHints: false, + disableOrganizeImports: false, + disableWorkspaceSymbol: false, + isInitialized: createInitStatus(), + searchPathsToWatch: [], + }; + } + + getWorkspaces(): Promise<Workspace[]> { + return Promise.resolve([this._workspace, this._defaultWorkspace]); + } + + getWorkspaceForFile(uri: Uri): Promise<Workspace> { + if (uri.startsWith(this._workspace.rootUri)) { + return Promise.resolve(this._workspace); + } + + return Promise.resolve(this._defaultWorkspace); + } + + getSettings(_workspace: Workspace): Promise<ServerSettings> { + const settings: ServerSettings = { + venvPath: this._workspace.service.getConfigOptions().venvPath, + pythonPath: this._workspace.service.getConfigOptions().pythonPath, + typeshedPath: this._workspace.service.getConfigOptions().typeshedPath, + openFilesOnly: this._workspace.service.getConfigOptions().checkOnlyOpenFiles, + useLibraryCodeForTypes: this._workspace.service.getConfigOptions().useLibraryCodeForTypes, + disableLanguageServices: this._workspace.disableLanguageServices, + disableTaggedHints: this._workspace.disableTaggedHints, + autoImportCompletions: this._workspace.service.getConfigOptions().autoImportCompletions, + functionSignatureDisplay: this._workspace.service.getConfigOptions().functionSignatureDisplay, + }; + + return Promise.resolve(settings); + } + + createBackgroundAnalysis(serviceId: string): IBackgroundAnalysis | undefined { + // worker thread doesn't work in Jest + // by returning undefined, analysis will run inline + return undefined; + } + + reanalyze(): void { + // Don't do anything + } + + restart(): void { + // Don't do anything + } +} + +class TestWindow implements WindowInterface { + showErrorMessage(message: 
string): void; + showErrorMessage(message: string, ...actions: MessageAction[]): Promise<MessageAction | undefined>; + showErrorMessage(message: string, ...actions: MessageAction[]): Promise<MessageAction | undefined> | void { + debug.fail("shouldn't be called"); + } + + showWarningMessage(message: string): void; + showWarningMessage(message: string, ...actions: MessageAction[]): Promise<MessageAction | undefined>; + showWarningMessage(message: string, ...actions: MessageAction[]): Promise<MessageAction | undefined> | void { + debug.fail("shouldn't be called"); + } + + showInformationMessage(message: string): void; + showInformationMessage(message: string, ...actions: MessageAction[]): Promise<MessageAction | undefined>; + showInformationMessage(message: string, ...actions: MessageAction[]): Promise<MessageAction | undefined> | void { + // Don't do anything + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/fourslash/testState.Consts.ts b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/testState.Consts.ts new file mode 100644 index 00000000..3a97896f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/testState.Consts.ts @@ -0,0 +1,32 @@ +/* + * testState.Consts.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Defines consts that will be available to fourslash tests. + * + * Make sure to declare consts in fourslash.ts as well to make them available on design time. + * Ones defined here will be used on runtime. 
+ */ + +import * as lsp from 'vscode-languageserver'; +import { indexValueDetail } from '../../../languageService/completionProvider'; + +/* eslint-disable @typescript-eslint/no-unused-vars */ +export namespace Consts { + export import CodeActionKind = lsp.CodeActionKind; + + // it is duped here since original definition in '../../../commands/commands' + // is marked as const enum and we can't import "const enum" which get removed + // once compiled + export enum Commands { + createTypeStub = 'pyright.createtypestub', + restartServer = 'pyright.restartserver', + orderImports = 'pyright.organizeimports', + } + + export import CompletionItemKind = lsp.CompletionItemKind; + export import InlayHintKind = lsp.InlayHintKind; + + export const IndexValueDetail = indexValueDetail; +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/fourslash/testState.ts b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/testState.ts new file mode 100644 index 00000000..bdc3517c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/testState.ts @@ -0,0 +1,2160 @@ +/* + * testState.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * TestState wraps currently test states and provides a way to query and manipulate + * the test states. 
+ */ + +import assert from 'assert'; +import * as path from 'path'; +import { + CancellationToken, + CodeAction, + Command, + CompletionItem, + CompletionList, + Diagnostic, + DocumentHighlight, + DocumentHighlightKind, + ExecuteCommandParams, + Location, + MarkupContent, + MarkupKind, + TextEdit, + WorkspaceEdit, +} from 'vscode-languageserver'; + +import { BackgroundAnalysisProgramFactory, InvalidatedReason } from '../../../analyzer/backgroundAnalysisProgram'; +import { ImportResolver, ImportResolverFactory } from '../../../analyzer/importResolver'; +import { PackageTypeReport } from '../../../analyzer/packageTypeReport'; +import { PackageTypeVerifier } from '../../../analyzer/packageTypeVerifier'; +import { findNodeByOffset } from '../../../analyzer/parseTreeUtils'; +import { Program } from '../../../analyzer/program'; +import { AnalyzerService } from '../../../analyzer/service'; +import { CommandResult } from '../../../commands/commandResult'; +import { Char } from '../../../common/charCodes'; +import { CommandLineOptions } from '../../../common/commandLineOptions'; +import { ConfigOptions, SignatureDisplayType } from '../../../common/configOptions'; +import { ConsoleInterface, ConsoleWithLogLevel, NullConsole } from '../../../common/console'; +import { Comparison, isNumber, isString } from '../../../common/core'; +import * as debug from '../../../common/debug'; +import { DiagnosticCategory } from '../../../common/diagnostic'; +import { DocumentRange } from '../../../common/docRange'; +import { PyrightDocStringService } from '../../../common/docStringService'; +import { FileEditAction } from '../../../common/editAction'; +import { ReadOnlyFileSystem } from '../../../common/fileSystem'; +import { Host } from '../../../common/host'; +import { LanguageServerInterface } from '../../../common/languageServerInterface'; +import { getFileExtension, normalizePath, normalizeSlashes } from '../../../common/pathUtils'; +import { convertOffsetToPosition, 
convertPositionToOffset } from '../../../common/positionUtils'; +import { ServiceProvider } from '../../../common/serviceProvider'; +import { createServiceProvider } from '../../../common/serviceProviderExtensions'; +import { compareStringsCaseInsensitive, compareStringsCaseSensitive } from '../../../common/stringUtils'; +import { Position, Range as PositionRange, TextRange, rangesAreEqual } from '../../../common/textRange'; +import { TextRangeCollection } from '../../../common/textRangeCollection'; +import { Uri } from '../../../common/uri/uri'; +import { UriEx, getFileSpec } from '../../../common/uri/uriUtils'; +import { convertToWorkspaceEdit } from '../../../common/workspaceEditUtils'; +import { CallHierarchyProvider } from '../../../languageService/callHierarchyProvider'; +import { CompletionOptions, CompletionProvider } from '../../../languageService/completionProvider'; +import { + DefinitionFilter, + DefinitionProvider, + TypeDefinitionProvider, +} from '../../../languageService/definitionProvider'; +import { DocumentHighlightProvider } from '../../../languageService/documentHighlightProvider'; +import { CollectionResult } from '../../../languageService/documentSymbolCollector'; +import { HoverProvider } from '../../../languageService/hoverProvider'; +import { convertDocumentRangesToLocation } from '../../../languageService/navigationUtils'; +import { ReferencesProvider } from '../../../languageService/referencesProvider'; +import { RenameProvider } from '../../../languageService/renameProvider'; +import { SignatureHelpProvider } from '../../../languageService/signatureHelpProvider'; +import { ParseNode } from '../../../parser/parseNodes'; +import { ParseFileResults } from '../../../parser/parser'; +import { Tokenizer } from '../../../parser/tokenizer'; +import { PartialStubService } from '../../../partialStubService'; +import { PyrightFileSystem } from '../../../pyrightFileSystem'; +import { NormalWorkspace, WellKnownWorkspaceKinds, Workspace, 
createInitStatus } from '../../../workspaceFactory'; +import { TestAccessHost } from '../testAccessHost'; +import * as host from '../testHost'; +import { stringify } from '../utils'; +import { createFromFileSystem, distlibFolder, libFolder, typeshedFolder } from '../vfs/factory'; +import * as vfs from '../vfs/filesystem'; +import { parseTestData } from './fourSlashParser'; +import { + FourSlashData, + FourSlashFile, + Marker, + MetadataOptionNames, + MultiMap, + Range, + TestCancellationToken, +} from './fourSlashTypes'; +import { TestFeatures, TestLanguageService } from './testLanguageService'; +import { + createVfsInfoFromFourSlashData, + getMarkerByName, + getMarkerName, + getMarkerNames, + getRangeByMarkerName, +} from './testStateUtils'; +import { verifyWorkspaceEdit } from './workspaceEditTestUtils'; + +export interface TextChange { + span: TextRange; + newText: string; +} + +export interface HostSpecificFeatures { + importResolverFactory: ImportResolverFactory; + backgroundAnalysisProgramFactory: BackgroundAnalysisProgramFactory; + + getCodeActionsForPosition( + workspace: Workspace, + fileUri: Uri, + range: PositionRange, + token: CancellationToken + ): Promise<CodeAction[]>; + + execute(ls: LanguageServerInterface, params: ExecuteCommandParams, token: CancellationToken): Promise<any>; +} + +// Make sure everything is in lower case since it has hard coded `isCaseSensitive`: true. 
+const testAccessHost = new TestAccessHost(UriEx.file(vfs.MODULE_PATH), [libFolder, distlibFolder]); + +export class TestState { + private readonly _cancellationToken: TestCancellationToken; + private readonly _vfsFiles: vfs.FileSet; + protected readonly files: string[] = []; + private readonly _hostSpecificFeatures: HostSpecificFeatures; + + readonly testFS: vfs.TestFileSystem; + readonly fs: PyrightFileSystem; + readonly workspace: NormalWorkspace; + readonly console: ConsoleInterface; + readonly rawConfigJson: any | undefined; + readonly serviceProvider: ServiceProvider; + + // The current caret position in the active file + currentCaretPosition = 0; + // The position of the end of the current selection, or -1 if nothing is selected + selectionEnd = -1; + + lastKnownMarker = ''; + + // The file that's currently 'opened' + activeFile!: FourSlashFile; + + constructor( + projectRoot: string, + public testData: FourSlashData, + mountPaths?: Map<string, string>, + hostSpecificFeatures?: HostSpecificFeatures, + testFS?: vfs.TestFileSystem, + // Setting delayFileInitialization to true enables derived class constructors to execute + // before any files are opened. When set to true, initializeFiles() must be called separately + // after construction completes. + delayFileInitialization = false + ) { + const vfsInfo = createVfsInfoFromFourSlashData(projectRoot, testData); + this._vfsFiles = vfsInfo.files; + + this.testFS = + testFS ?? + createFromFileSystem( + host.HOST, + vfsInfo.ignoreCase, + { cwd: vfsInfo.projectRoot, files: vfsInfo.files, meta: testData.globalOptions }, + mountPaths + ); + + this.fs = new PyrightFileSystem(this.testFS); + this.console = new ConsoleWithLogLevel(new NullConsole(), 'test'); + const ps = new PartialStubService(this.fs); + this.serviceProvider = createServiceProvider(this.testFS, this.fs, this.console, ps); + + this._cancellationToken = new TestCancellationToken(); + this._hostSpecificFeatures = hostSpecificFeatures ?? 
new TestFeatures(); + + this.files = vfsInfo.sourceFileNames; + + this.rawConfigJson = vfsInfo.rawConfigJson; + const configOptions = this._convertGlobalOptionsToConfigOptions(vfsInfo.projectRoot, mountPaths); + + if (this.rawConfigJson) { + const configDirUri = Uri.file(projectRoot, this.serviceProvider); + configOptions.initializeTypeCheckingMode('standard'); + configOptions.initializeFromJson(this.rawConfigJson, configDirUri, this.serviceProvider, testAccessHost); + configOptions.setupExecutionEnvironments(this.rawConfigJson, configDirUri, this.serviceProvider.console()); + this._applyTestConfigOptions(configOptions); + } + + const service = this.createAnalysisService( + this.console, + this._hostSpecificFeatures.importResolverFactory, + this._hostSpecificFeatures.backgroundAnalysisProgramFactory, + configOptions, + testAccessHost + ); + + this.workspace = { + workspaceName: 'test workspace', + rootUri: Uri.file(vfsInfo.projectRoot, this.serviceProvider), + kinds: [WellKnownWorkspaceKinds.Test], + service: service, + disableLanguageServices: false, + disableTaggedHints: false, + disableOrganizeImports: false, + disableWorkspaceSymbol: false, + isInitialized: createInitStatus(), + searchPathsToWatch: [], + }; + + if (!delayFileInitialization) { + this.initializeFiles(); + } + } + + get importResolver(): ImportResolver { + return this.workspace.service.getImportResolver(); + } + + get configOptions(): ConfigOptions { + return this.workspace.service.getConfigOptions(); + } + + get program(): Program { + return this.workspace.service.test_program; + } + + // eslint-disable-next-line @typescript-eslint/naming-convention + get BOF(): number { + return 0; + } + + // eslint-disable-next-line @typescript-eslint/naming-convention + get EOF(): number { + return this.getFileContent(this.activeFile.fileName).length; + } + + initializeFiles() { + if (this.files.length > 0) { + // Open the first file by default + this.openFile(this.files[0]); + } + + for (const filePath of 
this.files) { + const file = this._vfsFiles[filePath] as vfs.File; + if (file.meta?.[MetadataOptionNames.ipythonMode]) { + this.program.getSourceFile(Uri.file(filePath, this.serviceProvider))?.test_enableIPythonMode(true); + } + if (file.meta?.[MetadataOptionNames.chainedTo]) { + const chainedTo = file.meta[MetadataOptionNames.chainedTo] as string; + const to = this.program.getSourceFile(Uri.file(chainedTo, this.serviceProvider)); + if (to) { + this.program.updateChainedUri(Uri.file(filePath, this.serviceProvider), to.getUri()); + } + } + } + } + + dispose() { + this.workspace.service.dispose(); + } + + cwd() { + return this.testFS.cwd(); + } + + // Entry points from fourslash.ts + goToMarker(nameOrMarker: string | Marker = '') { + const marker = isString(nameOrMarker) ? this.getMarkerByName(nameOrMarker) : nameOrMarker; + if (this.activeFile.fileName !== marker.fileName) { + this.openFile(marker.fileName); + } + + const content = this.getFileContent(marker.fileName); + if (marker.position === -1 || marker.position > content.length) { + throw new Error(`Marker "${nameOrMarker}" has been invalidated by unrecoverable edits to the file.`); + } + + const mName = isString(nameOrMarker) ? 
nameOrMarker : this.getMarkerName(marker); + this.lastKnownMarker = mName; + this.goToPosition(marker.position); + } + + goToEachMarker(markers: readonly Marker[], action: (marker: Marker, index: number) => void) { + assert.ok(markers.length > 0); + for (let i = 0; i < markers.length; i++) { + this.goToMarker(markers[i]); + action(markers[i], i); + } + } + + getMappedFilePath(path: string): string { + const uri = Uri.file(path, this.serviceProvider); + this.importResolver.ensurePartialStubPackages(this.configOptions.findExecEnvironment(uri)); + return this.fs.getMappedUri(uri).getFilePath(); + } + + getMarkerName(m: Marker): string { + return getMarkerName(this.testData, m); + } + + getMarkerByName(markerName: string) { + return getMarkerByName(this.testData, markerName); + } + + getMarkers(): Marker[] { + // Return a copy of the list + return this.testData.markers.slice(0); + } + + getMarkerNames(): string[] { + return getMarkerNames(this.testData); + } + + getPositionRange(markerString: string) { + const marker = this.getMarkerByName(markerString); + const ranges = this.getRanges().filter((r) => r.marker === marker); + if (ranges.length !== 1) { + this.raiseError(`no matching range for ${markerString}`); + } + + const range = ranges[0]; + return this.convertPositionRange(range); + } + + getPosition(markerString: string): Position { + const marker = this.getMarkerByName(markerString); + const ranges = this.getRanges().filter((r) => r.marker === marker); + if (ranges.length !== 1) { + this.raiseError(`no matching range for ${markerString}`); + } + return this.convertOffsetToPosition(marker.fileName, marker.position); + } + + expandPositionRange(range: PositionRange, start: number, end: number) { + return { + start: { line: range.start.line, character: range.start.character - start }, + end: { line: range.end.line, character: range.end.character + end }, + }; + } + + convertPositionRange(range: Range) { + return this.convertOffsetsToRange(range.fileName, range.pos, 
range.end); + } + + getPathSep() { + return path.sep; + } + + goToPosition(positionOrLineAndColumn: number | Position) { + const pos = isNumber(positionOrLineAndColumn) + ? positionOrLineAndColumn + : this.convertPositionToOffset(this.activeFile.fileName, positionOrLineAndColumn); + this.currentCaretPosition = pos; + this.selectionEnd = -1; + } + + select(startMarker: string, endMarker: string) { + const start = this.getMarkerByName(startMarker); + const end = this.getMarkerByName(endMarker); + + assert.ok(start.fileName === end.fileName); + if (this.activeFile.fileName !== start.fileName) { + this.openFile(start.fileName); + } + this.goToPosition(start.position); + this.selectionEnd = end.position; + } + + selectAllInFile(fileName: string) { + this.openFile(fileName); + this.goToPosition(0); + this.selectionEnd = this.activeFile.content.length; + } + + selectRange(range: Range): void { + this.goToRangeStart(range); + this.selectionEnd = range.end; + } + + selectLine(index: number) { + const lineStart = this.convertPositionToOffset(this.activeFile.fileName, { line: index, character: 0 }); + const lineEnd = lineStart + this._getLineContent(index).length; + this.selectRange({ + fileName: this.activeFile.fileName, + fileUri: this.activeFile.fileUri, + pos: lineStart, + end: lineEnd, + }); + } + + goToEachRange(action: (range: Range) => void) { + const ranges = this.getRanges(); + assert.ok(ranges.length > 0); + for (const range of ranges) { + this.selectRange(range); + action(range); + } + } + + goToRangeStart({ fileName, pos }: Range) { + this.openFile(fileName); + this.goToPosition(pos); + } + + getRanges(): Range[] { + return this.testData.ranges; + } + + getRangesInFile(fileName = this.activeFile.fileName) { + return this.getRanges().filter((r) => r.fileName === fileName); + } + + getRangesByText(): Map<string, Range[]> { + if (this.testData.rangesByText) { + return this.testData.rangesByText; + } + const result = this.createMultiMap<Range>(this.getRanges(), (r) 
=> this.rangeText(r)); + this.testData.rangesByText = result; + + return result; + } + + getFilteredRanges<T extends {}>( + predicate: (m: Marker | undefined, d: T | undefined, text: string) => boolean + ): Range[] { + return this.getRanges().filter((r) => predicate(r.marker, r.marker?.data as T | undefined, this.rangeText(r))); + } + + getRangeByMarkerName(markerName: string): Range | undefined { + return getRangeByMarkerName(this.testData, markerName); + } + + goToBOF() { + this.goToPosition(this.BOF); + } + + goToEOF() { + this.goToPosition(this.EOF); + } + + moveCaretRight(count = 1) { + this.currentCaretPosition += count; + this.currentCaretPosition = Math.min( + this.currentCaretPosition, + this.getFileContent(this.activeFile.fileName).length + ); + this.selectionEnd = -1; + } + + // Opens a file given its 0-based index or fileName + openFile(indexOrName: number | string): FourSlashFile { + const fileToOpen: FourSlashFile = this.findFile(indexOrName); + fileToOpen.fileName = normalizeSlashes(fileToOpen.fileName); + this.activeFile = fileToOpen; + + this.program.setFileOpened(this.activeFile.fileUri, 1, fileToOpen.content); + + return fileToOpen; + } + + openFiles(indexOrNames: (number | string)[]): void { + for (const indexOrName of indexOrNames) { + this.openFile(indexOrName); + } + } + + printCurrentFileState(showWhitespace: boolean, makeCaretVisible: boolean) { + for (const file of this.testData.files) { + const active = this.activeFile === file; + host.HOST.log(`=== Script (${file.fileName}) ${active ? '(active, cursor at |)' : ''} ===`); + let content = this.getFileContent(file.fileName); + if (active) { + content = + content.substr(0, this.currentCaretPosition) + + (makeCaretVisible ? 
'|' : '') + + content.substr(this.currentCaretPosition); + } + if (showWhitespace) { + content = this._makeWhitespaceVisible(content); + } + host.HOST.log(content); + } + } + + deleteChar(count = 1) { + const offset = this.currentCaretPosition; + const ch = ''; + + const checkCadence = (count >> 2) + 1; + + for (let i = 0; i < count; i++) { + this._editScriptAndUpdateMarkers(this.activeFile.fileName, offset, offset + 1, ch); + + if (i % checkCadence === 0) { + this._checkPostEditInvariants(); + } + } + + this._checkPostEditInvariants(); + } + + replace(start: number, length: number, text: string) { + this._editScriptAndUpdateMarkers(this.activeFile.fileName, start, start + length, text); + this._checkPostEditInvariants(); + } + + deleteLineRange(startIndex: number, endIndexInclusive: number) { + const startPos = this.convertPositionToOffset(this.activeFile.fileName, { line: startIndex, character: 0 }); + const endPos = this.convertPositionToOffset(this.activeFile.fileName, { + line: endIndexInclusive + 1, + character: 0, + }); + this.replace(startPos, endPos - startPos, ''); + } + + deleteCharBehindMarker(count = 1) { + let offset = this.currentCaretPosition; + const ch = ''; + const checkCadence = (count >> 2) + 1; + + for (let i = 0; i < count; i++) { + this.currentCaretPosition--; + offset--; + this._editScriptAndUpdateMarkers(this.activeFile.fileName, offset, offset + 1, ch); + + if (i % checkCadence === 0) { + this._checkPostEditInvariants(); + } + + // Don't need to examine formatting because there are no formatting changes on backspace. 
+ } + + this._checkPostEditInvariants(); + } + + // Enters lines of text at the current caret position + type(text: string) { + let offset = this.currentCaretPosition; + const selection = this._getSelection(); + this.replace(selection.start, selection.length, ''); + + for (let i = 0; i < text.length; i++) { + const ch = text.charAt(i); + this._editScriptAndUpdateMarkers(this.activeFile.fileName, offset, offset, ch); + + this.currentCaretPosition++; + offset++; + } + + this._checkPostEditInvariants(); + } + + // Enters text as if the user had pasted it + paste(text: string) { + this._editScriptAndUpdateMarkers( + this.activeFile.fileName, + this.currentCaretPosition, + this.currentCaretPosition, + text + ); + this._checkPostEditInvariants(); + } + + verifyDiagnostics(map?: { [marker: string]: { category: string; message: string } }): void { + this.analyze(); + + // organize things per file + const resultPerFile = this._getDiagnosticsPerFile(); + const rangePerFile = this.createMultiMap<Range>(this.getRanges(), (r) => r.fileName); + + if (!hasDiagnostics(resultPerFile) && rangePerFile.size === 0) { + // no errors and no error is expected. we are done + return; + } + + for (const [file, ranges] of rangePerFile.entries()) { + const rangesPerCategory = this.createMultiMap<Range>(ranges, (r) => { + if (map) { + const name = this.getMarkerName(r.marker!); + return map[name].category; + } + + return (r.marker!.data! 
as any).category as string; + }); + + if (!rangesPerCategory.has('error')) { + rangesPerCategory.set('error', []); + } + + if (!rangesPerCategory.has('warning')) { + rangesPerCategory.set('warning', []); + } + + if (!rangesPerCategory.has('information')) { + rangesPerCategory.set('information', []); + } + + const result = resultPerFile.get(file)!; + if (!result.parseResults) { + this.raiseError(`parse results not found for ${file}`); + } + resultPerFile.delete(file); + + for (const [category, expected] of rangesPerCategory.entries()) { + const lines = result.parseResults!.tokenizerOutput.lines; + const actual = + category === 'error' + ? result.errors + : category === 'warning' + ? result.warnings + : category === 'information' + ? result.information + : category === 'unused' + ? result.unused + : category === 'none' + ? [] + : this.raiseError(`unexpected category ${category}`); + + if (expected.length !== actual.length && category !== 'none') { + this.raiseError( + `contains unexpected result - expected: ${stringify(expected)}, actual: ${stringify(actual)}` + ); + } + + for (const range of expected) { + const rangeSpan = TextRange.fromBounds(range.pos, range.end); + const matches = actual.filter((d) => { + const diagnosticSpan = TextRange.fromBounds( + convertPositionToOffset(d.range.start, lines)!, + convertPositionToOffset(d.range.end, lines)! + ); + return this._deepEqual(diagnosticSpan, rangeSpan); + }); + + // If the map is provided, it might say + // a marker should have none. + const name = map ? this.getMarkerName(range.marker!) : ''; + const message = map ? 
map[name].message : undefined; + const expectMatches = !!message; + + if (expectMatches && matches.length === 0) { + this.raiseError(`doesn't contain expected range: ${stringify(range)}`); + } else if (!expectMatches && matches.length !== 0) { + this.raiseError(`${name} should not contain any matches`); + } + + // if map is provided, check message as well + if (message) { + if (matches.filter((d) => message === d.message).length !== 1) { + this.raiseError( + `message doesn't match: ${message} of ${name} - ${stringify( + range + )}, actual: ${stringify(matches)}` + ); + } + } + } + } + } + + if (hasDiagnostics(resultPerFile)) { + this.raiseError(`these diagnostics were unexpected: ${stringify(resultPerFile)}`); + } + + function hasDiagnostics( + resultPerFile: Map< + string, + { + fileUri: Uri; + parseResults: ParseFileResults | undefined; + errors: Diagnostic[]; + warnings: Diagnostic[]; + } + > + ) { + for (const entry of resultPerFile.values()) { + if (entry.errors.length + entry.warnings.length > 0) { + return true; + } + } + + return false; + } + } + + async verifyCodeActions( + verifyMode: _.FourSlashVerificationMode, + map: { + [marker: string]: { + codeActions: { title: string; kind: string; command?: Command; edit?: WorkspaceEdit }[]; + }; + } + ): Promise<any> { + // make sure we don't use cache built from other tests + this.workspace.service.invalidateAndForceReanalysis(InvalidatedReason.Reanalyzed); + this.analyze(); + + // calling `analyze` should have parse and bind all or open user files. make sure that's true at least for open files. + for (const info of this.program.getOpened()) { + if (!info.sourceFile.getModuleSymbolTable()) { + this.console.error(`Module symbol missing?: ${info.uri}, bound: ${!info.sourceFile.isBindingRequired}`); + + // Make sure it is bound. + this.program.getBoundSourceFile(info.uri); + } + } + + // Local copy to use in capture. 
+ const serviceProvider = this.serviceProvider; + for (const range of this.getRanges()) { + const name = this.getMarkerName(range.marker!); + if (!map[name]) { + continue; + } + + const uri = Uri.file(range.fileName, this.serviceProvider); + const sourceFile = this.program.getSourceFile(uri); + if (!sourceFile) { + this.raiseError(`source file not found: ${range.fileName}`); + } + const diagnostics = sourceFile.getDiagnostics(this.configOptions) || []; + + const codeActions = await this._getCodeActions(range); + if (verifyMode === 'exact') { + if (codeActions.length !== map[name].codeActions.length) { + this.raiseError( + `doesn't contain expected result: ${stringify(map[name])}, actual: ${stringify(codeActions)}` + ); + } + } + + for (const expected of map[name].codeActions) { + let expectedCommand: Command | undefined; + if (expected.command) { + expectedCommand = { + title: expected.command.title, + command: expected.command.command, + arguments: convertToString(expected.command.arguments), + }; + } + + const matches = codeActions.filter((a) => { + const actualCommand = a.command + ? { + title: a.command.title, + command: a.command.command, + arguments: convertToString(a.command.arguments), + } + : undefined; + + const actualEdit = a.edit; + + return ( + a.title === expected.title && + a.kind! === expected.kind && + (expectedCommand ? this._deepEqual(actualCommand, expectedCommand) : true) && + (expected.edit ? this._deepEqual(actualEdit, expected.edit) : true) + ); + }); + + if (verifyMode === 'excluded' && matches.length > 0) { + this.raiseError(`unexpected result: ${stringify(map[name])}`); + } else if (verifyMode !== 'excluded' && matches.length !== 1) { + const uri = Uri.file('test2.py', this.serviceProvider); + const sourceFile = this.program.getSourceFile(uri); + const symbolsInTest2 = sourceFile + ? ', symbols in test2.py: ' + + Array.from(sourceFile.getModuleSymbolTable()?.keys() ?? 
[]).join(',') + : ''; + + this.raiseError( + `doesn't contain expected result: ${stringify(expected)}, actual: ${stringify( + codeActions + )}, diagnostics: ${stringify(diagnostics)}${symbolsInTest2}` + ); + } + } + } + + function convertToString(args: any[] | undefined): string[] | undefined { + if (args) { + // Trim `undefined` from the args. + while (args.length > 0) { + if (args[args.length - 1] === undefined) { + args.pop(); + } else { + break; + } + } + } + + return args?.map((a) => { + if (isString(a)) { + // Might be a URI. For comparison purposes in a test, convert it into a + // file path. + if (a.startsWith('file://')) { + return normalizeSlashes(Uri.parse(a, serviceProvider).getFilePath()); + } + return normalizeSlashes(a); + } + + return JSON.stringify(a); + }); + } + } + + async verifyCommand(command: Command, files: { [filePath: string]: string }): Promise<any> { + this.analyze(); + + // Convert command arguments to file Uri strings. That's the expected input for command arguments. + const convertedArgs = command.arguments?.map((arg) => { + if (typeof arg === 'string' && (arg.endsWith('.py') || arg.endsWith('.pyi'))) { + return Uri.file(arg, this.serviceProvider).toString(); + } + return arg; + }); + command.arguments = convertedArgs; + + const commandResult = await this._hostSpecificFeatures.execute( + new TestLanguageService(this.workspace, this.console, this.fs), + { command: command.command, arguments: command.arguments || [] }, + CancellationToken.None + ); + + if (command.command === 'pyright.createtypestub') { + await this._verifyFiles(files); + } else if (command.command === 'pyright.organizeimports') { + // Organize imports command can be used on only one file at a time, + // so there is no looping over "commandResult" or "files". 
+ const workspaceEditResult = commandResult as WorkspaceEdit; + const uri = Object.keys(workspaceEditResult.changes!)[0]; + const textEdit = workspaceEditResult.changes![uri][0]; + const actualText = textEdit.newText; + const expectedText: string = Object.values(files)[0]; + + if (actualText !== expectedText) { + this.raiseError( + `doesn't contain expected result: ${stringify(expectedText)}, actual: ${stringify(actualText)}` + ); + } + } + return commandResult; + } + + verifyWorkspaceEdit(expected: WorkspaceEdit, actual: WorkspaceEdit, marker?: string) { + return verifyWorkspaceEdit(expected, actual, marker); + } + + async verifyInvokeCodeAction( + map: { + [marker: string]: { title: string; files?: { [filePath: string]: string }; edits?: TextEdit[] }; + }, + verifyCodeActionCount?: boolean + ): Promise<any> { + this.analyze(); + + for (const range of this.getRanges()) { + const name = this.getMarkerName(range.marker!); + if (!map[name]) { + continue; + } + + const ls = new TestLanguageService(this.workspace, this.console, this.fs); + + const codeActions = await this._getCodeActions(range); + if (verifyCodeActionCount) { + if (codeActions.length !== Object.keys(map).length) { + this.raiseError( + `doesn't contain expected result count: ${stringify(map[name])}, actual: ${stringify( + codeActions + )}` + ); + } + } + + const matches = codeActions.filter((c) => c.title === map[name].title); + if (matches.length === 0) { + this.raiseError( + `doesn't contain expected result: ${stringify(map[name])}, actual: ${stringify(codeActions)}` + ); + } + + for (const codeAction of matches) { + const results = await this._hostSpecificFeatures.execute( + ls, + { + command: codeAction.command!.command, + arguments: codeAction.command?.arguments || [], + }, + CancellationToken.None + ); + + if (map[name].edits) { + const workspaceEdits = CommandResult.is(results) ? 
results.edits : (results as WorkspaceEdit); + for (const edits of Object.values(workspaceEdits.changes!)) { + for (const edit of edits) { + if (map[name].edits!.filter((e) => this._editsAreEqual(e, edit)).length !== 1) { + this.raiseError( + `${name} doesn't contain expected result: ${stringify( + map[name] + )}, actual: ${stringify(edits)}` + ); + } + } + } + } + } + + if (map[name].files) { + await this._verifyFiles(map[name].files!); + } + } + } + + verifyHover(kind: MarkupKind, map: { [marker: string]: string | null }): void { + // Do not force analyze, it can lead to test passing while it doesn't work in product + for (const range of this.getRanges()) { + const name = this.getMarkerName(range.marker!); + const expected = map[name]; + if (expected === undefined) { + continue; + } + + const rangePos = this.convertOffsetsToRange(range.fileName, range.pos, range.end); + const provider = new HoverProvider( + this.program, + range.fileUri, + rangePos.start, + kind, + CancellationToken.None + ); + const actual = provider.getHover(); + + // if expected is null then there should be nothing shown on hover + if (expected === null) { + assert.equal(actual, undefined); + continue; + } + + assert.ok(actual); + + assert.deepEqual(actual!.range, rangePos); + + if (MarkupContent.is(actual!.contents)) { + assert.equal(actual!.contents.value, expected); + assert.equal(actual!.contents.kind, kind); + } else { + assert.fail(`Unexpected type of contents object "${actual!.contents}", should be MarkupContent.`); + } + } + } + + verifyCaretAtMarker(markerName = '') { + const pos = this.getMarkerByName(markerName); + if (pos.fileName !== this.activeFile.fileName) { + throw new Error( + `verifyCaretAtMarker failed - expected to be in file "${pos.fileName}", but was in file "${this.activeFile.fileName}"` + ); + } + if (pos.position !== this.currentCaretPosition) { + throw new Error( + `verifyCaretAtMarker failed - expected to be at marker "/*${markerName}*/, but was at position ${ + 
this.currentCaretPosition + }(${this._getLineColStringAtPosition(this.currentCaretPosition)})` + ); + } + } + + verifyCurrentLineContent(text: string) { + const actual = this._getCurrentLineContent(); + if (actual !== text) { + throw new Error( + 'verifyCurrentLineContent\n' + this._displayExpectedAndActualString(text, actual, /* quoted */ true) + ); + } + } + + verifyCurrentFileContent(text: string) { + this._verifyFileContent(this.activeFile.fileName, text); + } + + verifyTextAtCaretIs(text: string) { + const actual = this.getFileContent(this.activeFile.fileName).substring( + this.currentCaretPosition, + this.currentCaretPosition + text.length + ); + if (actual !== text) { + throw new Error( + 'verifyTextAtCaretIs\n' + this._displayExpectedAndActualString(text, actual, /* quoted */ true) + ); + } + } + + verifyRangeIs(expectedText: string, includeWhiteSpace?: boolean) { + this._verifyTextMatches(this.rangeText(this._getOnlyRange()), !!includeWhiteSpace, expectedText); + } + + async verifyCompletion( + verifyMode: _.FourSlashVerificationMode, + docFormat: MarkupKind, + map: { + [marker: string]: { + completions: _.FourSlashCompletionItem[]; + }; + }, + abbrMap?: { + [abbr: string]: { + readonly importFrom?: string; + readonly importName: string; + }; + } + ): Promise<void> { + this.analyze(); + + for (const marker of this.getMarkers()) { + const markerName = this.getMarkerName(marker); + if (!map[markerName]) { + continue; + } + + this.lastKnownMarker = markerName; + + const expectedCompletions = map[markerName].completions; + const provider = this.getCompletionResults(this, marker, docFormat, abbrMap); + const results = await provider.getCompletions(); + if (results) { + if (verifyMode === 'exact') { + if (results.items.length !== expectedCompletions.length) { + assert.fail( + `${markerName} - Expected ${expectedCompletions.length} items but received ${ + results.items.length + }. 
Actual completions:\n${stringify(results.items.map((r) => r.label))}` + ); + } + } + + for (let i = 0; i < expectedCompletions.length; i++) { + const expected = expectedCompletions[i]; + const actualIndex = results.items.findIndex( + (a) => + a.label === expected.label && + (expected.kind ? a.kind === expected.kind : true) && + (expected.detail ? a.detail === expected.detail : true) && + (expected.documentation && MarkupContent.is(a.documentation) + ? a.documentation.value === expected.documentation + : true) + ); + if (actualIndex >= 0) { + if (verifyMode === 'excluded') { + // we're not supposed to find the completions passed to the test + assert.fail( + `${markerName} - Completion item with label "${ + expected.label + }" unexpected. Actual completions:\n${stringify(results.items.map((r) => r.label))}` + ); + } + + const actual: CompletionItem = results.items[actualIndex]; + + if (expected.additionalTextEdits !== undefined) { + if (actual.additionalTextEdits === undefined) { + provider.resolveCompletionItem(actual); + } + } + + this.verifyCompletionItem(expected, actual); + + if (expected.documentation !== undefined) { + if (actual.documentation === undefined && actual.data) { + provider.resolveCompletionItem(actual); + } + + if (MarkupContent.is(actual.documentation)) { + assert.strictEqual(actual.documentation.value, expected.documentation); + assert.strictEqual(actual.documentation.kind, docFormat); + } else { + assert.fail( + `${markerName} - Unexpected type of contents object "${actual.documentation}", should be MarkupContent.` + ); + } + } + + results.items.splice(actualIndex, 1); + } else { + if (verifyMode === 'included' || verifyMode === 'exact') { + // we're supposed to find all items passed to the test + assert.fail( + `${markerName} - Completion item with label "${ + expected.label + }" expected. 
Actual completions:\n${stringify(results.items.map((r) => r.label))}` + ); + } + } + } + + if (verifyMode === 'exact') { + if (results.items.length !== 0) { + // we removed every item we found, there should not be any remaining + assert.fail( + `${markerName} - Completion items unexpected: ${stringify( + results.items.map((r) => r.label) + )}` + ); + } + } + } else { + if (verifyMode !== 'exact' || expectedCompletions.length > 0) { + assert.fail(`${markerName} - Failed to get completions`); + } + } + } + } + + verifySignature( + docFormat: MarkupKind, + map: { + [marker: string]: { + noSig?: boolean; + signatures?: { + label: string; + parameters: string[]; + documentation?: string; + }[]; + activeParameters?: (number | undefined)[]; + callHasParameters?: boolean; + }; + } + ): void { + this.analyze(); + + for (const marker of this.getMarkers()) { + const fileName = marker.fileName; + const name = this.getMarkerName(marker); + + if (!(name in map)) { + continue; + } + + const expected = map[name]; + const position = this.convertOffsetToPosition(fileName, marker.position); + + const actual = new SignatureHelpProvider( + this.program, + Uri.file(fileName, this.serviceProvider), + position, + docFormat, + /* hasSignatureLabelOffsetCapability */ true, + /* hasActiveParameterCapability */ true, + /* context */ undefined, + new PyrightDocStringService(), + CancellationToken.None + ).getSignatureHelp(); + + if (expected.noSig) { + assert.equal(actual, undefined); + continue; + } + + assert.ok(actual); + assert.ok(actual!.signatures); + assert.ok(expected.activeParameters); + assert.equal(actual!.signatures.length, expected.activeParameters.length); + + actual!.signatures.forEach((sig, index) => { + const expectedSig = expected.signatures![index]; + assert.equal(sig.label, expectedSig.label); + + assert.ok(sig.parameters); + const actualParameters: string[] = []; + + sig.parameters!.forEach((p) => { + actualParameters.push(isString(p.label) ? 
p.label : sig.label.substring(p.label[0], p.label[1])); + }); + + assert.deepEqual(actualParameters, expectedSig.parameters); + + if (expectedSig.documentation === undefined) { + assert.equal(sig.documentation, undefined); + } else { + assert.deepEqual(sig.documentation, { + kind: docFormat, + value: expectedSig.documentation, + }); + } + }); + + assert.deepEqual( + actual!.signatures.map((sig) => sig.activeParameter), + expected.activeParameters + ); + + if (expected.callHasParameters !== undefined) { + const isActive = (sig: { parameters: string[] }) => + !expected.callHasParameters && !sig.parameters?.length; + + const activeSignature = expected.signatures?.findIndex(isActive) ?? undefined; + assert.equal(actual.activeSignature, activeSignature); + } + } + } + + verifyFindAllReferences( + map: { + [marker: string]: { + references: DocumentRange[]; + }; + }, + createDocumentRange?: (fileUri: Uri, result: CollectionResult, parseResults: ParseFileResults) => DocumentRange, + convertToLocation?: (fs: ReadOnlyFileSystem, ranges: DocumentRange) => Location | undefined + ) { + this.analyze(); + + for (const name of this.getMarkerNames()) { + const marker = this.getMarkerByName(name); + const fileName = marker.fileName; + + if (!(name in map)) { + continue; + } + + let expected = map[name].references; + expected = expected.map((c) => { + return { + ...c, + uri: c.uri ?? Uri.file((c as any).path, this.serviceProvider), + }; + }); + + const position = this.convertOffsetToPosition(fileName, marker.position); + + const actual = new ReferencesProvider( + this.program, + CancellationToken.None, + createDocumentRange, + convertToLocation + ).reportReferences(Uri.file(fileName, this.serviceProvider), position, /* includeDeclaration */ true); + assert.strictEqual(actual?.length ?? 
0, expected.length, `${name} has failed`); + + for (const r of convertDocumentRangesToLocation(this.program.fileSystem, expected, convertToLocation)) { + assert.equal(actual?.filter((d) => this._deepEqual(d, r)).length, 1); + } + } + } + + verifyShowCallHierarchyGetIncomingCalls(map: { + [marker: string]: { + items: _.FourSlashCallHierarchyItem[]; + }; + }) { + this.analyze(); + + for (const marker of this.getMarkers()) { + const fileName = marker.fileName; + const name = this.getMarkerName(marker); + + if (!(name in map)) { + continue; + } + + const expectedFilePath = map[name].items.map((x) => x.filePath); + const expectedRange = map[name].items.map((x) => x.range); + const expectedName = map[name].items.map((x) => x.name); + + const position = this.convertOffsetToPosition(fileName, marker.position); + const actual = new CallHierarchyProvider( + this.program, + Uri.file(fileName, this.serviceProvider), + position, + CancellationToken.None + ).getIncomingCalls(); + + assert.strictEqual(actual?.length ?? 0, expectedFilePath.length, `${name} has failed`); + assert.strictEqual(actual?.length ?? 0, expectedRange.length, `${name} has failed`); + assert.strictEqual(actual?.length ?? 
0, expectedName.length, `${name} has failed`); + + if (actual) { + for (const a of actual) { + assert.strictEqual(expectedRange?.filter((e) => this._deepEqual(a.from.range, e)).length, 1); + assert.strictEqual(expectedName?.filter((e) => this._deepEqual(a.from.name, e)).length, 1); + assert.ok( + expectedFilePath?.filter((e) => + this._deepEqual(a.from.uri, Uri.file(e, this.serviceProvider).toString()) + ).length >= 1 + ); + } + } + } + } + + verifyShowCallHierarchyGetOutgoingCalls(map: { + [marker: string]: { + items: _.FourSlashCallHierarchyItem[]; + }; + }) { + this.analyze(); + + for (const marker of this.getMarkers()) { + const fileName = marker.fileName; + const name = this.getMarkerName(marker); + + if (!(name in map)) { + continue; + } + + const expectedFilePath = map[name].items.map((x) => x.filePath); + const expectedRange = map[name].items.map((x) => x.range); + const expectedName = map[name].items.map((x) => x.name); + + const position = this.convertOffsetToPosition(fileName, marker.position); + const actual = new CallHierarchyProvider( + this.program, + Uri.file(fileName, this.serviceProvider), + position, + CancellationToken.None + ).getOutgoingCalls(); + + assert.strictEqual(actual?.length ?? 0, expectedFilePath.length, `${name} has failed`); + assert.strictEqual(actual?.length ?? 0, expectedRange.length, `${name} has failed`); + assert.strictEqual(actual?.length ?? 0, expectedName.length, `${name} has failed`); + if (actual) { + for (const a of actual) { + assert.strictEqual(expectedRange?.filter((e) => this._deepEqual(a.to.range, e)).length, 1); + assert.strictEqual(expectedName?.filter((e) => this._deepEqual(a.to.name, e)).length, 1); + assert.ok( + expectedFilePath?.filter((e) => + this._deepEqual(a.to.uri, Uri.file(e, this.serviceProvider).toString()) + ).length >= 1 + ); + } + } + } + } + + getDocumentHighlightKind(m?: Marker): DocumentHighlightKind | undefined { + const kind = m?.data ? 
((m.data as any).kind as string) : undefined; + switch (kind) { + case 'text': + return DocumentHighlightKind.Text; + case 'read': + return DocumentHighlightKind.Read; + case 'write': + return DocumentHighlightKind.Write; + default: + return undefined; + } + } + + verifyHighlightReferences(map: { + [marker: string]: { + references: DocumentHighlight[]; + }; + }) { + this.analyze(); + + for (const name of Object.keys(map)) { + const marker = this.getMarkerByName(name); + const fileName = marker.fileName; + + const expected = map[name].references; + + const position = this.convertOffsetToPosition(fileName, marker.position); + const actual = new DocumentHighlightProvider( + this.program, + Uri.file(fileName, this.serviceProvider), + position, + CancellationToken.None + ).getDocumentHighlight(); + + assert.equal(actual?.length ?? 0, expected.length); + + for (const r of expected) { + const match = actual?.filter((h) => this._deepEqual(h.range, r.range)); + assert.equal(match?.length, 1); + + if (r.kind) { + assert.equal(match![0].kind, r.kind); + } + } + } + } + + fixupDefinitionsToMatchExpected(actual: DocumentRange[] | undefined): any { + return actual?.map((a) => { + const { uri, ...restOfActual } = a; + return { + ...restOfActual, + path: uri.getFilePath(), + }; + }); + } + + verifyFindDefinitions( + map: { + [marker: string]: { + definitions: DocumentRange[]; + }; + }, + filter: DefinitionFilter = DefinitionFilter.All + ) { + this.analyze(); + + for (const marker of this.getMarkers()) { + const fileName = marker.fileName; + const name = this.getMarkerName(marker); + + if (!(name in map)) { + continue; + } + + const expected = map[name].definitions; + const uri = Uri.file(fileName, this.serviceProvider); + // If we're going to def from a file, act like it's open. 
+ if (!this.program.getSourceFileInfo(uri)) { + const file = this.testData.files.find((v) => v.fileName === fileName); + if (file) { + this.program.setFileOpened(uri, file.version, file.content); + } + } + + const position = this.convertOffsetToPosition(fileName, marker.position); + let actual = new DefinitionProvider( + this.program, + uri, + position, + filter, + CancellationToken.None + ).getDefinitions(); + + assert.equal(actual?.length ?? 0, expected.length, `No definitions found for marker "${name}"`); + actual = this.fixupDefinitionsToMatchExpected(actual!); + + for (const r of expected) { + assert.equal( + actual?.filter((d) => this._deepEqual(d, r)).length, + 1, + `No match found for ${JSON.stringify(r)} from marker ${name}` + ); + } + } + } + + verifyFindTypeDefinitions(map: { + [marker: string]: { + definitions: DocumentRange[]; + }; + }) { + this.analyze(); + + for (const marker of this.getMarkers()) { + const fileName = marker.fileName; + const name = this.getMarkerName(marker); + + if (!(name in map)) { + continue; + } + + const expected = map[name].definitions; + + const position = this.convertOffsetToPosition(fileName, marker.position); + let actual = new TypeDefinitionProvider( + this.program, + Uri.file(fileName, this.serviceProvider), + position, + CancellationToken.None + ).getDefinitions(); + actual = this.fixupDefinitionsToMatchExpected(actual!); + + assert.strictEqual(actual?.length ?? 
0, expected.length, name); + + for (const r of expected) { + assert.strictEqual(actual?.filter((d) => this._deepEqual(d, r)).length, 1, name); + } + } + } + + verifyRename( + map: { + [marker: string]: { + newName: string; + changes: FileEditAction[]; + }; + }, + isUntitled = false + ) { + this.analyze(); + + for (const marker of this.getMarkers()) { + const fileName = marker.fileName; + const name = this.getMarkerName(marker); + + if (!(name in map)) { + continue; + } + + const expected = map[name]; + expected.changes = expected.changes.map((c) => { + return { + ...c, + fileUri: c.fileUri ?? Uri.file((c as any).filePath, this.serviceProvider), + }; + }); + + const position = this.convertOffsetToPosition(fileName, marker.position); + const actual = new RenameProvider( + this.program, + isUntitled + ? Uri.parse(`untitled:${fileName.replace(/\\/g, '/')}`, this.serviceProvider) + : Uri.file(fileName, this.serviceProvider), + position, + CancellationToken.None + ).renameSymbol(expected.newName, /* isDefaultWorkspace */ false, isUntitled); + + verifyWorkspaceEdit( + convertToWorkspaceEdit(this.program.fileSystem, { edits: expected.changes, fileOperations: [] }), + actual ?? 
{ documentChanges: [] } + ); + } + } + + verifyTypeVerifierResults( + packageName: string, + ignoreUnknownTypesFromImports: boolean, + verboseOutput: boolean, + expected: PackageTypeReport + ) { + const commandLineOptions = new CommandLineOptions( + this.configOptions.projectRoot.getFilePath(), + /* fromLanguageServer */ false + ); + commandLineOptions.configSettings.verboseOutput = verboseOutput; + const verifier = new PackageTypeVerifier( + this.serviceProvider, + testAccessHost, + commandLineOptions, + packageName, + ignoreUnknownTypesFromImports + ); + const report = verifier.verify(); + + assert.strictEqual(report.generalDiagnostics.length, expected.generalDiagnostics.length); + assert.strictEqual(report.missingClassDocStringCount, expected.missingClassDocStringCount); + assert.strictEqual(report.missingDefaultParamCount, expected.missingDefaultParamCount); + assert.strictEqual(report.missingFunctionDocStringCount, expected.missingFunctionDocStringCount); + assert.strictEqual(report.moduleName, expected.moduleName); + assert.strictEqual(report.packageName, expected.packageName); + assert.deepStrictEqual(Array.from(report.symbols.keys()), Array.from(expected.symbols.keys())); + } + + setCancelled(numberOfCalls: number): void { + this._cancellationToken.setCancelled(numberOfCalls); + } + + resetCancelled(): void { + this._cancellationToken.resetCancelled(); + } + + convertPositionToOffset(fileName: string, position: Position): number { + const lines = this._getTextRangeCollection(fileName); + return convertPositionToOffset(position, lines)!; + } + + convertOffsetToPosition(fileName: string, offset: number): Position { + const lines = this._getTextRangeCollection(fileName); + + return convertOffsetToPosition(offset, lines); + } + + analyze() { + while (this.program.analyze()) { + // Continue to call analyze until it completes. Since we're not + // specifying a timeout, it should complete the first time. 
+ } + } + + protected findFile(indexOrName: string | number): FourSlashFile { + if (typeof indexOrName === 'number') { + const index = indexOrName; + if (index >= this.testData.files.length) { + throw new Error( + `File index (${index}) in openFile was out of range. There are only ${this.testData.files.length} files in this test.` + ); + } else { + return this.testData.files[index]; + } + } else if (isString(indexOrName)) { + const { file, availableNames } = this._tryFindFileWorker(indexOrName); + if (!file) { + throw new Error( + `No test file named "${indexOrName}" exists. Available file names are: ${availableNames.join(', ')}` + ); + } + return file; + } else { + return debug.assertNever(indexOrName); + } + } + + protected getCompletionResults( + state: TestState, + marker: Marker, + docFormat: MarkupKind, + abbrMap?: { + [abbr: string]: { + readonly importFrom?: string; + readonly importName: string; + }; + } + ): { getCompletions(): CompletionList | null; resolveCompletionItem(item: CompletionItem): void } { + const filePath = marker.fileName; + const completionPosition = this.convertOffsetToPosition(filePath, marker.position); + + const options: CompletionOptions = { + format: docFormat, + snippet: true, + lazyEdit: false, + }; + + const provider = new CompletionProvider( + this.program, + Uri.file(filePath, this.serviceProvider), + completionPosition, + options, + CancellationToken.None + ); + + return { + getCompletions: () => provider.getCompletions(), + resolveCompletionItem: (i) => provider.resolveCompletionItem(i), + }; + } + + protected getFileContent(fileName: string): string { + const files = this.testData.files.filter((f) => + this.testFS.ignoreCase + ? 
compareStringsCaseInsensitive(f.fileName, fileName) === Comparison.EqualTo + : compareStringsCaseSensitive(f.fileName, fileName) === Comparison.EqualTo + ); + return files[0].content; + } + + protected convertOffsetsToRange(fileName: string, startOffset: number, endOffset: number): PositionRange { + const lines = this._getTextRangeCollection(fileName); + + return { + start: convertOffsetToPosition(startOffset, lines), + end: convertOffsetToPosition(endOffset, lines), + }; + } + + protected raiseError(message: string): never { + throw new Error(this._messageAtLastKnownMarker(message)); + } + + protected createMultiMap<T>(values?: T[], getKey?: (t: T) => string): MultiMap<T> { + const map = new Map<string, T[]>() as MultiMap<T>; + map.add = multiMapAdd; + map.remove = multiMapRemove; + + if (values && getKey) { + for (const value of values) { + map.add(getKey(value), value); + } + } + + return map; + + function multiMapAdd<T>(this: MultiMap<T>, key: string, value: T) { + let values = this.get(key); + if (values) { + values.push(value); + } else { + this.set(key, (values = [value])); + } + return values; + } + + function multiMapRemove<T>(this: MultiMap<T>, key: string, value: T) { + const values = this.get(key); + if (values) { + values.forEach((v, i, arr) => { + if (v === value) { + arr.splice(i, 1); + } + }); + if (!values.length) { + this.delete(key); + } + } + } + } + + protected rangeText({ fileName, pos, end }: Range): string { + return this.getFileContent(fileName).slice(pos, end); + } + + protected verifyCompletionItem(expected: _.FourSlashCompletionItem, actual: CompletionItem) { + assert.strictEqual(actual.label, expected.label); + assert.strictEqual(actual.detail, expected.detail); + assert.strictEqual(actual.kind, expected.kind); + + assert.strictEqual(actual.insertText, expected.insertionText); + this._verifyEdit(actual.textEdit as TextEdit, expected.textEdit); + this._verifyEdits(actual.additionalTextEdits, expected.additionalTextEdits); + + if 
(expected.detailDescription !== undefined) { + assert.strictEqual(actual.labelDetails?.description, expected.detailDescription); + } + + if (expected.commitCharacters !== undefined) { + expect(expected.commitCharacters.sort()).toEqual(actual.commitCharacters?.sort() ?? []); + } + } + + protected createAnalysisService( + nullConsole: ConsoleInterface, + importResolverFactory: ImportResolverFactory, + backgroundAnalysisProgramFactory: BackgroundAnalysisProgramFactory, + configOptions: ConfigOptions, + host: Host + ) { + // we do not initiate automatic analysis or file watcher in test. + const service = new AnalyzerService('test service', this.serviceProvider, { + console: nullConsole, + hostFactory: () => host, + importResolverFactory, + backgroundAnalysisProgramFactory, + configOptions, + fileSystem: this.fs, + libraryReanalysisTimeProvider: () => 0, + shouldRunAnalysis: () => true, + }); + + // directly set files to track rather than using fileSpec from config + // to discover those files from file system + service.test_program.setTrackedFiles( + this.files + .filter((path) => { + const fileExtension = getFileExtension(path).toLowerCase(); + return fileExtension === '.py' || fileExtension === '.pyi'; + }) + .map((path) => Uri.file(path, this.serviceProvider)) + .filter((path) => service.isTracked(path)) + ); + + return service; + } + + private _convertGlobalOptionsToConfigOptions(projectRoot: string, mountPaths?: Map<string, string>): ConfigOptions { + const configOptions = new ConfigOptions(Uri.file(projectRoot, this.serviceProvider)); + + // add more global options as we need them + const newConfigOptions = this._applyTestConfigOptions(configOptions, mountPaths); + + // default tests to run use compact signatures. + newConfigOptions.functionSignatureDisplay = SignatureDisplayType.compact; + + return newConfigOptions; + } + + private _applyTestConfigOptions(configOptions: ConfigOptions, mountPaths?: Map<string, string>) { + // Always enable "test mode". 
+ configOptions.internalTestMode = true; + + // Always analyze all files + configOptions.checkOnlyOpenFiles = false; + + // make sure we set typing path + if (configOptions.stubPath === undefined) { + configOptions.stubPath = Uri.file(vfs.MODULE_PATH, this.serviceProvider).combinePaths('typings'); + } + + configOptions.include.push(getFileSpec(configOptions.projectRoot, '.')); + configOptions.exclude.push(getFileSpec(configOptions.projectRoot, typeshedFolder.getFilePath())); + configOptions.exclude.push(getFileSpec(configOptions.projectRoot, distlibFolder.getFilePath())); + configOptions.exclude.push(getFileSpec(configOptions.projectRoot, libFolder.getFilePath())); + + if (mountPaths) { + for (const mountPath of mountPaths.keys()) { + configOptions.exclude.push(getFileSpec(configOptions.projectRoot, mountPath)); + } + } + + if (configOptions.functionSignatureDisplay === undefined) { + configOptions.functionSignatureDisplay === SignatureDisplayType.compact; + } + + return configOptions; + } + + private _getParserOutput(fileName: string) { + const file = this.program.getBoundSourceFile(Uri.file(fileName, this.serviceProvider))!; + return file?.getParseResults(); + } + + private _getTextRangeCollection(fileName: string): TextRangeCollection<TextRange> { + if (this.files.includes(fileName)) { + const tokenizerOutput = this._getParserOutput(fileName)?.tokenizerOutput; + if (tokenizerOutput) { + return tokenizerOutput.lines; + } + } + + // slow path + const fileContents = this.fs.readFileSync(Uri.file(fileName, this.serviceProvider), 'utf8'); + const tokenizer = new Tokenizer(); + return tokenizer.tokenize(fileContents).lines; + } + + private _messageAtLastKnownMarker(message: string) { + const locationDescription = this.lastKnownMarker + ? 
this.lastKnownMarker + : this._getLineColStringAtPosition(this.currentCaretPosition); + return `At ${locationDescription}: ${message}`; + } + + private _checkPostEditInvariants() { + // blank for now + } + + private _editScriptAndUpdateMarkers(fileName: string, editStart: number, editEnd: number, newText: string) { + let fileContent = this.getFileContent(fileName); + fileContent = fileContent.slice(0, editStart) + newText + fileContent.slice(editEnd); + const uri = Uri.file(fileName, this.serviceProvider); + + this.testFS.writeFileSync(uri, fileContent, 'utf8'); + const newVersion = (this.program.getSourceFile(uri)?.getClientVersion() ?? -1) + 1; + this.program.setFileOpened(uri, newVersion, fileContent); + + for (const marker of this.testData.markers) { + if (marker.fileName === fileName) { + marker.position = this._updatePosition(marker.position, editStart, editEnd, newText); + } + } + + for (const range of this.testData.ranges) { + if (range.fileName === fileName) { + range.pos = this._updatePosition(range.pos, editStart, editEnd, newText); + range.end = this._updatePosition(range.end, editStart, editEnd, newText); + } + } + this.testData.rangesByText = undefined; + } + + private _removeWhitespace(text: string): string { + return text.replace(/\s/g, ''); + } + + private _getOnlyRange() { + const ranges = this.getRanges(); + if (ranges.length !== 1) { + this.raiseError('Exactly one range should be specified in the test file.'); + } + + return ranges[0]; + } + + private _verifyFileContent(fileName: string, text: string) { + const actual = this.getFileContent(fileName); + if (actual !== text) { + throw new Error(`verifyFileContent failed:\n${this._showTextDiff(text, actual)}`); + } + } + + private _verifyTextMatches(actualText: string, includeWhitespace: boolean, expectedText: string) { + const removeWhitespace = (s: string): string => (includeWhitespace ? 
s : this._removeWhitespace(s)); + if (removeWhitespace(actualText) !== removeWhitespace(expectedText)) { + this.raiseError( + `Actual range text doesn't match expected text.\n${this._showTextDiff(expectedText, actualText)}` + ); + } + } + + private _getSelection(): TextRange { + return TextRange.fromBounds( + this.currentCaretPosition, + this.selectionEnd === -1 ? this.currentCaretPosition : this.selectionEnd + ); + } + + private _getLineContent(index: number) { + const text = this.getFileContent(this.activeFile.fileName); + const pos = this.convertPositionToOffset(this.activeFile.fileName, { line: index, character: 0 }); + let startPos = pos; + let endPos = pos; + + while (startPos > 0) { + const ch = text.charCodeAt(startPos - 1); + if (ch === Char.CarriageReturn || ch === Char.LineFeed) { + break; + } + + startPos--; + } + + while (endPos < text.length) { + const ch = text.charCodeAt(endPos); + + if (ch === Char.CarriageReturn || ch === Char.LineFeed) { + break; + } + + endPos++; + } + + return text.substring(startPos, endPos); + } + + // Get the text of the entire line the caret is currently at + private _getCurrentLineContent() { + return this._getLineContent( + this.convertOffsetToPosition(this.activeFile.fileName, this.currentCaretPosition).line + ); + } + + private _tryFindFileWorker(name: string): { + readonly file: FourSlashFile | undefined; + readonly availableNames: readonly string[]; + } { + name = normalizePath(name); + + let file: FourSlashFile | undefined; + const availableNames: string[] = []; + this.testData.files.forEach((f) => { + const fn = normalizePath(f.fileName); + if (fn) { + if (fn === name) { + file = f; + } + + availableNames.push(fn); + } + }); + + assert.ok(file); + return { file, availableNames }; + } + + private _getLineColStringAtPosition(position: number, file: FourSlashFile = this.activeFile) { + const pos = this.convertOffsetToPosition(file.fileName, position); + return `line ${pos.line + 1}, col ${pos.character}`; + } + + 
private _showTextDiff(expected: string, actual: string): string { + // Only show whitespace if the difference is whitespace-only. + if (this._differOnlyByWhitespace(expected, actual)) { + expected = this._makeWhitespaceVisible(expected); + actual = this._makeWhitespaceVisible(actual); + } + return this._displayExpectedAndActualString(expected, actual); + } + + private _differOnlyByWhitespace(a: string, b: string) { + return this._removeWhitespace(a) === this._removeWhitespace(b); + } + + private _displayExpectedAndActualString(expected: string, actual: string, quoted = false) { + const expectMsg = '\x1b[1mExpected\x1b[0m\x1b[31m'; + const actualMsg = '\x1b[1mActual\x1b[0m\x1b[31m'; + const expectedString = quoted ? '"' + expected + '"' : expected; + const actualString = quoted ? '"' + actual + '"' : actual; + return `\n${expectMsg}:\n${expectedString}\n\n${actualMsg}:\n${actualString}`; + } + + private _makeWhitespaceVisible(text: string) { + return text + .replace(/ /g, '\u00B7') + .replace(/\r/g, '\u00B6') + .replace(/\n/g, '\u2193\n') + .replace(/\t/g, '\u2192 '); + } + + private _updatePosition(position: number, editStart: number, editEnd: number, { length }: string): number { + // If inside the edit, return -1 to mark as invalid + return position <= editStart ? position : position < editEnd ? 
-1 : position + length - +(editEnd - editStart); + } + + private _getDiagnosticsPerFile() { + const sourceFiles = this.files.map((f) => this.program.getSourceFile(Uri.file(f, this.serviceProvider))); + const results = sourceFiles.map((sourceFile, index) => { + if (sourceFile) { + const diagnostics = sourceFile.getDiagnostics(this.configOptions) || []; + const fileUri = sourceFile.getUri(); + if (sourceFile.isParseRequired()) { + sourceFile.parse( + this.program.configOptions, + this.program.importResolver, + sourceFile.getFileContent() + ); + } + const value = { + fileUri, + parseResults: sourceFile.getParseResults(), + errors: diagnostics.filter((diag) => diag.category === DiagnosticCategory.Error), + warnings: diagnostics.filter((diag) => diag.category === DiagnosticCategory.Warning), + information: diagnostics.filter((diag) => diag.category === DiagnosticCategory.Information), + unused: diagnostics.filter((diag) => diag.category === DiagnosticCategory.UnusedCode), + }; + + // Don't use the uri key, but rather the file name, because other spots + // in the test data assume file paths. + return [this.files[index], value] as [string, typeof value]; + } else { + this.raiseError(`Source file not found for ${this.files[index]}`); + } + }); + + return new Map<string, (typeof results)[0][1]>(results); + } + + private _deepEqual(a: any, e: any) { + try { + // NOTE: find better way. 
+ assert.deepStrictEqual(a, e); + } catch { + return false; + } + + return true; + } + + private async _waitForFile(filePath: string) { + const uri = Uri.file(filePath, this.serviceProvider); + while (!this.fs.existsSync(uri)) { + await new Promise<void>((res) => + setTimeout(() => { + res(); + }, 200) + ); + } + } + + private _getCodeActions(range: Range) { + const file = range.fileName; + const textRange = { + start: this.convertOffsetToPosition(file, range.pos), + end: this.convertOffsetToPosition(file, range.end), + }; + + return this._hostSpecificFeatures.getCodeActionsForPosition( + this.workspace, + range.fileUri, + textRange, + CancellationToken.None + ); + } + + private async _verifyFiles(files: { [filePath: string]: string }) { + for (const filePath of Object.keys(files)) { + const expected = files[filePath]; + const normalizedFilePath = normalizeSlashes(filePath); + + // wait until the file exists + await this._waitForFile(normalizedFilePath); + + const actual = this.fs.readFileSync(Uri.file(normalizedFilePath, this.serviceProvider), 'utf8'); + if (actual !== expected) { + this.raiseError( + `doesn't contain expected result: ${stringify(expected)}, actual: ${stringify(actual)}` + ); + } + } + } + + private _editsAreEqual(actual: TextEdit | undefined, expected: TextEdit | undefined) { + if (actual === expected) { + return true; + } + + if (actual === undefined || expected === undefined) { + return false; + } + + return rangesAreEqual(actual.range, expected.range) && actual.newText === expected.newText; + } + + private _verifyEdit(actual: TextEdit | undefined, expected: TextEdit | undefined) { + if (!this._editsAreEqual(actual, expected)) { + this.raiseError(`doesn't contain expected result: ${stringify(expected)}, actual: ${stringify(actual)}`); + } + } + + private _verifyEdits(actual: TextEdit[] | undefined, expected: TextEdit[] | undefined) { + actual = actual ?? []; + expected = expected ?? 
[]; + + let extra = expected.slice(0); + let left = actual.slice(0); + + for (const item of actual) { + extra = extra.filter((e) => !this._editsAreEqual(e, item)); + } + + for (const item of expected) { + left = left.filter((e) => !this._editsAreEqual(e, item)); + } + + if (extra.length > 0 || left.length > 0) { + this.raiseError(`doesn't contain expected result: ${stringify(extra)}, actual: ${stringify(left)}`); + } + } +} + +export function parseAndGetTestState( + code: string, + projectRoot = '/', + anonymousFileName = 'unnamedFile.py', + testFS?: vfs.TestFileSystem +) { + const data = parseTestData(normalizeSlashes(projectRoot), code, anonymousFileName); + const state = new TestState( + normalizeSlashes('/'), + data, + /* mountPath */ undefined, + /* hostSpecificFeatures */ undefined, + testFS + ); + + return { data, state }; +} + +export function getNodeForRange(codeOrState: string | TestState, markerName = 'marker'): ParseNode { + const state = isString(codeOrState) ? parseAndGetTestState(codeOrState).state : codeOrState; + const range = state.getRangeByMarkerName(markerName); + assert(range); + + const textRange = TextRange.fromBounds(range.pos, range.end); + + const node = getNodeAtMarker(state, markerName); + let current: ParseNode | undefined = node; + while (current) { + if (TextRange.containsRange(current, textRange)) { + return current; + } + + current = current.parent; + } + + return node; +} + +export function getNodeAtMarker(codeOrState: string | TestState, markerName = 'marker'): ParseNode { + const state = isString(codeOrState) ? 
parseAndGetTestState(codeOrState).state : codeOrState; + const marker = state.getMarkerByName(markerName); + + const sourceFile = state.program.getBoundSourceFile(marker.fileUri); + assert(sourceFile); + + const parserResults = sourceFile.getParseResults(); + assert(parserResults); + + const node = findNodeByOffset(parserResults.parserOutput.parseTree, marker.position); + assert(node); + + return node; +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/fourslash/testStateUtils.ts b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/testStateUtils.ts new file mode 100644 index 00000000..88555858 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/testStateUtils.ts @@ -0,0 +1,85 @@ +/* + * testStateUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Various test utility functions for TestState. + */ + +import assert from 'assert'; +import * as JSONC from 'jsonc-parser'; + +import { Comparison, toBoolean } from '../../../common/core'; +import { combinePaths, getBaseFileName } from '../../../common/pathUtils'; +import { getStringComparer } from '../../../common/stringUtils'; +import * as vfs from '../vfs/filesystem'; +import { FourSlashData, FourSlashFile, GlobalMetadataOptionNames, Marker, MetadataOptionNames } from './fourSlashTypes'; +import { configFileName } from '../../../common/pathConsts'; + +export function createVfsInfoFromFourSlashData(projectRoot: string, testData: FourSlashData) { + const metaProjectRoot = testData.globalOptions[GlobalMetadataOptionNames.projectRoot]; + projectRoot = metaProjectRoot ? 
combinePaths(projectRoot, metaProjectRoot) : projectRoot; + + const ignoreCase = toBoolean(testData.globalOptions[GlobalMetadataOptionNames.ignoreCase]); + + let rawConfigJson = ''; + const sourceFileNames: string[] = []; + const files: vfs.FileSet = {}; + + for (const file of testData.files) { + // if one of file is configuration file, set config options from the given json + if (isConfig(file, ignoreCase)) { + try { + rawConfigJson = JSONC.parse(file.content); + } catch (e: any) { + throw new Error(`Failed to parse test ${file.fileName}: ${e.message}`); + } + } else { + files[file.fileName] = new vfs.File(file.content, { meta: file.fileOptions, encoding: 'utf8' }); + + if (!toBoolean(file.fileOptions[MetadataOptionNames.library])) { + sourceFileNames.push(file.fileName); + } + } + } + return { files, sourceFileNames, projectRoot, ignoreCase, rawConfigJson }; +} + +export function getMarkerName(testData: FourSlashData, markerToFind: Marker) { + let found: string | undefined; + testData.markerPositions.forEach((marker, name) => { + if (marker === markerToFind) { + found = name; + } + }); + + assert.ok(found); + return found!; +} + +export function getMarkerByName(testData: FourSlashData, markerName: string) { + const markerPos = testData.markerPositions.get(markerName); + if (markerPos === undefined) { + throw new Error( + `Unknown marker "${markerName}" Available markers: ${getMarkerNames(testData) + .map((m) => '"' + m + '"') + .join(', ')}` + ); + } else { + return markerPos; + } +} + +export function getMarkerNames(testData: FourSlashData): string[] { + return [...testData.markerPositions.keys()]; +} + +export function getRangeByMarkerName(testData: FourSlashData, markerName: string) { + const marker = getMarkerByName(testData, markerName); + return testData.ranges.find((r) => r.marker === marker); +} + +function isConfig(file: FourSlashFile, ignoreCase: boolean): boolean { + const comparer = getStringComparer(ignoreCase); + return 
comparer(getBaseFileName(file.fileName), configFileName) === Comparison.EqualTo; +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/fourslash/workspaceEditTestUtils.ts b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/workspaceEditTestUtils.ts new file mode 100644 index 00000000..1cfdc5cc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/fourslash/workspaceEditTestUtils.ts @@ -0,0 +1,191 @@ +/* + * workspaceEditTestUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Test Utils around workspace edits. + */ + +import assert from 'assert'; +import { + AnnotatedTextEdit, + ChangeAnnotation, + CreateFile, + DeleteFile, + OptionalVersionedTextDocumentIdentifier, + RenameFile, + TextDocumentEdit, + TextEdit, + WorkspaceEdit, +} from 'vscode-languageserver'; + +import * as debug from '../../../common/debug'; +import { rangesAreEqual } from '../../../common/textRange'; + +export function verifyWorkspaceEdit(expected: WorkspaceEdit, actual: WorkspaceEdit, marker?: string) { + if (actual.changes) { + verifyTextEditMap(expected.changes!, actual.changes, marker); + } else { + assert(!expected.changes); + } + + if (actual.documentChanges) { + verifyDocumentEdits(expected.documentChanges!, actual.documentChanges); + } else { + assert(!expected.documentChanges); + } + + if (actual.changeAnnotations) { + verifyChangeAnnotations(expected.changeAnnotations!, actual.changeAnnotations); + } else { + assert(!expected.changeAnnotations); + } +} + +export function verifyChangeAnnotations( + expected: { [id: string]: ChangeAnnotation }, + actual: { [id: string]: ChangeAnnotation } +) { + assert.strictEqual(Object.entries(expected).length, Object.entries(actual).length); + + for (const key of Object.keys(expected)) { + const expectedAnnotation = expected[key]; + const actualAnnotation = actual[key]; + + // We need to improve it to test localized strings. 
+ assert.strictEqual(expectedAnnotation.label, actualAnnotation.label); + assert.strictEqual(expectedAnnotation.description, actualAnnotation.description); + + assert.strictEqual(expectedAnnotation.needsConfirmation, actualAnnotation.needsConfirmation); + } +} + +export function textDocumentAreSame( + expected: OptionalVersionedTextDocumentIdentifier, + actual: OptionalVersionedTextDocumentIdentifier +) { + return expected.version === actual.version && expected.uri === actual.uri; +} + +export function verifyDocumentEdits( + expected: (TextDocumentEdit | CreateFile | RenameFile | DeleteFile)[], + actual: (TextDocumentEdit | CreateFile | RenameFile | DeleteFile)[] +) { + assert.strictEqual(expected.length, actual.length); + + for (const op of expected) { + assert( + actual.some((a) => { + const expectedKind = TextDocumentEdit.is(op) ? 'edit' : op.kind; + const actualKind = TextDocumentEdit.is(a) ? 'edit' : a.kind; + if (expectedKind !== actualKind) { + return false; + } + + switch (expectedKind) { + case 'edit': { + const expectedEdit = op as TextDocumentEdit; + const actualEdit = a as TextDocumentEdit; + + if (!textDocumentAreSame(expectedEdit.textDocument, actualEdit.textDocument)) { + return false; + } + + if (!actualEdit.textDocument.uri.includes(':')) { + // Not returning a URI, so fail. 
+ return false; + } + + return textEditsAreSame( + expectedEdit.edits.filter((e) => TextEdit.is(e)) as TextEdit[], + actualEdit.edits.filter((e) => TextEdit.is(e)) as TextEdit[] + ); + } + case 'create': { + const expectedOp = op as CreateFile; + const actualOp = a as CreateFile; + return ( + expectedOp.kind === actualOp.kind && + expectedOp.annotationId === actualOp.annotationId && + expectedOp.uri === actualOp.uri && + expectedOp.options?.ignoreIfExists === actualOp.options?.ignoreIfExists && + expectedOp.options?.overwrite === actualOp.options?.overwrite + ); + } + case 'rename': { + const expectedOp = op as RenameFile; + const actualOp = a as RenameFile; + return ( + expectedOp.kind === actualOp.kind && + expectedOp.annotationId === actualOp.annotationId && + expectedOp.oldUri === actualOp.oldUri && + expectedOp.newUri === actualOp.newUri && + expectedOp.options?.ignoreIfExists === actualOp.options?.ignoreIfExists && + expectedOp.options?.overwrite === actualOp.options?.overwrite + ); + } + case 'delete': { + const expectedOp = op as DeleteFile; + const actualOp = a as DeleteFile; + return ( + expectedOp.annotationId === actualOp.annotationId && + expectedOp.kind === actualOp.kind && + expectedOp.uri === actualOp.uri && + expectedOp.options?.ignoreIfNotExists === actualOp.options?.ignoreIfNotExists && + expectedOp.options?.recursive === actualOp.options?.recursive + ); + } + default: + debug.assertNever(expectedKind); + } + }) + ); + } +} + +export function verifyTextEditMap( + expected: { [uri: string]: TextEdit[] }, + actual: { [uri: string]: TextEdit[] }, + marker?: string +) { + assert.strictEqual( + Object.entries(expected).length, + Object.entries(actual).length, + marker === undefined ? '' : `${marker} has failed` + ); + + for (const key of Object.keys(expected)) { + assert(textEditsAreSame(expected[key], actual[key]), marker === undefined ? 
'' : `${marker} has failed`); + } +} + +export function textEditsAreSame( + expectedEdits: (TextEdit | AnnotatedTextEdit)[], + actualEdits: (TextEdit | AnnotatedTextEdit)[] +) { + if (expectedEdits.length !== actualEdits.length) { + return false; + } + + for (const edit of expectedEdits) { + if (!actualEdits.some((a) => textEditAreSame(edit, a))) { + return false; + } + } + + return true; +} + +export function textEditAreSame(expected: TextEdit, actual: TextEdit) { + if (!rangesAreEqual(expected.range, actual.range)) { + return false; + } + + if (expected.newText !== actual.newText) { + return false; + } + + const expectedAnnotation = AnnotatedTextEdit.is(expected) ? expected.annotationId : ''; + const actualAnnotation = AnnotatedTextEdit.is(actual) ? actual.annotationId : ''; + return expectedAnnotation === actualAnnotation; +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/testAccessHost.ts b/python-parser/packages/pyright-internal/src/tests/harness/testAccessHost.ts new file mode 100644 index 00000000..ec4bfff5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/testAccessHost.ts @@ -0,0 +1,25 @@ +/* + * testAccessHost.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * NoAccessHost variation for test environment + */ + +import { ImportLogger } from '../../analyzer/importLogger'; +import { PythonPathResult } from '../../analyzer/pythonPathUtils'; +import { NoAccessHost } from '../../common/host'; +import { Uri } from '../../common/uri/uri'; + +export class TestAccessHost extends NoAccessHost { + constructor(private _modulePath = Uri.empty(), private _searchPaths: Uri[] = []) { + super(); + } + + override getPythonSearchPaths(pythonPath?: Uri, importLogger?: ImportLogger): PythonPathResult { + return { + paths: this._searchPaths, + prefix: this._modulePath, + }; + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/testHost.ts b/python-parser/packages/pyright-internal/src/tests/harness/testHost.ts new file mode 100644 index 00000000..863d3d22 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/testHost.ts @@ -0,0 +1,195 @@ +/* + * io.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ */ + +import * as os from 'os'; +import * as pathModule from 'path'; + +import { NullConsole } from '../../common/console'; +import { combinePaths, FileSystemEntries, resolvePaths } from '../../common/pathUtils'; +import { createFromRealFileSystem } from '../../common/realFileSystem'; +import { compareStringsCaseInsensitive, compareStringsCaseSensitive } from '../../common/stringUtils'; +import { directoryExists, fileExists, getFileSize, UriEx } from '../../common/uri/uriUtils'; +import { FileUriSchema } from '../../common/uri/fileUri'; +import { Uri } from '../../common/uri/uri'; +import { CaseSensitivityDetector } from '../../common/caseSensitivityDetector'; + +export class TestCaseSensitivityDetector implements CaseSensitivityDetector { + constructor(private _isCaseSensitive = true) { + // Empty + } + + setCaseSensitivity(value: boolean) { + this._isCaseSensitive = value; + } + + isCaseSensitive(uri: string): boolean { + if (uri.startsWith(FileUriSchema)) { + return this._isCaseSensitive; + } + + return false; + } +} + +export const HOST: TestHost = createHost(); + +export interface TestHost { + useCaseSensitiveFileNames(): boolean; + getAccessibleFileSystemEntries(dirname: string): FileSystemEntries; + directoryExists(path: string): boolean; + fileExists(fileName: string): boolean; + getFileSize(path: string): number; + readFile(path: string): string | undefined; + getWorkspaceRoot(): string; + + writeFile(path: string, contents: string): void; + listFiles( + path: string, + filter?: RegExp, + options?: { + recursive?: boolean; + } + ): string[]; + log(text: string): void; +} + +function createHost(): TestHost { + // NodeJS detects "\uFEFF" at the start of the string and *replaces* it with the actual + // byte order mark from the specified encoding. Using any other byte order mark does + // not actually work. 
+ const byteOrderMarkIndicator = '\uFEFF'; + + const caseDetector = new TestCaseSensitivityDetector(); + const vfs = createFromRealFileSystem(caseDetector, new NullConsole()); + + const useCaseSensitiveFileNames = isFileSystemCaseSensitive(); + caseDetector.setCaseSensitivity(useCaseSensitiveFileNames); + + function isFileSystemCaseSensitive(): boolean { + // win32\win64 are case insensitive platforms + const platform = os.platform(); + if (platform === 'win32') { + return false; + } + // If this file exists under a different case, we must be case-insensitve. + return !vfs.existsSync(UriEx.file(swapCase(__filename))); + + /** Convert all lowercase chars to uppercase, and vice-versa */ + function swapCase(s: string): string { + return s.replace(/\w/g, (ch) => { + const up = ch.toUpperCase(); + return ch === up ? ch.toLowerCase() : up; + }); + } + } + + function listFiles(path: string, spec: RegExp, options: { recursive?: boolean } = {}) { + function filesInFolder(folder: string): string[] { + let paths: string[] = []; + + for (const file of vfs.readdirSync(Uri.file(folder, caseDetector))) { + const pathToFile = pathModule.join(folder, file); + const stat = vfs.statSync(Uri.file(pathToFile, caseDetector)); + if (options.recursive && stat.isDirectory()) { + paths = paths.concat(filesInFolder(pathToFile)); + } else if (stat.isFile() && (!spec || file.match(spec))) { + paths.push(pathToFile); + } + } + + return paths; + } + + return filesInFolder(path); + } + + function getAccessibleFileSystemEntries(dirname: string): FileSystemEntries { + try { + const entries: string[] = vfs + .readdirSync(Uri.file(dirname || '.', caseDetector)) + .sort(useCaseSensitiveFileNames ? compareStringsCaseSensitive : compareStringsCaseInsensitive); + const files: string[] = []; + const directories: string[] = []; + for (const entry of entries) { + if (entry === '.' 
|| entry === '..') { + continue; + } + const name = combinePaths(dirname, entry); + try { + const stat = vfs.statSync(Uri.file(name, caseDetector)); + if (!stat) { + continue; + } + if (stat.isFile()) { + files.push(entry); + } else if (stat.isDirectory()) { + directories.push(entry); + } + } catch { + /* ignore */ + } + } + return { files, directories }; + } catch (e: any) { + return { files: [], directories: [] }; + } + } + + function readFile(fileName: string, _encoding?: string): string | undefined { + if (!fileExists(vfs, Uri.file(fileName, caseDetector))) { + return undefined; + } + const buffer = vfs.readFileSync(Uri.file(fileName, caseDetector)); + let len = buffer.length; + if (len >= 2 && buffer[0] === 0xfe && buffer[1] === 0xff) { + // Big endian UTF-16 byte order mark detected. Since big endian is not supported by node.js, + // flip all byte pairs and treat as little endian. + len &= ~1; // Round down to a multiple of 2 + for (let i = 0; i < len; i += 2) { + const temp = buffer[i]; + buffer[i] = buffer[i + 1]; + buffer[i + 1] = temp; + } + return buffer.toString('utf16le', 2); + } + if (len >= 2 && buffer[0] === 0xff && buffer[1] === 0xfe) { + // Little endian UTF-16 byte order mark detected + return buffer.toString('utf16le', 2); + } + if (len >= 3 && buffer[0] === 0xef && buffer[1] === 0xbb && buffer[2] === 0xbf) { + // UTF-8 byte order mark detected + return buffer.toString('utf8', 3); + } + // Default is UTF-8 with no byte order mark + return buffer.toString('utf8'); + } + + function writeFile(fileName: string, data: string, writeByteOrderMark?: boolean): void { + // If a BOM is required, emit one + if (writeByteOrderMark) { + data = byteOrderMarkIndicator + data; + } + + vfs.writeFileSync(Uri.file(fileName, caseDetector), data, 'utf8'); + } + + return { + useCaseSensitiveFileNames: () => useCaseSensitiveFileNames, + getFileSize: (path: string) => getFileSize(vfs, Uri.file(path, caseDetector)), + readFile: (path) => readFile(path), + writeFile: 
(path, content) => { + writeFile(path, content); + }, + fileExists: (path) => fileExists(vfs, Uri.file(path, caseDetector)), + directoryExists: (path) => directoryExists(vfs, Uri.file(path, caseDetector)), + listFiles, + log: (s) => { + console.log(s); + }, + getWorkspaceRoot: () => resolvePaths(__dirname, '../../..'), + getAccessibleFileSystemEntries, + }; +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/utils.ts b/python-parser/packages/pyright-internal/src/tests/harness/utils.ts new file mode 100644 index 00000000..11c76c2a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/utils.ts @@ -0,0 +1,355 @@ +/* + * utils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + */ + +import { binarySearch, insertAt } from '../../common/collectionUtils'; +import { identity } from '../../common/core'; + +export interface SortOptions<T> { + comparer: (a: T, b: T) => number; + sort: 'insertion' | 'comparison'; +} + +export class SortedMap<K, V> { + private _comparer: (a: K, b: K) => number; + private _keys: K[] = []; + private _values: V[] = []; + private _order: number[] | undefined; + private _version = 0; + private _copyOnWrite = false; + + constructor(comparer: ((a: K, b: K) => number) | SortOptions<K>, iterable?: Iterable<[K, V]>) { + this._comparer = typeof comparer === 'object' ? comparer.comparer : comparer; + this._order = typeof comparer === 'object' && comparer.sort === 'insertion' ? 
[] : undefined; + if (iterable) { + const iterator = getIterator(iterable); + try { + for (let i = nextResult(iterator); i; i = nextResult(iterator)) { + const [key, value] = i.value; + this.set(key, value); + } + } finally { + closeIterator(iterator); + } + } + } + + get size() { + return this._keys.length; + } + + get comparer() { + return this._comparer; + } + + get [Symbol.toStringTag]() { + return 'SortedMap'; + } + + has(key: K) { + return binarySearch(this._keys, key, identity, this._comparer) >= 0; + } + + get(key: K) { + const index = binarySearch(this._keys, key, identity, this._comparer); + return index >= 0 ? this._values[index] : undefined; + } + + set(key: K, value: V) { + const index = binarySearch(this._keys, key, identity, this._comparer); + if (index >= 0) { + this._values[index] = value; + } else { + this._writePreamble(); + insertAt(this._keys, ~index, key); + insertAt(this._values, ~index, value); + if (this._order) { + insertAt(this._order, ~index, this._version); + } + this._writePostScript(); + } + return this; + } + + delete(key: K) { + const index = binarySearch(this._keys, key, identity, this._comparer); + if (index >= 0) { + this._writePreamble(); + this._orderedRemoveItemAt(this._keys, index); + this._orderedRemoveItemAt(this._values, index); + if (this._order) { + this._orderedRemoveItemAt(this._order, index); + } + this._writePostScript(); + return true; + } + return false; + } + + clear() { + if (this.size > 0) { + this._writePreamble(); + this._keys.length = 0; + this._values.length = 0; + if (this._order) { + this._order.length = 0; + } + this._writePostScript(); + } + } + + forEach(callback: (value: V, key: K, collection: this) => void, thisArg?: any) { + const keys = this._keys; + const values = this._values; + const indices = this._getIterationOrder(); + const version = this._version; + this._copyOnWrite = true; + try { + if (indices) { + for (const i of indices) { + callback.call(thisArg, values[i], keys[i], this); + } + } else 
{ + for (let i = 0; i < keys.length; i++) { + callback.call(thisArg, values[i], keys[i], this); + } + } + } finally { + if (version === this._version) { + this._copyOnWrite = false; + } + } + } + + *keys() { + const keys = this._keys; + const indices = this._getIterationOrder(); + const version = this._version; + this._copyOnWrite = true; + try { + if (indices) { + for (const i of indices) { + yield keys[i]; + } + } else { + yield* keys; + } + } finally { + if (version === this._version) { + this._copyOnWrite = false; + } + } + } + + *values() { + const values = this._values; + const indices = this._getIterationOrder(); + const version = this._version; + this._copyOnWrite = true; + try { + if (indices) { + for (const i of indices) { + yield values[i]; + } + } else { + yield* values; + } + } finally { + if (version === this._version) { + this._copyOnWrite = false; + } + } + } + + *entries() { + const keys = this._keys; + const values = this._values; + const indices = this._getIterationOrder(); + const version = this._version; + this._copyOnWrite = true; + try { + if (indices) { + for (const i of indices) { + yield [keys[i], values[i]] as [K, V]; + } + } else { + for (let i = 0; i < keys.length; i++) { + yield [keys[i], values[i]] as [K, V]; + } + } + } finally { + if (version === this._version) { + this._copyOnWrite = false; + } + } + } + + [Symbol.iterator]() { + return this.entries(); + } + + private _writePreamble() { + if (this._copyOnWrite) { + this._keys = this._keys.slice(); + this._values = this._values.slice(); + if (this._order) { + this._order = this._order.slice(); + } + this._copyOnWrite = false; + } + } + + private _writePostScript() { + this._version++; + } + + private _getIterationOrder() { + if (this._order) { + const order = this._order; + return this._order.map((_, i) => i).sort((x, y) => order[x] - order[y]); + } + return undefined; + } + + /** Remove an item by index from an array, moving everything to its right one space left. 
*/ + private _orderedRemoveItemAt<T>(array: T[], index: number): void { + // This seems to be faster than either `array.splice(i, 1)` or `array.copyWithin(i, i+ 1)`. + for (let i = index; i < array.length - 1; i++) { + array[i] = array[i + 1]; + } + array.pop(); + } +} + +export function getIterator<T>(iterable: Iterable<T>): Iterator<T> { + return iterable[Symbol.iterator](); +} + +export function nextResult<T>(iterator: Iterator<T>): IteratorResult<T> | undefined { + const result = iterator.next(); + return result.done ? undefined : result; +} + +export function closeIterator<T>(iterator: Iterator<T>) { + const fn = iterator.return; + if (typeof fn === 'function') { + fn.call(iterator); + } +} + +/** + * A collection of metadata that supports inheritance. + */ +export class Metadata { + private static readonly _undefinedValue = {}; + private _parent: Metadata | undefined; + private _map: { [key: string]: any }; + private _version = 0; + private _size = -1; + private _parentVersion: number | undefined; + + constructor(parent?: Metadata) { + this._parent = parent; + this._map = Object.create(parent ? parent._map : null); + } + + get size(): number { + if (this._size === -1 || (this._parent && this._parent._version !== this._parentVersion)) { + this._size = Object.keys(this._map).length; + if (this._parent) { + this._parentVersion = this._parent._version; + } + } + return this._size; + } + + get parent() { + return this._parent; + } + + has(key: string): boolean { + return this._map[Metadata._escapeKey(key)] !== undefined; + } + + get(key: string): any { + const value = this._map[Metadata._escapeKey(key)]; + return value === Metadata._undefinedValue ? undefined : value; + } + + set(key: string, value: any): this { + this._map[Metadata._escapeKey(key)] = value === undefined ? 
Metadata._undefinedValue : value; + this._size = -1; + this._version++; + return this; + } + + delete(key: string): boolean { + const escapedKey = Metadata._escapeKey(key); + if (this._map[escapedKey] !== undefined) { + delete this._map[escapedKey]; + this._size = -1; + this._version++; + return true; + } + return false; + } + + clear(): void { + this._map = Object.create(this._parent ? this._parent._map : null); + this._size = -1; + this._version++; + } + + forEach(callback: (value: any, key: string, map: this) => void) { + for (const key of Object.keys(this._map)) { + callback(this._map[key], Metadata._unescapeKey(key), this); + } + } + + private static _escapeKey(text: string) { + return text.length >= 2 && text.charAt(0) === '_' && text.charAt(1) === '_' ? '_' + text : text; + } + + private static _unescapeKey(text: string) { + return text.length >= 3 && text.charAt(0) === '_' && text.charAt(1) === '_' && text.charAt(2) === '_' + ? text.slice(1) + : text; + } +} + +export function bufferFrom(input: string, encoding?: BufferEncoding): Buffer { + // See https://github.com/Microsoft/TypeScript/issues/25652 + return Buffer.from && (Buffer.from as Function) !== Int8Array.from + ? 
Buffer.from(input, encoding) + : new Buffer(input, encoding); +} + +export const IO_ERROR_MESSAGE = Object.freeze({ + EACCES: 'access denied', + EIO: 'an I/O error occurred', + ENOENT: 'no such file or directory', + EEXIST: 'file already exists', + ELOOP: 'too many symbolic links encountered', + ENOTDIR: 'no such directory', + EISDIR: 'path is a directory', + EBADF: 'invalid file descriptor', + EINVAL: 'invalid value', + ENOTEMPTY: 'directory not empty', + EPERM: 'operation not permitted', + EROFS: 'file system is read-only', +}); + +export function createIOError(code: keyof typeof IO_ERROR_MESSAGE, details = '') { + const err: NodeJS.ErrnoException = new Error(`${code}: ${IO_ERROR_MESSAGE[code]} ${details}`); + err.code = code; + if (Error.captureStackTrace) { + Error.captureStackTrace(err, createIOError); + } + return err; +} + +export function stringify(data: any, replacer?: (key: string, value: any) => any): string { + return JSON.stringify(data, replacer, 2); +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/vfs/factory.ts b/python-parser/packages/pyright-internal/src/tests/harness/vfs/factory.ts new file mode 100644 index 00000000..6f019402 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/vfs/factory.ts @@ -0,0 +1,208 @@ +/* + * factory.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Provides a factory to create virtual file system backed by a real file system with some path remapped + */ + +import * as pathConsts from '../../../common/pathConsts'; +import { combinePaths, getDirectoryPath, normalizeSlashes, resolvePaths } from '../../../common/pathUtils'; +import { UriEx } from '../../../common/uri/uriUtils'; +import { GlobalMetadataOptionNames } from '../fourslash/fourSlashTypes'; +import { TestHost } from '../testHost'; +import { bufferFrom } from '../utils'; +import { + FileSet, + FileSystemOptions, + FileSystemResolver, + MODULE_PATH, + Mount, + S_IFDIR, + S_IFREG, + TestFileSystem, +} from './filesystem'; + +export class TextDocument { + readonly meta: Map<string, string>; + readonly file: string; + readonly text: string; + + constructor(file: string, text: string, meta?: Map<string, string>) { + this.file = file; + this.text = text; + this.meta = meta || new Map<string, string>(); + } +} + +export interface FileSystemCreateOptions extends FileSystemOptions { + // Sets the documents to add to the file system. + documents?: readonly TextDocument[]; +} + +// Make sure all paths are lower case since `isCaseSensitive` is hard coded as `true` +export const libFolder = UriEx.file( + combinePaths(MODULE_PATH, normalizeSlashes(combinePaths(pathConsts.lib, pathConsts.sitePackages))) +); +export const distlibFolder = UriEx.file( + combinePaths(MODULE_PATH, normalizeSlashes(combinePaths(pathConsts.lib, pathConsts.distPackages))) +); +export const typeshedFolder = UriEx.file(combinePaths(MODULE_PATH, normalizeSlashes(pathConsts.typeshedFallback))); +export const srcFolder = normalizeSlashes('/.src'); + +/** + * Create a virtual file system from a physical file system using the following path mappings: + * + * - `/typeshed-fallback` is a directory mapped to `packages/pyright-internal/typeshed-fallback` + * - `/.src` is a virtual directory to be used for tests. 
+ * + * @param host it provides an access to host (real) file system + * @param ignoreCase indicates whether we should ignore casing on this file system or not + * @param documents initial documents to create in this virtual file system + * @param files initial files to create in this virtual file system + * @param cwd initial current working directory in this virtual file system + * @param time initial time in this virtual file system + * @param meta initial metadata in this virtual file system + * + * all `FileSystemCreateOptions` are optional + */ +export function createFromFileSystem( + host: TestHost, + ignoreCase: boolean, + { documents, files, cwd, time, meta }: FileSystemCreateOptions = {}, + mountPaths: Map<string, string> = new Map<string, string>() +) { + const typeshedPath = meta ? meta[GlobalMetadataOptionNames.typeshed] : undefined; + if (typeshedPath) { + mountPaths.set(typeshedFolder.getFilePath(), typeshedPath); + } + + const fs = getBuiltLocal(host, ignoreCase, cwd, mountPaths).shadow(); + if (meta) { + for (const key of Object.keys(meta)) { + fs.meta.set(key, meta[key]); + } + } + if (time) { + fs.time(time); + } + if (cwd) { + fs.mkdirpSync(cwd); + fs.chdir(UriEx.file(cwd, !ignoreCase)); + } + if (documents) { + for (const document of documents) { + fs.mkdirpSync(getDirectoryPath(document.file)); + fs.writeFileSync(UriEx.file(document.file, !ignoreCase), document.text, 'utf8'); + fs.filemeta(document.file).set('document', document); + // Add symlinks + const symlink = document.meta.get('symlink'); + if (symlink) { + for (const link of symlink.split(',').map((link) => link.trim())) { + fs.mkdirpSync(getDirectoryPath(link)); + fs.symlinkSync(resolvePaths(fs.cwd(), document.file), link); + } + } + } + } + if (files) { + fs.apply(files); + } + return fs; +} + +let cacheKey: { host: TestHost; mountPaths: Map<string, string> } | undefined; +let localCIFSCache: TestFileSystem | undefined; +let localCSFSCache: TestFileSystem | undefined; + +export 
function clearCache() { + cacheKey = undefined; + localCIFSCache = undefined; + localCSFSCache = undefined; +} + +function getBuiltLocal( + host: TestHost, + ignoreCase: boolean, + cwd: string | undefined, + mountPaths: Map<string, string> +): TestFileSystem { + // Ensure typeshed folder + if (!mountPaths.has(typeshedFolder.getFilePath())) { + mountPaths.set( + typeshedFolder.getFilePath(), + resolvePaths(host.getWorkspaceRoot(), pathConsts.typeshedFallback) + ); + } + + if (!canReuseCache(host, mountPaths)) { + localCIFSCache = undefined; + localCSFSCache = undefined; + cacheKey = { host, mountPaths }; + } + + if (!localCIFSCache) { + const resolver = createResolver(host); + const files: FileSet = {}; + mountPaths.forEach((v, k) => (files[k] = new Mount(v, resolver))); + + localCIFSCache = new TestFileSystem(/* ignoreCase */ true, { + files, + cwd, + meta: {}, + }); + localCIFSCache.makeReadonly(); + } + + if (ignoreCase) { + return localCIFSCache; + } + + if (!localCSFSCache) { + localCSFSCache = localCIFSCache.shadow(/* ignoreCase */ false); + localCSFSCache.makeReadonly(); + } + + return localCSFSCache; +} + +function canReuseCache(host: TestHost, mountPaths: Map<string, string>): boolean { + if (cacheKey === undefined) { + return false; + } + if (cacheKey.host !== host) { + return false; + } + if (cacheKey.mountPaths.size !== mountPaths.size) { + return false; + } + + for (const key of cacheKey.mountPaths.keys()) { + if (cacheKey.mountPaths.get(key) !== mountPaths.get(key)) { + return false; + } + } + + return true; +} + +function createResolver(host: TestHost): FileSystemResolver { + return { + readdirSync(path: string): string[] { + const { files, directories } = host.getAccessibleFileSystemEntries(path); + return directories.concat(files); + }, + statSync(path: string): { mode: number; size: number } { + if (host.directoryExists(path)) { + return { mode: S_IFDIR | 0o777, size: 0 }; + } else if (host.fileExists(path)) { + return { mode: S_IFREG | 0o666, 
size: host.getFileSize(path) }; + } else { + throw new Error('ENOENT: path does not exist'); + } + }, + readFileSync(path: string): Buffer { + return bufferFrom(host.readFile(path)!, 'utf8'); + }, + }; +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/vfs/filesystem.ts b/python-parser/packages/pyright-internal/src/tests/harness/vfs/filesystem.ts new file mode 100644 index 00000000..7a0fd972 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/vfs/filesystem.ts @@ -0,0 +1,1983 @@ +/* + * filesystem.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * virtual file system implementation + */ + +/* eslint-disable no-dupe-class-members */ +import { Dirent, ReadStream, WriteStream } from 'fs'; + +import { CaseSensitivityDetector } from '../../../common/caseSensitivityDetector'; +import { FileSystem, MkDirOptions, TempFile, TmpfileOptions } from '../../../common/fileSystem'; +import { FileWatcher, FileWatcherEventHandler, FileWatcherEventType } from '../../../common/fileWatcher'; +import * as pathUtil from '../../../common/pathUtils'; +import { compareStringsCaseInsensitive, compareStringsCaseSensitive } from '../../../common/stringUtils'; +import { FileUriSchema } from '../../../common/uri/fileUri'; +import { Uri } from '../../../common/uri/uri'; +import { bufferFrom, createIOError } from '../utils'; +import { Metadata, SortedMap, closeIterator, getIterator, nextResult } from './../utils'; +import { ValidationFlags, validate } from './pathValidation'; +import { Disposable } from 'vscode-jsonrpc'; + +export const MODULE_PATH = pathUtil.normalizeSlashes('/'); + +let devCount = 0; // A monotonically increasing count of device ids +let inoCount = 0; // A monotonically increasing count of inodes + +export interface DiffOptions { + includeChangedFileWithSameContent?: boolean; +} + +export class TestFileSystemWatcher implements FileWatcher { + constructor(readonly paths: Uri[], private 
_listener: FileWatcherEventHandler) {} + close() { + // Do nothing. + } + + fireFileChange(path: Uri, eventType: FileWatcherEventType): boolean { + if (this.paths.some((p) => path.startsWith(p))) { + this._listener(eventType, path.getFilePath()); + } + return false; + } +} + +/** + * Represents a virtual POSIX-like file system. + */ +export class TestFileSystem implements FileSystem, TempFile, CaseSensitivityDetector { + /** Indicates whether the file system is case-sensitive (`false`) or case-insensitive (`true`). */ + readonly ignoreCase: boolean; + + /** Gets the comparison function used to compare two paths. */ + readonly stringComparer: (a: string, b: string) => number; + + // lazy-initialized state that should be mutable even if the FileSystem is frozen. + private _lazy: { + links?: SortedMap<string, Inode>; + shadows?: Map<number, Inode>; + meta?: Metadata; + } = {}; + + private _cwd: string; // current working directory + private _time: number | Date | (() => number | Date); + private _shadowRoot: TestFileSystem | undefined; + private _dirStack: string[] | undefined; + private _tmpfileCounter = 0; + private _watchers: TestFileSystemWatcher[] = []; + private _id: number; + private static _nextId = 1; + + constructor(ignoreCase: boolean, options: FileSystemOptions = {}) { + this._id = TestFileSystem._nextId++; + const { time = -1, files, meta } = options; + this.ignoreCase = ignoreCase; + this.stringComparer = this.ignoreCase ? compareStringsCaseInsensitive : compareStringsCaseSensitive; + this._time = time; + + if (meta) { + for (const key of Object.keys(meta)) { + this.meta.set(key, meta[key]); + } + } + + if (files) { + this._applyFiles(files, /* dirname */ ''); + } + + let cwd = options.cwd; + if ((!cwd || !pathUtil.isDiskPathRoot(cwd)) && this._lazy.links) { + const iterator = getIterator(this._lazy.links.keys()); + try { + for (let i = nextResult(iterator); i; i = nextResult(iterator)) { + const name = i.value; + cwd = cwd ? 
pathUtil.resolvePaths(name, cwd) : name; + break; + } + } finally { + closeIterator(iterator); + } + } + + if (cwd) { + validate(cwd, ValidationFlags.Absolute); + this.mkdirpSync(cwd); + } + + this._cwd = cwd || ''; + } + + /** + * Gets metadata for this `FileSystem`. + */ + get meta(): Metadata { + if (!this._lazy.meta) { + this._lazy.meta = new Metadata(this._shadowRoot ? this._shadowRoot.meta : undefined); + } + return this._lazy.meta; + } + + /** + * Gets a value indicating whether the file system is read-only. + */ + get isReadonly() { + return Object.isFrozen(this); + } + + /** + * Gets the file system shadowed by this file system. + */ + get shadowRoot() { + return this._shadowRoot; + } + + get fileWatchers() { + return this._watchers; + } + + /** + * Makes the file system read-only. + */ + makeReadonly() { + Object.freeze(this); + return this; + } + + /** + * Snapshots the current file system, effectively shadowing itself. This is useful for + * generating file system patches using `.diff()` from one snapshot to the next. Performs + * no action if this file system is read-only. + */ + snapshot() { + if (this.isReadonly) { + return; + } + const fs = new TestFileSystem(this.ignoreCase, { time: this._time }); + fs._lazy = this._lazy; + fs._cwd = this._cwd; + fs._time = this._time; + fs._shadowRoot = this._shadowRoot; + fs._dirStack = this._dirStack; + fs.makeReadonly(); + this._lazy = {}; + this._shadowRoot = fs; + } + + /** + * Gets a shadow copy of this file system. Changes to the shadow copy do not affect the + * original, allowing multiple copies of the same core file system without multiple copies + * of the same data. 
+ */ + shadow(ignoreCase = this.ignoreCase) { + if (!this.isReadonly) { + throw new Error('Cannot shadow a mutable file system.'); + } + if (ignoreCase && !this.ignoreCase) { + throw new Error('Cannot create a case-insensitive file system from a case-sensitive one.'); + } + const fs = new TestFileSystem(ignoreCase, { time: this._time }); + fs._shadowRoot = this; + fs._cwd = this._cwd; + return fs; + } + + /** + * Gets or sets the timestamp (in milliseconds) used for file status, returning the previous timestamp. + * + * @link http://pubs.opengroup.org/onlinepubs/9699919799/functions/time.html + */ + time(value?: number | Date | (() => number | Date)): number { + if (value !== undefined && this.isReadonly) { + throw createIOError('EPERM'); + } + let result = this._time; + if (typeof result === 'function') { + result = result(); + } + if (typeof result === 'object') { + result = result.getTime(); + } + if (result === -1) { + result = Date.now(); + } + if (value !== undefined) { + this._time = value; + } + return result; + } + + /** + * Gets the metadata object for a path. + * @param path + */ + filemeta(path: string): Metadata { + const { node } = this._walk(this._resolve(path)); + if (!node) { + throw createIOError('ENOENT'); + } + return this._filemeta(node); + } + + /** + * Get the pathname of the current working directory. + * + * @link - http://pubs.opengroup.org/onlinepubs/9699919799/functions/getcwd.html + */ + cwd() { + if (!this._cwd) { + throw new Error('The current working directory has not been set.'); + } + const { node } = this._walk(this._cwd); + if (!node) { + throw createIOError('ENOENT'); + } + if (!isDirectory(node)) { + throw createIOError('ENOTDIR'); + } + return this._cwd; + } + + /** + * Changes the current working directory. 
+ * + * @link http://pubs.opengroup.org/onlinepubs/9699919799/functions/chdir.html + */ + chdir(uri: Uri) { + let path = uri.getFilePath(); + if (this.isReadonly) { + throw createIOError('EPERM'); + } + path = this._resolve(path); + const { node } = this._walk(path); + if (!node) { + throw createIOError('ENOENT'); + } + if (!isDirectory(node)) { + throw createIOError('ENOTDIR'); + } + this._cwd = path; + } + + /** + * Pushes the current directory onto the directory stack and changes the current working directory to the supplied path. + */ + pushd(path?: string) { + if (this.isReadonly) { + throw createIOError('EPERM'); + } + if (path) { + path = this._resolve(path); + } + if (this._cwd) { + if (!this._dirStack) { + this._dirStack = []; + } + this._dirStack.push(this._cwd); + } + if (path && path !== this._cwd) { + this.chdir(Uri.file(path, this)); + } + } + + /** + * Pops the previous directory from the location stack and changes the current directory to that directory. + */ + popd() { + if (this.isReadonly) { + throw createIOError('EPERM'); + } + const path = this._dirStack && this._dirStack.pop(); + if (path) { + this.chdir(Uri.file(path, this)); + } + } + + /** + * Update the file system with a set of files. + */ + apply(files: FileSet) { + this._applyFiles(files, this._cwd); + } + + /** + * Scan file system entries along a path. If `path` is a symbolic link, it is dereferenced. + * @param path The path at which to start the scan. + * @param axis The axis along which to traverse. + * @param traversal The traversal scheme to use. + */ + scanSync(path: string, axis: Axis, traversal: Traversal) { + path = this._resolve(path); + const results: string[] = []; + this._scan(path, this._stat(this._walk(path)), axis, traversal, /* noFollow */ false, results); + return results; + } + + /** + * Scan file system entries along a path. + * @param path The path at which to start the scan. + * @param axis The axis along which to traverse. 
+ * @param traversal The traversal scheme to use. + */ + lscanSync(path: string, axis: Axis, traversal: Traversal) { + path = this._resolve(path); + const results: string[] = []; + this._scan( + path, + this._stat(this._walk(path, /* noFollow */ true)), + axis, + traversal, + /* noFollow */ true, + results + ); + return results; + } + + createFileSystemWatcher(paths: Uri[], listener: FileWatcherEventHandler): FileWatcher { + const watcher = new TestFileSystemWatcher(paths, listener); + this._watchers.push(watcher); + return watcher; + } + + fireFileWatcherEvent(path: string, event: FileWatcherEventType) { + const uri = Uri.file(path, this); + for (const watcher of this._watchers) { + if (watcher.fireFileChange(uri, event)) { + break; + } + } + } + + getModulePath(): Uri { + return Uri.file(MODULE_PATH, this); + } + + isCaseSensitive(uri: string) { + if (uri.startsWith(FileUriSchema)) { + return !this.ignoreCase; + } + + return true; + } + + isLocalFileSystemCaseSensitive(): boolean { + return !this.ignoreCase; + } + + tmpdir(): Uri { + this.mkdirpSync('/tmp'); + return Uri.parse('file:///tmp', this); + } + + tmpfile(options?: TmpfileOptions): Uri { + // Use an algorithm similar to tmp's. + const prefix = options?.prefix || 'tmp'; + const postfix = options?.prefix ? '-' + options.prefix : ''; + const name = `${prefix}-${this._tmpfileCounter++}${postfix}`; + const path = this.tmpdir().combinePaths(name); + this.writeFileSync(path, ''); + return path; + } + + realCasePath(path: Uri): Uri { + return path; + } + + isMappedUri(filepath: Uri): boolean { + return false; + } + + // Get original filepath if the given filepath is mapped. + getOriginalUri(mappedFilePath: Uri) { + return mappedFilePath; + } + + // Get mapped filepath if the given filepath is mapped. + getMappedUri(originalFilepath: Uri) { + return originalFilepath; + } + + /** + * Mounts a physical or virtual file system at a location in this virtual file system. 
+ * + * @param source The path in the physical (or other virtual) file system. + * @param target The path in this virtual file system. + * @param resolver An object used to resolve files in `source`. + */ + mountSync(source: string, target: string, resolver: FileSystemResolver) { + if (this.isReadonly) { + throw createIOError('EROFS'); + } + + source = validate(source, ValidationFlags.Absolute); + + const { parent, links, node: existingNode, basename } = this._walk(this._resolve(target), /* noFollow */ true); + if (existingNode) { + throw createIOError('EEXIST'); + } + + const time = this.time(); + const node = this._mknod(parent ? parent.dev : ++devCount, S_IFDIR, /* mode */ 0o777, time); + node.source = source; + node.resolver = resolver; + this._addLink(parent, links, basename, node, time); + } + + /** + * Recursively remove all files and directories underneath the provided path. + */ + rimrafSync(path: string) { + try { + const stats = this.lstatSync(path); + if (stats.isFile() || stats.isSymbolicLink()) { + this.unlinkSync(Uri.file(path, this)); + } else if (stats.isDirectory()) { + for (const file of this.readdirSync(Uri.file(path, this))) { + this.rimrafSync(pathUtil.combinePaths(path, file)); + } + this.rmdirSync(Uri.file(path, this)); + } + } catch (e: any) { + if (e.code === 'ENOENT') { + return; + } + throw e; + } + } + + /** + * Make a directory and all of its parent paths (if they don't exist). 
+ */ + mkdirpSync(path: string) { + path = this._resolve(path); + const result = this._walk(path, /* noFollow */ true, (error, result) => { + if (error.code === 'ENOENT') { + this._mkdir(result); + return 'retry'; + } + return 'throw'; + }); + + if (!result.node) { + this._mkdir(result); + } + } + + getFileListing(filter?: (p: string) => boolean): string { + let result = ''; + + const addToResult = (path: string, add: string) => { + if (!filter || filter(path)) { + result += add; + } + }; + + const printLinks = (dirname: string | undefined, links: SortedMap<string, Inode>) => { + const iterator = getIterator(links); + try { + for (let i = nextResult(iterator); i; i = nextResult(iterator)) { + const [name, node] = i.value; + const path = dirname ? pathUtil.combinePaths(dirname, name) : name; + const marker = this.stringComparer(this._cwd, path) === 0 ? '*' : ' '; + if (result) { + addToResult(path, '\n'); + } + addToResult(path, marker); + if (isDirectory(node)) { + addToResult(path, pathUtil.ensureTrailingDirectorySeparator(path)); + printLinks(path, this._getLinks(node)); + } else if (isFile(node)) { + addToResult(path, path); + } else if (isSymlink(node)) { + addToResult(path, `${path} -> ${node.symlink}`); + } + } + } finally { + closeIterator(iterator); + } + }; + printLinks(/* dirname */ undefined, this._getRootLinks()); + return result; + } + + /** + * Print diagnostic information about the structure of the file system to the console. + */ + debugPrint(filter?: (p: string) => boolean): void { + console.log(this.getFileListing(filter)); + } + + // POSIX API (aligns with NodeJS "fs" module API) + + /** + * Determines whether a path exists. + */ + existsSync(path: Uri) { + if (path.isEmpty()) { + return false; + } + const result = this._walk(this._resolve(path.getFilePath()), /* noFollow */ true, () => 'stop'); + return result !== undefined && result.node !== undefined; + } + + /** + * Get file status. If `path` is a symbolic link, it is dereferenced. 
+ * + * @link http://pubs.opengroup.org/onlinepubs/9699919799/functions/stat.html + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + statSync(path: Uri) { + return this._stat(this._walk(this._resolve(path.getFilePath()))); + } + + /** + * Change file access times + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + utimesSync(path: string, atime: Date, mtime: Date) { + if (this.isReadonly) { + throw createIOError('EROFS'); + } + if (!isFinite(+atime) || !isFinite(+mtime)) { + throw createIOError('EINVAL'); + } + + const entry = this._walk(this._resolve(path)); + if (!entry || !entry.node) { + throw createIOError('ENOENT'); + } + entry.node.atimeMs = +atime; + entry.node.mtimeMs = +mtime; + entry.node.ctimeMs = this.time(); + } + + /** + * Get file status. If `path` is a symbolic link, it is dereferenced. + * + * @link http://pubs.opengroup.org/onlinepubs/9699919799/functions/lstat.html + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + lstatSync(path: string) { + return this._stat(this._walk(this._resolve(path), /* noFollow */ true)); + } + + /** + * Read a directory. If `path` is a symbolic link, it is dereferenced. + * + * @link http://pubs.opengroup.org/onlinepubs/9699919799/functions/readdir.html + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + readdirSync(path: Uri) { + const { node } = this._walk(this._resolve(path.getFilePath())); + if (!node) { + throw createIOError('ENOENT'); + } + if (!isDirectory(node)) { + throw createIOError('ENOTDIR'); + } + return Array.from(this._getLinks(node).keys()); + } + + /** + * Read a directory. If `path` is a symbolic link, it is dereferenced. 
+ * + * @link http://pubs.opengroup.org/onlinepubs/9699919799/functions/readdir.html + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + readdirEntriesSync(path: Uri): Dirent[] { + const pathStr = this._resolve(path.getFilePath()); + const { node } = this._walk(this._resolve(pathStr)); + if (!node) { + throw createIOError('ENOENT'); + } + if (!isDirectory(node)) { + throw createIOError('ENOTDIR'); + } + const entries = Array.from(this._getLinks(node).entries()); + return entries.map(([k, v]) => makeDirEnt(k, v, pathStr)); + } + + /** + * Make a directory. + * + * @link http://pubs.opengroup.org/onlinepubs/9699919799/functions/mkdir.html + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + mkdirSync(path: Uri, options?: MkDirOptions) { + if (this.isReadonly) { + throw createIOError('EROFS'); + } + + if (options?.recursive) { + this.mkdirpSync(path.getFilePath()); + return; + } + + this._mkdir(this._walk(this._resolve(path.getFilePath()), /* noFollow */ true)); + } + + /** + * Remove a directory. + * + * @link http://pubs.opengroup.org/onlinepubs/9699919799/functions/rmdir.html + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + rmdirSync(uri: Uri) { + if (this.isReadonly) { + throw createIOError('EROFS'); + } + const path = this._resolve(uri.getFilePath()); + + const { parent, links, node, basename } = this._walk(path, /* noFollow */ true); + if (!parent) { + throw createIOError('EPERM'); + } + if (!isDirectory(node)) { + throw createIOError('ENOTDIR'); + } + if (this._getLinks(node).size !== 0) { + throw createIOError('ENOTEMPTY'); + } + + this._removeLink(parent, links, basename, node); + } + + /** + * Link one file to another file (also known as a "hard link"). 
+ * + * @link http://pubs.opengroup.org/onlinepubs/9699919799/functions/link.html + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + linkSync(oldpath: string, newpath: string) { + if (this.isReadonly) { + throw createIOError('EROFS'); + } + + const { node } = this._walk(this._resolve(oldpath)); + if (!node) { + throw createIOError('ENOENT'); + } + if (isDirectory(node)) { + throw createIOError('EPERM'); + } + + const { parent, links, basename, node: existingNode } = this._walk(this._resolve(newpath), /* noFollow */ true); + if (!parent) { + throw createIOError('EPERM'); + } + if (existingNode) { + throw createIOError('EEXIST'); + } + + this._addLink(parent, links, basename, node); + } + + /** + * Remove a directory entry. + * + * @link http://pubs.opengroup.org/onlinepubs/9699919799/functions/unlink.html + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + unlinkSync(path: Uri) { + if (this.isReadonly) { + throw createIOError('EROFS'); + } + + const { parent, links, node, basename } = this._walk(this._resolve(path.getFilePath()), /* noFollow */ true); + if (!parent) { + throw createIOError('EPERM'); + } + if (!node) { + throw createIOError('ENOENT'); + } + if (isDirectory(node)) { + throw createIOError('EISDIR'); + } + + this._removeLink(parent, links, basename, node); + } + + /** + * Rename a file. + * + * @link http://pubs.opengroup.org/onlinepubs/9699919799/functions/rename.html + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. 
+ */ + renameSync(oldpath: string, newpath: string) { + if (this.isReadonly) { + throw createIOError('EROFS'); + } + + const { + parent: oldParent, + links: oldParentLinks, + node, + basename: oldBasename, + } = this._walk(this._resolve(oldpath), /* noFollow */ true); + + if (!oldParent) { + throw createIOError('EPERM'); + } + if (!node) { + throw createIOError('ENOENT'); + } + + const { + parent: newParent, + links: newParentLinks, + node: existingNode, + basename: newBasename, + } = this._walk(this._resolve(newpath), /* noFollow */ true); + + if (!newParent) { + throw createIOError('EPERM'); + } + + const time = this.time(); + if (existingNode) { + if (isDirectory(node)) { + if (!isDirectory(existingNode)) { + throw createIOError('ENOTDIR'); + } + if (this._getLinks(existingNode).size > 0) { + throw createIOError('ENOTEMPTY'); + } + } else { + if (isDirectory(existingNode)) { + throw createIOError('EISDIR'); + } + } + this._removeLink(newParent, newParentLinks, newBasename, existingNode, time); + } + + this._replaceLink(oldParent, oldParentLinks, oldBasename, newParent, newParentLinks, newBasename, node, time); + } + + /** + * Make a symbolic link. + * + * @link http://pubs.opengroup.org/onlinepubs/9699919799/functions/symlink.html + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + symlinkSync(target: string, linkpath: string) { + if (this.isReadonly) { + throw createIOError('EROFS'); + } + + const { + parent, + links, + node: existingNode, + basename, + } = this._walk(this._resolve(linkpath), /* noFollow */ true); + if (!parent) { + throw createIOError('EPERM'); + } + if (existingNode) { + throw createIOError('EEXIST'); + } + + const time = this.time(); + const node = this._mknod(parent.dev, S_IFLNK, /* mode */ 0o666, time); + node.symlink = validate(target, ValidationFlags.RelativeOrAbsolute); + this._addLink(parent, links, basename, node, time); + } + + /** + * Resolve a pathname. 
+ * + * @link http://pubs.opengroup.org/onlinepubs/9699919799/functions/realpath.html + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + realpathSync(path: Uri) { + try { + const { realpath } = this._walk(this._resolve(path.getFilePath())); + return Uri.file(realpath, this); + } catch (e: any) { + return path; + } + } + + /** + * Read from a file. + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + readFileSync(path: Uri, encoding?: null): Buffer; + /** + * Read from a file. + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + readFileSync(path: Uri, encoding: BufferEncoding): string; + /** + * Read from a file. + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. + */ + readFileSync(path: Uri, encoding?: BufferEncoding | null): string | Buffer; + readFileSync(path: Uri, encoding: BufferEncoding | null = null) { + const { node } = this._walk(this._resolve(path.getFilePath())); + if (!node) { + throw createIOError('ENOENT'); + } + if (isDirectory(node)) { + throw createIOError('EISDIR'); + } + if (!isFile(node)) { + throw createIOError('EBADF'); + } + + const buffer = this._getBuffer(node).slice(); + return encoding ? buffer.toString(encoding) : buffer; + } + + /** + * Write to a file. + * + * NOTE: do not rename this method as it is intended to align with the same named export of the "fs" module. 
+ */ + writeFileSync(uri: Uri, data: string | Buffer, encoding: BufferEncoding | null = null) { + if (this.isReadonly) { + throw createIOError('EROFS'); + } + + const { + parent, + links, + node: existingNode, + basename, + } = this._walk(this._resolve(uri.getFilePath()), /* noFollow */ false); + if (!parent) { + throw createIOError('EPERM'); + } + + const time = this.time(); + let node = existingNode; + if (!node) { + node = this._mknod(parent.dev, S_IFREG, 0o666, time); + this._addLink(parent, links, basename, node, time); + } + + if (isDirectory(node)) { + throw createIOError('EISDIR'); + } + if (!isFile(node)) { + throw createIOError('EBADF'); + } + node.buffer = Buffer.isBuffer(data) + ? data.slice() + : bufferFrom('' + data, (encoding as BufferEncoding) || 'utf8'); + node.size = node.buffer.byteLength; + node.mtimeMs = time; + node.ctimeMs = time; + } + + readFile(fileUri: Uri): Promise<Buffer> { + return Promise.resolve(this.readFileSync(fileUri)); + } + readFileText(fileUri: Uri, encoding?: BufferEncoding): Promise<string> { + return Promise.resolve(this.readFileSync(fileUri, encoding || 'utf8')); + } + + createReadStream(path: Uri): ReadStream { + throw new Error('Not implemented in test file system.'); + } + createWriteStream(path: Uri): WriteStream { + throw new Error('Not implemented in test file system.'); + } + + copyFileSync(src: Uri, dst: Uri): void { + throw new Error('Not implemented in test file system.'); + } + + mapDirectory(mappedUri: Uri, originalUri: Uri, filter?: (originalUri: Uri, fs: FileSystem) => boolean): Disposable { + throw new Error('Not implemented in test file system.'); + } + + /** + * Generates a `FileSet` patch containing all the entries in this `FileSystem` that are not in `base`. + * @param base The base file system. If not provided, this file system's `shadowRoot` is used (if present). + */ + diff(base = this.shadowRoot, options: DiffOptions = {}) { + const differences: FileSet = {}; + const hasDifferences = base + ? 
TestFileSystem._rootDiff(differences, this, base, options) + : TestFileSystem._trackCreatedInodes(differences, this, this._getRootLinks()); + return hasDifferences ? differences : undefined; + } + + /** + * Generates a `FileSet` patch containing all the entries in `changed` that are not in `base`. + */ + static diff(changed: TestFileSystem, base: TestFileSystem, options: DiffOptions = {}) { + const differences: FileSet = {}; + return TestFileSystem._rootDiff(differences, changed, base, options) ? differences : undefined; + } + + isInZip(path: Uri): boolean { + return false; + } + + dispose(): void { + // Do Nothing + } + + private _mkdir({ parent, links, node: existingNode, basename }: WalkResult) { + if (existingNode) { + throw createIOError('EEXIST'); + } + const time = this.time(); + const node = this._mknod(parent ? parent.dev : ++devCount, S_IFDIR, /* mode */ 0o777, time); + this._addLink(parent, links, basename, node, time); + } + + private _filemeta(node: Inode): Metadata { + if (!node.meta) { + const parentMeta = node.shadowRoot && this._shadowRoot && this._shadowRoot._filemeta(node.shadowRoot); + node.meta = new Metadata(parentMeta); + } + return node.meta; + } + + private _scan(path: string, stats: Stats, axis: Axis, traversal: Traversal, noFollow: boolean, results: string[]) { + if (axis === 'ancestors-or-self' || axis === 'self' || axis === 'descendants-or-self') { + if (!traversal.accept || traversal.accept(path, stats)) { + results.push(path); + } + } + if (axis === 'ancestors-or-self' || axis === 'ancestors') { + const dirname = pathUtil.getDirectoryPath(path); + if (dirname !== path) { + try { + const stats = this._stat(this._walk(dirname, noFollow)); + if (!traversal.traverse || traversal.traverse(dirname, stats)) { + this._scan(dirname, stats, 'ancestors-or-self', traversal, noFollow, results); + } + } catch { + /* ignored */ + } + } + } + if (axis === 'descendants-or-self' || axis === 'descendants') { + if (stats.isDirectory() && 
(!traversal.traverse || traversal.traverse(path, stats))) { + for (const file of this.readdirSync(Uri.file(path, this))) { + try { + const childpath = pathUtil.combinePaths(path, file); + const stats = this._stat(this._walk(childpath, noFollow)); + this._scan(childpath, stats, 'descendants-or-self', traversal, noFollow, results); + } catch { + /* ignored */ + } + } + } + } + } + + private _stat(entry: WalkResult) { + const node = entry.node; + if (!node) { + throw createIOError(`ENOENT`, entry.realpath); + } + return new Stats( + node.dev, + node.ino, + node.mode, + node.nlink, + /* rdev */ 0, + /* size */ isFile(node) ? this._getSize(node) : isSymlink(node) ? node.symlink.length : 0, + /* blksize */ 4096, + /* blocks */ 0, + node.atimeMs, + node.mtimeMs, + node.ctimeMs, + node.birthtimeMs + ); + } + + private static _diffWorker( + container: FileSet, + changed: TestFileSystem, + changedLinks: ReadonlyMap<string, Inode> | undefined, + base: TestFileSystem, + baseLinks: ReadonlyMap<string, Inode> | undefined, + options: DiffOptions + ) { + if (changedLinks && !baseLinks) { + return TestFileSystem._trackCreatedInodes(container, changed, changedLinks); + } + if (baseLinks && !changedLinks) { + return TestFileSystem._trackDeletedInodes(container, baseLinks); + } + if (changedLinks && baseLinks) { + let hasChanges = false; + // track base items missing in changed + baseLinks.forEach((node, basename) => { + if (!changedLinks.has(basename)) { + container[basename] = isDirectory(node) ? 
new Rmdir() : new Unlink(); + hasChanges = true; + } + }); + // track changed items missing or differing in base + changedLinks.forEach((changedNode, basename) => { + const baseNode = baseLinks.get(basename); + if (baseNode) { + if (isDirectory(changedNode) && isDirectory(baseNode)) { + return (hasChanges = + TestFileSystem._directoryDiff( + container, + basename, + changed, + changedNode, + base, + baseNode, + options + ) || hasChanges); + } + if (isFile(changedNode) && isFile(baseNode)) { + return (hasChanges = + TestFileSystem._fileDiff( + container, + basename, + changed, + changedNode, + base, + baseNode, + options + ) || hasChanges); + } + if (isSymlink(changedNode) && isSymlink(baseNode)) { + return (hasChanges = + TestFileSystem._symlinkDiff(container, basename, changedNode, baseNode) || hasChanges); + } + } + return (hasChanges = + TestFileSystem._trackCreatedInode(container, basename, changed, changedNode) || hasChanges); + }); + return hasChanges; + } + return false; + } + + private static _rootDiff(container: FileSet, changed: TestFileSystem, base: TestFileSystem, options: DiffOptions) { + while (!changed._lazy.links && changed._shadowRoot) { + changed = changed._shadowRoot; + } + while (!base._lazy.links && base._shadowRoot) { + base = base._shadowRoot; + } + + // no difference if the file systems are the same reference + if (changed === base) { + return false; + } + + // no difference if the root links are empty and not shadowed + if (!changed._lazy.links && !changed._shadowRoot && !base._lazy.links && !base._shadowRoot) { + return false; + } + + return TestFileSystem._diffWorker( + container, + changed, + changed._getRootLinks(), + base, + base._getRootLinks(), + options + ); + } + + private static _directoryDiff( + container: FileSet, + basename: string, + changed: TestFileSystem, + changedNode: DirectoryInode, + base: TestFileSystem, + baseNode: DirectoryInode, + options: DiffOptions + ) { + while (!changedNode.links && changedNode.shadowRoot) { + 
changedNode = changedNode.shadowRoot; + } + while (!baseNode.links && baseNode.shadowRoot) { + baseNode = baseNode.shadowRoot; + } + + // no difference if the nodes are the same reference + if (changedNode === baseNode) { + return false; + } + + // no difference if both nodes are non shadowed and have no entries + if (isEmptyNonShadowedDirectory(changedNode) && isEmptyNonShadowedDirectory(baseNode)) { + return false; + } + + // no difference if both nodes are unpopulated and point to the same mounted file system + if ( + !changedNode.links && + !baseNode.links && + changedNode.resolver && + changedNode.source !== undefined && + baseNode.resolver === changedNode.resolver && + baseNode.source === changedNode.source + ) { + return false; + } + + // no difference if both nodes have identical children + const children: FileSet = {}; + if ( + !TestFileSystem._diffWorker( + children, + changed, + changed._getLinks(changedNode), + base, + base._getLinks(baseNode), + options + ) + ) { + return false; + } + + container[basename] = new Directory(children); + return true; + } + + private static _fileDiff( + container: FileSet, + basename: string, + changed: TestFileSystem, + changedNode: FileInode, + base: TestFileSystem, + baseNode: FileInode, + options: DiffOptions + ) { + while (!changedNode.buffer && changedNode.shadowRoot) { + changedNode = changedNode.shadowRoot; + } + while (!baseNode.buffer && baseNode.shadowRoot) { + baseNode = baseNode.shadowRoot; + } + + // no difference if the nodes are the same reference + if (changedNode === baseNode) { + return false; + } + + // no difference if both nodes are non shadowed and have no entries + if (isEmptyNonShadowedFile(changedNode) && isEmptyNonShadowedFile(baseNode)) { + return false; + } + + // no difference if both nodes are unpopulated and point to the same mounted file system + if ( + !changedNode.buffer && + !baseNode.buffer && + changedNode.resolver && + changedNode.source !== undefined && + baseNode.resolver === 
changedNode.resolver && + baseNode.source === changedNode.source + ) { + return false; + } + + const changedBuffer = changed._getBuffer(changedNode); + const baseBuffer = base._getBuffer(baseNode); + + // no difference if both buffers are the same reference + if (changedBuffer === baseBuffer) { + return false; + } + + // no difference if both buffers are identical + if (Buffer.compare(changedBuffer, baseBuffer) === 0) { + if (!options.includeChangedFileWithSameContent) { + return false; + } + container[basename] = new SameFileContentFile(changedBuffer); + return true; + } + + container[basename] = new File(changedBuffer); + return true; + } + + private static _symlinkDiff( + container: FileSet, + basename: string, + changedNode: SymlinkInode, + baseNode: SymlinkInode + ) { + // no difference if the nodes are the same reference + if (changedNode.symlink === baseNode.symlink) { + return false; + } + container[basename] = new Symlink(changedNode.symlink); + return true; + } + + private static _trackCreatedInode(container: FileSet, basename: string, changed: TestFileSystem, node: Inode) { + if (isDirectory(node)) { + const children: FileSet = {}; + TestFileSystem._trackCreatedInodes(children, changed, changed._getLinks(node)); + container[basename] = new Directory(children); + } else if (isSymlink(node)) { + container[basename] = new Symlink(node.symlink); + } else { + container[basename] = new File(node.buffer || ''); + } + return true; + } + + private static _trackCreatedInodes( + container: FileSet, + changed: TestFileSystem, + changedLinks: ReadonlyMap<string, Inode> + ) { + // no difference if links are empty + if (!changedLinks.size) { + return false; + } + + changedLinks.forEach((node, basename) => { + TestFileSystem._trackCreatedInode(container, basename, changed, node); + }); + return true; + } + + private static _trackDeletedInodes(container: FileSet, baseLinks: ReadonlyMap<string, Inode>) { + // no difference if links are empty + if (!baseLinks.size) { + 
return false; + } + baseLinks.forEach((node, basename) => { + container[basename] = isDirectory(node) ? new Rmdir() : new Unlink(); + }); + return true; + } + + private _mknod(dev: number, type: typeof S_IFREG, mode: number, time?: number): FileInode; + private _mknod(dev: number, type: typeof S_IFDIR, mode: number, time?: number): DirectoryInode; + private _mknod(dev: number, type: typeof S_IFLNK, mode: number, time?: number): SymlinkInode; + private _mknod(dev: number, type: number, mode: number, time = this.time()): Inode { + return { + dev, + ino: ++inoCount, + mode: (mode & ~S_IFMT & ~0o022 & 0o7777) | (type & S_IFMT), + atimeMs: time, + mtimeMs: time, + ctimeMs: time, + birthtimeMs: time, + nlink: 0, + }; + } + + private _addLink( + parent: DirectoryInode | undefined, + links: SortedMap<string, Inode>, + name: string, + node: Inode, + time = this.time() + ) { + links.set(name, node); + node.nlink++; + node.ctimeMs = time; + if (parent) { + parent.mtimeMs = time; + } + if (!parent && !this._cwd) { + this._cwd = name; + } + } + + private _removeLink( + parent: DirectoryInode | undefined, + links: SortedMap<string, Inode>, + name: string, + node: Inode, + time = this.time() + ) { + links.delete(name); + node.nlink--; + node.ctimeMs = time; + if (parent) { + parent.mtimeMs = time; + } + } + + private _replaceLink( + oldParent: DirectoryInode, + oldLinks: SortedMap<string, Inode>, + oldName: string, + newParent: DirectoryInode, + newLinks: SortedMap<string, Inode>, + newName: string, + node: Inode, + time: number + ) { + if (oldParent !== newParent) { + this._removeLink(oldParent, oldLinks, oldName, node, time); + this._addLink(newParent, newLinks, newName, node, time); + } else { + oldLinks.delete(oldName); + oldLinks.set(newName, node); + oldParent.mtimeMs = time; + newParent.mtimeMs = time; + } + } + + private _getRootLinks() { + if (!this._lazy.links) { + const links = new SortedMap<string, Inode>(this.stringComparer); + if (this._shadowRoot) { + 
this._copyShadowLinks(this._shadowRoot._getRootLinks(), links); + } + this._lazy.links = links; + } + return this._lazy.links; + } + + private _getLinks(node: DirectoryInode) { + if (!node.links) { + const links = new SortedMap<string, Inode>(this.stringComparer); + const { source, resolver } = node; + if (source && resolver) { + node.source = undefined; + node.resolver = undefined; + for (const name of resolver.readdirSync(source)) { + const path = pathUtil.combinePaths(source, name); + const stats = resolver.statSync(path); + switch (stats.mode & S_IFMT) { + case S_IFDIR: { + const dir = this._mknod(node.dev, S_IFDIR, 0o777); + dir.source = pathUtil.combinePaths(source, name); + dir.resolver = resolver; + this._addLink(node, links, name, dir); + break; + } + case S_IFREG: { + const file = this._mknod(node.dev, S_IFREG, 0o666); + file.source = pathUtil.combinePaths(source, name); + file.resolver = resolver; + file.size = stats.size; + this._addLink(node, links, name, file); + break; + } + } + } + } else if (this._shadowRoot && node.shadowRoot) { + this._copyShadowLinks(this._shadowRoot._getLinks(node.shadowRoot), links); + } + node.links = links; + } + return node.links; + } + + private _getShadow(root: DirectoryInode): DirectoryInode; + private _getShadow(root: Inode): Inode; + private _getShadow(root: Inode) { + const shadows = this._lazy.shadows || (this._lazy.shadows = new Map<number, Inode>()); + + let shadow = shadows.get(root.ino); + if (!shadow) { + shadow = { + dev: root.dev, + ino: root.ino, + mode: root.mode, + atimeMs: root.atimeMs, + mtimeMs: root.mtimeMs, + ctimeMs: root.ctimeMs, + birthtimeMs: root.birthtimeMs, + nlink: root.nlink, + shadowRoot: root, + }; + + if (isSymlink(root)) { + (shadow as SymlinkInode).symlink = root.symlink; + } + shadows.set(shadow.ino, shadow); + } + + return shadow; + } + + private _copyShadowLinks(source: ReadonlyMap<string, Inode>, target: SortedMap<string, Inode>) { + const iterator = getIterator(source); + try { + for 
(let i = nextResult(iterator); i; i = nextResult(iterator)) { + const [name, root] = i.value; + target.set(name, this._getShadow(root)); + } + } finally { + closeIterator(iterator); + } + } + + private _getSize(node: FileInode): number { + if (node.buffer) { + return node.buffer.byteLength; + } + if (node.size !== undefined) { + return node.size; + } + if (node.source && node.resolver) { + return (node.size = node.resolver.statSync(node.source).size); + } + if (this._shadowRoot && node.shadowRoot) { + return (node.size = this._shadowRoot._getSize(node.shadowRoot)); + } + return 0; + } + + private _getBuffer(node: FileInode): Buffer { + if (!node.buffer) { + const { source, resolver } = node; + if (source && resolver) { + node.source = undefined; + node.resolver = undefined; + node.size = undefined; + node.buffer = resolver.readFileSync(source); + } else if (this._shadowRoot && node.shadowRoot) { + node.buffer = this._shadowRoot._getBuffer(node.shadowRoot); + } else { + node.buffer = Buffer.allocUnsafe(0); + } + } + return node.buffer; + } + + /** + * Walk a path to its end. + * + * @param path The path to follow. + * @param noFollow A value indicating whether to *not* dereference a symbolic link at the + * end of a path. 
+ * + * @link http://man7.org/linux/man-pages/man7/path_resolution.7.html + */ + private _walk( + path: string, + noFollow?: boolean, + onError?: (error: NodeJS.ErrnoException, fragment: WalkResult) => 'retry' | 'throw' + ): WalkResult; + private _walk( + path: string, + noFollow?: boolean, + onError?: (error: NodeJS.ErrnoException, fragment: WalkResult) => 'stop' | 'retry' | 'throw' + ): WalkResult | undefined; + private _walk( + path: string, + noFollow?: boolean, + onError?: (error: NodeJS.ErrnoException, fragment: WalkResult) => 'stop' | 'retry' | 'throw' + ): WalkResult | undefined { + let links = this._getRootLinks(); + let parent: DirectoryInode | undefined; + let components = pathUtil.getPathComponents(path); + let step = 0; + let depth = 0; + let retry = false; + while (true) { + if (depth >= 40) { + throw createIOError('ELOOP'); + } + const lastStep = step === components.length - 1; + const basename = components[step]; + const node = links.get(basename); + if (lastStep && (noFollow || !isSymlink(node))) { + return { realpath: pathUtil.combinePathComponents(components), basename, parent, links, node }; + } + if (node === undefined) { + if (trapError(createIOError('ENOENT'), node)) { + continue; + } + return undefined; + } + if (isSymlink(node)) { + const dirname = pathUtil.combinePathComponents(components.slice(0, step)); + const symlink = pathUtil.resolvePaths(dirname, node.symlink); + links = this._getRootLinks(); + parent = undefined; + components = pathUtil.getPathComponents(symlink).concat(components.slice(step + 1)); + step = 0; + depth++; + retry = false; + continue; + } + if (isDirectory(node)) { + links = this._getLinks(node); + parent = node; + step++; + retry = false; + continue; + } + if (trapError(createIOError('ENOTDIR'), node)) { + continue; + } + return undefined; + } + + function trapError(error: NodeJS.ErrnoException, node?: Inode) { + const realpath = pathUtil.combinePathComponents(components.slice(0, step + 1)); + const basename = 
components[step]; + const result = !retry && onError ? onError(error, { realpath, basename, parent, links, node }) : 'throw'; + if (result === 'stop') { + return false; + } + if (result === 'retry') { + retry = true; + return true; + } + throw error; + } + } + + /** + * Resolve a path relative to the current working directory. + */ + private _resolve(path: string) { + return this._cwd + ? pathUtil.resolvePaths( + this._cwd, + validate(path, ValidationFlags.RelativeOrAbsolute | ValidationFlags.AllowWildcard) + ) + : validate(path, ValidationFlags.Absolute | ValidationFlags.AllowWildcard); + } + + private _applyFiles(files: FileSet, dirname: string) { + const deferred: [Symlink | Link | Mount, string][] = []; + this._applyFilesWorker(files, dirname, deferred); + for (const [entry, path] of deferred) { + this.mkdirpSync(pathUtil.getDirectoryPath(path)); + this.pushd(pathUtil.getDirectoryPath(path)); + if (entry instanceof Symlink) { + if (this.stringComparer(pathUtil.getDirectoryPath(path), path) === 0) { + throw new TypeError('Roots cannot be symbolic links.'); + } + this.symlinkSync(pathUtil.resolvePaths(dirname, entry.symlink), path); + this._applyFileExtendedOptions(path, entry); + } else if (entry instanceof Link) { + if (this.stringComparer(pathUtil.getDirectoryPath(path), path) === 0) { + throw new TypeError('Roots cannot be hard links.'); + } + this.linkSync(entry.path, path); + } else { + this.mountSync(entry.source, path, entry.resolver); + this._applyFileExtendedOptions(path, entry); + } + this.popd(); + } + } + + private _applyFileExtendedOptions(path: string, entry: Directory | File | Symlink | Mount) { + const { meta } = entry; + if (meta !== undefined) { + const filemeta = this.filemeta(path); + for (const key of Object.keys(meta)) { + filemeta.set(key, meta[key]); + } + } + } + + private _applyFilesWorker(files: FileSet, dirname: string, deferred: [Symlink | Link | Mount, string][]) { + for (const key of Object.keys(files)) { + const value = 
normalizeFileSetEntry(files[key]); + const path = dirname ? pathUtil.resolvePaths(dirname, key) : key; + validate(path, ValidationFlags.Absolute); + + if (value === null || value === undefined || value instanceof Rmdir || value instanceof Unlink) { + if (this.stringComparer(pathUtil.getDirectoryPath(path), path) === 0) { + throw new TypeError('Roots cannot be deleted.'); + } + this.rimrafSync(path); + } else if (value instanceof File) { + if (this.stringComparer(pathUtil.getDirectoryPath(path), path) === 0) { + throw new TypeError('Roots cannot be files.'); + } + this.mkdirpSync(pathUtil.getDirectoryPath(path)); + this.writeFileSync(Uri.file(path, this), value.data, value.encoding); + this._applyFileExtendedOptions(path, value); + } else if (value instanceof Directory) { + this.mkdirpSync(path); + this._applyFileExtendedOptions(path, value); + this._applyFilesWorker(value.files, path, deferred); + } else { + deferred.push([value, path]); + } + } + } +} + +export interface FileSystemOptions { + // Sets the initial timestamp for new files and directories, or the function used + // to calculate timestamps. + time?: number | Date | (() => number | Date) | undefined; + + // A set of file system entries to initially add to the file system. + files?: FileSet | undefined; + + // Sets the initial working directory for the file system. + cwd?: string | undefined; + + // Sets initial metadata attached to the file system. + meta?: Record<string, any> | undefined; +} + +export type Axis = 'ancestors' | 'ancestors-or-self' | 'self' | 'descendants-or-self' | 'descendants'; + +export interface Traversal { + /** A function called to choose whether to continue to traverse to either ancestors or descendants. */ + traverse?(path: string, stats: Stats): boolean; + /** A function called to choose whether to accept a path as part of the result. 
*/ + accept?(path: string, stats: Stats): boolean; +} + +export interface FileSystemResolver { + statSync(path: string): { mode: number; size: number }; + readdirSync(path: string): string[]; + readFileSync(path: string): Buffer; +} + +/** + * A template used to populate files, directories, links, etc. in a virtual file system. + */ +export interface FileSet { + [name: string]: DirectoryLike | FileLike | Link | Symlink | Mount | Rmdir | Unlink | null | undefined; +} + +export type DirectoryLike = FileSet | Directory; +export type FileLike = File | Buffer | string; + +/** Extended options for a directory in a `FileSet` */ +export class Directory { + readonly files: FileSet; + readonly meta: Record<string, any> | undefined; + constructor(files: FileSet, { meta }: { meta?: Record<string, any> } = {}) { + this.files = files; + this.meta = meta; + } +} + +/** Extended options for a file in a `FileSet` */ +export class File { + readonly data: Buffer | string; + readonly encoding: BufferEncoding | undefined; + readonly meta: Record<string, any> | undefined; + constructor( + data: Buffer | string, + { meta, encoding }: { encoding?: BufferEncoding; meta?: Record<string, any> } = {} + ) { + this.data = data; + this.encoding = encoding; + this.meta = meta; + } +} + +export class SameFileContentFile extends File { + constructor(data: Buffer | string, metaAndEncoding?: { encoding?: BufferEncoding; meta?: Record<string, any> }) { + super(data, metaAndEncoding); + } +} + +/** Extended options for a hard link in a `FileSet` */ +export class Link { + readonly path: string; + constructor(path: string) { + this.path = path; + } +} + +/** Removes a directory in a `FileSet` */ +export class Rmdir { + ' rmdirBrand'?: never; // brand necessary for proper type guards +} + +/** Unlinks a file in a `FileSet` */ +export class Unlink { + ' unlinkBrand'?: never; // brand necessary for proper type guards +} + +/** Extended options for a symbolic link in a `FileSet` */ +export class Symlink { + 
readonly symlink: string; + readonly meta: Record<string, any> | undefined; + constructor(symlink: string, { meta }: { meta?: Record<string, any> } = {}) { + this.symlink = symlink; + this.meta = meta; + } +} + +// file type +// these should be only used inside of test code. it is export just because mock file system is separated into +// 2 files. this and factory.ts file. actual value doesn't matter +export const S_IFMT = 0o170000; // file type +export const S_IFSOCK = 0o140000; // socket +export const S_IFLNK = 0o120000; // symbolic link +export const S_IFREG = 0o100000; // regular file +export const S_IFBLK = 0o060000; // block device +export const S_IFDIR = 0o040000; // directory +export const S_IFCHR = 0o020000; // character device +export const S_IFIFO = 0o010000; // FIFO + +/** Extended options for mounting a virtual copy of an external file system via a `FileSet` */ +export class Mount { + readonly source: string; + readonly resolver: FileSystemResolver; + readonly meta: Record<string, any> | undefined; + constructor(source: string, resolver: FileSystemResolver, { meta }: { meta?: Record<string, any> } = {}) { + this.source = source; + this.resolver = resolver; + this.meta = meta; + } +} + +// a generic POSIX inode +type Inode = FileInode | DirectoryInode | SymlinkInode; + +interface FileInode { + dev: number; // device id + ino: number; // inode id + mode: number; // file mode + atimeMs: number; // access time + mtimeMs: number; // modified time + ctimeMs: number; // status change time + birthtimeMs: number; // creation time + nlink: number; // number of hard links + size?: number | undefined; + buffer?: Buffer | undefined; + source?: string | undefined; + resolver?: FileSystemResolver | undefined; + shadowRoot?: FileInode | undefined; + meta?: Metadata | undefined; +} + +interface DirectoryInode { + dev: number; // device id + ino: number; // inode id + mode: number; // file mode + atimeMs: number; // access time + mtimeMs: number; // modified time + 
ctimeMs: number; // status change time + birthtimeMs: number; // creation time + nlink: number; // number of hard links + links?: SortedMap<string, Inode> | undefined; + source?: string | undefined; + resolver?: FileSystemResolver | undefined; + shadowRoot?: DirectoryInode | undefined; + meta?: Metadata | undefined; +} + +interface SymlinkInode { + dev: number; // device id + ino: number; // inode id + mode: number; // file mode + atimeMs: number; // access time + mtimeMs: number; // modified time + ctimeMs: number; // status change time + birthtimeMs: number; // creation time + nlink: number; // number of hard links + symlink: string; + shadowRoot?: SymlinkInode | undefined; + meta?: Metadata | undefined; +} + +function isEmptyNonShadowedDirectory(node: DirectoryInode) { + return !node.links && !node.shadowRoot && !node.resolver && !node.source; +} + +function isEmptyNonShadowedFile(node: FileInode) { + return !node.buffer && !node.shadowRoot && !node.resolver && !node.source; +} + +function isFile(node: Inode | undefined): node is FileInode { + return node !== undefined && (node.mode & S_IFMT) === S_IFREG; +} + +function isDirectory(node: Inode | undefined): node is DirectoryInode { + return node !== undefined && (node.mode & S_IFMT) === S_IFDIR; +} + +function isSymlink(node: Inode | undefined): node is SymlinkInode { + return node !== undefined && (node.mode & S_IFMT) === S_IFLNK; +} + +interface WalkResult { + realpath: string; + basename: string; + parent: DirectoryInode | undefined; + links: SortedMap<string, Inode>; + node: Inode | undefined; +} + +function normalizeFileSetEntry(value: FileSet[string]) { + if ( + value === undefined || + value === null || + value instanceof Directory || + value instanceof File || + value instanceof Link || + value instanceof Symlink || + value instanceof Mount || + value instanceof Rmdir || + value instanceof Unlink + ) { + return value; + } + return typeof value === 'string' || Buffer.isBuffer(value) ? 
new File(value) : new Directory(value); +} + +export function formatPatch(patch: FileSet): string; +export function formatPatch(patch: FileSet | undefined): string | null; +export function formatPatch(patch: FileSet | undefined) { + return patch ? formatPatchWorker('', patch) : null; +} + +function formatPatchWorker(dirname: string, container: FileSet): string { + let text = ''; + for (const name of Object.keys(container)) { + const entry = normalizeFileSetEntry(container[name]); + const file = dirname ? pathUtil.combinePaths(dirname, name) : name; + if (entry === null || entry === undefined || entry instanceof Unlink) { + text += `//// [${file}] unlink\r\n`; + } else if (entry instanceof Rmdir) { + text += `//// [${pathUtil.ensureTrailingDirectorySeparator(file)}] rmdir\r\n`; + } else if (entry instanceof Directory) { + text += formatPatchWorker(file, entry.files); + } else if (entry instanceof SameFileContentFile) { + text += `//// [${file}] file written with same contents\r\n`; + } else if (entry instanceof File) { + const content = typeof entry.data === 'string' ? 
entry.data : entry.data.toString('utf8'); + text += `//// [${file}]\r\n${content}\r\n\r\n`; + } else if (entry instanceof Link) { + text += `//// [${file}] link(${entry.path})\r\n`; + } else if (entry instanceof Symlink) { + text += `//// [${file}] symlink(${entry.symlink})\r\n`; + } else if (entry instanceof Mount) { + text += `//// [${file}] mount(${entry.source})\r\n`; + } + } + return text; +} + +function makeDirEnt(name: string, node: Inode, parentDir: string): Dirent { + const de: Dirent = { + isFile: () => isFile(node), + isDirectory: () => isDirectory(node), + isBlockDevice: () => false, + isCharacterDevice: () => false, + isFIFO: () => false, + isSocket: () => false, + isSymbolicLink: () => isSymlink(node), + name, + parentPath: parentDir, + get path() { + return this.parentPath; + }, + }; + return de; +} + +class Stats { + dev: number; + ino: number; + mode: number; + nlink: number; + uid: number; + gid: number; + rdev: number; + size: number; + blksize: number; + blocks: number; + atimeMs: number; + mtimeMs: number; + ctimeMs: number; + birthtimeMs: number; + atime: Date; + mtime: Date; + ctime: Date; + birthtime: Date; + + constructor(); + constructor( + dev: number, + ino: number, + mode: number, + nlink: number, + rdev: number, + size: number, + blksize: number, + blocks: number, + atimeMs: number, + mtimeMs: number, + ctimeMs: number, + birthtimeMs: number + ); + constructor( + dev = 0, + ino = 0, + mode = 0, + nlink = 0, + rdev = 0, + size = 0, + blksize = 0, + blocks = 0, + atimeMs = 0, + mtimeMs = 0, + ctimeMs = 0, + birthtimeMs = 0 + ) { + this.dev = dev; + this.ino = ino; + this.mode = mode; + this.nlink = nlink; + this.uid = 0; + this.gid = 0; + this.rdev = rdev; + this.size = size; + this.blksize = blksize; + this.blocks = blocks; + this.atimeMs = atimeMs; + this.mtimeMs = mtimeMs; + this.ctimeMs = ctimeMs; + this.birthtimeMs = birthtimeMs; + this.atime = new Date(this.atimeMs); + this.mtime = new Date(this.mtimeMs); + this.ctime = new 
Date(this.ctimeMs); + this.birthtime = new Date(this.birthtimeMs); + } + + isFile() { + return (this.mode & S_IFMT) === S_IFREG; + } + isDirectory() { + return (this.mode & S_IFMT) === S_IFDIR; + } + isSymbolicLink() { + return (this.mode & S_IFMT) === S_IFLNK; + } + isBlockDevice() { + return (this.mode & S_IFMT) === S_IFBLK; + } + isCharacterDevice() { + return (this.mode & S_IFMT) === S_IFCHR; + } + isFIFO() { + return (this.mode & S_IFMT) === S_IFIFO; + } + isSocket() { + return (this.mode & S_IFMT) === S_IFSOCK; + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/harness/vfs/pathValidation.ts b/python-parser/packages/pyright-internal/src/tests/harness/vfs/pathValidation.ts new file mode 100644 index 00000000..fa0b0f02 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/harness/vfs/pathValidation.ts @@ -0,0 +1,155 @@ +/* + * pathUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + */ + +import { sep } from 'path'; + +import * as pu from '../../../common/pathUtils'; +import { createIOError } from '../utils'; +import { Uri } from '../../../common/uri/uri'; + +const invalidRootComponentRegExp = getInvalidRootComponentRegExp(); +const invalidNavigableComponentRegExp = /[:*?"<>|]/; +const invalidNavigableComponentWithWildcardsRegExp = /[:"<>|]/; +const invalidNonNavigableComponentRegExp = /^\.{1,2}$|[:*?"<>|]/; +const invalidNonNavigableComponentWithWildcardsRegExp = /^\.{1,2}$|[:"<>|]/; +const extRegExp = /\.\w+$/; + +export const enum ValidationFlags { + None = 0, + + RequireRoot = 1 << 0, + RequireDirname = 1 << 1, + RequireBasename = 1 << 2, + RequireExtname = 1 << 3, + RequireTrailingSeparator = 1 << 4, + + AllowRoot = 1 << 5, + AllowDirname = 1 << 6, + AllowBasename = 1 << 7, + AllowExtname = 1 << 8, + AllowTrailingSeparator = 1 << 9, + AllowNavigation = 1 << 10, + AllowWildcard = 1 << 11, + + /** Path must be a valid directory root */ + Root = RequireRoot | AllowRoot | 
AllowTrailingSeparator, + + /** Path must be a absolute */ + Absolute = RequireRoot | + AllowRoot | + AllowDirname | + AllowBasename | + AllowExtname | + AllowTrailingSeparator | + AllowNavigation, + + /** Path may be relative or absolute */ + RelativeOrAbsolute = AllowRoot | + AllowDirname | + AllowBasename | + AllowExtname | + AllowTrailingSeparator | + AllowNavigation, + + /** Path may only be a filename */ + Basename = RequireBasename | AllowExtname, +} + +function validateComponents(components: string[], flags: ValidationFlags, hasTrailingSeparator: boolean) { + const hasRoot = !!components[0]; + const hasDirname = components.length > 2; + const hasBasename = components.length > 1; + const hasExtname = hasBasename && extRegExp.test(components[components.length - 1]); + const invalidComponentRegExp = + flags & ValidationFlags.AllowNavigation + ? flags & ValidationFlags.AllowWildcard + ? invalidNavigableComponentWithWildcardsRegExp + : invalidNavigableComponentRegExp + : flags & ValidationFlags.AllowWildcard + ? 
invalidNonNavigableComponentWithWildcardsRegExp + : invalidNonNavigableComponentRegExp; + + // Validate required components + if (flags & ValidationFlags.RequireRoot && !hasRoot) { + return false; + } + if (flags & ValidationFlags.RequireDirname && !hasDirname) { + return false; + } + if (flags & ValidationFlags.RequireBasename && !hasBasename) { + return false; + } + if (flags & ValidationFlags.RequireExtname && !hasExtname) { + return false; + } + if (flags & ValidationFlags.RequireTrailingSeparator && !hasTrailingSeparator) { + return false; + } + + // Required components indicate allowed components + if (flags & ValidationFlags.RequireRoot) { + flags |= ValidationFlags.AllowRoot; + } + if (flags & ValidationFlags.RequireDirname) { + flags |= ValidationFlags.AllowDirname; + } + if (flags & ValidationFlags.RequireBasename) { + flags |= ValidationFlags.AllowBasename; + } + if (flags & ValidationFlags.RequireExtname) { + flags |= ValidationFlags.AllowExtname; + } + if (flags & ValidationFlags.RequireTrailingSeparator) { + flags |= ValidationFlags.AllowTrailingSeparator; + } + + // Validate disallowed components + if (~flags & ValidationFlags.AllowRoot && hasRoot) { + return false; + } + if (~flags & ValidationFlags.AllowDirname && hasDirname) { + return false; + } + if (~flags & ValidationFlags.AllowBasename && hasBasename) { + return false; + } + if (~flags & ValidationFlags.AllowExtname && hasExtname) { + return false; + } + if (~flags & ValidationFlags.AllowTrailingSeparator && hasTrailingSeparator) { + return false; + } + + // Validate component strings + if (invalidRootComponentRegExp.test(components[0])) { + return false; + } + for (let i = 1; i < components.length; i++) { + if (invalidComponentRegExp.test(components[i]) && components[i] !== Uri.DefaultWorkspaceRootComponent) { + return false; + } + } + + return true; +} + +export function validate(path: string, flags: ValidationFlags = ValidationFlags.RelativeOrAbsolute) { + const components = 
pu.getPathComponents(path); + const trailing = pu.hasTrailingDirectorySeparator(path); + if (!validateComponents(components, flags, trailing)) { + throw createIOError('ENOENT'); + } + return components.length > 1 && trailing + ? pu.combinePathComponents(pu.reducePathComponents(components)) + sep + : pu.combinePathComponents(pu.reducePathComponents(components)); +} + +function getInvalidRootComponentRegExp(): RegExp { + const escapedSeparator = pu.getRegexEscapedSeparator(); + return new RegExp( + `^(?!(${escapedSeparator}|${escapedSeparator}${escapedSeparator}w+${escapedSeparator}|[a-zA-Z]:${escapedSeparator}?|)$)` + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/hoverProvider.test.ts b/python-parser/packages/pyright-internal/src/tests/hoverProvider.test.ts new file mode 100644 index 00000000..0629b813 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/hoverProvider.test.ts @@ -0,0 +1,440 @@ +/* + * hoverProvider.test.ts + * + * hoverProvider tests. 
+ */ + +import { parseAndGetTestState } from './harness/fourslash/testState'; + +test('import tooltip - import statement', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import [|/*marker1*/matplotlib|].[|/*marker2*/pyplot|] + +// @filename: matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + +// @filename: matplotlib/pyplot.py +// @library: true +//// """ pyplot """ + `; + + const state = parseAndGetTestState(code).state; + const marker1 = state.getMarkerByName('marker1'); + state.openFile(marker1.fileName); + + state.verifyHover('markdown', { + marker1: '```python\n(module) matplotlib\n```\n---\nmatplotlib', + marker2: '```python\n(module) pyplot\n```\n---\npyplot', + }); +}); + +test('import tooltip - import reference', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import matplotlib.pyplot +//// [|/*marker1*/matplotlib|].[|/*marker2*/pyplot|] + +// @filename: matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + +// @filename: matplotlib/pyplot.py +// @library: true +//// """ pyplot """ + `; + + const state = parseAndGetTestState(code).state; + const marker1 = state.getMarkerByName('marker1'); + state.openFile(marker1.fileName); + + state.verifyHover('markdown', { + marker1: '```python\n(module) matplotlib\n```\n---\nmatplotlib', + marker2: '```python\n(module) pyplot\n```\n---\npyplot', + }); +}); + +test('import tooltip - import statement with stubs', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import [|/*marker1*/matplotlib|].[|/*marker2*/pyplot|] + +// @filename: matplotlib/__init__.pyi +// @library: true +//// # empty + +// @filename: matplotlib/pyplot.pyi +// @library: true +//// # empty + +// @filename: 
matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + +// @filename: matplotlib/pyplot.py +// @library: true +//// """ pyplot """ + `; + + const state = parseAndGetTestState(code).state; + const marker1 = state.getMarkerByName('marker1'); + state.openFile(marker1.fileName); + + state.verifyHover('markdown', { + marker1: '```python\n(module) matplotlib\n```\n---\nmatplotlib', + marker2: '```python\n(module) pyplot\n```\n---\npyplot', + }); +}); + +test('import tooltip - import reference - stub files', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import matplotlib.pyplot +//// [|/*marker1*/matplotlib|].[|/*marker2*/pyplot|] + +// @filename: matplotlib/__init__.pyi +// @library: true +//// # empty + +// @filename: matplotlib/pyplot.pyi +// @library: true +//// # empty + +// @filename: matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + +// @filename: matplotlib/pyplot.py +// @library: true +//// """ pyplot """ + `; + + const state = parseAndGetTestState(code).state; + const marker1 = state.getMarkerByName('marker1'); + state.openFile(marker1.fileName); + + state.verifyHover('markdown', { + marker1: '```python\n(module) matplotlib\n```\n---\nmatplotlib', + marker2: '```python\n(module) pyplot\n```\n---\npyplot', + }); +}); + +test('import tooltip - import submodules statement', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import A.B.[|/*marker*/C|] + +// @filename: A/__init__.py +// @library: true +//// # empty + +// @filename: A/B/__init__.py +// @library: true +//// # empty + +// @filename: A/B/C/__init__.py +// @library: true +//// """ C """ + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + state.verifyHover('markdown', { marker: 
'```python\n(module) C\n```\n---\nC' }); +}); + +test('import tooltip - import submodules reference', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// import A.B.C +//// A.B.[|/*marker*/C|] + +// @filename: A/__init__.py +// @library: true +//// # empty + +// @filename: A/B/__init__.py +// @library: true +//// # empty + +// @filename: A/B/C/__init__.py +// @library: true +//// """ C """ + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + state.verifyHover('markdown', { marker: '```python\n(module) C\n```\n---\nC' }); +}); + +test('import tooltip - from import statement with stubs', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// from [|/*marker1*/matplotlib|].[|/*marker2*/pyplot|] import * + +// @filename: matplotlib/__init__.pyi +// @library: true +//// # empty + +// @filename: matplotlib/pyplot.pyi +// @library: true +//// # empty + +// @filename: matplotlib/__init__.py +// @library: true +//// """ matplotlib """ + +// @filename: matplotlib/pyplot.py +// @library: true +//// """ pyplot """ + `; + + const state = parseAndGetTestState(code).state; + const marker1 = state.getMarkerByName('marker1'); + state.openFile(marker1.fileName); + + state.verifyHover('markdown', { + marker1: '```python\n(module) matplotlib\n```\n---\nmatplotlib', + marker2: '```python\n(module) pyplot\n```\n---\npyplot', + }); +}); + +test('import tooltip - from import submodules statement', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// from A.B.[|/*marker*/C|] import * + +// @filename: A/__init__.py +// @library: true +//// # empty + +// @filename: A/B/__init__.py +// @library: true +//// # empty + +// 
@filename: A/B/C/__init__.py +// @library: true +//// """ C """ + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + state.verifyHover('markdown', { marker: '```python\n(module) C\n```\n---\nC' }); +}); + +test('import tooltip - check duplicate property', async () => { + const code = ` + +// @filename: test.py +//// class Test: +//// def __init__(self) -> None: +//// self.__test = False +//// +//// @property +//// def [|/*marker*/test|](self): +//// """Test DocString. +//// +//// Returns +//// ------- +//// bool +//// Lorem Ipsum +//// """ +//// return self.__test + + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + state.openFile(marker.fileName); + + state.verifyHover('markdown', { + marker: '```python\n(property) test: (self: Self@Test) -> bool\n```\n---\nTest DocString.\n\nReturns\n-------\nbool \n    Lorem Ipsum', + }); +}); + +test('import symbol tooltip - useLibraryCodeForTypes false', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": false +//// } + +// @filename: test.py +//// from foo import [|/*marker1*/bar|] +//// from bar.baz1 import [|/*marker2*/baz2|] + +// @filename: foo/__init__.py +// @library: true +//// from .bar import bar + +// @filename: foo/bar.py +// @library: true +//// class bar: ... + +// @filename: bar/baz1/baz2/__init__.py +// @library: true +//// class baz: ... 
+ `; + + const state = parseAndGetTestState(code).state; + const marker1 = state.getMarkerByName('marker1'); + state.openFile(marker1.fileName); + + state.verifyHover('markdown', { + marker1: '```python\n(import) bar: Unknown\n```', + marker2: '```python\n(module) baz2\n```', + }); +}); + +test('import symbol tooltip - useLibraryCodeForTypes true', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +//// from foo import [|/*marker1*/bar|] + +// @filename: foo/__init__.py +// @library: true +//// from .bar import bar + +// @filename: foo/bar.py +// @library: true +//// class bar: ... + `; + + const state = parseAndGetTestState(code).state; + const marker1 = state.getMarkerByName('marker1'); + state.openFile(marker1.fileName); + + state.verifyHover('markdown', { + marker1: '```python\n(class) bar\n```', + }); +}); + +test('TypedDict doc string', async () => { + const code = ` +// @filename: test.py +//// from typing import [|/*marker*/TypedDict|] + +// @filename: typing.py +// @library: true +//// def TypedDict(typename, fields=None, /, *, total=True, **kwargs): +//// """A simple typed namespace. At runtime it is equivalent to a plain dict.""" + `; + + const state = parseAndGetTestState(code).state; + const marker1 = state.getMarkerByName('marker'); + state.openFile(marker1.fileName); + + state.verifyHover('markdown', { + marker: '```python\n(class) TypedDict\n```\n---\nA simple typed namespace. 
At runtime it is equivalent to a plain dict.', + }); +}); + +test('hover on class Foo and its __call__ method with overloads', async () => { + const code = ` +// @filename: test.py +//// from typing import overload +//// class Foo: +//// def __init__(self): +//// pass +//// +//// @overload +//// def __call__(self, a: int) -> int: pass +//// @overload +//// def __call__(self, a: str) -> str: pass +//// def __call__(self, a: int | str) -> int | str: +//// return a +//// +//// [|/*marker1*/foo|] = Foo() +//// [|/*marker2*/foo|](1) +//// [|/*marker3*/foo|]("hello") +//// [|/*marker4*/foo|]() + `; + + const state = parseAndGetTestState(code).state; + const marker1 = state.getMarkerByName('marker1'); + + state.openFile(marker1.fileName); + + state.verifyHover('markdown', { + marker1: '```python\n(variable) foo: Foo\n```', + marker2: '```python\n(variable) def foo(a: int) -> int\n```', + marker3: '```python\n(variable) def foo(a: str) -> str\n```', + marker4: '```python\n(variable)\ndef __call__(a: int) -> int: ...\ndef __call__(a: str) -> str: ...\n```', + }); +}); + +test('hover on __call__ method', async () => { + const code = ` +// @filename: test.py +//// class Foo: +//// def __init__(self): +//// pass +//// +//// def __call__(self, a: int) -> int: +//// return a +//// +//// [|/*marker1*/foo|] = Foo() +//// [|/*marker2*/foo|](1) + `; + + const state = parseAndGetTestState(code).state; + const marker1 = state.getMarkerByName('marker1'); + + state.openFile(marker1.fileName); + + state.verifyHover('markdown', { + marker1: '```python\n(variable) foo: Foo\n```', + marker2: '```python\n(variable) def foo(a: int) -> int\n```', + }); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/importResolver.test.ts b/python-parser/packages/pyright-internal/src/tests/importResolver.test.ts new file mode 100644 index 00000000..2ab718fd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/importResolver.test.ts @@ -0,0 +1,1087 @@ +/* + * 
importResolver.test.ts + * + * importResolver tests. + */ + +import assert from 'assert'; + +import { Dirent, ReadStream, WriteStream } from 'fs'; +import { Disposable } from 'vscode-jsonrpc'; +import { ImportResolver } from '../analyzer/importResolver'; +import { ImportType } from '../analyzer/importResult'; +import { ConfigOptions } from '../common/configOptions'; +import { FileSystem, MkDirOptions, Stats } from '../common/fileSystem'; +import { FileWatcher, FileWatcherEventHandler } from '../common/fileWatcher'; +import { FullAccessHost } from '../common/fullAccessHost'; +import { Host } from '../common/host'; +import { lib, sitePackages, typeshedFallback } from '../common/pathConsts'; +import { combinePaths, getDirectoryPath, normalizeSlashes } from '../common/pathUtils'; +import { createFromRealFileSystem, RealTempFile } from '../common/realFileSystem'; +import { ServiceKeys } from '../common/serviceKeys'; +import { ServiceProvider } from '../common/serviceProvider'; +import { createServiceProvider } from '../common/serviceProviderExtensions'; +import { Uri } from '../common/uri/uri'; +import { UriEx } from '../common/uri/uriUtils'; +import { PartialStubService } from '../partialStubService'; +import { PyrightFileSystem } from '../pyrightFileSystem'; +import { TestAccessHost } from './harness/testAccessHost'; +import { TestFileSystem } from './harness/vfs/filesystem'; + +const libraryRoot = combinePaths(normalizeSlashes('/'), lib, sitePackages); + +function usingTrueVenv() { + return process.env.CI_IMPORT_TEST_VENVPATH !== undefined || process.env.CI_IMPORT_TEST_PYTHONPATH !== undefined; +} + +describe('Import tests with fake venv', () => { + const tempFile = new RealTempFile(); + + afterAll(() => tempFile.dispose()); + + if (!usingTrueVenv()) { + describe('Import tests that cannot run in a true venv', () => { + test('partial stub file exists', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'partialStub.pyi'), + content: 'def 
test(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'py.typed'), + content: 'partial\n', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'partialStub.py'), + content: 'def test(): pass', + }, + ]; + + const importResult = getImportResult(files, ['myLib', 'partialStub']); + assert(importResult.isImportFound); + assert(importResult.isStubFile); + assert.strictEqual( + 1, + importResult.resolvedUris.filter( + (f) => !f.isEmpty() && f.getFilePath() === combinePaths(libraryRoot, 'myLib', 'partialStub.pyi') + ).length + ); + }); + + test('partial stub __init__ exists', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', '__init__.pyi'), + content: 'def test(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'py.typed'), + content: 'partial\n', + }, + { + path: combinePaths(libraryRoot, 'myLib', '__init__.py'), + content: 'def test(): pass', + }, + ]; + + const importResult = getImportResult(files, ['myLib']); + assert(importResult.isImportFound); + assert(importResult.isStubFile); + assert.strictEqual( + 1, + importResult.resolvedUris.filter( + (f) => f.getFilePath() === combinePaths(libraryRoot, 'myLib', '__init__.pyi') + ).length + ); + }); + + test('stub package', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'stub.pyi'), + content: '# empty', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', '__init__.pyi'), + content: '# empty', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'partialStub.py'), + content: 'def test(): pass', + }, + ]; + + // If fully typed stub package exists, that wins over the real package. 
+ const importResult = getImportResult(files, ['myLib', 'partialStub']); + assert(!importResult.isImportFound); + }); + + test('partial stub package in typing folder', () => { + const typingFolder = combinePaths(normalizeSlashes('/'), 'typing'); + const files = [ + { + path: combinePaths(typingFolder, 'myLib-stubs', '__init__.pyi'), + content: 'def test(): ...', + }, + { + path: combinePaths(typingFolder, 'myLib-stubs', 'py.typed'), + content: 'partial\n', + }, + { + path: combinePaths(libraryRoot, 'myLib', '__init__.py'), + content: 'def test(): pass', + }, + ]; + + const importResult = getImportResult(files, ['myLib'], (c) => (c.stubPath = UriEx.file(typingFolder))); + assert(importResult.isImportFound); + assert(importResult.isStubFile); + assert.strictEqual( + 1, + importResult.resolvedUris.filter( + (f) => f.getFilePath() === combinePaths(libraryRoot, 'myLib', '__init__.pyi') + ).length + ); + }); + + test('typeshed folder', () => { + const typeshedFolder = combinePaths(normalizeSlashes('/'), 'ts'); + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', '__init__.pyi'), + content: 'def test(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'py.typed'), + content: 'partial\n', + }, + { + path: combinePaths(typeshedFolder, 'stubs', 'myLibPackage', 'myLib.pyi'), + content: '# empty', + }, + { + path: combinePaths(libraryRoot, 'myLib', '__init__.py'), + content: 'def test(): pass', + }, + ]; + + // Stub packages win over typeshed. 
+ const importResult = getImportResult( + files, + ['myLib'], + (c) => (c.typeshedPath = UriEx.file(typeshedFolder)) + ); + assert(importResult.isImportFound); + assert(importResult.isStubFile); + assert.strictEqual( + 1, + importResult.resolvedUris.filter( + (f) => f.getFilePath() === combinePaths(libraryRoot, 'myLib', '__init__.pyi') + ).length + ); + }); + + test('typeshed fallback folder', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', '__init__.pyi'), + content: 'def test(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'py.typed'), + content: 'partial\n', + }, + { + path: combinePaths('/', typeshedFallback, 'stubs', 'myLibPackage', 'myLib.pyi'), + content: '# empty', + }, + { + path: combinePaths(libraryRoot, 'myLib', '__init__.py'), + content: 'def test(): pass', + }, + ]; + + // Stub packages win over typeshed. + const importResult = getImportResult(files, ['myLib']); + assert(importResult.isImportFound); + assert(importResult.isStubFile); + assert.strictEqual( + 1, + importResult.resolvedUris.filter( + (f) => f.getFilePath() === combinePaths(libraryRoot, 'myLib', '__init__.pyi') + ).length + ); + }); + + test('py.typed file', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', '__init__.pyi'), + content: 'def test(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'py.typed'), + content: 'partial\n', + }, + { + path: combinePaths(libraryRoot, 'myLib', '__init__.py'), + content: 'def test(): pass', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'py.typed'), + content: '# typed', + }, + ]; + + // Partial stub package always overrides original package. 
+ const importResult = getImportResult(files, ['myLib']); + assert(importResult.isImportFound); + assert(importResult.isStubFile); + }); + + test('py.typed library', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'os', '__init__.py'), + content: 'def test(): ...', + }, + { + path: combinePaths(libraryRoot, 'os', 'py.typed'), + content: '', + }, + { + path: combinePaths('/', typeshedFallback, 'stubs', 'os', 'os', '__init__.pyi'), + content: '# empty', + }, + ]; + + const importResult = getImportResult(files, ['os']); + assert(importResult.isImportFound); + assert.strictEqual( + files[0].path, + importResult.resolvedUris[importResult.resolvedUris.length - 1].getFilePath() + ); + }); + + test('import side by side file sub under lib folder', () => { + const files = [ + { + path: combinePaths('/lib/site-packages/myLib', 'file1.py'), + content: 'def test1(): ...', + }, + { + path: combinePaths('/lib/site-packages/myLib', 'file2.py'), + content: 'def test2(): ...', + }, + ]; + + const importResult = getImportResult(files, ['file1']); + assert(!importResult.isImportFound); + }); + }); + + test('getModuleNameForImport library file', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'myLib', 'myModule', 'file1.py'), + content: '# empty', + }, + ]; + + const moduleImportInfo = getModuleNameForImport(files); + + assert.strictEqual(moduleImportInfo.importType, ImportType.ThirdParty); + assert(!moduleImportInfo.isThirdPartyPyTypedPresent); + assert(!moduleImportInfo.isLocalTypingsFile); + }); + + test('getModuleNameForImport py.typed library file', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'myLib', 'py.typed'), + content: '', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'myModule', 'file1.py'), + content: '# empty', + }, + ]; + + const moduleImportInfo = getModuleNameForImport(files); + + assert.strictEqual(moduleImportInfo.importType, ImportType.ThirdParty); + assert(moduleImportInfo.isThirdPartyPyTypedPresent); + 
assert(!moduleImportInfo.isLocalTypingsFile); + }); + + test('import found in symlinked file', () => { + const files = [ + { + path: combinePaths('/', 'external', 'file2.py'), + content: 'def f(): pass', + }, + { + path: combinePaths('/', 'src', 'file1.py'), + content: 'import file2', + }, + ]; + + const result = getImportResult( + files, + ['file2'], + (c) => { + c.defaultExtraPaths = [UriEx.file(combinePaths('/', 'external_symlinked'))]; + }, + (importResolver, uri, configOptions) => { + const fs = importResolver.serviceProvider.fs() as PyrightFileSystem; + const testFs = (fs as any).realFS as TestFileSystem; + testFs.mkdirSync(UriEx.file(combinePaths('/', 'external_symlinked'))); + testFs.symlinkSync( + combinePaths('/', 'external', 'file2.py'), + combinePaths('/', 'external_symlinked', 'file2.py') + ); + } + ); + + assert(result.isImportFound); + }); + } + + describe('Import tests that can run with or without a true venv', () => { + test('side by side files', () => { + const myFile = combinePaths('src', 'file.py'); + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'partialStub.pyi'), + content: 'def test(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'py.typed'), + content: 'partial\n', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'partialStub.pyi'), + content: '# empty', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'partialStub.py'), + content: 'def test(): pass', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'partialStub2.pyi'), + content: 'def test(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'partialStub2.py'), + content: 'def test(): pass', + }, + { + path: myFile, + content: '# not used', + }, + ]; + + const sp = createServiceProviderFromFiles(files); + const configOptions = new ConfigOptions(UriEx.file('/')); + const importResolver = new ImportResolver( + sp, + configOptions, + new TestAccessHost(sp.fs().getModulePath(), [UriEx.file(libraryRoot)]) + ); + + // Stub package 
wins over original package (per PEP 561 rules). + const myUri = UriEx.file(myFile); + const sideBySideResult = importResolver.resolveImport(myUri, configOptions.findExecEnvironment(myUri), { + leadingDots: 0, + nameParts: ['myLib', 'partialStub'], + importedSymbols: new Set<string>(), + }); + + assert(sideBySideResult.isImportFound); + assert(sideBySideResult.isStubFile); + + const sideBySideStubFile = UriEx.file(combinePaths(libraryRoot, 'myLib', 'partialStub.pyi')); + assert.strictEqual(1, sideBySideResult.resolvedUris.filter((f) => f.key === sideBySideStubFile.key).length); + assert.strictEqual('def test(): ...', sp.fs().readFileSync(sideBySideStubFile, 'utf8')); + + // Side by side stub doesn't completely disable partial stub. + const partialStubResult = importResolver.resolveImport(myUri, configOptions.findExecEnvironment(myUri), { + leadingDots: 0, + nameParts: ['myLib', 'partialStub2'], + importedSymbols: new Set<string>(), + }); + + assert(partialStubResult.isImportFound); + assert(partialStubResult.isStubFile); + + const partialStubFile = UriEx.file(combinePaths(libraryRoot, 'myLib', 'partialStub2.pyi')); + assert.strictEqual(1, partialStubResult.resolvedUris.filter((f) => f.key === partialStubFile.key).length); + }); + + test('stub namespace package', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'stub.pyi'), + content: '# empty', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'partialStub.py'), + content: 'def test(): pass', + }, + ]; + + // If fully typed stub package exists, that wins over the real package. 
+ const importResult = getImportResult(files, ['myLib', 'partialStub']); + assert(importResult.isImportFound); + assert(!importResult.isStubFile); + assert.strictEqual( + 1, + importResult.resolvedUris.filter( + (f) => !f.isEmpty() && f.getFilePath() === combinePaths(libraryRoot, 'myLib', 'partialStub.py') + ).length + ); + }); + + test('py.typed namespace package plus stubs', () => { + const typingFolder = combinePaths(normalizeSlashes('/'), 'typing'); + const files = [ + { + path: combinePaths(typingFolder, 'myLib/core', 'foo.pyi'), + content: 'def test(): pass', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'py.typed'), + content: '', + }, + { + path: combinePaths(libraryRoot, 'myLib', '__init__.py'), + content: 'def test(): pass', + }, + { + path: combinePaths(libraryRoot, 'myLib', '__init__.pyi'), + content: 'def test(): pass', + }, + ]; + + const importResult = getImportResult(files, ['myLib'], (c) => (c.stubPath = UriEx.file(typingFolder))); + assert(importResult.isImportFound); + assert(importResult.isStubFile); + assert.strictEqual( + 1, + importResult.resolvedUris.filter( + (f) => !f.isEmpty() && f.getFilePath() === combinePaths(libraryRoot, 'myLib', '__init__.pyi') + ).length + ); + }); + + test('stub in typing folder over partial stub package', () => { + const typingFolder = combinePaths(normalizeSlashes('/'), 'typing'); + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', '__init__.pyi'), + content: 'def test(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'py.typed'), + content: 'partial\n', + }, + { + path: combinePaths(typingFolder, 'myLib.pyi'), + content: '# empty', + }, + { + path: combinePaths(libraryRoot, 'myLib', '__init__.py'), + content: 'def test(): pass', + }, + ]; + + // If the package exists in typing folder, that gets picked up first. 
+ const importResult = getImportResult(files, ['myLib'], (c) => (c.stubPath = UriEx.file(typingFolder))); + assert(importResult.isImportFound); + assert(importResult.isStubFile); + assert.strictEqual( + 0, + importResult.resolvedUris.filter( + (f) => f.getFilePath() === combinePaths(libraryRoot, 'myLib', '__init__.pyi') + ).length + ); + }); + + test('non py.typed library', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'os', '__init__.py'), + content: 'def test(): ...', + }, + { + path: combinePaths('/', typeshedFallback, 'stubs', 'os', 'os', '__init__.pyi'), + content: '# empty', + }, + ]; + + const importResult = getImportResult(files, ['os']); + assert(importResult.isImportFound); + assert.strictEqual( + files[1].path, + importResult.resolvedUris[importResult.resolvedUris.length - 1].getFilePath() + ); + }); + + test('no empty import roots', () => { + const sp = createServiceProviderFromFiles([]); + const configOptions = new ConfigOptions(Uri.empty()); // Empty, like open-file mode. + const importResolver = new ImportResolver( + sp, + configOptions, + new TestAccessHost(sp.fs().getModulePath(), [UriEx.file(libraryRoot)]) + ); + importResolver.getImportRoots(configOptions.getDefaultExecEnvironment()).forEach((path) => assert(path)); + }); + + test('multiple typeshedFallback', () => { + const files = [ + { + path: combinePaths('/', typeshedFallback, 'stubs', 'aLib', 'aLib', '__init__.pyi'), + content: '# empty', + }, + { + path: combinePaths('/', typeshedFallback, 'stubs', 'bLib', 'bLib', '__init__.pyi'), + content: '# empty', + }, + ]; + + const sp = createServiceProviderFromFiles(files); + const configOptions = new ConfigOptions(Uri.empty()); // Empty, like open-file mode. 
+ const importResolver = new ImportResolver( + sp, + configOptions, + new TestAccessHost(sp.fs().getModulePath(), [UriEx.file(libraryRoot)]) + ); + const importRoots = importResolver.getImportRoots(configOptions.getDefaultExecEnvironment()); + + assert.strictEqual( + 1, + importRoots.filter( + (f) => !f.isEmpty() && f.getFilePath() === combinePaths('/', typeshedFallback, 'stubs', 'aLib') + ).length + ); + assert.strictEqual( + 1, + importRoots.filter( + (f) => !f.isEmpty() && f.getFilePath() === combinePaths('/', typeshedFallback, 'stubs', 'bLib') + ).length + ); + }); + + test('import side by side file root', () => { + const files = [ + { + path: combinePaths('/', 'file1.py'), + content: 'def test1(): ...', + }, + { + path: combinePaths('/', 'file2.py'), + content: 'def test2(): ...', + }, + ]; + + const importResult = getImportResult(files, ['file1']); + assert(importResult.isImportFound); + assert.strictEqual( + 1, + importResult.resolvedUris.filter((f) => f.getFilePath() === combinePaths('/', 'file1.py')).length + ); + }); + + test('import side by side file sub folder', () => { + const files = [ + { + path: combinePaths('/test', 'file1.py'), + content: 'def test1(): ...', + }, + { + path: combinePaths('/test', 'file2.py'), + content: 'def test2(): ...', + }, + ]; + + const importResult = getImportResult(files, ['file1']); + assert(importResult.isImportFound); + assert.strictEqual( + 1, + importResult.resolvedUris.filter((f) => f.getFilePath() === combinePaths('/test', 'file1.py')).length + ); + }); + + test('import side by side file sub under src folder', () => { + const files = [ + { + path: combinePaths('/src/nested', 'file1.py'), + content: 'def test1(): ...', + }, + { + path: combinePaths('/src/nested', 'file2.py'), + content: 'def test2(): ...', + }, + ]; + + const importResult = getImportResult(files, ['file1']); + assert(importResult.isImportFound); + assert.strictEqual( + 1, + importResult.resolvedUris.filter((f) => f.getFilePath() === 
combinePaths('/src/nested', 'file1.py')) + .length + ); + }); + + test('import file sub under containing folder', () => { + const files = [ + { + path: combinePaths('/src/nested', 'file1.py'), + content: 'def test1(): ...', + }, + { + path: combinePaths('/src/nested/nested2', 'file2.py'), + content: 'def test2(): ...', + }, + ]; + + const importResult = getImportResult(files, ['file1']); + assert(importResult.isImportFound); + assert.strictEqual( + 1, + importResult.resolvedUris.filter((f) => f.getFilePath() === combinePaths('/src/nested', 'file1.py')) + .length + ); + }); + + test("don't walk up the root", () => { + const files = [ + { + path: combinePaths('/', 'file1.py'), + content: 'def test1(): ...', + }, + ]; + + const importResult = getImportResult(files, ['notExist'], (c) => (c.projectRoot = Uri.empty())); + assert(!importResult.isImportFound); + }); + + test('nested namespace package 1', () => { + const files = [ + { + path: combinePaths('/', 'packages1', 'a', 'b', 'c', 'd.py'), + content: 'def f(): pass', + }, + { + path: combinePaths('/', 'packages1', 'a', '__init__.py'), + content: '', + }, + { + path: combinePaths('/', 'packages2', 'a', '__init__.py'), + content: '', + }, + ]; + + const importResult = getImportResult(files, ['a', 'b', 'c', 'd'], (config) => { + config.defaultExtraPaths = [ + UriEx.file(combinePaths('/', 'packages1')), + UriEx.file(combinePaths('/', 'packages2')), + ]; + }); + assert(importResult.isImportFound); + }); + + test('nested namespace package 2', () => { + const files = [ + { + path: combinePaths('/', 'packages1', 'a', 'b', 'c', 'd.py'), + content: 'def f(): pass', + }, + { + path: combinePaths('/', 'packages1', 'a', 'b', 'c', '__init__.py'), + content: '', + }, + { + path: combinePaths('/', 'packages2', 'a', 'b', 'c', '__init__.py'), + content: '', + }, + ]; + + const importResult = getImportResult(files, ['a', 'b', 'c', 'd'], (config) => { + config.defaultExtraPaths = [ + UriEx.file(combinePaths('/', 'packages1')), + 
UriEx.file(combinePaths('/', 'packages2')), + ]; + }); + assert(importResult.isImportFound); + }); + + test('nested namespace package 3', () => { + const files = [ + { + path: combinePaths('/', 'packages1', 'a', 'b', 'c', 'd.py'), + content: 'def f(): pass', + }, + { + path: combinePaths('/', 'packages2', 'a', '__init__.py'), + content: '', + }, + ]; + + const importResult = getImportResult(files, ['a', 'b', 'c', 'd'], (config) => { + config.defaultExtraPaths = [ + UriEx.file(combinePaths('/', 'packages1')), + UriEx.file(combinePaths('/', 'packages2')), + ]; + }); + assert(!importResult.isImportFound); + }); + + test('nested namespace package 4', () => { + const files = [ + { + path: combinePaths('/', 'packages1', 'a', 'b', '__init__.py'), + content: '', + }, + { + path: combinePaths('/', 'packages1', 'a', 'b', 'c.py'), + content: 'def f(): pass', + }, + { + path: combinePaths('/', 'packages2', 'a', '__init__.py'), + content: '', + }, + { + path: combinePaths('/', 'packages2', 'a', 'b', '__init__.py'), + content: '', + }, + ]; + + const importResult = getImportResult(files, ['a', 'b', 'c'], (config) => { + config.defaultExtraPaths = [ + UriEx.file(combinePaths('/', 'packages1')), + UriEx.file(combinePaths('/', 'packages2')), + ]; + }); + assert(!importResult.isImportFound); + }); + + test('default workspace importing side by side file', () => { + const files = [ + { + path: combinePaths('/', 'src', 'a', 'b', 'file1.py'), + content: 'import file2', + }, + { + path: combinePaths('/', 'src', 'a', 'b', 'file2.py'), + content: 'def f(): pass', + }, + ]; + + const importResult = getImportResult(files, ['file2'], (config) => { + config.projectRoot = Uri.defaultWorkspace({ isCaseSensitive: () => true }); + }); + assert(importResult.isImportFound); + }); + + test('getModuleNameForImport user file', () => { + const files = [ + { + path: combinePaths('/', 'src', 'file1.py'), + content: '# empty', + }, + ]; + + const moduleImportInfo = getModuleNameForImport(files); + + 
assert.strictEqual(moduleImportInfo.importType, ImportType.Local); + assert(!moduleImportInfo.isThirdPartyPyTypedPresent); + assert(!moduleImportInfo.isLocalTypingsFile); + }); + }); + + if (usingTrueVenv()) { + describe('Import tests that have to run with a venv', () => { + test('venv can find imports', () => { + const tempFile = new RealTempFile(); + const files = [ + { + path: combinePaths('/', 'file1.py'), + content: 'import pytest', + }, + ]; + + const importResult = getImportResult(files, ['pytest']); + assert(importResult.isImportFound, `Import not found: ${importResult.importFailureInfo?.join('\n')}`); + + tempFile.dispose(); + }); + }); + } + + function getImportResult( + files: { path: string; content: string }[], + nameParts: string[], + setup?: (c: ConfigOptions) => void, + preImport?: (importResolver: ImportResolver, uri: Uri, configOptions: ConfigOptions) => void + ) { + const { importResolver, uri, configOptions } = setupImportResolver(files, setup); + + if (preImport) { + preImport(importResolver, uri, configOptions); + } + + const importResult = importResolver.resolveImport(uri, configOptions.findExecEnvironment(uri), { + leadingDots: 0, + nameParts: nameParts, + importedSymbols: new Set<string>(), + }); + + // Add the config venvpath to the import result so we can output it on failure. + if (!importResult.isImportFound) { + importResult.importFailureInfo = importResult.importFailureInfo ?? 
[]; + importResult.importFailureInfo.push(`venvPath: ${configOptions.venvPath}`); + } + + return importResult; + } + + function getModuleNameForImport(files: { path: string; content: string }[], setup?: (c: ConfigOptions) => void) { + const { importResolver, uri, configOptions } = setupImportResolver(files, setup); + + const moduleImportInfo = importResolver.getModuleNameForImport( + uri, + configOptions.findExecEnvironment(uri), + undefined, + /* detectPyTyped */ true + ); + + return moduleImportInfo; + } + + function setupImportResolver(files: { path: string; content: string }[], setup?: (c: ConfigOptions) => void) { + const defaultHostFactory = (sp: ServiceProvider) => + new TestAccessHost(sp.fs().getModulePath(), [UriEx.file(libraryRoot)]); + const defaultSetup = + setup ?? + ((c) => { + /* empty */ + }); + const defaultSpFactory = (files: { path: string; content: string }[]) => createServiceProviderFromFiles(files); + + // Use environment variables to determine how to create a host and how to modify the config options. + // These are set in the CI to test imports with different options. 
+ let hostFactory: (sp: ServiceProvider) => Host = defaultHostFactory; + let configModifier = defaultSetup; + let spFactory = defaultSpFactory; + + if (process.env.CI_IMPORT_TEST_VENVPATH) { + configModifier = (c: ConfigOptions) => { + defaultSetup(c); + c.venvPath = UriEx.file( + process.env.CI_IMPORT_TEST_VENVPATH!, + /* isCaseSensitive */ true, + /* checkRelative */ true + ); + c.venv = process.env.CI_IMPORT_TEST_VENV; + }; + spFactory = (files: { path: string; content: string }[]) => createServiceProviderWithCombinedFs(files); + } else if (process.env.CI_IMPORT_TEST_PYTHONPATH) { + configModifier = (c: ConfigOptions) => { + defaultSetup(c); + c.pythonPath = UriEx.file( + process.env.CI_IMPORT_TEST_PYTHONPATH!, + /* isCaseSensitive */ true, + /* checkRelative */ true + ); + }; + hostFactory = (sp: ServiceProvider) => { + return new TruePythonTestAccessHost(sp, tempFile); + }; + spFactory = (files: { path: string; content: string }[]) => createServiceProviderWithCombinedFs(files); + } + + const sp = spFactory(files); + const configOptions = new ConfigOptions(UriEx.file('/')); + configModifier(configOptions); + + const file = files.length > 0 ? 
files[files.length - 1].path : combinePaths('src', 'file.py'); + if (files.length === 0) { + files.push({ + path: file, + content: '# not used', + }); + } + + const uri = UriEx.file(file); + const importResolver = new ImportResolver(sp, configOptions, hostFactory(sp)); + + return { importResolver, uri, configOptions }; + } +}); + +function createTestFileSystem(files: { path: string; content: string }[]): TestFileSystem { + const fs = new TestFileSystem(/* ignoreCase */ false, { cwd: normalizeSlashes('/') }); + + for (const file of files) { + const path = normalizeSlashes(file.path); + const dir = getDirectoryPath(path); + fs.mkdirpSync(dir); + + fs.writeFileSync(UriEx.file(path), file.content); + } + + return fs; +} + +function createServiceProviderFromFiles(files: { path: string; content: string }[]): ServiceProvider { + const testFS = createTestFileSystem(files); + const fs = new PyrightFileSystem(testFS); + const partialStubService = new PartialStubService(fs); + return createServiceProvider(testFS, fs, partialStubService); +} + +function createServiceProviderWithCombinedFs(files: { path: string; content: string }[]): ServiceProvider { + const testFS = createTestFileSystem(files); + const fs = new PyrightFileSystem(new CombinedFileSystem(testFS)); + const partialStubService = new PartialStubService(fs); + return createServiceProvider(testFS, fs, partialStubService); +} + +class TruePythonTestAccessHost extends FullAccessHost { + constructor(sp: ServiceProvider, tempFile: RealTempFile) { + const clone = sp.clone(); + + // Make sure the service provide in use is using a real file system and real temporary file provider. 
+ clone.add(ServiceKeys.tempFile, tempFile); + clone.add(ServiceKeys.fs, createFromRealFileSystem(tempFile)); + super(clone); + } +} + +class CombinedFileSystem implements FileSystem { + private _realFS = createFromRealFileSystem(this._testFS); + + constructor(private _testFS: TestFileSystem) {} + + mkdirSync(path: Uri, options?: MkDirOptions | undefined): void { + this._testFS.mkdirSync(path, options); + } + + writeFileSync(path: Uri, data: string | Buffer, encoding: BufferEncoding | null): void { + this._testFS.writeFileSync(path, data, encoding); + } + + unlinkSync(path: Uri): void { + this._testFS.unlinkSync(path); + } + + rmdirSync(path: Uri): void { + this._testFS.rmdirSync(path); + } + + createFileSystemWatcher(paths: Uri[], listener: FileWatcherEventHandler): FileWatcher { + return this._testFS.createFileSystemWatcher(paths, listener); + } + + createReadStream(path: Uri): ReadStream { + return this._testFS.createReadStream(path); + } + + createWriteStream(path: Uri): WriteStream { + return this._testFS.createWriteStream(path); + } + + copyFileSync(src: Uri, dst: Uri): void { + this._testFS.copyFileSync(src, dst); + } + + existsSync(path: Uri): boolean { + return this._testFS.existsSync(path) || this._realFS.existsSync(path); + } + + chdir(path: Uri): void { + this._testFS.chdir(path); + } + + readdirEntriesSync(path: Uri): Dirent[] { + if (this._testFS.existsSync(path)) { + return this._testFS.readdirEntriesSync(path); + } + return this._realFS.readdirEntriesSync(path); + } + + readdirSync(path: Uri): string[] { + if (this._testFS.existsSync(path)) { + return this._testFS.readdirSync(path); + } + return this._realFS.readdirSync(path); + } + + readFileSync(path: Uri, encoding?: null): Buffer; + readFileSync(path: Uri, encoding: BufferEncoding): string; + readFileSync(path: Uri, encoding?: BufferEncoding | null): string | Buffer; + readFileSync(path: Uri, encoding: BufferEncoding | null = null) { + if (this._testFS.existsSync(path)) { + return 
this._testFS.readFileSync(path, encoding); + } + return this._realFS.readFileSync(path, encoding); + } + + statSync(path: Uri): Stats { + if (this._testFS.existsSync(path)) { + return this._testFS.statSync(path); + } + return this._realFS.statSync(path); + } + + realpathSync(path: Uri): Uri { + if (this._testFS.existsSync(path)) { + return this._testFS.realpathSync(path); + } + return this._realFS.realpathSync(path); + } + + getModulePath(): Uri { + return this._testFS.getModulePath(); + } + + readFile(path: Uri): Promise<Buffer> { + if (this._testFS.existsSync(path)) { + return this._testFS.readFile(path); + } + return this._realFS.readFile(path); + } + + readFileText(path: Uri, encoding?: BufferEncoding | undefined): Promise<string> { + if (this._testFS.existsSync(path)) { + return this._testFS.readFileText(path, encoding); + } + return this._realFS.readFileText(path, encoding); + } + + realCasePath(path: Uri): Uri { + return this._testFS.realCasePath(path); + } + + isMappedUri(filepath: Uri): boolean { + return this._testFS.isMappedUri(filepath); + } + + getOriginalUri(mappedFilePath: Uri): Uri { + return this._testFS.getOriginalUri(mappedFilePath); + } + + getMappedUri(originalFilePath: Uri): Uri { + return this._testFS.getMappedUri(originalFilePath); + } + + isInZip(path: Uri): boolean { + return this._testFS.isInZip(path); + } + + mapDirectory(mappedUri: Uri, originalUri: Uri, filter?: (originalUri: Uri, fs: FileSystem) => boolean): Disposable { + return this._realFS.mapDirectory(mappedUri, originalUri, filter); + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/importStatementUtils.test.ts b/python-parser/packages/pyright-internal/src/tests/importStatementUtils.test.ts new file mode 100644 index 00000000..4ce97ba8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/importStatementUtils.test.ts @@ -0,0 +1,612 @@ +/* + * importStatementUtils.test.ts + * Copyright (c) Microsoft Corporation. 
+ * Licensed under the MIT license. + * + * Unit tests for importStatementUtils module. + */ + +import assert from 'assert'; + +import { isFunctionDeclaration } from '../analyzer/declaration'; +import { ImportType } from '../analyzer/importResult'; +import { + getRelativeModuleName, + getTextEditsForAutoImportInsertions, + getTextEditsForAutoImportSymbolAddition, + getTopLevelImports, + ImportNameInfo, + ImportNameWithModuleInfo, +} from '../analyzer/importStatementUtils'; +import { findNodeByOffset } from '../analyzer/parseTreeUtils'; +import { isArray } from '../common/core'; +import { TextEditAction } from '../common/editAction'; +import { convertOffsetToPosition } from '../common/positionUtils'; +import { rangesAreEqual } from '../common/textRange'; +import { NameNode } from '../parser/parseNodes'; +import { Range } from './harness/fourslash/fourSlashTypes'; +import { parseAndGetTestState, TestState } from './harness/fourslash/testState'; + +test('getTextEditsForAutoImportInsertion - import empty', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys"|}|] + `; + + testInsertion(code, 'marker1', [], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - import', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys"|}|] + `; + + testInsertion(code, 'marker1', {}, 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - import alias', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys as s"|}|] + `; + + testInsertion(code, 'marker1', { alias: 's' }, 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - multiple imports', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys"|}|] + `; + + testInsertion(code, 'marker1', [{}, {}], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - multiple imports alias', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys as s, 
sys as y"|}|] + `; + + testInsertion(code, 'marker1', [{ alias: 's' }, { alias: 'y' }], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - multiple imports alias duplicated', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys as s"|}|] + `; + + testInsertion(code, 'marker1', [{ alias: 's' }, { alias: 's' }], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - from import', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!from sys import path"|}|] + `; + + testInsertion(code, 'marker1', { name: 'path' }, 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - from import alias', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!from sys import path as p"|}|] + `; + + testInsertion(code, 'marker1', { name: 'path', alias: 'p' }, 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - multiple from imports', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!from sys import meta_path, path"|}|] + `; + + testInsertion(code, 'marker1', [{ name: 'path' }, { name: 'meta_path' }], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - multiple from imports with alias', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!from sys import meta_path as m, path as p"|}|] + `; + + testInsertion( + code, + 'marker1', + [ + { name: 'path', alias: 'p' }, + { name: 'meta_path', alias: 'm' }, + ], + 'sys', + ImportType.BuiltIn + ); +}); + +test('getTextEditsForAutoImportInsertion - multiple from imports with alias duplicated', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!from sys import meta_path as m, path as p"|}|] + `; + + testInsertion( + code, + 'marker1', + [ + { name: 'path', alias: 'p' }, + { name: 'meta_path', alias: 'm' }, + { name: 'path', alias: 'p' }, + ], + 'sys', + ImportType.BuiltIn + ); +}); + +test('getTextEditsForAutoImportInsertion - multiple import 
statements', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys as s!n!from sys import path as p"|}|] + `; + + testInsertion(code, 'marker1', [{ alias: 's' }, { name: 'path', alias: 'p' }], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - different group', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!!n!import sys as s!n!from sys import path as p"|}|] + `; + + testInsertion(code, 'marker1', [{ alias: 's' }, { name: 'path', alias: 'p' }], 'sys', ImportType.Local); +}); + +test('getTextEditsForAutoImportInsertion - at the top', () => { + const code = ` +//// [|/*marker1*/{|"r":"import sys as s!n!from sys import path as p!n!!n!!n!"|}|]import os + `; + + testInsertion(code, 'marker1', [{ alias: 's' }, { name: 'path', alias: 'p' }], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - at top of second group', () => { + const code = ` +//// import os +//// +//// [|/*marker1*/{|"r":"from test.a import testa!n!"|}|]from test.b import testb + `; + + testInsertion(code, 'marker1', [{ name: 'testa' }], 'test.a', ImportType.Local); +}); + +test('getTextEditsForAutoImportInsertion - at the top after module doc string', () => { + const code = ` +//// ''' module doc string ''' +//// __author__ = "Software Authors Name" +//// __copyright__ = "Copyright (C) 2004 Author Name" +//// __license__ = "Public Domain" +//// __version__ = "1.0" +//// [|/*marker1*/{|"r":"import sys as s!n!from sys import path as p!n!!n!!n!"|}|]import os + `; + + testInsertion(code, 'marker1', [{ alias: 's' }, { name: 'path', alias: 'p' }], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertions - mix of import and from import statements', () => { + const code = ` +//// [|/*marker1*/{|"r":"import sys as s!n!from sys import path as p!n!!n!!n!"|}|]import os + `; + + const module = { moduleName: 'sys', importType: ImportType.BuiltIn, isLocalTypingsFile: false }; + testInsertions(code, 'marker1', 
[ + { module, alias: 's' }, + { module, name: 'path', alias: 'p' }, + ]); +}); + +test('getTextEditsForAutoImportInsertions - multiple modules with different group', () => { + const code = ` +//// [|/*marker1*/|][|{|"r":"from sys import path as p!n!!n!!n!"|}|][|{|"r":"import numpy!n!!n!!n!"|}|][|{|"r":"from test import join!n!!n!!n!"|}|]import os + `; + + const module1 = { moduleName: 'sys', importType: ImportType.BuiltIn, isLocalTypingsFile: false }; + const module2 = { moduleName: 'numpy', importType: ImportType.ThirdParty, isLocalTypingsFile: false }; + const module3 = { moduleName: 'test', importType: ImportType.Local, isLocalTypingsFile: false }; + + testInsertions(code, 'marker1', [ + { module: module1, name: 'path', alias: 'p' }, + { module: module2 }, + { module: module3, name: 'join' }, + ]); +}); + +test('getTextEditsForAutoImportInsertions - multiple modules with existing imports', () => { + const code = ` +//// import os[|/*marker1*/|][|{|"r":"!n!from sys import path as p"|}|][|{|"r":"!n!!n!import numpy"|}|][|{|"r":"!n!!n!from test import join"|}|] + `; + + const module1 = { moduleName: 'sys', importType: ImportType.BuiltIn, isLocalTypingsFile: false }; + const module2 = { moduleName: 'numpy', importType: ImportType.ThirdParty, isLocalTypingsFile: false }; + const module3 = { moduleName: 'test', importType: ImportType.Local, isLocalTypingsFile: false }; + + testInsertions(code, 'marker1', [ + { module: module1, name: 'path', alias: 'p' }, + { module: module2 }, + { module: module3, name: 'join' }, + ]); +}); + +test('getTextEditsForAutoImportInsertions - multiple modules with same group', () => { + const code = ` +//// import os[|/*marker1*/|][|{|"r":"!n!!n!import module2!n!from module1 import path as p!n!from module3 import join"|}|] + `; + + const module1 = { moduleName: 'module1', importType: ImportType.Local, isLocalTypingsFile: false }; + const module2 = { moduleName: 'module2', importType: ImportType.Local, isLocalTypingsFile: false }; + const 
module3 = { moduleName: 'module3', importType: ImportType.Local, isLocalTypingsFile: false }; + + testInsertions(code, 'marker1', [ + { module: module1, name: 'path', alias: 'p' }, + { module: module2 }, + { module: module3, name: 'join' }, + ]); +}); + +test('getTextEditsForAutoImportSymbolAddition', () => { + const code = ` +//// from sys import [|/*marker1*/{|"r":"meta_path, "|}|]path + `; + + testAddition(code, 'marker1', { name: 'meta_path' }, 'sys'); +}); + +test('getTextEditsForAutoImportSymbolAddition - already exist', () => { + const code = ` +//// from sys import path[|/*marker1*/|] + `; + + testAddition(code, 'marker1', { name: 'path' }, 'sys'); +}); + +test('getTextEditsForAutoImportSymbolAddition - with alias', () => { + const code = ` +//// from sys import path[|/*marker1*/{|"r":", path as p"|}|] + `; + + testAddition(code, 'marker1', { name: 'path', alias: 'p' }, 'sys'); +}); + +test('getTextEditsForAutoImportSymbolAddition - multiple names', () => { + const code = ` +//// from sys import [|/*marker1*/{|"r":"meta_path as m, "|}|]path[|{|"r":", zoom as z"|}|] + `; + + testAddition( + code, + 'marker1', + [ + { name: 'meta_path', alias: 'm' }, + { name: 'zoom', alias: 'z' }, + ], + 'sys' + ); +}); + +test('getTextEditsForAutoImportSymbolAddition - multiple names at some spot', () => { + const code = ` +//// from sys import [|/*marker1*/{|"r":"meta_path as m, noon as n, "|}|]path + `; + + testAddition( + code, + 'marker1', + [ + { name: 'meta_path', alias: 'm' }, + { name: 'noon', alias: 'n' }, + ], + 'sys' + ); +}); + +test('getTextEditsForAutoImportSymbolAddition - wildcard', () => { + const code = ` +//// from sys import *[|/*marker1*/|] + `; + + testAddition(code, 'marker1', [{ name: 'path' }], 'sys'); +}); + +test('getRelativeModuleName - same file', () => { + const code = ` +// @filename: source.py +//// [|/*src*/|] [|/*dest*/|] + `; + + testRelativeModuleName(code, '.source'); +}); + +test('getRelativeModuleName - same file __init__', () => { + 
const code = ` +// @filename: common/__init__.py +//// [|/*src*/|] [|/*dest*/|] + `; + + testRelativeModuleName(code, '.'); +}); + +test('getRelativeModuleName - same folder', () => { + const code = ` +// @filename: source.py +//// [|/*src*/|] + +// @filename: dest.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '.dest'); +}); + +test('getRelativeModuleName - different folder move down', () => { + const code = ` +// @filename: common/source.py +//// [|/*src*/|] + +// @filename: dest.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '..dest'); +}); + +test('getRelativeModuleName - different folder move up', () => { + const code = ` +// @filename: source.py +//// [|/*src*/|] + +// @filename: common/dest.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '.common.dest'); +}); + +test('getRelativeModuleName - folder move down __init__ parent folder', () => { + const code = ` +// @filename: nest1/nest2/source.py +//// [|/*src*/|] + +// @filename: nest1/__init__.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '..'); +}); + +test('getRelativeModuleName - folder move down __init__ parent folder ignore folder structure', () => { + const code = ` +// @filename: nest1/nest2/source.py +//// [|/*src*/|] + +// @filename: nest1/__init__.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '...nest1', /*ignoreFolderStructure*/ true); +}); + +test('getRelativeModuleName - different folder move down __init__ sibling folder', () => { + const code = ` +// @filename: nest1/nest2/source.py +//// [|/*src*/|] + +// @filename: different/__init__.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '...different'); +}); + +test('getRelativeModuleName - different folder move up __init__', () => { + const code = ` +// @filename: source.py +//// [|/*src*/|] + +// @filename: common/__init__.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '.common'); +}); + +test('getRelativeModuleName - root __init__', () => { + const code = ` 
+// @filename: source.py +//// [|/*src*/|] + +// @filename: __init__.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '.'); +}); + +test('getRelativeModuleName over fake file', () => { + const code = ` +// @filename: target.py +//// [|/*dest*/|] + `; + + const state = parseAndGetTestState(code).state; + const dest = state.getMarkerByName('dest')!.fileUri; + + assert.strictEqual( + getRelativeModuleName( + state.fs, + dest.getDirectory().combinePaths('source.py'), + dest, + state.configOptions, + /*ignoreFolderStructure*/ false, + /*sourceIsFile*/ true + ), + '.target' + ); +}); + +test('getRelativeModuleName - target in stub path', () => { + const code = ` +// @filename: source.py +//// [|/*src*/|] + +// @filename: typings/library/__init__.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, undefined); +}); + +test('getRelativeModuleName - target in typeshed path', () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "typeshedPath": "my_typeshed" +//// } + +// @filename: source.py +//// [|/*src*/|] + +// @filename: my_typeshed/library/__init__.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, undefined); +}); + +test('resolve alias of not needed file', () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: myLib/__init__.py +// @library: true +//// from myLib.foo import [|/*marker*/foo|] + +// @filename: myLib/foo.py +// @library: true +//// def foo(): pass + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker')!; + + const evaluator = state.workspace.service.test_program.evaluator!; + state.openFile(marker.fileName); + + const markerUri = marker.fileUri; + const parseResults = state.workspace.service.getParseResults(markerUri)!; + const nameNode = findNodeByOffset(parseResults.parserOutput.parseTree, marker.position) as NameNode; + const aliasDecls = 
evaluator.getDeclInfoForNameNode(nameNode)!.decls; + + // Unroot the file. we can't explicitly close the file since it will unload the file from test program. + state.workspace.service.test_program.getSourceFileInfo(markerUri)!.isOpenByClient = false; + + const unresolved = evaluator.resolveAliasDeclaration(aliasDecls[0], /*resolveLocalNames*/ false); + assert(!unresolved); + + const resolved = evaluator.resolveAliasDeclaration(aliasDecls[0], /*resolveLocalNames*/ false, { + skipFileNeededCheck: true, + }); + + assert(resolved); + assert(isFunctionDeclaration(resolved)); +}); + +function testRelativeModuleName(code: string, expected: string | undefined, ignoreFolderStructure = false) { + const state = parseAndGetTestState(code).state; + const src = state.getMarkerByName('src')!.fileUri; + const dest = state.getMarkerByName('dest')!.fileUri; + + assert.strictEqual( + getRelativeModuleName(state.fs, src, dest, state.configOptions, ignoreFolderStructure), + expected + ); +} + +function testAddition( + code: string, + markerName: string, + importNameInfo: ImportNameInfo | ImportNameInfo[], + moduleName: string +) { + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName(markerName)!; + const parseResults = state.program.getBoundSourceFile(marker!.fileUri)!.getParseResults()!; + + const importStatement = getTopLevelImports(parseResults.parserOutput.parseTree).orderedImports.find( + (i) => i.moduleName === moduleName + )!; + const edits = getTextEditsForAutoImportSymbolAddition(importNameInfo, importStatement, parseResults); + + const ranges = [...state.getRanges().filter((r) => !!r.marker?.data)]; + assert.strictEqual(edits.length, ranges.length, `${markerName} expects ${ranges.length} but got ${edits.length}`); + + testTextEdits(state, edits, ranges); +} + +function testInsertions( + code: string, + markerName: string, + importNameInfo: ImportNameWithModuleInfo | ImportNameWithModuleInfo[] +) { + const state = 
parseAndGetTestState(code).state; + const marker = state.getMarkerByName(markerName)!; + const parseResults = state.program.getBoundSourceFile(marker!.fileUri)!.getParseResults()!; + + const importStatements = getTopLevelImports(parseResults.parserOutput.parseTree); + const edits = getTextEditsForAutoImportInsertions( + importNameInfo, + importStatements, + parseResults, + convertOffsetToPosition(marker.position, parseResults.tokenizerOutput.lines) + ); + + const ranges = [...state.getRanges().filter((r) => !!r.marker?.data)]; + assert.strictEqual(edits.length, ranges.length, `${markerName} expects ${ranges.length} but got ${edits.length}`); + + testTextEdits(state, edits, ranges); +} + +function testInsertion( + code: string, + markerName: string, + importNameInfo: ImportNameInfo | ImportNameInfo[], + moduleName: string, + importType: ImportType +) { + importNameInfo = isArray(importNameInfo) ? importNameInfo : [importNameInfo]; + if (importNameInfo.length === 0) { + importNameInfo.push({}); + } + + testInsertions( + code, + markerName, + importNameInfo.map((i) => { + return { + module: { + moduleName, + importType, + isLocalTypingsFile: false, + }, + name: i.name, + alias: i.alias, + }; + }) + ); +} + +function testTextEdits(state: TestState, edits: TextEditAction[], ranges: Range[]) { + for (const edit of edits) { + assert( + ranges.some((r) => { + const data = r.marker!.data as { r: string }; + const expectedText = data.r; + return ( + rangesAreEqual(state.convertPositionRange(r), edit.range) && + expectedText.replace(/!n!/g, '\n') === edit.replacementText + ); + }), + `can't find '${edit.replacementText}'@'${edit.range.start.line},${edit.range.start.character}'` + ); + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/ipythonMode.test.ts b/python-parser/packages/pyright-internal/src/tests/ipythonMode.test.ts new file mode 100644 index 00000000..928241b8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/ipythonMode.test.ts 
@@ -0,0 +1,577 @@ +/* + * ipythonMode.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for tokenizer ipython mode + */ + +import assert from 'assert'; +import { CompletionItemKind, MarkupKind } from 'vscode-languageserver-types'; + +import { DiagnosticRule } from '../common/diagnosticRules'; +import { TextRange } from '../common/textRange'; +import { TextRangeCollection } from '../common/textRangeCollection'; +import { LocMessage } from '../localization/localize'; +import { Comment, CommentType, Token } from '../parser/tokenizerTypes'; +import { parseAndGetTestState } from './harness/fourslash/testState'; + +test('regular mode', () => { + const code = ` +//// [|/*marker*/%cd test|] + `; + + testIPython(code, /*expectMagic*/ false); +}); + +test('ipython magic', () => { + const code = ` +// @ipythonMode: true +//// [|/*marker*/%cd test|] + `; + + testIPython(code); +}); + +test('ipython shell escape', () => { + const code = ` +// @ipythonMode: true +//// [|/*marker*/!shellCommand|] + `; + + testIPython(code); +}); + +test('ipython regular operator - mod', () => { + const code = ` +// @ipythonMode: true +//// a = 1 [|/*marker*/% 1|] + `; + + testIPython(code, /*expectMagic*/ false); +}); + +test('ipython regular operator - bang', () => { + const code = ` +// @ipythonMode: true +//// a = 1 +//// a [|/*marker*/!= 1|] + `; + + testIPython(code, /*expectMagic*/ false); +}); + +test('ipython regular operator multiline', () => { + const code = ` +// @ipythonMode: true +//// a = 1 \\ +//// [|/*marker*/% 1|] + `; + + testIPython(code, /*expectMagic*/ false); +}); + +test('ipython at the top', () => { + const code = ` +// @ipythonMode: true +//// [|/*marker*/%cd test|] +//// b = 1 + `; + + testIPython(code); +}); + +test('ipython between statements', () => { + const code = ` +// @ipythonMode: true +//// a = 1 +//// [|/*marker*/%cd test|] +//// b = 1 + `; + + testIPython(code); +}); + +test('ipython at the end', () => { + 
const code = ` +// @ipythonMode: true +//// a = 1 +//// [|/*marker*/%cd test|] + `; + + testIPython(code); +}); + +test('ipython multiline magics', () => { + const code = ` +// @ipythonMode: true +//// a = 1 +//// [|/*marker*/%cd test \ +//// other arguments|] + `; + + testIPython(code); +}); + +test('ipython cell mode magics', () => { + const code = ` +// @ipythonMode: true +//// [|/*marker*/%%timeit|] + `; + + testIPython(code); +}); + +test('ipython with indentation', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// [|/*marker*/%cd test|] +//// pass + `; + + testIPython(code); +}); + +test('ipython without indentation', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// [|/*marker*/%cd test|] +//// pass + `; + + testIPython(code); +}); + +test('ipython mixed with regular comments 1', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// # comments +//// [|/*marker*/%cd test|] +//// pass + `; + + testIPython(code); +}); + +test('ipython mixed with regular comments 2', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// # comments +//// [|/*marker*/%cd test|] +//// # comments +//// pass + `; + + testIPython(code); +}); + +test('ipython mixed with regular comments 3', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// [|/*marker*/%cd test|] +//// # comments +//// pass + `; + + testIPython(code); +}); + +test('ipython mixed with regular comments 4', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// [|/*marker*/%cd test|] +//// # comments +//// pass + `; + + testIPython(code); +}); + +test('ipython multiple magics 1', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// [|/*marker*/%cd test|] +//// %cd test2 +//// pass + `; + + testIPython(code); +}); + +test('ipython multiple magics 2', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// %cd test +//// [|/*marker*/%cd test2|] +//// pass + `; + + 
testIPython(code); +}); + +test('ipython cell magic', () => { + const code = ` +// @ipythonMode: true +//// def foo(): ... +//// [|/*marker*/%%cell magic +//// random text +//// and more|] + `; + + testIPython(code); +}); + +test('ipython cell shell escape', () => { + const code = ` +// @ipythonMode: true +//// def foo(): ... +//// [|/*marker*/!!cell shell escape +//// random text +//// and more|] + `; + + testIPython(code); +}); + +test('ipython wrong magic', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// [|/*marker*/%!not cell magic|] +//// ... + `; + + testIPython(code); +}); + +test('top level await raises errors in regular mode', () => { + const code = ` +//// async def foo(): +//// pass +//// +//// [|/*marker*/await foo();|] + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const source = state.program.getBoundSourceFile(range.fileUri)!; + const diagnostics = source.getDiagnostics(state.configOptions); + + assert(diagnostics?.some((d) => d.message === LocMessage.awaitNotInAsync())); +}); + +test('top level await raises no errors in ipython mode', () => { + const code = ` +// @ipythonMode: true +//// async def foo(): +//// pass +//// +//// [|/*marker*/await foo();|] + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const source = state.program.getBoundSourceFile(range.fileUri)!; + const diagnostics = source.getDiagnostics(state.configOptions); + + assert(!diagnostics?.some((d) => d.message === LocMessage.awaitNotInAsync())); +}); + +test('await still raises errors when used in wrong context in ipython mode', () => { + const code = ` +// @ipythonMode: true +//// async def foo(): +//// pass +//// +//// def bar(): +//// [|/*marker*/await foo();|] + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const source = 
state.program.getBoundSourceFile(range.fileUri)!; + const diagnostics = source.getDiagnostics(state.configOptions); + + assert(diagnostics?.some((d) => d.message === LocMessage.awaitNotInAsync())); +}); + +test('top level async for raises errors in regular mode', () => { + const code = ` +//// async def b(): +//// for i in range(5): +//// yield i +//// +//// [|/*marker*/async for x in b():|] +//// print("") + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const source = state.program.getBoundSourceFile(range.fileUri)!; + const diagnostics = source.getDiagnostics(state.configOptions); + + assert(diagnostics?.some((d) => d.message === LocMessage.asyncNotInAsyncFunction())); +}); + +test('top level async for raises no errors in ipython mode', () => { + const code = ` +// @ipythonMode: true +//// async def b(): +//// for i in range(5): +//// yield i +//// +//// [|/*marker*/async for x in b():|] +//// print("") + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const source = state.program.getBoundSourceFile(range.fileUri)!; + const diagnostics = source.getDiagnostics(state.configOptions); + + assert(!diagnostics?.some((d) => d.message === LocMessage.asyncNotInAsyncFunction())); +}); + +test('top level async for in list comprehension raises errors in regular mode', () => { + const code = ` +//// async def b(): +//// for i in range(5): +//// yield i +//// +//// y = [|/*marker*/[x async for x in b()]|] + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const source = state.program.getBoundSourceFile(range.fileUri)!; + const diagnostics = source.getDiagnostics(state.configOptions); + + assert(diagnostics?.some((d) => d.message === LocMessage.asyncNotInAsyncFunction())); +}); + +test('top level async for in list comprehension raises no errors in ipython mode', () => { + const code 
= ` +// @ipythonMode: true +//// async def b(): +//// for i in range(5): +//// yield i +//// +//// y = [|/*marker*/[x async for x in b()]|] + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const source = state.program.getBoundSourceFile(range.fileUri)!; + const diagnostics = source.getDiagnostics(state.configOptions); + + assert(!diagnostics?.some((d) => d.message === LocMessage.asyncNotInAsyncFunction())); +}); + +test('top level async with raises errors in regular mode', () => { + const code = ` +//// from contextlib import AsyncExitStack +//// +//// cm = AsyncExitStack() +//// +//// [|/*marker*/async with cm:|] +//// pass + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const source = state.program.getBoundSourceFile(range.fileUri)!; + const diagnostics = source.getDiagnostics(state.configOptions); + + assert(diagnostics?.some((d) => d.message === LocMessage.asyncNotInAsyncFunction())); +}); + +test('top level async with raises no errors in ipython mode', () => { + const code = ` +// @ipythonMode: true +//// from contextlib import AsyncExitStack +//// +//// cm = AsyncExitStack() +//// +//// [|/*marker*/async with cm:|] +//// pass + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const source = state.program.getBoundSourceFile(range.fileUri)!; + const diagnostics = source.getDiagnostics(state.configOptions); + + assert(!diagnostics?.some((d) => d.message === LocMessage.asyncNotInAsyncFunction())); +}); + +test('try implicitly load ipython display module but fail', async () => { + const code = ` +// @ipythonMode: true +//// [|display/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('excluded', MarkupKind.Markdown, { + marker: { + completions: [ + { + label: 'display', + kind: CompletionItemKind.Function, + }, + ], + 
}, + }); +}); + +test('magics at the end', async () => { + const code = ` +// @filename: test.py +// @ipythonMode: true +//// from random import random +//// def estimate_pi(n=1e7) -> "area": +//// """Estimate pi with monte carlo simulation. +//// +//// Arguments: +//// n: number of simulations +//// """ +//// in_circle = 0 +//// total = n +//// +//// while n != 0: +//// prec_x = random() +//// prec_y = random() +//// if pow(prec_x, 2) + pow(prec_y, 2) <= 1: +//// in_circle += 1 # inside the circle +//// n -= 1 +//// +//// return 4 * in_circle / total +//// +//// [|/*marker*/%time estimate_pi()|] + `; + + testIPython(code); +}); + +function testIPython(code: string, expectMagic = true) { + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const results = state.program.getBoundSourceFile(range.fileUri)!.getParseResults()!; + + const text = results.text.substring(range.pos, range.end); + const type = getCommentType(text); + + const offset = type === CommentType.IPythonMagic || type === CommentType.IPythonShellEscape ? 1 : 2; + const comment = findCommentByOffset(results.tokenizerOutput.tokens, range.pos + offset); + if (!expectMagic) { + assert(!comment); + return; + } + + assert(comment); + + assert.strictEqual(type, comment.type); + assert.strictEqual(text.substring(offset), comment.value); +} + +function getCommentType(text: string) { + assert(text.length > 0); + + const type = text[0] === '%' ? CommentType.IPythonMagic : CommentType.IPythonShellEscape; + if (text.length === 1) { + return type; + } + + switch (type) { + case CommentType.IPythonMagic: + return text[1] === '%' ? CommentType.IPythonCellMagic : type; + case CommentType.IPythonShellEscape: + return text[1] === '!' ? CommentType.IPythonCellShellEscape : type; + } +} + +function findCommentByOffset(tokens: TextRangeCollection<Token>, offset: number) { + let startIndex = tokens.getItemAtPosition(offset); + startIndex = startIndex >= 0 ? 
startIndex : 0; + + let comment: Comment | undefined; + for (let i = startIndex; i < tokens.count; i++) { + const token = tokens.getItemAt(i); + comment = token.comments?.find((c) => TextRange.contains(c, offset)); + if (comment) { + break; + } + + if (offset < token.start) { + return undefined; + } + } + + return comment; +} + +test('unused expression at end is not error', async () => { + const code = ` +// @filename: test.py +// @ipythonMode: true +//// 4[|/*marker*/|] + `; + + verifyAnalysisDiagnosticCount(code, 0); +}); + +test('unused expression is error if not at end of cell', async () => { + const code = ` +// @filename: test.py +// @ipythonMode: true +//// 4[|/*marker*/|] +//// +//// x = 1 + `; + + verifyAnalysisDiagnosticCount(code, 1, DiagnosticRule.reportUnusedExpression); +}); + +test('unused expression is error if within another statement', async () => { + const code = ` +// @filename: test.py +// @ipythonMode: true +//// if True: +//// 4[|/*marker*/|] + `; + + verifyAnalysisDiagnosticCount(code, 1, DiagnosticRule.reportUnusedExpression); +}); + +function verifyAnalysisDiagnosticCount(code: string, expectedCount: number, expectedRule?: string) { + const state = parseAndGetTestState(code).state; + + state.analyze(); + + const range = state.getRangeByMarkerName('marker')!; + const source = state.program.getBoundSourceFile(range.fileUri)!; + const diagnostics = source.getDiagnostics(state.configOptions); + + assert.strictEqual(diagnostics?.length, expectedCount); + if (expectedRule) { + diagnostics.forEach((diagnostic) => assert.strictEqual(diagnostic.getRule(), expectedRule)); + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/languageServer.test.ts b/python-parser/packages/pyright-internal/src/tests/languageServer.test.ts new file mode 100644 index 00000000..ce6cd9bf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/languageServer.test.ts @@ -0,0 +1,315 @@ +/* + * languageServer.test.ts + * Copyright (c) Microsoft 
Corporation. + * Licensed under the MIT license. + * + * Tests to verify Pyright works as the backend for a language server. + */ + +import assert from 'assert'; +import { + CancellationToken, + CompletionRequest, + ConfigurationItem, + DidChangeWorkspaceFoldersNotification, + InitializedNotification, + InitializeRequest, + MarkupContent, +} from 'vscode-languageserver'; + +import { convertOffsetToPosition } from '../common/positionUtils'; +import { PythonVersion, pythonVersion3_10 } from '../common/pythonVersion'; + +import { isArray } from '../common/core'; +import { normalizeSlashes } from '../common/pathUtils'; +import { + cleanupAfterAll, + DEFAULT_WORKSPACE_ROOT, + getParseResults, + hover, + openFile, + PyrightServerInfo, + runPyrightServer, + waitForDiagnostics, +} from './lsp/languageServerTestUtils'; + +describe(`Basic language server tests`, () => { + let serverInfo: PyrightServerInfo | undefined; + async function runLanguageServer( + projectRoots: string[] | string, + code: string, + callInitialize = true, + extraSettings?: { item: ConfigurationItem; value: any }[], + pythonVersion: PythonVersion = pythonVersion3_10, + supportsBackgroundThread?: boolean, + supportsPullDiagnostics?: boolean + ) { + const result = await runPyrightServer( + projectRoots, + code, + callInitialize, + extraSettings, + pythonVersion, + supportsBackgroundThread, + supportsPullDiagnostics + ); + serverInfo = result; + return result; + } + + afterEach(async () => { + if (serverInfo) { + await serverInfo.dispose(); + serverInfo = undefined; + } + }); + + afterAll(async () => { + await cleanupAfterAll(); + }); + + test.each([ + { name: 'capability disabled', capability: false, initFolders: 1, firstNotify: null, secondNotify: null }, + { name: '1 init, no notifications', capability: true, initFolders: 1, firstNotify: null, secondNotify: null }, + { name: '1 init, notify with 0', capability: true, initFolders: 1, firstNotify: 0, secondNotify: null }, + { name: '1 init, notify with 
1', capability: true, initFolders: 1, firstNotify: 1, secondNotify: null }, + { name: '1 init, notify with 2', capability: true, initFolders: 1, firstNotify: 2, secondNotify: null }, + { name: '1 init, notify with 0 then 0', capability: true, initFolders: 1, firstNotify: 0, secondNotify: 0 }, + { name: '1 init, notify with 0 then 1', capability: true, initFolders: 1, firstNotify: 0, secondNotify: 1 }, + { name: '1 init, notify with 0 then 2', capability: true, initFolders: 1, firstNotify: 0, secondNotify: 2 }, + { name: '1 init, notify with 1 then 0', capability: true, initFolders: 1, firstNotify: 1, secondNotify: 0 }, + { name: '1 init, notify with 1 then 1', capability: true, initFolders: 1, firstNotify: 1, secondNotify: 1 }, + { name: '1 init, notify with 1 then 2', capability: true, initFolders: 1, firstNotify: 1, secondNotify: 2 }, + { name: '1 init, notify with 2 then 0', capability: true, initFolders: 1, firstNotify: 2, secondNotify: 0 }, + { name: '1 init, notify with 2 then 1', capability: true, initFolders: 1, firstNotify: 2, secondNotify: 1 }, + { name: '1 init, notify with 2 then 2', capability: true, initFolders: 1, firstNotify: 2, secondNotify: 2 }, + { name: '2 init, no notifications', capability: true, initFolders: 2, firstNotify: null, secondNotify: null }, + { name: '2 init, notify with 2', capability: true, initFolders: 2, firstNotify: 2, secondNotify: null }, + { name: '0 init, notify with 1', capability: true, initFolders: 0, firstNotify: 1, secondNotify: null }, + { name: '0 init, notify with 2', capability: true, initFolders: 0, firstNotify: 2, secondNotify: null }, + ])('workspace initialization: $name', async ({ capability, initFolders, firstNotify, secondNotify }) => { + const code = ` +// @filename: test.py +//// import [|/*marker*/os|] + `; + const info = await runLanguageServer(DEFAULT_WORKSPACE_ROOT, code, false); + const params = info.getInitializeParams(); + const folders = params.workspaceFolders!; + const folder2 = { name: 
'workspace2', uri: 'file:///workspace2' }; + + params.capabilities.workspace!.workspaceFolders = capability; + if (initFolders === 0) { + params.workspaceFolders = []; + } else if (initFolders === 2) { + params.workspaceFolders = [...folders, folder2]; + } + + await info.connection.sendRequest(InitializeRequest.type, params, CancellationToken.None); + await info.connection.sendNotification(InitializedNotification.type, {}); + + const getFoldersForNotify = (count: number) => { + if (count === 0) return []; + if (count === 1) return folders; + return [...folders, folder2]; + }; + + if (firstNotify !== null) { + await info.connection.sendNotification(DidChangeWorkspaceFoldersNotification.type, { + event: { added: getFoldersForNotify(firstNotify), removed: [] }, + }); + } + if (secondNotify !== null) { + await info.connection.sendNotification(DidChangeWorkspaceFoldersNotification.type, { + event: { added: getFoldersForNotify(secondNotify), removed: [] }, + }); + } + + openFile(info, 'marker'); + const result = await hover(info, 'marker'); + assert(result && MarkupContent.is(result.contents)); + assert.strictEqual(result.contents.value, '```python\n(module) os\n```'); + }); + + test('Hover', async () => { + const code = ` +// @filename: test.py +//// import [|/*marker*/os|] + `; + const info = await runLanguageServer(DEFAULT_WORKSPACE_ROOT, code, /* callInitialize */ true); + + // Do simple hover request + openFile(info, 'marker'); + const hoverResult = await hover(info, 'marker'); + assert(hoverResult); + assert(MarkupContent.is(hoverResult.contents)); + assert.strictEqual(hoverResult.contents.value, '```python\n(module) os\n```'); + }); + test('Completions', async () => { + const code = ` +// @filename: test.py +//// import os +//// os.[|/*marker*/|] + `; + const info = await runLanguageServer(DEFAULT_WORKSPACE_ROOT, code, /* callInitialize */ true); + + // Do simple completion request + openFile(info, 'marker'); + const marker = 
info.testData.markerPositions.get('marker')!; + const fileUri = marker.fileUri; + const text = info.testData.files.find((d) => d.fileName === marker.fileName)!.content; + const parseResult = getParseResults(text); + const completionResult = await info.connection.sendRequest( + CompletionRequest.type, + { + textDocument: { uri: fileUri.toString() }, + position: convertOffsetToPosition(marker.position, parseResult.tokenizerOutput.lines), + }, + CancellationToken.None + ); + + assert(completionResult); + assert(!isArray(completionResult)); + + const completionItem = completionResult.items.find((i) => i.label === 'path')!; + assert(completionItem); + }); + + [false, true].forEach((supportsPullDiagnostics) => { + describe(`Diagnostics ${supportsPullDiagnostics ? 'pull' : 'push'}`, () => { + // Background analysis takes longer than 5 seconds sometimes, so we need to + // increase the timeout. + jest.setTimeout(200000); + test('background thread diagnostics', async () => { + const code = ` +// @filename: root/test.py +//// from math import cos, sin +//// import sys +//// [|/*marker*/|] + `; + const settings = [ + { + item: { + scopeUri: `file://${normalizeSlashes(DEFAULT_WORKSPACE_ROOT, '/')}`, + section: 'python.analysis', + }, + value: { + typeCheckingMode: 'strict', + diagnosticMode: 'workspace', + }, + }, + ]; + + const info = await runLanguageServer( + DEFAULT_WORKSPACE_ROOT, + code, + /* callInitialize */ true, + settings, + undefined, + /* supportsBackgroundThread */ true, + supportsPullDiagnostics + ); + + // get the file containing the marker that also contains our task list comments + await openFile(info, 'marker'); + + // Wait for the diagnostics to publish + const diagnostics = await waitForDiagnostics(info); + const diagnostic = diagnostics.find((d) => d.uri.includes('root/test.py')); + assert(diagnostic); + assert.equal(diagnostic.diagnostics.length, 6); + + // Make sure the error has a special rule + assert.equal(diagnostic.diagnostics[1].code, 
'reportUnusedImport'); + assert.equal(diagnostic.diagnostics[3].code, 'reportUnusedImport'); + assert.equal(diagnostic.diagnostics[4].code, 'reportUnusedImport'); + }); + + test('background thread diagnostics open mode', async () => { + const code = ` +// @filename: root/test.py +//// from math import cos, sin +//// import sys +//// [|/*marker*/|] + `; + const settings = [ + { + item: { + scopeUri: `file://${normalizeSlashes(DEFAULT_WORKSPACE_ROOT, '/')}`, + section: 'python.analysis', + }, + value: { + typeCheckingMode: 'strict', + }, + }, + ]; + + const info = await runLanguageServer( + DEFAULT_WORKSPACE_ROOT, + code, + /* callInitialize */ true, + settings, + undefined, + /* supportsBackgroundThread */ true, + supportsPullDiagnostics + ); + + // get the file containing the marker that also contains our task list comments + await openFile(info, 'marker'); + + // Wait for the diagnostics to publish + const diagnostics = await waitForDiagnostics(info); + const diagnostic = diagnostics.find((d) => d.uri.includes('root/test.py')); + assert(diagnostic); + const unusedImports = diagnostic.diagnostics.filter((d) => d.code === 'reportUnusedImport'); + assert.equal(unusedImports.length, 3); + }); + + test('Diagnostic severity overrides test', async () => { + const code = ` +// @filename: test.py +//// def test([|/*marker*/x|]): ... 
+//// +// @filename: pyproject.toml +//// + `; + const settings = [ + { + item: { + scopeUri: `file://${normalizeSlashes(DEFAULT_WORKSPACE_ROOT, '/')}`, + section: 'python.analysis', + }, + value: { + diagnosticSeverityOverrides: { + reportUnknownParameterType: 'warning', + }, + }, + }, + ]; + + const info = await runLanguageServer( + DEFAULT_WORKSPACE_ROOT, + code, + /* callInitialize */ true, + settings, + undefined, + /* supportsBackgroundThread */ true, + supportsPullDiagnostics + ); + + // get the file containing the marker that also contains our task list comments + await openFile(info, 'marker'); + + // Wait for the diagnostics to publish + const diagnostics = await waitForDiagnostics(info); + const diagnostic = diagnostics.find((d) => d.uri.includes('test.py')); + assert(diagnostic); + + // Make sure the error has a special rule + assert.ok( + diagnostic.diagnostics.some((d) => d.code === 'reportUnknownParameterType'), + `Expected diagnostic not found. Got ${JSON.stringify(diagnostic.diagnostics)}` + ); + }); + }); + }); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/localizer.test.ts b/python-parser/packages/pyright-internal/src/tests/localizer.test.ts new file mode 100644 index 00000000..0e300215 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/localizer.test.ts @@ -0,0 +1,69 @@ +/* + * localizer.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for localizer module, including default localized strings. + */ + +import * as assert from 'assert'; + +import { LocMessage, Localizer, ParameterizedString, setGetRawString } from '../localization/localize'; + +const namespaces = [Localizer.Diagnostic, Localizer.DiagnosticAddendum, Localizer.CodeAction]; + +test('Raw strings present', () => { + // Allocate a map so we can detect duplicate strings. This is + // an indication that the string key (e.g. 
'DiagnosticAddendum.useDictInstead') + // used to fetch the localized string is a duplicate of another string key. + const stringContentMap = new Map<string, string>(); + + namespaces.forEach((namespace) => { + Object.keys(namespace).forEach((key) => { + const value = (namespace as any)[key](); + let formatString: string; + + if (value === undefined) { + assert.fail(`Default value for localized string "${key}" is missing`); + } else if (typeof value === 'string') { + formatString = value; + } else if (value instanceof ParameterizedString) { + formatString = value.getFormatString(); + if (!formatString) { + assert.fail(`Format string for localized string "${key}" is missing`); + } + } else { + assert.fail(`Default value for localized string "${key}" is unexpected type`); + } + + if (stringContentMap.has(formatString)) { + assert.fail(`Localized string for "${key}" is duplicate of ${stringContentMap.get(formatString)}`); + } + + stringContentMap.set(formatString, key); + }); + }); +}); + +test('Override a specific string', () => { + // eslint-disable-next-line prefer-const + let originalRawString: ((key: string) => string) | undefined; + function overrideImportResolve(key: string): string { + if (key === 'Diagnostic.importResolveFailure') { + return 'Import is {importName}'; + } + return originalRawString!(key); + } + originalRawString = setGetRawString(overrideImportResolve); + + const value = LocMessage.importResolveFailure().format({ importName: 'foo', venv: 'python' }); + + try { + assert.equal(value, 'Import is foo'); + const nonMovedValue = LocMessage.abstractMethodInvocation().format({ method: 'foo' }); + assert.equal(nonMovedValue, 'Method "foo" cannot be called because it is abstract and unimplemented'); + } finally { + setGetRawString(originalRawString); + } +}); diff --git a/python-parser/packages/pyright-internal/src/tests/logger.test.ts b/python-parser/packages/pyright-internal/src/tests/logger.test.ts new file mode 100644 index 00000000..bb322f85 --- 
/dev/null +++ b/python-parser/packages/pyright-internal/src/tests/logger.test.ts @@ -0,0 +1,98 @@ +/* + * logger.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for logger. + */ + +import * as assert from 'assert'; + +import { ConfigOptions } from '../common/configOptions'; +import { ConsoleInterface, ConsoleWithLogLevel, LogLevel } from '../common/console'; +import { test_setDebugMode } from '../common/core'; +import { timingStats } from '../common/timing'; +import * as TestUtils from './testUtils'; +import { Uri } from '../common/uri/uri'; + +class TestConsole implements ConsoleInterface { + errors: string[] = []; + warnings: string[] = []; + infos: string[] = []; + logs: string[] = []; + + error(message: string): void { + this.errors.push(message); + } + warn(message: string): void { + this.warnings.push(message); + } + info(message: string): void { + this.infos.push(message); + } + log(message: string): void { + this.logs.push(message); + } + + clear() { + this.logs = []; + this.errors = []; + this.warnings = []; + this.infos = []; + } +} + +describe('TypeEvaluatorWithTracker tests', () => { + const consoleInterface = new TestConsole(); + const console = new ConsoleWithLogLevel(consoleInterface); + const config = new ConfigOptions(Uri.empty()); + + beforeEach(() => { + consoleInterface.clear(); + }); + afterEach(() => { + consoleInterface.clear(); + timingStats.typeEvaluationTime.callCount = 0; + }); + test('Log generated', () => { + config.logTypeEvaluationTime = true; + console.level = LogLevel.Log; + + TestUtils.typeAnalyzeSampleFiles(['badToken1.py'], config, console); + assert.ok(consoleInterface.logs.length > 10, `No calls logged`); + }); + + test('Log not generated when level is error', () => { + config.logTypeEvaluationTime = true; + console.level = LogLevel.Error; + + TestUtils.typeAnalyzeSampleFiles(['badToken1.py'], config, console); + assert.equal(consoleInterface.logs.length, 0, `Should not 
have any logs when logging level is error`); + }); + + test('Inner log not generated when eval is turned off', () => { + config.logTypeEvaluationTime = false; + console.level = LogLevel.Log; + TestUtils.typeAnalyzeSampleFiles(['badToken1.py'], config, console); + assert.equal( + consoleInterface.logs.some((s) => s.includes('evaluateTypesForStatement')), + false, + `Inner evaluateTypesForStatement is being logged when it shouldnt` + ); + assert.ok( + timingStats.typeEvaluationTime.callCount > 1, + `Should be tracking timing when not logging but not debugging` + ); + }); + + test('Timing is not captured in debug mode', () => { + const oldValue = test_setDebugMode(true); + + config.logTypeEvaluationTime = false; + console.level = LogLevel.Log; + TestUtils.typeAnalyzeSampleFiles(['badToken1.py'], config, console); + assert.equal(timingStats.typeEvaluationTime.callCount, 0, `Should not be tracking call counts when debugging`); + + test_setDebugMode(oldValue); + }); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/lsp/customLsp.ts b/python-parser/packages/pyright-internal/src/tests/lsp/customLsp.ts new file mode 100644 index 00000000..a1158bdb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/lsp/customLsp.ts @@ -0,0 +1,173 @@ +/* + * customLsp.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Custom messages and notifications on top of the LSP used for testing. 
+ */ + +import { + CancellationToken, + DidChangeConfigurationParams, + DidChangeNotebookDocumentParams, + Disposable, + NotificationHandler, + RequestHandler, +} from 'vscode-languageserver-protocol'; + +import { Uri } from '../../common/uri/uri'; + +export interface RequestSender { + sendRequest<R>(method: string, params: any, token?: CancellationToken): Promise<R>; +} + +export interface NotificationSender { + sendNotification: (method: string, params?: any) => void; +} + +export interface RequestReceiver { + onRequest<P, R, E>(method: string, handler: RequestHandler<P, R, E>): Disposable; +} + +export interface NotificationReceiver { + onNotification<P>(method: string, handler: NotificationHandler<P>): Disposable; +} + +export interface WorkspaceInfo { + rootUri: Uri; + kinds: string[]; + pythonPath: Uri | undefined; + pythonPathKind: string; +} + +// Type-safe LSP wrappers for our custom calls. +export namespace CustomLSP { + export enum TestSignalKinds { + Initialization = 'initialization', + DidOpenDocument = 'didopendocument', + DidChangeDocument = 'didchangedocument', + } + + export interface TestSignal { + uri: string; + kind: TestSignalKinds; + } + + export enum Requests { + GetDiagnostics = 'test/getDiagnostics', + GetOpenFiles = 'test/getOpenFiles', + } + + export enum Notifications { + SetStatusBarMessage = 'python/setStatusBarMessage', + BeginProgress = 'python/beginProgress', + ReportProgress = 'python/reportProgress', + EndProgress = 'python/endProgress', + WorkspaceTrusted = 'python/workspaceTrusted', + TestSignal = 'test/signal', + + // Due to some restrictions on vscode-languageserver-node package, + // we can't mix use types from the package in 2 different extensions. 
+ // Basically due to how lsp package utilizes singleton objects internally, + // if we use a client created from python core extension, which uses LSP library + // they imported, with LSP types from LSP library we imported, LSP will throw + // an exception saying internal singleton objects are not same. + // + // To workaround it, we won't use some of LSP types directly but create our own + // and use them with the client. + DidChangeConfiguration = 'workspace/didChangeConfiguration', + DidChangeNotebookDocument = 'notebookDocument/didChange', + CacheDirCreate = 'python/cacheDirCreate', + CacheFileWrite = 'python/cacheFileWrite', + // Starting/stopping the server are all notifications so they pass + // through without any interference. + TestStartServer = 'test/startServer', + TestStartServerResponse = 'test/startServerResponse', + } + + interface Params { + [Requests.GetDiagnostics]: { uri: string }; + [Requests.GetOpenFiles]: { uri: string }; + [Notifications.CacheDirCreate]: { uri: string }; + [Notifications.CacheFileWrite]: { uri: string; contents: string; overwrite: boolean }; + [Notifications.SetStatusBarMessage]: string; + [Notifications.BeginProgress]: undefined; + [Notifications.ReportProgress]: string; + [Notifications.EndProgress]: undefined; + [Notifications.WorkspaceTrusted]: { isTrusted: boolean }; + [Notifications.TestSignal]: TestSignal; + [Notifications.DidChangeConfiguration]: DidChangeConfigurationParams; + [Notifications.DidChangeNotebookDocument]: DidChangeNotebookDocumentParams; + [Notifications.TestStartServer]: TestServerStartOptions; + [Notifications.TestStartServerResponse]: { testName: string }; + } + + interface Response { + [Requests.GetDiagnostics]: { diagnostics: string }; + [Requests.GetOpenFiles]: { files: string }; + } + + // Interface for returning config options as we cannot return a + // class instance from the server. 
+ export interface IFileSpec { + wildcardRoot: Uri; + regExp: string; + hasDirectoryWildcard: boolean; + } + export interface IConfigOptions { + projectRoot: Uri; + pythonPath?: Uri; + typeshedPath?: Uri; + include: IFileSpec[]; + exclude: IFileSpec[]; + ignore: IFileSpec[]; + strict: IFileSpec[]; + } + + /** + * Data passed to the server worker thread in order to setup + * a test server. + */ + export interface TestServerStartOptions { + testName: string; // Helpful for debugging + pid: string; // Helpful for debugging + logFile: Uri; // Helpful for debugging + code: string; // Fourslash data. + projectRoots: Uri[]; + pythonVersion: string; + backgroundAnalysis?: boolean; + } + + export function sendRequest<P extends Params, R extends Response, M extends Requests & keyof P & keyof R & string>( + connection: RequestSender, + method: M, + params: P[M], + token?: CancellationToken + ): Promise<R[M]> { + return connection.sendRequest(method, params, token); + } + + export function sendNotification<P extends Params, M extends Notifications & keyof P & string>( + connection: NotificationSender, + method: M, + params: P[M] + ): void { + connection.sendNotification(method, params); + } + + export function onRequest<P extends Params, R extends Response, M extends Requests & keyof P & keyof R & string, E>( + connection: RequestReceiver, + method: M, + handler: RequestHandler<P[M], R[M], E> + ): Disposable { + return connection.onRequest(method, handler); + } + + export function onNotification<P extends Params, M extends Notifications & keyof P & string>( + connection: NotificationReceiver, + method: M, + handler: NotificationHandler<P[M]> + ): Disposable { + return connection.onNotification(method, handler); + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/lsp/languageServer.ts b/python-parser/packages/pyright-internal/src/tests/lsp/languageServer.ts new file mode 100644 index 00000000..bd91f679 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/lsp/languageServer.ts @@ -0,0 +1,435 @@ +/* + * languageServer.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Test language server wrapper that lets us run the language server during a test. + */ +import { + CancellationToken, + Connection, + DidOpenTextDocumentParams, + Disposable, + Message, + MessageReader, + MessageWriter, + PortMessageReader, + PortMessageWriter, + ShutdownRequest, + createConnection, +} from 'vscode-languageserver/node'; +import { MessagePort, getEnvironmentData, parentPort, setEnvironmentData } from 'worker_threads'; + +import { Deferred, createDeferred } from '../../common/deferred'; +import { FileSystemEntries, resolvePaths } from '../../common/pathUtils'; +import { ServiceProvider } from '../../common/serviceProvider'; +import { Uri } from '../../common/uri/uri'; +import { parseTestData } from '../harness/fourslash/fourSlashParser'; +import * as PyrightTestHost from '../harness/testHost'; +import { clearCache } from '../harness/vfs/factory'; + +import { IPythonMode } from '../../analyzer/sourceFile'; +import { BackgroundAnalysis, BackgroundAnalysisRunner } from '../../backgroundAnalysis'; +import { IBackgroundAnalysis } from '../../backgroundAnalysisBase'; +import { serialize } from '../../backgroundThreadBase'; +import { initializeDependencies } from '../../common/asyncInitialization'; +import { FileSystem } from '../../common/fileSystem'; +import { ServerSettings } from '../../common/languageServerInterface'; +import { PythonVersion } from '../../common/pythonVersion'; +import { ServiceKeys } from '../../common/serviceKeys'; +import { PyrightFileSystem } from '../../pyrightFileSystem'; +import { PyrightServer } from '../../server'; +import { InitStatus, Workspace } from '../../workspaceFactory'; +import { CustomLSP } from './customLsp'; +import { + DEFAULT_WORKSPACE_ROOT, + TestHost, + TestHostOptions, + createFileSystem, + getFileLikePath, + 
logToDisk, + sleep, +} from './languageServerTestUtils'; + +const WORKER_STARTED = 'WORKER_STARTED'; +const WORKER_BACKGROUND_DATA = 'WORKER_BACKGROUND_DATA'; + +function getCommonRoot(files: Uri[]) { + let root = files[0]?.getPath() || DEFAULT_WORKSPACE_ROOT; + for (let i = 1; i < files.length; i++) { + const file = files[i]; + while (root.length > 0 && !file.pathStartsWith(root)) { + root = root.slice(0, root.lastIndexOf('/')); + } + } + return root; +} + +class TestPyrightHost implements PyrightTestHost.TestHost { + constructor(private _host: PyrightTestHost.TestHost) {} + useCaseSensitiveFileNames(): boolean { + return this._host.useCaseSensitiveFileNames(); + } + getAccessibleFileSystemEntries(dirname: string): FileSystemEntries { + return this._host.getAccessibleFileSystemEntries(dirname); + } + directoryExists(path: string): boolean { + return this._host.directoryExists(path); + } + fileExists(fileName: string): boolean { + return this._host.fileExists(fileName); + } + getFileSize(path: string): number { + return this._host.getFileSize(path); + } + readFile(path: string): string | undefined { + return this._host.readFile(path); + } + getWorkspaceRoot(): string { + // The default workspace root is wrong. It should be based on where the bundle is running. + // That's where the typeshed fallback and other bundled files are located. 
+ return resolvePaths(__dirname); + } + writeFile(path: string, contents: string): void { + this._host.writeFile(path, contents); + } + listFiles( + path: string, + filter?: RegExp | undefined, + options?: { recursive?: boolean | undefined } | undefined + ): string[] { + return this._host.listFiles(path, filter, options); + } + log(text: string): void { + this._host.log(text); + } +} + +function createTestHost(testServerData: CustomLSP.TestServerStartOptions) { + const scriptOutput = ''; + const runScript = async ( + pythonPath: Uri | undefined, + scriptPath: Uri, + args: string[], + cwd: Uri, + token: CancellationToken + ) => { + return { stdout: scriptOutput, stderr: '', exitCode: 0 }; + }; + const options = new TestHostOptions({ version: PythonVersion.fromString(testServerData.pythonVersion), runScript }); + const projectRootPaths = testServerData.projectRoots.map((p) => getFileLikePath(p)); + const testData = parseTestData( + testServerData.projectRoots.length === 1 ? projectRootPaths[0] : DEFAULT_WORKSPACE_ROOT, + testServerData.code, + 'noname.py' + ); + const commonRoot = getCommonRoot(testServerData.projectRoots); + + // Make sure global variables from previous tests are cleared. + clearCache(); + + // create a test file system using the test data. + const fs = createFileSystem(commonRoot, testData, new TestPyrightHost(PyrightTestHost.HOST)); + + return new TestHost(fs, fs, testData, projectRootPaths, options); +} + +class TestServer extends PyrightServer { + private static _currentId = 0; + private _id = TestServer._currentId++; + constructor( + connection: Connection, + fs: FileSystem, + private readonly _supportsBackgroundAnalysis: boolean | undefined + ) { + super(connection, _supportsBackgroundAnalysis ? 
1 : 0, fs); + } + + test_onDidChangeWatchedFiles(params: any) { + this.onDidChangeWatchedFiles(params); + } + + override async updateSettingsForWorkspace( + workspace: Workspace, + status: InitStatus | undefined, + serverSettings?: ServerSettings | undefined + ): Promise<void> { + const result = await super.updateSettingsForWorkspace(workspace, status, serverSettings); + + // LSP notification only allows synchronous callback. because of that, the one that sent the notification can't know + // when the work caused by the notification actually ended. To workaround that issue, we will send custom lsp to indicate + // something has been done. + CustomLSP.sendNotification(this.connection, CustomLSP.Notifications.TestSignal, { + uri: workspace.rootUri?.toString() ?? '', + kind: CustomLSP.TestSignalKinds.Initialization, + }); + + return result; + } + + override createBackgroundAnalysis(serviceId: string, workspaceRoot: Uri): IBackgroundAnalysis | undefined { + if (this._supportsBackgroundAnalysis) { + return new BackgroundAnalysis(workspaceRoot, this.serverOptions.serviceProvider); + } + return undefined; + } + + protected override async onDidOpenTextDocument( + params: DidOpenTextDocumentParams, + ipythonMode?: IPythonMode + ): Promise<void> { + await super.onDidOpenTextDocument(params, ipythonMode); + CustomLSP.sendNotification(this.connection, CustomLSP.Notifications.TestSignal, { + uri: params.textDocument.uri, + kind: CustomLSP.TestSignalKinds.DidOpenDocument, + }); + } +} + +async function runServer( + testServerData: CustomLSP.TestServerStartOptions, + reader: MessageReader, + writer: MessageWriter, + connectionFactory: (reader: MessageReader, writer: MessageWriter) => Connection +): Promise<{ disposables: Disposable[]; connection: Connection }> { + // Create connection back to the client first. + const connection = connectionFactory(reader, writer); + + // Fixup the input data. 
+ testServerData = { + ...testServerData, + projectRoots: testServerData.projectRoots.map((p) => Uri.fromJsonObj(p)), + logFile: Uri.fromJsonObj(testServerData.logFile), + }; + + try { + // Create a host so we can control the file system for the PyrightServer. + const disposables: Disposable[] = []; + const host = createTestHost(testServerData); + const server = new TestServer(connection, host.fs, testServerData.backgroundAnalysis); + + // Listen for the test messages from the client. These messages + // are how the test code queries the state of the server. + disposables.push( + CustomLSP.onRequest(connection, CustomLSP.Requests.GetDiagnostics, async (params, token) => { + const filePath = Uri.parse(params.uri, server.serviceProvider); + const workspace = await server.getWorkspaceForFile(filePath); + workspace.service.test_program.analyze(undefined, token); + const file = workspace.service.test_program.getBoundSourceFile(filePath); + const diagnostics = file?.getDiagnostics(workspace.service.test_program.configOptions) || []; + return { diagnostics: serialize(diagnostics) }; + }), + CustomLSP.onRequest(connection, CustomLSP.Requests.GetOpenFiles, async (params) => { + const workspace = await server.getWorkspaceForFile(Uri.parse(params.uri, server.serviceProvider)); + const files = serialize(workspace.service.test_program.getOpened().map((f) => f.uri)); + return { files: files }; + }) + ); + + // Dispose the server and connection when terminating the server. 
+ disposables.push(server); + disposables.push(connection); + + return { disposables, connection }; + } catch (err) { + console.error(err); + return { disposables: [], connection }; + } +} + +class ListeningPortMessageWriter extends PortMessageWriter { + private _callbacks: ((msg: Message) => Promise<void>)[] = []; + constructor(port: MessagePort) { + super(port); + } + override async write(msg: Message): Promise<void> { + await Promise.all(this._callbacks.map((c) => c(msg))); + return super.write(msg); + } + + onPostMessage(callback: (msg: Message) => Promise<void>) { + this._callbacks.push(callback); + } +} + +/** + * Object that exists in the worker thread that starts and stops (and cleans up after) the main server. + */ +class ServerStateManager { + private _instances: { disposables: Disposable[]; connection: Connection }[] = []; + private _pendingDispose: Deferred<void> | undefined; + private _reader = new PortMessageReader(parentPort!); + private _writer = new ListeningPortMessageWriter(parentPort!); + private _currentOptions: CustomLSP.TestServerStartOptions | undefined; + private _shutdownId: string | number | null = null; + constructor(private readonly _connectionFactory: (reader: MessageReader, writer: MessageWriter) => Connection) { + // Listen for shutdown response. + this._writer.onPostMessage(async (msg: Message) => { + if (Message.isResponse(msg) && msg.id === this._shutdownId) { + await this._handleShutdown(); + } + }); + } + + run() { + parentPort?.on('message', (message) => this._handleMessage(message)); + } + + private _handleMessage(message: any) { + try { + // Debug output to help diagnose sync issues. 
+ if (message && message.method === CustomLSP.Notifications.TestStartServer) { + this._handleStart(message.params); + } else if (Message.isRequest(message) && message.method === ShutdownRequest.method) { + this._shutdownId = message.id; + } + } catch (err) { + console.error(err); + } + } + + private async _handleStart(options: CustomLSP.TestServerStartOptions) { + logToDisk(`Starting server for ${options.testName}`, options.logFile); + + // Every time we start the server, remove all message handlers from our PortMessageReader. + // This prevents the old servers from responding to messages for new ones. + this._reader.dispose(); + // Wait for the previous server to finish. This should be okay because the test + // client waits for the response message before sending anything else. Otherwise + // we'd receive the initialize message for the server and drop it before the server + // actually started. + if (this._pendingDispose) { + logToDisk( + `Waiting for previous server ${this._currentOptions?.testName} to finish for ${options.testName}`, + options.logFile + ); + await this._pendingDispose.promise; + this._pendingDispose = undefined; + } + this._currentOptions = options; + + // Set the worker data for the current test. Any background threads + // started after this point will pick up this value. + setEnvironmentData(WORKER_BACKGROUND_DATA, options); + + // Create an instance of the server. + const { disposables, connection } = await runServer( + options, + this._reader, + this._writer, + this._connectionFactory + ); + this._instances.push({ disposables, connection }); + + // Enable this to help diagnose sync issues. + logToDisk(`Started server for ${options.testName}`, options.logFile); + + // Respond back. 
+ parentPort?.postMessage({ + jsonrpc: '2.0', + method: CustomLSP.Notifications.TestStartServerResponse, + params: options, + }); + } + + private async _handleShutdown() { + if (this._currentOptions) { + logToDisk(`Stopping ${this._currentOptions?.testName}`, this._currentOptions.logFile); + } + this._shutdownId = null; + const instance = this._instances.pop(); + if (instance) { + this._pendingDispose = createDeferred<void>(); + + // Dispose the server first. This might send a message or two. + const serverIndex = instance.disposables.findIndex((d) => d instanceof TestServer); + if (serverIndex >= 0) { + try { + instance.disposables[serverIndex].dispose(); + instance.disposables.splice(serverIndex, 1); + } catch (e) { + // Dispose failures don't matter. + } + } + + // Wait for our connection to finish first. Give it 10 tries. + // This is a bit of a hack but there are no good ways to cancel all running requests + // on shutdown. + let count = 0; + while (count < 10 && (instance.connection as any).console?._rawConnection?.hasPendingResponse()) { + await sleep(10); + count += 1; + } + this._pendingDispose.resolve(); + try { + instance.disposables.forEach((d) => { + d.dispose(); + }); + } catch (e) { + // Dispose failures don't matter. + } + this._pendingDispose = undefined; + if (this._currentOptions) { + logToDisk(`Stopped ${this._currentOptions?.testName}`, this._currentOptions.logFile); + } + } else { + if (this._currentOptions) { + logToDisk(`Failed to stop ${this._currentOptions?.testName}`, this._currentOptions.logFile); + } + } + if (global.gc) { + global.gc(); + } + } +} + +async function runTestBackgroundThread() { + let options = getEnvironmentData(WORKER_BACKGROUND_DATA) as CustomLSP.TestServerStartOptions; + + // Normalize the options. 
+ options = { + ...options, + projectRoots: options.projectRoots.map((p) => Uri.fromJsonObj(p)), + logFile: Uri.fromJsonObj(options.logFile), + }; + try { + // Create a host on the background thread too so that it uses + // the host's file system. Has to be sync so that we don't + // drop any messages sent to the background thread. + const host = createTestHost(options); + const fs = new PyrightFileSystem(host.fs); + const serviceProvider = new ServiceProvider(); + serviceProvider.add(ServiceKeys.fs, fs); + + // run default background runner + const runner = new BackgroundAnalysisRunner(serviceProvider); + runner.start(); + } catch (e) { + console.error(`BackgroundThread crashed with ${e}`); + } +} + +export async function run() { + await initializeDependencies(); + + // Start the background thread if this is not the first worker. + if (getEnvironmentData(WORKER_STARTED) === 'true') { + runTestBackgroundThread(); + } else { + setEnvironmentData(WORKER_STARTED, 'true'); + + // Start the server state manager. + const stateManager = new ServerStateManager((reader, writer) => createConnection(reader, writer, {})); + stateManager.run(); + } +} + +process.on('uncaughtException', (err) => { + console.error('Uncaught exception in worker:', err); + process.exit(10); // Exit the worker process +}); + +process.on('unhandledRejection', (reason, promise) => { + console.error('Unhandled rejection in worker:', reason); + process.exit(11); // Exit the worker process +}); diff --git a/python-parser/packages/pyright-internal/src/tests/lsp/languageServerTestUtils.ts b/python-parser/packages/pyright-internal/src/tests/lsp/languageServerTestUtils.ts new file mode 100644 index 00000000..00416c90 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/lsp/languageServerTestUtils.ts @@ -0,0 +1,1224 @@ +/* + * languageServerTestUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Utilities for running tests against the LSP server. 
+ */ + +import assert from 'assert'; +import * as fs from 'fs-extra'; +import { isMainThread, threadId, Worker } from 'node:worker_threads'; +import path from 'path'; +import { + ApplyWorkspaceEditParams, + ApplyWorkspaceEditRequest, + CancellationToken, + ConfigurationItem, + ConfigurationRequest, + DiagnosticRefreshRequest, + DidChangeWorkspaceFoldersNotification, + DidOpenTextDocumentNotification, + Disposable, + DocumentDiagnosticReport, + DocumentDiagnosticRequest, + FullDocumentDiagnosticReport, + InitializedNotification, + InitializeParams, + InitializeRequest, + InlayHintRefreshRequest, + LogMessageNotification, + LogMessageParams, + PublishDiagnosticsNotification, + PublishDiagnosticsParams, + Registration, + RegistrationRequest, + SemanticTokensRefreshRequest, + ShutdownRequest, + TelemetryEventNotification, + UnchangedDocumentDiagnosticReport, + UnregistrationRequest, + WorkspaceDiagnosticReport, + WorkspaceDiagnosticRequest, +} from 'vscode-languageserver-protocol'; +import { + Connection, + createConnection, + Emitter, + Event, + HoverRequest, + NotificationHandler, + PortMessageReader, + PortMessageWriter, + ProgressToken, + ProgressType, + ProtocolNotificationType, + WorkDoneProgress, + WorkDoneProgressCancelNotification, + WorkDoneProgressCreateRequest, +} from 'vscode-languageserver/node'; +import { ImportLogger } from '../../analyzer/importLogger'; +import { PythonPathResult } from '../../analyzer/pythonPathUtils'; +import { deserialize } from '../../backgroundThreadBase'; +import { PythonPlatform } from '../../common/configOptions'; +import { toBoolean } from '../../common/core'; +import { createDeferred, Deferred } from '../../common/deferred'; +import { DiagnosticSink } from '../../common/diagnosticSink'; +import { FileSystem } from '../../common/fileSystem'; +import { LimitedAccessHost } from '../../common/fullAccessHost'; +import { HostKind, ScriptOutput } from '../../common/host'; +import { combinePaths, resolvePaths } from 
'../../common/pathUtils'; +import { convertOffsetToPosition } from '../../common/positionUtils'; +import { PythonVersion, pythonVersion3_10 } from '../../common/pythonVersion'; +import { FileUri } from '../../common/uri/fileUri'; +import { Uri } from '../../common/uri/uri'; +import { UriEx } from '../../common/uri/uriUtils'; +import { ParseOptions, Parser } from '../../parser/parser'; +import { parseTestData } from '../harness/fourslash/fourSlashParser'; +import { FourSlashData, GlobalMetadataOptionNames } from '../harness/fourslash/fourSlashTypes'; +import { createVfsInfoFromFourSlashData, getMarkerByName } from '../harness/fourslash/testStateUtils'; +import * as host from '../harness/testHost'; +import { createFromFileSystem, distlibFolder, libFolder } from '../harness/vfs/factory'; +import * as vfs from '../harness/vfs/filesystem'; +import { CustomLSP } from './customLsp'; + +// bundled root on test virtual file system. +const bundledStubsFolder = combinePaths(vfs.MODULE_PATH, 'bundled', 'stubs'); + +// bundled file path on real file system. +const bundledStubsFolderPath = resolvePaths(__dirname, '../../bundled/stubs'); +const bundledStubsFolderPathTestServer = resolvePaths(__dirname, '../bundled/stubs'); + +// project root on test virtual file system. 
+export const DEFAULT_WORKSPACE_ROOT = combinePaths('/', 'src'); + +export const ERROR_SCRIPT_OUTPUT = 'Error: script failed to run'; +export const STALL_SCRIPT_OUTPUT = 'Timeout: script never finished running'; + +export interface PyrightServerInfo { + disposables: Disposable[]; + registrations: Registration[]; + logs: LogMessageParams[]; + connection: Connection; + signals: Map<CustomLSP.TestSignalKinds, Deferred<boolean>>; + testName: string; // Used for debugging + testData: FourSlashData; + projectRoots: Uri[]; + progressReporters: string[]; + progressReporterStatus: Map<string, number>; + progressParts: Map<string, TestProgressPart>; + telemetry: any[]; + supportsPullDiagnostics: boolean; + diagnostics: PublishDiagnosticsParams[]; + diagnosticsEvent: Event<PublishDiagnosticsParams>; + workspaceEdits: ApplyWorkspaceEditParams[]; + workspaceEditsEvent: Event<ApplyWorkspaceEditParams>; + getInitializeParams(): InitializeParams; + dispose(): Promise<void>; + convertPathToUri(path: string): Uri; + workspaceDiagnosticsPartialResultToken?: string; + pendingWorkspaceDiagnostics?: Promise<any>; + queriedConfigSettings: Deferred<void>; +} + +export class TestHostOptions { + version: PythonVersion; + platform: PythonPlatform; + + // Search path on virtual file system. + searchPaths: Uri[]; + + // Run script function + runScript: ( + pythonPath: Uri | undefined, + scriptPath: Uri, + args: string[], + cwd: Uri, + token: CancellationToken + ) => Promise<ScriptOutput>; + + constructor({ + version = pythonVersion3_10, + platform = PythonPlatform.Linux, + searchPaths = [libFolder, distlibFolder], + runScript = async ( + pythonPath: Uri | undefined, + scriptPath: Uri, + args: string[], + cwd: Uri, + token: CancellationToken + ) => { + return { stdout: '', stderr: '' }; + }, + } = {}) { + this.version = version; + this.platform = platform; + this.searchPaths = searchPaths; + this.runScript = runScript; + } +} +// Enable this to log to disk for debugging sync issues. 
+export const logToDisk = (m: string, f: Uri) => {}; // logToDiskImpl +export function logToDiskImpl(message: string, fileName: Uri) { + const thread = isMainThread ? 'main' : threadId.toString(); + fs.writeFileSync(fileName.getFilePath(), `${Date.now()} : ${thread} : ${message}\n`, { + flag: 'a+', + }); +} + +// Global server worker. +let serverWorker: Worker | undefined; +let serverWorkerFile: string | undefined; +let lastServerFinished: { name: string; finished: boolean } = { name: '', finished: true }; + +function removeAllListeners(worker: Worker) { + // Only remove the 'message', 'error' and 'close' events + worker.rawListeners('message').forEach((listener) => worker.removeListener('message', listener as any)); + worker.rawListeners('error').forEach((listener) => worker.removeListener('error', listener as any)); + worker.rawListeners('close').forEach((listener) => worker.removeListener('close', listener as any)); +} + +function createServerWorker(file: string, testServerData: CustomLSP.TestServerStartOptions) { + // Do not terminate the worker if it's the same file. Reuse it. + // This makes tests run a lot faster because creating a worker is the same + // as starting a new process. + if (!serverWorker || serverWorkerFile !== file) { + serverWorker?.terminate(); + serverWorkerFile = file; + serverWorker = new Worker(file); + logToDisk(`Created new server worker for ${file}`, testServerData.logFile); + } + // Every time we 'create' the worker, refresh its message handlers. This + // is essentially the same thing as creating a new worker. + removeAllListeners(serverWorker); + logToDisk( + `Removed all worker listeners. 
Test ${testServerData.testName} is starting.\n Last test was ${lastServerFinished.name} and finished: ${lastServerFinished.finished}`, + testServerData.logFile + ); + serverWorker.on('error', (e) => { + logToDisk(`Worker error: ${e}`, testServerData.logFile); + }); + serverWorker.on('exit', (code) => { + logToDisk(`Worker exit: ${code}`, testServerData.logFile); + serverWorker = undefined; + }); + return serverWorker; +} + +export async function cleanupAfterAll() { + if (serverWorker) { + await serverWorker.terminate(); + serverWorker = undefined; + } +} + +export function getFileLikePath(uri: Uri): string { + return FileUri.isFileUri(uri) ? uri.getFilePath() : uri.toString(); +} + +export function createFileSystem(projectRoot: string, testData: FourSlashData, optionalHost?: host.TestHost) { + const mountedPaths = new Map<string, string>(); + if (fs.existsSync(bundledStubsFolderPath)) { + mountedPaths.set(bundledStubsFolder, bundledStubsFolderPath); + } else if (fs.existsSync(bundledStubsFolderPathTestServer)) { + mountedPaths.set(bundledStubsFolder, bundledStubsFolderPathTestServer); + } + + const vfsInfo = createVfsInfoFromFourSlashData(projectRoot, testData); + + // If there's a pyrightconfig.json, add it to the list of files as well. + const files = vfsInfo.files; + if (vfsInfo.rawConfigJson) { + files[combinePaths(vfsInfo.projectRoot, 'pyrightconfig.json')] = new vfs.File( + JSON.stringify(vfsInfo.rawConfigJson), + { encoding: 'utf8', meta: { filename: 'pyrightconfig.json' } } + ); + } + return createFromFileSystem( + optionalHost ?? 
host.HOST, + vfsInfo.ignoreCase, + { cwd: vfsInfo.projectRoot, files, meta: testData.globalOptions }, + mountedPaths + ); +} + +const settingsMap = new Map<PyrightServerInfo, { item: ConfigurationItem; value: any }[]>(); + +export function updateSettingsMap(info: PyrightServerInfo, settings: { item: ConfigurationItem; value: any }[]) { + const ignoreCase = toBoolean(info.testData.globalOptions[GlobalMetadataOptionNames.ignoreCase]); + // Normalize the URIs for all of the settings. + settings.forEach((s) => { + if (s.item.scopeUri) { + s.item.scopeUri = UriEx.parse(s.item.scopeUri, !ignoreCase).toString(); + } + }); + + const current = settingsMap.get(info) || []; + settingsMap.set(info, [...settings, ...current]); +} + +export function getParseResults(fileContents: string, isStubFile = false, useNotebookMode = false) { + const diagSink = new DiagnosticSink(); + const parseOptions = new ParseOptions(); + parseOptions.useNotebookMode = useNotebookMode; + parseOptions.isStubFile = isStubFile; + parseOptions.pythonVersion = pythonVersion3_10; + parseOptions.skipFunctionAndClassBody = false; + + // Parse the token stream, building the abstract syntax tree. + const parser = new Parser(); + return parser.parseSourceFile(fileContents, parseOptions, diagSink); +} + +function createServerConnection(testServerData: CustomLSP.TestServerStartOptions, disposables: Disposable[]) { + // Start a worker with the server running in it. + const serverPath = path.join(__dirname, '..', '..', '..', 'out', 'testServer.bundle.js'); + assert( + fs.existsSync(serverPath), + `Server bundle does not exist: ${serverPath}. 
Make sure you ran the build script for test bundle (npm run webpack:testserver).` + ); + const serverWorker = createServerWorker(serverPath, testServerData); + const options = {}; + + const connection = createConnection( + new PortMessageReader(serverWorker), + new PortMessageWriter(serverWorker), + options + ); + disposables.push(connection); + + return connection; +} + +function getProjectRootString(info: PyrightServerInfo, projectRoot?: Uri) { + return projectRoot ? projectRoot.toString() : info.projectRoots.length > 0 ? info.projectRoots[0].toString() : ''; +} + +export async function getOpenFiles(info: PyrightServerInfo, projectRoot?: Uri): Promise<Uri[]> { + const uri = getProjectRootString(info, projectRoot); + const result = await CustomLSP.sendRequest(info.connection, CustomLSP.Requests.GetOpenFiles, { uri }); + return deserialize(result.files); +} + +export async function waitForPushDiagnostics( + info: PyrightServerInfo, + clearFirst: boolean, + numberOfFiles = 1, + timeout = 10000 +): Promise<PublishDiagnosticsParams[]> { + if (clearFirst) { + info.diagnostics = []; + } + + // We may already have the necessary diagnostics. + const currentCount = info.diagnostics.filter((d) => d.diagnostics.length > 0).length; + if (currentCount >= numberOfFiles) { + return info.diagnostics; + } + + // Otherwise we have to get called back with the diagnostics. Swallow any errors (timeouts, etc.) + // and just return what we currently have so callers can choose how strict to be. + let eventCount = currentCount; + try { + await waitForEvent( + info.diagnosticsEvent, + 'diagnostics', + (params) => { + if (params.diagnostics.length > 0) { + eventCount++; + if (eventCount >= numberOfFiles) { + return true; + } + } + + return false; + }, + timeout + ); + } catch (e: any) { + try { + console.log?.( + `waitForPushDiagnostics: ignoring error while waiting for diagnostics: ${ + e?.message || e + }. 
Had ${eventCount} of ${numberOfFiles} events` + ); + } catch { + /* ignore logging issues */ + } + } + + return info.diagnostics; +} + +export async function waitForEvent<T>(event: Event<T>, name: string, condition: (p: T) => boolean, timeout = 10000) { + const deferred = createDeferred<void>(); + const disposable = event((params) => { + if (condition(params)) { + deferred.resolve(); + } + }); + + const timer = setTimeout(() => deferred.reject(`Timed out waiting for ${name} event`), timeout); + + try { + await deferred.promise; + } finally { + clearTimeout(timer); + disposable.dispose(); + } +} + +function convertDiagnosticReportItem( + uri: string, + item: FullDocumentDiagnosticReport | UnchangedDocumentDiagnosticReport +): PublishDiagnosticsParams { + if (item.kind === 'unchanged') { + return { + uri, + diagnostics: [], + }; + } + + return { + uri, + diagnostics: item.items, + }; +} +export function convertDiagnosticReport( + uri: string | undefined, + report: DocumentDiagnosticReport | WorkspaceDiagnosticReport +): PublishDiagnosticsParams[] { + if (!(report as any).kind || !uri) { + const workspaceReport = report as WorkspaceDiagnosticReport; + return workspaceReport.items.map((item) => convertDiagnosticReportItem(item.uri, item)); + } + const documentReport = report as DocumentDiagnosticReport; + return [convertDiagnosticReportItem(uri, documentReport)]; +} + +async function waitForPullDiagnostics(info: PyrightServerInfo): Promise<PublishDiagnosticsParams[]> { + const openFiles = await getOpenFiles(info); + if (openFiles.length <= 0) { + return waitForPushDiagnostics(info, false); + } else { + const results: PublishDiagnosticsParams[] = []; + for (const openFile of openFiles) { + const result = await info.connection.sendRequest(DocumentDiagnosticRequest.type, { + textDocument: { + uri: openFile.toString(), + }, + }); + results.push(convertDiagnosticReport(openFile.toString(), result)[0]); + } + return results; + } +} + +export async function 
waitForDiagnostics(info: PyrightServerInfo, timeout = 20000) { + if (info.supportsPullDiagnostics) { + // Timeout doesn't apply on pull because we can actually ask for them. + return waitForPullDiagnostics(info); + } + return waitForPushDiagnostics(info, false, undefined, timeout); +} + +interface ProgressPart {} + +interface ProgressContext { + onProgress<P>(type: ProgressType<P>, token: string | number, handler: NotificationHandler<P>): Disposable; + sendNotification<P, RO>(type: ProtocolNotificationType<P, RO>, params?: P): void; +} + +class TestProgressPart implements ProgressPart { + constructor( + private readonly _context: ProgressContext, + private readonly _token: ProgressToken, + info: PyrightServerInfo, + done: () => void + ) { + info.disposables.push( + info.connection.onProgress(WorkDoneProgress.type, _token, (params) => { + switch (params.kind) { + case 'begin': + info.progressReporterStatus.set(_token.toString(), 0); + break; + case 'report': + info.progressReporterStatus.set(_token.toString(), params.percentage ?? 
0); + break; + case 'end': + done(); + break; + } + }) + ); + info.progressReporters.push(this._token.toString()); + info.progressParts.set(this._token.toString(), this); + } + + sendCancel() { + this._context.sendNotification(WorkDoneProgressCancelNotification.type, { token: this._token }); + } +} + +export async function waitForPromise(promise: Promise<any>, timeout = 10000, message?: string): Promise<any> { + return await new Promise((resolve, reject) => { + const timer = setTimeout(() => { + reject(new Error(message || `Timed out waiting for promise`)); + }, timeout); + + promise + .then((result) => { + clearTimeout(timer); + resolve(result); + }) + .catch((error) => { + clearTimeout(timer); + reject(error); + }); + }); +} + +export async function runPyrightServer( + projectRoots: string[] | string, + code: string, + callInitialize = true, + extraSettings?: { item: ConfigurationItem; value: any }[], + pythonVersion: PythonVersion = pythonVersion3_10, + backgroundAnalysis?: boolean, + supportPullDiagnostics?: boolean +): Promise<PyrightServerInfo> { + // Setup the test data we need to send for Test server startup. + const projectRootsArray = Array.isArray(projectRoots) ? projectRoots : [projectRoots]; + + // Here all Uri has `isCaseSensitive` as true. + const testServerData: CustomLSP.TestServerStartOptions = { + testName: expect.getState().currentTestName ?? 'NoName', + code, + projectRoots: projectRootsArray.map((p) => (p.includes(':') ? UriEx.parse(p) : UriEx.file(p))), + pythonVersion: PythonVersion.toString(pythonVersion), + backgroundAnalysis, + logFile: UriEx.file(path.join(__dirname, `log${process.pid}.txt`)), + pid: process.pid.toString(), + }; + + logToDisk(`Starting test ${testServerData.testName}`, testServerData.logFile); + lastServerFinished = { name: testServerData.testName, finished: false }; + + // Parse the test data on this side as well. This allows the use of markers and such. 
+ const testData = parseTestData( + testServerData.projectRoots.length === 1 + ? getFileLikePath(testServerData.projectRoots[0]) + : DEFAULT_WORKSPACE_ROOT, + testServerData.code, + 'noname.py' + ); + + const ignoreCase = toBoolean(testData.globalOptions[GlobalMetadataOptionNames.ignoreCase]); + + // Normalize the URIs for all of the settings. + extraSettings?.forEach((s) => { + if (s.item.scopeUri) { + s.item.scopeUri = UriEx.parse(s.item.scopeUri, !ignoreCase).toString(); + } + }); + + // Start listening to the 'client' side of the connection. + const disposables: Disposable[] = []; + const connection = createServerConnection(testServerData, disposables); + const serverStarted = createDeferred<string>(); + const diagnosticsEmitter = new Emitter<PublishDiagnosticsParams>(); + const workspaceEditsEmitter = new Emitter<ApplyWorkspaceEditParams>(); + const partialResultToken = `4fb5ab8a-37a6-4c16-a290-03cfc892ea7e`; // Doesn't need to be different, just a random guid + + // Setup the server info. + const info: PyrightServerInfo = { + disposables, + registrations: [], + connection, + logs: [], + progressReporters: [], + progressReporterStatus: new Map<string, number>(), + progressParts: new Map<string, TestProgressPart>(), + signals: new Map(Object.values(CustomLSP.TestSignalKinds).map((v) => [v, createDeferred<boolean>()])), + testData, + testName: testServerData.testName, + telemetry: [], + supportsPullDiagnostics: !!supportPullDiagnostics, + projectRoots: testServerData.projectRoots, + diagnostics: [], + diagnosticsEvent: diagnosticsEmitter.event, + workspaceEdits: [], + workspaceEditsEvent: workspaceEditsEmitter.event, + getInitializeParams: () => getInitializeParams(testServerData.projectRoots, !!supportPullDiagnostics), + convertPathToUri: (path: string) => UriEx.file(path, !ignoreCase), + dispose: async () => { + // Send shutdown. This should disconnect the dispatcher and kill the server. 
+ if (serverWorker) { + await connection.sendRequest(ShutdownRequest.type, undefined); + } + // Shutdown of the server should resolve any pending workspace diagnostics requests. + await stopWorkspaceDiagnostics(info, 'shutdown'); + + // Now we can dispose the connection. + disposables.forEach((d) => d.dispose()); + + logToDisk(`Finished test ${testServerData.testName}`, testServerData.logFile); + }, + workspaceDiagnosticsPartialResultToken: supportPullDiagnostics ? partialResultToken : undefined, + queriedConfigSettings: createDeferred<void>(), + }; + info.disposables.push( + info.connection.onNotification(CustomLSP.Notifications.TestStartServerResponse, (p) => { + serverStarted.resolve(p.testName); + }), + info.connection.onRequest(RegistrationRequest.type, (p) => { + info.registrations.push(...p.registrations); + + // If this is a DocumentDiagnostic registration, we may need to start workspace diagnostics. + const documentDiagnosticRegistration = p.registrations.find( + (r) => r.method === DocumentDiagnosticRequest.type.method + ); + if (documentDiagnosticRegistration) { + if (documentDiagnosticRegistration.registerOptions.workspaceDiagnostics) { + startWorkspaceDiagnostics(info, 'registration'); + } else { + stopWorkspaceDiagnostics(info, 'registration'); + } + } + }), + info.connection.onNotification(CustomLSP.Notifications.TestSignal, (p: CustomLSP.TestSignal) => { + info.signals.get(p.kind)!.resolve(true); + }), + info.connection.onNotification(LogMessageNotification.type, (p) => { + info.logs.push(p); + }), + info.connection.onRequest(SemanticTokensRefreshRequest.type, () => { + // Empty. Silently ignore for now. + }), + info.connection.onRequest(InlayHintRefreshRequest.type, () => { + // Empty. Silently ignore for now. 
+ }), + info.connection.onRequest(DiagnosticRefreshRequest.type, () => {}), + info.connection.onRequest(ApplyWorkspaceEditRequest.type, (p) => { + info.workspaceEdits.push(p); + workspaceEditsEmitter.fire(p); + return { applied: true }; + }), + info.connection.onRequest(UnregistrationRequest.type, (p) => { + const unregisterIds = p.unregisterations.map((u) => u.id); + info.registrations = info.registrations.filter((r) => !unregisterIds.includes(r.id)); + }), + info.connection.onRequest(WorkDoneProgressCreateRequest.type, (p) => { + // Save the progress reporter so we can send progress updates. + info.progressReporters.push(p.token.toString()); + info.disposables.push( + info.connection.onProgress(WorkDoneProgress.type, p.token, (params) => { + switch (params.kind) { + case 'begin': + info.progressReporterStatus.set(p.token.toString(), 0); + break; + case 'report': + info.progressReporterStatus.set(p.token.toString(), params.percentage ?? 0); + break; + case 'end': + break; + } + }) + ); + }), + info.connection.onNotification(PublishDiagnosticsNotification.type, (p) => { + info.diagnostics.push(p); + diagnosticsEmitter.fire(p); + }), + info.connection.onNotification(TelemetryEventNotification.type, (p) => { + info.telemetry.push(p); + }), + info.connection.onProgress(WorkspaceDiagnosticRequest.partialResult, partialResultToken, (progress) => { + const converted = convertDiagnosticReport(undefined, progress); + // Replace any current diagnostics with their new set + for (const p of converted) { + info.diagnostics = info.diagnostics.filter((d) => d.uri !== p.uri); + info.diagnostics.push(p); + diagnosticsEmitter.fire(p); + } + }) + ); + info.disposables.push( + info.connection.onRequest(ConfigurationRequest.type, (p) => { + const result = []; + const mappedSettings = settingsMap.get(info) || []; + for (const item of p.items) { + const setting = mappedSettings.find( + (s) => + (s.item.scopeUri === item.scopeUri || s.item.scopeUri === undefined) && + s.item.section === 
item.section + ); + if (setting) { + // Indicate we queried at least one setting. + info.queriedConfigSettings.resolve(); + } + result.push(setting?.value); + } + + return result; + }) + ); + + // Merge the extra settings. + const settings: { item: ConfigurationItem; value: any }[] = []; + if (extraSettings) { + for (const extra of extraSettings) { + const existing = settings.find( + (s) => s.item.section === extra.item.section && s.item.scopeUri === extra.item.scopeUri + ); + if (existing) { + existing.value = { ...existing.value, ...extra.value }; + } else { + settings.push(extra); + } + } + } + settingsMap.set(info, settings); + + // Wait for the server to be started. + connection.listen(); + logToDisk(`Sending start notification for ${testServerData.testName}`, testServerData.logFile); + CustomLSP.sendNotification(connection, CustomLSP.Notifications.TestStartServer, testServerData); + const serverTestName = await serverStarted.promise; + assert.equal(serverTestName, testServerData.testName, 'Server started for wrong test'); + + logToDisk(`Started test ${testServerData.testName}`, testServerData.logFile); + + // Initialize the server if requested. + if (callInitialize) { + await initializeLanguageServer(info); + logToDisk(`Initialized test ${testServerData.testName}`, testServerData.logFile); + + // If added any extra settings, wait for the server to query them. 
+ if (extraSettings && extraSettings.length > 0) { + await waitForPromise( + info.queriedConfigSettings.promise, + 5000, + `Timed out waiting for server to query configuration settings for test ${testServerData.testName}` + ); + } + } + + if (lastServerFinished.name === testServerData.testName) { + lastServerFinished.finished = true; + } else { + logToDisk(`Last server finished was incorrectly updated to ${lastServerFinished.name}`, testServerData.logFile); + } + + return info; +} + +function startWorkspaceDiagnostics(info: PyrightServerInfo, extraMessage: string) { + if (!info.pendingWorkspaceDiagnostics) { + info.logs.push({ message: `Starting workspace diagnostics request: ${extraMessage}`, type: 4 }); + info.pendingWorkspaceDiagnostics = info.connection + .sendRequest(WorkspaceDiagnosticRequest.type, { + identifier: 'Pylance', + previousResultIds: [], + partialResultToken: info.workspaceDiagnosticsPartialResultToken, + }) + .then(() => { + info.logs.push({ message: 'Workspace diagnostics request completed', type: 4 }); + }); + } +} + +async function stopWorkspaceDiagnostics(info: PyrightServerInfo, extraMessage: string) { + if (info.pendingWorkspaceDiagnostics) { + const pending = info.pendingWorkspaceDiagnostics; + info.pendingWorkspaceDiagnostics = undefined; + try { + await waitForPromise( + pending, + 5000, + `Timed out waiting for stopping workspace diagnostics during ${extraMessage}` + ); + } catch { + // Ignore errors. We're trying to stop it. + } + } +} + +export async function initializeLanguageServer(info: PyrightServerInfo) { + const params = info.getInitializeParams(); + + // Send the initialize request. 
+ const result = await info.connection.sendRequest(InitializeRequest.type, params, CancellationToken.None); + info.connection.sendNotification(InitializedNotification.type, {}); + + if (params.workspaceFolders?.length) { + await info.connection.sendNotification(DidChangeWorkspaceFoldersNotification.type, { + event: { + added: params.workspaceFolders!, + removed: [], + }, + }); + + // Wait until workspace initialization is done. + // This is required since some tests check status of server directly. In such case, even if the client sent notification, + // server might not have processed it and still in the event queue. + // This check makes sure server at least processed initialization before test checking server status directly. + // If test only uses response from client.sendRequest, then this won't be needed. + await info.signals.get(CustomLSP.TestSignalKinds.Initialization)!.promise; + } + + // Start the polling for workspace diagnostics as done by the VS Code LSP client. + if (result.capabilities.diagnosticProvider?.workspaceDiagnostics) { + startWorkspaceDiagnostics(info, 'initialize'); + } + return result; +} + +export async function sleep(timeout: number): Promise<number> { + return new Promise<number>((resolve) => { + setTimeout(() => resolve(timeout), timeout); + }); +} + +export async function waitForTestSignal(info: PyrightServerInfo, signal: CustomLSP.TestSignalKinds, timeout = 10000) { + const result = await Promise.race([info.signals.get(signal)!.promise, sleep(timeout)]); + if (result === timeout) { + throw new Error(`Timed out waiting for signal ${signal}`); + } +} + +export async function openFile(info: PyrightServerInfo, markerName: string, text?: string) { + const marker = getMarkerByName(info.testData, markerName); + const uri = marker.fileUri.toString(); + + text = text ?? 
info.testData.files.find((f) => f.fileName === marker.fileName)!.content; + + info.connection.sendNotification(DidOpenTextDocumentNotification.type, { + textDocument: { uri, languageId: 'python', version: 1, text }, + }); + + await waitForTestSignal(info, CustomLSP.TestSignalKinds.DidOpenDocument); +} + +export async function hover(info: PyrightServerInfo, markerName: string) { + const marker = info.testData.markerPositions.get(markerName)!; + const fileUri = marker.fileUri; + const text = info.testData.files.find((d) => d.fileName === marker.fileName)!.content; + const parseResult = getParseResults(text); + const hoverResult = await info.connection.sendRequest( + HoverRequest.type, + { + textDocument: { uri: fileUri.toString() }, + position: convertOffsetToPosition(marker.position, parseResult.tokenizerOutput.lines), + }, + CancellationToken.None + ); + + return hoverResult; +} + +export function getInitializeParams(projectRoots: Uri[], supportsPullDiagnostics: boolean) { + // cloned vscode "1.71.0-insider"'s initialize params. + const workspaceFolders = projectRoots + ? 
projectRoots.map((root, i) => ({ name: root.fileName, uri: projectRoots[i].toString() })) + : []; + + const params: InitializeParams = { + processId: process.pid, + clientInfo: { + name: `Pylance Unit Test ${expect.getState().currentTestName}`, + version: '1.71.0-insider', + }, + locale: 'en-us', + rootPath: null, + rootUri: null, + capabilities: { + workspace: { + applyEdit: true, + workspaceEdit: { + documentChanges: true, + resourceOperations: ['create', 'rename', 'delete'], + failureHandling: 'textOnlyTransactional', + normalizesLineEndings: true, + changeAnnotationSupport: { + groupsOnLabel: true, + }, + }, + configuration: true, + didChangeWatchedFiles: { + dynamicRegistration: true, + relativePatternSupport: true, + }, + symbol: { + dynamicRegistration: true, + symbolKind: { + valueSet: [ + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, + 26, + ], + }, + tagSupport: { + valueSet: [1], + }, + resolveSupport: { + properties: ['location.range'], + }, + }, + codeLens: { + refreshSupport: true, + }, + executeCommand: { + dynamicRegistration: true, + }, + didChangeConfiguration: { + dynamicRegistration: true, + }, + workspaceFolders: true, + semanticTokens: { + refreshSupport: true, + }, + fileOperations: { + dynamicRegistration: true, + didCreate: true, + didRename: true, + didDelete: true, + willCreate: true, + willRename: true, + willDelete: true, + }, + inlineValue: { + refreshSupport: true, + }, + inlayHint: { + refreshSupport: true, + }, + diagnostics: { + refreshSupport: true, + }, + }, + textDocument: { + publishDiagnostics: { + relatedInformation: true, + versionSupport: false, + tagSupport: { + valueSet: [1, 2], + }, + codeDescriptionSupport: true, + dataSupport: true, + }, + synchronization: { + dynamicRegistration: true, + willSave: true, + willSaveWaitUntil: true, + didSave: true, + }, + completion: { + dynamicRegistration: true, + contextSupport: true, + completionItem: { + snippetSupport: true, + 
commitCharactersSupport: true, + documentationFormat: ['markdown', 'plaintext'], + deprecatedSupport: true, + preselectSupport: true, + tagSupport: { + valueSet: [1], + }, + insertReplaceSupport: true, + resolveSupport: { + properties: ['documentation', 'detail', 'additionalTextEdits'], + }, + insertTextModeSupport: { + valueSet: [1, 2], + }, + labelDetailsSupport: true, + }, + insertTextMode: 2, + completionItemKind: { + valueSet: [ + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, + ], + }, + completionList: { + itemDefaults: ['commitCharacters', 'editRange', 'insertTextFormat', 'insertTextMode'], + }, + }, + hover: { + dynamicRegistration: true, + contentFormat: ['markdown', 'plaintext'], + }, + signatureHelp: { + dynamicRegistration: true, + signatureInformation: { + documentationFormat: ['markdown', 'plaintext'], + parameterInformation: { + labelOffsetSupport: true, + }, + activeParameterSupport: true, + }, + contextSupport: true, + }, + definition: { + dynamicRegistration: true, + linkSupport: true, + }, + references: { + dynamicRegistration: true, + }, + documentHighlight: { + dynamicRegistration: true, + }, + documentSymbol: { + dynamicRegistration: true, + symbolKind: { + valueSet: [ + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, + 26, + ], + }, + hierarchicalDocumentSymbolSupport: true, + tagSupport: { + valueSet: [1], + }, + labelSupport: true, + }, + codeAction: { + dynamicRegistration: true, + isPreferredSupport: true, + disabledSupport: true, + dataSupport: true, + resolveSupport: { + properties: ['edit'], + }, + codeActionLiteralSupport: { + codeActionKind: { + valueSet: [ + '', + 'quickfix', + 'refactor', + 'refactor.extract', + 'refactor.inline', + 'refactor.rewrite', + 'source', + 'source.organizeImports', + ], + }, + }, + honorsChangeAnnotations: false, + }, + codeLens: { + dynamicRegistration: true, + }, + formatting: { + dynamicRegistration: true, + }, + 
rangeFormatting: { + dynamicRegistration: true, + }, + onTypeFormatting: { + dynamicRegistration: true, + }, + rename: { + dynamicRegistration: true, + prepareSupport: true, + prepareSupportDefaultBehavior: 1, + honorsChangeAnnotations: true, + }, + documentLink: { + dynamicRegistration: true, + tooltipSupport: true, + }, + typeDefinition: { + dynamicRegistration: true, + linkSupport: true, + }, + implementation: { + dynamicRegistration: true, + linkSupport: true, + }, + colorProvider: { + dynamicRegistration: true, + }, + foldingRange: { + dynamicRegistration: true, + rangeLimit: 5000, + lineFoldingOnly: true, + foldingRangeKind: { + valueSet: ['comment', 'imports', 'region'], + }, + foldingRange: { + collapsedText: false, + }, + }, + declaration: { + dynamicRegistration: true, + linkSupport: true, + }, + selectionRange: { + dynamicRegistration: true, + }, + callHierarchy: { + dynamicRegistration: true, + }, + semanticTokens: { + dynamicRegistration: true, + tokenTypes: [ + 'namespace', + 'type', + 'class', + 'enum', + 'interface', + 'struct', + 'typeParameter', + 'parameter', + 'variable', + 'property', + 'enumMember', + 'event', + 'function', + 'method', + 'macro', + 'keyword', + 'modifier', + 'comment', + 'string', + 'number', + 'regexp', + 'operator', + 'decorator', + ], + tokenModifiers: [ + 'declaration', + 'definition', + 'readonly', + 'static', + 'deprecated', + 'abstract', + 'async', + 'modification', + 'documentation', + 'defaultLibrary', + ], + formats: ['relative'], + requests: { + range: true, + full: { + delta: true, + }, + }, + multilineTokenSupport: false, + overlappingTokenSupport: false, + serverCancelSupport: true, + augmentsSyntaxTokens: true, + }, + linkedEditingRange: { + dynamicRegistration: true, + }, + typeHierarchy: { + dynamicRegistration: true, + }, + inlineValue: { + dynamicRegistration: true, + }, + inlayHint: { + dynamicRegistration: true, + resolveSupport: { + properties: ['tooltip', 'textEdits', 'label.tooltip', 'label.location', 
'label.command'], + }, + }, + diagnostic: { + dynamicRegistration: true, + relatedDocumentSupport: false, + }, + }, + window: { + showMessage: { + messageActionItem: { + additionalPropertiesSupport: true, + }, + }, + showDocument: { + support: true, + }, + workDoneProgress: true, + }, + general: { + staleRequestSupport: { + cancel: true, + retryOnContentModified: [ + 'textDocument/semanticTokens/full', + 'textDocument/semanticTokens/range', + 'textDocument/semanticTokens/full/delta', + ], + }, + regularExpressions: { + engine: 'ECMAScript', + version: 'ES2020', + }, + markdown: { + parser: 'marked', + version: '1.1.0', + }, + positionEncodings: ['utf-16'], + }, + notebookDocument: { + synchronization: { + dynamicRegistration: true, + executionSummarySupport: true, + }, + }, + }, + initializationOptions: { + autoFormatStrings: true, + disablePullDiagnostics: !supportsPullDiagnostics, + }, + workspaceFolders, + }; + + return params; +} + +export class TestHost extends LimitedAccessHost { + private readonly _options: TestHostOptions; + + constructor( + readonly fs: FileSystem, + readonly testFs: vfs.TestFileSystem, + readonly testData: FourSlashData, + readonly projectRoots: string[], + options?: TestHostOptions + ) { + super(); + + this._options = options ?? 
new TestHostOptions(); + } + + override get kind(): HostKind { + return HostKind.FullAccess; + } + + override getPythonVersion(pythonPath?: Uri, importLogger?: ImportLogger): PythonVersion | undefined { + return this._options.version; + } + + override getPythonPlatform(importLogger?: ImportLogger): PythonPlatform | undefined { + return this._options.platform; + } + + override getPythonSearchPaths(pythonPath?: Uri, importLogger?: ImportLogger): PythonPathResult { + return { + paths: this._options.searchPaths, + prefix: Uri.empty(), + }; + } + + override runScript( + pythonPath: Uri | undefined, + scriptPath: Uri, + args: string[], + cwd: Uri, + token: CancellationToken + ): Promise<ScriptOutput> { + return this._options.runScript(pythonPath, scriptPath, args, cwd, token); + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/lsp/main.ts b/python-parser/packages/pyright-internal/src/tests/lsp/main.ts new file mode 100644 index 00000000..fe73ca0b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/lsp/main.ts @@ -0,0 +1,11 @@ +/* + * main.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Provides the main entrypoint to the test server when running in Node. + */ + +import { run } from './languageServer'; + +run(); diff --git a/python-parser/packages/pyright-internal/src/tests/parseTreeUtils.test.ts b/python-parser/packages/pyright-internal/src/tests/parseTreeUtils.test.ts new file mode 100644 index 00000000..957deca9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/parseTreeUtils.test.ts @@ -0,0 +1,432 @@ +/* + * parseTreeUtils.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for parseTreeUtils module. 
+ */ + +import assert from 'assert'; + +import { + findNodeByOffset, + getDottedName, + getDottedNameWithGivenNodeAsLastName, + getFirstAncestorOrSelfOfKind, + getFirstNameOfDottedName, + getFullStatementRange, + getStringNodeValueRange, + isFirstNameOfDottedName, + isFromImportAlias, + isFromImportModuleName, + isFromImportName, + isImportAlias, + isImportModuleName, + isLastNameOfDottedName, + printExpression, +} from '../analyzer/parseTreeUtils'; +import { TextRange, rangesAreEqual } from '../common/textRange'; +import { MemberAccessNode, NameNode, ParseNodeType, StringNode, isExpressionNode } from '../parser/parseNodes'; +import { TestState, getNodeAtMarker, getNodeForRange, parseAndGetTestState } from './harness/fourslash/testState'; + +test('isImportModuleName', () => { + const code = ` +//// import [|/*marker*/os|] + `; + + assert(isImportModuleName(getNodeAtMarker(code))); +}); + +test('isImportAlias', () => { + const code = ` +//// import os as [|/*marker*/os|] + `; + + assert(isImportAlias(getNodeAtMarker(code))); +}); + +test('isFromImportModuleName', () => { + const code = ` +//// from [|/*marker*/os|] import path + `; + + assert(isFromImportModuleName(getNodeAtMarker(code))); +}); + +test('isFromImportName', () => { + const code = ` +//// from . import [|/*marker*/os|] + `; + + assert(isFromImportName(getNodeAtMarker(code))); +}); + +test('isFromImportAlias', () => { + const code = ` +//// from . 
import os as [|/*marker*/os|] + `; + + assert(isFromImportAlias(getNodeAtMarker(code))); +}); + +test('getFirstAncestorOrSelfOfKind', () => { + const code = ` +//// import a.b.c +//// a.b.c.function( +//// 1 + 2 + 3, +//// [|/*result*/a.b.c.function2( +//// [|/*marker*/"name"|] +//// )|] +//// ) + `; + + const state = parseAndGetTestState(code).state; + const node = getFirstAncestorOrSelfOfKind(getNodeAtMarker(state), ParseNodeType.Call); + assert(node); + + const result = state.getRangeByMarkerName('result')!; + assert(node.nodeType === ParseNodeType.Call); + assert(node.start === result.pos); + assert(TextRange.getEnd(node) === result.end); +}); + +test('getDottedNameWithGivenNodeAsLastName', () => { + const code = ` +//// [|/*result1*/[|/*marker1*/a|]|] +//// [|/*result2*/a.[|/*marker2*/b|]|] +//// [|/*result3*/a.b.[|/*marker3*/c|]|] +//// [|/*result4*/a.[|/*marker4*/b|]|].c +//// [|/*result5*/[|/*marker5*/a|]|].b.c + `; + + const state = parseAndGetTestState(code).state; + + for (let i = 1; i <= 5; i++) { + const markerName = 'marker' + i; + const resultName = 'result' + i; + const node = getDottedNameWithGivenNodeAsLastName(getNodeAtMarker(state, markerName) as NameNode); + const result = state.getRangeByMarkerName(resultName)!; + + assert(node.nodeType === ParseNodeType.Name || node.nodeType === ParseNodeType.MemberAccess); + assert(node.start === result.pos); + assert(TextRange.getEnd(node) === result.end); + } +}); + +test('getDottedName', () => { + const code = ` +//// [|/*marker1*/a|] +//// [|/*marker2*/a.b|] +//// [|/*marker3*/a.b.c|] +//// [|/*marker4*/a.b|].c +//// [|/*marker5*/a|].b.c + `; + + const state = parseAndGetTestState(code).state; + + assert.strictEqual(getDottedNameString('marker1'), 'a'); + assert.strictEqual(getDottedNameString('marker2'), 'a.b'); + assert.strictEqual(getDottedNameString('marker3'), 'a.b.c'); + assert.strictEqual(getDottedNameString('marker4'), 'a.b'); + assert.strictEqual(getDottedNameString('marker5'), 'a'); + + 
function getDottedNameString(marker: string) { + const node = getNodeForRange(state, marker); + return getDottedName(node as NameNode | MemberAccessNode) + ?.map((n) => n.d.value) + .join('.'); + } +}); + +test('getFirstNameOfDottedName', () => { + const code = ` +//// [|/*marker1*/a|] +//// [|/*marker2*/a.b|] +//// [|/*marker3*/a.b.c|] +//// [|/*marker4*/a.b|].c +//// [|/*marker5*/a|].b.c + `; + + const state = parseAndGetTestState(code).state; + + assert.strictEqual(getDottedNameString('marker1'), 'a'); + assert.strictEqual(getDottedNameString('marker2'), 'a'); + assert.strictEqual(getDottedNameString('marker3'), 'a'); + assert.strictEqual(getDottedNameString('marker4'), 'a'); + assert.strictEqual(getDottedNameString('marker5'), 'a'); + + function getDottedNameString(marker: string) { + const node = getNodeForRange(state, marker); + return getFirstNameOfDottedName(node as NameNode | MemberAccessNode)?.d.value ?? ''; + } +}); + +test('isLastNameOfDottedName', () => { + const code = ` +//// [|/*marker1*/a|] +//// a.[|/*marker2*/b|] +//// a.b.[|/*marker3*/c|] +//// a.[|/*marker4*/b|].c +//// [|/*marker5*/a|].b.c +//// (a).[|/*marker6*/b|] +//// (a.b).[|/*marker7*/c|] +//// a().[|/*marker8*/b|] +//// a[0].[|/*marker9*/b|] +//// a.b([|/*marker10*/c|]).d +//// a.b.([|/*marker11*/c|]) +//// a.[|/*marker12*/b|].c() +//// a.[|/*marker13*/b|]() +//// a.[|/*marker14*/b|][] + `; + + const state = parseAndGetTestState(code).state; + + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker1') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker2') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker3') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker4') as NameNode), false); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker5') as NameNode), false); + 
assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker6') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker7') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker8') as NameNode), false); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker9') as NameNode), false); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker10') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker11') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker12') as NameNode), false); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker13') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker14') as NameNode), true); +}); + +test('isFirstNameOfDottedName', () => { + const code = ` +//// [|/*marker1*/a|] +//// a.[|/*marker2*/b|] +//// a.b.[|/*marker3*/c|] +//// a.[|/*marker4*/b|].c +//// [|/*marker5*/a|].b.c +//// ([|/*marker6*/a|]).b +//// (a.b).[|/*marker7*/c|] +//// [|/*marker8*/a|]().b +//// a[0].[|/*marker9*/b|] +//// a.b([|/*marker10*/c|]).d +//// a.b.([|/*marker11*/c|]) +//// a.[|/*marker12*/b|].c() +//// [|/*marker13*/a|].b() +//// a.[|/*marker14*/b|][] +//// [|/*marker15*/a|][] + `; + + const state = parseAndGetTestState(code).state; + + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker1') as NameNode), true); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker2') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker3') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker4') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker5') as NameNode), true); + 
assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker6') as NameNode), true); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker7') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker8') as NameNode), true); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker9') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker10') as NameNode), true); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker11') as NameNode), true); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker12') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker13') as NameNode), true); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker14') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker15') as NameNode), true); +}); + +test('getStringNodeValueRange', () => { + const code = ` +//// a = "[|/*marker1*/test|]" +//// b = '[|/*marker2*/test2|]' +//// c = '''[|/*marker3*/test3|]''' + `; + + const state = parseAndGetTestState(code).state; + + for (let i = 1; i <= 3; i++) { + const markerName = 'marker' + i; + const range = getStringNodeValueRange(getNodeAtMarker(state, markerName) as StringNode); + const result = state.getRangeByMarkerName(markerName)!; + + assert(range.start === result.pos); + assert(TextRange.getEnd(range) === result.end); + } +}); + +test('getFullStatementRange', () => { + const code = ` +//// [|/*marker1*/import a +//// |][|/*marker2*/a = 1; |][|/*marker3*/b = 2 +//// |] +//// try: +//// [| /*marker4*/a = 1 +//// |]except Exception: +//// pass +//// [|/*marker5*/if True: +//// pass|] + `; + + const state = parseAndGetTestState(code).state; + + testNodeRange(state, 'marker1', ParseNodeType.Import); + testNodeRange(state, 'marker2', ParseNodeType.Assignment); + 
testNodeRange(state, 'marker3', ParseNodeType.Assignment); + testNodeRange(state, 'marker4', ParseNodeType.Assignment); + testNodeRange(state, 'marker5', ParseNodeType.If); +}); + +test('getFullStatementRange with trailing blank lines', () => { + const code = ` +//// [|/*marker*/def foo(): +//// return 1 +//// +//// |]def bar(): +//// pass + `; + + const state = parseAndGetTestState(code).state; + + testNodeRange(state, 'marker', ParseNodeType.Function, true); +}); + +test('getFullStatementRange with only trailing blank lines', () => { + const code = ` +//// [|/*marker*/def foo(): +//// return 1 +//// |] +//// + `; + + const state = parseAndGetTestState(code).state; + + testNodeRange(state, 'marker', ParseNodeType.Function, true); +}); + +test('printExpression', () => { + const code = ` +//// [|/*marker1*/not x|] +//// [|/*marker2*/+x|] + `; + const state = parseAndGetTestState(code).state; + checkExpression('marker1', 'not x'); + checkExpression('marker2', '+x'); + + function checkExpression(marker: string, expected: string) { + const node = getNodeAtMarker(state, marker); + assert(isExpressionNode(node)); + assert.strictEqual(printExpression(node), expected); + } +}); + +test('findNodeByOffset', () => { + const code = ` +//// class A: +//// def read(self): pass +//// +//// class B(A): +//// x1 = 1 +//// def r[|/*marker*/|] +//// + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + const sourceFile = state.program.getBoundSourceFile(range.marker!.fileUri)!; + + const node = findNodeByOffset(sourceFile.getParseResults()!.parserOutput.parseTree, range.pos); + assert.strictEqual(node?.nodeType, ParseNodeType.Name); + assert.strictEqual((node as NameNode).d.value, 'r'); +}); + +test('findNodeByOffset with binary search', () => { + const code = ` +//// class A: +//// def read(self): pass +//// +//// class B(A): +//// x1 = 1 +//// x2 = 2 +//// x3 = 3 +//// x4 = 4 +//// x5 = 5 +//// x6 = 6 +//// x7 = 7 +//// x8 
= 8 +//// x9 = 9 +//// x10 = 10 +//// x11 = 11 +//// x12 = 12 +//// x13 = 13 +//// x14 = 14 +//// x15 = 15 +//// x16 = 16 +//// x17 = 17 +//// x18 = 18 +//// x19 = 19 +//// def r[|/*marker*/|] +//// + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + const sourceFile = state.program.getBoundSourceFile(range.marker!.fileUri)!; + + const node = findNodeByOffset(sourceFile.getParseResults()!.parserOutput.parseTree, range.pos); + assert.strictEqual(node?.nodeType, ParseNodeType.Name); + assert.strictEqual((node as NameNode).d.value, 'r'); +}); + +test('findNodeByOffset with binary search choose earliest match', () => { + const code = ` +//// class A: +//// def read(self): pass +//// +//// class B(A): +//// x1 = 1 +//// x2 = 2 +//// x3 = 3 +//// x4 = 4 +//// x5 = 5 +//// x6 = 6 +//// x7 = 7 +//// x8 = 8 +//// x9 = 9 +//// x10 = 10 +//// x11 = 11 +//// x12 = 12 +//// x13 = 13 +//// x14 = 14 +//// x15 = 15 +//// x16 = 16 +//// x17 = 17 +//// x18 = 18 +//// x19 = 19 +//// def r[|/*marker*/|] +//// x20 = 20 +//// x21 = 21 +//// + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + const sourceFile = state.program.getBoundSourceFile(range.marker!.fileUri)!; + + const node = findNodeByOffset(sourceFile.getParseResults()!.parserOutput.parseTree, range.pos); + assert.strictEqual(node?.nodeType, ParseNodeType.Name); + assert.strictEqual((node as NameNode).d.value, 'r'); +}); + +function testNodeRange(state: TestState, markerName: string, type: ParseNodeType, includeTrailingBlankLines = false) { + const range = state.getRangeByMarkerName(markerName)!; + const sourceFile = state.program.getBoundSourceFile(range.marker!.fileUri)!; + + const statementNode = getFirstAncestorOrSelfOfKind(getNodeAtMarker(state, markerName), type)!; + const statementRange = getFullStatementRange(statementNode, sourceFile.getParseResults()!, { + includeTrailingBlankLines, + }); + + 
const expectedRange = state.convertPositionRange(range); + + assert(rangesAreEqual(expectedRange, statementRange)); +} diff --git a/python-parser/packages/pyright-internal/src/tests/parser.test.ts b/python-parser/packages/pyright-internal/src/tests/parser.test.ts new file mode 100644 index 00000000..ce22bd98 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/parser.test.ts @@ -0,0 +1,169 @@ +/* + * parser.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for Python parser. These are very basic because + * the parser gets lots of exercise in the type checker tests. + */ + +import * as assert from 'assert'; + +import { findNodeByOffset, getFirstAncestorOrSelfOfKind } from '../analyzer/parseTreeUtils'; +import { ExecutionEnvironment, getStandardDiagnosticRuleSet } from '../common/configOptions'; +import { DiagnosticSink } from '../common/diagnosticSink'; +import { pythonVersion3_13, pythonVersion3_14 } from '../common/pythonVersion'; +import { TextRange } from '../common/textRange'; +import { UriEx } from '../common/uri/uriUtils'; +import { ParseNodeType, StatementListNode } from '../parser/parseNodes'; +import { getNodeAtMarker, parseAndGetTestState } from './harness/fourslash/testState'; +import * as TestUtils from './testUtils'; + +test('Empty', () => { + const diagSink = new DiagnosticSink(); + const parserOutput = TestUtils.parseText('', diagSink).parserOutput; + + assert.equal(diagSink.fetchAndClear().length, 0); + assert.equal(parserOutput.parseTree.d.statements.length, 0); +}); + +test('Parser1', () => { + const diagSink = new DiagnosticSink(); + const parserOutput = TestUtils.parseSampleFile('parser1.py', diagSink).parserOutput; + + assert.equal(diagSink.fetchAndClear().length, 0); + assert.equal(parserOutput.parseTree.d.statements.length, 4); +}); + +test('Parser2', () => { + const diagSink = new DiagnosticSink(); + TestUtils.parseSampleFile('parser2.py', 
diagSink); + assert.strictEqual(diagSink.getErrors().length, 0); +}); + +test('FStringEmptyTuple', () => { + assert.doesNotThrow(() => { + const diagSink = new DiagnosticSink(); + TestUtils.parseSampleFile('fstring6.py', diagSink); + }); +}); + +test('SuiteExpectedColon1', () => { + const diagSink = new DiagnosticSink(); + TestUtils.parseSampleFile('suiteExpectedColon1.py', diagSink); + assert.strictEqual(diagSink.getErrors().length, 1); +}); + +test('SuiteExpectedColon2', () => { + const diagSink = new DiagnosticSink(); + TestUtils.parseSampleFile('suiteExpectedColon2.py', diagSink); + assert.strictEqual(diagSink.getErrors().length, 1); +}); + +test('SuiteExpectedColon3', () => { + const diagSink = new DiagnosticSink(); + TestUtils.parseSampleFile('suiteExpectedColon3.py', diagSink); + assert.strictEqual(diagSink.getErrors().length, 1); +}); + +test('ExpressionWrappedInParens', () => { + const diagSink = new DiagnosticSink(); + const parserOutput = TestUtils.parseText('(str)', diagSink).parserOutput; + + assert.equal(diagSink.fetchAndClear().length, 0); + assert.equal(parserOutput.parseTree.d.statements.length, 1); + assert.equal(parserOutput.parseTree.d.statements[0].nodeType, ParseNodeType.StatementList); + + const statementList = parserOutput.parseTree.d.statements[0] as StatementListNode; + assert.equal(statementList.d.statements.length, 1); + + // length of node should include parens + assert.equal(statementList.d.statements[0].nodeType, ParseNodeType.Name); + assert.equal(statementList.d.statements[0].length, 5); +}); + +test('MaxParseDepth1', () => { + const diagSink = new DiagnosticSink(); + TestUtils.parseSampleFile('maxParseDepth1.py', diagSink); + assert.strictEqual(diagSink.getErrors().length, 1); +}); + +test('MaxParseDepth2', () => { + const diagSink = new DiagnosticSink(); + TestUtils.parseSampleFile('maxParseDepth2.py', diagSink); + assert.strictEqual(diagSink.getErrors().length, 4); +}); + +test('ModuleName range', () => { + const code = ` +//// 
from [|/*marker*/...|] import A + `; + + const state = parseAndGetTestState(code).state; + const expectedRange = state.getRangeByMarkerName('marker'); + const node = getNodeAtMarker(state); + + assert.strictEqual(node.start, expectedRange?.pos); + assert.strictEqual(TextRange.getEnd(node), expectedRange?.end); +}); + +test('ParserRecovery1', () => { + const diagSink = new DiagnosticSink(); + const parseResults = TestUtils.parseSampleFile('parserRecovery1.py', diagSink); + + const node = findNodeByOffset(parseResults.parserOutput.parseTree, parseResults.text.length - 2); + const functionNode = getFirstAncestorOrSelfOfKind(node, ParseNodeType.Function); + assert.equal(functionNode!.parent!.nodeType, ParseNodeType.Module); +}); + +test('ParserRecovery2', () => { + const diagSink = new DiagnosticSink(); + const parseResults = TestUtils.parseSampleFile('parserRecovery2.py', diagSink); + + const node = findNodeByOffset(parseResults.parserOutput.parseTree, parseResults.text.length - 2); + const functionNode = getFirstAncestorOrSelfOfKind(node, ParseNodeType.Function); + assert.equal(functionNode!.parent!.nodeType, ParseNodeType.Suite); +}); + +test('ParserRecovery3', () => { + const diagSink = new DiagnosticSink(); + const parseResults = TestUtils.parseSampleFile('parserRecovery3.py', diagSink); + + const node = findNodeByOffset(parseResults.parserOutput.parseTree, parseResults.text.length - 2); + const functionNode = getFirstAncestorOrSelfOfKind(node, ParseNodeType.Function); + assert.equal(functionNode!.parent!.nodeType, ParseNodeType.Module); +}); + +test('FinallyExit1', () => { + const execEnvironment = new ExecutionEnvironment( + 'python', + UriEx.file('.'), + getStandardDiagnosticRuleSet(), + /* defaultPythonVersion */ undefined, + /* defaultPythonPlatform */ undefined, + /* defaultExtraPaths */ undefined + ); + + const diagSink1 = new DiagnosticSink(); + execEnvironment.pythonVersion = pythonVersion3_13; + TestUtils.parseSampleFile('finallyExit1.py', diagSink1, 
execEnvironment); + assert.strictEqual(diagSink1.getErrors().length, 0); + + const diagSink2 = new DiagnosticSink(); + execEnvironment.pythonVersion = pythonVersion3_14; + TestUtils.parseSampleFile('finallyExit1.py', diagSink2, execEnvironment); + assert.strictEqual(diagSink2.getErrors().length, 5); +}); + +test('TrailingBackslashCRAtEOF', () => { + // A file that ends with a line-continuation backslash followed by a CR + // should produce a syntax error. + const code = '"""Comment"""\n\n\\\r'; + + const diagSink = new DiagnosticSink(); + TestUtils.parseText(code, diagSink); + const errors = diagSink.getErrors(); + assert.strictEqual(errors.length > 0, true); + assert.ok(errors.some((e) => e.message.includes('Unexpected EOF'))); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/pathUtils.test.ts b/python-parser/packages/pyright-internal/src/tests/pathUtils.test.ts new file mode 100644 index 00000000..df18aa62 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/pathUtils.test.ts @@ -0,0 +1,361 @@ +/* + * pathUtils.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for pathUtils module. 
+ */ + +import assert from 'assert'; +import * as path from 'path'; + +import { + combinePathComponents, + combinePaths, + containsPath, + ensureTrailingDirectorySeparator, + getAnyExtensionFromPath, + getBaseFileName, + getFileExtension, + getFileName, + getPathComponents, + getRelativePath, + getRootLength, + getWildcardRegexPattern, + getWildcardRoot, + hasTrailingDirectorySeparator, + isDirectoryWildcardPatternPresent, + isRootedDiskPath, + normalizeSlashes, + reducePathComponents, + resolvePaths, + stripFileExtension, + stripTrailingDirectorySeparator, +} from '../common/pathUtils'; + +test('getPathComponents1', () => { + const components = getPathComponents(''); + assert.equal(components.length, 1); + assert.equal(components[0], ''); +}); + +test('getPathComponents2', () => { + const components = getPathComponents('/users/'); + assert.equal(components.length, 2); + assert.equal(components[0], path.sep); + assert.equal(components[1], 'users'); +}); + +test('getPathComponents3', () => { + const components = getPathComponents('/users/hello.py'); + assert.equal(components.length, 3); + assert.equal(components[0], path.sep); + assert.equal(components[1], 'users'); + assert.equal(components[2], 'hello.py'); +}); + +test('getPathComponents4', () => { + const components = getPathComponents('/users/hello/../'); + assert.equal(components.length, 2); + assert.equal(components[0], path.sep); + assert.equal(components[1], 'users'); +}); + +test('getPathComponents5', () => { + const components = getPathComponents('./hello.py'); + assert.equal(components.length, 2); + assert.equal(components[0], ''); + assert.equal(components[1], 'hello.py'); +}); + +test('getPathComponents6', () => { + const components = getPathComponents(fixSeparators('//server/share/dir/file.py')); + assert.equal(components.length, 4); + assert.equal(components[0], fixSeparators('//server/')); + assert.equal(components[1], 'share'); + assert.equal(components[2], 'dir'); + assert.equal(components[3], 
'file.py'); +}); + +test('getPathComponents7', () => { + const components = getPathComponents('ab:cdef/test'); + assert.equal(components.length, 3); + assert.equal(components[0], ''); + assert.equal(components[1], 'ab:cdef'); + assert.equal(components[2], 'test'); +}); + +test('combinePaths1', () => { + const p = combinePaths('/user', '1', '2', '3'); + assert.equal(p, normalizeSlashes('/user/1/2/3')); +}); + +test('combinePaths2', () => { + const p = combinePaths('/foo', 'ab:c'); + assert.equal(p, normalizeSlashes('/foo/ab:c')); +}); + +test('combinePaths3', () => { + const p = combinePaths('untitled:foo', 'ab:c'); + assert.equal(p, normalizeSlashes('untitled:foo/ab:c')); +}); + +test('ensureTrailingDirectorySeparator1', () => { + const p = ensureTrailingDirectorySeparator('hello'); + assert.equal(p, normalizeSlashes('hello/')); +}); + +test('hasTrailingDirectorySeparator1', () => { + assert(!hasTrailingDirectorySeparator('hello')); + assert(hasTrailingDirectorySeparator('hello/')); + assert(hasTrailingDirectorySeparator('hello\\')); +}); + +test('stripTrailingDirectorySeparator1', () => { + const path = stripTrailingDirectorySeparator('hello/'); + assert.equal(path, 'hello'); +}); + +test('getFileExtension1', () => { + const ext = getFileExtension('blah.blah/hello.JsOn'); + assert.equal(ext, '.JsOn'); +}); + +test('getFileExtension2', () => { + const ext1 = getFileExtension('blah.blah/hello.cpython-32m.so', true); + assert.equal(ext1, '.cpython-32m.so'); + const ext2 = getFileExtension('blah.blah/hello.cpython-32m.so', false); + assert.equal(ext2, '.so'); +}); + +test('getFileName1', () => { + const fileName = getFileName('blah.blah/HeLLo.JsOn'); + assert.equal(fileName, 'HeLLo.JsOn'); +}); + +test('getFileName2', () => { + const fileName1 = getFileName('blah.blah/hello.cpython-32m.so'); + assert.equal(fileName1, 'hello.cpython-32m.so'); +}); + +test('stripFileExtension1', () => { + const path = stripFileExtension('blah.blah/HeLLo.JsOn'); + assert.equal(path, 
'blah.blah/HeLLo'); +}); + +test('stripFileExtension2', () => { + const path1 = stripFileExtension('blah.blah/hello.cpython-32m.so', true); + assert.equal(path1, 'blah.blah/hello'); + const path2 = stripFileExtension('blah.blah/hello.cpython-32m.so', false); + assert.equal(path2, 'blah.blah/hello.cpython-32m'); +}); + +function fixSeparators(linuxPath: string) { + if (path.sep === '\\') { + return linuxPath.replace(/\//g, path.sep); + } + return linuxPath; +} + +test('getWildcardRegexPattern1', () => { + const pattern = getWildcardRegexPattern('/users/me', './blah/'); + const regex = new RegExp(pattern); + assert.ok(regex.test(fixSeparators('/users/me/blah/d'))); + assert.ok(!regex.test(fixSeparators('/users/me/blad/d'))); +}); + +test('getWildcardRegexPattern2', () => { + const pattern = getWildcardRegexPattern('/users/me', './**/*.py?'); + const regex = new RegExp(pattern); + assert.ok(regex.test(fixSeparators('/users/me/.blah/foo.pyd'))); + assert.ok(!regex.test(fixSeparators('/users/me/.blah/foo.py'))); // No char after +}); + +test('getWildcardRegexPattern3', () => { + const pattern = getWildcardRegexPattern('/users/me', './**/.*.py'); + const regex = new RegExp(pattern); + assert.ok(regex.test(fixSeparators('/users/me/.blah/.foo.py'))); + assert.ok(!regex.test(fixSeparators('/users/me/.blah/foo.py'))); +}); + +test('getWildcardRegexPattern4', () => { + const pattern = getWildcardRegexPattern('//server/share/dir', '.'); + const regex = new RegExp(pattern); + assert.ok(regex.test(fixSeparators('//server/share/dir/foo.py'))); + assert.ok(!regex.test(fixSeparators('//server/share/dix/foo.py'))); +}); + +test('getWildcardRegexPattern5', () => { + const pattern = getWildcardRegexPattern('//server/share/dir++', '.'); + const regex = new RegExp(pattern); + assert.ok(regex.test(fixSeparators('//server/share/dir++/foo.py'))); + assert.ok(!regex.test(fixSeparators('//server/share/dix++/foo.py'))); +}); + +test('isDirectoryWildcardPatternPresent1', () => { + const 
isPresent = isDirectoryWildcardPatternPresent('./**/*.py'); + assert.equal(isPresent, true); +}); + +test('isDirectoryWildcardPatternPresent2', () => { + const isPresent = isDirectoryWildcardPatternPresent('./**/a/*.py'); + assert.equal(isPresent, true); +}); + +test('isDirectoryWildcardPatternPresent3', () => { + const isPresent = isDirectoryWildcardPatternPresent('./**/@tests'); + assert.equal(isPresent, true); +}); + +test('isDirectoryWildcardPatternPresent4', () => { + const isPresent = isDirectoryWildcardPatternPresent('./**/test/test*'); + assert.equal(isPresent, true); +}); + +test('getWildcardRoot1', () => { + const p = getWildcardRoot('/users/me', './blah/'); + assert.equal(p, normalizeSlashes('/users/me/blah')); +}); + +test('getWildcardRoot2', () => { + const p = getWildcardRoot('/users/me', './**/*.py?/'); + assert.equal(p, normalizeSlashes('/users/me')); +}); + +test('getWildcardRoot with root', () => { + const p = getWildcardRoot('/', '.'); + assert.equal(p, normalizeSlashes('/')); +}); + +test('getWildcardRoot with drive letter', () => { + const p = getWildcardRoot('c:/', '.'); + assert.equal(p, normalizeSlashes('c:')); +}); + +test('reducePathComponentsEmpty', () => { + assert.equal(reducePathComponents([]).length, 0); +}); + +test('reducePathComponents', () => { + assert.deepEqual(reducePathComponents(getPathComponents('/a/b/../c/.')), [path.sep, 'a', 'c']); +}); + +test('combinePathComponentsEmpty', () => { + assert.equal(combinePathComponents([]), ''); +}); + +test('combinePathComponentsAbsolute', () => { + assert.equal(combinePathComponents(['/', 'a', 'b']), normalizeSlashes('/a/b')); +}); + +test('combinePathComponents', () => { + assert.equal(combinePathComponents(['a', 'b']), normalizeSlashes('a/b')); +}); + +test('resolvePath1', () => { + assert.equal(resolvePaths('/path', 'to', 'file.ext'), normalizeSlashes('/path/to/file.ext')); +}); + +test('resolvePath2', () => { + assert.equal(resolvePaths('/path', 'to', '..', 'from', 'file.ext/'), 
normalizeSlashes('/path/from/file.ext/')); +}); + +test('containsPath1', () => { + assert.equal(containsPath('/a/b/c/', '/a/d/../b/c/./d'), true); +}); + +test('containsPath2', () => { + assert.equal(containsPath('/', '\\a'), true); +}); + +test('containsPath3', () => { + assert.equal(containsPath('/a', '/A/B', true), true); +}); + +test('changeAnyExtension2', () => { + assert.equal(getAnyExtensionFromPath('/path/to/file.ext', '.ts', true), ''); +}); + +test('changeAnyExtension3', () => { + assert.equal(getAnyExtensionFromPath('/path/to/file.ext', ['.ext', '.ts'], true), '.ext'); +}); + +test('getBaseFileName1', () => { + assert.equal(getBaseFileName('/path/to/file.ext'), 'file.ext'); +}); + +test('getBaseFileName2', () => { + assert.equal(getBaseFileName('/path/to/'), 'to'); +}); + +test('getBaseFileName3', () => { + assert.equal(getBaseFileName('c:/'), ''); +}); + +test('getBaseFileName4', () => { + assert.equal(getBaseFileName('/path/to/file.ext', ['.ext'], true), 'file'); +}); + +test('getRootLength1', () => { + assert.equal(getRootLength('a'), 0); +}); + +test('getRootLength2', () => { + assert.equal(getRootLength(fixSeparators('/')), 1); +}); + +test('getRootLength3', () => { + assert.equal(getRootLength('c:'), 2); +}); + +test('getRootLength4', () => { + assert.equal(getRootLength('c:d'), 0); +}); + +test('getRootLength5', () => { + assert.equal(getRootLength(fixSeparators('c:/')), 3); +}); + +test('getRootLength6', () => { + assert.equal(getRootLength(fixSeparators('//server')), 8); +}); + +test('getRootLength7', () => { + assert.equal(getRootLength(fixSeparators('//server/share')), 9); +}); + +test('isRootedDiskPath1', () => { + assert(isRootedDiskPath(normalizeSlashes('C:/a/b'))); +}); + +test('isRootedDiskPath2', () => { + assert(isRootedDiskPath(normalizeSlashes('/'))); +}); + +test('isRootedDiskPath3', () => { + assert(!isRootedDiskPath(normalizeSlashes('a/b'))); +}); + +test('isDiskPathRoot1', () => { + assert(isRootedDiskPath(normalizeSlashes('/'))); 
+}); + +test('isDiskPathRoot2', () => { + assert(isRootedDiskPath(normalizeSlashes('c:/'))); +}); + +test('isDiskPathRoot3', () => { + assert(isRootedDiskPath(normalizeSlashes('c:'))); +}); + +test('isDiskPathRoot4', () => { + assert(!isRootedDiskPath(normalizeSlashes('c:d'))); +}); + +test('getRelativePath', () => { + assert.equal( + getRelativePath(normalizeSlashes('/a/b/c/d/e/f'), normalizeSlashes('/a/b/c')), + normalizeSlashes('./d/e/f') + ); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/positionUtils.test.ts b/python-parser/packages/pyright-internal/src/tests/positionUtils.test.ts new file mode 100644 index 00000000..c8b0eeb1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/positionUtils.test.ts @@ -0,0 +1,72 @@ +/* + * positionUtils.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for positionUtils module. + */ + +import assert from 'assert'; + +import { DiagnosticSink } from '../common/diagnosticSink'; +import { convertOffsetToPosition, convertPositionToOffset, getLineEndPosition } from '../common/positionUtils'; +import { ParseOptions, Parser } from '../parser/parser'; +import { Tokenizer } from '../parser/tokenizer'; + +test('getLineEndOffset', () => { + const code = 'a = 1'; + + verifyLineEnding(code, 0, 5); +}); + +test('getLineEndOffset with windows style ending at EOF', () => { + const code = 'a = 1\r\n'; + + verifyLineEnding(code, 0, 5); +}); + +test('getLineEndOffset with windows style ending', () => { + const code = 'a = 1\r\nb = 1'; + + verifyLineEnding(code, 0, 5); +}); + +test('getLineEndOffset with unix style ending at EOF', () => { + const code = 'a = 1\n'; + + verifyLineEnding(code, 0, 5); +}); + +test('getLineEndOffset with unix style ending', () => { + const code = 'a = 1\nb = 1'; + + verifyLineEnding(code, 0, 5); +}); + +test('getLineEndOffset with mixed style ending', () => { + const code = 'a = 1\r\nb = 1\nc = 1\n'; + + verifyLineEnding(code, 
0, 5); + verifyLineEnding(code, 1, 5); + verifyLineEnding(code, 2, 5); +}); + +test('End of file position and offset conversion', () => { + const code = 'hello\n'; + + const t = new Tokenizer(); + const results = t.tokenize(code); + + const position = convertOffsetToPosition(code.length, results.lines); + assert.strictEqual(position.line, 1); + + const offset = convertPositionToOffset(position, results.lines); + assert.strictEqual(offset, code.length); +}); + +function verifyLineEnding(code: string, line: number, expected: number) { + const parser = new Parser(); + const parseResults = parser.parseSourceFile(code, new ParseOptions(), new DiagnosticSink()); + + assert.strictEqual(getLineEndPosition(parseResults.tokenizerOutput, parseResults.text, line).character, expected); +} diff --git a/python-parser/packages/pyright-internal/src/tests/privateImportUsage.test.ts b/python-parser/packages/pyright-internal/src/tests/privateImportUsage.test.ts new file mode 100644 index 00000000..e966e50f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/privateImportUsage.test.ts @@ -0,0 +1,342 @@ +/* + * privateImportUsage.test.ts + * + * Tests for reportPrivateImportUsage when source packages are added to check paths. + * + * Bug: When a py.typed package is added to check paths (e.g., via command line), + * the reportPrivateImportUsage errors incorrectly disappear because the file is + * created with isInPyTypedPackage=false instead of detecting it properly. 
+ */ + +import assert from 'assert'; + +import { ImportResolver } from '../analyzer/importResolver'; +import { Program } from '../analyzer/program'; +import { ConfigOptions } from '../common/configOptions'; +import { DiagnosticCategory } from '../common/diagnostic'; +import { lib, sitePackages } from '../common/pathConsts'; +import { combinePaths, getDirectoryPath, normalizeSlashes } from '../common/pathUtils'; +import { createServiceProvider } from '../common/serviceProviderExtensions'; +import { UriEx } from '../common/uri/uriUtils'; +import { TestAccessHost } from './harness/testAccessHost'; +import { TestFileSystem } from './harness/vfs/filesystem'; +import { PyrightFileSystem } from '../pyrightFileSystem'; + +const libraryRoot = combinePaths(normalizeSlashes('/'), lib, sitePackages); + +function createTestFileSystem(files: { path: string; content: string }[]): TestFileSystem { + const fs = new TestFileSystem(/* ignoreCase */ false, { cwd: normalizeSlashes('/') }); + + for (const file of files) { + const path = normalizeSlashes(file.path); + const dir = getDirectoryPath(path); + fs.mkdirpSync(dir); + + fs.writeFileSync(UriEx.file(path), file.content); + } + + return fs; +} + +function createServiceProviderFromFiles(files: { path: string; content: string }[]) { + const testFS = createTestFileSystem(files); + const fs = new PyrightFileSystem(testFS); + return createServiceProvider(testFS, fs); +} + +describe('reportPrivateImportUsage with tracked library files', () => { + test('error should persist when library with py.typed is also a tracked file', () => { + // Setup: Create three packages + // pkg_a: defines helper_func + // pkg_b: imports helper_func from pkg_a but doesn't re-export it (has py.typed) + // pkg_c: imports helper_func from pkg_b (should get error) + + const files = [ + // pkg_a in library (defines the original function) + { + path: combinePaths(libraryRoot, 'pkg_a', '__init__.py'), + content: '', + }, + { + path: combinePaths(libraryRoot, 
'pkg_a', 'py.typed'), + content: '', + }, + { + path: combinePaths(libraryRoot, 'pkg_a', 'utils.py'), + content: 'def helper_func(): pass', + }, + // pkg_b in library (re-imports without re-exporting) + { + path: combinePaths(libraryRoot, 'pkg_b', '__init__.py'), + content: '', + }, + { + path: combinePaths(libraryRoot, 'pkg_b', 'py.typed'), + content: '', + }, + { + path: combinePaths(libraryRoot, 'pkg_b', 'reexport.py'), + content: 'from pkg_a.utils import helper_func', // No __all__, not re-exported + }, + // pkg_c - local source file that imports from pkg_b + { + path: normalizeSlashes('/src/pkg_c/__init__.py'), + content: '', + }, + { + path: normalizeSlashes('/src/pkg_c/bad_import.py'), + content: 'from pkg_b.reexport import helper_func', // Should error + }, + ]; + + const sp = createServiceProviderFromFiles(files); + const configOptions = new ConfigOptions(UriEx.file('/')); + + // Set up execution environment with reportPrivateImportUsage enabled + configOptions.diagnosticRuleSet.reportPrivateImportUsage = 'error'; + + const importResolver = new ImportResolver( + sp, + configOptions, + new TestAccessHost(sp.fs().getModulePath(), [UriEx.file(libraryRoot)]) + ); + + const program = new Program(importResolver, configOptions, sp); + + // Track only the consumer file (pkg_c) + const consumerUri = UriEx.file('/src/pkg_c/bad_import.py'); + program.setTrackedFiles([consumerUri]); + + // Analyze + while (program.analyze()) { + // Continue until complete + } + + // Get diagnostics for the consumer file + const sourceFile = program.getSourceFile(consumerUri); + assert(sourceFile, 'Source file should exist'); + const diagnostics = sourceFile.getDiagnostics(configOptions) || []; + const errors = diagnostics.filter((d) => d.category === DiagnosticCategory.Error); + + // Should have 1 error about private import + assert.strictEqual( + errors.length, + 1, + `Expected 1 error when only consumer is tracked, got ${errors.length}: ${errors + .map((e) => e.message) + .join(', 
')}` + ); + assert( + errors[0].message.includes('not exported') || errors[0].message.includes('helper_func'), + `Error message should mention private import: ${errors[0].message}` + ); + + program.dispose(); + + // Now create a new program and track BOTH the library file and the consumer + const program2 = new Program(importResolver, configOptions, sp); + + const libraryFileUri = UriEx.file(combinePaths(libraryRoot, 'pkg_b', 'reexport.py')); + + // Track both files - this is the bug scenario + // When the library file is tracked, it should still detect py.typed + program2.setTrackedFiles([consumerUri, libraryFileUri]); + + // Analyze + while (program2.analyze()) { + // Continue until complete + } + + // Get diagnostics for the consumer file again + const sourceFile2 = program2.getSourceFile(consumerUri); + assert(sourceFile2, 'Source file should exist in second program'); + const diagnostics2 = sourceFile2.getDiagnostics(configOptions) || []; + const errors2 = diagnostics2.filter((d) => d.category === DiagnosticCategory.Error); + + // BUG: Without the fix, this would be 0 errors instead of 1 + // The error disappears because pkg_b/reexport.py is created with isInPyTypedPackage=false + assert.strictEqual( + errors2.length, + 1, + `Expected 1 error when library is also tracked, got ${errors2.length}. ` + + `Errors: ${errors2.map((e) => e.message).join(', ')}. ` + + `This is the bug - error should persist even when library file is tracked.` + ); + + program2.dispose(); + sp.dispose(); + }); + + test('config file reportPrivateImportUsage override should apply when includeFileSpecsOverride is set', () => { + // This tests the scenario where pyright is run with positional directory arguments + // (e.g., pyright --project pyrightconfig.json dir1) and the config file has + // reportPrivateImportUsage: false. The override should still be respected. 
+ + const files = [ + // pkg_a in library (defines the original function) + { + path: combinePaths(libraryRoot, 'pkg_a', '__init__.py'), + content: '', + }, + { + path: combinePaths(libraryRoot, 'pkg_a', 'py.typed'), + content: '', + }, + { + path: combinePaths(libraryRoot, 'pkg_a', 'utils.py'), + content: 'def helper_func(): pass', + }, + // pkg_b in library (re-imports without re-exporting) + { + path: combinePaths(libraryRoot, 'pkg_b', '__init__.py'), + content: '', + }, + { + path: combinePaths(libraryRoot, 'pkg_b', 'py.typed'), + content: '', + }, + { + path: combinePaths(libraryRoot, 'pkg_b', 'reexport.py'), + content: 'from pkg_a.utils import helper_func', // No __all__, not re-exported + }, + // Consumer package - local source file that imports from pkg_b + { + path: normalizeSlashes('/src/consumer/__init__.py'), + content: '', + }, + { + path: normalizeSlashes('/src/consumer/bad_import.py'), + content: 'from pkg_b.reexport import helper_func', // Would normally error + }, + ]; + + const sp = createServiceProviderFromFiles(files); + + // Test 1: With reportPrivateImportUsage = 'error' (default) -> should get error + { + const configOptions = new ConfigOptions(UriEx.file('/')); + configOptions.diagnosticRuleSet.reportPrivateImportUsage = 'error'; + + const importResolver = new ImportResolver( + sp, + configOptions, + new TestAccessHost(sp.fs().getModulePath(), [UriEx.file(libraryRoot)]) + ); + + const program = new Program(importResolver, configOptions, sp); + const consumerUri = UriEx.file('/src/consumer/bad_import.py'); + program.setTrackedFiles([consumerUri]); + + while (program.analyze()) { + // Continue until complete + } + + const sourceFile = program.getSourceFile(consumerUri); + assert(sourceFile, 'Source file should exist'); + const diagnostics = sourceFile.getDiagnostics(configOptions) || []; + const errors = diagnostics.filter((d) => d.category === DiagnosticCategory.Error); + + assert.strictEqual( + errors.length, + 1, + `Expected 1 error with 
reportPrivateImportUsage=error, got ${errors.length}` + ); + + program.dispose(); + } + + // Test 2: With reportPrivateImportUsage = 'none' (simulating config override) -> should NOT get error + { + const configOptions = new ConfigOptions(UriEx.file('/')); + configOptions.diagnosticRuleSet.reportPrivateImportUsage = 'none'; + + const importResolver = new ImportResolver( + sp, + configOptions, + new TestAccessHost(sp.fs().getModulePath(), [UriEx.file(libraryRoot)]) + ); + + const program = new Program(importResolver, configOptions, sp); + const consumerUri = UriEx.file('/src/consumer/bad_import.py'); + program.setTrackedFiles([consumerUri]); + + while (program.analyze()) { + // Continue until complete + } + + const sourceFile = program.getSourceFile(consumerUri); + assert(sourceFile, 'Source file should exist'); + const diagnostics = sourceFile.getDiagnostics(configOptions) || []; + const privateImportErrors = diagnostics.filter( + (d) => + d.category === DiagnosticCategory.Error && + (d.message.includes('not exported') || d.message.includes('helper_func')) + ); + + assert.strictEqual( + privateImportErrors.length, + 0, + `Expected 0 private import errors with reportPrivateImportUsage=none, got ${ + privateImportErrors.length + }: ${privateImportErrors.map((e) => e.message).join(', ')}` + ); + + program.dispose(); + } + + // Test 3: With reportPrivateImportUsage = 'none' AND positional arg include override + // This simulates: pyright --project config.json consumer/ + // where config.json has reportPrivateImportUsage: false + { + const configOptions = new ConfigOptions(UriEx.file('/')); + // Simulate the config file setting: first set standard mode defaults + configOptions.diagnosticRuleSet = ConfigOptions.getDiagnosticRuleSet('standard'); + // Then apply the config override (reportPrivateImportUsage: false -> 'none') + configOptions.diagnosticRuleSet.reportPrivateImportUsage = 'none'; + + // Simulate includeFileSpecsOverride by changing the include + // (In the 
real flow, this is done by _applyCommandLineOverrides) + configOptions.include = []; + + const importResolver = new ImportResolver( + sp, + configOptions, + new TestAccessHost(sp.fs().getModulePath(), [UriEx.file(libraryRoot)]) + ); + + const program = new Program(importResolver, configOptions, sp); + const consumerUri = UriEx.file('/src/consumer/bad_import.py'); + + // Track only the consumer file (simulating positional arg) + program.setTrackedFiles([consumerUri]); + + while (program.analyze()) { + // Continue until complete + } + + const sourceFile = program.getSourceFile(consumerUri); + assert(sourceFile, 'Source file should exist'); + const diagnostics = sourceFile.getDiagnostics(configOptions) || []; + const privateImportErrors = diagnostics.filter( + (d) => + d.category === DiagnosticCategory.Error && + (d.message.includes('not exported') || d.message.includes('helper_func')) + ); + + // The config says reportPrivateImportUsage: false, so there should be no errors + // even when include is overridden by positional args. + assert.strictEqual( + privateImportErrors.length, + 0, + `Expected 0 private import errors with config override and include override, got ${ + privateImportErrors.length + }: ${privateImportErrors.map((e) => e.message).join(', ')}. ` + + `This would indicate the bug where positional args cause config overrides to be ignored.` + ); + + program.dispose(); + } + + sp.dispose(); + }); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/pyrightFileSystem.test.ts b/python-parser/packages/pyright-internal/src/tests/pyrightFileSystem.test.ts new file mode 100644 index 00000000..f0eabf5a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/pyrightFileSystem.test.ts @@ -0,0 +1,257 @@ +/* + * pyrightFileSystem.test.ts + * + * pyrightFileSystem tests. 
+ */ + +import assert from 'assert'; + +import { lib, sitePackages } from '../common/pathConsts'; +import { combinePaths, getDirectoryPath, normalizeSlashes } from '../common/pathUtils'; +import { PyrightFileSystem } from '../pyrightFileSystem'; +import { TestFileSystem } from './harness/vfs/filesystem'; +import { Uri } from '../common/uri/uri'; +import { UriEx } from '../common/uri/uriUtils'; +import { PartialStubService } from '../partialStubService'; + +const libraryRoot = combinePaths(normalizeSlashes('/'), lib, sitePackages); +const libraryRootUri = UriEx.file(libraryRoot); + +test('virtual file exists', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'partialStub.pyi'), + content: 'def test(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'subdir', '__init__.pyi'), + content: 'def subdir(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'py.typed'), + content: 'partial\n', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'partialStub.py'), + content: 'def test(): pass', + }, + ]; + + const fs = createFileSystem(files); + const ps = new PartialStubService(fs); + ps.processPartialStubPackages([libraryRootUri], [libraryRootUri]); + + const stubFile = libraryRootUri.combinePaths('myLib', 'partialStub.pyi'); + assert(fs.existsSync(stubFile)); + assert(fs.isMappedUri(stubFile)); + + const myLib = libraryRootUri.combinePaths('myLib'); + const entries = fs.readdirEntriesSync(myLib); + assert.strictEqual(3, entries.length); + + const subDirFile = libraryRootUri.combinePaths('myLib', 'subdir', '__init__.pyi'); + assert(fs.existsSync(subDirFile)); + assert(fs.isMappedUri(subDirFile)); + + const fakeFile = entries.filter((e) => e.name.endsWith('.pyi'))[0]; + assert(fakeFile.isFile()); + + assert(!fs.existsSync(libraryRootUri.combinePaths('myLib-stubs'))); +}); + +test('virtual file coexists with real', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'partialStub.pyi'), + 
content: 'def test(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'subdir', '__init__.pyi'), + content: 'def subdir(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'py.typed'), + content: 'partial\n', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'partialStub.py'), + content: 'def test(): pass', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'subdir', '__init__.py'), + content: 'def test(): pass', + }, + ]; + + const fs = createFileSystem(files); + const ps = new PartialStubService(fs); + ps.processPartialStubPackages([libraryRootUri], [libraryRootUri]); + + const stubFile = libraryRootUri.combinePaths('myLib', 'partialStub.pyi'); + assert(fs.existsSync(stubFile)); + assert(fs.isMappedUri(stubFile)); + + const myLib = libraryRootUri.combinePaths('myLib'); + const entries = fs.readdirEntriesSync(myLib); + assert.strictEqual(3, entries.length); + + const subDirFile = libraryRootUri.combinePaths('myLib', 'subdir', '__init__.pyi'); + assert(fs.existsSync(subDirFile)); + assert(fs.isMappedUri(subDirFile)); + + const subDirPyiFile = libraryRootUri.combinePaths('myLib', 'subdir', '__init__.pyi'); + assert(fs.existsSync(subDirPyiFile)); + + const fakeFile = entries.filter((e) => e.name.endsWith('.pyi'))[0]; + assert(fakeFile.isFile()); + + assert(!fs.existsSync(libraryRootUri.combinePaths('myLib-stubs'))); +}); + +test('virtual file not exist', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'partialStub.pyi'), + content: 'def test(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'otherType.py'), + content: 'def test(): pass', + }, + ]; + + const fs = createFileSystem(files); + const ps = new PartialStubService(fs); + ps.processPartialStubPackages([libraryRootUri], [libraryRootUri]); + + assert(!fs.existsSync(libraryRootUri.combinePaths('myLib', 'partialStub.pyi'))); + + const myLib = libraryRootUri.combinePaths('myLib'); + const entries = fs.readdirEntriesSync(myLib); + 
assert.strictEqual(1, entries.length); + + assert.strictEqual(0, entries.filter((e) => e.name.endsWith('.pyi')).length); + + assert(fs.existsSync(libraryRootUri.combinePaths('myLib-stubs'))); +}); + +test('existing stub file', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'partialStub.pyi'), + content: 'def test(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'py.typed'), + content: 'partial\n', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'partialStub.py'), + content: 'def test(): pass', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'partialStub.pyi'), + content: 'def test(): pass', + }, + ]; + + const fs = createFileSystem(files); + const ps = new PartialStubService(fs); + ps.processPartialStubPackages([libraryRootUri], [libraryRootUri]); + + const stubFile = libraryRootUri.combinePaths('myLib', 'partialStub.pyi'); + assert(fs.existsSync(stubFile)); + + const myLib = libraryRootUri.combinePaths('myLib'); + const entries = fs.readdirEntriesSync(myLib); + assert.strictEqual(2, entries.length); + + assert.strictEqual('def test(): ...', fs.readFileSync(stubFile, 'utf8')); + + assert(!fs.existsSync(libraryRootUri.combinePaths('myLib-stubs'))); +}); + +test('multiple package installed', () => { + const extraRoot = combinePaths(normalizeSlashes('/'), lib, 'extra'); + const extraRootUri = UriEx.file(extraRoot); + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'partialStub.pyi'), + content: 'def test(): ...', + }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'py.typed'), + content: 'partial\n', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'partialStub.py'), + content: 'def test(): pass', + }, + { + path: combinePaths(extraRoot, 'myLib', 'partialStub.py'), + content: 'def test(): pass', + }, + ]; + + const fs = createFileSystem(files); + const ps = new PartialStubService(fs); + ps.processPartialStubPackages([libraryRootUri, extraRootUri], [libraryRootUri, extraRootUri]); + 
+ assert(ps.isPathScanned(libraryRootUri)); + assert(ps.isPathScanned(extraRootUri)); + + assert(fs.existsSync(libraryRootUri.combinePaths('myLib', 'partialStub.pyi'))); + assert(fs.existsSync(extraRootUri.combinePaths('myLib', 'partialStub.pyi'))); + + assert.strictEqual(2, fs.readdirEntriesSync(libraryRootUri.combinePaths('myLib')).length); + assert.strictEqual(2, fs.readdirEntriesSync(extraRootUri.combinePaths('myLib')).length); +}); + +test('bundled partial stubs', () => { + const bundledPath = combinePaths(normalizeSlashes('/'), 'bundled'); + const bundledPathUri = UriEx.file(bundledPath); + + const files = [ + { + path: combinePaths(bundledPath, 'myLib-stubs', 'partialStub.pyi'), + content: 'def test(): ...', + }, + { + path: combinePaths(bundledPath, 'myLib-stubs', 'py.typed'), + content: 'partial\n', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'partialStub.py'), + content: 'def test(): pass', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'py.typed'), + content: '', + }, + ]; + + const fs = createFileSystem(files); + const ps = new PartialStubService(fs); + ps.processPartialStubPackages([bundledPathUri], [libraryRootUri], bundledPathUri); + + const stubFile = libraryRootUri.combinePaths('myLib', 'partialStub.pyi'); + assert(!fs.existsSync(stubFile)); + + const myLib = libraryRootUri.combinePaths('myLib'); + const entries = fs.readdirEntriesSync(myLib); + assert.strictEqual(2, entries.length); +}); + +function createFileSystem(files: { path: string; content: string }[]): PyrightFileSystem { + const fs = new TestFileSystem(/* ignoreCase */ false, { cwd: normalizeSlashes('/') }); + + for (const file of files) { + const path = normalizeSlashes(file.path); + const dir = getDirectoryPath(path); + fs.mkdirpSync(dir); + + fs.writeFileSync(Uri.file(path, fs), file.content); + } + + return new PyrightFileSystem(fs); +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/abstractClass1.py 
b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass1.py new file mode 100644 index 00000000..5a4f916d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass1.py @@ -0,0 +1,55 @@ +# This sample tests the type analyzer's ability to flag attempts +# to instantiate abstract base classes. + +from abc import ABC, abstractmethod + + +class AbstractClassA(ABC): + @abstractmethod + def foo1(self): + pass + + @abstractmethod + def foo2(self): + pass + + def foo3(self): + return 3 + + @classmethod + def foo4(cls): + # This should not generate an error even though + # it would appear to be attempting to instantiate + # an abstract class. That's because we need to + # assume that the caller is making this call on + # a non-abstract subclass. + return cls() + + +v1 = [subclass() for subclass in AbstractClassA.__subclasses__()] +reveal_type(v1, expected_text="list[AbstractClassA]") + + +# This should generate an error because AbstractFoo +# is an abstract class. +a = AbstractClassA() + + +class AbstractClassB(AbstractClassA): + def foo1(self): + pass + + +# This should generate an error because AbstractBar1 +# is an abstract class. +b = AbstractClassB() + + +class AbstractClassC(AbstractClassB): + def foo2(self): + pass + + +# This should not generate an error because AbstractBar2 +# overrides all of the abstract methods it inherits. +c = AbstractClassC() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/abstractClass10.py b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass10.py new file mode 100644 index 00000000..3a6624dd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass10.py @@ -0,0 +1,75 @@ +# This sample tests the detection of static or class method invocations +# where the method is marked abstract. + +from abc import ABC, abstractmethod + + +class A(ABC): + @staticmethod + @abstractmethod + def method1() -> None: ... 
+ + @staticmethod + @abstractmethod + def method2() -> None: + pass + + @classmethod + @abstractmethod + def method3(cls) -> None: + raise NotImplementedError + + @classmethod + @abstractmethod + def method4(cls) -> None: + pass + + +# This should generate an error. +A.method1() + +A.method2() + +# This should generate an error. +A.method3() + +A.method4() + + +class B(A): + @staticmethod + def method1() -> None: + # This should generate an error. + return super(B).method1() + + @staticmethod + def method2() -> None: + return super(B).method2() + + @classmethod + def method3(cls) -> None: + # This should generate an error. + return super().method3() + + @classmethod + def method4(cls) -> None: + return super().method4() + + +B.method1() +B.method2() + + +def func1(a: type[A]): + a.method1() + a.method3() + + +class C(A): ... + + +# This should generate an error. +C.method1() + +# This should generate an error. +C.method3() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/abstractClass11.py b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass11.py new file mode 100644 index 00000000..21a77820 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass11.py @@ -0,0 +1,25 @@ +# This sample tests the handling of a class that is created from a subclass +# of ABCMeta. + +from abc import ABCMeta, abstractmethod +from typing import final + + +class CustomMeta(ABCMeta): + pass + + +class A(metaclass=CustomMeta): + @abstractmethod + def abstract(self): + pass + + +@final +# This should generate an error. +class B(A): + pass + + +# This should generate an error. 
+B() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/abstractClass2.py b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass2.py new file mode 100644 index 00000000..dcbed833 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass2.py @@ -0,0 +1,30 @@ +# This sample tests the cases where a mixin class +# overrides an abstract method, making it no longer abstract. + +import abc + + +class InterfaceA(abc.ABC): + @abc.abstractmethod + def a(self) -> None: + print("InterfaceA.a") + + +class MixinA(InterfaceA): + def a(self) -> None: + print("MixinA.a") + + +class InterfaceAB(InterfaceA): + @abc.abstractmethod + def b(self) -> None: + print("InterfaceAB.b") + + +class ClassAB(InterfaceAB, MixinA): + def b(self) -> None: + print("ClassAB.b") + + +ab = ClassAB() +ab.a() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/abstractClass3.py b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass3.py new file mode 100644 index 00000000..ada173ee --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass3.py @@ -0,0 +1,30 @@ +# This sample tests the checks for abstract method +# overrides. They depend on the order of the subclasses. 
+ +import abc + + +class MixinA(abc.ABC): + pass + + +class MixinB(abc.ABC): + def get_model(self): + print("MixinB.get_model") + + +class MixinC(abc.ABC): + @abc.abstractmethod + def get_model(self): + pass + + def use_model(self): + print("MixinC.get_model") + + +class Trainer_1a(MixinA, MixinB, MixinC): + pass + + +# This should not generate an error +trainer = Trainer_1a() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/abstractClass4.py b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass4.py new file mode 100644 index 00000000..52c70313 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass4.py @@ -0,0 +1,30 @@ +# This sample tests the checks for abstract method +# overrides. They depend on the order of the subclasses. + +import abc + + +class MixinA(abc.ABC): + pass + + +class MixinB(abc.ABC): + def get_model(self): + print("MixinB.get_model") + + +class MixinC(abc.ABC): + @abc.abstractmethod + def get_model(self): + pass + + def use_model(self): + print("MixinC.get_model") + + +class Trainer_1b(MixinA, MixinC, MixinB): + pass + + +# This should generate an error +trainer = Trainer_1b() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/abstractClass5.py b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass5.py new file mode 100644 index 00000000..b132bba2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass5.py @@ -0,0 +1,54 @@ +# This sample tests the type checker's reporting of abstract +# overload mismatches. + +from abc import ABC, abstractmethod +from typing import Union, overload + + +class ClassA(ABC): + @overload + def func1(self, a: int) -> int: + pass + + @overload + @abstractmethod + # This should generate an error because this overload is + # missing an abstractmethod overload. + def func1(self, a: float) -> float: + pass + + @overload + def func1(self, a: str) -> str: ... 
+ + def func1(self, a: Union[int, float, str]) -> Union[int, float, str]: + raise NotImplementedError() + + @overload + def func2(self, a: str) -> str: ... + + @overload + @abstractmethod + def func2(self, a: int) -> int: + pass + + @abstractmethod + def func2(self, a: Union[int, str]) -> Union[int, str]: + raise NotImplementedError() + + @overload + def func3(self, a: str) -> str: # pyright: ignore[reportNoOverloadImplementation] + ... + + @overload + @abstractmethod + # This should generate an error because the abstract status is inconsistent. + def func3(self, a: int) -> int: ... + + @overload + @abstractmethod + def func4(self, a: str) -> str: # pyright: ignore[reportNoOverloadImplementation] + ... + + @overload + # This should generate an error because the abstract status is inconsistent. + def func4(self, a: int) -> int: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/abstractClass6.py b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass6.py new file mode 100644 index 00000000..38d0feee --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass6.py @@ -0,0 +1,37 @@ +# This sample validates that a variable that is annotated as +# Type[X] where X refers to an abstract base class does not +# emit an error when the variable is instantiated. + +from abc import ABC, abstractmethod +from typing import Type, TypeVar + + +class Base(ABC): + @abstractmethod + def method1(self, x: int) -> int: + pass + + +def func1(base_cls: Type[Base]): + base_cls() + + +def func2(): + # This should generate an error. 
+ Base() + + +def func3(base_cls: type[Base]): + base_cls() + + +T = TypeVar("T") + + +def create_instance(cls: Type[T]) -> T: + return cls() + + +def func4(): + base = create_instance(Base) + base.method1(1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/abstractClass7.py b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass7.py new file mode 100644 index 00000000..bfe40cc1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass7.py @@ -0,0 +1,22 @@ +# This sample verifies that Protocol classes are treated as +# abstract even though they don't derive from ABCMeta. + +from typing import Protocol, Tuple +from abc import abstractmethod + + +class RGB(Protocol): + rgb: Tuple[int, int, int] + + @abstractmethod + def intensity(self) -> int: + return 0 + + +class Point(RGB): + def __init__(self, red: int, green: int, blue: int) -> None: + self.rgb = red, green, blue + + +# This should generate an error because "intensity" is not implemented. +p = Point(1, 2, 3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/abstractClass8.py b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass8.py new file mode 100644 index 00000000..e12f1ddf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass8.py @@ -0,0 +1,26 @@ +# This sample tests the check for abstract methods on a final class. + +from typing import final +from abc import ABC, abstractmethod + + +class Foo(ABC): + @abstractmethod + def foo(self): + pass + + +class Bar(Foo): + @abstractmethod + def bar(self): + pass + + @abstractmethod + def bar2(self): + pass + + +@final +# This should generate an error because Foo.foo, Bar.bar, and Bar.bar1 +# are abstract. +class Baz(Bar): ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/abstractClass9.py b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass9.py new file mode 100644 index 00000000..9335c96e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/abstractClass9.py @@ -0,0 +1,22 @@ +# This sample tests that named tuple fields override abstract methods. + +# pyright: reportIncompatibleVariableOverride=false + +from abc import ABC, abstractmethod +from typing import NamedTuple + + +class ClassA(ABC): + @property + @abstractmethod + def myproperty(self) -> str: ... + + +MixinB = NamedTuple("MixinB", [("myproperty", str)]) + + +class ClassB(MixinB, ClassA): + pass + + +ClassB(myproperty="myproperty") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotated1.py b/python-parser/packages/pyright-internal/src/tests/samples/annotated1.py new file mode 100644 index 00000000..5993c530 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotated1.py @@ -0,0 +1,102 @@ +# This sample tests handling of the Python 3.9 "Annotated" feature +# described in PEP 593. + +from dataclasses import InitVar, dataclass +from typing import Annotated, ClassVar, Final, TypeVar + + +class struct2: + @staticmethod + def ctype(a: str): + pass + + class Packed: + pass + + +UnsignedShort = Annotated[int, struct2.ctype("H")] +SignedChar = Annotated[int, struct2.ctype("b")] + + +class Student(struct2.Packed): + name: Annotated[str, struct2.ctype("<10s")] + serial_num: UnsignedShort + school: SignedChar + + +def ValueRange(a: int, b: int): + pass + + +T1 = Annotated[int, ValueRange(-10, 5)] +T2 = Annotated[T1, ValueRange(-20, 3)] + +a: Annotated[Annotated[int, "hi"], "hi"] = 3 +b: T2 = 5 + +TypeWithStringArg = Annotated["int", "this string should not be parsed"] + + +def func2(a: TypeWithStringArg): + return 3 + + +# This should generate an error because the first type argument +# is not a valid type. 
+c: Annotated["this", "should generate an error"] + +# This should generate an error because all Annotated types should +# include at least two type arguments. +d: Annotated[int] + +# Verify that generic type aliases can be defined using Annotated. +_T = TypeVar("_T") +Param = Annotated[_T, "x"] + +x1: Param[int] = 3 +print(Param[int]) + + +class A: + classvar: Annotated[ClassVar[int], (2, 5)] = 4 + const: Annotated[Final[int], "metadata"] = 4 + + +@dataclass +class B: + x: Annotated[InitVar[int], "metadata"] + + +d1 = B(x=4) + +# This should generate an error because x is not an actual member. +d1.x + +Alias1 = Annotated[_T, ""] +Alias2 = str +Alias3 = Alias1[Alias2] + +reveal_type(Alias3, expected_text="type[str]") + +x2: Annotated[str, [*(1, 2)]] +x3: Annotated[str, (temp := 1)] + + +async def func3(): + x4: Annotated[str, await func3()] + + +x5: Annotated[str, f""] +x6: Annotated[str, "abc"] +x7: Annotated[str, "a\nb"] +x8: Annotated[str, *(1, 2, 3)] + + +def func4(): + return Annotated[int, 2 + 2] + + +reveal_type(func4(), expected_text="Annotated") + +x9 = list[Annotated[int, ""]]() +reveal_type(x9, expected_text="list[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotated2.py b/python-parser/packages/pyright-internal/src/tests/samples/annotated2.py new file mode 100644 index 00000000..a9226f80 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotated2.py @@ -0,0 +1,16 @@ +# This sample tests the case where Annotated is used with deferred +# annotation evaluation. + +from __future__ import annotations +from typing import Annotated + + +v1: Annotated[str, ClassA, func1(), v2[0]] = "" + +v2 = [1, 2, 3] + + +class ClassA: ... + + +def func1(): ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar1.py b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar1.py new file mode 100644 index 00000000..0094dcae --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar1.py @@ -0,0 +1,20 @@ +# This sample tests type annotations on variables. + +array1 = [1, 2, 3] + +# This should generate an error because the LHS can't +# have a declared type. +array1[2] = 4 # type: int + +dict1 = {} + +# This should generate an error because the LHS can't +# have a declared type. +dict1["hello"] = 4 # type: int + + +def foo(): + a: int = 3 + b: float = 4.5 + c: str = "" + d: int = yield 42 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar2.py b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar2.py new file mode 100644 index 00000000..76ed1e13 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar2.py @@ -0,0 +1,28 @@ +# This sample tests annotated types on global variables. + +# This should generate an error because the declared +# type below does not match the assigned type. +glob_var1 = 4 + +# This should generate an error because the declared +# type doesn't match the later declared type. +glob_var1 = Exception() # type: str + +glob_var1 = Exception() # type: Exception + +# This should generate an error because the assigned +# type doesn't match the declared type. +glob_var1 = "hello" # type: Exception + +# This should generate an error. +glob_var2 = 5 + + +def func1(): + global glob_var1 + global glob_var2 + + # This should generate an error. 
+ glob_var1 = 3 + + glob_var2 = "hello" # type: str diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar3.py b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar3.py new file mode 100644 index 00000000..a979cf7a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar3.py @@ -0,0 +1,54 @@ +# This sample tests annotated types on local variables. + +from typing import Any, Optional, Union + + +class ClassB(object): + def __enter__(self) -> bytes: + return b"hello" + + def __exit__( + self, + t: Optional[type] = None, + exc: Optional[BaseException] = None, + tb: Optional[Any] = None, + ) -> bool: + return True + + +def func2(): + local_var = 3 # type: Union[int, str] + local_var = "hello" + + # This should generate an error because the assigned + # type doesn't match the declared type. + local_var = b"hello" + + local_var2 = 3 # type: int + + if local_var: + # This should generate an error because the + # assigned type doesn't match. + local_var = 3.4 + else: + # This should generate an error because the assigned + # type doesn't match the declared type. + local_var2 = b"hello" + + # This should generate an error because the declared type + # of local_var is not compatible. + with ClassB() as local_var: + pass + + bytes_list = [b"hello"] + + # This should generate an error because the declared type + # of local_var is not compatible. + for local_var in bytes_list: + pass + + # This should generate an error. + (local_var, local_var2) = (b"hello", 3) + + # This should generate an error. 
+ (local_var, local_var2) = ("hello", b"h") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar4.py b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar4.py new file mode 100644 index 00000000..df13ede8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar4.py @@ -0,0 +1,24 @@ +# This sample tests annotated types for class variables. + + +class ClassA(object): + # This should generate an error because the declared + # type doesn't match the latter declared type. + class_var1 = 4 # type: str + + # This should generate an error because the assigned + # value doesn't match the declared type. + class_var1 = "hello" # type: int + + class_var1 = 3 # type: int + + # This should generate an error because the declared + # type doesn't match the latter declared type. + class_var2 = 3 # type: int + + def __init__(self): + # This should generate an error because the assigned + # type doesn't match the declared type. + self.class_var2 = 3 # type: str + + self.class_var2 = "hello" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar5.py b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar5.py new file mode 100644 index 00000000..bb56dbf0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar5.py @@ -0,0 +1,37 @@ +# This sample tests type annotations for instance variables. + + +class ClassC(object): + def __init__(self): + self.inst_var1 = 3 + + @property + def prop1(self): + return 1 + + @prop1.setter + def prop1(self, val): + pass + + def foo(self): + # This should generate an error because the assigned + # type doesn't match the declared type. + self.inst_var1 = 3 # type: str + + self.inst_var1: str = "hello" + + # This should generate an error because the declared + # type doesn't match the previously declared type. 
+ self.inst_var1: int = "hello" + + # This should generate an error because the assigned + # type doesn't match the declared type. + self.inst_var1 = "hello" # type: int + + self.prop1 = 3 + + +class ClassE(ClassC): + def __init__(self): + # This should generate an error. + self.inst_var1 = 3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar6.py b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar6.py new file mode 100644 index 00000000..7cb8db34 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar6.py @@ -0,0 +1,22 @@ +# This sample tests that class-scoped variables with a declared type +# but without a ClassVar designation are treated as instance variables. +# Notably, Callable variables should be assumed to be pre-bound to +# the object. + +from typing import Callable + + +def add1(n: int): + return n + 1 + + +class Foo: + f: Callable[[int], int] + + def m(self): + print(self.f(1)) + + +foo = Foo() +foo.f = add1 +foo.m() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar7.py b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar7.py new file mode 100644 index 00000000..cda75b1e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar7.py @@ -0,0 +1,13 @@ +# This sample tests the reportTypeCommentUsage diagnostic check. + +# This should generate an error if reportTypeCommentUsage is enabled. +x = 3 # type: int + + +class Foo: + # This should generate an error if reportTypeCommentUsage is enabled. + y = 0 # type: int + + def __init__(self): + # This should generate an error if reportTypeCommentUsage is enabled. 
+ self.x = 2 # type: int diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar8.py b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar8.py new file mode 100644 index 00000000..3d90b3d0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotatedVar8.py @@ -0,0 +1,29 @@ +# This sample tests that incompatible types implicitly assigned +# to an annotated variable via an import statement are flagged +# as an error. + +from typing import Callable, Final + +# This should generate an error because random is already declared. +import random + +random: int = 3 + +# This should generate an error because os is Final +import os.path + +os: Final = 3 + + +# This should generate an error because x is already declared. +from math import pow as x + +x: Callable[[], None] + +# This should generate an error because pow is already declared. +from math import pow + +pow: int = 3 + +y: Callable[[float, float], float] +from math import pow as y diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotations1.py b/python-parser/packages/pyright-internal/src/tests/samples/annotations1.py new file mode 100644 index 00000000..78ab76ac --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotations1.py @@ -0,0 +1,159 @@ +# This sample tests the handling of type annotations within a +# python source file (as opposed to a stub file). + +from typing import Any, Callable, TypeVar, Union +import uuid +from datetime import datetime + + +class ClassA: + # This should generate an error because ClassA + # is not yet defined at the time it's used. + def func0(self) -> ClassA | None: + return None + + +class ClassB(ClassA): + def func1(self) -> ClassA: + return ClassA() + + # This should generate an error because ClassC + # is a forward reference, which is not allowed + # in a python source file. 
+ def func2(self) -> ClassC | None: + return None + + def func3(self) -> "ClassC | None": + return None + + def func4(self) -> "ClassC | None": + return None + + def func5(self) -> "int | None": + return None + + +class ClassC: + pass + + +def func10(): + pass + + +# This should generate an error because function calls +# are not allowed within a type annotation. +x1: func10() + +x2: """ + Union[ + int, + str + ] +""" + + +class ClassD: + ClassA: "ClassA" + + # This should generate an error because ClassF refers + # to itself, and there is no ClassF declared at the module + # level. It should also generate a second error because + # ClassF is a variable and can't be used as an annotation. + ClassF: "ClassF" + + str: "str" + + def int(self): ... + + foo: "int" + + # This should generate an error because it refers to the local + # "int" symbol rather than the builtins "int". + bar: int + + +# This should generate an error because modules are not allowed in +# type annotations. +x3: typing + + +class ClassG: + uuid = uuid.uuid4() + + +class ClassH: + # This should generate an error because uuid refers to the local + # symbol in this case, which is a circular reference. + uuid: uuid.UUID = uuid.uuid4() + + +def func11(): + for t in [str, float]: + # This should generate an error because t is not a valid annotation. + def f(x: str) -> t: + return t(x) + 1 + + f("") + + +def func12(x: type[int]): + # These should not generate an error because they are used + # in a location that is not considered a type annotation, so the + # normal annotation limitations do not apply here. + print(Union[x, x]) + print(x | None) + + +# This should generate an error because x4 isn't defined. +x4: int = x4 + + +class ClassJ: + datetime: datetime + + +T = TypeVar("T") + + +x5: type[int] = int + +# This should generate an error because variables are not allowed +# in a type annotation. +x6: x5 = 1 + +# This should generate an error because variables are not allowed +# in a type annotation. 
+x7: list[x5] = [1] + +# This should generate an error because a Callable isn't allowed +# in a "type". +x8: type[Callable] + +# This should generate an error because a Callable isn't allowed +# in a "type". +x9: type[func11] + +# This should generate an error because a Callable isn't allowed +# in a "type". +x10: type[Callable[..., Any]] + +# This should generate an error because raw strings aren't allowed. +x11: r"int" + +# This should generate an error because bytes strings aren't allowed. +x12: b"int" + +# This should generate an error because format strings aren't allowed. +x13: f"int" + + +class A: + def method1(self): + # This should result in an error. + x: self + + @classmethod + def method2(cls): + # This should result in an error. + x: cls diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotations2.py b/python-parser/packages/pyright-internal/src/tests/samples/annotations2.py new file mode 100644 index 00000000..fea1ce5b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotations2.py @@ -0,0 +1,33 @@ +# This sample tests the parser and type checker's ability to use +# type annotations in comments following assignment expressions. +# These are used in some older typestub files because they're +# compatible with versions of Python before 3.6. + +from typing import List, Optional, Tuple + +a = 3 # type: int + +b = "3" # type: str + +c = [1, 2, 3] # type: Optional[List[int]] + + +# A type on the next line shouldn't be honored +d = "hello" +# type: int + +# A type comment with a space between the type and +# the colon is also not honored. +e = "hello" # type : int + +# Neither is a capital "Type" +f = "hello" # Type: int + + +# This should generate an error because the type doesn't match +g = "hello" # type: int + + +# This should generate an error because the last entry +# of the tuple is the wrong type. 
+h = (1, "hello", (5,)) # type: Tuple[int, str, Tuple[str]] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotations3.py b/python-parser/packages/pyright-internal/src/tests/samples/annotations3.py new file mode 100644 index 00000000..a05db66b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotations3.py @@ -0,0 +1,37 @@ +# This sample tests the handling of type annotations within a +# python source file with the __future__ annotations symbol present. + +from __future__ import annotations + +from typing import Optional + + +class ClassA: + def func0(self) -> Optional[ClassA]: + return None + + +class ClassB(ClassA): + def func1(self) -> ClassA: + return ClassA() + + def func2(self) -> Optional[ClassC]: + return None + + def func3(self) -> "Optional[ClassC]": + return None + + def func4(self) -> Optional["ClassC"]: + return None + + def func5(self, x: ClassA): + x.func0() + + class ClassA: ... + + def func6(self, x: ClassC): + x.my_int + + +class ClassC: + my_int: int diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotations4.py b/python-parser/packages/pyright-internal/src/tests/samples/annotations4.py new file mode 100644 index 00000000..2ed6621e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotations4.py @@ -0,0 +1,67 @@ +# This sample tests the type checker's reporting of +# incompatible declared types. + +from collections.abc import Callable + + +def a(): + pass + + +# This should generate an error for an incompatible assignment. +a = 1 + + +# This should generate an error for an obscured type. +def b(): + pass + + +b: int = 1 + +# This should generate an error for an obscured type. +c: int = 1 +c: float = 1.1 + +# This should generate two errors - one for an +# obscured type, the second for an incompatible assignment. +d: int = 2 + + +def d(): + pass + + +class Foo: + # This should generate an error because aa is redeclared. 
+ aa: int + + def aa(self): + return 3 + + +# This should generate two errors, one for each param. +def my_func(param1: int, param2): + param1: int = 3 + param2: int = 4 + + +# This should be fine because both declarations of 'e' +# use the same type. +e: list[int] +e = [3] +e: list[int] + + +def register(fn: Callable[[], None]) -> None: ... + + +# These should be be fine because they use the "_" name. +@register +def _(): + print("Callback 1 called") + + +@register +def _(): + print("Callback 2 called") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotations5.py b/python-parser/packages/pyright-internal/src/tests/samples/annotations5.py new file mode 100644 index 00000000..865e3134 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotations5.py @@ -0,0 +1,9 @@ +# This sample tests the handling of tuple expressions within a subscript +# when used with type annotations. + +a1: dict[(str, str)] = {"hi": "there"} + +# This should generate an error because there are too many type arguments. +a2: dict[(str, str, str)] = {"hi": "there"} + +b1: list[(int,)] = [3, 4, 5] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/annotations6.py b/python-parser/packages/pyright-internal/src/tests/samples/annotations6.py new file mode 100644 index 00000000..77284613 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/annotations6.py @@ -0,0 +1,29 @@ +# This sample verifies that the Type[] and type[] annotations work +# as expected when the type argument is Any. + +from typing import Type, Any + + +def is_type1(x: object, y: Type[Any]) -> bool: + return isinstance(x, y) + + +is_type1(1, int) + +# This should generate an error. +is_type1(1, 1) + + +def is_type2(x: object, y: type[Any]) -> bool: + return isinstance(x, y) + + +is_type2(1, int) + +# This should generate an error. 
+is_type2(1, 1) + + +def func1(v1: Type[Any], v2: type[Any]): + reveal_type(v1, expected_text="Type[Any]") + reveal_type(v2, expected_text="type[Any]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/any1.py b/python-parser/packages/pyright-internal/src/tests/samples/any1.py new file mode 100644 index 00000000..4b0a1f38 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/any1.py @@ -0,0 +1,45 @@ +# This sample tests certain uses of Any that should be flagged as illegal. + +import typing +from typing import Any, cast + +# This should generate an error because Any is not valid for isinstance. +isinstance(0, Any) + +# This should generate an error because Any is not valid for isinstance. +isinstance(0, typing.Any) + +v1 = cast(Any, 0) +v2 = cast(typing.Any, 0) + + +class A(Any): ... + + +class B(typing.Any): ... + + +# This should generate an error because Any is not callable. +Any() + +# This should generate an error because Any is not callable. +typing.Any() + + +def func1() -> int: + # This should generate an error because Any cannot be used as a value. + return Any + + +def func2() -> int: + # This should generate an error because Any cannot be used as a value. + return typing.Any + + +v3: type[Any] = type(Any) + +# This should generate an error. +v4: type[type] = type(Any) + +# This should generate an error. +v5: type = Any diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assert1.py b/python-parser/packages/pyright-internal/src/tests/samples/assert1.py new file mode 100644 index 00000000..3c981608 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assert1.py @@ -0,0 +1,22 @@ +# This sample tests the ability to detect errant assert calls +# that are always true - the "reportAssertAlwaysTrue" option. + +from typing import Any, Tuple + + +# This should generate a warning. 
+assert (1 != 2, "Error message") + + +def foo(a: Tuple[int, ...]): + assert a + + +b = () +assert b + + +c = (2, 3) + +# This should generate a warning. +assert c diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assertType1.py b/python-parser/packages/pyright-internal/src/tests/samples/assertType1.py new file mode 100644 index 00000000..95d54bcd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assertType1.py @@ -0,0 +1,48 @@ +# This sample tests the assert_type call. + +from typing import Any, Literal +from typing_extensions import assert_type # pyright: ignore[reportMissingModuleSource] + + +def func1(): + # This should generate an error. + assert_type() + + # This should generate an error. + assert_type(1) + + # This should generate an error. + assert_type(1, 2, 3) + + # This should generate an error. + assert_type(*[]) + + +def func2(x: int, y: int | str, z: list): + assert_type(x, int) + + # This should generate an error. + assert_type(x, str) + + # This should generate an error. + assert_type(x, Any) + + x = 3 + assert_type(x, Literal[3]) + + # This should generate an error. + assert_type(x, int) + + assert_type(y, int | str) + assert_type(y, str | int) + + # This should generate an error. + assert_type(y, str) + + # This should generate an error. + assert_type(y, None) + + # This should generate two errors. + assert_type(y, 3) + + assert_type(z[0], Any) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignment1.py b/python-parser/packages/pyright-internal/src/tests/samples/assignment1.py new file mode 100644 index 00000000..85375f7c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignment1.py @@ -0,0 +1,72 @@ +# This sample tests the type checker's handling of +# member assignments. 
+ +from not_present import NotPresentClass # type: ignore + + +class ClassA: + def __init__(self): + self.string_list: list[str] = [] + + def do_something(self, num: int) -> str: + return "" + + +a = ClassA() + +a.string_list = ["yep"] + +# This should generate an error because of a type mismatch. +a.string_list = "bbb" + +# This should generate an error because of a type mismatch. +a.string_list = {} + +# This should generate an error because of a type mismatch. +a.string_list = [1] + +# This should generate an error because there is no member +# called string_list2 defined. +a.string_list2 = 4 + + +def patch1(num: int) -> str: + return "" + + +def patch2(self, num: int) -> str: + return "" + + +a.do_something = lambda num: "hello" +a.do_something = patch1 + +# This should generate an error because of a param count mismatch +a.do_something = lambda: "hello" + +# This should generate an error because of a return type mismatch +a.do_something = lambda x: 1 + + +ClassA.do_something = patch2 + +# This should generate an error because of a param count mismatch +ClassA.do_something = patch1 + + +class ClassB: + # This should generate an error because assignment expressions + # can't be used within a class. 
+ [(j := i) for i in range(5)] + + +class ClassC: + def __init__(self): + self.x = NotPresentClass # type: int + self.y: int + self.y = NotPresentClass + self.z: int = NotPresentClass + + reveal_type(self.x, expected_text="Unknown | int") + reveal_type(self.y, expected_text="Unknown | int") + reveal_type(self.z, expected_text="Unknown | int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignment10.py b/python-parser/packages/pyright-internal/src/tests/samples/assignment10.py new file mode 100644 index 00000000..502b400b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignment10.py @@ -0,0 +1,83 @@ +# This sample tests some cases where types are narrowed on assignment, +# including some cases that involve "Any". + +from typing import Any, Generic, Iterable, TypeVar + + +class A: + instance: "A | None" + + def __init__(self) -> None: + self.foo: bool + + @classmethod + def method1(cls) -> bool: + if cls.instance is None: + cls.instance = cls() + return cls.instance.foo + + +T = TypeVar("T") + + +class B(Generic[T]): ... 
+ + +def func1(v1: list[Any | None], v2: list[int | str]): + x1: list[int | None] = v1 + reveal_type(x1, expected_text="list[int | None]") + + x2: list[Any] = v2 + reveal_type(x2, expected_text="list[Any]") + + x3: list[Any | str] = v2 + reveal_type(x3, expected_text="list[Any | str]") + + +def func2(v1: dict[int, Any | None], v2: dict[int, int | str]): + x1: dict[int, int | None] = v1 + reveal_type(x1, expected_text="dict[int, int | None]") + + x2: dict[Any, Any] = v2 + reveal_type(x2, expected_text="dict[Any, Any]") + + x3: dict[Any, Any | str] = v2 + reveal_type(x3, expected_text="dict[Any, Any | str]") + + +def func3(y: list[int]): + x1: Iterable[int | B[Any]] = y + reveal_type(x1, expected_text="list[int]") + + x2: Iterable[Any | B[Any]] = y + reveal_type(x2, expected_text="list[int]") + + x3: Iterable[Any] = y + reveal_type(x3, expected_text="list[int]") + + +def func4(y: list[Any]): + x1: Iterable[int | B[Any]] = y + reveal_type(x1, expected_text="list[Any]") + + x2: Iterable[Any | B[Any]] = y + reveal_type(x2, expected_text="list[Any]") + + x3: Iterable[Any] = y + reveal_type(x3, expected_text="list[Any]") + + +def func5(v1: list[Any | None]): + x1: list[int | None] = v1 + reveal_type(x1, expected_text="list[int | None]") + + +def func6(v1: tuple[Any], v2: tuple[int, Any], v3: tuple[Any, ...]): + x1: tuple[int] = v1 + reveal_type(x1, expected_text="tuple[int]") + + x2: tuple[int, str] = v2 + reveal_type(x2, expected_text="tuple[int, str]") + + x3: tuple[str, ...] = v3 + reveal_type(x3, expected_text="tuple[str, ...]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignment11.py b/python-parser/packages/pyright-internal/src/tests/samples/assignment11.py new file mode 100644 index 00000000..c9a91859 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignment11.py @@ -0,0 +1,20 @@ +# This sample tests the type checker's handling of chained assignments. 
+ +a1 = b1 = c1 = d1 = 3 + +my_list = [10] +a2 = my_list[a2] = b2 = my_list[b2] = 0 + +# This should generate an error because a3 is read before written. +my_list[a3] = a3 = 0 + + +# This should generate an error because type comments are not +# allowed for chained assignments. +x1 = x2 = x3 = [3] # type: list[float] + + +# Bidirectional type inference should be used in this case. +y2: list[list[float]] +y1 = y2 = [[3]] +reveal_type(y1, expected_text="list[list[float]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignment12.py b/python-parser/packages/pyright-internal/src/tests/samples/assignment12.py new file mode 100644 index 00000000..1034a0e6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignment12.py @@ -0,0 +1,24 @@ +# This sample tests the case where a variable with a declared type +# is assigned an unknown value or partially-unknown value. + + +def a_test(x1: int, x2: list): + u = x1.upper() # type: ignore + reveal_type(u, expected_text="Unknown") + + # This should generate an error if reportUnknownVariableType is enabled. + y: str = u + reveal_type(y, expected_text="Unknown | str") + + # This should generate an error if reportUnknownVariableType is enabled. + z: list[str] = x2 + reveal_type(z, expected_text="list[str]") + + +def b_test(x: int | str): + u = x.upper() # type: ignore + reveal_type(u, expected_text="str | Unknown") + + # This should generate an error if reportUnknownVariableType is enabled. + y: str = u + reveal_type(y, expected_text="str | Unknown") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignment2.py b/python-parser/packages/pyright-internal/src/tests/samples/assignment2.py new file mode 100644 index 00000000..e13eb9f4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignment2.py @@ -0,0 +1,43 @@ +# This sample tests assignments to indexed expressions +# where the base is a specialized object. 
+ +# Enable the reportUnknownArgumentType check so we can verify that it +# doesn't generate an error when assigning an empty list to an indexed +# expression. +# pyright: reportUnknownArgumentType=true + + +v1: list[int] = [1, 2, 3, 4, 5] +# This should generate an error because the assigned type is wrong. +v1[0] = "a" + +v2: dict[int, str] = {1: "str"} +# This should generate an error because the assigned type is wrong. +v2[1] = 123 + +v3: list[int | str] = ["a"] +v3[0] = 3 +reveal_type(v3[0], expected_text="Literal[3]") + + +v4: dict[str, int | str] = {} +v4["aaa"] = 3 +v4["bbb"] = "bbb" +reveal_type(v4["aaa"], expected_text="Literal[3]") +reveal_type(v4["bbb"], expected_text="Literal['bbb']") +reveal_type(v4["ccc"], expected_text="int | str") + + +class Asymmetric: + def __setitem__(self, i: int, value: object) -> None: ... + + def __getitem__(self, i: int) -> int: ... + + +v5 = Asymmetric() +v5[0] = 3 +reveal_type(v5[0], expected_text="int") + + +v6 = [1, 2, 3] +v6[1:] = [] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignment3.py b/python-parser/packages/pyright-internal/src/tests/samples/assignment3.py new file mode 100644 index 00000000..076159ef --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignment3.py @@ -0,0 +1,51 @@ +# This sample tests various assignment scenarios where +# there is an expected type, so bidirectional type +# inference is used. + +from typing import Callable, Literal, Protocol + +f1: Callable[[int, int], int] = lambda a, b: a + b + +# This should generate an error because x should be +# determined to be an "int", so "len(x)" is invalid. +map(lambda x: len(x), [1, 2, 3]) + + +def must_be_int(val: int): + return val + + +d1: dict[str, tuple[int, Callable[[int], int]]] = { + "hello": (3, lambda x: must_be_int(x)) +} + +d2: dict[str, tuple[int, Callable[[int], int]]] = { + # This should generate an error because the key is not a str. 
+ 3: (3, lambda x: must_be_int(x)) +} + +d3: dict[str, tuple[int, Callable[[int], int]]] = { + # This should generate an error because the first element + # of the tuple is not the correct type. + "3": (3.0, lambda x: must_be_int(x)) +} + +d4: dict[str, tuple[int, Callable[[int], int]]] = { + # This should generate an error because the lambda + # type doesn't match. + "3": (3, lambda _: 3.4) +} + + +class Adder(Protocol): + def __call__(self, x: int, y: dict[str, int]) -> int: ... + + +v1: Adder = lambda x, y: x + y["hi"] +reveal_type(v1, expected_text="(x: int, y: dict[str, int]) -> int") + + +class A: + @classmethod + def method1(cls): + cls.v1: list[Literal[0]] = [] if issubclass(cls, int) else [0] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignment4.py b/python-parser/packages/pyright-internal/src/tests/samples/assignment4.py new file mode 100644 index 00000000..513ac9c9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignment4.py @@ -0,0 +1,20 @@ +# This sample tests various assignment scenarios where +# there is an expected type, so bidirectional type +# inference is used. + +# pyright: strict + +from typing import Callable, Sequence + +AAA = float +BBB = int +CCC = str +DDD = str +AAATuple = tuple[AAA, BBB, Callable[[Sequence[int], AAA], Sequence[float]]] + + +def foo(): + var1: dict[str, tuple[AAA, BBB, CCC, DDD]] = {} + var2: dict[str, AAATuple] = {} + for k, (var3, var4, _, _) in var1.items(): + var2[k] = (var3, var4, lambda var5, var6: [v * var6 for v in var5]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignment5.py b/python-parser/packages/pyright-internal/src/tests/samples/assignment5.py new file mode 100644 index 00000000..fdb39aa1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignment5.py @@ -0,0 +1,16 @@ +# This sample tests the handling of tuple assignments +# where the order of assignment within the tuple is important. 
+ +from typing import Optional + + +class Node: + key: str + next: Optional["Node"] = None + + +node = Node() + +# This should analyze fine because node.next should be assigned +# None before node is assigned None. +node.next, node = None, None diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignment6.py b/python-parser/packages/pyright-internal/src/tests/samples/assignment6.py new file mode 100644 index 00000000..6be62e16 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignment6.py @@ -0,0 +1,25 @@ +# This sample tests that the type checker handles the case +# where a symbol within a class scope is assigned an expression +# that includes the same symbol, but that same symbol is defined +# in an outer scope. + +a = 0 +b = 1 +c = 4 + + +class MyClass: + # This should not generate an error because + # the RHS of the assignment refers to a different + # "a", declared in an outer scope. + a = a + + # Same with "b" here. + (b, a) = (b, 3) + + # Same with "c" here. + [c] = [c] + + # This should generate an error because "d" is + # not declared in the outer scope. + e = d diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignment7.py b/python-parser/packages/pyright-internal/src/tests/samples/assignment7.py new file mode 100644 index 00000000..ef3a2fbf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignment7.py @@ -0,0 +1,13 @@ +# This sample tests a particularly difficult set of dependent +# assignments that involve tuple packing and unpacking. 
+ +# pyright: strict + +v1 = "" +v3 = "" + +v2, _ = v1, v3 +v4 = v2 +for _ in range(1): + v1 = v4 + v2, v3 = v1, "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignment8.py b/python-parser/packages/pyright-internal/src/tests/samples/assignment8.py new file mode 100644 index 00000000..59c5cf4d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignment8.py @@ -0,0 +1,38 @@ +# This sample ensures that a variable of type "object" can be +# assigned any other type. + +import os +from typing import Any, TypeVar, overload + + +class Foo: + @overload + def bar(self, obj: None) -> object: ... + + @overload + def bar(self, obj: object) -> Any: ... + + def bar(self, obj: object | None) -> Any: + pass + + @staticmethod + def baz(): + return 3 + + +_T = TypeVar("_T") + +my_obj: object + +my_obj = None +my_obj = os +my_obj = Foo +my_obj = Foo() +my_obj = Foo.bar +my_obj = Foo.baz +my_obj = () +my_obj = lambda x: x +my_obj = _T + +# This should generate an error because a is unbound. +my_obj = a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignment9.py b/python-parser/packages/pyright-internal/src/tests/samples/assignment9.py new file mode 100644 index 00000000..e017aa00 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignment9.py @@ -0,0 +1,23 @@ +# This sample tests assignment when the types are invariant and the +# source or destination are unions that contains subtypes which are +# subclasses of each other. + +from datetime import datetime + + +class FloatSubclass(float): + pass + + +float_list: list[float] = [1.0, 2.0] + +v1: list[float | FloatSubclass] = float_list + +v2: list[int | float] = float_list + +# This should generate an error. 
+v3: list[int | float | datetime] = float_list + + +v4: list[FloatSubclass | float] = [] +v5: list[float] = v4 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr1.py b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr1.py new file mode 100644 index 00000000..7e917ce2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr1.py @@ -0,0 +1,26 @@ +# This sample tests the Python 3.8 assignment expressions. + +# pyright: reportUnusedExpression=false + +def func1(): + b = 'a' + d = 'b' + + a = (b := 3) + + # This should generate an error because the + # item to the left of an assignment expression + # must be a name. + a + 3 := 3 + + # This should generate an error because parens + # are required in this case. + c = d := 3 + +# This should generate an error because parens are required in this case. +val if val := 1 + 2 else None + +val2 if (val2 := 1 + 2) else None + +# This should generate an error because parens are not allowed in asserts. +assert e := 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr2.py b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr2.py new file mode 100644 index 00000000..8c95857b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr2.py @@ -0,0 +1,67 @@ +# This sample tests the Python 3.8 assignment expressions. This sample +# is taken from PEP 257. + +# pyright: reportUnusedExpression=false + +import re + +def func1(x: float): + ... + +def pep572_examples(): + if (match := re.search('123', '252')) is not None: + print(match) + print(match) + + file = open('hello') + while chunk := file.read(8192): + print(chunk) + print(chunk) + + def f(x: float): + return x + mylist = [y := f(25), y**2, y**3] + + data = [1, 2, 3] + filtered_data = [y for x in data if (y := f(x)) is not None] + print(filtered_data) + + # This should generate an error. 
+ y := f(25) # INVALID + (y := f(25)) # Valid, though not recommended + + y1 = 1 + + # This should generate an error. + y0 = y1 := f(25) # INVALID + y0 = (y1 := f(25)) # Valid, though discouraged + + # This should generate an error. + func1(x = y := f(25)) # INVALID + func1(x=(y := f(25))) # Valid, though probably confusing + + # This should generate an error. + [y for x in [0, 1] if y := x - 1] + + [y for x in [0, 1] if (y := x - 1)] + + +def func2(): + # This should generate an error. + yield y := 1 + +def func3(): + # This should generate an error. + yield from y := [1] + +def func4(): + # This should generate an error. + v1 = {x := 'a': 0} + + v2 = {(x := 'a'): 0} + + # This should generate an error. + v3 = {x := 'a': i for i in range(4)} + + v4 = {(x := 'a'): i for i in range(4)} + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr3.py b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr3.py new file mode 100644 index 00000000..fbfcb7ad --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr3.py @@ -0,0 +1,27 @@ +# This sample tests the Python 3.8 assignment expressions. + +import re + +def foo1(x: float): + ... + +p = 3 + +# This should generate an error. +def foo2(answer = p := 42): # INVALID + ... + +def foo3(answer=(p := 42)): # Valid, though not great style + ... + +default_value: int = 3 + +# This should generate two errors. +def foo4(answer: p := default_value = 5): # INVALID + ... + +# This should generate an error. 
+(lambda: x := 1) # INVALID +lambda: (x := 1) # Valid, but unlikely to be useful +(x := lambda: 1) # Valid +lambda line: (m := re.match('pattern', 'line')) and m.group(1) # Valid diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr4.py b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr4.py new file mode 100644 index 00000000..f11043bb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr4.py @@ -0,0 +1,49 @@ +# This sample tests error detection for certain cases that +# are explicitly disallowed by PEP 572 for assignment expressions +# when used in context of a list comprehension. + +pairs = [] +stuff = [] + +# These should generate an error because assignment +# expressions aren't allowed within an iterator expression +# in a "for" clause of a list comprehension. +[x for x, y in (pairs2 := pairs) if x % 2 == 0] +[x for x, y in ([1, 2, 3, pairs2 := pairs]) if x % 2 == 0] +{x: y for x, y in (pairs2 := pairs) if x % 2 == 0} +{x for x, y in (pairs2 := pairs) if x % 2 == 0} +foo = (x for x, y in ([1, 2, 3, pairs2 := pairs]) if x % 2 == 0) + +# This should generate an error because 'j' is used as a +# "for target" and the target of an assignment expression. +[[(j := j) for i in range(5)] for j in range(5)] +[i := 0 for i, j in stuff] +[i + 1 for i in (i := stuff)] + +[False and (i := 0) for i, j in stuff] +[i for i, j in stuff if True or (j := 1)] + +# These should generate an error because assignment +# expressions aren't allowed within an iterator expression +# in a "for" clause of a list comprehension. +[i + 1 for i in (j := stuff)] +[i + 1 for i in range(2) for j in (k := stuff)] +[i + 1 for i in [j for j in (k := stuff)]] +[i + 1 for i in (lambda: (j := stuff))()] + + +class Example: + # This should generate an error because the containing + # scope for the list comprehension is a class. 
+ [(j := i) for i in range(5)] + + x = ((y := 1), (z := 2)) + + +Example.x +Example.y +Example.z + +# This should generate an error because 'j' is used as a +# "for target" and the target of an assignment expression. +[i for i in [1, 2] if True or (j := 1) for j in range(10)] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr5.py b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr5.py new file mode 100644 index 00000000..95ecd82d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr5.py @@ -0,0 +1,23 @@ +# This sample tests the scoping rules for assignment expressions +# within a list comprehension. + +# pyright: strict + +from typing import Tuple + + +def func1() -> Tuple[str, int]: + a = 3 + y = 4 + _ = [(a := x) for x in ["1", "2"] for _ in ["1", "2"]] + + # The type of "y" should be int because the "y" within + # the list comprehension doesn't leak outside. On the + # other hand, "a" does leak outside the list comprehension. + return (a, y) + + +def get_value(x: int) -> int: ... + + +x = sum(max(value for x in range(10) if (value := get_value(x))) for _ in range(10)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr6.py b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr6.py new file mode 100644 index 00000000..9c7f26a0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr6.py @@ -0,0 +1,10 @@ +# This sample tests that type guards work correctly +# with the walrus operator. 
+ +import re + + +def foo(s: str) -> str: + if m := re.fullmatch("(test).+", s): + return m.group(1) + return "oops" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr7.py b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr7.py new file mode 100644 index 00000000..741963a3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr7.py @@ -0,0 +1,23 @@ +# This sample tests assignment expressions used within arguments. + +from dataclasses import dataclass +from typing import Mapping +import collections + + +class NearestKeyDict(collections.UserDict): + def method1(self, key): + a = len(keys := [k for k in sorted(self.data) if k >= key]) + + # This should generate an error because walrus operators + # are not allowed with named arguments. + b = list(iterable = keys := [k for k in sorted(self.data) if k >= key]) + + +@dataclass +class DC1: + x: str + + +def func1(mapping: Mapping[str, dict]): + return [DC1(temp := "x", **mapping[temp])] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr8.py b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr8.py new file mode 100644 index 00000000..1cf63222 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr8.py @@ -0,0 +1,11 @@ +# This sample checks that parsing for the assignment expression +# operator is using the correct precedence. It should be parsing +# the RHS as a "test expression" which allows for ternary +# expressions. 
+ +result = None + +if items := 3 if result else None: + pass + +print(items) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr9.py b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr9.py new file mode 100644 index 00000000..867cbe49 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/assignmentExpr9.py @@ -0,0 +1,42 @@ +# This sample tests the case where an assignment expression target +# is found within a function decorator or a function default value expression. + +from typing import Any, Callable, TypeVar, overload + + +_T = TypeVar("_T") + + +def decorator(*args: Any, **kwargs: Any) -> Callable[[_T], _T]: ... + + +@decorator( + [ + walrus_target_1 + for combination in [[1]] + if None not in (walrus_target_1 := set(combination)) + ], +) +def decorated( + x: list[str] = [x for x in ["a", "b"] if x in (walrus_target_2 := ["a", "b"])], +): + pass + + +reveal_type(walrus_target_1, expected_text="set[int]") +reveal_type(walrus_target_2, expected_text="list[str]") + + +@overload +def func1(value: None = None) -> None: ... + + +@overload +def func1(value: str) -> str: ... + + +def func1(value: str | None = None) -> str | None: + return value + + +func1(value=(param_value := "test")) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/async1.py b/python-parser/packages/pyright-internal/src/tests/samples/async1.py new file mode 100644 index 00000000..47abf92d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/async1.py @@ -0,0 +1,49 @@ +# This sample validates that "async" is flagged as an error when +# used in inappropriate locations. + +from contextlib import AsyncExitStack + + +async def b(): + for i in range(5): + yield i + + +cm = AsyncExitStack() + + +def func1(): + # This should generate an error because + # "async" cannot be used in a non-async function. 
+ async for x in b(): + print("") + + # This is allowed because it's in a generator. + y = (x async for x in b()) + + # This should generate an error because + # "async" cannot be used in a non-async function. + async with cm: + pass + + # This should generate an error because + # "async" cannot be used in a non-async function. + return [x async for x in b()] + + +# This should generate an error because +# "async" cannot be used in a non-async function. +async for x in b(): + print("") + +# This is allowed because it's in a generator. +y = (x async for x in b()) + +# This should generate an error because +# "async" cannot be used in a non-async function. +async with cm: + pass + +# This should generate an error because +# "async" cannot be used in a non-async function. +[x async for x in b()] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/augmentedAssignment1.py b/python-parser/packages/pyright-internal/src/tests/samples/augmentedAssignment1.py new file mode 100644 index 00000000..a4f07c5c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/augmentedAssignment1.py @@ -0,0 +1,62 @@ +# This sample tests the type checker's handling of +# augmented assignments (combining a binary operator +# with an assignment). + + +a = 1 +b = 3.4 + +a += b +reveal_type(a, expected_text="float") + +a -= b +reveal_type(a, expected_text="float") + +a *= b +reveal_type(a, expected_text="float") + +a /= b +reveal_type(a, expected_text="float") + +a //= b +reveal_type(a, expected_text="float") + +a %= b +reveal_type(a, expected_text="float") + +a **= b +reveal_type(a, expected_text="Any") + +a = 1 + +# This should generate an error because +# matrix multiply isn't supported by int. 
+a @= b + +a |= b +a &= b +a ^= b +a <<= b +a >>= b + + +list1 = [1, 2, 3] +list1 += [4] + +# This should generate an error +list1 += 4 + +# This should generate an error +list2 = [1] +list2 *= 4 + +# This should generate an error +list2 *= [4] + + +# Test __iadd__ override in list class, which accepts +# any iterator as an argument. +bar = ("d",) +foo = ["a", "b"] +foo += ["c"] +foo += bar diff --git a/python-parser/packages/pyright-internal/src/tests/samples/augmentedAssignment2.py b/python-parser/packages/pyright-internal/src/tests/samples/augmentedAssignment2.py new file mode 100644 index 00000000..3d170fa0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/augmentedAssignment2.py @@ -0,0 +1,18 @@ +# This sample tests expected diagnostics for augmented assignment +# expressions. + + +def func1(values1: list[float] = [], values2: list[float] | None = None) -> None: + values3 = None + + # This should generate an error + values1 += values2 + + if values2 is not None: + values1 += values2 + + # This should generate an error + values1 -= values2 + + # This should generate an error + values1 += values3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/augmentedAssignment3.py b/python-parser/packages/pyright-internal/src/tests/samples/augmentedAssignment3.py new file mode 100644 index 00000000..eb257871 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/augmentedAssignment3.py @@ -0,0 +1,21 @@ +# This sample tests the case where a class-scoped variable +# type needs to be inferred from an augmented assignment. 
+ + +class ClassA: + y = 0 + z = 0 + z += 0.5 + + def __init__(self): + self.x = 0 + self.x += 0.5 + + @classmethod + def method1(cls): + cls.y += 0.5 + + +reveal_type(ClassA().x, expected_text="int | float") +reveal_type(ClassA.y, expected_text="int | float") +reveal_type(ClassA.z, expected_text="int | float") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/autoVariance1.py b/python-parser/packages/pyright-internal/src/tests/samples/autoVariance1.py new file mode 100644 index 00000000..7e5af1cd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/autoVariance1.py @@ -0,0 +1,164 @@ +# This sample tests variance inference for type variables that use +# autovariance. + +from dataclasses import dataclass +from typing import Final, Iterator, Sequence, overload + + +class ShouldBeCovariant1[T]: + def __getitem__(self, index: int) -> T: ... + + def __iter__(self) -> Iterator[T]: ... + + +vco1_1: ShouldBeCovariant1[float] = ShouldBeCovariant1[int]() + +# This should generate an error based on variance. +vco1_2: ShouldBeCovariant1[int] = ShouldBeCovariant1[float]() + + +class ShouldBeCovariant2[T](Sequence[T]): + def __len__(self) -> int: ... + @overload + def __getitem__(self, index: int) -> T: ... + @overload + def __getitem__(self, index: slice) -> Sequence[T]: ... + def __getitem__(self, index: int | slice) -> T | Sequence[T]: ... + + +vco2_1: ShouldBeCovariant2[float] = ShouldBeCovariant2[int]() +# This should generate an error based on variance. +vco2_2: ShouldBeCovariant2[int] = ShouldBeCovariant2[float]() + + +class ShouldBeCovariant3[T]: + def method1(self) -> "ShouldBeCovariant2[T]": ... + + +vco3_1: ShouldBeCovariant3[float] = ShouldBeCovariant3[int]() +# This should generate an error based on variance. 
+vco3_2: ShouldBeCovariant3[int] = ShouldBeCovariant3[float]() + + +@dataclass(frozen=True) +class ShouldBeCovariant4[T]: + x: T + + +vo4_1: ShouldBeCovariant4[float] = ShouldBeCovariant4[int](1) +# This should generate an error based on variance. +vo4_2: ShouldBeCovariant4[int] = ShouldBeCovariant4[float](1) + + +class ShouldBeCovariant5[T]: + def __init__(self, x: T) -> None: + self._x = x + + @property + def x(self) -> T: + return self._x + + +vo5_1: ShouldBeCovariant5[float] = ShouldBeCovariant5[int](1) +# This should generate an error based on variance. +vo5_2: ShouldBeCovariant5[int] = ShouldBeCovariant5[float](1) + + +class ShouldBeCovariant6[T]: + def f1[T2: int](self: "ShouldBeCovariant6[T2]") -> T2: ... + + @property + def f2[T2: int](self: "ShouldBeCovariant6[T2]") -> T2: ... + + +# This should generate an error based on variance. +vo6_1: ShouldBeCovariant6[int] = ShouldBeCovariant6[float]() + +vo6_2: ShouldBeCovariant6[float] = ShouldBeCovariant6[int]() + + +class ShouldBeInvariant1[T]: + def __init__(self, value: T) -> None: + self._value = value + + @property + def value(self): + return self._value + + @value.setter + def value(self, value: T): + self._value = value + + +# This should generate an error based on variance. +vinv1_1: ShouldBeInvariant1[float] = ShouldBeInvariant1[int](1) + +# This should generate an error based on variance. +vinv1_2: ShouldBeInvariant1[int] = ShouldBeInvariant1[float](1.1) + + +class ShouldBeInvariant2[T]: + def __init__(self, value: T) -> None: + self._value = value + + def get_value(self) -> T: + return self._value + + def set_value(self, value: T): + self._value = value + + +# This should generate an error based on variance. +vinv2_1: ShouldBeInvariant2[float] = ShouldBeInvariant2[int](1) + +# This should generate an error based on variance +vinv2_2: ShouldBeInvariant2[int] = ShouldBeInvariant2[float](1.1) + + +class ShouldBeInvariant3[K, V](dict[K, V]): + pass + + +# This should generate an error based on variance. 
+vinv3_1: ShouldBeInvariant3[float, str] = ShouldBeInvariant3[int, str]() + +# This should generate an error based on variance. +vinv3_2: ShouldBeInvariant3[int, str] = ShouldBeInvariant3[float, str]() + +# This should generate an error based on variance. +vinv3_3: ShouldBeInvariant3[str, float] = ShouldBeInvariant3[str, int]() + +# This should generate an error based on variance. +vinv3_4: ShouldBeInvariant3[str, int] = ShouldBeInvariant3[str, float]() + + +@dataclass +class ShouldBeInvariant4[T]: + x: T + + +# This should generate an error based on variance +vinv4_1: ShouldBeInvariant4[float] = ShouldBeInvariant4[int](1) + + +class ShouldBeInvariant5[T]: + def __init__(self, x: T) -> None: + self.x = x + + +# This should generate an error based on variance. +vinv5_1: ShouldBeInvariant5[float] = ShouldBeInvariant5[int](1) + + +class ShouldBeContravariant1[T]: + def __init__(self, value: T) -> None: + pass + + def set_value(self, value: T) -> None: + pass + + +# This should generate an error based on variance. +vcontra1_1: ShouldBeContravariant1[float] = ShouldBeContravariant1[int](1) + +vcontra1_2: ShouldBeContravariant1[int] = ShouldBeContravariant1[float](1.2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/autoVariance2.py b/python-parser/packages/pyright-internal/src/tests/samples/autoVariance2.py new file mode 100644 index 00000000..de200cf0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/autoVariance2.py @@ -0,0 +1,30 @@ +# This sample tests an auto-invariance case that involves recursive types. 
+ +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Concatenate, Protocol + + +@dataclass +class Node[T]: + left: Node[T] + right: Node[T] + value: T + + +class MyPartial[**P, R]: + def __init__(self, first: int, func: Callable[Concatenate[int, P], R]) -> None: + self.first = first + self.func = func + + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R: ... + + +class CallbackKeyed[*Ts](Protocol): + def __call__(self, *args: *Ts, keyed: bool) -> tuple[*Ts]: ... + + +def invoke_keyed[*Ts](fn: CallbackKeyed[*Ts], *args: *Ts) -> tuple[*Ts]: + return fn(*args, keyed=True) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/autoVariance3.py b/python-parser/packages/pyright-internal/src/tests/samples/autoVariance3.py new file mode 100644 index 00000000..6c1f994b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/autoVariance3.py @@ -0,0 +1,163 @@ +# This sample tests variance inference for traditional type variables. + +from typing import Generic, Iterator, Sequence, overload +from typing_extensions import TypeVar # pyright: ignore[reportMissingModuleSource] +from dataclasses import dataclass + +T = TypeVar("T", infer_variance=True) +K = TypeVar("K", infer_variance=True) +V = TypeVar("V", infer_variance=True) + +# This should generate an error because covariant cannot be used +# with infer_variance. +S1 = TypeVar("S1", covariant=True, infer_variance=True) + +# This should generate an error because contravariant cannot be used +# with infer_variance. +S2 = TypeVar("S2", contravariant=True, infer_variance=True) + + +class ShouldBeCovariant1(Generic[T]): + def __getitem__(self, index: int) -> T: ... + + def __iter__(self) -> Iterator[T]: ... + + +vco1_1: ShouldBeCovariant1[float] = ShouldBeCovariant1[int]() + +# This should generate an error based on variance. 
+vco1_2: ShouldBeCovariant1[int] = ShouldBeCovariant1[float]() + + +class ShouldBeCovariant2(Sequence[T]): + def __len__(self) -> int: ... + @overload + def __getitem__(self, index: int) -> T: ... + @overload + def __getitem__(self, index: slice) -> Sequence[T]: ... + def __getitem__(self, index: int | slice) -> T | Sequence[T]: ... + + +vco2_1: ShouldBeCovariant2[float] = ShouldBeCovariant2[int]() +# This should generate an error based on variance. +vco2_2: ShouldBeCovariant2[int] = ShouldBeCovariant2[float]() + + +class ShouldBeCovariant3(Generic[T]): + def method1(self) -> "ShouldBeCovariant2[T]": ... + + +vco3_1: ShouldBeCovariant3[float] = ShouldBeCovariant3[int]() +# This should generate an error based on variance. +vco3_2: ShouldBeCovariant3[int] = ShouldBeCovariant3[float]() + + +@dataclass(frozen=True) +class ShouldBeCovariant4(Generic[T]): + x: T + + +vo4_1: ShouldBeCovariant4[float] = ShouldBeCovariant4[int](1) +# This should generate an error based on variance. +vo4_4: ShouldBeCovariant4[int] = ShouldBeCovariant4[float](1.0) + + +class ShouldBeCovariant5(Generic[T]): + def __init__(self, x: T) -> None: + self._x = x + + @property + def x(self) -> T: + return self._x + + +vo5_1: ShouldBeCovariant5[float] = ShouldBeCovariant5[int](1) +# This should generate an error based on variance. +vo5_2: ShouldBeCovariant5[int] = ShouldBeCovariant5[float](1.0) + + +class ShouldBeInvariant1(Generic[T]): + def __init__(self, value: T) -> None: + self._value = value + + @property + def value(self): + return self._value + + @value.setter + def value(self, value: T): + self._value = value + + +# This should generate an error based on variance. +vinv1_1: ShouldBeInvariant1[float] = ShouldBeInvariant1[int](1) + +# This should generate an error based on variance. 
+vinv1_2: ShouldBeInvariant1[int] = ShouldBeInvariant1[float](1.1) + + +class ShouldBeInvariant2(Generic[T]): + def __init__(self, value: T) -> None: + self._value = value + + def get_value(self) -> T: + return self._value + + def set_value(self, value: T): + self._value = value + + +# This should generate an error based on variance. +vinv2_1: ShouldBeInvariant2[float] = ShouldBeInvariant2[int](1) + +# This should generate an error based on variance. +vinv2_2: ShouldBeInvariant2[int] = ShouldBeInvariant2[float](1.1) + + +class ShouldBeInvariant3(dict[K, V]): + pass + + +# This should generate an error based on variance. +vinv3_1: ShouldBeInvariant3[float, str] = ShouldBeInvariant3[int, str]() + +# This should generate an error based on variance. +vinv3_2: ShouldBeInvariant3[int, str] = ShouldBeInvariant3[float, str]() + +# This should generate an error based on variance. +vinv3_3: ShouldBeInvariant3[str, float] = ShouldBeInvariant3[str, int]() + +# This should generate an error based on variance. +vinv3_4: ShouldBeInvariant3[str, int] = ShouldBeInvariant3[str, float]() + + +@dataclass +class ShouldBeInvariant4[T]: + x: T + + +# This should generate an error based on variance. +vinv4_1: ShouldBeInvariant4[float] = ShouldBeInvariant4[int](1) + + +class ShouldBeInvariant5[T]: + def __init__(self, x: T) -> None: + self.x = x + + +# This should generate an error based on variance. +vinv5_1: ShouldBeInvariant5[float] = ShouldBeInvariant5[int](1) + + +class ShouldBeContravariant1(Generic[T]): + def __init__(self, value: T) -> None: + pass + + def set_value(self, value: T): + pass + + +# This should generate an error based on variance. 
+vcontra1_1: ShouldBeContravariant1[float] = ShouldBeContravariant1[int](1) + +vcontra1_2: ShouldBeContravariant1[int] = ShouldBeContravariant1[float](1.2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/autoVariance4.py b/python-parser/packages/pyright-internal/src/tests/samples/autoVariance4.py new file mode 100644 index 00000000..795d6d5c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/autoVariance4.py @@ -0,0 +1,51 @@ +# This sample tests the case where a class uses auto-variance but derives +# from a class that does not. + +from typing import Generic, TypeVar + +T = TypeVar("T") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class Parent_Invariant(Generic[T]): + pass + + +class ShouldBeInvariant[T](Parent_Invariant[T]): + pass + + +# This should generate an error. +a1: ShouldBeInvariant[int] = ShouldBeInvariant[float]() + +# This should generate an error. +a2: ShouldBeInvariant[float] = ShouldBeInvariant[int]() + + +class Parent_Covariant(Generic[T_co]): + pass + + +class ShouldBeCovariant[T](Parent_Covariant[T]): + pass + + +# This should generate an error. +b1: ShouldBeCovariant[int] = ShouldBeCovariant[float]() + +b2: ShouldBeCovariant[float] = ShouldBeCovariant[int]() + + +class Parent_Contravariant(Generic[T_contra]): + pass + + +class ShouldBeContravariant[T](Parent_Contravariant[T]): + pass + + +c1: ShouldBeContravariant[int] = ShouldBeContravariant[float]() + +# This should generate an error. 
+c2: ShouldBeContravariant[float] = ShouldBeContravariant[int]() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/autoVariance5.py b/python-parser/packages/pyright-internal/src/tests/samples/autoVariance5.py new file mode 100644 index 00000000..61e86dcf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/autoVariance5.py @@ -0,0 +1,20 @@ +# This sample tests a case that involves an interaction between a class +# that uses auto-variance and a decorator that uses a generic type alias. + +from typing import Any, Concatenate, Generic, ParamSpec, TypeAlias, Callable +from typing_extensions import TypeVar # pyright: ignore[reportMissingModuleSource] + +T = TypeVar("T", infer_variance=True) +P = ParamSpec("P") +R = TypeVar("R") +S = TypeVar("S", bound="A[Any]") + +TA1: TypeAlias = Callable[Concatenate[S, P], R] + + +def deco(func: TA1[S, P, R], /) -> TA1[S, P, R]: ... + + +class A(Generic[T]): + @deco + def select_all(self, *args: object) -> list[Any]: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/await1.py b/python-parser/packages/pyright-internal/src/tests/samples/await1.py new file mode 100644 index 00000000..86ed3d5c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/await1.py @@ -0,0 +1,57 @@ +# This sample validates that the await keyword participates in +# bidirectional type inference. + +from typing import ( + Any, + AsyncIterator, + Callable, + Iterable, + Literal, + TypeVar, + Generic, + overload, +) + +T = TypeVar("T") +AnyMsg = TypeVar("AnyMsg", bound="Msg") + + +class Msg(Generic[T]): + body: T + + +class Request: + id: int + + +async def func1(check: "Callable[[AnyMsg], bool]") -> AnyMsg: ... 
+ + +async def func2(): + _: Msg[Request] = await func1(check=lambda msg: (msg.body.id == 12345)) + + +async def func3() -> AsyncIterator[int]: + yield 1 + + +async def func4() -> int: + return await anext(func3()) + + +async def func5(__fn: Callable[..., T]) -> T: ... + + +@overload +def sum(__iterable: Iterable[Literal[0]]) -> int: ... + + +@overload +def sum(__iterable: Iterable[T]) -> T: ... + + +def sum(__iterable: Iterable[Any]) -> Any: ... + + +async def func6(f: Callable[[], list[int]]): + sum(await func5(f)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/await2.py b/python-parser/packages/pyright-internal/src/tests/samples/await2.py new file mode 100644 index 00000000..1c08d3d0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/await2.py @@ -0,0 +1,31 @@ +# This sample tests that types defined by type variables can be +# awaited. + +from typing import Generator, Any, NoReturn + + +class MyAwaitable: + def __await__(self) -> Generator[Any, None, int]: + async def foo() -> int: + return 1 + + return foo().__await__() + + async def foo(self) -> int: + return await self + + +async def func1() -> None: + p = MyAwaitable() + print(await p.foo()) + print(await p) + + +async def func2() -> NoReturn: + raise Exception() + + +async def func3(x: int | None): + if x is None: + await func2() + print(x.bit_count()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/await3.py b/python-parser/packages/pyright-internal/src/tests/samples/await3.py new file mode 100644 index 00000000..1caee13d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/await3.py @@ -0,0 +1,37 @@ +# This sample tests various places where await is invalid. + +from typing import Any + + +def func1() -> Any: ... + + +def func2(): + # These are OK because generators can be called + # outside of the context of the current function. 
+ (v async for v in func1()) + (await v for v in func1()) + + # This should generate an error because async + # cannot be used outside of an async function. + [x async for x in func1()] + + # This should generate an error because async + # cannot be used outside of an async function. + {x async for x in func1()} + + # This should generate an error because async + # cannot be used outside of an async function. + {k: v async for k, v in func1()} + + # This should generate an error because await + # cannot be used outside of an async function. + (x for x in await func1()) + + # This should generate an error because await + # cannot be used outside of an async function. + [await x for x in func1()] + + # This should generate an error because await + # cannot be used outside of an async function. + {await k: v for k, v in func1()} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/badToken1.py b/python-parser/packages/pyright-internal/src/tests/samples/badToken1.py new file mode 100644 index 00000000..3041f0f4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/badToken1.py @@ -0,0 +1,6 @@ +# This sample uses a non-breaking space, which should generate errors in +# the tokenizer, parser and type checker. + +# The space between "import" and "sys" is a non-breaking UTF8 character. +import sys + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/builtins1.py b/python-parser/packages/pyright-internal/src/tests/samples/builtins1.py new file mode 100644 index 00000000..5c8be3fa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/builtins1.py @@ -0,0 +1,2 @@ +# This file is intentionally blank. It's used to test +# the implicitly-imported builtins module and its symbols. 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/builtins2.py b/python-parser/packages/pyright-internal/src/tests/samples/builtins2.py new file mode 100644 index 00000000..b1d5d71d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/builtins2.py @@ -0,0 +1,7 @@ +# This sample tests that builtins can be overridden at the module level +# without generating a "possibly unbound" error. + +if input(): + print = lambda *x: None + +print("") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call1.py b/python-parser/packages/pyright-internal/src/tests/samples/call1.py new file mode 100644 index 00000000..b5b6ff5a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call1.py @@ -0,0 +1,93 @@ +# This sample tests various function type checking +# behavior including arg/param matching. + +from typing import Callable + +# ------------------------------------------------------ +# Test function type matching + + +class FooBase: + pass + + +class Foo(FooBase): + pass + + +class Bar(Foo): + pass + + +def needs_function1(callback: Callable[[Foo], Foo]): + pass + + +def callback1(): + pass + + +def callback2(a: Foo) -> Foo: + return Foo() + + +def callback3(a: Foo) -> str: + return "1" + + +def callback4(a: Foo, b: Foo) -> Foo: + return Foo() + + +def callback5(a: Foo, b: int = 3) -> Foo: + return Foo() + + +def callback6(*a) -> Foo: + return Foo() + + +def callback7(a: str) -> Foo: + return Foo() + + +def callback8(a: Bar) -> Foo: + return Foo() + + +def callback9(a: FooBase) -> Foo: + return Foo() + + +# This should generate an error because callback1 +# takes no parameters. +needs_function1(callback1) + +needs_function1(callback2) + +# This should generate an error because the return +# type of callback3 doesn't match. +needs_function1(callback3) + +# This should generate an error because callback4 +# takes too many parameters. 
+needs_function1(callback4) + +needs_function1(callback5) +needs_function1(callback6) + +# This should fail because the parameter is the +# wrong type. +needs_function1(callback7) + +# This should fail because the parameter is the +# wrong type. +needs_function1(callback8) + +needs_function1(callback9) + + +import typing + +# This should generate an error because modules are not callable. +typing() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call10.py b/python-parser/packages/pyright-internal/src/tests/samples/call10.py new file mode 100644 index 00000000..41605545 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call10.py @@ -0,0 +1,75 @@ +# This sample tests that the type checker properly handles +# types of args and kwargs correctly. + +from typing import Any, Hashable, Mapping, Protocol + + +def requires_hashable_tuple(p1: tuple[Hashable, ...]): ... + + +def requires_hashable_dict(p1: dict[str, Hashable]): ... + + +def test_args(*args: Hashable): + if args: + aaa = list(args) + bbb = tuple(aaa) + args = bbb + requires_hashable_tuple(args) + + +def test_kwargs(**kwargs: Hashable): + requires_hashable_dict(kwargs) + + +class StrSubclass(str): ... + + +def test_kwargs2( + a: Mapping[str, Any], + b: Mapping[Any, Hashable], + c: dict[StrSubclass, Hashable], + d: int, + e: Mapping[int, Hashable], + f: tuple[str, ...], +): + test_kwargs(**a) + test_kwargs(**b) + test_kwargs(**c) + + # This should generate an error + test_kwargs(**d) + + # This should generate an error + test_kwargs(**e) + + # This should generate an error + test_kwargs(**f) + + +class Callback1(Protocol): + def __call__(self) -> None: ... + + +def func1( + value: str = ..., + *args: object, +) -> None: ... + + +def func2( + value: str = ..., + **kwargs: object, +) -> None: ... + + +def func3( + value: str = ..., + *args: object, + **kwargs: object, +) -> None: ... 
+ + +v1: Callback1 = func1 +v2: Callback1 = func2 +v3: Callback1 = func3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call11.py b/python-parser/packages/pyright-internal/src/tests/samples/call11.py new file mode 100644 index 00000000..5f73ff45 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call11.py @@ -0,0 +1,44 @@ +# This sample tests the case where a call expression involves a union +# on the LHS where the subtypes of the union have different signatures. + +# pyright: strict + +from __future__ import annotations +from typing import Any, Callable, Generic, Self, TypeAlias, TypeVar + +T = TypeVar("T") +E = TypeVar("E") +U = TypeVar("U") +F = TypeVar("F") + +Either: TypeAlias = "Left[T]" | "Right[E]" + + +class Left(Generic[T]): + def __init__(self, value: T) -> None: + self.value = value + + def map_left(self, fn: Callable[[T], U]) -> Left[U]: + return Left(fn(self.value)) + + def map_right(self, fn: Callable[[Any], Any]) -> Self: + return self + + +class Right(Generic[E]): + def __init__(self, value: E) -> None: + self.value = value + + def map_left(self, fn: Callable[[Any], Any]) -> Self: + return self + + def map_right(self, fn: Callable[[E], F]) -> Right[F]: + return Right(fn(self.value)) + + +def func() -> Either[int, str]: + raise NotImplementedError + + +result = func().map_left(lambda lv: lv + 1).map_right(lambda rv: rv + "a") +reveal_type(result, expected_text="Right[str] | Left[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call12.py b/python-parser/packages/pyright-internal/src/tests/samples/call12.py new file mode 100644 index 00000000..b499fef8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call12.py @@ -0,0 +1,47 @@ +# This sample tests that positional arg expressions are evaluated prior +# to keyword arg expressions even if they don't appear in that order +# within the arg list. 
+ + +from typing import TypedDict + + +def func1(a: int | None = None, b: int | None = None, c: int | None = None) -> None: + pass + + +func1((v1 := 1), b=v1 + 1) + +# This should generate an error. +func1(b=(v2 := 1), *[v2 + 1]) + +func1(b=v3 + 1, *[(v3 := 1)]) + + +class A(TypedDict): + a: int + + +class B(TypedDict): + b: int + + +class C(TypedDict): + c: int + + +func1(a=(v4 := 1), **B(b=(v4 + 1))) + +# This should generate an error. +func1(**A(a=(v5 + 1)), b=(v5 := 1)) + +func1(**A(a=(v5 := 1)), b=(v5 + 1)) + +func1(b=(v6 + 1), *[(v6 := 1)], **C(c=(v6 + 2))) + + +def func2(a: int, b: int): + pass + + +func2(b=1, *(2,)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call13.py b/python-parser/packages/pyright-internal/src/tests/samples/call13.py new file mode 100644 index 00000000..9f75878e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call13.py @@ -0,0 +1,15 @@ +# This sample tests the case where a call invokes a generic function that +# uses a default argument assigned to a parameter whose type is generic. + +from typing import TypeVar, Iterable, Callable + +T1 = TypeVar("T1") +T2 = TypeVar("T2") + + +def func1(values: Iterable[T1], func: Callable[[T1], T2] = lambda x: x) -> list[T2]: + return [func(value) for value in values] + + +v1 = func1([1, 2, 3]) +reveal_type(v1, expected_text="list[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call14.py b/python-parser/packages/pyright-internal/src/tests/samples/call14.py new file mode 100644 index 00000000..34443a7e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call14.py @@ -0,0 +1,57 @@ +# This sample tests the pre-3.8 position-only parameter convention. + + +from typing import Any + + +def func1(__a: int, __b: int, c: int) -> None: ... + + +func1(1, 2, c=3) + +# This should generate an error because __b is position-only. 
+func1(1, __b=2, c=3) + + +# This should generate an error because a position-only parameter cannot +# follow a non-position-only parameter. +def func2(a: int, __b: int) -> None: ... + + +def func3(a: int, *args: Any, __b: int) -> None: ... + + +func3(a=1, __b=2) + + +def func4(a: int, /, __b: int) -> None: ... + + +func4(1, __b=2) + + +class A: + def m1(self, __a: int, b: int) -> None: + pass + + # This should generate an error. + def m2(self, a: int, __b: int) -> None: + pass + + @classmethod + def c1(cls, __a, int, b: int) -> None: + pass + + @classmethod + # This should generate an error. + def c2(cls, a, int, __b: int) -> None: + pass + + @staticmethod + def s1(__a: int, b: int) -> None: + pass + + @staticmethod + # This should generate an error. + def s2(a: int, __b: int) -> None: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call15.py b/python-parser/packages/pyright-internal/src/tests/samples/call15.py new file mode 100644 index 00000000..150bd40d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call15.py @@ -0,0 +1,66 @@ +# This sample tests the case where a generic function has a default argument +# value for a parameter with a generic type. + +from collections.abc import Callable +from typing import Iterable, TypeVar + +T = TypeVar("T") + +default_value: dict[str, int] = {} + + +def func1(x: T, y: dict[str, T] = default_value, /) -> T: ... + + +def func2(x: T, y: dict[str, T] = default_value) -> T: ... + + +def func3(x: T, *, y: dict[str, T] = default_value) -> T: ... + + +def test1(func: Callable[[T], T], value: T) -> T: + return func(value) + + +# This should generate an error. +test1(func1, "") + +# This should generate an error. +test1(func2, "") + +# This should generate an error. 
+test1(func3, "") + +reveal_type(test1(func1, 1), expected_text="int") +reveal_type(test1(func2, 1), expected_text="int") +reveal_type(test1(func3, 1), expected_text="int") + + +def func4(x: T, y: Iterable[T] = default_value, z: T = "", /) -> T: ... + + +def func5(x: T, y: Iterable[T] = default_value, z: T = "") -> T: ... + + +def func6(x: T, *, y: Iterable[T] = default_value, z: T = "") -> T: ... + + +reveal_type(test1(func4, 1), expected_text="str | int") +reveal_type(test1(func5, 1), expected_text="str | int") +reveal_type(test1(func6, 1), expected_text="str | int") + + +class A[T]: + def __init__(self, value: T) -> None: + self._value: T = value + + def update(self, value: T = 0, /) -> "A[T]": + return A(value) + + +a = A("") + +a.update("") + +# This should generate an error. +a.update() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call16.py b/python-parser/packages/pyright-internal/src/tests/samples/call16.py new file mode 100644 index 00000000..c937e7f0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call16.py @@ -0,0 +1,38 @@ +# This sample tests bidirectional type inference for calls where the expected +# type is a union. + +from typing import Any, AnyStr, Iterable, Literal, TypeVar, overload + +T = TypeVar("T") + + +class ItemBase: ... + + +class Item(ItemBase): ... + + +def gen_items() -> Iterable[Item]: + return [] + + +def make_list(val: Iterable[T]) -> list[T]: + return list(val) + + +x: ItemBase | list[ItemBase | None] = make_list(gen_items()) + + +@overload +def urlunsplit(components: Iterable[None]) -> Literal[b""]: ... + + +@overload +def urlunsplit(components: Iterable[AnyStr | None]) -> AnyStr: ... + + +def urlunsplit(components: Iterable[Any]) -> Any: ... 
+ + +def func(url: str, candidates: list[Any]) -> str | None: + return urlunsplit(candidates[0]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call17.py b/python-parser/packages/pyright-internal/src/tests/samples/call17.py new file mode 100644 index 00000000..21eac652 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call17.py @@ -0,0 +1,32 @@ +# This sample tests a case of bidirectional type inference for calls +# that involves a union in the expected type. + +# pyright: strict + +from __future__ import annotations + +from collections.abc import Callable +from typing import Generic, TypeVar + +T_co = TypeVar("T_co", covariant=True) +E_co = TypeVar("E_co", covariant=True) +F = TypeVar("F") + + +class Ok(Generic[T_co]): + def or_else(self, op: object) -> Ok[T_co]: ... + + +class Err(Generic[E_co]): + def or_else(self, op: Callable[[E_co], Result[T_co, F]]) -> Result[T_co, F]: ... + + +Result = Ok[T_co] | Err[E_co] + + +def inner(func: Callable[[E_co], Err[F]], r: Result[T_co, E_co]) -> Result[T_co, F]: + match r: + case Ok(): + return r.or_else(func) + case Err(): + return r.or_else(func) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call2.py b/python-parser/packages/pyright-internal/src/tests/samples/call2.py new file mode 100644 index 00000000..c90d70ab --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call2.py @@ -0,0 +1,183 @@ +# This sample tests function parameter matching logic. 
+ + +from typing import Any, Callable, Literal + + +def func1(a: int, *b: int): + pass + + +func1(3) +func1(3, 4) +func1(3, *[1, 2, 3]) + +# This should generate an error +func1(3, "hello") + +# This should generate an error +func1(3, 5, 2, "str") + +# This should generate an error +func1("hello", 3) + +str_list = ["he", "2", "3"] + +# This should generate an error +func1(3, *str_list) + + +def func2(a: str, **b: int): + pass + + +func2("hi") +func2("hi", b=3, c=4, d=5) + +str_dict = {"a": "3", "b": "2"} + +# This should generate a type error +func2("hi", **str_dict) + + +# This should generate a type error +func2("hi", 3) + +# This should generate a type error +func2("hi", b="hi") + + +def func4(*args: int): + pass + + +def func5(a: int, *args): + pass + + +tuple1 = (2, 3) +func4(*tuple1) +func5(*tuple1) + +# This should generate an error because a is assigned twice. +func2(a="", a="") + +# This should generate an error because c is assigned twice. +func2("", c=4, d=5, c=5) + + +def func6(param1: int, param2: str): + pass + + +def func7(*args: Any, param0: int, param1: int, param2: str): + func6(*args, param1=param1, param2=param2) + + func6(param0, param2=param2) + + # This should generate two errors because param0 has no match + # and param2 is missing. + func6(param0, param1=param1) + + +def func8( + y: str, + z: bool = ..., +) -> None: ... + + +kwargs1: dict[str, int] = {} +# This should generate an error because int is not compatible with str. +func8(z=False, **kwargs1) + + +class MyStr(str): ... + + +kwargs2: dict[MyStr, MyStr] = {} +func8(z=False, **kwargs2) + + +def func9( + x: int, + y: str, + *, + a: str = ..., + b: str, + c: str, +) -> None: ... + + +kwargs3: dict[str, str] = {} +func9(0, "", **kwargs3) + +args4: list[str] = ["hi"] +func9(0, *args4, **kwargs3) + +# This should generate an error. +func9(*args4, **kwargs3) + + +def func10(x: int): ... + + +func10(1, *()) + +# This should generate an error. 
+func10(1, *(1,)) + +func10(*(1,)) + +# This should generate an error. +func10(*(1, 1)) + +# This should generate an error. +func10(*("",)) + + +def func11(y: tuple[int, ...]): + func10(1, *y) + + +def func12(x: int, /, y: str): + pass + + +# This should generate an error. +func12(1, **{"z": None}) + + +def func13(*, a: str, b: str, c: int | None = None): + ... + + +func_args1: dict[Literal["a", "b", "d"], str] = { + "a": "a", + "b": "b", + "d": "d", +} + +func13(**func_args1) + +func_args2: dict[Literal["a", "b", "c"], str] = { + "a": "a", + "b": "b", + "c": "c", +} + + +# This should generate an error. +func13(**func_args2) + + +def func14(cb1: Callable[..., Any], cb2: Any, x: None): + cb1(**x) # This should generate an error + cb2(**x) # This should generate an error + + +def func15(cb1: Callable[..., Any], cb2: Any, a: int, b: None | str): + print(*a) # This should generate an error + print(*b) # This should generate an error + cb1(*a) # This should generate an error + cb2(*b) # This should generate an error + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call3.py b/python-parser/packages/pyright-internal/src/tests/samples/call3.py new file mode 100644 index 00000000..8616aa28 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call3.py @@ -0,0 +1,180 @@ +# This sample tests the Python 3.8 "positional-only parameter" feature. + +from typing import Any, Dict, Protocol, Tuple + + +def f0(a: int, b: int): + return 3 + + +def f1(a: int, b: int, /): + return 3 + +# This should generate an error because only one +# '/' parameter is allowed. +def f2(a: int, /, b: int, /): + return 3 + +def f3(a: int, /, b: int): + return 3 + +def f4(a: int, /, b: int, *, c: int): + return 3 + +# This should generate an error because a '/' +# parameter shouldn't appear after '*'. +def f5(a: int, *, b: int, /, c: int): + return 3 + +# This should generate an error because a '/' +# parameter cannot be the first in a param list. 
+def f6(/, a: int, *, b: int): + return 3 + + +f0(2, 3) + +f1(2, 3) + +# This should generate an error because b +# is a position-only parameter. +f1(2, b=3) + +# This should generate an error because a and b +# are position-only parameters. +f1(a=2, b=3) + +f2(2, 3) + +# This should generate an error. +f2(a=2, b=3) + +f3(2, 3) +f3(2, b=3) + +# This should generate 1 error because a is a +# position-only parameter. +f3(a=2, b=3) + +f4(1, 2, c=3) +f4(1, b=2, c=3) + +# This should generate an error because c is a +# keyword-only parameter. +f4(1, 2, 3) + +# This should generate an error because a is a +# positional-only parameter. +f4(a=1, b=2, c=3) + +# This will an error because of the bad +# declaration. Test to make sure we don't crash. +f5(1, b=2, c=3) + +f6(1, b=2) +f6(a=1, b=2) + +class A: + def f(self, g: bool = False, /, **kwargs) -> None: + ... + +a = A() + +a.f(hello="world") + + +def f7(name: str, /, **kwargs: Any): + return 3 + +f7("hi", name=3) + +# This should generate an error +f7("hi", name=3, name=4) + + +class P1(Protocol): + def f(self, x: Any, /): + ... + + +class C1: + def f( + self, + y: Any, + ): + ... + + +c1: P1 = C1() + + +class P2(Protocol): + def f(self, x: Any): + ... + + +class C2: + def f(self, y: Any, /): + ... + + +# This should generate an error +c2: P2 = C2() + + +def f8(a: int, b: int = 3, /): + ... + + +kwargs: Dict[str, Any] = {} + +# This should generate an error +f8() + +# This should generate an error +f8(**kwargs) + + +f8(0, **kwargs) + +def f9(*, c: int): + pass + +# This should generate an error because it is missing a keyword +# argument for keyword parameter "c". +f9(*[1, 2, 3]) + + +# This should generate an error because "/" cannot be used after "*args" +def f10(x, *args, /, y): + pass + +# This should generate an error because "*" cannot be used after "*args" +def f11(x, *args, *, y): + pass + +def f15(x, /, *args): + pass + +# This should generate an error because x +# is a position-only parameter. 
+f15(x=1) + +def f16(x, /, *args, **kw): + pass + +# This should generate an error because x +# is a position-only parameter. +f16(x=1) + +def f12(a: int, b: str, /): + ... + + +def f13(v: Tuple[int, str]): + f12(*v) + +def f14(v: Tuple[int]): + # This should generate an error because parameter "b" has + # no corresponding argument. + f12(*v) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call4.py b/python-parser/packages/pyright-internal/src/tests/samples/call4.py new file mode 100644 index 00000000..2b81adcb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call4.py @@ -0,0 +1,11 @@ +# This sample tests that the TypeVar matching logic for +# functions is working correctly. + +a: list[str] = ["a", "bc"] + +# This should work because the "sorted" is defined +# with the first parameter of Iterable[_T] and the +# 'key' parameter Callable[[_T], Any]. Since "len" +# is a function that takes a "Sized" and "str" is +# a "Sized", the result of this should be List[str]. +b: list[str] = sorted(a, key=len) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call5.py b/python-parser/packages/pyright-internal/src/tests/samples/call5.py new file mode 100644 index 00000000..eba6913f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call5.py @@ -0,0 +1,94 @@ +# This sample tests handling of unpack operators used +# for arguments that are of a specified length (specifically, +# tuples with a specified list of elements types). + +from typing import NamedTuple, List, Tuple + +X = NamedTuple("X", [("a", int), ("b", str), ("c", str)]) + +q0: List[Tuple[int, str, str]] = [(1, "", ""), (2, "", "")] + +[X(*item) for item in q0] + + +q1: List[Tuple[int, str, float]] = [(1, "a", 3), (2, "b", 4), (3, "c", 5)] + +# This should generate an error because the items in q1 are not the +# right type for the X constructor. 
+[X(*item) for item in q1] + + +q2: List[Tuple[int, str]] = [(1, "1"), (2, "2"), (3, "3")] + +# This should generate an error because the items in q2 contain only +# two elements, and we need three to populate all three parameters +# in the X constructor. +[X(*item) for item in q2] + + +q3: List[Tuple[int, str, str, float]] = [ + (1, "a", "3", 4), + (2, "b", "4", 5), + (3, "c", "5", 6), +] + +# This should generate an error because the items in q3 contain +# four elements, and we need three to populate all parameters +# in the X constructor. +[X(*item) for item in q3] + + +q4: List[Tuple[int, ...]] = [ + (1, 3), + (2, 5), + (3, 6), +] + +# This should generate two errors because it isn't assignable to parameter +# b or c. +[X(*item) for item in q4] + + +Y = NamedTuple("Y", [("a", str), ("b", str), ("c", str)]) + +q5: List[Tuple[str, ...]] = [ + ("a", "b"), + ("a", "b"), +] + +[Y(*item) for item in q5] + + +class Z(NamedTuple): + a: list[str] + b: list[int] + + +q6 = Z(["1"], [3]) + +for a, b in zip(*q6): + reveal_type(a, expected_text="str") + reveal_type(b, expected_text="int") + + +def func1(a: list[str], c: list[int]): ... + + +func1(*q6) + + +class ABC(NamedTuple): + a: float + b: float + c: float + + def to_rgba(self) -> "ABC": + return ABC(*self) + + +class AB(NamedTuple): + a: float + b: float + + def to_abc(self) -> ABC: + return ABC(*self, 1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call6.py b/python-parser/packages/pyright-internal/src/tests/samples/call6.py new file mode 100644 index 00000000..958e9ade --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call6.py @@ -0,0 +1,50 @@ +# This sample tests the handling of unpack operators +# used in argument expressions when used in conjunction with +# tuples and *args parameters. 
+ + +def func1(a: int, b: int): + pass + + +def func2(*args: int): + pass + + +fixed_tuple_0 = () +func1(*fixed_tuple_0, 2, 3) +func2(*fixed_tuple_0, 2) + +fixed_tuple_1 = (1,) + +# This should generate an error because there +# are too many parameters. +func1(*fixed_tuple_1, 2, 3) + +func2(*fixed_tuple_1, 2, *fixed_tuple_0) + +fixed_tuple_3 = (1, 3, 5) + +# This should generate an error because there +# are too many parameters. +func1(*fixed_tuple_3, 2) + +func2(*fixed_tuple_3, 2, *fixed_tuple_0) + +unbounded_tuple: tuple[int, ...] = (1, 5, 3) + +func2(*unbounded_tuple) +func2(*unbounded_tuple, 2) + + +def func3(*args: str): ... + + +def func4(v1: list[str] | None, v2: None, v3: list[str]): + # This should generate an error. + func3(*v1) + + # This should generate an error. + func3(*v2) + + func3(*v3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call7.py b/python-parser/packages/pyright-internal/src/tests/samples/call7.py new file mode 100644 index 00000000..4f99d29e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call7.py @@ -0,0 +1,85 @@ +# This sample tests the handling of an unpacked TypedDict passed as +# an argument to a function. 
+ +from typing import TypedDict, Unpack + + +class TD1(TypedDict): + arg1: int + arg2: str + + +class TD2(TD1): + arg3: float + + +def func1(arg1: int, arg2: str): + pass + + +def func2(arg1: int, arg2: str, arg3: float): + pass + + +def func3(arg1: int, arg2: str, **kwargs: float): + pass + + +def func4(arg1: int, arg2: str, **kwargs: int): + pass + + +def func5(arg1: int, arg2: str, **kwargs: object): + pass + + +td1: TD1 = {"arg1": 10, "arg2": "something"} +td2: TD2 = {"arg1": 10, "arg2": "something", "arg3": 3.4} + +func1(**td1) + +# This should generate an error because "arg1" is already assigned +func1(arg1=3, **td1) + +# This should generate an error because "arg3" isn't provided +func1(**td2) + +# This should generate an error because "arg3" isn't matched +func2(**td1) + +func2(**td2) + + +# This should generate an error because the extra entries +# in the TD are of type object. +func3(**td1) + +# This should generate an error because the extra entries +# in the TD are of type object. +func3(**td2) + +# This should generate an error because the extra entries +# in the TD are of type object. +func4(**td1) + +func5(**td1) +func5(**td2) + +# This should generate two errors because "arg3" cannot be matched +# due to the type of the **kwargs parameter. Also, the extra entries +# in the TD are of type object. +func4(**td2) + + +class Options(TypedDict, total=False): + opt1: bool + opt2: str + + +def func6(code: str | None = None, **options: Unpack[Options]): + pass + + +func6(**{}) +func6(**{"opt1": True}) +func6(**{"opt2": "hi"}) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call8.py b/python-parser/packages/pyright-internal/src/tests/samples/call8.py new file mode 100644 index 00000000..39213b85 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call8.py @@ -0,0 +1,40 @@ +# This sample tests a case where multiple overloaded calls are nested +# within each other. 
+ +from typing import Any, Iterable, TypeVar, Protocol, overload +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + LiteralString, +) + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + + +class SupportsLenAndGetItem(Protocol[_T_co]): + def __getitem__(self, __k: int) -> _T_co: ... + + +def choices(population: SupportsLenAndGetItem[_T]) -> list[_T]: ... + + +@overload +def join(__iterable: Iterable[LiteralString]) -> LiteralString: # type:ignore + ... + + +@overload +def join(__iterable: Iterable[str]) -> str: ... + + +@overload +def array(object: int) -> list[Any]: ... + + +@overload +def array(object: object) -> list[Any]: ... + + +def array(object: object) -> list[Any]: ... + + +array([join(choices("")) for i in range(1)]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/call9.py b/python-parser/packages/pyright-internal/src/tests/samples/call9.py new file mode 100644 index 00000000..662449e2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/call9.py @@ -0,0 +1,51 @@ +# This sample tests the case where a dictionary expansion operator +# is used in a call. The type checker should verify that the +# type supports a SupportsKeyAndGetItem protocol. + +from typing import Any, Generic, Iterator, TypeVar, Mapping, KeysView + + +class MyMapping(Mapping[str, Any]): + def __getitem__(self, __key: str) -> Any: ... + + def __iter__(self) -> Iterator[str]: ... + + def __len__(self) -> int: ... + + +class StrRecord: + def __getitem__(self, __key: str) -> str: ... + + def keys(self) -> KeysView[str]: ... + + +T = TypeVar("T") + + +class GenericRecord(Generic[T]): + def __getitem__(self, __key: str) -> T: ... + + def keys(self) -> KeysView[T]: ... + + +def func1(**kwargs: Any) -> None: ... 
+ + +m = MyMapping() +r = StrRecord() + + +def func2( + m: MyMapping, + r: StrRecord, + g: GenericRecord[str], + mrg: MyMapping | StrRecord | GenericRecord[str], + bad: GenericRecord[bytes], +): + func1(**m) + func1(**r) + func1(**g) + func1(**mrg) + + # This should generate an error. + func1(**bad) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callSite1.py b/python-parser/packages/pyright-internal/src/tests/samples/callSite1.py new file mode 100644 index 00000000..9285cdc8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callSite1.py @@ -0,0 +1,35 @@ +# This sample tests pyright's ability to perform return type +# analysis of functions based on call-site arguments. + + +# This function has no type annotations +from typing import TypeVar + +T = TypeVar("T") + + +def add(a, b): + return a + b + + +async def async_call(x): + return x + + +def deco1(f: T) -> T: + return f + + +@deco1 +def add2(a, b): + return a + b + + +def identity(a): + return a + + +def func1(x: T) -> T: + a = identity(x) + reveal_type(a, expected_text="T@func1") + return a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callSite2.py b/python-parser/packages/pyright-internal/src/tests/samples/callSite2.py new file mode 100644 index 00000000..a730492f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callSite2.py @@ -0,0 +1,16 @@ +# This sample tests pyright's ability to perform return type +# analysis of functions based on call-site arguments. 
+ +from .callSite1 import add, add2, async_call + +v1 = add(1, 2) +reveal_type(v1, expected_text="Literal[3]") + +v2 = add("hi", "there") +reveal_type(v2, expected_text="Literal['hithere']") + +v3 = add2(1, 2) +reveal_type(v3, expected_text="Unknown") + +v4 = async_call(1) +reveal_type(v4, expected_text="CoroutineType[Any, Any, Unknown]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callSite3.py b/python-parser/packages/pyright-internal/src/tests/samples/callSite3.py new file mode 100644 index 00000000..65a89a82 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callSite3.py @@ -0,0 +1,50 @@ +# This sample tests the case where a call-site return type evaluation +# is invoked multiple times within a loop using different literal values +# each time. + + +def func1(h, ids): + for _ in ids: + h = func2(h, 1) + h = func2(h, 2) + h = func2(h, 3) + h = func2(h, 4) + h = func2(h, 5) + h = func2(h, 6) + h = func2(h, 7) + h = func2(h, 8) + h = func2(h, 9) + h = func2(h, 10) + h = func2(h, 11) + h = func2(h, 12) + h = func2(h, 13) + h = func2(h, 14) + h = func2(h, 15) + h = func2(h, 16) + h = func2(h, 17) + h = func2(h, 18) + h = func2(h, 19) + h = func2(h, 20) + h = func2(h, 21) + h = func2(h, 22) + h = func2(h, 23) + h = func2(h, 24) + h = func2(h, 25) + h = func2(h, 26) + h = func2(h, 27) + h = func2(h, 28) + h = func2(h, 29) + h = func2(h, 30) + h = func2(h, 31) + h = func2(h, 32) + h = func2(h, 33) + h = func2(h, 34) + h = func2(h, 35) + h = func2(h, 36) + h = func2(h, 37) + h = func2(h, 38) + h = func2(h, 39) + + +def func2(a, unused): + return a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callable1.py b/python-parser/packages/pyright-internal/src/tests/samples/callable1.py new file mode 100644 index 00000000..7689e906 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callable1.py @@ -0,0 +1,51 @@ +# This sample tests the type checker's handling of the +# builtin 
"Callable" class. + +from typing import Callable + +Callable1 = Callable[["A"], None] + + +class A: + pass + + +Callable2 = Callable[[A], None] + + +def func1(a: Callable1): + a(A()) + + +def func2(a: Callable2): + a(A()) + + +Callable3 = Callable[..., int] + + +def func3(a: Callable3) -> int: + return a(1, 2, 3) + a() + a("hello") + a([]) + + +# This should generate an error (... not allowed in param list). +Callable4 = Callable[[...], int] + +# This should generate an error (too many arguments). +Callable5 = Callable[..., int, int] + + +# Test Callable with no parameters +Callable6 = Callable[[], str] + + +def func6(a: Callable6): + a() + # This should generate an error. + a(1) + + +def func7(a: Callable): + reveal_type(a, expected_text="(...) -> Unknown") + b = a(3, 4, 5) + reveal_type(b, expected_text="Unknown") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callable2.py b/python-parser/packages/pyright-internal/src/tests/samples/callable2.py new file mode 100644 index 00000000..b4edd3b5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callable2.py @@ -0,0 +1,55 @@ +# This sample tests the assignment of generic callables +# to concrete callable types. + +from asyncio.futures import Future +from asyncio.tasks import ensure_future +from typing import Any, Awaitable, Callable, Iterable, Sequence, TypeVar + + +_T1 = TypeVar("_T1") + + +def func1(__iterable: Iterable[_T1]) -> _T1: ... + + +a: Callable[[Sequence[float]], float] = func1 +b: Callable[[Sequence[Any]], Any] = func1 + + +def func2(__iterable: Sequence[_T1]) -> _T1: ... + + +# This should generate an error because an Iterable parameter +# is not assignable to a Sequence parameter. +c: Callable[[Iterable[float]], float] = func2 + + +_T2 = TypeVar("_T2", bound=float) + + +def func3(__iterable: Iterable[_T2]) -> _T2: ... 
+ + +d: Callable[[Sequence[int]], int] = func3 + +# This should generate an error because Sequence[str] +# is not compatible with the bound TypeVar _T2. +e: Callable[[Sequence[str]], Any] = func3 + + +_T3 = TypeVar("_T3") + +TA1 = Callable[[_T3], None] +TA2 = Callable[[TA1[_T3]], None] + + +def func4(cb: TA2[_T3]) -> Awaitable[_T3]: + future: Future[_T3] = Future() + return ensure_future(future) + + +def func5(done: TA1[int]) -> None: + pass + + +reveal_type(func4(func5), expected_text="Awaitable[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callable3.py b/python-parser/packages/pyright-internal/src/tests/samples/callable3.py new file mode 100644 index 00000000..a04d3ee6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callable3.py @@ -0,0 +1,23 @@ +# This sample tests the case where a callable type contains a +# callable type as an input parameter, and the latter callable +# contains generic types. + +from typing import Callable, Generic, TypeVar + +T = TypeVar("T") +R = TypeVar("R") + + +class ClassA(Generic[R]): ... + + +class ClassB(Generic[T]): + def method1(self, val: Callable[[ClassA[R]], T]) -> R | None: + return None + + +b1: ClassB[tuple[int, ClassA[str]]] = ClassB() +v1: Callable[[ClassA[str]], tuple[int, ClassA[str]]] = lambda r: (42, r) + +ret = b1.method1(v1) +reveal_type(ret, expected_text="str | None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callable4.py b/python-parser/packages/pyright-internal/src/tests/samples/callable4.py new file mode 100644 index 00000000..56851cb8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callable4.py @@ -0,0 +1,32 @@ +# This sample tests the case where a callable type within a function +# signature contains a generic return type within a union. 
+ +from typing import TypeVar +from collections.abc import Callable + + +T = TypeVar("T") +U = TypeVar("U") + + +def func1(f: Callable[[T], U | None], x: T) -> U: + y = f(x) + + reveal_type(y, expected_text="U@func1 | None") + + if y is not None: + reveal_type(y, expected_text="U@func1") + return y + + raise ValueError() + + +def func2(x: T, f: Callable[[T], U | None]) -> U: + def g() -> U: + y = f(x) + if y is not None: + return y + + raise ValueError() + + return g() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callable5.py b/python-parser/packages/pyright-internal/src/tests/samples/callable5.py new file mode 100644 index 00000000..3c2893c4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callable5.py @@ -0,0 +1,37 @@ +# This sample covers the case where a function accepts a generic callable +# as a parameter along with another parameter that uses the same type variable +# and a caller provides an overloaded function as an argument. + +from typing import Any, Callable, TypeVar, overload + +T = TypeVar("T") + + +@overload +def func1(real: float): ... + + +@overload +def func1(real: str): ... + + +def func1(real: float | str) -> None: ... + + +def func2(f: Callable[[T], Any], p: T): + return f(p) + + +func2(func1, 4) +func2(func1, "4") + +# This should generate an error because a "bytes" argument +# doesn't match any of the overloads. +func2(func1, b"") + + +map(complex, ["3j", "4"]) + +# This should generate two errors because a "bytes" argument +# doesn't match any of the overloads in the "complex" constructor. 
+map(complex, [b"3j"]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callable6.py b/python-parser/packages/pyright-internal/src/tests/samples/callable6.py new file mode 100644 index 00000000..d1f97e70 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callable6.py @@ -0,0 +1,91 @@ +# This sample tests the use of unpacked tuples in a Callable, as described +# in PEP 646. + +from typing import Callable, TypeVar +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + +_T = TypeVar("_T", bound=int) + +TA1 = Callable[[_T, Unpack[tuple[int, ...]], tuple[int, int, str], str], _T] + +# This should generate an error. +TA2 = Callable[ + [int, Unpack[tuple[int, ...]], Unpack[tuple[int, int, str, ...]], str], int +] + +TA3 = Callable[[int, Unpack[tuple[int, int]], str], int] + +TA4 = Callable[[Unpack[tuple[int, ...]]], _T] + + +def func1(x: TA1[int]): + r1 = x(3, 4, 5, (1, 2, "hi"), "hi") + reveal_type(r1, expected_text="int") + + x(3, (1, 2, "hi"), "hi") + + # This should generate an error because the first argument is not an int. + x(None, (1, 2, "hi"), "hi") + + y = [1, 2, 3] + x(1, *y, (1, 2, "hi"), "hi") + + +def func2(x: TA3): + x(3, 4, 5, "hi") + + # This should generate an error. + x(3, 4, "hi") + + # This should generate an error. + x(3, 4, "hi", "hi") + + +def func6(x: TA4): + x() + + +Ts = TypeVarTuple("Ts") + + +def func3( + path: str, *args: Unpack[tuple[Unpack[Ts], str]] +) -> tuple[Unpack[Ts], int]: ... + + +v3 = func3("", 1, "2", 3.3, None, "") +reveal_type(v3, expected_text="tuple[int, str, float, None, int]") + +func3("", "") + +# This should generate an error because the type of the first arg is wrong. +func3(1, "") + +# This should generate an error because the type of the last arg is wrong. +func3("", 1) + +# This should generate an error because the type of the last arg is wrong. 
+func3("", 1, 2, 3, "hi", 1) + + +def func4( + path: str, *args: Unpack[tuple[Unpack[Ts], str]] +) -> tuple[Unpack[Ts], complex]: ... + + +v4 = func4("", 1, "2", 3.3, None, "") +reveal_type(v4, expected_text="tuple[int, str, float, None, complex]") + + +def func5(path: str, *args: Unpack[tuple[str, ...]]) -> None: ... + + +# This should generate an errors. +func5("", 1, "2", "") +func5("", "1", "2", "3.3", "None", "") + +# This should generate one error. +func5("", "1", "2", "3.3", "None", 3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callable7.py b/python-parser/packages/pyright-internal/src/tests/samples/callable7.py new file mode 100644 index 00000000..42841d99 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callable7.py @@ -0,0 +1,33 @@ +# This sample tests the handling of the `__call__` attribute. + + +from typing import Iterable, Iterator + + +class A: + def __init__(self): + self.__call__ = self.method1 + + def method1(self, a: int): + return a + + +# This should generate an error because `__call__` is +# callable only if it's a class variable. +A()(0) + + +class B: + def method1(self, a: int): + return a + + __call__ = method1 + + +B()(0) + + +class C[K](Iterable[K]): + def keys(self) -> Iterator[K]: ... + + __iter__ = keys diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol1.py b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol1.py new file mode 100644 index 00000000..3ca87886 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol1.py @@ -0,0 +1,159 @@ +# This sample tests support for callback protocols (defined in PEP 544). 
+ +from typing import Callable, Protocol + + +class TestClass1(Protocol): + def __call__(self, *vals: bytes, maxlen: int | None = None) -> list[bytes]: + return [] + + +def good_cb(*vals: bytes, maxlen: int | None = None) -> list[bytes]: + return [] + + +def bad_cb1(*vals: bytes, maxlen: int | None, maxitems: int | None) -> list[bytes]: + return [] + + +def bad_cb2(*vals: bytes) -> list[bytes]: + return [] + + +def bad_cb3(*vals: bytes, maxlen: str | None) -> list[bytes]: + return [] + + +var1: TestClass1 = good_cb + +# This should generate an error because maxitems is unmatched. +var1 = bad_cb1 + +# This should generate an error because maxlen is unmatched. +var1 = bad_cb2 + +# This should generate an error because maxlen is the wrong type. +var1 = bad_cb3 + + +class TestClass2(Protocol): + def __call__(self, *vals: bytes, **kwargs: str) -> None: + pass + + +def func1(*a: bytes, **b: str): + pass + + +def func2(*a: bytes): + pass + + +def func3(*a: str, **b: str): + pass + + +def func4(*a: bytes, **b: bytes): + pass + + +def func5(**b: str): + pass + + +var2: TestClass2 = func1 + +# This should generate an error. +var2 = func2 + +# This should generate an error. +var2 = func3 + +# This should generate an error. +var2 = func4 + +# This should generate an error. +var2 = func5 + + +class NotProto: + def __call__(self, *vals: bytes, maxlen: int | None = None) -> list[bytes]: + return [] + + +# This should generate an error because NotProto is not a protocol class. +not_proto: NotProto = good_cb + + +class TestClass3(Protocol): + def __call__(self) -> None: + pass + + +var3: TestClass3 = func1 + +var3 = func2 +var3 = func3 +var3 = func4 +var3 = func5 + + +class TestClass4(Protocol): + foo: int + + def __call__(self, x: int) -> None: + pass + + +def test_func4(x: int) -> None: + pass + + +# This should generate an error. +var4: TestClass4 = test_func4 + + +class TestClass5(Protocol): + def __call__(self, *, a: int, b: str) -> int: ... 
+ + +def test_func5(a: int, b: str) -> int: + return 123 + + +f5: TestClass5 = test_func5 + + +class TestClass6(Protocol): + def __call__(self, a: int, /, *, b: str) -> int: ... + + +def test_func6(a: int, b: str) -> int: + return 123 + + +f6: TestClass6 = test_func6 + + +class TestClass7: + def __call__(self) -> None: + pass + + +def test_func7(*args: *tuple[int, *tuple[int, ...]]) -> int: + return 123 + + +# This should generate an error. +f7: TestClass7 = test_func7 + + +class TestClass8: + def __call__(self: Callable[[int], int], v: int) -> int: + return v + + +def func8(f: Callable[[int], int]): ... + + +func8(TestClass8()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol10.py b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol10.py new file mode 100644 index 00000000..0d813d34 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol10.py @@ -0,0 +1,61 @@ +# This sample tests the case where a callback protocol uses +# a (*args: Any, **kwargs: Any) signature. + + +from typing import Any, Callable, Concatenate, ParamSpec, Protocol, TypeVar + +P = ParamSpec("P") +T_contra = TypeVar("T_contra", contravariant=True) + + +class Proto1(Protocol): + def __call__(self, *args, **kwargs) -> None: ... + + +class Proto2(Protocol): + def __call__(self, a: int, /, *args, **kwargs) -> None: ... + + +class Proto3(Protocol): + def __call__(self, a: int, *args: Any, **kwargs: Any) -> None: ... + + +class Proto4(Protocol[P]): + def __call__(self, a: int, *args: P.args, **kwargs: P.kwargs) -> None: ... + + +class Proto5(Protocol[T_contra]): + def __call__(self, *args: T_contra, **kwargs: T_contra) -> None: ... 
+ + +class Proto6(Protocol): + def __call__(self, a: int, /, *args: Any, k: str, **kwargs: Any) -> None: + pass + + +class Proto7(Protocol): + def __call__(self, a: float, /, b: int, *, k: str, m: str) -> None: + pass + + +def func( + p1: Proto1, + p2: Proto2, + p3: Proto3, + p4: Proto4[...], + p5: Proto5[Any], + p7: Proto7, + c1: Callable[..., None], + c2: Callable[Concatenate[int, ...], None], +): + x1: Callable[..., None] = p1 + x2: Proto1 = c1 + x3: Callable[..., None] = p5 + x4: Proto5[Any] = c1 + x5: Callable[Concatenate[int, ...], None] = p2 + x6: Proto2 = c2 + x7: Callable[..., None] = p3 + x8: Proto3 = c1 + x9: Proto4[...] = p3 + x10: Proto3 = p4 + x11: Proto6 = p7 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol11.py b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol11.py new file mode 100644 index 00000000..86f500ef --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol11.py @@ -0,0 +1,21 @@ +# This sample tests the case where a callback protocol uses a function- +# scoped type variable. + +from typing import Generic, Protocol, TypeVar + +T = TypeVar("T") +T_co = TypeVar("T_co", covariant=True) +U_co = TypeVar("U_co", covariant=True) + + +class A(Generic[T_co, U_co]): ... + + +class BProto(Protocol): + def __call__(self, x: T) -> A[list[T], T]: ... + + +def func1() -> BProto: + def make_a(x: T) -> A[list[T], T]: ... + + return make_a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol2.py b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol2.py new file mode 100644 index 00000000..8a794105 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol2.py @@ -0,0 +1,28 @@ +# This sample tests the case where an object is assigned to a +# callback protocol and the object's "__call__" has unannotated +# or "Any" parameter types. 
+ +from typing import Any, Protocol, TypeVar + +InputT = TypeVar("InputT", contravariant=True) +OutputT = TypeVar("OutputT", covariant=True) + + +class MyCallable(Protocol[InputT, OutputT]): + def __call__(self, inputs: InputT) -> OutputT: ... + + +class Class1: + def __call__(self, inputs) -> int: + return 5 + + +g1: MyCallable[int, int] = Class1() + + +class Class2: + def __call__(self, inputs: Any) -> int: + return 5 + + +g2: MyCallable[int, int] = Class2() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol3.py b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol3.py new file mode 100644 index 00000000..f3e316c9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol3.py @@ -0,0 +1,20 @@ +# This sample tests the case where a callback protocol uses a +# class-level type variable and a combination of Type[T] and T. + +from typing import Protocol, TypeVar, Type + +TE = TypeVar("TE", bound=Exception) + + +class CallbackProtocol1(Protocol[TE]): + def __call__(self, s_exc: Exception, t_exc_class: Type[TE]) -> TE: ... + + +def func1(s_exc: Exception, t_exc_class: Type[TE]) -> TE: ... + + +def func2( + s_exc_class: Exception, + t_exc_class: Type[TE], + mapper: CallbackProtocol1[TE] = func1, +): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol4.py b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol4.py new file mode 100644 index 00000000..ac35da9c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol4.py @@ -0,0 +1,65 @@ +# This sample tests the case where a callback protocol uses +# an overloaded __call__ method. + +from typing import Any, Protocol, overload + + +class P1(Protocol): + @overload + def __call__(self, x: int) -> int: ... + + @overload + def __call__(self, x: str) -> str: ... + + def __call__(self, x: Any) -> Any: ... 
+ + +def func0(x: Any) -> Any: + return x + + +def func1(x: int) -> Any: + return x + + +a0: P1 = func0 + +# This should generate an error +a1: P1 = func1 + + +@overload +def of1(x: int) -> int: ... + + +@overload +def of1(x: str) -> str: ... + + +def of1(x: Any) -> Any: + return x + + +@overload +def of2(x: int) -> complex: ... + + +@overload +def of2(x: str) -> str: ... + + +def of2(x: Any) -> Any: + return x + + +b0: P1 = of1 + +# This should generate an error +b1: P1 = of2 + + +class P2(Protocol): + def __call__(self, *args: int) -> Any: ... + + +a: P2 = lambda *args: map(lambda arg: arg + 0, args) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol5.py b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol5.py new file mode 100644 index 00000000..6bd4c014 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol5.py @@ -0,0 +1,84 @@ +# This sample tests the case where a callback protocol defines additional +# attributes. + +from typing import Any, Callable, Protocol, TypeVar, cast +from typing_extensions import ParamSpec # pyright: ignore[reportMissingModuleSource] + + +P = ParamSpec("P") +R = TypeVar("R", covariant=True) + + +class CallbackProto1(Protocol[P, R]): + __name__: str + + other_attribute: int + + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R: ... + + +def decorator1(f: Callable[P, R]) -> CallbackProto1[P, R]: + converted = cast(CallbackProto1, f) + + print(converted.__name__) + + converted.other_attribute = 1 + + # This should generate an error + converted.other_attribute = "str" + + # This should generate an error + converted.xxx = 3 + + return converted + + +@decorator1 +def func1(x: int) -> str: ... 
+ + +reveal_type(func1, expected_text="CallbackProto1[(x: int), str]") + +func1.other_attribute + +# This should generate an error +func1.other_attribute2 + +func1(x=3) + + +class CallbackProto2(Protocol): + __name__: str + __module__: str + __qualname__: str + __annotations__: dict[str, Any] + __slots__ = () + + def __call__(self) -> None: ... + + +def func2() -> None: ... + + +v: CallbackProto2 = func2 + + +class TestClass7(Protocol): + def __call__(self, x: int, /, y: str) -> Any: ... + + +def func8(x: int, y: str, /) -> Any: + pass + + +def func9(__x: int, __y: str) -> Any: + pass + + +# This should generate an error because "y" is position-only +# in the source but not the dest. +var7: TestClass7 = func8 + +# This should generate an error because "y" is position-only +# in the source but not the dest. +var8: TestClass7 = func9 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol6.py b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol6.py new file mode 100644 index 00000000..5b465aee --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol6.py @@ -0,0 +1,56 @@ +# This sample tests the case where a callback protocol uses a default argument +# but the corresponding callable does not or vice versa. + +from typing import Protocol + + +# Callback with positional parameter with default arg value. +class Callback1(Protocol): + def __call__(self, path: str = ...) -> str: ... + + +# Callback with positional parameter without default arg value. +class Callback2(Protocol): + def __call__(self, path: str) -> str: ... + + +def func1_1(path: str = "") -> str: ... + + +def func1_2(path: str) -> str: ... + + +val1_1: Callback1 = func1_1 + +# This should generate an error. +val1_2: Callback1 = func1_2 + +val2_1: Callback2 = func1_1 + +val2_2: Callback2 = func1_2 + + +# Callback with keyword parameter with default arg value. 
+class Callback3(Protocol): + def __call__(self, *, path: str = ...) -> str: ... + + +# Callback with keyword parameter without default arg value. +class Callback4(Protocol): + def __call__(self, *, path: str) -> str: ... + + +def func3_1(*, path: str = "") -> str: ... + + +def func3_2(*, path: str) -> str: ... + + +val3_1: Callback3 = func3_1 + +# This should generate an error. +val3_2: Callback3 = func3_2 + +val4_1: Callback4 = func3_1 + +val4_2: Callback4 = func3_2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol7.py b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol7.py new file mode 100644 index 00000000..aa14470f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol7.py @@ -0,0 +1,36 @@ +# This sample tests the case where a callback protocol uses position-only +# parameters. + +from typing import Any, Protocol + + +class P0(Protocol): + def __call__(self, x: int, /, y: str) -> Any: ... + + +def test1(x: int, /, y: str, z: None = None) -> Any: ... + + +x: P0 = test1 + + +class P1(Protocol): + def __call__(self, *args: *tuple[int, int]): ... + + +class P2(Protocol): + def __call__(self, x: int, y: int, /): ... + + +class P3(Protocol): + def __call__(self, x: int, /, *args: *tuple[int]): ... + + +class P4(Protocol): + def __call__(self, x: int, y: int = 2, /): ... + + +def test2(p1: P1, p2: P2, p3: P3, p4: P4): + x1: P1 = p2 + x2: P1 = p3 + x3: P1 = p4 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol8.py b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol8.py new file mode 100644 index 00000000..64454042 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol8.py @@ -0,0 +1,14 @@ +# This sample tests the case where a callback protocol contains an *args +# and some keyword parameters. 
+ +from typing import Any, Protocol + + +class P(Protocol): + def __call__(self, *args: Any, kwarg0: Any, kwarg1: Any) -> None: ... + + +def f(*args: Any, kwarg0: Any, kwarg1: Any) -> None: ... + + +p: P = f diff --git a/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol9.py b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol9.py new file mode 100644 index 00000000..d0dab9df --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/callbackProtocol9.py @@ -0,0 +1,30 @@ +# This sample tests that a call through a __call__ handles the case +# where the __call__ is a callable object itself. + + +class A: + def __call__(self, v: int): + print("Received", v) + + +class B: + __call__ = A() + + +class C: + __call__ = B() + + +class D: + __call__ = C() + + +d = D() + +d(1) + +# This should generate an error because of the incompatible argument type. +d("1") + +# This should generate an error because of the wrong argument count. +d(1, 1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/capturedVariable1.py b/python-parser/packages/pyright-internal/src/tests/samples/capturedVariable1.py new file mode 100644 index 00000000..176219e4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/capturedVariable1.py @@ -0,0 +1,146 @@ +# This sample tests the code flow analysis used to determine +# whether it is safe to narrow the type of a captured variable. + +from typing import NoReturn, Optional + + +def get_optional_int() -> Optional[int]: ... + + +v0 = get_optional_int() +if v0 is not None: + # This should generate an error because v0 is + # a global variable and could be reassigned + # outside of this module. + lambda: v0 + 5 + + +def func0(): + v1 = get_optional_int() + if v1 is not None: + lambda: v1 + 5 + + v2 = get_optional_int() + if v2 is not None: + # This should generate an error because v2 + # is reassigned after capture. 
+ lambda: v2 + 5 + v2 = None + + v3 = get_optional_int() + if v3 is not None: + lambda: v3 + 5 + else: + v3 = None + + # This should generate an error because v4 is + # not bound prior to the capture. + lambda: v4 + 5 + v4 = get_optional_int() + + +def func1(v1: Optional[int]): + if v1 is not None: + lambda: v1 + 5 + + +def func2(v1: Optional[int]): + if v1 is not None: + + def func2_inner1(): + x = v1 + 5 + + def func2_inner2(): + lambda: v1 + 5 + + func2_inner2() + + func2_inner1() + + +def func3(): + v1: Optional[int] = 3 + lambda: v1 + 5 + + +def func4(): + v1: Optional[int] = 3 + # This should generate an error because v1 + # is reassigned after capture. + lambda: v1 + 5 + v1 = None + + +def func5(): + v1: Optional[int] = 3 + + while True: + lambda: v1 + 5 + + +def func6(): + v1: Optional[int] = 3 + + while True: + if v1 is not None: + # This should generate an error because + # v1 is reassigned on a code path that is + # reachable from the lambda. + lambda: v1 + 5 + else: + v1 = None + + +def func7(): + while True: + # This should generate an error because v1 is + # potentially unbound prior to capture. + lambda: v1 + 5 + + v1: Optional[int] = 3 + + +def func8() -> NoReturn: ... 
+ + +def func9(x: str | None): + if not x: + func8() + + def foo() -> str: + return x.upper() + + return x.upper() + + +def func10(cond: bool, val: str): + x: str | None = val if cond else None + y: str | None = val if cond else None + + def inner1(): + nonlocal x + x = None + + if x is not None and y is not None: + + def inner2(): + reveal_type(x, expected_text="str | None") + reveal_type(y, expected_text="str") + + +def func11(foo: list[int] | None): + if isinstance(foo, list): + + def inner() -> list[int]: + return [x for x in foo] + + +def func12() -> None: + counter = 0 + + def inner() -> None: + nonlocal counter + reveal_type(counter, expected_text="int") + counter += 1 + + inner() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/capturedVariable2.py b/python-parser/packages/pyright-internal/src/tests/samples/capturedVariable2.py new file mode 100644 index 00000000..3c4089bb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/capturedVariable2.py @@ -0,0 +1,20 @@ +# This sample tests the case where a potentially-unbound variable +# in an outer scope is captured in an inner scope. + +from typing import Literal + + +def func1(subj: Literal[0, 1]) -> None: + v: int | None + + match subj: + case 0: + v = 1 + + def inner1() -> int: + # This should generate an error + return v + + def inner2() -> bool: + # This should generate an error + return v is None diff --git a/python-parser/packages/pyright-internal/src/tests/samples/circular1.py b/python-parser/packages/pyright-internal/src/tests/samples/circular1.py new file mode 100644 index 00000000..d41d0dd9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/circular1.py @@ -0,0 +1,16 @@ +# This sample tests the handling of a circular dependency +# when resolving a type annotation. + + +class Example1: + # This should generate two errors because "str" refers to itself + # and it is a variable, so it's an illegal annotation. 
+ str: str = "" + + int = int + + test: int + + +class Example2: + int: "int" = 4 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/circular2.py b/python-parser/packages/pyright-internal/src/tests/samples/circular2.py new file mode 100644 index 00000000..bada1c51 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/circular2.py @@ -0,0 +1,37 @@ +# This sample tests the handling of circular dependencies between +# class declarations. + +# pyright: strict + +from typing import Generic, TypeVar + +x: "F" + + +class A: + a_attr: object + + +_T = TypeVar("_T", bound=A) + + +class B(Generic[_T]): ... + + +class C(A): + template = B["E"]() + + +class D(A): + pass + + +class E(D): + pass + + +class F(D): + pass + + +E.a_attr diff --git a/python-parser/packages/pyright-internal/src/tests/samples/circularBaseClass.py b/python-parser/packages/pyright-internal/src/tests/samples/circularBaseClass.py new file mode 100644 index 00000000..562b76b0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/circularBaseClass.py @@ -0,0 +1,16 @@ +# This test validates that a circular base class reference +# will be reported and won't crash the analyzer. + + +# This should generate an error because 'Bar' is not bound. +class Bar(Bar): + pass + + +# This should generate an error because 'ClassB' is not bound. +class ClassA(ClassB): + pass + + +class ClassB(ClassA): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classGetItem1.py b/python-parser/packages/pyright-internal/src/tests/samples/classGetItem1.py new file mode 100644 index 00000000..fa2cca68 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classGetItem1.py @@ -0,0 +1,28 @@ +# This sample tests the handling of a class with a custom +# __class_getitem__ class method. 
+ + +from typing import Generic, TypeVar + + +class ClassA: + # This should generate a warning because __class_getitem__ + # is implicitly a classmethod and should use cls rather than + # self. + def __class_getitem__(self, args: tuple[int, ...]) -> None: ... + + +reveal_type(ClassA[10, 63], expected_text="type[ClassA]") + + +_T = TypeVar("_T") +_S = TypeVar("_S") + + +class ClassB(Generic[_T, _S]): + # Even though this class has a __class_getitem__ method, + # it will be assumed to follow normal generic class semantics. + def __class_getitem__(cls, args: tuple[int, ...]) -> None: ... + + +reveal_type(ClassB[int, str], expected_text="type[ClassB[int, str]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classVar1.py b/python-parser/packages/pyright-internal/src/tests/samples/classVar1.py new file mode 100644 index 00000000..a59a93c5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classVar1.py @@ -0,0 +1,41 @@ +# This sample tests the type checker's handling of ClassVar +# as described in PEP 526. + +from typing import Any, ClassVar + + +class MyDescriptor: + def __get__(self, *args: Any) -> str: + return "" + + def __set__(self, obj: Any, value: str): + pass + + +class Starship: + captain: str = "Picard" + damage: int + stats: "ClassVar[dict[str, int]]" = {} + desc: ClassVar[MyDescriptor] = MyDescriptor() + + def __init__(self, damage: int, captain: str | None = None): + self.damage = damage + if captain: + self.captain = captain # Else keep the default + + def hit(self): + Starship.stats["hits"] = Starship.stats.get("hits", 0) + 1 + + +enterprise_d = Starship(3000) +Starship.stats = {} + +a = enterprise_d.stats + +# This should be flagged as an error because stats cannot +# be set via a class instance because it's a ClassVar. +enterprise_d.stats = {} + +# This should not generate an error because "desc" is a +# descriptor instance on the class. 
+enterprise_d.desc = "OK" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classVar2.py b/python-parser/packages/pyright-internal/src/tests/samples/classVar2.py new file mode 100644 index 00000000..9cf89aee --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classVar2.py @@ -0,0 +1,29 @@ +# This sample tests the type checker's handling of ClassVar +# used within a Protocol, as specified in PEP 544. + +import typing as t +from typing import ClassVar as _ClassVar + + +class Proto(t.Protocol): + var1: t.ClassVar[str] + var2: t.ClassVar[str] + var3: _ClassVar = ["hi"] + + +class ProtoImpl: + var1 = "" + + def __init__(self) -> None: + self.var2 = "" + + +# This should generate an error because var2 +# is not a class variable. +a: Proto = ProtoImpl() + + +def func1(x: Proto): + reveal_type(x.var1, expected_text="str") + reveal_type(x.var2, expected_text="str") + reveal_type(x.var3, expected_text="list[str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classVar3.py b/python-parser/packages/pyright-internal/src/tests/samples/classVar3.py new file mode 100644 index 00000000..763245bd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classVar3.py @@ -0,0 +1,56 @@ +# This sample tests the reporting of errors for ClassVar in contexts +# where it is not allowed. + +from typing import Annotated, Any, ClassVar, Final, Generic, TypeAlias, TypeVar +from typing_extensions import Self # pyright: ignore[reportMissingModuleSource] + +# This should generate an error. +x: ClassVar[int] = 3 + +T = TypeVar("T") + +# This should generate an error. +TA1: TypeAlias = ClassVar[str] + + +class Foo(Generic[T]): + x: ClassVar[int] = 3 + + # This should generate an error. + y: Final[ClassVar[int]] = 3 + + # This should generate an error. + z: list[ClassVar[int]] = [] + + # This should generate an error because TypeVars cannot + # be used in a ClassVar. 
+ illegal1: ClassVar[list[T]] + + # This should generate an error because TypeVars cannot + # be used in a ClassVar. + illegal2: ClassVar[T] + + # This should generate an error because Final cannot be + # used with a ClassVar. + illegal3: ClassVar[Final] = 0 + + # This should generate an error because Final cannot be + # used with a ClassVar. A second error is generated because + # Final[int] is not interpreted as a valid type. + illegal4: ClassVar[Final[int]] = 0 + + ok1: ClassVar[list] + ok2: ClassVar[list[Any]] + ok3: Annotated[ClassVar[list[Self]], ""] + + # This should generate an error. + def func1(self, a: ClassVar[int]): + # This should generate an error. + x: ClassVar[str] = "" + + # This should generate an error. + self.xx: ClassVar[str] = "" + + # This should generate an error. + def func2(self) -> ClassVar[int]: + return 3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classVar4.py b/python-parser/packages/pyright-internal/src/tests/samples/classVar4.py new file mode 100644 index 00000000..dcb67fe8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classVar4.py @@ -0,0 +1,39 @@ +# This sample tests that an error when attempting to access +# a non-ClassVar protocol attribute from a protocol class. + +from typing import ClassVar, Protocol + + +class SomeProtocol(Protocol): + x: int = 3 + y: int + z: ClassVar[int] + + @classmethod + def meth1(cls) -> None: + return None + + @staticmethod + def meth2() -> None: + return None + + +class Class(SomeProtocol): + y = 0 + z = 0 + + +def func1() -> None: + # Previously (prior to pyright 1.1.315), this generated an error + # because x was not explicitly declared as a ClassVar. This was changed + # to match mypy, which treats this as a normal class variable -- one that + # can be accessed as both a class an instance variable. + x: int = Class.x + + # Same as above. 
+ y: int = Class.y + + z: int = Class.z + + Class.meth1 + Class.meth2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classVar5.py b/python-parser/packages/pyright-internal/src/tests/samples/classVar5.py new file mode 100644 index 00000000..c255c4d5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classVar5.py @@ -0,0 +1,16 @@ +# This sample tests the access of a ClassVar that uses Self in its +# declaration. + +# It's not clear whether this should be permitted. Arguably, it's not +# type safe, but mypy admits it. This should be clarified in the typing +# spec. + +from typing import ClassVar, Self + + +class Parent: + x: ClassVar[dict[str, Self]] = {} + + @classmethod + def __init_subclass__(cls): + cls.x = {} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classVar6.py b/python-parser/packages/pyright-internal/src/tests/samples/classVar6.py new file mode 100644 index 00000000..8b93db50 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classVar6.py @@ -0,0 +1,20 @@ +# This sample tests that a ClassVar is disallowed when used in a +# NamedTuple or TypedDict class as reflected in the runtime. + +from typing import ClassVar, NamedTuple, TypedDict + + +class NT1(NamedTuple): + # This should generate an error. + x: ClassVar + + # This should generate an error. + y: ClassVar[int] + + +class TD1(TypedDict): + # This should generate an error. + x: ClassVar + + # This should generate an error. + y: ClassVar[int] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classVar7.py b/python-parser/packages/pyright-internal/src/tests/samples/classVar7.py new file mode 100644 index 00000000..09bac19e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classVar7.py @@ -0,0 +1,38 @@ +# This sample tests the handling of a "bare" ClassVar with no +# subscript. 
+ +from typing import ClassVar + + +class A: + a: ClassVar + b: ClassVar = 2 + c: ClassVar + d: ClassVar + + d = 3 + + @classmethod + def m1(cls) -> None: + cls.c = "" + + +reveal_type(A.a, expected_text="Unknown") +A.a = 3 +A.a = "" + +reveal_type(A.b, expected_text="int") +A.b = 2 + +# This should generate an error +A.b = "" + +reveal_type(A.c, expected_text="Unknown") +A.c = 2 +A.c = "" + +reveal_type(A.d, expected_text="int") +A.d = 2 + +# This should generate an error +A.d = "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classes1.py b/python-parser/packages/pyright-internal/src/tests/samples/classes1.py new file mode 100644 index 00000000..b15fdfd3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classes1.py @@ -0,0 +1,88 @@ +# This sample tests the type checker's ability to +# handle various class definition cases. + + +from typing import Any, Self, TypeVar + + +T = TypeVar("T") +T2 = TypeVar("T2", bound=type[Any]) + + +class A: + ... + + +class B: + C: type[A] + + +app = B() + + +class D(app.C): + ... + + +class EMeta(type): + def __new__(mcls, *args: Any, **kwargs: Any): + ... + + +class E(metaclass=EMeta): + pass + + +class F(E): + pass + + +class G[T](E, metaclass=type): + def my_method(self) -> "G": + reveal_type(__class__, expected_text="type[G[Unknown]]") + return __class__() + + +# This should generate an error because only one metaclass is supported. +class H(E, metaclass=type, metaclass=type): + pass + + +class I(E, other_keyword=2): + pass + + +args = [1, 2, 3] +kwargs = {"foo": 5} + + +class J(*args, **kwargs): + pass + + +def func1(x: type) -> object: + class Y(x): + pass + + return Y() + + +# This should generate an error because a TypeVar can't be used as a base class. 
+class K(T): + pass + + +class L(type[T]): + pass + + +def func2(cls: type[T]): + class M(cls): + pass + + +def func3(cls: T2) -> T2: + class M(cls): + pass + + return M diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classes10.py b/python-parser/packages/pyright-internal/src/tests/samples/classes10.py new file mode 100644 index 00000000..7a412549 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classes10.py @@ -0,0 +1,24 @@ +# This sample tests the handling of dynamic base classes. + +from typing import TypeVar + +T_A = TypeVar("T_A", bound="A") + + +class A: + class InnerA: + pass + + +def dynamic_subclass1(cls: type[T_A]): + class SubClass(cls): + class SubInnerClass(cls.InnerA): + pass + + return SubClass + + +def dynamic_subclass2(base: type[A] | None): + class SubClass(base or A): ... + + return SubClass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classes11.py b/python-parser/packages/pyright-internal/src/tests/samples/classes11.py new file mode 100644 index 00000000..ec68153a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classes11.py @@ -0,0 +1,46 @@ +# This sample tests the detection of mutually-incompatible base classes +# in classes that use multiple inheritance. + +from typing import Collection, Iterator, Mapping, Sequence, TypeVar + + +# This should generate an error. +class A(Mapping[str, int], Collection[int]): + def __len__(self) -> int: ... + + def __iter__(self) -> Iterator[str]: ... + + +# This should generate an error. +class B(Mapping[str, int], Sequence[int]): ... + + +# This should generate an error. +class C(Sequence[int], Mapping[str, int]): ... + + +class D(Sequence[float], Mapping[float, int]): ... + + +class E(Sequence[float], Mapping[int, int]): ... + + +# This should generate an error. +class F(Mapping[int, int], Sequence[float]): ... 
+ + +T = TypeVar("T") +S = TypeVar("S") + + +class G(Mapping[T, S], Collection[T]): + def __len__(self) -> int: ... + + def __iter__(self) -> Iterator[T]: ... + + +# This should generate an error. +class H(Mapping[T, S], Collection[S]): + def __len__(self) -> int: ... + + def __iter__(self) -> Iterator[T]: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classes3.py b/python-parser/packages/pyright-internal/src/tests/samples/classes3.py new file mode 100644 index 00000000..452dcfb2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classes3.py @@ -0,0 +1,64 @@ +# This sample tests that various class variables (as defined in +# the type metaclass) are accessible without a type error. + + +from typing import TypeVar + + +class TestClass: + # These should be accessible within the class body + print(__doc__) + print(__module__) + print(__name__) + print(__qualname__) + + +base = TestClass.__base__ +basic_size = TestClass.__basicsize__ +dict = TestClass.__dict__ +dict_offset = TestClass.__dictoffset__ +flags = TestClass.__flags__ +item_size = TestClass.__itemsize__ +module = TestClass.__module__ +mro = TestClass.__mro__ +name = TestClass.__name__ +qualname = TestClass.__qualname__ +text_signature = TestClass.__text_signature__ +subclasses = TestClass.__subclasses__ + + +# This should generate an error. +dummy = TestClass.__dummy__ + +instance = TestClass() + +instance.__doc__ +instance.__module__ + +# This should generate an error. +instance.__name__ + +# This should generate an error, but it doesn't currently. That's because +# the binder manually adds __qualname__ to a class's symbol table to make +# it available within a class body. +instance.__qualname__ + + +class Meta(type): + def method1(self) -> str: + return self.__name__ + + +class NonMeta: + def method1(self) -> str: + # This should generate an error. 
+ return self.__name__ + + +_T = TypeVar("_T") + + +def func1(cls: type[_T]) -> _T: + x1 = cls.__dict__ + x2 = cls.__mro__ + return cls() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classes4.py b/python-parser/packages/pyright-internal/src/tests/samples/classes4.py new file mode 100644 index 00000000..d0bd1abf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classes4.py @@ -0,0 +1,15 @@ +# This sample tests the type checker's ability to handle +# class variables that redefine a symbol in an outer +# scope but are not defined with an explicit class +# variable statement. + + +class ClassA: + bar: str = "hi" + + def __init__(self, val: str) -> None: + self.str = val + + @classmethod + def method1(cls, val: str) -> None: + cls.str = val diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classes5.py b/python-parser/packages/pyright-internal/src/tests/samples/classes5.py new file mode 100644 index 00000000..394d982b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classes5.py @@ -0,0 +1,291 @@ +# This sample tests the reportIncompatibleVariableOverride +# configuration option. + +from typing import Any, ClassVar, Final, Protocol + + +class ParentClass1: + cv1: ClassVar[int] = 0 + cv2: ClassVar[int] = 0 + cv3: ClassVar[int] = 0 + cv4: ClassVar[int] = 0 + + var1: int + var2: str + var3: int | str + var4: int + var5: int + var6: int + var7: list[float] + var8: list[int] + var9: int + + _var1: int + __var1: int + + def __init__(self): + self.var10: int = 0 + self.var11: int = 0 + self.var12 = 0 + + +class Subclass1(ParentClass1): + # This should generate an error. + cv1 = "" + + # This should generate an error if reportIncompatibleVariableOverride + # is enabled. + cv2: int = 3 + + cv3 = 3 + + # This should generate an error if reportIncompatibleVariableOverride + # is enabled because it's overriding a non-final with a final. 
+ cv4: Final = 3 + + # This should generate an error if reportIncompatibleVariableOverride is + # enabled because the type is incompatible. + var1: str + + var2: str + + # This should generate an error if reportIncompatibleVariableOverride is + # enabled because the member is mutable, and is therefore invariant. + var3: int + + # This should generate an error. + var4 = "" + + var5 = 5 + + # This should generate an error if reportIncompatibleVariableOverride is + # enabled because a property cannot override a variable. + @property + def var6(self) -> int: + return 3 + + # This should not generate an error because the inherited (expected) + # type of var7 is List[float], so the expression "[3, 4, 5]" should + # be inferred as List[float] rather than List[int]. + var7 = [3, 4, 5] + + # This should generate an error because floats are not allowed + # in a List[int]. + var8 = [3.3, 45.6, 5.9] + + # This should generate an error if reportIncompatibleVariableOverride is + # enabled. + var9: ClassVar[int] = 3 + + # This should generate an error if reportIncompatibleVariableOverride + # is enabled. + _var1: str + + # This should not generate an error because it's a private name. + __var1: str + + def __init__(self): + # This should generate an error if reportIncompatibleVariableOverride + # is enabled. + self.var10: str = "" + + # This should generate an error. + self.var11 = "" + + self.var12 = "" + + +class ParentClass2: + cv_decl_1: float + cv_decl_2: float + cv_decl_3: float + cv_decl_4: float + cv_decl_5: float + cv_decl_6: float + + cv_infer_1 = 1.0 + cv_infer_2 = 1.0 + cv_infer_3 = 1.0 + cv_infer_4 = 1.0 + cv_infer_5 = 1.0 + cv_infer_6 = 1.0 + + def __init__(self): + self.iv_decl_1: float + self.iv_decl_2: float + self.iv_decl_3: float + + self.iv_infer_1 = 1.0 + self.iv_infer_2 = 1.0 + self.iv_infer_3 = 1.0 + + +class SubclassDeclared2(ParentClass2): + cv_decl_1: float + + # This should generate an error if reportIncompatibleVariableOverride + # is enabled. 
+ cv_decl_2: str + + # This should generate an error if reportIncompatibleVariableOverride + # is enabled. + cv_decl_3: float | None + + cv_infer_1: int + cv_infer_2: str + cv_infer_3: float | None + + def __init__(self): + # This should generate an error if reportIncompatibleVariableOverride + # is enabled because the member is mutable and therefore invariant. + self.cv_decl_4: int + + # This should generate an error if reportIncompatibleVariableOverride + # is enabled. + self.cv_decl_5: str + + # This should generate an error if reportIncompatibleVariableOverride + # is enabled. + self.cv_decl_6: float | None + + self.cv_infer_4: int + self.cv_infer_5: str + self.cv_infer_6: float | None + + self.iv_decl_1: int + + # This should generate an error if reportIncompatibleVariableOverride + # is enabled. + self.iv_decl_2: str + + # This should generate an error if reportIncompatibleVariableOverride + # is enabled. + self.iv_decl_3: float | None + + self.iv_infer_1: int + self.iv_infer_2: str + self.iv_infer_3: float | None + + +class SubclassInferred2(ParentClass2): + cv_decl_1 = 1 + + # This should generate an error. + cv_decl_2 = "" + + # This should generate an error. + cv_decl_3 = None + + cv_infer_1 = 3 + cv_infer_2 = "" + cv_infer_3 = None + + def __init__(self): + self.cv_decl_4 = 1 + + # This should generate an error. + self.cv_decl_5 = "" + + # This should generate an error. + self.cv_decl_6 = None + + self.cv_infer_4 = 1 + self.cv_infer_5 = "" + self.cv_infer_6 = None + + self.iv_decl_1 = 1 + + # This should generate an error. + self.iv_decl_2 = "" + + # This should generate an error. + self.iv_decl_3 = None + + self.iv_infer_1 = 1 + self.iv_infer_2 = "" + self.iv_infer_3 = None + + +class SubclassTuple1(ParentClass2): + cv_decl_1, cv_decl_2, cv_decl_3 = (3, 4.5, 6.0) + + +class SubclassTuple2(ParentClass2): + # This should generate an error. + cv_decl_1, cv_decl_2, cv_decl_3 = (3, 4.5, None) + + +class ConfigBase: ... 
+ + +class ParentClass3(Protocol): + Config1: ClassVar[type[ConfigBase]] + Config2: ClassVar[type[ConfigBase]] + + +class ChildClass3(ParentClass3): + class Config1(ConfigBase): ... + + # This should generate an error if reportIncompatibleVariableOverride + # is enabled. + class Config2: ... + + +class PeerClass1: + test1: str = "a" + test2: str | None = None + + @property + def test3(self) -> int: + return 3 + + test4: int + test5: Any + test6: float + test7: float + + +class PeerClass2: + test1: int = 1 + test2: int | None = None + test3: int + + @property + def test4(self) -> int: + return 3 + + test5: int + test6: Any + test7: int + + +# This should generate 4 errors if reportIncompatibleVariableOverride +# is enabled. +class MultipleInheritance1(PeerClass1, PeerClass2): + pass + + +class ParentClass4(Protocol): + x: ClassVar[int] + y: int + + +class ChildClass4(ParentClass4): + # This should generate 2 errors if reportIncompatibleVariableOverride + # is enabled, one for overriding a classvar with an instance var, the + # other for overriding a non-final with a final. + x: Final = 0 + + # This should generate 1 error if reportIncompatibleVariableOverride + # is enabled because it is overriding a non-final with a final. + y: Final = 0 + + +class ParentClass5: + def __eq__(self, other: object) -> bool: + return True + + +class ParentClass6: + def __hash__(self) -> int: + return 0 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classes6.py b/python-parser/packages/pyright-internal/src/tests/samples/classes6.py new file mode 100644 index 00000000..e3701778 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classes6.py @@ -0,0 +1,16 @@ +# This sample tests proper scopes for nested classes. 
+ + +class A: + a = 5 + + class B: + # This should generate an error + b = a + + class C: + # This should generate an error + c = a + + # This should generate an error + d = b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classes7.py b/python-parser/packages/pyright-internal/src/tests/samples/classes7.py new file mode 100644 index 00000000..50054251 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classes7.py @@ -0,0 +1,19 @@ +# This sample tests for duplicate base class detection. + +from typing import Generic, TypeVar + + +T = TypeVar("T") + + +class BaseClass(Generic[T]): + pass + + +IntBaseClass = BaseClass[float] + + +# This should generate an error because the same +# base class is used twice. +class SubClass(BaseClass[float], IntBaseClass): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classes8.py b/python-parser/packages/pyright-internal/src/tests/samples/classes8.py new file mode 100644 index 00000000..824e6577 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classes8.py @@ -0,0 +1,41 @@ +# This sample tests the case where a generic class declaration refers +# to itself. This case should arguably be considered an error, but +# it does appear within the stdlib typeshed stubs (see os.scandir). + +from os import DirEntry +from types import TracebackType +from typing import AnyStr, ContextManager, Iterator +from typing_extensions import Self # pyright: ignore[reportMissingModuleSource] + + +class _ScandirIterator( + Iterator[DirEntry[AnyStr]], ContextManager["_ScandirIterator[AnyStr]"] +): + def __iter__(self) -> Self: ... + + def __next__(self) -> DirEntry[AnyStr]: ... + + def close(self) -> None: ... + + def __enter__(self) -> Self: ... + + def __exit__( + self, + __exc_type: type[BaseException] | None, + __exc_value: BaseException | None, + __traceback: TracebackType | None, + ) -> bool | None: ... 
+ + +def scandir(path: AnyStr) -> _ScandirIterator[AnyStr]: ... + + +def thing(value: AnyStr): + with scandir(value) as it: + for file in it: + if isinstance(file.name, str): + if file.name.endswith(".xml"): + ... + elif isinstance(file.name, bytes): + if file.name.endswith(b".xml"): + ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/classes9.py b/python-parser/packages/pyright-internal/src/tests/samples/classes9.py new file mode 100644 index 00000000..74795095 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/classes9.py @@ -0,0 +1,69 @@ +# This sample tests the reportIncompatibleVariableOverride +# configuration option in cases involving multiple inheritance +# where the override symbol is type compatible with the overridden. + + +# pyright: reportIncompatibleVariableOverride=true + + +from typing import NotRequired, Required, TypedDict + + +class A: + class M: + pass + + +class B0(A): + class M(A.M): + pass + + +class B1(A): + class M(A.M): + pass + + +class C(B0, B1): + class M(B0.M, B1.M): + pass + + +class D0(B0): + pass + + +class D1(B1): + pass + + +class D(D0, D1, C): + pass + + +class E0(B0): + pass + + +class E1(B1): + pass + + +# This should generate an error because B0.M is not +# type compatible with B1.M. +class E(E0, E1): + pass + + +class TD_A1(TypedDict): + x: Required[int] + y: Required[int] + + +class TD_A2(TypedDict): + x: NotRequired[int] + y: Required[int] + + +# This should generate an error for x but not y. +class TD_A(TD_A1, TD_A2): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/codeFlow1.py b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow1.py new file mode 100644 index 00000000..5b4d79a9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow1.py @@ -0,0 +1,65 @@ +# This sample tests the type analyzer's ability to determine +# execution paths. 
+ + +def func1(length: int) -> int: + n = 0 + while True: + if n >= length: + return n + n += 3 + else: + # This should not be flagged as an error + # because we should never get here. + return "hello" + + # This should not be flagged as an error + # because we should never get here. + return "not_returned" + + +def func2() -> int: + while None: + # This should not be flagged as an error + # because we should never get here. + return "hello" + else: + # This should be an error because the return + # type doesn't match. + return "hello" + + # This should not be an error because we + # should never get here. + return "not_returned" + + +def func3() -> str: + if True: + return "hello" + else: + # This should not be flagged as an error + # because we should never get here. + return 21 + + raise BaseException() + + # This should not be flagged as an error + # because we should never get here. + return 52 + + +def func4(length: int) -> int: + n = 0 + while True: + if n >= length: + return n + n += 3 + break + else: + return "hello" + + # This should be flagged as an error because + # the break in the while True loop means that + # we might get here, and the return type does + # not match. + return "not_returned" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/codeFlow2.py b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow2.py new file mode 100644 index 00000000..e88fa224 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow2.py @@ -0,0 +1,26 @@ +# This sample tests the case where a symbol that exists in +# an outer scope is redeclared within a class but is referenced +# within the class before it is declared. + +import stat + + +class FakeOsModule(object): + # The symbol "stat" is a module even though + # it is redeclared below in this scope as + # a method. 
+ _stat_mode: int = stat.S_IFDIR + + def stat(self): + return None + + +def outer(): + a = 1 + + def inner(): + # This should generate an error + a += 1 + + inner() + return a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/codeFlow3.py b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow3.py new file mode 100644 index 00000000..471ce1a2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow3.py @@ -0,0 +1,8 @@ +# This sample tests the handling of a compound conditional statement +# where the first portion is statically determined to be false. + + +def func1(): + val = "" + if False and val: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/codeFlow4.py b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow4.py new file mode 100644 index 00000000..aca416ff --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow4.py @@ -0,0 +1,142 @@ +# This sample tests the handling of if/elif chains that omit an else +# statement. The "implied else" statement should be assumed never taken if the +# final if/elif test expression evaluates to Never in the negative case. 
+ +from enum import Enum +from typing import Literal + + +def func1(x: int | str): + if isinstance(x, int): + y = 0 + elif isinstance(x, str): + y = 1 + + print(y) + + +def func2(x: Literal[1, 2, 3, 4]): + if x == 1 or x == 2: + y = 0 + elif x == 3 or not x == 3: + y = 1 + + print(y) + + +def func3(x: Literal[1, 2], y: Literal["one", "two"]): + if x == 1 or y != "two": + z = 0 + elif x == 2 or y != "one": + z = 1 + + print(z) + + +class Color(Enum): + RED = 1 + BLUE = 2 + GREEN = 3 + PERIWINKLE = 4 + + +def func4(x: Color): + if x == Color.RED: + return + + if x == Color.GREEN or (x == Color.PERIWINKLE and True): + y = 2 + else: + if x == Color.BLUE: + y = 3 + + print(y) + + +def func5(): + if True: + y = 2 + + print(y) + + +def func6(): + if not None: + y = 2 + + print(y) + + +def func7(color: Color) -> str: + if color == Color.RED or color == Color.BLUE: + return "yes" + elif color == Color.GREEN or color == Color.PERIWINKLE: + return "no" + + +def func8(color: Color) -> bool: + if color == Color.RED or color == Color.BLUE: + return True + elif color == Color.GREEN or color == Color.PERIWINKLE: + return False + + +reveal_type(func8(Color.RED), expected_text="bool") + + +def func9(a: str | int, b: str | int) -> bool: + if isinstance(a, str): + return True + elif isinstance(a, int): + if isinstance(b, str): + return False + elif isinstance(b, int): + return False + + +def func10(foo: list[str]) -> bool: + i = 0 + x: int | None = None + + while i < 5: + foo[i] + + if x is None: + return False + reveal_type(x, expected_text="Never") + i = x + + return True + + +class A: + pass + + +class B(A): + pass + + +def func11(val: A | B): + if not (isinstance(val, A) or isinstance(val, B)): + raise Exception + + +reveal_type(func11(A()), expected_text="None") + + +def func12(val: A | B): + if isinstance(val, A) or isinstance(val, B): + raise Exception + + +reveal_type(func12(A()), expected_text="NoReturn") + + +def func13(val: int | float): + err_msg = "error!" 
+ if isinstance(val, int): + return 1 + elif isinstance(val, float): + return 1.0 + raise ValueError(err_msg) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/codeFlow5.py b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow5.py new file mode 100644 index 00000000..53e6be30 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow5.py @@ -0,0 +1,10 @@ +# This sample verifies that a builtin symbol that is used +# prior to being redefined in the same file isn't flagged +# as an error. + +int_ = int +int = 3 + + +max_ = max +max = lambda a, b: a if a > b else b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/codeFlow6.py b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow6.py new file mode 100644 index 00000000..0e229af7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow6.py @@ -0,0 +1,29 @@ +# This sample tests an interaction between a lambda, a function +# that is declared later in the source, and a function return type +# that uses code flow analysis for evaluation. + +from typing import Any, Callable, overload + + +class C: ... + + +@overload +def func1(v: Callable[[], int]) -> int: ... + + +@overload +def func1(v: Callable[[], list[C]]) -> list[C]: ... + + +def func1(v: Any) -> Any: ... + + +def func2(v: list[C]): ... + + +t = func1(lambda: func3()) +t.append(C()) + + +def func3() -> list[C]: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/codeFlow7.py b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow7.py new file mode 100644 index 00000000..0e229af7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow7.py @@ -0,0 +1,29 @@ +# This sample tests an interaction between a lambda, a function +# that is declared later in the source, and a function return type +# that uses code flow analysis for evaluation. 
+ +from typing import Any, Callable, overload + + +class C: ... + + +@overload +def func1(v: Callable[[], int]) -> int: ... + + +@overload +def func1(v: Callable[[], list[C]]) -> list[C]: ... + + +def func1(v: Any) -> Any: ... + + +def func2(v: list[C]): ... + + +t = func1(lambda: func3()) +t.append(C()) + + +def func3() -> list[C]: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/codeFlow8.py b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow8.py new file mode 100644 index 00000000..13c51aa0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow8.py @@ -0,0 +1,27 @@ +# This sample tests the case where an assignment expression +# is used within a looping construct such that the assigned +# value is initially unknown. + +# pyright: strict + +from typing import Iterator + +for _ in ["1"]: + old_lines: Iterator[str] = iter(["2", "3"]) + + try: + while True: + line = next(old_lines) + count = 1 + if count: + while True: + if not (line := next(old_lines)): + pass + elif line.startswith(""): + print(line.removeprefix("")) + else: + old_lines = iter([line] + list(old_lines)) + break + + except StopIteration: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/codeFlow9.py b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow9.py new file mode 100644 index 00000000..9ba49fae --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/codeFlow9.py @@ -0,0 +1,19 @@ +# This sample tests the case where a comprehension-scoped variable +# shadows a variable of the same name in an outer scope and is +# narrowed within the comprehension. 
+ +def func1(m: list[str | int]) -> None: + print( + [ + reveal_type(value, expected_text="str") + for value in m + if isinstance(value, str) + ] + ) + + reveal_type(value, expected_text="() -> None") + value() + + +def value() -> None: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/comparison1.py b/python-parser/packages/pyright-internal/src/tests/samples/comparison1.py new file mode 100644 index 00000000..20bc755d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/comparison1.py @@ -0,0 +1,77 @@ +# This sample tests the check for non-overlapping types compared +# with equals comparison. + +from typing import Literal, TypeVar + + +OS = Literal["Linux", "Darwin", "Windows"] + + +def func1(os: OS, val: Literal[1, "linux"]): + if os == "Linux": + return True + + # This should generate an error because this expression will always + # evaluate to False. + if os == "darwin": + return False + + # This should generate an error because this expression will always + # evaluate to True. + if os != val: + return False + + # This should generate an error because this expression will always + # evaluate to False. + if val == 2: + return False + + if val == 1: + return True + + +class ClassA: ... + + +class ClassB: ... + + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2", bound=ClassB) + + +def func2(a: ClassA, b: ClassB, c: _T1, d: _T2, e: ClassA | ClassB) -> None | _T1 | _T2: + # This should generate an error because there is no overlap in types. + if a == b: + return + + # This should generate an error because there is no overlap in types. + if a != b: + return + + if a != c: + return + + # This should generate an error because there is no overlap in types. + if a != d: + return + + if a == e: + return + + if b == e: + return + + +def func3(base: type) -> None: + if base == ClassA: + ... + + if ClassA == base: + ... + + +def func4(val: str | None): + # This should generate an error because there is no overlap in types. 
+ if val == 42: + ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/comparison2.py b/python-parser/packages/pyright-internal/src/tests/samples/comparison2.py new file mode 100644 index 00000000..638eb91d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/comparison2.py @@ -0,0 +1,158 @@ +# This sample tests the reportUnnecessaryComparison diagnostic check +# when applied to functions that appear within a conditional expression. + + +from enum import Enum +from typing import Any, Callable, Coroutine, Protocol +from dataclasses import dataclass + + +def cond() -> bool: ... + + +# This should generate a diagnostic when reportUnnecessaryComparison is enabled. +if cond: + pass + +# This should generate a diagnostic when reportUnnecessaryComparison is enabled. +if 0 or cond: + pass + +# This should generate a diagnostic when reportUnnecessaryComparison is enabled. +if 1 and cond: + pass + +if cond(): + pass +# This should generate a diagnostic when reportUnnecessaryComparison is enabled. +elif cond: + pass + + +# This should generate a diagnostic when reportUnnecessaryComparison is enabled. +def func1(): + while cond: + pass + + +# This should generate a diagnostic when reportUnnecessaryComparison is enabled. +a = [x for x in range(20) if cond] + +# This should generate a diagnostic when reportUnnecessaryComparison is enabled. +a = 1 if cond else 2 + +b = "1" == "1" == "1" + +c = "" +# This should generate a diagnostic when reportUnnecessaryComparison is enabled. +if c is None: + pass + +# This should generate a diagnostic when reportUnnecessaryComparison is enabled. +if c is not None: + pass + + +def func2(d: str | Any): + if d is None: + pass + + +@dataclass +class DC1: + bar: str + + +def func3(x: DC1): + # This should generate an error if reportUnnecessaryComparison is enabled. + if x == 42: + ... 
+ + +async def func4() -> bool: + return True + + +async def func5() -> None: + # This should generate an error if reportUnnecessaryComparison is enabled. + if func4(): + pass + + +def func6() -> Coroutine[Any, Any, int] | None: ... + + +def func7(): + coro = func6() + if coro: + pass + + +class A: ... + + +def func8(x: A): + # This should generate an error if reportUnnecessaryComparison is enabled. + if x is True: + pass + + # This should generate an error if reportUnnecessaryComparison is enabled. + if x is False: + pass + + # This should generate an error if reportUnnecessaryComparison is enabled. + if x is not True: + pass + + # This should generate an error if reportUnnecessaryComparison is enabled. + if x is not False: + pass + + +def func9(x: object, y: type[object]): + if x is y: + pass + + +def func10(x: object, y: type[object]): + if x is not y: + pass + + +class SupportsBool(Protocol): + def __bool__(self) -> Any: ... + + +def func11(a: A, b: SupportsBool, c: object): + # This should generate an error if reportUnnecessaryComparison is enabled. + if a is None: + pass + + # This should generate an error if reportUnnecessaryComparison is enabled. + if a is not None: + pass + + if b is None: + pass + + if c is None: + pass + + +def func12(a: object, b: Callable[..., int]) -> bool: + return a is b + + +class IntVal(int, Enum): + one = 1 + two = 2 + three = 3 + + +def func13(x: IntVal): + if x == 1: + pass + + # This should generate an error if reportUnnecessaryComparison is enabled. + if x == 4: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/complex1.py b/python-parser/packages/pyright-internal/src/tests/samples/complex1.py new file mode 100644 index 00000000..91d8762b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/complex1.py @@ -0,0 +1,16 @@ +# This sample tests the type checker's handling of imaginary +# and complex numbers. 
+ +a = 3.2j + +b = a + 4 + +c = 1.2 * a + + +def requires_complex(val: complex): ... + + +requires_complex(a) +requires_complex(b) +requires_complex(c) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/comprehension1.py b/python-parser/packages/pyright-internal/src/tests/samples/comprehension1.py new file mode 100644 index 00000000..a2b6601c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/comprehension1.py @@ -0,0 +1,65 @@ +# This sample tests type checking for list comprehensions. + +from typing import Any, Generator, Iterable, Literal + +a = [1, 2, 3, 4] + + +def func1() -> Generator[int, None, None]: + b = (elem for elem in a) + return b + + +def func2() -> list[int]: + c = [elem for elem in a] + return c + + +def func3() -> list[str]: + c = [elem for elem in a] + + # This should generate an error because + # c is a List[int], which doesn't match + # the declared return type. + return c + +def func4(): + c = [(i for i in [1, 2, 3])] + reveal_type(c, expected_text="list[Generator[int, None, None]]") + + +def generate(): + for i in range(2): + yield i + + +# Verify that generate returns a Generator. +s = generate() +s.close() + +# Verify that literals are handled correctly. +FooOrBar = Literal["foo", "bar"] + + +def to_list(values: Iterable[FooOrBar]) -> list[FooOrBar]: + a = [value for value in values] + reveal_type(a, expected_text="list[str]") + + b: list[FooOrBar] = [value for value in values] + + c = list(value for value in values) + reveal_type(c, expected_text="list[str]") + + d: list[FooOrBar] = list(value for value in values) + + e = (value for value in values) + reveal_type(e, expected_text="Generator[str, None, None]") + + f: Generator[FooOrBar, Any, Any] = (value for value in values) + + return [value for value in values] + +x = 3 +# This should generate a syntax error. 
+[x for in range(3)] + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/comprehension10.py b/python-parser/packages/pyright-internal/src/tests/samples/comprehension10.py new file mode 100644 index 00000000..dc0c69ec --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/comprehension10.py @@ -0,0 +1,34 @@ +# This sample tests type checking for set comprehensions. + +from typing import Generator + +a = [1, 2, 3, 4] + + +def func1() -> Generator[int, None, None]: + b = (elem for elem in a) + return b + + +def func2() -> set[int]: + c = {elem for elem in a} + return c + + +def func3() -> set[str]: + c = {elem for elem in a} + + # This should generate an error because + # c is a Set[int], which doesn't match + # the declared return type. + return c + + +def generate(): + for i in range(2): + yield i + + +# Verify that generate returns a Generator. +s = generate() +s.close() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/comprehension11.py b/python-parser/packages/pyright-internal/src/tests/samples/comprehension11.py new file mode 100644 index 00000000..34717f07 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/comprehension11.py @@ -0,0 +1,18 @@ +# This sample tests the case where bidirectional type inference is used +# in a nested manner with list comprehensions. 
+ +# pyright: strict + +from itertools import chain + +times = [ + (hour, minute, meridian) + for hour, minute, meridian in chain.from_iterable( + chain.from_iterable( + ((hour, minute, meridian) for minute in range(0, 60, 15)) + for hour in range(12) + ) + for meridian in ("am", "pm") + ) +] +reveal_type(times, expected_text="list[tuple[int, int, str]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/comprehension2.py b/python-parser/packages/pyright-internal/src/tests/samples/comprehension2.py new file mode 100644 index 00000000..c6db64f9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/comprehension2.py @@ -0,0 +1,17 @@ +# This sample tests interleaved for and if clauses in a list comprehension. + +# pyright: strict, reportUnnecessaryComparison=false + +from typing import Union, List, Tuple + +m1: List[Union[Tuple[int, int], None]] = [] + +a = [ + y + z + x[0] + for x in m1 + if x is not None + for y in x + if y is not None + for z in [1, None, 3] + if z is not None +] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/comprehension3.py b/python-parser/packages/pyright-internal/src/tests/samples/comprehension3.py new file mode 100644 index 00000000..c265343c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/comprehension3.py @@ -0,0 +1,12 @@ +# This sample tests type inference for list comprehensions, +# including list target expressions in for statements. 
+ + +def foo() -> list[str]: + pairs = [s.split(":") if ":" in s else [s, "null"] for s in ["foo:bar", "baz"]] + foo = [p[0] for p in pairs] + if foo: + return foo + + bar = [a for [a, b] in pairs] + return bar diff --git a/python-parser/packages/pyright-internal/src/tests/samples/comprehension4.py b/python-parser/packages/pyright-internal/src/tests/samples/comprehension4.py new file mode 100644 index 00000000..013b9bb6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/comprehension4.py @@ -0,0 +1,15 @@ +# This sample tests scoping rules for variables that are declared within +# a list comprehension statement. + +a: str = "hello" + +# The statement "len(a)" should not generate an +# error because "a" is not yet bound to the local +# variable at the time it is executed. Instead, it +# has the str type of the "a" in the outer scope. +b = [a for a in [len(a)]] + +# This assignment should succeed because "a" at this +# point should have the type of "a" in the outer scope, +# not the int type from the list comprehension. +c: str = a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/comprehension5.py b/python-parser/packages/pyright-internal/src/tests/samples/comprehension5.py new file mode 100644 index 00000000..295002ad --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/comprehension5.py @@ -0,0 +1,20 @@ +# This sample tests the type inference of list comprehensions +# that result in AsyncGenerator types. 
+ +import asyncio + + +async def do_iter(): + for i in range(10): + yield i + await asyncio.sleep(0.1) + + +async def doit(): + as_list = (i + 1 async for i in do_iter()) + + async for i in as_list: + print(i) + + +asyncio.run(doit()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/comprehension6.py b/python-parser/packages/pyright-internal/src/tests/samples/comprehension6.py new file mode 100644 index 00000000..78fad2b9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/comprehension6.py @@ -0,0 +1,14 @@ +# This sample tests parsing of list comprehensions with +# various syntax errors. + +# This should generate an error. +(*i for i in []) + +# This should generate an error. +[*i for i in []] + +# This should generate an error. +{*d for d in []} + +# This should generate an error. +{**d for d in []} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/comprehension7.py b/python-parser/packages/pyright-internal/src/tests/samples/comprehension7.py new file mode 100644 index 00000000..fe0543ac --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/comprehension7.py @@ -0,0 +1,15 @@ +# This sample validates that list comprehensions within a class +# body do not reference class-scoped variables within the +# comprehension unless they are within the initial iterator expression. + +outer_var = [1, 2] + + +class A: + var1 = [1, 2] + var2 = {x for x in var1} + + # This should generate an error. 
+ var3 = {var1[0] for x in var1} + + var4 = {outer_var[0] for x in outer_var} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/comprehension8.py b/python-parser/packages/pyright-internal/src/tests/samples/comprehension8.py new file mode 100644 index 00000000..ce71dd1a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/comprehension8.py @@ -0,0 +1,15 @@ +# This sample tests the evaluation of a list comprehension where +# there are interdependencies between some of the variables. + +# pyright: strict + + +class ClassA: + input: str + output: str + + +def func1(a: ClassA, x: str): + a.output = x.join( + stripped for line in a.input.splitlines() if (stripped := line.strip()) + ) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/comprehension9.py b/python-parser/packages/pyright-internal/src/tests/samples/comprehension9.py new file mode 100644 index 00000000..e75fe8ae --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/comprehension9.py @@ -0,0 +1,22 @@ +# This sample tests the case where a comprehension requires bidirectional +# type inference for correct analysis. + +# pyright: strict + +from typing import TypedDict + + +class X(TypedDict): + x: str + + +xs: list[X] = [] +xs.extend({"x": c} for c in "abc") + + +def func1(data: dict[str, int]): + sum(data.get(k, 0) for k in "") + + +def func2(data: dict[str, int]): + sum([data.get(k, 0) for k in ""]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/conditional1.py b/python-parser/packages/pyright-internal/src/tests/samples/conditional1.py new file mode 100644 index 00000000..49d17695 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/conditional1.py @@ -0,0 +1,93 @@ +# This sample tests that the check that validates the operand type for +# a conditional statement. The operand must be of type bool or a type +# that has a __bool__ method that returns a bool. 
+ +from typing import NoReturn, TypeVar + + +class ReturnsBool: + def __bool__(self) -> bool: + return True + + +class ReturnsNonBool: + def __bool__(self) -> NoReturn: + raise TypeError("Not a bool") + + +def func1(val: ReturnsNonBool): + # This should generate an error. + if val: + pass + + # This should generate an error. + a = not val + + b = val or 1 + # This should generate an error. + if b: + pass + + c = 1 and val + # This should generate an error. + if c: + pass + + # This should generate an error. + y = 1 if val else 2 + + # This should generate an error. + while val: + break + + # This should generate an error. + z = [1 for i in range(10) if val] + + +TVal = TypeVar("TVal", bound=ReturnsNonBool) + + +def func2(val: TVal | ReturnsBool) -> TVal | ReturnsBool: + # This should generate an error. + if val: + pass + + # This should generate an error. + a = not val + + b = val or 1 + # This should generate an error. + if b: + pass + + c = 1 and val + # This should generate an error. + if c: + pass + + # This should generate an error. + y = 1 if val else 2 + + # This should generate an error. + while val: + break + + # This should generate an error. + z = [1 for i in range(10) if val] + + return val + + +class Meta(type): + def __bool__(self) -> int: + return 1 + + +class MetaDerived(metaclass=Meta): + pass + + +def func3(val: type[MetaDerived]): + # This should generate an error. + if val: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constant1.py b/python-parser/packages/pyright-internal/src/tests/samples/constant1.py new file mode 100644 index 00000000..2b74484f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constant1.py @@ -0,0 +1,40 @@ +# This sample tests the "reportConstantRedefinition" feature. + +ALL_CAPS_123_ = 234 +# This should generate an error if the feature is enabled. 
+ALL_CAPS_123_ = 233 + +_ = 234 +# This should not be considered a constant +_ = 234 + + +a = True + + +def foo(): + LOCALVAR = 3 + + if a: + # This should generate an error if the feature is enabled. + LOCALVAR = 23 + + +from typing import TYPE_CHECKING + +# This should generate an error if the feature is enabled. +TYPE_CHECKING = True + + +class Foo(object): + CONST_VAR = 3 + + # This should generate an error if the feature is enabled. + CONST_VAR = 4 + + def __init__(self): + self.HELLO = "3" + + def foo(self): + # This should generate an error if the feature is enabled. + self.HELLO = "324" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constants1.py b/python-parser/packages/pyright-internal/src/tests/samples/constants1.py new file mode 100644 index 00000000..440b1e5e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constants1.py @@ -0,0 +1,64 @@ +# This sample tests that the type checker flags certain values +# that cannot be deleted or assigned to. + +# This should generate an error +True = 3 + +# This should generate an error +False = 4 + +# This should generate an error +None = True + +# This should generate an error +__debug__ = 4 + +# This should generate an error +del True + +# This should generate an error +del None + +# This should generate an error +-3 = 2 + +# This should generate an error +[4] = [2] + +# This should generate an error +[True] = [3] + +# This should generate an error +(True) = 3 + +# This should generate an error +del -3 + +# This should generate an error +3 + 4 = 2 + +# This should generate an error +del 3 + 4 + +# This should generate an error +del -(4) + +# This should generate an error +del __debug__ + +# This should generate an error +del {} + +# This should generate an error +... = 3 + +# This should generate an error +del ... + +# This should generate an error +(...) = 3 + +# This should generate an error +del ... 
+ + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar1.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar1.py new file mode 100644 index 00000000..bd21a03b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar1.py @@ -0,0 +1,46 @@ +# This sample tests constraint solving constrained TypeVars. + +from typing import Generic, TypeVar + +S = TypeVar("S", str, bytes) + + +def constrained(first: S, second: S) -> S: + return first + + +# This should generate an error because the two arguments +# cannot satisfy the 'str' or 'bytes' constraint. +result = constrained("a", b"abc") + +U = TypeVar("U", float, str) + + +class ClassA(Generic[U]): + def generic_func1(self, a: U, b: str = "", **kwargs: U) -> U: + return a + + +a1 = ClassA[str]() +r1 = a1.generic_func1("hi") +reveal_type(r1, expected_text="str") +r2 = a1.generic_func1("hi", test="hi") +reveal_type(r2, expected_text="str") + +# This should generate an error. +r3 = a1.generic_func1("hi", test=3) +reveal_type(r3, expected_text="str") + +# This should generate an error. +r4 = a1.generic_func1("hi", 3) +reveal_type(r4, expected_text="str") + +a2: ClassA[int] + +# This should generate an error. +a3: ClassA[Never] + +ClassAAlias = ClassA[U] + +# This should generate an error. +a4: ClassAAlias[Never] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar10.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar10.py new file mode 100644 index 00000000..9b069c20 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar10.py @@ -0,0 +1,30 @@ +# This sample tests the handling of constrained type variables +# that include unions. 
+ +from typing import TypeVar + +T1 = TypeVar("T1", int, str) +T2 = TypeVar("T2", int, str, int | str) +T3 = TypeVar("T3", int, str, int | str | list[int]) + + +def func1(x: T1) -> T1: + return x + + +def func2(x: T2) -> T2: + return x + + +def func3(x: T3) -> T3: + return x + + +def func4(y: int | str): + # This should generate an error because T1 doesn't + # include a union constraint. + func1(y) + + func2(y) + + func3(y) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar11.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar11.py new file mode 100644 index 00000000..b8571757 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar11.py @@ -0,0 +1,29 @@ +# This sample tests for proper handling of constrained or bound TypeVars. + +from typing import Generic, TypeVar + + +class IntSubclass1(int): + pass + + +_T1 = TypeVar("_T1", int, IntSubclass1) + + +def add1(value: _T1) -> _T1: + reveal_type(value + 1, expected_text="int*") + + # This should generate an error + return value + 5 + + +class IntSubclass2(int): + def __add__(self, value: object) -> "IntSubclass2": ... + + +_T2 = TypeVar("_T2", int, IntSubclass2) + + +def add2(value: _T2) -> _T2: + reveal_type(value + 1, expected_text="int* | IntSubclass2*") + return value + 5 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar12.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar12.py new file mode 100644 index 00000000..63653972 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar12.py @@ -0,0 +1,13 @@ +# This sample tests the case where a constrained TypeVar is assigned +# to another constrained TypeVar or a union that contains a constrained +# TypeVar. + +from os import PathLike +from typing import AnyStr + + +def func1(path: AnyStr | PathLike[AnyStr]) -> AnyStr: ... 
+ + +def func2(value: AnyStr) -> AnyStr: + return func1(value) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar13.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar13.py new file mode 100644 index 00000000..e7362b9e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar13.py @@ -0,0 +1,126 @@ +# This sample tests the "constrained TypeVar narrowing for return types" +# feature. When a declared return type of a function contains a constrained +# TypeVar and the return statement is found on a path that tests a variable +# that is typed as that TypeVar, we know that the code path is taken only +# in the case where constraint is satisfied. + +from typing import AnyStr, Generic, ParamSpec, TypeVar +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + +_T1 = TypeVar("_T1", str, int) +_T2 = TypeVar("_T2") + + +class A: ... + + +class B: ... + + +class C: ... + + +_T3 = TypeVar("_T3", A, B, C) + +_P = ParamSpec("_P") +_Ts = TypeVarTuple("_Ts") + + +def func1(val1: _T1) -> _T1: + if isinstance(val1, str): + return "" + return 0 + + +def func2(val1: _T1) -> list[_T1]: + if isinstance(val1, str): + return [""] + return [0] + + +class Class1(Generic[_T1, _T2, _T3, _P, Unpack[_Ts]]): + def meth1( + self, val1: _T1, val2: _T2, val3: _T3, cond: bool + ) -> list[_T1] | list[_T2] | list[_T3]: + if cond: + # This should generate an error. + return [0] + + if cond or 3 > 2: + if isinstance(val1, str): + # This should generate an error. + return [0] + else: + return [0] + + if cond or 3 > 2: + if isinstance(val3, B): + return [B()] + else: + # This should generate an error. 
+ return [C()] + + if cond or 3 > 2: + if not isinstance(val3, B) and not isinstance(val3, C): + return [A()] + + return [val1] + + def meth2(self, val1: _T1) -> _T1: + val2 = val1 + + while True: + if isinstance(val2, str): + return "hi" + + val2 = val2 = val1 + + if isinstance(val2, int): + return 0 + + def meth3(self, val1: _T1, val2: _T3) -> _T1: + if isinstance(val2, A): + # This should generate an error. + return 1 + + if isinstance(val2, B): + if isinstance(val1, str): + return "" + + if isinstance(val1, int): + if isinstance(val2, B): + # This should generate an error. + return "" + + raise BaseException() + + +def func3(s: AnyStr, y: AnyStr | None = None) -> AnyStr: + if isinstance(s, str): + if y is None: + pass + return "" + else: + raise NotImplementedError + + +def func4(t: _T3) -> _T3: + match t: + case A(): + return A() + case B(): + return B() + case C(): + return C() + + +def func5(t: _T3) -> _T3: + if isinstance(t, A): + return A() + elif isinstance(t, B): + return B() + elif isinstance(t, C): + return C() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar14.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar14.py new file mode 100644 index 00000000..53c74bc5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar14.py @@ -0,0 +1,29 @@ +# This sample verifies that a generic class parameterized with a +# constrained TypeVar properly translates an explicit type argument +# into the correct constrained type. + +from typing import TypeVar, Generic + + +class A: ... + + +class B: ... + + +class A2(A): ... 
+ + +T = TypeVar("T", A, B) + + +class F(Generic[T]): + def __init__(self, thing: T) -> None: + self.thing = thing + + +f2 = F[A2](A2()) + +reveal_type(F[A2], expected_text="type[F[A]]") +reveal_type(f2, expected_text="F[A]") +reveal_type(f2.thing, expected_text="A") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar15.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar15.py new file mode 100644 index 00000000..be2691a1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar15.py @@ -0,0 +1,31 @@ +# This sample tests the case where a constrained TypeVar is used +# as an argument for a constructor or function call. + +from typing import AnyStr, Iterable, TypeVar, Generic +from dataclasses import dataclass + + +class ClassA: ... + + +_T = TypeVar("_T", int | float, ClassA) + + +@dataclass +class Data(Generic[_T]): + data: _T + + +def func1(a: Data[_T]) -> _T: + if isinstance(a.data, (int, float)): + value = int(a.data / 3) + reveal_type(value, expected_text="int*") + else: + value = a.data + reveal_type(value, expected_text="ClassA*") + + return value + + +def func2(val: AnyStr, objs: Iterable[AnyStr]) -> AnyStr: + return val.join(objs) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar16.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar16.py new file mode 100644 index 00000000..b4913a07 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar16.py @@ -0,0 +1,29 @@ +# This sample tests the case where a TypeVar requires multiple passes +# to resolve and the first pass it is assigned to a max widened +# type that is a constrained TypeVar. 
+ +# The code below is a simplified version of this code: +# import os +# result = filter(os.path.exists, ["hello", "world"]) + +from typing import Any, Callable, Iterable, Iterator, TypeVar +from functools import reduce + +_T1 = TypeVar("_T1") +AnyStr = TypeVar("AnyStr", str, bytes) + + +def exists2(path: AnyStr) -> bool: ... + + +def filter2(f: Callable[[_T1], Any], i: Iterable[_T1]) -> Iterator[_T1]: ... + + +result = filter2(exists2, ["hello", "world"]) + + +_T2 = TypeVar("_T2", set, frozenset) + + +def merge_sets(sets: Iterable[_T2]) -> _T2: + return reduce(lambda x, y: x.union(y), sets) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar17.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar17.py new file mode 100644 index 00000000..002f583b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar17.py @@ -0,0 +1,48 @@ +# This sample tests the case where a constrained TypeVar +# is used in a protocol with a contravariant TypeVar. + +# This example is a stand-alone sample based on the following code. +# with open('read', 'rb') as fr, open('write', 'wb') as fw: +# shutil.copyfileobj(fr, fw) + +from typing import Any, AnyStr, Optional, Protocol, TypeVar, Union + + +class Array: ... + + +class MMap: ... + + +# Note that this union contains types that are not compatible +# with the type "bytes". +ReadableBuffer = Union[bytes, bytearray, memoryview, Array, MMap] + +_T_contra = TypeVar("_T_contra", contravariant=True) +_T_co = TypeVar("_T_co", covariant=True) + + +class BufferedWriter: + def write(self, __buffer: ReadableBuffer) -> int: ... + + +class SupportsWrite(Protocol[_T_contra]): + def write(self, __s: _T_contra) -> Any: ... + + +class SupportsRead(Protocol[_T_co]): + def read(self, __length: int = ...) -> _T_co: ... + + +class BufferedReader: + def read(self, __size: Optional[int] = ...) -> bytes: ... 
+ + +def copyfileobj( + fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = ... +) -> AnyStr: ... + + +def f(fr: BufferedReader, fw: BufferedWriter): + x = copyfileobj(fr, fw) + reveal_type(x, expected_text="bytes") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar18.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar18.py new file mode 100644 index 00000000..457df901 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar18.py @@ -0,0 +1,36 @@ +# This sample tests the case where an inferred method return type is +# a union with subtypes that are conditioned on different constraints of +# a constrained TypeVar. When the method is bound, one or more of these +# subtypes should be eliminated. + +from typing import Generic, TypeVar, Awaitable + +T1 = TypeVar("T1") + + +class Async: + def fn(self, returnable: T1) -> Awaitable[T1]: ... + + +class Sync: + def fn(self, returnable: T1) -> T1: ... + + +T2 = TypeVar("T2", Async, Sync) + + +class A(Generic[T2]): + def __init__(self, client: T2): + self._client = client + + def method1(self): + return self._client.fn(7) + + +a1 = A(Async()) +r1 = a1.method1() +reveal_type(r1, expected_text="Awaitable[int]*") + +a2 = A(Sync()) +r2 = a2.method1() +reveal_type(r2, expected_text="int*") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar19.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar19.py new file mode 100644 index 00000000..fa9bc4aa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar19.py @@ -0,0 +1,18 @@ +# This tests the handling of a constrained TypeVar with literal types +# in the constraints. 
+ +from typing import TypeVar, Literal, Generic + +T = TypeVar("T", Literal[True], Literal[False]) + + +class A(Generic[T]): + def __init__(self, null: T = False) -> None: + pass + + +A(null=bool()) # Type error + +reveal_type(A(null=False), expected_text="A[Literal[False]]") +reveal_type(A(), expected_text="A[Literal[False]]") +reveal_type(A(null=True), expected_text="A[Literal[True]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar2.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar2.py new file mode 100644 index 00000000..40c35e03 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar2.py @@ -0,0 +1,103 @@ +# This sample tests constraint solving for constrained type vars. + +import pathlib +import shutil +from typing import AnyStr, Type, TypeVar, Union + + +class Foo: + pass + + +class Bar(Foo): + pass + + +T1 = TypeVar("T1", Foo, str) +T2 = TypeVar("T2", bound=Foo) + + +def test1(x: T1) -> T1: + return x + + +def test2(x: T2) -> T2: + return x + + +# This should generate an error because test1(Bar()) +# should evaluate to type Foo, not Bar. +aa1: Bar = test1(Bar()) + +aa2: Foo = test1(Bar()) + +bb1: Bar = test2(Bar()) + +bb2: Foo = test2(Bar()) + + +# The call to rmtree should not generate any errors. +data_dir = pathlib.Path("/tmp") +archive_path = data_dir / "hello" +shutil.rmtree(archive_path) + + +def func1(a: AnyStr, b: AnyStr) -> None: ... + + +def func2(a: Union[str, bytes], b: Union[str, bytes]): + # This should generate two errors + func1(a, b) + + +class A: ... + + +class B: ... + + +class C: ... + + +class D: ... + + +T3 = TypeVar("T3", A, B, Union[C, D]) + + +def do_something(value: T3) -> T3: ... 
+ + +def func10(value: Union[C, D]): + value1 = do_something(value) + + +def func11(value: D): + value1 = do_something(value) + + +def func12(value: Union[A, B]): + # This should generate an error because A and B + # map to different constraints. + value1 = do_something(value) + + +def func13(value: Union[A, D]): + # This should generate an error because A and D + # map to different constraints. + value1 = do_something(value) + + +T4 = TypeVar("T4", A, B, Union[C, D]) + + +def func14(cls: Type[T4]) -> T4: + instance1 = cls() + reveal_type(instance1, expected_text="T4@func14") # Unknown + return instance1 + + +def func15(cls: Union[Type[Union[A, B]], Type[Union[C, D]]]) -> Union[A, B, C, D]: + instance2 = cls() + reveal_type(instance2, expected_text="A | B | C | D") + return instance2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar20.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar20.py new file mode 100644 index 00000000..c7e09635 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar20.py @@ -0,0 +1,25 @@ +# This sample tests the case involving nested calls that each use +# constrained TypeVars but one is a subset of the other. + +from os import PathLike +from typing import Any, AnyStr, Literal, LiteralString, TypeVar, overload + +AnyOrLiteralStr = TypeVar("AnyOrLiteralStr", str, bytes, LiteralString) + + +def abspath(path: PathLike[AnyStr] | AnyStr) -> AnyStr: ... + + +@overload +def dirname(p: PathLike[AnyStr]) -> AnyStr: ... + + +@overload +def dirname(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... + + +def dirname(p: Any) -> Any: ... 
+ + +def func1(refpath: Literal["-"]): + reveal_type(dirname(abspath(refpath)), expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar3.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar3.py new file mode 100644 index 00000000..1f05f125 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar3.py @@ -0,0 +1,23 @@ +# This sample tests the type checker's expansion of constrained +# TypeVars within a union type. + +from typing import Union, AnyStr, Sequence + + +def do_the_thing(param: Union[Sequence[AnyStr], AnyStr]) -> None: + if isinstance(param, str): + print(f"str: {param}") + return + + if isinstance(param, bytes): + print(f"bytes: {param!r}") + return + + print(f"list:") + for itm in param: + print(f" -> {itm}") + + +do_the_thing("a") +do_the_thing(b"b") +do_the_thing(["a", "b"]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar4.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar4.py new file mode 100644 index 00000000..05f2ecd2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar4.py @@ -0,0 +1,37 @@ +# This sample tests type narrowing of generic constrained types. 
+ +from typing import AnyStr, Generic, Sequence, TypeVar + + +Command = AnyStr | Sequence[AnyStr] + + +def func1(cmd: Command) -> list[str]: + if isinstance(cmd, bytes): + return [str(cmd, "utf-8")] + if isinstance(cmd, str): + return [cmd] + + ret: list[str] = [] + for itm in cmd: + if isinstance(itm, str): + ret.append(itm) + else: + ret.append(str(itm, "utf-8")) + return ret + + +T = TypeVar("T", str, int, float, bool) + + +class Item(Generic[T]): + value: T | None + + def __init__(self, source: T | None) -> None: + self.value = source + + def read(self) -> T | None: + if self.value is None: + raise RuntimeError(f"Item is required!") + + return self.value diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar5.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar5.py new file mode 100644 index 00000000..ef1a3cc2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar5.py @@ -0,0 +1,54 @@ +# This sample tests the handling of generic type aliases +# with constrained types. + +from typing import Callable, Generic, TypeVar + +T = TypeVar("T", str, bool, None) + + +class MyData(Generic[T]): + def __init__(self, val: T): + self.val = val + + +Op = Callable[[MyData[T]], T] + + +def f_generic1(val: T, op: Op[T]) -> T: + obj = MyData[T](val) + return op(obj) + + +def f_generic2(val: T, op: Op[T]) -> T: + obj = MyData(val) + return op(obj) + + +def f_bool(val: bool) -> bool: + op: Op[bool] = lambda od: od.val + r = f_generic1(val, op) + return r + + +def f_generic3(val: T) -> T: + return val + + +def f_union(val: bool | str) -> None: + # This should generate an error because a + # union cannot be assigned to a constrained + # type variable. 
+ f_generic3(val) + + if isinstance(val, bool): + f_generic3(val) + else: + f_generic3(val) + + +def func1(v: T, t: type[T]): + print(t) + + +def func2(v: T, t: type[T]): + func1(v, t) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar6.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar6.py new file mode 100644 index 00000000..1a0421ad --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar6.py @@ -0,0 +1,35 @@ +# This sample tests the handling of constrained types +# when one of the constraints is a narrower version +# of another. The order of the constraints as they appear +# within the TypeVar definition shouldn't matter. + +from typing import TypeVar + +_T1 = TypeVar("_T1", float, str) + + +def add1(a: _T1, b: _T1) -> _T1: + return a + b + + +a1 = add1(3, 5.5) +reveal_type(a1, expected_text="float") +b1 = add1(3.3, 5) +reveal_type(b1, expected_text="float") +c1 = add1("3", "5") +reveal_type(c1, expected_text="str") + + +_T2 = TypeVar("_T2", float, int) + + +def add2(a: _T2, b: _T2) -> _T2: + return a + b + + +a2 = add2(3, 5.5) +reveal_type(a2, expected_text="float") +b2 = add2(3.3, 5) +reveal_type(b2, expected_text="float") +c2 = add2(3, 5) +reveal_type(c2, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar7.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar7.py new file mode 100644 index 00000000..3537785c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar7.py @@ -0,0 +1,63 @@ +# This sample tests the assignment of constrained TypeVars to a union +# that allows for all of the types in the constraint. + +from typing import Iterator, Optional, Sequence, TypeVar, Union + + +def func0(a: Union[int, float]): ... 
+ + +_T1 = TypeVar("_T1", int, float) + + +def func1(a: _T1, b: _T1): + return func0(a) + + +_T2 = TypeVar("_T2", int, float, complex) + + +def func2(a: _T2, b: _T2): + # This should generate an error. + return func0(a) + + +_T3 = TypeVar("_T3", int, float) + + +def func3(xs: Sequence[Optional[_T3]]) -> Iterator[_T3]: + return (x for x in xs if x is not None) + + +def func4(xs: Sequence[Optional[_T3]]) -> Iterator[_T3]: + return func3(xs) + + +def func5(xs: Sequence[Optional[_T2]]) -> Iterator[_T2]: + # This should generate an error. + return func3(xs) + + +class A: ... + + +class B(A): ... + + +_T4 = TypeVar("_T4", A, B) +_T5 = TypeVar("_T5", B, A) + + +def func6(t: type[_T4]) -> type[_T4]: + return t + + +def func7(t: type[_T5]) -> type[_T5]: + return t + + +val6 = func6(B) +val7 = func7(B) + +reveal_type(val6, expected_text="type[B]") +reveal_type(val7, expected_text="type[B]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar8.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar8.py new file mode 100644 index 00000000..0eb0d43e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar8.py @@ -0,0 +1,32 @@ +# This sample tests the handling of a constrained TypeVar used with +# a Type[T] annotation. 
+ +from typing import TypeVar, Any + + +class A: + def __init__(self, x: Any) -> None: + pass + + def f(self) -> None: + pass + + +T = TypeVar("T", str, int, A) + + +def factory(desired_type: type[T]) -> T: + return desired_type(1) + + +factory(str) +reveal_type(factory(str), expected_text="str") + +factory(int) +reveal_type(factory(int), expected_text="int") + +factory(A).f() +reveal_type(factory(A), expected_text="A") + +# This should generate an error +factory(float) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar9.py b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar9.py new file mode 100644 index 00000000..3f92ae7f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constrainedTypeVar9.py @@ -0,0 +1,15 @@ +# This sample tests the case where a constrained type variable +# includes a Literal[False] and Literal[True]. + +from typing import TypeVar, Generic, Literal + +XOrY = TypeVar("XOrY", Literal[True], Literal[False]) + + +class A(Generic[XOrY]): + pass + + +class B(Generic[XOrY]): + def __init__(self, a: A[XOrY]): + self.a = a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor1.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor1.py new file mode 100644 index 00000000..0fda3e90 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor1.py @@ -0,0 +1,41 @@ +# This sample tests the handling of a constructor for a generic +# class where the type arguments need to be inferred using +# bidirectional type inference and the expected type is a +# union of other types. 
+ +from typing import Generic, TypeVar, Final + +T = TypeVar("T") +S = TypeVar("S") + + +class A(Generic[T]): + def __init__(self, value: T) -> None: + self._value: Final = value + + +class B(Generic[S]): + def __init__(self, value: S) -> None: + self._value: Final = value + + +Result = A[T] | B[S] + + +def return_ok_none() -> Result[int | None, Exception]: + return A(None) + + +def return_ok_one() -> Result[int | None, Exception]: + return A(1) + + +class C(Generic[T]): + pass + + +c1: C[bool] | None = C() +reveal_type(c1, expected_type="C[bool]") + +c2: A[int] | C[int] = C() +reveal_type(c2, expected_type="C[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor10.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor10.py new file mode 100644 index 00000000..e3df1abc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor10.py @@ -0,0 +1,29 @@ +# This sample tests the handling of a __new__ method that +# is part of a generic class but uses its own type parameters. + + +from typing import Generic, Iterable, Iterator, TypeVar + + +_T_co = TypeVar("_T_co", covariant=True) +_T = TypeVar("_T") + + +class A(Iterator[_T_co]): + def __new__(cls, __iterable: Iterable[_T]) -> "A[tuple[_T, _T]]": ... + + def __next__(self) -> _T_co: ... + + +def func1(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T, _T]]: + for (a, _), (b, c) in A(A(iterable)): + yield a, b, c + + +class B(Generic[_T_co]): + def __new__(cls, __iter1: Iterable[_T]) -> "B[_T]": ... 
+ + +def func2(p1: list[dict]): + v1 = B(p1) + reveal_type(v1, expected_text="B[dict[Unknown, Unknown]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor11.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor11.py new file mode 100644 index 00000000..eb9a4060 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor11.py @@ -0,0 +1,32 @@ +# This sample tests the case where a subclass of Dict uses +# a dictionary literal as an argument to the constructor call. + +from collections import Counter, defaultdict +from typing import Callable, Generic, Mapping, Optional, TypeVar + +c1 = Counter({0, 1}) +reveal_type(c1, expected_text="Counter[int]") + +for i in range(256): + c1 = Counter({0: c1[1]}) + reveal_type(c1, expected_text="Counter[int]") + +reveal_type(c1, expected_text="Counter[int]") + + +K = TypeVar("K") +V = TypeVar("V") + +MyFuncType = Callable[[Callable[[K], V]], V] + + +class MyFunc(Generic[K, V]): + def __init__(self, g: MyFuncType[K, V]) -> None: + self.g = g + + +MyFuncMapping = Mapping[K, Optional[MyFunc[K, V]]] + +my_func_defaultdict: MyFuncMapping[str, int] = defaultdict( + lambda: None, {"x": MyFunc(lambda f: f("a"))} +) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor12.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor12.py new file mode 100644 index 00000000..cab755cc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor12.py @@ -0,0 +1,20 @@ +# This sample tests the case where a specialized class is constructed +# from within the class implementation and uses a class TypeVar in +# the specialization. 
+ +from typing import Generic, TypeVar + +T = TypeVar("T") + + +class ClassA(Generic[T]): + def return_from_variable(self) -> "ClassA[T]": + value = ClassA[T]() + reveal_type(value, expected_text="ClassA[T@ClassA]") + return value + + +x = ClassA[int]() +v1 = x.return_from_variable() + +reveal_type(v1, expected_text="ClassA[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor13.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor13.py new file mode 100644 index 00000000..85fbd8c1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor13.py @@ -0,0 +1,17 @@ +# This sample tests the case where a generic type Type[T] is +# instantiated. + +from typing import Generic, TypeVar + +T = TypeVar("T") + + +class Foo(Generic[T]): + def __init__(self) -> None: + val = self.method1() + reveal_type(val(), expected_text="T@Foo") + + # This should generate an error. + val(1) + + def method1(self) -> type[T]: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor14.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor14.py new file mode 100644 index 00000000..84b3e910 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor14.py @@ -0,0 +1,25 @@ +# This sample tests the case where a constructor call is evaluated +# using bidirectional type inference in the case where the expected +# type is `Self`. + +# pyright: strict + +from __future__ import annotations + +from typing import Any, Generic, Protocol, TypeVar +from typing_extensions import Self # pyright: ignore[reportMissingModuleSource] + +T_contra = TypeVar("T_contra", contravariant=True) +ThingT = TypeVar("ThingT", bound="Thing[Any]") + + +class Callback(Protocol[T_contra]): + def __call__(self, message: T_contra, /) -> Any: ... 
+ + +class Thing(Generic[T_contra]): + def __init__(self, callback: Callback[T_contra]) -> None: + self._callback: Callback[T_contra] = callback + + def copy(self) -> Self: + return type(self)(self._callback) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor15.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor15.py new file mode 100644 index 00000000..08ac7ed2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor15.py @@ -0,0 +1,40 @@ +# This sample tests the case where a constructor for a generic +# class is called with an inference context (i.e. using bidirectional +# type inference) and literals are used as type arguments. + +from typing import Any, Generic, Literal, Self, TypeVar + +_N = TypeVar("_N") +_M = TypeVar("_M") + + +class A(Generic[_M, _N]): + def __new__(cls, m: _M, n: _N) -> "A[_M, _N]": ... + + +a: A[Literal[3], Literal[4]] = A(3, 4) + + +class B(Generic[_M, _N]): + def __new__(cls, m: _M, n: _N) -> "B[_M, _N]": ... + + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + + +b: B[Literal[3], Literal[4]] = B(3, 4) + + +class C(Generic[_M, _N]): + def __new__(cls, m: _M, n: _N) -> "C[_M, _N]": ... + + def __init__(self, m: _M, n: _N) -> None: ... + + +c: C[Literal[3], Literal[4]] = C(3, 4) + + +class D(Generic[_M, _N]): + def __new__(cls, m: _M, n: _N) -> Self: ... + + +d: D[Literal[3], Literal[4]] = D(3, 4) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor16.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor16.py new file mode 100644 index 00000000..32e24df3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor16.py @@ -0,0 +1,31 @@ +# This sample tests the case where a class defines a __new__ method +# that returns a class other than the owning class. 
+ + +class A: + def __init__(self) -> None: + pass + + +class B(A): + def __new__(cls) -> A: + return A() + + def __init__(self, a: int) -> None: + pass + + +class C(B): + def __init__(self, a: int) -> None: + pass + + +B() + +# This should generate an error because B.__init__ is never called. +B(1) + +C() + +# This should generate an error because C.__init__ is never called. +C(1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor17.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor17.py new file mode 100644 index 00000000..4970521c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor17.py @@ -0,0 +1,50 @@ +# This sample tests the case where a generic class constructor doesn't +# allow for the solving of a class-scoped type variable. In this case, +# the type argument should be Unknown. + +from typing import Generic, TypeVar + +T = TypeVar("T") + + +class A(Generic[T]): + def __new__(cls, *args, **kwargs): + return super().__new__(cls, *args, **kwargs) + + +class B(Generic[T]): + def __init__(self): ... + + +class C(Generic[T]): + def __new__(cls, *args, **kwargs): + return super().__new__(cls, *args, **kwargs) + + def __init__(self): ... + + +class D(Generic[T]): + def __new__(cls, *args, **kwargs): + return super().__new__(cls, *args, **kwargs) + + def __init__(self, a: T): ... 
+ + +class E(Generic[T]): + pass + + +a = A(1) +reveal_type(a, expected_text="A[Unknown]") + +b = B() +reveal_type(b, expected_text="B[Unknown]") + +c = C() +reveal_type(c, expected_text="C[Unknown]") + +d = D(1) +reveal_type(d, expected_text="D[int]") + +e = E() +reveal_type(e, expected_text="E[Unknown]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor18.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor18.py new file mode 100644 index 00000000..d45c95f4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor18.py @@ -0,0 +1,44 @@ +# This sample tests the case where a generic class contains a +# __new__ method that returns a Self type and an __init__ method +# that provides specialization for the Self type. When evaluating +# the __new__ method, we don't want to specialize the Self type +# too early. + +from dataclasses import dataclass +from typing import Generic, Self, TypeVar, overload + + +_ = isinstance(dict(a=0), dict) + + +class ClassA: ... + + +_T1 = TypeVar("_T1", bound=ClassA | str, covariant=True) +_T2 = TypeVar("_T2") + + +class ClassB(Generic[_T1]): + def __new__(cls, *args, **kwargs) -> Self: + return super().__new__(cls, *args, **kwargs) + + @overload + def __init__(self, arg: _T1) -> None: ... + + @overload + def __init__(self: "ClassB[str]", arg: int) -> None: ... 
+ + def __init__(self, arg: int | ClassA | str) -> None: + pass + + +b1: ClassB[ClassA | str] = ClassB[str](32) + + +@dataclass +class ClassC(Generic[_T2]): + value: _T2 + + +c1: ClassC[int] | ClassC[str] = ClassC("hi") +c2: ClassC[int] | ClassC[str] = ClassC(1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor19.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor19.py new file mode 100644 index 00000000..7a93b989 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor19.py @@ -0,0 +1,15 @@ +# This sample handles the case where a class doesn't define its own +# constructor and relies on the `object` class constructor, which accepts +# no parameters. + + +class A: + pass + + +a1 = A() + +# This should generate an error +a2 = A(1) + +a3 = A(*[], **{}) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor2.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor2.py new file mode 100644 index 00000000..b4a8bb09 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor2.py @@ -0,0 +1,184 @@ +# This sample tests bidirectional type inference when the RHS +# is a call to a constructor. 
+ +from typing import ( + Any, + Generic, + Iterable, + Literal, + Mapping, + Protocol, + Self, + TypeVar, +) + + +_T1 = TypeVar("_T1") +_T1_contra = TypeVar("_T1_contra", contravariant=True) +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") + + +class Animal(Generic[_T1, _T2]): + pass + + +class Bear(Animal[_T3, int]): + def __init__(self, p1: _T3 | None = None): + pass + + +class Donkey(Animal[int, int], Generic[_T3]): + pass + + +class Flyer(Protocol[_T1_contra]): + def get_wingspan(self, p1: _T1_contra) -> float: + raise NotImplemented + + +class CaveDweller(Generic[_T1]): + pass + + +class Bat(Animal[int, int], CaveDweller[int]): + def get_wingspan(self, p1: int) -> float: + raise NotImplemented + + +def s1(): + b: Bear[str] = Bear() + a: Animal[str, int] = b + reveal_type(a, expected_text="Bear[str]") + + +def s2(): + a: Animal[str, int] = Bear() + reveal_type(a, expected_text="Bear[str]") + + +def s3(): + a: Animal[str, int] = Bear() + reveal_type(a, expected_text="Bear[str]") + + +def s4(): + a: Bear[Any] = Bear[int]() + reveal_type(a, expected_text="Bear[Any]") + + +def s5(): + a: Animal[Any, Any] = Bear[int]() + reveal_type(a, expected_text="Bear[int]") + + +def s6(): + a: Bat | Bear[str] = Bear() + reveal_type(a, expected_text="Bear[str]") + + +def s7(p: Bat | Bear[int]): + a: Animal[int, int] = p + reveal_type(a, expected_text="Bat | Bear[int]") + + +def s8(): + a: Animal[int, int] = Bear[int]() + reveal_type(a, expected_text="Bear[int]") + + +def s9(p: dict[str, str]): + a: dict[str, Any] = p + reveal_type(a, expected_text="dict[str, Any]") + + +def s10(p: list[str]): + a: Iterable[Any] = p + reveal_type(a, expected_text="list[str]") + b: Iterable[str] = [] + reveal_type(b, expected_text="list[str]") + c: Iterable[str] = list() + reveal_type(c, expected_text="list[str]") + + +def s11(): + a: Animal[Any, Any] = Donkey[int]() + reveal_type(a, expected_text="Donkey[int]") + + +def s12(p: Bear[_T1], b: _T1): + a: Animal[Any, int] = p + reveal_type(a, 
expected_text="Bear[_T1@s12]") + + +def s13(p: Bat): + a: Flyer[int] = p + reveal_type(a, expected_text="Bat") + + +def s14(p: Bat): + a: CaveDweller[int] = p + reveal_type(a, expected_text="Bat") + + +def s15(): + a = Bear(1) + reveal_type(a, expected_text="Bear[int]") + b = Bear[int](1) + reveal_type(b, expected_text="Bear[int]") + c = Bear[float](1) + reveal_type(c, expected_text="Bear[float]") + d = Bear[str | int](1) + reveal_type(d, expected_text="Bear[str | int]") + + +def s16(): + a: Any = Bear(1) + reveal_type(a, expected_text="Any") + + +def s17(): + a1: Iterable[object] = [2, 3, 4] + reveal_type(a1, expected_text="list[int]") + + a2: list[object] = [2, 3, 4] + reveal_type(a2, expected_text="list[object]") + + b1: Iterable[float] = [2, 3, 4] + reveal_type(b1, expected_text="list[int]") + + b2: list[float] = [2, 3, 4] + reveal_type(b2, expected_text="list[float]") + + c1: Iterable[Literal["A", "B", "C"]] = ["A", "B"] + reveal_type(c1, expected_text="list[Literal['A', 'B']]") + + c2: list[Literal["A", "B", "C"]] = ["A", "B"] + reveal_type(c2, expected_text="list[Literal['A', 'B', 'C']]") + + +def s18(): + a1: Mapping[object, object] = {"a": 3, "b": 5.6} + reveal_type(a1, expected_text="dict[object, int | float]") + + a2: dict[object, object] = {"a": 3, "b": 5.6} + reveal_type(a2, expected_text="dict[object, object]") + + b1: Mapping[str, float] = {"a": 3, "b": 5} + reveal_type(b1, expected_text="dict[str, int]") + + b2: dict[str, float] = {"a": 3, "b": 5} + reveal_type(b2, expected_text="dict[str, float]") + + c1: Mapping[Literal["A", "B"], Literal[3, 4]] = {"A": 3} + reveal_type(c1, expected_text="dict[Literal['A', 'B'], Literal[3]]") + + c2: dict[Literal["A", "B"], Literal[3, 4]] = {"A": 3} + reveal_type(c2, expected_text="dict[Literal['A', 'B'], Literal[3, 4]]") + + +class Plant(Generic[_T1]): + def __new__(cls, o: _T1) -> Self: ... 
+ + +plant: Plant[float] = Plant(0) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor20.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor20.py new file mode 100644 index 00000000..c1d5f65c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor20.py @@ -0,0 +1,42 @@ +# This sample tests the specialization of constructors based on +# the expected type specified through either an assignment to +# an annotated variable or by a call where the parameter is +# annotated. + +from typing import Generic, TypeVar + +T = TypeVar("T", int, str) + + +class Adder(Generic[T]): + def add(self, a: T, b: T) -> T: + return a + b + + +int_adder: Adder[int] = Adder() +int_adder.add(1, 2) + +# This should be an error because "adder" +# should be of type Adder[int]. +int_adder.add("1", 2) + + +def requires_str_adder(str_adder: Adder[str]): + return str_adder + + +a = requires_str_adder(Adder()) +print(a.add("1", "2")) + +# This should be an error because the result +# of the call should be an Adder[str] +print(a.add(1, "2")) + + +generic_adder = Adder() +generic_adder.add(1, 2) +generic_adder.add("a", "b") + +# Since the type has an Unknown type argument, +# the following should not generate an error. +generic_adder.add(1, "b") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor21.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor21.py new file mode 100644 index 00000000..ebe6449d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor21.py @@ -0,0 +1,33 @@ +# This sample tests the instantiation of classes via a constructor +# when the type of the class is a TypeVar. + +from typing import TypeVar + + +class ClassA: + def __init__(self, a: int, b: str): + pass + + +T_A = TypeVar("T_A", bound=ClassA) + + +def func1(cls: type[T_A]) -> T_A: + # This should generate an error. 
+ y = cls() + + x = cls(1, "") + reveal_type(x, expected_text="T_A@func1") + return x + + +_T = TypeVar("_T") + + +def func2(cls: type[_T]) -> _T: + # This should generate an error. + y = cls(1, "") + + x = cls() + reveal_type(x, expected_text="_T@func2") + return x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor22.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor22.py new file mode 100644 index 00000000..ea4b433a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor22.py @@ -0,0 +1,42 @@ +# This sample tests the case where a method within a generic class +# constructs an instance of the same type using a type variable +# within that class. + +from typing import Generic, Protocol, Tuple, TypeVar + + +T = TypeVar("T", covariant=True) + + +class A(Protocol[T]): + def a(self) -> "A[Tuple[T]]": ... + + def b(self) -> "A[Tuple[T]]": ... + + def c(self) -> "T": ... + + +class B(Generic[T]): + def __init__(self, t: T): + self._t = t + + def a(self) -> A[Tuple[T]]: + t = (self._t,) + y = B(t) + v = f(y.b()) + reveal_type(v, expected_text="tuple[T@B]") + return y + + def b(self) -> A[Tuple[T]]: + x = (self._t,) + reveal_type(x, expected_text="tuple[T@B]") + y = B(x) + reveal_type(y, expected_text="B[tuple[T@B]]") + return y + + def c(self) -> T: + return self._t + + +def f(a: A[Tuple[T]]) -> T: + return a.c()[0] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor23.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor23.py new file mode 100644 index 00000000..768d7577 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor23.py @@ -0,0 +1,31 @@ +# This sample tests the case where a generic class has a constructor that +# supplies the type arguments via a callable which is itself generic. 
+ + +from typing import Callable, Generic, Sequence, TypeVar + +T = TypeVar("T") +V = TypeVar("V", bound=object) +V_co = TypeVar("V_co", covariant=True) +U = TypeVar("U", bound=object) + + +class Result(Generic[V]): + pass + + +ParseFn = Callable[[Sequence[T], int, int], Result[V]] + + +class Parser(Generic[T, V_co]): + def fmap1(self, fn: Callable[[V_co], U]) -> "Parser[T, U]": + def fmap2(stream: Sequence[T], pos: int, bt: int) -> Result[U]: + raise NotImplementedError() + + reveal_type(FnParser(fmap2), expected_text="FnParser[T@Parser, U@fmap1]") + return FnParser(fmap2) + + +class FnParser(Parser[T, V_co]): + def __init__(self, fn: ParseFn[T, V_co]): + self._fn = fn diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor24.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor24.py new file mode 100644 index 00000000..e544189c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor24.py @@ -0,0 +1,88 @@ +# This sample tests the case where a generic class uses a default argument +# for a generic type parameter in its constructor. + +from typing import Callable, Generic, List, TypeVar + + +T = TypeVar("T") +U = TypeVar("U") + + +class Box(Generic[T]): + def __init__(self, value: T = 123): + self.value = value + + +x1 = Box[str | int]() +x2 = Box[float]() +x3 = Box[str]("hi") + +# This should generate an error because "hi" isn't compatible +# with float. +x4 = Box[float]("hi") + + +# This should generate an error because the default value of 123 +# isn't compatible with str. +y = Box[str]() + + +class Container(Generic[T]): + def __init__(self, value: T = None): + self.value = value + + @classmethod + def create(cls) -> "Container[T]": + # This should generate an error if strictParameterNoneValue + # is true because Container[T] being constructed is different + # from the current Container[T]. 
+ return Container[T]() + + def on_next(self, value: T): + pass + + +class IntContainer(Container[int]): + def increment(self): + # This should generate an error if strictParameterNoneValue is false. + self.value += 1 + + +class ContainerList(Generic[U]): + def __init__(self) -> None: + self.containers: List[Container[U]] = [] + + def method1(self, a: U): + Container[U](a) + Container() + Container(123) + + # This should generate an error if strictParameterNoneValue is true. + Container[U]() + + # This should generate an error if strictParameterNoneValue is true. + Container[U](None) + + def method2(self): + Container[U].create() + + +def func1(obv: Container[T], default_value: T = None) -> None: + # This should generate an error if strictParameterNoneValue is false. + obv.on_next(default_value) + + +class A: ... + + +T_A = TypeVar("T_A", bound=A) + + +class B(Generic[T]): + def __init__(self, c: Callable[[], T]): ... + + +def func2(cls: type[T_A] = A) -> Callable[[], T_A]: ... + + +B(func2()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor25.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor25.py new file mode 100644 index 00000000..b44c5641 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor25.py @@ -0,0 +1,45 @@ +# This sample tests the handling of a generic class whose implementation +# allocates an instance of itself by invoking a constructor and passing +# an argument that is a generic type. 
+ +# pyright: strict + +from typing import Generic, TypeVar + +T = TypeVar("T") + + +class A(Generic[T]): + def __init__(self, x: T): + self.x = x + + def method1(self) -> "A[T]": + x = self.x + reveal_type(x, expected_text="T@A") + t = (x,) + reveal_type(t, expected_text="tuple[T@A]") + a = A(t[0]) + reveal_type(a, expected_text="A[T@A]") + return a + + +class B(Generic[T]): + def __init__(self, thing: T): + pass + + @staticmethod + def method1(val: T) -> "B[T]": + # This should generate an error. + return B(0) + + +class C(Generic[T]): + def method1(self) -> "C[T]": + return C[T]() + + +c1 = C[int]() +reveal_type(c1, expected_text="C[int]") + +c2 = c1.method1() +reveal_type(c2, expected_text="C[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor26.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor26.py new file mode 100644 index 00000000..fd32878f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor26.py @@ -0,0 +1,58 @@ +# This sample tests the case where a generic class with multiple +# type parameters invokes its own constructor and uses its own +# type parameters to specialize the constructed type. + +from typing import Generic, TypeVar + +T = TypeVar("T") +U = TypeVar("U") + + +class Test1(Generic[T, U]): + def __init__(self, t: T, u: U): + pass + + def test1(self, ts: list[T], us: list[U]) -> None: + # This should generate an error. + x1: Test1[U, T] = Test1(us, ts) + + x2: Test1[list[U], list[T]] = Test1(us, ts) + + +class Test2(Generic[T, U]): + def __init__(self): + pass + + def test2(self) -> None: + x1: Test2[U, T] + x2: Test2[T, T] + x3: Test2[T, U] + + x1 = Test2[U, T]() + # This should generate an error. + x2 = Test2[U, T]() + # This should generate an error. + x3 = Test2[U, T]() + + # This should generate an error. + x1 = Test2[T, T]() + x2 = Test2[T, T]() + # This should generate an error. + x3 = Test2[T, T]() + + # This should generate an error. 
+ x1 = Test2[T, U]() + # This should generate an error. + x2 = Test2[T, U]() + x3 = Test2[T, U]() + + +class Test3(Generic[T, U]): + def __init__(self, ts: list[T], us: list[U]): + pass + + def test3(self, ts: list[T], us: list[U]) -> None: + x1: Test3[U, T] = Test3(us, ts) + + # This should generate two errors. + x2: Test3[list[U], list[T]] = Test3(us, ts) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor27.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor27.py new file mode 100644 index 00000000..b34f9d27 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor27.py @@ -0,0 +1,35 @@ +# This sample tests a case where a __new__ method in a generic class +# returns an instance of the class but with different type arguments +# than expected. This is arguably an error case, but pyright needs +# to handle it gracefully. + +from __future__ import annotations +from typing import Generic, TypeVar + + +T = TypeVar("T", contravariant=True) +S = TypeVar("S", contravariant=True) + + +class ClassA(Generic[T]): ... + + +class ClassB(Generic[S, T], ClassA[T]): ... + + +class ClassC(ClassB[S, T]): + def __new__(cls, subcon: ClassA[S]) -> ClassC[S, list[S]]: ... + + +class ClassD(ClassB[S, T]): + def __new__(cls, subcon: ClassA[S]) -> ClassD[S, list[S]]: ... 
+ + +c = ClassA[int]() + +intermediate = ClassC(c) +v1 = ClassD(intermediate) +reveal_type(v1, expected_text="ClassD[list[int], list[list[int]]]") + +v2 = ClassD(ClassC(c)) +reveal_type(v2, expected_text="ClassD[list[int], list[list[int]]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor28.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor28.py new file mode 100644 index 00000000..d8b86986 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor28.py @@ -0,0 +1,69 @@ +# This sample tests the case where a constructor for a class is invoked +# multiple times as separate arguments for a call. + +# pyright: strict + +from __future__ import annotations +from typing import Any, Callable, Generic, Iterable, TypeVar, overload + +T = TypeVar("T") +S = TypeVar("S", covariant=True) + + +class ParentA: ... + + +class ChildA(ParentA, Generic[T]): + def __init__(self, a: T) -> None: ... + + +def func1(arg1: ParentA, arg2: ParentA): ... + + +func1(ChildA(1), ChildA(2)) + + +class ParentB(Generic[T]): ... + + +class ChildB(ParentB[T]): + def __init__(self, a: T) -> None: ... + + +def func2(arg1: ParentB[T], arg2: ParentB[T]) -> T: ... + + +# This should generate an error. +func2(ChildB(""), ChildB(1.2)) + + +class ClassC(Generic[S]): + def __new__(cls, item: S) -> "ClassC[S]": ... + + def __call__(self, obj: Any) -> S: ... + + +def func3(func1: Callable[..., T], func2: Callable[..., T]) -> T: ... + + +x2 = func3(ClassC(""), ClassC(1)) +reveal_type(x2, expected_text="str | int") + + +class ClassD(Generic[S]): + @overload + def __new__(cls, item: S, /) -> ClassD[S]: ... + + @overload + def __new__(cls, item: S, __item2: S, /) -> ClassD[tuple[S, S]]: ... + + def __new__(cls, *items: Any) -> Any: ... + + def __call__(self, obj: Any) -> Any: ... 
+ + +func3(ClassD(""), ClassD("")) + + +def func4(a: Iterable[tuple[str, ...]]): + zip(a, zip(*a)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor29.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor29.py new file mode 100644 index 00000000..758518c3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor29.py @@ -0,0 +1,18 @@ +# This sample tests the case where a constructor for a generic class is +# called with a bidirectional type inference context that includes a union +# of multiple types that could apply. + +from typing import Mapping + + +d1: dict[str, str] | dict[int, int] = dict() +reveal_type(d1, expected_text="dict[int, int]") + +d2: dict[int, int] | dict[str, str] = dict() +reveal_type(d2, expected_text="dict[int, int]") + +d3: Mapping[int, int] | Mapping[str, str] | int | float = dict() +reveal_type(d3, expected_text="dict[int, int]") + +d4: dict[str, str] | dict[int, int] = dict(a="hi") +reveal_type(d4, expected_text="dict[str, str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor3.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor3.py new file mode 100644 index 00000000..ce5b2f9e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor3.py @@ -0,0 +1,18 @@ +# This sample tests inference of constructed types (in this case, +# for "chain") when the expected type is provided by another +# constructor (in this case "list"). 
+ +# pyright: strict + +from concurrent.futures import Future, wait +from itertools import chain +from typing import Any, Dict + +my_list = list(chain([0])) +reveal_type(my_list, expected_text="list[int]") + + +pending: Dict[Future[Any], Any] = {} +done_tasks = wait(list(pending.keys())).done + +reveal_type(done_tasks, expected_text="set[Future[Any]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor30.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor30.py new file mode 100644 index 00000000..e2bae00b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor30.py @@ -0,0 +1,39 @@ +# This sample tests the case where a class is parameterized by a ParamSpec +# which is inferred by a call to the constructor, and the passed value +# is a generic function whose types are informed by additional parameters +# also passed to the constructor. + +from typing import Callable, Generic, ParamSpec, TypeVar + +P = ParamSpec("P") +T = TypeVar("T") + + +class ABase: ... + + +class A(ABase): ... + + +TA = TypeVar("TA", bound=ABase) + + +class B(Generic[P, T]): + def __init__( + self, _type: Callable[P, T], *args: P.args, **kwargs: P.kwargs + ) -> None: ... + + +def func1(t: type[TA]) -> TA: ... + + +b = B(func1, A) +reveal_type(b, expected_text="B[(t: type[A]), A]") + + +class C(Generic[TA]): + def __init__(self, _type: type[TA]) -> None: ... 
+ + +c = B(C, A) +reveal_type(c, expected_text="B[(_type: type[A]), C[A]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor31.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor31.py new file mode 100644 index 00000000..1f852ecd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor31.py @@ -0,0 +1,15 @@ +# This sample tests the case where bidirectional type inference fails +# because a particular set of nested constructor calls and variance +# combinations makes it impossible to infer the correct type arguments +# using bidirectional type inference. We need to fall back to using +# regular evaluation rules in this case. + +from typing import Iterable, Sequence + +list1 = [1] + + +class NT(tuple[list]): ... + + +x1: Iterable[NT | Sequence] = list(zip(list1)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor32.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor32.py new file mode 100644 index 00000000..c607bcbd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor32.py @@ -0,0 +1,36 @@ +# This sample tests the case where a metaclass __call__ method is present +# and supplies a different bidirectional type inference context than +# the __new__ or __init__ methods. + +from typing import TypedDict, TypeVar + +T = TypeVar("T") + + +class TD1(TypedDict): + x: int + + +class AMeta(type): + def __call__(cls, *args, **kwargs): + super().__call__(*args, **kwargs) + + +class A(metaclass=AMeta): + def __init__(self, params: TD1): + pass + + +A({"x": 42}) + + +class BMeta(type): + def __call__(cls: type[T], x: int, y: str) -> T: ... + + +class B(metaclass=BMeta): ... + + +def func1(cls: type[B]): + # This should generate an error. 
+ cls() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor33.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor33.py new file mode 100644 index 00000000..c5712cfa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor33.py @@ -0,0 +1,41 @@ +# This sample tests the case where an argument to a constructor +# uses an assignment expression (walrus operator) and the constructor +# has both a __new__ and __init__ method whose parameters have +# different bidirectional type inference contexts. + +from dataclasses import dataclass +from typing import Any, Self, TypedDict + + +class A: + def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... + def __init__(self, base: list[str], joined: str) -> None: ... + + +A(temp := ["x"], " ".join(temp)) + + +class TD1(TypedDict): + a: str + + +class TD2(TD1): + b: str + + +@dataclass +class DC1[T: TD1]: + x: T + + +@dataclass +class DC2[T: TD1]: + y: list[DC1[T]] + + +@dataclass +class DC3[T: TD1]: + embedded: DC2[T] + + +v1 = DC3[TD2](DC2(y=[DC1(x={"a": "", "b": ""})])) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor4.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor4.py new file mode 100644 index 00000000..71b5c8c3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor4.py @@ -0,0 +1,25 @@ +# This sample tests that unspecified type parameters +# for a class instance are "unknown". 
+ +from collections import defaultdict +from queue import Queue +from typing import DefaultDict, List, Type, TypeVar + +val1 = Queue() +reveal_type(val1, expected_text="Queue[Unknown]") + +val2 = list() +reveal_type(val2, expected_text="list[Unknown]") + +_T = TypeVar("_T") + + +def foo(value: Type[_T], b: _T) -> None: + val1: "DefaultDict[str, list[_T]]" = defaultdict(list) + reveal_type(val1, expected_text="DefaultDict[str, list[_T@foo]]") + + val2: "DefaultDict[str, list[_T]]" = defaultdict(List[_T]) + reveal_type(val2, expected_text="DefaultDict[str, list[_T@foo]]") + + # This should generate an error because the type is incompatible. + val3: "DefaultDict[str, list[_T]]" = defaultdict(list[int]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor5.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor5.py new file mode 100644 index 00000000..1f76cc6c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor5.py @@ -0,0 +1,25 @@ +# This sample tests the case where a constructor +# (in this case, OrderedDict) accepts a dict expression +# that is matched against a protocol in the OrderedDict +# constructor. 
+ +from typing import OrderedDict + + +val1 = { + "a": 1, + "b": 0, +} +reveal_type(val1, expected_text="dict[str, int]") + +val2 = OrderedDict(val1) +reveal_type(val2, expected_text="OrderedDict[str, int]") + + +val3 = OrderedDict( + { + "a": 1, + "b": 0, + } +) +reveal_type(val3, expected_text="OrderedDict[str, int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor6.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor6.py new file mode 100644 index 00000000..04f20db5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor6.py @@ -0,0 +1,87 @@ +# This sample tests the special-case handling of an overloaded __init__ +# method where the "self" parameter is specialized to influence the +# type of the constructed object. + +from typing import Any, Generic, Literal, Optional, Type, TypeVar, overload + +_T = TypeVar("_T", bound=Optional[str]) + + +class TextField(Generic[_T]): + @overload + def __init__(self: "TextField[str]", *, null: Literal[False] = ...) -> None: ... + + @overload + def __init__( + self: "TextField[Optional[str]]", + *, + null: Literal[True] = ..., + ) -> None: ... + + @overload + def __init__(self, *, null: bool = ...) -> None: ... + + def __init__(self, *, null: bool = ...) -> None: ... + + def __get__(self: "TextField[_T]", instance: Any, owner: Any) -> _T: ... + + +def foo(a: bool): + reveal_type(TextField(), expected_text="TextField[str]") + reveal_type(TextField(null=True), expected_text="TextField[str | None]") + reveal_type(TextField(null=a), expected_text="TextField[Unknown]") + + +class Model: ... + + +_T1 = TypeVar("_T1", bound="Optional[Model]") +_T2 = TypeVar("_T2", bound="Optional[Model]") + + +class ForeignKey(Generic[_T1]): + @overload + def __init__( + self: "ForeignKey[_T2]", to: Type[_T2], *, null: Literal[False] = ... + ) -> None: ... 
+ + @overload + def __init__( + self: "ForeignKey[Optional[_T2]]", to: Type[_T2], *, null: Literal[True] + ) -> None: ... + + def __init__(self, to: Type[_T2], *, null: bool = False) -> None: ... + + +class Author(Model): + pass + + +reveal_type(ForeignKey(Author, null=False), expected_text="ForeignKey[Author]") +reveal_type(ForeignKey(Author, null=True), expected_text="ForeignKey[Author | None]") + + +_T3 = TypeVar("_T3") +_T4 = TypeVar("_T4") +_S1 = TypeVar("_S1") +_S2 = TypeVar("_S2") + + +class Class1(Generic[_T3, _T4]): + def __init__(self: "Class1[_S1, _S2]", value1: _S1, value2: _S2) -> None: ... + + +reveal_type(Class1(0, ""), expected_text="Class1[int, str]") + + +class Class2(Generic[_T3, _T4]): + def __init__(self: "Class2[_S2, _S1]", value1: _S1, value2: _S2) -> None: ... + + +reveal_type(Class2(0, ""), expected_text="Class2[str, int]") + + +class Class3(Generic[_T3, _T4]): + # This should generate an error because class-scoped TypeVars are not + # allowed in the "self" type annotation for an __init__ method. + def __init__(self: "Class3[_T3, _T4]", value1: _T3, value2: _T4) -> None: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor7.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor7.py new file mode 100644 index 00000000..d2163283 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor7.py @@ -0,0 +1,51 @@ +# This sample tests the case where a __new__ method provides +# a type that differs from the class that contains it. 
+ + +from typing import Any, Callable, ParamSpec, Self, TypeVar + + +class ClassA: + def __new__(cls) -> str: + return "Hello World" + + +v1 = ClassA() +reveal_type(v1, expected_text="str") + + +_P = ParamSpec("_P") +_R = TypeVar("_R") + + +def func1(a: int) -> int: + return a + 1 + + +class ClassB: + def __new__(cls, func: Callable[_P, _R]) -> Callable[_P, _R]: + return func + + +v2 = ClassB(func1) +reveal_type(v2, expected_text="(a: int) -> int") + + +class ClassC: + def __new__(cls) -> Any: + return 1 + + def __init__(self, a: int) -> None: ... + + +ClassC() + + +class ClassD: + def __new__(cls) -> Self | Any: + return 1 + + def __init__(self, a: int) -> None: ... + + +ClassD() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructor9.py b/python-parser/packages/pyright-internal/src/tests/samples/constructor9.py new file mode 100644 index 00000000..6ca11eab --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructor9.py @@ -0,0 +1,18 @@ +# This sample validates that type(self) can be instantiated without +# error even if the class is abstract. + +from abc import ABC, abstractmethod +from typing import TypeVar + +T_A = TypeVar("T_A", bound="A") + + +class A(ABC): + @abstractmethod + def some_method(self) -> str: ... + + def some_factory_method_1(self): + return type(self)() + + def some_factory_method_2(self: T_A) -> T_A: + return type(self)() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructorCallable1.py b/python-parser/packages/pyright-internal/src/tests/samples/constructorCallable1.py new file mode 100644 index 00000000..c04aa9ed --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructorCallable1.py @@ -0,0 +1,174 @@ +# This sample verifies that a class can be assigned to a Callable +# type if its constructor conforms to that type. 
+ +from dataclasses import dataclass +from typing import ( + Any, + Callable, + Generic, + Literal, + ParamSpec, + Sized, + TypeVar, + Union, + overload, +) + +T1 = TypeVar("T1") +T2 = TypeVar("T2") +P = ParamSpec("P") +R = TypeVar("R") + + +def func1(callback: Callable[[T1], T2], val: T1) -> T2: ... + + +class A(Generic[T1]): + def __new__(cls, x: T1) -> "A[T1]": ... + + +a1 = func1(A[float], 3.4) +reveal_type(a1, expected_text="A[float]") + +# This should generate an error. +a2 = func1(A[int], 3.4) + +a3 = func1(A[int], 3) +reveal_type(a3, expected_text="A[int]") + + +class B(Generic[T1]): + @overload + def __new__(cls, x: int, y: Literal[True]) -> "B[None]": ... + + @overload + def __new__(cls, x: T1, y: bool = ...) -> "B[T1]": ... + + def __new__(cls, x: Union[T1, int], y: bool = False) -> "B[Any]": ... + + +b1 = func1(B[int], 3) +reveal_type(b1, expected_text="B[int]") + +# This should generate an error. +b2 = func1(B[None], 3.5) + +b3 = func1(B[float], 3.5) +reveal_type(b3, expected_text="B[float]") + +b4 = func1(B[Union[int, str]], 3) +reveal_type(b4, expected_text="B[int | str]") + +b5 = func1(B[Union[int, str]], "3") +reveal_type(b5, expected_text="B[int | str]") + + +class C(Generic[T1]): + def __init__(self, x: T1) -> None: ... + + +c1 = func1(C[float], 3.4) +reveal_type(c1, expected_text="C[float]") + +# This should generate an error. +c2 = func1(C[int], 3.4) + +c3 = func1(C[int], 3) +reveal_type(c3, expected_text="C[int]") + + +class D(Generic[T1]): + @overload + def __init__(self: "D[None]", x: int, y: Literal[True]) -> None: ... + + @overload + def __init__(self, x: T1, y: bool = ...) -> None: ... + + def __init__(self, x: Any, y: bool = False) -> None: ... + + +d1 = func1(D[int], 3) +reveal_type(d1, expected_text="D[int]") + +# This should generate an error. 
+d2 = func1(D[None], 3.5) + +d3 = func1(D[float], 3.5) +reveal_type(d3, expected_text="D[float]") + +d4 = func1(D[Union[int, str]], 3) +reveal_type(d4, expected_text="D[int | str]") + +d5 = func1(D[Union[int, str]], "3") +reveal_type(d5, expected_text="D[int | str]") + + +@dataclass(frozen=True, slots=True) +class E(Generic[T1]): + x: T1 + + +e1: Callable[[int], E[int]] = E + + +def func2(x: T1) -> E[T1]: ... + + +e2: Callable[[int], E[int]] = func2 + + +def cast_to_callable(cls: Callable[P, T1]) -> Callable[P, T1]: + return cls + + +class F: + pass + + +reveal_type(cast_to_callable(F), expected_text="() -> F") +reveal_type( + cast_to_callable(Sized), expected_text="(*args: Any, **kwargs: Any) -> Sized" +) + + +def func3(t: type[object]): + reveal_type( + cast_to_callable(t), expected_text="(*args: Any, **kwargs: Any) -> object" + ) + + +@dataclass +class G: + value: int + + +def func4(c: Callable[[T1], T2]) -> Callable[[T1], T2]: + return c + + +reveal_type(func4(G), expected_text="(int) -> G") + + +# Test the conversion of a complex constructor that involves +# a bunch of type variables, a default __new__ (that comes +# from object), and an __init__ that involves custom self +# types. This is meant to test a class like defaultdict. +KT = TypeVar("KT") +VT = TypeVar("VT") + + +class DDict(dict[KT, VT]): + @overload + def __init__(self) -> None: ... + @overload + def __init__(self: "DDict[str, T1]", **kwargs: T1) -> None: ... + @overload + def __init__(self, default_factory: Callable[[], VT] | None, /) -> None: ... + def __init__(self, *args: Any, **kwargs: Any) -> None: ... 
+ + +dd1 = cast_to_callable(DDict) +reveal_type( + dd1, + expected_text="Overload[() -> DDict[Unknown, Unknown], (**kwargs: T1@__init__) -> DDict[str, T1@__init__], (default_factory: (() -> VT@DDict) | None, /) -> DDict[Unknown, VT@DDict]]", +) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/constructorCallable2.py b/python-parser/packages/pyright-internal/src/tests/samples/constructorCallable2.py new file mode 100644 index 00000000..bf0872af --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/constructorCallable2.py @@ -0,0 +1,163 @@ +# This sample tests the case where a constructor is converted to +# a callable. + + +from typing import ( + Any, + Callable, + Generic, + NoReturn, + ParamSpec, + Self, + TypeVar, + overload, + reveal_type, +) + +P = ParamSpec("P") +R = TypeVar("R") +T = TypeVar("T") + + +def accepts_callable(cb: Callable[P, R]) -> Callable[P, R]: + return cb + + +class Class1: + def __init__(self, x: int) -> None: + pass + + +r1 = accepts_callable(Class1) +reveal_type(r1, expected_text="(x: int) -> Class1") +reveal_type(r1(1), expected_text="Class1") + + +class Class2: + pass + + +r2 = accepts_callable(Class2) +reveal_type(r2, expected_text="() -> Class2") +reveal_type(r2(), expected_text="Class2") + + +class Class3: + def __new__(cls, *args, **kwargs) -> Self: ... + + def __init__(self, x: int) -> None: ... + + +r3 = accepts_callable(Class3) +reveal_type(r3, expected_text="(x: int) -> Class3") +reveal_type(r3(3), expected_text="Class3") + + +class Class4: + def __new__(cls, x: int) -> int: ... 
+ + +r4 = accepts_callable(Class4) +reveal_type(r4, expected_text="(x: int) -> int") +reveal_type(r4(1), expected_text="int") + + +class Meta1(type): + def __call__(cls, *args: Any, **kwargs: Any) -> NoReturn: + raise NotImplementedError("Class not constructable") + + +class Class5(metaclass=Meta1): + def __new__(cls, *args: Any, **kwargs: Any) -> Self: + return super().__new__(cls) + + +r5 = accepts_callable(Class5) +reveal_type(r5, expected_text="(...) -> NoReturn") + + +class Class6Proxy: ... + + +class Class6: + def __new__(cls) -> Class6Proxy: + # This should generate an error because "cls" isn't compatible. + return Class6Proxy.__new__(cls) + + def __init__(self, x: int) -> None: + pass + + +r6 = accepts_callable(Class6) +reveal_type(r6, expected_text="() -> Class6Proxy") +reveal_type(r6(), expected_text="Class6Proxy") + + +class Class6_2: + def __new__(cls) -> Any: + return super().__new__(cls) + + def __init__(self, x: int) -> None: + pass + + +r6_2 = accepts_callable(Class6_2) +reveal_type(r6_2, expected_text="() -> Any") +reveal_type(r6_2(), expected_text="Any") + + +class Class7(Generic[T]): + @overload + def __init__(self: "Class7[int]", x: int) -> None: ... + + @overload + def __init__(self: "Class7[str]", x: str) -> None: ... 
+ + def __init__(self, x: int | str) -> None: + pass + + +r7 = accepts_callable(Class7) +reveal_type( + r7, expected_text="Overload[(x: int) -> Class7[int], (x: str) -> Class7[str]]" +) + +reveal_type(r7(0), expected_text="Class7[int]") +reveal_type(r7(""), expected_text="Class7[str]") + + +class Class8(Generic[T]): + def __new__(cls, x: T, y: list[T]) -> Self: + return super().__new__(cls) + + +r8 = accepts_callable(Class8) +reveal_type(r8, expected_text="(x: T@Class8, y: list[T@Class8]) -> Class8[T@Class8]") +reveal_type(r8("", [""]), expected_text="Class8[str]") + + +class Class9: + def __init__(self, x: list[T], y: list[T]) -> None: + pass + + +r9 = accepts_callable(Class9) +reveal_type(r9, expected_text="(x: list[T@__init__], y: list[T@__init__]) -> Class9") +reveal_type(r9([""], [""]), expected_text="Class9") + + +M = TypeVar("M") + + +class Meta2(type): + def __call__(cls: type[M], *args: Any, **kwargs: Any) -> M: ... + + +class Class10(metaclass=Meta2): + def __new__(cls, x: int, y: str) -> Self: + return super().__new__(cls) + + +r10 = accepts_callable(Class10) +reveal_type(r10, expected_text="(x: int, y: str) -> Class10") +reveal_type(r10(1, ""), expected_text="Class10") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/coroutines1.py b/python-parser/packages/pyright-internal/src/tests/samples/coroutines1.py new file mode 100644 index 00000000..4a6739df --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/coroutines1.py @@ -0,0 +1,69 @@ +# This same tests the type checker's ability to validate +# types related to coroutines (and async/await) statements. + +from typing import Generator, Any, Optional +from asyncio import coroutine + + +async def coroutine1(): + return 1 + + +a = coroutine1() + +# This should generate an error because 'await' +# can't be used outside of an async function. +await a + + +async def func1() -> int: ... 
+ + +async def func2() -> None: + # This should generate an error because await cannot be + # used in a lambda. + x = lambda: await func2() + + +def needs_int(val: int): + pass + + +async def consumer1(): + # This should generate an error because + # a is not an int + needs_int(a) + + needs_int(await a) + + needs_int(await coroutine1()) + + +class ScopedClass1: + def __aenter__(self): + return self + + @coroutine + def __await__(self) -> Generator[Any, None, int]: + yield 3 + return 3 + + async def __aexit__( + self, + t: Optional[type] = None, + exc: Optional[BaseException] = None, + tb: Optional[Any] = None, + ) -> bool: + return True + + +async def consumer2(): + a = ScopedClass1() + + # This should generate two errors because + # there is no __enter__ or __exit__ method on ScopedClass1. + with a as b: + needs_int(b) + + async with a as b: + needs_int(b) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/coroutines2.py b/python-parser/packages/pyright-internal/src/tests/samples/coroutines2.py new file mode 100644 index 00000000..51016315 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/coroutines2.py @@ -0,0 +1,23 @@ +# This sample verifies that the inferred return type +# of an async function is wrapped in a Coroutine. 
+ +import asyncio +from typing import Any, Coroutine + + +async def inspector(cr: Coroutine[Any, Any, Any]): + return await cr + + +async def inner(sleep: int, message: str) -> str: + await asyncio.sleep(sleep) + print(message) + return message + + +async def outer(): + await inspector(inner(1, "test")) + + +async def recursive1(): + await recursive1() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/coroutines3.py b/python-parser/packages/pyright-internal/src/tests/samples/coroutines3.py new file mode 100644 index 00000000..2352cc48 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/coroutines3.py @@ -0,0 +1,39 @@ +# This sample tests old-style (pre-await) awaitable generators. + +import asyncio +from typing import Any +from _typeshed._type_checker_internals import AwaitableGenerator + + +@asyncio.coroutine +def old_style_coroutine1(): + yield from asyncio.sleep(1) + + +async def func1() -> None: + x = await old_style_coroutine1() + reveal_type(x, expected_text="None") + return x + + +reveal_type( + old_style_coroutine1, + expected_text="() -> AwaitableGenerator[Any, Unknown, None, Any]", +) + + +@asyncio.coroutine +def old_style_coroutine2() -> AwaitableGenerator[None, None, None, Any]: + yield from asyncio.sleep(1) + + +async def func2() -> None: + x = await old_style_coroutine2() + reveal_type(x, expected_text="None") + return x + + +reveal_type( + old_style_coroutine2, + expected_text="() -> AwaitableGenerator[None, None, None, Any]", +) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/coroutines4.py b/python-parser/packages/pyright-internal/src/tests/samples/coroutines4.py new file mode 100644 index 00000000..32967916 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/coroutines4.py @@ -0,0 +1,17 @@ +# This sample tests the case where an inner coroutine with an inferred +# return type is referenced in a manner that results in recursion. 
+ +import asyncio + + +def func1(replace_inner: bool) -> None: + inner = lambda: None + + async def wrapper(): + inner() + + wrapped_fn = wrapper() + asyncio.create_task(wrapped_fn) + + if replace_inner: + inner = lambda: None diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass1.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass1.py new file mode 100644 index 00000000..1ebd285b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass1.py @@ -0,0 +1,128 @@ +# This sample tests the handling of the @dataclass decorator. + +from dataclasses import dataclass, InitVar, field +from typing import Callable, Generic, Literal, Sequence, TypeVar + + +@dataclass +class DC1: + bbb: int + ccc: str + aaa: str = "string" + __hash__: None # pyright: ignore[reportIncompatibleMethodOverride] + + +bar1 = DC1(bbb=5, ccc="hello") +bar2 = DC1(5, "hello") +bar3 = DC1(5, "hello", "hello2") +print(bar3.bbb) +print(bar3.ccc) +print(bar3.aaa) + +# This should generate an error because ddd +# isn't a declared value. +bar = DC1(bbb=5, ddd=5, ccc="hello") + +# This should generate an error because the +# parameter types don't match. +bar = DC1("hello", "goodbye") + +# This should generate an error because a parameter +# is missing. +bar = [DC1(2)] + +# This should generate an error because there are +# too many parameters. +bar = DC1(2, "hello", "hello", 4) + + +@dataclass +class DC2: + bbb: int + aaa: str = "string" + + # This should generate an error because variables + # with no default cannot come after those with + # defaults. + ccc: str + + def __init__(self) -> None: + pass + + +@dataclass +class DC3: + aaa: str + ddd: InitVar[int] = 3 + + +@dataclass(init=False) +class DC4: + bbb: int + aaa: str = "string" + # This should not generate an error because + # the ordering requirement is not enforced when + # init=False. 
+ ccc: str + + +@dataclass +class DC5: + # Private names are not allowed, so this should + # generate an error. + __private: int + + +@dataclass +class DC6: + x: type + + +DC6(int) + +# This should generate an error. +DC6(1) + + +T1 = TypeVar("T1", int, str) + + +@dataclass +class DC7(Generic[T1]): + # This should generate an error. + x: T1 = 1 + + +v7_1 = DC7() +reveal_type(v7_1, expected_text="DC7[int]") + +# This should generate an error. +v7_2: DC7[str] = DC7() + + +@dataclass +class DC8(Generic[T1]): + # This should generate an error. + x: T1 = field(default=1) + + +# This should generate an error. +v8_1 = DC8() +reveal_type(v8_1, expected_text="DC8[int]") + +# This should generate an error. +v8_2 = DC8[str]() + + +@dataclass +class DC9(Generic[T1]): + x: Sequence[Literal["a", "b"]] = ["a"] + + +@dataclass +class DC10[T]: + a: type[T] + b: Callable[[T], bool] = lambda _: True + + +DC10(a=int) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass10.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass10.py new file mode 100644 index 00000000..d6633be4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass10.py @@ -0,0 +1,44 @@ +# This sample verifies that a generic dataclass works. 
+ +from dataclasses import dataclass +from typing import Generic, TypeVar, Union + +T = TypeVar("T") + + +@dataclass +class ABase(Generic[T]): + value: Union[str, T] + + +reveal_type(ABase(""), expected_text="ABase[Unknown]") + + +class AChild(ABase[int]): + pass + + +reveal_type(AChild(123), expected_text="AChild") + + +class B(Generic[T]): + pass + + +@dataclass +class CBase(Generic[T]): + x: B[T] = B[T]() + + +@dataclass +class CChild(CBase[T]): + pass + + +c1 = CBase[int]() +reveal_type(c1, expected_text="CBase[int]") +reveal_type(c1.x, expected_text="B[int]") + +c2 = CChild[int]() +reveal_type(c2, expected_text="CChild[int]") +reveal_type(c2.x, expected_text="B[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass11.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass11.py new file mode 100644 index 00000000..2ef75ba2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass11.py @@ -0,0 +1,32 @@ +# This sample tests the case where an inheritance chain of +# dataclasses use generic types. 
+ +from dataclasses import dataclass +from typing import Generic, TypeVar + +Key0 = TypeVar("Key0") +Key1 = TypeVar("Key1") +Key2 = TypeVar("Key2") +Value = TypeVar("Value") + + +@dataclass +class MapTreeLeaf(Generic[Key0, Value]): + key: Key0 + value: Value + + +@dataclass +class MapTreeNode(MapTreeLeaf[Key1, Value]): + pass + + +class Foo(Generic[Key2, Value]): + def add(self, key: Key2, value: Value): + return MapTreeNode(key=key, value=value) + + def test1(self, a: Key2, b: Value): + v1 = self.add(a, b) + reveal_type(v1, expected_text="MapTreeNode[Key2@Foo, Value@Foo]") + reveal_type(v1.key, expected_text="Key2@Foo") + reveal_type(v1.value, expected_text="Value@Foo") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass12.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass12.py new file mode 100644 index 00000000..17fdedf4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass12.py @@ -0,0 +1,40 @@ +# This sample tests the case where a dataclass declares an instance +# variable and a subclass redeclares it as a class variable. 
+ +# pyright: reportIncompatibleVariableOverride=false + +from dataclasses import dataclass +from typing import ClassVar + + +@dataclass +class Base: + x: int + y: int + + +@dataclass +class Special(Base): + x: ClassVar[int] = 1 + z: int + + +@dataclass +class VerySpecial(Special): + y: ClassVar[int] = 2 + + +Base(x=1, y=2) +Special(y=2, z=3) +Special(2, 3) + +# This should generate an error +Special(x=1, y=2, z=3) + +# This should generate an error +Special(1, 2, 3) + +VerySpecial(z=3) + +# This should generate an error +VerySpecial(x=1, z=3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass13.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass13.py new file mode 100644 index 00000000..acf7ca4f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass13.py @@ -0,0 +1,13 @@ +# This sample tests that a dataclass member without a type annotation +# but with a field descriptor assignment results in an error. + +from dataclasses import dataclass, field + + +@dataclass +class MyClass: + id: int + x: int = field() + + # This should generate an error because it will result in a runtime exception + y = field() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass14.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass14.py new file mode 100644 index 00000000..8bc9661c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass14.py @@ -0,0 +1,16 @@ +# This sample tests the case where a dataclass field has a corresponding +# redundant declaration within a method. 
+ +from dataclasses import dataclass + + +@dataclass +class ClassA: + a: int + b: str + + def foo(self): + self.b: str = "" + + +ClassA(1, "hi") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass15.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass15.py new file mode 100644 index 00000000..64d05302 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass15.py @@ -0,0 +1,22 @@ +# This sample tests the case where a dataclass type refers to +# type that circularly refers back to the dataclass itself +# through a type alias. + +from dataclasses import dataclass +from typing import List + + +class ClassA: + test: "C" + + +@dataclass +class ClassB: + children: "C" + + def test(self): + for child in self.children: + reveal_type(child, expected_text="ClassB") + + +C = List[ClassB] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass16.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass16.py new file mode 100644 index 00000000..805d6ac1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass16.py @@ -0,0 +1,21 @@ +# This sample tests the generation of __init__ when some ancestor +# classes are unknown. + +from dataclasses import dataclass +import abc +from random import random + +C = abc.ABC if random() else object + + +class B(C): + def __init__(self, x: int): + pass + + +@dataclass +class A(B): + color: str + + +reveal_type(A.__init__, expected_text="(self: A, *args: Any, **kwargs: Any) -> None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass17.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass17.py new file mode 100644 index 00000000..1d770129 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass17.py @@ -0,0 +1,41 @@ +# This sample tests the case where a dataclass uses a ClassVar that +# is also Final. 
+ +from dataclasses import dataclass +from typing import ClassVar, Final + + +@dataclass +class A: + a: Final[int] + b: Final[str] = "" + c: ClassVar[Final[int]] = 0 + d: ClassVar[Final] = 0 + e: Final[ClassVar[int]] = 0 + + +a = A(1) + +# This should generate an error. +a.a = 0 + +# This should generate an error. +a.b = "" + +# This should generate an error. +a.c = 0 + +# This should generate an error. +A.c = 0 + +# This should generate an error. +A.d = 0 + +# This should generate an error. +A.e = 0 + + +@dataclass +class B: + a: ClassVar[Final[int]] = 0 + b: int = 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass18.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass18.py new file mode 100644 index 00000000..20771e58 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass18.py @@ -0,0 +1,17 @@ +# This sample tests the case where a "bare" Final is used in a dataclass +# with a default value. + +from typing import Final +from dataclasses import dataclass + + +@dataclass +class DC1: + a: Final = 1 + + +v1 = DC1(1) +reveal_type(v1.a, expected_text="Literal[1]") + +v2 = DC1() +reveal_type(v2.a, expected_text="Literal[1]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass2.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass2.py new file mode 100644 index 00000000..69cd8c42 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass2.py @@ -0,0 +1,30 @@ +# This sample tests the handling of Callable fields within a +# dataclass definition. 
+ +# pyright: strict + +from dataclasses import dataclass +from typing import Any, Callable, TypeVar + +CallableT = TypeVar("CallableT", bound=Callable[..., Any]) + + +def decorate(arg: CallableT) -> CallableT: + return arg + + +def f(s: str) -> int: + return int(s) + + +@dataclass +class C: + str_to_int: Callable[[str], int] = f + + +c = C() + + +reveal_type(c.str_to_int, expected_text="(str) -> int") + +c.str_to_int = decorate(f) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass3.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass3.py new file mode 100644 index 00000000..1e8d4013 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass3.py @@ -0,0 +1,33 @@ +# This sample tests the type checker's handling of +# synthesized __init__ and __new__ methods for +# dataclass classes and their subclasses. + +from dataclasses import dataclass + + +@dataclass +class A: + x: int + + +@dataclass(init=False) +class B(A): + y: int + + def __init__(self, a: A, y: int): + self.__dict__ = a.__dict__ + + +a = A(3) +b = B(a, 5) + + +# This should generate an error because there is an extra parameter +a = A(3, 4) + +# This should generate an error because there is one too few parameters +b = B(a) + + +A.__new__(A) +B.__new__(B) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass4.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass4.py new file mode 100644 index 00000000..31640b1f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass4.py @@ -0,0 +1,110 @@ +# This sample tests the analyzer's ability to handle inherited +# data classes. + +# pyright: reportIncompatibleVariableOverride=false + +from dataclasses import dataclass, field + + +class C1: ... + + +class C2: ... + + +class C3: ... 
+ + +@dataclass +class DC1: + aa: C1 + bb: C2 + cc: C3 + + +class NonDC2: + ff: int + + +@dataclass +class DC2(NonDC2, DC1): + ee: C2 + aa: C2 + dd: C2 + + +dc2_1 = DC2(C2(), C2(), C3(), C2(), C2()) + +# This should generate an error because the type +# of parameter aa has been replaced with type C1. +dc2_2 = DC2(C1(), C2(), C3(), C2(), C2()) + +dc2_3 = DC2(ee=C2(), dd=C2(), aa=C2(), bb=C2(), cc=C3()) + + +@dataclass +class DC3: + aa: C1 + bb: C2 = C2() + cc: C3 = C3() + + +@dataclass +class DC4(DC3): + # This should generate an error because + # previous parameters have default values. + dd: C1 + + +@dataclass +class DC5(DC3): + # This should not generate an error because + # aa replaces aa in DC3, and it's ordered + # before the params with default values. + aa: C2 + + +@dataclass +class DC6: + a: int = 0 + + +@dataclass +class DC7(DC6): + # This should generate an error because it is overriding + # a field with a default value, but it doesn't have a + # default value. + a: int + + # This should generate an error because the default + # value for "a" is inherited from the base class. + b: str + + +@dataclass +class DC8: + a: int = field(default=0) + + +@dataclass +class DC9(DC8): + # This should generate an error because it is overriding + # a field with a default value, but it doesn't have a + # default value. + a: int + + # This should generate an error because the default + # value for "a" is inherited from the base class. 
+ b: str + + +@dataclass +class DC10: + a: str = field(init=False, default="s") + b: bool = field() + + +@dataclass +class DC11(DC10): + a: str = field() + b: bool = field() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass5.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass5.py new file mode 100644 index 00000000..deb471cb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass5.py @@ -0,0 +1,83 @@ +# This sample tests the handling of the @dataclass decorator +# with a custom __init__. + +# pyright: reportIncompatibleMethodOverride=false + +from dataclasses import dataclass + + +@dataclass(init=False) +class A: + x: int + x_squared: int + + def __init__(self, x: int): + self.x = x + self.x_squared = x**2 + + +a = A(3) + + +@dataclass(init=True) +class B: + x: int + x_squared: int + + def __init__(self, x: int): + self.x = x + self.x_squared = x**2 + + +b = B(3) + + +@dataclass() +class C: + x: int + x_squared: int + + def __init__(self, x: int): + self.x = x + self.x_squared = x**2 + + +c = C(3) + + +@dataclass(init=False) +class D: + x: int + x_squared: int + + +# This should generate an error because there is no +# override __init__ method and no synthesized __init__. 
+d = D(3) + + +@dataclass(eq=False) +class E: + x: int + + def __eq__(self, x: "E") -> float: + return 1.23 + + def __lt__(self, x: "E") -> str: + return "" + + +foo1 = E(3) == E(3) +reveal_type(foo1, expected_text="float") + +foo2 = E(3) < E(3) +reveal_type(foo2, expected_text="str") + + +@dataclass(order=True) +class F: + x: int + + +foo3 = F(3) < F(3) +reveal_type(foo3, expected_text="bool") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass6.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass6.py new file mode 100644 index 00000000..c2a96330 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass6.py @@ -0,0 +1,45 @@ +# This sample tests the case where a dataclass entry is +# initialized with a "field" that uses "init=False". This +# case needs to be handled specially because it means +# that the synthesized __init__ method shouldn't include +# this field in its parameter list. + +from dataclasses import dataclass, field + + +@dataclass +class ParentA: + prop_1: str = field(init=False) + prop_2: str = field(default="hello") + prop_3: str = field(default_factory=lambda: "hello") + + # This should generate an error because it appears after + # a property with a default value. 
+ prop_4: str = field() + + def __post_init__(self): + self.prop_1 = "test" + + +@dataclass +class ChildA(ParentA): + prop_2: str = "bye" + + +test = ChildA(prop_2="test", prop_4="hi") + +assert test.prop_1 == "test" +assert test.prop_2 == "test" + + +@dataclass +class ClassB: + prop_1: str + prop_2: str + prop_3: str = field(default="") + prop_4: str = field(init=False) + prop_5: str = field(init=False) + + def __post_init__(self): + cprop_1 = "calculated value" + cprop_2 = "calculated value" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass7.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass7.py new file mode 100644 index 00000000..e098ae5a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass7.py @@ -0,0 +1,53 @@ +# This sample tests the synthesized comparison operators for dataclasses. + +from dataclasses import dataclass + + +@dataclass(order=True) +class DC1: + a: str + b: int + + +@dataclass(order=True) +class DC2: + a: str + b: int + + +dc1_1 = DC1("hi", 2) +dc1_2 = DC1("hi", 2) + +if dc1_1 < dc1_2: + print("") + +if dc1_1 <= dc1_2: + print("") + +if dc1_1 > dc1_2: + print("") + +if dc1_1 >= dc1_2: + print("") + +if dc1_1 == dc1_2: + print("") + +if dc1_1 != dc1_2: + print("") + +if dc1_1 == None: + print("") + +if dc1_1 != None: + print("") + +dc2_1 = DC2("hi", 2) + +# This should generate an error because the types are +# incompatible. +if dc1_1 < dc2_1: + print("") + +if dc1_1 != dc2_1: + print("") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass8.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass8.py new file mode 100644 index 00000000..632ec8a9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass8.py @@ -0,0 +1,30 @@ +# This sample tests the type checker's ability to handle +# circular type references within dataclass definitions. 
+ +from dataclasses import dataclass +from pathlib import Path + + +@dataclass +class ParentA: + b: "ClassB" + + +@dataclass +class ChildA(ParentA): + pass + + +@dataclass +class ClassB: + sub_class: ChildA + + def method1(self): + ChildA(b=self) + + +@dataclass() +class ClassC: + name: str = "sample" + dir_a: Path = Path.home().joinpath(f"source/{name}") + dir_b: Path = dir_a.joinpath("path/to/b") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclass9.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclass9.py new file mode 100644 index 00000000..d6fa5f48 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclass9.py @@ -0,0 +1,25 @@ +# This sample verifies that the type analyzer adds +# the __dataclass_fields__ class variable to +# synthesized data classes. + +from dataclasses import dataclass +from typing import Any, ClassVar, Protocol + + +class IsDataclass(Protocol): + # Checking for this attribute seems to currently be + # the most reliable way to ascertain that something is a dataclass + __dataclass_fields__: ClassVar[dict[str, Any]] + + +def dataclass_only( + x: IsDataclass, +): ... # do something that only makes sense with a dataclass + + +@dataclass +class A: + pass + + +dataclass_only(A()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassConverter1.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassConverter1.py new file mode 100644 index 00000000..fe0cd363 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassConverter1.py @@ -0,0 +1,159 @@ +# This sample tests the use of field's converter parameter +# described in PEP 712. + +from typing import Any, Callable, dataclass_transform, overload + + +def model_field(*, converter: Callable[..., Any]) -> Any: ... + + +@dataclass_transform(field_specifiers=(model_field,)) +class ModelBase: ... 
+ + +def converter_simple(s: str) -> int: + return int(s) + + +def converter_with_param_before_args(s: str, *args: int, **kwargs: int) -> int: + return int(s) + + +def converter_with_args(*args: str) -> int: + return int(args[0]) + + +def converter_with_extra_defaulted_params( + s: str, extra: int = 1, *, extraKwarg: int = 1 +) -> int: + return int(s) + + +def converter_with_default_for_first_param(s: str = "1") -> int: + return int(s) + + +def converter_with_more_specialized_return_type(s: str) -> int: + return int(s) + + +class ConverterClass: + @overload + def __init__(self, val: str) -> None: ... + + @overload + def __init__(self, val: bytes) -> None: ... + + def __init__(self, val: str | bytes) -> None: + pass + + +class DC1(ModelBase): + field0: int = model_field(converter=converter_simple) + field1: int = model_field(converter=converter_with_param_before_args) + field2: int = model_field(converter=converter_with_args) + field3: int = model_field(converter=converter_with_extra_defaulted_params) + field4: int = model_field(converter=converter_with_default_for_first_param) + field5: int | str = model_field( + converter=converter_with_more_specialized_return_type + ) + field6: ConverterClass = model_field(converter=ConverterClass) + + +reveal_type( + DC1.__init__, + expected_text="(self: DC1, field0: str, field1: str, field2: str, field3: str, field4: str, field5: str, field6: str | bytes) -> None", +) + + +# This overload will be ignored because it has too many arguments. +@overload +def overloaded_converter(s: float, secondParam: str, /) -> int: ... + + +# This overload will be ignored because its return type doesn't match the field type. +@overload +def overloaded_converter(s: float) -> str: ... + + +@overload +def overloaded_converter(s: str) -> int: ... + + +@overload +def overloaded_converter(s: list[str]) -> int: ... 
+ + +def overloaded_converter(s: float | str | list[str], *args: str) -> int | float | str: + return 0 + + +class Overloads(ModelBase): + field0: int = model_field(converter=overloaded_converter) + + +reveal_type( + Overloads.__init__, + expected_text="(self: Overloads, field0: str | list[str]) -> None", +) + + +class CallableObject: + @overload + def __call__(self, arg1: int) -> str: ... + + @overload + def __call__(self, arg1: str) -> int: ... + + def __call__(self, arg1: str | int | list[str]) -> int | str: + return 1 + + +callable: Callable[[str], int] = converter_simple +callable_union: Callable[[str], int] | Callable[[int], str] = converter_simple + + +class Callables(ModelBase): + field0: int = model_field(converter=CallableObject()) + field1: int = model_field(converter=callable) + field2: int = model_field(converter=callable_union) + + +reveal_type( + Callables.__init__, + expected_text="(self: Callables, field0: str, field1: str, field2: str) -> None", +) + + +def wrong_return_type(s: str) -> str: + return s + + +def wrong_number_of_params(x: str, x2: str, /) -> int: + return 1 + + +@overload +def wrong_converter_overload(s: float) -> str: ... + + +@overload +def wrong_converter_overload(s: str) -> str: ... + + +def wrong_converter_overload(s: float | str) -> int | str: + return 1 + + +class Errors(ModelBase): + # This should generate an error because the return type doesn't + # match the field type. + field0: int = model_field(converter=wrong_return_type) + + # This should generate an error because the converter has the + # wrong number of parameters. + field1: int = model_field(converter=wrong_number_of_params) + + # This should generate an error because none of the overloads + # match the field type. 
+ field2: int = model_field(converter=wrong_converter_overload) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassConverter2.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassConverter2.py new file mode 100644 index 00000000..7004138b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassConverter2.py @@ -0,0 +1,71 @@ +# This sample tests assignment of dataclass fields that use +# the converter parameter described in PEP 712. + +from typing import Any, Callable, dataclass_transform + + +def converter_simple(s: str) -> int: ... + + +def converter_passThru(x: str | int) -> str | int: ... + + +def model_field(*, converter: Callable[..., Any]) -> Any: ... + + +@dataclass_transform(field_specifiers=(model_field,)) +class ModelBase: ... + + +class DC1(ModelBase): + asymmetric: int = model_field(converter=converter_simple) + symmetric: str | int = model_field(converter=converter_passThru) + + +dc1 = DC1("1", 1) + +reveal_type(dc1.asymmetric, expected_text="int") +dc1.asymmetric = "2" +reveal_type( + dc1.asymmetric, expected_text="int" +) # Asymmetric -- type narrowing should not occur +# This should generate an error because only strs can be assigned to field0. +dc1.asymmetric = 2 + +reveal_type(dc1.symmetric, expected_text="str | int") +dc1.symmetric = "1" +reveal_type( + dc1.symmetric, expected_text="Literal['1']" +) # Symmetric -- type narrowing should occur + + +reveal_type(DC1.asymmetric, expected_text="int") +DC1.asymmetric = "2" +reveal_type(DC1.asymmetric, expected_text="int") +# This should generate an error because only strs can be assigned to field0. +DC1.asymmetric = 2 + +reveal_type(DC1.symmetric, expected_text="str | int") +DC1.symmetric = "1" +reveal_type(DC1.symmetric, expected_text="Literal['1']") + + +class DC2(ModelBase): + a: dict[str, str] = model_field(converter=dict) + + +DC2({}) +DC2({"": ""}) + +# This should generate an error. 
+DC2({"": 1}) + + +class DC3(ModelBase): + b: tuple[int, ...] = model_field(converter=tuple) + + +DC3([1, 2, 3]) + +# This should generate an error. +DC3(["", 1]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassConverter3.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassConverter3.py new file mode 100644 index 00000000..5b6a59e1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassConverter3.py @@ -0,0 +1,20 @@ +# This sample tests the case where a dataclass converter is used with +# a generic type. + +from typing import Any, Callable, dataclass_transform + + +def model_field(*, converter: Callable[..., Any]) -> Any: ... + + +@dataclass_transform(field_specifiers=(model_field,)) +class ModelBase: ... + + +class DC1[T](ModelBase): + data: set[T] = model_field(converter=set) + + +x = DC1([1, 2]) +reveal_type(x, expected_text="DC1[int]") +reveal_type(x.data, expected_text="set[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassDescriptors1.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassDescriptors1.py new file mode 100644 index 00000000..43a368cf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassDescriptors1.py @@ -0,0 +1,41 @@ +# This sample tests the handling of dataclass fields that use +# descriptor objects. + +from dataclasses import dataclass +from typing import Any, cast, overload + + +class MyDescriptor: + @overload + def __get__(self, __obj: None, __owner: Any) -> "MyDescriptor": ... + + @overload + def __get__(self, __obj: object, __owner: Any) -> int: ... 
+ + def __get__(self, __obj: object | None, __owner: Any) -> "int | MyDescriptor": + if __obj is None: + return self + return cast(Any, __obj)._x + + def __set__(self, __obj: object, __value: int) -> None: + if __obj is not None: + cast(Any, __obj)._x = __value + + +@dataclass +class Foo: + y: MyDescriptor = MyDescriptor() + + +f1 = Foo(3) + +reveal_type(f1.y, expected_text="int") +reveal_type(Foo.y, expected_text="MyDescriptor") + + +# This should generate an error. +f2 = Foo("hi") + + +f3 = Foo() +reveal_type(f3.y, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassDescriptors2.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassDescriptors2.py new file mode 100644 index 00000000..eb01ec04 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassDescriptors2.py @@ -0,0 +1,68 @@ +# This sample tests the handling of fields within a dataclass that +# are descriptors. + +from dataclasses import dataclass + +from typing import overload, Any, TypeVar, Generic, Optional, Union, Callable, Type +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + dataclass_transform, +) + + +_T = TypeVar("_T") + + +class A(Generic[_T]): ... + + +class Desc(Generic[_T]): + @overload + def __get__(self, instance: None, owner: Any) -> A[_T]: ... + + @overload + def __get__(self, instance: object, owner: Any) -> _T: ... + + def __get__(self, instance: Optional[object], owner: Any) -> Union[A[_T], _T]: ... + + +@dataclass_transform(field_specifiers=(Desc[Any],)) +def dataclass_like( + *, + init: bool = True, + repr: bool = True, # noqa: A002 + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, +) -> Callable[[Type[_T]], Type[_T]]: ... 
+ + +@dataclass_like() +class B: + x: Desc[int] + y: Desc[str] + z: Desc[str] = Desc() + + +@dataclass +class C: + x: Desc[int] + y: Desc[str] + z: Desc[str] = Desc() + + +reveal_type(B.x, expected_text="A[int]") +reveal_type(B.y, expected_text="A[str]") +reveal_type(B.z, expected_text="A[str]") +reveal_type(C.x, expected_text="A[int]") +reveal_type(C.y, expected_text="A[str]") +reveal_type(C.z, expected_text="A[str]") + +b = B(Desc(), Desc(), Desc()) +reveal_type(b.x, expected_text="int") +reveal_type(b.y, expected_text="str") +reveal_type(b.z, expected_text="str") + +c = C(Desc(), Desc(), Desc()) +reveal_type(c.x, expected_text="int") +reveal_type(c.y, expected_text="str") +reveal_type(c.z, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassFrozen1.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassFrozen1.py new file mode 100644 index 00000000..faa6817d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassFrozen1.py @@ -0,0 +1,54 @@ +# This sample tests the handling of frozen dataclass types. + +from dataclasses import dataclass +from typing import ClassVar + + +@dataclass(frozen=False) +class DC1: + val1: int = 6 + + +@dataclass(frozen=True) +class DC2: + val2: float = 4 + + +# This should generate an error because a frozen dataclass +# cannot inherit from a non-frozen dataclass. +@dataclass(frozen=True) +class DC3(DC1): + val3: int = 4 + + +@dataclass(frozen=True) +class DC4(DC2): + val4: int = 4 + + val5: ClassVar[int] + + +# This should generate an error because a non-frozen dataclass +# cannot inherit from a frozen dataclass. +@dataclass(frozen=False) +class DC5(DC2): + val4: int = 5 + + +a = DC1(val1=3) +a.val1 = 3 + +b = DC4(val2=3, val4=5) + +DC4.val5 = 3 + +# This should generate an error because the dataclass is frozen. +b.val2 = 3 + +# This should generate an error because the dataclass is frozen. 
+b.val4 = 3 + + +@dataclass(frozen=True) +class DC6(DC2): + val2: int = 6 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassHash1.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassHash1.py new file mode 100644 index 00000000..18d741f0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassHash1.py @@ -0,0 +1,68 @@ +# This sample tests the synthesis of the __hash__ method for dataclasses. + +from dataclasses import dataclass +from typing import Hashable + + +@dataclass +class DC1: + a: int + + +# This should generate an error. +v1: Hashable = DC1(0) + + +@dataclass(eq=True, frozen=True) +class DC2: + a: int + + +v2: Hashable = DC2(0) + + +@dataclass(eq=True) +class DC3: + a: int + + +# This should generate an error. +v3: Hashable = DC3(0) + + +@dataclass(frozen=True) +class DC4: + a: int + + +v4: Hashable = DC4(0) + + +@dataclass(eq=True, unsafe_hash=True) +class DC5: + a: int + + +v5: Hashable = DC5(0) + + +@dataclass(eq=True) +class DC6: + a: int + + def __hash__(self) -> int: + return 0 + + +v6: Hashable = DC6(0) + + +@dataclass(frozen=True) +class DC7: + a: int + + def __eq__(self, other) -> bool: + return self.a == other.a + + +v7: Hashable = DC7(0) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassKwOnly1.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassKwOnly1.py new file mode 100644 index 00000000..faa03c9e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassKwOnly1.py @@ -0,0 +1,58 @@ +# This sample tests the Python 3.10 additions to dataclass. + +from dataclasses import dataclass, KW_ONLY, field + + +@dataclass +class DC1: + a: str + _: KW_ONLY + b: int = 0 + + +DC1("hi") +DC1(a="hi") +DC1(a="hi", b=1) +DC1("hi", b=1) + +# This should generate an error because "b" is keyword-only. 
+DC1("hi", 1) + + +@dataclass +class DC2: + b: int = field(kw_only=True, default=3) + a: str + + +DC2("hi") +DC2(a="hi") +DC2(a="hi", b=1) +DC2("hi", b=1) + +# This should generate an error because "b" is keyword-only. +DC2("hi", 1) + + +@dataclass(kw_only=True) +class DC3: + a: str = field(kw_only=False) + b: int = 0 + + +DC3("hi") +DC3(a="hi") +DC3(a="hi", b=1) +DC3("hi", b=1) + +# This should generate an error because "b" is keyword-only. +DC3("hi", 1) + + +@dataclass +class DC4(DC3): + c: float + + +DC4("", 0.2, b=3) +DC4(a="", b=3, c=0.2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassNamedTuple1.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassNamedTuple1.py new file mode 100644 index 00000000..05683827 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassNamedTuple1.py @@ -0,0 +1,63 @@ +# This sample validates the Python 3.7 data class feature. + +from typing import ClassVar, Final, Hashable, NamedTuple + + +class Other: + pass + + +def standalone(obj: object) -> None: + print(obj) + + +class DataTuple(NamedTuple): + def _m(self): + pass + + id: int + aid: Other + value: str = "" + + # Unannotated variables should not be included. + not_annotated = 5 + + name: str | None = None + name2: str | None = None + + not_a_method = standalone + + +d1 = DataTuple(id=1, aid=Other(), name2="hi") +d1.not_a_method() + +d2 = DataTuple(id=1, aid=Other(), value="v") +d3 = DataTuple(id=1, aid=Other(), name="hello") +d4 = DataTuple(id=1, aid=Other(), name=None) +id = d1.id + +h4: Hashable = d4 +v = d3 == d4 + +# This should generate an error because the name argument +# is the incorrect type. +d5 = DataTuple(id=1, aid=Other(), name=3) + +# This should generate an error because aid is a required +# parameter and is missing an argument here. +d6 = DataTuple(id=1, name=None) + + +class DataTuple2(NamedTuple): + # This should generate an error because Final cannot + # be used in a NamedTuple. 
A second downstream error + # is also generated. + x: Final[int] + + # This should generate an error because Final cannot + # be used in a NamedTuple. + y: Final = 1 + + # This should generate an error because ClassVar cannot + # be used in a NamedTuple. + z: ClassVar[int] = 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassNamedTuple2.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassNamedTuple2.py new file mode 100644 index 00000000..f8829380 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassNamedTuple2.py @@ -0,0 +1,18 @@ +# This sample validates the Python 3.7 data class feature, ensuring that +# NamedTuple must be a direct base class. + +from typing import NamedTuple + + +class Parent(NamedTuple): + pass + + +class DataTuple2(Parent): + id: int + + +# This should generate an error because DataTuple2 isn't considered +# a data class and won't have the associated __new__ or __init__ +# method defined. +data = DataTuple2(id=1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassPostInit1.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassPostInit1.py new file mode 100644 index 00000000..297371ff --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassPostInit1.py @@ -0,0 +1,57 @@ +# This sample tests the __post_init__ validation logic. + +# pyright: reportIncompatibleMethodOverride=false + +from dataclasses import InitVar, dataclass, field +from typing import Iterable + + +@dataclass +class A: + a: InitVar[int] + b: InitVar[str] + c: InitVar[bool] + + def __post_init__(self, x: float, y: str, z: int, xx: int = 3) -> None: ... + + +@dataclass +class B: + items: list[int] + + # This should generate an error because the number of InitVars is zero. + def __post_init__(self, x: list[int]) -> None: ... 
+ + +@dataclass +class C: + iterable: InitVar[Iterable[int]] + + items: list[int] = field(init=False) + + # This should generate an error because the number of InitVars is 1. + def __post_init__(self) -> None: ... + + +@dataclass +class D: + iterable: InitVar[Iterable[int]] + + # This should generate an error because the type is incompatible. + def __post_init__(self, iterable: Iterable[str]) -> None: ... + + +@dataclass +class E: + _name: InitVar[str] = field() + name: str = field(init=False) + + def __post_init__(self, _name: str): ... + + +@dataclass +class F(E): + _age: InitVar[int] = field() + age: int = field(init=False) + + def __post_init__(self, _name: str, _age: int): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassReplace1.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassReplace1.py new file mode 100644 index 00000000..ba93f95e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassReplace1.py @@ -0,0 +1,45 @@ +# This sample tests the synthesis of a "__replace__" method for dataclass +# classes in Python 3.13 and newer. + +from dataclasses import dataclass +from typing import NamedTuple + + +@dataclass +class DC1: + a: int + b: str + c: str = "" + + +dc1: DC1 = DC1(1, "") + +dc1_clone = dc1.__replace__(b="", a=1, c="") +reveal_type(dc1_clone, expected_text="DC1") + +dc1.__replace__(c="") +dc1.__replace__(b="2") + +# This should generate an error. +dc1.__replace__(b=2) + +# This should generate an error. +dc1.__replace__(d="") + + +class NT1(NamedTuple): + a: int + b: str + c: str = "" + + +nt1 = NT1(1, "") + +nt1_clone = nt1.__replace__(c="") +reveal_type(nt1_clone, expected_text="NT1") + +# This should generate an error. +nt1.__replace__(b=2) + +# This should generate an error. 
+nt1.__replace__(d="") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassSlots1.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassSlots1.py new file mode 100644 index 00000000..520731e5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassSlots1.py @@ -0,0 +1,69 @@ +# This sample tests the "slots" parameter for dataclasses introduced +# in Python 3.10. + +from dataclasses import dataclass + + +# This should generate an error because __slots__ is already defined. +@dataclass(slots=True) +class A: + x: int + + __slots__ = () + + +@dataclass(slots=True) +class B: + x: int + + def __init__(self): + self.x = 3 + + # This should generate an error because "y" is not in slots. + self.y = 3 + + +@dataclass(slots=False) +class C: + x: int + + __slots__ = ("x",) + + def __init__(self): + self.x = 3 + + # This should generate an error because "y" is not in slots. + self.y = 3 + + +@dataclass +class D: + __slots__ = ("y", "x") + x: int + y: str + + +D(1, "bar") + + +@dataclass(slots=True) +class E: + a: int + + +E.__slots__ +E(1).__slots__ + +reveal_type(E.__slots__, expected_text="Iterable[str]") + + +@dataclass +class F: + a: int + + +# This should generate an error. +F.__slots__ + +# This should generate an error. +F(1).__slots__ diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassTransform1.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassTransform1.py new file mode 100644 index 00000000..f5165928 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassTransform1.py @@ -0,0 +1,93 @@ +# This sample tests the handling of the dataclass_transform mechanism +# when applied to a decorator function. 
+ +from typing import Any, Callable, TypeVar, overload +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + dataclass_transform, +) + +_T = TypeVar("_T") + + +@overload +@dataclass_transform(kw_only_default=True, order_default=True) +def create_model(cls: _T) -> _T: ... + + +@overload +@dataclass_transform(kw_only_default=True, order_default=True) +def create_model( + *, + frozen: bool = False, + kw_only: bool = True, + order: bool = True, +) -> Callable[[_T], _T]: ... + + +def create_model(*args: Any, **kwargs: Any) -> Any: ... + + +@create_model(kw_only=False, order=False) +class Customer1: + id: int + name: str + + +@create_model(frozen=True) +class Customer2: + id: int + name: str + + +@create_model(frozen=True) +class Customer2Subclass(Customer2): + salary: float + + +c1_1 = Customer1(id=3, name="Sue") +c1_1.id = 4 + +c1_2 = Customer1(3, "Sue") +c1_2.name = "Susan" + +# This should generate an error because of a type mismatch. +c1_2.name = 3 + +# This should generate an error because comparison methods are +# not synthesized. +v1 = c1_1 < c1_2 + +# This should generate an error because salary is not +# a defined field. +c1_3 = Customer1(id=3, name="Sue", salary=40000) + +c2_1 = Customer2(id=0, name="John") + +# This should generate an error because Customer2 supports +# keyword-only parameters for its constructor. +c2_2 = Customer2(0, "John") + +v2 = c2_1 < c2_2 + + +@dataclass_transform(kw_only_default=True, order_default=True, frozen_default=True) +def create_model_frozen(cls: _T) -> _T: ... + + +@create_model_frozen +class Customer3: + id: int + name: str + + +# This should generate an error because a non-frozen class +# cannot inherit from a frozen class. +@create_model +class Customer3Subclass(Customer3): + age: int + + +c3_1 = Customer3(id=2, name="hi") + +# This should generate an error because Customer3 is frozen. 
+c3_1.id = 4 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassTransform2.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassTransform2.py new file mode 100644 index 00000000..9a7f35c4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassTransform2.py @@ -0,0 +1,98 @@ +# This sample tests the handling of the dataclass_transform mechanism +# when applied to a metaclass. + +from typing import Any, TypeVar +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + dataclass_transform, +) + +_T = TypeVar("_T") + + +class ModelField: + def __init__(self, *, init: bool = True, default: Any | None = None) -> None: ... + + +def model_field( + *, init: bool = True, default: Any | None = None, alias: str | None = None +) -> Any: ... + + +@dataclass_transform( + kw_only_default=True, + field_specifiers=(ModelField, model_field), +) +class ModelMeta(type): + not_a_field: str + + +class ModelBase(metaclass=ModelMeta): + def __init_subclass__( + cls, + *, + frozen: bool = False, + kw_only: bool = True, + order: bool = True, + ) -> None: ... + + +class Customer1(ModelBase, frozen=True): + id: int = model_field() + name: str = model_field() + name2: str = model_field(alias="other_name", default="None") + + +# This should generate an error because a non-frozen class cannot +# derive from a frozen one. +class Customer1Subclass(Customer1, frozen=False): + salary: float = model_field() + + +class Customer2(ModelBase, order=True): + id: int + name: str = model_field(default="None") + + +c1_1 = Customer1(id=3, name="Sue", other_name="Susan") + +# This should generate an error because the class is frozen. +c1_1.id = 4 + +# This should generate an error because the class is kw_only. +c1_2 = Customer1(3, "Sue") + +# This should generate an error because other_name is missing. 
+c1_3 = Customer1(id=3, name="John") + +# This should generate an error because comparison methods are +# not synthesized. +v1 = c1_1 < c1_2 + +c2_1 = Customer2(id=0, name="John") + +c2_2 = Customer2(id=1) + +v2 = c2_1 < c2_2 + +# This should generate an error because Customer2 supports +# keyword-only parameters for its constructor. +c2_3 = Customer2(0, "John") + + +@dataclass_transform(frozen_default=True) +class ModelMetaFrozen(type): + pass + + +class ModelBaseFrozen(metaclass=ModelMetaFrozen): ... + + +class Customer3(ModelBaseFrozen): + id: int + name: str + + +c3_1 = Customer3(id=2, name="hi") + +# This should generate an error because Customer3 is frozen. +c3_1.id = 4 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassTransform3.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassTransform3.py new file mode 100644 index 00000000..6b72b366 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassTransform3.py @@ -0,0 +1,125 @@ +# This sample tests the handling of the dataclass_transform mechanism +# when applied to a class. + +from typing import Any, Callable, Generic, TypeVar + +_T = TypeVar("_T") + + +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + frozen_default: bool = False, + field_specifiers: tuple[type | Callable[..., Any], ...] = (()), +) -> Callable[[_T], _T]: + return lambda a: a + + +class ModelField: + def __init__(self, *, init: bool = True, default: Any | None = None) -> None: ... + + +def model_field( + *, init: bool = True, default: Any | None = None, alias: str | None = None +) -> Any: ... + + +@__dataclass_transform__( + kw_only_default=True, + field_specifiers=(ModelField, model_field), +) +class ModelBase: + not_a_field: str + + def __init_subclass__( + cls, + *, + frozen: bool = False, + kw_only: bool = True, + order: bool = True, + ) -> None: ... 
+ + +class Customer1(ModelBase, frozen=True): + id: int = model_field() + name: str = model_field() + name2: str = model_field(alias="other_name", default="None") + + +# This should generate an error because a non-frozen dataclass cannot +# derive from a frozen one. +class Customer1Subclass(Customer1): + salary: float = model_field() + + +class Customer2(ModelBase, order=True): + id: int + name: str = model_field(default="None") + + +c1_1 = Customer1(id=3, name="Sue", other_name="Susan") + +# This should generate an error because the class is frozen. +c1_1.id = 4 + +# This should generate an error because the class is kw_only. +c1_2 = Customer1(3, "Sue") + +c1_3 = Customer1(id=3, name="John") + +# This should generate an error because comparison methods are +# not synthesized. +v1 = c1_1 < c1_2 + +c2_1 = Customer2(id=0, name="John") + +c2_2 = Customer2(id=1) + +v2 = c2_1 < c2_2 + +# This should generate an error because Customer2 supports +# keyword-only parameters for its constructor. +c2_3 = Customer2(0, "John") + +_T = TypeVar("_T") + + +@__dataclass_transform__( + kw_only_default=True, + field_specifiers=(ModelField, model_field), +) +class GenericModelBase(Generic[_T]): + not_a_field: _T + + def __init_subclass__( + cls, + *, + frozen: bool = False, + kw_only: bool = True, + order: bool = True, + ) -> None: ... + + +class GenericCustomer(GenericModelBase[int]): + id: int = model_field() + + +gc_1 = GenericCustomer(id=3) + + +@__dataclass_transform__(frozen_default=True) +class ModelBaseFrozen: + not_a_field: str + + +class Customer3(ModelBaseFrozen): + id: int + name: str + + +c3_1 = Customer3(id=2, name="hi") + +# This should generate an error because Customer3 is frozen. 
+c3_1.id = 4 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dataclassTransform4.py b/python-parser/packages/pyright-internal/src/tests/samples/dataclassTransform4.py new file mode 100644 index 00000000..d36f3797 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dataclassTransform4.py @@ -0,0 +1,84 @@ +# This sample tests the case where a field descriptor has an implicit +# "init" parameter type based on an overload. + +from typing import ( + Any, + Callable, + Literal, + TypeVar, + overload, +) + +T = TypeVar("T") + + +@overload +def field1( + *, + default: str | None = None, + resolver: Callable[[], Any], + init: Literal[False] = False, +) -> Any: ... + + +@overload +def field1( + *, + default: str | None = None, + resolver: None = None, + init: Literal[True] = True, +) -> Any: ... + + +def field1( + *, + default: str | None = None, + resolver: Callable[[], Any] | None = None, + init: bool = True, +) -> Any: ... + + +def field2(*, init=False, kw_only=True) -> Any: ... + + +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_specifiers: tuple[type | Callable[..., Any], ...] = (()), +) -> Callable[[T], T]: + # If used within a stub file, the following implementation can be + # replaced with "...". + return lambda a: a + + +@__dataclass_transform__(kw_only_default=True, field_specifiers=(field1, field2)) +def create_model(*, init: bool = True) -> Callable[[type[T]], type[T]]: ... + + +@create_model() +class CustomerModel1: + id: int = field1(resolver=lambda: 0) + name: str = field1(default="Voldemort") + + +CustomerModel1() +CustomerModel1(name="hi") + +# This should generate an error because "id" is not +# supposed to be part of the init function. 
+CustomerModel1(id=1, name="hi") + + +@create_model() +class CustomerModel2: + id: int = field2() + name: str = field2(init=True) + + +# This should generate an error because kw_only is True +# by default for field2. +CustomerModel2(1) + +CustomerModel2(name="Fred") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/decorator1.py b/python-parser/packages/pyright-internal/src/tests/samples/decorator1.py new file mode 100644 index 00000000..3c82568d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/decorator1.py @@ -0,0 +1,21 @@ +# This sample tests the a class-based decorator that uses a +# __get__ method as a way to provide access to a __call__ method. + +# pyright: reportIncompatibleMethodOverride=false + + +class Wrapper: + def __init__(self, func): + self.func = func + + def __get__(self, instance, owner): + return lambda **kwargs: self.func(instance, wrapped=True, **kwargs) + + +class Foo: + @Wrapper + def __init__(self, **kwargs): + print(f"{kwargs}") + + +Foo(bar=3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/decorator2.py b/python-parser/packages/pyright-internal/src/tests/samples/decorator2.py new file mode 100644 index 00000000..0d76d988 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/decorator2.py @@ -0,0 +1,26 @@ +# This sample tests the usage of overloads in decorators. + +from typing import Callable, TypeVar, overload, Optional, Union + +F = TypeVar("F", bound=Callable[[], None]) + + +@overload +def atomic(__func: F) -> F: ... + + +@overload +def atomic(*, savepoint: bool = True) -> Callable[[F], F]: ... + + +def atomic( + __func: Optional[Callable[..., None]] = None, *, savepoint: bool = True +) -> Union[Callable[[], None], Callable[[F], F]]: ... + + +@atomic +def func1() -> None: ... + + +@atomic(savepoint=False) +def func2() -> None: ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/decorator3.py b/python-parser/packages/pyright-internal/src/tests/samples/decorator3.py new file mode 100644 index 00000000..4050d6b1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/decorator3.py @@ -0,0 +1,27 @@ +# This sample tests that arbitrary expressions (including +# subscripts) work for decorators. This support was added +# in Python 3.9. + +my_decorators = (staticmethod, classmethod, property) + + +class Foo: + # This should generate an error if version < 3.9. + @my_decorators[0] + def my_static_method(): + return 3 + + # This should generate an error if version < 3.9. + @my_decorators[1] + def my_class_method(cls): + return 3 + + # This should generate an error if version < 3.9. + @my_decorators[2] + def my_property(self): + return 3 + + +Foo.my_static_method() +Foo.my_class_method() +Foo().my_property diff --git a/python-parser/packages/pyright-internal/src/tests/samples/decorator4.py b/python-parser/packages/pyright-internal/src/tests/samples/decorator4.py new file mode 100644 index 00000000..1791c5f5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/decorator4.py @@ -0,0 +1,37 @@ +# This sample tests whether a decorator that contains an unknown +# type is ignored and treated as though it wasn't applied. + +# pyright: reportMissingImports=false + +import my_module + + +class Class2: + pass + + +def decorator1(fn): + # This decorator returns a value that is + # inferred to be a union containing an Unknown type. 
+ if fn: + return my_module.unknown + return Class2 + + +@decorator1 +class ClassA: + def __init__(self, a, b, c): + pass + + +v1 = ClassA(1, 2, 3) +reveal_type(v1, expected_text="ClassA") + + +@decorator1 +def func1() -> int: + return 3 + + +v2 = func1() +reveal_type(v2, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/decorator5.py b/python-parser/packages/pyright-internal/src/tests/samples/decorator5.py new file mode 100644 index 00000000..3bf309cd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/decorator5.py @@ -0,0 +1,29 @@ +# This sample tests the logic that determines whether +# an unannotated decorator should allow the decorated +# function type (and docstring) to pass through unmodified. + + +def decorator1(method): + def wrapper(*args, **kw): + result = method(*args, **kw) + return result + + return wrapper + + +@decorator1 +def func1(var: str, kvar: str): + return + + +reveal_type(func1, expected_text="(var: str, kvar: str) -> None") + + +class ClassA: + @decorator1 + def method1(self, var: str, kvar: str): + return + + +reveal_type(ClassA().method1, expected_text="(var: str, kvar: str) -> None") +reveal_type(ClassA.method1, expected_text="(self: ClassA, var: str, kvar: str) -> None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/decorator6.py b/python-parser/packages/pyright-internal/src/tests/samples/decorator6.py new file mode 100644 index 00000000..a41d300d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/decorator6.py @@ -0,0 +1,34 @@ +# This sample tests that function decorators can be combined with +# staticmethod or classmethod. 
+ +from typing import Callable, TypeVar +import functools + +_T = TypeVar("_T") + + +def decorator1(func: Callable[[_T, str], None]) -> Callable[[_T, str], None]: + @functools.wraps(func) + def func_wrapper(param1: _T, param2: str) -> None: + return func(param1, param2) + + return func_wrapper + + +class ClassA: + def __init__(self): + self.test1(1, "a") + self.test2("hi") + + @staticmethod + @decorator1 + def test1(param1: int, param2: str) -> None: + print(param2) + + @classmethod + @decorator1 + def test2(cls, param2: str) -> None: + print(param2) + + +ClassA() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/decorator7.py b/python-parser/packages/pyright-internal/src/tests/samples/decorator7.py new file mode 100644 index 00000000..32492022 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/decorator7.py @@ -0,0 +1,20 @@ +# This sample tests the case where a class decorator needs to evaluate +# the type of __init__ prior to the class being fully evaluated. + +from typing import Any, Callable, Generic, TypeVar + +T = TypeVar("T") +FuncType = Callable[..., Any] +FT = TypeVar("FT", bound=FuncType) + + +def decorate() -> Callable[[FT], FT]: ... + + +@decorate() +class ValueDecorated(Generic[T]): + def __init__(self, value: T) -> None: + self._value: T = value + + def __call__(self) -> T: + return self._value diff --git a/python-parser/packages/pyright-internal/src/tests/samples/defaultInitializer1.py b/python-parser/packages/pyright-internal/src/tests/samples/defaultInitializer1.py new file mode 100644 index 00000000..6de9a4dd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/defaultInitializer1.py @@ -0,0 +1,26 @@ +# This sample tests the type analyzer's reporting of issues +# with parameter default initializer expressions. This is +# covered by the reportCallInDefaultInitializer diagnostic rule. 
+ + +def func1( + a=None, + # This should generate an error + b=dict(), + # This should generate an error + c=max(3, 4), +): + return 3 + + +def func2( + a=None, + # This should generate an error + b={}, + # This should generate an error + c=[], + # This should generate an error + d={1, 2, 3}, + e=(1, 2, 3), +): + return 3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/del1.py b/python-parser/packages/pyright-internal/src/tests/samples/del1.py new file mode 100644 index 00000000..79d08fdd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/del1.py @@ -0,0 +1,68 @@ +# This sample tests del statements. + +# This should generate two errors because x1 and x2 are not defined. +del x1, x2 + +x1 = 1 +del x1 + +# This should generate an error because x1 isn't defined. +del x1 + + +def func1(y1: int): + # This should generate an error because y2 is unbound. + del y1, y2 + + # This should generate an error because y1 is unbound. + del y1 + + y2 = 1 + del y2 + + +class ClassA: + # This should generate an error because z1 is unbound. + del z1 + + z1 = 1 + del z1 + + +class ClassB: + x: int + + +b = ClassB() +b.x = 3 +reveal_type(b.x, expected_text="Literal[3]") +del b.x +reveal_type(b.x, expected_text="int") + +x2: list[str | int] = ["a", 1, "b", 2] +reveal_type(x2[0], expected_text="str | int") +x2[0] = 0 +reveal_type(x2[0], expected_text="Literal[0]") +reveal_type(x2[1], expected_text="str | int") +del x2[0] +reveal_type(x2[0], expected_text="str | int") + + +class ClassC: + @property + def x(self) -> str: ... + + @x.setter + def x(self, value: str) -> None: ... + + @x.deleter + def x(self) -> None: ... 
+ + +c = ClassC() +c.x = "x" + +reveal_type(c.x, expected_text="Literal['x']") + +del c.x +reveal_type(c.x, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/del2.py b/python-parser/packages/pyright-internal/src/tests/samples/del2.py new file mode 100644 index 00000000..f8759f0c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/del2.py @@ -0,0 +1,12 @@ +# This sample tests that the type analyzer flags as an error +# an attempt to assign to or delete a generic type. + +from typing import Dict + +# This should generate an error because assignment +# of generic types isn't allowed. +Dict[str, int] = {} + +# This should generate an error because deletion +# of generic types isn't allowed. +del Dict[str, int] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/deprecated2.py b/python-parser/packages/pyright-internal/src/tests/samples/deprecated2.py new file mode 100644 index 00000000..0825b491 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/deprecated2.py @@ -0,0 +1,175 @@ +# This sample tests the @warning.deprecated decorator introduced in PEP 702. + +from contextlib import contextmanager +from typing import Any, Callable, Self, TypeVar + +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + deprecated, + overload, +) + + +@deprecated("Use ClassB instead") +class ClassA: ... + + +# This should generate an error if reportDeprecated is enabled. +ClassA() + + +class ClassC: + @deprecated("Don't temp me") + def method1(self) -> None: ... + + @overload + @deprecated("Int is no longer supported") + def method2(self, a: int) -> None: ... + + @overload + def method2(self, a: None = None) -> None: ... + + def method2(self, a: int | None = None) -> None: ... + + +c1 = ClassC() + +# This should generate an error if reportDeprecated is enabled. 
+c1.method1() + +c1.method2() + +# This should generate an error if reportDeprecated is enabled. +c1.method2(2) + + +@deprecated("Test") +def func1() -> None: ... + + +# This should generate an error if reportDeprecated is enabled. +func1() + + +@overload +def func2(a: str) -> None: ... + + +@overload +@deprecated("int no longer supported") +def func2(a: int) -> int: ... + + +def func2(a: str | int) -> int | None: ... + + +func2("hi") + +# This should generate an error if reportDeprecated is enabled. +func2(3) + + +class ClassD: + @overload + def __init__(self, x: int) -> None: ... + + @overload + @deprecated("str no longer supported") + def __init__(self, x: str) -> None: ... + + def __init__(self, x: int | str) -> None: ... + + +ClassD(3) + +# This should generate an error if reportDeprecated is enabled. +ClassD("") + + +class ClassE: + @overload + def __new__(cls, x: int) -> Self: ... + + @overload + @deprecated("str no longer supported") + def __new__(cls, x: str) -> Self: ... + + def __new__(cls, x: int | str) -> Self: ... + + +ClassE(3) + +# This should generate an error if reportDeprecated is enabled. +ClassE("") + + +@deprecated("Deprecated async function") +async def func3(): ... + + +async def func4(): + # This should generate an error if reportDeprecated is enabled. + await func3() + + +@overload +def func5(val: int): ... + + +@overload +def func5(val: str): ... + + +@deprecated("All overloads are deprecated") +def func5(val: object): ... + + +# This should generate an error if reportDeprecated is enabled. +func5(1) + +# This should generate an error if reportDeprecated is enabled. +func5("") + +# This should generate an error if reportDeprecated is enabled. +v1 = func5 + + +T = TypeVar("T", bound=Callable[..., Any]) + + +@deprecated("Use different decorator") +@overload +def deco1(value: T) -> T: ... + + +@overload +def deco1(value: str): ... + + +def deco1(value: object) -> object: ... 
+ + +# This should generate an error if reportDeprecated is enabled. +@deco1 +def func6(): ... + + +@contextmanager +@deprecated("Func is deprecated") +def func7(): + yield + + +# This should generate an error if reportDeprecated is enabled. +with func7(): + ... + + +@deprecated("Func is deprecated") +@contextmanager +def func8(): + yield + + +# This should generate an error if reportDeprecated is enabled. +with func8(): + ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/deprecated3.py b/python-parser/packages/pyright-internal/src/tests/samples/deprecated3.py new file mode 100644 index 00000000..f5aab774 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/deprecated3.py @@ -0,0 +1,25 @@ +# This sample tests the @warning.deprecated decorator introduced in PEP 702. + +# This should generate an error if reportDeprecated is enabled. +from .deprecated2 import func1 + +# This should generate an error if reportDeprecated is enabled. +from .deprecated2 import ClassA as A + +from .deprecated2 import func2 +from .deprecated2 import ClassC as C + +func2("hi") + +# This should generate an error if reportDeprecated is enabled. +func2(1) + +# This should generate an error if reportDeprecated is enabled. +c1 = C.method1 + + +c2 = C() +c2.method2() + +# This should generate an error if reportDeprecated is enabled. +c2.method2(3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/deprecated4.py b/python-parser/packages/pyright-internal/src/tests/samples/deprecated4.py new file mode 100644 index 00000000..492ddda3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/deprecated4.py @@ -0,0 +1,93 @@ +# This sample tests the handling of deprecated properties and decorators. 
+ +from typing import overload + +from typing_extensions import deprecated # pyright: ignore[reportMissingModuleSource] + + +class A: + @property + @deprecated("Deprecated v1 getter") + def v1(self) -> str: + return "" + + @v1.setter + def v1(self, value: str) -> None: ... + + @v1.deleter + def v1(self) -> None: ... + + @property + def v2(self) -> str: + return "" + + @deprecated("Deprecated v2 setter") + @v2.setter + def v2(self, value: str) -> None: ... + + @v2.deleter + @deprecated("Deprecated v2 deleter") + def v2(self) -> None: ... + + +a = A() + +# This should generate an error if reportDeprecated is enabled. +v1 = a.v1 + +a.v1 = "" +del a.v1 + + +v2 = a.v2 + +# This should generate an error if reportDeprecated is enabled. +a.v2 = "" + +# This should generate an error if reportDeprecated is enabled. +a.v2 += "" + +# This should generate an error if reportDeprecated is enabled. +del a.v2 + + +class DescB1: + @overload + @deprecated("DescB1 __get__") + def __get__(self, obj: None, owner: object) -> str: ... + + @overload + def __get__(self, obj: object, owner: object) -> str: ... + + def __get__(self, obj: object | None, owner: object) -> str: + return "" + + +class DescB2: + def __get__(self, obj: object | None, owner: object) -> str: + return "" + + @deprecated("DescB2 __set__") + def __set__(self, obj: object | None, value: str) -> None: ... + + @deprecated("DescB2 __delete__") + def __delete__(self, obj: object | None) -> None: ... + + +class B: + b1: DescB1 = DescB1() + b2: DescB2 = DescB2() + + +# This should generate an error if reportDeprecated is enabled. +v3 = B.b1 + +b = B() +v4 = b.b1 + + +# This should generate an error if reportDeprecated is enabled. +b.b2 = "" + +# This should generate an error if reportDeprecated is enabled. 
+del b.b2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/deprecated5.py b/python-parser/packages/pyright-internal/src/tests/samples/deprecated5.py new file mode 100644 index 00000000..a9579e9e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/deprecated5.py @@ -0,0 +1,15 @@ +# This sample tests the deprecation messages for class properties. + + +class A: + @classmethod + @property + # This should generate an error if reportDeprecated is enabled. + def prop1(cls) -> int: + return 1 + + @classmethod + @prop1.setter + # This should generate an error if reportDeprecated is enabled. + def prop1(cls, value: int) -> None: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/deprecated6.py b/python-parser/packages/pyright-internal/src/tests/samples/deprecated6.py new file mode 100644 index 00000000..dfef566d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/deprecated6.py @@ -0,0 +1,50 @@ +# This sample tests the case where a __call__ is marked deprecated. + +from typing import Callable, Generic, ParamSpec, TypeVar +from typing_extensions import deprecated # pyright: ignore[reportMissingModuleSource] + + +class A: + @deprecated("Use ClassB instead") + def __call__(self) -> None: ... + + +a = A() + +# This should generate an error if reportDeprecated is enabled. +a() + +P = ParamSpec("P") +R = TypeVar("R") + + +class B(Generic[P, R]): + def __init__(self, cb: Callable[P, R]) -> None: + self.cb = cb + + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R: + return self.cb(*args, **kwargs) + + +@B +@deprecated("Don't use this.") +def func1(x: int) -> None: + pass + + +# This should generate an error if reportDeprecated is enabled. +func1(3) + + +def deco1(cb: Callable[P, R]) -> B[P, R]: + return B(cb) + + +@deco1 +@deprecated("Don't use this.") +def func2(x: int) -> None: + pass + + +# This should generate an error if reportDeprecated is enabled. 
+func2(3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/deprecated7.py b/python-parser/packages/pyright-internal/src/tests/samples/deprecated7.py new file mode 100644 index 00000000..a4259945 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/deprecated7.py @@ -0,0 +1,30 @@ +# This sample tests the case where a "deprecated" instance is instantiated +# prior to being used as a decorator. + +# pyright: reportMissingModuleSource=false + +from typing_extensions import deprecated + + +todo = deprecated("This needs to be implemented!!") + + +@todo +class ClassA: ... + + +# This should generate an error if reportDeprecated is enabled. +ClassA() + + +@todo +def func1() -> None: + pass + + +# This should generate an error if reportDeprecated is enabled. +func1() + + +def func2() -> None: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/deprecated8.py b/python-parser/packages/pyright-internal/src/tests/samples/deprecated8.py new file mode 100644 index 00000000..3d9e0a22 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/deprecated8.py @@ -0,0 +1,44 @@ +# This sample tests reporting of deprecated magic methods. + +# pyright: reportMissingModuleSource=false + +from typing import Self, overload + +from typing_extensions import deprecated + + +class ClassA: + @deprecated("Adding a str is deprecated") + @overload + def __add__(self, other: str) -> Self: ... + + @overload + def __add__(self, other: object) -> Self: ... + + def __add__(self, other: object) -> Self: ... + + @deprecated("Bool is not supported") + def __bool__(self) -> bool: ... + + @deprecated("Negation is no longer supported") + def __neg__(self) -> Self: ... + + +a = ClassA() + +v1 = a + 1 + +# This should be marked as deprecated. +v2 = a + "" + +a += 1 + +# This should be marked as deprecated. +a += "" + + +# This should be marked as deprecated. +v3 = -a + +# This should be marked as deprecated. 
+v4 = not a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/deprecatedAlias1.py b/python-parser/packages/pyright-internal/src/tests/samples/deprecatedAlias1.py new file mode 100644 index 00000000..3c221348 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/deprecatedAlias1.py @@ -0,0 +1,62 @@ +# This sample tests the detection of deprecated classes from the typing +# module. + +from typing import ( + ChainMap as CM1, + Counter as CT1, + DefaultDict, + Deque, + Dict, + FrozenSet, + List, + Optional, + OrderedDict as OD1, + Set, + Tuple, + Type, + Union, + Awaitable, + Coroutine, + AsyncIterable, + AsyncGenerator, + Iterable, + Iterator, + Generator, + Reversible, + Container, + Collection as C1, + Callable, + AbstractSet, + MutableSet, + Mapping, + MutableMapping, + Sequence, + MutableSequence, + ByteString as BS1, + MappingView, + KeysView, + ItemsView, + ValuesView, + ContextManager as CM1, + AsyncContextManager, + Pattern as P1, + Match as M1, +) + +from collections.abc import Collection, ByteString, Set as AS +from contextlib import AbstractContextManager +from re import Pattern, Match + +# These should be marked deprecated for Python >= 3.9 +v1: List[int] = [1, 2, 3] +v2: Dict[int, str] = {} +v3: Set[int] = set() +v4: Tuple[int] = (3,) +v5: FrozenSet[int] = frozenset() +v6: Type[int] = int +v7 = Deque() +v8 = DefaultDict() + +# These should be marked deprecated for Python >= 3.10 +v20: Union[int, str] +v21: Optional[int] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/deprecatedAlias2.py b/python-parser/packages/pyright-internal/src/tests/samples/deprecatedAlias2.py new file mode 100644 index 00000000..38a53cac --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/deprecatedAlias2.py @@ -0,0 +1,64 @@ +# This sample tests the detection of deprecated classes from the +# typing_extensions module. + +# This test is heavily derived from deprecatedAlias1.py. 
+ +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + ChainMap as CM1, + Counter as CT1, + DefaultDict, + Deque, + Dict, + FrozenSet, + List, + Optional, + OrderedDict as OD1, + Set, + Tuple, + Type, + Union, + Awaitable, + Coroutine, + AsyncIterable, + AsyncGenerator, + Iterable, + Iterator, + Generator, + Reversible, + Container, + Collection as C1, + Callable, + AbstractSet, + MutableSet, + Mapping, + MutableMapping, + Sequence, + MutableSequence, + # ByteString as BS1, # This symbol doesn't exist in typing_extensions + MappingView, + KeysView, + ItemsView, + ValuesView, + ContextManager as CM1, # This symbol is reexported from contextlib + AsyncContextManager, # This symbol is reexported from contextlib + Pattern as P1, + Match as M1, +) + +from collections.abc import Collection, ByteString, Set as AS +from contextlib import AbstractContextManager +from re import Pattern, Match + +# These should be marked deprecated for Python >= 3.9 +v1: List[int] = [1, 2, 3] +v2: Dict[int, str] = {} +v3: Set[int] = set() +v4: Tuple[int] = (3,) +v5: FrozenSet[int] = frozenset() +v6: Type[int] = int +v7 = Deque() +v8 = DefaultDict() + +# These should be marked deprecated for Python >= 3.10 +v20: Union[int, str] +v21: Optional[int] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/descriptor1.py b/python-parser/packages/pyright-internal/src/tests/samples/descriptor1.py new file mode 100644 index 00000000..564473c1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/descriptor1.py @@ -0,0 +1,165 @@ +# This sample tests the detection and handling of asymmetric descriptors +# and properties. Type narrowing should be disabled in these cases. + +from typing import Any, Hashable, Iterable, Literal, Self, overload + + +class A: + @property + def prop1(self) -> int | None: ... + + @prop1.setter + def prop1(self, val: int | None) -> None: ... + + @property + def prop2(self) -> int | None: ... 
+ + @prop2.setter + def prop2(self, val: int) -> None: ... + + @prop2.deleter + def prop2(self) -> None: ... + + @property + def prop3(self) -> int: ... + + @prop3.setter + def prop3(self, val: int | None) -> None: ... + + @prop3.deleter + def prop3(self) -> None: ... + + +def func1(obj: A) -> Literal[3]: + obj.prop1 = None + + b: None = obj.prop1 + + obj.prop1 = 3 + + v1 = obj.prop1 + 1 + return obj.prop1 + + +def func2(obj: A) -> Literal[3]: + obj.prop2 = 3 + + # This should generate an error because prop2 isn't + # narrowed in this case. + b: int = obj.prop2 + + # This should generate an error because prop2 isn't + # narrowed in this case. + return obj.prop2 + + +def func3(obj: A) -> Literal[3]: + obj.prop3 = 3 + + b: int = obj.prop3 + + # This should generate an error because prop2 isn't + # narrowed in this case. + return obj.prop3 + + +class Descriptor1: + def __get__(self, instance: Any, owner: Any) -> int | None: ... + + def __set__(self, owner: Any, value: int | None) -> None: ... + + +class Descriptor2: + def __get__(self, instance: Any, owner: Any) -> int | None: ... + + def __set__(self, owner: Any, value: int) -> None: ... + + +class Descriptor3: + def __get__(self, instance: Any, owner: Any) -> int: ... + + def __set__(self, owner: Any, value: int | None) -> None: ... + + +class Descriptor4: + @overload + def __get__(self, instance: None, owner: Any) -> int: ... + @overload + def __get__(self, instance: Any, owner: Any) -> str: ... + def __get__(self, instance: Any, owner: Any) -> int | str: ... + + def __set__(self, owner: Any, value: int | None) -> None: ... + + +class Descriptor5: + def __get__(self, instance: Any, owner: Any) -> int: ... + + @overload + def __set__(self, owner: bytes, value: int | None) -> None: ... + @overload + def __set__(self, owner: "B", value: int | None) -> None: ... + def __set__(self, owner: Any, value: int | None) -> None: ... 
+ + +class Descriptor6[GT, ST]: + @overload + def __get__(self, instance: None, owner: Any) -> Self: ... + + @overload + def __get__(self, instance: Any, owner: Any) -> GT: ... + def __get__(self, instance: Any, owner: Any) -> Any: ... + + def __set__(self, instance: Any, value: ST): ... + + +class B: + desc1: Descriptor1 + desc2: Descriptor2 + desc3: Descriptor3 + desc4: Descriptor4 + desc5: Descriptor5 + desc6: Descriptor6[int | None, int | None] + + +def func4(obj: B) -> Literal[3]: + obj.desc1 = None + + b: None = obj.desc1 + + obj.desc1 = 3 + + v1 = obj.desc1 + 1 + return obj.desc1 + + +def func5(obj: B) -> Literal[3]: + obj.desc2 = 3 + + # This should generate an error because desc2 isn't + # narrowed in this case. + b: int = obj.desc2 + + # This should generate an error because desc2 isn't + # narrowed in this case. + return obj.desc2 + + +def func6(obj: B) -> Literal[3]: + obj.desc3 = 3 + + b: int = obj.desc3 + + # This should generate an error because prop2 isn't + # narrowed in this case. + return obj.desc3 + + +def func7(obj: B): + obj.desc4 = 3 + reveal_type(obj.desc4, expected_text="str") + + obj.desc5 = 3 + reveal_type(obj.desc5, expected_text="int") + + obj.desc6 = 1 + reveal_type(obj.desc6, expected_text="Literal[1]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/descriptor2.py b/python-parser/packages/pyright-internal/src/tests/samples/descriptor2.py new file mode 100644 index 00000000..b5702a9f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/descriptor2.py @@ -0,0 +1,29 @@ +# This sample validates that a member's descriptor protocol is +# accessed via a member access expression only when accessing it +# through a class variable, not through an instance variable. 
+ +from typing import Any + + +class Descriptor: + def __get__(self, obj: Any, objtype: Any = None) -> float: + return 1.0 + + +class ClassA: + x: Descriptor + + def __init__(self, x: Descriptor): + reveal_type(self.x, expected_type="float") + + def func1(self): + reveal_type(self.x, expected_type="float") + + +class ClassB: + def __init__(self, x: Descriptor): + self.x = x + reveal_type(self.x, expected_type="Descriptor") + + def func1(self): + reveal_type(self.x, expected_type="Descriptor") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/descriptor3.py b/python-parser/packages/pyright-internal/src/tests/samples/descriptor3.py new file mode 100644 index 00000000..0ae13b46 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/descriptor3.py @@ -0,0 +1,24 @@ +# This sample tests that bidirectional type inference works when +# assigning to a class-scoped variable that is annotated with a +# descriptor. The setter type should not be used in this case. + + +from typing import Callable, Generic, TypeVar + + +T = TypeVar("T") + + +class Desc1(Generic[T]): + def __get__( + self, instance: object | None, owner: type | None = None + ) -> list[T]: ... + + def __set__(self, instance: object, value: list[T]) -> None: ... + + +def func1(factory: Callable[[], list[T]]) -> Desc1[T]: ... + + +class ClassA: + not_working: Desc1[int] = func1(list) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dictionary1.py b/python-parser/packages/pyright-internal/src/tests/samples/dictionary1.py new file mode 100644 index 00000000..0258d191 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dictionary1.py @@ -0,0 +1,57 @@ +# This sample tests the type checker's type inference logic for +# dictionaries. 
+ +from typing import Any, Callable, Literal, Sequence + + +def wants_int_dict(a: dict[int, int]): + pass + + +wants_int_dict({3: 3, 5: 5}) +wants_int_dict({x: x for x in [2, 3, 4]}) + +# This should generate an error because +# the type is wrong. +wants_int_dict({"hello": 3, "bye": 5}) + +# This should generate an error because +# the type is wrong. +wants_int_dict({"sdf": x for x in [2, 3, 4]}) + +t1 = () + +# This should generate an error because t1 is not a mapping. +d1 = {**t1} + +d2 = {"hi": 3} +d3 = {**d2, "": 4} +reveal_type(d3, expected_text="dict[str, int]") + + +LitChoices = Literal["ab", "bcd"] + +keys: list[LitChoices] = ["ab", "bcd"] +d4: dict[LitChoices, int] = {k: len(k) for k in keys} + + +d5: dict[str, Callable[[Sequence[Any]], float]] = { + "min": min, + "max": max, + "sum": sum, +} + +LiteralDict = dict[LitChoices, str] + +d6: LiteralDict = {"ab": "x"} +d7: LiteralDict = {"bcd": "y"} +d6 = {**d6, **d7} +d6 = d6 | d7 + + +def func1(args): + d1 = {**args, "x": 123} + reveal_type(d1, expected_text="dict[Unknown, Unknown]") + + +# diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dictionary2.py b/python-parser/packages/pyright-internal/src/tests/samples/dictionary2.py new file mode 100644 index 00000000..5d668ca6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dictionary2.py @@ -0,0 +1,34 @@ +# This sample tests dictionary inference logic. + +from typing import Mapping, TypeAlias, TypeVar + +T = TypeVar("T") + + +def func1(mapping: Mapping[str | bytes, int]): + return mapping + + +func1({"x": 1}) +func1({b"x": 1}) + +# This should generate an error. 
+func1({3: 1}) + + +RecursiveMapping: TypeAlias = ( + int | Mapping[int, "RecursiveMapping"] | Mapping[str, "RecursiveMapping"] +) + + +class HasName: + name: str | None + + +def func2(x: T | None) -> T: + assert x is not None + return x + + +def func3(v: list[HasName]) -> RecursiveMapping: + return {func2(x.name): 1 for x in v} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dictionary3.py b/python-parser/packages/pyright-internal/src/tests/samples/dictionary3.py new file mode 100644 index 00000000..fe1067de --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dictionary3.py @@ -0,0 +1,30 @@ +# This sample tests various dictionary diagnostics. + +from typing import Generic, Mapping, TypeVar + +t1 = () + +# This should generate an error because t1 is not a mapping. +d1 = {**t1} + + +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") + + +def func1(m: Mapping[str, int] | Mapping[str, str]): + d1 = {**m} + reveal_type(d1, expected_text="dict[str, int | str]") + + +class MyMapping(Generic[_KT, _VT]): + def keys(self) -> list[_KT]: + raise NotImplementedError + + def __getitem__(self, key: _KT) -> _VT: + raise NotImplementedError + + +def func2(m: MyMapping[str, int] | MyMapping[str, str]): + d1 = {**m} + reveal_type(d1, expected_text="dict[str, int | str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dictionary4.py b/python-parser/packages/pyright-internal/src/tests/samples/dictionary4.py new file mode 100644 index 00000000..55cb3078 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dictionary4.py @@ -0,0 +1,106 @@ +# This sample tests a relatively expensive dictionary type evaluation +# case. It is included to ensure that the performance of this case +# is reasonable and doesn't regress. 
+ +CONFIG: dict[str, list] = { + "0": [], + "1": [()], + "2": [(), ("2",)], + "3": [(), ("3",), ("3", 3)], + "4": [(), ("4",), ("4", 4), ()], + "5": [(), ("5",), ("5", 5), (), ("5",)], + "6": [(), ("6",), ("6", 6), (), ("6",), ("6", 6)], + "7": [], + "8": [()], + "9": [(), ("9",)], + "10": [(), ("10",), ("10", 10)], + "11": [(), ("11",), ("11", 11), ()], + "12": [(), ("12",), ("12", 12), (), ("12",)], + "13": [(), ("13",), ("13", 13), (), ("13",), ("13", 13)], + "14": [], + "15": [()], + "16": [(), ("16",)], + "17": [(), ("17",), ("17", 17)], + "18": [(), ("18",), ("18", 18), ()], + "19": [(), ("19",), ("19", 19), (), ("19",)], + "20": [(), ("20",), ("20", 20), (), ("20",), ("20", 20)], + "21": [], + "22": [()], + "23": [(), ("23",)], + "24": [(), ("24",), ("24", 24)], + "25": [(), ("25",), ("25", 25), ()], + "26": [(), ("26",), ("26", 26), (), ("26",)], + "27": [(), ("27",), ("27", 27), (), ("27",), ("27", 27)], + "28": [], + "29": [()], + "30": [(), ("30",)], + "31": [(), ("31",), ("31", 31)], + "32": [(), ("32",), ("32", 32), ()], + "33": [(), ("33",), ("33", 33), (), ("33",)], + "34": [(), ("34",), ("34", 34), (), ("34",), ("34", 34)], + "35": [], + "36": [()], + "37": [(), ("37",)], + "38": [(), ("38",), ("38", 38)], + "39": [(), ("39",), ("39", 39), ()], + "40": [(), ("40",), ("40", 40), (), ("40",)], + "41": [(), ("41",), ("41", 41), (), ("41",), ("41", 41)], + "42": [], + "43": [()], + "44": [(), ("44",)], + "45": [(), ("45",), ("45", 45)], + "46": [(), ("46",), ("46", 46), ()], + "47": [(), ("47",), ("47", 47), (), ("47",)], + "48": [(), ("48",), ("48", 48), (), ("48",), ("48", 48)], + "49": [], + "50": [()], + "51": [(), ("51",)], + "52": [(), ("52",), ("52", 52)], + "53": [(), ("53",), ("53", 53), ()], + "54": [(), ("54",), ("54", 54), (), ("54",)], + "55": [(), ("55",), ("55", 55), (), ("55",), ("55", 55)], + "56": [], + "57": [()], + "58": [(), ("58",)], + "59": [(), ("59",), ("59", 59)], + "60": [(), ("60",), ("60", 60), ()], + "61": [(), ("61",), 
("61", 61), (), ("61",)], + "62": [(), ("62",), ("62", 62), (), ("62",), ("62", 62)], + "63": [], + "64": [()], + "65": [(), ("65",)], + "66": [(), ("66",), ("66", 66)], + "67": [(), ("67",), ("67", 67), ()], + "68": [(), ("68",), ("68", 68), (), ("68",)], + "69": [(), ("69",), ("69", 69), (), ("69",), ("69", 69)], + "70": [], + "71": [()], + "72": [(), ("72",)], + "73": [(), ("73",), ("73", 73)], + "74": [(), ("74",), ("74", 74), ()], + "75": [(), ("75",), ("75", 75), (), ("75",)], + "76": [(), ("76",), ("76", 76), (), ("76",), ("76", 76)], + "77": [], + "78": [()], + "79": [(), ("79",)], + "80": [(), ("80",), ("80", 80)], + "81": [(), ("81",), ("81", 81), ()], + "82": [(), ("82",), ("82", 82), (), ("82",)], + "83": [(), ("83",), ("83", 83), (), ("83",), ("83", 83)], + "84": [], + "85": [()], + "86": [(), ("86",)], + "87": [(), ("87",), ("87", 87)], + "88": [(), ("88",), ("88", 88), ()], + "89": [(), ("89",), ("89", 89), (), ("89",)], + "90": [(), ("90",), ("90", 90), (), ("90",), ("90", 90)], + "91": [], + "92": [()], + "93": [(), ("93",)], + "94": [(), ("94",), ("94", 94)], + "95": [(), ("95",), ("95", 95), ()], + "96": [(), ("96",), ("96", 96), (), ("96",)], + "97": [(), ("97",), ("97", 97), (), ("97",), ("97", 97)], + "98": [], + "99": [()], +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dunderAll1.py b/python-parser/packages/pyright-internal/src/tests/samples/dunderAll1.py new file mode 100644 index 00000000..6b44cc93 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dunderAll1.py @@ -0,0 +1,32 @@ +# This sample tests the reportUnsupportedDunderAll diagnostic rule. 
+ +# pyright: reportMissingModuleSource=false + +from typing import Any + +test = 3 +hello = 3 +bar = 3 + +__all__: Any + +__all__ = ("test", "hello") +__all__ = ["test", "hello"] +__all__.append("foo") +__all__.extend(["foo"]) +__all__.remove("foo") +__all__ += ["bar"] + + +my_string = "foo" + +# The following should all generate diagnostics if reportUnsupportedDunderAll +# is enabled. +__all__ = ("test", my_string) +__all__ = ["test", my_string] +__all__ = "test" +__all__.append(my_string) +__all__.extend([my_string]) +__all__.remove(my_string) +__all__ += [my_string] +__all__.something() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/dunderAll2.py b/python-parser/packages/pyright-internal/src/tests/samples/dunderAll2.py new file mode 100644 index 00000000..d7cbe993 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/dunderAll2.py @@ -0,0 +1,16 @@ +# This sample tests the check for symbols that are present in __all__ but +# are not present in the module (reportUnsupportedDunderAll). + +a = 3 +b = 4 +g = 4 + +# This should generate an error for "d" +__all__ = ["a", "b", "c", "d"] +__all__.remove("c") + +# This should generate an error for "e" +__all__.append("e") + +# This should generate an error for "f" +__all__ += ["f", "g"] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/duplicateDeclaration1.py b/python-parser/packages/pyright-internal/src/tests/samples/duplicateDeclaration1.py new file mode 100644 index 00000000..89977c37 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/duplicateDeclaration1.py @@ -0,0 +1,113 @@ +# This sample tests the detection of duplicate (overwritten) symbols. + + +from typing import Callable, overload + + +class C: + # This should generate an error. + def f(self): + return 0 + + # This should generate an error. + def f(self): + return 0 + + def f(self): + return 1 + + # This should generate an error. 
+ def g(self): + return 0 + + g: int + + @property + def h(self) -> int: + return 1 + + @h.setter + def h(self, val: int): + pass + + # This should generate an error. + @property + def j(self) -> int: + return 1 + + def j(self) -> int: + return 3 + + +@overload +def a() -> None: ... + + +@overload +def a(x: int) -> None: ... + + +# This should generate an error. +def a(x: int = 3): + pass + + +def a(x: int = 3): + pass + + +# This should generate an error. +def b(): + pass + + +b: int = 3 + + +def func1(cond: bool): + if cond: + + def a() -> int: + return 3 + + # This should generate an error because its inferred return + # type differs from b above. + def b(): + return 3 + + # This should generate an error because the parameter names don't match. + def c(a: int, b: str) -> None: + return None + + # This should generate an error because the parameter is positional-only. + def d(a: int) -> None: + return None + + def e(a: int, /) -> None: + return None + + # This should generate an error because the parameter is not positional-only. + f: Callable[[int], None] = lambda a: None + + g: Callable[[int], None] = lambda a: None + + else: + + def a() -> int: + return 2 + + def b(): + return 2 + + def c(a: int, c: str) -> None: + return None + + d: Callable[[int], None] = lambda a: None + + e: Callable[[int], None] = lambda a: None + + def f(a: int) -> None: + return None + + def g(a: int, /) -> None: + return None diff --git a/python-parser/packages/pyright-internal/src/tests/samples/duplicateDeclaration2.py b/python-parser/packages/pyright-internal/src/tests/samples/duplicateDeclaration2.py new file mode 100644 index 00000000..de5623b9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/duplicateDeclaration2.py @@ -0,0 +1,45 @@ +# This sample tests the detection of duplicate (overwritten) +# properties. 
+ +# pyright: strict + + +class MyClass: + def __init__(self): + self._property: str = "" + + # This should generate an error because "prop" + # is overwritten below. + @property + def prop(self): + return self._property + + # This should generate an error because "prop" + # is overwritten below. + @prop.setter + def prop(self, val: str): + self._property = val + + # This should generate an error because "prop" + # is overwritten below. + @prop.deleter + def prop(self): + pass + + # This should generate an error because "prop" + # is overwritten below. + @property + def prop(self): + return self._property + + @property + def prop(self): + return self._property + + @prop.setter + def prop(self, val: str): + self._property = val + + @prop.deleter + def prop(self): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/duplicateImports1.py b/python-parser/packages/pyright-internal/src/tests/samples/duplicateImports1.py new file mode 100644 index 00000000..de0493e0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/duplicateImports1.py @@ -0,0 +1,13 @@ +# This sample tests the duplicate import detection. + +import sys + +# This should generate an error because Any is duplicated +from typing import Any, Dict, Any + +# This should generate an error because sys is duplicated +import sys + + +a: Dict[Any, Any] +b = sys.api_version diff --git a/python-parser/packages/pyright-internal/src/tests/samples/emptyContainers1.py b/python-parser/packages/pyright-internal/src/tests/samples/emptyContainers1.py new file mode 100644 index 00000000..e3d5e90c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/emptyContainers1.py @@ -0,0 +1,85 @@ +# This sample tests type inference for empty lists and dictionaries. 
+ +# pyright: reportUnknownVariableType=true, reportUnknownArgumentType=true + + +def func1(a: bool): + val1 = [] + + if a: + val1 = [2, 3] + + reveal_type(val1, expected_text="list[int]") + + if a: + val2 = [] + else: + val2 = [] + + reveal_type(val2, expected_text="list[Unknown]") + + # This should generate an error because val2 is partially unknown. + val2 += [3] + + val3 = val2 + + # This should generate an error because val3 is partially unknown. + print(val3) + reveal_type(val3, expected_text="list[Unknown]") + + if a: + val3 = [3.4] + + print(val3) + reveal_type(val3, expected_text="list[float]") + + +def func2(a: bool): + val1 = {} + + if a: + val1 = {"a": 2} + + reveal_type(val1, expected_text="dict[str, int]") + + if a: + val2 = {} + else: + val2 = {} + + reveal_type(val2, expected_text="dict[Unknown, Unknown]") + + # This should generate an error because val2 is partially unknown. + val2.pop() + + val3 = val2 + + # This should generate an error because val3 is partially unknown. + print(val3) + reveal_type(val3, expected_text="dict[Unknown, Unknown]") + + if a: + val3 = {"b": 3.4} + + print(val3) + reveal_type(val3, expected_text="dict[str, float]") + + +class A: + def method1(self): + self.val1 = [] + self.val2 = {} + self.val3 = [] + + def method2(self): + self.val1 = [3.4] + self.val2 = {"a": 1} + + def method3(self): + reveal_type(self.val1, expected_text="list[float]") + reveal_type(self.val2, expected_text="dict[str, int]") + reveal_type(self.val3, expected_text="list[Unknown]") + + def method4(self) -> list[int]: + # This should generate an error because of a type mismatch. + return self.val1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum1.py b/python-parser/packages/pyright-internal/src/tests/samples/enum1.py new file mode 100644 index 00000000..7092eb86 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum1.py @@ -0,0 +1,291 @@ +# This sample tests the type checker's handling of Enum. 
+ +from enum import Enum, EnumMeta, IntEnum +from typing import Self + + +TestEnum1 = Enum("TestEnum1", " A B, , ,C , \t D\t") +TestEnum2 = IntEnum("TestEnum2", "AA BB CC DD") + + +class TestEnum3(Enum): + A = 0 + B = 1 + C = 2 + D = 3 + + +a = TestEnum1["A"] +aa = TestEnum1.A +reveal_type(aa.name, expected_text="Literal['A']") +reveal_type(aa._name_, expected_text="Literal['A']") +reveal_type(aa.value, expected_text="Literal[1]") +reveal_type(aa._value_, expected_text="Literal[1]") +reveal_type(TestEnum1.D.name, expected_text="Literal['D']") +reveal_type(TestEnum1.D._name_, expected_text="Literal['D']") +reveal_type(TestEnum1.D.value, expected_text="Literal[4]") +reveal_type(TestEnum1.D._value_, expected_text="Literal[4]") + + +def func1(te3: TestEnum3): + reveal_type(te3.name, expected_text="Literal['A', 'B', 'C', 'D']") + reveal_type(te3._name_, expected_text="Literal['A', 'B', 'C', 'D']") + reveal_type(te3.value, expected_text="Literal[0, 1, 2, 3]") + reveal_type(te3._value_, expected_text="Literal[0, 1, 2, 3]") + + +reveal_type(TestEnum3.name, expected_text="property") +reveal_type(TestEnum3._name_, expected_text="str") +reveal_type(TestEnum3.value, expected_text="property") +reveal_type(TestEnum3._value_, expected_text="Any") + + +# This should generate an error because "Z" isn't +# a valid member. +z = TestEnum1.Z + + +bb = TestEnum2.BB + +# This should generate an error because "A" isn't +# a valid member. +z = TestEnum2.A + + +b = TestEnum3.B + +# This should generate an error because "Z" isn't +# a valid member. +z = TestEnum3.Z + +reveal_type(TestEnum3["A"], expected_text="TestEnum3") + +# Test that enum classes are iterable. 
+list1 = list(TestEnum3) +reveal_type(list1, expected_text="list[TestEnum3]") + +list2 = [i for i in TestEnum3] +reveal_type(list2, expected_text="list[TestEnum3]") + +num_items_in_enum3 = len(TestEnum3) +reveal_type(num_items_in_enum3, expected_text="int") + +reveal_type(TestEnum3.A.name, expected_text="Literal['A']") +reveal_type(TestEnum3.B._name_, expected_text="Literal['B']") +reveal_type(TestEnum3.C.value, expected_text="Literal[2]") +reveal_type(TestEnum3.D._value_, expected_text="Literal[3]") + + +TestEnum4 = Enum("TestEnum4", ["A", "B", "C", "D"]) +reveal_type(TestEnum4.A, expected_text="Literal[TestEnum4.A]") +reveal_type(TestEnum4.D, expected_text="Literal[TestEnum4.D]") +reveal_type(TestEnum4.A.name, expected_text="Literal['A']") +reveal_type(TestEnum4.B._name_, expected_text="Literal['B']") +reveal_type(TestEnum4.C.value, expected_text="Literal[3]") +reveal_type(TestEnum4.D._value_, expected_text="Literal[4]") + +TestEnum5 = Enum("TestEnum5", ("A", "B", "C", "D")) +reveal_type(TestEnum5.A, expected_text="Literal[TestEnum5.A]") +reveal_type(TestEnum5.D, expected_text="Literal[TestEnum5.D]") +reveal_type(TestEnum5.A.name, expected_text="Literal['A']") +reveal_type(TestEnum5.B._name_, expected_text="Literal['B']") +reveal_type(TestEnum5.C.value, expected_text="Literal[3]") +reveal_type(TestEnum5.D._value_, expected_text="Literal[4]") + +d_value = "d" + +TestEnum6 = Enum("TestEnum6", [("A", 1), ("B", [1, 2]), ("C", "c"), ("D", d_value)]) +reveal_type(TestEnum6.A, expected_text="Literal[TestEnum6.A]") +reveal_type(TestEnum6.D, expected_text="Literal[TestEnum6.D]") +reveal_type(TestEnum6.A.name, expected_text="Literal['A']") +reveal_type(TestEnum6.B._name_, expected_text="Literal['B']") +reveal_type(TestEnum6.A.value, expected_text="Literal[1]") +reveal_type(TestEnum6.B.value, expected_text="list[int]") +reveal_type(TestEnum6.C.value, expected_text="Literal['c']") +reveal_type(TestEnum6.D._value_, expected_text="Literal['d']") + +TestEnum7 = 
Enum("TestEnum7", (("A", 1), ("D", "d"))) +reveal_type(TestEnum7.A, expected_text="Literal[TestEnum7.A]") +reveal_type(TestEnum7.D, expected_text="Literal[TestEnum7.D]") +reveal_type(TestEnum7.A.name, expected_text="Literal['A']") +reveal_type(TestEnum7.A.value, expected_text="Literal[1]") +reveal_type(TestEnum7.D._value_, expected_text="Literal['d']") + +TestEnum8 = Enum("TestEnum8", {"A": 1, "B": [1, 2], "C": "c", "D": d_value}) +reveal_type(TestEnum8.A, expected_text="Literal[TestEnum8.A]") +reveal_type(TestEnum8.D, expected_text="Literal[TestEnum8.D]") +reveal_type(TestEnum8.A.name, expected_text="Literal['A']") +reveal_type(TestEnum8.B._name_, expected_text="Literal['B']") +reveal_type(TestEnum8.A.value, expected_text="Literal[1]") +reveal_type(TestEnum8.B.value, expected_text="list[int]") +reveal_type(TestEnum8.C.value, expected_text="Literal['c']") +reveal_type(TestEnum8.D._value_, expected_text="Literal['d']") + + +class TestEnum9(Enum): + _other1: int + _other2: int + + def __new__(cls, value: str, other1: int, other2: int): + obj = object.__new__(cls) + obj._value_ = value + obj._other1 = other1 + obj._other2 = other2 + return obj + + A = ("a", 1, 2) + B = ("b", 2, 3) + + +te9_A = TestEnum9.A +reveal_type(te9_A, expected_text="Literal[TestEnum9.A]") +reveal_type(te9_A.value, expected_text="Any") +reveal_type(te9_A._value_, expected_text="Any") +reveal_type(te9_A.name, expected_text="Literal['A']") +reveal_type(te9_A._name_, expected_text="Literal['A']") + + +class CustomEnumMeta1(EnumMeta): + pass + + +class TestEnum10(Enum, metaclass=CustomEnumMeta1): + A = 1 + B = 2 + + +te10_A = TestEnum10.A +reveal_type(te10_A, expected_text="Literal[TestEnum10.A]") +reveal_type(te10_A.value, expected_text="Any") +reveal_type(te9_A._value_, expected_text="Any") +reveal_type(te9_A.name, expected_text="Literal['A']") +reveal_type(te9_A._name_, expected_text="Literal['A']") + + +def func2(e: type[Enum]): + values = {v.value for v in e} + reveal_type(values, 
expected_text="set[Any]") + + names = {v.name for v in e} + reveal_type(names, expected_text="set[str]") + + +class TestEnum11(Enum): + (A, B, C) = range(3) + + +te11_A = TestEnum11.A +reveal_type(te11_A, expected_text="Literal[TestEnum11.A]") +reveal_type(te11_A.value, expected_text="int") + + +def func3(self) -> None: + pass + + +class TestEnum12(Enum): + a = 1 + b = lambda self: None + c = func3 + + +reveal_type(TestEnum12.a, expected_text="Literal[TestEnum12.a]") +reveal_type(TestEnum12.b, expected_text="(self: Unknown) -> None") +reveal_type(TestEnum12.c, expected_text="(self: Unknown) -> None") + + +class TestEnum13(metaclass=CustomEnumMeta1): + pass + + +TestEnum14 = TestEnum13("TestEnum14", "A, B, C") +reveal_type(TestEnum14.A, expected_text="Literal[TestEnum14.A]") + + +class TestEnum15(Enum): + _value_: str + A = 1 + B = 2 + + def __init__(self, value: int): + self._value_ = str(value) + + +te15_A = TestEnum15.A +reveal_type(te15_A, expected_text="Literal[TestEnum15.A]") +reveal_type(te15_A.value, expected_text="str") +reveal_type(te15_A._value_, expected_text="str") + + +class TestEnum16(IntEnum): + A = 1 + B = 2 + C = 3 + + D = C # Alias for C + + +reveal_type(TestEnum16.D, expected_text="Literal[TestEnum16.C]") +reveal_type(TestEnum16.D.value, expected_text="Literal[3]") + + +class TestEnum17(IntEnum): + def __new__(cls, val: int, doc: str) -> Self: + obj = int.__new__(cls, val) + obj._value_ = val + obj.__doc__ = doc + return obj + + +class TestEnum18(TestEnum17): + A = (1, "A") + B = (2, "B") + + +class TestEnum19(Enum): + A = 1 + __B = 2 + + +reveal_type(TestEnum19.A, expected_text="Literal[TestEnum19.A]") +reveal_type(TestEnum19.__B, expected_text="Literal[2]") + + +class TestEnum20(Enum): + A = 1 + B = A + 1 + + +reveal_type(TestEnum20.A, expected_text="Literal[TestEnum20.A]") +reveal_type(TestEnum20.A.value, expected_text="Literal[1]") +reveal_type(TestEnum20.B, expected_text="Literal[TestEnum20.B]") +reveal_type(TestEnum20.B.value, 
expected_text="Literal[2]") +reveal_type(TestEnum20.A.A.A, expected_text="Literal[TestEnum20.A]") +reveal_type(TestEnum20.A.B.A, expected_text="Literal[TestEnum20.A]") +reveal_type(TestEnum20.A.B, expected_text="Literal[TestEnum20.B]") + + +class TestEnum21Base(Enum, metaclass=CustomEnumMeta1): + @property + def value(self) -> str: + return "test" + + +class TestEnum21(TestEnum21Base): + A = 1 + + +reveal_type(TestEnum21.A.value, expected_text="str") + + +class TestEnum22Base(Enum): + @property + def value(self) -> str: + return "test" + + +class TestEnum22(TestEnum22Base): + A = 1 + + +reveal_type(TestEnum22.A.value, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum10.py b/python-parser/packages/pyright-internal/src/tests/samples/enum10.py new file mode 100644 index 00000000..f185dd6c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum10.py @@ -0,0 +1,15 @@ +# This sample tests that a call to an enum class does not invoke the +# constructor for the class if the enum class defines members. + +from enum import Enum + + +class Example(Enum): + A = (1, 2) + + def __init__(self, value, other) -> None: + self._value_ = value + self.other = other + + +Example(1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum11.py b/python-parser/packages/pyright-internal/src/tests/samples/enum11.py new file mode 100644 index 00000000..e80497ca --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum11.py @@ -0,0 +1,54 @@ +# This sample tests the validation of member values. + +from enum import Enum + + +class Enum1(Enum): + _value_: int + RED = 1 + + # This should generate an error because of a type mismatch. + GREEN = "green" + + +class Enum2(Enum): + def __new__(cls, value: int): + obj = object.__new__(cls) + obj._value_ = value + + RED = 1 + + # This should generate an error because of a type mismatch. 
+ GREEN = "green" + + # This should generate an error because of a type mismatch. + BLUE = (1, "blue") + + +class Enum3(Enum): + def __init__(self, value: int): + self._value_ = value + + RED = 1 + + # This should generate an error because of a type mismatch. + GREEN = "green" + + # This should generate an error because of a type mismatch. + BLUE = (1, "blue") + + +class Enum4(Enum): + def __init__(self, value: int, other: str): + self._value_ = value + + # This should generate an error because of a type mismatch. + RED = 1 + + # This should generate an error because of a type mismatch. + GREEN = "green" + + BLUE = (1, "blue") + + # This should generate an error because of a type mismatch. + GRAY = (1, "blue", 1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum12.py b/python-parser/packages/pyright-internal/src/tests/samples/enum12.py new file mode 100644 index 00000000..f2cbf631 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum12.py @@ -0,0 +1,18 @@ +# This sample tests that any attribute that is treated as a member at +# runtime does not have a type annotation. The typing spec indicates that +# type checkers should flag such conditions as errors. + +from enum import Enum +from typing import Callable, Final + + +class Enum1(Enum): + # This should generate an error. + MEMBER_1: int = 1 + + # This should generate an error. + MEMBER_2: Final = 3 + + _NON_MEMBER_: int = 3 + + NON_MEMBER_CALLABLE: Callable[[], int] = lambda: 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum13.py b/python-parser/packages/pyright-internal/src/tests/samples/enum13.py new file mode 100644 index 00000000..f250c804 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum13.py @@ -0,0 +1,39 @@ +# This sample tests the handling of IntEnum and StrEnum literal values. 
+ +from enum import IntEnum, StrEnum, ReprEnum +from typing import Literal, LiteralString + + +class IntEnum1(IntEnum): + MEMBER_1 = 1 + MEMBER_2 = 2 + + +i1: Literal[1] = IntEnum1.MEMBER_1.value + +# This should generate an error. +i2: Literal[1] = IntEnum1.MEMBER_2.value + + +class StrEnum1(StrEnum): + MEMBER_1 = "a" + MEMBER_2 = "b" + + +s1: Literal["a"] = StrEnum1.MEMBER_1.value + +# This should generate an error. +s2: Literal["b"] = StrEnum1.MEMBER_1.value + +s3: LiteralString = StrEnum1.MEMBER_1.value + + +class BytesEnum(bytes, ReprEnum): + MEMBER_1 = b"1" + MEMBER_2 = b"2" + + +b1: Literal[b"1"] = BytesEnum.MEMBER_1.value + +# This should generate an error. +b2: Literal[b"2"] = BytesEnum.MEMBER_1.value diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum14.py b/python-parser/packages/pyright-internal/src/tests/samples/enum14.py new file mode 100644 index 00000000..1bba055f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum14.py @@ -0,0 +1,16 @@ +# This sample tests certain error conditions that previously caused +# an infinite recursion condition in the type evaluator. + +from __future__ import annotations +from enum import Enum +from typing import Literal + + +class A(Enum): + # This should generate two errors. + x: Literal[A.x] + + +class B(Enum): + # This should generate an error. + x: B.x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum2.py b/python-parser/packages/pyright-internal/src/tests/samples/enum2.py new file mode 100644 index 00000000..fc61ef7c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum2.py @@ -0,0 +1,24 @@ +# This sample verifies that enums are iterable and indexable. 
+ +import enum +from typing import Type + + +class Color(enum.Enum): + RED = enum.auto() + GREEN = enum.auto() + + +class Foo: + _foo: Type[enum.Enum] + + def __init__(self): + self._foo = Color + + def _print_foo(self): + for f in self._foo: + print(f) + + +def test_fn(color: Type[enum.Enum]): + print(color["RED"]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum3.py b/python-parser/packages/pyright-internal/src/tests/samples/enum3.py new file mode 100644 index 00000000..1660e0e9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum3.py @@ -0,0 +1,23 @@ +# This sample tests that enum values are treated as constant even if +# they are not named as such. + +from enum import Enum + + +class EnumA(Enum): + bad = 0 + good = 1 + + +class EnumB: + def __init__(self): + self.status = EnumA.bad + self.foo = 1 + + +myobj = EnumB() + +reveal_type(myobj.status, expected_text="EnumA") + +myobj.status = EnumA.good +reveal_type(myobj.status, expected_text="Literal[EnumA.good]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum4.py b/python-parser/packages/pyright-internal/src/tests/samples/enum4.py new file mode 100644 index 00000000..a7139fa6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum4.py @@ -0,0 +1,16 @@ +# This sample tests logical operators on enums. 
+ +import enum + + +class CustomFlags(enum.Flag): + A = enum.auto() + B = enum.auto() + C = A | B + + +flags1 = CustomFlags.A | CustomFlags.B +reveal_type(flags1, expected_text="CustomFlags") + +flags2 = CustomFlags.A & CustomFlags.B +reveal_type(flags2, expected_text="CustomFlags") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum5.py b/python-parser/packages/pyright-internal/src/tests/samples/enum5.py new file mode 100644 index 00000000..641ac204 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum5.py @@ -0,0 +1,33 @@ +# This sample tests the handling of instance variables within +# an enum class that are initialized by a custom initializer. +# They should not be treated as enum objects. + +from enum import Enum + + +class Descriptor: + def __get__(self, instance, owner=None) -> complex: + return 3j + + +class EnumA(Enum): + ENTRY = (123, "abc") + + desc = Descriptor() + + _exempt_ = 12 + + foo: int + bar: str + + def __init__(self, foo: int, bar: str) -> None: + self.foo = foo + self.bar = bar + + +baz = 123 + EnumA.ENTRY.foo +reveal_type(baz, expected_text="int") + +reveal_type(EnumA._exempt_, expected_text="int") + +reveal_type(EnumA.desc, expected_text="complex") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum6.py b/python-parser/packages/pyright-internal/src/tests/samples/enum6.py new file mode 100644 index 00000000..d0346919 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum6.py @@ -0,0 +1,53 @@ +# This sample tests error detection of duplicate enum members and +# an attempt to subclass an enum. + +from enum import Enum + + +class Color(Enum): + red = "red" + blue = "blue" + yellow = "yellow" + + # This should generate an error because the enum member + # already exists. + blue = "blue" + + def __init__(self, value: str): + if value == "blue": + self.foo = False + else: + self.foo = True + + +class NonEnum: ... 
+ + +# This should generate an error because enums cannot +# be subclassed. +class ExtraColor(NonEnum, Color): + pass + + +# This should generate an error because reassignment of enum +# values is not allowed. +Color.red = "new" + + +class EnumWithoutValue(Enum): + def do_something(self): + pass + + @property + def y(self) -> None: + pass + + +class EnumWithValue(EnumWithoutValue): + x = 0 + + +# This should generate an error because enums with values +# cannot be subclassed. +class EnumSubclass(EnumWithValue): + z: int diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum7.py b/python-parser/packages/pyright-internal/src/tests/samples/enum7.py new file mode 100644 index 00000000..b770e3fe --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum7.py @@ -0,0 +1,16 @@ +# This sample tests the custom __call__ method on the EnumMeta class. + +from enum import Enum + + +class Foo(Enum): + A = 1 + B = 2 + + +Foo(1) + +# This would have previously generated an error prior to Python 3.12, +# but it now does not because of an additional overload on the EnumMeta +# __call__ method. +Foo(1, 2, 3, 4) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum8.py b/python-parser/packages/pyright-internal/src/tests/samples/enum8.py new file mode 100644 index 00000000..49afbf3d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum8.py @@ -0,0 +1,15 @@ +# This sample tests the case where an enum class is used as a bound +# for a TypeVar and instantiated. 
+ +# pyright: strict + +from enum import Enum +from typing import TypeVar + +TEnum = TypeVar("TEnum", bound=Enum) + + +def func1(enum_cls: type[TEnum], enum_value: object) -> TEnum: + enum_member = enum_cls(enum_value) + reveal_type(enum_member, expected_text="TEnum@func1") + return enum_member diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enum9.py b/python-parser/packages/pyright-internal/src/tests/samples/enum9.py new file mode 100644 index 00000000..cb7cdea1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enum9.py @@ -0,0 +1,50 @@ +# This sample tests the enum.member and enum.nonmember classes introduced +# in Python 3.11. + +import sys +from enum import Enum, member, nonmember +from typing import Literal + + +class Enum1(Enum): + MEMBER = 1 + ANOTHER_MEMBER = member(2) + NON_MEMBER = nonmember(3) + + @member + @staticmethod + def ALSO_A_MEMBER() -> Literal[4]: + return 4 + + @member + class ClassA: + pass + + @nonmember + class ClassB: + pass + + class ClassC: + pass + + +reveal_type(Enum1.MEMBER, expected_text="Literal[Enum1.MEMBER]") +reveal_type(Enum1.ANOTHER_MEMBER, expected_text="Literal[Enum1.ANOTHER_MEMBER]") +reveal_type(Enum1.ALSO_A_MEMBER, expected_text="Literal[Enum1.ALSO_A_MEMBER]") +reveal_type(Enum1.NON_MEMBER, expected_text="int") +reveal_type(Enum1.ClassA, expected_text="Literal[Enum1.ClassA]") +reveal_type(Enum1.ClassB, expected_text="type[ClassB]") + +if sys.version_info >= (3, 13): + reveal_type(Enum1.ClassC, expected_text="type[ClassC]") +else: + reveal_type(Enum1.ClassC, expected_text="Literal[Enum1.ClassC]") + + +reveal_type(Enum1.MEMBER.value, expected_text="Literal[1]") +reveal_type(Enum1.ANOTHER_MEMBER.value, expected_text="int") +reveal_type(Enum1.ALSO_A_MEMBER.value, expected_text="() -> Literal[4]") +reveal_type(Enum1.ClassA.value, expected_text="type[ClassA]") + +if sys.version_info < (3, 13): + reveal_type(Enum1.ClassC.value, expected_text="type[ClassC]") diff --git 
a/python-parser/packages/pyright-internal/src/tests/samples/enumAuto1.py b/python-parser/packages/pyright-internal/src/tests/samples/enumAuto1.py new file mode 100644 index 00000000..901e6b8f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enumAuto1.py @@ -0,0 +1,15 @@ +# This sample tests enum types with auto() values. + +from enum import Enum, auto + + +class CacheBehavior(Enum): + ALWAYS = auto() + NEVER = auto() + AUTO = auto() + + +a: CacheBehavior = CacheBehavior.ALWAYS +b: CacheBehavior = CacheBehavior["ALWAYS"] +foo = "A" + "UTO" +c: CacheBehavior = CacheBehavior[foo] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/enumGenNextValue1.py b/python-parser/packages/pyright-internal/src/tests/samples/enumGenNextValue1.py new file mode 100644 index 00000000..57dc8931 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/enumGenNextValue1.py @@ -0,0 +1,23 @@ +# This sample tests the handling of a custom _generate_next_value_ override. + +from enum import Enum, auto + + +class EnumA(Enum): + x = auto() + + +reveal_type(EnumA.x.value, expected_text="int") + + +class EnumC(str, Enum): + @staticmethod + def _generate_next_value_(name, start, count, last_values) -> str: + return name + + +class EnumD(EnumC): + x = auto() + + +reveal_type(EnumD.x.value, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/exceptionGroup1.py b/python-parser/packages/pyright-internal/src/tests/samples/exceptionGroup1.py new file mode 100644 index 00000000..c1916921 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/exceptionGroup1.py @@ -0,0 +1,127 @@ +# This sample tests the syntax handling for Python 3.11 exception groups +# as described in PEP 654. + + +def func1(): + + try: + pass + + # This should generate an error if using Python 3.10 or earlier. 
+ except* ValueError as e: + reveal_type(e, expected_text="ExceptionGroup[ValueError]") + pass + + # This should generate an error if using Python 3.10 or earlier. + except*: + pass + + +def func2(): + try: + pass + # This should generate an error because ExceptionGroup derives + # from BaseExceptionGroup. + except* ExceptionGroup as e: + pass + + # This should generate an error because ExceptionGroup derives + # from BaseExceptionGroup. + except* (ValueError, ExceptionGroup) as e: + pass + +def func3(): + try: + pass + + except* ValueError: + pass + + # This should generate an error because except and except* cannot be mixed. + except NameError: + pass + + except* ValueError: + pass + +def func4(): + try: + pass + + except ValueError: + pass + + except NameError: + pass + + # This should generate an error because except and except* cannot be mixed. + except* ValueError: + pass + + +def func5(): + try: + pass + + except* ValueError: + pass + + # This should generate an error because except and except* cannot be mixed. + except: + pass + +def func6(): + try: + pass + + # This should generate an error because except* requires an exception type. + except*: + pass + + +def func7(): + while True: + try: + ... + except* ValueError: + def inner(): + while True: + if 1 < 1: + continue + else: + break + return + + if 1 < 2: + # This should generate an error because + # break is not allowed in an except* block. + break + if 1 < 2: + # This should generate an error because + # continue is not allowed in an except* block. + continue + + while 1 < 2: + # This is allowed because it's within a nested loop. + break + + # This should generate an error because + # return is not allowed in an except* block. + return + + + +def func8(): + + try: + pass + + # This should generate an error if using Python 3.10 or earlier. 
+ except* (ValueError, FloatingPointError) as e: + reveal_type(e, expected_text="ExceptionGroup[ValueError | FloatingPointError]") + pass + + # This should generate an error if using Python 3.10 or earlier. + except* BaseException as e: + reveal_type(e, expected_text="BaseExceptionGroup[BaseException]") + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/expression1.py b/python-parser/packages/pyright-internal/src/tests/samples/expression1.py new file mode 100644 index 00000000..f395ca47 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/expression1.py @@ -0,0 +1,54 @@ +# This sample tests various arithmetic expressions. + + +def returnsInt1() -> int: + a = 1 + b = 2 + + return a + b % b // a - b // a + + +def returnsInt2() -> int: + a = 1.0 + b = 2 + + # This should generate an error because + # it should evaluate to a float, which is + # not compatible with the specified return + # type. + return a + b % b // a - b // a + + +def returnsFloat1() -> float: + a = 1 + b = 2 + return a + b % b / a - b // a + + +def returnsFloat2() -> float: + a = complex(1, 2) + b = 2 + + # This should generate an error because it + # should evaluate to a complex, which is + # not compatible with the specified return + # type. + return a + b % b / a - b // a + + +def returnsComplex1() -> complex: + a = complex(1, 2) + b = 2 + c = 4.0 + + # This should generate an error because a + # float should be divisible by a complex. + return a + b % (b / a - c // a) + + +a = 3 +b = 4 + +# This should generate an error because matrix multiply +# isn't supported for int. +c = a @ b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/expression2.py b/python-parser/packages/pyright-internal/src/tests/samples/expression2.py new file mode 100644 index 00000000..b41609c3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/expression2.py @@ -0,0 +1,29 @@ +# This sample tests various logical expressions. 
+ + +class Foo: + def do_something1(self): + pass + + def do_something2(self): + pass + + +class Bar: + def do_something1(self): + pass + + +a = 0 +foo = Foo() +bar = Bar() + +b = a and foo or bar + +# This should not be flagged as an error because +# the type of b should be type Foo. +b.do_something1() + +# This should be flagged as an error because +# Bar doesn't define a do_something2 method. +b.do_something2() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/expression3.py b/python-parser/packages/pyright-internal/src/tests/samples/expression3.py new file mode 100644 index 00000000..83faa2a4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/expression3.py @@ -0,0 +1,26 @@ +# This sample tests various unary expressions. + + +def returnsFloat1() -> float: + a = 1 + b = not a + + return b + + +def returnsInt1() -> int: + a = 1 + b = -a + return b + + +def returnsInt2() -> int: + a = 1 + b = +a + return b + + +def returnsInt3() -> int: + a = 4 + b = ~a + return b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/expression4.py b/python-parser/packages/pyright-internal/src/tests/samples/expression4.py new file mode 100644 index 00000000..9388caf4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/expression4.py @@ -0,0 +1,21 @@ +# This sample tests the handling of "in" and "not in" operators. + + +def func1(a: int | str): + # This should generate an error because a's type doesn't + # support a __contains__ method. + if 3 in a: + pass + + # This should generate an error because a's type doesn't + # support a __contains__ method. 
+ if 3 not in a: + pass + + +def func(a: list[int] | set[float]): + if 3 in a: + pass + + if 3 not in a: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/expression5.py b/python-parser/packages/pyright-internal/src/tests/samples/expression5.py new file mode 100644 index 00000000..3b8e09f2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/expression5.py @@ -0,0 +1,70 @@ +# This sample tests the validation of binary operations, especially +# in cases where one or both operands are union types and some combination +# of the types is not supported. + +from typing import Optional, Union + + +def arith( + v1: Union[str, int], + v2: Optional[Union[str, int]], + v3: str, + v4: int, + v5: Optional[int], +): + # This should generate an error. + a1 = v1 + v2 + + # This should generate an error + a2 = 3 + v2 + + # This should generate an error + a3 = 3 + v3 + + # This should generate an error + a4 = 3 + None + + a5 = 3 + v4 + + a6 = "hi" + v3 + + # This should generate an error + a7 = "hi" + v4 + + # This should generate an error + a8 = 3 + v5 + + assert v5 is not None + a8 = 3 + v5 + + +def comparison( + v1: Union[str, int], + v2: Optional[Union[str, int]], + v3: str, + v4: int, + v5: Optional[int], +): + # This should generate an error. 
+ a1 = v1 < v2 + + # This should generate an error + a2 = 3 > v2 + + a3 = 3 <= v3 + + # This should generate an error + a4 = 3 >= None + + a5 = 3 < v4 + + a6 = "hi" < v3 + + # This should generate an error + a7 = "hi" < v4 + + # This should generate an error + a8 = 3 < v5 + + assert v5 is not None + a8 = 3 < v5 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/expression6.py b/python-parser/packages/pyright-internal/src/tests/samples/expression6.py new file mode 100644 index 00000000..9889eed9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/expression6.py @@ -0,0 +1,14 @@ +# This sample tests that binary operations "or" and "and" +# properly handle bidirectional type inference. + +from typing import Any + + +def func_or(a: dict[str, Any] | None): + a = a or {"": 0} + reveal_type(a, expected_text="dict[str, Any]") + + +def func_and(): + a: dict[str, Any] | None = True and {"": 0} + reveal_type(a, expected_text="dict[str, Any]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/expression7.py b/python-parser/packages/pyright-internal/src/tests/samples/expression7.py new file mode 100644 index 00000000..5b7f52ca --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/expression7.py @@ -0,0 +1,37 @@ +# This sample tests various conditions with AND and OR operators. + + +def func1() -> bool: ... + + +def func2() -> int: ... + + +def func3() -> str: ... 
+ + +reveal_type(func1() and func2(), expected_text="int | Literal[False]") +reveal_type(func1() and func3(), expected_text="str | Literal[False]") +reveal_type(func2() and func1(), expected_text="bool | Literal[0]") +reveal_type(func3() and func1(), expected_text="bool | Literal['']") + +reveal_type(func1() or func2(), expected_text="int | Literal[True]") +reveal_type(func1() or func3(), expected_text="str | Literal[True]") +reveal_type(func2() or func1(), expected_text="int | bool") +reveal_type(func3() or func1(), expected_text="str | bool") + + +class ClassA: ... + + +class ClassB: ... + + +# This should generate an error because binary operators are not allowed +# in type annotations. +def func4(a: ClassA and ClassB): ... + + +# This should generate an error because binary operators are not allowed +# in type annotations. +def func5(a: ClassA or ClassB): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/expression8.py b/python-parser/packages/pyright-internal/src/tests/samples/expression8.py new file mode 100644 index 00000000..cdf02196 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/expression8.py @@ -0,0 +1,26 @@ +# This sample verifies that binary expressions like "less than" +# work if the operands are constrained TypeVars. + +from abc import abstractmethod +from typing import Protocol, TypeVar + +_T_contra = TypeVar("_T_contra", contravariant=True) +_T = TypeVar("_T") + + +class ComparableTo(Protocol[_T_contra]): + @abstractmethod + def __lt__(self, __x: _T_contra) -> bool: + pass + + +def custom_compare(a: ComparableTo[_T], b: _T) -> bool: + return a < b + + +custom_compare("first", "second") + +custom_compare(3, 2) + +# This should generate an error. 
+custom_compare(3, "hi") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/expression9.py b/python-parser/packages/pyright-internal/src/tests/samples/expression9.py new file mode 100644 index 00000000..9633a9f4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/expression9.py @@ -0,0 +1,12 @@ +# This sample tests binary expressions that use list expressions as +# a RHS operand. + + +def func1(a: list[int | str]): + a += [5] + + return a + [5] + + +def func2(a: list[int], b: int): + return a + [b] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/final1.py b/python-parser/packages/pyright-internal/src/tests/samples/final1.py new file mode 100644 index 00000000..e795e233 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/final1.py @@ -0,0 +1,14 @@ +# This sample tests the handling of the @final class decorator. + +from typing import final + + +@final +class ClassA: + pass + + +# This should generate an error because ClassA is +# decorated as final. +class ClassB(ClassA): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/final2.py b/python-parser/packages/pyright-internal/src/tests/samples/final2.py new file mode 100644 index 00000000..d38bf97d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/final2.py @@ -0,0 +1,162 @@ +# This sample tests the handling of the @final method decorator. + +from typing import Any, cast, final, overload + + +class ClassA: + def func1(self): + pass + + @classmethod + def func2(cls): + pass + + @final + def func3(self): + pass + + @final + @classmethod + def func4(cls): + pass + + @final + def _func5(self): + pass + + @final + def __func6(self): + pass + + @overload + def func7(self, x: int) -> int: ... + + @overload + def func7(self, x: str) -> str: ... + + @final + def func7(self, x: int | str) -> int | str: ... 
+ + # This should generate an error because the implementation + # of func8 is marked as not final but this overload is. + @overload + @final + def func8(self, x: int) -> int: ... + + @overload + def func8(self, x: str) -> str: ... + + def func8(self, x: int | str) -> int | str: ... + + @final + @property + def prop1(self) -> int: ... + + @property + @final + def prop2(self) -> int: ... + + @property + def prop3(self) -> int: ... + + @prop3.setter + @final + def prop3(self, value: int) -> None: ... + + +# This should generate an error because func3 is final. +ClassA.func3 = lambda self: None + +# This should generate an error because func4 is final. +ClassA.func4 = lambda cls: None + +# This should generate an error because _func5 is final. +ClassA._func5 = lambda self: None + +# This should generate an error because func7 is final. +ClassA.func7 = cast(Any, lambda self, x: "") + + +class ClassB(ClassA): + def func1(self): + # This should generate an error because @final isn't allowed + # on non-method functions. + @final + def func1_inner(): + pass + + @classmethod + def func2(cls): + pass + + # This should generate an error because func3 is + # defined as final. + def func3(self): + pass + + # This should generate an error because func3 is + # defined as final. + @classmethod + def func4(cls): + pass + + # This should generate an error because func3 is + # defined as final. + def _func5(self): + pass + + # This should not generate an error because double + # underscore symbols are exempt from this check. + def __func6(self): + pass + + @overload + def func7(self, x: int) -> int: ... + + @overload + def func7(self, x: str) -> str: ... + + @final + # This should generate an error because func7 is + # defined as final. + def func7(self, x: int | str) -> int | str: ... + + # This should generate an error because prop1 is + # defined as final. + @property + def prop1(self) -> int: ... + + # This should generate an error because prop2 is + # defined as final. 
+ @property + def prop2(self) -> int: ... + + @property + def prop3(self) -> int: ... + + # This should generate an error because prop3's setter is + # defined as final. + @prop3.setter + @final + def prop3(self, value: int) -> None: ... + + +class Base4: ... + + +class Base5: + @final + def __init__(self, v: int) -> None: ... + + +class C(Base4, Base5): + # This should generate an error because it overrides Base5, + # and __init__ is marked final there. + def __init__(self) -> None: ... + + +# This should generate an error because @final isn't allowed on +# non-method functions. +@final +def func1(): + return None diff --git a/python-parser/packages/pyright-internal/src/tests/samples/final3.py b/python-parser/packages/pyright-internal/src/tests/samples/final3.py new file mode 100644 index 00000000..f58799a6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/final3.py @@ -0,0 +1,270 @@ +# This sample tests the "Final" type annotation +# introduced in Python 3.8. + +import typing +from typing import Annotated, Any, Final, Protocol, TypeVar + +T = TypeVar("T") + +v1: typing.Final = 3 + +must_be_int: int = v1 + +# This should generate an error because +# reassignment of a Final variable should +# not be allowed. +v1 = 4 + +# This should generate an error because there +# is a previous Final declaration. +v1: Final[int] + +# This should generate an error because +# the type doesn't match. +v2: Final[str] = 3 + +# This should generate an error because +# we expect only one type argument for Final. +v3: Final[str, int] = "hello" + + +v4: Final = 5 +reveal_type(v4, expected_text="Literal[5]") + + +class ClassA: + member1: Final = 4 + + # This should generate an error because only + # one declaration can have a Final attribute. + member1: Final + + member2: typing.Final[int] = 3 + + member4: Final[int] + + # This should generate an error because there is + # no assignment. 
+ member5: Final[str] + + member6: Final[int] + + _member7: Final = 6 + __member8: Final = 6 + + member9: Final = 2 + + # This should generate an error. + member9 = 3 + + def __init__(self, a: bool): + # This should generate an error because a Final + # member outside of a stub file or a class body + # must have an initializer. + self.member3: Final + + # This should generate an error because this symbol + # already has a final declaration. + self.member2: Final[int] + + if a: + self.member4 = 5 + else: + self.member4 = 6 + + self.member4 = 6 + + def another_method(self): + # This should generate an error because assignments + # can occur only within class bodies or __init__ methods. + self.member6 = 4 + + # This should generate an error because 'Final' cannot + # be used to annotate instance variables outside of + # an __init__ method. + self.member7: Final = 6 + + +reveal_type(ClassA.member1, expected_text="Literal[4]") +reveal_type(ClassA(True).member1, expected_text="Literal[4]") + + +class ClassB(ClassA): + # This should generate an error because we are overriding + # a member that is marked Final in the parent class. + member1 = 5 + + # This should generate an error because we are overriding + # a member that is marked Final in the parent class. + _member7: Final = 6 + + # This should not generate an error because it's private. + __member8: Final = 6 + + def __init__(self): + # This should generate an error because we are overriding + # a member that is marked Final in the parent class. + self.member6 = 5 + + +# This should generate an error because Final isn't allowed for +# function parameters. +def func1(a: Final[int]): + pass + + +# This should generate an error because Final must the outermost +# type in assignments. +b: list[Final[int]] = [] + + +class ClassC: + member1: Final = 3 + member2: Final + member4: Final + member5: Final = 3 + + def __init__(self): + # This should generate an error. 
+ self.member1 = 5 + + self.member2 = "hi" + + self.member3: Final = "hi" + + # This should generate an error. + ClassC.member4 = "hi" + + # This should generate an error. + ClassC.member5 = 3 + + def other(self): + # This should generate an error. + self.member1 = 5 + + # This should generate an error. + self.member2 = "hi" + + # This should generate an error. + self.member3 = "hi" + + +a = ClassC() + +# This should generate an error. +a.member1 = 4 + +# This should generate an error. +a.member3 = "x" + + +def func2(): + x: Final[Any] = 3 + + # This should generate an error because x is Final. + x += 1 + + # This should generate an error because x is Final. + a = (x := 4) + + # This should generate an error because x is Final. + for x in [1, 2, 3]: + pass + + # This should generate an error because x is Final. + with open("Hi") as x: + pass + + try: + pass + # This should generate an error because x is Final. + except ModuleNotFoundError as x: + pass + + # This should generate an error because x is Final. + (a, x) = (1, 2) + + +class ClassD: + def __init__(self): + self.x: Final = 1 + + def method1(self): + # This should generate an error because x is Final. + self.x += 1 + + +class ClassE(Protocol): + x: Final[int] + + +def func3(x: type[T]) -> T: + return x() + + +# This should generate an error because Final isn't compatible with type. +func3(Final[int]) + + +v5: Final = lambda: None + + +# This should generate an error because foo5 is declared as Final. +def v5() -> None: + pass + + +# This should generate an error because ClassVar is Final. 
+from typing import ClassVar + +ClassVar: Final = 3 + + +v6: Annotated[Final[int], "meta"] = 1 + +# This should generate an error +v6 = 2 + +v7: Annotated[Annotated[Final[int], "meta"], "meta"] = 1 + +# This should generate an error +v7 = 2 + +v8: Annotated[Final, "meta"] = 1 + +# This should generate an error +v8 = 2 + +v9: Final = 2 or "2" +reveal_type(v9, expected_text="Literal[2]") + +v10: Final = 0 or "2" +reveal_type(v10, expected_text="Literal['2']") + +v11: Final = b"" and True +reveal_type(v11, expected_text='Literal[b""]') + +v12: Final = b"2" and True +reveal_type(v12, expected_text="Literal[True]") + + +def func4(): + while 1 < 1: + # This should generate an error because it's in a loop. + x1: Final = 1 + + for i in range(10): + if i < 3: + # This should generate an error because it's in a loop. + x2: Final[int] = 1 + + +class ClassF: + while 1 < 2: + # This should generate an error because it's in a loop. + x1: Final = 1 + + for i in range(10): + if i < 3: + # This should generate an error because it's in a loop. + x2: Final[int] = 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/final5.py b/python-parser/packages/pyright-internal/src/tests/samples/final5.py new file mode 100644 index 00000000..17ce2f05 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/final5.py @@ -0,0 +1,18 @@ +# This sample tests that instance variables declared as Final within +# a dataclass do not need to have an explicit assignment because +# the generated __init__ method will assign them. 
+ +from dataclasses import dataclass +from typing import Final + + +class ClassA: + x: Final[int] + + def __init__(self, x: int) -> None: + self.x = x + + +@dataclass +class ClassB: + x: Final[int] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/final7.py b/python-parser/packages/pyright-internal/src/tests/samples/final7.py new file mode 100644 index 00000000..275687ae --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/final7.py @@ -0,0 +1,8 @@ +# This sample is used in conjunction with final8.py to test that imported +# Final symbols cannot be overwritten. + +from typing import Final + + +var1: Final[int] = 1 +var2: Final = 2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/final8.py b/python-parser/packages/pyright-internal/src/tests/samples/final8.py new file mode 100644 index 00000000..58952ad5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/final8.py @@ -0,0 +1,20 @@ +# This sample tests that final variables imported from another module +# cannot be overwritten. + +from .final7 import * + +# This should generate an error. +var1 = 1 + +# This should generate an error. +var2 = 1 + + +def func1(): + from .final7 import var1, var2 + + # This should generate an error. + var1 = 1 + + # This should generate an error. + var2 = 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/finallyExit1.py b/python-parser/packages/pyright-internal/src/tests/samples/finallyExit1.py new file mode 100644 index 00000000..d0ae37ed --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/finallyExit1.py @@ -0,0 +1,69 @@ +# This sample tests that pyright's parser correctly identifies +# illegal exits from a finally block as specified in PEP 765. + + +def func1(): + try: + return + finally: + # This should generate an error if using Python 3.14 or later. 
+ return + + +def func2(): + try: + return + finally: + + def inner(): + return + + +def func3(): + while True: + try: + return + finally: + if 1 < 1: + # This should generate an error if using Python 3.14 or later. + break + + if 1 > 2: + # This should generate an error if using Python 3.14 or later. + continue + + for x in (1, 2): + try: + return + finally: + if 1 < 1: + # This should generate an error if using Python 3.14 or later. + break + + if 1 > 2: + # This should generate an error if using Python 3.14 or later. + continue + + +def func4(): + try: + return + finally: + while 1 < 2: + if 1 < 1: + break + + if 1 > 2: + continue + + +def func5(): + try: + return + finally: + for x in (1, 2): + if 1 < 1: + break + + if 1 > 2: + continue diff --git a/python-parser/packages/pyright-internal/src/tests/samples/forLoop1.py b/python-parser/packages/pyright-internal/src/tests/samples/forLoop1.py new file mode 100644 index 00000000..7539d36c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/forLoop1.py @@ -0,0 +1,118 @@ +# This sample tests 'for' operations (both simple for loops +# and list comprehension for loops). + +from typing import overload + + +def requires_int(val: int): + pass + + +list1: list[int] = [1, 2, 3] + +for a in list1: + requires_int(a) + + +int1 = 1 + +# This should generate an error because +# an int type is not iterable. +for foo1 in int1: + pass + + +async def func1(): + # This should generate an error because + # list1 isn't an async iterator. 
+ async for foo2 in list1: + requires_int(foo2) + + +class AsyncIterable1(object): + def __aiter__(self): + return self + + async def __anext__(self): + return 1 + + +iter1 = AsyncIterable1() + + +async def func2(): + async for foo3 in iter1: + requires_int(foo3) + + for d in [b for b in list1]: + requires_int(d) + + for e in [b async for b in iter1]: + requires_int(e) + + +class ClassWithGetItem(object): + def __getitem__(self, item) -> str: + return "hello" + + +def testGetItemIterator() -> str: + objWithGetItem = ClassWithGetItem() + for f in objWithGetItem: + return f + return "none" + +# This should generate a syntax error. +for in range(3): + pass + + +class A: + def __init__(self): + self.__iter__ = lambda: iter([]) + + +# This should generate an error because A +# is not iterable. The __iter__ method is an +# instance variable. +for a in A(): + ... + +class B: + __slots__ = ("__iter__",) + def __init__(self): + self.__iter__ = lambda: iter([]) + + +for b in B(): + ... + +def func3(): + x = None + for x in range(1): + pass + + reveal_type(x, expected_text="int | None") + + +class C: + @overload + def __getitem__(self, i: int) -> str: + ... + + @overload + def __getitem__(self, i: slice) -> list[str]: + ... + + def __getitem__(self, i: int | slice) -> str | list[str]: + ... + + +c = C() + +for c1 in iter(c): + reveal_type(c1, expected_text="str") + +for c2 in c: + reveal_type(c2, expected_text="str") + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/forLoop2.py b/python-parser/packages/pyright-internal/src/tests/samples/forLoop2.py new file mode 100644 index 00000000..d3d7ddb1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/forLoop2.py @@ -0,0 +1,48 @@ +# This sample tests for/else loops for cases where variables +# are potentially unbound. + + +# For with no break and no else. +def func1(): + for x in []: + a = 0 + + # This should generate a "potentially unbound" error. 
+ print(a) + + # This should generate a "potentially unbound" error. + print(x) + + +# For with no break and else. +def func2(): + for x in []: + a = 0 + else: + b = 0 + + # This should generate a "potentially unbound" error. + print(a) + + print(b) + + # This should generate a "potentially unbound" error. + print(x) + + +# For with break and else. +def func3(): + for x in []: + a = 0 + break + else: + b = 0 + + # This should generate a "potentially unbound" error. + print(a) + + # This should generate a "potentially unbound" error. + print(b) + + # This should generate a "potentially unbound" error. + print(x) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/fstring1.py b/python-parser/packages/pyright-internal/src/tests/samples/fstring1.py new file mode 100644 index 00000000..e3499434 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/fstring1.py @@ -0,0 +1,159 @@ +# This tests various f-string parsing and analysis cases. + +# Test nested f-strings. +a = f'hello { f"hi {1}" } bye { f"hello" }' + + +# Test f-string with a backslash in the expression. +# This should generate an error. +b = f"hello { \t1 }" + +# This should generate an error prior to Python 3.12. +b1 = f"""{"\n"}""" + +# This should generate an error prior to Python 3.12. +b2 = f"{r'\n'}" + + +# Test f-string with unterminated expression. +# This should generate an error. +c = f"hello { 1 " + + +# Test f-string with double braces. +d = f"hello {{{1}}}" + +# Test f-string with formatting directives. +e = f"hello { 2 != 3 !r:2 }" + +# Test f-string with formatting directives. +f = f"hello { 2 != 3 :3 }" + +# Test f-string with embedded colon. +g = f"hello { a[2:3] :3 }" + +# Test f-string with embedded bang. +h = f"hello { {}['hello!'] :3 }" + +# Test f-string with expression that contains newlines. +i = f""" + { + f''' hi ''' + + 'bye' + } +""" + +j = f""" +{ + f''' + {f' { f":" } '} + ''' +} +""" + +# Test f-string with raw string and backslash. 
+j = rf"aaa\{4}" + +# Test f-string with comma-separated expressions. +h = f"List: { 2 + 3, 'hi' + ' there'}" + +# Test f-string with unpack operator. +my_dict = {"a": "A", "b": "B"} +i = f"{*my_dict.keys(),}" + +# f-string with empty expression should generate error. +j = f"{}" + +# f-string with quotes within quotes within quotes. +k = f"""{"#M's#".replace(f"'", '')!r}""" + + +# f-strings with escape characters in the format string section. +my_str = "" +width = 3 +l = f"{my_str:\>{width}s}" +m = f"{my_str:\x00>{width}s}" +n = f"{my_str:\u2007>{width}s}" + +# f-strings with nested expressions in the format string section. +o = f"{1+2:{1+2:{1+1:}}}" + +# This should generate an error because the nesting is too deep. +p = f"{1+2:{1+2:{1+1:{1}}}}" + +# This should generate a warning because of the unknown +# escape sequence but not an error. +q = f"hello\{4}" + +s1 = f"""{f'''{f'{f"{1+1}"}'}'''}""" + +# This should generate an error prior to Python 3.12. +s2 = f"""{f'''{f'{f"""{1+1}"""}'}'''}""" + +# This should generate an error prior to Python 3.12. +s3 = f'{f'''{r'abc'}'''}' + +q1 = f"""{ + 1 + 1 # Comment + }""" + +# This should generate an error prior to Python 3.12, but +# pyright doesn't currently detect this error. +q2 = f'{ + 1 + 1 # Comment + }' + +# This should generate an error because an expression is missing. +r1 = f'{!r}' + +# This should generate an error because an expression is missing. +r2 = f'{!}' + +# This should generate an error because an expression is missing. +r3 = f'{:}' + +# This should generate an error because an expression is missing. +r4 = f'{=}' + +r5 = f'{1!s:}' +r6 = f'{1:}' +r7 = f'{1=}' +r8 = f'{1=:}' +r9 = f'{1=!r:}' + +s1 = f"}}" + +# This should generate an error because a single right brace is used. +s2 = f"}" + +t1 = f'{0==1}' +t2 = f'{0!=1}' +t3 = f'{0<=1}' + +# This should generate an error because this isn't a walrus +# operator as it appears. 
+t4 = f'{x1:=3}' + +t5 = f"{(x2:=3):{(x3:=0)}}" + +u1 = f"'{{\"{0}\": {0}}}'" + +def func1(x): + f"x:{yield (lambda i: x * i)}" + +v1 = f"x \ +y" + +v2 = f'x \ +y' + +w1 = 1 + +w2 = f"__{ + w1:d +}__" + + +# This should generate an error because it's unterminated. +w3 = f"test + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/fstring2.py b/python-parser/packages/pyright-internal/src/tests/samples/fstring2.py new file mode 100644 index 00000000..eb53bc30 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/fstring2.py @@ -0,0 +1,12 @@ +# This tests f-strings with format expressions. + +# pyright: strict + + +def return_right_aligned_string(): + some_length = 10 + some_length2 = 2 + some_string = "some string to print" + + string_right_aligned = f"{some_string:>{some_length - 2} {some_length2: 3}}" + return string_right_aligned diff --git a/python-parser/packages/pyright-internal/src/tests/samples/fstring3.py b/python-parser/packages/pyright-internal/src/tests/samples/fstring3.py new file mode 100644 index 00000000..8c74f179 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/fstring3.py @@ -0,0 +1,27 @@ +# This sample tests f-strings where expressions contain +# other string literals. 
+ +# pyright: strict + +a1 = f"[{{name}}{'}' if True else ''}]" + +b1 = f"{'''hello'''}" + +c1 = f"""{'"'}""" + +hello1 = 3 +d1 = f"{f'{hello1}'}" + +print(f"{'a' if 'b' != d1 else 'd'}") + + +a2 = rf"[{{name}}{'}' if True else ''}]" + +b2 = rf"{'''hello'''}" + +c2 = rf"""{'"'}""" + +hello2 = 3 +d2 = rf"{rf'{hello2}'}" + +e1 = f""" {f''' {"".join(["this", "that"])}'''}""" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/fstring4.py b/python-parser/packages/pyright-internal/src/tests/samples/fstring4.py new file mode 100644 index 00000000..a2ab6520 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/fstring4.py @@ -0,0 +1,15 @@ +# This sample tests nested braces within an f-string. + + +def foo(spam, dictval: dict): + print(dictval) + return "Done" + + +print(f"{foo(0, {'bar' : 1, 'baz': 2})}") + +hello = 200 +print(f"({hello} \N{greek capital letter sigma})") +print(f"({hello} \N{GREEK CAPITAL LETTER SIGMA})") +print(f"({hello} \N{VARIATION SELECTOR-16})") +print(f"({hello} \N{VARIATION SELECTOR-16})") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/fstring5.py b/python-parser/packages/pyright-internal/src/tests/samples/fstring5.py new file mode 100644 index 00000000..b037ca9a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/fstring5.py @@ -0,0 +1,17 @@ +# This sample tests the parsing and analysis of f-strings +# that end in an "=" sign. Support for this was added +# in Python 3.8. 
+ +key = 3 + +print(f"Value for {key =}") + +print(f"Value for {key = }") + +print(f"Value for {key = :.2f}") + +print(f"Value for {key=}") + +print(f"Value for {key= }") + +print(f"Value for {key= :.2f}") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/fstring6.py b/python-parser/packages/pyright-internal/src/tests/samples/fstring6.py new file mode 100644 index 00000000..d4219c4f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/fstring6.py @@ -0,0 +1,14 @@ +# This sample tests the parsing and analysis of f-strings with empty {}, +# with a backslash in the format specifier, and with a colon in a format +# specifier expression. + +msg = "test" +a = f"{}" + +message = "hi" +f"{message:\u3000>10}" + + +x = 0 +precision = 3 +print(f"{x: .{precision:d}f}") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/function1.py b/python-parser/packages/pyright-internal/src/tests/samples/function1.py new file mode 100644 index 00000000..dfebdd6e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/function1.py @@ -0,0 +1,10 @@ +# This sample tests that args and kwargs parameters are +# properly typed. + + +def function_with_args(*args: str): + reveal_type(args, expected_text="tuple[str, ...]") + + +def function_with_kwargs(**kwargs: list[str]): + reveal_type(kwargs, expected_text="dict[str, list[str]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/function10.py b/python-parser/packages/pyright-internal/src/tests/samples/function10.py new file mode 100644 index 00000000..38bb6441 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/function10.py @@ -0,0 +1,24 @@ +# This sample tests the case where a union of callables is passed +# to a generic function and the parameter types are subtypes of +# each other. 
+ +from typing import Any, Callable, Generic, TypeVar + +T_contra = TypeVar("T_contra", contravariant=True) + + +class Thing1: + prop1: str + + +class Thing2: + prop1: str + prop2: str + + +class ClassA(Generic[T_contra]): + def __init__(self, callback: Callable[[T_contra], Any]) -> None: ... + + +def func1(cb: Callable[[Thing1], Any] | Callable[[Thing1 | Thing2], Any]): + reveal_type(ClassA(cb), expected_text="ClassA[Thing1]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/function2.py b/python-parser/packages/pyright-internal/src/tests/samples/function2.py new file mode 100644 index 00000000..7c4dfaed --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/function2.py @@ -0,0 +1,7 @@ +# This sample tests the case where a param with no default +# arg value can follow a param with a default arg value +# if they are both followed by a *args param. + + +def f(*a, b=1, c): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/function3.py b/python-parser/packages/pyright-internal/src/tests/samples/function3.py new file mode 100644 index 00000000..ede2a43d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/function3.py @@ -0,0 +1,13 @@ +# This sample tests assignment of a function that uses +# a synthesized TypeVar type for the "self" parameter. + +from typing import Callable + + +class TestClass: + def method(self) -> None: + pass + + +# This should generate an error. +func1: Callable[[float], None] = TestClass.method diff --git a/python-parser/packages/pyright-internal/src/tests/samples/function5.py b/python-parser/packages/pyright-internal/src/tests/samples/function5.py new file mode 100644 index 00000000..6861efb6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/function5.py @@ -0,0 +1,13 @@ +# This test validates that a function can be treated as an object +# for type checking purposes. 
+ +from typing import Hashable + + +def func1(a: int) -> int: + return a + + +x: object = func1 + +y: Hashable = func1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/function6.py b/python-parser/packages/pyright-internal/src/tests/samples/function6.py new file mode 100644 index 00000000..45f5db6e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/function6.py @@ -0,0 +1,33 @@ +# This sample verifies that functions are treated as though they +# derive from object. + +from typing import Callable, overload + + +@overload +def func1(a: str) -> str: ... + + +@overload +def func1(a: int) -> int: ... + + +def func1(a: str | int) -> str | int: ... + + +def func2(a: str | int) -> str | int: ... + + +def takes_object(val: object) -> None: ... + + +takes_object(func1) +takes_object(func2) + + +def func3(b: Callable[[str], bool]) -> None: + if b == func1: + pass + + if b != func2: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/function7.py b/python-parser/packages/pyright-internal/src/tests/samples/function7.py new file mode 100644 index 00000000..230e4ec2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/function7.py @@ -0,0 +1,75 @@ +# This sample tests the case where a function type is compared to another +# function type where one contains a positional-only marker and the +# other does not. + +from typing import Protocol + + +class _Writer1(Protocol): + def write(self, a: str, b: str) -> object: + pass + + +class Writer1: + def write(self, a: str, /, b: str): + pass + + +def make_writer1(w: _Writer1): + pass + + +# This should generate an error because the source function is positional-only. 
+make_writer1(Writer1()) + + +class _Writer2(Protocol): + def write(self, a: str, /, b: str) -> object: + pass + + +class Writer2: + def write(self, a: str, b: str): + pass + + +def make_writer2(w: _Writer2): + pass + + +make_writer2(Writer2()) + + +class _Writer3(Protocol): + def write(self, a: str, b: str) -> object: + pass + + +class Writer3: + def write(self, __a: str, b: str): + pass + + +def make_writer3(w: _Writer3): + pass + + +# This should generate an error because the source function is positional-only. +make_writer3(Writer3()) + + +class _Writer4(Protocol): + def write(self, __a: str, b: str) -> object: + pass + + +class Writer4: + def write(self, a: str, b: str): + pass + + +def make_writer4(w: _Writer4): + pass + + +make_writer4(Writer4()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/function8.py b/python-parser/packages/pyright-internal/src/tests/samples/function8.py new file mode 100644 index 00000000..d7d5c23a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/function8.py @@ -0,0 +1,20 @@ +# This sample tests bidirectional type inference for a function when +# a union includes a "bare" TypeVar and another (non-generic) type. 
+ +from dataclasses import dataclass +from typing import Generic, Sequence, TypeVar + +T = TypeVar("T") + + +@dataclass +class Container(Generic[T]): + values: Sequence[float | T] + + +def create_container(values: Sequence[float | T]) -> Container[T]: + return Container(values) + + +arg: Sequence[float | int] = (1, 2.0) +x: Container[int] = create_container(arg) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/function9.py b/python-parser/packages/pyright-internal/src/tests/samples/function9.py new file mode 100644 index 00000000..1d08f3ea --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/function9.py @@ -0,0 +1,33 @@ +# This sample tests the case where a function type is assigned to another +# and the source contains parameters that are annotated as literals and +# the destination has corresponding TypeVars. + +from typing import Callable, TypeVar, Literal + +_A = TypeVar("_A") + + +def wrapper1(fn: Callable[[_A], int]) -> _A: ... + + +def f1(a: Literal[0]) -> int: ... + + +reveal_type(wrapper1(f1), expected_text="Literal[0]") + + +def wrapper2(fn: Callable[..., _A]) -> Callable[..., _A]: ... + + +def f2() -> Literal["Foo"]: + return "Foo" + + +reveal_type(wrapper2(f2)(), expected_text="Literal['Foo']") + + +def f3(): + return "Foo" + + +reveal_type(wrapper2(f3)(), expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/functionAnnotation1.py b/python-parser/packages/pyright-internal/src/tests/samples/functionAnnotation1.py new file mode 100644 index 00000000..51d296e4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/functionAnnotation1.py @@ -0,0 +1,61 @@ +# This sample tests support for comment-style function annotations. 
+ +# pyright: strict, reportMissingParameterType=false, reportTypeCommentUsage=false + +from typing import Optional, Literal as _Literal, Union + + +def func1a(a, b): + # type: (int, str) -> str + return "" + + +def func1b(a, b): # type: (Optional[str], int) -> str + return "" + + +def func1c( + a, # type: int + b, # type: str +): + # type: (...) -> str + return "" + + +def func1d( + a, # type: int + b, # type: Foo +): + # type: (...) -> str + return "" + + +def func1e( + a, # type: int + b, # type: str +): + # type: (...) -> str + return "" + + +# This should generate an error because a is unannotated. +def func1f(a): + # type: (...) -> str + return "" + + +class Foo: + pass + + +def func1g(*args, **kwargs): + # type: (*int, **float) -> int + return sum(args) + sum(round(kwarg) for kwarg in kwargs.values()) + + +def func1h( + a, # type: _Literal["{", "}"] + b, # type: Union[_Literal["%"], _Literal["{"], _Literal["$"]] +): + # type: (...) -> str + return "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/functionAnnotation2.py b/python-parser/packages/pyright-internal/src/tests/samples/functionAnnotation2.py new file mode 100644 index 00000000..445e4ec0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/functionAnnotation2.py @@ -0,0 +1,27 @@ +# This sample tests support for comment-style function annotations. 
+ + +# Too few annotations +def func1a(a, b): + # type: (str) -> str + return "" + + +# Too many annotations +def func1b(a, b): # type: (str, int, int) -> str + return "" + + +class ClassA: + def method0(self, a, b): + # type: (str, int) -> str + return "" + + # Too few annotations + def method1(self, a, b): + # type: (str) -> str + return "" + + # Too many annotations + def method2(self, a, b): # type: (str, int, int, int) -> str + return "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/functionAnnotation3.py b/python-parser/packages/pyright-internal/src/tests/samples/functionAnnotation3.py new file mode 100644 index 00000000..a7245af2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/functionAnnotation3.py @@ -0,0 +1,36 @@ +# This sample tests support for comment-style function annotations +# that include extra annotations for the "self" or "cls" parameters. + +from typing import TypeVar + + +_T = TypeVar("_T", bound="ClassA") + + +class ClassA: + foo: str + + def method0(self, a, b): + # type: (_T, str, list[_T]) -> str + return self.foo + + def method1(self, a, b): + # type: (_T, str, list[_T]) -> ClassB + return ClassB() + + # Too many annotations + def method2(self, a, b): # type: (_T, str, int, list[_T]) -> str + return "" + + # Too few annotations + @staticmethod + def method3(a, b): + # type: (int) -> str + return "" + + def method4(self, a, b): + # type: (str, ClassB) -> str + return self.foo + + +class ClassB: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/functionAnnotation4.py b/python-parser/packages/pyright-internal/src/tests/samples/functionAnnotation4.py new file mode 100644 index 00000000..66f5b7c3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/functionAnnotation4.py @@ -0,0 +1,23 @@ +# This sample tests the reportTypeCommentUsage diagnostic check. 
+ +from typing import Optional + + +# This should generate an error if reportTypeCommentUsage is enabled. +def func1a(a, b): + # type: (int, str) -> str + return "" + + +# This should generate an error if reportTypeCommentUsage is enabled. +def func1b(a, b): # type: (Optional[str], int) -> str + return "" + + +# This should generate an error if reportTypeCommentUsage is enabled. +def func1c( + a, # type: int + b, # type: str +): + # type: (...) -> str + return "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/functionMember1.py b/python-parser/packages/pyright-internal/src/tests/samples/functionMember1.py new file mode 100644 index 00000000..571360bb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/functionMember1.py @@ -0,0 +1,18 @@ +# This sample tests the reportFunctionMemberAccess diagnostic rule. + + +def func1(): + pass + + +a = func1.__annotations__ +b = func1.__class__ + +# This should generate an error +c = func1.bar + +# This should generate an error +func1.baz = 3 + +# This should generate an error +del func1.baz diff --git a/python-parser/packages/pyright-internal/src/tests/samples/functionMember2.py b/python-parser/packages/pyright-internal/src/tests/samples/functionMember2.py new file mode 100644 index 00000000..329ef922 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/functionMember2.py @@ -0,0 +1,61 @@ +# This sample tests the special-case handling of the __self__ +# attribute for a function when it is bound to a class or object. + +# pyright: reportFunctionMemberAccess=error + + +from typing import Protocol + + +def func1(a: int) -> str: ... + + +# This should generate an error because func1 isn't +# bound to a "self". +s1 = func1.__self__ + + +class A: + def method1(self) -> None: ... + + @classmethod + def method2(cls) -> None: ... + + @staticmethod + def method3() -> None: ... 
+ + +s2 = A().method1.__self__ +reveal_type(s2, expected_text="A") + +s3 = A.method2.__self__ +reveal_type(s3, expected_text="type[A]") + +s3 = A.method2.__self__ +reveal_type(s3, expected_text="type[A]") + +s4 = A().method2.__self__ +reveal_type(s4, expected_text="type[A]") + +# This should generate an error because method3 is static. +s5 = A().method3.__self__ + +# This should generate an error because method3 is static. +s6 = A.method3.__self__ + + +class HasSelf(Protocol): + @property + def __self__(self, /) -> object: ... + + +f1: HasSelf +f1 = A.method2 +f1 = A().method1 +f1 = A().method2 + +# These three should generate an error because they are not +# MethodTypes but are instead FunctionTypes. +f1 = A.method1 +f1 = A.method3 +f1 = A().method3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/futureImport1.py b/python-parser/packages/pyright-internal/src/tests/samples/futureImport1.py new file mode 100644 index 00000000..69346e31 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/futureImport1.py @@ -0,0 +1,7 @@ +# This sample tests that __future__ imports are found +# only at the beginning of a file. + +""" Doc String """ "Extension" + +from __future__ import annotations +from __future__ import with_statement diff --git a/python-parser/packages/pyright-internal/src/tests/samples/futureImport2.py b/python-parser/packages/pyright-internal/src/tests/samples/futureImport2.py new file mode 100644 index 00000000..f2bb34e2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/futureImport2.py @@ -0,0 +1,11 @@ +# This sample tests that __future__ imports are found +# only at the beginning of a file. 
+ +"""Doc String""" + +"Extension" +from __future__ import annotations # This should generate an error + + +def func(): + from __future__ import annotations # This should generate an error diff --git a/python-parser/packages/pyright-internal/src/tests/samples/futureImport3.py b/python-parser/packages/pyright-internal/src/tests/samples/futureImport3.py new file mode 100644 index 00000000..7f09203e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/futureImport3.py @@ -0,0 +1,5 @@ +# This sample tests that __future__ imports are found +# only at the beginning of a file. + +from typing import Any +from __future__ import annotations # This should generate an error diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator1.py b/python-parser/packages/pyright-internal/src/tests/samples/generator1.py new file mode 100644 index 00000000..6abe6240 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator1.py @@ -0,0 +1,170 @@ +# This sample tests various type checking operations relating to +# generator functions (those with a "yield" method). + +from typing import ( + Any, + Awaitable, + Generator, + Generic, + Iterable, + Iterator, + NamedTuple, + Protocol, + TypedDict, + TypeVar, +) + +T = TypeVar("T") + + +class ClassA: + pass + + +s = True + + +class ClassB: + def shouldContinue(self): + global s + return s + + +class ClassC: + pass + + +class NT1(NamedTuple, Generic[T]): + value: T + + +class ClassD(Generic[T]): + def __init__(self, obj: T) -> None: + self.obj = obj + + def ingest(self) -> Generator[NT1[T], None, None]: + yield NT1(self.obj) + + +def generator1() -> Generator[ClassA, ClassB, ClassC]: + cont = ClassB() + while cont.shouldContinue(): + yield ClassA() + + return ClassC() + + +# This should generate an error because the function +# has no return statement at the bottom, so it +# returns None which is not assignable to ClassC. 
+def generator2() -> Generator[ClassA, ClassB, ClassC]: + cont = ClassB() + if cont.shouldContinue(): + # This should generate an error because False isn't + # assignable to the return type (ClassC). + return False + + while cont.shouldContinue(): + # This should generate an error because 3 isn't + # assignable to the yield type (ClassA). + yield 3 + + +def generator3() -> Generator[ClassA, int, Any]: + cont = ClassB() + if cont.shouldContinue(): + return 3 + + while cont.shouldContinue(): + # This should generate an error because 3 isn't + # assignable to the yield type (ClassA). + yield 3 + + +def generator4() -> Iterable[ClassA]: + yield ClassA() + + return True + + +def generator5() -> Iterator[ClassA]: + # This should generate an error because ClassB isn't + # assignable to ClassA. + yield ClassB() + + +def generate(): + for i in range(2): + yield i + + +s = generate() + +# Verify that a call to a Generator method succeeds +s.close() + + +def generator6(): + yield + + +def generator7() -> Generator[None, None, None]: + yield + + +def generator8() -> Iterator[dict[str, int]]: + # This tests the bidirectional type inference + # of dict types. It should not generate an error. + yield {"hello": 3} + + +# This should generate an error. +def generator9() -> int: + # This should generate an error. + yield None + return 3 + + +# This should generate an error. +async def generator10() -> int: + # This should generate an error. + yield None + + +# This should generate an error. +def generator11() -> list[int]: + # This should generate an error. + yield 3 + + +class TD1(TypedDict): + x: str + + +def generator12() -> Generator[TD1, None, None]: + yield {"x": "x"} + + +def generator13() -> Generator[TD1, None, None]: + # This should generate an error. + yield {"y": "x"} + + +def generator14() -> Iterator[TD1]: + yield {"x": "x"} + + +class IntIterator(Protocol): + def __next__(self, /) -> int: ... 
+ + +def generator15() -> IntIterator: + yield 0 + + +class AsyncIntIterator(Protocol): + def __anext__(self, /) -> Awaitable[int]: ... + + +async def generator16() -> AsyncIntIterator: + yield 0 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator10.py b/python-parser/packages/pyright-internal/src/tests/samples/generator10.py new file mode 100644 index 00000000..d2fc6269 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator10.py @@ -0,0 +1,18 @@ +# This sample tests type inference for a generator returned +# by an __await__ function. + +from collections.abc import Awaitable +from asyncio import get_event_loop, sleep + + +class MyAwaitable(Awaitable): + def __await__(self): + yield from (sleep(0.1).__await__()) + + +async def func1(): + x: None = await MyAwaitable() + + +loop = get_event_loop() +loop.run_until_complete(func1()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator11.py b/python-parser/packages/pyright-internal/src/tests/samples/generator11.py new file mode 100644 index 00000000..bb22c81a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator11.py @@ -0,0 +1,40 @@ +# This sample tests the return type inference for a generator. + +from typing import Awaitable, Generator, TypeVar + +S = TypeVar("S") +T = TypeVar("T") + + +def func1() -> Generator[int, int, str]: + yield 1 + return "done" + + +def func2() -> Generator[int, int, None]: + # This should generate an error because yield is not allowed + # from within a list comprehension. + x = [(yield from func1()) for lel in range(5)] + + v1 = yield from func1() + reveal_type(v1, expected_text="str") + + v2 = yield 4 + reveal_type(v2, expected_text="int") + + +def func3(): + [x for x in (yield [[[1]], [[2]], [[3]]]) for y in x] + + # This should generate an error. 
+ [x for x in [[[1]], [[2]], [[3]]] for y in (yield x)] + + +class ClassA[S, T](Awaitable[T]): + def __init__(self, val: S) -> None: + self.val = val + + def __await__(self) -> Generator[S, T, T]: + z = yield self.val + reveal_type(z, expected_text="T@ClassA") + return z diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator12.py b/python-parser/packages/pyright-internal/src/tests/samples/generator12.py new file mode 100644 index 00000000..0f2777eb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator12.py @@ -0,0 +1,25 @@ +# This sample tests the inference of types relating to +# "yield from" statements. + +from typing import Generator + + +class Yielder: + def __iter__(self) -> Generator[int, None, bool]: + yield 1 + return True + + +def collect1() -> Generator[str, None, bool]: + y = Yielder() + + # This should generate an error because int doesn't match str. + z = yield from y + return z + + +def collect2(): + y = Yielder() + z = yield from y + reveal_type(z, expected_text="bool") + return z diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator13.py b/python-parser/packages/pyright-internal/src/tests/samples/generator13.py new file mode 100644 index 00000000..0b4b96a3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator13.py @@ -0,0 +1,74 @@ +# This sample tests async generator and non-generator functions. 
+ +import asyncio +from typing import AsyncGenerator, AsyncIterator, Protocol + + +async def get_data() -> list[int]: + await asyncio.sleep(1) + return [1, 2, 3] + + +async def generate(nums: list[int]) -> AsyncGenerator[str, None]: + for n in nums: + await asyncio.sleep(1) + yield f"The number is {n}" + + +async def func1() -> AsyncGenerator[str, None]: + data = await get_data() + v1 = generate(data) + reveal_type(v1, expected_text="AsyncGenerator[str, None]") + return v1 + + +async def func2() -> AsyncIterator[str]: + data = await get_data() + v1 = generate(data) + reveal_type(v1, expected_text="AsyncGenerator[str, None]") + return v1 + + +async def get_value(v: int) -> int: + await asyncio.sleep(1) + return v + 1 + + +async def func3() -> AsyncGenerator[int, None]: + return (await get_value(v) for v in [1, 2, 3]) + + +def func4() -> AsyncGenerator[int, None]: + return (await get_value(v) for v in [1, 2, 3]) + + +async def func5() -> None: + v1 = func1() + reveal_type(v1, expected_text="CoroutineType[Any, Any, AsyncGenerator[str, None]]") + gen = await v1 + reveal_type(gen, expected_text="AsyncGenerator[str, None]") + async for s in gen: + print(s) + + +async def func6() -> None: + v1 = func2() + reveal_type(v1, expected_text="CoroutineType[Any, Any, AsyncIterator[str]]") + gen = await v1 + reveal_type(gen, expected_text="AsyncIterator[str]") + async for s in gen: + print(s) + + +loop = asyncio.get_event_loop() +loop.run_until_complete(func5()) +loop.run_until_complete(func6()) + + +class Proto(Protocol): + async def iter(self) -> AsyncGenerator[bytes, None]: ... 
+ + +async def func7(p: Proto): + async for x in await p.iter(): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator14.py b/python-parser/packages/pyright-internal/src/tests/samples/generator14.py new file mode 100644 index 00000000..65df1d3c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator14.py @@ -0,0 +1,24 @@ +# This sample tests the inferred type of async and sync generators. + + +async def foo() -> int: ... + + +async def main() -> None: + v1 = (x for x in [2, 3] if x > 3) + reveal_type(v1, expected_text="Generator[int, None, None]") + + v2 = (x for x in [2, 3] if await foo()) + reveal_type(v2, expected_text="AsyncGenerator[int, None]") + + v3 = (x for x in [2, 3]) + reveal_type(v3, expected_text="Generator[int, None, None]") + + v4 = (await foo() for _ in [2, 3]) + reveal_type(v4, expected_text="AsyncGenerator[int, None]") + + v5 = ((0, await foo()) for _ in [1, 2]) + reveal_type(v5, expected_text="AsyncGenerator[tuple[int, int], None]") + + v6 = (x for x in [1, 2] if (x, await foo())) + reveal_type(v6, expected_text="AsyncGenerator[int, None]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator15.py b/python-parser/packages/pyright-internal/src/tests/samples/generator15.py new file mode 100644 index 00000000..195b9df7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator15.py @@ -0,0 +1,26 @@ +# This sample tests that the parser emits an error when a generator +# is used as an argument without parentheses. + + +from typing import Any + + +def func1(*x: Any) -> None: + pass + +func1(x for x in [0, 1]) + +func1((x for x in [0, 1]), 1) + +func1((x for x in [0, 1]),) + +func1(1, (x for x in [0, 1])) + +# This should generate an error. +func1(x for x in [0, 1], 1) + +# This should generate an error. +func1(x for x in [0, 1],) + +# This should generate an error. 
+func1(1, x for x in [0, 1]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator16.py b/python-parser/packages/pyright-internal/src/tests/samples/generator16.py new file mode 100644 index 00000000..c73a7b44 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator16.py @@ -0,0 +1,20 @@ +# This sample tests the case where a return statement within an async +# generator has an explicit return value. This generates a syntax +# error at runtime. + +from typing import Any, AsyncIterable + + +async def func1(n: int, fa: AsyncIterable[Any]): + if n <= 0: + # This should generate an error because return statements + # are not allowed in async generators. + return None + + g = aiter(fa) + + while True: + try: + yield await g.__anext__() + except StopAsyncIteration: + return diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator2.py b/python-parser/packages/pyright-internal/src/tests/samples/generator2.py new file mode 100644 index 00000000..c0d41d83 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator2.py @@ -0,0 +1,66 @@ +# This sample tests various type checking operations relating to +# generator functions that use the "yield from" clause. + +from typing import Generator, Iterator, TypeVar + +T = TypeVar("T") + + +class ClassA: + pass + + +class ClassB: + def shouldContinue(self): + return True + + +class ClassC: + pass + + +def generator1() -> Iterator[ClassA]: + yield from generator1() + + +def generator2() -> Iterator[ClassB]: + # This should generate an error because it yields + # an iterator of the wrong type. + yield from generator1() + + # This should also generate an error because it + # yields the wrong type. 
+ yield from [1] + + +def generator3( + arg: Generator[int, None, T] | Generator[str, None, T], +) -> Generator[int | str, None, T]: + x = yield from arg + reveal_type(x, expected_text="T@generator3") + return x + + +def generator4( + arg: Generator[int, None, int] | Generator[str, None, str], +) -> Generator[int | str, None, int | str]: + x = yield from arg + reveal_type(x, expected_text="int | str") + return x + + +def generator5() -> Generator[None, float, None]: + x: float = yield + + +def generator6() -> Generator[None, int, None]: + yield from generator5() + + +def generator7() -> Generator[None, int, None]: + x: float = yield + + +def generator8() -> Generator[None, float, None]: + # This should generate an error because of the send type. + yield from generator7() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator3.py b/python-parser/packages/pyright-internal/src/tests/samples/generator3.py new file mode 100644 index 00000000..98ca70f8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator3.py @@ -0,0 +1,64 @@ +# This sample tests various type checking operations relating to +# generator functions where the return type is inferred. + + +class ClassA: + pass + + +class ClassB: + pass + + +def generator1(): + yield ClassB() + + +reveal_type(generator1(), expected_text="Generator[ClassB, Any, None]") + + +def generator2(): + yield "Hello" + yield ClassA() + return 3 + + +reveal_type( + generator2(), expected_text="Generator[ClassA | Literal['Hello'], Any, Literal[3]]" +) + + +def generator3(): + x = yield 3 + return 0 + + +reveal_type(generator3(), expected_text="Generator[Literal[3], Unknown, Literal[0]]") + + +def consumer1() -> ClassB: + return next(generator1()) + + +def consumer2() -> ClassA: + # This should generate an error because the + # inferred type of generator1 should be incompatible + # with ClassA. 
+ return next(generator1()) + + +def consumer3() -> ClassA | None: + value = next(generator2()) + + if isinstance(value, str): + print(str) + else: + return value + + +def generator4(): + return + yield 1 + + +reveal_type(generator4(), expected_text="Generator[Never, Any, None]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator4.py b/python-parser/packages/pyright-internal/src/tests/samples/generator4.py new file mode 100644 index 00000000..d8780909 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator4.py @@ -0,0 +1,71 @@ +# This sample tests various type checking operations relating to +# async generator functions where the return type is inferred. + +from typing import AsyncGenerator, Awaitable, Generator, Iterator + + +async def g1(): + yield 1 + yield 2 + + +async def g2(): + async for v in g1(): + yield v + + +def g1_explicit1() -> Generator[int, None, None]: + yield 1 + yield 2 + + +async def g1_explicit2() -> AsyncGenerator[int, None]: + yield 1 + yield 2 + + +async def g2_explicit(): + for v in g1_explicit1(): + yield v + + async for v in g1_explicit2(): + yield v + + +async def g3(xs: Awaitable[list[int]]) -> list[int]: + return [x for x in await xs] + + +async def g4(xs: list[Awaitable[int]]) -> list[int]: + return [await x for x in xs] + + +class SomeIterable: + def __init__(self): + self.x = 1 + + def __iter__(self) -> Iterator[int]: + yield self.x + + +async def func1() -> SomeIterable: + return SomeIterable() + + +def func2() -> Iterator[int]: + yield 2 + + +def g5() -> None: + val = (y for y in func2()) + reveal_type(val, expected_text="Generator[int, None, None]") + + +async def g6() -> None: + val = (x + y for y in func2() for x in await func1()) + reveal_type(val, expected_text="AsyncGenerator[int, None]") + + +async def g7() -> None: + val = (x + y for y in await func1() for x in func2()) + reveal_type(val, expected_text="Generator[int, None, None]") diff --git 
a/python-parser/packages/pyright-internal/src/tests/samples/generator5.py b/python-parser/packages/pyright-internal/src/tests/samples/generator5.py new file mode 100644 index 00000000..060f329a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator5.py @@ -0,0 +1,24 @@ +# This sample tests various type checking operations relating to +# async generator functions where the return type is declared. + +from typing import AsyncIterable, AsyncIterator + + +async def g1_explicit() -> AsyncIterator[int]: + yield 1 + yield 2 + + +async def g2_explicit(): + async for v in g1_explicit(): + yield v + + +async def g3_explicit() -> AsyncIterable[int]: + yield 1 + yield 2 + + +async def g4_explicit(): + async for v in g3_explicit(): + yield v diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator6.py b/python-parser/packages/pyright-internal/src/tests/samples/generator6.py new file mode 100644 index 00000000..6eb9f54a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator6.py @@ -0,0 +1,14 @@ +# This sample tests that functions containing unreachable +# yield statements are still treated as generators. + +from typing import Iterable, AsyncIterable + + +def func1() -> Iterable[str]: + return + yield "" + + +async def func2() -> AsyncIterable[str]: + return + yield "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator7.py b/python-parser/packages/pyright-internal/src/tests/samples/generator7.py new file mode 100644 index 00000000..8c80ada6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator7.py @@ -0,0 +1,16 @@ +# This sample tests the handling of the "yield from" statement +# and inferred return types from generators that use this +# statement. 
+ + +def func1(): + yield from [1, 2, 3] + + +def func2(): + yield from func1() + + +a: dict[int, int] = {} +for i in func2(): + a[i] = i diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator8.py b/python-parser/packages/pyright-internal/src/tests/samples/generator8.py new file mode 100644 index 00000000..1b3afbff --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator8.py @@ -0,0 +1,12 @@ +# This sample verifies that the "yield from" argument +# is wrapped in an Iterable. + +from typing import Generator + +ints1 = [1, 2] +ints2 = [3, 4] + + +def func1() -> Generator[int, None, None]: + yield from ints1 + yield from ints2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generator9.py b/python-parser/packages/pyright-internal/src/tests/samples/generator9.py new file mode 100644 index 00000000..4b19068d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generator9.py @@ -0,0 +1,19 @@ +# This sample tests the check that "yield" is not used outside +# of a function or lambda. + +a = lambda: (yield) + + +def func1(a: bool): + if a: + yield 3 + yield 5 + + +# This should generate an error +yield 7 + + +class Foo: + # This should generate an error + yield diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generic1.py b/python-parser/packages/pyright-internal/src/tests/samples/generic1.py new file mode 100644 index 00000000..8a776100 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generic1.py @@ -0,0 +1,41 @@ +# This sample tests error handling for the "Generic" special form. + +from typing import Generic, TypeVar + +T = TypeVar("T") + +# This should generate an error. +class Class1(Generic): + ... + +# This should generate two errors (a parse error and a semantic error). +class Class2(Generic[]): + ... + +# This should generate an error. +class Class3(Generic[int]): + ... + +# This should generate two errors. 
+class Class4(Generic[T, T, T]): + ... + + +# This should generate an error. +def func1(x: Generic[T]) -> T: + ... + +# This should generate an error. +def func2(x: T) -> Generic[T]: + ... + +class Class5(Generic[T]): + # This should generate an error. + x: Generic[T] + + +def func3(x: type): + if x is Generic: + return + + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generic2.py b/python-parser/packages/pyright-internal/src/tests/samples/generic2.py new file mode 100644 index 00000000..8dee0af4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generic2.py @@ -0,0 +1,29 @@ +# This sample verifies that "Generic" is tagged as +# invalid in contexts outside of a class definition. + +from typing import Generic, TypeVar + + +_T1 = TypeVar("_T1") + + +class ClassA(Generic[_T1]): + pass + + +# This should generate an error. +def func1(a: _T1) -> Generic[_T1]: ... + + +# This should generate an error. +def func2(p1: Generic[_T1]) -> _T1: ... + + +TA1 = Generic + + +# This should generate an error. +def func3(a: _T1) -> TA1[_T1]: ... + + +class ClassB(TA1[_T1]): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/generic3.py b/python-parser/packages/pyright-internal/src/tests/samples/generic3.py new file mode 100644 index 00000000..f0de256e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/generic3.py @@ -0,0 +1,40 @@ +# This sample tests that a Generic base class overrides the type parameter +# ordering of other type parameters. + +# pyright: reportIncompatibleMethodOverride=false + +from typing import Generic, Iterable, Iterator, Mapping, TypeVar + +_T1 = TypeVar("_T1") +_T2 = TypeVar( + "_T2", "str", "int" +) # 'str' and 'int' should be treated as forward reference + + +class Foo(Iterable[_T2], Generic[_T1, _T2]): + def __init__(self, a: _T1, b: _T2): + pass + + def foo(self, a: _T1, b: _T2) -> _T2: + return b + + def __iter__(self) -> Iterator[int]: ... 
+ + +a: Foo[int, str] = Foo(2, "") +b: str = a.foo(4, "") + + +# This should generate an error because a class shouldn't +# derive from Generic more than once. +class Bar(Generic[_T1], Generic[_T2]): + pass + + +K = TypeVar("K") +V = TypeVar("V") + + +# This should generate an error because V isn't included +# in the Generic type variable list. +class A(Mapping[K, V], Generic[K]): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType1.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType1.py new file mode 100644 index 00000000..9cbfa38e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType1.py @@ -0,0 +1,38 @@ +# This sample tests bidirectional type inference and constraint solving. + +m = int(1) +n = float(1.1) +p = "hello" + +a = dict(x=m, y=m) +a1: int = a["x"] + +b = dict(x=n, y=n) +reveal_type(b, expected_text="dict[str, float]") + +# This should generate an error. +b1: int = b["x"] +b2: float = b["x"] + +c = dict(x=m, y=n) +reveal_type(c, expected_text="dict[str, float]") + +# This should generate an error. +c1: int = c["x"] +c2: float = c["x"] + +d = dict(x=p, y=p) +reveal_type(d, expected_text="dict[str, str]") + +# This should generate an error. +d1: float = d["x"] +d2: str = d["x"] + +e = dict(x=n, y=p) +reveal_type(e, expected_text="dict[str, float | str]") + +# This should generate an error. +e1: str = e["x"] +# This should generate an error. +e2: float = e["x"] +e3: float | str = e["x"] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType10.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType10.py new file mode 100644 index 00000000..b8984e86 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType10.py @@ -0,0 +1,19 @@ +# This sample tests the case where a bound TypeVar is bound to a +# class method. 
+ +from typing import Any, Mapping, Type, TypeVar + +_Configuration = TypeVar("_Configuration", bound="Configuration") + + +class Configuration: + @classmethod + def _create( + cls: Type[_Configuration], data: Mapping[str, Any] + ) -> _Configuration: ... + + @classmethod + def _from_dict( + cls: Type[_Configuration], data: Mapping[str, Any] + ) -> _Configuration: + return cls._create({}) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType11.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType11.py new file mode 100644 index 00000000..0b09bdf2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType11.py @@ -0,0 +1,49 @@ +# This sample tests situations where bidirectional type inference +# influences the type of a generic function call. + +from typing import Callable, Iterable, List, Literal, TypeVar + +_T = TypeVar("_T") + + +def func1(arg: _T) -> _T: + return arg + + +v1: Literal["test"] = func1("test") + + +x: List[Literal["test"]] = ["test"] +v2: List[Literal["test"]] = func1(x) + + +def func2(arg: _T) -> List[_T]: + return [arg] + + +v3: List[Literal["test"]] = func2("test") + +v4 = func1("test") +reveal_type(v4, expected_text="str") + +v5 = func2("test") +reveal_type(v5, expected_text="List[str]") + + +def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T]) -> _T: ... 
+ + +dicts = [{"a": "b"}, {"c": "d"}] +v6 = reduce(lambda x, y: x | y, dicts) +reveal_type(v6, expected_text="dict[str, str]") + +v7 = reduce(lambda x, y: {**x, **y}, dicts) +reveal_type(v7, expected_text="dict[str, str]") + + +def func3(func: Callable[[_T], bool], b: dict[_T, int]) -> _T: + return next(iter(b.keys())) + + +def func4(func: Callable[[_T], bool]) -> _T: + return func3(func, {}) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType12.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType12.py new file mode 100644 index 00000000..d489425e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType12.py @@ -0,0 +1,60 @@ +# This sample tests the case where the return type of a function is +# a generic Callable that can be specialized with type variables +# provided by the caller. + +from typing import Callable, List, TypeVar, Union + +_T = TypeVar("_T") + + +def identity_generic(x: _T) -> _T: + return x + + +def identity_callable_1(x: Callable[[_T], _T]) -> Callable[[_T], _T]: + return x + + +MyCallable = Callable[[_T], _T] + + +def identity_callable_2(x: MyCallable[_T]) -> MyCallable[_T]: + return x + + +def identity_int(x: int) -> int: + return x + + +v1 = identity_callable_1(identity_int) +reveal_type(v1, expected_text="(int) -> int") +reveal_type(v1(0), expected_text="int") + +v2 = identity_callable_1(identity_generic) +reveal_type(v2, expected_text="(_T@identity_generic) -> _T@identity_generic") +reveal_type(v2(0), expected_text="int") +reveal_type(v2(""), expected_text="str") + +v3 = identity_callable_2(identity_int) +reveal_type(v3, expected_text="(int) -> int") +reveal_type(v3(0), expected_text="int") + +v4 = identity_callable_2(identity_generic) +reveal_type(v4, expected_text="(_T@identity_generic) -> _T@identity_generic") +reveal_type(v4(0), expected_text="int") +reveal_type(v4(""), expected_text="str") + + +_U = TypeVar("_U") + + +def dec() -> Callable[[_U], _U]: 
... + + +@dec() +def func1(x: _T, y: Union[_T, List[_T]]) -> None: + pass + + +func1(1, 2) +func1(1, [2, 3]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType13.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType13.py new file mode 100644 index 00000000..f48497d9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType13.py @@ -0,0 +1,17 @@ +# This sample tests that various types can be assigned to Type[Any]. + +from typing import Any, Type, TypeVar + + +class ClassA: ... + + +T = TypeVar("T") + + +def func1(x: Type[Any], y: Type[T]) -> T: + v1: Type[Any] = x + v2: Type[Any] = ClassA + v3: Type[Any] = y + + return y() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType14.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType14.py new file mode 100644 index 00000000..833c3564 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType14.py @@ -0,0 +1,34 @@ +# This sample tests a case where Type[X] and X are used within the +# same class declaration. 
+ +from typing import Generic, TypeVar +from dataclasses import dataclass, field + +K = TypeVar("K") +V = TypeVar("V") + + +@dataclass +class Registry(Generic[K, V]): + key: K + value: dict[str, V] = field(default_factory=dict) + + +class Base: + pass + + +BaseType = TypeVar("BaseType", bound=Base) + + +class BaseTypeRegistry(Registry[type[BaseType], BaseType]): + pass + + +class Thing1(Base): + pass + + +reveal_type(BaseTypeRegistry(Thing1), expected_text="BaseTypeRegistry[Thing1]") + +foo: BaseTypeRegistry[Thing1] = BaseTypeRegistry(Thing1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType15.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType15.py new file mode 100644 index 00000000..a44c379b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType15.py @@ -0,0 +1,24 @@ +# This sample tests the assignment of invariant and contravariant +# type variables to other type variables. + + +from typing import Callable, Generic, TypeVar + + +T1 = TypeVar("T1") +T2 = TypeVar("T2") +T3 = TypeVar("T3") + + +class ClassA(Generic[T1]): ... + + +class ClassB(Generic[T2]): + def broken(self, p0: ClassA[T2], p1: Callable[[T2], object]): + func(p0, p1) + + +def func( + p0: ClassA[T3], + p1: Callable[[T3], object], +): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType16.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType16.py new file mode 100644 index 00000000..fba37256 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType16.py @@ -0,0 +1,22 @@ +# This sample tests a special case of bidirectional type inference when +# the expected type is a union and the destination type is a union that +# contains Any and a TypeVar. + + +from typing import Any, Literal, TypeVar + +_T = TypeVar("_T") + + +def func1(__o: object, __name: str, __default: _T) -> Any | _T: ... 
+ + +x: Literal[1, 2, 3] = func1(object(), "", 1) + + +def func2(a: _T) -> bool | _T: ... + + +y = func2(None) +if y is not True: + z = y or func2(False) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType17.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType17.py new file mode 100644 index 00000000..d34c20db --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType17.py @@ -0,0 +1,23 @@ +# This sample tests the handling of a generic type whose implementation +# includes the instantiation of another instance of itself using its +# own type parameters as type arguments. + +from typing import Generic, TypeVar + +A = TypeVar("A") +B = TypeVar("B") + + +class X(Generic[A, B]): + _dict: dict[A, B] + _pair: "X[B, A]" + + def method(self, a: A, b: B) -> None: + self._pair._dict[b] + + +x = X[int, str]() +x._pair._dict["foo"] + +reveal_type(x._pair, expected_text="X[str, int]") +reveal_type(x._pair._pair, expected_text="X[int, str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType18.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType18.py new file mode 100644 index 00000000..c404acb4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType18.py @@ -0,0 +1,21 @@ +# This sample tests the case where a generic function +# returns a generic Callable. + +from typing import Callable, TypeVar + + +_T = TypeVar("_T") + + +def func1(val1: _T) -> Callable[[_T], None]: + def f(a: str): ... + + # This should generate an error because str isn't + # compatible with _T. + return f + + +def func2(val1: _T) -> Callable[[_T], None]: + def f(a: _T): ... 
+ + return f diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType19.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType19.py new file mode 100644 index 00000000..d3881e7b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType19.py @@ -0,0 +1,12 @@ +# This sample tests that a class that derives from Any can be used +# to satisfy a TypeVar. + +from typing import Any, TypeVar + + +T = TypeVar("T") + + +def foo(self, obj: T, foo: Any) -> T: + # NotImplemented is an instance of a class that derives from Any. + return NotImplemented diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType2.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType2.py new file mode 100644 index 00000000..de60ce33 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType2.py @@ -0,0 +1,27 @@ +# This sample tests the type checker's ability to do bidirectional +# type inference when the expected type is defined by a bound TypeVar. + +from typing import TypeVar + + +class A: + pass + + +class B(A): + pass + + +class C(A): + pass + + +_T_A = TypeVar("_T_A", bound=A) + + +def func1(value: dict[str, _T_A]) -> _T_A: + return value["a"] + + +x = func1({"b": B(), "c": C()}) +reveal_type(x, expected_text="B | C") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType20.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType20.py new file mode 100644 index 00000000..8d9f0d3a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType20.py @@ -0,0 +1,15 @@ +# This sample tests the case where a declared class variable with type +# type[T] is assigned a value in a base class that is not a type. + +from typing import Any, Generic, TypeVar + +T = TypeVar("T") + + +class Parent(Generic[T]): + y: type[T] + + +class Child(Parent[Any]): + # This should generate an error. 
+ y = 42 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType21.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType21.py new file mode 100644 index 00000000..d8c6bb37 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType21.py @@ -0,0 +1,25 @@ +# This sample verifies that a type compatibility check that involves +# a union of a concrete type and a TypeVar does not depend on the order +# in the union. + +from typing import TypeVar + +_T = TypeVar("_T") + + +def f1(x: tuple[int | _T]) -> _T | None: + pass + + +def f2(x: tuple[_T | int]) -> None | _T: + pass + + +def g1(z: tuple[int] | tuple[_T]) -> _T | None: + reveal_type(f1(z), expected_text="_T@g1 | None") + reveal_type(f2(z), expected_text="_T@g1 | None") + + +def g2(z: tuple[_T] | tuple[int]) -> _T | None: + reveal_type(f1(z), expected_text="_T@g2 | None") + reveal_type(f2(z), expected_text="_T@g2 | None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType22.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType22.py new file mode 100644 index 00000000..0e45b63d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType22.py @@ -0,0 +1,15 @@ +# This sample tests that a generic callable passed to a function cannot +# be called with parameters that don't match the generic. + +from typing import TypeVar, Callable, Any + +T = TypeVar("T") + + +def func1(cb: Callable[[T], list[T]], param1: T, param2: Any, param3: Any | T) -> None: + cb(param1) + cb(param2) + cb(param3) + + # This should generate an error. 
+ cb(0) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType23.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType23.py new file mode 100644 index 00000000..1d838a78 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType23.py @@ -0,0 +1,13 @@ +# This sample tests that a recursive call to a generic function +# evaluates correctly. + +# pyright: strict + +from typing import TypeVar + +_T = TypeVar("_T") + + +def func1(x: list[_T]) -> list[_T]: + result = func1(x) + return result diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType24.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType24.py new file mode 100644 index 00000000..1663aa7c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType24.py @@ -0,0 +1,19 @@ +# This sample tests a case where a default argument in a parent class +# needs to be specialized in the context of a child class. + +from typing import Generic, Iterable, Iterator, TypeVar + +T = TypeVar("T") + + +class IterableProxy(Iterable[T]): + def __iter__(self) -> Iterator[T]: ... + + +class Parent(Generic[T]): + def m1(self, v: Iterable[T] = IterableProxy()) -> None: ... + + +class Child(Parent[T]): + def m2(self) -> None: + self.m1() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType25.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType25.py new file mode 100644 index 00000000..fa218812 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType25.py @@ -0,0 +1,10 @@ +# This sample tests that a union that includes two types that are subtypes +# of each other (like tuple[Any] and tuple[int]) is handled correctly +# when performing type compatibility tests. + +from typing import Any + + +def func(t: tuple[Any] | tuple[int]): + # This should generate a type violation. 
+ x: int = t diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType26.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType26.py new file mode 100644 index 00000000..cd1a981e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType26.py @@ -0,0 +1,82 @@ +# This sample tests for the case where a generic callable type is +# specialized with type variables in a recursive manner. + +from dataclasses import dataclass +from typing import Callable, Generic, Iterable, Iterator, TypeVar, overload + +S = TypeVar("S") +T = TypeVar("T") +U = TypeVar("U") +V = TypeVar("V") + + +class ClassA(Generic[T, U]): + x: Callable[[T], U] + + def __init__(self, f: Callable[[T], U]): + self.x = f + + def __call__(self, x: T) -> U: + return self.x(x) + + def __add__(self, other: "ClassA[U, V]") -> "ClassA[T, V]": + f = self.x + g: Callable[[U], V] = other.x + return ClassA(lambda x: g(f(x))) + + +class ClassB(Generic[T]): + value: T + + def __init__(self, val: T) -> None: + self.value = val + + def method1(self, val: U) -> "ClassB[U]": + # This should generate an error. + return ClassB(self.value) + + +@dataclass +class DC1(Generic[T]): + value: T + + +@dataclass +class DC2(Generic[S]): + value: S + + +@dataclass +class ClassC(Generic[T, S]): + value: DC1[T] | DC2[S] + + def method1(self, val: U) -> "ClassC[U, S]": + if isinstance(self.value, DC1): + # This should generate an error. + return ClassC(self.value) + else: + return ClassC(self.value) + + +T_co = TypeVar("T_co", covariant=True) + + +class ClassD(Generic[T_co]): + @overload + def __init__(self, arg: Iterable[T_co]) -> None: ... + + @overload + def __init__(self, arg: Callable[[], Iterable[T_co]]) -> None: ... + + def __init__(self, arg: Iterable[T_co] | Callable[[], Iterable[T_co]]) -> None: ... + + def __iter__(self) -> Iterator[T_co]: ... 
+ + +class ClassE(ClassD[T_co]): + def method(self) -> "ClassE[ClassE[T_co]]": + def inner(): + for x in self: + yield ClassE(lambda: [x]) + + return ClassE(inner) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType27.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType27.py new file mode 100644 index 00000000..e76972fb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType27.py @@ -0,0 +1,17 @@ +# This sample tests the case where a type conditioned on a TypeVar +# is assigned to that same TypeVar in an invariant context. + +from typing import TypeVar + + +class ClassA: ... + + +T = TypeVar("T", bound=ClassA) + + +def func1(cls: type[T]) -> list[type[T]]: + result = [cls] + for c in cls.__subclasses__(): + result.extend(func1(c)) + return result diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType28.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType28.py new file mode 100644 index 00000000..5cefac83 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType28.py @@ -0,0 +1,183 @@ +# This sample tests that a class-scoped TypeVar used to parameterize +# a base class within a class definition cannot be covariant or +# contravariant if the base class requires an invariant type parameter. + +from typing import Generic, Sequence, TypeVar, TypeVarTuple, Unpack + +T = TypeVar("T") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +# This should generate an error because the type parameter for list +# is invariant, so T_co here cannot be covariant. +class Class1(list[T_co]): + pass + + +# This should generate an error because the type parameter for list +# is invariant, so T_co here cannot be contravariant. +class Class2(list[T_contra]): + pass + + +class Class3(Generic[T_co]): ... + + +class Class3_Child1(Class3[T_co]): ... 
+ + +class Class3_Child2(Class3[T]): ... + + +# This should generate an error because T_contra isn't +# compatible with T_co. +class Class3_Child3(Class3[T_contra]): ... + + +class Class4(Generic[T_contra]): ... + + +class Class4_Child1(Class4[T_contra]): ... + + +class Class4_Child2(Class4[T]): ... + + +# This should generate an error because T_co isn't +# compatible with T_contra. +class Class4_Child3(Class4[T_co]): ... + + +class Class5(Generic[T_contra]): ... + + +class Class5_Child1(Class5[frozenset[T_contra]]): ... + + +# This should generate an error because Sequence[T_co] +# is covariant and is therefore not compatible with +# a contravariant type parameter. +class Class5_Child2(Class5[Sequence[T_co]]): ... + + +class Class5_Child3(Class5[Sequence[T]]): ... + + +class Class6(Generic[T_co, T_contra]): ... + + +class Class6_Child1(Class6[T_co, T_contra]): ... + + +# This should generate an error because T_co isn't +# compatible with T_contra. +class Class6_Child2(Class6[T_co, T_co]): ... + + +# This should generate an error because T_contra isn't +# compatible with T_co. +class Class6_Child3(Class6[T_contra, T_contra]): ... + + +class Class6_Child4(Class6[T, T]): ... + + +# This should generate an error because Sequence[T_co] isn't +# compatible with T_contra. +class Class6_Child5(Class6[Sequence[T_co], Sequence[T_co]]): ... + + +class Co(Generic[T_co]): ... + + +class Contra(Generic[T_contra]): ... + + +class CoToContra(Contra[Co[T_contra]]): ... + + +class ContraToContra(Contra[Contra[T_co]]): ... + + +class CoToCo(Co[Co[T_co]]): ... + + +class ContraToCo(Co[Contra[T_contra]]): ... + + +# This should generate an error. +class CoToContraToContra(Contra[Co[Contra[T_contra]]]): ... + + +# This should generate an error. +class ContraToContraToContra(Contra[Contra[Contra[T_co]]]): ... + + +Co_TA = Co[T_co] +Contra_TA = Contra[T_contra] + + +class CoToContra_WithTA(Contra_TA[Co_TA[T_contra]]): ... + + +class ContraToContra_WithTA(Contra_TA[Contra_TA[T_co]]): ... 
+ + +class CoToCo_WithTA(Co_TA[Co_TA[T_co]]): ... + + +class ContraToCo_WithTA(Co_TA[Contra_TA[T_contra]]): ... + + +# This should generate an error. +class CoToContraToContra_WithTA(Contra_TA[Co_TA[Contra_TA[T_contra]]]): ... + + +# This should generate an error. +class ContraToContraToContra_WithTA(Contra_TA[Contra_TA[Contra_TA[T_co]]]): ... + + +Ts = TypeVarTuple("Ts") + + +class Variadic(Generic[Unpack[Ts]]): ... + + +class VariadicChild(Variadic[T]): ... + + +# This should generate an error. +class VariadicChildCo(Variadic[T_co]): ... + + +# This should generate an error. +class VariadicChildContra(Variadic[T_contra]): ... + + +Variadic_TA = Variadic[Unpack[tuple[int, Unpack[Ts]]]] + + +class VariadicChild_WithTA(Variadic_TA[T]): ... + + +# This should generate an error. +class VariadicChildCo_WithTA(Variadic_TA[T_co]): ... + + +# This should generate an error. +class VariadicChildContra_WithTA(Variadic_TA[T_contra]): ... + + +Variadic_TA2 = Variadic[Unpack[tuple[int, T]]] + + +class VariadicChild_WithTA2(Variadic_TA2[T]): ... + + +# This should generate an error. +class VariadicChildCo_WithTA2(Variadic_TA2[T_co]): ... + + +# This should generate an error. +class VariadicChildContra_WithTA2(Variadic_TA2[T_contra]): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType29.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType29.py new file mode 100644 index 00000000..7652125a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType29.py @@ -0,0 +1,17 @@ +# This sample tests the case where a contravariant type parameter +# has a union type that must be matched against another union +# type for purposes of bidirectional type inference. + +from typing import Generic, TypeVar + +T1 = TypeVar("T1", contravariant=True) +T2 = TypeVar("T2") + + +class A(Generic[T1]): ... + + +def func1(x: A[T2]) -> A[T2 | None]: ... 
+ + +x1: A[int | None] = func1(A[int]()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType3.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType3.py new file mode 100644 index 00000000..eee5d179 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType3.py @@ -0,0 +1,27 @@ +# This sample tests the type checker's handling of constructors +# that specify a specialized class type. + +from typing import Generic, TypeVar, Callable + +T = TypeVar("T", bound=float) + + +class Root(Generic[T]): + def __init__(self, _lambda: Callable[[T], T]): + self._lambda = _lambda + + def call(self, val: T) -> T: + return self._lambda(val) + + +class Leaf(Root[T]): + pass + + +root_int: Root[int] = Root[int](lambda x: x << 2) + +# This should generate an error. +root_float: Root[float] = root_int + +# This should generate an error. +root_float: Root[float] = Root[int](lambda x: x << 2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType30.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType30.py new file mode 100644 index 00000000..7defeea9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType30.py @@ -0,0 +1,16 @@ +# This sample tests the case where a specialized generic class references +# itself in a magic method like __iter__. + +from typing import Iterator, Generic, TypeVar + +A = TypeVar("A") + + +class Iter(Generic[A]): + def __iter__(self) -> Iterator[A]: ... + + def enumerate(self) -> "Iter[tuple[int, A]]": ... 
+ + def method1(self) -> None: + for x in self.enumerate(): + reveal_type(x, expected_text="tuple[int, A@Iter]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType31.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType31.py new file mode 100644 index 00000000..d52e407a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType31.py @@ -0,0 +1,16 @@ +# This sample tests the assignment of unions that contain TypeVars. + +from typing import TypeVar + + +T = TypeVar("T") + + +def func1(x: T | None) -> T | str: + # This should generate an error. + return x + + +def func2(x: T | int) -> T | str: + # This should generate an error. + return x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType32.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType32.py new file mode 100644 index 00000000..e8df863a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType32.py @@ -0,0 +1,20 @@ +# This sample tests the case where a invariant type parameter is used +# within a contravariant type argument. + +from typing import TypeVar, Generic + +T = TypeVar("T") +T_contra = TypeVar("T_contra", contravariant=True) + + +class Contra(Generic[T_contra]): ... + + +class Foo(Generic[T]): ... + + +class Bar(Foo[T]): ... + + +def func(x: Contra[Foo[int]]): + v: Contra[Bar[int]] = x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType33.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType33.py new file mode 100644 index 00000000..5a6f623e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType33.py @@ -0,0 +1,27 @@ +# This sample tests the case where a contravariant TypeVar is used in a protocol. + +from typing import Generic, Protocol, TypeVar + +T_contra = TypeVar("T_contra", contravariant=True) + + +class Contra(Generic[T_contra]): ... 
+ + +class Foo(Protocol[T_contra]): + def f(self) -> Contra[T_contra]: ... + + +def t1(x: Foo[T_contra]) -> list[T_contra] | None: ... + + +def t2(x: Foo[object]) -> None: ... + + +def func1(x: Foo[T_contra]) -> list[T_contra] | None: + # This should generate an error. + t2(x) + + +def func2(x: Foo[object]) -> None: + t1(x) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType34.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType34.py new file mode 100644 index 00000000..2cc39000 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType34.py @@ -0,0 +1,18 @@ +# This sample tests bidirectional type inference for the call of a generic +# method that returns a generic type with multiple type arguments where +# some of these type arguments can be satisfied directly by the expected +# type's type arguments and some cannot. + +from typing import Generic, Literal, TypeVar + +_T_co = TypeVar("_T_co", covariant=True) +_N = TypeVar("_N", bound=int) + + +class ClassA(Generic[_T_co, _N]): ... + + +def func1(n: _N) -> ClassA[Literal[0], _N]: ... + + +v1: ClassA[int, Literal[1]] = func1(1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType35.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType35.py new file mode 100644 index 00000000..e58f3a1a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType35.py @@ -0,0 +1,28 @@ +# This sample tests the case where a function that includes a tuple +# parameter type is assignable to a generic callable that also includes +# a tuple type. + +from typing import Callable, TypeVar + +X = TypeVar("X") +Y = TypeVar("Y") + + +def deco1(func: Callable[[tuple[X]], Y]) -> Callable[[X], Y]: ... + + +def func1(x: tuple[str]) -> int: ... + + +v1 = deco1(func1) +reveal_type(v1, expected_text="(str) -> int") + + +def deco2(func: Callable[[tuple[X, ...]], Y]) -> Callable[[X], Y]: ... 
+ + +def func2(x: tuple[str]) -> int: ... + + +# This should generate an error because of a tuple size mismatch. +deco2(func2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType36.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType36.py new file mode 100644 index 00000000..255df5f2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType36.py @@ -0,0 +1,26 @@ +# This sample tests the case where bidirectional type inference is used +# for a call expression that returns a callable and requires bidirectional +# type inference to determine the desired results and the expected type +# is a union that includes some non-callable types. + +from typing import Callable, Literal, TypeVar + +ABC = Literal["a", "b", "c"] +T = TypeVar("T") + + +def func1(x: T | Callable[[], T]) -> Callable[[], T]: ... + + +def func2(a: Callable[[], ABC] | ABC, b: ABC | Callable[[], ABC]): + v1 = func1(a) + reveal_type(v1, expected_text="() -> str") + + v2 = func1(b) + reveal_type(v2, expected_text="() -> str") + + v3: Callable[[], ABC] = func1(a) + reveal_type(v3, expected_text="() -> Literal['a', 'b', 'c']") + + v4: Callable[[], ABC] = func1(b) + reveal_type(v4, expected_text="() -> Literal['a', 'b', 'c']") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType37.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType37.py new file mode 100644 index 00000000..d4b4e355 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType37.py @@ -0,0 +1,17 @@ +# This sample tests bidirectional type inference for cases that involve +# constructors and dict inference. 
+ +from typing import TypeVar + +T = TypeVar("T") + + +def func1(some_dict: dict[str, set[T] | frozenset[T]]) -> list[T]: + return [] + + +v1 = func1({"foo": set({1, 2, 3})}) +reveal_type(v1, expected_text="list[int]") + +v2 = func1({"foo": frozenset({1, 2, 3})}) +reveal_type(v2, expected_text="list[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType38.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType38.py new file mode 100644 index 00000000..d4cbd05d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType38.py @@ -0,0 +1,48 @@ +# This sample tests bidirectional type inference with nested constructors. + +from typing import Generic, Iterable, Iterator, TypeVar, overload, Any +from itertools import chain + +_T = TypeVar("_T") + + +class ClassA(Generic[_T]): + @overload + def __init__(self, _: _T): ... + + @overload + def __init__(self, _: Any): ... + + def __init__(self, _: Any): ... + + +class ClassB(Generic[_T]): + def __init__(self, _: ClassA[_T]): ... 
+ + +v1 = ClassA(0) +v2 = ClassB(v1) +v3 = ClassB(ClassA(0)) + +reveal_type(v1, expected_text="ClassA[int]") +reveal_type(v2, expected_text="ClassB[int]") +reveal_type(v3, expected_text="ClassB[int]") + + +def func1(x: list[_T], /) -> list[_T]: + return x + + +def func2(any: Any): + v1 = list([any]) + v2 = func1(v1) + v3 = func1(list([any])) + + reveal_type(v1, expected_text="list[Any]") + reveal_type(v2, expected_text="list[Any]") + reveal_type(v3, expected_text="list[Any]") + + +def func3(val1: Iterator[Iterable[int]]): + val2 = list(chain.from_iterable(val1)) + reveal_type(val2, expected_text="list[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType39.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType39.py new file mode 100644 index 00000000..f2bc199b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType39.py @@ -0,0 +1,8 @@ +# This sample tests bidirectional type inference for a generic class +# constructor that is passed an argument expression that contains a +# binary operator. + + +def func1(x: list[str] | None): + for _, v in enumerate(x or []): + reveal_type(v, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType4.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType4.py new file mode 100644 index 00000000..e503fcd4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType4.py @@ -0,0 +1,32 @@ +# This sample tests the type checker's handling of derived specialized +# objects assigned to their parent class type (also specialized). 
+ +from typing import Generic, TypeVar + +T = TypeVar("T", bound=float) + + +class Base1(Generic[T]): + pass + + +class Derived1(Base1[T]): + pass + + +val1: Base1[int] = Derived1[int]() + + +class Base2(Generic[T]): + pass + + +class Derived2(Base2[float], Generic[T]): + pass + + +val2_1: Base2[float] = Derived2[int]() + +# This should generate an error because Derived2[int] +# isn't assignable to Base2[int]. +val2_2: Base2[int] = Derived2[int]() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType40.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType40.py new file mode 100644 index 00000000..94804470 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType40.py @@ -0,0 +1,29 @@ +# This sample tests the case where a generic function returns a generic +# Callable. There are certain cases where we want the type variables in +# the return type to be rescoped to the return callable. + +from typing import Callable, TypeVar + + +_T1 = TypeVar("_T1") + + +def func1(a: _T1 | None) -> Callable[[_T1], _T1]: ... + + +v1 = func1(None) +reveal_type(v1, expected_text="(Unknown) -> Unknown") + + +def func2(a: None) -> Callable[[_T1], _T1]: ... + + +v2 = func2(None) +reveal_type(v2, expected_text="(_T1@func2) -> _T1@func2") + + +def func3(a: None) -> Callable[[type[_T1]], type[_T1]]: ... + + +v3 = func3(None) +reveal_type(v3, expected_text="(type[_T1@func3]) -> type[_T1@func3]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType41.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType41.py new file mode 100644 index 00000000..86c49c90 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType41.py @@ -0,0 +1,23 @@ +# This sample tests the case where a generic function that returns a tuple +# is called with an expected type. 
+ +from typing import Iterable, Literal, TypeVar + +A = TypeVar("A") +B = TypeVar("B") + + +def func1(a: A, b: B) -> tuple[Iterable[A], Iterable[B]]: + return ((a,), (b,)) + + +def func2(a: A, b: B) -> tuple[Iterable[B], Iterable[A]]: + return ([b], [a]) + + +def func3() -> tuple[Iterable[str], Iterable[int]]: + return func1("", 3) + + +def func4() -> tuple[Iterable[Literal["hi"]], Iterable[Literal[3]]]: + return func2(3, "hi") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType42.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType42.py new file mode 100644 index 00000000..b009c9f7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType42.py @@ -0,0 +1,56 @@ +# This sample tests the case where a generic function call is nested +# within itself. + +from typing import Callable, ParamSpec, Protocol, Type, TypeVar, Union, overload +from itertools import chain + +P = ParamSpec("P") +R = TypeVar("R") +T = TypeVar("T", covariant=True) + + +def func1(): + return [ + f"{12 if hour == 0 else hour!s}:{minute!s:0>2} {meridian}" + for hour, minute, meridian in chain.from_iterable( + chain.from_iterable( + [(hour, minute, meridian) for minute in range(0, 60, 15)] + for hour in range(12) + ) + for meridian in ("am", "pm") + ) + ] + + +class A(Protocol[T, P]): + def __init__(self, *args: P.args, **kwds: P.kwargs): ... + + +def make_a(x: Callable[P, R]) -> Type[A[R, P]]: ... + + +@overload +def func2(x: Type[A[R, P]]) -> Type[A[R, P]]: ... + + +@overload +def func2(x: Callable[P, R]) -> Type[A[R, P]]: ... + + +def func2(x: Union[Type[A[R, P]], Callable[P, R]]) -> Type[A[R, P]]: ... + + +def func3(): + def foo(x: int) -> str: ... 
+ + x = make_a(foo) + y = func2(x) + z = func2(make_a(foo)) + + reveal_type(y, expected_text="type[A[str, (x: int)]]") + reveal_type(z, expected_text="type[A[str, (x: int)]]") + + +def func4(my_dict: dict[str, str]): + reveal_type(my_dict.get("item1", ""), expected_text="str") + reveal_type(my_dict.get("item1", my_dict.get("item2", "")), expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType43.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType43.py new file mode 100644 index 00000000..addbfe41 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType43.py @@ -0,0 +1,25 @@ +# This sample tests bidirectional type inference in cases where the +# expected type is a union of multiple class instances. + +from typing import Iterable, Sequence + + +def func1(points: tuple[float, float] | Iterable[tuple[float, float]]) -> None: ... + + +def test1(val: tuple[float, float]): + func1(tuple((val, val))) + + +def func2(points: tuple[float, float] | Sequence[tuple[float, float]]) -> None: ... + + +def test2(val: tuple[float, float]): + func2(tuple([val, val])) + + +def func3(points: tuple[float, float] | tuple[str, str]) -> None: ... + + +def test3(val: tuple[float, float]): + func3(val) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType44.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType44.py new file mode 100644 index 00000000..3ff2786e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType44.py @@ -0,0 +1,25 @@ +# This sample tests the case where a protocol uses a covariant +# type parameter but a corresponding implementation uses an +# invariant type parameter. Literal types need to be handled +# carefully in this case. 
+ +from typing import Any, Awaitable, Generator, Literal, TypeVar + +_T = TypeVar("_T") + + +class Future(Awaitable[_T]): + def __await__(self) -> Generator[Any, None, _T]: ... + + +def func1(future: Future[_T]) -> Future[_T]: ... + + +def func2(cb: Awaitable[_T]) -> Future[_T]: ... + + +def func3() -> Awaitable[Literal[True]]: ... + + +v1 = func1(func2(func3())) +reveal_type(v1, expected_text="Future[Literal[True]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType45.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType45.py new file mode 100644 index 00000000..ce033c55 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType45.py @@ -0,0 +1,57 @@ +# This sample tests that invariance is properly enforced when appropriate. + +from typing import Any, TypeVar + +T = TypeVar("T") + + +def func1(v: list[float | str]): + # This should generate an error. + x1: list[float] = v + + x2: list[str | float] = v + + # This should generate an error. + x3: list[float | str | None] = v + + x4: list[Any | str] = v + + # This should generate an error. + x5: list[int | str] = v + + x6: list[float | int | str] = v + + +def func2(v: list[T]) -> T: + x1: list[T] = v + + x2: list[Any | T] = v + + # This should generate an error. + x3: list[int | T] = v + + return v[0] + + +def func3(v: list[float | T]) -> float | T: + # This should generate an error. + x1: list[T] = v + + x2: list[Any | T] = v + + x3: list[T | float] = v + + # This should generate an error. 
+ x4: list[T | int] = v + + x5: list[float | int | T] = v + + return v[0] + + +def func4(v: list[Any | int | str]): + x1: list[Any | int] = v + + x2: list[Any | list[str]] = v + + x3: list[Any | int | str | list[str]] = v diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType46.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType46.py new file mode 100644 index 00000000..feb44448 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType46.py @@ -0,0 +1,36 @@ +# This sample tests calls to a generic class constructor from within the class. + +from typing import Callable, Generic, TypeVar + +T = TypeVar("T") +U = TypeVar("U") +W = TypeVar("W") + + +class ClassA(Generic[T]): + def __init__(self, vals: list[U], func: Callable[[U], T]): + self._vals = list(map(func, vals)) + + def method1(self, func: Callable[[T], W]) -> "ClassA[W]": + return ClassA(self._vals, func) + + +class ClassB(Generic[T]): + def __init__(self, vals: list[U], func: Callable[[U], T]): + self._vals = list(map(func, vals)) + + def method1(self, func: Callable[[T], W]) -> "ClassB[W]": + return func0(self, func) + + +def func0(c: ClassB[T], func: Callable[[T], W]) -> ClassB[W]: + return ClassB(c._vals, func) + + +class ClassC(Generic[T, U]): + def __init__(self): + pass + + def test2(self) -> None: + x1: ClassC[U, T] + x1 = ClassC[U, T]() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType47.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType47.py new file mode 100644 index 00000000..0c15043a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType47.py @@ -0,0 +1,34 @@ +# This sample tests specialization of nested generic classes. 
+ +# pyright: strict + + +from typing import Collection, Generic, Iterable, Iterator, TypeVar + +A = TypeVar("A") +T = TypeVar("T") + + +class ClassA(Collection[T]): + def __init__(self, value: Iterable[T]) -> None: + self.values = tuple(value) + + def __contains__(self, item: object) -> bool: + return True + + def __iter__(self) -> Iterator[T]: + return iter(self.values) + + def __len__(self) -> int: + return len(self.values) + + +class ClassB(Generic[T]): + pass + + +def func1(input: ClassA[ClassB[A]]) -> ClassB[ClassA[A]]: + v = input.values + result = func1(ClassA(v)) + + return result diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType5.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType5.py new file mode 100644 index 00000000..859e7085 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType5.py @@ -0,0 +1,41 @@ +# This sample tests bidirectional inference when the type derives from the +# expected type and both are generic. + +from typing import Generic, Iterable, Mapping, TypeVar + +v0: Mapping[str, int | str] | None = dict([("test1", 1), ("test2", 2)]) + +v1: Mapping[str, float] | None = dict([("test1", 1), ("test2", 2)]) + +# This should generate an error because of a type mismatch. +v2: Mapping[str, str] = dict([("test1", 1), ("test2", "2")]) + + +options: dict[int | str, int] = {} +channel_types: dict[str, int] = {} + +keys = channel_types.keys() + +options.update(dict.fromkeys(keys, 1)) + + +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_S = TypeVar("_S") +_T = TypeVar("_T") +_X = TypeVar("_X") + + +class A(Generic[_KT, _VT]): + @classmethod + def method1(cls, i: Iterable[_T], v: _S) -> "A[_T, _S]": ... + + +def func1(__x: A[int, _X] | A[str, _X] | A[str | int, _X]) -> A[int, _X]: ... 
+ + +v3 = func1(A.method1("a", "b")) +reveal_type(v3, expected_text="A[int, str]") + +v4 = str.maketrans(dict.fromkeys("a", "b")) +reveal_type(v4, expected_text="dict[int, str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType6.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType6.py new file mode 100644 index 00000000..fbf1b334 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType6.py @@ -0,0 +1,25 @@ +# This sample verifies that the use of a bound TypeVar defined +# by a generic class is not used inappropriately. + +from typing import Any, Generic, TypeVar + + +class ClassA: ... + + +_T = TypeVar("_T", bound=ClassA) + + +class ClassB(Generic[_T]): + def func1(self, a: _T): + pass + + def func2(self, y: _T): + x: int = 3 + # This should generate an error + self.func1(x) + + self.func1(y) + + z: Any = 3 + self.func1(z) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType7.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType7.py new file mode 100644 index 00000000..f5372682 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType7.py @@ -0,0 +1,69 @@ +# This sample tests the handling of TypeVars defined by +# a generic function. + +from typing import Generic, TypeVar + + +class ClassA: + pass + + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2", bound=ClassA) +_T2A = TypeVar("_T2A", bound=ClassA) +_T3 = TypeVar("_T3", ClassA, int, str) + + +class Class1(Generic[_T1]): + def __init__(self, a: _T1): + self._a: dict[str, _T1] = {} + self._b: tuple[_T1, ...] = (a, a, a) + self._c: tuple[_T1, _T1] = (a, a) + self._d: list[_T1] = [a] + + +class Class2(Generic[_T2]): + def __init__(self, a: _T2): + self._a: dict[str, _T2] = {} + self._b: tuple[_T2, ...] 
= (a, a, a) + self._c: tuple[_T2, _T2] = (a, a) + self._d: list[_T2] = [a] + + +class Class2A(Generic[_T2, _T2A]): + def __init__(self, a: _T2, b: _T2A): + self._a1: dict[str, _T2A] = {"a": b} + self._a2: dict[str, _T2] = {"a": a} + self._b: tuple[_T2, ...] = (a, a, a) + self._c: tuple[_T2, _T2] = (a, a) + self._d: list[_T2] = [a] + + +class Class3(Generic[_T3]): + def __init__(self, a: _T3): + self._a: dict[str, _T3] = {} + self._b: tuple[_T3, ...] = (a, a, a) + self._c: tuple[_T3, _T3] = (a, a) + self._d: list[_T3] = [a] + + +class Animal: + pass + + +class Cow(Animal): + pass + + +_TA = TypeVar("_TA", bound=Animal) + + +def fn(p2: _TA) -> _TA: + # This should generate an error. + p2 = Animal() + + if 1 + 1 == 3: + return p2 + + # This should generate an error. + return Animal() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType8.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType8.py new file mode 100644 index 00000000..8cf936d6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType8.py @@ -0,0 +1,24 @@ +# This sample tests that default parameter values can be assigned +# to types that are generic. + +from typing import Generic, List, Type, TypeVar + + +class ClassA: + pass + + +T_A = TypeVar("T_A", bound=ClassA) +T = TypeVar("T") + + +class ClassB(Generic[T_A, T]): + def __init__( + self, + p1: Type[T_A] = ClassA, + p2: List[T] = [], + # This should generate an error. 
+ p3: List[T_A] = [2], + p4: List[T] = [2], + ): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/genericType9.py b/python-parser/packages/pyright-internal/src/tests/samples/genericType9.py new file mode 100644 index 00000000..4fb1d9a1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/genericType9.py @@ -0,0 +1,43 @@ +# This sample tests the case where a method is invoked on a +# generic class that is not specialized prior to binding to +# the method but is specialized implicitly via the arguments +# to the method. + +from typing import Generic, TypeVar + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") + + +class ClassA(Generic[_T1]): + @staticmethod + def func1(value: _T1) -> "ClassA[_T1]": + return ClassA[_T1]() + + @classmethod + def func2(cls, value: _T1) -> "ClassA[_T1]": + return cls() + + +class ClassASub1(ClassA[_T2]): + pass + + +class ClassASub2(ClassA[int]): + pass + + +def test1(val_str: str, val_int: int): + reveal_type(ClassA.func1(val_str), expected_text="ClassA[Unknown]") + reveal_type(ClassASub1.func1(val_str), expected_text="ClassA[Unknown]") + reveal_type(ClassASub2.func1(val_int), expected_text="ClassA[int]") + + # This should generate an error because the argument type doesn't match. + ClassASub2.func1(val_str) + + reveal_type(ClassA.func2(val_str), expected_text="ClassA[Unknown]") + reveal_type(ClassASub1.func2(val_str), expected_text="ClassA[Unknown]") + reveal_type(ClassASub2.func2(val_int), expected_text="ClassA[int]") + + # This should generate an error because the argument type doesn't match. 
+ ClassASub2.func2(val_str) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/hashability1.py b/python-parser/packages/pyright-internal/src/tests/samples/hashability1.py new file mode 100644 index 00000000..dbe2419c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/hashability1.py @@ -0,0 +1,56 @@ +# This sample tests the check for hashability that applies to entries +# within a set expression and keys within a dictionary expression. + +# pyright: reportIncompatibleVariableOverride=false + +from dataclasses import dataclass +from typing import Any + + +# This should generate two errors because {} and [] are not hashable. +s1 = {{}, 2, dict, frozenset(), []} + +# This should generate two errors because {} and [] are not hashable. +s2: set[Any] = {{}, 2, dict, frozenset(), []} + + +class StrList(list[str]): + def __hash__(self) -> int: ... + + +s3 = {StrList()} + + +# This should generate two errors because {} and [] are not hashable. +d1 = {{}: None, None: 2, dict: 3, frozenset(): 4, []: ""} + +# This should generate two errors because {} and [] are not hashable. +d2: dict[Any, Any] = {{}: None, None: 2, dict: 3, frozenset(): 4, []: ""} + + +def func1(x: str | dict[Any, Any], y: Any, z: None): + # This should generate an error because dict isn't hashable + d3 = {x: "hi"} + + d4 = {y: "hi", z: "hi"} + + +@dataclass +class DC1: + a: int + + +@dataclass(frozen=True) +class DC2: + a: int + + +dc1 = DC1(0) + +# This should generate an error because a non-frozen +# dataclass is not hashable. 
+d5 = {dc1: 100} + + +dc2 = DC2(0) +d6 = {dc2: 100} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/hashability2.py b/python-parser/packages/pyright-internal/src/tests/samples/hashability2.py new file mode 100644 index 00000000..44cce052 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/hashability2.py @@ -0,0 +1,51 @@ +# This sample tests that unhashable user classes are detected as unhashable. + +# pyright: reportIncompatibleMethodOverride=false + + +class A: ... + + +s1 = {A()} +d1 = {A(): 100} + + +class B: + def __eq__(self, other): ... + + +# Both of these should generate an error because a class that +# defines __eq__ but not __hash__ is not hashable. +s2 = {B()} +d2 = {B(): 100} + + +class C: + __hash__: None = None + + +class D(B, C): ... + + +# Both of these should generate an error because B is unhashable. +s3 = {D()} +d3 = {D(): 100} + + +class E: + def __hash__(self): ... + + +class F(D, E): ... + + +# Both of these should generate an error because D is unhashable. +s4 = {F()} +d4 = {F(): 100} + + +class G(E, D): ... + + +s5 = {G()} +d5 = {G(): 100} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/hashability3.py b/python-parser/packages/pyright-internal/src/tests/samples/hashability3.py new file mode 100644 index 00000000..ae74edfc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/hashability3.py @@ -0,0 +1,19 @@ +# This sample tests that __hash__ is set to None if +# __hash__ isn't set but __eq__ is. + + +class A: ... + + +A().__hash__() + + +class B: + def __eq__(self, value: object) -> bool: ... + + ... + + +# This should generate an error because __hash__ is implicitly set to None +# for a class that defines __eq__ but not __hash__. 
+B().__hash__() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import1.py b/python-parser/packages/pyright-internal/src/tests/samples/import1.py new file mode 100644 index 00000000..380c59fd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import1.py @@ -0,0 +1,7 @@ +# This sample tests the type analyzer's handling of the built-in +# __import__ function. + +reveal_type(__path__, expected_text="MutableSequence[str]") + +# This should not generate a type error. +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import10.py b/python-parser/packages/pyright-internal/src/tests/samples/import10.py new file mode 100644 index 00000000..7d1cef89 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import10.py @@ -0,0 +1,11 @@ +# This sample tests the handling of an unresolved import. +# It should report a single error but not have cascading +# errors when the unresolved symbol is used. + +# This should generate an error. +import unresolved_import + + +def test_zero_division(): + with unresolved_import.raises(ZeroDivisionError): + v = 1 / 0 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import11.py b/python-parser/packages/pyright-internal/src/tests/samples/import11.py new file mode 100644 index 00000000..295f1c61 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import11.py @@ -0,0 +1,11 @@ +# This sample tests that the type checker properly handles +# the "from .decoder import JSONDecodeError" statement in +# the json/__init__.pyi type stub file. According to PEP 484, +# this import statement should cause the json module to export +# not only the symbol JSONDecodeError but also the symbol +# "decoder". 
+ +import json + +a = json.decoder.JSONDecodeError +b = json.JSONDecodeError diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import12.py b/python-parser/packages/pyright-internal/src/tests/samples/import12.py new file mode 100644 index 00000000..c2444539 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import12.py @@ -0,0 +1,12 @@ +# This sample tests the reportWildcardImportFromLibrary option. + +# This should generate a warning or error depending on whether +# strict mode is enabled. +from typing import * + +# This should also generate the same warning or error. It's here to +# a double (redundant) wildcard import. +from typing import * + + +reveal_type(Dict, expected_text="type[Dict[Unknown, Unknown]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import13.py b/python-parser/packages/pyright-internal/src/tests/samples/import13.py new file mode 100644 index 00000000..435ff8f4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import13.py @@ -0,0 +1,5 @@ +# This sample is used in conjunction with import14.py to test +# PEP 562 (module-level __getattr__) support. + + +def __getattr__(name: str) -> int: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import14.py b/python-parser/packages/pyright-internal/src/tests/samples/import14.py new file mode 100644 index 00000000..9d05c9c6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import14.py @@ -0,0 +1,10 @@ +# This sample is used in conjunction with import13.py to test +# PEP 562 (module-level __getattr__) support. + +# pyright: strict + +from .import13 import foo1 +from . 
import import13 + +reveal_type(foo1, expected_text="int") +reveal_type(import13.foo2, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import15.py b/python-parser/packages/pyright-internal/src/tests/samples/import15.py new file mode 100644 index 00000000..145f1675 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import15.py @@ -0,0 +1,11 @@ +# This sample tests the case where a symbol is imported from two different +# sources, one of them in a try block and another in an except block. + +try: + from typing import TypedDict +except ImportError: + from typing_extensions import TypedDict # pyright: ignore[reportMissingModuleSource] + + +class TD1(TypedDict): + x: int diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import16.py b/python-parser/packages/pyright-internal/src/tests/samples/import16.py new file mode 100644 index 00000000..228354d3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import16.py @@ -0,0 +1,6 @@ +# This source ensures that a multi-part import statement without an alias +# implicitly imports all modules in the multi-part chain. + +import html.entities + +x = html.escape diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import17.py b/python-parser/packages/pyright-internal/src/tests/samples/import17.py new file mode 100644 index 00000000..412af22c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import17.py @@ -0,0 +1,8 @@ +# This sample is used by import18.py. 
+ +a: str = "hi" +b: float = 1.0 +c: int + +_d: int +__e__: int diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import18.py b/python-parser/packages/pyright-internal/src/tests/samples/import18.py new file mode 100644 index 00000000..448d3c4d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import18.py @@ -0,0 +1,11 @@ +# This should generate two errors because b and c overwrite declared +# types in an incompatible manner. +from .import17 import * + +a: str +b: str +c: str + +_d: str +__e__: str +g: str diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import2.py b/python-parser/packages/pyright-internal/src/tests/samples/import2.py new file mode 100644 index 00000000..42574e5c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import2.py @@ -0,0 +1,27 @@ +# This sample tests import resolution for relative imports. + +from datetime import datetime + +# This should generate an error because relative imports can +# be used only with the "from . import A" form. +import .package1 as p0 + +from . import package1 as p1 +a = p1.foo() + +from .package1 import foo +b = foo() + +# This should generate an error because there is no +# directory or file named packageXXX. +from . import packageXXX as p2 + +from .package1.sub import subfoo +# subfoo should resolve to the package1/sub/__init__.py, +# which returns a datetime. Verify that it does. +c: datetime = subfoo() + +from .package1.psyche import psyche1 +# This should resolve to package1/psyche.py even though +# there is a package1/psyche directory present. +d: int = psyche1() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import3.py b/python-parser/packages/pyright-internal/src/tests/samples/import3.py new file mode 100644 index 00000000..6fea83b1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import3.py @@ -0,0 +1,8 @@ +# This sample is imported by import4.py. 
+ +__all__ = ["foo", "_foo", "_bar"] + +foo = 3 +_foo = 4 +bar = 5 +_bar = 6 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import4.py b/python-parser/packages/pyright-internal/src/tests/samples/import4.py new file mode 100644 index 00000000..234f7ccf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import4.py @@ -0,0 +1,15 @@ +# This sample tests wildcard imports. + +from .import3 import * + +a = foo +b = _foo + +# This should generate an error because bar isn't +# included in the __all__ assignment. +c = bar +d = _bar + +# This should generate an error because a trailing comma +# isn't allowed in a "from import" statement without parens. +from .import3 import foo, diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import5.py b/python-parser/packages/pyright-internal/src/tests/samples/import5.py new file mode 100644 index 00000000..5944a8f2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import5.py @@ -0,0 +1,6 @@ +# This sample is imported by import6.py. + +foo = 3 +__foo = 4 +bar = 5 +_bar = 6 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import6.py b/python-parser/packages/pyright-internal/src/tests/samples/import6.py new file mode 100644 index 00000000..dd2142ca --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import6.py @@ -0,0 +1,17 @@ +# This sample tests wildcard imports. + +from .import5 import * + +a = foo + +# This should generate an error because there is no +# __all__ assignment, and names starting with a double underscore +# should not be imported in a wildcard. +b = __foo + +c = bar + +# This should generate an error because there is no __all__ assignment +# and names starting with a single underscore should not be imported +# in a wildcard. 
+d = _bar diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import7.py b/python-parser/packages/pyright-internal/src/tests/samples/import7.py new file mode 100644 index 00000000..0d5388d3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import7.py @@ -0,0 +1,14 @@ +# This sample tests that wildcard imports are not allowed +# outside of the module scope. + +from .import5 import * + + +class A: + # This should generate an error. + from .import5 import * + + +def func1(): + # This should generate an error. + from .import5 import * diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import8.py b/python-parser/packages/pyright-internal/src/tests/samples/import8.py new file mode 100644 index 00000000..dc07fa89 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import8.py @@ -0,0 +1,7 @@ +# This sample is imported by import9.py. + +# Implement __getattr__ function as described in PEP 562. + + +def __getattr__(name: str): + return str diff --git a/python-parser/packages/pyright-internal/src/tests/samples/import9.py b/python-parser/packages/pyright-internal/src/tests/samples/import9.py new file mode 100644 index 00000000..b9f40fbf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/import9.py @@ -0,0 +1,7 @@ +# This sample tests support for PEP 562's __getattr__ function. + +# This should not generate an error because import8 has +# a __getattr__ method. +from .import8 import foo + +foo() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/inconsistentConstructor1.py b/python-parser/packages/pyright-internal/src/tests/samples/inconsistentConstructor1.py new file mode 100644 index 00000000..ffff3485 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/inconsistentConstructor1.py @@ -0,0 +1,34 @@ +# This sample tests the reportInconsistentConstructor diagnostic check. 
+ +from typing import Any, Self + + +class Parent1: + def __init__(self, a: int) -> None: ... + + +class Child1(Parent1): + # This should generate an error if reportInconsistentConstructor is enabled. + def __new__(cls, a: int | str): ... + + +class Parent2: + def __init__(self, b: int) -> None: ... + + +class Child2(Parent2): + # This should generate an error if reportInconsistentConstructor is enabled. + def __new__(cls, b: str): ... + + +class Class3: + def __new__(cls, *args: object, **kwargs: object) -> Self: ... + + # This should generate an error if reportInconsistentConstructor is enabled. + def __init__(self, a: int) -> None: ... + + +class Class4: + def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... + + def __init__(self, a: int) -> None: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/inconsistentSpaceTab1.py b/python-parser/packages/pyright-internal/src/tests/samples/inconsistentSpaceTab1.py new file mode 100644 index 00000000..ceb71f11 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/inconsistentSpaceTab1.py @@ -0,0 +1,9 @@ +# This sample tests the tokenizer's ability to detect inconsistent +# use of tab and spaces in a way that's ambiguous. + +def main(jsonIn): + print("a") + # This should generate an error because of inconsistent use of + # tabs and spaces. + print("b") + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/inconsistentSpaceTab2.py b/python-parser/packages/pyright-internal/src/tests/samples/inconsistentSpaceTab2.py new file mode 100644 index 00000000..3adc8e5c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/inconsistentSpaceTab2.py @@ -0,0 +1,10 @@ +# This sample tests the reporting of inconsistent space/tab usage +# for dedent tokens. + + +if True: + if True: + print("False") + print("True") # Should generate an error here. 
+ + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/index1.py b/python-parser/packages/pyright-internal/src/tests/samples/index1.py new file mode 100644 index 00000000..8e79b50d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/index1.py @@ -0,0 +1,128 @@ +# This sample tests the handling of __index__ magic method +# when used with the __getitem__ and __setitem__ method. + + +from typing import Generic, Literal, Self, Type, TypeVar, Any + + +class MyInt: + def __init__(self, value: int) -> None: + self.value = value + + def __index__(self) -> int: + return self.value + + +l = ["foo", "bar"] +t = ("foo", "bar") + +hex(MyInt(7)) +l[MyInt(0)] +l[MyInt(0)] = "hi" +t[MyInt(1)] + + +class MyNonInt: + def __init__(self) -> None: + pass + + +# These should generate errors +hex(MyNonInt()) +l[MyNonInt()] +l[MyNonInt()] = "hi" +t[MyNonInt()] + + +T = TypeVar("T") + + +class MyMetaclass(type): + def __getitem__(cls: Type[T], item: int) -> T: + return cls() + + +class ClassA(metaclass=MyMetaclass): + pass + + +a1 = ClassA[1] +reveal_type(a1, expected_text="ClassA") + +# This should generate an error +ClassA["1"] + + +class ClassB: + def __setitem__(self, index: int, value: "ClassB"): ... + + +class ClassC: + def __setitem__(self, index: int, value: "ClassC"): ... + + +B_or_C = TypeVar("B_or_C", ClassB, ClassC) + + +def func1(container: B_or_C): + a = container + a[1] = container + + +TD = TypeVar("TD", bound="ClassD[Any]") + + +class ClassD(Generic[TD]): + def __setitem__(self, index: int, value: TD): ... + + +def func2(container: ClassD[TD], value: TD): + container[1] = value + + +class ClassE: + def __getattr__(self, s: str) -> Any: + raise NotImplementedError() + + +e = ClassE() + +# This should generate an error +v_e = e["test"] + +# This should generate an error +e["test"] = 3 + + +class ClassF(Generic[T]): + def __getitem__(self, args: int) -> Self: ... 
+ + def get(self, index: int) -> Self: + reveal_type(self[index], expected_text="Self@ClassF[T@ClassF]") + return self[index] + + +class ClassG: + __slots__ = ["x"] + + +def func3(g: ClassG): + reveal_type(g.x, expected_text="Unbound") + reveal_type(g.x[0], expected_text="Unknown") + + +class ClassH: + def __call__(self, *args, **kwargs) -> Self: + return self + + +class ClassI: + __getitem__ = ClassH() + + +reveal_type(ClassI()[0], expected_text="ClassH") + + +def func4(l: list[Literal["a", "b"]]): + l[0] = "a" + l[0:0] = ["a", "b"] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/inferredTypes1.py b/python-parser/packages/pyright-internal/src/tests/samples/inferredTypes1.py new file mode 100644 index 00000000..7a5193aa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/inferredTypes1.py @@ -0,0 +1,25 @@ +# This sample tests the analyzer's ability to infer types +# across execution contexts. + +from typing import Callable + + +def perform_request(build_req: Callable[[], str]) -> str: + return "purr" + + +def make_api_request(auth: str) -> str: + return "meow" + + +def func1() -> None: + resp = open("test") + auth = resp.read() + + def build_req(): + # "auth" is declared in a different execution context + # and included here in the closure. Make sure its type + # is properly inferred. + return make_api_request(auth) + + resp = perform_request(build_req) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/inferredTypes2.py b/python-parser/packages/pyright-internal/src/tests/samples/inferredTypes2.py new file mode 100644 index 00000000..e988cc92 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/inferredTypes2.py @@ -0,0 +1,16 @@ +# This sample tests the ability of the type checker to infer +# the types of instance variables based on their assigned values. 
+ + +class ClassA: + def __init__(self): + self.value = None + + def func(self, param: int): + reveal_type(self.value, expected_text="int | None") + + if self.value is not None: + reveal_type(self.value, expected_text="int") + self.value.bit_length() + + self.value = param diff --git a/python-parser/packages/pyright-internal/src/tests/samples/inferredTypes3.py b/python-parser/packages/pyright-internal/src/tests/samples/inferredTypes3.py new file mode 100644 index 00000000..23ed555d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/inferredTypes3.py @@ -0,0 +1,32 @@ +# This sample tests return type annotations for functions that +# do not return. + +from abc import ABC, abstractmethod + + +class OtherError(NotImplementedError): ... + + +class A(ABC): + def func1(self): + raise Exception("test") + + def func2(self): + raise NotImplementedError() + + def func3(self): + raise OtherError + + @abstractmethod + def func4(self): + raise Exception() + + +def func1(a: A): + reveal_type(a.func1(), expected_text="NoReturn") + + reveal_type(a.func2(), expected_text="Unknown") + + reveal_type(a.func3(), expected_text="Unknown") + + reveal_type(a.func4(), expected_text="Unknown") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/initVar1.py b/python-parser/packages/pyright-internal/src/tests/samples/initVar1.py new file mode 100644 index 00000000..fc72ae99 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/initVar1.py @@ -0,0 +1,23 @@ +# This sample tests the use of the InitVar annotation. 
+ +from dataclasses import InitVar as InitVarAlias + +from dataclasses import * + + +@dataclass +class Container: + init_var1: InitVarAlias[int] + init_var2: InitVar[int] + + not_init_var1: int + + +c = Container(1, 2, 3) +reveal_type(c.not_init_var1, expected_text="int") + +# This should generate an error +c.init_var1 + +# This should generate an error +c.init_var2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/initsubclass1.py b/python-parser/packages/pyright-internal/src/tests/samples/initsubclass1.py new file mode 100644 index 00000000..f39eb94c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/initsubclass1.py @@ -0,0 +1,75 @@ +# This sample tests the type checker's validation +# of the __init_subclass__ method described in +# PEP 487. + +from datetime import datetime +from typing import Any, Optional, Type, TypedDict + + +class ClassA: + def __init_subclass__( + cls, *, param1: str, param2: float, param3: Optional[Any] = None + ) -> None: + super().__init_subclass__() + + +# This should generate two errors because param1 is +# the wrong type. +class ClassB(ClassA, param1=0, param2=4): + pass + + +# This should generate two errors because param2 is missing. +class ClassC(ClassA, param1="0", param3=datetime.now()): + pass + + +class ClassD(ClassA, param1="0", param2=5.0): + pass + + +class ClassE: + def __init_subclass__(cls, *, arg: int) -> None: + func1(cls, arg) + + def __new__(cls) -> "ClassE": + func1(cls, 9) + return super().__new__(cls) + + +def func1(klass: Type[ClassE], arg: int): + pass + + +class ClassF(ClassA, param1="hi", param2=3.4): + def __init_subclass__(cls, param_alt1: int): + super().__init_subclass__(param1="yo", param2=param_alt1) + + +def func2(cls): + pass + + +class ClassG: + __init_subclass__ = func2 + + +class ClassH(ClassG): + pass + + +# This should generate two errors because "a" is not present +# in the object.__init_subclass__ method. 
+class ClassI(a=3): + a: int + + +class ClassJ: + def __init_subclass__(cls, **kwargs: Any) -> None: + super().__init_subclass__(**kwargs) + cls.custom_attribute = 9 + + +class ClassJChild(ClassJ): + def __init__(self): + reveal_type(self.custom_attribute, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/initsubclass2.py b/python-parser/packages/pyright-internal/src/tests/samples/initsubclass2.py new file mode 100644 index 00000000..74837ab1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/initsubclass2.py @@ -0,0 +1,16 @@ +# This sample verifies that a subclass of a class that supports +# __init_subclass__ provides the required initialization parameters. + + +class A: + def __init_subclass__(cls, param_a: int): + super().__init_subclass__() + + +class B(A, param_a=123): + pass + + +# This should generate two errors because param_a is missing. +class C(B): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/initsubclass3.py b/python-parser/packages/pyright-internal/src/tests/samples/initsubclass3.py new file mode 100644 index 00000000..b6a1ad2d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/initsubclass3.py @@ -0,0 +1,24 @@ +# This sample tests the case where an __init_subclass__ is overloaded. + + +from typing import overload + + +class BaseClass1: + @overload + def __init_subclass__(cls, x: str, y: str) -> None: ... + + @overload + def __init_subclass__(cls, x: int, y: int) -> None: ... + + def __init_subclass__(cls, x: int | str, y: int | str) -> None: ... + + +class Subclass1A(BaseClass1, x=3, y=3): ... + + +class Subclass1B(BaseClass1, x="", y=""): ... + + +# This should generate three errors. +class Subclass1C(BaseClass1, x=1, y=""): ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/isinstance1.py b/python-parser/packages/pyright-internal/src/tests/samples/isinstance1.py new file mode 100644 index 00000000..cf1d7beb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/isinstance1.py @@ -0,0 +1,8 @@ +# This sample tests the use of "self.__class__" and "__class__" +# in an isinstance call. + + +class Foo: + def bar(self): + a = isinstance(object(), self.__class__) + b = isinstance(object(), __class__) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/isinstance2.py b/python-parser/packages/pyright-internal/src/tests/samples/isinstance2.py new file mode 100644 index 00000000..f99fcf12 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/isinstance2.py @@ -0,0 +1,45 @@ +# This sample tests the case where a base class has +# an unknown type and the type is used in an +# isinstance check. + +# pyright: reportUnnecessaryIsInstance=true + +from typing import TypeVar, Union + +# This should generate an error because "dummy" can't be resolved. +# The symbol Document should have an unknown type. +from dummy import Document + + +class DbModel(Document): + pass + + +def func1() -> Union[int, DbModel]: + return DbModel() + + +# This should not generate an error even though DbModel is +# derived from an unknown base class. 
+isinstance(func1(), int) + + +def func2(obj: object, typ: type): + return isinstance(obj, typ) + + +def func3(obj: float): + if isinstance(obj, float): + reveal_type(obj, expected_text="float") + else: + reveal_type(obj, expected_text="int") + + +T = TypeVar("T", bound=float) + + +def func4(t: type[T]): + if issubclass(t, float): + reveal_type(t, expected_text="type[float]*") + else: + reveal_type(t, expected_text="type[int]*") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/isinstance3.py b/python-parser/packages/pyright-internal/src/tests/samples/isinstance3.py new file mode 100644 index 00000000..20f8d327 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/isinstance3.py @@ -0,0 +1,119 @@ +# This sample tests the logic that validates the second parameter to +# an isinstance or issubclass call and ensures that it's a class or +# tuple of classes. + + +import sys +from abc import abstractmethod +from typing import ( + Annotated, + Any, + Callable, + Generic, + Sequence, + Tuple, + Type, + TypeVar, + TypedDict, + Union, +) + +if sys.version_info >= (3, 10): + from types import NoneType +else: + NoneType = type(None) + +_T = TypeVar("_T", int, str) + + +class A(Generic[_T]): + pass + + +a = A() + +if isinstance(a, A): + pass + +# This should generate an error because generic types with +# subscripts are not allowed. +if isinstance(a, A[str]): + pass + +# This should generate an error in Python 3.9 and older because +# unions are not allowed, but this error isn't currently caught. 
+if issubclass(A, Union[A, int]): + pass + +if issubclass(A, type(None)): + pass + +if issubclass(A, NoneType): + pass + + +class ClassA(Generic[_T]): + v1: _T + v2: Type[_T] + + @property + @abstractmethod + def _elem_type_(self) -> Union[Type[_T], Tuple[Type[_T], ...]]: + raise NotImplementedError + + def check_type(self, var: Any) -> bool: + return isinstance(var, self._elem_type_) + + def execute(self, var: Union[_T, Tuple[_T]]) -> None: + if isinstance(var, self._elem_type_): + pass + + if isinstance(var, type(self.v1)): + pass + + if isinstance(var, self.v2): + pass + + +def func1(exceptions: Sequence[type[BaseException]], exception: Exception): + return isinstance(exception, tuple(exceptions)) + + +if isinstance(a, Callable): + ... + +# This should generate an error because a subscripted Callable +# will result in a runtime exception. +if isinstance(a, Callable[[], Any]): + ... + +if isinstance(a, type(len)): + ... + + +class TD1(TypedDict): + a: int + + +# This should generate an error because TypedDict classes can't +# be used in an isinstance call. +if isinstance(a, TD1): + pass + + +TA1 = Annotated[int, ""] + +# This should generate two errors because Annotated can't be used +# in an isinstance call. +if isinstance(1, TA1): + pass + +# This should generate an error because Any can't be used +# in an isinstance call. +if isinstance(1, Any): + pass + +# This should generate an error because Literal can't be used +# in an isinstance call. +if isinstance(1, Literal[1, 2]): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/isinstance4.py b/python-parser/packages/pyright-internal/src/tests/samples/isinstance4.py new file mode 100644 index 00000000..16439148 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/isinstance4.py @@ -0,0 +1,70 @@ +# This sample checks that isinstance and issubclass don't +# allow the second argument to be a Protocol class. 
+ +from inspect import isfunction +from typing import Any, Callable, Protocol, Type, TypeVar, Union, runtime_checkable +from types import FunctionType, LambdaType + + +class MyProtocol1(Protocol): + pass + + +# This should generate an error because Sized is a Protocol that +# is not runtime checkable. +isinstance(4, MyProtocol1) + + +# This should generate an error because Iterable is a Protocol. +issubclass(str, (str, MyProtocol1)) + + +def func1(t: type[MyProtocol1]): + isinstance(1, t) + + +@runtime_checkable +class MyProtocol2(Protocol): + pass + + +isinstance(4, MyProtocol2) +issubclass(str, (str, MyProtocol2)) + + +class CustomClass: + def __call__(self, *args: Any): + pass + + +def get_type_of_object(object: Union[Callable[..., Any], CustomClass]): + # This would normally generate an error, but FunctionType is special. + if isinstance(object, FunctionType): + return "is function" + + if isinstance(object, LambdaType): + return "is lambda" + + if isinstance(object, Callable): + return "is callable" + + return "nothing" + + +_T1 = TypeVar("_T1", bound=CustomClass) + + +def func2(cls: Type[_T1], val: _T1): + if issubclass(cls, CustomClass): + reveal_type(cls, expected_text="type[CustomClass]*") + else: + reveal_type(cls, expected_text="Never") + + +_T2 = TypeVar("_T2") + + +def func3(x: _T2) -> Union[_T2, int]: + if callable(x) and isfunction(x): + return 1 + return x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/isinstance5.py b/python-parser/packages/pyright-internal/src/tests/samples/isinstance5.py new file mode 100644 index 00000000..c8b9bfc4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/isinstance5.py @@ -0,0 +1,45 @@ +# This tests error reporting for the use of data protocols in an +# issubclass call. + +from typing import Any, Protocol, runtime_checkable + +# > isinstance() can be used with both data and non-data protocols, while +# > issubclass() can be used only with non-data protocols. 
+ + +@runtime_checkable +class DataProtocol(Protocol): + name: str + + def method1(self) -> int: ... + + +@runtime_checkable +class DataProtocol2(DataProtocol, Protocol): + def method2(self) -> int: ... + + +@runtime_checkable +class NonDataProtocol(Protocol): + def method1(self) -> int: ... + + +def func2(a: Any): + if isinstance(a, DataProtocol): + return + + if isinstance(a, NonDataProtocol): + return + + # This should generate an error because data protocols + # are not allowed with issubclass checks. + if issubclass(a, (DataProtocol, NonDataProtocol)): + return + + # This should generate an error because data protocols + # are not allowed with issubclass checks. + if issubclass(a, (DataProtocol2, NonDataProtocol)): + return + + if issubclass(a, NonDataProtocol): + return diff --git a/python-parser/packages/pyright-internal/src/tests/samples/isinstance6.py b/python-parser/packages/pyright-internal/src/tests/samples/isinstance6.py new file mode 100644 index 00000000..e0d27fc9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/isinstance6.py @@ -0,0 +1,39 @@ +# This sample tests the detection of a runtime checkable protocol +# that unsafely overlaps a class within an isinstance or issubclass +# call. + +# > Type checkers should reject an isinstance() or issubclass() call if there +# > is an unsafe overlap between the type of the first argument and the protocol. + + +from typing import Protocol, runtime_checkable + + +@runtime_checkable +class Proto3(Protocol): + def method1(self, a: int) -> int: ... + + +class Concrete3A: + def method1(self, a: str) -> None: + pass + + +@runtime_checkable +class Proto2(Protocol): + def other(self) -> None: ... 
+ + +class Concrete3B: + method1: int = 1 + + +def func3(): + if isinstance(Concrete3A(), Proto3): # Type error: unsafe overlap + pass + + if isinstance(Concrete3B(), (Proto3, Proto2)): # Type error: unsafe overlap + pass + + if issubclass(Concrete3A, (Proto3, Proto2)): # Type error: unsafe overlap + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/kwargsUnpack1.py b/python-parser/packages/pyright-internal/src/tests/samples/kwargsUnpack1.py new file mode 100644 index 00000000..7dbb740f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/kwargsUnpack1.py @@ -0,0 +1,142 @@ +# This sample tests the handling of Unpack[TypedDict] when used with +# a **kwargs parameter in a function signature. + +from typing import Protocol, TypedDict +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + NotRequired, + Required, + Unpack, +) + + +class TD1(TypedDict): + v1: Required[int] + v2: NotRequired[str] + + +class TD2(TD1): + v3: Required[str] + + +def func1(**kwargs: Unpack[TD2]) -> None: + v1 = kwargs["v1"] + reveal_type(v1, expected_text="int") + + # This should generate an error because v2 might not be present. + kwargs["v2"] + + if "v2" in kwargs: + v2 = kwargs["v2"] + reveal_type(v2, expected_text="str") + + v3 = kwargs["v3"] + reveal_type(v3, expected_text="str") + + +reveal_type(func1, expected_text="(**kwargs: **TD2) -> None") + + +def func2(v3: str, **kwargs: Unpack[TD1]) -> None: + pass + + +def func3(): + # This should generate an error because it is + # missing required keyword arguments. + func1() + + func1(v1=1, v2="", v3="5") + + td2 = TD2(v1=2, v3="4") + func1(**td2) + + # This should generate an error because v4 is not in TD2. + func1(v1=1, v2="", v3="5", v4=5) + + # This should generate an error because args are passed by position. + func1(1, "", "5") + + my_dict: dict[str, str] = {} + # This should generate an error because it's an untyped dict. 
+ func1(**my_dict) + + d1 = {"v1": 2, "v3": "4", "v4": 4} + func1(**d1) + + func2(**td2) + + # This should generate an error because v1 is already specified. + func1(v1=2, **td2) + + # This should generate an error because v1 is already specified. + func2(1, **td2) + + # This should generate an error because v1 is matched to a + # named parameter and is not available for kwargs. + func2(v1=1, **td2) + + +class TDProtocol1(Protocol): + def __call__(self, *, v1: int, v3: str) -> None: ... + + +class TDProtocol2(Protocol): + def __call__(self, *, v1: int, v3: str, v2: str = "") -> None: ... + + +class TDProtocol3(Protocol): + def __call__(self, *, v1: int, v2: int, v3: str) -> None: ... + + +class TDProtocol4(Protocol): + def __call__(self, *, v1: int) -> None: ... + + +class TDProtocol5(Protocol): + def __call__(self, v1: int, v3: str) -> None: ... + + +class TDProtocol6(Protocol): + def __call__(self, **kwargs: Unpack[TD2]) -> None: ... + + +v1: TDProtocol1 = func1 +v2: TDProtocol2 = func1 + +# This should generate an error because v2 is the wrong type. +v3: TDProtocol3 = func1 + +# This should generate an error because v3 is missing. +v4: TDProtocol4 = func1 + +# This should generate an error because parameters are positional. +v5: TDProtocol5 = func1 + +v6: TDProtocol6 = func1 + + +def func4(v1: int, /, **kwargs: Unpack[TD2]) -> None: ... + + +# This should generate an error because parameter v1 overlaps +# with the TypedDict. +def func5(v1: int, **kwargs: Unpack[TD2]) -> None: ... + + +class TD3(TypedDict): + a: int + + +def func6(a: int, /, **kwargs: Unpack[TD3]): + pass + + +func6(1, a=2) + + +def func7(*, v1: int, v3: str, v2: str = "") -> None: ... + + +# This should generate an error because func7 doesn't +# accept additional keyword arguments. 
+v7: TDProtocol6 = func7 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda1.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda1.py new file mode 100644 index 00000000..2468a431 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda1.py @@ -0,0 +1,93 @@ +# This sample tests type checking for lambdas and their parameters. +from typing import Any, Callable, Iterable, TypeVar + +#------------------------------------------------------ +# Test basic lambda matching + +def needs_function(callback: Callable[[str, int], str]): + pass + +needs_function(lambda x, y:x) + +# This should generate an error because the lambda doesn't +# accept two parameters. +needs_function(lambda x:x) + + +#------------------------------------------------------ +# Test lambda matching when dest includes default parameter values + +def needs_function2(callback: Callable[[str, int], str]): + pass + +needs_function(lambda x, y:x) + + +#------------------------------------------------------ +# Test parameter rules for lambdas + +# This should generate an error because a parameter with +# no default follows a parameter with a default. +lambda2 = lambda x=1, y:y + +lambda3 = lambda x, y=5:y +lambda3(1) +lambda3(1, 2) + +lambda4 = lambda x, *y, z:y + + +#------------------------------------------------------ +# Test calling of lambdas + +lambda1 = lambda x, y:x +lambda1(1, 2) + +# This should generate an error because the lambda doesn't +# accept three parameters. +lambda1(1, 2, 3) + +lambda4(1, z=3) +lambda4(1, 3, 4, 5, 6, z=3) + +# This should generate an error because the arguments +# don't match the parameter list. +lambda4(1, 3) + +# This should generate an error because the arguments +# don't match the parameter list (no named value for z). 
+lambda4(1, 3, 4) + + +#------------------------------------------------------ +# Test generic parameter matching in lambdas + +_T1 = TypeVar('_T1') + +def may_need_function_generic(callback: Callable[[_T1], _T1] | None): + pass + +may_need_function_generic(lambda x: x) + + +def reduce(function: Callable[[_T1, _T1], _T1], sequence: Iterable[_T1]) -> _T1: + ... + + +a: object = reduce((lambda x, y: x * y), [1, 2, 3, 4]) + + +#------------------------------------------------------ +# Test lambdas with *args + +b1: Callable[[int, int, str], Any] = lambda _, *b: reveal_type( + b, expected_text="tuple[Unknown, ...]" +) + +b2: Callable[[str, str], Any] = lambda *b: reveal_type( + b, expected_text="tuple[Unknown, ...]" +) + +b3: Callable[[int], Any] = lambda _, *b: reveal_type( + b, expected_text="tuple[Unknown, ...]" +) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda10.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda10.py new file mode 100644 index 00000000..8195e4e4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda10.py @@ -0,0 +1,26 @@ +# This sample tests the handling of immediately-called lambdas. 
+ +# pyright: strict + +from typing import Callable + +func1: Callable[[int, int], int] = lambda x, y, /: ( + lambda x, y: max( + reveal_type(x, expected_text="int"), reveal_type(y, expected_text="int") + ) + // min(x, y) +)(abs(reveal_type(x, expected_text="int")), abs(reveal_type(y, expected_text="int"))) + +v1 = func1(-2, 4) +reveal_type(v1, expected_text="int") + + +v2 = (lambda a, b: a + b)(3, 4) +reveal_type(v2, expected_text="int") + + +v3 = (lambda a, b: a + b)("foo", "bar") +reveal_type(v3, expected_text="LiteralString") + +v4 = (lambda a, b: a + b)("foo", (lambda c, d: c + d)("b", "ar")) +reveal_type(v4, expected_text="LiteralString") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda11.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda11.py new file mode 100644 index 00000000..8f30ca92 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda11.py @@ -0,0 +1,28 @@ +# This sample tests the case where a lambda's expected type includes +# a ParamSpec. + +from typing import Callable, Generic, TypeVar, Concatenate, ParamSpec + +T = TypeVar("T") +P = ParamSpec("P") + + +class Callback(Generic[T]): + def __init__( + self, + func: Callable[Concatenate[T, P], object], + *args: P.args, + **kwargs: P.kwargs, + ) -> None: ... + + +v1: Callback[tuple[int, int]] = Callback(lambda p: (p[1], p[0])) + + +def func1( + func: Callable[Concatenate[int, P], T], *args: P.args, **kwargs: P.kwargs +) -> T: ... 
+ + +v2 = func1(lambda p: p) +reveal_type(v2, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda12.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda12.py new file mode 100644 index 00000000..f1c287d1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda12.py @@ -0,0 +1,12 @@ +# This sample tests the case where a lambda includes one or more parameters +# that accept a default value and the the expected type does not include +# these parameters. In this case, the types of the extra parameters should +# be inferred based on the default value type. + +# pyright: strict + +from typing import Callable + + +def func1() -> list[Callable[[int], int]]: + return [lambda x, i=i: i * x for i in range(5)] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda13.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda13.py new file mode 100644 index 00000000..3b4086c5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda13.py @@ -0,0 +1,43 @@ +# This sample tests the case where a lambda's expected type is a callable +# that accepts another generic callable as a parameter. 
+ +from typing import Callable, Generic, TypeVar + +T = TypeVar("T") + + +def func1(callable: Callable[[Callable[[T], T]], T]) -> T: + return callable(lambda x: x) + + +v1 = func1(lambda a: a(0)) +reveal_type(v1, expected_text="int") + +v2 = func1(lambda a: a("")) +reveal_type(v2, expected_text="str") + + +def test1(untyped): + v1 = func1(lambda a: a(untyped)) + reveal_type(v1, expected_text="Unknown") + + +class A(Generic[T]): + def __init__(self, value: T) -> None: + self.value = value + + +def func2(callable: Callable[[type[A[T]]], A[T]]) -> T: + return callable(A).value + + +v3 = func2(lambda A: A(0)) +reveal_type(v3, expected_text="int") + +v4 = func2(lambda A: A("")) +reveal_type(v4, expected_text="str") + + +def test2(untyped): + v1 = func2(lambda A: A(untyped)) + reveal_type(v1, expected_text="Unknown") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda14.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda14.py new file mode 100644 index 00000000..c238d5ae --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda14.py @@ -0,0 +1,8 @@ +# This sample tests type inference for a lambda that has no inference +# context but has a default argument value. + +lambda1 = lambda x="": x +reveal_type(lambda1, expected_text='(x: str = "") -> str') + +lambda2 = lambda x=None: x +reveal_type(lambda2, expected_text="(x: Unknown | None = None) -> (Unknown | None)") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda15.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda15.py new file mode 100644 index 00000000..a2321df9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda15.py @@ -0,0 +1,24 @@ +# This sample tests the case where a lambda is passed to a constructor +# where both __new__ and __init__ methods are present and have different +# types. 
+ +# pyright: strict + +from typing import Any, TypeVar, Callable + +T = TypeVar("T") + + +def identity(val: T) -> T: + return val + + +class ClassA: + def __new__(cls, *args: Any, **kwargs: Any) -> "ClassA": + return super().__new__(*args, **kwargs) + + def __init__(self, x: Callable[[float], float]) -> None: + self.x = x + + +ClassA(lambda r: identity(r) + 1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda2.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda2.py new file mode 100644 index 00000000..cf9bc0c9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda2.py @@ -0,0 +1,32 @@ +# This sample verifies that the type checker properly handles +# lambdas with position-only and keyword-only markers. + +from typing import Callable + +foo1: Callable[[int], int] = lambda x, /: x + 1 + +# This should generate an error because there are too few +# parameters provided by the lambda. +foo2: Callable[[int, int], int] = lambda x, /: x + 1 + +# This should generate an error because there are too many +# parameters provided by the lambda. +foo3: Callable[[int, int], int] = lambda x, /, y, z: x + 1 + +# This should generate an error because there is no named +# parameter y. +foo4: Callable[[int, int], int] = lambda x, *, y: x + y + 1 + +# This should generate an error because there are too few +# parameters provided by the lambda. +foo5: Callable[[int, int, int], int] = lambda x, *, y: x + y + 1 + +# This should generate an error because there are too many +# parameters provided by the lambda. +foo6: Callable[[int], int] = lambda x, *, y: x + y + 1 + +foo7: Callable[[int, str], int] = lambda *args: 1 + +# This should generate an error because there are too many +# parameters provided by the lambda. 
+foo8: Callable[[int, str], int] = lambda a, b, c, *args: 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda3.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda3.py new file mode 100644 index 00000000..4765f733 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda3.py @@ -0,0 +1,33 @@ +# This sample tests the handling of default parameter value +# expressions in a lambda. + + +from typing import Callable, Protocol + + +def test1(): + var = 1 + + lambda _=var: ... + + +def test2(): + # This should generate an error because var2 isn't defined. + lambda _=var2: ... + + +def test3(): + var = 0 + lambda var=var: ... + + +class MyCallback(Protocol): + def __call__(self, y: int, a: int = 0) -> bool: ... + + +lambda1: Callable[[int, int], bool] = lambda y, a=0: a == y +lambda2: MyCallback = lambda y, a=0: a == y + +lambda1(20) +lambda2(20) +lambda2(20, 30) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda4.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda4.py new file mode 100644 index 00000000..95269e67 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda4.py @@ -0,0 +1,78 @@ +# This sample tests the case where a lambda is assigned to +# a union type that contains multiple callables. 
+ +from typing import Callable, Protocol, TypeVar + + +U1 = Callable[[int, str], bool] | Callable[[str], bool] + + +def accepts_u1(cb: U1) -> U1: + return cb + + +def callback_1(p0: int, p1: str): + return True + + +def callback_2(p0: str): + return True + + +def callback_3(*p0: str): + return True + + +accepts_u1(lambda s: s.startswith("hello")) +accepts_u1(lambda i, s: i > 0 and s.startswith("hello")) +accepts_u1(lambda *i: True) +accepts_u1(callback_1) +accepts_u1(callback_2) +accepts_u1(callback_3) + +# This should generate an error +accepts_u1(lambda a, b, c: True) + + +class Callable1(Protocol): + def __call__(self, p0: int, p1: str) -> bool: ... + + +class Callable2(Protocol): + def __call__(self, p0: str) -> bool: ... + + +class Callable3(Protocol): + def __call__(self, *p0: str) -> bool: ... + + +class Callable4(Protocol): + def __call__(self, p0: int, p1: str, *p2: str) -> bool: ... + + +U2 = Callable1 | Callable2 | Callable3 | Callable4 + + +def accepts_u2(cb: U2) -> U2: + return cb + + +accepts_u2(lambda p0: p0.startswith("hello")) +accepts_u2(lambda p0, p1: p0 > 0 and p1.startswith("hello")) +accepts_u2(lambda *i: True) +accepts_u2(lambda p0, p1, *p2: True) +accepts_u2(callback_1) +accepts_u2(callback_2) +accepts_u2(callback_3) + + +T = TypeVar("T") + +Takes = Callable[[T], object] + +U3 = Takes[Takes[int]] | Takes[Takes[str]] + + +def accepts_u3(u: U3): + # This should generate an error. + u(lambda v: v.lower()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda5.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda5.py new file mode 100644 index 00000000..e750073d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda5.py @@ -0,0 +1,21 @@ +# This sample tests the case where a lambda's type is determined using +# bidirectional type inference and one or more of the parameters +# corresponds to a generic type. 
+ +from typing import Callable, TypeVar, Generic, Any + +T = TypeVar("T") +MsgT = TypeVar("MsgT", bound="Msg[Any]") + + +class Msg(Generic[T]): + body: T + + +class Request: ... + + +def check(func: "Callable[[MsgT, int], object]") -> MsgT: ... + + +notification: Msg[Request] = check(lambda msg, foo: (msg.body, foo)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda6.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda6.py new file mode 100644 index 00000000..a7d85696 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda6.py @@ -0,0 +1,15 @@ +# This sample validates that lambdas declared within a class +# body do not reference class-scoped variables within the +# lambda return expression. + + +var1 = [1, 2] + + +class A: + x1 = lambda: var1 + + var2 = [1, 2] + + # This should generate an error. + x2 = lambda: var2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda7.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda7.py new file mode 100644 index 00000000..4428736b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda7.py @@ -0,0 +1,9 @@ +# This sample tests the case where a lambda's expression must be +# evaluated multiple times as more type information is gathered +# in the presence of an overloaded method. + +# pyright: strict + + +def func1(keys: list[str]): + filter(lambda s: s.startswith(""), keys) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda8.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda8.py new file mode 100644 index 00000000..ff511767 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda8.py @@ -0,0 +1,19 @@ +# This sample tests the case where a lambda is passed to a generic +# Callable with two different type variables. 
+ +from typing import Callable, Generic, TypeVar + +T = TypeVar("T") +R = TypeVar("R") + + +class A(Generic[T, R]): + def __init__(self, x: Callable[[T], R], y: T): ... + + +class B(Generic[R]): + def __init__(self, x: Callable[[T], R], y: T): ... + + +reveal_type(A(lambda x: x, 123), expected_text="A[int, int]") +reveal_type(B(lambda x: x, 123), expected_text="B[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lambda9.py b/python-parser/packages/pyright-internal/src/tests/samples/lambda9.py new file mode 100644 index 00000000..f3ab2971 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lambda9.py @@ -0,0 +1,35 @@ +# This sample tests the case where a lambda's expected type is incomplete +# the first time it is evaluated. + +from typing import Callable, Generic, TypeVar, cast, overload + + +_OutT = TypeVar("_OutT") +_Out2T = TypeVar("_Out2T", bound=str) + + +class Flow(Generic[_OutT]): + @overload + def map(self, func: Callable[[_OutT], Exception], /) -> "Flow[None]": ... + + @overload + def map(self, func: Callable[[_OutT], _Out2T], /) -> "Flow[_Out2T]": ... + + def map(self, obj, /): + return cast("Flow", self) + + +class Data: ... + + +x1 = Flow[Data]().map(lambda aa: _get_date(reveal_type(aa, expected_text="Data"))) +reveal_type(x1, expected_text="Flow[str]") + +x2 = x1.map(lambda bb: reveal_type(bb, expected_text="str")) +reveal_type(x2, expected_text="Flow[str]") + +x3 = x2.map(lambda cc: "any value") +reveal_type(x3, expected_text="Flow[str]") + + +def _get_date(d: Data) -> str: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/lines1.py b/python-parser/packages/pyright-internal/src/tests/samples/lines1.py new file mode 100644 index 00000000..0724ac74 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/lines1.py @@ -0,0 +1,14 @@ +# This sample tests that the tokenizer properly handles +# line feeds. 
+ +""" +This is a multi-line comment \ +with escape characters. +""" + +# This is a raw string with an escaped EOL. +foo = r"\ +" + +# The final token should be on line 14 +bar = foo diff --git a/python-parser/packages/pyright-internal/src/tests/samples/list1.py b/python-parser/packages/pyright-internal/src/tests/samples/list1.py new file mode 100644 index 00000000..605bf7f1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/list1.py @@ -0,0 +1,104 @@ +# This sample tests type inference for list expressions. + +# pyright: strict, reportUnknownVariableType=false + +from typing import Any, Collection, Generic, Literal, MutableSequence, Sequence, TypeVar + + +v1 = [1, 2, 3] +reveal_type(v1, expected_text="list[int]") + +v2 = [1, 3.4, "hi"] +reveal_type(v2, expected_text="list[int | float | str]") + +v3 = [] +reveal_type(v3, expected_text="list[Unknown]") + +v4: list[object] = [] + +v5: object = [] + +v6: Sequence[float] = [3, 4, 5] + +v7: Collection[object] = [[]] + + +_T = TypeVar("_T") + + +class Baz(Generic[_T]): + def __get__(self, instance: Any, owner: Any) -> _T: ... + + def __set__(self, instance: Any, value: _T) -> None: ... + + +class Foo: ... + + +class Bar: + baz: Baz[list[Foo]] + + +v10 = Bar() +reveal_type(v10.baz, expected_text="list[Foo]") +v10.baz = [Foo()] +reveal_type(v10.baz, expected_text="list[Foo]") + +v11: list[Any] = [["hi", ["hi"], [[{}]]]] +reveal_type(v11, expected_text="list[Any]") + +v12: list[int | None] = [None] * 3 +reveal_type(v12, expected_text="list[int | None]") + +v13: list[str | None] = ["3", None] * 2 +reveal_type(v13, expected_text="list[str | None]") + +x1 = 3 +v14: list[str | None] = [None] * x1 + +x2 = [1, 2, 3] +v15: list[str | None] = [None] * sum(x2) + +v16: dict[str, list[str | None]] = {n: [None] * len(n) for n in ["a", "aa", "aaa"]} + + +ScalarKeysT = TypeVar("ScalarKeysT", bound=Literal["name", "country"]) + + +def func1(by: list[ScalarKeysT]) -> ScalarKeysT: ... 
+ + +reveal_type(func1(["country"]), expected_type="Literal['country']") +reveal_type(func1(["name"]), expected_type="Literal['name']") +reveal_type(func1(["name", "country"]), expected_type="Literal['name', 'country']") + +# This should generate an error. +func1(["id"]) + + +def func2(thing: str | list[str | int] | list[list[str | int]]): ... + + +func2("") +func2(["", 0]) +func2([["", 0], ["", 0]]) +func2([[""]]) + + +def func3(value: _T) -> list[_T]: + to_add = [value, str(value)] + # This should generate an error. + return to_add + + +def func4(value: _T) -> list[_T]: + # This should generate an error. + return [value, str(value)] + + +def func5(): + v1: Sequence[int | str] = [1] + reveal_type(v1, expected_text="list[int]") + + v2: MutableSequence[int | str] = [1] + reveal_type(v2, expected_text="list[int | str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/list2.py b/python-parser/packages/pyright-internal/src/tests/samples/list2.py new file mode 100644 index 00000000..fe004812 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/list2.py @@ -0,0 +1,13 @@ +# This sample tests that list and list comprehension type errors are +# reported up for correct overload selection. + +import random +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + LiteralString, +) + + +# The join method is overloaded with both LiteralString and str variants. +# We need to use the str overload here. +def func(x: LiteralString): + "".join([random.choice(x) for _ in range(8)]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/list3.py b/python-parser/packages/pyright-internal/src/tests/samples/list3.py new file mode 100644 index 00000000..f179a353 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/list3.py @@ -0,0 +1,18 @@ +# This sample tests list inference in a loop where the type of +# the inferred list changes each time through the loop. 
+ + +def func1(k: str): + keys = ["a", "b", "c"] + value = [] + + while keys: + if not k: + continue + + if not k: + value = {k: value} + else: + value = [None] * int(k) + [value] + + return value diff --git a/python-parser/packages/pyright-internal/src/tests/samples/literalString1.py b/python-parser/packages/pyright-internal/src/tests/samples/literalString1.py new file mode 100644 index 00000000..71f404ac --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/literalString1.py @@ -0,0 +1,92 @@ +# This sample tests the evaluation of LiteralString as described +# in PEP 675. + +from typing import Iterable +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Literal, + LiteralString, +) + + +def func1(a: str, b: bytes, c: Literal["a"], d: Literal["a", "b"], e: Literal["a", 1]): + # This should generate an error. + v1: LiteralString = a + + # This should generate an error. + v2: LiteralString = b + + # This should generate an error. + v3: LiteralString = b"" + + v4: LiteralString = "Hello!" + + v5: LiteralString = "Hello " + "Bob" + + # This should generate an error. + v6: LiteralString = f"{a}" + + # This should generate an error. + v7: LiteralString[int] + + v8: LiteralString = c + + v9: LiteralString = d + + # This should generate an error. + v10: LiteralString = e + + +def func2(a: str): ... + + +def func3(a: LiteralString): + func2(a) + a.lower() + + _ = a + "hi" + a.capitalize() + + +def func4(a: LiteralString, parts: Iterable[LiteralString]): + v1 = "".join(parts) + reveal_type(v1, expected_text="LiteralString") + + v2 = "".join([a, a]) + reveal_type(v2, expected_text="LiteralString") + + +def func5( + a: LiteralString, b: str, parts: Iterable[tuple[LiteralString, LiteralString]] +): + v1: LiteralString = f"{a} {a}" + + v2: LiteralString = f"{a}{a}" + + v3: LiteralString = f"{'xxx'}{'xxx'}" + + # This should generate an error because "b" is not literal. 
+ v4: LiteralString = f"{a} {b}" + + +def func6(a: LiteralString): + v1 = a.capitalize() + + v2 = a[0] + + a = "hi" + + v3: list[LiteralString] = "1 2 3".split(" ") + + +def func7(a: Literal["a", "b"], b: Literal["a", 1]): + v1: LiteralString = f"{a}" + + # This should generate an error because "b" is not a string literal. + v2: LiteralString = f"{b}" + + +def func8(a: list[LiteralString], b: list[Literal["a"]]): + # This should generate an error because of invariance rules. + v1: list[str] = a + + # This should generate an error because of invariance rules. + v2: list[LiteralString] = b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/literalString2.py b/python-parser/packages/pyright-internal/src/tests/samples/literalString2.py new file mode 100644 index 00000000..472b874c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/literalString2.py @@ -0,0 +1,17 @@ +# This tests the case where LiteralString is used as a bound for a +# type variable. + +from typing import TypeVar, Generic +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + LiteralString, +) + +L = TypeVar("L", bound=LiteralString) + + +class Foo(Generic[L]): + def __init__(self, value: L) -> None: + self.value = value + + +foo = Foo("hmm") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/literalString3.py b/python-parser/packages/pyright-internal/src/tests/samples/literalString3.py new file mode 100644 index 00000000..e9c49414 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/literalString3.py @@ -0,0 +1,25 @@ +# This sample tests the case where a LiteralString is used as the bound +# of a TypeVar. + +from typing import Generic, LiteralString, TypeVar + +T = TypeVar("T") +T_LS = TypeVar("T_LS", bound=LiteralString) + + +class ClassA(Generic[T]): + def __init__(self, val: T) -> None: ... 
+ + +def func1(x: T) -> ClassA[T]: + return ClassA(x) + + +def func2(x: T_LS | None, default: T_LS) -> ClassA[T_LS]: + if x is None: + x = default + + reveal_type(x, expected_text="T_LS@func2") + out = func1(x) + reveal_type(out, expected_text="ClassA[T_LS@func2]") + return out diff --git a/python-parser/packages/pyright-internal/src/tests/samples/literals1.py b/python-parser/packages/pyright-internal/src/tests/samples/literals1.py new file mode 100644 index 00000000..44b514ce --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/literals1.py @@ -0,0 +1,69 @@ +# This sample tests type checking support for "Literal" + +from typing import Literal + +ValidResponses = Literal["a", b"b", Literal["cc", True, None]] + + +def foo(a: ValidResponses): + pass + + +foo("a") +foo(b"b") +foo("cc") +foo(True) +foo(None) + +# This should generate an error because 'b' +# isn't a valid literal value. +foo("b") + +# This should generate an error because 'cc' +# isn't a valid literal value. +foo("c") + +# This should generate an error because False +# isn't a valid literal value. +foo(False) + +# This should generate an error because 3 +# isn't a valid literal value. +foo(3) + + +# This should generate an error because floats +# cannot be used as literals. +invalidType = 3 # type: Literal[3.4] + +# This should generate an error because 2 +# is not a valid literal value. +mismatch = 2 # type: Literal[3, 4, '5'] + +a: Literal[+3] = -(-(+++3)) +b: Literal[-2] = +-+2 + +# This should generate an error because literals are +# not instantiable. 
+c = Literal[1]() + + +bytes1 = b"\x7f" +reveal_type(bytes1, expected_text='Literal[b"\\x7f"]') +bytes2 = b"\x20" +reveal_type(bytes2, expected_text='Literal[b" "]') +bytes3 = b'"' +reveal_type(bytes3, expected_text='Literal[b"\\""]') +bytes4 = b"'" +reveal_type(bytes4, expected_text='Literal[b"\'"]') + + +t1 = [Literal[1], Literal[2]] +reveal_type(t1, expected_text="list[type[Literal]]") + +t2 = Literal[*(1, 2)] +reveal_type(t2, expected_text="Literal") + +values = ("a", "b", "c") +t3 = Literal[values] +reveal_type(t3, expected_text="Literal") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/literals2.py b/python-parser/packages/pyright-internal/src/tests/samples/literals2.py new file mode 100644 index 00000000..857a39ba --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/literals2.py @@ -0,0 +1,29 @@ +# This sample tests assignment of literals to declared +# types that use literals in their type arguments. + +from typing import Dict, List, Literal, Set, TypeVar + +Number = Literal["One", "Two"] + +# This should generate an error because 'Three' is not +# allowed in the Number type. +numbers_mapping: Dict[Number, int] = {"One": 1, "Two": 2, "Three": 3} + +# This should generate an error because 'Three' is not +# allowed in the Number type. +a: List[Number] = ["Three"] + +# This should generate an error because 'Three' is not +# allowed in the Number type. +b: Set[Number] = {"One", "Three"} + + +LetterGrade = Literal["A", "B", "C", "D", "F"] + +_T = TypeVar("_T") + + +def func1(x: _T) -> _T: ... + + +grade: LetterGrade = func1("A") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/literals3.py b/python-parser/packages/pyright-internal/src/tests/samples/literals3.py new file mode 100644 index 00000000..b429d95c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/literals3.py @@ -0,0 +1,40 @@ +# This sample tests that literal enums work. 
+ +from enum import Enum +from typing import Literal + + +class SomeEnum(Enum): + SOME_ENUM_VALUE1 = "1" + SOME_ENUM_VALUE2 = "2" + SOME_ENUM_VALUE3 = "3" + + +class Foo: + pass + + +# This should generate two errors because Foo() is not a valid +# type expression, and Foo is not an allowed literal value. +a: Literal["hi", Foo()] + +# This should generate an error because SomeEnum is not an +# allowed literal value. +b: Literal["hi", SomeEnum] + +L2 = Literal["hi", SomeEnum.SOME_ENUM_VALUE1] + + +def foo(a: int) -> L2: + if a > 3: + return "hi" + elif a > 4: + return SomeEnum.SOME_ENUM_VALUE1 + elif a > 5: + # This should generate an error because it's + # not part of the L1 literal. + return SomeEnum.SOME_ENUM_VALUE2 + else: + # This should generate an error because it's + # not part of the L1 literal. + return "bye" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/literals4.py b/python-parser/packages/pyright-internal/src/tests/samples/literals4.py new file mode 100644 index 00000000..aeb1a7c1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/literals4.py @@ -0,0 +1,10 @@ +# This sample tests various aliases of the typing module +# when used with Literal. + +import typing +import typing as t +import typing as typ + +a: typing.Literal[True] = True +b: t.Literal["Hello"] = "Hello" +c: typ.Literal[True, "Hello"] = True diff --git a/python-parser/packages/pyright-internal/src/tests/samples/literals5.py b/python-parser/packages/pyright-internal/src/tests/samples/literals5.py new file mode 100644 index 00000000..e7517645 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/literals5.py @@ -0,0 +1,28 @@ +# This sample tests literals that employ type aliases. + +from typing import Literal, Union + + +Numeric = Literal[1, "3.4"] + +DerivedLiteral1 = Literal["a", Numeric] + +var1: DerivedLiteral1 = "3.4" + +# This should generate an error. 
+var2: DerivedLiteral1 = "3.5" + + +NotNumeric = Union[Literal[1, 3], int] + +# This should generate an error because NotNumeric +# isn't a legal literal. +DerivedLiteral2: Literal[NotNumeric, 3] + + +ReadOnlyMode = Literal["r", "r+"] +WriteAndTruncateMode = Literal["w", "w+", "wt", "w+t"] +WriteNoTruncateMode = Literal["r+", "r+t"] +AppendMode = Literal["a", "a+", "at", "a+t"] + +AllModes = Literal[ReadOnlyMode, WriteAndTruncateMode, WriteNoTruncateMode, AppendMode] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/literals6.py b/python-parser/packages/pyright-internal/src/tests/samples/literals6.py new file mode 100644 index 00000000..9f849a16 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/literals6.py @@ -0,0 +1,93 @@ +# This sample tests various illegal forms of Literal. + +from enum import Enum +from pathlib import Path +from typing import Any, Literal, TypeVar + +Wrong1 = Literal[3 + 4] + +Wrong2 = Literal["foo".replace("o", "b")] + +Wrong3 = Literal[4 + 3j] + +Wrong4 = Literal[-4 + 2j] + +Wrong5 = Literal[(1, "foo", "bar")] + +Wrong6 = Literal[{"a": "b", "c": "d"}] + +Wrong7 = Literal[Path("abcd")] +T = TypeVar("T") + +Wrong8 = Literal[T] + +Wrong9 = Literal[3.14] + +Wrong10 = Literal[Any] + +Wrong11 = Literal[...] + + +def func(): ... + + +Wrong12 = Literal[func] +some_variable = "foo" + +Wrong13 = Literal[some_variable] + + +# This should generate two errors. +var1: Literal[3 + 4] + +# This should generate two errors. +var2: Literal["foo".replace("o", "b")] + +# This should generate two errors. +var3: Literal[4 + 3j] + +# This should generate three errors. +var4: Literal[-4 + 2j] + +# This should generate an error. +var5: Literal[(1, "foo", "bar")] + +# This should generate an error. +var6: Literal[{"a": "b", "c": "d"}] + +# This should generate two errors. +var7: Literal[Path("abcd")] + +# This should generate an error. +var8: Literal[T] + +# This should generate an error. 
+var9: Literal[3.14] + +# This should generate an error. +var10: Literal[Any] + +# This should generate an error. +var11: Literal[...] + +# This should generate an error. +var12: Literal[func] + +# This should generate two errors. +var13: Literal[some_variable] + + +class Enum1(Enum): + A = 1 + B = 2 + + x: str + + +a = Enum1.A + +# This should generate two errors. +var14: Literal[a] + +# This should generate two errors. +var15: Literal[Enum1.x] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/literals7.py b/python-parser/packages/pyright-internal/src/tests/samples/literals7.py new file mode 100644 index 00000000..a9524260 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/literals7.py @@ -0,0 +1,42 @@ +# This sample tests the handling of very large integer values used in +# literals. + +from typing import Literal + +big_int: Literal[9223372036854775808] = 0x8000000000000000 + +# This should generate an error. +y1: Literal[900001231231231456487987456452132130000000000000000000000000000001] = ( + 900001231231231456487987456452132130000000000000000000000000000000 +) + +y2: Literal[900001231231231456487987456452132130000000000000000000000000000001] = ( + 900001231231231456487987456452132130000000000000000000000000000001 +) + +reveal_type( + y2, + expected_text="Literal[900001231231231456487987456452132130000000000000000000000000000001]", +) + +y3 = y2 + 1 +reveal_type( + y3, + expected_text="Literal[900001231231231456487987456452132130000000000000000000000000000002]", +) + +y4 = 0xFFFFFFFFFFF123456789456123456789456123456789456123 +reveal_type( + y4, + expected_text="Literal[1606938044258905427252460960878516708721138816242982137979171]", +) + +y5 = 0b101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010 +reveal_type(y5, expected_text="Literal[886151997189943915269204706853563050]") + + +y6 = 
0xFFFFFFFFFFFFFFFFADF85458A2BB4A9AAFDC5620273D3CF1D8B9C583CE2D3695A9E13641146433FBCC939DCE249B3EF97D2FE363630C75D8F681B202AEC4617AD3DF1ED5D5FD65612433F51F5F066ED0856365553DED1AF3B557135E7F57C935984F0C70E0E68B77E2A689DAF3EFE8721DF158A136ADE73530ACCA4F483A797ABC0AB182B324FB61D108A94BB2C8E3FBB96ADAB760D7F4681D4F42A3DE394DF4AE56EDE76372BB190B07A7C8EE0A6D709E02FCE1CDF7E2ECC03404CD28342F619172FE9CE98583FF8E4F1232EEF28183C3FE3B1B4C6FAD733BB5FCBC2EC22005C58EF1837D1683B2C6F34A26C1B2EFFA886B4238611FCFDCDE355B3B6519035BBC34F4DEF99C023861B46FC9D6E6C9077AD91D2691F7F7EE598CB0FAC186D91CAEFE130985139270B4130C93BC437944F4FD4452E2D74DD364F2E21E71F54BFF5CAE82AB9C9DF69EE86D2BC522363A0DABC521979B0DEADA1DBF9A42D5C4484E0ABCD06BFA53DDEF3C1B20EE3FD59D7C25E41D2B669E1EF16E6F52C3164DF4FB7930E9E4E58857B6AC7D5F42D69F6D187763CF1D5503400487F55BA57E31CC7A7135C886EFB4318AED6A1E012D9E6832A907600A918130C46DC778F971AD0038092999A333CB8B7A1A1DB93D7140003C2A4ECEA9F98D0ACC0A8291CDCEC97DCF8EC9B55A7F88A46B4DB5A851F44182E1C68A007E5E0DD9020BFD64B645036C7A4E677D2C38532A3A23BA4442CAF53EA63BB454329B7624C8917BDD64B1C0FD4CB38E8C334C701C3ACDAD0657FCCFEC719B1F5C3E4E46041F388147FB4CFDB477A52471F7A9A96910B855322EDB6340D8A00EF092350511E30ABEC1FFF9E3A26E7FB29F8C183023C3587E38DA0077D9B4763E4E4B94B2BBC194C6651E77CAF992EEAAC0232A281BF6B3A739C1226116820AE8DB5847A67CBEF9C9091B462D538CD72B03746AE77F5E62292C311562A846505DC82DB854338AE49F5235C95B91178CCF2DD5CACEF403EC9D1810C6272B045B3B71F9DC6B80D63FDD4A8E9ADB1E6962A69526D43161C1A41D570D7938DAD4A40E329CCFF46AAA36AD004CF600C8381E425A31D951AE64FDB23FCEC9509D43687FEB69EDD1CC5E0B8CC3BDF64B10EF86B63142A3AB8829555B2F747C932665CB2C0F1CC01BD70229388839D2AF05E454504AC78B7582822846C0BA35C35F5C59160CC046FD8251541FC68C9C86B022BB7099876A460E7451A8A93109703FEE1C217E6C3826E52C51AA691E0E423CFC99E9E31650C1217B624816CDAD9A95F9D5B8019488D9C0A0A1FE3075A577E23183F81D4A3F2FA4571EFC8CE0BA8A4FE8B6855DFE72B0A66EDED2FBABFBE58A30FAFABE1C5D71A87E2F741EF8C1FE86FEA6BBFDE530677F0D97D11D49F7A8443D0822E506A9F461
4E011E2A94838FF88CD68C8BB7C5C6424CFFFFFFFFFFFFFFFF +y7 = y6 * y6 + +y8: Literal[10] = 0b1010 +y9: Literal[10] = 0o12 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop1.py b/python-parser/packages/pyright-internal/src/tests/samples/loop1.py new file mode 100644 index 00000000..8bb46624 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop1.py @@ -0,0 +1,45 @@ +# This sample tests the type checker's ability to handle type +# inferences within loop constructs. + + +def func1(a: list): + pass + + +def func2(): + data = None + + for x in [2, 3]: + if not data: + data = [1, 2] + else: + reveal_type(data, expected_text="list[int]") + func1(data) + else: + reveal_type(data, expected_text="list[int] | None") + + # This should generate an error because the + # type checker should be able to determine that + # data must contain None at this point. + func1(data) + + +x = 20 + 20 + + +def func3(): + data = None + + while x: + if not data: + data = [1, 2] + else: + reveal_type(data, expected_text="list[int]") + func1(data) + else: + reveal_type(data, expected_text="list[int] | None") + + # This should generate an error because the + # type checker should be able to determine that + # data must contain None at this point. + func1(data) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop10.py b/python-parser/packages/pyright-internal/src/tests/samples/loop10.py new file mode 100644 index 00000000..78c9cb6e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop10.py @@ -0,0 +1,11 @@ +# This sample tests the case where dependent types within +# a loop are assigned using tuples. 
+ + +def fibonacci(): + a, b = 1, 1 + while True: + yield a + a, b = b, a + b + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop11.py b/python-parser/packages/pyright-internal/src/tests/samples/loop11.py new file mode 100644 index 00000000..4fa979b1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop11.py @@ -0,0 +1,22 @@ +# This sample tests for the detection of unbound or partially-unbound +# variables within loops. + +import random + +for a in [1, 2, 3]: + # This should generate an error because b is unbound. + if b == 1: + b = 2 + + +for a in [1, 2, 3]: + if random.random() > 0.5: + c = 2 + + # This should generate an error because c is potentially unbound. + print(c) + +while True: + # This should generate an error because d is unbound. + if d == 1: + d = 2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop12.py b/python-parser/packages/pyright-internal/src/tests/samples/loop12.py new file mode 100644 index 00000000..685dda52 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop12.py @@ -0,0 +1,17 @@ +# This sample tests the evaluation of a variable whose type is narrowed +# within a loop body. + + +class ClassA: + def non_property(self) -> int: ... + + def do_stuff(self, x: int | None): + while True: + if x is not None: + a = x + else: + a = self.non_property + + # This should generate an error because the type of "a" + # is not compatible with a "-" operator. + _ = a - 0 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop13.py b/python-parser/packages/pyright-internal/src/tests/samples/loop13.py new file mode 100644 index 00000000..5ef58bb8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop13.py @@ -0,0 +1,13 @@ +# This sample tests the case where a loop uses tuple assignments. 
It verifies +# that no "unknown" values are evaluated for variables assigned in the loop. + +# pyright: strict + +nums: list[int] = [1, 2, 3] +max_product, min_product = nums[0], nums[0] + +for x in nums[1:]: + candidates = max_product * x, min_product * x + min_product = min(candidates) + max_product = max(candidates) + reveal_type(candidates, expected_text="tuple[int, int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop14.py b/python-parser/packages/pyright-internal/src/tests/samples/loop14.py new file mode 100644 index 00000000..6cfeb7ad --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop14.py @@ -0,0 +1,20 @@ +# This sample tests a loop that modifies a variable through type narrowing. + + +class State: + def confirm_dialog(self) -> "State | bool": + return False + + +state = State() +reveal_type(state, expected_text="State") + +for _ in range(1): + result = state.confirm_dialog() + if isinstance(result, State): + reveal_type(state, expected_text="State") + reveal_type(result, expected_text="State") + state = result + else: + reveal_type(state, expected_text="State") + reveal_type(result, expected_text="bool") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop15.py b/python-parser/packages/pyright-internal/src/tests/samples/loop15.py new file mode 100644 index 00000000..6cf4ed21 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop15.py @@ -0,0 +1,22 @@ +# This sample tests loops that contain nested comprehensions and +# variables that depend on each other. 
+ +# pyright: strict + + +def func1(boards: list[list[list[int]]]): + for _ in [0]: + remain = [[set(line) for line in zip(*b)] for b in boards] + boards = [b for b, u in zip(boards, remain) if all(u)] + + +def func2(boards: list[list[list[int]]]): + for _ in [1]: + remain = [[set(line) for line in b] for b in boards] + boards = [b for b, u in zip(boards, remain) if all(u)] + + +def func3(boards: list[list[list[int]]]): + for _ in [1]: + remain = [[set(line) for line in (*b, *zip(*b))] for b in boards] + boards = [b for b, u in zip(boards, remain) if all(u)] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop16.py b/python-parser/packages/pyright-internal/src/tests/samples/loop16.py new file mode 100644 index 00000000..4ba894e8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop16.py @@ -0,0 +1,322 @@ +# This sample tests a complex function that contains a loop and a long +# chain of if/elif statements. + +from random import randint + + +def get_ipv4(): + try: + while 1: + ip1, ip2, ip3, ip4 = ( + randint(1, 255), + randint(1, 255), + randint(1, 255), + randint(1, 255), + ) + + if ip1 == 127: + continue + elif ip1 == 0: + continue + elif ip1 == 1: + continue + elif ip1 == 2: + continue + elif ip1 == 3: + continue + elif ip1 == 4: + continue + elif ip1 == 5: + continue + elif ip1 == 6: + continue + elif ip1 == 7: + continue + elif ip1 == 8: + continue + elif ip1 == 9: + continue + elif ip1 == 11: + continue + elif ip1 == 12: + continue + elif ip1 == 17: + continue + elif ip1 == 19: + continue + elif ip1 == 15: + continue + elif ip1 == 56: + continue + elif ip1 == 10: + continue + elif ip1 == 25: + continue + elif ip1 == 49: + continue + elif ip1 == 50: + continue + elif ip1 == 73: + continue + elif ip1 == 137: + continue + elif ip1 == 11: + continue + elif ip1 == 21: + continue + elif ip1 == 22: + continue + elif ip1 == 26: + continue + elif ip1 == 28: + continue + elif ip1 == 29: + continue + elif ip1 == 30: + 
continue + elif ip1 == 33: + continue + elif ip1 == 55: + continue + elif ip1 == 214: + continue + elif ip1 == 215: + continue + elif ip1 == 192 and ip2 == 168: + continue + elif ip1 == 146 and ip2 == 17: + continue + elif ip1 == 146 and ip2 == 80: + continue + elif ip1 == 146 and ip2 == 98: + continue + elif ip1 == 146 and ip2 == 154: + continue + elif ip1 == 147 and ip2 == 159: + continue + elif ip1 == 148 and ip2 == 114: + continue + elif ip1 == 150 and ip2 == 125: + continue + elif ip1 == 150 and ip2 == 133: + continue + elif ip1 == 150 and ip2 == 144: + continue + elif ip1 == 150 and ip2 == 149: + continue + elif ip1 == 150 and ip2 == 157: + continue + elif ip1 == 150 and ip2 == 184: + continue + elif ip1 == 150 and ip2 == 190: + continue + elif ip1 == 150 and ip2 == 196: + continue + elif ip1 == 152 and ip2 == 82: + continue + elif ip1 == 152 and ip2 == 229: + continue + elif ip1 == 157 and ip2 == 202: + continue + elif ip1 == 157 and ip2 == 217: + continue + elif ip1 == 161 and ip2 == 124: + continue + elif ip1 == 162 and ip2 == 32: + continue + elif ip1 == 155 and ip2 == 96: + continue + elif ip1 == 155 and ip2 == 149: + continue + elif ip1 == 155 and ip2 == 155: + continue + elif ip1 == 155 and ip2 == 178: + continue + elif ip1 == 164 and ip2 == 158: + continue + elif ip1 == 156 and ip2 == 9: + continue + elif ip1 == 167 and ip2 == 44: + continue + elif ip1 == 168 and ip2 == 68: + continue + elif ip1 == 168 and ip2 == 85: + continue + elif ip1 == 168 and ip2 == 102: + continue + elif ip1 == 203 and ip2 == 59: + continue + elif ip1 == 204 and ip2 == 34: + continue + elif ip1 == 207 and ip2 == 30: + continue + elif ip1 == 117 and ip2 == 55: + continue + elif ip1 == 117 and ip2 == 56: + continue + elif ip1 == 80 and ip2 == 235: + continue + elif ip1 == 207 and ip2 == 120: + continue + elif ip1 == 209 and ip2 == 35: + continue + elif ip1 == 64 and ip2 == 70: + continue + elif ip1 == 172 and ip2 >= 16 and ip2 < 32: + continue + elif ip1 == 100 and ip2 >= 64 and 
ip2 < 127: + continue + elif ip1 == 169 and ip2 > 254: + continue + elif ip1 == 198 and ip2 >= 18 and ip2 < 20: + continue + elif ip1 == 64 and ip2 >= 69 and ip2 < 227: + continue + elif ip1 == 128 and ip2 >= 35 and ip2 < 237: + continue + elif ip1 == 129 and ip2 >= 22 and ip2 < 255: + continue + elif ip1 == 130 and ip2 >= 40 and ip2 < 168: + continue + elif ip1 == 131 and ip2 >= 3 and ip2 < 251: + continue + elif ip1 == 132 and ip2 >= 3 and ip2 < 251: + continue + elif ip1 == 134 and ip2 >= 5 and ip2 < 235: + continue + elif ip1 == 136 and ip2 >= 177 and ip2 < 223: + continue + elif ip1 == 138 and ip2 >= 13 and ip2 < 194: + continue + elif ip1 == 139 and ip2 >= 31 and ip2 < 143: + continue + elif ip1 == 140 and ip2 >= 1 and ip2 < 203: + continue + elif ip1 == 143 and ip2 >= 45 and ip2 < 233: + continue + elif ip1 == 144 and ip2 >= 99 and ip2 < 253: + continue + elif ip1 == 146 and ip2 >= 165 and ip2 < 166: + continue + elif ip1 == 147 and ip2 >= 35 and ip2 < 43: + continue + elif ip1 == 147 and ip2 >= 103 and ip2 < 105: + continue + elif ip1 == 147 and ip2 >= 168 and ip2 < 170: + continue + elif ip1 == 147 and ip2 >= 198 and ip2 < 200: + continue + elif ip1 == 147 and ip2 >= 238 and ip2 < 255: + continue + elif ip1 == 150 and ip2 >= 113 and ip2 < 115: + continue + elif ip1 == 152 and ip2 >= 151 and ip2 < 155: + continue + elif ip1 == 153 and ip2 >= 21 and ip2 < 32: + continue + elif ip1 == 155 and ip2 >= 5 and ip2 < 10: + continue + elif ip1 == 155 and ip2 >= 74 and ip2 < 89: + continue + elif ip1 == 155 and ip2 >= 213 and ip2 < 222: + continue + elif ip1 == 157 and ip2 >= 150 and ip2 < 154: + continue + elif ip1 == 158 and ip2 >= 1 and ip2 < 21: + continue + elif ip1 == 158 and ip2 >= 235 and ip2 < 247: + continue + elif ip1 == 159 and ip2 >= 120 and ip2 < 121: + continue + elif ip1 == 160 and ip2 >= 132 and ip2 < 151: + continue + elif ip1 == 64 and ip2 >= 224 and ip2 < 227: + continue + elif ip1 == 162 and ip2 >= 45 and ip2 < 47: + continue + elif ip1 == 163 
and ip2 >= 205 and ip2 < 207: + continue + elif ip1 == 164 and ip2 >= 45 and ip2 < 50: + continue + elif ip1 == 164 and ip2 >= 217 and ip2 < 233: + continue + elif ip1 == 207 and ip2 >= 60 and ip2 < 62: + continue + elif ip1 == 104 and ip2 >= 16 and ip2 < 31: + continue + elif ip1 == 193 and ip2 == 164: + continue + elif ip1 == 120 and ip2 >= 103 and ip2 < 108: + continue + elif ip1 == 188 and ip2 == 68: + continue + elif ip1 == 78 and ip2 == 46: + continue + elif ip1 >= 224: + continue + elif (ip1 == 178 and ip2 == 128) or (ip1 == 123 and ip2 == 59): + continue + elif ( + (ip1 == 124 and ip2 == 244) + or (ip1 == 178 and ip2 == 254) + or (ip1 == 185 and ip2 == 168) + or (ip1 == 178 and ip2 == 79) + ): + continue + elif ip1 == 192 and ip2 == 88 and ip3 == 99: + continue + elif ip1 == 240: + continue + elif ip1 == 255 and ip2 == 255 and ip3 == 255 and ip4 == 255: + continue + elif ip1 == 126: + continue + elif ip1 == 13 and ip2 == 107 and ip3 == 6 and ip4 == 152: + continue + # elif ip1 == 13 and ip2 == 107 and ip3 == 18 and ip4 == 10: + # continue + # elif ip1 == 13 and ip2 == 107 and ip3 == 128 and ip4 == 0: + # continue + # elif ip1 == 23 and ip2 == 103 and ip3 == 160 and ip4 == 0: + # continue + # elif ip1 == 40 and ip2 == 96 and ip3 == 0 and ip4 == 0: + # continue + # elif ip1 == 40 and ip2 == 104 and ip3 == 0 and ip4 == 0: + # continue + # elif ip1 == 52 and ip2 == 96 and ip3 == 0 and ip4 == 0: + # continue + # elif ip1 == 131 and ip2 == 253 and ip3 == 33 and ip4 == 215: + # continue + # elif ip1 == 132 and ip2 == 245 and ip3 == 0 and ip4 == 0: + # continue + # elif ip1 == 150 and ip2 == 171 and ip3 == 32 and ip4 == 0: + # continue + # elif ip1 == 204 and ip2 == 79 and ip3 == 197 and ip4 == 215: + # continue + # elif ip1 == 208 and ip2 == 71 and (ip3 > 120 and ip3 < 127): + # continue + # elif ip1 == 117 and ip2 == 102 and (ip3 > 128 and ip3 < 159): + # continue + # elif ip1 == 203 and ip2 == 171 and (ip3 > 192 and ip3 < 207): + # continue + # elif ip1 == 59 
and (ip3 > 192 and ip3 < 255): + # continue + # elif ip1 == 163 and ip2 == 233: + # continue + # elif ip1 == 62 and ip2 <= 30: + # continue # honey pots + # elif ip1 == 207 and ip2 >= 31 and ip3 <= 120: + # continue # fbi honey pots + # elif ip1 == 65 and ip2 >= 224 and ip3 <= 226: + # continue # more honey pots + # elif ip1 == 195 and ip2 == 10: + # continue # another honeypot + # elif ip1 == 216 and (ip2 == 25 or ip2 == 94): + # continue + # elif ip1 == 212 and ip2 == 56: + # continue + + ip = f"{str(ip1)}.{str(ip2)}.{str(ip3)}.{str(ip4)}" + return ip + except: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop17.py b/python-parser/packages/pyright-internal/src/tests/samples/loop17.py new file mode 100644 index 00000000..663eb4c7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop17.py @@ -0,0 +1,10 @@ +# This sample tests the case where a loop involves an unannotated parameter +# and therefore an "unknown" that propagates through the loop. + + +def f(x): + e = 0 + for _ in [0]: + e += x + reveal_type(e, expected_text="Unknown | Literal[0]") + return e diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop18.py b/python-parser/packages/pyright-internal/src/tests/samples/loop18.py new file mode 100644 index 00000000..e6ad198d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop18.py @@ -0,0 +1,22 @@ +# This sample tests type narrowing in a loop. + +from typing_extensions import Self # pyright: ignore[reportMissingModuleSource] +from collections.abc import Generator + + +class A: + parent: Self | None + + +class B: ... 
+ + +def foo(v: A | B | None) -> Generator[A, None, None]: + reveal_type(v) + if not isinstance(v, B): + reveal_type(v, expected_text="A | None") + while v is not None: + reveal_type(v, expected_text="A") + yield v + v = v.parent + reveal_type(v, expected_text="A | None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop19.py b/python-parser/packages/pyright-internal/src/tests/samples/loop19.py new file mode 100644 index 00000000..d68b9e32 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop19.py @@ -0,0 +1,15 @@ +# This sample tests a loop that references instance variables. + + +class Results: + zzz: int + + +class Foo: + yyy: int + + def method1(self, results: list[Results]): + abc = None + for result in results: + if abc is not None and abc.zzz < result.zzz: + abc = result diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop2.py b/python-parser/packages/pyright-internal/src/tests/samples/loop2.py new file mode 100644 index 00000000..05edac95 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop2.py @@ -0,0 +1,22 @@ +# This sample tests a piece of code that involves lots +# of cyclical dependencies for type resolution. + + +def needs_str(a: str) -> tuple[str, str]: ... + + +def xxx(): + v1 = "" + v2 = "" + v3 = "" + + v4 = None + + _ = v1 + v3, _ = v3, v2 + v4 = v3 + + for _ in range(1): + assert v4 is not None + v1, v2 = needs_str(v4) + v3 = v1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop20.py b/python-parser/packages/pyright-internal/src/tests/samples/loop20.py new file mode 100644 index 00000000..d46a66c2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop20.py @@ -0,0 +1,40 @@ +# This sample tests a case where variables within multiple loops, both +# nested and sequential, depend on each other in ways that can result +# in long analysis times. 
+ +from math import isnan +from typing import Callable + + +def linspace(start: float, stop: float, num: int = 50): + if num == 1: + yield stop + return + step = (stop - start) / (num - 1) + for i in range(num): + yield start + step * i + + +def find_zero(f: Callable[[float], float]) -> float: + x_0 = 0 + x_1 = 0 + f_x_0 = 0 + f_x_1 = 0 + while True: + if not (isnan(f_x_0)) and isnan(f_x_1): + x_tests = list(linspace(x_1, x_0, 25)) + f_x_tests = (f(x) for x in x_tests) + for x, f_x in zip(x_tests, f_x_tests): + if not (isnan(f_x)): + x_1 = x + f_x_1 = f_x + break + + elif isnan(f_x_0) and not (isnan(f_x_1)): + x_tests = list(linspace(x_0, x_1, 25)) + f_x_tests = (f(x) for x in x_tests) + for x, f_x in zip(x_tests, f_x_tests): + if not (isnan(f_x)): + x_0 = x + f_x_0 = f_x + break diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop21.py b/python-parser/packages/pyright-internal/src/tests/samples/loop21.py new file mode 100644 index 00000000..2b835a80 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop21.py @@ -0,0 +1,18 @@ +# This sample tests a loop where some of the variables within the loop +# depend on each other. + +# pyright: strict + + +def find_min(nums: list[int]) -> int: + low = 0 + high = len(nums) - 1 + while low < high: + mid = (low + high) // 2 + if nums[mid] > nums[high]: + low = mid + 1 + elif nums[mid] < nums[high]: + high = mid + else: + high -= 1 + return nums[low] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop22.py b/python-parser/packages/pyright-internal/src/tests/samples/loop22.py new file mode 100644 index 00000000..bd2e9d75 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop22.py @@ -0,0 +1,21 @@ +# This sample tests a case where a loop contains multiple conditional +# checks. 
+ +# pyright: strict + +from __future__ import annotations + + +class ListNode: + def __init__(self, val: int = 0, next: ListNode | None = None): + self.val = val + self.next = next + + +def has_cycle(head: ListNode | None) -> bool: + fast_head = head + while head and fast_head: + fast_head = fast_head.next + if fast_head: + fast_head = fast_head.next + return False diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop23.py b/python-parser/packages/pyright-internal/src/tests/samples/loop23.py new file mode 100644 index 00000000..6a95c0e8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop23.py @@ -0,0 +1,16 @@ +# This sample covers a case that resulted in a crash due to infinite +# recursion within the code flow engine and type narrowing logic. + + +from typing import Any + + +def func(): + c: Any = None + + while True: + if a: # type: ignore + if c: + pass + + a = c == c.foo diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop24.py b/python-parser/packages/pyright-internal/src/tests/samples/loop24.py new file mode 100644 index 00000000..45942c39 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop24.py @@ -0,0 +1,11 @@ +# This sample tests a loop that involves assignment of a tuple +# within a loop. + +# pyright: strict + +var = 0 +while True: + if var and True: + break + else: + var, _ = var + 1, 0 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop25.py b/python-parser/packages/pyright-internal/src/tests/samples/loop25.py new file mode 100644 index 00000000..403192cd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop25.py @@ -0,0 +1,45 @@ +# This sample tests a series of nested loops containing variables +# with significant dependencies. 
+ +for val1 in range(10): + cnt1 = 4 + for val2 in range(10 - val1): + cnt2 = 4 + if val2 == val1: + cnt2 -= 1 + for val3 in range(10 - val1 - val2): + cnt3 = 4 + if val3 == val1: + cnt3 -= 1 + if val3 == val2: + cnt3 -= 1 + for val4 in range(10 - val1 - val2 - val3): + cnt4 = 4 + if val4 == val1: + cnt4 -= 1 + if val4 == val2: + cnt4 -= 1 + if val4 == val3: + cnt4 -= 1 + for val5 in range(10 - val1 - val2 - val3 - val4): + cnt5 = 4 + if val5 == val1: + cnt5 -= 1 + if val5 == val2: + cnt5 -= 1 + if val5 == val3: + cnt5 -= 1 + if val5 == val4: + cnt5 -= 1 + val6 = 10 - val1 - val2 - val3 - val4 - val5 + cnt6 = 4 + if val6 == val1: + cnt6 -= 1 + if val6 == val2: + cnt6 -= 1 + if val6 == val3: + cnt6 -= 1 + if val6 == val4: + cnt6 -= 1 + if val6 == val5: + cnt6 -= 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop26.py b/python-parser/packages/pyright-internal/src/tests/samples/loop26.py new file mode 100644 index 00000000..09c7ff05 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop26.py @@ -0,0 +1,12 @@ +# This sample tests proper type narrowing within a double loop. + +# pyright: strict + +from typing import Callable + + +def func(call: Callable[[], None] | None): + while True: + while True: + if call is None or call(): + break diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop27.py b/python-parser/packages/pyright-internal/src/tests/samples/loop27.py new file mode 100644 index 00000000..0674551c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop27.py @@ -0,0 +1,16 @@ +# This sample tests the case where an annotated variable in a loop +# is used as an index, an implicit argument for __getitem__. 
+ +# pyright: strict + + +def func1(nums: list[int], target: int) -> None: + var = nums[0] + while True: + mid = var + if nums[mid] == target: + return + if var: + var = mid + else: + var = mid diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop28.py b/python-parser/packages/pyright-internal/src/tests/samples/loop28.py new file mode 100644 index 00000000..87500b56 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop28.py @@ -0,0 +1,28 @@ +# This sample tests type evaluation for a nested loop that involves +# accesses to an instance variable accessed through a member access +# expression that requires narrowing. + +from concurrent import futures +from concurrent.futures import Future +from typing import Any, Dict, Optional + + +class A: + def __init__(self): + self.pending: Optional[Dict[Future[Any], int]] + self.foo: bool + + def poll(self): + assert self.pending is not None + while True: + if self.pending: + pass + + ready, _ = futures.wait(self.pending) + + for future_id in ready: + self.pending.pop(future_id) + + future_id.result() + if self.foo: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop29.py b/python-parser/packages/pyright-internal/src/tests/samples/loop29.py new file mode 100644 index 00000000..b23eafd4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop29.py @@ -0,0 +1,16 @@ +# This sample tests the case where a variable type declaration is found +# within a loop and the variable is used within a conditional expression +# within the same loop. 
+ +from enum import Enum + + +class MyEnum(Enum): + A = 0 + + +def func1(vals: list[MyEnum]): + for val1 in vals: + val2: MyEnum = val1 + if val2 == MyEnum.A: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop3.py b/python-parser/packages/pyright-internal/src/tests/samples/loop3.py new file mode 100644 index 00000000..3c510913 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop3.py @@ -0,0 +1,10 @@ +# This sample tests a piece of code that involves lots +# of cyclical dependencies for type resolution. + + +n: str | None = None +while True: + if n is None: + n = "" + else: + n = n + "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop30.py b/python-parser/packages/pyright-internal/src/tests/samples/loop30.py new file mode 100644 index 00000000..54b56298 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop30.py @@ -0,0 +1,10 @@ +# This sample tests type evaluation in a nested loop. + +a: int | None = None + +for _ in range(1): + for i in range(1): + a = i + j = a + +reveal_type(a, expected_type=int | None) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop31.py b/python-parser/packages/pyright-internal/src/tests/samples/loop31.py new file mode 100644 index 00000000..9eebe2b8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop31.py @@ -0,0 +1,14 @@ +# This sample tests the case where an unannotated local variable +# has a dependency on itself when evaluating its effective type. + + +def func1(arg: str): ... + + +def func2(arg: int): + for _ in range(1): + loc = arg + loc = loc if loc else loc + + # This should generate an error. 
+ func1(loc) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop32.py b/python-parser/packages/pyright-internal/src/tests/samples/loop32.py new file mode 100644 index 00000000..36b97c83 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop32.py @@ -0,0 +1,10 @@ +# This sample tests type narrowing of instance variables in the presence +# of a double nested loop. + + +def func1(x: str | None): + assert x is not None + + for i in range(10): + for j in range(10): + x = x + "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop33.py b/python-parser/packages/pyright-internal/src/tests/samples/loop33.py new file mode 100644 index 00000000..607ef426 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop33.py @@ -0,0 +1,9 @@ +# This sample tests a nested loop containing an augmented assignment. + +count = 0 + +for x in range(1): + for y in range(1): + count += 1 + +reveal_type(count, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop34.py b/python-parser/packages/pyright-internal/src/tests/samples/loop34.py new file mode 100644 index 00000000..136da58a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop34.py @@ -0,0 +1,33 @@ +# This sample tests a doubly-nested loop with a function (max) that +# uses a TypeVar. + +from typing import Any, Protocol, TypeAlias, TypeVar + +_T_contra = TypeVar("_T_contra", contravariant=True) + + +class SupportsDunderGT(Protocol[_T_contra]): + def __gt__(self, __other: _T_contra) -> bool: ... + + +class SupportsDunderLT(Protocol[_T_contra]): + def __lt__(self, __other: _T_contra) -> bool: ... 
+ + +SupportsRichComparison: TypeAlias = SupportsDunderLT[Any] | SupportsDunderGT[Any] + +SupportsRichComparisonT = TypeVar( + "SupportsRichComparisonT", bound=SupportsRichComparison +) + + +def max( + __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT +) -> SupportsRichComparisonT: ... + + +a: int = 1 +while True: + while a >= 0: + a -= 1 + a = max(0, a) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop35.py b/python-parser/packages/pyright-internal/src/tests/samples/loop35.py new file mode 100644 index 00000000..835c4f4b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop35.py @@ -0,0 +1,8 @@ +# This sample tests that code flow analysis of a list comprehension +# within a loop eliminates any Unknowns. + +# pyright: strict + +lst = [1] +while True: + lst = [val for val in lst] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop36.py b/python-parser/packages/pyright-internal/src/tests/samples/loop36.py new file mode 100644 index 00000000..218273b5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop36.py @@ -0,0 +1,15 @@ +# This sample tests code that uses an augmented assignment to a subscript +# within a loop. + +# pyright: strict + +from typing import Any + + +def func1(any: Any): + l: list[int] = any + while any: + if any: + l[0] += 0 + else: + l[0] += 0 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop37.py b/python-parser/packages/pyright-internal/src/tests/samples/loop37.py new file mode 100644 index 00000000..8906077d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop37.py @@ -0,0 +1,13 @@ +# This sample tests that a variable assigned in a doubly-nested loop +# eliminates any Unknowns. 
+ +# pyright: strict + + +def func(): + a_value: int = 0 + a_list: list[int] = [] + + for _ in range(1): + for _ in range(1): + a_value = a_list[a_value] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop38.py b/python-parser/packages/pyright-internal/src/tests/samples/loop38.py new file mode 100644 index 00000000..22767163 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop38.py @@ -0,0 +1,20 @@ +# This sample tests a code flow graph that includes a nested loop +# and a variable that is assigned only in the outer loop. + + +# pyright: strict + + +# * Code flow graph for func1: +# Assign[step+=1] ── True[step==0] ── Assign[node=] ── Loop ┬─ Loop ┬─ Assign[step=1] ── Start +# │ ╰ Circular(Assign[step+=1]) +# ╰ FalseNever ─ False ─ Circular(Assign[node]) +def func1(nodes: list[int]): + step = 1 + while True: + for node in nodes: + if node or step == 0: + step += 1 + break + else: + return diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop39.py b/python-parser/packages/pyright-internal/src/tests/samples/loop39.py new file mode 100644 index 00000000..43015435 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop39.py @@ -0,0 +1,19 @@ +# This sample tests a loop where there are multiple symbols +# that depend on each other. + +# pyright: strict + + +def func1() -> str | None: ... + + +s1: str | None = None +s2 = None +while True: + obj = func1() + + x = s2 + condition = obj and obj != s1 + + s1 = obj + s2 = obj diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop4.py b/python-parser/packages/pyright-internal/src/tests/samples/loop4.py new file mode 100644 index 00000000..bdcc1af3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop4.py @@ -0,0 +1,16 @@ +# This sample tests a loop with self-references. 
+ +a: bool = False +x: int = 0 + +while len(input()) < 42: + x += 43 + + if a and a: + continue + + if a: + print("") + continue + + x += 44 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop40.py b/python-parser/packages/pyright-internal/src/tests/samples/loop40.py new file mode 100644 index 00000000..494853e3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop40.py @@ -0,0 +1,13 @@ +# This sample verifies that unknown types are properly eliminated from +# a loop. + +# pyright: strict + + +def func1(a: int, b: str, c: str): + v1: list[tuple[str, str, str]] = [] + for _ in range(0): + if a == 0: + print(v1[-1][0]) + last = v1[-1] + v1[-1] = (b, last[1], c) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop41.py b/python-parser/packages/pyright-internal/src/tests/samples/loop41.py new file mode 100644 index 00000000..21e2c883 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop41.py @@ -0,0 +1,34 @@ +# This sample tests a few cases that involve a loop with a conditional. These +# case regressed and were not caught by any other test cases. + +from typing import TypeVar, Any + +T = TypeVar("T") + + +def func1(x: T) -> T: ... + + +def func2(schema: bool): ... + + +def func3(v1: list[bool], v2: int | str): + for _ in v1: + if v2 in set([1, 2, 3]): + func1(v2) + + # This should generate an error. + func2(v2) + + +def func4(x: Any, b: Any): + a = x + while a < 1: + if a: + b = int(b) + else: + b = a + + if b: + # This should generate an error. 
+ return a.x(dummy) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop42.py b/python-parser/packages/pyright-internal/src/tests/samples/loop42.py new file mode 100644 index 00000000..6805e85d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop42.py @@ -0,0 +1,14 @@ +# This sample tests a looping case involving type narrowing within +# a loop where the act of determining whether it's a supported type +# guard results in a circular dependency between variables. + +# pyright: reportUnnecessaryComparison=true + + +def func1(): + a = None + b = "" + + while True: + if b != a: + a = b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop43.py b/python-parser/packages/pyright-internal/src/tests/samples/loop43.py new file mode 100644 index 00000000..7bdc7a26 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop43.py @@ -0,0 +1,16 @@ +# This sample tests the proper type narrowing of a constrained TypeVar +# within a loop. + +from typing import TypeVar + +T = TypeVar("T", str, None) + + +def func1(input_string: T) -> T: + if input_string is None: + return input_string + + for bad_char in set(input_string): + input_string = input_string.replace(bad_char, "") + + return input_string diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop44.py b/python-parser/packages/pyright-internal/src/tests/samples/loop44.py new file mode 100644 index 00000000..35ca9e1d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop44.py @@ -0,0 +1,13 @@ +# This sample tests the case where a variable assigned within a loop +# initially appears to be unreachable (while some variable types are +# incomplete) but is later determined to be reachable. 
+ + +def func(lines: list[str], val: list[str] | None): + for line in lines: + if val is None: + if line == "": + val = [] + continue + match = line + match.encode() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop45.py b/python-parser/packages/pyright-internal/src/tests/samples/loop45.py new file mode 100644 index 00000000..e7a7815b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop45.py @@ -0,0 +1,17 @@ +# This sample tests the case where a match statement is used in a loop +# and the subject is potentially narrowed in the loop, therefore creating +# a circular dependency. + +from typing import Literal + + +def func1(lit: Literal["a", "b"]) -> None: + for _ in range(2): + match lit: + case "a": + v = "123" + + case "b": + v = "234" + + v.replace(",", ".") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop46.py b/python-parser/packages/pyright-internal/src/tests/samples/loop46.py new file mode 100644 index 00000000..b5430569 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop46.py @@ -0,0 +1,10 @@ +# This sample tests a doubly-nested loop with an indexed expression being +# updated each time. + + +def func1(m: list[str | int]): + while True: + if isinstance(m[0], str): + x = m[0] + y = x + "" + m[0] = y diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop47.py b/python-parser/packages/pyright-internal/src/tests/samples/loop47.py new file mode 100644 index 00000000..43266971 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop47.py @@ -0,0 +1,14 @@ +# This sample tests a case where there are dependencies between +# variables within a loop. 
+ +a = None +b = False + +for _ in []: + if b > 0: + pass + if a: + reveal_type(a, expected_text="int") + c = int(a or 1) + a = c + c.is_integer() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop48.py b/python-parser/packages/pyright-internal/src/tests/samples/loop48.py new file mode 100644 index 00000000..55539fe7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop48.py @@ -0,0 +1,24 @@ +# This sample tests a case where an instance variable is assigned within +# a loop using its own value. + +# pyright: strict + + +class ClassA: + x: int | None + + def method1(self) -> None: + self.x = 0 + + for _ in range(1, 10): + self.x = reveal_type(self.x, expected_text="int") + 1 + + reveal_type(self.x, expected_text="int") + + def method2(self) -> None: + self.x = 0 + + for _ in range(1, 10): + self.x += 1 + + reveal_type(self.x, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop49.py b/python-parser/packages/pyright-internal/src/tests/samples/loop49.py new file mode 100644 index 00000000..ffe050ec --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop49.py @@ -0,0 +1,13 @@ +# This sample tests a doubly-nested loop that was incorrectly evaluated. + +a = b = c = 0 + +while True: + if a < 0: + c += b - 1 + a = b + + while a != (d := a + 1): + b = max(b, d) + c += abs(a - d) + a = d diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop5.py b/python-parser/packages/pyright-internal/src/tests/samples/loop5.py new file mode 100644 index 00000000..31f79353 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop5.py @@ -0,0 +1,14 @@ +# This sample tests a case where a potential type alias +# ("a") is involved in a recursive type dependency +# ("a" depends on "test" which depends on "a"). 
+ +# pyright: strict + + +test = {"key": "value"} + +while True: + a = test + reveal_type(a, expected_text="dict[str, str]") + test = a.copy() + reveal_type(test, expected_text="dict[str, str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop50.py b/python-parser/packages/pyright-internal/src/tests/samples/loop50.py new file mode 100644 index 00000000..87b92c36 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop50.py @@ -0,0 +1,15 @@ +# This sample tests the case where a type alias is accessed within +# a loop as both an annotation and a value expression. + +from typing import Literal + + +TA1 = Literal["a", "b"] + + +def func1(values: list): + for value in values: + x: TA1 = value["x"] + + if x not in TA1.__args__: + raise ValueError() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop51.py b/python-parser/packages/pyright-internal/src/tests/samples/loop51.py new file mode 100644 index 00000000..1f3b4131 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop51.py @@ -0,0 +1,21 @@ +# This sample tests a case where type evaluation for a type guard +# within a loop may trigger a false positive "type depends on itself" +# error message. + +# For details, see https://github.com/microsoft/pyright/issues/9139. + +from enum import StrEnum + + +class MyEnum(StrEnum): + A = "A" + + +for _ in range(2): + x: dict[MyEnum, int] = {} + + if MyEnum.A in x: + ... + + for _ in x.values(): + ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop52.py b/python-parser/packages/pyright-internal/src/tests/samples/loop52.py new file mode 100644 index 00000000..c1a4c872 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop52.py @@ -0,0 +1,13 @@ +# This sample tests the case where a function accesses its own decorated +# form within a loop. 
+ +from contextlib import contextmanager + + +@contextmanager +def func1(): + yield + + for _ in (): + with func1(): + return diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop6.py b/python-parser/packages/pyright-internal/src/tests/samples/loop6.py new file mode 100644 index 00000000..68c6bb85 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop6.py @@ -0,0 +1,25 @@ +# This sample tests a difficult set of circular dependencies +# between untyped variables. + + +class ClassA: + def method1(self, param1): + return ClassA() + + def method2(self): + return {}, {} + + def method3(self, param3): + while True: + for key in param3.keys(): + foo1 = self.method1({key: None}) + var1, var2 = foo1.method2() + + if len(var1) < 2: + param3 = var2 + break + + foo2 = foo1.method1({}) + var1, var2 = foo2.method2() + else: + break diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop7.py b/python-parser/packages/pyright-internal/src/tests/samples/loop7.py new file mode 100644 index 00000000..26b6750d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop7.py @@ -0,0 +1,14 @@ +# This sample tests a difficult set of circular dependencies +# between untyped variables. + + +from typing import Optional + + +class ClassA: + name: Optional[str] + + def method1(self): + if self.name is not None: + for _ in []: + self.name = self.name.replace("", "") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop8.py b/python-parser/packages/pyright-internal/src/tests/samples/loop8.py new file mode 100644 index 00000000..ee09691c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop8.py @@ -0,0 +1,17 @@ +# This sample tests a difficult set of circular dependencies +# between untyped variables. 
+ +# pyright: strict + +from typing import Iterable + + +def func1(parts: Iterable[str]): + x: list[str] = [] + ns = "" + for part in parts: + if ns: + ns += "a" + else: + ns += part + x.append(ns) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/loop9.py b/python-parser/packages/pyright-internal/src/tests/samples/loop9.py new file mode 100644 index 00000000..8e162cc5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/loop9.py @@ -0,0 +1,25 @@ +# This sample tests a difficult set of circular dependencies +# between untyped variables. + +# pyright: strict + +from typing import Dict + + +class A: + pass + + +class B(A): + pass + + +def func1(v: A, s: Dict[B, A]) -> object: + if not isinstance(v, B): + return v + u = s.get(v) + while isinstance(u, B): + v = u + u = s.get(v) + x = v if u is None else u + return x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/match1.py b/python-parser/packages/pyright-internal/src/tests/samples/match1.py new file mode 100644 index 00000000..cb8dd7c4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/match1.py @@ -0,0 +1,227 @@ +# This sample tests basic parsing of match statements as +# described in PEP 634. + +from typing import Any + +value_obj: Any = 4 + +class Foo: + x: int + + +match (1, ): + case a1, b1 if True: + pass + + case (a2, b2): + pass + + case [a3, b3]: + pass + + case () | []: + pass + + # This should generate an error because of a missing pattern. + case : + pass + + # This should generate an error because it is an irrefutable pattern + # and is not at the end. + case (a4): + pass + + case (a5,): + pass + + case [a6,]: + pass + + case a7 as b7, c7 as d7 if True: + pass + + case (a8, b8, ) as c8 if 1 == 3: + pass + + case a9, *b8: + pass + + # This should generate an error because multiple star + # patterns in a sequence are not allowed. 
+ case *a10, *b10: + pass + + # This should generate an error because star + # patterns cannot be used with "as". + case *a11 as b11, b12: + pass + + case value_obj.a, value_obj.b: + pass + + # This should generate an error because star + # patterns can't be used with |. + case (3 as b13) | (4 as b13) | *b13: + pass + + case *a14, b14: + pass + + case (a20, (b20,), [c20, *d20]) as e20: + pass + + case 3 | -3: + pass + + case 3.2 - 2.1j | -3.2 + 2.1j | 3j: + pass + + # This should generate an error because the grammar + # indicates that imaginary number must come second. + case 2j + 4: + pass + + # This should generate an error because the grammar + # indicates that imaginary number must come second. + case - 2j + 4: + pass + + case "hi" """hi""" | r"hi" r"""hi""": + pass + + # This should generate an error because f-strings are + # not allowed. + case "hi" f"""hi""": + pass + + # This should generate an error. + case {}: + pass + + case {"a": 3, -3 + 4j: a30, value_obj.a: b30, **c30}: + pass + + # This should generate an error because only one ** expression + # can be used. + case {"a": 3, **a31, "b": -3j, **b31}: + pass + + # This should generate an error because ** cannot be used with + # wildcard "_". + case {"a": 3, **_, "b": -3}: + pass + + case (3 as x) as y: + pass + + case int(): + pass + + case Foo(1, a40, value_obj.b as b40, c40=3|-2 + 5j|"hi" as d40, y=[e40, f40] as g40,): + pass + + # This should generate an error because positional arguments + # cannot appear after keyword arguments. + case Foo(1, a41, x=3, value_obj.b as b41, c41=3, y=[d41, e41] as f41): + pass + + # This should generate three errors because irrefutable patterns + # must appear only as the last entry in an or pattern. + case (_ as x) | x: + pass + + # This should generate an error because it's an irrefutable pattern + # but is not the last case statement. + case _: + pass + + # This should generate an error because it's an irrefutable pattern + # but is not the last case statement. 
+ case (x): + pass + + case _ if value_obj: + pass + + # This should generate an error because or patterns must target the + # same names. + case 3 | x: + pass + + case _: + pass + + + +def func1(): + match = Foo() + + # This should be treated as an expression statement, not a match statement. + match.x + + +def func2(): + match = [3] + + # This should be treated as an expression statement, not a match statement. + match[0] + + match [0]: + case _: + pass + +def func3(): + def match(a: int): ... + + # This should be treated as a call statement. + match(0) + + match (0): + case _: + pass + +def func4(): + match 1, 2, "3": + case _: + pass + +def func5(match: Any): + # This should be treated as a list, not a match statement. + match[2:8, 2:8] = 0 + + +class Point: + def __init__(self, x: int, y: int) -> None: + self.x = x + self.y = y + +def func6(subj: Any): + match subj: + # This should generate an error because a is used twice in the same pattern. + case [a, *a]: + pass + + case ([c, d] as f) | ([d, c] as f): + pass + + # This should generate an error because h is used twice in the same pattern. + case (g, 1 as h) as h: + pass + + # This should generate an error because j is used twice in the same pattern. + case Point(x=j, y=j): + pass + +def func7(): + match +1: + case _: + pass + + match -1: + case _: + pass + + match ~1: + case _: + pass + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/match2.py b/python-parser/packages/pyright-internal/src/tests/samples/match2.py new file mode 100644 index 00000000..fefcba87 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/match2.py @@ -0,0 +1,65 @@ +# This sample tests type narrowing of subject expressions for +# match statements. 
+ + +def func1(subj: int | dict[str, str] | tuple[int] | str, cond: bool): + match subj: + case 3 | "hi": + reveal_type(subj, expected_text="Literal[3, 'hi']") + return + + case int(y) if cond: + reveal_type(subj, expected_text="int") + return + + case int(y): + reveal_type(subj, expected_text="int") + return + + case int(): + reveal_type(subj, expected_text="Never") + return + + case str(z): + reveal_type(subj, expected_text="str") + return + + reveal_type(subj, expected_text="dict[str, str] | tuple[int]") + return subj + + +# This should generate an error because there is the potential +# for fall-through if the subject expression is a str. +def func2(subj: int | str) -> str: + match subj: + case int(): + return "int" + + reveal_type(subj, expected_text="str") + + +# This should generate an error because there is the potential +# for fall-through if the guard expressions are false. +def func3(subj: int | str) -> str: + match subj: + case str() if len(subj) > 0: + return "str" + + case int() if subj < 0: + return "int" + + reveal_type(subj, expected_text="int | str") + + +def func4(subj: int | str) -> str: + match subj: + case int(): + return "int" + + case str(): + return "str" + + case _: + # This should be ignored because the pattern has already + # been exhaustively matched. + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/match3.py b/python-parser/packages/pyright-internal/src/tests/samples/match3.py new file mode 100644 index 00000000..8a168634 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/match3.py @@ -0,0 +1,99 @@ +# This sample tests narrowing of subject subexpressions in match statements. 
+ +from typing import Literal, TypedDict + + +class TD1(TypedDict): + name: Literal["a"] + extra_value: int + + +class TD2(TypedDict): + name: Literal["b"] + other_extra_value: int + + +class TD3(TypedDict): + name: Literal["c"] + extra_value: int + + +def func1(item: TD1 | TD2): + match item["name"]: + case "d": + reveal_type(item, expected_text="Never") + case "a": + reveal_type(item, expected_text="TD1") + case "b": + reveal_type(item, expected_text="TD2") + + +def func2(item: TD1 | TD2 | TD3): + match item["name"]: + case "a" | "c": + reveal_type(item, expected_text="TD1 | TD3") + case _: + reveal_type(item, expected_text="TD2") + + +T1 = tuple[Literal[0], int] +T2 = tuple[Literal[1], str] + + +def func3(item: T1 | T2): + match item[0]: + case 0: + reveal_type(item, expected_text="tuple[Literal[0], int]") + case 1: + reveal_type(item, expected_text="tuple[Literal[1], str]") + + +def func4(a: object, b: int) -> None: + match a, b: + case (complex(), 3): + reveal_type(a, expected_text="complex") + reveal_type(b, expected_text="Literal[3]") + + +Token = ( + str + | tuple[Literal["define"], str, str] + | tuple[Literal["include"], str] + | tuple[Literal["use"], str, int, int] +) + + +def func5(token: Token): + match token: + case str(x): + reveal_type(token, expected_text="str") + case "define", _, _: + reveal_type(token, expected_text="tuple[Literal['define'], str, str]") + case "include", _: + reveal_type(token, expected_text="tuple[Literal['include'], str]") + case "use", _, _, _: + reveal_type(token, expected_text="tuple[Literal['use'], str, int, int]") + case _: + reveal_type(token, expected_text="Never") + + +def func6(a: int | str, b: int | str) -> None: + match a, b: + case (_, _): + reveal_type(a, expected_text="int | str") + reveal_type(b, expected_text="int | str") + case (x, y): + reveal_type(x, expected_text="Never") + reveal_type(y, expected_text="Never") + reveal_type(a, expected_text="Never") + reveal_type(b, expected_text="Never") + + +def func7(a: 
str | None, b: str | None) -> None: + match (a, b): + case (_, None): + return + case (None, _): + return + reveal_type(a, expected_text="str") + reveal_type(b, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchClass1.py b/python-parser/packages/pyright-internal/src/tests/samples/matchClass1.py new file mode 100644 index 00000000..0c7b30ed --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchClass1.py @@ -0,0 +1,553 @@ +# This sample tests type checking for match statements (as +# described in PEP 634) that contain class patterns. + +from typing import ( + Any, + Generic, + Literal, + NamedTuple, + Protocol, + TypeVar, + TypedDict, + runtime_checkable, +) +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + LiteralString, +) +from dataclasses import dataclass, field + +foo = 3 + +T = TypeVar("T") + + +class ClassA: + __match_args__ = ("attr_a", "attr_b") + attr_a: int + attr_b: str + + +class ClassB(Generic[T]): + __match_args__ = ("attr_a", "attr_b") + attr_a: T + attr_b: str + + +class ClassC: ... + + +class ClassD(ClassC): ... + + +def test_unknown(value_to_match): + match value_to_match: + case ClassA(attr_a=a2) as a1: + reveal_type(a1, expected_text="ClassA") + reveal_type(a2, expected_text="int") + reveal_type(value_to_match, expected_text="ClassA") + + # This should generate an error because foo isn't instantiable. 
+ case foo() as a3: + pass + + +def test_any(value_to_match: Any): + match value_to_match: + case list() as a1: + reveal_type(a1, expected_text="list[Unknown]") + reveal_type(value_to_match, expected_text="list[Unknown]") + + +def test_custom_type(value_to_match: ClassA | ClassB[int] | ClassB[str] | ClassC): + match value_to_match: + case int() as a1: + reveal_type(a1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") + + case ClassA(attr_a=a4, attr_b=a5) as a3: + reveal_type(a3, expected_text="ClassA") + reveal_type(a4, expected_text="int") + reveal_type(a5, expected_text="str") + reveal_type(value_to_match, expected_text="ClassA") + reveal_type(value_to_match, expected_text="ClassA") + + case ClassB(a6, a7): + reveal_type(a6, expected_text="int | str") + reveal_type(a7, expected_text="str") + reveal_type(value_to_match, expected_text="ClassB[int] | ClassB[str]") + + case ClassD() as a2: + reveal_type(a2, expected_text="ClassD") + reveal_type(value_to_match, expected_text="ClassD") + + case ClassC() as a8: + reveal_type(a8, expected_text="ClassC") + reveal_type(value_to_match, expected_text="ClassC") + + +def test_subclass(value_to_match: ClassD): + match value_to_match: + case ClassC() as a1: + reveal_type(a1, expected_text="ClassD") + + case _ as a2: + reveal_type(a2, expected_text="Never") + + +def test_literal(value_to_match: Literal[3]): + match value_to_match: + case float() as a2: + reveal_type(a2, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") + + case str() as a3: + reveal_type(a3, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") + + case int() as a1: + reveal_type(a1, expected_text="Literal[3]") + reveal_type(value_to_match, expected_text="Literal[3]") + + +def test_literal2(value_to_match: Literal[0, "1", b"2"]) -> None: + match value_to_match: + case float() as a2: + reveal_type(a2, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") + + case str() 
as a3: + reveal_type(a3, expected_text="Literal['1']") + reveal_type(value_to_match, expected_text="Literal['1']") + + case int() as a1: + reveal_type(a1, expected_text="Literal[0]") + reveal_type(value_to_match, expected_text="Literal[0]") + + case x: + reveal_type(x, expected_text='Literal[b"2"]') + reveal_type(value_to_match, expected_text='Literal[b"2"]') + + +def test_literal_string(value_to_match: LiteralString) -> None: + match value_to_match: + case "a" as a1: + reveal_type(value_to_match, expected_text="Literal['a']") + reveal_type(a1, expected_text="Literal['a']") + + case str() as a2: + reveal_type(value_to_match, expected_text="LiteralString") + reveal_type(a2, expected_text="LiteralString") + + case a3: + reveal_type(value_to_match, expected_text="Never") + reveal_type(a3, expected_text="Never") + + +TFloat = TypeVar("TFloat", bound=float) + + +def test_bound_typevar(value_to_match: TFloat) -> TFloat: + match value_to_match: + case int() as a1: + reveal_type(a1, expected_text="int*") + reveal_type(value_to_match, expected_text="int*") + + case float() as a2: + reveal_type(a2, expected_text="float*") + reveal_type(value_to_match, expected_text="float*") + + case str() as a3: + reveal_type(a3, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") + + return value_to_match + + +TInt = TypeVar("TInt", bound=int) + + +def test_union( + value_to_match: TInt | Literal[3] | float | str, +) -> TInt | Literal[3] | float | str: + match value_to_match: + case int() as a1: + reveal_type(a1, expected_text="int* | int") + reveal_type(value_to_match, expected_text="int* | int") + + case float() as a2: + reveal_type(a2, expected_text="float") + reveal_type(value_to_match, expected_text="float") + + case str() as a3: + reveal_type(a3, expected_text="str") + reveal_type(value_to_match, expected_text="str") + + return value_to_match + + +T = TypeVar("T") + + +class Point(Generic[T]): + __match_args__ = ("x", "y") + x: T + y: T + + +def func1(points: 
list[Point[float] | Point[complex]]): + match points: + case [] as a1: + reveal_type(a1, expected_text="list[Point[float] | Point[complex]]") + reveal_type(points, expected_text="list[Point[float] | Point[complex]]") + + case [Point(0, 0) as b1]: + reveal_type(b1, expected_text="Point[float] | Point[complex]") + reveal_type(points, expected_text="list[Point[float] | Point[complex]]") + + case [Point(c1, c2)]: + reveal_type(c1, expected_text="float | complex") + reveal_type(c2, expected_text="float | complex") + reveal_type(points, expected_text="list[Point[float] | Point[complex]]") + + case [Point(0, d1), Point(0, d2)]: + reveal_type(d1, expected_text="float | complex") + reveal_type(d2, expected_text="float | complex") + reveal_type(points, expected_text="list[Point[float] | Point[complex]]") + + case _ as e1: + reveal_type(e1, expected_text="list[Point[float] | Point[complex]]") + reveal_type(points, expected_text="list[Point[float] | Point[complex]]") + + +def func2(subj: object): + match subj: + case list() as a1: + reveal_type(a1, expected_text="list[Unknown]") + reveal_type(subj, expected_text="list[Unknown]") + + +def func3(subj: int | str | dict[str, str]): + match subj: + case int(x): + reveal_type(x, expected_text="int") + reveal_type(subj, expected_text="int") + + case str(x): + reveal_type(x, expected_text="str") + reveal_type(subj, expected_text="str") + + case dict(x): + reveal_type(x, expected_text="dict[str, str]") + reveal_type(subj, expected_text="dict[str, str]") + + +def func4(subj: object): + match subj: + case int(x): + reveal_type(x, expected_text="int") + reveal_type(subj, expected_text="int") + + case str(x): + reveal_type(x, expected_text="str") + reveal_type(subj, expected_text="str") + + +# Test the auto-generation of __match_args__ for dataclass. 
+@dataclass +class Dataclass1: + val1: int + val2: str = field(init=False) + val3: complex + + +@dataclass +class Dataclass2: + val1: int + val2: str + val3: float + + +def func5(subj: object): + match subj: + case Dataclass1(a, b): + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="complex") + reveal_type(subj, expected_text="Dataclass1") + + case Dataclass2(a, b, c): + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="str") + reveal_type(c, expected_text="float") + reveal_type(subj, expected_text="Dataclass2") + + +@dataclass(match_args=False) +class DataclassNoMatchArgs: + a: int + b: int + + +def func5b(subj: object): + match subj: + # This should generate an error because match_args is False. + case DataclassNoMatchArgs(1, 2): + pass + + +# Test the auto-generation of __match_args__ for named tuples. +NT1 = NamedTuple("NT1", [("val1", int), ("val2", complex)]) +NT2 = NamedTuple("NT2", [("val1", int), ("val2", str), ("val3", float)]) + + +def func6(subj: object): + match subj: + case NT1(a, b): + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="complex") + reveal_type(subj, expected_text="NT1") + + case NT2(a, b, c): + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="str") + reveal_type(c, expected_text="float") + reveal_type(subj, expected_text="NT2") + + +def func7(subj: object): + match subj: + case complex(real=a, imag=b): + reveal_type(a, expected_text="float") + reveal_type(b, expected_text="float") + + +T2 = TypeVar("T2") + + +class Parent(Generic[T]): ... + + +class Child1(Parent[T]): ... + + +class Child2(Parent[T], Generic[T, T2]): ... 
+ + +def func8(subj: Parent[int]): + match subj: + case Child1() as a1: + reveal_type(a1, expected_text="Child1[int]") + reveal_type(subj, expected_text="Child1[int]") + + case Child2() as b1: + reveal_type(b1, expected_text="Child2[int, Unknown]") + reveal_type(subj, expected_text="Child2[int, Unknown]") + + +T3 = TypeVar("T3") + + +def func9(v: T3) -> T3 | None: + match v: + case str(): + reveal_type(v, expected_text="str*") + return v + + case _: + return None + + +T4 = TypeVar("T4", int, str) + + +def func10(v: T4) -> T4 | None: + match v: + case str(): + reveal_type(v, expected_text="str*") + return v + + case int(): + reveal_type(v, expected_text="int*") + return v + + case list(): + reveal_type(v, expected_text="Never") + return v + + case _: + return None + + +def func11(subj: Any): + match subj: + case Child1() as a1: + reveal_type(a1, expected_text="Child1[Unknown]") + reveal_type(subj, expected_text="Child1[Unknown]") + + case Child2() as b1: + reveal_type(b1, expected_text="Child2[Unknown, Unknown]") + reveal_type(subj, expected_text="Child2[Unknown, Unknown]") + + +class TD1(TypedDict): + x: int + + +def func12(subj: int, flt_cls: type[float], union_val: float | int): + match subj: + # This should generate an error because int doesn't accept two arguments. + case int(1, 2): + pass + + match subj: + # This should generate an error because float doesn't accept keyword arguments. + case float(x=1): + pass + + match subj: + case flt_cls(): + pass + + # This should generate an error because it is a union. + case union_val(): + pass + + # This should generate an error because it is a TypedDict. 
+ case TD1(): + pass + + +def func13(subj: tuple[Literal[0]]): + match subj: + case tuple((1,)) as a: + reveal_type(subj, expected_text="Never") + reveal_type(a, expected_text="Never") + + case tuple((0, 0)) as b: + reveal_type(subj, expected_text="Never") + reveal_type(b, expected_text="Never") + + case tuple((0,)) as c: + reveal_type(subj, expected_text="tuple[Literal[0]]") + reveal_type(c, expected_text="tuple[Literal[0]]") + + case d: + reveal_type(subj, expected_text="Never") + reveal_type(d, expected_text="Never") + + +class ClassE(Generic[T]): + __match_args__ = ("x",) + x: list[T] + + +class ClassF(ClassE[T]): + pass + + +def func14(subj: ClassE[T]) -> T | None: + match subj: + case ClassF(a): + reveal_type(subj, expected_text="ClassF[T@func14]") + reveal_type(a, expected_text="list[T@func14]") + return a[0] + + +class IntPair(tuple[int, int]): + pass + + +def func15(x: IntPair | None) -> None: + match x: + case IntPair((y, z)): + reveal_type(y, expected_text="int") + reveal_type(z, expected_text="int") + + +def func16(x: str | float | bool | None): + match x: + case str(v) | bool(v) | float(v): + reveal_type(v, expected_text="str | bool | float") + reveal_type(x, expected_text="str | bool | float") + case v: + reveal_type(v, expected_text="int | None") + reveal_type(x, expected_text="int | None") + reveal_type(x, expected_text="str | bool | float | int | None") + + +def func17(x: str | float | bool | None): + match x: + case str() | float() | bool(): + reveal_type(x, expected_text="str | float | bool") + case _: + reveal_type(x, expected_text="int | None") + reveal_type(x, expected_text="str | float | bool | int | None") + + +def func18(x: str | float | bool | None): + match x: + case str(v) | float(v) | bool(v): + reveal_type(v, expected_text="str | float | bool") + reveal_type(x, expected_text="str | float | bool") + case _: + reveal_type(x, expected_text="int | None") + reveal_type(x, expected_text="str | float | bool | int | None") + + +T5 = 
TypeVar("T5", complex, str) + + +def func19(x: T5) -> T5: + match x: + case complex(): + return x + case str(): + return x + + reveal_type(x, expected_text="float* | int*") + return x + + +T6 = TypeVar("T6", bound=complex | str) + + +def func20(x: T6) -> T6: + match x: + case complex(): + return x + case str(): + return x + + reveal_type(x, expected_text="float* | int*") + return x + + +@runtime_checkable +class Proto1(Protocol): + x: int + + +class Proto2(Protocol): + x: int + + +def func21(subj: object): + match subj: + case Proto1(): + pass + + # This should generate an error because Proto2 isn't runtime checkable. + case Proto2(): + pass + + +class Impl1: + x: int + + +def func22(subj: Proto1 | int): + match subj: + case Proto1(): + reveal_type(subj, expected_text="Proto1") + + case _: + reveal_type(subj, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchClass2.py b/python-parser/packages/pyright-internal/src/tests/samples/matchClass2.py new file mode 100644 index 00000000..1439a3c7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchClass2.py @@ -0,0 +1,20 @@ +# This sample tests keyword-only class pattern matching for +# dataclasses. 
+ +from dataclasses import dataclass, field + + +@dataclass +class Point: + optional: int | None = field(default=None, kw_only=True) + x: int + y: int + + +obj = Point(1, 2) +match obj: + case Point(x, y, optional=opt): + reveal_type(x, expected_text="int") + reveal_type(y, expected_text="int") + reveal_type(opt, expected_text="int | None") + distance = (x**2 + y**2) ** 0.5 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchClass3.py b/python-parser/packages/pyright-internal/src/tests/samples/matchClass3.py new file mode 100644 index 00000000..eda1d4d5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchClass3.py @@ -0,0 +1,80 @@ +# This sample tests class-based pattern matching when the class is +# marked final and can be discriminated based on the argument patterns. + +from typing import final, Protocol, runtime_checkable +from dataclasses import dataclass + + +class A: + title: str + + +class B: + name: str + + +class C: + name: str + + +def func1(r: A | B | C): + match r: + case object(title=_): + reveal_type(r, expected_text="A | B | C") + + case object(name=_): + reveal_type(r, expected_text="A | B | C") + + case _: + reveal_type(r, expected_text="A | B | C") + + +@final +class AFinal: + title: str + + +@final +class BFinal: + name: str + + +@final +class CFinal: + name: str + + +@final +class DFinal: + nothing: str + + +def func2(r: AFinal | BFinal | CFinal | DFinal): + match r: + case object(title=_): + reveal_type(r, expected_text="AFinal") + + case object(name=_): + reveal_type(r, expected_text="BFinal | CFinal") + + case _: + reveal_type(r, expected_text="DFinal") + + +@runtime_checkable +class ProtoE(Protocol): + __match_args__ = ("x",) + x: int + + +@dataclass +class E: + x: int + + +match E(1): + case ProtoE(x): + pass + + case y: + reveal_type(y, expected_text="Never") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchClass4.py 
b/python-parser/packages/pyright-internal/src/tests/samples/matchClass4.py new file mode 100644 index 00000000..423a29f3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchClass4.py @@ -0,0 +1,47 @@ +# This sample tests the case where a subject is narrowed against a +# class pattern that includes a type() or subclass thereof and +# the subject contains a type[T]. + + +class MyMeta(type): + pass + + +class A: + pass + + +class B(A, metaclass=MyMeta): + pass + + +def func1(subj: type[A]): + match subj: + case type(): + reveal_type(subj, expected_text="type[A]") + case _: + reveal_type(subj, expected_text="Never") + + +def func2(subj: type[A]): + match subj: + case MyMeta(): + reveal_type(subj, expected_text="type[A]") + case _: + reveal_type(subj, expected_text="type[A]") + + +def func3(subj: type[B]): + match subj: + case MyMeta(): + reveal_type(subj, expected_text="type[B]") + case _: + reveal_type(subj, expected_text="Never") + + +def func4(subj: type[B] | type[int]): + match subj: + case MyMeta(): + reveal_type(subj, expected_text="type[B] | type[int]") + case _: + reveal_type(subj, expected_text="type[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchClass5.py b/python-parser/packages/pyright-internal/src/tests/samples/matchClass5.py new file mode 100644 index 00000000..c1ad5751 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchClass5.py @@ -0,0 +1,64 @@ +# This sample tests the detection of too many positional patterns. + +from dataclasses import dataclass + + +@dataclass +class A: + a: int + + +class B: + a: int + b: int + + __match_args__ = ("a", "b") + + +class C(B): ... + + +class D(int): ... + + +def func1(subj: A | B): + match subj: + # This should generate an error because A accepts only + # one positional pattern. 
+ case A(1, 2): + pass + + case A(1): + pass + + case A(): + pass + + case B(1, 2): + pass + + # This should generate an error because B accepts only + # two positional patterns. + case B(1, 2, 3): + pass + + # This should generate an error because B accepts only + # two positional patterns. + case C(1, 2, 3): + pass + + case D(1): + pass + + # This should generate an error because D accepts only + # one positional pattern. + case D(1, 2): + pass + + case int(1): + pass + + # This should generate an error because int accepts only + # one positional pattern. + case int(1, 2): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchClass6.py b/python-parser/packages/pyright-internal/src/tests/samples/matchClass6.py new file mode 100644 index 00000000..bbd311d7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchClass6.py @@ -0,0 +1,65 @@ +# This sample tests the case where `Callable()` is used as a class pattern. + +from collections.abc import Callable +from typing import Any, Protocol, TypeVar + +T = TypeVar("T") + + +def func1(obj: T | Callable[..., T]) -> T | None: + match obj: + case Callable(): + reveal_type(obj, expected_text="((...) -> Unknown) | ((...) -> T@func1)") + return obj() + + +def func2(obj: T | Callable[..., T]) -> T | None: + if isinstance(obj, Callable): + reveal_type(obj, expected_text="((...) -> Unknown) | ((...) -> T@func2)") + return obj() + + +def func3(obj: type[int] | Callable[..., str]) -> int | str | None: + match obj: + case Callable(): + reveal_type(obj, expected_text="type[int] | ((...) -> str)") + return obj() + + +def func4(obj): + match obj: + case Callable(): + reveal_type(obj, expected_text="(...) -> Unknown") + return obj() + + +def func5(obj: Any): + match obj: + case Callable(): + reveal_type(obj, expected_text="(...) 
-> Any") + return obj() + + +def func6(obj: Callable[[], None]): + match obj: + case Callable(): + reveal_type(obj, expected_text="() -> None") + return obj() + + case x: + reveal_type(obj, expected_text="Never") + + +class CallableProto(Protocol): + def __call__(self) -> None: + pass + + +def func7(obj: CallableProto): + match obj: + case Callable(): + reveal_type(obj, expected_text="CallableProto") + return obj() + + case x: + reveal_type(obj, expected_text="Never") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchClass7.py b/python-parser/packages/pyright-internal/src/tests/samples/matchClass7.py new file mode 100644 index 00000000..c2f40756 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchClass7.py @@ -0,0 +1,34 @@ +# This sample tests the case where a class pattern overwrites the subject +# expression. + +from dataclasses import dataclass + + +@dataclass +class DC1: + val: str + + +def func1(val: DC1): + result = val + + match result: + case DC1(result): + reveal_type(result, expected_text="str") + + +@dataclass +class DC2: + val: DC1 + + +def func2(val: DC2): + result = val + + match result.val: + case DC1(result): + reveal_type(result, expected_text="str") + + # This should generate an error because result.val + # is no longer valid at this point. + print(result.val) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchClass8.py b/python-parser/packages/pyright-internal/src/tests/samples/matchClass8.py new file mode 100644 index 00000000..60dda0f9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchClass8.py @@ -0,0 +1,27 @@ +# This sample tests the case where a NewType is used as a class pattern. + +from typing import NewType + + +NT1 = NewType("NT1", int) + + +def accepts_widget_id(value: NT1 | int) -> None: + match value: + case NT1(): + # This should generate an error because NewType returns a function at runtime. 
+ pass + + +NT2 = NewType("NT2", str) + + +def accepts_union(value: NT1 | NT2) -> None: + match value: + case NT1(): + # This should generate an error because NewType returns a function at runtime. + pass + + case NT2(): + # This should generate an error because NewType returns a function at runtime. + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchExhaustion1.py b/python-parser/packages/pyright-internal/src/tests/samples/matchExhaustion1.py new file mode 100644 index 00000000..a08e9e32 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchExhaustion1.py @@ -0,0 +1,116 @@ +# This sample tests the reportMatchNotExhaustive diagnostic check. + +from types import NoneType +from typing import Literal +from enum import Enum + + +def func1(subj: Literal["a", "b"], cond: bool): + # This should generate an error if reportMatchNotExhaustive is enabled. + match subj: + case "a": + pass + + case "b" if cond: + pass + + +def func2(subj: object): + # This should generate an error if reportMatchNotExhaustive is enabled. + match subj: + case int(): + pass + + +def func3(subj: object): + match subj: + case object(): + pass + + +def func4(subj: tuple[str] | tuple[int]): + match subj[0]: + case str(): + pass + + case int(): + pass + + +def func5(subj: Literal[1, 2, 3]): + # This should generate an error if reportMatchNotExhaustive is enabled. + match subj: + case 1 | 2: + pass + + +class Color(Enum): + red = 0 + green = 1 + blue = 2 + + +def func6(subj: Color): + # This should generate an error if reportMatchNotExhaustive is enabled. 
+ match subj: + case Color.red: + pass + + case Color.green: + pass + + +class ClassA: + def method1(self) -> str: + match self: + case ClassA(): + return "" + + +def func7() -> int: + match [10]: + case [*values]: + return values[0] + + +class SingleColor(Enum): + red = 0 + + +def func8(subj: SingleColor) -> int: + match subj: + case SingleColor.red: + return 1 + + +def func9(subj: int | None): + match subj: + case NoneType(): + return 1 + case int(): + return 2 + + +def func10(subj: Color | None = None) -> list[str]: + results = [""] + for x in [""]: + match subj: + case None: + results.append(x) + case Color.red: + pass + case Color.green: + pass + case Color.blue: + pass + return results + + +def func11(subj: int | float | None): + match subj: + case float(): + reveal_type(subj, expected_text="float") + case int(): + reveal_type(subj, expected_text="int") + case NoneType(): + reveal_type(subj, expected_text="None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchLiteral1.py b/python-parser/packages/pyright-internal/src/tests/samples/matchLiteral1.py new file mode 100644 index 00000000..b10e551b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchLiteral1.py @@ -0,0 +1,121 @@ +# This sample tests type checking for match statements (as +# described in PEP 634) that contain literal patterns. 
+ + +from typing import Literal, TypeVar + + +def test_unknown(value_to_match): + match value_to_match: + case 3 as a1, -3 as a2: + reveal_type(a1, expected_text="Unknown") + reveal_type(a2, expected_text="Unknown") + reveal_type(value_to_match, expected_text="Sequence[Unknown]") + + case 3j as b1, -3 + 5j as b2: + reveal_type(b1, expected_text="complex") + reveal_type(b2, expected_text="complex") + reveal_type(value_to_match, expected_text="Sequence[complex]") + + case "hi" as c1, None as c2: + reveal_type(c1, expected_text="Unknown") + reveal_type(c2, expected_text="None") + reveal_type(value_to_match, expected_text="Sequence[Unknown]") + + case True as d1, False as d2: + reveal_type(d1, expected_text="Unknown") + reveal_type(d2, expected_text="Unknown") + reveal_type(value_to_match, expected_text="Sequence[Unknown]") + + +def test_tuple(value_to_match: tuple[int | float | str | complex, ...]): + match value_to_match: + case (3, -3) as a1: + reveal_type( + a1, + expected_text="tuple[float | complex | Literal[3], float | complex | Literal[-3]]", + ) + reveal_type( + value_to_match, + expected_text="tuple[float | complex | Literal[3], float | complex | Literal[-3]]", + ) + + case (3j, -3 + 5j) as b1: + reveal_type( + b1, expected_text="tuple[int | float | complex, int | float | complex]" + ) + reveal_type( + value_to_match, + expected_text="tuple[int | float | complex, int | float | complex]", + ) + + +def test_union(value_to_match: int | float | str | complex | bool | None): + match value_to_match: + case (3 | -3j) as a1: + reveal_type(a1, expected_text="float | complex | bool | int") + reveal_type(value_to_match, expected_text="float | complex | bool | int") + + case (True | False | 3.4 | -3 + 3j | None) as b1: + reveal_type(b1, expected_text="int | float | complex | bool | None") + reveal_type( + value_to_match, expected_text="int | float | complex | bool | None" + ) + + case ("hi" | 3.4) as c1: + reveal_type(c1, expected_text="int | float | Literal['hi']") + 
reveal_type(value_to_match, expected_text="int | float | Literal['hi']") + + case ((True | "True") as d1) | ((False | "False") as d1): + reveal_type( + d1, expected_text="int | float | complex | Literal['True', 'False']" + ) + reveal_type( + value_to_match, + expected_text="int | float | complex | Literal['True', 'False']", + ) + + +def test_none(value_to_match: int | None): + match value_to_match: + case None as a1: + reveal_type(a1, expected_text="None") + + case a2: + reveal_type(a2, expected_text="int") + + +class A(str): ... + + +def test_subclass1(a: A): + match a: + case "TEST" as m: + reveal_type(m, expected_text="A") + case x: + reveal_type(x, expected_text="A") + + +def test_subclass2(subj: int): + match subj: + case 1.0e4: + reveal_type(subj, expected_text="int") + + +def test_subclass3(subj: Literal[1]): + match subj: + case 1.0: + reveal_type(subj, expected_text="Literal[1]") + + +T1 = TypeVar("T1", Literal["A"], Literal["B"]) + + +def test_constrained_typevar(subj: T1): + match subj: + case "A": + reveal_type(subj, expected_text="Literal['A']") + case "B": + reveal_type(subj, expected_text="Literal['B']") + case x: + reveal_type(x, expected_text="Never") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchLiteral2.py b/python-parser/packages/pyright-internal/src/tests/samples/matchLiteral2.py new file mode 100644 index 00000000..e7e65de1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchLiteral2.py @@ -0,0 +1,32 @@ +# This sample tests type narrowing for a discriminated union that uses +# literal patterns to discriminate between objects with literal tags. 
+ +from typing import Literal + + +class A: + tag: Literal["a"] + name: str + + +class B: + tag: Literal["b"] + num: int + + +class C: + tag: Literal["c"] + num: int + + +def g(d: A | B | C) -> None: + match d.tag: + case "d": + reveal_type(d.tag, expected_text="Never") + reveal_type(d, expected_text="Never") + case "a" | "c": + reveal_type(d.tag, expected_text="Literal['a', 'c']") + reveal_type(d, expected_text="A | C") + case "b": + reveal_type(d.tag, expected_text="Literal['b']") + reveal_type(d, expected_text="B") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchMapping1.py b/python-parser/packages/pyright-internal/src/tests/samples/matchMapping1.py new file mode 100644 index 00000000..9359dfa0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchMapping1.py @@ -0,0 +1,154 @@ +# This sample tests type checking for match statements (as +# described in PEP 634) that contain mapping patterns. + +from typing import Literal, TypedDict + +from typing_extensions import NotRequired # pyright: ignore[reportMissingModuleSource] + + +def test_unknown(value_to_match): + match value_to_match: + case {"hello": a1, **a2}: + reveal_type(a1, expected_text="Unknown") + reveal_type(a2, expected_text="dict[Unknown, Unknown]") + reveal_type(value_to_match, expected_text="Unknown") + + +def test_object(value_to_match: object): + match value_to_match: + case {"hello": a1, **a2}: + reveal_type(a1, expected_text="Unknown") + reveal_type(a2, expected_text="dict[Unknown, Unknown]") + reveal_type(value_to_match, expected_text="object") + + +def test_dict(value_to_match: dict[str | int, str | int]): + match value_to_match: + case {1: a1}: + reveal_type(a1, expected_text="str | int") + reveal_type(value_to_match, expected_text="dict[str | int, str | int]") + + case {"hi": b1, "hi2": b2, **b3}: + reveal_type(b1, expected_text="str | int") + reveal_type(b2, expected_text="str | int") + reveal_type(b3, expected_text="dict[str | int, str 
| int]") + reveal_type(value_to_match, expected_text="dict[str | int, str | int]") + + case {None: c1}: + reveal_type(c1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") + + +class Movie(TypedDict): + title: str + release_year: int + gross_earnings: float + + +class MovieInfo: + field_of_interest: Literal["release_year", "gross_earnings"] + + +def test_typed_dict(value_to_match: Movie): + match value_to_match: + case {"title": a1, "release_year": a2, **a3}: + reveal_type(a1, expected_text="str") + reveal_type(a2, expected_text="int") + reveal_type(a3, expected_text="dict[str, object]") + reveal_type(value_to_match, expected_text="Movie") + + case {3: b1, "title": b2}: + reveal_type(b1, expected_text="Never") + reveal_type(b2, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") + + case {"director": c1}: + reveal_type(c1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") + + case {MovieInfo.field_of_interest: d1}: + reveal_type(d1, expected_text="int | float") + reveal_type(value_to_match, expected_text="Movie") + + +def test_union1(value_to_match: dict[str | int, str | int] | Movie | str): + match value_to_match: + case {3: a1}: + reveal_type(a1, expected_text="str | int") + reveal_type(value_to_match, expected_text="dict[str | int, str | int]") + + case {"gross_earnings": b1}: + reveal_type(b1, expected_text="str | int | float") + reveal_type( + value_to_match, expected_text="dict[str | int, str | int] | Movie" + ) + + +def test_union2(value_to_match: dict[int, int] | Movie | str): + match value_to_match: + case {**kw}: + reveal_type(kw, expected_text="dict[int | str, int | object]") + reveal_type(value_to_match, expected_text="dict[int, int] | Movie") + + case x: + reveal_type(x, expected_text="str") + + +class IntValue(TypedDict): + type: Literal["Int"] + int_value: int + + +class StrValue(TypedDict): + type: Literal["Str"] + str_value: str + + +class ComplexValue(TypedDict): + type: 
NotRequired[Literal["Complex"]] + complex_value: complex + + +def test_negative_narrowing1(value: IntValue | StrValue | ComplexValue | int) -> None: + match value: + case {"type": "Int"}: + reveal_type(value, expected_text="IntValue") + case {"type": "Str" | "Complex"}: + reveal_type(value, expected_text="StrValue | ComplexValue") + case _: + reveal_type(value, expected_text="ComplexValue | int") + + +def test_negative_narrowing2(value: StrValue | ComplexValue) -> None: + if "type" not in value: + raise + + match value: + case {"type": "Str"}: + reveal_type(value, expected_text="StrValue") + case {"type": "Complex"}: + reveal_type(value, expected_text="ComplexValue") + case _: + reveal_type(value, expected_text="Never") + + +class TD1(TypedDict): + v1: NotRequired[int] + v2: NotRequired[int] + v3: NotRequired[int] + + +def test_not_required_narrowing(subj: TD1) -> None: + match subj: + case {"v1": _}: + print(subj["v1"]) + + # This should generate an error. + print(subj["v2"]) + + case {"v2": 1, "v3": 2}: + # This should generate an error. + print(subj["v1"]) + + print(subj["v2"]) + print(subj["v3"]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchSequence1.py b/python-parser/packages/pyright-internal/src/tests/samples/matchSequence1.py new file mode 100644 index 00000000..5f76d33c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchSequence1.py @@ -0,0 +1,678 @@ +# This sample tests type checking for match statements (as +# described in PEP 634) that contain sequence patterns. 
+ +# pyright: reportMissingModuleSource=false + +from enum import Enum +from typing import ( + Any, + Generic, + Iterator, + List, + Literal, + Protocol, + Reversible, + Sequence, + Tuple, + TypeVar, + Union, +) +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") + + +def test_unknown(value_to_match): + match value_to_match: + case []: + reveal_type(value_to_match, expected_text="Sequence[Unknown]") + case a1, a2: + reveal_type(a1, expected_text="Unknown") + reveal_type(a2, expected_text="Unknown") + + case *b1, b2: + reveal_type(b1, expected_text="list[Unknown]") + reveal_type(b2, expected_text="Unknown") + + case c1, *c2: + reveal_type(c1, expected_text="Unknown") + reveal_type(c2, expected_text="list[Unknown]") + + case d1, *d2, d3: + reveal_type(d1, expected_text="Unknown") + reveal_type(d2, expected_text="list[Unknown]") + reveal_type(d3, expected_text="Unknown") + + case 3, *e1: + reveal_type(e1, expected_text="list[Unknown]") + + case "hi", *f1: + reveal_type(f1, expected_text="list[Unknown]") + + case *g1, "hi": + reveal_type(g1, expected_text="list[Unknown]") + + +def test_any(value_to_match: Any): + match value_to_match: + case []: + reveal_type(value_to_match, expected_text="Sequence[Any]") + case [*a1]: + reveal_type(a1, expected_text="list[Any]") + case b1: + reveal_type(b1, expected_text="Any") + + +def test_reversible(value_to_match: Reversible[int]): + match value_to_match: + case [*a1]: + reveal_type(a1, expected_text="list[int]") + case b1: + reveal_type(b1, expected_text="Reversible[int]") + + +_T_co = TypeVar("_T_co", covariant=True) + + +class SeqProto(Protocol[_T_co]): + def __reversed__(self) -> Iterator[_T_co]: ... 
+ + +def test_protocol(value_to_match: SeqProto[str]): + match value_to_match: + case [*a1]: + reveal_type(a1, expected_text="list[str]") + case b1: + reveal_type(b1, expected_text="SeqProto[str]") + + +def test_list(value_to_match: List[str]): + match value_to_match: + case a1, a2: + reveal_type(a1, expected_text="str") + reveal_type(a2, expected_text="str") + reveal_type(value_to_match, expected_text="List[str]") + + case *b1, b2: + reveal_type(b1, expected_text="list[str]") + reveal_type(b2, expected_text="str") + reveal_type(value_to_match, expected_text="List[str]") + + case c1, *c2: + reveal_type(c1, expected_text="str") + reveal_type(c2, expected_text="list[str]") + reveal_type(value_to_match, expected_text="List[str]") + + case d1, *d2, d3: + reveal_type(d1, expected_text="str") + reveal_type(d2, expected_text="list[str]") + reveal_type(d3, expected_text="str") + reveal_type(value_to_match, expected_text="List[str]") + + case 3, *e1: + reveal_type(e1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") + + case "hi", *f1: + reveal_type(f1, expected_text="list[str]") + reveal_type(value_to_match, expected_text="List[str]") + + case *g1, "hi": + reveal_type(g1, expected_text="list[str]") + reveal_type(value_to_match, expected_text="List[str]") + + +def test_open_ended_tuple(value_to_match: Tuple[str, ...]): + match value_to_match: + case a1, a2: + reveal_type(a1, expected_text="str") + reveal_type(a2, expected_text="str") + reveal_type(value_to_match, expected_text="tuple[str, str]") + + case *b1, b2: + reveal_type(b1, expected_text="list[str]") + reveal_type(b2, expected_text="str") + reveal_type(value_to_match, expected_text="Tuple[str, ...]") + + case c1, *c2: + reveal_type(c1, expected_text="str") + reveal_type(c2, expected_text="list[str]") + reveal_type(value_to_match, expected_text="Tuple[str, ...]") + + case d1, *d2, d3: + reveal_type(d1, expected_text="str") + reveal_type(d2, expected_text="list[str]") + reveal_type(d3, 
expected_text="str") + reveal_type(value_to_match, expected_text="Tuple[str, ...]") + + case 3, *e1: + reveal_type(e1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") + + case "hi", *f1: + reveal_type(f1, expected_text="list[str]") + reveal_type(value_to_match, expected_text="Tuple[str, ...]") + + case *g1, "hi": + reveal_type(g1, expected_text="list[str]") + reveal_type(value_to_match, expected_text="Tuple[str, ...]") + + +def test_definite_tuple(value_to_match: Tuple[int, str, float, complex]): + match value_to_match: + case a1, a2, a3, a4 if value_to_match[0] == 0: + reveal_type(a1, expected_text="int") + reveal_type(a2, expected_text="str") + reveal_type(a3, expected_text="float") + reveal_type(a4, expected_text="complex") + reveal_type(value_to_match, expected_text="tuple[int, str, float, complex]") + + case *b1, b2 if value_to_match[0] == 0: + reveal_type(b1, expected_text="list[int | str | float]") + reveal_type(b2, expected_text="complex") + reveal_type(value_to_match, expected_text="Tuple[int, str, float, complex]") + + case c1, *c2 if value_to_match[0] == 0: + reveal_type(c1, expected_text="int") + reveal_type(c2, expected_text="list[str | float | complex]") + reveal_type(value_to_match, expected_text="Tuple[int, str, float, complex]") + + case d1, *d2, d3 if value_to_match[0] == 0: + reveal_type(d1, expected_text="int") + reveal_type(d2, expected_text="list[str | float]") + reveal_type(d3, expected_text="complex") + reveal_type(value_to_match, expected_text="Tuple[int, str, float, complex]") + + case 3, *e1: + reveal_type(e1, expected_text="list[str | float | complex]") + reveal_type(value_to_match, expected_text="Tuple[int, str, float, complex]") + + case "hi", *f1: + reveal_type(f1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") + + case *g1, 3j: + reveal_type(g1, expected_text="list[int | str | float]") + reveal_type(value_to_match, expected_text="Tuple[int, str, float, complex]") + + case *h1, 
"hi": + reveal_type(h1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") + + +def test_union( + value_to_match: Union[ + Tuple[complex, complex], + Tuple[int, str, float, complex], + List[str], + Tuple[float, ...], + Any, + ], +): + match value_to_match: + case a1, a2, a3, a4 if value_to_match[0] == 0: + reveal_type(a1, expected_text="int | str | float | Any") + reveal_type(a2, expected_text="str | float | Any") + reveal_type(a3, expected_text="float | str | Any") + reveal_type(a4, expected_text="complex | str | float | Any") + reveal_type( + value_to_match, + expected_text="tuple[int, str, float, complex] | List[str] | tuple[float, float, float, float] | Sequence[Any]", + ) + + case *b1, b2 if value_to_match[0] == 0: + reveal_type( + b1, + expected_text="list[complex] | list[int | str | float] | list[str] | list[float] | list[Any]", + ) + reveal_type(b2, expected_text="complex | str | float | Any") + reveal_type( + value_to_match, + expected_text="Tuple[complex, complex] | Tuple[int, str, float, complex] | List[str] | Tuple[float, ...] | Sequence[Any]", + ) + + case c1, *c2 if value_to_match[0] == 0: + reveal_type(c1, expected_text="complex | int | str | float | Any") + reveal_type( + c2, + expected_text="list[complex] | list[str | float | complex] | list[str] | list[float] | list[Any]", + ) + reveal_type( + value_to_match, + expected_text="Tuple[complex, complex] | Tuple[int, str, float, complex] | List[str] | Tuple[float, ...] | Sequence[Any]", + ) + + case d1, *d2, d3 if value_to_match[0] == 0: + reveal_type(d1, expected_text="complex | int | str | float | Any") + reveal_type( + d2, + expected_text="list[Any] | list[str | float] | list[str] | list[float]", + ) + reveal_type(d3, expected_text="complex | str | float | Any") + reveal_type( + value_to_match, + expected_text="Tuple[complex, complex] | Tuple[int, str, float, complex] | List[str] | Tuple[float, ...] 
| Sequence[Any]", + ) + + case 3, e1: + reveal_type(e1, expected_text="complex | float | Any") + reveal_type( + value_to_match, + expected_text="tuple[complex, complex] | tuple[float, float] | Sequence[Any]", + ) + + case "hi", *f1: + reveal_type(f1, expected_text="list[str] | list[Any]") + reveal_type(value_to_match, expected_text="List[str] | Sequence[Any]") + + case *g1, 3j: + reveal_type( + g1, + expected_text="list[complex] | list[int | str | float] | list[float] | list[Any]", + ) + reveal_type( + value_to_match, + expected_text="tuple[complex, complex] | Tuple[int, str, float, complex] | Tuple[float, ...] | Sequence[Any]", + ) + + case *h1, "hi": + reveal_type(h1, expected_text="list[str] | list[Any]") + reveal_type(value_to_match, expected_text="List[str] | Sequence[Any]") + + +class SupportsLessThan(Protocol): + def __lt__(self, __other: Any) -> bool: ... + + def __le__(self, __other: Any) -> bool: ... + + +SupportsLessThanT = TypeVar("SupportsLessThanT", bound=SupportsLessThan) + + +def sort(seq: List[SupportsLessThanT]) -> List[SupportsLessThanT]: + match seq: + case [] | [_]: + reveal_type(seq, expected_text="List[SupportsLessThanT@sort]") + return seq + + case [x, y] if x <= y: + reveal_type(seq, expected_text="List[SupportsLessThanT@sort]") + return seq + + case [x, y]: + reveal_type(seq, expected_text="List[SupportsLessThanT@sort]") + return [y, x] + + case [x, y, z] if x <= y <= z: + reveal_type(seq, expected_text="List[SupportsLessThanT@sort]") + return seq + + case [x, y, z] if x > y > z: + reveal_type(seq, expected_text="List[SupportsLessThanT@sort]") + return [z, y, x] + + case [p, *rest]: + a = sort([x for x in rest if x <= p]) + b = sort([x for x in rest if p < x]) + reveal_type(seq, expected_text="List[SupportsLessThanT@sort]") + return a + [p] + b + return seq + + +def test_exceptions(seq: Union[str, bytes, bytearray]): + match seq: + case [x, y]: + reveal_type(x, expected_text="Never") + reveal_type(y, expected_text="Never") + return seq + + 
+def test_object1(seq: object): + match seq: + case (a1, a2) as a3: + reveal_type(a1, expected_text="Unknown") + reveal_type(a2, expected_text="Unknown") + reveal_type(a3, expected_text="Sequence[Unknown]") + reveal_type(seq, expected_text="Sequence[Unknown]") + + case (*b1, b2) as b3: + reveal_type(b1, expected_text="list[Unknown]") + reveal_type(b2, expected_text="Unknown") + reveal_type(b3, expected_text="Sequence[Unknown]") + reveal_type(seq, expected_text="Sequence[Unknown]") + + case (c1, *c2) as c3: + reveal_type(c1, expected_text="Unknown") + reveal_type(c2, expected_text="list[Unknown]") + reveal_type(c3, expected_text="Sequence[Unknown]") + reveal_type(seq, expected_text="Sequence[Unknown]") + + case (d1, *d2, d3) as d4: + reveal_type(d1, expected_text="Unknown") + reveal_type(d2, expected_text="list[Unknown]") + reveal_type(d3, expected_text="Unknown") + reveal_type(d4, expected_text="Sequence[Unknown]") + reveal_type(seq, expected_text="Sequence[Unknown]") + + case (3, *e1) as e2: + reveal_type(e1, expected_text="list[Unknown]") + reveal_type(e2, expected_text="Sequence[Unknown]") + reveal_type(seq, expected_text="Sequence[Unknown]") + + case ("hi", *f1) as f2: + reveal_type(f1, expected_text="list[Unknown]") + reveal_type(f2, expected_text="Sequence[Unknown]") + reveal_type(seq, expected_text="Sequence[Unknown]") + + case (*g1, "hi") as g2: + reveal_type(g1, expected_text="list[Unknown]") + reveal_type(g2, expected_text="Sequence[Unknown]") + reveal_type(seq, expected_text="Sequence[Unknown]") + + case [1, "hi", True] as h1: + reveal_type(h1, expected_text="Sequence[Unknown]") + reveal_type(seq, expected_text="Sequence[Unknown]") + + case [1, i1] as i2: + reveal_type(i1, expected_text="Unknown") + reveal_type(i2, expected_text="Sequence[Unknown]") + reveal_type(seq, expected_text="Sequence[Unknown]") + + +def test_object2(value_to_match: object): + match value_to_match: + case [*a1]: + reveal_type(a1, expected_text="list[Unknown]") + case b1: + 
reveal_type(b1, expected_text="object") + + +def test_sequence(value_to_match: Sequence[Any]): + match value_to_match: + case [*a1]: + reveal_type(a1, expected_text="list[Any]") + case b1: + reveal_type(b1, expected_text="Never") + + +_T = TypeVar("_T") + + +class A(Generic[_T]): + a: _T + + +class B: ... + + +class C: ... + + +AAlias = A + +AInt = A[int] + +BOrC = B | C + + +def test_illegal_type_alias(m: object): + match m: + case AAlias(a=i): + pass + + # This should generate an error because it raises an + # exception at runtime. + case AInt(a=i): + pass + + # This should generate an error because it raises an + # exception at runtime. + case BOrC(a=i): + pass + + +def test_negative_narrowing1(subj: tuple[Literal[0]] | tuple[Literal[1]]): + match subj: + case (1, *a) | (*a): + reveal_type(subj, expected_text="tuple[Literal[1]] | tuple[Literal[0]]") + reveal_type(a, expected_text="list[Any] | list[int]") + + case b: + reveal_type(subj, expected_text="Never") + reveal_type(b, expected_text="Never") + + +def test_negative_narrowing2(subj: tuple[int, ...]): + match subj: + case (1, *a): + reveal_type(subj, expected_text="tuple[int, ...]") + reveal_type(a, expected_text="list[int]") + + case (b,): + reveal_type(subj, expected_text="tuple[int]") + reveal_type(b, expected_text="int") + + case (*c,): + reveal_type(subj, expected_text="tuple[int, ...]") + reveal_type(c, expected_text="list[int]") + + case d: + reveal_type(subj, expected_text="Never") + reveal_type(d, expected_text="Never") + + +def test_negative_narrowing3(subj: tuple[Any, Any]): + match subj: + case (a, b): + reveal_type(a, expected_text="Any") + reveal_type(b, expected_text="Any") + + case x: + reveal_type(x, expected_text="Never") + + +def test_negative_narrowing4(a: str | None, b: str | None): + match (a, b): + case (None, _) as x: + reveal_type(x, expected_text="tuple[None, str | None]") + case (_, None) as x: + reveal_type(x, expected_text="tuple[str, None]") + case (a, b) as x: + reveal_type(x, 
expected_text="tuple[str, str]") + + +def test_negative_narrowing5(a: str | None, b: str | None): + match (a, b): + case (None, _) | (_, None) as x: + reveal_type(x, expected_text="tuple[None, str | None] | tuple[str, None]") + case (a, b) as x: + reveal_type(x, expected_text="tuple[str, str]") + + +def test_negative_narrowing6(a: str | None, b: str | None): + match (a, b): + case (None, None) as x: + reveal_type(x, expected_text="tuple[None, None]") + reveal_type(a, expected_text="None") + reveal_type(b, expected_text="None") + case (None, _) as x if 2 > 1: + reveal_type(x, expected_text="tuple[None, str]") + reveal_type(a, expected_text="None") + reveal_type(b, expected_text="str") + case (a, b) as x: + reveal_type( + x, expected_text="tuple[str, str | None] | tuple[str | None, str]" + ) + reveal_type(a, expected_text="str | None") + reveal_type(b, expected_text="str | None") + + +def test_negative_narrowing7(a: tuple[str, str] | str): + match a: + case (_, _): + reveal_type(a, expected_text="tuple[str, str]") + case _: + reveal_type(a, expected_text="str") + + +def test_negative_narrowing8(a: str | int, b: str | int): + t = a, b + match t: + case int(), int(): + reveal_type(t, expected_text="tuple[int, int]") + case str(), int(): + reveal_type(t, expected_text="tuple[str, int]") + case int(), str(): + reveal_type(t, expected_text="tuple[int, str]") + case x, y: + reveal_type(t, expected_text="tuple[str, str]") + reveal_type(x, expected_text="str") + reveal_type(y, expected_text="str") + + +class MyEnum(Enum): + A = 1 + B = 2 + C = 3 + + +def test_tuple_with_subpattern( + subj: Literal[MyEnum.A] + | tuple[Literal[MyEnum.B], int] + | tuple[Literal[MyEnum.C], str], +): + match subj: + case MyEnum.A: + reveal_type(subj, expected_text="Literal[MyEnum.A]") + case (MyEnum.B, a): + reveal_type(subj, expected_text="tuple[Literal[MyEnum.B], int]") + reveal_type(a, expected_text="int") + case (MyEnum.C, b): + reveal_type(subj, expected_text="tuple[Literal[MyEnum.C], str]") 
+ reveal_type(b, expected_text="str") + + +def test_unbounded_tuple1( + subj: tuple[int] | tuple[str, str] | tuple[int, Unpack[tuple[str, ...]], complex], +): + match subj: + case (x,): + reveal_type(subj, expected_text="tuple[int]") + reveal_type(x, expected_text="int") + + case (x, y): + reveal_type(subj, expected_text="tuple[str, str] | tuple[int, complex]") + reveal_type(x, expected_text="str | int") + reveal_type(y, expected_text="str | complex") + + case (x, y, z): + reveal_type(subj, expected_text="tuple[int, str, complex]") + reveal_type(x, expected_text="int") + reveal_type(y, expected_text="str") + reveal_type(z, expected_text="complex") + + +def test_unbounded_tuple_2(subj: tuple[int, str, Unpack[tuple[range, ...]]]) -> None: + match subj: + case [1, *ts1]: + reveal_type(ts1, expected_text="list[str | range]") + + case [1, "", *ts2]: + reveal_type(ts2, expected_text="list[range]") + + +def test_unbounded_tuple_3(subj: tuple[int, ...]): + match subj: + case []: + return + case x: + reveal_type(x, expected_text="tuple[int, ...]") + + +def test_unbounded_tuple_4(subj: tuple[str, ...]): + match subj: + case x, "": + reveal_type(subj, expected_text="tuple[str, Literal['']]") + case (x,): + reveal_type(subj, expected_text="tuple[str]") + case x: + reveal_type(subj, expected_text="tuple[str, ...]") + + +def test_unbounded_tuple_5(subj: tuple[int, Unpack[tuple[str, ...]]]): + match subj: + case x, *rest: + reveal_type(subj, expected_text="tuple[int, *tuple[str, ...]]") + reveal_type(x, expected_text="int") + reveal_type(rest, expected_text="list[str]") + case x: + reveal_type(x, expected_text="Never") + + +def test_unbounded_tuple_6(subj: tuple[str, ...]): + match subj: + case ("a", b, _, _): + reveal_type(b, expected_text="str") + + case ("a", b, _, _, _): + reveal_type(b, expected_text="str") + + case (_, b, _, _): + reveal_type(b, expected_text="str") + + case (_, b, _, _, _): + reveal_type(b, expected_text="str") + + case r: + reveal_type(r, 
expected_text="tuple[str, ...]") + + +def test_unbound_tuple_7(subj: tuple[str, Unpack[tuple[object, ...]], int]): + match subj: + case (*args,): + reveal_type(args, expected_text="list[str | object | int]") + case a: + reveal_type(a, expected_text="Never") + + match subj: + case (*args, last): + reveal_type(args, expected_text="list[str | object]") + reveal_type(last, expected_text="int") + case a: + reveal_type(a, expected_text="Never") + + match subj: + case (first, *args, last): + reveal_type(first, expected_text="str") + reveal_type(args, expected_text="list[object]") + reveal_type(last, expected_text="int") + case a: + reveal_type(a, expected_text="Never") + + match subj: + case (first, second, *args, last): + reveal_type(first, expected_text="str") + reveal_type(second, expected_text="object") + reveal_type(args, expected_text="list[object]") + reveal_type(last, expected_text="int") + case a: + reveal_type(a, expected_text="tuple[str, *tuple[object, ...], int]") + + +def test_variadic_tuple(subj: tuple[int, Unpack[Ts]]) -> tuple[Unpack[Ts]]: + match subj: + case _, *rest: + reveal_type(rest, expected_text="list[Unknown]") + return (*rest,) + + +class D: + x: float + y: float + + +def test_tuple_subexpressions(d: D): + match (d.x, d.y): + case (int(), int()): + reveal_type(d.x, expected_text="int") + reveal_type(d.y, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchSequence2.py b/python-parser/packages/pyright-internal/src/tests/samples/matchSequence2.py new file mode 100644 index 00000000..90ef429e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchSequence2.py @@ -0,0 +1,187 @@ +# This sample tests pattern matching with tuple expansion when the number +# of expanded tuples grows very large. 
+ + +class A1: + pass + + +class A2: + pass + + +class A3: + pass + + +class A4: + pass + + +class A5: + pass + + +class A6: + pass + + +class A7: + pass + + +class A8: + pass + + +class A9: + pass + + +class A10: + pass + + +class A11: + pass + + +class A12: + pass + + +class A13: + pass + + +class A14: + pass + + +class A15: + pass + + +class A16: + pass + + +class B1: + pass + + +class B2: + pass + + +class B3: + pass + + +class B4: + pass + + +class B5: + pass + + +class B6: + pass + + +class B7: + pass + + +class B8: + pass + + +class B9: + pass + + +class B10: + pass + + +class B11: + pass + + +class B12: + pass + + +class B13: + pass + + +class B14: + pass + + +class B15: + pass + + +class B16: + pass + + +type UA = ( + A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 +) + +type UB = ( + B1 | B2 | B3 | B4 | B5 | B6 | B7 | B8 | B9 | B10 | B11 | B12 | B13 | B14 | B15 | B16 +) + + +def test(a: UA, b: UB) -> bool: + t = a, b + match t: + case A1(), B1(): + return True + case A2(), B2(): + return True + case A3(), B3(): + return True + case A4(), B4(): + reveal_type(t, expected_text="tuple[A4, B4]") + return True + case A5(), B5(): + return True + case A6(), B6(): + reveal_type(t, expected_text="tuple[A6, B6]") + return True + case A7(), B7(): + reveal_type(t, expected_text="tuple[A7, B7]") + return True + case A8(), B8(): + reveal_type(t, expected_text="tuple[A8, B8]") + return True + case A9(), B9(): + # The type will become less precise in this case + # because narrowing in the negative case needs + # to fall back on less-precise types. 
+ reveal_type(t, expected_text="Sequence[A9 | B9]") + return True + case A10(), B10(): + return True + case A11(), B11(): + return True + case A12(), B12(): + return True + case A13(), B13(): + return True + case A14(), B14(): + return True + case A15(), B15(): + return True + case A16(), B16(): + return True + case _: + reveal_type(t, expected_text="Any") + raise ValueError() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchUnnecessary1.py b/python-parser/packages/pyright-internal/src/tests/samples/matchUnnecessary1.py new file mode 100644 index 00000000..88eae4ba --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchUnnecessary1.py @@ -0,0 +1,97 @@ +# This sample tests the reportUnnecessaryComparison check when applied +# to match statements. + +from typing import Literal, Mapping, Sequence + +Letters = Literal["A", "B", "C"] + + +def func1(subj: Letters): + match subj: + # This should generate an error if reportUnnecessaryComparison is enabled. + case "A" | "B" | "D": + pass + case str(): + pass + # This should generate an error if reportUnnecessaryComparison is enabled. + case "C": + pass + # This should generate an error if reportUnnecessaryComparison is enabled. + case x: + print(x) + + +def func2(subj: int | dict[str, str]): + match subj: + # This should generate an error if reportUnnecessaryComparison is enabled. + case str() if subj > 4: + pass + case int() if subj > 4: + pass + case int(): + pass + # This should generate an error if reportUnnecessaryComparison is enabled. + case int(): + pass + # This should generate an error if reportUnnecessaryComparison is enabled. + case (a, b): + print(a, b) + case {"": d}: + print(d) + case dict(): + pass + # This should generate an error if reportUnnecessaryComparison is enabled. 
+ case x: + print(x) + + +JsonValue = ( + None | bool | int | float | str | Sequence["JsonValue"] | Mapping[str, "JsonValue"] +) +JsonObject = Mapping[str, JsonValue] + + +def func3(json_object: JsonObject) -> None: + match json_object: + case { + "a": { + "b": [ + { + "c": "d", + } + ], + } + }: + pass + + +TA1 = tuple[Literal["a", "b", "c"], int] + + +def func4(vals: list[str]) -> TA1: + x: TA1 = ("c", 0) + + for val in vals: + match x[0]: + case "b": + if val.startswith("x"): + x = ("a", 1) + continue + case "c": + if val.startswith("y"): + x = ("b", 2) + continue + case _: + pass + return x + + +def func5(subj: int | str): + match subj: + case int() | str(): + pass + + # This should not generate a diagnostic becuase _ is exempted + # from the reportUnnecessaryComparison check. + case _: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/matchValue1.py b/python-parser/packages/pyright-internal/src/tests/samples/matchValue1.py new file mode 100644 index 00000000..94a1b263 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/matchValue1.py @@ -0,0 +1,169 @@ +# This sample tests type checking for match statements (as +# described in PEP 634) that contain value patterns. + +from dataclasses import dataclass +from enum import Enum, auto +from http import HTTPStatus +from typing import Annotated, Literal, TypeVar + +# pyright: reportIncompatibleMethodOverride=false + + +def handle_reply(reply: tuple[HTTPStatus, str] | tuple[HTTPStatus]): + match reply: + case (HTTPStatus.OK as a1, a2): + reveal_type(a1, expected_text="Literal[HTTPStatus.OK]") + reveal_type(a2, expected_text="str") + + case (HTTPStatus.NOT_FOUND as d1,): + reveal_type(d1, expected_text="Literal[HTTPStatus.NOT_FOUND]") + + +class MyEnum1(Enum): + V1 = 0 + V2 = 1 + + +class MyClass: + class_var_1: "MyClass" + + def __eq__(self, object: "MyClass") -> bool: ... 
+ + +def test_unknown(value_to_match): + match value_to_match: + case MyEnum1.V1 as a1: + reveal_type(a1, expected_text="Unknown") + reveal_type(value_to_match, expected_text="Unknown") + + +def test_enum(value_to_match: MyEnum1): + match value_to_match: + case MyEnum1.V1 as a1: + reveal_type(a1, expected_text="Literal[MyEnum1.V1]") + reveal_type(value_to_match, expected_text="Literal[MyEnum1.V1]") + case y: + reveal_type(y, expected_text="Literal[MyEnum1.V2]") + reveal_type(value_to_match, expected_text="Literal[MyEnum1.V2]") + + +def test_class_var(value_to_match: str): + match value_to_match: + case MyClass.class_var_1 as a1: + reveal_type(a1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") + + +TInt = TypeVar("TInt", bound=MyEnum1) + + +def test_union(value_to_match: TInt | MyEnum1) -> TInt | MyEnum1: + match value_to_match: + case MyEnum1.V1 as a1: + reveal_type(a1, expected_text="Literal[MyEnum1.V1]") + reveal_type(value_to_match, expected_text="Literal[MyEnum1.V1]") + + return value_to_match + + +class MyEnum2(Enum): + V1 = 0 + V2 = 1 + + +def test_multiple_enums(x: MyEnum2 | MyEnum1): + match x: + case MyEnum1.V1 | MyEnum1.V2 | MyEnum2.V1: + return + + reveal_type(x, expected_text="Literal[MyEnum2.V2]") + + +class Medal(Enum): + gold = 1 + silver = 2 + bronze = 3 + + +class Color(Enum): + red = 1 + blue = 2 + green = 3 + + +def test_enum_narrowing(m: Medal | Color | int): + match m: + case Medal.gold as a1: + reveal_type(a1, expected_text="Literal[Medal.gold]") + reveal_type(m, expected_text="Literal[Medal.gold]") + + case Medal.silver as b1: + reveal_type(b1, expected_text="Literal[Medal.silver]") + reveal_type(m, expected_text="Literal[Medal.silver]") + + case Color() as c1: + reveal_type(c1, expected_text="Color") + reveal_type(m, expected_text="Color") + + case d1: + reveal_type(d1, expected_text="int | Literal[Medal.bronze]") + reveal_type(m, expected_text="int | Literal[Medal.bronze]") + + +@dataclass +class DC1: + a: 
Annotated[Color, str] + + +def test_enum_narrowing_with_annotated(subj: DC1) -> None: + match subj.a: + case Color.red: + pass + case Color.blue: + pass + case x: + reveal_type(x, expected_text="Literal[Color.green]") + + +class Foo(Enum): + bar = auto() + + def __str__(self) -> str: + match self: + case Foo.bar: + return "bar" + + case x: + reveal_type(x, expected_text="Never") + + +class Numbers: + ZERO = 0.0 + ONE = 1 + INFINITY = float("inf") + + +def test_enum_narrowing_with_inf(subj: float): + match subj: + case Numbers.ONE: + reveal_type(subj, expected_text="Literal[1]") + case Numbers.INFINITY: + reveal_type(subj, expected_text="float") + case Numbers.ZERO: + reveal_type(subj, expected_text="float") + case f: + reveal_type(subj, expected_text="float") + + +@dataclass +class DC2: + a: Literal[False] + + +def test_bool_expansion(subj: bool): + match subj: + case DC2.a: + reveal_type(subj, expected_text="Literal[False]") + + case x: + reveal_type(subj, expected_text="Literal[True]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/maxParseDepth1.py b/python-parser/packages/pyright-internal/src/tests/samples/maxParseDepth1.py new file mode 100644 index 00000000..14e32058 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/maxParseDepth1.py @@ -0,0 +1,499 @@ +# This sample tests the parser's ability to detect parse trees that are too deep +# and report an error rather than risking a crash in the binder or type evaluator. 
+ +x = 3 + +for _ in range(2): + x = ( + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + -x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + -x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + -x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + 
x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + -x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + -x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + -x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + ) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/maxParseDepth2.py b/python-parser/packages/pyright-internal/src/tests/samples/maxParseDepth2.py new file mode 100644 index 00000000..203a34b0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/maxParseDepth2.py @@ -0,0 +1,51 @@ +from typing import Any + + +def func(x: dict[int, Any], y: Any): + x[0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][ + 0 + ][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][ + 0 + ][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][ + 0 + ][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][ + 0 + ][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][ + 0 + ][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][ + 0 + 
][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][ + 0 + ][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][ + 0 + ][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][ + 0 + ][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][ + 0 + ][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][ + 0 + ][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][ + 0 + ][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][ + 0 + ][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0] + + y.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x + + 
y()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()() + + y.x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[ + 0 + ]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[ + 0 + ]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[ + 0 + ]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[ + 0 + ]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[ + 0 + ]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[ + 0 + ]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[ + 0 + ]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess1.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess1.py new file mode 100644 index 00000000..7e7cfb4c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess1.py @@ -0,0 +1,134 @@ +# This sample validates that member access magic functions +# like __get__ and __set__ are handled correctly. 
+ +from contextlib import ExitStack +from typing import ( + Any, + Awaitable, + Callable, + Concatenate, + ContextManager, + Generic, + ParamSpec, + TypeVar, + overload, +) +from functools import cached_property + +_T = TypeVar("_T") +_P = ParamSpec("_P") +_R = TypeVar("_R") + + +class DescriptorA(Generic[_T]): + @overload + def __get__(self, instance: None, owner: Any) -> "DescriptorA[_T]": # type: ignore + ... + + @overload + def __get__(self, instance: Any, owner: Any) -> _T: ... + + +class ClassA: + bar = DescriptorA[str]() + + @classmethod + def func1(cls): + a: DescriptorA[str] = cls.bar + + +reveal_type(ClassA.bar, expected_text="DescriptorA[str]") +reveal_type(ClassA().bar, expected_text="str") + + +class ClassB: + @cached_property + def baz(self) -> int: + return 3 + + +c: cached_property[int] = ClassB.baz +d: int = ClassB().baz + + +class Factory: + def __get__(self, obj: Any, cls: type[_T]) -> _T: + return cls() + + +class ClassC: + instance: Factory + + +reveal_type(ClassC.instance, expected_text="ClassC") + + +class DescriptorD(Generic[_T]): + value: _T + + def __get__(self, instance: object | None, cls: type[object]) -> _T: ... + + def __set__(self, instance: object, value: _T) -> None: ... 
+ + +class ClassD: + abc: DescriptorD[str] = DescriptorD() + stack: ExitStack + + def test(self, value: ContextManager[str]) -> None: + self.abc = self.stack.enter_context(value) + + +class DescriptorE: + def __get__(self, instance: "ClassE | None", owner: "type[ClassE]"): + return None + + +class MetaDescriptorE: + def __get__(self, instance: "type[ClassE] | None", owner: "MetaclassE"): + return None + + +class MetaclassE(type): + y = MetaDescriptorE() + + +class ClassE(metaclass=MetaclassE): + x = DescriptorE() + + +ClassE.x +ClassE().x +ClassE.y + + +class Decorator(Generic[_T, _P, _R]): + def __init__(self, func: Callable[Concatenate[_T, _P], Awaitable[_R]]) -> None: + self.func = func + + @overload + def __get__(self, obj: None, objtype: type[_T]) -> "Decorator[_T, _P, _R]": ... + + @overload + def __get__( + self, obj: _T, objtype: type[_T] | None + ) -> Callable[_P, Awaitable[_R]]: ... + + def __get__( + self, obj: _T | None, objtype: type[_T] | None = None + ) -> "Decorator[_T, _P, _R] | Callable[_P, Awaitable[_R]]": ... + + +class ClassF: + @Decorator + async def method1(self, a: int, *, b: str) -> str: ... + + def method2(self): + reveal_type(self.method1, expected_text="(a: int, *, b: str) -> Awaitable[str]") + + @classmethod + def method3(cls): + reveal_type( + cls.method1, + expected_text="Decorator[Self@ClassF, (a: int, *, b: str), str]", + ) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess10.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess10.py new file mode 100644 index 00000000..36b5d98c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess10.py @@ -0,0 +1,63 @@ +# This sample tests the case where a metaclass defines a descriptor +# protocol (i.e. a `__get__` method), and a member is accessed through +# the class. 
+ +from typing import Any + + +class _IntDescriptorMeta(type): + def __get__(self, instance: Any, owner: Any) -> int: + return 123 + + def __set__(self, instance: Any, value: str) -> None: + pass + + +class IntDescriptorClass(metaclass=_IntDescriptorMeta): ... + + +class X: + number_cls = IntDescriptorClass + + +reveal_type(X.number_cls, expected_text="int") +reveal_type(X().number_cls, expected_text="int") + +X.number_cls = "hi" + +X().number_cls = "hi" + +# This should generate an error +X.number_cls = 1 + +# This should generate an error +X().number_cls = 1 + + +class FlagValue: + def __init__(self, func): + self.value: bool = bool(func(None)) + + def __set__(self, instance: "Flags", value: int): + self.value = bool(value) + + +class Flags: + @FlagValue + def suppress(self): + return 2 + + +flags = Flags() + + +def func1(new: Any): + flags.suppress = new + + +def func2(new: int): + flags.suppress = new + + +def func3(new: bool): + flags.suppress = new diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess11.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess11.py new file mode 100644 index 00000000..98e6b3b6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess11.py @@ -0,0 +1,37 @@ +# This sample tests that methods are bound properly regardless of +# whether they are decorated. 
+ +from typing import Callable, TypeVar + +S = TypeVar("S", bound="MyClass") + +Callback = Callable[[S, int], str] + + +def decorator1(method: Callback[S]) -> Callback[S]: + def wrapper(self: S, a: int) -> str: + return "wrapped " + method(self, a) + + return wrapper + + +class MyClass: + def __init__(self): + self.method4 = lambda x: x + + @decorator1 + def method1(self, a: int) -> str: + return "foo" + + def method2(self, a: int) -> str: + return "foo" + + method3 = decorator1(method2) + + +mc = MyClass() + +mc.method1(1) +mc.method2(1) +mc.method3(1) +mc.method4(1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess12.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess12.py new file mode 100644 index 00000000..dc85c2e1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess12.py @@ -0,0 +1,31 @@ +# This sample tests a member access when the metaclass implements a descriptor +# protocol. + +from typing import Any, TypeVar, overload + + +T = TypeVar("T") + + +class MetaClass(type): + @overload + def __get__(self: type[T], instance: None, owner: Any) -> type[T]: ... + + @overload + def __get__(self: type[T], instance: object, owner: Any) -> T: ... + + def __get__(self: type[T], instance: object | None, owner: Any) -> type[T] | T: + if instance is None: + return self + return self() + + +class A(metaclass=MetaClass): ... + + +class B: + a = A + + +reveal_type(B.a, expected_text="type[A]") +reveal_type(B().a, expected_text="A") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess13.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess13.py new file mode 100644 index 00000000..19fdf437 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess13.py @@ -0,0 +1,16 @@ +# This sample tests a member access when the member is a class +# that inherits from Any. 
+ +from unittest.mock import Mock + + +class MockProducer: + produce: type[Mock] = Mock + + +reveal_type(MockProducer.produce, expected_text="type[Mock]") +reveal_type(MockProducer().produce, expected_text="type[Mock]") + + +reveal_type(MockProducer.produce(), expected_text="Mock") +reveal_type(MockProducer().produce(), expected_text="Mock") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess14.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess14.py new file mode 100644 index 00000000..af058cdb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess14.py @@ -0,0 +1,55 @@ +# This sample tests the use of a generic descriptor class that +# is parameterized based on the type of the object that instantiates +# the descriptor. + + +from typing import Any, Callable, Generic, TypeVar, overload + +T = TypeVar("T") +T_contra = TypeVar("T_contra", contravariant=True) +V_co = TypeVar("V_co", covariant=True) +CachedSlotPropertyT = TypeVar( + "CachedSlotPropertyT", bound="CachedSlotProperty[Any, Any]" +) + + +class CachedSlotProperty(Generic[T_contra, V_co]): + def __init__(self, name: str, function: Callable[[T_contra], V_co]) -> None: ... + + @overload + def __get__( + self: CachedSlotPropertyT, instance: None, owner: type[T_contra] + ) -> CachedSlotPropertyT: ... + + @overload + def __get__(self, instance: T_contra, owner: Any) -> V_co: ... + + def __get__( + self: CachedSlotPropertyT, instance: T_contra | None, owner: Any + ) -> CachedSlotPropertyT | V_co: ... + + +def cached_slot_property( + name: str, +) -> Callable[[Callable[[T_contra], V_co]], CachedSlotProperty[T_contra, V_co]]: ... + + +class C(Generic[T]): + def __init__(self, data: T) -> None: ... + + @cached_slot_property("_prop") + def prop(self) -> int: ... + + +class D(C[float]): ... 
+ + +reveal_type(C.prop, expected_text="CachedSlotProperty[C[Unknown], int]") +reveal_type(D.prop, expected_text="CachedSlotProperty[D, int]") + + +c = C("") +reveal_type(c.prop, expected_text="int") + +d = D(1) +reveal_type(d.prop, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess15.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess15.py new file mode 100644 index 00000000..a69cc4b3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess15.py @@ -0,0 +1,20 @@ +# This sample tests the case where an accessed member is a +# method that has a "self" or "cls" parameter with no explicit +# type annotation and an inferred type that is based on this value. + + +class A: + async def get(self): + return self + + +class B(A): + pass + + +async def run(): + val1 = await A().get() + reveal_type(val1, expected_text="A") + + val2 = await B().get() + reveal_type(val2, expected_text="B") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess16.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess16.py new file mode 100644 index 00000000..fb98fabb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess16.py @@ -0,0 +1,12 @@ +# This sample tests the case where a member is accessed from a "type" +# instance or a Type[T]. 
+ +# pyright: strict + +from typing import TypeVar + +Cls = TypeVar("Cls") + + +def func(cls: type[Cls]) -> list[type[Cls]]: + return cls.__subclasses__() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess17.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess17.py new file mode 100644 index 00000000..fb7514c6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess17.py @@ -0,0 +1,47 @@ +# This sample tests the case where a __getattr__ method override +# differentiates based on the name of the accessed member. + +from typing import Any, overload, Literal + + +class Obj: + @overload + def __getattr__(self, name: Literal["foo"]) -> int: ... + + @overload + def __getattr__(self, name: Literal["bar"]) -> str: ... + + def __getattr__(self, name: str) -> Any: + if name == "foo": + return 1 + return "1" + + def __setattr__(self, name: str, value: str) -> None: + pass + + +obj = Obj() +b1 = obj.foo +reveal_type(b1, expected_text="int") +b2 = getattr(obj, "foo") +reveal_type(b2, expected_text="Any") + +c1 = obj.bar +reveal_type(c1, expected_text="str") +c2 = getattr(obj, "bar") +reveal_type(c2, expected_text="Any") + +# This should generate two errors. +d1 = obj.bogus + +obj.foo = "" +obj.bar = "" + +# This should generate an error. +obj.foo = 1 + +# This should generate an error. +obj.other = 1 + +# This should generate an error. +del obj.foo diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess18.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess18.py new file mode 100644 index 00000000..c9d5849d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess18.py @@ -0,0 +1,31 @@ +# This sample tests that a descriptor returned by a __getattr__ method +# is not applied as part of a member access expression evaluation. 
+ +from typing import Any, Generic, TypeVar + +_T = TypeVar("_T") + + +class A: + pass + + +class Descriptor: + def __get__(self, instance: object, owner: Any) -> A: + return A() + + +class CollectionThing(Generic[_T]): + thing: _T + + def __getitem__(self, key: str) -> _T: + return self.thing + + def __getattr__(self, key: str) -> _T: + return self.thing + + +c1: CollectionThing[Descriptor] = CollectionThing() + +reveal_type(c1["key"], expected_text="Descriptor") +reveal_type(c1.key, expected_text="Descriptor") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess19.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess19.py new file mode 100644 index 00000000..a6e5b0ac --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess19.py @@ -0,0 +1,86 @@ +# This sample tests the handling of __getattr__, __setattr__, and +# __delattr__ methods. + +from typing import Any, Literal, TypeVar, overload + +T = TypeVar("T") + + +class A: + @overload + def __getattr__(self, key: Literal["a"]) -> Literal["x"]: ... + + @overload + def __getattr__(self, key: Literal["b"]) -> Literal[4]: ... + + @overload + def __getattr__(self, key: Literal["c"]) -> Literal["y"]: ... + + @overload + def __getattr__(self: T, key: Literal["d"]) -> T: ... + + def __getattr__(self, key: Literal["a", "b", "c", "d"]) -> Any: ... + + @overload + def __setattr__(self, key: Literal["e"], val: str): ... + + @overload + def __setattr__(self, key: Literal["f"], val: int): ... + + def __setattr__(self, key: str, val: str | int): + pass + + @overload + def __delattr__(self, key: Literal["g"]): ... + + @overload + def __delattr__(self, key: Literal["h"]): ... + + def __delattr__(self, key: str): + pass + + +a = A() + +reveal_type(a.a, expected_text="Literal['x']") +reveal_type(a.b, expected_text="Literal[4]") +reveal_type(a.c, expected_text="Literal['y']") +reveal_type(a.d, expected_text="A") + +# This should generate an error. 
+reveal_type(a.e) + +# This should generate an error. +a.a = 4 + +a.e = "4" + +# This should generate an error. +a.e = 4 + +# This should generate an error. +a.f = "4" + +a.f = 4 + +# This should generate an error. +del a.e + +del a.g + +del a.h + + +# Test asymmetric __getattr__ and __setattr__ methods. We should not +# narrow the type on assignment in this case. +class B: + def __setattr__(self, __name: str, __value: Any) -> None: + pass + + def __getattr__(self, __attr: str) -> int: + return 10 + + +a = B() +a.test = "anything" +reveal_type(a.test, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess2.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess2.py new file mode 100644 index 00000000..f142e272 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess2.py @@ -0,0 +1,46 @@ +# This sample verifies that callable variables are bound +# to instances when they appear within most classes, but +# not within TypedDict or data classes. + +from typing import Callable, NamedTuple, TypedDict + + +# This class follows the normal rules where variable +# b, which is callable, acts like a class member and +# is bound to an instance by the member access operator. +class Foo1: + def __init__(self): + self.c = lambda s: s + + def a(self, s: str): + return s + + b = lambda a_inst, s: a_inst.inner_str + s + + +sample = Foo1() +a = sample.a("") +b = sample.b("") +c = sample.c("") + +d = Foo1.a(Foo1(), "") +e = Foo1.b(Foo1(), "") + + +# This class is a data class (because it derives from +# named tuple), so all variables that appear to be class +# variables are actually instance variables. 
+class Foo2(NamedTuple): + a: Callable[[int], int] + + +foo2 = Foo2(a=lambda a: a) +f = foo2.a(3) + + +class Foo3(TypedDict): + a: Callable[[int], int] + + +foo3 = Foo3(a=lambda a: a) +g = foo3["a"](3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess20.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess20.py new file mode 100644 index 00000000..e628cf57 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess20.py @@ -0,0 +1,15 @@ +# This sample tests the case where an instance member write +# targets an instance variable with a TypeVar type. + +from typing import Generic, TypeVar + +T = TypeVar("T") + + +class ClassA(Generic[T]): + def __init__(self, value: T) -> None: + self.value: T = value + + def set_value(self, value: int): + # This should generate an error. + self.value = value diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess21.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess21.py new file mode 100644 index 00000000..416c8fb1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess21.py @@ -0,0 +1,35 @@ +# This sample tests the case where a member access is performed through +# an object using a field that is annotated as a ClassVar. Normally this +# is disallowed, but it is permitted if the type of the ClassVar is +# a descriptor object. + +from typing import ClassVar, Generic, TypeVar, overload, Self + +T = TypeVar("T") + + +class Descriptor(Generic[T]): + @overload + def __get__(self, instance: None, owner) -> Self: ... + + @overload + def __get__(self, instance: object, owner) -> T: ... + + def __get__(self, instance: object | None, owner) -> Self | T: ... + + def __set__(self, instance: object, value: T) -> None: ... + + def is_null(self) -> bool: ... 
+ + +class Example: + field1: ClassVar = Descriptor[str]() + + field2: ClassVar = "" + + def reset(self) -> None: + self.field1 = "" + + # This should generate an error because field2 isn't + # a descriptor object. + self.field2 = "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess22.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess22.py new file mode 100644 index 00000000..ad9289f2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess22.py @@ -0,0 +1,15 @@ +# This sample tests the case where a `type[T]` or `type[Self]` typevar is +# used as the base for a member access but is then used to call an +# instance method on the resulting class. + +from contextlib import contextmanager + + +class A: + @classmethod + def method1(cls) -> None: + cls.method2 + + @contextmanager + def method2(self): + yield diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess23.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess23.py new file mode 100644 index 00000000..12ec6f27 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess23.py @@ -0,0 +1,53 @@ +# This sample tests the case where a custom metaclass has an attribute +# that holds a descriptor object and the attribute is accessed through +# a class constructed from the metaclass. The runtime has some surprising +# behavior in this case. It favors the metaclass descriptor object +# in this case even if the class has an instance attribute with the +# same name. + + +class MyMeta(type): + @property + def attr1(cls) -> int: + return 1 + + @property + def attr3(cls) -> int: + return 3 + + attr4 = "4" + + @property + def attr5(cls) -> int: + return 5 + + attr6 = 6 + + def __getattr__(self, name: str) -> complex: ... 
+ + +class A(metaclass=MyMeta): + @property + def attr2(self) -> int: + return 2 + + @property + def attr3(self) -> int: + return 3 + + @property + def attr4(self) -> int: + return 4 + + attr5 = "5" + + +reveal_type(A.attr1, expected_text="int") +reveal_type(A().attr2, expected_text="int") +reveal_type(A.attr2, expected_text="property") +reveal_type(A().attr3, expected_text="int") +reveal_type(A.attr3, expected_text="int") +reveal_type(A.attr4, expected_text="property") +reveal_type(A.attr5, expected_text="int") +reveal_type(A.attr6, expected_text="int") +reveal_type(A.attr7, expected_text="complex") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess24.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess24.py new file mode 100644 index 00000000..125fd4e2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess24.py @@ -0,0 +1,33 @@ +# This sample tests the case where an attribute is accessed from a +# class that derives from an unknown type or Any. + +from typing import Any, overload +from dummy import UnknownX # type: ignore + + +class Desc: + @overload + def __get__(self, instance: None, owner: Any) -> "Desc": ... + + @overload + def __get__(self, instance: object, owner: Any) -> int: ... + + def __get__(self, instance: object | None, owner: Any) -> "Desc | int": ... 
+ + +class DerivesFromUnknown(UnknownX): + y: Desc + + +class DerivesFromAny(Any): + y: Desc + + +v1 = DerivesFromUnknown().x +reveal_type(v1, expected_text="Unknown") + +v2 = DerivesFromAny().x +reveal_type(v2, expected_text="Any") + +reveal_type(DerivesFromUnknown().y, expected_text="int") +reveal_type(DerivesFromAny().y, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess25.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess25.py new file mode 100644 index 00000000..411723d7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess25.py @@ -0,0 +1,75 @@ +# This sample tests for the check of a member access through a generic class +# when the type of the attribute is generic (and therefore its type is +# ambiguous). + +from typing import Generic, TypeVar + +T = TypeVar("T") + + +class ClassA(Generic[T]): + x: T + y: int + + def __init__(self, label: T | None = None) -> None: ... + + +ClassA[int].y = 1 +ClassA[int].y +del ClassA[int].y + +ClassA.y = 1 +ClassA.y +del ClassA.y + +# This should generate an error because x is generic. +ClassA[int].x = 1 + +# This should generate an error because x is generic. +ClassA[int].x + +# This should generate an error because x is generic. +del ClassA[int].x + +# This should generate an error because x is generic. +ClassA.x = 1 + +# This should generate an error because x is generic. +ClassA.x + +# This should generate an error because x is generic. +del ClassA.x + + +class ClassB(ClassA[T]): + pass + + +# This should generate an error because x is generic. +ClassB[int].x = 1 + +# This should generate an error because x is generic. +ClassB[int].x + +# This should generate an error because x is generic. +del ClassB[int].x + +# This should generate an error because x is generic. +ClassB.x = 1 + +# This should generate an error because x is generic. +ClassB.x + +# This should generate an error because x is generic. 
+del ClassB.x + + +class ClassC(ClassA[int]): + pass + + +ClassC.x = 1 +ClassC.x +del ClassC.x +ClassC.x +del ClassC.x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess26.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess26.py new file mode 100644 index 00000000..91f9952d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess26.py @@ -0,0 +1,39 @@ +# This sample tests that type declarations for class members do not +# include method-local type variables. + +from typing import TypeVar + +T1 = TypeVar("T1") +T2 = TypeVar("T2") + + +def func(x: list[T1]): + class ClassB: + def __init__(self, val: list[T2]): + # This should generate an error because T is scoped to + # the method. + self.a1: list[T2] = val + + self.a2: list[T1] = [] + + @classmethod + def method_b(cls, val: list[T2]): + # This should generate an error because T is scoped to + # the method. + cls.b1: list[T2] = val + + cls.b2: list[T1] = [] + + def method_c(self, val: list[T2]): + # This should generate an error because T is scoped to + # the method. + self.c1: list[T2] = val + + self.c2: list[T1] = [] + + b = ClassB([]) + reveal_type(b.a1, expected_text="list[Unknown]") + reveal_type(b.b1, expected_text="list[Unknown]") + reveal_type(b.c1, expected_text="list[Unknown]") + + return b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess27.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess27.py new file mode 100644 index 00000000..ce696e97 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess27.py @@ -0,0 +1,19 @@ +# This sample tests that members of an inner class that are parameterized +# by type variables from an outer scope are handled correctly. 
+ +from typing import Callable, TypeVar + +T = TypeVar("T") + + +def func1(cb: Callable[[], T]) -> T: + class InnerClass: + def __init__(self) -> None: + self.result: T | None = None + + def run(self) -> None: + self.result = cb() + + reveal_type(InnerClass().result, expected_text="T@func1 | None") + + return cb() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess28.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess28.py new file mode 100644 index 00000000..cd9b22e6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess28.py @@ -0,0 +1,18 @@ +# This sample tests the case where an attribute is accessed +# via a bound TypeVar. + +from dataclasses import dataclass + + +@dataclass +class DC1: + x: int + + +class GenericClass[T: DC1]: + def method1(self, t: T): + pass + + def method2(self, t: T): + # This should generate an error. + self.method1(t.x) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess3.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess3.py new file mode 100644 index 00000000..c38e686a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess3.py @@ -0,0 +1,64 @@ +# This sample tests instance and class variable type inference when +# the type can't be inferred from the class itself and must use +# the parent class. 
+ + +from typing import Generic, TypeVar + + +class A: + pi = 3.1415 + + def __init__(self): + self.x = 1 + + +class B(A): + def __init__(self): + self.y = "hi" + + +class C(B): + def method1(self): + a = self.x + require_int(a) + + # This should generate an error because a should be an int + require_str(a) + + b = self.y + require_str(b) + + # This should generate an error because b should be an str + require_int(b) + + c = self.pi + require_float(c) + + # This should generate an error because c should be a float + require_int(c) + + +def require_int(val: int): + pass + + +def require_str(val: str): + pass + + +def require_float(val: float): + pass + + +_TParent = TypeVar("_TParent") +_TChild = TypeVar("_TChild") + + +class Parent(Generic[_TParent]): + member1: _TParent + + +class Child(Parent[_TChild]): + def __init__(self, val: _TChild): + self.member1 = val diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess4.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess4.py new file mode 100644 index 00000000..a0e241b0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess4.py @@ -0,0 +1,120 @@ +# This sample tests the logic for validating that an explicit +# "self" or "cls" parameter type is honored when binding the +# method to an object or class. + +from typing import Callable, Protocol, TypeVar + + +class HasItemProtocol1(Protocol): + item: int + + +class Mixin1: + def do_stuff(self: HasItemProtocol1): + pass + + +class A1(Mixin1): + item = 1 + + +class B1(Mixin1): + item = "hi" + + +class C1(Mixin1): + pass + + +A1().do_stuff() + +# This should generate an error because B1 doesn't +# match the protocol. +B1().do_stuff() + +# This should generate an error because C1 doesn't +# match the protocol. 
+C1().do_stuff() + + +class HasItemProtocol2(Protocol): + def must_have(self) -> None: + pass + + +class Mixin2: + @classmethod + def do_stuff(cls: type[HasItemProtocol2]): + pass + + +class A2(Mixin2): + def must_have(self) -> None: + pass + + +class B2(Mixin2): + pass + + +A2.do_stuff() + +# This should generate an error because B2 doesn't +# match the protocol. +B2.do_stuff() + + +class D: + pass + + +class E: + @staticmethod + def get_or_create(context: D): + return object.__getattribute__(context, "") + + +T_F = TypeVar("T_F", bound="F") + + +class F: + @classmethod + def bar(cls: type[T_F]) -> T_F: ... + + def baz(self) -> None: + self.bar() + + +class ClassWithNewOverride: + def __new__(cls): + return object.__new__(cls) + + +def curry[First, *Rest, Result]( + function: Callable[[First, *Rest], Result], +) -> Callable[[*Rest], Callable[[First], Result]]: ... + + +class EvilProto1(Protocol): + @curry + def __call__[A, B]( + self, a: Callable[[A], B], b: Callable[[Callable[[A], B]], A] + ) -> B: ... + + +# This should generate an error and not hang. +EvilProto1.__call__ + + +# This should generate an error and not hang. +p: EvilProto1 = curry(lambda a, b: a(b(a))) + + +class G: + type TypeAlias1 = Callable[[], None] + + def __init__(self): + self.someVarNoArgs: G.TypeAlias1 + + +g_ta: G.TypeAlias1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess5.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess5.py new file mode 100644 index 00000000..c86e1de5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess5.py @@ -0,0 +1,26 @@ +# This sample tests the handling of unions between +# objects that provide a __get__ method and those +# that do not. 
+ +from typing import Any + + +class IntProvider: + def __get__(self, instance: object, owner: Any) -> int: + return 0 + + +class Foo: + _int_value_declared: IntProvider | int = 3 + _int_value_inferred = 3 + + def __init__(self): + pass + + def get_int_value_1(self) -> int: + reveal_type(self._int_value_declared, expected_text="int") + return self._int_value_declared + + def get_int_value_2(self) -> int: + reveal_type(self._int_value_inferred, expected_text="int") + return self._int_value_inferred diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess6.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess6.py new file mode 100644 index 00000000..ce934fd3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess6.py @@ -0,0 +1,48 @@ +# This sample tests member access "set" operations when the target +# member is an object that doesn't provide a __set__ method. + +# pyright: strict + +from typing import Any, Generic, TypeVar, overload + + +_T = TypeVar("_T") + + +class ParentA: + pass + + +class Column(Generic[_T]): + def __init__(self, type: type[_T]) -> None: ... + + @overload + def __get__(self: "Column[_T]", instance: None, type: Any) -> "Column[_T]": ... + + @overload + def __get__(self: "Column[_T]", instance: ParentA, type: Any) -> _T: ... + + def __get__( + self, instance: ParentA | None, type: Any + ) -> _T | None | "Column[_T]": ... + + +class ChildA(ParentA): + attr1: Column[str] = Column(str) + attr2 = Column(str) + + +ChildA.attr1 +ChildA().attr1 +ChildA.attr2 +ChildA().attr2 + +foo = ChildA() + +# This should generate an error because bar is declared as containing a +# Column[str], which doesn't provide a __set__ method. +foo.attr1 = "" + +# This should generate an error because baz's inferred type is +# Column[str], which doesn't provide a __set__ method. 
+foo.attr2 = "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess7.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess7.py new file mode 100644 index 00000000..7fedfe70 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess7.py @@ -0,0 +1,36 @@ +# This sample tests the handling of a __getattr__ method that returns +# a callable. Such a method should not be bound. + +from typing import Callable, TypeVar + + +class ClassA: + def __init__(self): + return + + def __getattr__(self, key: str) -> Callable[[str], str]: + return lambda a: a + + +a = ClassA() + +a.foo("hi") + + +T = TypeVar("T") + + +class MetaClass(type): + def __getattr__(cls, key: str) -> Callable[[T], T]: + return lambda x: x + + +class ClassB(metaclass=MetaClass): + pass + + +v1 = ClassB.some_function(3) +reveal_type(v1, expected_text="int") + +v2 = ClassB.some_function("hi") +reveal_type(v2, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess8.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess8.py new file mode 100644 index 00000000..d26f9477 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess8.py @@ -0,0 +1,68 @@ +# This sample tests the use of a generic descriptor class. + +from typing import Any, Callable, Generic, TypeVar, overload + + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + + +class Column(Generic[_T]): + def __get__(self, instance: object, type: Any) -> _T: ... + + def __set__(self, instance: object, value: _T) -> _T: ... + + def __delete__(self, instance: object) -> None: ... 
+ + +class Foo: + bar: Column[str] = Column() + baz: Column[list[int]] = Column() + + +foo = Foo() + +v1 = foo.bar +reveal_type(v1, expected_text="str") + +foo.bar = "" +del foo.bar + + +v2 = foo.baz +reveal_type(v2, expected_text="list[int]") + +foo.baz = [1] +del foo.baz + + +class Minimal(Generic[_T, _T_co]): + def __init__(self, name: str, func: Callable[[_T], _T_co]): ... + + @overload + def __get__(self, instance: None, owner: type[_T]) -> "Minimal[_T, _T_co]": ... + + @overload + def __get__(self, instance: _T, owner: type[_T]) -> _T_co: ... + + def __get__(self, instance: _T | None, owner: type[_T]) -> Any: ... + + +def minimal_property( + name: str, +) -> Callable[[Callable[[_T], _T_co]], Minimal[_T, _T_co]]: + def decorator(func: Callable[[_T], _T_co]) -> Minimal[_T, _T_co]: + return Minimal(name, func) + + return decorator + + +class B: + @minimal_property("foo") + def foo(self) -> str: + return "hello" + + +b = B() +reveal_type(b.foo, expected_text="str") +reveal_type(B.foo, expected_text="Minimal[B, str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/memberAccess9.py b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess9.py new file mode 100644 index 00000000..46930b20 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/memberAccess9.py @@ -0,0 +1,10 @@ +# This sample tests that the presence of a __getattr__ doesn't +# mean that an __init__ method is assumed. + + +class GetAttrTest: + def __getattr__(self, name: str) -> int: ... + + +def test_get_attr() -> None: + a = GetAttrTest() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/metaclass1.py b/python-parser/packages/pyright-internal/src/tests/samples/metaclass1.py new file mode 100644 index 00000000..5186b9c1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/metaclass1.py @@ -0,0 +1,48 @@ +# This sample tests pyright's ability to use metaclasses. 
+ +from ctypes import Array, c_uint64 +from typing import Any, TypeAlias, TypeVar + +myArray1 = (c_uint64 * 5)() + +myArray2: Array[c_uint64] = (c_uint64 * 5)() + + +T = TypeVar("T") + + +class CustomMeta(type): + def __getitem__(self, key: Any) -> "type[int]": ... + + +class Custom(metaclass=CustomMeta): ... + + +class OtherMeta(type): ... + + +# This should generate an error because the class isn't +# Generic even though it supports a metaclass with a +# __getitem__. +y1: Custom[int] + +# This should not generate an error because it is used +# as a runtime expression rather than a type annotation. +y2 = Custom[int] + +# This should generate an error. +y3: TypeAlias = Custom[int] + + +def func1(m: CustomMeta): + v1: type = m + v2: type[Any] = m + + # This should generate an error. + v3: type[Custom] = m + + +def func2(m: OtherMeta): + # This should generate an error because OtherMeta + # and the metaclass for Custom are not compatible. + v3: type[Custom] = m diff --git a/python-parser/packages/pyright-internal/src/tests/samples/metaclass10.py b/python-parser/packages/pyright-internal/src/tests/samples/metaclass10.py new file mode 100644 index 00000000..b69eac39 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/metaclass10.py @@ -0,0 +1,21 @@ +# This sample tests the case where a member access expression is used +# to access an instance method on a metaclass. Binding should not be +# performed in this case. + +from enum import EnumMeta +from typing import TypeVar + +_EnumMemberT = TypeVar("_EnumMemberT") + + +class EnumMeta2(EnumMeta): + def __getitem__(cls: type[_EnumMemberT], name: str) -> _EnumMemberT: + return EnumMeta.__getitem__(cls, name) + + +class MyMeta(type): + @classmethod + def meta_method(cls) -> None: ... 
+ + +MyMeta.meta_method() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/metaclass11.py b/python-parser/packages/pyright-internal/src/tests/samples/metaclass11.py new file mode 100644 index 00000000..90d186cf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/metaclass11.py @@ -0,0 +1,49 @@ +# This sample verifies that the type checker allows access +# to instance variables provided by a metaclass. + +from typing import Any + + +class MetaA(type): + var0 = 3 + + def __init__(cls, name, bases, dct): + cls.var1 = "hi" + + +class ClassA(metaclass=MetaA): + pass + + +# This should generate an error because var0 isn't +# accessible via an instance of this class. +ClassA().var0 +reveal_type(ClassA.var0, expected_text="int") +ClassA.var0 = 1 + +reveal_type(ClassA().var1, expected_text="str") +reveal_type(ClassA.var1, expected_text="str") + +ClassA.var1 = "hi" +ClassA().var1 = "hi" + + +class MetaB(type): + def __setattr__(cls, key: str, value: Any) -> None: ... + + +class ClassB(metaclass=MetaB): + var0: int + + +# This should generate an error +ClassB.var0 = "" +ClassB.var1 = "" + +ClassB().var0 = 1 + +# This should generate an error +ClassB().var0 = "" + +# This should generate an error +ClassB().var1 = "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/metaclass2.py b/python-parser/packages/pyright-internal/src/tests/samples/metaclass2.py new file mode 100644 index 00000000..20b9efc3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/metaclass2.py @@ -0,0 +1,52 @@ +# This sample verifies that the type checker allows access +# to class variables provided by a metaclass. 
+ +from enum import Enum +from typing import Mapping + + +class Fruit(Enum): + apple = 1 + orange = 2 + pear = 3 + + +def requires_fruit_mapping(a: Mapping[str, Fruit]): + pass + + +requires_fruit_mapping(Fruit.__members__) + +aaa = len(Fruit) + +for i in Fruit: + print(i) + + +class Meta(type): + inst_var1: int + + def __init__(self): + self.inst_var1 = 1 + + def method1(cls, some_param: str): + pass + + +class MyClass1(metaclass=Meta): + pass + + +MyClass1.method1("some argument") +reveal_type(MyClass1.inst_var1, expected_text="int") + + +class MyClass2(metaclass=Meta): + # This should generate an error + inst_var1 = "" + + +class MyClass3(metaclass=Meta): + def __init__(self): + # This should generate an error + self.inst_var1 = "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/metaclass3.py b/python-parser/packages/pyright-internal/src/tests/samples/metaclass3.py new file mode 100644 index 00000000..1062e0a2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/metaclass3.py @@ -0,0 +1,61 @@ +# This sample tests the detection of metaclass conflicts. + +from typing import Protocol + + +class Meta1(type): + pass + + +class Meta2(type): + pass + + +class Base1(metaclass=Meta1): + pass + + +class Base2(metaclass=Meta2): + pass + + +# This should generate an error because the two +# metaclasses conflict. +class Base3(Base1, Base2): + pass + + +class Meta3(type): + pass + + +class SubMeta3(Meta3): + pass + + +class Base4(metaclass=Meta3): + pass + + +class Base5(metaclass=SubMeta3): + pass + + +class Base6(Base4, Base5): + pass + + +class Meta10(type): ... + + +class Base10(metaclass=Meta10): ... + + +class Proto10(Protocol): ... + + +class Meta11(type(Base10), type(Proto10)): + pass + + +class Base11(Base10, Proto10, metaclass=Meta11): ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/metaclass4.py b/python-parser/packages/pyright-internal/src/tests/samples/metaclass4.py new file mode 100644 index 00000000..89f78a43 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/metaclass4.py @@ -0,0 +1,26 @@ +# This sample covers the case where a metaclass (a class that derives +# from "type") is directly instantiated to create a new class, +# and that class is then used as a base class for another class. + + +class MyMeta(type): + def do_something(self, p1: str, p2: int): + pass + + +MyCustomClass = MyMeta("MyCustomClass", (object,), {}) + +reveal_type(MyCustomClass, expected_text="type[MyCustomClass]") + + +class DerivedCustomClass(MyCustomClass): + pass + + +DerivedCustomClass.do_something("hi", 3) + +# This should generate an error because the second +# argument is the wrong type. +DerivedCustomClass.do_something("hi", "no") + +instance = DerivedCustomClass() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/metaclass5.py b/python-parser/packages/pyright-internal/src/tests/samples/metaclass5.py new file mode 100644 index 00000000..1eb67118 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/metaclass5.py @@ -0,0 +1,33 @@ +# This sample tests the handling of metaclass magic methods for +# binary operators. 
+ +# pyright: reportIncompatibleMethodOverride=false + + +class MetaA(type): + def __eq__(self, a: "type[ClassA]") -> str: + return "hi" + + def __add__(self, a: "type[ClassA]") -> int: + return 0 + + +class ClassA(metaclass=MetaA): + pass + + +def func1(a: ClassA): + reveal_type(type(a), expected_text="type[ClassA]") + reveal_type(type("string1"), expected_text="type[str]") + + reveal_type(type(a) == type("hi"), expected_text="bool") + reveal_type(type("hi") == type("hi"), expected_text="bool") + reveal_type(str != str, expected_text="bool") + reveal_type(ClassA == type(a), expected_text="str") + reveal_type(ClassA != type(a), expected_text="bool") + reveal_type(type(a) == ClassA, expected_text="str") + + # This should generate an error + x = str + str + + reveal_type(ClassA + ClassA, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/metaclass6.py b/python-parser/packages/pyright-internal/src/tests/samples/metaclass6.py new file mode 100644 index 00000000..05fb4470 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/metaclass6.py @@ -0,0 +1,15 @@ +# This sample verifies that metaclasses can be used to satisfy +# protocols if a class type is passed. + +from enum import Enum + + +class Foo(str, Enum): + bar = "bar" + + +for member in Foo: + reveal_type(member, expected_text="Foo") + +foo_members = list(Foo) +reveal_type(foo_members, expected_text="list[Foo]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/metaclass7.py b/python-parser/packages/pyright-internal/src/tests/samples/metaclass7.py new file mode 100644 index 00000000..8a8a3fcd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/metaclass7.py @@ -0,0 +1,63 @@ +# This sample tests the case where a metaclass defines a customer +# __call__ method, thus overriding the __new__ method on classes +# that are created from it. 
+ +# pyright: reportIncompatibleMethodOverride=false + + +from typing import Any, Self + + +class MetaClass1(type): + def __call__(cls, **kwargs): + return object.__new__(**kwargs) + + +class Class1(metaclass=MetaClass1): + def __new__(cls, *args, **kwargs): + raise RuntimeError("Cannot instantiate directly") + + +v1 = Class1() +reveal_type(v1, expected_text="NoReturn") + + +class MetaClass2(type): + pass + + +class Class2(metaclass=MetaClass2): + def __new__(cls, *args, **kwargs): + raise RuntimeError("Cannot instantiate directly") + + +v2 = Class2() +reveal_type(v2, expected_text="NoReturn") + + +class MetaClass3(type): + def __call__(cls, *args, **kwargs) -> Any: + return super().__call__(*args, **kwargs) + + +class Class3(metaclass=MetaClass3): + def __new__(cls, *args, **kwargs): + raise RuntimeError("You cannot instantiate BaseFactory") + + +v3 = Class3() +reveal_type(v3, expected_text="Any") + + +class MetaClass4(type): + def __call__(cls, *args, **kwargs): + return super().__call__(*args, **kwargs) + + +class Class4(metaclass=MetaClass4): + def __new__(cls, *args, **kwargs) -> Self: + return super().__new__(cls, *args, **kwargs) + + +v4 = Class4() +reveal_type(v4, expected_text="Class4") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/metaclass8.py b/python-parser/packages/pyright-internal/src/tests/samples/metaclass8.py new file mode 100644 index 00000000..41a74cbe --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/metaclass8.py @@ -0,0 +1,17 @@ +# This sample tests the case where a generic class is used +# for a metaclass. + +from typing import Any, Generic, TypeVar + + +T = TypeVar("T") + + +class A(type, Generic[T]): ... + + +# This should generate an error because generic metaclasses are not allowed. +class B(Generic[T], metaclass=A[T]): ... + + +class C(metaclass=A[Any]): ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/metaclass9.py b/python-parser/packages/pyright-internal/src/tests/samples/metaclass9.py new file mode 100644 index 00000000..86c5d594 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/metaclass9.py @@ -0,0 +1,66 @@ +# This sample tests the handling of metaclass keyword arguments. + +from typing import Any, TypeVar + +T = TypeVar("T") + + +class Meta1(type): + def __new__( + cls: type[T], + cls_name: str, + bases: tuple[type, ...], + attrs: dict[str, Any], + *, + param1: int, + param2: str, + param3: str = "", + ) -> T: ... + + +class Class1_1(metaclass=Meta1, param1=1, param2="", param3=""): ... + + +class Class1_2(metaclass=Meta1, param2="", param1=1): ... + + +# This should generate an error because param1 is the wrong type. +class Class1_3(metaclass=Meta1, param1="", param2=""): ... + + +# This should generate an error because param1 and param2 are missing. +class Class1_4(metaclass=Meta1): ... + + +# This should generate an error because param4 doesn't exist. +class Class1_5(metaclass=Meta1, param2="", param1=1, param4=3): ... + + +class Meta2(type): + def __new__( + cls: type[T], + cls_name: str, + bases: tuple[type, ...], + attrs: dict[str, Any], + *, + param1: int, + **kwargs: str, + ) -> T: ... + + +class Class2_1(metaclass=Meta2, param1=1, param2="", param3=""): ... + + +class Class2_2(metaclass=Meta2, param2="", param1=1, param20=""): ... + + +# This should generate an error because param1 is the wrong type. +class Class2_3(metaclass=Meta2, param1="", param2=""): ... + + +# This should generate an error because param1 is missing. +class Class2_4(metaclass=Meta2): ... + + +# This should generate an error because param4 is the wrong type. +class Class2_5(metaclass=Meta2, param2="", param1=1, param4=3): ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/methodOverride1.py b/python-parser/packages/pyright-internal/src/tests/samples/methodOverride1.py new file mode 100644 index 00000000..9d6fdaf4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/methodOverride1.py @@ -0,0 +1,546 @@ +# This sample tests the reportIncompatibleMethodOverride +# diagnostic check. + +from typing import ( + Any, + Callable, + Generic, + Iterable, + ParamSpec, + Sequence, + TypedDict, + TypeVar, + overload, +) + +T_ParentClass = TypeVar("T_ParentClass", bound="ParentClass") + +P = ParamSpec("P") +T = TypeVar("T") +S = TypeVar("S") +U = TypeVar("U", bound=int) + + +def decorator(func: Callable[P, None]) -> Callable[P, int]: ... + + +class ParentClass: + def my_method1(self, a: int): + return 1 + + def my_method2(self, a: int, b: int): + return 1 + + def my_method3(self, a: int, b: int): + return 1 + + def my_method4(self, a: int, *b: int): + return 1 + + def my_method5(self, a: int, _b: int): + return 1 + + def my_method6(self, a: int, /, b: int): + return 1 + + def my_method7(self, a: int, /, b: int): + return 1 + + def my_method8(self, a: int, b: int): + return 1 + + def my_method9(self, a: int, b: int): + return 1 + + def my_method10(self, a: int, b: int): + return 1 + + def my_method11(self, a: int, b: int): + return 1 + + def my_method12(self, a: int | str) -> int | str: + return 1 + + def my_method13(self, a: int) -> int: + return 1 + + def my_method14(self, a: int) -> int: + return 1 + + def my_method15(self, a: int) -> int: + return 1 + + def my_method16(self, a: int) -> int: + return 1 + + def my_method17(self, a: str, b: int, c: float, d: bool) -> None: ... + + def my_method18(self, a: str, b: int, c: float, d: bool) -> None: ... + + def my_method19(self, a: str, b: int, c: float, d: bool) -> None: ... + + @classmethod + def my_method20(cls: type[T_ParentClass], a: str) -> T_ParentClass: ... 
+ + def my_method21(self, var: int) -> None: ... + + def _protected_method1(self, a: int): + return 1 + + def __private_method1(self, a: int): + return 1 + + def my_method22(self, a: str, b: int, c: float, d: bool) -> None: ... + + def my_method23(self, a: str = "") -> None: ... + + def my_method24(self, a: str) -> None: ... + + def my_method25(self, *, a: str = "") -> None: ... + + def my_method26(self, *, a: str) -> None: ... + + def my_method27(self, a: object, /) -> None: ... + + def my_method28(self, __a: object) -> None: ... + + @classmethod + def my_method29(cls, /) -> None: ... + + @classmethod + def my_method30(cls, /) -> None: ... + + @staticmethod + def my_method31(a: "type[ParentClass]", /) -> None: ... + + @staticmethod + def my_method32(a: "type[ParentClass]", /) -> None: ... + + def my_method33(self, /) -> None: ... + + def my_method34(self, /) -> None: ... + + def my_method35(self, *, a: int) -> None: ... + + def my_method36(self, *, a: int) -> None: ... + + def my_method37(self, a: int, /) -> None: ... + + def my_method38(self, a: int, /) -> None: ... + + def my_method39(self, a: int, /) -> None: ... + + def my_method40(self, a: int, /) -> None: ... + + def my_method41(self, a: int, b: str, c: str) -> None: ... + + def my_method42(self, a: int, b: int, c: str) -> None: ... + + my_method43: Callable[..., None] + + def my_method44(self, *args: object, **kwargs: object) -> None: ... + + def my_method45(self, __i: int) -> None: ... + + def __my_method46__(self, x: int) -> None: ... + + def __my_method47__(self, x: int) -> None: ... + + def my_method48(self, /, **kwargs: object) -> None: ... + + +T_ChildClass = TypeVar("T_ChildClass", bound="ChildClass") + + +class ChildClass(ParentClass): + # This should generate an error because the type of 'a' doesn't match. + def my_method1(self, a: str): + return 1 + + # This should generate an error because it's missing a param named 'b'. 
+ def my_method2(self, a: int): + return 1 + + # This should generate an error because the name doesn't match. + def my_method3(self, a: int, c: int): + return 1 + + # This should generate an error because the param category for 'b' + # doesn't match. + def my_method4(self, a: int, **b: int): + return 1 + + def my_method5(self, a: int, _c: int): + return 1 + + def my_method6(self, not_a: int, /, b: int): + return 1 + + # This should generate an error because c is not a position-only parameter. + def my_method7(self, a: int, /, c: int): + return 1 + + # This should generate an error because it contains too many parameters. + def my_method8(self, a: int, b: int, c: int, d: str = ""): + return 1 + + def my_method9(self, a: int, b: int, c: int = 4): + return 1 + + def my_method10(self, a: int, b: int, *args): + return 1 + + def my_method11(self, a: int, b: int, *, c: str = "", **kwargs): + return 1 + + # This should generate an error because the type of 'a' is + # narrower than the original method. + def my_method12(self, a: int) -> int: + return 1 + + def my_method13(self, a: int | str) -> int: + return 1 + + # This should generate an error because the return type is + # wider than in the original method. + def my_method14(self, a: int) -> int | str: + return 1 + + # This should generate an error because we're overriding a + # method with a variable. + my_method15: int = 3 + + # This should generate an error because we're overriding a + # method with a class. + class my_method16: + pass + + def my_method17(self, *args: object, **kwargs: object) -> None: ... + + def my_method18(self, a: str, *args: object, **kwargs: object) -> None: ... + + # This should generate an error because b param doesn't match a in name. + def my_method19(self, b: str, *args: object, **kwargs: object) -> None: ... + + @classmethod + def my_method20(cls: type[T_ChildClass], a: str) -> T_ChildClass: ... + + # This should generate an error. 
+ @decorator + def my_method21(self, var: int) -> None: ... + + # This should generate an error. + def _protected_method1(self): + return 1 + + def __private_method1(self): + return 1 + + # This should generate an error. + def my_method22(self, a: str, b: int, c: float, d: bool, /) -> None: ... + + # This should generate an error because a is missing a default value. + def my_method23(self, a: str) -> None: ... + + def my_method24(self, a: str = "") -> None: ... + + # This should generate an error because a is missing a default value. + def my_method25(self, *, a: str) -> None: ... + + def my_method26(self, *, a: str = "") -> None: ... + + def my_method27(self, __a: object) -> None: ... + + def my_method28(self, a: object, /) -> None: ... + + # This should generate an error because it is not a classmethod. + def my_method29(self, /) -> None: ... + + # This should generate an error because it is not a classmethod. + @staticmethod + def my_method30(a: type[ParentClass], /) -> None: ... + + # This should generate an error because it is not a staticmethod. + @classmethod + def my_method31(cls, /) -> None: ... + + # This should generate an error because it is not a staticmethod. + def my_method32(self, /) -> None: ... + + # This should generate an error because it is not an instance method. + @classmethod + def my_method33(cls, /) -> None: ... + + # This should generate an error because it is not an instance method. + @staticmethod + def my_method34(a: type[ParentClass], /) -> None: ... + + def my_method35(self, **kwargs: int) -> None: ... + + # This should generate an error because the method in the parent + # class has a keyword-only parameter that is type 'int', and this + # isn't compatible with 'str'. + def my_method36(self, **kwargs: str) -> None: ... + + def my_method37(self, *args: Any) -> None: ... + + # This should generate an error because the number of position-only + # parameters doesn't match. + def my_method38(self, **kwargs: Any) -> None: ... 
+ + def my_method39(self, *args: Any) -> None: ... + + # This should generate an error because the number of position-only + # parameters doesn't match. + def my_method40(self, **kwargs: Any) -> None: ... + + # This should generate an error because keyword parameters "a" + # and "b" are missing. + def my_method41(self, a: int, *args: str) -> None: ... + + # This should generate an error because args doesn't have the right type. + def my_method42(self, a: int, *args: int) -> None: ... + + def my_method43(self, a: int, b: str, c: str) -> None: ... + + # This should generate an error because kwargs is missing. + def my_method44(self, *object) -> None: ... + + def my_method45(self, i: int, /) -> None: ... + + def __my_method46__(self, y: int) -> None: ... + + # This should generate an error because of a type mismatch. + def __my_method47__(self, y: str) -> None: ... + + # This should generate an error because the keyword-only parameter "x: int" + # is not compatible with the base method's "**kwargs: object". + def my_method48(self, /, *, x: int = 3, **kwargs: object) -> None: ... + + +class A: + def test(self, t: Sequence[int]) -> Sequence[str]: ... + + +class GeneralizedArgument(A): + def test(self, t: Iterable[int], bbb: str = "") -> Sequence[str]: ... + + +class NarrowerArgument(A): + # This should generate error because list[int] is narrower + # than Iterable[int]. + def test(self, t: list[int]) -> Sequence[str]: ... + + +class NarrowerReturn(A): + def test(self, t: Sequence[int]) -> list[str]: ... + + +class GeneralizedReturn1(A): + # This should generate an error because Iterable[str] is + # wider than Sequence[str]. + def test(self, t: Sequence[int]) -> Iterable[str]: ... + + +class GeneralizedReturn2(A): + # This should generate an error because list[int] is + # incompatible with Sequence[str]. + def test(self, t: Sequence[int]) -> list[int]: ... 
+ + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") + + +class Base1: + def submit(self, fn: Callable[..., _T1], *args: Any, **kwargs: Any) -> list[_T1]: + return [] + + +class Base2(Base1): + def submit(self, fn: Callable[..., _T2], *args: Any, **kwargs: Any) -> list[_T2]: + return [] + + +class Foo: + pass + + +_T2A = TypeVar("_T2A", bound=Foo) +_T2B = TypeVar("_T2B", bound=Foo) + + +class ClassA(Generic[_T2A]): + def func1(self) -> _T2A | None: + return None + + @property + def prop1(self) -> _T2A | None: + return None + + @property + def prop2(self) -> _T2A | None: + return None + + @prop2.setter + def prop2(self, val: _T2A): + pass + + @prop2.deleter + def prop2(self): + pass + + @property + def prop3(self) -> _T2A | None: + return None + + @prop3.setter + def prop3(self, val: _T2A): + pass + + @property + def prop4(self) -> _T2A | None: + return None + + @prop4.deleter + def prop4(self): + pass + + @property + def prop5(self) -> int: + return 3 + + @property + def prop6(self) -> int: + return 3 + + @prop6.setter + def prop6(self, x: int) -> None: + pass + + +class ClassB(ClassA[_T2B]): + # This should generate an error because a variable + # cannot override a property. + prop1: _T2B + + def func1(self) -> _T2B | None: + return None + + @property + def prop2(self) -> _T2B: + return self.prop1 + + @prop2.setter + def prop2(self, val: _T2B): + pass + + @prop2.deleter + def prop2(self): + pass + + # This should generate an error because it is missing + # a setter (fset method). + @property + def prop3(self) -> _T2B | None: + return None + + # This should generate an error because it is missing + # a deleter (fdel method). + @property + def prop4(self) -> _T2B | None: + return None + + # This should generate an error because prop4's getter + # method isn't compatible with base class. 
+ @property + def prop5(self) -> str: + return "hi" + + def func6(self): + self.prop6 = 1 + + +class Base3: + def case(self, value: Any) -> Iterable[Any]: + return [] + + +class Derived3(Base3): + @overload + def case(self, value: int) -> Iterable[int]: ... + + @overload + def case(self, value: float) -> Iterable[float]: ... + + def case(self, value: Any) -> Iterable[Any]: + return [] + + +class Base4: + def a(self) -> int: ... + + +class Base5: + def a(self) -> int: ... + + +class C(Base4, Base5): + # This should generate two error if reportIncompatibleMethodOverride + # is enabled. + def a(self) -> float: ... + + +class MyObject(TypedDict): + values: list[str] + + +class Base6(Generic["T"]): + def method1(self, v: int) -> None: ... + + def method2(self, v: T) -> None: ... + + def method3(self, v: T) -> None: ... + + def method4(self, v: S) -> S: ... + + def method5(self, v: S) -> S: ... + + +class Derived6(Base6[int], Generic["T"]): + # This should generate an error. + def method1(self, v: T): ... + + # This should generate an error. + def method2(self, v: T) -> None: ... + + def method3(self, v: int) -> None: ... + + def method4(self, v: T) -> T: ... + + def method5(self, v: S) -> S: ... + + +class Base7(Generic[T]): + def method1(self, x: T) -> T: + return x + + +class Derived7_1(Base7[T]): + # This should generate an error. + def method1(self, x: S) -> S: + return x + + +class Derived7_2(Base7[int]): + def method1(self, x: U) -> U: + return x + + +class Base8[T]: + def method1(self, x: T) -> T: ... + + +class Derived8[T](Base8[T]): + # This should generate an error. + def method1[U: str](self, x: U) -> U: ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/methodOverride2.py b/python-parser/packages/pyright-internal/src/tests/samples/methodOverride2.py new file mode 100644 index 00000000..a557a44a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/methodOverride2.py @@ -0,0 +1,94 @@ +# This sample tests the reportIncompatibleMethodOverride +# diagnostic check. + + +from typing import Any, Generic, ParamSpec, Self, TypeVar + + +class Base1: + def f1(self, *, kwarg0: int) -> None: ... + + def f2(self, *, kwarg0: int) -> None: ... + + def f3(self, *, kwarg0: int) -> None: ... + + def f4(self, *, kwarg0: int) -> None: ... + + def g1(self, a: int, /, b: str, *, kwarg0: int) -> None: ... + + def g2(self, a: int, /, b: str, *, kwarg0: int) -> None: ... + + def g3(self, a: int, /, b: str, *, kwarg0: int) -> None: ... + + def g4(self, a: int, /, b: str, *, kwarg0: int) -> None: ... + + def g5(self, a: int, /, b: str, *, kwarg0: int) -> None: ... + + def g6(self, a: int, /, b: str, *, kwarg0: int) -> None: ... + + def h1(self, a: int, *args: int) -> None: ... + + +class Derived1(Base1): + def f1(self, arg0: int = 0, *, kwarg0: int, kwarg1: int = 0) -> None: ... + + # This should generate an error because of a positional parameter mismatch. + def f2(self, arg0: int, *, kwarg0: int, kwarg1: int = 0) -> None: ... + + # This should generate an error because of a missing kwarg1. + def f3(self, arg0: int = 0, *, kwarg0: int, kwarg1: int) -> None: ... + + # This should generate an error because kwarg0 is the wrong type. + def f4(self, arg0: int = 0, *kwarg0: str) -> None: ... + + def g1(self, xxx: int, /, b: str, *, kwarg0: int) -> None: ... + + def g2(self, __a: int, b: str, *, kwarg0: int) -> None: ... + + # This should generate an error because of a name mismatch between b and c. + def g3(self, __a: int, c: str, *, kwarg0: int) -> None: ... + + # This should generate an error because of a type mismatch for b. 
+ def g4(self, __a: int, b: int, *, kwarg0: int) -> None: ... + + def g5(self, __a: int, b: str = "hi", *, kwarg0: int) -> None: ... + + def g6(self, __a: int, b: str, c: str = "hi", *, kwarg0: int) -> None: ... + + +P = ParamSpec("P") +R = TypeVar("R") + + +class Base2(Generic[P, R]): + def method1(self, *args: P.args, **kwargs: P.kwargs) -> R: ... + + def method2(self, *args: P.args, **kwargs: P.kwargs) -> R: ... + + +class Derived2(Base2[P, R]): + def method1(self, *args: P.args, **kwargs: P.kwargs) -> R: ... + + def method2(self, *args: Any, **kwargs: Any) -> R: ... + + +T = TypeVar("T") + + +class Base3: + def method1(self, x: Self) -> Self: ... + + def method2(self, x: Self) -> Self: ... + + def method3(self, x: Self) -> Self: ... + + +class Derived3(Generic[T], Base3): + # This should generate an error. + def method1(self, x: "Derived3[T]") -> "Derived3[T]": ... + + # This should generate an error. + def method2(self, x: "Derived3[int]") -> "Derived3[int]": ... + + # This should generate an error. + def method3(self, x: Self) -> Self: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/methodOverride3.py b/python-parser/packages/pyright-internal/src/tests/samples/methodOverride3.py new file mode 100644 index 00000000..fa809c66 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/methodOverride3.py @@ -0,0 +1,199 @@ +# This sample tests incompatible method overrides for multiple inheritance. +# This functionality is controlled by the reportIncompatibleMethodOverride +# diagnostic rule. + + +from typing import Generic, Iterable, ParamSpec, TypeVar, overload + + +class A1: + def func1(self, a: int) -> str: ... + + +class A2: + def func1(self, a: int, b: int = 3) -> str: ... + + +# This should generate an error because func1 is incompatible. +class ASub(A1, A2): ... + + +class B1: + def func1(self) -> int: ... + + +class B2: + def func1(self) -> float: ... + + +class BSub(B1, B2): ... 
+ + +class C1: + def func1(self) -> float: ... + + +class C2: + def func1(self) -> int: ... + + +# This should generate an error because func1 is incompatible. +class CSub(C1, C2): ... + + +class D1: + def func1(self, a: int) -> None: ... + + +class D2: + def func1(self, b: int) -> None: ... + + +# This should generate an error because func1 is incompatible. +class DSub(D1, D2): ... + + +_T_E = TypeVar("_T_E") + + +class E1(Generic[_T_E]): + def func1(self, a: _T_E) -> None: ... + + +class E2(Generic[_T_E]): + def func1(self, a: _T_E) -> None: ... + + +class ESub(E1[int], E2[int]): ... + + +_T_F = TypeVar("_T_F") + + +class F1(Generic[_T_F]): + def do_stuff(self) -> Iterable[_T_F]: ... + + +class F2(F1[_T_F]): + def do_stuff(self) -> Iterable[_T_F]: ... + + +class F3(F1[_T_F]): ... + + +class FSub1(F3[int], F2[int]): + pass + + +class FSub2(F3[int], F1[int]): + pass + + +class FSub3(F2[int], F1[int]): + pass + + +_P = ParamSpec("_P") +_R = TypeVar("_R") + + +class G1(Generic[_P, _R]): + def f(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... + + def g(self) -> _R: ... + + +class G2(G1[_P, _R]): + # This should generate an error because f is missing ParamSpec parameters. + def f(self) -> _R: ... + + def g(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... + + +class G3(G1[[], _R]): + def f(self) -> _R: ... + + def g(self) -> _R: ... + + +class G4(G1[[int, int], str]): + def f(self, a: int, b: int, /) -> str: ... + + def g(self) -> str: ... + + +class G5(G1[[], str]): + # This should generate an error because the specialized + # signature of f in the base class has no positional parameters. + def f(self, a: int, b: int) -> str: ... + + def g(self) -> str: ... 
+ + +class H1: + @property + def prop1(self) -> int: + return 3 + + @property + def prop2(self) -> int: + return 3 + + @prop2.setter + def prop2(self, val: int) -> None: + pass + + @property + def prop3(self) -> int: + return 3 + + @prop3.setter + def prop3(self, val: int) -> None: + pass + + +class H2: + @property + def prop1(self) -> str: + return "" + + @property + def prop2(self) -> int: + return 3 + + @property + def prop3(self) -> int: + return 3 + + @prop3.setter + def prop3(self, val: str) -> None: + pass + + +# This should generate three errors: prop1, prop2 and prop3. +class H(H2, H1): ... + + +class I1: + @overload + def func1(self, x: int) -> int: ... + + @overload + def func1(self, x: str) -> str: ... + + def func1(self, x: int | str) -> int | str: + return x + + +class I2: + @overload + def func1(self, x: int) -> int: ... + + @overload + def func1(self, x: str) -> str: ... + + def func1(self, x: int | str) -> int | str: + return x + + +class I(I1, I2): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/methodOverride4.py b/python-parser/packages/pyright-internal/src/tests/samples/methodOverride4.py new file mode 100644 index 00000000..1902c225 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/methodOverride4.py @@ -0,0 +1,53 @@ +# This sample tests the handling of methods that combine TypeVars +# from a class and local method TypeVars in an override. 
+ +# pyright: strict + +from abc import abstractmethod +from typing import Callable, Generic, TypeVar + +_TSource = TypeVar("_TSource") +_TResult = TypeVar("_TResult") + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") + + +class BaseA(Generic[_TSource]): + @abstractmethod + def method1( + self, mapper: Callable[[_TSource, _T1], _TResult], other: "BaseA[_T1]" + ) -> "BaseA[_TResult]": + raise NotImplementedError + + +class SubclassA1(BaseA[_TSource]): + def method1( + self, mapper: Callable[[_TSource, _T2], _TResult], other: BaseA[_T2] + ) -> BaseA[_TResult]: + return SubclassA2() + + +class SubclassA2(BaseA[_TSource]): + def method1( + self, mapper: Callable[[_TSource, _T3], _TResult], other: BaseA[_T3] + ) -> BaseA[_TResult]: + return SubclassA2() + + +class BaseB: + def f(self, v: str) -> str: ... + + +class SubclassB1(BaseB): + def f[T](self, v: T) -> T: ... + + +class BaseC: + def method1[T: BaseC](self, x: T) -> T: ... + + +class SubclassC(BaseC): + # This should generate an error because of the upper bound. + def method1[T: SubclassC](self, x: T) -> T: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/methodOverride5.py b/python-parser/packages/pyright-internal/src/tests/samples/methodOverride5.py new file mode 100644 index 00000000..e401c51b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/methodOverride5.py @@ -0,0 +1,21 @@ +# This sample tests an edge case where a base method uses an unpacked +# tuple or a specialized TypeVarTuple and is overridden by a method +# that supplies specific arguments. + +# pyright: strict + +from typing import Generic, TypeVarTuple + +Ts = TypeVarTuple("Ts") + + +class Parent(Generic[*Ts]): + def method_1(self, *args: *Ts) -> None: ... + + def method_2(self, *args: *tuple[*Ts]) -> None: ... + + +class Child(Parent[int]): + def method_1(self, arg1: int) -> None: ... + + def method_2(self, arg1: int) -> None: ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/methodOverride6.py b/python-parser/packages/pyright-internal/src/tests/samples/methodOverride6.py new file mode 100644 index 00000000..20b2bdbf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/methodOverride6.py @@ -0,0 +1,240 @@ +# This sample tests the case where a method is overriding an overloaded method. + +from typing import Any, Generic, Literal, TypeVar, overload + +_T = TypeVar("_T") + + +class Parent1(Generic[_T]): + @overload + def m1(self, x: Literal[True]) -> int: ... + + @overload + def m1(self, x: Literal[False]) -> float: ... + + @overload + def m1(self, x: _T) -> _T: ... + + def m1(self, x: bool | _T) -> int | float | _T: + return x + + +class Child1_1(Parent1[str]): + @overload + def m1(self, x: bool) -> int: ... + + @overload + def m1(self, x: str) -> str: ... + + def m1(self, x: bool | str) -> int | str: + return x + + +class Child1_2(Parent1[str]): + def m1(self, x: Any) -> Any: + return x + + +class Child1_3(Parent1[str]): + @overload + def m1(self, x: bool) -> int: ... + + @overload + def m1(self, x: str) -> str: ... + + def m1(self, x: bool | str) -> int | float | str: + return x + + +class Child1_4(Parent1[str]): + @overload + def m1(self, x: str) -> str: ... + + @overload + def m1(self, x: bool) -> int: ... + + # This should generate an error because the overloads are + # in the wrong order. + def m1(self, x: bool | str) -> int | float | str: + return x + + +class Child1_5(Parent1[str]): + @overload + def m1(self, x: Literal[True]) -> int: ... + + @overload + def m1(self, x: Literal[False]) -> float: ... + + @overload + def m1(self, x: bytes) -> bytes: ... + + # This should generate an error because the overloads are + # in the wrong order. + def m1(self, x: bool | bytes) -> int | float | bytes: + return x + + +class Child1_6(Parent1[bytes]): + @overload + def m1(self, x: Literal[True]) -> int: ... 
+ + @overload + def m1(self, x: Literal[False]) -> float: ... + + @overload + def m1(self, x: bytes) -> bytes: ... + + def m1(self, x: bool | bytes) -> int | float | bytes: + return x + + +class Parent2(Generic[_T]): + @overload + def method1(self: "Parent2[int]", x: list[int]) -> list[int]: ... + + @overload + def method1(self, x: str) -> dict[str, str]: ... + + def method1(self, x: Any) -> Any: ... + + @overload + def method2(self: "Parent2[int]", x: list[int]) -> list[int]: ... + + @overload + def method2(self, x: str) -> dict[str, str]: ... + + @overload + def method2(self, x: int) -> int: ... + + def method2(self, x: Any) -> Any: ... + + @overload + @classmethod + def method3(cls: "type[Parent2[int]]", x: list[int]) -> list[int]: ... + + @overload + @classmethod + def method3(cls, x: str) -> dict[str, str]: ... + + @classmethod + def method3(cls, x: Any) -> Any: ... + + @overload + @classmethod + def method4(cls: "type[Parent2[int]]", x: list[int]) -> list[int]: ... + + @overload + @classmethod + def method4(cls, x: str) -> dict[str, str]: ... + + @overload + @classmethod + def method4(cls, x: int) -> int: ... + + @classmethod + def method4(cls, x: Any) -> Any: ... + + +class Child2_1(Parent2[str]): + def method1(self, x: str) -> dict[str, str]: ... + + +class Child2_2(Parent2[str]): + @overload + def method2(self, x: str) -> dict[str, str]: ... + + @overload + def method2(self, x: int) -> int: ... + + def method2(self, x: Any) -> Any: ... + + +class Child2_3(Parent2[str]): + @classmethod + def method3(cls, x: str) -> dict[str, str]: ... + + +class Child2_4(Parent2[str]): + @overload + @classmethod + def method4(cls, x: str) -> dict[str, str]: ... + + @overload + @classmethod + def method4(cls, x: int) -> int: ... + + @classmethod + def method4(cls, x: Any) -> Any: ... + + +class Parent3: + @overload + def method(self, x: int) -> int: ... + + @overload + def method(self, x: str) -> str: ... 
+ + def method(self, x: int | str) -> int | str: + return x + + +class Child3_1(Parent3): + @overload + def method(self, x: int) -> int: ... + + @overload + def method(self, x: str) -> str: ... + + @overload + def method(self, x: list[float]) -> list[float]: ... + + def method(self, x: int | str | list[float]) -> int | str | list[float]: + return x + + +class Parent4(Generic[_T]): + @overload + def m1(self: "Parent4[int]", a: None) -> float: ... + + @overload + def m1(self: "Parent4[int]", a: int) -> float: ... + + @overload + def m1(self: "Parent4[float]", a: None) -> str: ... + + def m1(self, a: int | None = None) -> float | str: + raise NotImplementedError + + +class Child4_1(Parent4[int]): + @overload + def function(self: Parent4[int], a: None) -> float: ... + + @overload + def function(self: Parent4[int], a: int) -> float: ... + + def function(self, a: int | None = None) -> float: + return 0.0 + + +class Parent5: + @overload + def m1(self, x: int) -> int: ... + + @overload + def m1(self, x: str) -> str: ... + + def m1(self, x: int | str) -> int | str: ... + + +class Parent5_1(Parent5): + @overload + def m1(self, x: bytes) -> bytes: ... + + @overload + def m1(self, x: str) -> str: ... + + # This should generate an error because the overloads are + # incompatible + def m1(self, x: bytes | str) -> bytes | str: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/methods1.py b/python-parser/packages/pyright-internal/src/tests/samples/methods1.py new file mode 100644 index 00000000..4c24e9ba --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/methods1.py @@ -0,0 +1,115 @@ +# This sample tests that instance methods, regardless of how they're +# defined or decorated, act like instance methods. 
+ +from typing import Any, Callable, ClassVar, Concatenate, Generic, ParamSpec, TypeVar + +P = ParamSpec("P") +R = TypeVar("R") + + +def func1(self) -> None: + print("func1", f"{self=}") + + +def deco1(x: Callable[P, R]) -> Callable[P, R]: + return x + + +def deco2( + func: Callable[P, Any], +) -> Callable[[Callable[..., Any]], Callable[Concatenate["ClassA", P], None]]: + return lambda f: f # type: ignore + + +class Deco3(Generic[P, R]): + def __init__(self, func: Callable[P, R]): + self.func = func + + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R: + print("Deco3.__call__:", f"{self=}") + return self.func(*args, **kwargs) + + +class Deco4: + def __init__(self, func: Callable[..., Any]): + self.func = func + + def __call__(self) -> None: + print("Deco4.__call__:", f"{self=}") + + +class CallableA: + def __call__(self) -> None: + print("CallableA.__call__:", f"{self=}") + + +class DummyClass: + def __init__(self, a: str, b: float) -> None: + pass + + +def dummyFunc(a: str, b: float) -> None: + pass + + +class ClassA: + a: ClassVar[Callable[[Any], None]] = lambda self: None + + b1 = lambda self: None + b2: ClassVar = lambda self: None + + c1 = func1 + c2: ClassVar = func1 + + d1: CallableA = CallableA() + d2: ClassVar[CallableA] = CallableA() + + e1 = deco1(func1) + e2: ClassVar = deco1(func1) + + @deco1 + def f1(self) -> None: + print("f1:", f"{self=}") + + @Deco3 + def g1(self) -> None: + print("g1:", f"{self=}") + + @Deco4 + def h1(self) -> None: + print("h1:", f"{self=}") + + @deco2(DummyClass) + def i1(self, a: str, b: float) -> None: + print("i1:", f"{self=}") + + @deco2(dummyFunc) + def j1(self, a: str, b: float) -> None: + print("j1:", f"{self=}") + + +a = ClassA() + +a.a() + +a.b1() +a.b2() + +a.c1() +a.c2() + +a.d1() +a.d2() + +a.e1() +a.e2() + +a.f1() + +a.g1(a) + +a.h1() + +a.i1("", 0) + +a.j1("", 0) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/missingSuper1.py 
b/python-parser/packages/pyright-internal/src/tests/samples/missingSuper1.py new file mode 100644 index 00000000..cd45717d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/missingSuper1.py @@ -0,0 +1,73 @@ +# This sample tests the reportMissingSuperCall diagnostic check. + +from typing import final + + +class ParentA: + pass + + +class ParentB: + # This should generate an error because it's missing a super().__init__ call. + def __init__(self): + pass + + +class ParentBPrime(ParentB): + pass + + +class ParentC: + pass + + +@final +class ParentD: + def __init__(self): + pass + + def __init_subclass__(cls) -> None: + pass + + +class ChildA(ParentA, ParentB): + # This should generate an error. + def __init__(self): + pass + + # This should generate an error. + def __init_subclass__(cls) -> None: + pass + + +class ChildB(ParentA, ParentB): + def __init__(self): + super().__init__() + + +class ChildC1(ParentA, ParentB): + def __init__(self): + ParentB.__init__(self) + + +class ChildC2(ParentA, ParentB): + def __init__(self): + ParentA.__init__(self) + ParentB.__init__(self) + + +class ChildCPrime(ParentA, ParentBPrime, ParentC): + def __init__(self): + super(ParentBPrime).__init__() + + +class ChildD(ParentC): + # This should generate an error. + def __init__(self): + pass + + +@final +class ChildE(ParentC): + def __init__(self): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/missingTypeArg1.py b/python-parser/packages/pyright-internal/src/tests/samples/missingTypeArg1.py new file mode 100644 index 00000000..685a7f16 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/missingTypeArg1.py @@ -0,0 +1,59 @@ +# This sample tests the reportMissingTypeArgument diagnostic rule. + +import collections +from typing import Generic, TypeVar + + +_T1 = TypeVar("_T1") + + +class Class1(Generic[_T1]): + pass + + +# This should generate an error when reportMissingTypeArgument is enabled. 
+class Class2(Class1): + pass + + +# This should generate an error when reportMissingTypeArgument is enabled. +_T2 = TypeVar("_T2", bound=Class1) + + +# This should generate an error when reportMissingTypeArgument is enabled. +var1: Class1 | None = None + + +GenericTypeAlias = Class1[_T1] | int + + +# This should generate an error when reportMissingTypeArgument is enabled. +var2: GenericTypeAlias | None = None + + +class Class3(Generic[_T1, _T2]): + pass + + +# This should generate an error regardless of whether reportMissingTypeArgument +# is enabled because this class requires two type arguments and this will +# generate a runtime exception. +a = Class3[int] + + +# This should generate an error when reportMissingTypeArgument is enabled. +def func1() -> collections.deque: ... + + +def func2(obj: object): + if isinstance(obj, Class1): + pass + if isinstance(obj, Class1 | Class2): + pass + + +class ClassA: + @staticmethod + def method1(data: int | str | dict[str, str]): + if isinstance(data, dict | str): + return data diff --git a/python-parser/packages/pyright-internal/src/tests/samples/module1.py b/python-parser/packages/pyright-internal/src/tests/samples/module1.py new file mode 100644 index 00000000..d2ecb2ac --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/module1.py @@ -0,0 +1,13 @@ +# This sample tests that a module is treated as though it's derived +# from 'object' from the perspective of the type checker. + +import typing + + +def func1(a: object): + pass + + +func1(typing) + +dir(typing) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/module2.py b/python-parser/packages/pyright-internal/src/tests/samples/module2.py new file mode 100644 index 00000000..2c87a96c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/module2.py @@ -0,0 +1,40 @@ +# This sample tests that a module is assignable +# to the built-in type "ModuleType". 
+ +import typing +import importlib +from typing import Protocol +from types import ModuleType + +importlib.reload(typing) + + +def func1(a: ModuleType): + pass + + +func1(typing) + + +v1 = [importlib] +reveal_type(v1, expected_text="list[ModuleType]") + +v2 = {importlib: typing} +reveal_type(v2, expected_text="dict[ModuleType, ModuleType]") + +v3 = (importlib, typing) +reveal_type(v3, expected_text="tuple[ModuleType, ModuleType]") + + +class ModuleProto(Protocol): + def reload(self, module: ModuleType) -> ModuleType: ... + + +v4: ModuleProto = importlib +reveal_type(v4, expected_text='Module("importlib")') + +v5: tuple[ModuleProto] = (importlib,) +reveal_type(v5, expected_text='tuple[Module("importlib")]') + +v6: list[ModuleProto] = [importlib] +reveal_type(v6, expected_text="list[ModuleProto]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/module3.py b/python-parser/packages/pyright-internal/src/tests/samples/module3.py new file mode 100644 index 00000000..818d8d0c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/module3.py @@ -0,0 +1,15 @@ +# This sample tests accesses to standard attributes of a module. 
+ +import datetime + +reveal_type(datetime.__name__, expected_text="str") +reveal_type(datetime.__loader__, expected_text="Any") +reveal_type(datetime.__package__, expected_text="str | None") +reveal_type(datetime.__spec__, expected_text="Any") +reveal_type(datetime.__path__, expected_text="MutableSequence[str]") +reveal_type(datetime.__file__, expected_text="str") +reveal_type(datetime.__cached__, expected_text="str") +reveal_type(datetime.__dict__, expected_text="dict[str, Any]") +reveal_type(datetime.__annotations__, expected_text="dict[str, Any]") +reveal_type(datetime.__builtins__, expected_text="Any") +reveal_type(datetime.__doc__, expected_text="str | None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/mro1.py b/python-parser/packages/pyright-internal/src/tests/samples/mro1.py new file mode 100644 index 00000000..8d23bc78 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/mro1.py @@ -0,0 +1,16 @@ +# This sample tests the type checker's ability to +# detect a bad MRO. + + +class A: + pass + + +class B(A): + pass + + +# This should generate an error because a valid +# MRO linearization isn't possible. +class C(A, B): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/mro2.py b/python-parser/packages/pyright-internal/src/tests/samples/mro2.py new file mode 100644 index 00000000..a60ece37 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/mro2.py @@ -0,0 +1,47 @@ +# This sample tests the type checker's handling +# of proper method resolution order (MRO). 
+ +# pyright: reportIncompatibleMethodOverride=false + + +class A: + def foo(self, v1: str): + return None + + def bar(self): + return None + + +class B(A): + def foo(self, v1: float): + return None + + +class C(A): + def foo(self, v1: A): + return None + + def bar(self, v1: float): + return None + + +class D(B, C): + pass + + +a = A() +a.foo("hello") + +b = B() +b.foo(3) + +c = C() +c.foo(a) + +d = D() +d.foo(3) + +# This should generate an error because +# the bar method from class C should be +# selected before the bar method from A. +d.bar() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/mro3.py b/python-parser/packages/pyright-internal/src/tests/samples/mro3.py new file mode 100644 index 00000000..69425684 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/mro3.py @@ -0,0 +1,30 @@ +# This sample tests a more complicated class hierarchy to ensure +# that the MRO calculation does not detect a conflict. + + +class Object: + pass + + +class QualifiedObject(Object): + pass + + +class DerivableObject(QualifiedObject): + pass + + +class SubclassableObject(Object): + pass + + +class InheritingObject(DerivableObject, SubclassableObject): + pass + + +class Source(QualifiedObject, SubclassableObject): + pass + + +class ObjectType(InheritingObject, Source): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/mro4.py b/python-parser/packages/pyright-internal/src/tests/samples/mro4.py new file mode 100644 index 00000000..224dc720 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/mro4.py @@ -0,0 +1,25 @@ +# This sample tests the special-case handling of Generic in a class +# hierarchy. The Generic class implementation in CPython has a method +# called __mro_entries__ that elides the Generic base class in cases +# where one or more subsequent base classes are specialized generic classes. 
+ +from typing import Generic, TypeVar + +T1 = TypeVar("T1") +T2 = TypeVar("T2") + + +class Foo1(Generic[T1]): ... + + +class Foo2(Generic[T1]): ... + + +class Bar1(Generic[T1, T2], Foo1[T1], Foo2[T2]): ... + + +class Bar2(Generic[T1, T2], Foo1, Foo2[T2]): ... + + +# This should generate an error because a consistent MRO cannot be found. +class Bar3(Generic[T1, T2], Foo1, Foo2): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/nameBinding1.py b/python-parser/packages/pyright-internal/src/tests/samples/nameBinding1.py new file mode 100644 index 00000000..cb93aa4c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/nameBinding1.py @@ -0,0 +1,48 @@ +# Tests the type checker's handling of global and nonlocal keywords. + +global a + +a = 3 +f = 3 + +# This should generate an error because nonlocal bindings aren't +# allowed at the module level. +nonlocal b + + +def func1(): + global a + + +def func2(): + global c + + +def func3(): + a = 3 + # This should generate an error because a is assigned locally + # before its name binding is declared. + global a + + d = 3 + h = 3 + + def func3_1(): + nonlocal d + + h = 5 + + # This should generate an error because h is assigned + # locally before its name binding is declared. + nonlocal h + + global e + + # This should generate an error because f is not available + # in a nonlocal scope. + nonlocal f + + nonlocal g + + e = 4 + g = 10 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/nameBinding2.py b/python-parser/packages/pyright-internal/src/tests/samples/nameBinding2.py new file mode 100644 index 00000000..2d3e2af9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/nameBinding2.py @@ -0,0 +1,9 @@ +# This test covers the case where a nonlocal reference +# is made to a symbol that doesn't exist in an outer +# scope but is then assigned to. 
+ + +class Test: + def test(self): + nonlocal missing_symbol + missing_symbol = 4 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/nameBinding3.py b/python-parser/packages/pyright-internal/src/tests/samples/nameBinding3.py new file mode 100644 index 00000000..ef284456 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/nameBinding3.py @@ -0,0 +1,36 @@ +# This sample tests nonlocal and global bindings +# with declared types. + +foo: int = 23 +baz: int = 23 + + +def func(): + foo: str = "Hi" + baz: str = "Hi" + + def func_1(): + global foo + + # This should generate an error because + # the global "foo" is typed as a str. + foo = "25" + + global bar + + bar: str = "Hi" + + nonlocal baz + + # This should generate an error because the + # nonlocal "baz" is typed as str. + baz = 25 + + func_1() + + +func() + +# This should generate an error because the +# type of "bar" is defined in func_1. +bar = 24 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/nameBinding4.py b/python-parser/packages/pyright-internal/src/tests/samples/nameBinding4.py new file mode 100644 index 00000000..2aeb2d71 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/nameBinding4.py @@ -0,0 +1,10 @@ +# This sample tests the case where a symbol is imported within an +# inner scope but the target symbol has global binding. + + +def func1(): + global Enum + from enum import Enum + + +reveal_type(Enum, expected_text="type[Enum]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/nameBinding5.py b/python-parser/packages/pyright-internal/src/tests/samples/nameBinding5.py new file mode 100644 index 00000000..98f534b2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/nameBinding5.py @@ -0,0 +1,53 @@ +# This sample tests that unassigned variables within a class body +# are resolved to the global scope. 
+ +a = 0 +b = 0 +c = 0 +d = 0 + + +def func_a() -> None: + a = "a" + + class A: + reveal_type(a, expected_text="Literal['a']") + + +def func_b() -> None: + b = "a" + + class A: + reveal_type(b, expected_text="int") + b = "b" + reveal_type(b, expected_text="str") + + reveal_type(b, expected_text="Literal['a']") + + +def func_c() -> None: + c = "a" + + class A: + nonlocal c + reveal_type(c, expected_text="Literal['a']") + c = 0 + + reveal_type(c, expected_text="int") + + +def func_d() -> None: + d = "a" + + class A: + global d + reveal_type(d, expected_text="int") + d = "b" + + reveal_type(d, expected_text="Literal['a']") + + +reveal_type(a, expected_text="Literal[0]") +reveal_type(b, expected_text="Literal[0]") +reveal_type(c, expected_text="Literal[0]") +reveal_type(d, expected_text="Literal[0]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/namedTuple1.py b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple1.py new file mode 100644 index 00000000..df601a7e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple1.py @@ -0,0 +1,110 @@ +# This sample tests the type checker's handling of named tuples. + +from collections import namedtuple +from typing import Final, NamedTuple + +NamedTuple1 = namedtuple("NamedTuple1", "field1 field2") +NamedTuple1(1, 2) +NamedTuple1(field2=1, field1=2) + +# This should generate an error because there +# is no field called field3. +NamedTuple1(field1=1, field2=3, field3=2) + +# This should generate an error because there +# should be two parameters. +NamedTuple1(1) + +# This should generate an error because there +# should be two parameters. +NamedTuple1(1, 2, 3) + +s1: tuple[float, float] = NamedTuple1(3, 4) + +# This should generate an error because there are not enough entries. 
+s2: tuple[float, float, float] = NamedTuple1(3, 4) + +NamedTuple2 = namedtuple("NamedTuple2", "field1, field2") +NamedTuple2.__new__.__defaults__ = ([],) +NamedTuple2() +NamedTuple2(1) + +NamedTuple2(field1=1, field2=3) + +# This should generate an error because there +# should be two or fewer parameters. +NamedTuple2(1, 2, 3) + +Field1: Final = "field1" +Field2: Final = "field2" + +NamedTuple3 = NamedTuple( + "NamedTuple3", + [ + (Field1, "str"), # 'str' should be treated as forward reference + (Field2, int), + ], +) +NamedTuple3("hello", 2) + +# This should generate an error because of a +# type mismatch. +NamedTuple3("1", "2") + +# This should generate an error because of a +# type mismatch. +NamedTuple3(field2=1, field1=2) + +t1: tuple[str, float] = NamedTuple3("hello", 2) + +# This should generate an error because the types are incompatible. +t2: tuple[float, float] = NamedTuple3("hello", 2) + +# This should generate an error because the lengths are incompatible. +t3: tuple[str, float, str] = NamedTuple3("hello", 2) + +t4: NamedTuple = NamedTuple3("hello", 2) + +NamedTuple4 = namedtuple("NamedTuple4", "field1 field2 field3", defaults=(1, 2)) + +# This should generate an error (too few params) +NamedTuple4() +NamedTuple4(1) +NamedTuple4(1, 2, 3) +# This should generate an error (too many params) +NamedTuple4(1, 2, 3, 4) + +NamedTuple5 = namedtuple( + "NamedTuple5", "field1 field2 field3", defaults=(1, 2, 3, 4, 5) +) +NamedTuple5() + +NamedTuple6 = namedtuple("NamedTuple6", "field1 field2 field3", defaults=[1, 2]) +NamedTuple6() + + +def func1(dyn_str: str): + NamedTuple7 = namedtuple("NamedTuple7", dyn_str) + + n = NamedTuple7() + a, b = n + reveal_type(a, expected_text="Any") + reveal_type(b, expected_text="Any") + + +def func2(): + NamedTuple8 = namedtuple("NamedTuple8", ("a", "b", "c")) + n1 = NamedTuple8(a=1, b=2, c=3) + + a, b, c = n1 + reveal_type(a, expected_text="Unknown") + reveal_type(b, expected_text="Unknown") + reveal_type(c, 
expected_text="Unknown") + + # This should generate an error. + n2 = NamedTuple8(a=1, b=2) + + +# This should generate an error because NamedTuple isn't allowed in isinstance. +if isinstance(1, NamedTuple): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/namedTuple10.py b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple10.py new file mode 100644 index 00000000..a402d6b6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple10.py @@ -0,0 +1,22 @@ +# This sample tests that a subclass of a named tuple doesn't override +# a read-only entry in the parent class. + +from typing import NamedTuple + + +class Point(NamedTuple): + x: int + y: int + + def f(self): + pass + + +class BadPointWithName(Point): + name: str + + # This should generate an error. + x: int + + def f(self): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/namedTuple11.py b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple11.py new file mode 100644 index 00000000..1a5faa34 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple11.py @@ -0,0 +1,22 @@ +# This sample tests that named tuple field names beginning with +# an underscore is flagged as an error. + +from collections import namedtuple +from typing import NamedTuple + +# This should generate an error because a field name starting with an +# underscore isn't allowed. +NT1 = namedtuple("NT1", ["_oops"]) + +# This should generate an error because a field name starting with an +# underscore isn't allowed. +NT2 = namedtuple("NT2", "a, b, _oops") + + +class NT3(NamedTuple): + # This should generate an error because a field name starting with an + # underscore isn't allowed. 
+ _oops: int + + +NT4 = namedtuple("NT4", "a, b, _c", rename=True) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/namedTuple2.py b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple2.py new file mode 100644 index 00000000..b5b06cda --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple2.py @@ -0,0 +1,56 @@ +# This test validates that NamedTuple instances can be destructured +# and indexed to get to their constituent element types. + +from typing import NamedTuple + + +class MyDataClass(NamedTuple): + entry_1: str + entry_2: int + + +nt1 = MyDataClass("yes", 1) + +(a1, a2) = nt1 +a1_1: str = a1 +a2_1: int = a2 + +# These should generate an error because a1 and a2 are +# the wrong types. +a1_2: int = a1 +a2_2: str = a2 + + +b1 = nt1[0] +b2 = nt1[1] +b1_1: str = b1 +b2_1: int = b2 + +# These should generate an error because a1 and a2 are +# the wrong types. +b1_2: int = b1 +b2_2: str = b2 + + +MyNT = NamedTuple("MyNT", [("hi", int), ("bye", str)]) + +nt2 = MyNT(3, "yo") + +(c1, c2) = nt2 +c1_2: int = c1 +c2_2: str = c2 + +# These should generate an error because a1 and a2 are +# the wrong types. +c1_1: str = c1 +c2_1: int = c2 + +d1 = nt2[0] +d2 = nt2[1] +d1_2: int = d1 +d2_2: str = d2 + +# These should generate an error because a1 and a2 are +# the wrong types. +d1_1: str = d1 +d2_1: int = d2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/namedTuple3.py b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple3.py new file mode 100644 index 00000000..f5afd328 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple3.py @@ -0,0 +1,16 @@ +# This sample tests that an attempt to use multiple inheritance +# with a NamedTuple will result in an error. + +from typing import Generic, NamedTuple, TypeVar + + +# This should generate an error. 
+class A(NamedTuple, object): + x: int + + +T = TypeVar("T") + + +class B(NamedTuple, Generic[T]): + x: int diff --git a/python-parser/packages/pyright-internal/src/tests/samples/namedTuple4.py b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple4.py new file mode 100644 index 00000000..2f6c7a3d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple4.py @@ -0,0 +1,24 @@ +# This sample tests the case where a class derives from a named tuple. +# The synthesized __new__ method should be able to handle this. + +from collections import namedtuple +from typing import NamedTuple + + +Class1 = namedtuple("Class1", "name") + + +class Class2(Class1): + some_class_member = 1 + + +reveal_type(Class2(name="a"), expected_text="Class2") + +Class3 = NamedTuple("Class3", [("name", str)]) + + +class Class4(Class3): + some_class_member = 1 + + +reveal_type(Class4(name="a"), expected_text="Class4") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/namedTuple5.py b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple5.py new file mode 100644 index 00000000..a3d6ffb0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple5.py @@ -0,0 +1,15 @@ +# This sample tests the case where a NamedTuple object is referenced +# through a `self` parameter. 
+ +from typing import NamedTuple + + +class Fruit(NamedTuple): + name: str + cost: float + + def new_cost(self, new_cost: float): + my_name, my_cost = self + reveal_type(my_name, expected_text="str") + reveal_type(my_cost, expected_text="float") + return Fruit(my_name, new_cost) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/namedTuple6.py b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple6.py new file mode 100644 index 00000000..d30861f7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple6.py @@ -0,0 +1,39 @@ +# This sample tests that writes to named attributes within a named +# tuple class are flagged as errors. + +from collections import namedtuple +from typing import NamedTuple + + +class NT1(NamedTuple): + val1: str + val2: int + + +nt1 = NT1("x", 0) + +# This should generate an error. +nt1.val1 = "" + +# This should generate an error. +nt1[0] = "" + +# This should generate an error. +del nt1.val1 + +# This should generate an error. +del nt1[0] + +NT2 = NamedTuple("NT2", [("val1", str), ("val2", int)]) + +nt2 = NT2("x", 0) + +# This should generate an error. +nt2.val2 = 3 + +NT3 = namedtuple("NT3", ["val1", "val2"]) + +nt3 = NT3("x", 0) + +# This should generate an error. +nt3.val1 = "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/namedTuple7.py b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple7.py new file mode 100644 index 00000000..96337680 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple7.py @@ -0,0 +1,27 @@ +# This sample tests the handling of NamedTuple classes with generics, +# which is supported in Python 3.11 and newer. 
+ +from typing import Generic, NamedTuple, TypeVar + + +_T1 = TypeVar("_T1") + + +class NT1(NamedTuple, Generic[_T1]): + a: _T1 + b: int + c: list[_T1] + + +reveal_type(NT1(3, 4, []), expected_text="NT1[int]") +reveal_type(NT1(3.4, 4, [1, 2]), expected_text="NT1[float]") +reveal_type(NT1(3.4, 4, [2j]), expected_text="NT1[complex]") + + +class NT2(NT1[str]): ... + + +reveal_type(NT2("", 4, []), expected_text="NT2") + +# This should generate an error. +NT2(1, 4, []) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/namedTuple8.py b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple8.py new file mode 100644 index 00000000..8f59415e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple8.py @@ -0,0 +1,16 @@ +# This sample tests the case where a NamedTuple class is generic. + +from typing import AnyStr, Generic, NamedTuple + + +class GenericNT(NamedTuple, Generic[AnyStr]): + scheme: AnyStr + + +class SpecializedNT(GenericNT[str]): + def geturl(self) -> str: ... + + +def func(x: SpecializedNT): + reveal_type(x.__iter__, expected_text="() -> Iterator[str]") + reveal_type(list(x), expected_text="list[str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/namedTuple9.py b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple9.py new file mode 100644 index 00000000..fb18d1c2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/namedTuple9.py @@ -0,0 +1,24 @@ +# This sample tests the detection of keywords in a named tuple +# definition and support for the "rename" parameter. + + +from collections import namedtuple +from typing import NamedTuple + + +# This should generate an error because "def" is a keyword. +NT1 = namedtuple("NT1", ["abc", "def"]) + +# This should generate an error because "class" is a keyword. 
+NT2 = namedtuple("NT2", ["abc", "class"], rename=False) + +NT3 = namedtuple("NT3", ["abc", "def"], rename=True) + +v3 = NT3(abc=0, _1=0) + +# This should generate an error because "def" is a keyword. +NT4 = NamedTuple("NT4", [("abc", int), ("def", int)]) + + +# These are soft keywords, so they shouldn't generate an error. +NT5 = namedtuple("NT5", ["type", "match"]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/never1.py b/python-parser/packages/pyright-internal/src/tests/samples/never1.py new file mode 100644 index 00000000..76d7b387 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/never1.py @@ -0,0 +1,57 @@ +# This sample tests the handling of the "Never" type, +# ensuring that it's treated as the same as NoReturn. + +from typing import NoReturn, TypeVar, Generic +from typing_extensions import Never # pyright: ignore[reportMissingModuleSource] + +T = TypeVar("T") + + +class ClassA(Generic[T]): ... + + +def func1(val: ClassA[Never]): + # This should generate an error because + # the type parameter for ClassA is invariant. + x: ClassA[object] = val + + +def assert_never1(val: Never) -> NoReturn: + raise Exception("Should never get here") + + +def assert_never2(val: NoReturn) -> NoReturn: + raise Exception("Should never get here") + + +# This should generate an error because Never doesn't accept type arguments. +def assert_never3(val: Never[int]): ... + + +# This should generate an error because NoReturn doesn't accept type arguments. +def assert_never4(val: NoReturn[int]): ... + + +def func2(val: str | int) -> str: + if isinstance(val, (str, int)): + return "str or int" + else: + assert_never1(val) + + +def func3(val: str | int) -> str: + if isinstance(val, (str, int)): + return "str or int" + else: + assert_never2(val) + + +def func4(): + # This should generate an error because of the missing argument. 
+ assert_never1() + + +reveal_type(assert_never1, expected_text="(val: Never) -> NoReturn") + +# This should generate an error. +assert_never1(1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/never2.py b/python-parser/packages/pyright-internal/src/tests/samples/never2.py new file mode 100644 index 00000000..93437bdd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/never2.py @@ -0,0 +1,35 @@ +# This sample validates that Never is treated as a bottom type for +# covariant type arguments. + +from typing import Generic, Never, TypeVar + +U = TypeVar("U") + +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) +T = TypeVar("T") + + +class ClassA(Generic[T_co]): + pass + + +def func1(x: U) -> ClassA[U]: + return ClassA[Never]() + + +class ClassB(Generic[T]): + pass + + +def func2(x: U) -> ClassB[U]: + # This should generate an error because T is invariant. + return ClassB[Never]() + + +class ClassC(Generic[T_contra]): + def __init__(self, x: T_contra): ... + + +def func3(x: U) -> U | ClassC[Never]: + return ClassC(x) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/newType1.py b/python-parser/packages/pyright-internal/src/tests/samples/newType1.py new file mode 100644 index 00000000..b9f06913 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/newType1.py @@ -0,0 +1,93 @@ +# This sample tests the type handler's handling of the +# built-in NewType function. + +from abc import ABC, abstractmethod +from typing import Any, NewType, TypeVar, TypedDict + +MyString = NewType("MyString", "str") + + +def must_take_string(p1: str): + pass + + +must_take_string(MyString("hello")) + + +def must_take_my_string(p1: MyString): + pass + + +must_take_my_string(MyString("hello")) + +# This should generate an error because 'hello' +# isn't a valid MyString. 
+must_take_my_string("hello") + + +_T = TypeVar("_T") + + +def func1(x: type[_T]) -> type[_T]: + return x + + +MyString2 = NewType("MyString2", func1(str)) + +# This should generate an error because NewType requires two arguments. +NewTypeBad1 = NewType() + +# This should generate an error because NewType requires two arguments. +NewTypeBad2 = NewType("NewTypeBad2") + +# This should generate an error because NewType requires two arguments. +NewTypeBad3 = NewType("NewTypeBad3", int, int) + +# This should generate an error because the first argument must be a string literal. +NewTypeBad4 = NewType(int, int) + +args = ("Hi", int) +# This should generate an error because two positional args are needed. +NewTypeBad5 = NewType(*args) + +# This should generate an error because type cannot be Any. +NewTypeBad6 = NewType("NewTypeBad6", Any) + + +class TD1(TypedDict): + x: int + + +# This should generate an error because type cannot be a TypedDict. +NewTypeBad7 = NewType("NewTypeBad7", TD1) + +NewTypeGood8 = NewType("NewTypeGood8", MyString) + +# This should generate an error because the name doesn't match. +NewTypeBad9 = NewType("NewTypeBad9Not", int) + + +def func2(x: MyString): + # This should generate two errors because isinstance can't be used + # with a NewType and it violates the isinstance siganture. + if isinstance(x, MyString): + pass + + # This should generate two errors because issubclass can't be used + # with a NewType and it violates the issubclass signature. + if issubclass(type(x), (MyString, int)): + pass + + +class AbstractBase(ABC): + @abstractmethod + def method1(self, /) -> int: ... 
+ + +class DerivedBase(AbstractBase): + def method1(self, /) -> int: + return 0 + + +NewDerived = NewType("NewDerived", AbstractBase) +new_derived = NewDerived(DerivedBase()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/newType2.py b/python-parser/packages/pyright-internal/src/tests/samples/newType2.py new file mode 100644 index 00000000..caf8df91 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/newType2.py @@ -0,0 +1,38 @@ +# This sample tests the special-case handle of the multi-parameter +# form of the built-in "type" call. + +# pyright: strict + +X1 = type("X1", (object,), {}) +X2 = type("X2", (object,), {}) + + +class A(X1): ... + + +class B(X2, A): ... + + +# This should generate two errors (one for `__new__` and one for `__init__`) +# because the first arg is not a string. +X3 = type(34, (object,)) + +# This should generate two errors (one for `__new__` and one for `__init__`) +# because the second arg is not a tuple of class types. +X4 = type("X4", 34) + +# This should generate two errors (one for `__new__` and one for `__init__`) +# because the second arg is not a tuple of class types. +X5 = type("X5", (3,)) + + +X6 = type("", tuple({str}), {}) +X7 = type("", (float, str), {}) + + +class Meta1(type): ... + + +X8 = Meta1("X8", (list,), {}) +reveal_type(X8, expected_text="type[X8]") +reveal_type(type(X8), expected_text="type[type[X8]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/newType3.py b/python-parser/packages/pyright-internal/src/tests/samples/newType3.py new file mode 100644 index 00000000..15a60a2a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/newType3.py @@ -0,0 +1,33 @@ +# This sample tests that the type checker is properly synthesizing +# a constructor for a NewType. + +from typing import NewType + + +UserId = NewType("UserId", int) + +# This should generate an error because the constructor +# requires a single int. 
+var1 = UserId() + +var2 = UserId(2) + +# This should generate an error because the constructor +# requires a single int. +var3 = UserId("2") + +# This should generate an error because the constructor +# requires a single int. +var4 = UserId(2, 3) + + +def require_user_id(a: UserId): ... + + +require_user_id(var2) + +# this should generate an error. +require_user_id(2) + +var5 = 4 + var2 +var6 = var2 * 2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/newType4.py b/python-parser/packages/pyright-internal/src/tests/samples/newType4.py new file mode 100644 index 00000000..583aa740 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/newType4.py @@ -0,0 +1,20 @@ +# This sample tests some error conditions for NewType usage. + +from typing import Annotated, Literal, NewType, Sized, Union + + +# This should generate an error. +A = NewType("A", Union[int, str]) + +# This should generate an error. +B = NewType("B", Literal[1]) + +# This should generate an error. +C = NewType("B", Sized) + +# This should generate an error. +D = NewType("A", int | str) + +AnnotatedInt = Annotated[int, ""] +# This should generate an error. +E = NewType("E", AnnotatedInt) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/newType5.py b/python-parser/packages/pyright-internal/src/tests/samples/newType5.py new file mode 100644 index 00000000..6d98e447 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/newType5.py @@ -0,0 +1,16 @@ +# This sample tests the case where an Any type is passed as the second +# argument to NewType. + +from typing import Any, NewType + +# This should generate an error. 
+A = NewType("A", Any) + + +def func(x: A) -> A: + x() + x(1, 2, 3) + + y: list[int] = [x, x()] + + return x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/newType6.py b/python-parser/packages/pyright-internal/src/tests/samples/newType6.py new file mode 100644 index 00000000..e4a65d44 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/newType6.py @@ -0,0 +1,12 @@ +# This sample ensures that types created with NewType are treated +# as though they're final and cannot be subclassed. The runtime +# enforces this. + +from typing import NewType + + +MyStr = NewType("MyStr", str) + + +# This should generate an error. +class A(MyStr): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/newType7.py b/python-parser/packages/pyright-internal/src/tests/samples/newType7.py new file mode 100644 index 00000000..2de94f24 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/newType7.py @@ -0,0 +1,15 @@ +# This sample tests that classes created with NewType are treated +# as though they're functions at runtime. + +from typing import NewType + +MyStr = NewType("MyStr", str) + +# This should generate an error. +v1: type = MyStr + +# This should generate an error. +MyStr.capitalize + +MyStr.__name__ # OK + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/noTypeCheck1.py b/python-parser/packages/pyright-internal/src/tests/samples/noTypeCheck1.py new file mode 100644 index 00000000..f82ea703 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/noTypeCheck1.py @@ -0,0 +1,28 @@ +# This sample tests the handling of the @no_type_check decorator. + + +from typing import no_type_check + + +@no_type_check +class A: + # This should generate an error because no_type_check has + # no effect when applied to a class. 
+ x: int = "" + + +@no_type_check +def func1(a: int, b: int(), *args, c: int = 3) -> dummy: + x: int = "" + + +reveal_type( + func1, + expected_text="(a: Unknown, b: Unknown, *args: Unknown, c: Unknown = 3) -> Unknown", +) + + +# This should generate an error. +func1() + +func1("", "", c="") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/none1.py b/python-parser/packages/pyright-internal/src/tests/samples/none1.py new file mode 100644 index 00000000..39383983 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/none1.py @@ -0,0 +1,30 @@ +# This sample tests properties of the special NoneType. + +from typing import Hashable, Iterable + +a: Hashable = None + +# This should generate an error because None isn't iterable. +b: Iterable = None + +c = None +c.__class__ +c.__doc__ + + +def func1(a: int | None): + a.__class__ + a.__doc__ + + +def func2(x: type[None]): ... + + +func2(None.__class__) +func2(type(None)) + +reveal_type(type(None).__name__, expected_text="str") + +_ = type(None) == type(None) + +None.__eq__(0) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/none2.py b/python-parser/packages/pyright-internal/src/tests/samples/none2.py new file mode 100644 index 00000000..a92ad85b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/none2.py @@ -0,0 +1,24 @@ +# This sample checks that type[None] is handled correctly. + + +def func1(a: type[None]) -> type[str] | type[None]: + reveal_type(a, expected_text="type[None]") + + # This should generate an error because None is + # not compatible with Type[None]. + return None + + +val1 = func1(type(None)) + +if val1 is not None: + reveal_type(val1, expected_text="type[str] | type[None]") + +# This should generate an error because None isn't +# assignable to Type[None]. 
+val2 = func1(None) + +val3: type[object] = type(None) + +val4 = type(None)() +reveal_type(val4, expected_text="None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/noreturn1.py b/python-parser/packages/pyright-internal/src/tests/samples/noreturn1.py new file mode 100644 index 00000000..bc5ffd77 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/noreturn1.py @@ -0,0 +1,92 @@ +# This sample tests the type checker's handling of the NoReturn annotation type. + +from typing import Callable, NoReturn, overload + + +# This should generate an error because the function +# implicitly returns None. +def func1() -> NoReturn: + pass + + +def func2(x: bool) -> NoReturn: + if x: + # This should generate an error because the function + # explicitly returns a value. + return 4 + + raise Exception() + + +def func3() -> NoReturn: + raise Exception() + + +def func4(x: bool) -> str: + if x: + return "hello" + else: + func3() + + +def func5(x: bool) -> NoReturn: + if x: + # This should generate an error because the function + # explicitly yields a value. + yield 4 + + raise Exception() + + +x1: Callable[[bool], bool] = func2 + + +async def func6() -> NoReturn: ... + + +async def func7() -> NoReturn: + await func6() + + +class A: + def __new__(cls) -> NoReturn: ... + + +def func8() -> NoReturn: + A() + + +class C: + def __call__(self) -> NoReturn: ... + + +def func10() -> NoReturn: + C()() + + +@overload +def func11() -> NoReturn: ... + + +@overload +def func11(x: int) -> None: ... + + +def func11(x: int = 0) -> NoReturn | None: ... + + +def func12() -> NoReturn: + func11() + + +def func13() -> NoReturn: + # This should generate an error. + func11(0) + + +def func14(x: int) -> NoReturn: ... + + +def func15(): + # This should generate an error. 
+ return func14() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/noreturn2.py b/python-parser/packages/pyright-internal/src/tests/samples/noreturn2.py new file mode 100644 index 00000000..ddf46897 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/noreturn2.py @@ -0,0 +1,37 @@ +# This sample tests that the NoReturn logic is able to handle +# union types in call expressions. + +from typing import NoReturn + + +def func1() -> NoReturn: + raise TypeError + + +class B: + def always_noreturn(self) -> NoReturn: + func1() + + def sometimes_noreturn(self) -> NoReturn: + raise TypeError + + +class C: + def always_noreturn(self) -> NoReturn: + func1() + + def sometimes_noreturn(self) -> int: + return 0 + + +class A: + def __init__(self): + # Note the union type declaration here. + self._B_or_C: B | C = B() + + def m3(self) -> NoReturn: + self._B_or_C.always_noreturn() + + def m4(self) -> int: + x = self._B_or_C.sometimes_noreturn() + return x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/noreturn3.py b/python-parser/packages/pyright-internal/src/tests/samples/noreturn3.py new file mode 100644 index 00000000..5b84e504 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/noreturn3.py @@ -0,0 +1,13 @@ +# This sample tests the case where a local NoReturn call depends +# on the inferred type of a local variable. + +from typing import NoReturn + + +class MyClass: + def no_return(self) -> NoReturn: ... + + +def client_code() -> NoReturn: + instance = MyClass() + instance.no_return() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/noreturn4.py b/python-parser/packages/pyright-internal/src/tests/samples/noreturn4.py new file mode 100644 index 00000000..cbf79156 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/noreturn4.py @@ -0,0 +1,27 @@ +# This sample verifies that a `NoReturn` type can be assigned +# to any other type. 
+ +from typing import Callable, NoReturn, TypeVar + + +_T = TypeVar("_T", int, str) + + +def func1(x: Callable[[NoReturn], None]): ... + + +def func2(x: int) -> NoReturn: ... + + +def func3(x: _T) -> _T: + return x + + +def func4(x: NoReturn): + v1: object = x + v2: int = x + v3: str | int = x + v4: None = x + v5: Callable[[int, str], str] = x + func1(func2) + func3(x) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/operator1.py b/python-parser/packages/pyright-internal/src/tests/samples/operator1.py new file mode 100644 index 00000000..50749f38 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/operator1.py @@ -0,0 +1,119 @@ +# This sample tests the type checker's ability to check +# custom operator overrides. + +# pyright: reportIncompatibleMethodOverride=false + +from typing import NoReturn, Self + + +class A: + def __eq__(self, Foo): + return "equal" + + +class B: + def __ne__(self, Bar): + return self + + def __lt__(self, Bar): + return "string" + + def __gt__(self, Bar): + return "string" + + def __ge__(self, Bar): + return "string" + + def __le__(self, Bar): + return "string" + + +def needs_a_string(val: str): + pass + + +def needs_a_string_or_bool(val: bool | str): + pass + + +def test(): + a = A() + needs_a_string(a == a) + + # This should generate an error because there + # is no __ne__ operator defined, so a bool + # value will result. + needs_a_string(a != a) + + if True: + a = B() + + # At this point, a should be of type Union[Foo, Bar], + # so the == operator should return either a str or + # a bool. + needs_a_string_or_bool(a == a) + + # This should generate an error. + needs_a_string(a == a) + + # This should generate an error. 
+ needs_a_string_or_bool(a != a) + + b = B() + needs_a_string(b < b) + needs_a_string(b > b) + needs_a_string(b <= b) + needs_a_string(b >= b) + + +class C: + def __getattr__(self, name: str, /): + if name == "__add__": + return lambda _: 0 + + +a = C() +a.__add__ + +# This should generate an error because __getattr__ is not used +# when looking up operator overload methods. +b = a + 0 + + +class D: + def __init__(self): + self.__add__ = lambda x: x + + +d = D() + +# This should generate an error because __add__ is not a class variable. +_ = d + d + + +class E: + __slots__ = ("__add__",) + + def __init__(self): + self.__add__ = lambda x: x + + +e = E() + +_ = e + e + + +class F: + def __add__(self, other: object) -> NoReturn: ... + + +f = F() + "" +reveal_type(f, expected_text="NoReturn") + + +class G: + def __add__(self, other: int) -> Self: + return self + + def method1(self) -> Self: + return self + 0 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/operator10.py b/python-parser/packages/pyright-internal/src/tests/samples/operator10.py new file mode 100644 index 00000000..016ab534 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/operator10.py @@ -0,0 +1,16 @@ +# This sample tests the case where an operator (__or__) cannot be +# properly evaluated when using bidirectional type inference but +# can be without. 
+ +from typing import Iterable + + +def func(a: set[int], b: set[str]): + x1: Iterable[int | str] = a | a + + x2: set[int] = a | a + + # This should generate an error + x3: set[int | str] = a | a + + x4: set[int | str] = a | b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/operator11.py b/python-parser/packages/pyright-internal/src/tests/samples/operator11.py new file mode 100644 index 00000000..ead436ec --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/operator11.py @@ -0,0 +1,23 @@ +# This sample tests the case where an operator overload method is +# defined as a callable protocol object. + +from typing import Protocol + + +class ComparisonOp(Protocol): + def __call__(self, other: object, /) -> bool: ... + + +class Number: + __lt__: ComparisonOp + __le__: ComparisonOp + __gt__: ComparisonOp + __ge__: ComparisonOp + + +n1 = Number() +n2 = Number() + +v1 = n1 < n2 +v2 = n1 >= n2 +v2 = n1 > n2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/operator12.py b/python-parser/packages/pyright-internal/src/tests/samples/operator12.py new file mode 100644 index 00000000..35812d50 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/operator12.py @@ -0,0 +1,14 @@ +# This sample tests the handling of operators when used with constrained +# type variables. + +from typing import Generic, TypeVar + +T = TypeVar("T", int, float) + + +class A(Generic[T]): + def __init__(self, x: T): + self.x: T = x + + def __neg__(self) -> "A[T]": + return A[T](x=-self.x) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/operator2.py b/python-parser/packages/pyright-internal/src/tests/samples/operator2.py new file mode 100644 index 00000000..322d991b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/operator2.py @@ -0,0 +1,30 @@ +# This sample tests the type checker's handling of chained +# comparison operators. 
+ +from datetime import datetime + + +def requires_bool(val: bool): + pass + + +date1 = datetime.now() +date2 = datetime.now() +date3 = datetime.now() + +foo1 = date1 < date2 <= date3 +requires_bool(foo1) + +int1 = 3 +foo2 = 2 < int1 < 5 +requires_bool(foo2) + +# This should generate an error because +# int and datetime cannot be compared. +foo3 = date1 < date2 < 3 + +foo4 = (date1 < date2) < 3 + +foo5 = "1" == "1" in "1" + +foo6 = "1" in "1" == "1" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/operator3.py b/python-parser/packages/pyright-internal/src/tests/samples/operator3.py new file mode 100644 index 00000000..07c58341 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/operator3.py @@ -0,0 +1,42 @@ +# This sample tests the handling of the "or" and "and" operators +# when used with bidirectional type inference. + + +from typing import Any, TypeVar, overload + +_T = TypeVar("_T", bound=str) + + +@overload +def func1(cmd: _T) -> _T: ... + + +@overload +def func1(cmd: bytes) -> None: ... + + +def func1(cmd: Any) -> Any: ... + + +def func2(x: bool): + y = x or func1("") + reveal_type(y, expected_text="str | Literal[True]") + + +def func3(x: list[str]): + y = x or [] + reveal_type(y, expected_text="list[str]") + + +def func4(x: set[str]): + y = x or [] + reveal_type(y, expected_text="set[str] | list[Any]") + + +def identity(v: _T) -> _T: + return v + + +def func5(x: int): + v = x and identity("") + reveal_type(v, expected_text="str | Literal[0]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/operator4.py b/python-parser/packages/pyright-internal/src/tests/samples/operator4.py new file mode 100644 index 00000000..0f31ee54 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/operator4.py @@ -0,0 +1,22 @@ +# This sample tests operator overloads for matrix multiply operations. 
+ + +class A: + pass + + +class B: + def __rmul__(self, a: A): + pass + + def __rmatmul__(self, a: A): + pass + + def __matmul__(self, a: A): + pass + + +a, b = A(), B() + +v1 = a @ b +v2 = b @ a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/operator5.py b/python-parser/packages/pyright-internal/src/tests/samples/operator5.py new file mode 100644 index 00000000..2e895206 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/operator5.py @@ -0,0 +1,5 @@ +# This sample tests the parsing of the deprecated <> operator. + +# This should generate a single error, not a cascade of errors. +if 3 <> 5: + print("OK") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/operator6.py b/python-parser/packages/pyright-internal/src/tests/samples/operator6.py new file mode 100644 index 00000000..eb07d2ce --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/operator6.py @@ -0,0 +1,10 @@ +# This sample tests whether metaclasses that support operator +# magic methods work correctly. + +import ctypes + +v1 = ctypes.POINTER(ctypes.c_bool) * 3 +reveal_type(v1, expected_text="type[Array[_Pointer[c_bool]]]") + +v2 = 3 * ctypes.POINTER(ctypes.c_bool) +reveal_type(v2, expected_text="type[Array[_Pointer[c_bool]]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/operator7.py b/python-parser/packages/pyright-internal/src/tests/samples/operator7.py new file mode 100644 index 00000000..f88239a4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/operator7.py @@ -0,0 +1,24 @@ +# This sample tests the handling of binary operators when used with +# generic types. + +from typing import TypeVar + +_TInt = TypeVar("_TInt", bound=int) + + +def func1(n: _TInt) -> _TInt: + x = n + 1 + reveal_type(x, expected_text="int") + + # This should generate an error. 
+ return x + + +_TIntOrStr = TypeVar("_TIntOrStr", int, str) + + +def func2(n: _TIntOrStr) -> _TIntOrStr: + x = n + n + reveal_type(x, expected_text="int* | str*") + + return x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/operator8.py b/python-parser/packages/pyright-internal/src/tests/samples/operator8.py new file mode 100644 index 00000000..39901ce5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/operator8.py @@ -0,0 +1,261 @@ +# This sample tests various "literal math" binary and unary operations that +# are applied when all operands are literal types with the same associated +# class. + +from functools import reduce +from typing import Iterable, Literal, TypeVar + +TLiteral = TypeVar("TLiteral", bound=Literal[0, 1, 2]) + + +def func1(a: Literal[1, 2], b: Literal[0, 4], c: Literal[3, 4]): + c1 = a * b + c + reveal_type(c1, expected_text="Literal[3, 4, 7, 8, 11, 12]") + + c2 = a // 0 + reveal_type(c2, expected_text="int") + + c3 = a % 0 + reveal_type(c3, expected_text="int") + + c4 = ((a * 1000) % 39) // c + reveal_type(c4, expected_text="Literal[8, 6, 3, 2]") + + c5 = a + True + reveal_type(c5, expected_text="int") + + c1 -= 5 + reveal_type(c1, expected_text="Literal[-2, -1, 2, 3, 6, 7]") + + c1 = -c1 + reveal_type(c1, expected_text="Literal[2, 1, -2, -3, -6, -7]") + + c1 = +c1 + reveal_type(c1, expected_text="Literal[2, 1, -2, -3, -6, -7]") + + c1 = ~c1 + reveal_type(c1, expected_text="Literal[-3, -2, 1, 2, 5, 6]") + + # Verify bitwise invert for a large int literal that doesn't fit in JS 32-bit + reveal_type(~2147483648, expected_text="Literal[-2147483649]") + + c1 = (-5 & 1) ^ (4 | 2) + reveal_type(c1, expected_text="Literal[7]") + + c1 = 1 << 128 + reveal_type(c1, expected_text="Literal[340282366920938463463374607431768211456]") + + c1 = 2**10 + reveal_type(c1, expected_text="Literal[1024]") + + c1 = (-1) ** 10 + reveal_type(c1, expected_text="Literal[1]") + + c1 = (2 - 3) ** 100001 + reveal_type(c1, 
expected_text="Literal[-1]") + + c1 = 2**100 + reveal_type(c1, expected_text="Literal[1267650600228229401496703205376]") + + c6 = 1 + for _ in range(100): + c6 += a + reveal_type(c6, expected_text="int") + + c7 = -10 // 8 + reveal_type(c7, expected_text="Literal[-2]") + + c8 = 10 // -6 + reveal_type(c8, expected_text="Literal[-2]") + + c9 = 0 // -6 + reveal_type(c9, expected_text="Literal[0]") + + c10 = 0 // 6 + reveal_type(c10, expected_text="Literal[0]") + + c11 = -6 // 6 + reveal_type(c11, expected_text="Literal[-1]") + + c12 = 6 // -6 + reveal_type(c12, expected_text="Literal[-1]") + + c13 = 6 // -3 + reveal_type(c13, expected_text="Literal[-2]") + + c14 = 256 // -16 + reveal_type(c14, expected_text="Literal[-16]") + + c20 = 1 << -1 + reveal_type(c20, expected_text="int") + + c21 = 1 >> -1 + reveal_type(c21, expected_text="int") + + c30 = -129 % 16 + reveal_type(c30, expected_text="Literal[15]") + + c31 = -129 % 32 + reveal_type(c31, expected_text="Literal[31]") + + c32 = -129 % 100 + reveal_type(c32, expected_text="Literal[71]") + + c33 = 256 % -32678 + reveal_type(c33, expected_text="Literal[-32422]") + + c34 = 256 % -129 + reveal_type(c34, expected_text="Literal[-2]") + + c35 = 0 % -1 + reveal_type(c35, expected_text="Literal[0]") + + c36 = -1 % -1 + reveal_type(c36, expected_text="Literal[0]") + + c37 = 1 % 1 + reveal_type(c37, expected_text="Literal[0]") + + c38 = -2 % 1 + reveal_type(c38, expected_text="Literal[0]") + + c39 = 4 % -2 + reveal_type(c39, expected_text="Literal[0]") + + +def func2(cond: bool): + c1 = "Hi " + ("Steve" if cond else "Amy") + reveal_type(c1, expected_text="Literal['Hi Steve', 'Hi Amy']") + + +def func3(cond: bool): + c1 = b"Hi " + (b"Steve" if cond else b"Amy") + reveal_type(c1, expected_text='Literal[b"Hi Steve", b"Hi Amy"]') + + +def func4(a: Literal[True], b: Literal[False]): + c1 = a and b + reveal_type(c1, expected_text="Literal[False]") + + c2 = a and a + reveal_type(c2, expected_text="Literal[True]") + + c3 = a or b + 
reveal_type(c3, expected_text="Literal[True]") + + c4 = not a + reveal_type(c4, expected_text="Literal[False]") + + c5 = not b + reveal_type(c5, expected_text="Literal[True]") + + c6 = not b and not a + reveal_type(c6, expected_text="Literal[False]") + + c7 = not b or not a + reveal_type(c7, expected_text="Literal[True]") + + c8 = b + reveal_type(c8, expected_text="Literal[False]") + while True: + c8 = not c8 + reveal_type(c8, expected_text="bool") + + +mode = Literal[ + "a", + "b", + "c", + "d", + "e", + "f", + "g", + "h", + "i", + "j", + "k", + "l", + "m", + "n", + "o", + "p", + "q", + "r", + "s", + "t", + "u", + "v", + "w", + "z", + "y", + "z", +] + + +def func5( + a: mode, b: mode, c: mode, d: mode, e: mode, f: mode, g: mode, h: mode, i: mode +): + # Make sure this degenerate case falls back to "LiteralString". + reveal_type(a + b + c + d + e + f + g + h + i, expected_text="LiteralString") + + +def func6(x: Literal[1, 3, 5, 7, 11, 13]): + y = x + y *= x + + reveal_type( + y, + expected_text="Literal[1, 3, 5, 7, 11, 13, 9, 15, 21, 33, 39, 25, 35, 55, 65, 49, 77, 91, 121, 143, 169]", + ) + + y *= x + reveal_type(y, expected_text="int") + + +def func7(values: Iterable[str]) -> tuple[str, int]: + return reduce( + lambda x, value: (x[0] + value, x[1] + 1), + values, + ("", 0), + ) + + +def func8(values: Iterable[float]) -> float: + total, num_of_values = reduce( + lambda total_and_count, value: ( + total_and_count[0] + value, + total_and_count[1] + 1, + ), + values, + (0, 0), + ) + return total / num_of_values + + +def func9(a: int) -> None: + b = 0 + reveal_type(b, expected_text="Literal[0]") + + def inner1() -> None: + nonlocal b + b += 1 + reveal_type(b, expected_text="int") + b = b + 1 + + def inner2() -> None: + nonlocal b + b = b + 1 + reveal_type(b, expected_text="int") + + inner1() + inner2() + + +def func10(a: TLiteral) -> TLiteral: + # This should generate an error. 
+ return -a + + +def func11(a: TLiteral, b: TLiteral) -> TLiteral: + # This should generate an error. + return a + b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/operator9.py b/python-parser/packages/pyright-internal/src/tests/samples/operator9.py new file mode 100644 index 00000000..ba639a3e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/operator9.py @@ -0,0 +1,23 @@ +# This sample tests bidirectional type inference for | operators. This +# should apply only to TypedDict types. + +from typing import Literal, TypeVar, Generic, Callable + +T1 = TypeVar("T1") +T2 = TypeVar("T2") + + +class S(Generic[T1]): + def __or__(self, other: "S[T2]") -> "S[T1 | T2]": ... + + +def to(x: Callable[..., T1]) -> "S[T1]": ... + + +x1 = to(int) | to(float) + + +def func1(f: set[Literal["A", "B"]]): + v1: set[Literal["A", "B"]] = f | f + + v2 = " ".join({"A"} | {"B"}) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/optional1.py b/python-parser/packages/pyright-internal/src/tests/samples/optional1.py new file mode 100644 index 00000000..ff983970 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/optional1.py @@ -0,0 +1,83 @@ +# This sample tests use of "Optional" types. + +from typing import Any, Optional + + +class Foo: + def __init__(self): + self.value = 3 + + def do_stuff(self): + pass + + def __enter__(self): + return 3 + + def __exit__( + self, + t: Optional[type] = None, + exc: Optional[BaseException] = None, + tb: Optional[Any] = None, + ) -> bool: + return True + + +a = None +if 1: + a = Foo() + +# If "reportOptionalMemberAccess" is enabled, +# this should generate an error. +a.value = 3 + + +def foo(): + pass + + +b = None +if 1: + b = foo + +# If "reportOptionalCall" is enabled, +# this should generate an error. +b() + + +c = None +if 1: + c = [3, 4, 5] + +# If "reportOptionalSubscript" is enabled, +# this should generate an error. 
+c[2] + + +# If "reportOptionalIterable" is enabled, +# this should generate an error. +for val in c: + pass + +# If "reportOptionalContextManager" is enabled, +# this should generate an error. +cm = None +if 1: + cm = Foo() +with cm as val: + pass + +e = None +if 1: + e = 4 + +# If "reportOptionalOperand" is enabled, +# this should generate an error. +v1 = e + 4 + +# If "reportOptionalOperand" is enabled, +# this should generate an error. +v2 = e < 5 + +# If "reportOptionalOperand" is enabled, +# this should generate an error. +v3 = ~e diff --git a/python-parser/packages/pyright-internal/src/tests/samples/optional2.py b/python-parser/packages/pyright-internal/src/tests/samples/optional2.py new file mode 100644 index 00000000..998105bd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/optional2.py @@ -0,0 +1,22 @@ +# This sample verifies that the reportOptionalOperand diagnostic +# isn't generated when the RHS operand accepts None. + +# pyright: reportIncompatibleMethodOverride=false + +from typing import Optional + + +class Cmp: + def __eq__(self, other: "Optional[Cmp]") -> bool: ... + + def __lt__(self, other: "Optional[Cmp]") -> bool: ... + + def __gt__(self, other: "Cmp") -> bool: ... + + +def valid(value: Optional[Cmp], needed: Cmp): + x = value > needed + y = value == needed + + # This should generate an error if reportOptionalOperand is enabled. + z = value < needed diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overload1.py b/python-parser/packages/pyright-internal/src/tests/samples/overload1.py new file mode 100644 index 00000000..fab26bfa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overload1.py @@ -0,0 +1,27 @@ +# This sample verifies that the type checker doesn't use the +# final function that omits the @overload decorator when matching +# a caller against an overloaded function. 
+ +from typing import TypeVar, overload + +T = TypeVar("T") + + +@overload +def mouse_event(x1: int, y1: int) -> int: ... + + +@overload +def mouse_event(x1: int, y1: int, x2: int, y2: int) -> tuple[int, int]: ... + + +def mouse_event( + x1: int, y1: int, x2: int | None = None, y2: int | None = None +) -> int | tuple[int, int]: + return 1 + + +# This should generate an error because it doesn't match either +# of the @overload versions, even though it does match the +# version of the function that omits the @overload. +t = mouse_event(1, 2, 3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overload2.py b/python-parser/packages/pyright-internal/src/tests/samples/overload2.py new file mode 100644 index 00000000..dce67ec3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overload2.py @@ -0,0 +1,91 @@ +# This sample verifies that a lone overload is reported +# as an error. + +from typing import Any, Callable, ParamSpec, Protocol, TypeVar, overload + +T = TypeVar("T") +P = ParamSpec("P") + + +# This should generate an error because there is only one overload. +@overload +def func1() -> None: ... + + +def func1() -> None: ... + + +# This should generate an error because there is only one overload. +@overload +def func2(a: int) -> None: ... + + +def func2(a: int) -> None: + pass + + +class ClassA: + # This should generate an error because there is no implementation. + @overload + def func3(self) -> None: ... + + @overload + def func3(self, a: int) -> None: ... + + +class ClassB(Protocol): + # An implementation should not be required in a protocol class. + @overload + def func4(self) -> None: ... + + @overload + def func4(self, name: str) -> str: ... + + +def deco1( + _origin: Callable[P, T], +) -> Callable[[Callable[..., Any]], Callable[P, T]]: ... + + +@overload +def func5(v: int) -> int: ... + + +@overload +def func5(v: str) -> str: ... + + +def func5(v: int | str) -> int | str: ... 
+ + +@deco1(func5) +def func6(*args: Any, **kwargs: Any) -> Any: ... + + +@overload +def deco2() -> Callable[[Callable[P, T]], Callable[P, T | None]]: ... + + +@overload +def deco2( + x: Callable[[], T], +) -> Callable[[Callable[P, T]], Callable[P, T]]: ... + + +def deco2( + x: Callable[[], T | None] = lambda: None, +) -> Callable[[Callable[P, T]], Callable[P, T | None]]: ... + + +@deco2(x=dict) +def func7() -> dict[str, str]: + return {} + + +class ClassC[T]: + def __init__(self, _: T): ... + def __call__(self, a) -> T: ... + + +@ClassC(print) +def func8(a: int) -> None: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overload3.py b/python-parser/packages/pyright-internal/src/tests/samples/overload3.py new file mode 100644 index 00000000..055eaa10 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overload3.py @@ -0,0 +1,39 @@ +# This sample tests the reporting of inconsistent use of @classmethod +# and @staticmethod in overloads. + +from typing import Any, overload + + +class A: + @overload + # This should emit an error because @staticmethod is used inconsistently. + def method1(self, x: int) -> int: ... + + @overload + @staticmethod + def method1(x: str) -> str: ... + + def method1(*args: Any, **kwargs: Any) -> Any: + return + + @overload + @classmethod + # This should emit an error because @classmethod is used inconsistently. + def method2(cls, x: str) -> str: ... + + @overload + def method2(self, x: int) -> int: ... + + def method2(*args: Any, **kwargs: Any) -> Any: + return + + @overload + # This should emit an error because @staticmethod is used inconsistently. + def method3(self, x: str) -> str: ... + + @overload + def method3(self, x: int) -> int: ... 
+ + @staticmethod + def method3(*args: Any, **kwargs: Any) -> Any: + return diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overload4.py b/python-parser/packages/pyright-internal/src/tests/samples/overload4.py new file mode 100644 index 00000000..cc842a62 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overload4.py @@ -0,0 +1,17 @@ +# This sample tests a complex overload case that was causing a hang +# in pyright's logic. + +from typing import Callable, overload + + +@overload +def func1[K, VI, VO](d: dict[K, VI], func: Callable[[VI], VO]) -> dict[K, VO]: ... + + +@overload +def func1[K, VI, VO](d: VI, func: Callable[[VI], VO]) -> VO: ... + + +def func1[K, VI, VO]( + d: dict[K, VI] | VI, func: Callable[[VI], VO] +) -> dict[K, VO] | VO: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overload5.py b/python-parser/packages/pyright-internal/src/tests/samples/overload5.py new file mode 100644 index 00000000..b6faa721 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overload5.py @@ -0,0 +1,60 @@ +# This sample tests for proper usage of @final and @override within +# an overload definition. + +from typing import Any, Protocol, final, overload, override + + +class ABase: + def method2(self, x: int | str) -> int | str: ... + + +class A(ABase): + @final + @overload + # This should generate an error. + def method1(self, x: int) -> int: ... + + @final + @overload + # This should generate an error. + def method1(self, x: str) -> str: ... + + @final + def method1(self, x: int | str) -> int | str: ... + + @override + @overload + # This should generate an error. + def method2(self, x: int) -> int: ... + + @override + @overload + # This should generate an error. + def method2(self, x: str) -> str: ... + + @override + def method2(self, x: int | str) -> int | str: ... + + +class BBase(Protocol): + def method2(self, x: Any) -> Any: ... 
+ + +class B(BBase, Protocol): + @final + @overload + def method1(self, x: int) -> int: ... + + @final + @overload + # This should generate an error. + def method1(self, x: str) -> str: ... + + @override + @overload + def method2(self, x: int) -> int: ... + + @override + @overload + # This should generate an error. + def method2(self, x: str) -> str: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadCall1.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall1.py new file mode 100644 index 00000000..cbddb049 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall1.py @@ -0,0 +1,44 @@ +# This sample tests the type checker's handling of the overload decorator. + +from typing import overload +from datetime import datetime, timezone, timedelta + + +@overload +def func1(ts: int) -> datetime: ... + + +@overload +def func1(ts: None) -> None: ... + + +@overload +def func1(ts: complex): ... + + +def func1(ts: int | complex | None) -> datetime | None: + return ( + None + if not isinstance(ts, int) + else (datetime(1970, 1, 1, tzinfo=timezone.utc) + timedelta(milliseconds=ts)) + ) + + +reveal_type(func1(2418049), expected_text="datetime") +reveal_type(func1(None), expected_text="None") +reveal_type(func1(3j), expected_text="Unknown") + + +@overload +def func2(x: int) -> int: ... + + +@overload +def func2(x: float) -> float: ... 
+ + +def func2(x): + return x + + +reveal_type(func2(abs(0.0)), expected_text="float") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadCall10.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall10.py new file mode 100644 index 00000000..c1115630 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall10.py @@ -0,0 +1,30 @@ +# This sample tests that overload matching for partially-overlapping overload +# signatures considers the "expected type" when using bidirectional type +# inference. + +from typing import Any, Generic, LiteralString, TypeVar, overload + + +T = TypeVar("T") + + +class A(Generic[T]): + @overload + def func1(self: "A[bool]", x: "A[bool]") -> list[LiteralString]: ... + + @overload + def func1(self, x: "A[str]") -> list[str]: ... + + def func1(self, x: "A[Any]") -> list[str] | list[LiteralString]: + return [] + + +def func2(a: A[bool], b: A[str]): + v1: list[LiteralString] = a.func1(a) + + # This should generate an error. + v2: list[str] = a.func1(a) + + # This should generate an error. + v3: list[LiteralString] = b.func1(b) + v4: list[str] = b.func1(b) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadCall2.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall2.py new file mode 100644 index 00000000..41886c79 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall2.py @@ -0,0 +1,27 @@ +# This sample verifies that overloads work in +# conjunction with async methods. + +from typing import overload + + +@overload +async def func(x: int) -> int: ... + + +@overload +async def func(x: str) -> str: ... 
+ + +async def func(x) -> int | str: + if isinstance(x, int): + return 32 + else: + return "that" + + +async def test_function(): + v1 = await func("2") + reveal_type(v1, expected_text="str") + + v2 = await func(2) + reveal_type(v2, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadCall3.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall3.py new file mode 100644 index 00000000..37878c3f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall3.py @@ -0,0 +1,21 @@ +# This sample tests the case where an implementation of an overload uses +# a decorator that turns it into a non-function type. + +from functools import lru_cache +from typing import AnyStr, overload + + +@overload +def func1(url: str) -> str: ... + + +@overload +def func1(url: bytes) -> bytes: ... + + +@lru_cache() +def func1(url: AnyStr) -> str | bytes: ... + + +reveal_type(func1(""), expected_text="str") +reveal_type(func1(b""), expected_text="bytes") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadCall4.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall4.py new file mode 100644 index 00000000..1f1ed998 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall4.py @@ -0,0 +1,284 @@ +# This sample tests the expansion of argument types during overload matching. + + +from enum import Enum +from typing import AnyStr, Literal, TypeVar, overload + + +class A: ... + + +class B: ... + + +class C: ... + + +_T1 = TypeVar("_T1", bound=B) + + +@overload +def overloaded1(x: A) -> str: ... + + +@overload +def overloaded1(x: _T1) -> _T1: ... + + +def overloaded1(x: A | B) -> str | B: ... + + +def func1(a: A | B, b: A | B | C): + v1 = overloaded1(a) + reveal_type(v1, expected_text="str | B") + + # This should generate an error because C is not allowed + # for the first argument. 
+ v2 = overloaded1(b) + + +class LargeEnum(Enum): + x00 = 0 + x01 = 0 + x02 = 0 + x03 = 0 + x04 = 0 + x05 = 0 + x06 = 0 + x07 = 0 + x08 = 0 + x09 = 0 + x10 = 0 + x11 = 0 + x12 = 0 + x13 = 0 + x14 = 0 + x15 = 0 + x16 = 0 + x17 = 0 + x18 = 0 + x19 = 0 + x20 = 0 + x21 = 0 + x22 = 0 + x23 = 0 + x24 = 0 + x25 = 0 + x26 = 0 + x27 = 0 + x28 = 0 + x29 = 0 + x30 = 0 + x31 = 0 + x32 = 0 + x33 = 0 + x34 = 0 + x35 = 0 + x36 = 0 + x37 = 0 + x38 = 0 + x39 = 0 + x40 = 0 + x41 = 0 + x42 = 0 + x43 = 0 + x44 = 0 + x45 = 0 + x46 = 0 + x47 = 0 + x48 = 0 + x49 = 0 + x50 = 0 + x51 = 0 + x52 = 0 + x53 = 0 + x54 = 0 + x55 = 0 + x56 = 0 + x57 = 0 + x58 = 0 + x59 = 0 + x60 = 0 + x61 = 0 + x62 = 0 + x63 = 0 + x64 = 0 + x65 = 0 + x66 = 0 + x67 = 0 + x68 = 0 + x69 = 0 + + +LargeUnion = ( + Literal[ + "a", + "b", + "c", + "d", + "e", + "f", + "g", + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + ] + | LargeEnum +) + + +@overload +def overloaded2(a: LargeUnion, b: Literal[2]) -> str: ... + + +@overload +def overloaded2(a: LargeUnion, b: Literal[3]) -> str: ... + + +@overload +def overloaded2(a: LargeUnion, b: Literal[4]) -> float: ... + + +@overload +def overloaded2(a: LargeUnion, b: Literal[9]) -> float: ... + + +@overload +def overloaded2(a: LargeUnion, b: Literal[10]) -> float: ... + + +def overloaded2(a: LargeUnion, b: LargeUnion | Literal[9, 10]) -> str | float: ... + + +def func2(a: LargeUnion, b: Literal[2, 3, 4], c: Literal[2, 3, 4, 9, 10] | LargeEnum): + v1 = overloaded2("a", 2) + reveal_type(v1, expected_text="str") + + v2 = overloaded2(a, b) + reveal_type(v2, expected_text="str | float") + + # This should generate an error because the expansion of union types + # will exceed the max number of expansions (256). + v3 = overloaded2(a, c) + reveal_type(v2, expected_text="str | float") + + +_T2 = TypeVar("_T2", str, bytes) + + +@overload +def overloaded3(x: str) -> str: ... + + +@overload +def overloaded3(x: bytes) -> bytes: ... + + +def overloaded3(x: str | bytes) -> str | bytes: ... 
+ + +def func3(y: _T2): + overloaded3(y) + + +_T3 = TypeVar("_T3") + + +def func5(a: _T3) -> _T3: + return a + + +@overload +def overloaded4(b: str) -> str: ... + + +@overload +def overloaded4(b: int) -> int: ... + + +def overloaded4(b: str | int) -> str | int: ... + + +def func6(x: str | int) -> None: + y: str | int = overloaded4(func5(x)) + + +@overload +def overloaded5(pattern: AnyStr) -> AnyStr: ... + + +@overload +def overloaded5(pattern: int) -> int: ... + + +def overloaded5(pattern: AnyStr | int) -> AnyStr | int: + return 0 + + +def func7(a: str | bytes) -> str | bytes: + return overloaded5(a) + + +def func8(a: AnyStr | str | bytes) -> str | bytes: + return overloaded5(a) + + +class E(Enum): + A = "A" + B = "B" + + +@overload +def func9(v: Literal[E.A]) -> int: ... +@overload +def func9(v: Literal[E.B]) -> str: ... +@overload +def func9(v: bool) -> list[str]: ... + + +def func9(v: E | bool) -> int | str | list[str]: ... + + +def test9(a1: E | bool): + reveal_type(func9(a1), expected_text="int | str | list[str]") + + +@overload +def func10(v: Literal[True]) -> int: ... +@overload +def func10(v: Literal[False]) -> str: ... + + +def func10(v: bool) -> int | str: ... + + +def test10(a1: bool): + reveal_type(func10(a1), expected_text="int | str") + + +@overload +def func11(v: tuple[int, int]) -> int: ... + + +@overload +def func11(v: tuple[str, int]) -> str: ... + + +@overload +def func11(v: tuple[int, str]) -> int: ... + + +@overload +def func11(v: tuple[str, str]) -> str: ... + + +def func11(v: tuple[int | str, int | str]) -> int | str: ... 
+ + +def test11(a1: tuple[int | str, int | str]): + reveal_type(func11(a1), expected_text="int | str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadCall5.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall5.py new file mode 100644 index 00000000..fb2730a2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall5.py @@ -0,0 +1,107 @@ +# This sample tests an overload that provides a signature for +# a *args parameter. + + +from typing import Any, Iterable, Tuple, TypeVar, overload + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") + + +# This should generate an error because this overload overlaps +# with the third one and returns a different type. +@overload +def func1(__iter1: Iterable[_T1]) -> Tuple[_T1]: ... + + +@overload +def func1(__iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> Tuple[_T1, _T2]: ... + + +@overload +def func1(*iterables: Iterable[_T1]) -> float: ... + + +def func1(*iterables: Iterable[_T1 | _T2]) -> Tuple[_T1 | _T2, ...] | float: ... + + +def test1(x: Iterable[int]): + v1 = func1(x) + reveal_type(v1, expected_text="Tuple[int]") + + v2 = func1(x, x) + reveal_type(v2, expected_text="Tuple[int, int]") + + y = [x, x, x, x] + + v3 = func1(*y) + reveal_type(v3, expected_text="float") + + z = (x, x) + + v4 = func1(*z) + reveal_type(v4, expected_text="Tuple[int, int]") + + +@overload +def func2() -> tuple[()]: ... + + +@overload +def func2(x: int, /) -> tuple[int]: ... + + +@overload +def func2(*x: int) -> tuple[int, ...]: ... + + +def func2(*x: int) -> tuple[int, ...]: + return x + + +reveal_type(func2(), expected_text="tuple[()]") +reveal_type(func2(1), expected_text="tuple[int]") +reveal_type(func2(1, 2), expected_text="tuple[int, ...]") +reveal_type(func2(*[1, 2, 3]), expected_text="tuple[int, ...]") + + +@overload +def func3(x: int, /) -> str: ... + + +@overload +def func3(x: int, y: int, /, *args: int) -> int: ... 
+ + +def func3(*args: int) -> int | str: + return 1 + + +def test3(v: list[int]) -> None: + r = func3(*v) + reveal_type(r, expected_text="int") + + +def test4(v: list[tuple[int, str]]): + z1 = zip(*v) + reveal_type(z1, expected_text="zip[tuple[Any, ...]]") + + z2 = zip(v[0]) + reveal_type(z2, expected_text="zip[tuple[int | str]]") + + z3 = zip(v[0], v[1]) + reveal_type(z3, expected_text="zip[tuple[int | str, int | str]]") + + +@overload +def func4() -> tuple[()]: ... +@overload +def func4[T](**kwargs: T) -> tuple[T, ...]: ... +def func4(**kwargs: Any) -> tuple[Any, ...]: ... + + +def test5(): + v1 = func4(**{"a": 1}) + reveal_type(v1, expected_text="tuple[int, ...]") + v2 = func4() + reveal_type(v2, expected_text="tuple[()]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadCall6.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall6.py new file mode 100644 index 00000000..f40c9c34 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall6.py @@ -0,0 +1,321 @@ +# This sample tests overload matching in cases where the match +# is ambiguous due to an Any or Unknown argument. + +# pyright: reportMissingModuleSource=false + +from typing import Any, Generic, Literal, TypeVar, overload +from typing_extensions import LiteralString, TypeIs + +_T = TypeVar("_T") + + +@overload +def overload1(x: int, y: float) -> float: ... + + +@overload +def overload1(x: str, y: float) -> str: ... + + +def overload1(x: str | int, y: float) -> float | str: ... + + +def func1(a: Any): + v1 = overload1(1, 3.4) + reveal_type(v1, expected_text="float") + + v2 = overload1("", 3.4) + reveal_type(v2, expected_text="str") + + v3 = overload1(a, 3.4) + reveal_type(v3, expected_text="Unknown") + + v4 = overload1("", a) + reveal_type(v4, expected_text="str") + + +@overload +def overload2(x: int) -> Any: ... + + +@overload +def overload2(x: str) -> str: ... + + +def overload2(x: str | int) -> Any | str: ... 
+ + +def func2(a: Any): + v1 = overload2("") + reveal_type(v1, expected_text="str") + + v2 = overload2(3) + reveal_type(v2, expected_text="Any") + + v3 = overload2(a) + reveal_type(v3, expected_text="Any") + + +@overload +def overload3(x: LiteralString) -> LiteralString: ... + + +@overload +def overload3(x: str) -> str: ... + + +def overload3(x: str) -> str: ... + + +def func3(a: Any, b: str): + v1 = overload3("") + reveal_type(v1, expected_text="LiteralString") + + v2 = overload3(b) + reveal_type(v2, expected_text="str") + + v3 = overload3(a) + reveal_type(v3, expected_text="str") + + +def func4(a: Any): + d = dict(a) + reveal_type(d, expected_text="dict[Any, Any]") + + +@overload +def overload4(x: str, *, flag: Literal[True]) -> int: ... + + +@overload +def overload4(x: str, *, flag: Literal[False] = ...) -> str: ... + + +@overload +def overload4(x: str, *, flag: bool = ...) -> int | str: ... + + +def overload4(x: str, *, flag: bool = False) -> int | str: ... + + +reveal_type(overload4("0"), expected_text="str") +reveal_type(overload4("0", flag=True), expected_text="int") +reveal_type(overload4("0", flag=False), expected_text="str") + + +def unknown_any() -> Any: ... + + +def func5(a: Any): + reveal_type(overload4(a, flag=False), expected_text="str") + reveal_type(overload4("0", flag=a), expected_text="Unknown") + + +@overload +def overload5(x: list[int]) -> list[int]: ... + + +@overload +def overload5(x: list[str]) -> list[str]: ... + + +def overload5(x: list[str] | list[int]) -> list[str] | list[int]: + return x + + +def func6(y: list[Any]): + reveal_type(overload5(y), expected_text="list[int]") + + +class ClassA(Generic[_T]): + @overload + def m1(self: "ClassA[int]") -> "ClassA[int]": ... + + @overload + def m1(self: "ClassA[str]") -> "ClassA[str]": ... 
+ + def m1(self) -> "ClassA[Any]": + return self + + +def func7(a: ClassA[Any]): + reveal_type(a.m1(), expected_text="ClassA[int]") + + +class ClassB(Generic[_T]): + @overload + def m1(self: "ClassB[int]", obj: "int | ClassB[int]") -> "ClassB[int]": ... + + @overload + def m1(self: "ClassB[str]", obj: "str | ClassB[str]") -> "ClassB[str]": ... + + def m1(self, obj: Any) -> "ClassB[Any]": + return self + + +def func8(b: ClassB[Any]): + reveal_type(b.m1(b), expected_text="ClassB[int]") + + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") + + +@overload +def overload6(a: _T1, /) -> tuple[_T1]: ... + + +@overload +def overload6(a: _T1, b: _T2, /) -> tuple[_T1, _T2]: ... + + +@overload +def overload6(*args: _T1) -> tuple[_T1, ...]: ... + + +def overload6(*args: Any) -> tuple[Any, ...]: + return tuple(args) + + +def func9(*args: int): + reveal_type(overload6(*args), expected_text="tuple[int, ...]") + + +@overload +def overload7(a: float = ..., *, b: Literal[True] = ...) -> float: ... + + +@overload +def overload7(a: float = ..., *, b: bool) -> str: ... + + +def overload7(a: float = 1.0, *, b: bool = True) -> float | str: ... + + +def func10(kwargs_dict: dict[Any, Any]): + reveal_type(overload7(**kwargs_dict), expected_text="Unknown") + + +def func11(kwargs_dict: dict[str, Any]): + reveal_type(overload7(**kwargs_dict), expected_text="Unknown") + + +def func12(kwargs_dict: dict[str, bool]): + reveal_type(overload7(**kwargs_dict), expected_text="str") + + +def func13(kwargs_dict: dict[str, Literal[True]]): + reveal_type(overload7(**kwargs_dict), expected_text="float") + + +def func14(): + reveal_type(overload7(), expected_text="float") + + +def func15(kwargs_dict: dict[str, str]): + # This should generate an error because str isn't a valid type for + # the b parameter. + overload7(1.0, **kwargs_dict) + + +@overload +def overload8(x: int = 3, **kwargs: int) -> int: ... + + +@overload +def overload8(**kwargs: str) -> str: ... 
+ + +def overload8(*args, **kwargs) -> Any: + pass + + +def func16(a: dict[str, Any], i: int): + reveal_type(overload8(x=i, **a), expected_text="int") + reveal_type(overload8(**a), expected_text="Unknown") + + +@overload +def overload9(x: int, y: int) -> int: ... + + +@overload +def overload9(x: float, y: int, z: str) -> float: ... + + +@overload +def overload9(x: object, y: int, z: str, a: None) -> str: ... + + +def overload9(x, y, z="", a=None) -> Any: + pass + + +def func17(a: Any): + reveal_type(overload9(*a), expected_text="Unknown") + reveal_type(overload9(a, *a), expected_text="Unknown") + reveal_type(overload9(1, *a), expected_text="Unknown") + reveal_type(overload9(1.1, *a), expected_text="Unknown") + reveal_type(overload9("", *a), expected_text="str") + + +@overload +def overload10(x: list[int]) -> list[int]: ... + + +@overload +def overload10(x: list[Any]) -> list[Any]: ... + + +def overload10(x) -> Any: + pass + + +def func18(a: Any, b: list[Any], c: list[str], d: list[int]): + reveal_type(overload10(a), expected_text="list[int]") + reveal_type(overload10(b), expected_text="list[int]") + reveal_type(overload10(c), expected_text="list[Any]") + reveal_type(overload10(d), expected_text="list[int]") + + +class ClassC: + @overload + def method1(self, k: Literal["hi"], default: Any) -> float: ... + + @overload + def method1(self, k: str, default: _T) -> Any | _T: ... + + def method1(self, k: str, default: _T) -> Any | _T: ... + + +def func19(a: ClassC, b: list, c: Any): + my_list1: list = [] + v1 = a.method1("hi", my_list1) + reveal_type(v1, expected_text="float") + + v2 = a.method1("hi", b) + reveal_type(v2, expected_text="float") + + v3 = a.method1("hi", c) + reveal_type(v3, expected_text="float") + + my_list2: list[int] = [] + v1 = a.method1("hi", my_list2) + reveal_type(v1, expected_text="float") + + +@overload +def overload11(x: str) -> TypeIs[str]: ... + + +@overload +def overload11(x: int) -> TypeIs[int]: ... 
+ + +def overload11(x: Any) -> Any: + return True + + +def func20(val: Any): + if overload11(val): + reveal_type(val, expected_text="Any") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadCall7.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall7.py new file mode 100644 index 00000000..989e3541 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall7.py @@ -0,0 +1,5 @@ +# This sample tests the case of nested overload resolution where the +# selected overload depends on bidirectional inference. + +l: list[str] = [] +"{s}".format(s="\n".join(sorted(l))) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadCall8.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall8.py new file mode 100644 index 00000000..005ef0dd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall8.py @@ -0,0 +1,9 @@ +# This sample tests the case where the overloads have different +# parameter counts. This particular sample exposed a bug +# in pyright's logic at one point. + +import subprocess + + +def my_method(cmd, *args, **kwargs): + return subprocess.run(cmd, *args, **kwargs) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadCall9.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall9.py new file mode 100644 index 00000000..72207d3d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadCall9.py @@ -0,0 +1,131 @@ +# This sample tests the handling of overloads with a ParamSpec. + +from typing import Callable, Concatenate, overload, TypeVar, ParamSpec + +P = ParamSpec("P") +R = TypeVar("R") + + +def callable1( + func: Callable[P, R], *args: P.args, **kwargs: P.kwargs +) -> Callable[[], R]: ... + + +@overload +def func1() -> None: ... + + +@overload +def func1(a: int) -> None: ... + + +def func1(a: int = 1) -> None: ... 
+ + +callable1(func1) +callable1(func1, 1) +callable1(func1, a=1) + +# This should generate an error because none of the overloads +# captured by the ParamSpec match those arguments. +callable1(func1, 1, 2) + +# This should generate an error because none of the overloads +# captured by the ParamSpec match those arguments. +callable1(func1, b=2) + + +def callable2( + func: Callable[Concatenate[int, P], R], *args: P.args, **kwargs: P.kwargs +) -> Callable[[], R]: ... + + +@overload +def func2() -> None: ... + + +@overload +def func2(a: int) -> int: ... + + +@overload +def func2(a: int, b: str) -> str: ... + + +def func2(a: int = 1, b: str = "") -> None | int | str: ... + + +callable2(func2) +callable2(func2, "") +callable2(func2, b="") + +# This should generate an error because none of the overloads +# captured by the ParamSpec match those arguments. +callable2(func2, 1, "") + + +def callable3(func: Callable[P, R]) -> Callable[Concatenate[int, P], R]: ... + + +c3_2 = callable3(func2) +c3_2(1) +c3_2(1, a=1) +c3_2(1, 1, b="") + +# This should generate an error because none of the overloads +# match these arguments. +c3_2(1, "") + +# This should generate an error because none of the overloads +# match these arguments. +c3_2(1, 1, c="") + + +@overload +def func3(x: int) -> None: ... + + +@overload +def func3(x: str) -> None: ... + + +def func3(x) -> None: + pass + + +def callable4(func: Callable[P, R], *args: P.args, **kwargs: P.kwargs) -> R: ... + + +callable4(func3, 1) +callable4(func3, x=1) +callable4(func3, "") +callable4(func3, x="") + +# This should generate an error. +callable4(func3, 1.0) + +# This should generate two errors because x is missing and y is unknown. +callable4(func3, y=1) + + +@overload +def func4(x: str) -> str: ... + + +@overload +def func4(x: int) -> int: ... 
+ + +def func4(x: str | int): + return x + + +def callable5(f: Callable[P, R]): + def inner(*args: P.args, **kwargs: P.kwargs) -> list[R]: + return [f(*args, **kwargs)] + + return inner + + +callable5(func4)(0) +callable5(func4)("") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadImpl1.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadImpl1.py new file mode 100644 index 00000000..13842efd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadImpl1.py @@ -0,0 +1,262 @@ +# This sample tests the verification that overload implementation signatures +# are a superset of their associated overload signatures. + +from typing import ( + Any, + Awaitable, + Callable, + Generic, + Iterable, + Literal, + NoReturn, + TypeVar, + overload, +) + +T = TypeVar("T") + + +@overload +def func1(a: int) -> str: ... + + +@overload +def func1(a: str) -> int: ... + + +# This should generate two errors: +# The first is because of an incompatibility with overload 1 +# because the input parameter type is incompatible. +# This second is because of an incompatibility with overload 2 +# because the return type is incompatible. +def func1(a: str) -> str: + return a + + +@overload +def func2(a: int, b: str = ...) -> str: ... + + +@overload +def func2(a: None) -> str: ... + + +# This should generate an error because the parameter "b" is missing +# from the implementation but is required by overload 1. +def func2(a: int | None) -> str: ... + + +@overload +def func3(a: int, *, b: Literal["r"]) -> str: ... + + +@overload +def func3(a: int, *, b: Literal["b"]) -> bytes: ... + + +def func3(*args: Any, **kwargs: Any) -> Any: ... + + +@overload +def func4(a: None) -> None: ... + + +@overload +def func4(a: list[T]) -> T: ... + + +def func4(a: list[T] | None) -> T | None: ... + + +class ClassA: + @overload + def method4(self, a: None) -> None: ... + + @overload + def method4(self, a: list[T]) -> T: ... 
+ + def method4(self, a: list[T] | None) -> T | None: ... + + +@overload +def func5(a: list[T]) -> T: ... + + +@overload +def func5(a: None) -> None: ... + + +# This should generate an error because list is not compatible with dict. +def func5(a: dict[Any, Any] | None) -> Any | None: ... + + +@overload +def func6(foo: int, /) -> int: ... + + +@overload +def func6(bar: str, /) -> int: ... + + +def func6(p0: int | str, /) -> int: + return 3 + + +class ClassB(Generic[T]): + @overload + def method1(self: "ClassB[None]") -> None: ... + + @overload + def method1(self, value: T) -> None: ... + + def method1(self, value: Any = None) -> None: ... + + +class ClassC: ... + + +class ClassD: ... + + +T_CD = TypeVar("T_CD", ClassC, ClassD) + + +@overload +def func7(cls: type[ClassC], var: int) -> ClassC: ... + + +@overload +def func7(cls: type[ClassD], var: str) -> ClassD: ... + + +def func7(cls: type[T_CD], var: int | str) -> T_CD: + return cls() + + +T_str = TypeVar("T_str", bound=str) + + +@overload +def func8(foo: int) -> int: ... + + +@overload +def func8(foo: T_str) -> tuple[T_str]: ... + + +def func8(foo: T_str | int) -> tuple[T_str] | int: ... + + +class ClassE: ... + + +T_E = TypeVar("T_E", bound=ClassE) + + +@overload +def func9() -> None: ... + + +@overload +def func9(bar: T_E) -> T_E: ... + + +def func9(bar: T_E | None = None) -> T_E | None: + raise NotImplementedError + + +T_int_str = TypeVar("T_int_str", int, str) + + +@overload +def func10(option: Literal["a"], var: str) -> str: ... + + +@overload +def func10(option: Literal["b"], var: int) -> str: ... + + +# This should generate an error. +def func10(option: Literal["a", "b"], var: T_int_str) -> T_int_str: ... + + +class ClassF: ... + + +T_F = TypeVar("T_F", bound=type[ClassF]) + + +@overload +def func11(var: T_F) -> T_F: ... + + +@overload +def func11(var: int) -> int: ... + + +def func11(var: T_F | int) -> T_F | int: ... 
+ + +T7 = TypeVar("T7") +T8 = TypeVar("T8") +T9 = TypeVar("T9") + + +@overload +def func12( + func: Callable[[T7], T8], iterable: Iterable[T7], default_value: None = None, / +) -> Iterable[T8 | None]: ... + + +@overload +def func12( + func: Callable[[T7], T8], iterable: Iterable[T7], /, default_value: T9 +) -> Iterable[T8 | T9]: ... + + +def func12( + func: Callable[[T7], T8], + iterable: Iterable[T7], + /, + default_value: T9 = None, +) -> Iterable[T8 | T9]: ... + + +@overload +def func13(x: int) -> NoReturn: ... + + +@overload +def func13(x: str) -> str | NoReturn: ... + + +def func13(x: int | str) -> str: ... + + +class ClassG(Generic[T]): ... + + +@overload +def func14(target: Callable[..., Awaitable[T]]) -> ClassG[T]: ... + + +@overload +def func14(target: Callable[..., T]) -> ClassG[T]: ... + + +def func14( + target: Callable[..., Awaitable[T]] | Callable[..., T], +) -> ClassG[T]: ... + + +@overload +def func15(client_id: str, client_secret: str, /) -> None: ... + + +@overload +def func15(client_id: str, client_secret: str) -> None: ... + + +# This should generate an error because some of the keyword arguments are not present. +def func15(*creds: str) -> None: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadImpl2.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadImpl2.py new file mode 100644 index 00000000..ba69d207 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadImpl2.py @@ -0,0 +1,146 @@ +# This sample tests the verification that overload implementation signatures +# are a superset of their associated overload signatures. 
+
+from typing import (
+    Any,
+    Callable,
+    Generic,
+    Literal,
+    ParamSpec,
+    Protocol,
+    TypeVar,
+    TypeVarTuple,
+    overload,
+)
+
+
+T = TypeVar("T")
+T_contra = TypeVar("T_contra", contravariant=True)
+T_co = TypeVar("T_co", covariant=True)
+TCall = TypeVar("TCall", bound=Callable[..., Any])
+R = TypeVar("R")
+P = ParamSpec("P")
+Ts = TypeVarTuple("Ts")
+
+
+class ClassA(Protocol[T_co]):
+    _target_: str
+
+
+class ClassB(ClassA[T_co], Protocol[T_co, P]):
+    def __init__(self, *args: P.args, **kwds: P.kwargs): ...
+
+
+class ClassC(Protocol):
+    # This should generate an overlapping overload error.
+    @overload
+    def __call__(
+        self,
+        x: Callable[P, R],
+        *,
+        sig: Literal[True] = ...,
+    ) -> ClassB[type[R], P]: ...
+
+    @overload
+    def __call__(
+        self, x: TCall, *, sig: Literal[False] = ...
+    ) -> ClassA[type[TCall]]: ...
+
+    @overload
+    def __call__(
+        self, x: TCall | Callable[P, R], *, sig: bool
+    ) -> ClassA[type[TCall]] | ClassB[type[R], P]: ...
+
+    def __call__(
+        self, x: TCall | Callable[P, R], *, sig: bool = False
+    ) -> ClassA[type[TCall]] | ClassB[type[R], P]: ...
+
+
+Func = Callable[[*Ts], None]
+
+
+@overload
+def func1(function: Func[*Ts]) -> Func[*Ts]: ...
+
+
+@overload
+def func1() -> Callable[[Func[*Ts]], Func[*Ts]]: ...
+
+
+def func1(
+    function: Func[*Ts] | None = None,
+) -> Func[*Ts] | Callable[[Func[*Ts]], Func[*Ts]]: ...
+
+
+@overload
+def func2(d: dict[str, float], /) -> None: ...
+
+
+@overload
+def func2(**kwargs: float) -> None: ...
+
+
+def func2(d: dict[str, float] | None = None, /, **kwargs: float) -> None:
+    pass
+
+
+@overload
+def func3(a: int) -> int: ...
+
+
+@overload
+def func3(*args: int) -> int: ...
+
+
+# This should generate an error because the keyword parameter "a" is missing.
+def func3(*args: int) -> int: ...
+
+
+@overload
+def func4(a: int) -> int: ...
+
+
+@overload
+def func4(*args: int) -> int: ...
+
+
+def func4(*args: int, a: int = 1) -> int: ...
+
+
+@overload
+def func5(a: int) -> int: ...
+ + +@overload +def func5(*args: int) -> int: ... + + +def func5(*args: int, **kwargs: int) -> int: ... + + +@overload +def func6(x: tuple[()], /) -> None: ... + + +@overload +def func6(x: tuple[object], /) -> None: ... + + +def func6(x: tuple[object, ...], /) -> None: ... + + +class ClassD(Generic[T_contra]): + def method(self, x: T_contra) -> int: + assert False + + +@overload +def func7(x: None) -> int: ... + + +@overload +def func7(x: ClassD[T]) -> int: ... + + +def func7(x: ClassD[T] | None) -> int: + assert False diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadOverlap1.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadOverlap1.py new file mode 100644 index 00000000..ce8f6876 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadOverlap1.py @@ -0,0 +1,511 @@ +# This sample tests the type checker's detection of overlapping +# overload declarations. + +from typing import ( + Any, + AnyStr, + Callable, + Concatenate, + Generic, + Literal, + ParamSpec, + Protocol, + Sequence, + TypeVar, + overload, +) + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_P = ParamSpec("_P") + + +@overload +def func1(a: float, b: float | None, c: bool | None = None) -> int: ... + + +# This should generate an error because the overload is obscured. +@overload +def func1(a: int, b: int) -> int: ... + + +@overload +def func1(a: int, b: int, *, named: int = 3) -> int: ... + + +# This should generate an error because the overload is obscured. +@overload +def func1(a: int, b: int, *, named: int) -> int: ... + + +@overload +def func1(a: complex, b: int) -> int: ... + + +def func1(*args: Any, **kwargs: Any) -> Any: + pass + + +@overload +def func2(a: int, b: Any) -> int: + """Overload""" + + +# This should generate an error because the overload is obscured. 
+@overload +def func2(a: int, b: int) -> int: + """Overload""" + + +def func2(*args: Any, **kwargs: Any) -> Any: + pass + + +@overload +def func3(a: int, b: int) -> int: ... + + +@overload +def func3(a: int, b: int, **c: Any) -> int: ... + + +@overload +def func3(a: int, b: Any) -> int: ... + + +def func3(*args: Any, **kwargs: Any) -> Any: + pass + + +@overload +def func4(a: int, *, c: int, b: int) -> int: ... + + +# This should generate an error because the overload is obscured. +@overload +def func4(a: int, *, b: int, c: int) -> int: ... + + +def func4(*args: Any, **kwargs: Any) -> Any: + pass + + +# This should generate an error because the overload is overlapping +# in an unsafe way (i.e. returns an incompatible type). +@overload +def func5(a: int, b: int) -> int: ... + + +@overload +def func5(a: float, b: float = 3.4, *c: int, d: float = 4.5) -> str: ... + + +def func5(*args: Any, **kwargs: Any) -> Any: + pass + + +class GenericClass(Generic[_T1, _T2]): + @overload + def method1(self, a: _T1, b: tuple[_T2, ...]) -> int: ... + + @overload + def method1(self, a: _T1, b: tuple[Any, ...]) -> int: ... + + def method1(self, *args: Any, **kwargs: Any) -> Any: ... + + @overload + def method2(self, a: _T2, b: int) -> int: ... + + @overload + def method2(self, a: _T1, b: _T2) -> int: ... + + def method2(self, *args: Any, **kwargs: Any) -> Any: + pass + + +class Parent: ... + + +class Child(Parent): ... + + +# Test 1: Literal subtype + + +# This should generate an error because the overload is overlapping +# in an unsafe way (i.e. returns an incompatible type). +@overload +def func10(x: Literal[3]) -> int: ... + + +@overload +def func10(x: int) -> str: ... + + +def func10(*args: Any, **kwargs: Any) -> Any: + pass + + +# Test 2: Subclass subtype + + +# This should generate an error because the overload is overlapping +# in an unsafe way (i.e. returns an incompatible type). +@overload +def func11(x: Child) -> str: ... + + +@overload +def func11(x: Parent) -> int: ... 
+ + +def func11(*args: Any, **kwargs: Any) -> Any: + pass + + +# Test 3: Implicit subtype + + +# This should generate an error because the overload is overlapping +# in an unsafe way (i.e. returns an incompatible type). +@overload +def func12(x: int) -> str: ... + + +@overload +def func12(x: float) -> int: ... + + +def func12(*args: Any, **kwargs: Any) -> Any: + pass + + +# Test 4: Union subtype + + +# This should generate an error because the overload is overlapping +# in an unsafe way (i.e. returns an incompatible type). +@overload +def func13(x: int) -> str: ... + + +@overload +def func13(x: int | str) -> int: ... + + +def func13(*args: Any, **kwargs: Any) -> Any: + pass + + +# Test 5: non-matching keyword argument + + +# This should generate an error because the overload is overlapping +# in an unsafe way (i.e. returns an incompatible type). +@overload +def func14(x: int, *, cls: str, **kwargs: Any) -> int: ... + + +@overload +def func14(x: int, **kwargs: Any) -> str: ... + + +def func14(*args: Any, **kwargs: Any) -> Any: + pass + + +# Test 6: non-matching keyword argument (shouldn't generate error) +@overload +def func15(cls: str, **kwargs: Any) -> int: ... + + +@overload +def func15(**kwargs: Any) -> str: ... + + +def func15(*args: Any, **kwargs: Any) -> Any: + pass + + +@overload +def func16(var: None) -> list[Any]: ... + + +@overload +def func16(var: _T1) -> list[_T1]: ... + + +def func16(var: _T1 | None) -> list[_T1] | list[Any]: ... + + +@overload +def func17(a: int, b: list[int]) -> int: ... + + +@overload +def func17(a: int, b: list[_T1]) -> _T1: ... + + +def func17(*args: Any, **kwargs: Any) -> Any: + pass + + +class ClassA(Generic[_T1]): + @overload + def __call__(self, f: _T1) -> _T1: ... + + @overload + def __call__(self, f: _T1 | None) -> _T1: ... + + def __call__(self, f: _T1 | None) -> _T1: ... + + +class ClassB: + @overload + def method1(self, x: type[Any]) -> bool: ... + + @overload + def method1(self, x: Any) -> str | bool: ... 
+ + def method1(self, x: Any) -> Any: ... + + +class ClassC: + @overload + def method1(self, x: type) -> bool: ... + + @overload + def method1(self, x: Any) -> str | bool: ... + + def method1(self, x: Any) -> Any: ... + + +@overload +def func18(s: Sequence[_T1], extra: Literal[False]) -> list[_T1]: ... + + +@overload +def func18(s: Sequence[_T1], extra: Literal[True]) -> list[_T1] | tuple[_T1]: ... + + +@overload +def func18(s: Sequence[_T1], extra: bool) -> list[_T1] | tuple[_T1]: ... + + +def func18(s: Sequence[_T1], extra: bool) -> list[_T1] | tuple[_T1]: ... + + +class DProto1(Protocol): + def __radd__(self, other: Any, /) -> Any: ... + + +class DProto2(Protocol): + def __radd__(self: _T1, other: Any, /) -> _T1: ... + + +@overload +def func19(a: Any, b: DProto2) -> DProto2: ... + + +@overload +def func19(a: Any, b: DProto1) -> Any: ... + + +def func19(a: Any, b: Any) -> Any: + return a + b + + +AllStr = bytes | str + + +@overload +def func20(choices: AnyStr) -> AnyStr: ... + + +@overload +def func20(choices: AllStr) -> AllStr: ... + + +def func20(choices: AllStr) -> AllStr: ... + + +# This should generate an overlapping overload error. +@overload +def func21(self, p1: int | set[int], /) -> str: ... + + +@overload +def func21(self, p1: int | list[int], /) -> int: ... + + +def func21(self, p1: int | set[int] | list[int], /) -> str | int: + return "" + + +@overload +def func22(self, p1: str | set[int], /) -> str: ... + + +@overload +def func22(self, p1: int | list[int], /) -> int: ... + + +def func22(self, p1: str | int | set[int] | list[int], /) -> str | int: + return "" + + +@overload +def func23(f: Callable[Concatenate[_T1, _P], _T2]) -> int: ... + + +@overload +def func23(f: Callable[_P, _T2]) -> int: ... + + +def func23(f: Any) -> int: + return 1 + + +class E1: ... + + +TE1 = TypeVar("TE1", bound=E1) + + +class E2(E1, Generic[TE1]): ... + + +class E3(E1): ... + + +TE3 = TypeVar("TE3", bound=E3) + + +class E4(E1): ... 
+ + +class ClassE(E2[E4], Generic[TE1]): + @overload + def unpack(self: "ClassE[E2[TE3]]") -> Sequence[TE3]: ... + + @overload + def unpack(self) -> Sequence[Any]: ... + + def unpack(self) -> Sequence[Any]: ... + + +@overload +def func24(f: Callable[[int], str]) -> int: ... + + +@overload +def func24(f: Callable[[_T1], str]) -> float: ... + + +def func24(f: Any) -> Any: + return f + + +_TInt = TypeVar("_TInt", bound=int) + + +@overload +def func25(val: _TInt) -> list[_TInt]: ... + + +@overload +def func25(val: str) -> int: ... + + +def func25(val: Any) -> Any: ... + + +@overload +def func26(val: _T1) -> list[_T1]: ... + + +# This should generate an error because it will never be used. +@overload +def func26(val: str) -> int: ... + + +def func26(val: Any) -> Any: ... + + +@overload +# This should generate an error because it is partially overlapping. +def func27(bar: None = ...) -> None: ... +@overload +def func27(bar: bool = ...) -> bool: ... +def func27(bar: bool | None = None) -> bool | None: ... + + +@overload +# This should generate an error because it is partially overlapping. +def func28(a: int, /, b: None = ...) -> None: ... +@overload +def func28(a: int, /) -> bool: ... +def func28(a: int, /, b: bool | None = None) -> bool | None: ... + + +class CBProto29(Protocol): + def __call__(self, *args: Any) -> Any: ... + + +@overload +def func29(func: CBProto29) -> None: ... + + +@overload +def func29(func: Callable[..., Any]) -> None: ... + + +def func29(func: Any) -> None: ... + + +class CBProto30(Protocol): + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + + +@overload +def func30(func: CBProto30) -> None: ... + + +@overload +# This should generate an error because this overload will never be used. +def func30(func: Callable[..., Any]) -> None: ... + + +def func30(func: Any) -> None: ... + + +@overload +# This should generate an error because of a partial overlap. +def func31(*args: Any, a: int = ...) -> int: ... 
+@overload +def func31(*args: Any, a: str = ...) -> str: ... +def func31(*args: Any, a: int | str = 0) -> int | str: ... + + +type NestedList[V] = V | list[V | NestedList[V]] + + +@overload +def func32[V](n: list[NestedList[V]]) -> list[V]: ... +@overload +def func32(n: list[NestedList[Any]]) -> list[Any]: ... +def func32(n: Any) -> Any: ... + + +@overload +def func33(a: int, /, *args: str) -> None: ... +@overload +def func33(*args: str) -> None: ... +def func33(*args: int | str) -> None: ... + + +@overload +def func34(fn: Callable[[], Any]) -> None: ... +@overload +def func34[**P](fn: Callable[P, Any]) -> None: ... +def func34(fn: ...) -> ...: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/overloadOverride1.py b/python-parser/packages/pyright-internal/src/tests/samples/overloadOverride1.py new file mode 100644 index 00000000..5fb09970 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/overloadOverride1.py @@ -0,0 +1,56 @@ +# This sample tests the case where a base class method is overridden +# in a derived class with an overloaded method. + +# pyright: reportIncompatibleMethodOverride=true + +from typing import overload + + +class Base1: + def foo(self, x: int) -> int: + return x + + +class Derived1(Base1): + @overload + def foo(self, x: int) -> int: ... + + @overload + def foo(self, x: str) -> str: ... + + def foo(self, x: int | str) -> int | str: + return x + + +class Base2: + def foo(self, x: int | str) -> int | str: + return x + + +class Derived2(Base2): + @overload + def foo(self, x: int) -> int: ... + + @overload + def foo(self, x: str) -> str: ... + + def foo(self, x: int | str) -> int | str: + return x + + +class Base3: + def foo(self, x: int) -> int: + return x + + +class Derived3(Base3): + @overload + def foo(self, x: float) -> float: ... + + @overload + def foo(self, x: str) -> str: ... 
+ + # This should generate an error because no overloaded signature + # is compatible with the base method, nor is the implementation. + def foo(self, x: int | str | float) -> int | str | float: + return x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/override1.py b/python-parser/packages/pyright-internal/src/tests/samples/override1.py new file mode 100644 index 00000000..99ab73d1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/override1.py @@ -0,0 +1,121 @@ +# This sample tests the handling of the @override decorator as described +# in PEP 698. + +from typing import Callable, Protocol +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Any, + overload, + override, +) + + +class ClassA: + def method1(self) -> None: + pass + + +class ClassB: + def method3(self) -> None: + pass + + @overload + def method5(self, x: int) -> int: ... + + @overload + def method5(self, x: str) -> str: ... + + def method5(self, x: int | str) -> int | str: ... + + +class ClassC(ClassA, ClassB): + @property + @override + # This should generate an error because prop_a doesn't + # override anything in its base class. + def prop_a(self) -> int: + raise NotImplementedError + + @override + def method1(self) -> None: + pass + + def method2(self) -> None: + pass + + @override + def method3(self) -> None: + pass + + @override + # This should generate an error because method3 does not + # override anything in a base class. + def method4(self) -> None: + pass + + @overload + def method5(self, x: int) -> int: ... + + @overload + def method5(self, x: str) -> str: ... + + @override + def method5(self, x: int | str) -> int | str: ... + + @overload + def method6(self, x: int) -> int: ... + + @overload + def method6(self, x: str) -> str: ... + + @override + # This should generate an error because method6 does not + # override anything in a base class. + def method6(self, x: int | str) -> int | str: ... 
+ + +class ClassD(Any): ... + + +class ClassE(ClassD): + @override + def method1(self) -> None: + pass + + +def evil_wrapper(func: Callable[..., Any], /): + def wrapped(*args: Any, **kwargs: Any) -> Any: + raise NotImplementedError + + return wrapped + + +class F: + def method1(self): + pass + + +class G(F): + @override + @evil_wrapper + def method1(self): + pass + + +class H(Protocol): + pass + + +class I(H, Protocol): + @override + # This should generate an error because method1 isn't present + # in the base. + def method1(self): + pass + + @overload + @override + # This should generate an error because method2 isn't present + # in the base. + def method2(self, x: int) -> int: ... + @overload + def method2(self, x: str) -> str: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/override2.py b/python-parser/packages/pyright-internal/src/tests/samples/override2.py new file mode 100644 index 00000000..530a7f97 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/override2.py @@ -0,0 +1,46 @@ +# This sample tests the reportImplicitOverride diagnostic check +# (strict enforcement of PEP 698). + +from typing import Any, Callable +from typing_extensions import override # pyright: ignore[reportMissingModuleSource] + + +def evil_wrapper(func: Callable[..., Any], /): + def wrapped(*args: Any, **kwargs: Any) -> Any: + raise NotImplementedError + + return wrapped + + +class Base: + @override + def __init__(self): + pass + + def method1(self): + pass + + @property + def prop_c(self) -> int: + return 0 + + def method2(self): + pass + + +class Child(Base): + def __init__(self): + pass + + # This should generate an error if reportImplicitOverride is enabled. + def method1(self): + pass + + @property + # This should generate an error if reportImplicitOverride is enabled. 
+ def prop_c(self) -> int: + return 0 + + @evil_wrapper + def method2(self): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/package1/__init__.py b/python-parser/packages/pyright-internal/src/tests/samples/package1/__init__.py new file mode 100644 index 00000000..18d86438 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/package1/__init__.py @@ -0,0 +1,3 @@ + +def foo(): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/package1/psyche.py b/python-parser/packages/pyright-internal/src/tests/samples/package1/psyche.py new file mode 100644 index 00000000..5eb20657 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/package1/psyche.py @@ -0,0 +1,4 @@ + +def psyche1() -> int: + return 3 + \ No newline at end of file diff --git a/python-parser/packages/pyright-internal/src/tests/samples/package1/psyche/pysche.py b/python-parser/packages/pyright-internal/src/tests/samples/package1/psyche/pysche.py new file mode 100644 index 00000000..cdc461dc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/package1/psyche/pysche.py @@ -0,0 +1,4 @@ + +def psyche1() -> str: + return "3" + \ No newline at end of file diff --git a/python-parser/packages/pyright-internal/src/tests/samples/package1/sub.py b/python-parser/packages/pyright-internal/src/tests/samples/package1/sub.py new file mode 100644 index 00000000..6a26b50c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/package1/sub.py @@ -0,0 +1,4 @@ + +def subfoo() -> str: + return 'hello' + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/package1/sub/__init__.py b/python-parser/packages/pyright-internal/src/tests/samples/package1/sub/__init__.py new file mode 100644 index 00000000..99004bc6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/package1/sub/__init__.py @@ -0,0 +1,5 @@ + +from datetime import datetime + +def 
subfoo() -> datetime: + return datetime.now() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/package2/__init__.py b/python-parser/packages/pyright-internal/src/tests/samples/package2/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/package2/module1.py b/python-parser/packages/pyright-internal/src/tests/samples/package2/module1.py new file mode 100644 index 00000000..b4d20b49 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/package2/module1.py @@ -0,0 +1 @@ +a1 = 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/package2/module2.py b/python-parser/packages/pyright-internal/src/tests/samples/package2/module2.py new file mode 100644 index 00000000..a4c566c6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/package2/module2.py @@ -0,0 +1 @@ +a2 = 2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/package2/module3.py b/python-parser/packages/pyright-internal/src/tests/samples/package2/module3.py new file mode 100644 index 00000000..a2006451 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/package2/module3.py @@ -0,0 +1 @@ +a3 = 3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramInference1.py b/python-parser/packages/pyright-internal/src/tests/samples/paramInference1.py new file mode 100644 index 00000000..0921c960 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramInference1.py @@ -0,0 +1,46 @@ +# This sample tests the logic that infers parameter types based on +# default argument values or annotated base class methods. + + +class Parent: + def __init__(self, a: int, b: str): ... + + def func1(self, a: int, b: str) -> float: ... 
+ + +class Child(Parent): + def __init__(self, a, b): + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="str") + + def func1(self, a, b): + reveal_type(self, expected_text="Self@Child") + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="str") + return a + + +def func2(a, b=0, c=None): + reveal_type(a, expected_text="Unknown") + reveal_type(b, expected_text="int") + reveal_type(c, expected_text="Unknown | None") + + +def func3(a=(1, 2), b=[1, 2], c={1: 2}): + reveal_type(a, expected_text="Unknown") + reveal_type(b, expected_text="Unknown") + reveal_type(c, expected_text="Unknown") + + +class _Undefined: + pass + + +Undefined = _Undefined() + + +def func4(a=1, b=None, c=Undefined, d=lambda x: x): + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="Unknown | None") + reveal_type(c, expected_text="_Undefined | Unknown") + reveal_type(d, expected_text="Unknown") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramInference2.py b/python-parser/packages/pyright-internal/src/tests/samples/paramInference2.py new file mode 100644 index 00000000..07ea15e0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramInference2.py @@ -0,0 +1,36 @@ +# This sample tests the logic that infers parameter types based on +# annotated base class methods when the base class is generic. + +# pyright: reportIncompatibleMethodOverride=false + +from typing import Callable, Generic, ParamSpec, TypeVar + +T = TypeVar("T") +P = ParamSpec("P") +R = TypeVar("R") + + +class Parent1(Generic[T]): + def method1(self, a: T, b: list[T]) -> None: ... 
+ + +class Child1(Parent1[float]): + def method1(self, a, b): + reveal_type(self, expected_text="Self@Child1") + reveal_type(a, expected_text="float") + reveal_type(b, expected_text="list[float]") + return a + + +class Parent2: + def method1(self, fn: Callable[P, R], *args: P.args, **kwargs: P.kwargs) -> R: + return fn(*args, **kwargs) + + +class Child2(Parent2): + def method1(self, fn, *args, **kwargs): + reveal_type(self, expected_text="Self@Child2") + reveal_type(fn, expected_text="(...) -> Unknown") + reveal_type(args, expected_text="tuple[Unknown, ...]") + reveal_type(kwargs, expected_text="dict[str, Unknown]") + return super().method1(fn, *args, **kwargs) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramNames1.py b/python-parser/packages/pyright-internal/src/tests/samples/paramNames1.py new file mode 100644 index 00000000..3244eaf4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramNames1.py @@ -0,0 +1,89 @@ +# This sample tests the reportSelfClsParameterName setting. + + +from typing import overload + + +def foo(): + pass + + +class Class1: + # This should generate an error or warning if the setting + # is enabled because __new__ is expected to take cls. + def __new__(blah): + return super().__new__(blah) + + # This should generate an error or warning if the setting + # is enabled because it's missing a "self" parameter. + def foo1(): + return 3 + + # This should generate an error or warning if the setting + # is enabled because "self" is misspelled. + def foo2(seeeelf): + return 4 + + # This should generate an error or warning if the setting + # is enabled because "self" is misspelled. + def foo3(cls): + return 4 + + @classmethod + def foo4(cls): + return 4 + + @classmethod + # This should generate an error or warning if the setting + # is enabled because "cls" is expected. 
+ def foo5(self): + return 4 + + @overload + # This should generate an error or warning if the setting + # is enabled because "self" is expected. + def foo6(x: "Class1") -> int: ... + + @overload + # This should generate an error or warning if the setting + # is enabled because "self" is expected. + def foo6(x: int) -> str: ... + + # This should generate an error or warning if the setting + # is enabled because "self" is expected. + def foo6(x) -> int | str: ... + + @classmethod + # This should generate an error or warning if the setting + # is enabled because this isn't a metaclass. + def foo7(mcls): + return 4 + + +class Metaclass(type): + def __new__(mcls): ... + + # This should not generate a error because the class derives + # from type and is assumed to be a metaclass. + def foo1(cls): + return 3 + + # This should generate an error. + def foo2(mcls): + return 3 + + def foo3(self): + return 3 + + @classmethod + def foo4(cls): + return 3 + + @classmethod + def foo5(metacls): + return 3 + + # This should generate an error. + @classmethod + def foo6(bar): + return 3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec1.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec1.py new file mode 100644 index 00000000..f3d58afb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec1.py @@ -0,0 +1,60 @@ +# This sample tests error conditions for ParamSpec (PEP 612). + +from typing import Any, Callable, ParamSpec, Protocol, cast + +P = ParamSpec("P") + + +# This should generate an error because ParamSpecs +# can't be used as a type annotation. +def func1(a: P) -> int: + return 1 + + +a = 3 + +# This should generate an error. +b = cast(P, a) + +func1(1) + + +def func2(x: Callable[P, Any]): + # This should generate an error. + c: list[P] = [] + + d: Callable[P, int] + + # This should generate an error. + e: Callable[P, P] + + # This should generate an error. 
+ f: Callable[[P], int] + + # This should generate an error. + g: tuple[P] + + +class SomeWrapper(Protocol[P]): + def __call__(self, *args: P.args, **kwargs: P.kwargs): ... + + +# This should generate an error because P cannot be used with other +# type arguments. +def func3(x: SomeWrapper[P, int]): + pass + + +# This should generate an error because P cannot be used with other +# type arguments. +def func4(x: SomeWrapper[[P, int]]): + pass + + +def func5(x: SomeWrapper[P]): + pass + + +# This form is considered an error. +def func6(x: SomeWrapper[[P]]): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec10.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec10.py new file mode 100644 index 00000000..3cb6447c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec10.py @@ -0,0 +1,54 @@ +# This sample tests the use of ParamSpec along with Concatenate in +# a return type. + +from typing import Callable, Protocol, TypeVar, Concatenate, ParamSpec +from threading import RLock +import functools + + +class HasLock(Protocol): + _lock: RLock + + +S = TypeVar("S", bound=HasLock) +P = ParamSpec("P") +R = TypeVar("R") + + +def with_lock(func: Callable[Concatenate[S, P], R]) -> Callable[Concatenate[S, P], R]: + @functools.wraps(func) + def wrapper(self: S, *args: P.args, **kwargs: P.kwargs) -> R: + with self._lock: + return func(self, *args, **kwargs) + + return wrapper + + +class MyClass: + def __init__(self): + self._lock = RLock() + + @with_lock + def test_1(self, param1: int) -> str: ... + + @with_lock + def test_2(self) -> str: ... + + +@with_lock +def test_3(cls: MyClass, param1: int) -> str: ... 
+ + +testClass = MyClass() + +res1 = testClass.test_1(42) +reveal_type(res1, expected_text="str") + +res2 = testClass.test_2() +reveal_type(res2, expected_text="str") + +res3 = test_3(testClass, 42) +reveal_type(res3, expected_text="str") + +res4: Callable[[MyClass, int], str] = with_lock(test_3) +reveal_type(res4, expected_text="(MyClass, int) -> str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec11.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec11.py new file mode 100644 index 00000000..7633da8f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec11.py @@ -0,0 +1,36 @@ +# This sample tests the handling of generic classes that are parameterized +# using a ParamSpec. + +from typing import Callable, Generic, TypeVar, ParamSpec + + +_P = ParamSpec("_P") +_R = TypeVar("_R") + + +class MyDecorator(Generic[_P, _R]): + def __init__(self, function: Callable[_P, _R]): + self.function = function + + def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: + print("Inside Function Call") + return self.function(*args, **kwargs) + + def do_stuff(self, name: str, *args: _P.args, **kwargs: _P.kwargs) -> int: + return 0 + + +@MyDecorator +def func1(x: int, y: int, *, z: int): + return x + y + + +func1(6, 6, z=6) + + +@MyDecorator +def func2(*, a: int): + pass + + +func2.do_stuff("hi", a=4) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec12.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec12.py new file mode 100644 index 00000000..dc0dbe44 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec12.py @@ -0,0 +1,87 @@ +# This sample tests various error conditions for ParamSpec usage. 
+ +from typing import ( + Annotated, + Any, + Callable, + Mapping, + Sequence, + TypeVar, + ParamSpec, + Union, +) + +P = ParamSpec("P") +R = TypeVar("R") + + +def puts_p_into_scope(f: Callable[P, int]) -> None: + def inner1(*args: P.args, **kwargs: P.kwargs) -> None: + pass + + def inner2(*args: "P.args", **kwargs: P.kwargs) -> None: + pass + + def inner3(*args: P.args, **kwargs: "P.kwargs") -> None: + pass + + def inner4(*args: "P.args", **kwargs: "P.kwargs") -> None: + pass + + def inner5(*args: Annotated[P.args, ""], **kwargs: "P.kwargs") -> None: + pass + + def inner6(*args: P.args, **kwargs: Annotated["P.kwargs", ""]) -> None: + pass + + # This should generate two errors because P.kwargs cannot be + # used with *args and P.args cannot be used with **kwargs. + def mixed_up(*args: P.kwargs, **kwargs: P.args) -> None: + pass + + # This should generate an error because P.args cannot be used + # with a simple parameter. + def misplaced(x: P.args) -> None: + pass + + # This should generate an error + stored_args: P.args + + # This should generate an error + stored_kwargs: P.kwargs + + # This should generate an error because P.args cannot be used + # without P.kwargs. + def just_args(*args: P.args) -> None: + pass + + # This should generate an error because P.kwargs cannot be used + # without P.args. + def just_kwargs(**kwargs: P.kwargs) -> None: + pass + + # This should generate two errors because P.args and P.kwargs cannot be used in + # a union. + def union_args1( + *args: P.args | Sequence[Any], **kwargs: P.kwargs | Mapping[str, Any] + ) -> None: + pass + + # This should generate two errors because P.args cannot be used in + # a union. + def union_args2( + *args: P.args | Sequence[Any], **kwargs: Union[P.kwargs, Mapping[str, Any]] + ) -> None: + pass + + +# This should generate an error because P is not defined in this context. 
+def out_of_scope(*args: P.args, **kwargs: P.kwargs) -> None: + pass + + +# This should generate an error because ParamSpec isn't allowed in this context +out_of_scope_var2: P = 12 + +# This should generate an error because P isn't allowed in this context. +out_of_scope_var3: P.args = 12 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec13.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec13.py new file mode 100644 index 00000000..9d160bfb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec13.py @@ -0,0 +1,129 @@ +# This sample tests cases where a ParamSpec is used as a type parameter +# for a generic type alias, a generic function, and a generic class. + +import asyncio +from typing import ( + Any, + Callable, + Concatenate, + Coroutine, + Generic, + ParamSpec, + TypeAlias, + TypeVar, +) + +_P = ParamSpec("_P") +_R = TypeVar("_R") +_T = TypeVar("_T") + + +AddIntParam = Callable[Concatenate[int, _P], _T] + + +def func1(func: Callable[_P, _R]) -> AddIntParam[_P, _R]: ... + + +def func2(a: str, b: list[int]) -> str: ... + + +v1 = func1(func2) +reveal_type(v1, expected_text="(int, a: str, b: list[int]) -> str") + +# This should generate an error because 'int' isn't assignable to +# ParamSpec _P. +X = AddIntParam[int, int] + + +class RemoteResponse(Generic[_T]): ... + + +class RemoteFunction(Generic[_P, _R]): + def __init__(self, func: Callable[_P, _R]) -> None: ... + + def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... + + def remote(self, *args: _P.args, **kwargs: _P.kwargs) -> RemoteResponse[_R]: ... 
+ + +r1 = RemoteFunction(func2) +reveal_type(r1, expected_text="RemoteFunction[(a: str, b: list[int]), str]") + +v2 = r1("hi", []) +reveal_type(v2, expected_text="str") + +v3 = r1.remote("hi", []) +reveal_type(v3, expected_text="RemoteResponse[str]") + +# This should generate an error +r1(1, []) + +# This should generate an error +r1("hi") + +# This should generate an error +r1.remote(1, []) + +# This should generate an error because 'int' is not assignable +# to ParamSpec _P. +A = RemoteFunction[int, int] + + +def remote(func: Callable[_P, _R]) -> RemoteFunction[_P, _R]: ... + + +v4 = remote(func2) +reveal_type(v4, expected_text="RemoteFunction[(a: str, b: list[int]), str]") + + +Coro = Coroutine[Any, Any, _T] +CoroFunc = Callable[_P, Coro[_T]] + + +class ClassA: ... + + +CheckFunc = CoroFunc[Concatenate[ClassA, _P], bool] + + +async def my_check_func(obj: ClassA, a: int, b: str) -> bool: + print(a, b) + return str(a) == b + + +async def takes_check_func( + check_func: CheckFunc[_P], *args: _P.args, **kwargs: _P.kwargs +): + await check_func(ClassA(), *args, **kwargs) + + +asyncio.run(takes_check_func(my_check_func, 1, "2")) + +# This should generate an error because the signature doesn't match. +asyncio.run(takes_check_func(my_check_func, 1, 2)) + + +TA1: TypeAlias = Callable[_P, Any] + +ta1_1: TA1[()] = lambda: 0 + +# This should generate an error. +ta1_2: TA1[()] = lambda x: x + + +TA2: TypeAlias = Callable[Concatenate[int, _P], None] + +TA3: TypeAlias = TA2[int, int] +TA4: TypeAlias = TA2[_P] + +# This should generate an error. +TA5: TypeAlias = TA2[[int, _P]] + +# This should generate an error. +TA6: TypeAlias = TA2[[int, ...]] + +TA7: TypeAlias = TA2[Concatenate[int, _P]] +TA8: TypeAlias = TA2[Concatenate[int, ...]] + +# This should generate two errors. 
+TA9: TypeAlias = TA2[int, Concatenate[int, _P]] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec14.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec14.py new file mode 100644 index 00000000..c1584c26 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec14.py @@ -0,0 +1,53 @@ +# This sample tests the handling of ParamSpec when used with +# static methods and class methods. + +from typing import Any, Callable, Generic, ParamSpec, Self, TypeVar, overload + +P = ParamSpec("P") +T = TypeVar("T") + + +def deco(func: Callable[P, float]) -> Callable[P, int]: + def wrapper(*args: P.args, **kwargs: P.kwargs) -> int: + return round(func(*args, **kwargs)) + + return wrapper + + +class ClassA: + @deco + @classmethod + def identity_cls(cls, val: float) -> float: + return val + + @deco + @staticmethod + def identity_static(val: float) -> float: + return val + + +reveal_type(ClassA.identity_cls(1.2), expected_text="int") +reveal_type(ClassA.identity_static(1.2), expected_text="int") + + +class ClassB(Generic[P, T]): + @overload + @classmethod + def method1( + cls, run: Callable[P, T], /, *args: P.args, **kwargs: P.kwargs + ) -> Self: ... + + @overload + @classmethod + def method1(cls) -> "ClassB[[], None]": ... + + @classmethod + def method1(cls, *args: Any, **kwargs: Any) -> Any: ... + + +def func1() -> None: + pass + + +m1 = ClassB.method1 +m1(func1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec15.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec15.py new file mode 100644 index 00000000..f716d608 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec15.py @@ -0,0 +1,25 @@ +# This sample tests the handling of generic classes parameterized +# with a ParamSpec. 
+ +from typing import Callable, Generic, TypeVar +from typing_extensions import ParamSpec # pyright: ignore[reportMissingModuleSource] + +P = ParamSpec("P") +T = TypeVar("T") + + +class Foo(Generic[P, T]): + def __init__(self, func: Callable[P, T]) -> None: ... + + +def foo(foo: Foo[P, T], *args: P.args, **kwargs: P.kwargs) -> T: ... + + +def func(a: int) -> str: ... + + +a = Foo(func) +reveal_type(a, expected_text="Foo[(a: int), str]") + +c = foo(a, 2) +reveal_type(c, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec16.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec16.py new file mode 100644 index 00000000..3a11a6ce --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec16.py @@ -0,0 +1,29 @@ +# This sample tests the matching of nested callables that each use +# ParamSpec. + +from typing import Callable, Generic, TypeVar +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Concatenate, + ParamSpec, +) + +P = ParamSpec("P") +Q = ParamSpec("Q") + +T = TypeVar("T") +U = TypeVar("U") + + +class ClassA(Generic[P, T, Q, U]): ... + + +def decorator1( + func: Callable[Concatenate[Callable[P, T], Q], U], +) -> ClassA[P, T, Q, U]: ... + + +@decorator1 +def func1(func: Callable[[int], float], a: str) -> bool: ... + + +reveal_type(func1, expected_text="ClassA[(int), float, (a: str), bool]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec17.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec17.py new file mode 100644 index 00000000..1bde4d42 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec17.py @@ -0,0 +1,30 @@ +# This sample validates that *P.args and **P.kwargs can be used as a +# tuple and dict, respectively. 
+ +# pyright: strict + +from collections.abc import Callable +from typing import Any +from typing_extensions import ParamSpec # pyright: ignore[reportMissingModuleSource] + +P = ParamSpec("P") + + +def func1(func: Callable[P, object], *args: P.args, **kwargs: P.kwargs) -> str: + arg_reprs = [repr(arg) for arg in args] + arg_reprs.extend(k + "=" + repr(v) for k, v in kwargs.items()) + + return func.__name__ + "(" + ", ".join(arg_reprs) + ")" + + +def func2(*values: object, sep: str | None = ..., end: str | None = ...) -> None: ... + + +func1(func2) + + +def func3(a: int, b: int): ... + + +def func4(*args: Any, **kwargs: Any): + func1(func3, *args, **kwargs) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec18.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec18.py new file mode 100644 index 00000000..c545efd7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec18.py @@ -0,0 +1,38 @@ +# This sample tests the handling of a ParamSpec within a callback protocol. + +from typing import Callable, Concatenate, ParamSpec, Protocol + + +P = ParamSpec("P") + + +def callback(a: int, b: str, c: str) -> int: ... + + +CallableWithConcatenate = Callable[Concatenate[int, P], int] + + +def func_with_callable(cb: CallableWithConcatenate[P]) -> Callable[P, bool]: ... + + +x1 = func_with_callable(callback) +reveal_type(x1, expected_text="(b: str, c: str) -> bool") + + +class ClassWithConcatenate(Protocol[P]): + def __call__(self, x: int, /, *args: P.args, **kwargs: P.kwargs) -> int: ... + + +def func_with_protocol(cb: ClassWithConcatenate[P]) -> Callable[P, bool]: ... + + +x2 = func_with_protocol(callback) +reveal_type(x2, expected_text="(b: str, c: str) -> bool") + + +class CallbackPos(Protocol[P]): + def __call__(self, /, *args: P.args, **kwargs: P.kwargs) -> None: ... 
+ + +def invoke_pos(cb: CallbackPos[P], /, *args: P.args, **kwargs: P.kwargs) -> None: + cb(*args, **kwargs) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec19.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec19.py new file mode 100644 index 00000000..462cb686 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec19.py @@ -0,0 +1,91 @@ +# This sample tests the case where a ParamSpec is used within a generic +# type alias with a Callable. + +from typing import Any, Callable, Generic, Protocol +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Concatenate, + ParamSpec, +) + +P = ParamSpec("P") + +# Example 1: Callable generic type alias + +CommandHandler1 = Callable[Concatenate[int, P], dict[str, Any]] + + +class Command1(Generic[P]): + def __init__(self, handler: CommandHandler1[P]) -> None: ... + + +class Application1: + def func1(self, handler: CommandHandler1[P]) -> Command1[P]: + return Command1(handler) + + def func2( + self, + handler: CommandHandler1[P], + ) -> Callable[[CommandHandler1[P]], Command1[P]]: + def decorator(handler: CommandHandler1[P]) -> Command1[P]: + return self.func1(handler) + + return decorator + + +# Example 2: Callback Protocol + + +class CommandHandler2(Protocol[P]): + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> dict[str, Any]: ... + + +class Command2(Generic[P]): + def __init__(self, handler: CommandHandler2[P]) -> None: ... + + +class Application2: + def func1(self, handler: CommandHandler2[P]) -> Command2[P]: + return Command2(handler) + + def func2( + self, + handler: CommandHandler2[P], + ) -> Callable[[CommandHandler2[P]], Command2[P]]: + def decorator(handler: CommandHandler2[P]) -> Command2[P]: + return self.func1(handler) + + return decorator + + +def handler(arg1: int, arg2: str) -> dict[str, Any]: ... 
+ + +v1: CommandHandler2 = handler + + +def func1_1(x: CommandHandler1[str]): + x(3, "hi") + + +def func1_2(x: CommandHandler1[[str, int]]): + x(3, "hi", 3) + + +def func2_1(x: CommandHandler2[str]): + x("hi") + + +def func2_2(x: CommandHandler2[[str, int]]): + x("hi", 3) + + +HandlerAlias = Callable[P, None] + +list_of_handlers: list[HandlerAlias[...]] = [] + + +class HandlerProtocol(Protocol[P]): + def __call__(self, /, *args: P.args, **kwargs: P.kwargs) -> None: ... + + +list_of_handler_protocols: list[HandlerProtocol[...]] = [] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec2.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec2.py new file mode 100644 index 00000000..aa67e436 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec2.py @@ -0,0 +1,25 @@ +# This sample tests ParamSpec (PEP 612) behavior. + +from asyncio import Future +from typing import Awaitable, Callable, ParamSpec, TypeVar + +P = ParamSpec("P") +R = TypeVar("R") + + +def awaitable_wrapper(a: Callable[P, R]) -> Callable[P, Awaitable[R]]: + def foo_internal(*args: P.args, **kwargs: P.kwargs) -> Awaitable[R]: + ft: "Future[R]" = Future() + ft.set_result(a(*args, **kwargs)) + return ft + + return foo_internal + + +@awaitable_wrapper +def bar(a: int, b: str) -> float: + return 2.3 + + +async def bbb() -> float: + return await bar(2, "3") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec20.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec20.py new file mode 100644 index 00000000..a68340f2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec20.py @@ -0,0 +1,73 @@ +# This sample tests the handling of class specialization expressions +# that provide signatures for ParamSpecs. 
+ +from typing import Any, Callable, Concatenate, Generic, ParamSpec, TypeVar + + +T = TypeVar("T") +P1 = ParamSpec("P1") +P2 = ParamSpec("P2") + + +class X(Generic[T, P1]): + f: Callable[P1, int] + x: T + + +def x1(x: X[int, P2]) -> str: ... + + +def x2(x: X[int, Concatenate[int, P2]]) -> str: ... + + +def X3(x: X[int, [int, bool]]) -> str: ... + + +def x4(x: X[int, ...]) -> str: ... + + +# This should generate an error because "int" can't be bound to a ParamSpec. +def x5(x: X[int, int]) -> str: ... + + +# This should generate an error. +def x6(x: X[..., ...]) -> str: ... + + +# This should generate an error. +def x7(x: X[[int], [int, int]]) -> str: ... + + +class Y(Generic[P2]): + def __init__(self, cb: Callable[P2, Any]) -> None: ... + + def m1(self) -> X[int, Concatenate[float, P2]]: ... + + +y1 = Y(x4) +reveal_type(y1, expected_text="Y[(x: X[int, ...])]") + +y2 = y1.m1() +reveal_type(y2, expected_text="X[int, (float, x: X[int, ...])]") + + +class Z(Generic[P1]): + f: Callable[P1, int] + + +def z1(x: Z[[int, str, bool]]) -> str: ... + + +def z2(x: Z[int, str, bool]) -> str: ... + + +# This should generate an error. +def z3(x: Z[[int, [str], bool]]) -> str: ... + + +# This should generate an error. +def z4(x: Z[[[int, str, bool]]]) -> str: ... + + +# This should generate an error. +def z5(x: Z[[...]]) -> str: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec21.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec21.py new file mode 100644 index 00000000..64a54c6b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec21.py @@ -0,0 +1,56 @@ +# This sample tests the case where a ParamSpec is used within a source +# and destination callback protocol. 
+ +from typing import Callable, Protocol +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Concatenate, + ParamSpec, +) + +P1 = ParamSpec("P1") +P2 = ParamSpec("P2") +P3 = ParamSpec("P3") +P4 = ParamSpec("P4") + + +class Context: ... + + +class Response: ... + + +class ContextCallback(Protocol[P1]): + def __call__( + self, ctx: Context, /, *args: P1.args, **kwargs: P1.kwargs + ) -> Response: ... + + +def call_context_callback( + callback: ContextCallback[P3], /, *args: P3.args, **kwargs: P3.kwargs +) -> Response: ... + + +class IntContextCallback(Protocol[P2]): + def __call__( + self, ctx: Context, value: int, /, *args: P2.args, **kwargs: P2.kwargs + ) -> Response: ... + + +def call_int_context_callback( + callback: IntContextCallback[P4], value: int, /, *args: P4.args, **kwargs: P4.kwargs +) -> Response: + return call_context_callback(callback, value, *args, **kwargs) + + +P5 = ParamSpec("P5") +P6 = ParamSpec("P6") +P7 = ParamSpec("P7") + +ContextCallable = Callable[Concatenate[Context, P5], Response] +IntContextCallable = Callable[Concatenate[Context, int, P6], Response] + + +def call_int_context_callable( + callback: IntContextCallable[P7], value: int, /, *args: P7.args, **kwargs: P7.kwargs +) -> Response: + return call_context_callback(callback, value, *args, **kwargs) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec22.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec22.py new file mode 100644 index 00000000..1c826b13 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec22.py @@ -0,0 +1,28 @@ +# This sample tests the case where a specialized generic class that uses +# a ParamSpec and a callback protocol is assigned to a Callable that +# uses a ParamSpec. 
+ +from typing import Callable, Generic, TypeVar +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Concatenate, + ParamSpec, +) + +P = ParamSpec("P") +R = TypeVar("R") + + +class MyPartial(Generic[P, R]): + def __init__(self, first: int, func: Callable[Concatenate[int, P], R]) -> None: + self.first = first + self.func = func + + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R: ... + + +class MyPartialCreator(Generic[P, R]): + def __init__(self, func: Callable[Concatenate[int, P], R]): + self.func = func + + def create_partial(self, first: int) -> Callable[P, R]: + return MyPartial[P, R](first=first, func=self.func) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec23.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec23.py new file mode 100644 index 00000000..1e7cecb3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec23.py @@ -0,0 +1,22 @@ +# This sample tests the case where a Callable that includes a Concatenate +# is assigned to a ParamSpec that doesn't include a Concatenate. + + +from typing import Callable, TypeVar +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Concatenate, + ParamSpec, +) + +P = ParamSpec("P") + + +def is_inty(f: Callable[P, object]) -> Callable[P, int]: ... 
+ + +T = TypeVar("T") + + +def outer(f: Callable[Concatenate[str, P], object]): + x = is_inty(f) + reveal_type(x, expected_text="(str, **P@outer) -> int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec24.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec24.py new file mode 100644 index 00000000..82b2d312 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec24.py @@ -0,0 +1,69 @@ +# This sample tests the case where a Callable that includes a Concatenate +# is used as an input parameter to a function that returns a generic +# type parameterized by a ParamSpec and specialized with a Concatenate. + +# pyright: reportOverlappingOverload=false + +from __future__ import annotations +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Self, + Concatenate, + ParamSpec, +) +from typing import Any, Callable, TypeVar, Protocol, Generic, overload + +T = TypeVar("T") +O = TypeVar("O") +P = ParamSpec("P") +Self_A = TypeVar("Self_A", bound="A") + + +class _callable_cache(Protocol[P, T]): + foo: int = 0 + val: T + + def __init__(self, val: T) -> None: + self.val = val + + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: + return self.val + + +class _wrapped_cache(_callable_cache[P, T], Generic[O, P, T]): + @overload + def __get__( # type: ignore + self, instance: None, owner: type[O] + ) -> _callable_cache[Concatenate[O, P], T]: ... + + @overload + def __get__(self, instance: O, owner: type[O]) -> Self: ... + + +@overload +def cache(fn: Callable[Concatenate[Self_A, P], T]) -> _wrapped_cache[Self_A, P, T]: # type: ignore + ... + + +@overload +def cache(fn: Callable[P, T]) -> _wrapped_cache[Any, P, T]: ... + + +@cache +def not_in_class(a: int, b: str) -> str: ... + + +class A: + @cache + def in_class(self, a: int, b: str) -> str: ... 
+ + +reveal_type(not_in_class, expected_text="_wrapped_cache[Any, (a: int, b: str), str]") +not_in_class(1, "") + +a = A() + +reveal_type(a.in_class, expected_text="_wrapped_cache[A, (a: int, b: str), str]") +a.in_class(1, "") + +reveal_type(A.in_class, expected_text="_callable_cache[(A, a: int, b: str), str]") +A.in_class(a, 1, "") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec25.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec25.py new file mode 100644 index 00000000..4d4bc4b5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec25.py @@ -0,0 +1,29 @@ +# This sample tests the case where a generic type uses a ParamSpec +# as a type parameter and it is specialized using an empty signature. + +from typing import Any, Callable, Concatenate, Generic, ParamSpec + +P = ParamSpec("P") + + +class Context: ... + + +CommandHandler = Callable[Concatenate[Context, P], Any] + + +class Command(Generic[P]): + def __init__(self, handler: CommandHandler[P]) -> None: ... + + +def handler_no_args(ctx: Context) -> None: ... + + +def handler_one_arg(ctx: Context, a: int) -> None: ... + + +cmd_no_args = Command(handler_no_args) +reveal_type(cmd_no_args, expected_text="Command[()]") + +cmd_one_arg = Command(handler_one_arg) +reveal_type(cmd_one_arg, expected_text="Command[(a: int)]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec26.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec26.py new file mode 100644 index 00000000..4879253b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec26.py @@ -0,0 +1,22 @@ +# This sample tests the case where a generic class parameterized by a +# ParamSpec is specialized using a Concatenate[] type argument. 
+ +from typing import ParamSpec, Concatenate, Generic, Callable, Any + +P = ParamSpec("P") + + +class A(Generic[P]): + def __init__(self, func: Callable[P, Any]) -> None: ... + + +def func1(a: A[Concatenate[int, P]]) -> A[P]: ... + + +def func2(a: int, b: str) -> str: ... + + +val1 = A(func2) +reveal_type(val1, expected_text="A[(a: int, b: str)]") +val2 = func1(val1) +reveal_type(val2, expected_text="A[(b: str)]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec27.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec27.py new file mode 100644 index 00000000..1521ecdb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec27.py @@ -0,0 +1,61 @@ +# This sample tests the case where an ellipsis is used to specialize +# a generic class parameterized by a ParamSpec. + +from typing import Callable, Generic, Protocol, assert_type +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Concatenate, + ParamSpec, + TypeAlias, +) + +P = ParamSpec("P") + + +def func1(a: int, b: str) -> None: ... + + +def func2(a: str, b: str) -> None: ... + + +class Handler(Protocol[P]): + def __call__(self, /, *args: P.args, **kwargs: P.kwargs) -> None: ... + + +class ConcatHandler(Protocol[P]): + def __call__(self, a: int, /, *args: P.args, **kwargs: P.kwargs) -> None: ... + + +ConcatCallableHandler: TypeAlias = Callable[Concatenate[int, P], None] + + +handler_callable1: Callable[..., None] = func1 +concat_handler_callable1: ConcatCallableHandler[...] = func1 + +# This should generate an error because the first param of func2 is not int. +concat_handler_callable2: ConcatCallableHandler[...] = func2 + +handler1: Handler[...] = func1 +concat_handler1: ConcatHandler[...] = func1 + +# This should generate an error because the first param of func2 is not int. +concat_handler2: ConcatHandler[...] 
= func2 + + +def func0(x: ConcatCallableHandler[str, str]): + assert_type(x, Callable[[int, str, str], None]) + + +class Command(Generic[P]): + def __init__(self, handler: Handler[P]) -> None: + self.handler: Handler[P] = handler + + +commands: list[Command[...]] = [] + + +def do_something(int_handler: Handler[int], var_args_handler: Handler[P], /) -> None: + int_command = Command(int_handler) + commands.append(int_command) + + var_args_command = Command(var_args_handler) + commands.append(var_args_command) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec28.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec28.py new file mode 100644 index 00000000..f13fed80 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec28.py @@ -0,0 +1,36 @@ +# This sample tests a complicated combination of ParamSpec usage. + +# pyright: strict + +from typing import Any, Callable, Concatenate, ParamSpec, Protocol, TypeVar + + +_Fn = TypeVar("_Fn", bound=Callable[..., Any]) +_Ret = TypeVar("_Ret") +_Args = ParamSpec("_Args") +_Self = TypeVar("_Self", bound="_GenerativeType") + + +def decorator( + target: Callable[Concatenate[_Fn, _Args], _Ret], +) -> Callable[[_Fn], Callable[_Args, _Ret]]: ... + + +class _GenerativeType(Protocol): + def _generate(self: "_Self") -> "_Self": ... + + +def generative( + fn: Callable[Concatenate[_Self, _Args], None], +) -> Callable[Concatenate[_Self, _Args], _Self]: + @decorator + def _generative( + fn: Callable[Concatenate[_Self, _Args], None], + self: _Self, + *args: _Args.args, + **kw: _Args.kwargs, + ) -> _Self: ... 
+ + decorated = _generative(fn) + + return decorated diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec29.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec29.py new file mode 100644 index 00000000..e9294acd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec29.py @@ -0,0 +1,37 @@ +# This sample tests the case where an inner function uses concatenation +# and the return type of the outer function doesn't. + +from typing import Callable, Concatenate, ParamSpec + +P = ParamSpec("P") + + +def decorator1(f: Callable[P, None]) -> Callable[P, None]: + def inner(var: int, *args: P.args, **kwargs: P.kwargs) -> None: + f(*args, **kwargs) + + # This should generate an error because the concatenated parameters don't match. + return inner + + +def decorator2(f: Callable[P, None]) -> Callable[Concatenate[int, P], None]: + def inner(*args: P.args, **kwargs: P.kwargs) -> None: + f(*args, **kwargs) + + # This should generate an error because the concatenated parameters don't match. + return inner + + +def decorator3(f: Callable[P, None]) -> Callable[Concatenate[int, P], None]: + def inner(var: str, *args: P.args, **kwargs: P.kwargs) -> None: + f(*args, **kwargs) + + # This should generate an error because the concatenated parameters don't match. + return inner + + +def decorator4(f: Callable[P, None]) -> Callable[Concatenate[str, P], None]: + def inner(var: str, *args: P.args, **kwargs: P.kwargs) -> None: + f(*args, **kwargs) + + return inner diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec3.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec3.py new file mode 100644 index 00000000..84986276 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec3.py @@ -0,0 +1,131 @@ +# This sample tests ParamSpec (PEP 612) behavior. 
+ +from typing import ( + Awaitable, + Callable, + Generic, + Iterable, + ParamSpec, + TypeVar, + overload, +) + +P = ParamSpec("P") +R = TypeVar("R") + + +def decorator1(f: Callable[P, R]) -> Callable[P, Awaitable[R]]: + async def inner(*args: P.args, **kwargs: P.kwargs) -> R: + return f(*args, **kwargs) + + return inner + + +@decorator1 +def func1(x: int, y: str) -> int: + return x + 7 + + +async def func2(): + await func1(1, "A") + + # This should generate an error because + # the first parameter is not an int. + await func1("B", "2") + + +@overload +def func3(x: int) -> None: ... + + +@overload +def func3(x: str) -> str: ... + + +def func3(x: int | str) -> str | None: + if isinstance(x, int): + return None + else: + return x + + +reveal_type( + decorator1(func3), + expected_text="Overload[(x: int) -> Awaitable[None], (x: str) -> Awaitable[str]]", +) + + +class ClassA(Generic[P, R]): + def __init__(self, func: Callable[P, R]): + self.func = func + + +def func4(f: Callable[P, R]) -> ClassA[P, R]: + return ClassA(f) + + +T1 = TypeVar("T1") +T2 = TypeVar("T2") + + +def decorator2(f: Callable[P, R]) -> Callable[P, R]: + return f + + +def func5(f: Callable[[], list[T1]]) -> Callable[[list[T2]], list[T1 | T2]]: + def inner(res: list[T2], /) -> list[T1 | T2]: ... + + return decorator2(inner) + + +def func6(x: Iterable[Callable[P, None]]) -> Callable[P, None]: + def inner(*args: P.args, **kwargs: P.kwargs) -> None: + for fn in x: + fn(*args, **kwargs) + + return inner + + +class Callback1: + def __call__(self, x: int | str, y: int = 3) -> None: ... + + +class Callback2: + def __call__(self, x: int, /) -> None: ... + + +class Callback3: + def __call__(self, *args, **kwargs) -> None: ... + + +def func7(f1: Callable[P, R], f2: Callable[P, R]) -> Callable[P, R]: ... 
+ + +def func8(cb1: Callback1, cb2: Callback2, cb3: Callback3): + v1 = func7(cb1, cb2) + reveal_type(v1, expected_text="(x: int, /) -> None") + + v2 = func7(cb1, cb3) + reveal_type(v2, expected_text="(x: int | str, y: int = 3) -> None") + + +def func9(f: Callable[P, object], *args: P.args, **kwargs: P.kwargs) -> object: + # This should generate an error because "name" doesn't exist. + return f(*args, **kwargs, name="") + + +def func10(data: int = 1) -> None: + pass + + +def func11[**P]( + cls: Callable[P, None], data: str, *args: P.args, **kwargs: P.kwargs +) -> None: ... + + +func11(func10, "") +func11(func10, "", 0) + +# This should generate an error because one of the two "data" parameters +# does not have a default value. +func11(func10) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec30.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec30.py new file mode 100644 index 00000000..7b24bbc5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec30.py @@ -0,0 +1,52 @@ +# This sample tests a complicated combination of ParamSpec usage. + +# pyright: strict + +from typing import Callable, TypeVar, overload +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Concatenate, + ParamSpec, +) + +_T = TypeVar("_T") +_R = TypeVar("_R") +_P = ParamSpec("_P") + + +@overload +def error_decorator( + error_codes: None = None, +) -> Callable[ + [Callable[Concatenate[_T, _P], _R]], Callable[Concatenate[_T, _P], _R] +]: ... + + +@overload +def error_decorator( + error_codes: list[str], +) -> Callable[ + [Callable[Concatenate[_T, _P], _R]], Callable[Concatenate[_T, _P], _R | None] +]: ... 
+ + +def error_decorator( +    error_codes: list[str] | None = None, +) -> Callable[ +    [Callable[Concatenate[_T, _P], _R]], Callable[Concatenate[_T, _P], _R | None] +]: +    """Filter specific errors and raise custom exception for remaining ones.""" + +    def decorator( +        func: Callable[Concatenate[_T, _P], _R], +    ) -> Callable[Concatenate[_T, _P], _R | None]: +        def wrapper(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> _R | None: +            try: +                return func(self, *args, **kwargs) +            except Exception as ex: +                if error_codes is not None: +                    return None +                raise Exception("Custom exception") from ex + +        return wrapper + +    return decorator diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec31.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec31.py new file mode 100644 index 00000000..7d44a58d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec31.py @@ -0,0 +1,15 @@ +# This sample tests that an Any expression fills in a default signature +# when it binds to a ParamSpec. + +from typing import Any, Callable, TypeVar +from typing_extensions import ParamSpec # pyright: ignore[reportMissingModuleSource] + +T = TypeVar("T") +P = ParamSpec("P") + + +def func1(f: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T: ... + + +def func2(a: Any) -> None: +    reveal_type(func1(a, 1), expected_text="Any") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec32.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec32.py new file mode 100644 index 00000000..0d06483c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec32.py @@ -0,0 +1,59 @@ +# This sample tests the case where a ParamSpec and its P.args and P.kwargs +# parameters are used within a constructor.
+ +from typing import Callable, Generic, TypeVar +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Concatenate, + ParamSpec, +) + +P = ParamSpec("P") +T1 = TypeVar("T1") +T2 = TypeVar("T2") + + +def add_k(x: int, k: int) -> int: + return x + k + + +class Class1(Generic[P, T2]): + def __init__(self, fn: Callable[P, T2], *args: P.args, **kwargs: P.kwargs) -> None: + self.fn = fn + self.args = args + self.kwargs = kwargs + + def __call__(self) -> T2: + return self.fn(*self.args, **self.kwargs) + + +# This should generate an error because arguments x and k are missing. +Class1(add_k) + +# This should generate an error because arguments x has the wrong type. +Class1(add_k, "3", 2) + +Class1(add_k, 3, 2) +Class1(add_k, x=3, k=2) + + +class Class2(Generic[P, T1, T2]): + def __init__( + self, fn: Callable[Concatenate[T1, P], T2], *args: P.args, **kwargs: P.kwargs + ) -> None: + self.fn = fn + self.args = args + self.kwargs = kwargs + + def __call__(self, value: T1) -> T2: + return self.fn(value, *self.args, **self.kwargs) + + +# This should generate an error because argument x is missing. +Class2(add_k) + +# This should generate an error because arguments x has the wrong type. +Class2(add_k, "3") + +Class2(add_k, 2) + +Class2(add_k, k=2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec33.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec33.py new file mode 100644 index 00000000..83158975 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec33.py @@ -0,0 +1,28 @@ +# This sample verifies that an error is returned if an inner function +# doesn't use P.args and P.kwargs in its parameter list but is returned +# by an outer function that uses P in its return type. 
+ +from typing import Callable, Concatenate +from typing_extensions import ParamSpec # pyright: ignore[reportMissingModuleSource] + +P = ParamSpec("P") + + +def func1(func: Callable[P, int]) -> Callable[P, int]: + def inner_func(x: int) -> int: + # This should generate a type error. + return func() + + # This should generate a type error. + return inner_func + + +def func2( + func: Callable[Concatenate[int, P], int], +) -> Callable[Concatenate[int, P], int]: + def inner_func(x: int) -> int: + # This should generate a type error. + return func(x) + + # This should generate a type error. + return inner_func diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec34.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec34.py new file mode 100644 index 00000000..a316a9fa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec34.py @@ -0,0 +1,21 @@ +# This sample tests the handling of nested functions that involve ParamSpecs. + +import functools +from typing import Callable, TypeVar +from typing_extensions import ParamSpec # pyright: ignore[reportMissingModuleSource] + +P = ParamSpec("P") +R = TypeVar("R") + + +def deprecated( + instead: str | None = None, +) -> Callable[[Callable[P, R]], Callable[P, R]]: + def actual_decorator(func: Callable[P, R]) -> Callable[P, R]: + @functools.wraps(func) + def decorated(*args: P.args, **kwargs: P.kwargs) -> R: + return func(*args, **kwargs) + + return decorated + + return actual_decorator diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec35.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec35.py new file mode 100644 index 00000000..99cd7c8d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec35.py @@ -0,0 +1,25 @@ +# This sample ensures that when solving for a ParamSpec, all assignments +# are consistent. 
+ +from typing import Callable, ParamSpec + +P = ParamSpec("P") + + +def func1(x: Callable[P, int], y: Callable[P, int]) -> Callable[P, bool]: ... + + +def a1(p0: int) -> int: ... + + +def a2(p0: int, p2: str) -> int: ... + + +v1 = func1(a1, a1) +reveal_type(v1, expected_text="(p0: int) -> bool") + +v2 = func1(a2, a2) +reveal_type(v2, expected_text="(p0: int, p2: str) -> bool") + +# This should generate an error because a1 and a2 are not compatible. +v3 = func1(a1, a2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec36.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec36.py new file mode 100644 index 00000000..ab4e625c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec36.py @@ -0,0 +1,42 @@ +# This sample tests the case where a callback protocol uses a method-scoped +# param spec. + +import contextlib +from concurrent.futures import Future, ThreadPoolExecutor +from typing import Callable, Iterator, ParamSpec, Protocol, TypeVar + +P = ParamSpec("P") +R = TypeVar("R") + + +class TakesFunctionWithArguments(Protocol): + def __call__( + self, func: Callable[P, R], *args: P.args, **kwargs: P.kwargs + ) -> Future[R]: ... + + +@contextlib.contextmanager +def submit_wrapper() -> Iterator[TakesFunctionWithArguments]: + with ThreadPoolExecutor() as pool: + + def my_submit( + func: Callable[P, R], *args: P.args, **kwargs: P.kwargs + ) -> Future[R]: + return pool.submit(func, *args, **kwargs) + + yield my_submit + + +def foo(a: int, b: int, c: int) -> int: + return a + b + c + + +with submit_wrapper() as submit: + submit(foo, a=1, b=2, c=3) + submit(foo, 1, 2, 3) + + # This should generate two errors. + submit(foo, a=1, b=2, d=3) + + # This should generate an error. 
+ submit(foo, 1, 2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec37.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec37.py new file mode 100644 index 00000000..ebb16e7f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec37.py @@ -0,0 +1,28 @@ +# This sample tests the case where a source type includes a ParamSpec +# and a dest type includes an *args: Any and **kwargs: Any. + +from typing import Any, Callable, Protocol, TypeVar, ParamSpec + +P = ParamSpec("P") +R = TypeVar("R") +T = TypeVar("T") + + +class ClassA(Protocol[R]): + def __call__(self, n: int, /, *args: Any, **kwargs: Any) -> list[R]: ... + + +def noop(v: T) -> T: + return v + + +def func1(maker: Callable[P, R]) -> ClassA[R]: + def inner(n: int, /, *args: P.args, **kwargs: P.kwargs) -> list[R]: + reveal_type(args, expected_text="P@func1.args") + reveal_type(noop(args), expected_text="P@func1.args") + reveal_type(kwargs, expected_text="P@func1.kwargs") + reveal_type(noop(kwargs), expected_text="P@func1.kwargs") + + return [maker(*args, **kwargs) for _ in range(n)] + + return inner diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec38.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec38.py new file mode 100644 index 00000000..135f2fc1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec38.py @@ -0,0 +1,24 @@ +# This sample tests that a **kwargs parameter captured by a ParamSpec +# is preserved. + +from typing import Callable, Generic, ParamSpec, TypeVar + + +P = ParamSpec("P") +R = TypeVar("R") + + +class ClassA(Generic[P, R]): + def __init__(self, callback: Callable[P, R]): + self.callback = callback + + def method(self, *args: P.args, **kwargs: P.kwargs) -> R: + return self.callback(*args, **kwargs) + + +def func1(obj: object, **kwargs: object) -> object: ... 
+ + +reveal_type( + ClassA(func1).method, expected_text="(obj: object, **kwargs: object) -> object" +) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec39.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec39.py new file mode 100644 index 00000000..03b0eddc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec39.py @@ -0,0 +1,16 @@ +# This sample tests the case where a function that uses a ParamSpec +# is passed to itself. This should not cause a crash or infinite recursion +# within the type evaluator. + +from typing import TypeVar, Callable +from typing_extensions import ParamSpec # pyright: ignore[reportMissingModuleSource] + +P = ParamSpec("P") +R = TypeVar("R") + + +def func1(x: Callable[P, R]) -> Callable[P, R]: + return x + + +func1(func1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec4.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec4.py new file mode 100644 index 00000000..90fb1c0f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec4.py @@ -0,0 +1,159 @@ +# This sample tests the type checker's handling of ParamSpec +# and Concatenate as described in PEP 612. + +from typing import ( + Callable, + Concatenate, + Generic, + Iterable, + ParamSpec, + Protocol, + TypeVar, + assert_type, +) + +P = ParamSpec("P") +R = TypeVar("R") + + +class Request: ... + + +def with_request(f: Callable[Concatenate[Request, P], R]) -> Callable[P, R]: + def inner(*args: P.args, **kwargs: P.kwargs) -> R: + return f(Request(), *args, **kwargs) + + return inner + + +@with_request +def takes_int_str(request: Request, x: int, y: str) -> int: + # use request + return x + 7 + + +takes_int_str(1, "A") + +# This should generate an error because the first arg +# is the incorrect type. +takes_int_str("B", "A") + +# This should generate an error because there are too +# many parameters. 
+takes_int_str(1, "A", 2) + + +# This should generate an error because a ParamSpec can appear +# only within the last type arg for Concatenate +def decorator1(f: Callable[Concatenate[P, P], int]) -> Callable[P, int]: ... + + +# This should generate an error because the last type arg +# for Concatenate should be a ParamSpec. +def decorator2(f: Callable[Concatenate[int, int], int]) -> Callable[P, int]: ... + + +# This should generate an error because Concatenate is missing +# its type arguments. +def decorator3(f: Callable[Concatenate, int]) -> Callable[P, int]: ... + + +def decorator4(func: Callable[P, None]) -> Callable[Concatenate[int, P], None]: + def wrapper(x: int, /, *args: P.args, **kwargs: P.kwargs) -> None: ... + + return wrapper + + +def func1(func: Callable[Concatenate[int, P], None]) -> Callable[P, None]: ... + + +def func2(a: int, b: str, c: str) -> None: ... + + +def func3(a: int, /, b: str, c: str) -> None: ... + + +def func4(a: int, b: str, /, c: str) -> None: ... + + +v1 = func1(func2) +reveal_type(v1, expected_text="(b: str, c: str) -> None") + +v2 = func1(func3) +reveal_type(v2, expected_text="(b: str, c: str) -> None") + +v3 = func1(func4) +reveal_type(v3, expected_text="(b: str, /, c: str) -> None") + + +def func5(__fn: Callable[P, R], *args: P.args, **kwargs: P.kwargs) -> R: ... + + +def func6(name: str, *args: str): ... + + +v5 = func5(func6, "a", "b", "c") + +# This should generate an error because 1 isn't assignable to str. +v6 = func5(func6, "a", "b", "c", 1) + + +def func7(name: str, **kwargs: str): ... + + +v7 = func5(func7, "a", b="b", c="c") + +# This should generate an error because 1 isn't assignable to str. +v8 = func5(func7, "a", b="b", c=1) + + +T = TypeVar("T", covariant=True) +X = TypeVar("X") + + +class DecoProto(Protocol[P, T]): + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: ... 
+ + +def func8(cb: Callable[Concatenate[X, P], T]) -> DecoProto[Concatenate[X, P], T]: + return cb + + +@func8 +def func9(x: Iterable[T]) -> T: + return next(iter(x)) + + +v9 = func9([1, 2]) +reveal_type(v9, expected_text="int") + + +class A(Generic[R, P]): + f: Callable[P, str] + prop: R + + def __init__(self, f: Callable[P, str], prop: R) -> None: + self.f = f + self.prop = prop + + +def func10(q: int, /) -> str: ... + + +y1 = A(func10, 1) +assert_type(y1, A[int, [int]]) +reveal_type(y1, expected_text="A[int, (q: int, /)]") + + +# This should generate an error because Concatenate is not +# allowed in this context. +def func11(func: Concatenate[int, ...]) -> None: + # This should generate an error because Concatenate is not + # allowed in this context. + x: Concatenate[int, ...] + + +class B: + # This should generate an error because Concatenate is not + # allowed in this context. + x: Concatenate[int, ...] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec40.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec40.py new file mode 100644 index 00000000..678b379b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec40.py @@ -0,0 +1,42 @@ +# This sample tests the interaction between a generic callable parameterized +# with a ParamSpec and another generic callable that is parameterized +# with a TypeVar. 
+ +from typing import Callable, ParamSpec, TypeVar, TypedDict, Unpack + +_P = ParamSpec("_P") +_R = TypeVar("_R") + + +def call(obj: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> _R: + return obj(*args, **kwargs) + + +def func1(): + return 0 + + +def func2(): + return 0.0 + + +result1 = map(call, [func1]) +reveal_type(result1, expected_text="map[int]") + +result2 = map(call, [func1, func2]) +reveal_type(result2, expected_text="map[float]") + + +class TD1(TypedDict, total=False): + e: str + f: str + + +def func3( + a: int, b: int, /, *, c: str = ..., d: str = ..., **kwargs: Unpack[TD1] +) -> float: ... + + +call(func3, 1, 2, e="", c="") +call(func3, 1, 2, c="", d="", e="") +call(func3, 1, 2, e="") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec41.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec41.py new file mode 100644 index 00000000..8226a422 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec41.py @@ -0,0 +1,19 @@ +# This sample tests the case where a function-scoped ParamSpec is +# partially specialized through a binding operation. + +from typing import Callable, ParamSpec, Self, reveal_type + +P = ParamSpec("P") + + +class A: + def __init__(self, x: int, y: int, z: str) -> None: + self.a = x + + # This should generate an error. 
+ @classmethod + def f(cls: Callable[P, Self], *args: P.args, **kwargs: P.kwargs) -> int: + return cls(*args, **kwargs).a + + +reveal_type(A.f, expected_text="(x: int, y: int, z: str) -> int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec42.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec42.py new file mode 100644 index 00000000..c987b7fb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec42.py @@ -0,0 +1,51 @@ +# This sample tests the case where a generic class is parameterized +# by a ParamSpec, and this ParamSpec is used in a method with +# *args and **kwargs parameters. In cases where the ParamSpec captures +# a generic function, the TypeVars for this generic function should +# still be solvable. + +from typing import Callable, Generic, ParamSpec, TypeVar + + +S = TypeVar("S") +T = TypeVar("T") + +P = ParamSpec("P") +R = TypeVar("R") + + +def func1(a: S, b: T) -> dict[S, T]: ... + + +class DecoratorClass1(Generic[P, R]): + def __init__(self, func: Callable[P, R]): + self._func = func + + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R: + return self._func(*args, **kwargs) + + def other(self, val: int, *args: P.args, **kwargs: P.kwargs) -> R: ... + + +decorated_func1 = DecoratorClass1(func1) + +reveal_type( + decorated_func1, + expected_text="DecoratorClass1[(a: S@func1, b: T@func1), dict[S@func1, T@func1]]", +) + +func1_ret = decorated_func1(1, "") +reveal_type(func1_ret, expected_text="dict[int, str]") + + +func1_other_ret = decorated_func1.other(0, 1, "") +reveal_type(func1_other_ret, expected_text="dict[int, str]") + + +def func2(func: Callable[P, R]) -> Callable[P, R]: ... 
+ + +d1 = func2(func1) +d2 = func2(d1) +d3 = d2(1, "") +reveal_type(d3, expected_text="dict[int, str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec43.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec43.py new file mode 100644 index 00000000..87923777 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec43.py @@ -0,0 +1,17 @@ +# This sample tests the case where a generic class has a function-local +# ParamSpec in its constructor. + +from typing import TypeVar, Callable, ParamSpec, Protocol + +P = ParamSpec("P") +R = TypeVar("R") + + +class Decorator(Protocol): + def __call__(self, __x: Callable[P, R]) -> Callable[P, R]: ... + + +def func1(deco: Decorator): + deco(lambda: None)() + deco(lambda x: x)(1) + deco(lambda x, y: x)(1, "") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec44.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec44.py new file mode 100644 index 00000000..237364ab --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec44.py @@ -0,0 +1,20 @@ +# This sample tests the handling of function assignments that involve +# ParamSpecs. + +from typing import overload, ParamSpec, TypeVar, Callable + +P = ParamSpec("P") +R = TypeVar("R") + + +@overload +def func1(f: Callable[P, R]) -> Callable[P, R]: ... + + +@overload +def func1() -> Callable[[Callable[P, R]], Callable[P, R]]: ... + + +def func1( + f: Callable[P, R] | None = None, +) -> Callable[P, R] | Callable[[Callable[P, R]], Callable[P, R]]: ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec45.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec45.py new file mode 100644 index 00000000..33fab200 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec45.py @@ -0,0 +1,29 @@ +# This sample tests the case where the same function that uses a ParamSpec +# is called multiple times as arguments to the same call. + +from typing import Callable, ParamSpec + +P = ParamSpec("P") + + +def func1(func: Callable[P, object], *args: P.args, **kwargs: P.kwargs) -> object: ... + + +def func2(x: str) -> int: ... + + +def func3(y: str) -> int: ... + + +print(func1(func2, x="..."), func1(func3, y="...")) + + +def func4(fn: Callable[P, int], *args: P.args, **kwargs: P.kwargs) -> int: + return fn(*args, **kwargs) + + +def func5(x: int, y: int) -> int: + return x + y + + +func5(func4(lambda x: x, 1), func4(lambda x, y: x + y, 2, 3)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec46.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec46.py new file mode 100644 index 00000000..ff68dd7f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec46.py @@ -0,0 +1,35 @@ +# This sample tests support for Concatenate with a ... type argument. + +from typing import Callable, Concatenate + +TA1 = Callable[Concatenate[int, ...], None] + + +def func1(cb: Callable[Concatenate[int, str, ...], None]): ... + + +def func2(cb: TA1): ... + + +def cb1(x: int, y: str, z: str) -> None: ... + + +func1(cb1) +func2(cb1) + + +def cb2(x: int, y: str, *args: int, **kwargs: str) -> None: ... + + +func1(cb2) +func2(cb2) + + +def cb3(x: str, y: str) -> None: ... + + +# This should generate an error. +func1(cb3) + +# This should generate an error. 
+func2(cb3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec47.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec47.py new file mode 100644 index 00000000..b19448b7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec47.py @@ -0,0 +1,32 @@ +# This sample tests the handling of recursive uses of ParamSpec. + +from typing import Any, Callable, ParamSpec + + +P = ParamSpec("P") + + +def func1(f: Callable[P, Any], *args: P.args, **kwargs: P.kwargs) -> str: ... + + +def func2(a: int) -> int: + return 42 + + +v2 = func1(func2, 42) +reveal_type(v2, expected_text="str") + +# This should generate an error. +func1(func2, "42") + +v3 = func1(func1, func2, 42) +reveal_type(v3, expected_text="str") + +# This should generate an error. +func1(func1, func2, "42") + +# This should generate an error. +func1(func1) + +v4 = func1(func1, lambda: None) +reveal_type(v4, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec48.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec48.py new file mode 100644 index 00000000..225c7b1f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec48.py @@ -0,0 +1,15 @@ +# This sample tests the case where a function with a ParamSpec is called +# with *args and **kwargs that are defined as Any. 
+ +from typing import Any, Callable, Concatenate, ParamSpec + + +P = ParamSpec("P") + + +def func3(f: Callable[Concatenate[int, P], int], *args: Any, **kwargs: Any) -> int: + return f(*args, **kwargs) + + +def func4(f: Callable[Concatenate[int, ...], int], *args: Any, **kwargs: Any) -> int: + return f(*args, **kwargs) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec49.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec49.py new file mode 100644 index 00000000..5869d2bb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec49.py @@ -0,0 +1,64 @@ +# This sample tests the case where a function parameterized with a +# ParamSpec P is called with *args: P.args and **kwargs: P.kwargs. + +from typing import Any, Generic, ParamSpec + +P = ParamSpec("P") + + +class TaskDeclaration(Generic[P]): + pass + + +class Dispatcher: + def dispatch( + self, + task_declaration: TaskDeclaration[P], + count: int, + /, + *args: P.args, + **kwargs: P.kwargs, + ) -> None: + pass + + +class Queue: + dispatcher: Dispatcher + + def method1(self, stub: TaskDeclaration[P]) -> Any: + def inner0(*args: P.args, **kwargs: P.kwargs) -> None: + self.dispatcher.dispatch(stub, 1, *args, **kwargs) + + def inner1(*args: P.args, **kwargs: P.kwargs) -> None: + # This should generate an error because a positional argument + # cannot appear after an unpacked keyword argument. + self.dispatcher.dispatch(stub, 1, **kwargs, *args) + + def inner2(*args: P.args, **kwargs: P.kwargs) -> None: + # This should generate an error because it's missing + # a positional argument for 'count'. + self.dispatcher.dispatch(stub, *args, **kwargs) + + def inner3(*args: P.args, **kwargs: P.kwargs) -> None: + # This should generate an error because it has an + # additional positional argument. 
+ self.dispatcher.dispatch(stub, 1, 1, *args, **kwargs) + + def inner4(*args: P.args, **kwargs: P.kwargs) -> None: + # This should generate an error because it is missing + # the *args argument. + self.dispatcher.dispatch(stub, 1, **kwargs) + + def inner5(*args: P.args, **kwargs: P.kwargs) -> None: + # This should generate an error because it is missing + # the *kwargs argument. + self.dispatcher.dispatch(stub, 1, *args) + + def inner6(*args: P.args, **kwargs: P.kwargs) -> None: + # This should generate an error because it has an + # extra *args argument. + self.dispatcher.dispatch(stub, 1, *args, *args, **kwargs) + + # This should generate an error because it has an + # extra **kwargs argument. + self.dispatcher.dispatch(stub, 1, *args, **kwargs, **kwargs) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec5.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec5.py new file mode 100644 index 00000000..60ce2ea6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec5.py @@ -0,0 +1,27 @@ +# This sample tests ParamSpec processing when the source +# callable type has keyword-only or positional-only +# parameter separators. + +from typing import Callable, ParamSpec, TypeVar + + +P = ParamSpec("P") +R = TypeVar("R") + + +def decorator(fn: Callable[P, R]) -> Callable[P, R]: + return fn + + +def func1(*, value: str) -> None: ... + + +f1 = decorator(func1) +reveal_type(f1, expected_text="(*, value: str) -> None") + + +def func2(value: str, /) -> None: ... 
+ + +f2 = decorator(func2) +reveal_type(f2, expected_text="(value: str, /) -> None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec50.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec50.py new file mode 100644 index 00000000..5ae51b27 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec50.py @@ -0,0 +1,34 @@ +# This sample tests the case where a ParamSpec captures another ParamSpec. + +from typing import Callable, Iterator, ParamSpec, TypeVar + +P = ParamSpec("P") +T = TypeVar("T") + + +def deco1(func: Callable[P, Iterator[T]]) -> Callable[P, Iterator[T]]: ... + + +@deco1 +def func1( + func: Callable[P, str], + *func_args: P.args, + **func_kwargs: P.kwargs, +) -> Iterator[str]: ... + + +def func2(a: int, b: float) -> str: ... + + +def func3(a: int) -> str: ... + + +func1(func2, 3, 1.1) + +# This should generate an error. +func1(func2, 3.1, 1.1) + +func1(func3, 3) + +# This should generate an error. +func1(func3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec51.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec51.py new file mode 100644 index 00000000..7b619957 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec51.py @@ -0,0 +1,51 @@ +# This sample tests a case where a method-scoped ParamSpec is used within one +# of several overloads but not in others. + +from typing import Callable, Concatenate, overload, Any +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + ParamSpec, + Self, +) + +P = ParamSpec("P") + + +class A: + @overload + def method1( + self, + cb: Callable[Concatenate[Self, P], None], + *args: P.args, + **kwargs: P.kwargs, + ) -> None: ... + + @overload + def method1( + self, cb: tuple[Callable[..., None], str], *args: Any, **kwargs: Any + ) -> None: ... 
+ + def method1(self, cb, *args, **kwargs) -> None: + if isinstance(cb, tuple): + cb[0](self, *args, **kwargs) + else: + cb(self, *args, **kwargs) + + +def func1(fo: A, x: int) -> None: ... + + +def func2(fo: A, x: int, /, y: str) -> None: ... + + +def func3(fo: A, x: int, /, y: str, *, z: tuple[int, int]) -> None: ... + + +a = A() + +a.method1(func1, 1) +a.method1(func2, 3, "f1") +a.method1(func3, 6, "f2", z=(0, 1)) + +a.method1((func1, "f1"), 1) +a.method1((func2, "f2"), 2, "a") +a.method1((func3, "f3"), 3, "b", z=(0, 1)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec52.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec52.py new file mode 100644 index 00000000..519f8436 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec52.py @@ -0,0 +1,19 @@ +# This sample tests an illegal use of a ParamSpec that resulted in +# a crash. + +from typing import Callable, Generic, ParamSpec + +P = ParamSpec("P") + + +class A(Generic[P]): + def __call__(self, a: int, b: int, *args: P.args, **kwargs: P.kwargs) -> None: ... + + +class B: + # This should generate an error. + x: A[P] + + +# This should generate an error, not crash. +B().x(1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec53.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec53.py new file mode 100644 index 00000000..6d81060b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec53.py @@ -0,0 +1,22 @@ +# This sample tests the case where a ParamSpec captures a named parameter +# that is combined with a positional-only parameter of the same name. + +from typing import TypeVar, Callable, ParamSpec + +P = ParamSpec("P") +T = TypeVar("T") + + +class Mixin: + @classmethod + def factory( + cls: Callable[P, T], data: str, /, *args: P.args, **kwargs: P.kwargs + ) -> T: ... 
+ + +class Next(Mixin): + def __init__(self, data: int) -> None: + pass + + +Next.factory("", data=2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec54.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec54.py new file mode 100644 index 00000000..a5e904fd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec54.py @@ -0,0 +1,31 @@ +# This sample tests a function that uses a Concatenate with a callback +# that has a *args parameter. + +from typing import Callable, Concatenate, reveal_type + + +def func1[T, **P, R](fn: Callable[Concatenate[T, P], R], val: T) -> Callable[P, R]: ... + + +def test1(*args: str) -> None: ... + + +reveal_type(func1(test1, ""), expected_text="(*args: str) -> None") +reveal_type(func1(func1(test1, ""), ""), expected_text="(*args: str) -> None") + + +def test2(p1: int, *args: str) -> None: ... + + +reveal_type(func1(test2, 0), expected_text="(*args: str) -> None") +reveal_type(func1(func1(test2, 0), ""), expected_text="(*args: str) -> None") +reveal_type(func1(func1(func1(test2, 0), ""), ""), expected_text="(*args: str) -> None") + + +def func2[T1, T2, **P, R]( + fn: Callable[Concatenate[T1, T2, P], R], val1: T1, val2: T2 +) -> Callable[P, R]: ... + + +reveal_type(func2(test1, "", ""), expected_text="(*args: str) -> None") +reveal_type(func2(func2(test1, "", ""), "", ""), expected_text="(*args: str) -> None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec55.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec55.py new file mode 100644 index 00000000..655a7b99 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec55.py @@ -0,0 +1,24 @@ +# This sample tests the case where a function with a ParamSpec +# is assigned to another function with a Concatenate and a ParamSpec. 
+ +from typing import Any, Concatenate, Callable + + +class MyGeneric[**P0]: + def __call__(self, *args: P0.args, **kwargs: P0.kwargs) -> Any: ... + + +def deco1[**P1](func: Callable[[Callable[P1, Any]], Any]) -> MyGeneric[P1]: ... + + +@deco1 +def func1[**P2](func: Callable[Concatenate[int, P2], Any]): ... + + +reveal_type(func1, expected_text="MyGeneric[(int, **P2@func1)]") + + +v1: MyGeneric[[int]] = func1 + +# This should generate an error. +v2: MyGeneric[[int, int]] = func1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec6.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec6.py new file mode 100644 index 00000000..ccb262cc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec6.py @@ -0,0 +1,15 @@ +# This sample tests that ParamSpecs support parameters with default values. + +from typing import Callable, ParamSpec, TypeVar + +P = ParamSpec("P") +R = TypeVar("R") + + +def func1(fn: Callable[P, R]) -> Callable[P, R]: ... + + +def func2(a: str, b: str = "") -> str: ... + + +func1(func2)("") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec7.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec7.py new file mode 100644 index 00000000..2218a170 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec7.py @@ -0,0 +1,20 @@ +# This sample tests the handling of a specialized function +# used as an argument to a ParamSpec. + +from typing import Callable, Generic, ParamSpec, TypeVar + +P = ParamSpec("P") +R = TypeVar("R") + + +def func1(f: Callable[P, R]) -> Callable[P, R]: ... + + +class ClassA(Generic[R]): + def method1(self, v: R) -> None: ... 
+ + +v1: ClassA[int] = ClassA() + +reveal_type(v1.method1, expected_text="(v: int) -> None") +reveal_type(func1(v1.method1), expected_text="(v: int) -> None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec8.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec8.py new file mode 100644 index 00000000..c548d8fd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec8.py @@ -0,0 +1,51 @@ +# This sample tests error conditions for ParamSpec (PEP 612). + +from typing import Callable, Concatenate, ParamSpec + +P = ParamSpec("P") + + +def add(f: Callable[P, int]) -> Callable[Concatenate[str, P], None]: + def func1(s: str, *args: P.args, **kwargs: P.kwargs) -> None: # Accepted + pass + + # Parameter 's' and 't' should generate an error according to PEP 612 + def func2(*args: P.args, s: str, t: int, **kwargs: P.kwargs) -> None: # Rejected + pass + + return func1 # Accepted + + +def remove(f: Callable[Concatenate[int, P], int]) -> Callable[P, None]: + def func1(*args: P.args, **kwargs: P.kwargs) -> None: + f(1, *args, **kwargs) # Accepted + + # Should generate an error because positional parameter + # after *args is not allowed. + f(*args, 1, **kwargs) # Rejected + + # Should generate an error because positional parameter + # is missing. + f(*args, **kwargs) # Rejected + + return func1 + + +def outer(f: Callable[P, None]) -> Callable[P, None]: + def func1(x: int, *args: P.args, **kwargs: P.kwargs) -> None: + f(*args, **kwargs) + + def func2(*args: P.args, **kwargs: P.kwargs) -> None: + func1(1, *args, **kwargs) # Accepted + + # This should generate an error because keyword parameters + # are not allowed in this situation. + func1(x=1, *args, **kwargs) # Rejected + + # This should generate an error because *args is duplicated. + func1(1, *args, *args, **kwargs) + + # This should generate an error because **kwargs is duplicated. 
+ func1(1, *args, **kwargs, **kwargs) + + return func2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramSpec9.py b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec9.py new file mode 100644 index 00000000..c0945cc3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramSpec9.py @@ -0,0 +1,78 @@ +# This sample tests the handling of a ParamSpec used with +# *args: P.args, **kwargs: P.kwargs. + +from typing import Any, Callable, ParamSpec + + +P = ParamSpec("P") + + +def twice(f: Callable[P, int], *args: P.args, **kwargs: P.kwargs) -> int: + return f(*args, **kwargs) + f(*args, **kwargs) + + +def a_int_b_str(a: int, b: str) -> int: + return 1 + + +twice(a_int_b_str, 1, "A") # Accepted + +twice(a_int_b_str, b="A", a=1) # Accepted + +twice(a_int_b_str, 1, b="hi") # Accepted + +# This should generate an error because b is a incorrect type. +twice(a_int_b_str, 1, b=2) # Rejected + +# This should generate an error because a is a incorrect type. +twice(a_int_b_str, "1", b="2") # Rejected + +# This should generate two errors because c is unknown and b is missing. +twice(a_int_b_str, 1, c=2) # Rejected + +# This should generate an error because c is unknown. +twice(a_int_b_str, 1, b="hi", c=2) # Rejected + +# This should generate an error because type of a is wrong. +twice(a_int_b_str, "A", "1") # Rejected + +# This should generate an error because type of b is wrong. +twice(a_int_b_str, 1, 1) # Rejected + +# This should generate an error because of too many arguments. +twice(a_int_b_str, 1, "1", 2) # Rejected + +# This should generate an error because of too few arguments. +twice(a_int_b_str, 1) # Rejected + +# This should generate an error because of too few arguments. 
+twice(a_int_b_str) # Rejected + + +def func1(func: Callable[P, Any], *args: P.args, **kwargs: P.kwargs): + pass + + +def func2(func: Callable[P, Any], *args: P.args, **kwargs: P.kwargs): + func1(func, *args, **kwargs) + + +def args_b(*args: int, b: str) -> int: + return 1 + + +some_args = (1, 2, 3) + +# This should generate an error because of too few arguments. +twice(args_b) + +# This should generate an error because of too few arguments. +twice(args_b, 3) + +# This should generate an error because it's missing a keyword argument. +twice(args_b, *some_args, 3) + +twice(args_b, *some_args, b="3") + +# This should generate an error because the keyword argument type is incorrect. +twice(args_b, *some_args, b=3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/paramType1.py b/python-parser/packages/pyright-internal/src/tests/samples/paramType1.py new file mode 100644 index 00000000..7e7c5d8f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/paramType1.py @@ -0,0 +1,59 @@ +# This sample validates that parameter types specified for "self" +# and "cls" parameters are compatible with the containing class. + +from typing import Iterator, TypeVar + + +class Parent: + pass + + +_T = TypeVar("_T") +_TChild1 = TypeVar("_TChild1", bound="Child1") + + +class Child1: + def m1(self: "Child1"): ... + + # This should generate an error. + def m2(self: Parent): ... + + # This should generate an error. + def m3(self: type["Child1"]): ... + + def m4(self: _TChild1) -> _TChild1: ... + + # This should generate an error. + def m5(self: type[_TChild1]) -> _TChild1: ... + + def m6(self: _T) -> _T: ... + + # This should generate an error. + def __new__(cls: "Child1"): ... + + @classmethod + def cm1(cls: type["Child1"]): ... + + # This should generate an error. + @classmethod + # This should generate an error. + def cm2(cls: "Child1"): ... + + @classmethod + # This should generate an error. + def cm3(cls: type[Parent]): ... 
+ + @classmethod + def cm4(cls: type[_TChild1]) -> _TChild1: ... + + # This should generate an error. + @classmethod + # This should generate an error. + def cm5(cls: _TChild1) -> _TChild1: ... + + @classmethod + def cm6(cls: type[_T]) -> _T: ... + + +class MyMeta(type): + def m1(self: type[_T]) -> Iterator[_T]: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/parameters1.py b/python-parser/packages/pyright-internal/src/tests/samples/parameters1.py new file mode 100644 index 00000000..fabe4ee9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/parameters1.py @@ -0,0 +1,14 @@ +# This sample tests the reportMissingParameterType check. + + +class A: + # This should generate an error if reportMissingParameterType is enabled + # because 'y' is missing a type annotation. + def method1(self, x: int, _, y) -> int: ... + + def method2(self, x, y): + # type: (int, int) -> int + ... + + +def g(__p: int, x: int, y: str): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/parser1.py b/python-parser/packages/pyright-internal/src/tests/samples/parser1.py new file mode 100644 index 00000000..9c99aafd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/parser1.py @@ -0,0 +1,6 @@ + +# This file should parse without errors +a +b +c; d; e; +f; g diff --git a/python-parser/packages/pyright-internal/src/tests/samples/parser2.py b/python-parser/packages/pyright-internal/src/tests/samples/parser2.py new file mode 100644 index 00000000..7467c6ae --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/parser2.py @@ -0,0 +1,11 @@ +class A: +\ + pass + +class B: + \ + pass + +class C: + \ +pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/parserRecovery1.py b/python-parser/packages/pyright-internal/src/tests/samples/parserRecovery1.py new file mode 100644 index 00000000..ec041fb6 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/samples/parserRecovery1.py @@ -0,0 +1,4 @@ +def func1(input: str): + if input[0] == "[": + +def func2(ch: str): diff --git a/python-parser/packages/pyright-internal/src/tests/samples/parserRecovery2.py b/python-parser/packages/pyright-internal/src/tests/samples/parserRecovery2.py new file mode 100644 index 00000000..d1cf3a70 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/parserRecovery2.py @@ -0,0 +1,4 @@ +def func1(input: str): + if input[0] == "[": + + def func2(ch: str): diff --git a/python-parser/packages/pyright-internal/src/tests/samples/parserRecovery3.py b/python-parser/packages/pyright-internal/src/tests/samples/parserRecovery3.py new file mode 100644 index 00000000..756313b2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/parserRecovery3.py @@ -0,0 +1,6 @@ +def func1(input: str): + if True: + if False: + if False: + +def func2(ch: str): diff --git a/python-parser/packages/pyright-internal/src/tests/samples/partial1.py b/python-parser/packages/pyright-internal/src/tests/samples/partial1.py new file mode 100644 index 00000000..558c1907 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/partial1.py @@ -0,0 +1,175 @@ +# This sample tests the functools.partial support. + +from functools import partial +from typing import Callable, Protocol, Self, TypeVar + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2", covariant=True) + + +def func1(): + """func1""" + return 0 + + +p1_0 = partial(func1) + +reveal_type(p1_0(), expected_text="Literal[0]") + +# This should generate an error. +p1_0("") + +# This should generate an error. +p1_1 = partial(func1, "", "") + + +def func2(name: str, number: int) -> None: + """func2""" + pass + + +p2_0 = partial(func2) + +reveal_type(p2_0("", 3), expected_text="None") + +# This should generate an error. +p2_0() + +# This should generate an error. +p2_0("") + +# This should generate an error. 
+p2_0("", 3, 3) + +# This should generate an error. +p2_0("", 3, 3) + +p2_0("", number=3) + +# This should generate an error. +p2_0("", 3, number=3) + +p2_1 = partial(func2, "") + +# This should generate an error. +p2_1() + +p2_1(3) +p2_1(number=3) + +# This should generate an error. +p2_1(3, number=3) + +p2_2 = partial(func2, number=3) +p2_2("") + +p2_3 = partial(func2, number=3, name="") +p2_3() + + +def func3(name: str, /, number: int): + return 0 + + +p3_0 = partial(func3) + +reveal_type(p3_0("", 3), expected_text="Literal[0]") + +# This should generate an error. +p3_0(name="", number=3) + +p3_1 = partial(func3, "") + +p3_1(3) +p3_1(number=3) + + +def func4(name: str, *, number: int): + return 0 + + +p4_0 = partial(func4) + +# This should generate an error. +p4_0("", 3) + +p4_0("", number=3) + + +def func5(name: _T1, number: _T1) -> _T1: + return name + + +p5_0 = partial(func5) +reveal_type(p5_0(3, 3), expected_text="int") +reveal_type(p5_0("3", "3"), expected_text="str") + + +p5_1 = partial(func5, 2) + +p5_1(3) + +# This should generate an error. +p5_1("3") + + +def func6(a: int, name: _T1, number: _T1) -> _T1: + return name + + +p6_0 = partial(func6, 3, 4) + +reveal_type(p6_0(3), expected_text="int") + + +def func7(a: int, name: float, *args: str): + return 0 + + +p7_0 = partial(func7, 3, 3, "", "", "") +p7_0("", "") + +# This should generate an error. +p7_0(3) + +p7_1 = partial(func7) +p7_1(3, 0) +p7_1(3, 0, "", "") + +# This should generate an error. +p7_1(3, 0, foo=3) + + +def func8(a: int, name: str, **kwargs: int): + return 0 + + +p8_0 = partial(func8, 3, "") +p8_0() +p8_0(foo=3) + +# This should generate an error. +p8_0(foo="") + +p8_1 = partial(func8) +p8_1(3, "") + +# This should generate an error. +p8_1(3) + + +# This should generate an error. +p8_1(3, "", 5) + +p8_1(3, "", foo=4, bar=5) + + +class Partial(Protocol[_T2]): + def __new__(cls, __func: Callable[..., _T2]) -> Self: ... + + +def func9() -> int: ... + + +# This should generate an error. 
+x: Partial[str] = partial(func9) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/partial2.py b/python-parser/packages/pyright-internal/src/tests/samples/partial2.py new file mode 100644 index 00000000..c48453c9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/partial2.py @@ -0,0 +1,29 @@ +# This sample tests the functools.partial support for keyword parameters. + +from functools import partial +from typing import Protocol + + +def func1(a: int, b: int, x: str) -> str: + return x + + +class Proto1(Protocol): + def __call__(self, x: str) -> str: ... + + +func2: Proto1 = partial(func1, 3, 4, x="a") +func2() +func2(x="b") + + +class Proto2(Protocol): + def __call__(self, b: int) -> str: ... + + +func3: Proto2 = partial(func1, 3, b=3, x="a") +func3() +func3(x="b") +func3(b=3) +func3(x="b", b=3) +func3(b=3, x="b") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/partial3.py b/python-parser/packages/pyright-internal/src/tests/samples/partial3.py new file mode 100644 index 00000000..619cdb9e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/partial3.py @@ -0,0 +1,31 @@ +# This sample tests that the functools.partial special-case logic +# properly handles bidirectional type inference for argument evaluation. 
+ +from functools import partial + + +class BaseClass: + pass + + +class SubClass(BaseClass): + pass + + +def func_base(base: BaseClass): + pass + + +def func_list(base: list[BaseClass]): + pass + + +def func_set(base: set[BaseClass]): + pass + + +sub = SubClass() + +partial(func_base, sub) +partial(func_list, [sub]) +partial(func_set, {sub}) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/partial4.py b/python-parser/packages/pyright-internal/src/tests/samples/partial4.py new file mode 100644 index 00000000..f5bbafba --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/partial4.py @@ -0,0 +1,36 @@ +# This sample tests the case where an overloaded function is passed +# to functools.partial. + +from functools import partial +from typing import overload + + +@overload +def func1(val1: float, val2: float) -> float: ... + + +@overload +def func1(val1: str, val2: str) -> str: ... + + +def func1(val1: float | str, val2: float | str) -> float | str: + return max(val1, val2) + + +def func2(): + op_float = partial(func1, 1.0) + v1 = op_float(2.0) + reveal_type(v1, expected_text="float") + + # This should generate an error. + op_float("a") + + op_str = partial(func1, "a") + v2 = op_str("b") + reveal_type(v2, expected_text="str") + + # This should generate an error. + op_str(1.0) + + # This should generate an error. + op_complex = partial(func1, 3j) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/partial5.py b/python-parser/packages/pyright-internal/src/tests/samples/partial5.py new file mode 100644 index 00000000..960193ee --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/partial5.py @@ -0,0 +1,50 @@ +# This sample tests the case where a class is passed as the first argument +# to functools.partial. + +from dataclasses import dataclass +from functools import partial +from typing import Self, TypeVar + + +class A: + def __init__(self, x: int, y: int) -> None: ... 
+ + +# This should generate an error because "y" has the wrong type. +v1 = partial(A, x=1, y="a") + +v2 = partial(A, x=1, y=2) +reveal_type(v2, expected_text="partial[A]") +v2() +v2(x=2) + + +T = TypeVar("T", bound=A) + + +def func1(x: type[T]): + # This should generate an error because "z" is not a valid parameter. + v1 = partial(x, x=1, z="a") + + v2 = partial(x, y=1) + + # This should generate an error because it's missing "x". + v2() + + v2(x=1) + + +@dataclass +class B: + x: int + y: str + + @classmethod + def from_x(cls, x: int) -> Self: + make_b = partial(cls, x=x) + reveal_type(make_b, expected_text="partial[Self@B]") + + self = make_b(y="") + reveal_type(self, expected_text="Self@B") + + return self diff --git a/python-parser/packages/pyright-internal/src/tests/samples/partial6.py b/python-parser/packages/pyright-internal/src/tests/samples/partial6.py new file mode 100644 index 00000000..8d28674f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/partial6.py @@ -0,0 +1,25 @@ +# This sample tests functools.partial with an unpacked TypedDict in the +# **kwargs annotation. + +from functools import partial +from typing import TypedDict, Unpack + + +class DC1(TypedDict, total=False): + x: str + y: int + + +def test1(**kwargs: Unpack[DC1]) -> None: ... + + +test1_partial = partial(test1, x="") + +# This should generate an error. +test1_partial(x=1) + +# This should generate an error. +test1_partial(y="") + +test1_partial(x="") +test1_partial(y=1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/partial7.py b/python-parser/packages/pyright-internal/src/tests/samples/partial7.py new file mode 100644 index 00000000..5214c206 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/partial7.py @@ -0,0 +1,28 @@ +# This sample tests the case where functools.partial is applied to +# a function that has a **kwargs parameter that is typed as an +# unpacked TypedDict. 
+ +from functools import partial +from typing import TypedDict, NotRequired, Unpack + + +class TD1(TypedDict): + c: list[str] + a: int + b: NotRequired[str] + + +def func1(**kwargs: Unpack[TD1]) -> None: + print(f"a: {kwargs['a']}, b: {kwargs.get('b')}, c: {kwargs['c']}") + + +func1_1 = partial(func1, c=["a", "b"], a=2) +func1_1(b="2") + +func1_2 = partial(func1, a=2, b="", c=["a", "b"]) +func1_2(a=2, b="2") + +func1_3 = partial(func1, c=["a", "b"]) + +# This should generate an error. +func1_3(b="2") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/partial8.py b/python-parser/packages/pyright-internal/src/tests/samples/partial8.py new file mode 100644 index 00000000..7f959f87 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/partial8.py @@ -0,0 +1,17 @@ +# This sample tests the case where functools.partial is applied to +# a function that includes a positional-only parameter separator. + +from functools import partial + + +def func1(s: int, /, a: int, b: str) -> int: ... + + +func1_partial = partial(func1, 1, 0, "") +reveal_type(func1_partial(), expected_text="int") + +func1_partial_missing = partial(func1, 1) +reveal_type(func1_partial_missing(0, ""), expected_text="int") + +# This should generate an error. +partial(func1, s=1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/private1.py b/python-parser/packages/pyright-internal/src/tests/samples/private1.py new file mode 100644 index 00000000..9aa95d53 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/private1.py @@ -0,0 +1,55 @@ +# This sample tests the "reportPrivateUsage" feature. 
+ +from typing import NamedTuple +from .private2 import TestClass, _TestClass, TestClass as _Foo + +_Test = 1 + + +class Foo(object): + _my_var1 = 1 + + _my_var2 = _my_var1 + + def foo(self): + a = _Test + return self._my_var1 + + +# This should generate an error +a = _TestClass() + +b = TestClass() + +a = _Foo() + +# This should generate an error +c = b.__priv1 + + +d = Foo() + +# This should generate an error +e = d._my_var1 + +f = _Test + + +class TestSubclass(TestClass): + def blah(self): + return self._prot1 + + def blah2(self): + # This should generate an error + return self.__priv1 + + +class MyTuple(NamedTuple): + field1: int + field2: str + + +# This should not generate an error because _replace is declared +# within a stub file and is presumably part of the public interface +# contract. +MyTuple(1, "2")._replace(field1=3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/private2.py b/python-parser/packages/pyright-internal/src/tests/samples/private2.py new file mode 100644 index 00000000..48a49ba5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/private2.py @@ -0,0 +1,11 @@ +# This sample tests the "reportPrivateUsage" feature. 
+ + +class _TestClass(object): + pass + + +class TestClass(object): + def __init__(self): + self.__priv1 = 1 + self._prot1 = 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project1/pyrightconfig.json b/python-parser/packages/pyright-internal/src/tests/samples/project1/pyrightconfig.json new file mode 100644 index 00000000..1ed3a393 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project1/pyrightconfig.json @@ -0,0 +1,5 @@ +{ + "include": [ + "subfolder1" + ] +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project1/sample1.py b/python-parser/packages/pyright-internal/src/tests/samples/project1/sample1.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project1/subfolder1/sample2.py b/python-parser/packages/pyright-internal/src/tests/samples/project1/subfolder1/sample2.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project1/subfolder1/subfolder1-1/sample3.py b/python-parser/packages/pyright-internal/src/tests/samples/project1/subfolder1/subfolder1-1/sample3.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project2/pyrightconfig.json b/python-parser/packages/pyright-internal/src/tests/samples/project2/pyrightconfig.json new file mode 100644 index 00000000..2fefc82b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project2/pyrightconfig.json @@ -0,0 +1,3 @@ +{ + "files": "NotAnArray.py" +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project3/pyrightconfig.json b/python-parser/packages/pyright-internal/src/tests/samples/project3/pyrightconfig.json new file mode 100644 index 00000000..561317c6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project3/pyrightconfig.json @@ -0,0 +1,7 @@ +{ + "files": 
[ + { + "NotAString": "Bad" + } + ] +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project4/presentfile.py b/python-parser/packages/pyright-internal/src/tests/samples/project4/presentfile.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project4/pyrightconfig.json b/python-parser/packages/pyright-internal/src/tests/samples/project4/pyrightconfig.json new file mode 100644 index 00000000..562e7702 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project4/pyrightconfig.json @@ -0,0 +1,11 @@ +{ + "include": [ + "missingfile.py", + "presentfile.py", + "missingdirectory", + "subfolder" + ], + "exclude": [ + "subfolder/presentfile2.py" + ] +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project4/subfolder/presentfile2.py b/python-parser/packages/pyright-internal/src/tests/samples/project4/subfolder/presentfile2.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project4/subfolder/presentfile3.py b/python-parser/packages/pyright-internal/src/tests/samples/project4/subfolder/presentfile3.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project5/pyrightconfig.json b/python-parser/packages/pyright-internal/src/tests/samples/project5/pyrightconfig.json new file mode 100644 index 00000000..d2cfcddc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project5/pyrightconfig.json @@ -0,0 +1,4 @@ +{ + "files": [ + // Bad json +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project6/app1.py b/python-parser/packages/pyright-internal/src/tests/samples/project6/app1.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project6/app2.py 
b/python-parser/packages/pyright-internal/src/tests/samples/project6/app2.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project6/projectA/foo/__init__.py b/python-parser/packages/pyright-internal/src/tests/samples/project6/projectA/foo/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project6/projectA/foo/bar/__init__.py b/python-parser/packages/pyright-internal/src/tests/samples/project6/projectA/foo/bar/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project6/projectB/foo/__init__.py b/python-parser/packages/pyright-internal/src/tests/samples/project6/projectB/foo/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project6/projectB/foo/baz/__init__.py b/python-parser/packages/pyright-internal/src/tests/samples/project6/projectB/foo/baz/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_src/src/module1.py b/python-parser/packages/pyright-internal/src/tests/samples/project_src/src/module1.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_src_is_pkg/src/__init__.py b/python-parser/packages/pyright-internal/src/tests/samples/project_src_is_pkg/src/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_src_is_pkg/src/module1.py b/python-parser/packages/pyright-internal/src/tests/samples/project_src_is_pkg/src/module1.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_config_extra_paths/pyrightconfig.json 
b/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_config_extra_paths/pyrightconfig.json new file mode 100644 index 00000000..12bedf19 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_config_extra_paths/pyrightconfig.json @@ -0,0 +1,3 @@ +{ + "extraPaths": [] +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_config_extra_paths/src/module1.py b/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_config_extra_paths/src/module1.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_config_no_extra_paths/pyrightconfig.json b/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_config_no_extra_paths/pyrightconfig.json new file mode 100644 index 00000000..2c63c085 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_config_no_extra_paths/pyrightconfig.json @@ -0,0 +1,2 @@ +{ +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_config_no_extra_paths/src/module1.py b/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_config_no_extra_paths/src/module1.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_extra_paths/src/_vendored/vendored1.py b/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_extra_paths/src/_vendored/vendored1.py new file mode 100644 index 00000000..0fe3bd98 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_extra_paths/src/_vendored/vendored1.py @@ -0,0 +1 @@ +MSG = 'hello' diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_extra_paths/src/module1.py 
b/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_extra_paths/src/module1.py new file mode 100644 index 00000000..b3697dfa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_src_with_extra_paths/src/module1.py @@ -0,0 +1,2 @@ +from vendored1 import MSG +print(MSG) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_all_config/pyrightconfig.json b/python-parser/packages/pyright-internal/src/tests/samples/project_with_all_config/pyrightconfig.json new file mode 100644 index 00000000..337faba7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_with_all_config/pyrightconfig.json @@ -0,0 +1,31 @@ +{ + "analyzeUnannotatedFunctions": false, + "defineConstant": { + "CONSTANT1": "string" + }, + "executionEnvironments": [ + { + "root": ".", + "python": "python3", + "extraPaths": ["extraPath"] + } + ], + "typeCheckingMode": "off", + "exclude": ["test"], + "include": ["test"], + "ignore": ["test"], + "extraPaths": ["test"], + "pythonVersion": "3.7", + "pythonPlatform": "All", + "strict": ["test"], + "deprecateTypingAliases": true, + "disableBytesTypePromotions": true, + "enableExperimentalFeatures": true, + "enableTypeIgnoreComments": false, + "stubPath": "test", + "typeshedPath": "test", + "venv": "test", + "venvPath": "test", + "useLibraryCodeForTypes": false, + "verboseOutput": true +} \ No newline at end of file diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_diag_overrides/pyrightconfig.json b/python-parser/packages/pyright-internal/src/tests/samples/project_with_diag_overrides/pyrightconfig.json new file mode 100644 index 00000000..37d4f873 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_with_diag_overrides/pyrightconfig.json @@ -0,0 +1,4 @@ +{ + "include": ["subdir"], + "reportPrivateImportUsage": false +} diff --git 
a/python-parser/packages/pyright-internal/src/tests/samples/project_with_diag_overrides/subdir/sample.py b/python-parser/packages/pyright-internal/src/tests/samples/project_with_diag_overrides/subdir/sample.py new file mode 100644 index 00000000..f6488ba1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_with_diag_overrides/subdir/sample.py @@ -0,0 +1 @@ +x: int = 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/baseconfig1.toml b/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/baseconfig1.toml new file mode 100644 index 00000000..face7634 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/baseconfig1.toml @@ -0,0 +1,3 @@ +[tool.pyright] +extends = "sub2/baseconfig2.json" +typeCheckingMode = "strict" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/pyproject.toml b/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/pyproject.toml new file mode 100644 index 00000000..784825fb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/pyproject.toml @@ -0,0 +1,2 @@ +[tool.pyright] +extends = "baseconfig1.toml" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/sub2/baseconfig2.json b/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/sub2/baseconfig2.json new file mode 100644 index 00000000..65c0afdf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/sub2/baseconfig2.json @@ -0,0 +1,4 @@ +{ + "extends": "../sub3/baseconfig3.json", + "typeCheckingMode": "standard" +} \ No newline at end of file diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/sub3/baseconfig3.json 
b/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/sub3/baseconfig3.json new file mode 100644 index 00000000..33f7b917 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/sub3/baseconfig3.json @@ -0,0 +1,4 @@ +{ + "stubPath": "stubs", + "typeCheckingMode": "basic" +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/test.py b/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/test.py new file mode 100644 index 00000000..80c06c44 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_with_extended_config/test.py @@ -0,0 +1,7 @@ +# pyright: reportMissingModuleSource=false + +from typing import assert_type +from sample import x + +assert_type(x, int) + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_pyproject_toml/pyproject.toml b/python-parser/packages/pyright-internal/src/tests/samples/project_with_pyproject_toml/pyproject.toml new file mode 100644 index 00000000..5362153a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_with_pyproject_toml/pyproject.toml @@ -0,0 +1,7 @@ +[tool.pyright] +include = ["test*.py"] + +reportMissingImports = true +reportUnusedClass = "warning" + +pythonVersion = "3.9" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_pyproject_toml_platform/pyproject.toml b/python-parser/packages/pyright-internal/src/tests/samples/project_with_pyproject_toml_platform/pyproject.toml new file mode 100644 index 00000000..a563232a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_with_pyproject_toml_platform/pyproject.toml @@ -0,0 +1,4 @@ +[tool.pyright] +executionEnvironments = [ + { python = "3.7", pythonPlatform = "platform" }, +] \ No newline at end of file diff --git 
a/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_exclude/myvenv/Lib/site-packages/library1.py b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_exclude/myvenv/Lib/site-packages/library1.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_exclude/myvenv/pyvenv.cfg b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_exclude/myvenv/pyvenv.cfg new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_exclude/pyrightconfig.json b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_exclude/pyrightconfig.json new file mode 100644 index 00000000..2c63c085 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_exclude/pyrightconfig.json @@ -0,0 +1,2 @@ +{ +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_exclude/sample1.py b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_exclude/sample1.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_exclude/subfolder1/sample2.py b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_exclude/subfolder1/sample2.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_exclude/subfolder1/subfolder1-1/sample3.py b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_exclude/subfolder1/subfolder1-1/sample3.py new file mode 100644 index 00000000..e69de29b diff --git 
a/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/excluded/excluded1.py b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/excluded/excluded1.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/myvenv/Lib/site-packages/library1.py b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/myvenv/Lib/site-packages/library1.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/myvenv/pyvenv.cfg b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/myvenv/pyvenv.cfg new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/pyrightconfig.json b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/pyrightconfig.json new file mode 100644 index 00000000..37e060e5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/pyrightconfig.json @@ -0,0 +1,3 @@ +{ + "exclude": ["excluded"] +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/sample1.py b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/sample1.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/subfolder1/sample2.py b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/subfolder1/sample2.py new file mode 100644 index 00000000..e69de29b diff --git 
a/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/subfolder1/subfolder1-1/sample3.py b/python-parser/packages/pyright-internal/src/tests/samples/project_with_venv_auto_detect_include/subfolder1/subfolder1-1/sample3.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property1.py b/python-parser/packages/pyright-internal/src/tests/samples/property1.py new file mode 100644 index 00000000..13f35c3d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property1.py @@ -0,0 +1,93 @@ +# This sample tests the type checker's ability to validate +# properties. + + +from typing import Self + + +class ClassA: + @property + def read_only_prop(self): + return 1 + + @property + def read_write_prop(self): + return "hello" + + @read_write_prop.setter + def read_write_prop(self, value: str): + return + + @property + def deletable_prop(self): + return 1 + + @deletable_prop.deleter + def deletable_prop(self): + return + + +a = ClassA() + +# These are disabled because fget, fset and fdel are not +# properly modeled for type checking. +# ClassA.read_only_prop.fget(ClassA()) +# ClassA.read_write_prop.fset(ClassA(), "") +# ClassA.deletable_prop.fdel(ClassA()) + +val = a.read_only_prop + +reveal_type(ClassA.read_only_prop, expected_text="property") +reveal_type(ClassA.read_only_prop.__doc__, expected_text="str | None") + +# This should generate an error because this +# property has no setter. +a.read_only_prop = val + +# This should generate an error because this +# property has no deleter. +del a.read_only_prop + +val = a.read_write_prop + +a.read_write_prop = "hello" + +# This should generate an error because the type +# is incorrect. +a.read_write_prop = ClassA() + +# This should generate an error because this +# property has no deleter. 
+del a.read_write_prop + +val = a.deletable_prop + +# This should generate an error because this +# property has no setter. +a.deletable_prop = val + +del a.deletable_prop + + +class ClassB: + @property + def name(self) -> str: + return "bar" + + +p1: property = ClassA.read_only_prop +p2: property = ClassA.read_write_prop +p3: property = ClassA.deletable_prop + + +class ClassC: + @property + def prop1(self) -> type[Self]: ... + + def method1(self) -> None: + reveal_type(self.prop1, expected_text="type[Self@ClassC]") + + +class ClassD(ClassC): + def method1(self) -> None: + reveal_type(self.prop1, expected_text="type[Self@ClassD]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property10.py b/python-parser/packages/pyright-internal/src/tests/samples/property10.py new file mode 100644 index 00000000..c2540674 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property10.py @@ -0,0 +1,19 @@ +# This sample tests the case where properties are unannotated, +# and the type needs to be determined via inference. + + +class ClassA: + def __init__(self): + self._x = None + + @property + def x(self): + return self._x + + @x.setter + def x(self, value): + self._x = value + + +c = ClassA() +reveal_type(c.x, expected_text="Unknown | None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property11.py b/python-parser/packages/pyright-internal/src/tests/samples/property11.py new file mode 100644 index 00000000..f29b2c46 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property11.py @@ -0,0 +1,43 @@ +# This sample tests the handling of class properties, which +# are supported in Python 3.9 and newer. 
+ + +from typing import TypeVar + + +class Class1: + @classmethod + @property + def prop1(cls) -> str: + return "" + + @classmethod + @prop1.setter + def prop1(cls, value: str): + pass + + +reveal_type(Class1.prop1, expected_text="str") +reveal_type(Class1().prop1, expected_text="str") + +Class1.prop1 = "hi" + +# This should generate an error. +Class1.prop1 = 1 + + +T = TypeVar("T", bound="Class2") + + +class Class2: + @classmethod + @property + def prop1(cls: type[T]) -> type[T]: + return cls + + +class Class3(Class2): ... + + +reveal_type(Class2.prop1, expected_text="type[Class2]") +reveal_type(Class3.prop1, expected_text="type[Class3]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property12.py b/python-parser/packages/pyright-internal/src/tests/samples/property12.py new file mode 100644 index 00000000..bee49b46 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property12.py @@ -0,0 +1,23 @@ +# This sample tests the use of @functools.cache with a property. + +from functools import cache + + +class Rectangle: + def __init__(self, length: int, width: int) -> None: + self._length = length + self._width = width + + @property + @cache + def area(self) -> int: + return self._length * self._width + + +def is_large_rectangle(rec: Rectangle) -> bool: + print(rec.area) + return rec.area >= 100 + + +rec = Rectangle(10, 10) +print(is_large_rectangle(rec)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property13.py b/python-parser/packages/pyright-internal/src/tests/samples/property13.py new file mode 100644 index 00000000..8595d347 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property13.py @@ -0,0 +1,14 @@ +# This sample tests the case where a property is defined on a metaclass. + + +class MyMeta(type): + @property + def something(cls) -> "Base": + return Base(1234) + + +class Base(metaclass=MyMeta): + def __new__(cls, arg) -> "Base": ... 
+ + +reveal_type(Base.something, expected_text="Base") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property14.py b/python-parser/packages/pyright-internal/src/tests/samples/property14.py new file mode 100644 index 00000000..a0ffc08e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property14.py @@ -0,0 +1,23 @@ +# This sample handles the case where a property setter contains +# a function-scoped TypeVar. + +from typing import Hashable, TypeVar, Sequence + +HashableT = TypeVar("HashableT", bound=Hashable) + + +class ClassA: + def __init__(self): + self._something = [] + + @property + def something(self) -> Sequence[Hashable]: + return self._something + + @something.setter + def something(self, thing: list[HashableT]): + self._something = thing + + +f = ClassA() +f.something = ["a", "b", "c"] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property15.py b/python-parser/packages/pyright-internal/src/tests/samples/property15.py new file mode 100644 index 00000000..2556940d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property15.py @@ -0,0 +1,24 @@ +# This sample tests the specialization of property return types. 
+ +from typing import TypeVar, Generic + +T = TypeVar("T") + + +class ClassA(Generic[T]): + def __init__(self, bar: T): + self._bar = bar + + @property + def prop1(self) -> T: + return self._bar + + def method1(self) -> T: + reveal_type(self._bar, expected_text="T@ClassA") + return self._bar + + +a = ClassA[int](3) + +# This should work fine because a.prop1 should be an int +a.prop1.bit_length() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property16.py b/python-parser/packages/pyright-internal/src/tests/samples/property16.py new file mode 100644 index 00000000..045735c4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property16.py @@ -0,0 +1,40 @@ +# This sample tests the case where a property's getter and setter +# are defined in different classes. + +# pyright: reportIncompatibleMethodOverride=false + +from typing import Generic, Self, TypeVar + + +T = TypeVar("T") + + +class Parent(Generic[T]): + @property + def prop1(self) -> T: ... + + @property + def prop2(self) -> Self: ... + + +class Child(Parent[str]): + @Parent.prop1.setter + def prop1(self, value: str) -> None: ... + + @Parent.prop2.setter + def prop2(self, value: str) -> None: ... + + +parent = Parent[int]() +reveal_type(parent.prop1, expected_text="int") +reveal_type(parent.prop2, expected_text="Parent[int]") + +# This should generate an error because there is no setter +# on the parent's property.
+parent.prop1 = "" + +child = Child() +reveal_type(child.prop1, expected_text="str") +reveal_type(child.prop2, expected_text="Child") + +child.prop1 = "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property17.py b/python-parser/packages/pyright-internal/src/tests/samples/property17.py new file mode 100644 index 00000000..cf1cf1a7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property17.py @@ -0,0 +1,38 @@ +# This sample tests a complex intersection between generic protocols +# and properties that are defined in mix-in classes. + +from dataclasses import dataclass +from typing import Generic, Protocol, TypeVar + +T_co = TypeVar("T_co", covariant=True) +T = TypeVar("T") + + +class Proto(Protocol[T_co]): + @property + def prop(self) -> T_co: ... + + +@dataclass +class Model(Generic[T]): + prop: T + + +class RootProto(Protocol[T_co]): + @property + def root(self) -> Proto[T_co]: ... + + +class RootMixin(Generic[T]): + @property + def prop(self: RootProto[T]) -> T: + return self.root.prop + + +@dataclass +class Root(RootMixin[T]): + root: Model[T] + + +def func1(s: Root[str]): + x: Proto[str] = s diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property18.py b/python-parser/packages/pyright-internal/src/tests/samples/property18.py new file mode 100644 index 00000000..bb091de8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property18.py @@ -0,0 +1,44 @@ +# This sample tests the case where a @property decorator is applied to +# a method that has been previously decorated. + +from typing import Concatenate, ParamSpec, Protocol, TypeVar, Callable + +P = ParamSpec("P") +R = TypeVar("R") +S = TypeVar("S", bound="HasAttr") + + +def deco1(func: Callable[P, R]) -> Callable[P, R]: ... 
+ + +class ClassA: + @property + @deco1 + def prop(self) -> int: + return 1 + + +a = ClassA() +reveal_type(a.prop, expected_text="int") + + +class HasAttr(Protocol): + my_attr: str + + +def decorate( + func: Callable[Concatenate[S, P], R], +) -> Callable[Concatenate[S, P], R]: ... + + +class ClassB: + my_attr: str + + @property + @decorate + def prop(self) -> int: + return 1 + + +b = ClassB() +reveal_type(b.prop, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property2.py b/python-parser/packages/pyright-internal/src/tests/samples/property2.py new file mode 100644 index 00000000..3b87aec4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property2.py @@ -0,0 +1,34 @@ +# This sample tests handling of the @abc.abstractproperty decorator. + +import abc + + +def requires_int(x: int): + pass + + +class Foo(abc.ABC): + @abc.abstractproperty + def x(self) -> int: + raise NotImplementedError + + @x.setter + def x(self, value: int): + raise NotImplementedError + + @abc.abstractproperty + def y(self) -> float: + raise NotImplementedError + + +a = Foo() +requires_int(a.x) + +a.x = 3 + +# This should generate an error because a.y is not an int +requires_int(a.y) + +# This should generate an error because the assigned type +# isn't compatible with the setter. +a.x = 4.5 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property3.py b/python-parser/packages/pyright-internal/src/tests/samples/property3.py new file mode 100644 index 00000000..708ddb95 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property3.py @@ -0,0 +1,93 @@ +# This sample tests the type checker's ability to handle +# custom subclasses of property. 
+ + +from typing import Any, Callable + + +class custom_property1(property): + pass + + +class Custom1(object): + @custom_property1 + def x(self) -> int: + return 3 + + @custom_property1 + def y(self) -> float: + return 3.5 + + @y.setter + def y(self, val: float): + pass + + @y.deleter + def y(self): + pass + + +m1 = Custom1() + +a1: int = m1.x + +# This should generate an error because m.x is +# an int and cannot be assigned to str. +b1: str = m1.x + +c1: float = m1.y + +# This should generate an error because m.y is +# a float and cannot be assigned to int. +d1: int = m1.y + +# This should generate an error because there +# is no setter for x. +m1.x = 4 + +m1.y = 4 + +# This should generate an error because there is +# no deleter for x. +del m1.x + +del m1.y + + +class custom_property2(property): + _custom_func: Callable[..., Any] | None + + def custom_function(self, _custom_func: Callable[..., Any]): + self._custom_func = _custom_func + return self + + +class Custom2(object): + @custom_property2 + def x(self) -> int: + return 3 + + @custom_property2 + def y(self) -> float: + return 3.5 + + @y.setter + def y(self, val: float): + pass + + @y.deleter + def y(self): + pass + + @y.custom_function + def y(self): + pass + + +m2 = Custom2() + +a2 = m2.y +reveal_type(a2, expected_text="float") + +m2.y = 4 +del m2.y diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property4.py b/python-parser/packages/pyright-internal/src/tests/samples/property4.py new file mode 100644 index 00000000..338eb9ed --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property4.py @@ -0,0 +1,16 @@ +# This sample tests the handling of a property that's defined +# with a generic type for the "self" parameter. + +from typing import TypeVar + + +_P = TypeVar("_P", bound=str) + + +class ClassA(str): + @property + def parent(self: _P) -> _P: ... 
+ + +p = ClassA().parent +reveal_type(p, expected_text="ClassA") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property5.py b/python-parser/packages/pyright-internal/src/tests/samples/property5.py new file mode 100644 index 00000000..272d5c7f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property5.py @@ -0,0 +1,29 @@ +# This sample tests the specialization of a property +# provided by a generic subclass. + +from typing import TypeVar, Generic + +T = TypeVar("T", bound=int) + + +class Foo(Generic[T]): + def __init__(self, bar: T): + self._bar = bar + + @property + def bar(self) -> T: + return self._bar + + def bar_method(self) -> T: + return self._bar + + +class NewInt(int): + def new_thing(self): + pass + + +class FooNewInt(Foo[NewInt]): + def fizz(self) -> None: + self.bar.new_thing() + self.bar_method().new_thing() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property6.py b/python-parser/packages/pyright-internal/src/tests/samples/property6.py new file mode 100644 index 00000000..b2033a10 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property6.py @@ -0,0 +1,35 @@ +# This sample tests the reportPropertyTypeMismatch diagnostic rule. + +from typing import Generic, TypeVar + +_T = TypeVar("_T") + + +class ClassA(Generic[_T]): + @property + def prop_1(self) -> float | None: + return 2 + + @prop_1.setter + def prop_1(self, value: int) -> None: + pass + + @property + def prop_2(self) -> int | None: + return 2 + + # This should generate an error because a float + # is not assignable to an Optional[int]. + @prop_2.setter + def prop_2(self, value: float) -> None: + pass + + @property + def prop_3(self) -> list[_T]: + return [] + + # This should generate an error because _T is + # not assignable to List[_T]. 
+ @prop_3.setter + def prop_3(self, value: _T) -> None: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property7.py b/python-parser/packages/pyright-internal/src/tests/samples/property7.py new file mode 100644 index 00000000..7c5df669 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property7.py @@ -0,0 +1,30 @@ +# This sample tests member access expressions where the +# LHS is a class and the RHS is a property. + + +class A: + def __init__(self): + return + + @property + def value(self): + return 42 + + def __getattr__(self, name: str): + return 0 + + +b1 = A.value +# This should generate an error because __getattr__ +# is not applied to a class. +b2 = A.blah +b3 = A.value.fget + + +a = A() + +c1 = a.value +c2 = a.blah +# This should generate an error because a.value is +# the property value. +c3 = a.value.fget diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property8.py b/python-parser/packages/pyright-internal/src/tests/samples/property8.py new file mode 100644 index 00000000..92c42f83 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property8.py @@ -0,0 +1,26 @@ +# This sample tests the error reporting for static methods +# used with property getters, setters and deleters. + + +class ClassA: + @property + def legal1(self) -> None: + pass + + # This should generate an error. + @property + @staticmethod + def illegal1() -> None: + pass + + # This should generate an error. + @illegal1.setter + @staticmethod + def illegal1(val: None) -> None: + pass + + # This should generate an error. 
+ @illegal1.deleter + @staticmethod + def illegal1() -> None: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/property9.py b/python-parser/packages/pyright-internal/src/tests/samples/property9.py new file mode 100644 index 00000000..9969c0dc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/property9.py @@ -0,0 +1,24 @@ +# This sample verifies the case where a property returns a callable. + +from typing import Callable + + +class ClassA: + def __init__(self, converter: Callable[[str, int], int]) -> None: + self.converter = converter + + @property + def converter_prop(self) -> Callable[[str, int], int]: + return self.converter + + +def str_to_int(arg: str, base: int) -> int: + return int(arg, base=base) + + +obj = ClassA(str_to_int) +val1: int = obj.converter("123", 10) +val2: int = obj.converter_prop("123", 10) + +reveal_type(obj.converter, expected_text="(str, int) -> int") +reveal_type(obj.converter_prop, expected_text="(str, int) -> int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol1.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol1.py new file mode 100644 index 00000000..2a1d0fec --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol1.py @@ -0,0 +1,137 @@ +# This sample tests the type checker's handling of generic protocol types. + +from typing import Generic, Protocol, TypeVar + +T = TypeVar("T") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class Box(Protocol[T_co]): + def content(self) -> T_co: ... + + +class Box_Impl: + def content(self) -> int: ... + + +box: Box[float] +second_box: Box[int] = Box_Impl() + +# This should not generate an error due to the covariance of 'Box'. +box = second_box + + +class Sender(Protocol[T_contra]): + def send(self, data: T_contra) -> int: ... + + +class Sender_Impl: + def send(self, data: float) -> int: ... 
+ + +sender: Sender[float] = Sender_Impl() +new_sender: Sender[int] + +# This should not generate an error because 'Sender' is contravariant. +new_sender = sender + + +class Proto(Protocol[T]): + def m1(self, p0: T) -> None: + pass + + attr: T + + +class Proto_Impl: + def m1(self, p0: int) -> None: + pass + + attr: int + + +class NotProto2: + attr: int + + +var: Proto[float] +another_var: Proto[int] = Proto_Impl() + +# This should generate an error because T is invariant. +var = another_var + +another_var2: NotProto2 = NotProto2() + +# This should generate an error because T is invariant. +var = another_var2 + + +# This should generate an error because "Protocol" cannot be used +# as a type argument. +var2: list[Protocol] = [] + + +class Abstract1(Protocol[T_contra]): + def do(self, x: T_contra | None): ... + + +class Concrete1: + def do(self, x: int | None): + pass + + +def use_protocol1(a: Abstract1[int]): + a.do(1) + + +use_protocol1(Concrete1()) + + +# This should generate an error because TypeVars cannot +# be defined in both Protocol and Generic. +class Proto2(Protocol[T_co], Generic[T_co]): ... + + +class Proto3(Protocol, Generic[T_co]): ... + + +_A = TypeVar("_A", covariant=True) +_B = TypeVar("_B", covariant=True, bound=int) + + +class ProtoBase1(Protocol[_A, _B]): ... + + +# This should generate an error because Protocol must +# include all of the TypeVars. +class Proto4(ProtoBase1[_A, _B], Protocol[_A]): ... + + +class ProtoBase2(Protocol[_B]): ... + + +class Proto5(ProtoBase2[_B], Protocol[_A, _B]): ... + + +p5_1: Proto5[float, int] + +# This should generate an error because the second type argument +# corresponds to _B, which is bound to int. +p5_2: Proto5[int, float] + + +def func1(): + # This should generate an error because Protocol isn't + # allowed in a type annotation. + v: Protocol | int + + +# This should generate an error because Protocol isn't +# allowed in a TypeVar bound. 
+T2 = TypeVar("T2", bound=Protocol | int) + + +# This should generate an error because int is not a TypeVar +class Proto6(Protocol[int]): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol10.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol10.py new file mode 100644 index 00000000..c0b70657 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol10.py @@ -0,0 +1,33 @@ +# This sample tests the interactions between the synthesized +# type for "self" and protocol matching. + +from typing import Protocol + + +class ProtocolBase(Protocol): + def a(self) -> None: ... + + def b(self) -> None: ... + + +class ProtocolExtended(ProtocolBase, Protocol): + def c(self) -> None: ... + + +class Base: + def a(self) -> None: + pass + + +class ImplementsBase(Base): + def b(self) -> None: + pass + + +class ImplementsExtended(ImplementsBase): + def c(self) -> None: + pass + + +a: ProtocolExtended +a = ImplementsExtended() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol11.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol11.py new file mode 100644 index 00000000..d81d7281 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol11.py @@ -0,0 +1,31 @@ +# This sample tests the assignment of generic classes to +# a generic protocol in the case where the protocol is +# satisfied by a generic subclass. + +from typing import Generic, Iterator, Optional, TypeVar + + +class Base: + pass + + +_T1 = TypeVar("_T1") +_TBase1 = TypeVar("_TBase1", bound=Base) +_TBase2 = TypeVar("_TBase2", bound=Base) + + +def my_next(__i: Iterator[_T1]) -> _T1: ... 
+ + +class SourceProvider(Generic[_TBase1]): + def __iter__(self): + return self + + +class ManagedSourceProvider(SourceProvider[_TBase2]): + def get(self) -> Optional[_TBase2]: + source = my_next(self) + return source + + def __next__(self) -> _TBase2: + raise NotImplementedError diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol12.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol12.py new file mode 100644 index 00000000..7013b6f7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol12.py @@ -0,0 +1,12 @@ +# This sample verifies that an error is generated when a non-protocol +# class is used as a base class for a protocol class. + +from typing import Protocol + + +class BaseClass: + x: int + + +class DerivedClass(BaseClass, Protocol): + x: int diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol13.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol13.py new file mode 100644 index 00000000..48291915 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol13.py @@ -0,0 +1,19 @@ +# This sample tests the handling of protocol class methods that +# include keyword-only parameters that match to positional parameters +# within class that is being tested for protocol compatibility. + +from typing import Protocol + + +class CollectionProtocol(Protocol): + def watch(self, *, max_time: int | None = ..., key: str | None = ...) -> None: ... + + +class Collection: + def watch(self, key: str | None = None, max_time: int | None = None) -> None: ... + + +# This should not generate an error even though the "keys" and +# "max_time" parameters in Collection.watch are not marked as +# keyword-only parameters and are not in the same order. 
+col: CollectionProtocol = Collection() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol14.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol14.py new file mode 100644 index 00000000..6ce135e1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol14.py @@ -0,0 +1,26 @@ +# This sample tests a protocol that uses generics in the "self" +# parameter. + +from typing import Protocol, Self, TypeVar + +T = TypeVar("T") + + +class HasParent(Protocol): + def get_parent(self: T) -> T: ... + + +GenericNode = TypeVar("GenericNode", bound=HasParent) + + +def generic_get_parent(n: GenericNode) -> GenericNode: + return n.get_parent() + + +class ConcreteNode: + def get_parent(self) -> Self: + return self + + +node = ConcreteNode() +parent = generic_get_parent(node) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol15.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol15.py new file mode 100644 index 00000000..5a2e5d73 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol15.py @@ -0,0 +1,24 @@ +# This sample tests the handling of protocols with properties and +# methods that make use of generics. + +from typing import Callable, Protocol, TypeVar + +T = TypeVar("T") + + +class Proto(Protocol): + @property + def f(self: T) -> T: ... + + def m(self, item: T, callback: Callable[[T], str]) -> str: ... + + +class Concrete: + @property + def f(self: T) -> T: + return self + + def m(self, item: T, callback: Callable[[T], str]) -> str: ... 
+ + +x: Proto = Concrete() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol16.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol16.py new file mode 100644 index 00000000..69b8dd20 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol16.py @@ -0,0 +1,19 @@ +# This sample tests that function parameter names match in a protocol. + +from typing import Any, Protocol + + +class Session(Protocol): + def execute(self, statement: Any, *args: Any, **kwargs: Any) -> None: ... + + +class CoolSession(Protocol): + def execute(self, stmt: Any, *args: Any, **kwargs: Any) -> None: ... + + +def func1(arg: Session) -> None: ... + + +def func2(x: CoolSession): + # This should generate an error because "statement" and "stmt" don't match. + func1(x) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol17.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol17.py new file mode 100644 index 00000000..077686b0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol17.py @@ -0,0 +1,95 @@ +# This sample tests for generic protocol variance consistency. + +from typing import Protocol, TypeVar +from typing_extensions import ParamSpec # pyright: ignore[reportMissingModuleSource] + +# pyright: strict + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2", bound=int) +_T3 = TypeVar("_T3", bytes, str) +_T1_co = TypeVar("_T1_co", covariant=True) +_T1_contra = TypeVar("_T1_contra", contravariant=True) + + +class Protocol1(Protocol[_T1, _T2, _T3]): + def m1(self, p0: _T1, p1: _T2, p2: _T3) -> _T1 | _T2: ... + + def m2(self) -> _T1: ... + + def m3(self) -> _T2: ... + + def m4(self) -> _T3: ... + + +# This should generate an error because _T3 should be contravariant +class Protocol2(Protocol[_T1, _T2, _T3]): + def m1(self, p0: _T1, p1: _T2, p2: _T3) -> _T1: ... + + def m2(self) -> _T1: ... + + def m3(self) -> _T2: ... 
+ + +class Protocol3(Protocol[_T1_co]): + def m1(self) -> None: + pass + + +# This should generate an error because _T1 should be contravariant. +class Protocol4(Protocol[_T1]): + def m1(self, p0: _T1) -> None: ... + + +# This should generate an error because _T1_co should be contravariant. +class Protocol5(Protocol[_T1_co]): + # This should generate an error because a covariant TypeVar + # should not be used as a parameter type. + def m1(self, p0: _T1_co) -> None: ... + + +# This should generate an error because _T1 should be covariant. +class Protocol6(Protocol[_T1]): + def m1(self) -> _T1: ... + + +# This should generate an error because _T1_contra should be covariant. +class Protocol7(Protocol[_T1_contra]): + # This should generate an error because a contravariant TypeVar + # should not be used as a return type. + def m1(self) -> _T1_contra: ... + + +class Protocol8(Protocol[_T1]): + def m1(self) -> _T1: ... + + def m2(self, p1: _T1) -> None: + pass + + +P = ParamSpec("P") +R = TypeVar("R", covariant=True) + + +class Callback(Protocol[P, R]): + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R: ... + + +class Protocol9(Protocol[_T1_co]): + @property + def prop1(self) -> _T1_co: ... + + +class Protocol10(Protocol[_T1_co]): + def m1(self) -> type[_T1_co]: ... + + +class Protocol11(Protocol[_T1]): + x: _T1 | None + + +class Protocol12(Protocol[_T1_contra]): + def m1(self: "Protocol12[_T1_contra]", x: _T1_contra) -> None: ... + + @classmethod + def m2(cls: "type[Protocol12[_T1_contra]]") -> None: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol18.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol18.py new file mode 100644 index 00000000..03ceeb50 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol18.py @@ -0,0 +1,24 @@ +# This sample tests that instantiation of a protocol is flagged +# as an error. + +from typing import Protocol + + +class A(Protocol): ... 
+ + +# This should generate an error. +A() + + +class B(A): ... + + +B() + + +class C(A, Protocol): ... + + +# This should generate an error. +C() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol19.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol19.py new file mode 100644 index 00000000..5a27ec99 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol19.py @@ -0,0 +1,48 @@ +# This sample tests the detection of Final mismatches between a protocol +# and a purported instance of a protocol. + +from dataclasses import dataclass, field +from typing import NamedTuple, Protocol, Final + + +class ProtoA(Protocol): + x: Final[int] = field() + + +@dataclass +class ConcreteA: + x: int = 0 + + +# This should generate an error +a1: ProtoA = ConcreteA(0) + + +class ProtoB(Protocol): + y: int + + +@dataclass +class ConcreteB: + y: Final[int] = 0 + + +# This should generate an error +b1: ProtoB = ConcreteB(0) + + +class ProtoC(Protocol): + x: Final[int] + + +class ConcreteC1(NamedTuple): + x: int + + +@dataclass(frozen=True) +class ConcreteC2: + x: int + + +c1: ProtoC = ConcreteC1(0) +c2: ProtoC = ConcreteC2(0) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol2.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol2.py new file mode 100644 index 00000000..c66e7202 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol2.py @@ -0,0 +1,37 @@ +# This sample tests the type checker's handling of +# generic protocols with invariant, constrained, and contravariant +# type arguments. + +from typing import TypeVar, Protocol + + +T = TypeVar("T") +StrLike = TypeVar("StrLike", str, bytes) +T_contra = TypeVar("T_contra", contravariant=True) + + +class Writer(Protocol[T_contra]): + def write(self, data: T_contra) -> None: ... 
+ + +class WriteFile: + def write(self, data: bytes) -> None: + pass + + +def f(writer: Writer[bytes]): + pass + + +def g(writer: Writer[T], v: T | None = None): + pass + + +def h(writer: Writer[StrLike], v: StrLike | None = None): + pass + + +w = WriteFile() +f(w) +g(w) +h(w) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol20.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol20.py new file mode 100644 index 00000000..06702f96 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol20.py @@ -0,0 +1,21 @@ +# This sample tests the case where a TypeVar is bound to a +# protocol class. + +from typing import Protocol, TypeVar + + +class ClsProtocol(Protocol): + def __init__(self): ... + + +T1 = TypeVar("T1", bound="ClsProtocol") + + +class Sample: + @classmethod + def test(cls: type[T1]) -> T1: + return cls() + + +reveal_type(Sample.test(), expected_text="Sample") +reveal_type(Sample().test(), expected_text="Sample") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol21.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol21.py new file mode 100644 index 00000000..078dda29 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol21.py @@ -0,0 +1,25 @@ +# This sample tests the handling of protocol classes that define properties +# to indicate a read-only attribute. It also tests that a member access through +# a protocol class (not an instance) is flagged as an error. + +from typing import Protocol + + +class A(Protocol): + @property + def name(self) -> str: ... + + +class B: + name: str + + +def do_something(a: A, class_a: type[A]) -> None: + val1 = a.name + reveal_type(val1, expected_text="str") + + # This should generate an error because accesses to + # properties from a protocol class are not allowed. 
+ val2 = class_a.name + + val3: A = B() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol22.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol22.py new file mode 100644 index 00000000..aa80b445 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol22.py @@ -0,0 +1,39 @@ +# This sample tests that a type variable existing in a union type +# of multiple type variables is treated as covariant with the +# union type, thus affecting the variance restriction. + +from typing import Protocol, TypeVar + +# pyright: strict + +_T1 = TypeVar("_T1") +_T1_co = TypeVar("_T1_co", covariant=True) +_T1_contra = TypeVar("_T1_contra", contravariant=True) + +_T2 = TypeVar("_T2") +_T2_co = TypeVar("_T2_co", covariant=True) +_T2_contra = TypeVar("_T2_contra", contravariant=True) + + +# This is right, as `_T1_co` and `_T2_co` are only covariant with +# return type. +class P1(Protocol[_T1_co, _T2_co]): + def m1(self) -> _T1_co | _T2_co: ... + + +# This is right, as `_T1_contra` and `_T2_contra` are only covariant +# with the argument type. +class P2(Protocol[_T1_contra, _T2_contra]): + def m1(self, a: _T1_contra | _T2_contra) -> None: ... + + +# This is right, as `_T1` and `_T2` are both covariant with the +# argument type and the return type. +class P3(Protocol[_T1, _T2]): + def m1(self, a: _T1, b: _T2) -> _T1 | _T2: ... + + +# This is right, as `_T1` and `_T2` are both covariant with the +# argument type and the return type. +class P4(Protocol[_T1, _T2]): + def m2(self, a: _T1 | _T2) -> tuple[_T1, _T2]: ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol23.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol23.py new file mode 100644 index 00000000..f7f7e1bc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol23.py @@ -0,0 +1,39 @@ +# This sample tests the provision in PEP 544 that a protocol class +# can't be assigned to Type[Proto]. + +from abc import abstractmethod +from typing import Protocol + + +class Proto(Protocol): + @abstractmethod + def meth(self) -> int: ... + + +class Concrete: + def meth(self) -> int: + return 42 + + +def func1(cls: type[Proto]) -> int: + return cls().meth() + + +func1(Concrete) + +# This should generate an error because Proto is a protocol class, +# not a concrete class type that implements the protocol. +func1(Proto) + +val1: type[Proto] +val1 = Concrete +val1().meth() + +# This should generate an error because Proto is a protocol class. +val1 = Proto + + +def func2() -> type[Proto]: ... + + +val1 = func2() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol24.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol24.py new file mode 100644 index 00000000..a0b79c49 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol24.py @@ -0,0 +1,101 @@ +# This sample tests the provision in PEP 544 where a class type can +# be assigned to a protocol. + +from typing import Any, ClassVar, Protocol + + +class ProtoA(Protocol): + def meth(_self, x: int) -> int: ... + + +class ProtoB(Protocol): + def meth(_self, self: Any, x: int) -> int: ... + + +class C: + def meth(self, x: int) -> int: ... + + +# This should generate an error because C.meth isn't compatible +# with ProtoA().meth. +a: ProtoA = C + +b: ProtoB = C + + +class ProtoD(Protocol): + var1: int + + @property + def var2(self) -> str: ... 
+ + +class E: + var1: ClassVar[int] + var2: ClassVar[str] + + +class F: + var1: ClassVar[int] + var2: ClassVar[int] + + +d: ProtoD = E + +# This should generate an error because var2 is the wrong type. +e: ProtoD = F + + +class Jumps(Protocol): + def jump(self) -> int: ... + + +class Jumper1: + @classmethod + def jump(cls) -> int: ... + + +class Jumper2: + def jump(self) -> int: ... + + +def do_jump(j: Jumps): + print(j.jump()) + + +do_jump(Jumper1) +do_jump(Jumper2()) + + +class ProtoG1(Protocol): + attr1: ClassVar[int] + + +class ProtoG2(Protocol): + attr1: int + + +class ConcreteG1: + attr1: ClassVar[int] = 1 + + +class ConcreteG2: + attr1: int = 1 + + +class GMeta(type): + attr1: int + + +class ConcreteG3(metaclass=GMeta): + pass + + +# The following four lines should generate an error. +pc1: ProtoG1 = ConcreteG1 +pc2: ProtoG1 = ConcreteG2 +pc3: ProtoG2 = ConcreteG2 +pc4: ProtoG1 = ConcreteG3 + +pc5: ProtoG2 = ConcreteG1 +pc6: ProtoG2 = ConcreteG3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol25.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol25.py new file mode 100644 index 00000000..1ecb4ca5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol25.py @@ -0,0 +1,28 @@ +# This sample tests the special-case handling for __slots__ and +# __class_getitem__ during protocol matching. + + +from typing import Any, Iterable, Protocol + + +class B: ... + + +class C: + def __class_getitem__(cls, __item: Any) -> Any: ... + + +class SupportsClassGetItem(Protocol): + __slots__: str | Iterable[str] = () + + def __class_getitem__(cls, __item: Any) -> Any: ... + + +b1: SupportsClassGetItem = B() # OK (missing __class_getitem__ is ignored) +c1: SupportsClassGetItem = C() # OK + + +# This should generate an error because __class_getitem__ is not exempt +# when performing class object protocol matching. 
+b2: SupportsClassGetItem = B # Error +c2: SupportsClassGetItem = C # OK diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol26.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol26.py new file mode 100644 index 00000000..f7d67b94 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol26.py @@ -0,0 +1,35 @@ +# This sample tests protocol class assignment in a case that involves tricky +# recursion. + +from typing import Protocol, Sequence, TypeVar, overload + +_T_co = TypeVar("_T_co", covariant=True) + + +class SupportsIndex(Protocol): + def __index__(self) -> int: ... + + +class TupleLike(Sequence[_T_co]): + @overload + def __getitem__(self, index: SupportsIndex) -> _T_co: ... + + @overload + def __getitem__(self, index: slice) -> "TupleLike[_T_co]": ... + + def __getitem__( + self, index: slice | SupportsIndex + ) -> "_T_co | TupleLike[_T_co]": ... + + +class NestedSequence(Protocol[_T_co]): + @overload + def __getitem__(self, index: int, /) -> "_T_co | NestedSequence[_T_co]": ... + + @overload + def __getitem__(self, index: slice, /) -> "NestedSequence[_T_co]": ... + + +def func(t: TupleLike[int]): + x: int | NestedSequence[int] = t + y: NestedSequence[int] = t diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol28.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol28.py new file mode 100644 index 00000000..e80f64a9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol28.py @@ -0,0 +1,29 @@ +# This sample tests a complicated use case involving multiple +# callback protocols. + +from typing import Protocol, TypeVar, Any + + +_T1 = TypeVar("_T1", contravariant=True) +_T2 = TypeVar("_T2", covariant=True) +_T3 = TypeVar("_T3", covariant=True) + + +class Callable1(Protocol[_T1]): + def __call__(self, __x: _T1) -> Any: ... 
+ + +_T4 = TypeVar("_T4", bound=Callable1[Any]) + + +class Decorator1(Protocol[_T2]): + def __call__(self, __x: Callable1[_T2]) -> Any: ... + + +def decorator1(__x: Decorator1[_T3]) -> Decorator1[_T3]: ... + + +def func1(__x: _T4) -> _T4: ... + + +decorator1(func1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol29.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol29.py new file mode 100644 index 00000000..5e489794 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol29.py @@ -0,0 +1,26 @@ +# This sample tests the special-case handling of properties that return +# generics within a protocol. + +from functools import partial +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Protocol, + Self, +) +from typing import Any, Callable, TypeVar + +_T = TypeVar("_T", covariant=True) + + +class Partial(Protocol[_T]): + @property + def func(self) -> Callable[..., _T]: ... + + def __new__( + cls: type[Self], __func: Callable[..., _T], *args: Any, **kwargs: Any + ) -> Self: ... + + +def func1(x: Partial[int]): ... + + +func1(partial(int)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol3.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol3.py new file mode 100644 index 00000000..261bb334 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol3.py @@ -0,0 +1,328 @@ +# This sample tests the assignment of protocols that +# include property declarations. 
+ +from _typeshed import DataclassInstance +from dataclasses import dataclass +from typing import ( + ClassVar, + ContextManager, + Final, + Generic, + NamedTuple, + Protocol, + Sequence, + TypeVar, +) + + +class Class1(Protocol): + @property + def batch_shape(self) -> int: + return 0 + + +class MockClass1: + def __init__(self, batch_shape: int): + self._batch_shape = batch_shape + + @property + def batch_shape(self) -> int: + return self._batch_shape + + +# This should not generate an error. +d: Class1 = MockClass1(batch_shape=1) + + +class Class2(Protocol): + @property + def batch_shape(self) -> int: + return 0 + + +class MockClass2: + def __init__(self, batch_shape: int): + self._batch_shape = batch_shape + + @property + def batch_shape(self) -> float: + return self._batch_shape + + +# This should generate an error because the +# type of the batch_shape property is not compatible. +e: Class2 = MockClass2(batch_shape=1) + + +class Class3(Protocol): + @property + def batch_shape(self) -> int: + return 0 + + @batch_shape.setter + def batch_shape(self, value: int) -> None: + pass + + +class MockClass3: + def __init__(self, batch_shape: int): + self._batch_shape = batch_shape + + @property + def batch_shape(self) -> int: + return self._batch_shape + + +# This should generate an error because it is missing +# a setter. +f: Class3 = MockClass3(batch_shape=1) + + +class Class4(Protocol): + @property + def batch_shape(self) -> int: + return 0 + + @batch_shape.deleter + def batch_shape(self) -> None: + pass + + +class MockClass4: + def __init__(self, batch_shape: int): + self._batch_shape = batch_shape + + @property + def batch_shape(self) -> int: + return self._batch_shape + + @batch_shape.setter + def batch_shape(self, value: int) -> None: + pass + + +# This should generate an error because it is missing +# a deleter. 
+g: Class4 = MockClass4(batch_shape=1) + + +_T_co = TypeVar("_T_co", covariant=True) +_Self = TypeVar("_Self") + + +class Class5: + @property + def real(self: _Self) -> _Self: ... + + +class MockClass5(Protocol[_T_co]): + @property + def real(self) -> _T_co: ... + + +foo5 = Class5() +h: MockClass5[Class5] = foo5 + + +P6 = TypeVar("P6", bound="MockClass6") +C6 = TypeVar("C6", bound="Class6") + + +class MockClass6(Protocol): + @property + def bar(self: P6) -> ContextManager[P6]: ... + + +class Class6: + @property + def bar(self: C6) -> ContextManager[C6]: ... + + +i: MockClass6 = Class6() + + +class Proto7(Protocol): + x: str + + +class Class7(NamedTuple): + x: str + + +# This should generate an error because the protocol +# indicates that 'a' must be writable. +a: Proto7 = Class7("") + + +class Proto8(Protocol): + @property + def x(self) -> str: ... + + +class Class8(NamedTuple): + x: str + + +b: Proto8 = Class8("") + + +class Proto9(Protocol): + @property + def x(self) -> str: ... + + @x.setter + def x(self, n: str) -> None: ... + + +class Proto10(Protocol): + x: str + + +class NT9(NamedTuple): + x: str = "" + + +@dataclass(frozen=False) +class DC9: + x: str = "" + + +@dataclass(frozen=True) +class DCFrozen9: + x: str = "" + + +# This should generate an error because named tuple +# attributes are immutable. +p9_1: Proto9 = NT9() + +# This should generate an error because frozen dataclass +# attributes are immutable. +p9_2: Proto9 = DCFrozen9() + +p9_3: Proto9 = DC9() + +# This should generate an error because named tuple +# attributes are immutable. +p10_1: Proto10 = NT9() + +# This should generate an error because frozen dataclass +# attributes are immutable. +p10_2: Proto10 = DCFrozen9() + +p10_3: Proto10 = DC9() + + +class Proto11(Protocol): + val1: ClassVar[Sequence[int]] + + +class Concrete11: + val1: Sequence[int] + + +# This should generate an error because of a ClassVar mismatch. 
+p11_1: Proto11 = Concrete11() + + +class Proto12(Protocol): + val1: list[int] + + +class Concrete12: + val1: ClassVar = [1, 2, 3] + + +# This should generate an error because of a ClassVar mismatch. +p12_1: Proto12 = Concrete12() + + +def func12(p11: Proto11, p12: Proto12): + # This should generate an error because of a ClassVar mismatch. + v1: Proto12 = p11 + + # This should generate an error because of a ClassVar mismatch. + v2: Proto11 = p12 + + +T13 = TypeVar("T13", covariant=True) + + +class Proto13(Protocol[T13]): + @property + def prop1(self) -> T13: ... + + +class Proto14(Proto13[T13], Protocol): ... + + +class Concrete14(Generic[T13]): + def __init__(self, val: T13): + self.prop1 = val + + +def func14(val: Proto14[T13]): ... + + +func14(Concrete14(1)) + + +class Proto15(Protocol): + @property + def prop1(self) -> int: + return 0 + + +class Concrete15_1: + prop1: Final[int] = 0 + + +class Concrete15_2: + prop1: int = 0 + + +class Concrete15_3: + prop1: int + + def __init__(self): + self.prop1 = 0 + + +@dataclass +class Concrete15_4: + prop1: Final[int] = 0 + + +@dataclass(frozen=True) +class Concrete15_5: + prop1: int = 0 + + +# This should generate an error because it is not a ClassVar in the protocol. 
+p15_1: Proto15 = Concrete15_1() + +p15_2: Proto15 = Concrete15_2() +p15_3: Proto15 = Concrete15_3() + +p15_4_1: Proto15 = Concrete15_4() +p15_4_2: DataclassInstance = Concrete15_4() + +p15_5_1: Proto15 = Concrete15_5() +p15_5_2: DataclassInstance = Concrete15_5() + + +class Proto16(Protocol): + __name__: str + + +class Concrete16_1(NamedTuple): + other: int + + +@dataclass(frozen=True) +class Concrete16_2: + other: int + + +p16_1: Proto16 = Concrete16_1 +p16_2: Proto16 = Concrete16_2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol30.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol30.py new file mode 100644 index 00000000..699fa235 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol30.py @@ -0,0 +1,30 @@ +# This sample validates that mutable class and instance variables +# are treated as invariant during protocol matching. + +from typing import ClassVar, Protocol + + +class P1(Protocol): + v1: float + + +class C1(Protocol): + v1: int + + +def func1(c1: C1): + # This should generate an error because v1 is invariant. + x: P1 = c1 + + +class P2(Protocol): + v1: ClassVar[float] + + +class C2(Protocol): + v1: int + + +def func2(c2: C2): + # This should generate an error because v1 is invariant. + x: P2 = c2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol31.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol31.py new file mode 100644 index 00000000..3142cf9a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol31.py @@ -0,0 +1,21 @@ +# This sample tests the case where an implementation of a protocol +# implements a function with a named + positional parameter but +# the protocol has a name-only parameter. 
+ +from typing import Generic, Protocol, TypeVar + +T = TypeVar("T") +Tct = TypeVar("Tct", contravariant=True) + + +class Interface(Protocol[Tct]): + def run(self, *, value1: Tct, value2: int) -> object: ... + + +class Implementation(Generic[Tct]): + def run(self, value2: float, value1: Tct) -> object: + return None + + +def get(_: T) -> Interface[T]: + return Implementation[T]() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol32.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol32.py new file mode 100644 index 00000000..222840e4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol32.py @@ -0,0 +1,79 @@ +# This sample tests the case where a protocol class derives from +# another protocol class. + +from typing import Generic, TypeVar, Protocol, overload + +Arg = TypeVar("Arg", contravariant=True) +Value = TypeVar("Value") + + +class Base1(Protocol[Value]): + def method1(self, default: Value) -> Value: ... + + +class Base2(Base1[Value], Protocol): + def method2(self, default: Value) -> Value: ... + + +class Interface(Base2[Value], Protocol[Arg, Value]): + def another(self, arg: Arg) -> None: ... + + +class Implementation1(Generic[Arg, Value]): + def method1(self, default: Value) -> Value: + return default + + def method2(self, default: Value) -> Value: + return default + + def another(self, arg: Arg) -> None: + return + + +def func1(arg: Arg, value: Value) -> Interface[Arg, Value]: + return Implementation1[Arg, Value]() + + +class Implementation2(Generic[Arg, Value]): + def method1(self, default: Value) -> Value: + return default + + def another(self, arg: Arg) -> None: + return + + +def func2(arg: Arg, value: Value) -> Interface[Arg, Value]: + # This should generate an error because + # Implementation2 doesn't implement method2. 
+ return Implementation2[Arg, Value]() + + +class Implementation3(Generic[Arg, Value]): + def method1(self, default: int) -> int: + return default + + def method2(self, default: Value) -> Value: + return default + + def another(self, arg: Arg) -> None: + return + + +def func3(arg: Arg, value: Value) -> Interface[Arg, Value]: + # This should generate an error because + # Implementation3's signature doesn't match. + return Implementation3[Arg, Value]() + + +class Base4(Protocol): + @overload + def method3(self, message: int) -> int: ... + + @overload + def method3(self, message: str) -> str: ... + + def method3(self, message: str | int): + return message + + +class Implementation4(Base4): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol33.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol33.py new file mode 100644 index 00000000..f4346496 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol33.py @@ -0,0 +1,35 @@ +# This sample tests a protocol matching case that involves +# a union of TypeVars. + +from typing import Generic, TypeVar, Protocol + +T = TypeVar("T", covariant=True) +U = TypeVar("U", covariant=True) + + +class AProto(Generic[T, U], Protocol): + def f(self) -> T | U: ... + + def g(self) -> "AProto[T, U]": ... + + +class A(Generic[T, U]): + def f(self) -> T | U: + raise NotImplementedError + + def g(self) -> AProto[T, U]: + return A[T, U]() + + +class BProto(Generic[T, U], Protocol): + def f(self) -> T | U: ... + + def g(self) -> "BProto[T, U]": ... 
+ + +class B(Generic[T, U]): + def f(self) -> T | U: + raise NotImplementedError + + def g(self) -> BProto[T, U]: + return B[T, U]() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol34.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol34.py new file mode 100644 index 00000000..4ba0958c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol34.py @@ -0,0 +1,20 @@ +# This sample tests a case where a method calls its own constructor +# with a specialized type that uses its own TypeVar and the expected +# type is a protocol. + +from typing import Generic, TypeVar, Protocol + +T = TypeVar("T") + + +class X(Protocol): + def f(self) -> int: ... + + +class Y(Generic[T]): + def f(self) -> T: + raise NotImplementedError + + def g(self) -> X: + # This should generate a type error. + return Y[T]() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol35.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol35.py new file mode 100644 index 00000000..0fc1c8a7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol35.py @@ -0,0 +1,33 @@ +# This sample tests that protocol compatibility caching produces +# the correct result when the first example of protocol matching within +# the file does not require invariance enforcement but some later one +# does. The cached protocol compatibility cannot be used in this case. + +from dataclasses import dataclass +from typing import Protocol + + +class P1(Protocol): + x: int + + +class P2(Protocol): + y: P1 + + +@dataclass +class A: + x: int + + +@dataclass +class B: + y: A + + +y: P1 = A(3) + +# This should generate an error. 
+x: P2 = B(A(3)) + +z: P1 = A(3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol36.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol36.py new file mode 100644 index 00000000..921b11e8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol36.py @@ -0,0 +1,18 @@ +# This sample tests the handling of nested protocols. + +from typing import Protocol, TypeVar, overload + +_T_co = TypeVar("_T_co", covariant=True) + + +class NestedSequence(Protocol[_T_co]): + @overload + def __getitem__(self, __i: int) -> _T_co | "NestedSequence[_T_co]": ... + + @overload + def __getitem__(self, __s: slice) -> "NestedSequence[_T_co]": ... + + +def func(v1: list[list[list[int]]]): + a: NestedSequence[int] = v1 + b: NestedSequence[int] = [[[3, 4]]] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol37.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol37.py new file mode 100644 index 00000000..b007a29a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol37.py @@ -0,0 +1,19 @@ +# This sample tests that a method can be satisfied by a metaclass +# when doing protocol matching. + +from typing import Iterator + + +class StyleMeta(type): + def __iter__(cls) -> Iterator[str]: + yield "a" + yield "b" + yield "c" + + +class Style(metaclass=StyleMeta): + pass + + +x: type[Style] = Style +print(list(x)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol38.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol38.py new file mode 100644 index 00000000..cc316cf5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol38.py @@ -0,0 +1,38 @@ +# This sample tests the case where a protocol is specialized with +# a literal type. + +from typing import Any, Literal, Protocol, TypeVar + + +class Negatable(Protocol): + def __neg__(self) -> "Negatable": ... 
+ + +def func1(x: Negatable) -> None: ... + + +func1(0) + + +def func2(val: Literal[0, 1]): + func1(val) + + +T = TypeVar("T", covariant=True) + + +class SupportsGetItem(Protocol[T]): + def __getitem__(self, __k: int) -> T: ... + + +def func3(a: tuple[Any, ...]): + x: SupportsGetItem[Literal["a"]] = a + + +def func4(x: SupportsGetItem[T]) -> T: + return x[0] + + +def func5(x: list[int] | list[str]) -> None: + y = func4(x) + reveal_type(y, expected_text="int | str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol39.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol39.py new file mode 100644 index 00000000..ba7ed680 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol39.py @@ -0,0 +1,16 @@ +# This sample tests that functions (or any callable) conforms to +# a protocol that includes attributes defined in builtins.function. + +from typing import Any, Protocol + + +class SupportsGet(Protocol): + @property + def __get__(self) -> Any: ... + + +def func1(cls: Any) -> None: + pass + + +v1: SupportsGet = func1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol4.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol4.py new file mode 100644 index 00000000..c1d4f4f2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol4.py @@ -0,0 +1,49 @@ +# This sample tests that instance and class variables +# assigned within a Protocol method are flagged as errors. 
+ +from typing import ClassVar, Protocol + + +class ProtoA(Protocol): + a: int + b: ClassVar[str] + + def method(self) -> None: + self.a = 3 + + # This should be an error + self.temp: list[int] = [] + + @classmethod + def cls_method(cls) -> None: + cls.b = "3" + + # This should be an error + cls.test2 = 3 + + +class ProtoB(Protocol): + x: ClassVar[int] + + +class B: + x: int + + +# This should generate an error because x is not a ClassVar in B +# but is a ClassVar in the protocol. +b: ProtoB = B() + + +class ProtoC(Protocol): + x: ClassVar[int] + + +class C: + def __init__(self): + self.x: int = 0 + + +# This should generate an error because x is an instance-only variable +# and doesn't satisfy the ClassVar annotation in the protocol. +c: ProtoC = C() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol40.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol40.py new file mode 100644 index 00000000..7d6922a4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol40.py @@ -0,0 +1,39 @@ +# This sample tests that the Self type in a Protocol subclass is partially +# specialized appropriately during protocol matching. + +from typing import Generic, Protocol, TypeVar, Self + +T = TypeVar("T", covariant=True) +S = TypeVar("S", covariant=True) + + +class P1Parent(Protocol[S]): + def f0(self, /) -> Self: ... + + +class P1Child(P1Parent[S], Protocol[S]): ... + + +class C1(Generic[T]): + def f0(self, /) -> Self: ... + + +a1: P1Parent[str] = C1[str]() +b1: P1Child[str] = C1[str]() + + +class P2Parent(Protocol[T]): + def f0(self, right: Self, /) -> "P2Parent[T]": + return right + + +class P2Child(P2Parent[T], Protocol[T]): ... 
+ + +class C2(Generic[S]): + def f0(self, other: Self) -> "C2[S]": + return other + + +a2: P2Parent[str] = C2[str]() +b2: P2Child[str] = C2[str]() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol41.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol41.py new file mode 100644 index 00000000..2af2e129 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol41.py @@ -0,0 +1,73 @@ +# This sample verifies that a generic protocol that is specialized with +# a type variable can be matched if that type variable's type is +# supplied by another argument in a call. + +from typing import Any, Protocol, TypeVar, overload + +_T_co = TypeVar("_T_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) + + +class MyStr: ... + + +class MyBytes: + def __buffer__(self, __flags: int) -> memoryview: ... + + +MyAnyStr = TypeVar("MyAnyStr", MyStr, MyBytes) + + +class Buffer(Protocol): + def __buffer__(self, __flags: int) -> memoryview: ... + + +class SupportsRead(Protocol[_T_co]): + def read(self, __length: int = ...) -> _T_co: ... + + +class SupportsWrite(Protocol[_T_contra]): + def write(self, __s: _T_contra) -> object: ... + + +class BufferedWriter: + def write(self, __buffer: Buffer) -> int: + raise NotImplementedError + + +def func1(s: SupportsRead[MyAnyStr], t: SupportsWrite[MyAnyStr]) -> None: ... + + +def test1(src: SupportsRead[MyBytes], tgt: BufferedWriter) -> None: + func1(src, tgt) + + +def test2(src: Any, tgt: BufferedWriter) -> None: + func1(src, tgt) + + +AnyStr_contra = TypeVar("AnyStr_contra", str, bytes, contravariant=True) + + +class BytesIO: + def write(self, __b: Buffer) -> None: + pass + + +class WriteBuffer(Protocol[AnyStr_contra]): + def write(self, __b: AnyStr_contra) -> Any: ... + + +class NDFrame: + @overload + def to_csv(self, p: WriteBuffer[bytes]) -> None: ... + + @overload + def to_csv(self, p: None = ...) -> str: ... 
+ + def to_csv(self, p: Any = None) -> Any: ... + + +def test3(b: BytesIO) -> None: + df = NDFrame() + df.to_csv(b) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol42.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol42.py new file mode 100644 index 00000000..38426dc7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol42.py @@ -0,0 +1,21 @@ +# This sample tests the case where a protocol implementation uses a +# method-scoped type variable. + +from typing import Protocol, Sequence, TypeVar + +Input = TypeVar("Input", contravariant=True) +Output = TypeVar("Output", covariant=True) +T = TypeVar("T") + + +class ProtoA(Protocol[Input, Output]): + def __call__(self, input: Input) -> Output: + raise NotImplementedError + + +class ImplA: + def __call__(self, input: Sequence[T]) -> T: + return input[0] + + +v1: ProtoA[Sequence[int], int] = ImplA() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol43.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol43.py new file mode 100644 index 00000000..fcb90d14 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol43.py @@ -0,0 +1,37 @@ +# This sample tests the handling of generic protocols or protocols +# with generic methods. + +from typing import Protocol, Sequence, TypeVar + +A = TypeVar("A") + + +class HasAdd1(Protocol[A]): + def __add__(self: A, other: A) -> A: ... + + +T1 = TypeVar("T1", bound=HasAdd1) + + +def merge_element_lists1(a: Sequence[T1], b: Sequence[T1]) -> Sequence[T1]: + retval: Sequence[T1] = [] + for a_elem in a: + for b_elem in b: + retval.append(a_elem + b_elem) + return retval + + +# This is similar to HasAdd1 except that the class isn't generic. +class HasAdd2(Protocol): + def __add__(self: A, other: A) -> A: ... 
+ + +T2 = TypeVar("T2", bound=HasAdd2) + + +def merge_element_lists2(a: Sequence[T2], b: Sequence[T2]) -> Sequence[T2]: + retval: Sequence[T2] = [] + for a_elem in a: + for b_elem in b: + retval.append(a_elem + b_elem) + return retval diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol44.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol44.py new file mode 100644 index 00000000..00206023 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol44.py @@ -0,0 +1,22 @@ +# This sample tests the case where protocol matching requires that the type +# parameter for the concrete class map to a union of types in the protocol. + +from typing import Iterable, Protocol, TypeVar + +K = TypeVar("K") +V = TypeVar("V") + + +class SpecialDict(Protocol[K, V]): + def items(self) -> Iterable[tuple[K, V | int]]: ... + + def __getitem__(self, __key: K) -> V | int: ... + + def __setitem__(self, __key: K, __value: V | int) -> None: ... + + +def func1(k: K, v: V) -> SpecialDict[K, V]: + x1: SpecialDict[K, V] = {k: v} + x2: SpecialDict[K, V] = {k: v, k: 1} + x3: SpecialDict[K, V] = {k: 0} + return {} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol45.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol45.py new file mode 100644 index 00000000..566ffd07 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol45.py @@ -0,0 +1,31 @@ +# This sample tests a case where a protocol method has a method-scoped +# type parameter. + +from typing import Callable, Generic, Protocol, TypeVar + +S = TypeVar("S") +T = TypeVar("T") +Input = TypeVar("Input") +Output = TypeVar("Output") + + +class Proto1(Protocol): + def __call__(self, item: S, /) -> S: ... 
+ + +class Impl1: + def __call__(self, item: T, /) -> T: + return item + + +class Wrapper(Generic[Input, Output]): + def __init__(self, f: Callable[[Input], Output]) -> None: + self.__f = f + + def __call__(self, item: Input, /) -> Output: + return self.__f(item) + + +y = Wrapper(Impl1()) +reveal_type(y, expected_text="Wrapper[T@__call__, T@__call__]") +x: Proto1 = y diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol46.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol46.py new file mode 100644 index 00000000..274c6db0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol46.py @@ -0,0 +1,42 @@ +# This sample tests protocol matching for multiple protocols that refer +# to each other in a recursive fashion. In particular, this sample tests +# the case where a "cls" parameter is annotated with a protocol type. + +from typing import Never, Self, TypeVar, Protocol + +T_contra = TypeVar("T_contra", contravariant=True) +T = TypeVar("T") + + +class ProtoA(Protocol[T_contra, T]): + def method1(self, value: T_contra) -> "ProtoA[T_contra, T]": ... + + @classmethod + def method2(cls, value: T) -> T: ... + + +class ProtoB(Protocol[T_contra, T]): + def method3(self) -> ProtoA[T_contra, T]: ... + + +class ImplA: + def method1(self, value: int) -> Self: ... + + @classmethod + def method2(cls, value: int) -> int: ... + + +class ImplB: + def method3(self) -> ImplA: ... + + def method1(self, value: int) -> Self: ... + + @classmethod + def method2(cls: type[ProtoB[Never, T]], value: list[T]) -> list[T]: ... + + +def func1(x: ProtoA[Never, T]) -> T: ... 
+ + +v1 = func1(ImplB()) +reveal_type(v1, expected_text="list[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol47.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol47.py new file mode 100644 index 00000000..57b6eb90 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol47.py @@ -0,0 +1,40 @@ +# This sample tests protocol matching for a protocol and an implementation +# that use a mixture of class-scoped and function-scoped TypeVars. + +from typing import Generic, Protocol, TypeVar + +T1 = TypeVar("T1", covariant=True) +T2 = TypeVar("T2") + + +class ProtoA(Protocol[T1]): + def method1(self, __key: str, __default: T2) -> "T1 | T2": ... + + +T3 = TypeVar("T3", covariant=True) +T4 = TypeVar("T4") + + +class A(Generic[T3]): + def method1(self, key: str, default: T4) -> "T3 | T4": + raise NotImplementedError + + +a1: A[str] = A() + + +def func1(storage: ProtoA[str]): ... + + +v1: ProtoA[str] = a1 +func1(a1) + + +def func2(storage: ProtoA[int]): ... + + +# This should generate an error. +v2: ProtoA[int] = a1 + +# This should generate an error. +func2(a1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol48.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol48.py new file mode 100644 index 00000000..9a36e469 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol48.py @@ -0,0 +1,25 @@ +# This sample tests the handling of the Self type during protocol matching. + +from typing import Protocol, Self, TypeVar + +T = TypeVar("T", covariant=True) + + +class SupportsMethod1(Protocol[T]): + def method1(self) -> T: ... + + +def apply_method1(__x: SupportsMethod1[T]) -> T: ... + + +class A: + def method1(self) -> tuple[Self, Self]: ... 
+ + def method2(self): + x = apply_method1(self) + reveal_type(x, expected_text="tuple[Self@A, Self@A]") + + +def func1(a: A): + x = apply_method1(a) + reveal_type(x, expected_text="tuple[A, A]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol49.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol49.py new file mode 100644 index 00000000..72f5f06c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol49.py @@ -0,0 +1,21 @@ +# This sample tests the case where a protocol is matched against a +# dataclass. Dataclass fields need to act as if they are instance +# members rather than class members, which means a callable stored +# in a dataclass member should not be bound to the dataclass itself. + +from dataclasses import dataclass +from typing import Callable, Protocol + + +class HasA(Protocol): + @property + def a(self) -> Callable[[int], int]: ... + + +@dataclass +class A: + a: Callable[[int], int] + + +def func1(a: A): + has_a: HasA = a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol5.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol5.py new file mode 100644 index 00000000..3157801c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol5.py @@ -0,0 +1,25 @@ +# This sample is taken from PEP 544. It verifies that +# the type checker allows instance variables that are initialized +# in a method to be counted toward conformance to a defined Protocol. 
+ +from typing import Protocol + + +class Template(Protocol): + name: str # This is a protocol member + value: int = 0 # This one too (with default) + + def method(self) -> None: + pass + + +class Concrete: + def __init__(self, name: str, value: int) -> None: + self.name = name + self.value = value + + def method(self) -> None: + return + + +var: Template = Concrete("value", 42) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol50.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol50.py new file mode 100644 index 00000000..54627c67 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol50.py @@ -0,0 +1,18 @@ +# This sample tests the case where a protocol is used as a type argument +# for itself. + +from typing import Generic, Protocol, TypeVar + +V_co = TypeVar("V_co", covariant=True) + + +class Proto1(Protocol[V_co]): + def f(self, /) -> V_co: ... + + +class Concrete1(Generic[V_co]): + def f(self, /) -> V_co: ... + + +def func1(v0: Concrete1[Concrete1[object]]): + v2: Proto1[Proto1[object]] = v0 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol51.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol51.py new file mode 100644 index 00000000..f7acb6ef --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol51.py @@ -0,0 +1,6 @@ +# This sample tests a regression case related to the protocol compatibility +# cache. + + +int(round(1.2, 0)) +round(3.4, 2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol52.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol52.py new file mode 100644 index 00000000..ebd0437d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol52.py @@ -0,0 +1,24 @@ +# This sample tests the case where a protocol includes a callable +# attribute that is an instance variable. 
It shouldn't be bound +# to the concrete class in this case. + +from typing import Callable, Protocol + + +class A: + def __init__(self, *, p1: int, p2: str) -> None: ... + + +class ProtoB[**P, T](Protocol): + x: Callable[P, T] + + +class B: + x: type[A] + + +def func1[**P, T](v: ProtoB[P, T]) -> Callable[P, T]: ... + + +x1 = func1(B()) +reveal_type(x1, expected_text="(*, p1: int, p2: str) -> A") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol53.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol53.py new file mode 100644 index 00000000..c3cec832 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol53.py @@ -0,0 +1,212 @@ +# This sample tests protocol matching and override compatibility checks +# for cases involving `Self`. + +from typing import Protocol, Self + + +class Proto_CoRecurs(Protocol): + def m(self) -> "Proto_CoRecurs": ... + + +class Proto_CoSelf(Protocol): + def m(self) -> Self: ... + + +class Proto_CoGeneric(Protocol): + def m[T: Proto_CoGeneric](self: T) -> T: ... + + +class Impl_CoRecurs: + def m(self) -> "Impl_CoRecurs": ... + + +class Impl_CoSelf: + def m(self) -> Self: ... + + +class Impl_CoGeneric: + def m[T: Impl_CoGeneric](self: T) -> T: ... + + +class Impl_CoOther: + def m(self) -> Impl_CoSelf: ... + + +class Impl_CoRecursExplicit1(Proto_CoRecurs): + def m(self) -> "Impl_CoRecursExplicit1": ... + + +class Impl_CoSelfExplicit1(Proto_CoRecurs): + def m(self) -> Self: ... + + +class Impl_CoGenericExplicit1(Proto_CoRecurs): + def m[T: Impl_CoGenericExplicit1](self: T) -> T: ... + + +class Impl_CoOtherExplicit1(Proto_CoRecurs): + def m(self) -> Impl_CoSelf: ... + + +class Impl_CoRecursExplicit2(Proto_CoSelf): + def m(self) -> "Impl_CoRecursExplicit2": ... + + +class Impl_CoSelfExplicit2(Proto_CoSelf): + def m(self) -> Self: ... + + +class Impl_CoGenericExplicit2(Proto_CoSelf): + def m[T: Impl_CoGenericExplicit2](self: T) -> T: ... 
+ + +class Impl_CoOtherExplicit2(Proto_CoSelf): + # This should generate a reportIncompatibleMethodOverride error. + def m(self) -> Impl_CoSelf: ... + + +class Impl_CoRecursExplicit3(Proto_CoGeneric): + def m(self) -> "Impl_CoRecursExplicit3": ... + + +class Impl_CoSelfExplicit3(Proto_CoGeneric): + def m(self) -> Self: ... + + +class Impl_CoGenericExplicit3(Proto_CoGeneric): + def m[T: Impl_CoGenericExplicit3](self: T) -> T: ... + + +class Impl_CoOtherExplicit3(Proto_CoGeneric): + # This should generate a reportIncompatibleMethodOverride error + # but does not currently. + def m(self) -> Impl_CoSelf: ... + + +x01: Proto_CoRecurs = Impl_CoRecurs() +x02: Proto_CoRecurs = Impl_CoSelf() +x03: Proto_CoRecurs = Impl_CoGeneric() +x04: Proto_CoRecurs = Impl_CoOther() + +x11: Proto_CoSelf = Impl_CoRecurs() +x12: Proto_CoSelf = Impl_CoSelf() +x13: Proto_CoSelf = Impl_CoGeneric() +# This should generate a reportAsignmentType error. +x14: Proto_CoSelf = Impl_CoOther() + +x21: Proto_CoGeneric = Impl_CoRecurs() +x22: Proto_CoGeneric = Impl_CoSelf() +x23: Proto_CoGeneric = Impl_CoGeneric() +# This should generate a reportAsignmentType error. +x24: Proto_CoGeneric = Impl_CoOther() + + +class Proto_ContraRecurs(Protocol): + def m(self, x: "Proto_ContraRecurs") -> None: ... + + +class Proto_ContraSelf(Protocol): + def m(self, x: Self) -> None: ... + + +class Proto_ContraGeneric(Protocol): + def m[T: Proto_ContraGeneric](self: T, x: T) -> None: ... + + +class Impl_ContraRecurs: + def m(self, x: "Impl_ContraRecurs") -> None: ... + + +class Impl_ContraSelf: + def m(self, x: Self) -> None: ... + + +class Impl_ContraGeneric: + def m[T: Impl_ContraGeneric](self: T, x: T) -> None: ... + + +class Impl_ContraOther: + def m(self, x: Impl_ContraSelf) -> None: ... + + +class Impl_ContraRecursExplicit1(Proto_ContraRecurs): + # This should generate a reportIncompatibleMethodOverride error. + def m(self, x: "Impl_ContraRecursExplicit1") -> None: ... 
+ + +class Impl_ContraSelfExplicit1(Proto_ContraRecurs): + # This should generate a reportIncompatibleMethodOverride error. + def m(self, x: Self) -> None: ... + + +class Impl_ContraGenericExplicit1(Proto_ContraRecurs): + # This should generate a reportIncompatibleMethodOverride error. + def m[T: Impl_ContraGenericExplicit1](self: T, x: T) -> None: ... + + +class Impl_ContraOtherExplicit1(Proto_ContraRecurs): + # This should generate a reportIncompatibleMethodOverride error. + def m(self, x: Impl_ContraSelf) -> None: ... + + +class Impl_ContraRecursExplicit2(Proto_ContraSelf): + def m(self, x: "Impl_ContraRecursExplicit2") -> None: ... + + +class Impl_ContraSelfExplicit2(Proto_ContraSelf): + def m(self, x: Self) -> None: ... + + +class Impl_ContraGenericExplicit2(Proto_ContraSelf): + def m[T: Impl_ContraGenericExplicit2](self: T, x: T) -> None: ... + + +class Impl_ContraOtherExplicit2(Proto_ContraSelf): + # This should generate a reportIncompatibleMethodOverride error. + def m(self, x: Impl_ContraSelf) -> None: ... + + +class Impl_ContraRecursExplicit3(Proto_ContraGeneric): + # This should not generate a reportIncompatibleMethodOverride error + # but does currently. + def m(self, x: "Impl_ContraRecursExplicit3") -> None: ... + + +class Impl_ContraSelfExplicit3(Proto_ContraGeneric): + # This should not generate a reportIncompatibleMethodOverride error + # but does currently. + def m(self, x: Self) -> None: ... + + +class Impl_ContraGenericExplicit3(Proto_ContraGeneric): + # This should not generate a reportIncompatibleMethodOverride error + # but does currently. + def m[T: Impl_ContraGenericExplicit3](self: T, x: T) -> None: ... + + +class Impl_ContraOtherExplicit3(Proto_ContraGeneric): + # This should not generate a reportIncompatibleMethodOverride error + # but does currently. + def m(self, x: Impl_ContraSelf) -> None: ... + + +# This should generate a reportAsignmentType error. 
+y01: Proto_ContraRecurs = Impl_ContraRecurs() +# This should generate a reportAsignmentType error. +y02: Proto_ContraRecurs = Impl_ContraSelf() +# This should generate a reportAsignmentType error. +y03: Proto_ContraRecurs = Impl_ContraGeneric() +# This should generate a reportAsignmentType error. +y04: Proto_ContraRecurs = Impl_ContraOther() + +y11: Proto_ContraSelf = Impl_ContraRecurs() +y12: Proto_ContraSelf = Impl_ContraSelf() +y13: Proto_ContraSelf = Impl_ContraGeneric() +# This should generate a reportAsignmentType error. +y14: Proto_ContraSelf = Impl_ContraOther() + +y21: Proto_ContraGeneric = Impl_ContraRecurs() +y22: Proto_ContraGeneric = Impl_ContraSelf() +y23: Proto_ContraGeneric = Impl_ContraGeneric() +# This should generate a reportAsignmentType error. +y24: Proto_ContraGeneric = Impl_ContraOther() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol6.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol6.py new file mode 100644 index 00000000..f2ce381f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol6.py @@ -0,0 +1,86 @@ +# This sample tests nested protocol definitions. 
+ +from typing import Literal, Protocol, TypeVar + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") + + +class Animal(Protocol[_T1]): + species: str + attributes: list[_T1] + + +class Mammal(Animal[_T2], Protocol): + pass + + +class Ungulate(Mammal[_T3], Protocol): + type_of_hooves: _T3 + + +class CamelLike(Ungulate[bytes], Protocol): + species: Literal["camel"] # pyright: ignore[reportIncompatibleVariableOverride] + + +class Sloth: + species: str + attributes: list[str] + + +class Armadillo: + species: str + attributes: list[bytes] + + +class Tapir: + species: str + + +class Camel: + species: Literal["camel"] + attributes: list[bytes] + type_of_hooves: bytes + + +class Cow: + species: str + attributes: list[str] + type_of_hooves: str + + +a: Mammal[str] = Sloth() + +# This should generate an error because Armadillo +# uses bytes for its attributes, not str. +b: Mammal[str] = Armadillo() + +# This should generate an error because Tapir +# doesn't provide an attributes. +c: Mammal[str] = Tapir() + +# This should generate an error because "species" +# is incompatible. +d: Ungulate[bytes] = Camel() + +e: Ungulate[str] = Cow() +f: CamelLike = Camel() + + +class CallTreeProto(Protocol): + subcalls: list["CallTreeProto"] + + +class MyCallTree: + subcalls: list["MyCallTree"] + + +class OtherCallTree: + subcalls: list["CallTreeProto"] + + +# This should generate an error. +x1: CallTreeProto = MyCallTree() + +x2: CallTreeProto = OtherCallTree() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol7.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol7.py new file mode 100644 index 00000000..11fe01a2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol7.py @@ -0,0 +1,24 @@ +# This sample verifies that the type checker properly +# flags the use of a protocol class when it's used +# in the second parameter of isinstance. 
+ +from typing import Protocol, runtime_checkable + + +class P1(Protocol): + name: str + + +@runtime_checkable +class P2(Protocol): + name: str + + +def foo(a: int): + # This should generate an error because P1 is not + # runtime_checkable. + if isinstance(a, P1): + return + + if isinstance(a, P2): + return diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol8.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol8.py new file mode 100644 index 00000000..7c790e10 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol8.py @@ -0,0 +1,30 @@ +# This sample tests the handling of protocol classes +# that represent callbacks when used with class constructors. + +from typing import Protocol + + +class _BaseClass: ... + + +class _Protocol1(Protocol): + def __call__(self, p1: str, p2) -> _BaseClass: ... + + +def func1(callback: _Protocol1): + pass + + +class _Class1(_BaseClass): + def __init__(self, my_str: str): ... + + +class _Class2(_BaseClass): + def __init__(self, p1: str, p2: str): ... + + +# This should generate an error because the +# parameter types don't match. +func1(_Class1) + +func1(_Class2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocol9.py b/python-parser/packages/pyright-internal/src/tests/samples/protocol9.py new file mode 100644 index 00000000..f86e3d74 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocol9.py @@ -0,0 +1,49 @@ +# This sample tests a recursive protocol class (i.e. a protocol +# that refers to itself). + +from typing import Protocol + + +class TreeLike(Protocol): + value: int + + @property + def left(self) -> "TreeLike | None": ... + + @property + def right(self) -> "TreeLike | None": ... 
+ + +class SimpleTree: + value: int + + @property + def left(self) -> "SimpleTree | None": + return self._left + + @property + def right(self) -> "SimpleTree | None": + return self._right + + def __init__(self, value: int) -> None: + self.value = value + self._left: SimpleTree | None = None + self._right: SimpleTree | None = None + + +root: TreeLike = SimpleTree(0) + + +class ProtoA(Protocol): + def method1(self) -> "ProtoA": ... + + +class ImplA: + class CallableClass: + def __call__(self) -> "ImplA": + return ImplA() + + method1 = CallableClass() + + +v1: ProtoA = ImplA() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocolExplicit1.py b/python-parser/packages/pyright-internal/src/tests/samples/protocolExplicit1.py new file mode 100644 index 00000000..95c30c2a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocolExplicit1.py @@ -0,0 +1,120 @@ +# This sample tests the logic that validates that a concrete class that +# explicitly derives from a protocol class implements the variables +# and functions defined in the protocol. + +from abc import ABC, abstractmethod +from typing import ClassVar, Protocol, final + + +class Protocol1(Protocol): + cm1: ClassVar[int] + cm2: ClassVar[int] = 0 + + im1: int + im2: int = 2 + im3: int + + def __init__(self): + self.im3 = 3 + + +class Protocol2(Protocol): + cm10: int + + +class Protocol3(Protocol2, Protocol): + cm11: int + + +class Concrete1(Protocol1): ... + + +# This should generate an error because some attributes are not implemented. +Concrete1() + + +class Concrete2(Protocol1): + cm1 = 3 + im1 = 0 + + +Concrete2() + + +class Concrete3(Protocol1, Protocol3): + cm1 = 3 + + def __init__(self): + im1 = 0 + + +# This should generate an error. +Concrete3() + + +class Concrete4(Protocol1, Protocol3): + cm1 = 3 + cm10 = 3 + + def __init__(self): + self.im1 = 3 + self.im10 = 10 + self.cm11 = 3 + + +Concrete4() + + +class Protocol5(Protocol): + def method1(self) -> int: ... 
+ + +# This should generate an error because "method1" is +# not implemented and it is marked final. +@final +class Concrete5(Protocol5): + pass + + +class Protocol6(Protocol): + x: int + + +class Mixin: + x = 3 + + +@final +class Concrete6(Mixin, Protocol6): + pass + + +class Protocol7(Protocol): + @abstractmethod + def method1(self): ... + + +class Mixin7(Protocol7, ABC): + def method1(self): + pass + + +# This should generate an error because it +# does not implement method1 and is marked final. +@final +class Concrete7A(Protocol7): + pass + + +@final +class Concrete7B(Mixin7, Protocol7): + pass + + +class Protocol8(Protocol): + x: int + + +class Concrete8(Protocol8): + # This should generate an error because x is a ClassVar. + x: ClassVar = 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocolExplicit3.py b/python-parser/packages/pyright-internal/src/tests/samples/protocolExplicit3.py new file mode 100644 index 00000000..183198ec --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocolExplicit3.py @@ -0,0 +1,77 @@ +# This sample tests the logic that validates that a concrete class that +# explicitly derives from a protocol class implements the variables +# and functions defined in the protocol. Specifically, this tests +# the case where the parent protocol class is implemented in a stub file. + +# pyright: reportMissingModuleSource=false + +from abc import ABC +from typing import final +from .protocolExplicit2 import Protocol1, Protocol3, Protocol5, Protocol6, Protocol7 + + +class Concrete1(Protocol1): ... + + +# This should generate an error because some attributes are not implemented. +Concrete1() + + +class Concrete2(Protocol1): + cm1 = 3 + im1 = 0 + + +Concrete2() + + +class Concrete3(Protocol1, Protocol3): + cm1 = 3 + + +# This should generate an error. 
+Concrete3() + + +class Concrete4(Protocol1, Protocol3): + cm1 = 3 + cm10 = 3 + + def __init__(self): + self.im1 = 3 + self.im10 = 10 + self.cm11 = 3 + + +Concrete4() + + +@final +class Concrete5(Protocol5): + pass + + +class Mixin: + x = 3 + + +@final +class Concrete6(Mixin, Protocol6): + pass + + +class Mixin7(Protocol7, ABC): + def method1(self): + pass + + +# This should generate an error because it +# does not implement method1 and is marked final. +@final +class Concrete7A(Protocol7): + pass + + +@final +class Concrete7B(Mixin7, Protocol7): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocolModule1.py b/python-parser/packages/pyright-internal/src/tests/samples/protocolModule1.py new file mode 100644 index 00000000..ff852f1a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocolModule1.py @@ -0,0 +1,13 @@ +# This sample is used in conjunction with protocolModule2.py. + + +var_1: int = 3 +var_2: int | str = "hello" + + +def func_1(a: int, b: str) -> str: + return "hi" + + +def func_2() -> str: + return "hi" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocolModule2.py b/python-parser/packages/pyright-internal/src/tests/samples/protocolModule2.py new file mode 100644 index 00000000..d2bd17de --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocolModule2.py @@ -0,0 +1,100 @@ +# This sample tests protocol matching for modules. + +from typing import Protocol, TypeVar, runtime_checkable +from . import protocolModule1 +import datetime +from importlib import import_module + + +@runtime_checkable +class P1(Protocol): + var_1: int + var_2: int | str + + def func_1(self, a: int, b: str) -> str: ... + + @staticmethod + def func_2() -> str: ... + + +v1: P1 = protocolModule1 + + +@runtime_checkable +class P2(Protocol): + var_1: str + + +# This should generate an error because var_1 has the +# wrong type. 
+v2: P2 = protocolModule1 + + +class P3(Protocol): + def func_1(self, a: int, b: str) -> int: ... + + +# This should generate an error because func_1 has the +# wrong type. +v3: P3 = protocolModule1 + + +class P4(Protocol): + def func_2(self) -> str: ... + + y: int + + +# This should generate an error because y is missing. +v4: P4 = protocolModule1 + + +_T = TypeVar("_T", bound=P2) + + +class NonProtocol: ... + + +# Test type narrowing of module symbols for isinstance checks. +def func1(x: type[_T]): + if isinstance(datetime, (P1, P2, NonProtocol, x)): + reveal_type(datetime, expected_text="P1 | P2 | _T@func1") + else: + reveal_type(datetime, expected_text='Module("datetime")') + + +def func2(): + if not isinstance(datetime, P1): + reveal_type(datetime, expected_text='Module("datetime")') + else: + reveal_type(datetime, expected_text="P1") + + +def func3(): + my_module = import_module("my_module") + if isinstance(my_module, (P1, NonProtocol)): + reveal_type(my_module, expected_text="P1") + else: + reveal_type(my_module, expected_text="ModuleType") + + +_T1 = TypeVar("_T1") + + +class P5(Protocol[_T1]): + def func_1(self, a: int, b: _T1) -> _T1: ... + + +def func4(x: P5[_T1]) -> _T1: ... + + +v5 = func4(protocolModule1) +reveal_type(v5, expected_text="str") + + +class P6(Protocol): + @property + def var_1(self) -> int: ... + + +v6: P6 = protocolModule1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocolModule3.py b/python-parser/packages/pyright-internal/src/tests/samples/protocolModule3.py new file mode 100644 index 00000000..134b0531 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocolModule3.py @@ -0,0 +1,13 @@ +# This sample is used in conjunction with protocolModule4.py. + +from typing import Protocol, TypeVar + +Y = TypeVar("Y", contravariant=True) + + +class Fn(Protocol[Y]): + def __call__(self, y: Y) -> None: ... 
+ + +def x(x: Fn[int]) -> None: + print(x) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/protocolModule4.py b/python-parser/packages/pyright-internal/src/tests/samples/protocolModule4.py new file mode 100644 index 00000000..ac2e3ed2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/protocolModule4.py @@ -0,0 +1,25 @@ +# This sample tests protocol matching for modules when using +# a generic protocol class. + +from typing import Protocol, TypeVar + +from . import protocolModule3 +from .protocolModule3 import Fn + +X = TypeVar("X", covariant=True) +Z = TypeVar("Z") + + +class FnHandler(Protocol[X]): + def __call__(self, x: Fn[X]) -> None: ... + + +class ModuleSpec(Protocol[Z]): + x: FnHandler[Z] + + +m1: ModuleSpec[int] = protocolModule3 +m1.x(lambda y: None) + +# This should generate an error. +m2: ModuleSpec[str] = protocolModule3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/pseudoGeneric1.py b/python-parser/packages/pyright-internal/src/tests/samples/pseudoGeneric1.py new file mode 100644 index 00000000..099b970a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/pseudoGeneric1.py @@ -0,0 +1,38 @@ +# This sample tests type checking scenarios related to "pseudo generic" +# classes - those whose constructors are unannotated. + +_DEFAULT_VALUE = object() + + +class ClassA: + def __init__(self, name, description=_DEFAULT_VALUE): ... 
+ + @classmethod + def create_new(cls): + return cls("", None) + + +a1: list[ClassA] = [ClassA("a", description="b")] +a2: list[ClassA] = [ClassA("c")] +a3: list[ClassA] = a1 + a2 + + +class ClassB: + def __init__(self, a, b, c=None, d=""): + self.a = a + self.b = b + self.c = c + self.d = d + + +b1 = ClassB(1, "") +reveal_type(b1.a, expected_text="int") +reveal_type(b1.b, expected_text="str") +reveal_type(b1.c, expected_text="Unknown | None") +reveal_type(b1.d, expected_text="str") + +b2 = ClassB("", 1.2, 2, "") +reveal_type(b2.a, expected_text="str") +reveal_type(b2.b, expected_text="float") +reveal_type(b2.c, expected_text="Unknown | None") +reveal_type(b2.d, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/pseudoGeneric2.py b/python-parser/packages/pyright-internal/src/tests/samples/pseudoGeneric2.py new file mode 100644 index 00000000..17de559e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/pseudoGeneric2.py @@ -0,0 +1,38 @@ +# This sample tests the "pseudo-generic class" functionality, +# where a class is made into a generic class in cases where +# it has no annotated constructor parameters. + +# We use "strict" here because we want to ensure that there are +# no "unknown" types remaining in this file. 
+# pyright: strict, reportUnknownParameterType=false, reportMissingParameterType=false + +from logging import Handler, NOTSET + + +class ClassA(Handler): + def __init__(self, a, b="hello", level=NOTSET): + super().__init__(level) + self._foo_a = a + self._foo_b = b + + @property + def value_a(self): + return self._foo_a + + @property + def value_b(self): + return self._foo_b + + +a1 = ClassA(27) +reveal_type(a1.value_a, expected_text="int") +reveal_type(a1.value_b, expected_text="str") + + +a2 = ClassA("hello", "27") +reveal_type(a2.value_a, expected_text="str") +reveal_type(a2.value_b, expected_text="str") + +# This should generate an error because a pseudo-generic +# class is not actually generic. +a3: ClassA[int, str, int] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/pseudoGeneric3.py b/python-parser/packages/pyright-internal/src/tests/samples/pseudoGeneric3.py new file mode 100644 index 00000000..4cf33269 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/pseudoGeneric3.py @@ -0,0 +1,30 @@ +# This sample is similar to pseudoGeneric2.py in that it tests +# the case where the class's constructor is unannotated. This +# particular case was causing an internal crash. 
+ +import abc + + +class ClassB(metaclass=abc.ABCMeta): + def __init__(self, value=None): + self._cache = {"value": value} + + @property + def cache(self): + return self._cache + + @cache.deleter + def cache(self): + self._cache = {key: None for key in self._cache} + + def __getattr__(self, attr): + cache = self.cache + if attr in cache: + return cache[attr] + else: + return self.__getattribute__(attr) + + +b1 = ClassB("test") +reveal_type(b1.value, expected_text="Unknown | Any | None") +del b1.cache diff --git a/python-parser/packages/pyright-internal/src/tests/samples/pyrightComment1.py b/python-parser/packages/pyright-internal/src/tests/samples/pyrightComment1.py new file mode 100644 index 00000000..11dfca8f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/pyrightComment1.py @@ -0,0 +1,28 @@ +# This sample tests error handling for pyright comments. + +# This should generate an error because "stricter" isn't a valid directive. +# pyright: basic , stricter + +# This should generate an error because it's missing a directive. +# pyright: + +# This should generate an error because the value is missing. +# pyright: reportMissingTypeStubs + +# This should generate an error because the value is missing. +# pyright: reportMissingTypeStubs= + +# This should generate two errors because the values are invalid. +# pyright: reportMissingTypeStubs = blah , strictListInference = none + +# This should generate two errors because the rule is invalid. +# pyright: reportBlahBlah = true + +# This should generate an error because it's not on its own line. +a = 1 # pyright: reportGeneralTypeIssues=false + + +def func1(): + # This should generate an error because it's not on its own line. 
+ # pyright: reportGeneralTypeIssues=false + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/pyrightIgnore1.py b/python-parser/packages/pyright-internal/src/tests/samples/pyrightIgnore1.py new file mode 100644 index 00000000..af108ce6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/pyrightIgnore1.py @@ -0,0 +1,15 @@ +# This sample tests the # pyright: ignore comment. + +import sys + + +def func1(self, x: int | None) -> str: + # This should suppress the error + x + "hi" # pyright: ignore - test + + # This should not suppress the error because the rule doesn't match. + return 3 # pyright: ignore [foo] + + +if sys.version_info < (3, 8): + x = 3 # pyright: ignore diff --git a/python-parser/packages/pyright-internal/src/tests/samples/pyrightIgnore2.py b/python-parser/packages/pyright-internal/src/tests/samples/pyrightIgnore2.py new file mode 100644 index 00000000..279efddd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/pyrightIgnore2.py @@ -0,0 +1,22 @@ +# This sample tests the use of a pyright ignore comment in conjunction +# with the reportUnnecessaryTypeIgnoreComment mechanism. 
+ + +def func1(self, x: int | None) -> str: + # This should suppress the error + v1 = x + "hi" # pyright: ignore - test + + # This is unnecessary + v2 = x + x # pyright: ignore + + # This will not suppress the error + # These are both unnecessary + v3 = x + x # pyright: ignore [foo, bar] + + # This will not suppress the error + v4 = x + x # pyright: ignore [] + + # One of these is unnecessary + v5 = x + "hi" # test # pyright: ignore [reportOperatorIssue, foo] + + return 3 # pyright: ignore [reportReturnType] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/python2.py b/python-parser/packages/pyright-internal/src/tests/samples/python2.py new file mode 100644 index 00000000..d3e4bebb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/python2.py @@ -0,0 +1,32 @@ +# This sample includes Python 2.x syntax that is illegal +# in Python 3.x. The Pyright parser should flag these as +# errors, but it should exhibit good recovery, preferably +# emitting one error per instance, not a cascade of errors. + +# pyright: reportUnusedExpression=false + +# This should generate an error. +print 3 + 3 + +# This should generate an error. +exec 3 + 4 + +try: + bar = 3 +# This should generate one error on Python 3.14 and newer +# and two errors on older versions. +except NameError, 'error caused': + pass + +b = 3 + +# This should generate an error. +a = `b` + +# This should generate an error. +def foo(a, (b, c), d): + pass + +# This should generate two errors. +raise NameError, a > 4, a < 4 + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias1.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias1.py new file mode 100644 index 00000000..05cf1f8f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias1.py @@ -0,0 +1,77 @@ +# This sample tests Pyright's handling of recursive type aliases. 
+ +from typing import Mapping, TypeVar, Union + +MyTree = list[Union["MyTree", int]] + +t1: MyTree = [1, 2, 3, [3, 4], [[3], 5]] + +# This should generate an error because a str is not allowed. +t2: MyTree = [3, ""] + +# This should generate an error because a str is not allowed. +t3: MyTree = [1, 2, 3, [3, 4], [3, 4, 5, [3, "4"]]] + +_T = TypeVar("_T") +GenericUnion = Union[int, _T] + +i1: GenericUnion[str] = "hi" +i1 = 3 + +i2: GenericUnion[float] = 3 +# This should generate an error because str isn't compatible. +i2 = "hi" + +Foo = Union[bool, list["Foo"], dict["Foo", "Foo"]] + +bar1: Foo = [True, [True, False]] +bar2: Foo = [True, [True], {True: False}] +bar4: Foo = {True: [False]} + +# These should generate errors. +baz1: Foo = [True, ["True", False]] +baz2: Foo = [True, [True], {True: "False"}] +baz4: Foo = {True: ["False"]} + +Json = Union[None, int, str, float, list["Json"], dict[str, "Json"]] + +# This should generate an error +a1: Json = {"a": 1, "b": 3j} + +# This should generate an error +a2: Json = [2, 3j] + +RecursiveTuple = Union[str | int, tuple["RecursiveTuple", ...]] + + +b1: RecursiveTuple = (1, 1) +b2: RecursiveTuple = (1, "1") +b3: RecursiveTuple = (1, "1", 1, "2") +b4: RecursiveTuple = (1, ("1", 1), "2") +b5: RecursiveTuple = (1, ("1", 1), (1, (1, 2))) + +# This should generate an error +b6: RecursiveTuple = (1, ("1", 1), (1, (1, [2]))) + +# This should generate an error +b6: RecursiveTuple = (1, [1]) + + +RecursiveMapping = Union[str, int, Mapping[str, "RecursiveMapping"]] + + +c1: RecursiveMapping = 1 +c2: RecursiveMapping = "1" +c3: RecursiveMapping = {"1": "1"} +c4: RecursiveMapping = {"1": "1", "2": 1} +c5: RecursiveMapping = {"1": "1", "2": 1, "3": {}} +c6: RecursiveMapping = {"1": "1", "2": 1, "3": {"0": "0", "1": "2", "2": {}}} + +# This should generate an error. +c7: RecursiveMapping = {"1": [1]} + +# This should generate an error. +c8: RecursiveMapping = {"1": "1", "2": 1, "3": [1, 2]} + +# This should generate an error. 
+c9: RecursiveMapping = {"1": "1", "2": 1, "3": {"0": "0", "1": 1, "2": [1, 2, 3]}} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias10.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias10.py new file mode 100644 index 00000000..a43a2925 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias10.py @@ -0,0 +1,32 @@ +# This sample tests the case where two recursive type aliases +# with different definitions overlap. + +from typing import Mapping, Optional, Sequence, Union + +JsonArr1 = Sequence[Optional["JsonVal1"]] +JsonObj1 = Mapping[str, Optional["JsonVal1"]] +JsonVal1 = Union[bool, float, int, str, "JsonArr1", "JsonObj1"] + +JsonArr2 = Sequence[Optional["JsonVal2"]] +JsonObj2 = Mapping[str, Optional["JsonVal2"]] +JsonVal2 = Union[bool, float, int, str, "JsonArr2", "JsonObj2"] + + +def func1(v: JsonVal1): + x: JsonVal2 = v + + return x + + +def func2(v: Optional[JsonVal1]): + # This should generate an error. + x: JsonVal2 = v + + return x + + +def func3(v: Optional[JsonVal1]): + # This should generate an error. + x: Optional[JsonVal2] = v + + return x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias11.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias11.py new file mode 100644 index 00000000..40239e92 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias11.py @@ -0,0 +1,46 @@ +# This sample tests the case where a recursive type alias makes use of +# a bound or constrained TypeVar. 
+ + +from typing import Any, Generic, TypeVar + +""" Test bound TypeVar """ + + +class ClassA1: + pass + + +T1 = TypeVar("T1", bound=ClassA1) + + +class ClassA2(ClassA1, Generic[T1]): + pass + + +class ClassA3(ClassA1): + pass + + +TA1 = ClassA2["TA1"] | ClassA3 + + +""" Test constrained TypeVar """ + + +class ClassB1: + pass + + +T2 = TypeVar("T2", "ClassB2[Any] | ClassB3", int) + + +class ClassB2(ClassB1, Generic[T2]): + pass + + +class ClassB3(ClassB1): + pass + + +TA2 = ClassB2["TA2"] | ClassB3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias12.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias12.py new file mode 100644 index 00000000..683a7559 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias12.py @@ -0,0 +1,25 @@ +# This sample tests the case where a recursive type alias is evaluated +# for type compatibility with a recursive protocol. We want to make sure +# this doesn't lead to extremely long evaluation times or stack overflows. + +from collections.abc import Callable +from types import FrameType +from typing import Any, Protocol, Self, TypeAlias + + +class TraceFunctionProto(Protocol): + def __call__(self, frame: FrameType, event: str, arg: Any) -> Self | None: ... + + +TraceFunction: TypeAlias = Callable[[FrameType, str, Any], "TraceFunction | None"] + + +def settrace(tf: TraceFunction | None) -> None: ... + + +def func1(frame: FrameType, event: str, arg: Any) -> TraceFunction: ... 
+ + +def func2(tf: TraceFunctionProto | None): + settrace(tf) + settrace(func1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias13.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias13.py new file mode 100644 index 00000000..ae00c456 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias13.py @@ -0,0 +1,22 @@ +# This sample tests the case where a recursive type alias previous +# led to infinite recursion. + +from typing import Iterable, TypeVar, Union + +T = TypeVar("T") +Tree = list[Union["Tree[T]", T]] + + +def _flatten(tree: Union[Tree[T], T]) -> Iterable[T]: + if not isinstance(tree, list): + yield tree + return + for v in tree: + yield from _flatten(v) + + +def flatten(tree: Tree[T]) -> Iterable[T]: + return _flatten(tree) + + +flatten([1, [2, 3]]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias14.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias14.py new file mode 100644 index 00000000..633c6432 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias14.py @@ -0,0 +1,23 @@ +# This sample tests the case where a recursive type alias contains +# a `Sequence` that potentially overlaps with str (which derives from +# Sequence[str]). + +from typing import Sequence + + +NestedIntList = int | Sequence["NestedIntList"] + + +def func1() -> NestedIntList: + result: NestedIntList = 1 + for _ in range(1): + result = [result] + return result + + +def func2() -> NestedIntList: + # This should generate an error. 
+ result: NestedIntList = "" + for _ in range(1): + result = [result] + return result diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias15.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias15.py new file mode 100644 index 00000000..f7e924f5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias15.py @@ -0,0 +1,32 @@ +# This sample tests that the computed variance for a recursive type +# alias is correct. + +from typing import Callable, TypeAlias, TypeVar + + +type A[T] = Callable[[A[T]], Callable[[T], None]] + + +def testA_co(x: A[int]) -> A[int | str]: + # This should generate an error because A is invariant. + return x + + +def testA_cn(x: A[int | str]) -> A[int]: + # This should generate an error because A is invariant. + return x + + +T = TypeVar("T") + +B: TypeAlias = "Callable[[B[T]], Callable[[T], None]]" + + +def testB_co(x: B[int]) -> B[int | str]: + # This should generate an error because B is invariant. + return x + + +def testB_cn(x: B[int | str]) -> B[int]: + # This should generate an error because B is invariant. + return x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias16.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias16.py new file mode 100644 index 00000000..d25c5b89 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias16.py @@ -0,0 +1,23 @@ +# This sample tests a case that previously resulted in infinite recursion. 
+ +from typing import TypeVar, Generic + +U = TypeVar("U") +T = TypeVar("T") + + +class A(Generic[T]): + pass + + +class B(Generic[T]): + pass + + +class C(Generic[T]): + pass + + +TA1 = A["TA2[U]"] | B["TA2[U]"] +TA2 = TA1[U] | C[TA1[U]] +TA3 = TA2[U] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias2.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias2.py new file mode 100644 index 00000000..3d1146c2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias2.py @@ -0,0 +1,32 @@ +# This sample tests Pyright's handling of recursive type aliases +# that are also generic. + +from typing import TypeVar, Union + +_T1 = TypeVar("_T1", str, int) +_T2 = TypeVar("_T2") + +GenericTypeAlias1 = list[Union["GenericTypeAlias1[_T1]", _T1]] + +SpecializedTypeAlias1 = GenericTypeAlias1[str] + +a1: SpecializedTypeAlias1 = ["hi", ["hi", "hi"]] + +# This should generate an error because int doesn't match the +# constraint of the TypeVar _T1. +SpecializedClass2 = GenericTypeAlias1[float] + +b1: GenericTypeAlias1[str] = ["hi", "bye", [""], [["hi"]]] + +# This should generate an error. +b2: GenericTypeAlias1[str] = ["hi", [2.4]] + + +GenericTypeAlias2 = list[Union["GenericTypeAlias2[_T1, _T2]", _T1, _T2]] + +c2: GenericTypeAlias2[str, int] = [[3, ["hi"]], "hi"] + +c3: GenericTypeAlias2[str, float] = [[3, ["hi", 3.4, [3.4]]], "hi"] + +# This should generate an error because a float is a type mismatch. +c4: GenericTypeAlias2[str, int] = [[3, ["hi", 3, [3.4]]], "hi"] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias3.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias3.py new file mode 100644 index 00000000..8d07bec7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias3.py @@ -0,0 +1,24 @@ +# This sample tests Pyright's handling of recursive type aliases. 
+ +from typing import TypeAlias, Union + +# This should generate an error because the forward reference +# type needs to be in quotes. +GenericClass0 = list[GenericClass0 | int] + +# This should generate an error because the type alias directly +# refers to itself. +RecursiveUnion = Union["RecursiveUnion", int] + +# This should generate an error because the type alias refers +# to itself through a mutually-referential type alias. +MutualReference1 = Union["MutualReference2", int] +MutualReference2 = Union["MutualReference1", str] + +# This should generate an error because the type alias refers +# to itself. +MutualReference3: TypeAlias = "MutualReference3" + + +RecursiveType: TypeAlias = list[Union[str, "RecursiveType"]] +reveal_type(RecursiveType, expected_text="type[list[str | RecursiveType]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias4.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias4.py new file mode 100644 index 00000000..a1cee24d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias4.py @@ -0,0 +1,55 @@ +# This sample tests the handling of complex recursive types. 
+ +# pyright: strict, reportUnusedVariable=false + +from typing import Generator + + +JSONArray = list["JSONType"] +JSONObject = dict[str, "JSONType"] + +JSONPrimitive = str | float | int | bool | None +JSONStructured = JSONArray | JSONObject + +JSONType = JSONPrimitive | JSONStructured + + +# Using type alias checking for list: +def f2(args: JSONStructured): + if isinstance(args, list): + reveal_type( + args, + expected_text="list[str | float | int | bool | list[JSONType] | dict[str, JSONType] | None]", + ) + else: + reveal_type( + args, + expected_text="dict[str, str | float | int | bool | list[JSONType] | dict[str, JSONType] | None]", + ) + dargs: JSONObject = args + + +# Using type alias checking for dict: +def f3(args: JSONStructured): + if isinstance(args, dict): + reveal_type( + args, + expected_text="dict[str, str | float | int | bool | list[JSONType] | dict[str, JSONType] | None]", + ) + else: + reveal_type( + args, + expected_text="list[str | float | int | bool | list[JSONType] | dict[str, JSONType] | None]", + ) + largs: JSONArray = args + + +# Using type alias for "is None" narrowing: +LinkedList = tuple[int, "LinkedList"] | None + + +def g(xs: LinkedList) -> Generator[int, None, None]: + while xs is not None: + x, rest = xs + yield x + xs = rest diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias6.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias6.py new file mode 100644 index 00000000..ced16808 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias6.py @@ -0,0 +1,23 @@ +# This sample tests the handling of recursive type aliases that are generic. + +from __future__ import annotations +from typing import Mapping, Sequence, TypeVar, Union + +S = TypeVar("S") +RecList = Union[Mapping[str, "RecList[S]"], Sequence["RecList[S]"], S] + +T3 = TypeVar("T3", RecList[int], RecList[str]) + + +def f3(x: RecList[int] | RecList[str]) -> None: ... 
+ + +def g3(x: T3): + return f3(x) + + +def f4(x: RecList[str] | RecList[int]) -> None: ... + + +def g4(x: T3): + return f4(x) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias7.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias7.py new file mode 100644 index 00000000..abde0df3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias7.py @@ -0,0 +1,15 @@ +# This sample tests a recursive type alias used within +# a recursive function. + +from typing import Dict, Union + + +A = Union[str, Dict[str, "A"]] + + +def func1(x: A): + if isinstance(x, str): + print(x) + else: + for _, v in x.items(): + func1(v) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias8.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias8.py new file mode 100644 index 00000000..17e71927 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias8.py @@ -0,0 +1,38 @@ +# This sample tests the case where a recursive type alias is used +# to define a TypedDict that refers to itself in one of its fields. 
+ +from __future__ import annotations + +from typing import TypedDict + + +class ClassA(TypedDict, total=False): + options: list[CorD] + type: int + + +class ClassB(ClassA): + id: int + name: str + + +class ClassC(TypedDict): + type: int + + +class ClassD(TypedDict): + options: list[CorD] + type: int + + +CorD = ClassC | ClassD + + +def foo(a: CorD): + reveal_type(a, expected_text="ClassC | ClassD") + options = a.get("options", []) + reveal_type(options, expected_text="list[ClassC | ClassD] | Any | list[Any]") + + for option in options: + reveal_type(option, expected_text="ClassC | ClassD | Any") + reveal_type(option["type"], expected_text="int | Any") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias9.py b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias9.py new file mode 100644 index 00000000..3223af60 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/recursiveTypeAlias9.py @@ -0,0 +1,22 @@ +# This sample tests that recursive type aliases work well with +# a generic dataclass constructor. + +from dataclasses import dataclass +from typing import Union, Generic, TypeVar + +A = TypeVar("A") +JSON = Union[str, dict[str, "JSON"]] + + +@dataclass +class Example(Generic[A]): + val: A + + +a: JSON = {"a": "b"} +b: JSON = "a" +c: Example[JSON] = Example(a) +d: Example[JSON] = Example("a") +e: Example[JSON] = Example({}) +f: Example[JSON] = Example({"a": "b"}) +g: Example[JSON] = Example({"a": {"a": "b"}}) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/required1.py b/python-parser/packages/pyright-internal/src/tests/samples/required1.py new file mode 100644 index 00000000..5d674a55 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/required1.py @@ -0,0 +1,50 @@ +# This sample tests the handling of Required and NotRequired +# (PEP 655) in TypedDict definitions. 
+ +# pyright: reportMissingModuleSource=false + +from typing import Annotated, TypedDict +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + NotRequired, + Required, +) + + +class TD1(TypedDict): + a: Required[int] + b: NotRequired[int] + + # This should generate an error because NotRequired can't be + # used in this context. + c: NotRequired[NotRequired[int]] + + # This should generate an error because Required can't be + # used in this context. + d: Required[Required[int]] + + e: NotRequired[Annotated[int, "hi"]] + + # This should generate an error because it's missing type args. + f: Required + + # This should generate an error because it's missing type args. + g: NotRequired + + +# This should generate an error because Required can't be +# used in this context. +x: Required[int] + +# This should generate an error because NotRequired can't be +# used in this context. +y: Required[int] + + +class Foo: + # This should generate an error because Required can't be + # used in this context. + x: Required[int] + + # This should generate an error because NotRequired can't be + # used in this context. + y: Required[int] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/required2.py b/python-parser/packages/pyright-internal/src/tests/samples/required2.py new file mode 100644 index 00000000..690c26bf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/required2.py @@ -0,0 +1,57 @@ +# This sample tests the handling of Required and NotRequired +# (PEP 655) in TypedDict definitions. 
+ +# pyright: reportMissingModuleSource=false + +from typing import Literal, TypedDict, Annotated +import typing_extensions as te +from typing_extensions import Required, NotRequired + + +class TD1(TypedDict, total=False): + a: Annotated["te.Required[int]", ""] + b: Annotated[te.NotRequired[str], ""] + c: "te.Required[int | str]" + d: te.Required[str | None] + e: Required[Literal[1, 2, 3]] + f: Required[None] + g: Required[type[int]] + + +td1_1: TD1 = {"a": 3, "c": "hi", "d": None, "e": 3, "f": None, "g": int} + +# This should generate an error because a is missing. +td1_2: TD1 = {"c": "hi", "d": None, "e": 3, "f": None, "g": int} + +# This should generate an error because c is missing. +td1_3: TD1 = {"a": 3, "d": None, "e": 3, "f": None, "g": int} + +# This should generate an error because d is missing. +td1_4: TD1 = {"a": 3, "c": "hi", "e": 3, "f": None, "g": int} + +# This should generate an error because e is missing. +td1_5: TD1 = {"a": 3, "c": "hi", "d": None, "f": None, "g": int} + +# This should generate an error because f is missing. +td1_6: TD1 = {"a": 3, "c": "hi", "d": None, "e": 3, "g": int} + +# This should generate an error because g is missing. +td1_7: TD1 = {"a": 3, "c": "hi", "d": None, "e": 3, "f": None} + + +class TD2(TypedDict, total=True): + a: Required[int] + b: NotRequired[str] + c: Required[int | str] + d: NotRequired[str | None] + e: NotRequired[Literal[1, 2, 3]] + f: NotRequired[None] + g: NotRequired[type[int]] + + +td2_1: TD2 = {"a": 3, "c": "hi", "d": None, "e": 3, "f": None, "g": int} + +td2_2: TD2 = {"a": 3, "c": "hi"} + +# This should generate an error because c is missing. 
+td2_3: TD2 = {"a": 3} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/required3.py b/python-parser/packages/pyright-internal/src/tests/samples/required3.py new file mode 100644 index 00000000..14e47f8b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/required3.py @@ -0,0 +1,25 @@ +# This sample tests the handling of Required and NotRequired using +# the alternative syntax form of TypedDict. + +from typing import TypedDict +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Required, + NotRequired, +) + +Example1 = TypedDict( + "Example1", {"required": Required[int], "not_required": NotRequired[int]} +) + +v1_0: Example1 = {"required": 1} + +# This should generate an error. +v1_1: Example1 = {"not_required": 1} + +Example2 = TypedDict("Example2", required=Required[int], not_required=NotRequired[int]) + + +v2_0: Example2 = {"required": 1} + +# This should generate an error. +v2_1: Example2 = {"not_required": 1} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/returnTypes1.py b/python-parser/packages/pyright-internal/src/tests/samples/returnTypes1.py new file mode 100644 index 00000000..b7b82244 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/returnTypes1.py @@ -0,0 +1,42 @@ +# This sample tests basic return type analysis and error reporting. + +from typing import TypeVar + + +T = TypeVar("T") + + +def func1(a: int, b: int) -> int: + c = float(a + b) + # This should generate an error: + # Expression of type 'float' cannot be assigned to return type 'int' + return c + + +def func2(a: float, b: float) -> float: + c = float(a + b) + return c + + +# This should generate an error: +# Argument of type 'float' cannot be assigned to parameter of type 'int' +func1(3.4, 5) + +# This should be fine +func2(3, 5) + + +# This should not produce any error because the function's suite is empty. +def func3() -> bool: + "Doc strings are allowed" + ... 
+ + +# This should not produce any error because not all paths return an int. +def func4() -> int: + pass + + +# This should not produce any error because not all paths return a T. +def func5(x: T) -> T: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/returnTypes2.py b/python-parser/packages/pyright-internal/src/tests/samples/returnTypes2.py new file mode 100644 index 00000000..6675cdd9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/returnTypes2.py @@ -0,0 +1,59 @@ +# This sample tests the case where return type inference encounters +# recursion. + + +class Grammar: + @staticmethod + def A(): + return Grammar.B + + @staticmethod + def B(): + return Grammar.C + + @staticmethod + def C(): + return Grammar.D + + @staticmethod + def D(): + return Grammar.E + + @staticmethod + def E(): + return Grammar.F + + @staticmethod + def F(): + return Grammar.G + + @staticmethod + def G(): + return Grammar.H + + @staticmethod + def H(): + return Grammar.I + + @staticmethod + def I(): + return Grammar.J + + @staticmethod + def J(): + return Grammar.K + + @staticmethod + def K(): + return Grammar.L + + @staticmethod + def L(): + return Grammar.B + + +async def func1(a): + if a == 0: + return + r = await func1(a - 1) + return r diff --git a/python-parser/packages/pyright-internal/src/tests/samples/revealedType1.py b/python-parser/packages/pyright-internal/src/tests/samples/revealedType1.py new file mode 100644 index 00000000..44418d92 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/revealedType1.py @@ -0,0 +1,30 @@ +# This sample tests the special reveal_type call. 
+ +from typing import Literal, Union + +a: Union[str, int] +if 2 + 3: + a = 3 +else: + a = "hello" +reveal_type(a) + +a = 5 +reveal_type(a) + +a = "yup" +reveal_type(a) + + +reveal_type(a, expected_type=Literal["yup"]) +reveal_type(a, expected_text="Literal['yup']") +reveal_type(a, expected_text="Literal['yup']", expected_type=Literal["yup"]) + +# This should generate an error. +reveal_type() + +# This should generate an error. +reveal_type(a, a) + + +reveal_type(a, x=3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/self1.py b/python-parser/packages/pyright-internal/src/tests/samples/self1.py new file mode 100644 index 00000000..e42e9ce8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/self1.py @@ -0,0 +1,112 @@ +# This sample tests various error conditions for the Self type + +from typing import Callable, Generic, TypeVar +from typing_extensions import Self # pyright: ignore[reportMissingModuleSource] + + +T = TypeVar("T") + + +# This should generate an error because Self can't be used in this context. +class A(Self): ... + + +# This should generate an error because Self can't be used in this context. +x: Self + + +def func1() -> None: + # This should generate an error because Self can't be used in this context. + x: Self + + +# This should generate an error because Self can't be used in this context. +def func2(a: Self) -> None: ... + + +# This should generate an error because Self can't be used in this context. +def func3() -> Self: ... + + +def is_self(t: object): + return t is Self + + +class B: + x: Self + + def method1(self) -> Self: + return self + + def method2(self, a: Self) -> None: + x: Self = a + y = Self + + def method3(self: Self) -> Self: + # This should generate an error because Self doesn't accept a type arg. + y: Self[int] + return self + + # This should generate an error because Self can't be used with + # methods that declare a non-Self type for "self". 
+ def method4(self: T, a: Self) -> T: + # This should generate an error because Self can't be used with + # methods that declare a non-Self type for "self". + x: Self + + return self + + @classmethod + def method5(cls) -> type[Self]: + return cls + + @classmethod + def method6(cls, a: Self) -> None: ... + + @classmethod + def method7(cls: type[Self]) -> type[Self]: + return cls + + # This should generate an error because Self can't be used with + # methods that declare a non-Self type for "self". + @classmethod + def method8(cls: type[T], a: Self) -> type[T]: + # This should generate an error because Self can't be used with + # methods that declare a non-Self type for "self". + x: Self + return cls + + # This should generate an error because Self can't be used in + # a static method. + @staticmethod + def stat_method1(a: Self) -> None: + # This should generate an error because Self can't be used in + # a static method. + x: Self + + +class C: + @classmethod + def outer(cls) -> Callable[[int, Self], Self]: + def inner(_: int, bar: Self) -> Self: + return bar + + return inner + + +class D(Generic[T]): ... + + +# This should generate an error because "Self" cannot be used +# within a generic class definition. +class E(D[Self]): ... + + +class MetaA(type): + # This should generate an error because "Self" isn't + # allowed in a metaclass. + def __new__(cls, *args: object) -> Self: ... + + # This should generate an error because "Self" isn't + # allowed in a metaclass. + def __mul__(cls, count: int) -> list[Self]: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/self10.py b/python-parser/packages/pyright-internal/src/tests/samples/self10.py new file mode 100644 index 00000000..feae0657 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/self10.py @@ -0,0 +1,15 @@ +# This sample tests that a class is not assignable to Self@Class. + +from typing import Self + + +class A: + def self_arg(self, other: Self): ... 
+ + def call_self_arg(self): + # This should generate an error. + self.self_arg(A()) + + def get_instance(self) -> Self: + # This should generate an error. + return A() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/self11.py b/python-parser/packages/pyright-internal/src/tests/samples/self11.py new file mode 100644 index 00000000..4e9941ad --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/self11.py @@ -0,0 +1,17 @@ +# This sample tests a case where "self" refers to a class with type +# parameters that have default values. This is a case that regressed. + + +class Base: ... + + +class A(Base): ... + + +class B[T: Base = A]: + def __init__(self, x: T) -> None: + self._x = x + + @property + def x(self) -> T: + return self._x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/self2.py b/python-parser/packages/pyright-internal/src/tests/samples/self2.py new file mode 100644 index 00000000..72d58bfa --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/self2.py @@ -0,0 +1,196 @@ +# This sample tests the usage of the Self type. 
+ +from dataclasses import dataclass +from typing import Callable, Generic, ParamSpec, Protocol, TypeVar + +from typing_extensions import Self # pyright: ignore[reportMissingModuleSource] + +_P = ParamSpec("_P") +_R = TypeVar("_R") + + +class A(Generic[_P, _R]): + val: _R + + def __init__(self, callback: Callable[_P, _R]) -> None: + self.callback = callback + + def method1(self: Self) -> Self: + return self + + def method2(self) -> Self: + return self + + @classmethod + def method3(cls: type[Self]) -> type[Self]: + return cls + + @classmethod + def method4(cls) -> type[Self]: + return cls + + +_T = TypeVar("_T") + + +class B(Generic[_T]): + def __init__(self, value: _T): + self.value = value + + +class Shape1: + def set_scale(self, scale: float) -> Self: + self.scale = scale + return self + + @classmethod + def from_config(cls, config: dict[str, float]) -> Self: + return cls() + + +class Circle1(Shape1): ... + + +x1 = Shape1().set_scale(3.4) +reveal_type(x1, expected_text="Shape1") + +x2 = Circle1().set_scale(3.4) +reveal_type(x2, expected_text="Circle1") + + +class Shape2: + def set_scale(self: Self, scale: float) -> Self: + self.scale = scale + return self + + @classmethod + def from_config(cls: type[Self], config: dict[str, float]) -> Self: + return cls() + + def difference(self: Self, other: Self) -> float: ... + + def apply(self: Self, f: Callable[[Self], None]) -> None: ... + + +class Circle2(Shape2): ... + + +s2 = Shape2() +x3 = s2.set_scale(3.4) +reveal_type(x3, expected_text="Shape2") + +c2 = Circle2() +x4 = c2.set_scale(3.4) +reveal_type(x4, expected_text="Circle2") + +c2.difference(c2) +s2.difference(c2) +s2.difference(s2) + +# This should generate an error. 
+c2.difference(s2) + + +@dataclass +class LinkedList(Generic[_T]): + value: _T + next: Self | None = None + + +LinkedList[int](value=1, next=LinkedList[int](value=2)) + + +@dataclass +class OrdinalLinkedList(LinkedList[int]): + def ordinal_value(self) -> str: + return str(self.value) + + +# This should generate an error. +xs = OrdinalLinkedList(value=1, next=LinkedList[int](value=2)) + +if xs.next is not None: + xs.next = OrdinalLinkedList(value=3, next=None) + + # This should generate an error. + xs.next = LinkedList[int](value=3, next=None) + + +class Container(Generic[_T]): + value: _T + + def set_value(self, value: _T) -> Self: ... + + +def object_with_concrete_type( + int_container: Container[int], str_container: Container[str] +) -> None: + reveal_type(int_container.set_value(0), expected_text="Container[int]") + reveal_type(str_container.set_value(""), expected_text="Container[str]") + + +def object_with_generic_type(container: Container[_T], value: _T) -> Container[_T]: + return container.set_value(value) + + +class ShapeProtocol(Protocol): + def set_scale(self, scale: float) -> Self: ... 
+ + +class ReturnSelf: + scale: float = 1.0 + + def set_scale(self, scale: float) -> Self: + self.scale = scale + return self + + +class ReturnConcreteShape: + scale: float = 1.0 + + def set_scale(self, scale: float) -> Self: + self.scale = scale + return self + + +class BadReturnType: + scale: float = 1.0 + + def set_scale(self, scale: float) -> int: + self.scale = scale + return 42 + + +class ReturnDifferentClass: + scale: float = 1.0 + + def set_scale(self, scale: float) -> ReturnConcreteShape: + return ReturnConcreteShape() + + +def accepts_shape(shape: ShapeProtocol) -> None: + y = shape.set_scale(0.5) + reveal_type(y) + + +def main( + return_self_shape: ReturnSelf, + return_concrete_shape: ReturnConcreteShape, + bad_return_type: BadReturnType, + return_different_class: ReturnDifferentClass, +) -> None: + accepts_shape(return_self_shape) + accepts_shape(return_concrete_shape) + + # This should generate an error. + accepts_shape(bad_return_type) + + # This should generate an error. + accepts_shape(return_different_class) + + +class StateManager: + def __init__(self) -> None: + self.state: list[Self] = self.get_state() + + def get_state(self) -> list[Self]: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/self3.py b/python-parser/packages/pyright-internal/src/tests/samples/self3.py new file mode 100644 index 00000000..f2cd9558 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/self3.py @@ -0,0 +1,8 @@ +# This sample tests the special-case handling of Self when comparing +# two functions whose signatures differ only in the Self scope. + + +class SomeClass: + def __str__(self) -> str: ... 
+ + __repr__ = __str__ diff --git a/python-parser/packages/pyright-internal/src/tests/samples/self4.py b/python-parser/packages/pyright-internal/src/tests/samples/self4.py new file mode 100644 index 00000000..fd8914b3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/self4.py @@ -0,0 +1,15 @@ +# This sample tests the case where a method decorator uses an explicit +# type annotation for the "self" parameter. + +from typing import Callable, Generic, TypeVar, Any + +T = TypeVar("T") +S = TypeVar("S", bound="MyClass[Any]") + + +def my_generic_wrapper(f: Callable[[S], str]) -> Callable[[S], int]: ... + + +class MyClass(Generic[T]): + @my_generic_wrapper + def do_something(self) -> str: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/self5.py b/python-parser/packages/pyright-internal/src/tests/samples/self5.py new file mode 100644 index 00000000..75f5c1f2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/self5.py @@ -0,0 +1,23 @@ +# This sample tests the use of `Self` when used within a property +# or class property. + +from typing_extensions import Self # pyright: ignore[reportMissingModuleSource] + + +class A: + @property + def one(self) -> Self: ... + + @classmethod + @property + def two(cls) -> type[Self]: ... + + +class B(A): ... + + +reveal_type(A().one, expected_text="A") +reveal_type(A.two, expected_text="type[A]") + +reveal_type(B().one, expected_text="B") +reveal_type(B.two, expected_text="type[B]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/self6.py b/python-parser/packages/pyright-internal/src/tests/samples/self6.py new file mode 100644 index 00000000..12b695a0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/self6.py @@ -0,0 +1,13 @@ +# This sample tests that a Self type used within a `__new__` method does +# not preclude the use of a contravariant TypeVar within a generic class. 
+ +from typing import Self, TypeVar, Generic + +T_contra = TypeVar("T_contra", contravariant=True) + + +class MyClass(Generic[T_contra]): + def __new__(cls: type[Self]) -> Self: ... + + +MyClass[int]() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/self7.py b/python-parser/packages/pyright-internal/src/tests/samples/self7.py new file mode 100644 index 00000000..3223be8a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/self7.py @@ -0,0 +1,11 @@ +# This sample tests the case where type[Self] is returned but Self +# is expected. + +from typing import Self + + +class Foo: + @classmethod + def bar(cls) -> Self: + # This should generate an error. + return cls diff --git a/python-parser/packages/pyright-internal/src/tests/samples/self8.py b/python-parser/packages/pyright-internal/src/tests/samples/self8.py new file mode 100644 index 00000000..0bca0c32 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/self8.py @@ -0,0 +1,26 @@ +# This sample tests that a __new__ method allows for the Self +# to be associated with the provided `cls` argument rather than +# the class bound to the `__new__` method. 
+ +import enum +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + Self, + reveal_type, +) + + +class Enum1(enum.IntEnum): + def __new__(cls, value: int, doc: str) -> Self: + member = int.__new__(cls, value) + reveal_type(member, expected_text="Self@Enum1") + member._value_ = value + member.__doc__ = doc + return member + + +class MyStr(str): + pass + + +v1 = str.__new__(MyStr) +reveal_type(v1, expected_text="MyStr") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/self9.py b/python-parser/packages/pyright-internal/src/tests/samples/self9.py new file mode 100644 index 00000000..c68286f7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/self9.py @@ -0,0 +1,23 @@ +# This sample tests the case where a parent class defines a class variable +# that uses Self and a child class accesses this through self or cls. + +from typing import Self + + +class ParentA: + a: list[Self] + + +class ChildA(ParentA): + b: int + + @classmethod + def method1(cls) -> None: + reveal_type(cls.a, expected_text="list[Self@ChildA]") + reveal_type(cls.a[0], expected_text="Self@ChildA") + print(cls.a[0].b) + + def method2(self) -> None: + reveal_type(self.a, expected_text="list[Self@ChildA]") + reveal_type(self.a[0], expected_text="Self@ChildA") + print(self.a[0].b) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/sentinel1.py b/python-parser/packages/pyright-internal/src/tests/samples/sentinel1.py new file mode 100644 index 00000000..46477c2b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/sentinel1.py @@ -0,0 +1,59 @@ +# This sample tests the handling of Sentinel as described in PEP 661. + +from typing import Literal, TypeAlias +from typing_extensions import Sentinel, TypeForm # pyright: ignore[reportMissingModuleSource] + +# This should generate an error because the names don't match. 
+BAD_NAME1 = Sentinel("OTHER") + +# This should generate an error because the arg count is wrong. +BAD_CALL1 = Sentinel() + +# This should generate an error because the arg count is wrong. +BAD_CALL2 = Sentinel("BAD_CALL2", 1) + +# This should generate an error because the arg type is wrong. +BAD_CALL3 = Sentinel(1) + + +MISSING = Sentinel("MISSING") + +type TA1 = int | MISSING + +TA2: TypeAlias = int | MISSING + +TA3 = int | MISSING + +# This should generate an error because Literal isn't appropriate here. +x: Literal[MISSING] + + +def func1(value: int | MISSING) -> None: + if value is MISSING: + reveal_type(value, expected_text="MISSING") + else: + reveal_type(value, expected_text="int") + + +def func2(value=MISSING) -> None: + pass + + +reveal_type(func2, expected_text="(value: Unknown | MISSING = MISSING) -> None") + + +def test_typeform[T](v: TypeForm[T]) -> TypeForm[T]: ... + + +reveal_type(test_typeform(MISSING), expected_text="TypeForm[MISSING]") + + +def func3(x: Literal[0, 3, "hi"] | MISSING) -> None: + if x: + reveal_type(x, expected_text="MISSING | Literal[3, 'hi']") + else: + reveal_type(x, expected_text="Literal[0]") + + +t1 = type(MISSING) +reveal_type(t1, expected_text="type[MISSING]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/slice1.py b/python-parser/packages/pyright-internal/src/tests/samples/slice1.py new file mode 100644 index 00000000..c5245518 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/slice1.py @@ -0,0 +1,15 @@ +# This sample tests the evaluation of slice types. 
+ + +class ClassA: + def __getitem__[T](self, item: T) -> T: + return item + + +a1 = ClassA() + +reveal_type(a1[::], expected_text="slice[None, None, None]") +reveal_type( + a1[1:"a":False], expected_text="slice[Literal[1], Literal['a'], Literal[False]]" +) +reveal_type(a1[:3:5.0], expected_text="slice[None, Literal[3], float]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/slots1.py b/python-parser/packages/pyright-internal/src/tests/samples/slots1.py new file mode 100644 index 00000000..d7116514 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/slots1.py @@ -0,0 +1,63 @@ +# This sample tests the type checker's validation of instance +# variables that are declared in the __slots__ attribute. + + +class NoSlots1: + def __init__(self): + self.x = 1 + + +class NoSlots2: + # Only lists and tuples of simple strings are supported, so this + # will be treated as though there are no slots. + __slots__ = {"aaa": 3} + + def __init__(self): + self.x = 1 + + +class NoSlots3: + # Only lists and tuples of simple strings are supported, so this + # will be treated as though there are no slots. 
+ __slots__ = ("aaa", f"test{3 + 4}") + + def __init__(self): + self.x = 1 + + +class Slots1(object): + __slots__ = ("bbb", "ccc") + + def __init__(self): + self.bbb = 1 + self.ccc = 1 + self.prop = 1 + + # This should generate an error + self.ddd = 1 + + @property + def prop(self): + pass + + @prop.setter + def prop(self, val: int): + pass + + +class Slots1_1(Slots1): + __slots__ = ["ddd", "eee"] + + def __init__(self): + self.bbb = 1 + self.ccc = 1 + self.ddd = 1 + + # This should generate an error + self.fff = 1 + + +class NoSlots1_1(Slots1, NoSlots2): + def __init__(self): + self.bbb = 1 + self.fff = 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/slots2.py b/python-parser/packages/pyright-internal/src/tests/samples/slots2.py new file mode 100644 index 00000000..0062a5d2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/slots2.py @@ -0,0 +1,38 @@ +# This sample tests the type checker's validation of class variables +# whose name conflict with a __slots__ entry. + +from dataclasses import dataclass, field + + +class NoSlots1: + pass + + +class Slots1(NoSlots1): + __slots__ = "aaa", "bbb", "ccc" + + # This should generate an error + aaa = 3 + + # This should generate an error + bbb: int = 3 + + # This should generate an error + (ccc, ddd) = 3, 4 + + eee = 5 + + +class Slots2(Slots1): + __slots__ = () + + aaa = 4 + + +@dataclass +class Slots3: + __slots__ = ("values",) + + # This should not generate an error because class variables + # in a dataclass are replaced by instance variables. 
+ values: list[int] = field(default_factory=list) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/slots3.py b/python-parser/packages/pyright-internal/src/tests/samples/slots3.py new file mode 100644 index 00000000..86d5909f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/slots3.py @@ -0,0 +1,32 @@ +# This sample tests the case where a descriptor is assigned to a +# class variable but not included in __slots__. + +from typing import Any + + +class MyDescriptor: + def __init__(self, *, slot: str): ... + + def __set__(self, instance: object, value: object) -> None: ... + + def __get__(self, instance: object, owner: Any) -> Any: ... + + +class ClassA: + foo = MyDescriptor(slot="_foo_descriptor") + __slots__ = "_foo_descriptor" + + def __init__(self, foo: int) -> None: + self.foo = foo + + +class ClassBParent: + __slots__ = ("bar1",) + foo = MyDescriptor(slot="_foo_descriptor") + + +class ClassB(ClassBParent): + __slots__ = ("bar2",) + + def repro(self, foo: int) -> None: + self.foo = foo diff --git a/python-parser/packages/pyright-internal/src/tests/samples/slots4.py b/python-parser/packages/pyright-internal/src/tests/samples/slots4.py new file mode 100644 index 00000000..30f9c224 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/slots4.py @@ -0,0 +1,33 @@ +# This sample tests the case where a descriptor is assigned to an +# instance variable that is included in __slots__. + +from typing import Self, overload + + +class A: + pass + + +class Descriptor: + name: str + + @overload + def __get__(self, obj: None, objtype: type[A] | None = None) -> Self: ... + + @overload + def __get__(self, obj: A, objtype: type[A] | None = None) -> int: ... + + def __get__(self, obj: A | None, objtype: type[A] | None = None) -> Self | int: ... + + def __set__(self, obj: A, value: int): ... 
+ + +class B: + __slots__ = "descriptor" + + def __init__(self, descriptor: Descriptor): + self.descriptor = descriptor + + +v1 = B(descriptor=Descriptor()) +reveal_type(v1.descriptor.name, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver1.py b/python-parser/packages/pyright-internal/src/tests/samples/solver1.py new file mode 100644 index 00000000..bce1d339 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver1.py @@ -0,0 +1,26 @@ +# This sample tests the constraint solver's handling of callables +# whose input parameters are contravariant. + +from typing import TypeVar, Callable + +T = TypeVar("T") +U = TypeVar("U") +V = TypeVar("V") + + +def compose2(f: Callable[[T], U], g: Callable[[U], V]) -> Callable[[T], V]: + def composition(x: T) -> V: + return g(f(x)) + + return composition + + +def add_one(x: int) -> int: + return x + 1 + + +def make_str(x: int) -> str: + return str(x) + + +add_two: Callable[[int], str] = compose2(add_one, make_str) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver10.py b/python-parser/packages/pyright-internal/src/tests/samples/solver10.py new file mode 100644 index 00000000..a2fecdcb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver10.py @@ -0,0 +1,24 @@ +# This sample checks the handling of constraint solving +# in the case where list and tuple expressions are being +# matched, and those expressions contain literal values. +# We need to validate that the type inference for lists +# is not over-narrowing when matching these literals. + +from typing import Callable, TypeVar + + +_T = TypeVar("_T") + + +def extend_if(xs: list[_T], ys: list[tuple[_T, bool]]) -> list[_T]: + raise NotImplementedError() + + +extend_if(["foo"], [("bar", True), ("baz", True)]) + + +def func1(value: _T) -> Callable[[_T], None]: ... 
+ + +def func2() -> Callable[[bool], None]: + return func1(True) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver11.py b/python-parser/packages/pyright-internal/src/tests/samples/solver11.py new file mode 100644 index 00000000..b18281f7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver11.py @@ -0,0 +1,34 @@ +# This sample tests the constraint solver when a callable type is involved. + +# pyright: strict + +from typing import Callable, Literal, TypeVar + + +def filter_fn(value: object): ... + + +v1 = filter(filter_fn, [1, 2, 3]) +reveal_type(v1, expected_text="filter[int]") + +v2 = filter(filter_fn, {1, 2}) +reveal_type(v2, expected_text="filter[int]") + +v3 = filter(filter_fn, {1: 2}) +reveal_type(v3, expected_text="filter[int]") + + +_T = TypeVar("_T") +Animal = Literal["cat"] + + +def func(v: Callable[[], _T]) -> _T: ... + + +x1: dict[Animal, int] = func(lambda: {"cat": 0}) + + +def func1(factory: Callable[[], _T]) -> _T: ... + + +x2: set[int] = func1(lambda: set()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver12.py b/python-parser/packages/pyright-internal/src/tests/samples/solver12.py new file mode 100644 index 00000000..ffd248a0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver12.py @@ -0,0 +1,15 @@ +# This sample validates that a generic type can be used to solve a +# TypeVar in a generic method where the "self" parameter is itself generic. + +from typing import TypeVar + +_T1 = TypeVar("_T1", bound="ClassA") +_T2 = TypeVar("_T2", bound="ClassA") + + +class ClassA: + def chain(self: _T1) -> _T1: ... 
+ + +def func1(p1: _T2) -> _T2: + return p1.chain() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver13.py b/python-parser/packages/pyright-internal/src/tests/samples/solver13.py new file mode 100644 index 00000000..40b50ecb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver13.py @@ -0,0 +1,30 @@ +# This sample tests that type variables chain properly. + +from typing import Generic, Iterable, Iterator, TypeVar +from itertools import chain + +T = TypeVar("T") + + +class ClassA(Iterator[T]): + def __init__(self, it: Iterable[T]) -> None: ... + + def __next__(self) -> T: ... + + def __iter__(self) -> Iterator[T]: ... + + +def func1(val: Iterable[T]) -> Iterator[T]: + return ClassA(val) + + +def func2(val: Iterable[Iterable[T]]) -> Iterator[T]: + return chain(*val) + + +class ClassB(Generic[T]): + def __init__(self, xs: Iterable[T]) -> None: + self.xs = xs + + def indexed(self) -> "ClassB[tuple[int, T]]": + return ClassB(enumerate(self.xs)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver14.py b/python-parser/packages/pyright-internal/src/tests/samples/solver14.py new file mode 100644 index 00000000..38508e4c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver14.py @@ -0,0 +1,54 @@ +# This sample tests the TypeVar constraint solver in cases where +# generic protocols are used. + +from datetime import timedelta +from typing import Any, Generic, Protocol, TypeVar, overload + +_X_co = TypeVar("_X_co", covariant=True) +_X_contra = TypeVar("_X_contra", contravariant=True) + + +class SupportsDivMod(Protocol, Generic[_X_contra, _X_co]): + def __divmod__(self, __other: _X_contra) -> _X_co: ... + + +class SupportsRDivMod(Protocol[_X_contra, _X_co]): + def __rdivmod__(self, __other: _X_contra) -> _X_co: ... + + +@overload +def divmod(__x: SupportsDivMod[_X_contra, _X_co], __y: _X_contra) -> _X_co: ... 
+ + +@overload +def divmod(__x: _X_contra, __y: SupportsRDivMod[_X_contra, _X_co]) -> _X_co: ... + + +def divmod(__x: Any, __y: Any) -> Any: ... + + +reveal_type( + divmod(timedelta(minutes=90), timedelta(hours=1)), + expected_text="tuple[int, timedelta]", +) +reveal_type(divmod(3, 4), expected_text="tuple[int, int]") +reveal_type(divmod(3.6, 4), expected_text="tuple[float, float]") +reveal_type(divmod(3, 4.5), expected_text="tuple[float, float]") + + +class SupportsLessThan(Protocol): + def __lt__(self, __other: Any) -> bool: ... + + +SupportsLessThanT = TypeVar("SupportsLessThanT", bound=SupportsLessThan) + + +def max2(__arg1: SupportsLessThanT, __arg2: SupportsLessThanT) -> SupportsLessThanT: ... + + +def min2(__arg1: SupportsLessThanT, __arg2: SupportsLessThanT) -> SupportsLessThanT: ... + + +def func1(): + x = max2(1, min2(1, 4.5)) + reveal_type(x, expected_text="float") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver15.py b/python-parser/packages/pyright-internal/src/tests/samples/solver15.py new file mode 100644 index 00000000..1682cbe4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver15.py @@ -0,0 +1,37 @@ +# This sample tests the handling of a bound TypeVar that is used +# in a Type[X] statement. + +from typing import Callable, Generic, TypeVar + + +class Base: ... + + +T = TypeVar("T", bound=Base) + + +def register(state_name: str, state: type[T]): ... + + +def register_state(state_name: str) -> Callable[[type[T]], type[T]]: + def decorator(state: type[T]) -> type[T]: + register(state_name, state) + return state + + return decorator + + +class F: ... + + +E = TypeVar("E", bound=F) + + +def coercer_method(value: E | str, enum: type[E]) -> E: ... 
+ + +class C(Generic[E]): + e_type: type[E] + + def coerce(self, e_type: type[E], value: E | str) -> E: + return coercer_method(value, self.e_type) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver16.py b/python-parser/packages/pyright-internal/src/tests/samples/solver16.py new file mode 100644 index 00000000..a4c0daec --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver16.py @@ -0,0 +1,29 @@ +# This sample tests the case where a TypeVar is used in the parameter +# of a callable (and is hence treated as contravariant). + +from typing import Callable, Sequence, TypeVar + +T = TypeVar("T") +U = TypeVar("U") + + +def func1(value: T) -> T: ... + + +def func2(values: Sequence[T]) -> T: ... + + +def func3(value: T, callback: Callable[[T], U]) -> U: ... + + +def func4(values: Sequence[T], callback: Callable[[Sequence[T]], U]) -> U: ... + + +reveal_type(func3(1.0, func1), expected_text="float") +reveal_type(func4([1.0], func1), expected_text="Sequence[float]") +reveal_type(func4([1.0], func2), expected_text="float") + + +def func5(obj: object, cb: Callable[[], Callable[[T], object]]) -> None: + # This should generate an error. + cb()(obj) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver17.py b/python-parser/packages/pyright-internal/src/tests/samples/solver17.py new file mode 100644 index 00000000..fa395cb4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver17.py @@ -0,0 +1,28 @@ +# This sample tests the case where a source and dest type +# are unions and are being compared using invariant constraints +# and the dest type contains a type variable. + +from typing import Pattern, Sequence, TypeVar + + +_T = TypeVar("_T") + + +def func1(v: list[_T | None]) -> _T: ... + + +def func2(v: list[_T | str | None]) -> _T: ... 
+ + +v1: list[int | None] = [1, None] +r1 = func1(v1) +reveal_type(r1, expected_text="int") + +v2: list[int | str | None] = [1, None] +r2_1 = func1(v2) +reveal_type(r2_1, expected_text="int | str") + +r2_2 = func2(v2) +reveal_type(r2_2, expected_text="int") + +v3: list[str | Sequence[Pattern]] = [""] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver18.py b/python-parser/packages/pyright-internal/src/tests/samples/solver18.py new file mode 100644 index 00000000..5c2e9b1a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver18.py @@ -0,0 +1,74 @@ +# This sample tests the handling of functions that include TypeVars +# within unions, where the TypeVar may not be solved during constraint +# solving. + +from typing import Awaitable, Callable, Generic, ParamSpec, TypeVar + + +_T = TypeVar("_T") +_P = ParamSpec("_P") + + +def func1(x: str | None | _T) -> str | None | _T: ... + + +reveal_type(func1(None), expected_text="str | None") +reveal_type(func1("hi"), expected_text="str | None") +reveal_type(func1(3), expected_text="str | int | None") + + +def func2(x: str | None | _T) -> list[str | None | _T]: ... + + +reveal_type(func2(None), expected_text="list[str | None]") +reveal_type(func2("hi"), expected_text="list[str | None]") +reveal_type(func2(3), expected_text="list[str | int | None]") + + +Callback = Callable[..., Awaitable[None]] +_C = TypeVar("_C", bound=Callback) + + +class ClassA(Generic[_C]): ... + + +def decorator1() -> Callable[[_C | ClassA[_C]], ClassA[_C]]: ... + + +@decorator1() +async def func3() -> None: ... + + +def func4(l: list): + return next(iter(l), None) + + +val = func4([]) +reveal_type(val, expected_text="Unknown | None") + + +def func5() -> Callable[[Callable[_P, _T]], Callable[_P, _T]]: ... + + +def func6(x: int) -> str: ... 
+ + +reveal_type(func5()(func6), expected_text="(x: int) -> str") + + +class ClassB(Generic[_P]): + def method1(self, val: str, *args: _P.args, **kwargs: _P.kwargs) -> None: + pass + + +def decorator2() -> Callable[[Callable[_P, None]], ClassB[_P]]: ... + + +@decorator2() +def func7(y: int) -> None: + pass + + +reveal_type(func7, expected_text="ClassB[(y: int)]") +reveal_type(func7.method1, expected_text="(val: str, y: int) -> None") +reveal_type(func7.method1("", 1), expected_text="None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver19.py b/python-parser/packages/pyright-internal/src/tests/samples/solver19.py new file mode 100644 index 00000000..972e6c21 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver19.py @@ -0,0 +1,27 @@ +# This sample tests the handling of Type[T] matching and replacement. + +from typing import Generator, TypeVar + + +class LI(list[int]): + pass + + +class LS(list[str]): + pass + + +_T1 = TypeVar("_T1") + + +class MyList(list[LI | LS]): + def get_generator(self, *, type_: type[_T1]) -> Generator[_T1, None, None]: + for elem in self: + if isinstance(elem, type_): + yield elem + + +def same(other: LI | LS): + for elem in MyList().get_generator(type_=other.__class__): + for v in elem: + print(v) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver2.py b/python-parser/packages/pyright-internal/src/tests/samples/solver2.py new file mode 100644 index 00000000..c57fb1a5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver2.py @@ -0,0 +1,25 @@ +# This sample validates constraint solving for protocol +# classes where the protocol is partially specialized with a +# type variable. 
+ +# pyright: strict + +from typing import Callable, Iterator, Protocol, TypeVar + +_T = TypeVar("_T", covariant=True) + + +class ProtoA(Iterator[_T], Protocol): + pass + + +def decorator1(func: Callable[..., Iterator[_T]]) -> Callable[..., ProtoA[_T]]: ... + + +@decorator1 +def func1() -> Iterator[str]: + yield "" + + +a = func1() +b: ProtoA[str] = a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver20.py b/python-parser/packages/pyright-internal/src/tests/samples/solver20.py new file mode 100644 index 00000000..e9e9f144 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver20.py @@ -0,0 +1,16 @@ +# This sample tests the case where a type variable is bound to a union. + +from typing import Callable, TypeVar + +T = TypeVar("T") +IntStr = str | int +T1 = TypeVar("T1", bound=IntStr) +T2 = TypeVar("T2", bound=IntStr) + + +def custom_eq(x: IntStr, y: IntStr) -> bool: + return True + + +def eq(f: Callable[[T1], T2], x: T1, y: T2) -> bool: + return custom_eq(f(x), y) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver21.py b/python-parser/packages/pyright-internal/src/tests/samples/solver21.py new file mode 100644 index 00000000..cd627e8a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver21.py @@ -0,0 +1,26 @@ +# This sample tests for proper handling of bound TypeVars. + +from typing import Generic, TypeVar + + +class A: ... + + +class B: ... + + +_T3 = TypeVar("_T3", bound=A | B) + + +class Registry(Generic[_T3]): + def __init__(self) -> None: + self.registry = {} + + @property + def registry(self) -> dict[str, _T3]: ... + + @registry.setter + def registry(self, registry: dict[str, _T3]) -> None: ... 
+ + def get(self, _id: str) -> _T3 | None: + return self.registry.get(_id) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver22.py b/python-parser/packages/pyright-internal/src/tests/samples/solver22.py new file mode 100644 index 00000000..272f322b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver22.py @@ -0,0 +1,17 @@ +# This sample tests the special-case handling of "type" when used +# as an assignment for Type[T]. + +from typing import Any, TypeVar + +T = TypeVar("T") + + +def f(x: type[T]) -> T: ... + + +def g() -> type | Any: ... + + +y = g() + +f(y) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver23.py b/python-parser/packages/pyright-internal/src/tests/samples/solver23.py new file mode 100644 index 00000000..defdfed1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver23.py @@ -0,0 +1,18 @@ +# This sample tests that invariant type variables are enforced. + +from typing import Hashable + + +def func1(x: list[Hashable]): ... + + +def func2(x: list[object]): ... + + +v1: list[int] = [1] + +# This should generate an error. +func1(v1) + +# This should generate an error. +func2(v1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver24.py b/python-parser/packages/pyright-internal/src/tests/samples/solver24.py new file mode 100644 index 00000000..1f6881ef --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver24.py @@ -0,0 +1,58 @@ +# This sample tests the case where the constraint solver's solution involves +# a union of type variables. 
+ +from os import PathLike +from typing import AnyStr, Generic, Iterable, Iterator, Protocol, TypeAlias, TypeVar + +V = TypeVar("V") +V_co = TypeVar("V_co", covariant=True) +T = TypeVar("T") +U = TypeVar("U") + + +class ClassA(Generic[V_co]): + pass + + +class ClassB(Generic[V_co]): + def __init__(self, x: ClassA[V_co]): + pass + + +def func1(a: ClassA[V], b: ClassA[U], c: bool) -> ClassB[V | U]: + x: ClassA[V | U] = a + reveal_type(x, expected_text="ClassA[V@func1]") + if c: + x = b + reveal_type(x, expected_text="ClassA[U@func1]") + r = ClassB(x) + + reveal_type(r, expected_text="ClassB[U@func1 | V@func1]") + return r + + +class ClassC(Generic[AnyStr]): ... + + +class ClassD(Iterator[ClassC[AnyStr]], Protocol): ... + + +GenericPath: TypeAlias = AnyStr | PathLike[AnyStr] + + +def func2(iter: Iterable[object]) -> bool: ... + + +def func3(path: GenericPath[AnyStr]) -> ClassD[AnyStr]: ... + + +def func4(val: str): + func2(func3(val)) + + +def func5(a: dict[T, U], b: list[T | U]): + pass + + +def func6(a: dict[str, int], b: list[str | int]): + func5(a, b) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver25.py b/python-parser/packages/pyright-internal/src/tests/samples/solver25.py new file mode 100644 index 00000000..6fee6f15 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver25.py @@ -0,0 +1,25 @@ +# This sample tests the handling of invariant union type compatibility +# checks that include a callable with a type variable. 
+ +from typing import Callable, TypeVar + + +T = TypeVar("T") + + +def str2int(a: str) -> int: + return int(a) + + +def int2str(b: int) -> str: + return str(b) + + +def func1(cb: Callable[[str], T], val: list[Callable[[T], str] | None]): + pass + + +func1(str2int, [int2str]) +func1(str2int, [None]) +func1(str2int, []) +func1(str2int, [int2str, None]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver26.py b/python-parser/packages/pyright-internal/src/tests/samples/solver26.py new file mode 100644 index 00000000..a087a2f9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver26.py @@ -0,0 +1,10 @@ +# This sample tests that when an unspecialized generic class type is used as +# a constraint in the constraint solver, default type arguments (typically +# `Unknown`) are used. + +from collections import defaultdict + + +def func1() -> None: + d1 = defaultdict(list) + reveal_type(d1, expected_text="defaultdict[Unknown, list[Unknown]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver27.py b/python-parser/packages/pyright-internal/src/tests/samples/solver27.py new file mode 100644 index 00000000..f1171bb0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver27.py @@ -0,0 +1,52 @@ +# This sample tests that the assignment of an instantiable generic class +# without supplied type arguments is given default type arguments (typically +# Unknown) when the TypeVar is solved. 
+ +from typing import Any, Callable, Generic, Iterable, TypeVar, reveal_type + +T = TypeVar("T") + + +def deco1(t: type[T], val: Any) -> T: + return val + + +v1 = deco1(dict, {"foo": "bar"}) +reveal_type(v1, expected_text="dict[Unknown, Unknown]") + + +def deco2(t: T, val: Any) -> T: + return val + + +v2 = deco2(dict, {"foo": "bar"}) +reveal_type(v2, expected_text="type[dict[Unknown, Unknown]]") + + +def deco3(t: type[T]) -> type[T]: + return t + + +@deco3 +class ClassA(Generic[T]): + pass + + +reveal_type(ClassA[int], expected_text="type[ClassA[int]]") + + +def deco4() -> Callable[[type[T]], type[T]]: ... + + +@deco4() +class ClassB: + def get_features(self) -> list[str]: ... + + +def func1(specs: Iterable[str] | ClassB) -> None: + if isinstance(specs, ClassB): + features = specs.get_features() + else: + features = specs + + set(features) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver28.py b/python-parser/packages/pyright-internal/src/tests/samples/solver28.py new file mode 100644 index 00000000..0b78dadd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver28.py @@ -0,0 +1,26 @@ +# This sample tests the case where a TypeVar has a bound of `type` and +# is assigned to a `type[T]`. + +from typing import TypeVar + +T = TypeVar("T") +S = TypeVar("S", bound=type) + + +def func1(x: type[T]) -> type[T]: + return x + + +def func2(x: S) -> S: + v1 = func1(x) + reveal_type(v1, expected_text="Unknown") + return v1 + + +def func3[R: int](num: type[R]) -> None: ... + + +class A[T: type[int]](tuple[T]): ... 
+ + +func3(*A[type[int]]()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver29.py b/python-parser/packages/pyright-internal/src/tests/samples/solver29.py new file mode 100644 index 00000000..e1498c86 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver29.py @@ -0,0 +1,32 @@ +# This sample tests the case where a union of two "bare" TypeVars are +# used in the annotation of a function parameter. + +from typing import Any, TypeVar, Callable + +S = TypeVar("S") +T = TypeVar("T") + + +def accepts_bool(b: bool) -> None: ... + + +def accepts_int(i: int) -> None: ... + + +def func1(x: S | T, l2: Callable[[S], Any], l3: Callable[[T], Any]) -> tuple[S, T]: ... + + +def func2(x: T | S, l2: Callable[[S], Any], l3: Callable[[T], Any]) -> tuple[S, T]: ... + + +x1 = func1(0, accepts_int, accepts_bool) +reveal_type(x1, expected_text="tuple[int, bool]") + +x2 = func1(True, accepts_int, accepts_bool) +reveal_type(x2, expected_text="tuple[int, bool]") + +x3 = func1(0, accepts_int, accepts_bool) +reveal_type(x3, expected_text="tuple[int, bool]") + +x4 = func1(True, accepts_int, accepts_bool) +reveal_type(x4, expected_text="tuple[int, bool]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver3.py b/python-parser/packages/pyright-internal/src/tests/samples/solver3.py new file mode 100644 index 00000000..a9176a3e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver3.py @@ -0,0 +1,21 @@ +# This sample tests constraint solving in a situation +# where the type has a type argument that is +# a union of types that are subclasses of each other. + +from typing import Iterable, Iterator, Literal, TypeVar + +_T = TypeVar("_T") + + +def filter(__function: None, __iterable: Iterable[_T | None]) -> Iterator[_T]: ... + + +# In this case, bool is a subclass of int, so the TypeVar +# matching for _T should evaluate to Iterator[int]. 
+list_of_bools_and_ints: list[Literal[False] | int] = [] +generator_of_ints = filter(None, list_of_bools_and_ints) + +a: int = next(generator_of_ints) + +# This should generate an error. +b: bool = next(generator_of_ints) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver30.py b/python-parser/packages/pyright-internal/src/tests/samples/solver30.py new file mode 100644 index 00000000..46a49fa2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver30.py @@ -0,0 +1,35 @@ +# This sample tests the case where a deeply nested set of calls requires +# the use of bidirectional type inference to evaluate the type of a lambda. + +from typing import Any, Callable, Iterable, Iterator, Protocol, TypeVar + +X = TypeVar("X") +Y = TypeVar("Y") +Z = TypeVar("Z") + + +class Item: + foo: bool + + +items = [Item()] + + +def func1(a: Iterable[X]) -> X: ... + + +def func2(a: Iterable[Y]) -> Iterable[Y]: ... + + +class func3(Iterator[Z]): + def __init__(self, a: Callable[[Z], Any], b: Iterable[Z]) -> None: ... + + def __next__(self) -> Z: ... + + +def func4(a: Callable[[Z], Any], b: Iterable[Z]) -> Iterator[Z]: ... + + +func1(func2(func3(lambda x: reveal_type(x.foo, expected_text="bool"), items))) + +func1(func2(func4(lambda x: reveal_type(x.foo, expected_text="bool"), items))) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver31.py b/python-parser/packages/pyright-internal/src/tests/samples/solver31.py new file mode 100644 index 00000000..5209ed76 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver31.py @@ -0,0 +1,18 @@ +# This sample tests the case where an expected type contains a union, +# as in the case where the list.__add__ method returns the type +# list[_T | _S]. + +from typing import Generic, Iterable, TypeVar + +T = TypeVar("T") + + +class A(Generic[T]): + def __init__(self, i: Iterable[T]): ... + + +def func1(i: Iterable[T]) -> T: ... 
+ + +reveal_type(func1([0] + [""]), expected_text="str | int") +reveal_type(A([0] + [""]), expected_text="A[str | int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver32.py b/python-parser/packages/pyright-internal/src/tests/samples/solver32.py new file mode 100644 index 00000000..82105b0f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver32.py @@ -0,0 +1,36 @@ +# This sample tests a complex interplay between protocols and bound TypeVars. +# This case involves numpy types and has regressed in the past. + + +from typing import Protocol, Self, TypeVar + +TD = TypeVar("TD", bound="TimeDeltaProto") +DT = TypeVar("DT", bound="DateTimeProto") + + +class TimeDeltaProto(Protocol): + def __pos__(self) -> Self: ... + + +class DateTimeProto(Protocol[TD]): + def __add__(self, other: TD, /) -> Self: ... + + def __sub__(self, other: Self, /) -> TD: ... + + +class TimeDelta: + def __pos__(self) -> Self: ... + + +class DateTime: + def __add__(self, other: bool | int) -> Self: ... + + def __sub__(self, other: "DateTime") -> TimeDelta: ... + + +def func1(__val: DT) -> DT: + return __val + + +dt = DateTime() +reveal_type(func1(dt), expected_text="DateTime") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver33.py b/python-parser/packages/pyright-internal/src/tests/samples/solver33.py new file mode 100644 index 00000000..efce2cd1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver33.py @@ -0,0 +1,9 @@ +# This sample tests a case that resulted in a false positive in the past. 
+ +# pyright: strict + +import operator + +keys = ("+", "-") +values = (operator.pos, operator.neg) +mapping = dict(zip(keys, values)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver34.py b/python-parser/packages/pyright-internal/src/tests/samples/solver34.py new file mode 100644 index 00000000..7af73f07 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver34.py @@ -0,0 +1,21 @@ +# This sample tests the case where a generic function returns a Callable type +# that is specialized with unsolved type variables. + +from collections.abc import Container +from typing import TypeVar, Callable + + +T = TypeVar("T") +VT = TypeVar("VT") + + +def func1(container: Container[T]) -> Callable[[T], bool]: ... + + +def func2(a: T, b: Container[VT]) -> T: + cmp = func1(b) + + # This should generate an error. + cmp(a) + + return a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver35.py b/python-parser/packages/pyright-internal/src/tests/samples/solver35.py new file mode 100644 index 00000000..1314bb17 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver35.py @@ -0,0 +1,20 @@ +# This sample tests the case where the constraint solver is asked to +# solve a TypeVar that is in an invariant context. + +from typing import TypeVar + +T1 = TypeVar("T1") +T2 = TypeVar("T2") + + +def func1(v1: T1, v2: T2, v1_list: list[T1], v2_list: list[T2]): ... + + +def func2(v1: int, v2: str, v1_list: list[int], v2_list: list[str]): + func1(v1, v2, v1_list, v2_list) + + # This should generate an error because the last two arguments are swapped. + func1(v2, v1, v1_list, v2_list) + + # This should generate an error because the last two arguments are swapped. 
+ func1(v1_list=v1_list, v2_list=v2_list, v1=v2, v2=v1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver36.py b/python-parser/packages/pyright-internal/src/tests/samples/solver36.py new file mode 100644 index 00000000..1a77444d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver36.py @@ -0,0 +1,31 @@ +# This sample tests that upper bound constraints are honored when solving +# a type variable. + +from typing import Generic, SupportsAbs, TypeVar + + +T = TypeVar("T") +P = TypeVar("P", bound=SupportsAbs) + + +class BaseContainer(Generic[T]): + item: T + + +class Container(BaseContainer[P]): + def __init__(self, obj: P) -> None: + self.item = obj + + +def func1(obj: BaseContainer[T]) -> T: + return obj.item + + +func1(Container(1)) + +func1(Container(1.0)) + + +# This should generate an error because str isn't compatible with +# the bound of the TypeVar in Container. +func1(Container("")) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver37.py b/python-parser/packages/pyright-internal/src/tests/samples/solver37.py new file mode 100644 index 00000000..98881bd7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver37.py @@ -0,0 +1,22 @@ +# This sample tests a complex TypeVar unification scenario. + +from typing import Callable, Generic, TypeVar + +A = TypeVar("A") +B = TypeVar("B") + + +class Gen(Generic[A]): ... + + +def func1(x: A) -> A: ... + + +def func2(x: Gen[A], y: A) -> Gen[Gen[A]]: ... + + +def func3(x: Gen[Gen[A]]) -> Gen[A]: + return func4(x, func1, func2) + + +def func4(x: Gen[A], id_: Callable[[B], B], step: Callable[[A, B], Gen[A]]) -> A: ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver38.py b/python-parser/packages/pyright-internal/src/tests/samples/solver38.py new file mode 100644 index 00000000..442bf184 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver38.py @@ -0,0 +1,37 @@ +# This sample tests a complex TypeVar unification scenario. + +from typing import Protocol, TypeVar + +A = TypeVar("A", contravariant=True) +B = TypeVar("B", covariant=True) +T = TypeVar("T") +U = TypeVar("U") +V = TypeVar("V") + + +class Getter(Protocol[A, B]): + def __call__(self, x: A, /) -> B: ... + + +class PolymorphicListItemGetter(Protocol): + def __call__(self, l: list[T], /) -> T: ... + + +def compose(get1: Getter[T, U], get2: Getter[U, V]) -> Getter[T, V]: ... + + +class HasMethod(Protocol): + @property + def method(self) -> int: ... + + +def get_value(x: HasMethod) -> int: ... + + +def upcast(x: PolymorphicListItemGetter) -> Getter[list[HasMethod], HasMethod]: + return x + + +def test(poly_getter: PolymorphicListItemGetter): + compose(poly_getter, get_value) + compose(upcast(poly_getter), get_value) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver39.py b/python-parser/packages/pyright-internal/src/tests/samples/solver39.py new file mode 100644 index 00000000..d736f1d0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver39.py @@ -0,0 +1,11 @@ +# This sample tests the case where an overloaded function is passed as +# an argument and the overloads cannot be filtered during the first +# pass through the arguments. 
+ +from functools import reduce +from operator import getitem +from typing import Any + + +def deep_getitem(data: dict[str, Any], attr: str) -> Any: + return reduce(getitem, attr.split("."), data) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver4.py b/python-parser/packages/pyright-internal/src/tests/samples/solver4.py new file mode 100644 index 00000000..f2bf2149 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver4.py @@ -0,0 +1,11 @@ +# This sample validates that the type checker properly +# specializes a type for an unbound method (in this case, +# the "keys" method on "dict") based on the provided "self" +# argument. + +v1: dict[str, str] = {} + +# This should not result in an "Unknown", so no +# error should be generated. +result = dict.keys(v1) +reveal_type(result, expected_text="dict_keys[Unknown, Unknown]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver40.py b/python-parser/packages/pyright-internal/src/tests/samples/solver40.py new file mode 100644 index 00000000..3d7da81c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver40.py @@ -0,0 +1,15 @@ +# This sample involves solving type variables that provide a lambda +# its expected type. + +# pyright: strict + +from typing import Callable, TypeVar + +T = TypeVar("T") +U = TypeVar("U") + + +def func1(lst: list[T], init: U, f: Callable[[U, T], U]) -> U: ... + + +y = func1([1], 1, lambda x, y: x * y) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver41.py b/python-parser/packages/pyright-internal/src/tests/samples/solver41.py new file mode 100644 index 00000000..6c3917c3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver41.py @@ -0,0 +1,13 @@ +# This sample tests the case where a TypeVar is used in a contravariant +# and covariant position and the contravariant position involves a +# union with other types. 
+ +from typing import Callable +from decimal import Decimal + + +def func1[T, U](func: Callable[[str | T], U], d: T) -> U: ... + + +func1(float, d="1.1") +func1(Decimal, d="1.1") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver42.py b/python-parser/packages/pyright-internal/src/tests/samples/solver42.py new file mode 100644 index 00000000..b20e0b1c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver42.py @@ -0,0 +1,21 @@ +# This sample tests the case involving assignment to a union that contains +# multiple instances of the same TypeVar. + +from typing import TypeVar + + +T = TypeVar("T") + + +def func1(x: tuple[T, list[T]] | list[T]) -> None: ... + + +def func2(x: tuple[T, list[T]] | None) -> None: ... + + +def test1(list_of_int: list[int]): + # This should generate an error. + func1((None, list_of_int)) + + # This should generate an error. + func2((None, list_of_int)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver43.py b/python-parser/packages/pyright-internal/src/tests/samples/solver43.py new file mode 100644 index 00000000..08cf561a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver43.py @@ -0,0 +1,28 @@ +# This sample tests a case involving nested protocol types that was +# not working in a previous version of the type checker. + +from typing import Protocol + + +class Proto1[T](Protocol): + x: T + + +class Proto2[T](Protocol): + @staticmethod + def a() -> Proto1[T]: ... + @classmethod + def b(cls) -> list[T]: ... + + +class C[T]: + @staticmethod + def a() -> T: ... + @classmethod + def b[S](cls: type[Proto2[S]]) -> list[S]: ... + + +def test[S](x: type[Proto2[S]]): ... 
+ + +test(C[Proto1[int]]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver44.py b/python-parser/packages/pyright-internal/src/tests/samples/solver44.py new file mode 100644 index 00000000..31735645 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver44.py @@ -0,0 +1,13 @@ +# This sample tests the case where the solver generates an unsolved +# unification variable that has been specialized into a conditional type. + +from typing import Callable, Iterable, Self + + +class map[S]: + def __new__[T](cls, func: Callable[[T], S], iter1: Iterable[T]) -> Self: ... + + +def func(a: list[int | None]): + b = map(lambda x: x or 0, a) + reveal_type(b, expected_text="map[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver45.py b/python-parser/packages/pyright-internal/src/tests/samples/solver45.py new file mode 100644 index 00000000..6e215d57 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver45.py @@ -0,0 +1,33 @@ +# This sample tests the case where an expression is assigned to an unpacked +# tuple, and the correctly-inferred type of the expression depends on +# bidirectional type inference. + +from typing import Literal, TypedDict + + +def func1[S, T](v: S | T, s: type[S], t: type[T]) -> tuple[S | None, T | None]: ... 
+ + +def test1(): + a: int | None + b: str | None + + a, b = func1(1, int, str) + + +class TD1(TypedDict): + a: int + + +def test2(): + a: TD1 + b: TD1 + + a, b = ({"a": 1}, {"a": 2}) + + +def test3(): + a: Literal[1] + b: Literal[2] + + a, b = (1, 2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver5.py b/python-parser/packages/pyright-internal/src/tests/samples/solver5.py new file mode 100644 index 00000000..1d361e8a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver5.py @@ -0,0 +1,13 @@ +# This sample tests constraint solving where the first +# encounter with the TypeVar is contravariant but later +# encounters are covariant or invariant. + + +def func1(value: object) -> bool: ... + + +v1 = filter(func1, ["b", "a", "r"]) +reveal_type(v1, expected_text="filter[str]") + +v2 = next(v1) +reveal_type(v2, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver6.py b/python-parser/packages/pyright-internal/src/tests/samples/solver6.py new file mode 100644 index 00000000..70db5025 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver6.py @@ -0,0 +1,25 @@ +# This sample tests the type evaluator's handling of +# type vars that span multiple parameters - especially when +# the first parameter is a callable (which has parameters +# that are contravariant). + +from typing import Any, Callable, Iterable, Iterator, TypeVar + +_T = TypeVar("_T") + + +def is_one(x: int) -> bool: + return x == 1 + + +v1 = ["a", "b", "c"] + + +def func1( + __function: Callable[[_T], Any], __iterable: Iterable[_T] +) -> Iterator[_T]: ... + + +# This should be flagged as an error because nums is +# not an int array. 
+ones = func1(is_one, v1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver7.py b/python-parser/packages/pyright-internal/src/tests/samples/solver7.py new file mode 100644 index 00000000..13706dc8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver7.py @@ -0,0 +1,44 @@ +# This sample tests constraint solving in methods +# where the type is provided by a default initialization value +# rather than an argument provided directly by the caller. + +# We use "strict" here because we want to ensure that there are +# no "unknown" types remaining in this file. + +# pyright: strict + +from typing import Generic, Iterator, TypeVar +from contextlib import contextmanager + +_A = TypeVar("_A") +_B = TypeVar("_B") + + +class ClassA(Generic[_A, _B]): + def __init__(self, a: _A, b: _B = "hello"): + self._foo_a = a + self._foo_b = b + + @property + def value_a(self): + return self._foo_a + + @property + def value_b(self): + return self._foo_b + + +a1 = ClassA(27) + +reveal_type(a1.value_a, expected_text="int") +reveal_type(a1.value_b, expected_text="str") + + +@contextmanager +def func1(default: _A | None = None) -> Iterator[_A | str]: + yield "" + + +def func2(): + with func1() as y: + reveal_type(y, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver8.py b/python-parser/packages/pyright-internal/src/tests/samples/solver8.py new file mode 100644 index 00000000..67830444 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver8.py @@ -0,0 +1,27 @@ +# This sample tests the constraint solver's special-case handling of +# Optional[T] within a function. 
+ +from typing import TypeVar + +_T = TypeVar("_T") + + +def func1(v: _T | None) -> _T: + if v is None: + raise ValueError + return v + + +def func2(v: _T) -> _T: + if v is None: + raise ValueError + return v + + +f: int | None = None + +a: int = func1(f) + +# This should generate an error because type var _T +# should be matched to "Optional[int]". +b: int = func2(f) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solver9.py b/python-parser/packages/pyright-internal/src/tests/samples/solver9.py new file mode 100644 index 00000000..486e0c0a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solver9.py @@ -0,0 +1,54 @@ +# This sample tests that Optional types can be matched +# to Type[T] expressions by the constraint solver. + +from typing import Callable, Generic, Optional, TypeVar + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2", bound=None) +_T3 = TypeVar("_T3") + + +def func1(a: type[_T1]) -> _T1: + return a() + + +a = func1(int) + + +def func2(a: type[_T2]) -> type[_T2]: + return a + + +b = func2(type(None)) + +# This should generate an error because None is +# not a type; it's an instance of the NoneType class. +c = func2(None) + + +class ClassA(Generic[_T1]): + def __init__(self, value: _T1) -> None: ... 
+ + @classmethod + def get(cls: type[_T3]) -> type[_T3]: + return cls + + +class ClassB(ClassA): + pass + + +def func3(value: _T1) -> type[ClassA[_T1]]: + v1 = ClassA(value) + v2 = type(v1) + reveal_type(v2, expected_text="type[ClassA[_T1@func3]]") + return v2 + + +d = ClassB.get() +reveal_type(d, expected_text="type[ClassB]") +reveal_type(ClassB.get(), expected_text="type[ClassB]") + + +def func4(cls: type[_T1]) -> Callable[..., _T1]: + return cls diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder1.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder1.py new file mode 100644 index 00000000..475f876c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder1.py @@ -0,0 +1,42 @@ +# This sample tests the handling of "higher-order" type +# variables during TypeVar solving. In this example, we +# pass a generic function "identity" to another generic +# function "fmap". + +from typing import TypeVar, Callable + +T1 = TypeVar("T1") +U1 = TypeVar("U1") + + +def identity1(x: T1) -> T1: + return x + + +def fmap(f: Callable[[T1], U1], maybe: T1 | None) -> U1 | None: + return None + + +x1: int | None = 0 +y1 = fmap(identity1, x1) + +if y1 is not None: + # Make sure we can call an int method on y to confirm + # that it is an "int". + y1.conjugate() + + +# In this variant, use a bound type. +T2 = TypeVar("T2", bound=str) + + +def identity2(x: T2) -> T2: + return x + + +x2: int | None = 0 + +# This should generate an error because identity2's TypeVar +# T2 is bound to str, so there is no solution that satisfies +# all of the constraints. 
+y2 = fmap(identity2, x2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder10.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder10.py new file mode 100644 index 00000000..5cf168e9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder10.py @@ -0,0 +1,17 @@ +# This sample tests the handling of a higher-order function +# that accepts a generic function as a callback. + +from typing import Callable, TypeVar + +A = TypeVar("A") +B = TypeVar("B") + + +def func1(fn: Callable[[A, B], A], b: B) -> A: ... + + +def func2(a: A, x: A) -> A: ... + + +def func3(a: A) -> A: + return func1(func2, a) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder11.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder11.py new file mode 100644 index 00000000..cb6f71ba --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder11.py @@ -0,0 +1,35 @@ +# This sample tests the case where a higher-order function receives +# a generic function as an argument, and the type of the generic +# function depends on one of the other arguments passed to the +# higher-order function. This shouldn't depend on the order the +# arguments are passed. + + +from typing import Protocol, TypeVar + + +T = TypeVar("T") + + +class Proto1(Protocol[T]): + def method(self, v: T) -> T: ... + + +class Impl1: + def method(self, v: T) -> T: ... + + +def func1(a: Proto1[T], b: T) -> T: ... 
+ + +v1 = func1(a=Impl1(), b="abc") +reveal_type(v1, expected_text="str") + +v2 = func1(b="abc", a=Impl1()) +reveal_type(v2, expected_text="str") + +v3 = func1(a=Impl1(), b=1) +reveal_type(v3, expected_text="int") + +v4 = func1(b=1, a=Impl1()) +reveal_type(v4, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder12.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder12.py new file mode 100644 index 00000000..9e0e05a3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder12.py @@ -0,0 +1,26 @@ +# This sample tests the case involving a higher-order function and a +# class that uses a contravariant type variable. + +from collections.abc import Callable +from typing import TypeVar, Generic + +T = TypeVar("T", contravariant=True) +A = TypeVar("A") +B = TypeVar("B") +C = TypeVar("C") + + +class ClassA(Generic[T]): + pass + + +def func1(c: Callable[[A], None], v: A): + pass + + +def func2(c: ClassA[B]) -> None: + pass + + +def func3(c: ClassA[int]): + func1(func2, c) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder13.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder13.py new file mode 100644 index 00000000..da4b74dc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder13.py @@ -0,0 +1,21 @@ +# This sample tests a particular situation that regressed. + +from typing import Any, Generic, TypeVar, TypeVarTuple, Callable + +D = TypeVar("D") +S = TypeVarTuple("S") + + +class N(Generic[*S, D]): ... + + +def func1[*S1, D1, *S2, D2, Dim1]( + c: Callable[[N[*S1, D1], N[*S2, D2]], Any], +) -> Callable[[N[Dim1, *S1, D1], N[Dim1, *S2, D2]], Any]: ... + + +def func2[X, Y, Z](x: N[X, Y, Z], y: N[X, Y, Z]): + func1(func3)(x, y) + + +def func3[Dim1, T](x: N[Dim1, T], y: N[Dim1, T]) -> N[T]: ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder14.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder14.py new file mode 100644 index 00000000..426de9af --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder14.py @@ -0,0 +1,34 @@ +# This sample tests the case where a function with an overload +# is passed to a higher-order function and the return type uses +# a nested Callable type. + +from typing import overload, Callable + + +def func1[A, B](f: Callable[[A], B]) -> Callable[[Callable[[], A]], B]: ... + + +@overload +def func2(v: int) -> None: ... + + +@overload +def func2(v: str) -> None: ... + + +def func2(v: int | str) -> None: + pass + + +def func3() -> int: + return 1 + + +v1 = func1(func2) +reveal_type(v1, expected_text="Overload[(() -> int) -> None, (() -> str) -> None]") + +v2 = v1(func3) +reveal_type(v2, expected_text="None") + +v3 = v1(lambda: 1) +reveal_type(v3, expected_text="None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder2.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder2.py new file mode 100644 index 00000000..4b1f5433 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder2.py @@ -0,0 +1,25 @@ +# This sample tests the case where a generic function is passed as +# a parameter to another generic function. + +from typing import Any, Callable, Generic, Iterable, TypeVar + +T = TypeVar("T") +U = TypeVar("U") + + +def identity(x: U) -> U: + return x + + +def not_identity(x: Any) -> int: + return 3 + + +class Test(Generic[T]): + def fun(self, x: Iterable[T], f: Callable[[T], T]): ... + + def caller(self, x: Iterable[T]): + self.fun(x, identity) + + # This should generate an error. 
+ self.fun(x, not_identity) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder3.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder3.py new file mode 100644 index 00000000..27647565 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder3.py @@ -0,0 +1,32 @@ +# This sample tests a case where a generic function is passed as +# an argument to itself, creating a recursive situation that +# caused an infinite loop. + +from random import random +from typing import Any, Callable, TypeVar + +T = TypeVar("T") +U = TypeVar("U") + + +def func1(x: T, y: U) -> T | U: + return x if random() > 0.5 else y + + +def func2(x: T, y: T) -> T: + return x if random() > 0.5 else y + + +reveal_type( + func2(func1, func2), expected_text="(x: T(1)@func2, y: T(1)@func2) -> T(1)@func2" +) + + +S = TypeVar("S", bound=Callable[..., Any]) + + +def func3(x: S) -> S: + return x + + +reveal_type(func3(func3), expected_text="(x: S(1)@func3) -> S(1)@func3") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder4.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder4.py new file mode 100644 index 00000000..d01a7c1a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder4.py @@ -0,0 +1,57 @@ +# This sample tests the handling of generic callbacks passed to a higher-order +# function that is also generic. + +from typing import Callable, ParamSpec, Protocol, TypeVar + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T_co = TypeVar("_T_co", covariant=True) +_U = TypeVar("_U") + + +class MyIterable(Protocol[_T_co]): ... + + +class MySupportsAbs(Protocol[_T_co]): ... + + +def my_abs(x: MySupportsAbs[_T], /) -> _T: ... + + +def my_map(a: Callable[[_T], _U], b: MyIterable[_T]) -> MyIterable[_U]: ... 
+ + +def func1(xs: MyIterable[MySupportsAbs[int]]): + ys0 = my_map(a=my_abs, b=xs) + reveal_type(ys0, expected_text="MyIterable[int]") + + ys1 = my_map(b=xs, a=my_abs) + reveal_type(ys1, expected_text="MyIterable[int]") + + +def ident(x: _U) -> _U: + return x + + +def func2(__cb: Callable[[_T1], _T], __arg0: _T1) -> _T: ... + + +x1_0 = func2(ident, "hi") +reveal_type(x1_0, expected_text="str") + +x1_1 = func2(ident, 1) +reveal_type(x1_1, expected_text="int") + + +_P = ParamSpec("_P") +_R = TypeVar("_R") + + +def func3(__obj: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> _R: ... + + +x2_0 = func3(ident, "hi") +reveal_type(x2_0, expected_text="str") + +x2_1 = func3(ident, 1) +reveal_type(x2_1, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder5.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder5.py new file mode 100644 index 00000000..23c7d862 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder5.py @@ -0,0 +1,227 @@ +# This sample tests the case where a generic function is passed +# as an argument to another generic function multiple times. 
+ +from dataclasses import dataclass +from typing import ( + Any, + Callable, + Generic, + Literal, + ParamSpec, + Protocol, + TypeVar, + TypeVarTuple, + overload, +) + +T = TypeVar("T") +A = TypeVar("A") +B = TypeVar("B") +C = TypeVar("C") +D = TypeVar("D") +X = TypeVar("X") +Y = TypeVar("Y") +Z = TypeVar("Z") +P = ParamSpec("P") +Ts = TypeVarTuple("Ts") + + +def identity(x: T) -> T: + return x + + +def triple_1( + f: Callable[[A], X], g: Callable[[B], Y], h: Callable[[C], Z] +) -> Callable[[A, B, C], tuple[X, Y, Z]]: + def wrapped(a: A, b: B, c: C) -> tuple[X, Y, Z]: + return f(a), g(b), h(c) + + return wrapped + + +def triple_2( + f: tuple[Callable[[A], X], Callable[[B], Y], Callable[[C], Z]], +) -> Callable[[A, B, C], tuple[X, Y, Z]]: + def wrapped(a: A, b: B, c: C) -> tuple[X, Y, Z]: + return f[0](a), f[1](b), f[2](c) + + return wrapped + + +def test_1(f: Callable[[A], X]) -> Callable[[A, B, C], tuple[X, B, C]]: + val = triple_1(f, identity, identity) + + reveal_type( + val, + expected_text="(A@test_1, T@identity, T(1)@identity) -> tuple[X@test_1, T@identity, T(1)@identity]", + ) + + return val + + +def test_2(f: Callable[[A], X]) -> Callable[[A, B, C], tuple[X, B, C]]: + val = triple_2((f, identity, identity)) + + reveal_type( + val, + expected_text="(A@test_2, T@identity, T(1)@identity) -> tuple[X@test_2, T@identity, T(1)@identity]", + ) + + return val + + +class ClassA: + def identity(self, x: T) -> T: + return x + + def test_1(self, f: Callable[[A], X]) -> Callable[[A, B, C], tuple[X, B, C]]: + val = triple_1(f, self.identity, self.identity) + + reveal_type( + val, + expected_text="(A@test_1, T@identity, T(1)@identity) -> tuple[X@test_1, T@identity, T(1)@identity]", + ) + + return val + + def test_2(self, f: Callable[[A], X]) -> Callable[[A, B, C], tuple[X, B, C]]: + val = triple_2((f, self.identity, self.identity)) + + reveal_type( + val, + expected_text="(A@test_2, T@identity, T(1)@identity) -> tuple[X@test_2, T@identity, T(1)@identity]", + ) + + return 
val + + +@dataclass(frozen=True) +class Pair(Generic[A, B]): + left: A + right: B + + +def func1(f: Callable[[A], B]) -> Callable[[Pair[A, X]], Pair[B, X]]: ... + + +def test_3(pair: Pair[Pair[A, B], C]) -> Pair[Pair[A, B], C]: + val1 = func1(func1(identity)) + reveal_type( + val1, + expected_text="(Pair[Pair[T@identity, X(1)@func1], X@func1]) -> Pair[Pair[T@identity, X(1)@func1], X@func1]", + ) + val2 = val1(pair) + reveal_type(val2, expected_text="Pair[Pair[A@test_3, B@test_3], C@test_3]") + return val2 + + +def test_4(pair: Pair[Pair[Pair[A, B], C], D]) -> Pair[Pair[Pair[A, B], C], D]: + val1 = func1(func1(func1(identity))) + reveal_type( + val1, + expected_text="(Pair[Pair[Pair[T@identity, X(2)@func1], X(1)@func1], X@func1]) -> Pair[Pair[Pair[T@identity, X(2)@func1], X(1)@func1], X@func1]", + ) + val2 = val1(pair) + return val2 + + +@overload +def test_5(a: Callable[P, type[T]], *, b: Literal[0] = ...) -> type[list[type[T]]]: ... + + +@overload +def test_5(a: T, *args: int, b: Literal[False, None] = ...) -> type[list[T]]: ... + + +@overload +def test_5(a: T, *args: int, b: Literal[True] = ...) -> type[list[T]]: ... + + +def test_5(a: Any, *args: int, b: Any = ...) -> Any: ... + + +val3 = test_5(test_5, **{}) +reveal_type( + val3, + expected_text="Unknown", +) + +val4 = test_5(test_5, b=True) +reveal_type( + val4, + expected_text="type[list[Overload[(a: (**P(1)@test_5) -> type[T(1)@test_5], *, b: Literal[0] = ...) -> type[list[type[T(1)@test_5]]], (a: T(1)@test_5, *args: int, b: Literal[False] | None = ...) -> type[list[T(1)@test_5]], (a: T(1)@test_5, *args: int, b: Literal[True] = ...) -> type[list[T(1)@test_5]]]]]", +) + + +def test_6(g: Callable[[B], C]) -> Callable[[Callable[[A], B]], Callable[[A], C]]: ... 
+ + +val5 = test_6(test_6) +reveal_type( + val5, + expected_text="((A@test_6) -> ((B(1)@test_6) -> C(1)@test_6)) -> ((A@test_6) -> ((((A(1)@test_6) -> B(1)@test_6)) -> ((A(1)@test_6) -> C(1)@test_6)))", +) + + +def test_7( + g: Callable[[C], D], +) -> Callable[[Callable[[A], Callable[[B], C]]], Callable[[A], Callable[[B], D]]]: + val6 = test_6(test_6)(test_6)(g) + reveal_type( + val6, + expected_text="((A(1)@test_6) -> ((A(2)@test_6) -> C@test_7)) -> ((A(1)@test_6) -> ((A(2)@test_6) -> D@test_7))", + ) + return val6 + + +def test_8(fn: Callable[[*Ts], Callable[[A], B]]) -> Callable[[A, *Ts], B]: ... + + +def test_9(x: Callable[[bool], Callable[[int], Callable[[str], None]]]): + test_8(test_8(x)) + + +def test_10(func: Callable[[*Ts], Any], *args: *Ts) -> Any: ... + + +def func2() -> None: ... + + +test_10(test_10, func2) + + +def test_11(func: Callable[[*Ts], T], *args: *Ts) -> T: + return func(*args) + + +def func3(num: int, /) -> int: + return num + + +test_11(test_11, func3, 123) + +# This will generate an error, but it should not crash or cause an infinite loop. +test_11(test_11, test_11, func3, 123) + + +class Proto1(Protocol): + def __call__(self, a: T, b: T) -> T: ... 
+ + +def func4(a: T, b: T) -> T: + return a + + +def test_12(p: Proto1) -> Proto1: + return p(func4, func4) + + +reveal_type( + identity((identity, identity)), + expected_text="tuple[(x: T(1)@identity) -> T(1)@identity, (x: T(2)@identity) -> T(2)@identity]", +) + +reveal_type( + identity([identity]), + expected_text="list[(x: T(1)@identity) -> T(1)@identity]", +) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder6.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder6.py new file mode 100644 index 00000000..9aa03974 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder6.py @@ -0,0 +1,57 @@ +# This sample tests the handling of nested calls to generic functions +# when bidirectional type inference is involved. + +from typing import Any, Callable, Generic, Literal, ParamSpec, Protocol, TypeVar + +_T = TypeVar("_T") +_P = ParamSpec("_P") + + +def identity1(x: _T) -> _T: + return x + + +def identity2(x: _T) -> _T: + return x + + +def test1(x: Literal[2]) -> Literal[2]: + return identity1(identity2(x)) + + +v1 = min(1, max(2, 0.5)) +reveal_type(v1, expected_text="float") + + +class Future(Generic[_T]): ... + + +def func1(future: Future[_T]) -> Future[_T]: ... + + +def func2( + __fn: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs +) -> Future[_T]: ... + + +def func3() -> int: ... + + +def func4(a: int, b: int) -> str: ... + + +reveal_type(func1(func2(func3)), expected_text="Future[int]") +reveal_type(func1(func2(func4, 1, 2)), expected_text="Future[str]") +reveal_type(func1(func2(func4, a=1, b=2)), expected_text="Future[str]") + + +class Proto(Protocol): + def __call__(self, func: _T) -> _T: ... 
+ + +def func5(cb: Proto, names: Any): + val1 = cb(cb(names)) + reveal_type(val1, expected_text="Any") + + val2 = cb(cb(1)) + reveal_type(val2, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder7.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder7.py new file mode 100644 index 00000000..19cf05d4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder7.py @@ -0,0 +1,12 @@ +# This sample tests the case where a generic class is passed as an +# argument to a function that accepts a generic callable parameter. +# The class-scoped TypeVars for the class must be preserved when +# solving the higher-order TypeVars. + +from itertools import compress +from typing import Any, Iterable + + +def func1(a: Iterable[Iterable[tuple[str, int]]], b: Any) -> None: + c = map(compress, a, b) + reveal_type(c, expected_text="map[compress[tuple[str, int]]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder8.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder8.py new file mode 100644 index 00000000..1d3d6275 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder8.py @@ -0,0 +1,24 @@ +# This sample tests the case where a higher-order function involves a ParamSpec. + +from typing import TypeVar, Callable, Protocol, ParamSpec + +P = ParamSpec("P") +R = TypeVar("R", covariant=True) +T = TypeVar("T") + + +class Proto1(Protocol[P, R]): + @classmethod + def collect(cls, *args: P.args, **kwargs: P.kwargs) -> R: ... + + +class Class1: + @classmethod + def collect(cls, n: type[T]) -> Callable[[Callable[[T], int]], None]: ... + + +def func1(a: Proto1[P, R], *args: P.args, **kwargs: P.kwargs) -> R: ... 
+ + +reveal_type(func1(Class1, float), expected_text="((float) -> int) -> None") +reveal_type(func1(Class1, int), expected_text="((int) -> int) -> None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder9.py b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder9.py new file mode 100644 index 00000000..fb929e13 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverHigherOrder9.py @@ -0,0 +1,27 @@ +# This sample tests the case where a higher-order function accepts +# a callable parameterized by a ParamSpec and a generic function +# is passed to it. + +from typing import Callable, TypeVar, ParamSpec + + +P = ParamSpec("P") +S = TypeVar("S") +T = TypeVar("T") + + +def deco1(func: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T: ... + + +def func1(val1: T, val2: S, val3: S) -> T: ... + + +reveal_type(deco1(func1, val1=1, val2=3, val3="s"), expected_text="int") +reveal_type(deco1(func1, 1, 3, "s"), expected_text="int") + + +def func2(val1: T, val2: S) -> T | list[S]: ... + + +reveal_type(deco1(func2, val1=1, val2="s"), expected_text="int | list[str]") +reveal_type(deco1(func2, 1, "s"), expected_text="int | list[str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverLiteral1.py b/python-parser/packages/pyright-internal/src/tests/samples/solverLiteral1.py new file mode 100644 index 00000000..9fa3b495 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverLiteral1.py @@ -0,0 +1,65 @@ +# This sample tests the case that exercises some of the heuristics that +# determine whether a solved TypeVar should retain a literal type. 
+ +from typing import Callable, Generic, Literal, TypeVar + +FileChanges = dict[str, Literal["created", "edited", "removed"]] + +changes: FileChanges = {} +changes.update({filename: "removed" for filename in ["foo.py", "bar.py"]}) + +_S = TypeVar("_S") +_T = TypeVar("_T") + + +class ClassA(Generic[_T]): + pass + + +TA1 = Callable[[ClassA[_T]], None] + + +def func1(value: _T) -> TA1[_T]: + def ret(ctx: ClassA[_T]) -> None: + pass + + return ret + + +def func2() -> TA1[bool]: + return func1(True) + + +def func3(value: _T) -> Callable[[_T], None]: ... + + +x: Callable[[tuple[bool]], None] = func3((True,)) + + +def func4(v: _T, f: Callable[[_T], None]): ... + + +def func5(v: Literal[1, 2], f: Callable[[Literal[1, 2]], None]): + func4(v, f) + + +class ClassB(Generic[_S, _T]): + left: _S + right: _T + + +def func6(s: _S, t: _T) -> ClassB[_S, _T]: ... + + +def func7(t: _T, f: Callable[[ClassB[_T, Literal[2]]], None]) -> None: + return f(func6(t, 2)) + + +def func8(a: _T, b: Callable[[list[_T]], None]) -> _T: + return a + + +def func9(v: Callable[[list[int]], None]): + func8(b=v, a=1) + + func8(a=1, b=v) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverLiteral2.py b/python-parser/packages/pyright-internal/src/tests/samples/solverLiteral2.py new file mode 100644 index 00000000..7372805b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverLiteral2.py @@ -0,0 +1,20 @@ +# This sample validates that a literal and a non-literal are not considered +# compatible types when in an invariant context. + +from typing import Literal, TypeVar + +T = TypeVar("T") + + +def func1(a: T, b: T) -> T: + return a + + +def func2() -> None: + foo_list: list[Literal["foo"]] = ["foo"] + x = func1(foo_list, [""]) + reveal_type(x, expected_text="list[Literal['foo']] | list[str]") + + # This should generate an error. 
+ x.append("not foo") + print(foo_list) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverScoring1.py b/python-parser/packages/pyright-internal/src/tests/samples/solverScoring1.py new file mode 100644 index 00000000..00185a9d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverScoring1.py @@ -0,0 +1,45 @@ +# This sample tests the type checker's "type var scoring" mechanism +# whereby it attempts to solve type variables with the simplest +# possible solution. + +from typing import Callable, TypeVar + +T = TypeVar("T") + + +def func1(obj_type: type[T], obj: list[T] | T) -> list[T]: + return [] + + +def func2(obj_type: type[T], obj: T | list[T]) -> list[T]: + return [] + + +def func3(input1: list[str]): + val1 = func1(str, input1) + reveal_type(val1, expected_text="list[str]") + + val2 = func2(str, input1) + reveal_type(val2, expected_text="list[str]") + + +def func4( + func: Callable[[], T] | Callable[[T], None] | list[T] | dict[str, T] | T, +) -> T: ... + + +def func5(func: Callable[[], T]) -> T: ... + + +def func6(val: str) -> None: ... + + +def func7() -> str: ... + + +reveal_type(func4([""]), expected_text="str") +reveal_type(func4({"": 1}), expected_text="int") +reveal_type(func4(func6), expected_text="str") +reveal_type(func4(func7), expected_text="str") +reveal_type(func4(str), expected_text="str") +reveal_type(func5(str), expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverScoring2.py b/python-parser/packages/pyright-internal/src/tests/samples/solverScoring2.py new file mode 100644 index 00000000..e7e134ad --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverScoring2.py @@ -0,0 +1,66 @@ +# This sample tests the handling of a union that includes both +# T and a generic class parameterized by T. This case is indeterminate +# according to PEP 484, but pyright has code in place to find the +# "least complex" answer. 
+ +from typing import Any, Generic, TypeVar, Union + +T1 = TypeVar("T1") + + +class Wrapper(Generic[T1]): ... + + +def ensure_wrapped(item: Union[T1, Wrapper[T1]]) -> Wrapper[T1]: ... + + +def some_func(x: Wrapper[T1]) -> Wrapper[T1]: + return ensure_wrapped(x) + + +def func1a(value: list[Union[T1, list[T1]]]) -> T1: ... + + +def func2a(value: list[Union[float, list[float]]]): + x = func1a(value) + reveal_type(x, expected_text="float") + + +def func3a(value: list[Union[str, list[float]]]): + # This should generate an error + func1a(value) + + +def func4a(value: list[Union[float, str, list[Union[float, str]]]]): + x = func1a(value) + reveal_type(x, expected_text="float | str") + + +def func1b(value: list[Union[int, list[T1]]]) -> T1: ... + + +def func2b(value: list[Union[int, list[float]]]): + x = func1b(value) + reveal_type(x, expected_text="float") + + +def func3b(value: list[Union[str, list[float]]]): + # This should generate an error + func1b(value) + + +def ensure_list(value: Union[T1, list[T1]]) -> list[T1]: ... 
+ + +def func4( + v1: list, v2: list[Any], v3: list[None], v4: Any, v5: int, v6: T1, v7: list[T1] +) -> T1: + reveal_type(ensure_list(v1), expected_text="list[Unknown]") + reveal_type(ensure_list(v2), expected_text="list[Any]") + reveal_type(ensure_list(v3), expected_text="list[None]") + reveal_type(ensure_list(v4), expected_text="list[Any]") + reveal_type(ensure_list(v5), expected_text="list[int]") + reveal_type(ensure_list(v6), expected_text="list[T1@func4]") + reveal_type(ensure_list(v7), expected_text="list[T1@func4]") + + return v6 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverScoring3.py b/python-parser/packages/pyright-internal/src/tests/samples/solverScoring3.py new file mode 100644 index 00000000..9f0711f1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverScoring3.py @@ -0,0 +1,13 @@ +# This sample tests the case where the constraint solver can choose one +# of several types that satisfy the constraints. + +from typing import TypeVar + +T = TypeVar("T") + + +def to_list(t: list[T] | T) -> list[T]: ... + + +x = to_list([1, 2, 3]) +reveal_type(x, expected_text="list[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverScoring4.py b/python-parser/packages/pyright-internal/src/tests/samples/solverScoring4.py new file mode 100644 index 00000000..87861ad9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverScoring4.py @@ -0,0 +1,41 @@ +# This sample tests the "complexity" calculation in the constraint +# solver to select the less-complex solution. + +from typing import Callable, Generic, Protocol, TypeVar + +T = TypeVar("T") +S = TypeVar("S") +T_contra = TypeVar("T_contra", contravariant=True) +R = TypeVar("R") + + +class ResolveFunc(Protocol[T_contra]): + def __call__(self, resolve_value: T_contra) -> None: ... 
+ + +TA1 = Callable[[T], R | "Promise[R]"] +TA2 = Callable[[ResolveFunc[T]], None] + + +class Promise(Generic[T]): + @staticmethod + def resolve(resolve_value: S) -> "Promise[S]": ... + + def __init__(self, executor_func: TA2[T]) -> None: ... + + def then(self, onfullfilled: TA1[T, R]) -> "Promise[R]": ... + + +Promise.resolve(1).then(lambda result: reveal_type(result, expected_text="int")) + +Promise.resolve(1).then(lambda result: "abc").then( + lambda result: reveal_type(result, expected_text="str") +) + +Promise.resolve(None).then( + lambda result: Promise.resolve("abc" if 1 < 2 else 123) +).then(lambda result: reveal_type(result, expected_text="str | int")) + +Promise.resolve(None).then(lambda result: "abc" if 1 < 2 else 123).then( + lambda result: reveal_type(result, expected_text="int | str") +) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/solverUnknown1.py b/python-parser/packages/pyright-internal/src/tests/samples/solverUnknown1.py new file mode 100644 index 00000000..45e7dd7b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/solverUnknown1.py @@ -0,0 +1,10 @@ +# This sample tests TypeVar matching when there are multiple sources +# and some of them are Unknown. The TypeVar constraint solver contains +# special heuristics to deal with this case. + + +def func1(u): + b: bool = True + + x = dict(b=b, u=u, x=[]) + reveal_type(x, expected_text="dict[str, bool | list[Any]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/specialForm1.py b/python-parser/packages/pyright-internal/src/tests/samples/specialForm1.py new file mode 100644 index 00000000..e54b4a55 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/specialForm1.py @@ -0,0 +1,17 @@ +# This sample tests the case where the special-form aliases for +# the stdlib collection classes are instantiated. + +# This should generate an error. 
+from typing import Dict, List, Set, Tuple + +# This should generate an error. +t1 = Dict() + +# This should generate an error. +t2 = List() + +# This should generate an error. +t3 = Set() + +# This should generate an error. +t4 = Tuple() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/specialForm2.py b/python-parser/packages/pyright-internal/src/tests/samples/specialForm2.py new file mode 100644 index 00000000..156f5cbc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/specialForm2.py @@ -0,0 +1,56 @@ +# This sample tests special forms and their use as runtime objects. + +from typing import ( + Annotated, + Any, + Callable, + ClassVar, + Concatenate, + Final, + Generic, + Literal, + LiteralString, + NotRequired, + Optional, + Protocol, + Required, + Self, + TypeAlias, + TypeGuard, + TypedDict, + Union, + Unpack, +) + + +def func1(val: object) -> None: + pass + + +# All of these should be compatible with `object`, and they +# should not generate errors. +func1(Literal) +func1(Literal[0]) +func1(ClassVar) +func1(Unpack) +func1(Required) +func1(NotRequired) +func1(Protocol) +func1(Generic) +func1(Final) +func1(Callable) +func1(Callable[..., Any]) +func1(Union) +func1(Optional) +func1(TypedDict) +func1(LiteralString) +func1(Self) +func1(Concatenate) +func1(TypeAlias) +func1(TypeGuard) +func1(Annotated) +func1(Union[int, str]) +func1(int | str) + + +{Literal[1]: "literal"}[Literal[1]] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/specialForm3.py b/python-parser/packages/pyright-internal/src/tests/samples/specialForm3.py new file mode 100644 index 00000000..8822c4cf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/specialForm3.py @@ -0,0 +1,48 @@ +# This sample tests special forms that are not compatible with type[T], +# type[object] or type[Any]. 
+ +from typing import ( + Annotated, + Any, + Callable, + ClassVar, + Concatenate, + Final, + Generic, + Literal, + LiteralString, + NotRequired, + Optional, + Protocol, + Required, + Self, + TypeAlias, + TypeGuard, + Union, + Unpack, +) +from typing_extensions import ReadOnly # pyright: ignore[reportMissingModuleSource] + +# The following should all generate an error. +Literal() +Literal[0]() +ClassVar() +Unpack() +Required() +NotRequired() +Protocol() +Generic() +Final() +Callable() +Callable[..., Any]() +Union() +Optional() +LiteralString() +Self() +Concatenate() +TypeAlias() +TypeGuard() +Annotated() +ReadOnly() +Union[int, str]() +(int | str)() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/specialForm4.py b/python-parser/packages/pyright-internal/src/tests/samples/specialForm4.py new file mode 100644 index 00000000..3d4c46a3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/specialForm4.py @@ -0,0 +1,116 @@ +# This sample tests special forms that are not compatible with type[T], +# type[object] or type[Any]. + +from typing import ( + Annotated, + Any, + Callable, + ClassVar, + Concatenate, + Final, + Generic, + Literal, + LiteralString, + NotRequired, + Optional, + Protocol, + Required, + Self, + TypeAlias, + TypeGuard, + TypeVar, + Union, + Unpack, +) +from typing_extensions import ReadOnly # pyright: ignore[reportMissingModuleSource] + +T = TypeVar("T") + + +def func1(val: type[Any]) -> None: + pass + + +def func2(val: type[object]) -> None: + pass + + +def func3(val: type[T]) -> None: + pass + + +# The following should all generate an error. 
+func1(Literal) +func1(Literal[0]) +func1(ClassVar) +func1(Unpack) +func1(Required) +func1(NotRequired) +func1(Protocol) +func1(Generic) +func1(Final) +func1(Callable) +func1(Callable[..., Any]) +func1(Union) +func1(Optional) +func1(Optional[Any]) +func1(LiteralString) +func1(Self) +func1(Concatenate) +func1(TypeAlias) +func1(TypeGuard) +func1(Annotated) +func1(ReadOnly) +func1(Union[int, str]) +func1(int | str) +func1(Any) + +func2(Literal) +func2(Literal[0]) +func2(ClassVar) +func2(Unpack) +func2(Required) +func2(NotRequired) +func2(Protocol) +func2(Generic) +func2(Final) +func2(Callable) +func2(Callable[..., Any]) +func2(Union) +func2(Optional) +func2(Optional[Any]) +func2(LiteralString) +func2(Self) +func2(Concatenate) +func2(TypeAlias) +func2(TypeGuard) +func2(Annotated) +func2(ReadOnly) +func2(Union[int, str]) +func2(int | str) +func2(Any) + +func3(Literal) +func3(Literal[0]) +func3(ClassVar) +func3(Unpack) +func3(Required) +func3(NotRequired) +func3(Protocol) +func3(Generic) +func3(Final) +func3(Callable) +func3(Callable[..., Any]) +func3(Union) +func3(Optional) +func3(Optional[Any]) +func3(LiteralString) +func3(Self) +func3(Concatenate) +func3(TypeAlias) +func3(TypeGuard) +func3(Annotated) +func3(ReadOnly) +func3(Union[int, str]) +func3(int | str) +func3(Any) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/specialization1.py b/python-parser/packages/pyright-internal/src/tests/samples/specialization1.py new file mode 100644 index 00000000..482ee9db --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/specialization1.py @@ -0,0 +1,77 @@ +# This sample tests specification of generic types. + +from typing import Generic, Iterable, List, TypeVar + + +class A: + pass + + +class B(A): + pass + + +class C(A): + pass + + +_T1 = TypeVar("_T1", A, B) + + +class Moo(Generic[_T1]): + pass + + +class Foo: + def __init__(self) -> None: ... + + def m1(self, a: Moo[A]) -> None: ... + + def m2(self, b: Moo[B]) -> None: ... 
+ + +a = Moo[A]() +b = Moo[B]() + +y = Foo() + +y.m1(a) + +# This should generate an error: +# Argument of type 'Moo[B]' cannot be assigned to parameter of type 'Moo[A]' +y.m1(b) + +# This should generate an error: +# Argument of type 'Moo[A]' cannot be assigned to parameter of type 'Moo[B]' +y.m2(a) + +y.m2(b) + + +def m3(c: Moo[C]): + pass + + +# This should generate an error: +# Type argument 'List[C]' cannot be assigned to type variable '_T1' +def m4(c: Moo[List[C]]): + pass + + +class D(Generic[_T1]): + # This should generate an error: + # TypeVar constraint types can't be generic. + _T2 = TypeVar("_T2", Iterable[_T1], int) + + # This should generate an error: + # TypeVar bound types can't be generic. + _T3 = TypeVar("_T3", bound=Iterable[_T1]) + + +# This should generate an error: +# TypeVars can't be bound and constrained. +_T4 = TypeVar("_T4", str, int, bound=int) + +# This should generate an error: +# TypeVar must include more than one constraint. +_T5 = TypeVar("_T5", str) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/specialization2.py b/python-parser/packages/pyright-internal/src/tests/samples/specialization2.py new file mode 100644 index 00000000..5720652b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/specialization2.py @@ -0,0 +1,49 @@ +# This sample tests the special case of specializing a Union +# type that has generic parameters. 
+ +from typing import Any, TypeVar, Generic, Union, Callable +from dataclasses import dataclass + +E = TypeVar("E") +A = TypeVar("A") +B = TypeVar("B") + + +@dataclass +class Left(Generic[E]): + left: E + + +@dataclass +class Right(Generic[A]): + right: A + + +Either = Union[Left[E], Right[A]] + + +def fmap(f: Callable[[A], B], either: Either[E, A]) -> Either[E, B]: + if isinstance(either, Right): + return Right(f(either.right)) + else: + return either + + +def square(x: int) -> int: + return x * x + + +def accepts_only_left_str(p: Left[Any]): + pass + + +def accepts_only_right_int(p: Right[Any]): + pass + + +aa = fmap(square, Left("s")) + +if isinstance(aa, Left): + accepts_only_left_str(aa) +else: + accepts_only_right_int(aa) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/staticExpression1.py b/python-parser/packages/pyright-internal/src/tests/samples/staticExpression1.py new file mode 100644 index 00000000..400bb154 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/staticExpression1.py @@ -0,0 +1,76 @@ +# This sample tests static expression forms that are supported +# in the binder. + +import sys +import os + +x: int + +if sys.platform == "linux": + x = 1 +else: + x = "error!" + +if sys.version_info >= (3, 9): + x = 1 +else: + x = "error!" + +if os.name == "posix": + x = 1 +else: + x = "error!" + +if True: + x = 1 +else: + x = "error!" + +if not False: + x = 1 +else: + x = "error!" + +DEFINED_TRUE = True +DEFINED_FALSE = False + +if DEFINED_TRUE: + x = 1 +else: + x = "error!" + +if not DEFINED_FALSE: + x = 1 +else: + x = "error!" + +DEFINED_STR = "hi!" + +if DEFINED_STR == "hi!": + x = 1 +else: + x = "error!" + + +class Dummy: + DEFINED_FALSE: bool + DEFINED_TRUE: bool + DEFINED_STR: str + + +dummy = Dummy() + +if dummy.DEFINED_TRUE: + x = 1 +else: + x = "error!" + +if not dummy.DEFINED_FALSE: + x = 1 +else: + x = "error!" + +if dummy.DEFINED_STR == "hi!": + x = 1 +else: + x = "error!" 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/staticExpression2.py b/python-parser/packages/pyright-internal/src/tests/samples/staticExpression2.py new file mode 100644 index 00000000..011c1074 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/staticExpression2.py @@ -0,0 +1,32 @@ +# This sample tests a special form of a sys.version_info check. + +import sys +from datetime import datetime, timezone, timedelta +from typing import overload, Optional + +# Overload was broken before 3.5.2. +# This sort of hack is seen in some type-annotated code to prevent crashes. +if sys.version_info < (3, 5, 2): + + def overload(f): + return f + + +@overload +def from_json_timestamp(ts: int) -> datetime: ... + + +@overload +def from_json_timestamp(ts: None) -> None: ... + + +def from_json_timestamp(ts: Optional[int]) -> Optional[datetime]: + return ( + None + if ts is None + else (datetime(1970, 1, 1, tzinfo=timezone.utc) + timedelta(milliseconds=ts)) + ) + + +result1: datetime = from_json_timestamp(2418049) +result3: None = from_json_timestamp(None) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/strings1.py b/python-parser/packages/pyright-internal/src/tests/samples/strings1.py new file mode 100644 index 00000000..a6f117b5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/strings1.py @@ -0,0 +1,22 @@ +# This sample tests the reportImplicitStringConcatenation diagnostic check. + + +def func1(val: str): + pass + + +func1("first argument" "second argument") + +func1( + "This is the first argument, which contains " + "especially long text that could not fit into " + "one single line thus should be spread." +) + +func1( + ( + "This is the first argument, which contains " + "especially long text that could not fit into " + "one single line thus should be spread." 
+ ) +) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/strings2.py b/python-parser/packages/pyright-internal/src/tests/samples/strings2.py new file mode 100644 index 00000000..7604d278 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/strings2.py @@ -0,0 +1,16 @@ +# This sample tests string concatenation. + +v1 = "a" "b" r"c" R"""d""" "e" "f" +reveal_type(v1, expected_text="Literal['abcdef']") + +v2 = b"a" b"b" rb"c" Rb"d" +reveal_type(v2, expected_text='Literal[b"abcd"]') + +# This should generate an error. +v3 = "a" b"b" + +# This should generate an error. +v4 = b"a" f"" + +# This should generate a warning. +v5 = b"\u00FF" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/stubs/placeholder.txt b/python-parser/packages/pyright-internal/src/tests/samples/stubs/placeholder.txt new file mode 100644 index 00000000..4051c32d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/stubs/placeholder.txt @@ -0,0 +1,2 @@ +Placeholder file to ensure this folder exists. +See https://archive.kernel.org/oldwiki/git.wiki.kernel.org/index.php/GitFaq.html#Can_I_add_empty_directories.3F diff --git a/python-parser/packages/pyright-internal/src/tests/samples/subscript1.py b/python-parser/packages/pyright-internal/src/tests/samples/subscript1.py new file mode 100644 index 00000000..fdedad97 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/subscript1.py @@ -0,0 +1,80 @@ +# This sample tests the reporting of builtin types that +# will generate exceptions when subscripted in older +# versions of Python. + +from queue import Queue +from collections import OrderedDict, deque +from asyncio import Future, Task + + +# These should generate errors for Python 3.8 and older. 
+def func1( + a1: Queue[int], + b1: OrderedDict[str, str], + d1: list[int], + e1: dict[str, int], + f1: set[int], + g1: deque[int], + h1: frozenset[int], + # These previously generated errors, but no longer do because of + # changes in the typeshed stubs. + c1: Future[int], + i1: Task[None], +) -> None: + pass + + +def func2( + a1: "Queue[int]", + b1: "OrderedDict[str, str]", + c1: "Future[int]", + d1: "list[int]", + e1: "dict[str, int]", + f1: "set[int]", + g1: "deque[int]", + h1: "frozenset[int]", + i1: "Task[None]", +) -> None: + pass + + +# These should generate errors because they are used +# in variable types, but they appear outside of a function. +class A: + a1: Queue[int] = Queue() + b1: OrderedDict[str, str] = OrderedDict() + d1: list[int] = [] + e1: dict[str, int] = {} + f1: set[int] = set() + g1: deque[int] = deque() + h1: frozenset[int] = frozenset() + + # These previously generated errors, but no longer do because of + # changes in the typeshed stubs. + c1: Future[int] = Future() + i1: Task[None] + + +class B: + a2: "Queue[int]" = Queue() + b2: "OrderedDict[str, str]" = OrderedDict() + c2: "Future[int]" = Future() + d2: "list[int]" = [] + e2: "dict[str, int]" = {} + f2: "set[int]" = set() + g2: "deque[int]" = deque() + h2: "frozenset[int]" = frozenset() + i1: "Task[None]" + + +def func3(): + # These should not generate errors. 
+ a1: Queue[int] = Queue() + b1: OrderedDict[str, str] = OrderedDict() + c1: Future[int] = Future() + d1: list[int] = [] + e1: dict[str, int] = {} + f1: set[int] = set() + g1: deque[int] = deque() + h1: frozenset[int] = frozenset() + i1: Task[None] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/subscript2.py b/python-parser/packages/pyright-internal/src/tests/samples/subscript2.py new file mode 100644 index 00000000..78aa8ff2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/subscript2.py @@ -0,0 +1,55 @@ +# This sample tests various forms of subscript expressions for +# syntax and semantic (type) errors. + +from typing import TypeVar + + +_T = TypeVar("_T", list, tuple) + +def func1(p1: list[int], p2: _T): + a1 = p1[0] + reveal_type(a1, expected_text="int") + + a2 = p1[:] + reveal_type(a2, expected_text="list[int]") + + a3 = p1[1:] + reveal_type(a3, expected_text="list[int]") + + a4 = p1[1:2] + reveal_type(a4, expected_text="list[int]") + + a5 = p1[0:1:3] + reveal_type(a5, expected_text="list[int]") + + a6 = p1[:3] + reveal_type(a6, expected_text="list[int]") + + a7 = p1[::] + reveal_type(a7, expected_text="list[int]") + + a8 = p1[::2] + reveal_type(a8, expected_text="list[int]") + + # This should generate a syntax error. + b1 = p1[0:1:3:4] + + # This should generate a syntax error. + b2 = p1[0:::] + + # This should generate a type error. + c1 = p1[:,] + reveal_type(c1, expected_text="Unknown") + + # This should generate a type error. + c2 = p1[:,:] + reveal_type(c2, expected_text="Unknown") + + # This should generate a type error. 
+ c3 = p1[1,] + reveal_type(c3, expected_text="Unknown") + + d1 = p2[0] + reveal_type(d1, expected_text="Unknown") + + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/subscript3.py b/python-parser/packages/pyright-internal/src/tests/samples/subscript3.py new file mode 100644 index 00000000..a4b6f734 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/subscript3.py @@ -0,0 +1,10 @@ +# This sample tests the handling of a subscript in a loop that includes +# a del statement. + +# pyright: strict + + +def func1(lst: list[tuple[int, int]]): + for _ in range(1): + lst[-1] = lst[-1][1], lst[-1][0] + del lst[-1] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/subscript4.py b/python-parser/packages/pyright-internal/src/tests/samples/subscript4.py new file mode 100644 index 00000000..458d30b7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/subscript4.py @@ -0,0 +1,36 @@ +# This sample tests the handling of unpack operators within +# a subscript. 
+ +from typing import NamedTuple + + +class Recorder[T]: + def __getitem__(self, item: T) -> T: + return item + + +class OneInt(NamedTuple): + value: int + + +class IntStrPair(NamedTuple): + first: int + second: str + + +recorder_pair: Recorder[tuple[int, str]] = Recorder() +pair = IntStrPair(1, "value") +result1 = recorder_pair[*pair] +reveal_type(result1, expected_text="tuple[int, str]") + +recorder_order: Recorder[tuple[int, str]] = Recorder() +tail_value: str = "tail" +result2 = recorder_order[*OneInt(2), tail_value] +reveal_type(result2, expected_text="tuple[int, str]") + +recorder_multi: Recorder[tuple[int, *tuple[int | str, ...]]] = Recorder() +values1: list[int] = [] +values2: list[str] = [] +first_value: int = 0 +result3 = recorder_multi[first_value, *values1, *values2] +reveal_type(result3, expected_text="tuple[int, *tuple[int | str, ...]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/suiteExpectedColon1.py b/python-parser/packages/pyright-internal/src/tests/samples/suiteExpectedColon1.py new file mode 100644 index 00000000..0ed05c9d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/suiteExpectedColon1.py @@ -0,0 +1,4 @@ +if True True + pass + +pass \ No newline at end of file diff --git a/python-parser/packages/pyright-internal/src/tests/samples/suiteExpectedColon2.py b/python-parser/packages/pyright-internal/src/tests/samples/suiteExpectedColon2.py new file mode 100644 index 00000000..24876f2e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/suiteExpectedColon2.py @@ -0,0 +1,2 @@ +# Error recovery should consume to colon inclusive +if True True: pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/suiteExpectedColon3.py b/python-parser/packages/pyright-internal/src/tests/samples/suiteExpectedColon3.py new file mode 100644 index 00000000..629172cb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/suiteExpectedColon3.py @@ 
-0,0 +1,2 @@ +# Error recovery should consume the whole line +if True pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/super1.py b/python-parser/packages/pyright-internal/src/tests/samples/super1.py new file mode 100644 index 00000000..d1513b09 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/super1.py @@ -0,0 +1,97 @@ +# This sample tests the type analyzer's handling of the super() call. + + +from typing import Generic, NamedTuple, TypeVar + +T = TypeVar("T") + + +class ClassA: + @staticmethod + def method1(): + pass + + def method5(self) -> type: + return ClassA + + +class ClassB(ClassA): + def __init__(self): + pass + + def method2(self): + pass + + +class ClassC(ClassA): + def __init__(self): + pass + + def method3(self): + return self.__class__() + + @staticmethod + def aaa(): + # This should generate an error because the zero-arg form + # of super is illegal in a static method. + super().method1() + + +class ClassD(ClassB, ClassC): + def __init__(self): + super().method2() + super().method3() + + # This should generate an error + super().non_method1() + + def method(self): + def inner(): + super().method1() + + +super(ClassD) + +# This should generate an error +super(ClassD).non_method2() + + +super(ClassB, ClassD).method1() + +# This should generate an error because Foo2 +# is not a subclass of Foo1. +super(ClassB, ClassC).method1() + +v1 = ClassD() +super(ClassB, v1).method1() + +v2 = ClassC() +# This should generate an error because Foo2 +# is not a subclass of Foo1. +super(ClassB, v2).method1() + + +class ClassE(ClassA): + def method5(self): + class ClassDInner(super().method5()): + # This should generate an error. 
+ x = super().method5() + + return ClassDInner + + +class ClassF(Generic[T]): + def __init__(self, val: T): + pass + + +class ClassG(ClassF[T]): + def __init__(self, val: T) -> None: + super().__init__(val) + + +class ClassH(NamedTuple("NT1", [("y", int), ("x", int)])): + def method(self, v: tuple[int, int]): + cls = type(self) + v = super().__new__(cls, *v) + return type(self)(self.y + v.y, self.x + v.x) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/super10.py b/python-parser/packages/pyright-internal/src/tests/samples/super10.py new file mode 100644 index 00000000..967908e2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/super10.py @@ -0,0 +1,21 @@ +# This sample tests that super() calls use Self for binding. + + +class A: + def clone(self): + return self + + +class B(A): + def clone(self): + return super().clone() + + +class C(B): + def clone(self): + return super().clone() + + +reveal_type(A().clone(), expected_text="A") +reveal_type(B().clone(), expected_text="B") +reveal_type(C().clone(), expected_text="C") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/super11.py b/python-parser/packages/pyright-internal/src/tests/samples/super11.py new file mode 100644 index 00000000..ae3b4c2a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/super11.py @@ -0,0 +1,40 @@ +# This sample tests the case where a protocol class is used within a mixin +# class method that calls super(). + +from typing import Protocol, overload + + +class MixinProt(Protocol): + def method1(self) -> int: + """This is not implemented""" + raise NotImplementedError + + def method2(self) -> int: + return 1 + + @overload + def method3(self, x: int) -> int: ... + + @overload + def method3(self, x: str) -> str: ... + + @overload + def method4(self, x: int) -> int: ... + + @overload + def method4(self, x: str) -> str: ... 
+ + def method4(self, x: int | str) -> int | str: + return "" + + +class MyMixin: + def get(self: MixinProt) -> None: + m1 = super().method1() + reveal_type(m1, expected_text="int") + + m2 = super().method2() + + m3 = super().method3(1) + + m4 = super().method4(2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/super12.py b/python-parser/packages/pyright-internal/src/tests/samples/super12.py new file mode 100644 index 00000000..a4d20d9a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/super12.py @@ -0,0 +1,15 @@ +# This sample tests the case where a class derives from a protocol +# and calls through super() to the protocol class to a method that is +# not implemented. + +from typing import Protocol + + +class BaseProto(Protocol): + def method1(self) -> None: ... + + +class ProtoImpl(BaseProto): + def method1(self) -> None: + # This should generate an error. + return super().method1() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/super13.py b/python-parser/packages/pyright-internal/src/tests/samples/super13.py new file mode 100644 index 00000000..3b624b52 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/super13.py @@ -0,0 +1,12 @@ +# This sample tests the use of `super` outside of a method. + + +def func1(t: type) -> super: + return super(t, t) + + +class ClassA: + pass + + +func1(ClassA) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/super2.py b/python-parser/packages/pyright-internal/src/tests/samples/super2.py new file mode 100644 index 00000000..50f11b8b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/super2.py @@ -0,0 +1,100 @@ +# This sample tests the handling of the "super" call when +# used with a two-argument form that specifies the "bind to" type. 
+ +from typing import TypeVar + +T = TypeVar("T", bound="A") + + +class A: + def __init__(self, **kw: object) -> None: + pass + + @classmethod + def factoryA(cls: type[T]) -> T: + return cls() + + @classmethod + def get(cls: type[T], key: str) -> T: + return cls() + + +class B(A): + @classmethod + def factoryB(cls): + return super(B, cls).factoryA() + + @classmethod + def get(cls, key: str = ""): + return super(B, cls).get(key) + + +class BChild(B): + pass + + +a1 = A.factoryA() +reveal_type(a1, expected_text="A") + +b1 = B.factoryA() +reveal_type(b1, expected_text="B") + +b2 = B.factoryB() +reveal_type(b2, expected_text="B") + +g1 = B.get() +reveal_type(g1, expected_text="B") + +g2 = BChild.get() +reveal_type(g2, expected_text="BChild") + + +def test_a(cls: type[T]) -> T: + return super(A, cls).__new__(cls) + + +class C: + def __init__(self) -> None: ... + + +class CChild(C): + def __init__(self, name: str) -> None: ... + + +class D: + def __init__(self, name: str, num: int): ... + + +class DChild1(CChild, D): + def __init__(self, name: str, num: int) -> None: + super(C, self).__init__(name, num) + + +class DChild2(CChild, D): + def __init__(self, name: str) -> None: + super(DChild2, self).__init__(name) + + +class DChild3(CChild, D): + def __init__(self) -> None: + super(CChild, self).__init__() + + +d1 = DChild1("", 1) +d2 = DChild2("") +d3 = DChild3() + + +class E: + def __new__(cls) -> "E": + return super(type, cls).__new__(cls) + + +class F: ... + + +class FChild1(F): ... + + +def func1(cls: type[F | FChild1]): + super(F, cls) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/super3.py b/python-parser/packages/pyright-internal/src/tests/samples/super3.py new file mode 100644 index 00000000..be9539a7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/super3.py @@ -0,0 +1,10 @@ +# This sample tests the case where super() is used for a class +# whose base classes are of unknown types. 
+ +from some_module import ClassUnknown # type: ignore + + +class Class1(ClassUnknown): + def __init__(self, x: int): + # This should not generate an error. + super(Class1, self).__init__(x, 1, 2, 3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/super4.py b/python-parser/packages/pyright-internal/src/tests/samples/super4.py new file mode 100644 index 00000000..f3a7eb58 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/super4.py @@ -0,0 +1,37 @@ +# This sample tests the handling of super() with no parameters +# and a base class with an annotated cls or self parameter that +# relies on the subclass being passed as a parameter. + +from typing import Generic, TypeVar + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2", bound="Parent2") + + +class Parent1(Generic[_T1]): + @classmethod + def construct(cls: type[_T1]) -> _T1: + return cls() + + +class Child1(Parent1["Child1"]): + @classmethod + def construct(cls) -> "Child1": + return super().construct() + + +reveal_type(Child1.construct(), expected_text="Child1") + + +class Parent2: + @classmethod + def construct(cls: type[_T2]) -> _T2: ... + + +class Child2(Parent2): + @classmethod + def construct(cls: type[_T2]) -> _T2: + return super().construct() + + +reveal_type(Child2.construct(), expected_text="Child2") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/super5.py b/python-parser/packages/pyright-internal/src/tests/samples/super5.py new file mode 100644 index 00000000..d57526cf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/super5.py @@ -0,0 +1,28 @@ +# This sample tests the case where super() is called from a class or +# instance method where the cls or self parameter is explicitly +# annotated. 
+ +from typing import TypeVar, Type + +A_T = TypeVar("A_T", bound="A") + + +class A: + @classmethod + def construct(cls: Type[A_T]) -> A_T: + return cls() + + def construct2(self: A_T) -> A_T: + return type(self)() + + +B_T = TypeVar("B_T", bound="B") + + +class B(A): + @classmethod + def construct(cls: Type[B_T]) -> B_T: + return super().construct() + + def construct2(self: B_T) -> B_T: + return super().construct2() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/super6.py b/python-parser/packages/pyright-internal/src/tests/samples/super6.py new file mode 100644 index 00000000..aedafcd6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/super6.py @@ -0,0 +1,38 @@ +# This sample tests the case where super().__new__(cls) is called +# and there is an inferred return type based on the cls type. + +from typing import NamedTuple + +FooBase = NamedTuple("FooBase", [("x", int)]) + + +class Foo(FooBase): + def __new__(cls): + obj = super().__new__(cls, x=1) + reveal_type(obj, expected_text="Self@Foo") + return obj + + +f = Foo() +reveal_type(f, expected_text="Foo") + + +class FirstLevelMeta(type): + def __new__(cls, name: str, bases, dct): + new_class = super().__new__(cls, name, bases, dct) + reveal_type(new_class, expected_text="Self@FirstLevelMeta") + return new_class + + +class SecondLevelMeta(FirstLevelMeta): + def __new__(cls, name: str, bases, dct): + new_class = super().__new__(cls, name, bases, dct) + reveal_type(new_class, expected_text="Self@SecondLevelMeta") + return new_class + + +class ThirdLevelMeta(SecondLevelMeta): + def __new__(cls, name: str, bases, dct): + new_class = super().__new__(cls, name, bases, dct) + reveal_type(new_class, expected_text="Self@ThirdLevelMeta") + return new_class diff --git a/python-parser/packages/pyright-internal/src/tests/samples/super7.py b/python-parser/packages/pyright-internal/src/tests/samples/super7.py new file mode 100644 index 00000000..34cae1bf --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/samples/super7.py @@ -0,0 +1,87 @@ +# This sample tests the use of super() with two arguments where the second +# argument is an instance. + +from typing import Generic, TypeVar + + +T = TypeVar("T") + + +class BaseClass: + def my_method(self, value: int) -> int: ... + + +class SubClass(BaseClass): + def method_plain_super(self, value: int) -> int: + reveal_type(super(), expected_text="BaseClass") + return super().my_method(value) + + def method_super(self, value: int) -> int: + reveal_type(super(__class__, self), expected_text="BaseClass") + return super(__class__, self).my_method(value) + + def method_super_extra_arg(self, value: int) -> int: + reveal_type(super(__class__, self), expected_text="BaseClass") + + # This should generate an error because the method is already bound. + return super(__class__, self).my_method(self, value) + + @classmethod + def classmethod_super(cls, value: int) -> int: + self = cls() + reveal_type(super(__class__, self), expected_text="BaseClass") + return super(__class__, self).my_method(value) + + @classmethod + def classmethod_super_extra_arg(cls, value: int) -> int: + self = cls() + reveal_type(super(__class__, self), expected_text="BaseClass") + + # This should generate an error. + return super(__class__, self).my_method(self, value) + + @staticmethod + def staticmethod_super(value: int) -> int: + self = SubClass() + reveal_type(super(__class__, self), expected_text="BaseClass") + + return super(__class__, self).my_method(value) + + @staticmethod + def staticmethod_super_extra_arg(value: int) -> int: + self = SubClass() + reveal_type(super(__class__, self), expected_text="BaseClass") + + # This should generate an error. + return super(__class__, self).my_method(self, value) + + +class A(Generic[T]): ... + + +class B(Generic[T]): ... 
+ + +class C(A[int], B[T]): + pass + + +c = C[str]() +super_obj_c = super(C, c) +reveal_type(super_obj_c, expected_text="A[int]") + +super_obj_a = super(A, c) +reveal_type(super_obj_a, expected_text="B[str]") + +super_obj_b = super(B, c) +reveal_type(super_obj_b, expected_text="object") + + +super_cls_c = super(C, C) +reveal_type(super_cls_c, expected_text="A[int]") + +super_cls_a = super(A, C) +reveal_type(super_cls_a, expected_text="B[Unknown]") + +super_cls_b = super(B, C) +reveal_type(super_cls_b, expected_text="object") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/super8.py b/python-parser/packages/pyright-internal/src/tests/samples/super8.py new file mode 100644 index 00000000..a214f543 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/super8.py @@ -0,0 +1,7 @@ +# This sample tests the case where super() is used within a metaclass +# __init__ method. + + +class Metaclass(type): + def __init__(self, name, bases, attrs): + super().__init__(name, bases, attrs) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/super9.py b/python-parser/packages/pyright-internal/src/tests/samples/super9.py new file mode 100644 index 00000000..9b21ff73 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/super9.py @@ -0,0 +1,34 @@ +# This sample tests the case where a super() call is used with a class +# whose constructor uses a default argument value with a parameter +# whose type is a specialized TypeVar. + +from typing import Generic, TypeVar + +_T = TypeVar("_T") + + +class Foo(Generic[_T]): + def __init__(self, x: _T = 1) -> None: ... + + +class Bar(Foo[int]): ... 
+ + +class Baz(Bar): + def __init__(self) -> None: + super().__init__() + + +class Baz2(Bar): + def __init__(self) -> None: + super().__init__(x=1) + + +class Bar2(Foo[int]): + def __init__(self) -> None: + super().__init__() + + +class Bar3(Foo[int]): + def __init__(self) -> None: + super().__init__(x=1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/test_file1.py b/python-parser/packages/pyright-internal/src/tests/samples/test_file1.py new file mode 100644 index 00000000..e69de29b diff --git a/python-parser/packages/pyright-internal/src/tests/samples/totalOrdering1.py b/python-parser/packages/pyright-internal/src/tests/samples/totalOrdering1.py new file mode 100644 index 00000000..7b9f6489 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/totalOrdering1.py @@ -0,0 +1,72 @@ +# This sample tests the support for functools.total_ordering. + +from functools import total_ordering + + +@total_ordering +class ClassA: + val1: int + + def __gt__(self, other: object) -> bool: ... + + +a = ClassA() +b = ClassA() +v1 = a < b +v2 = a <= b +v3 = a > b +v4 = a >= b +v5 = a == b +v6 = a != b + + +# This should generate an error because it doesn't declare +# any of the required ordering functions. +@total_ordering +class ClassB: + val1: int + + +@total_ordering +class ClassC: + def __eq__(self, other: object) -> bool: + return False + + def __lt__(self, other: "ClassC") -> bool: + return False + + +reveal_type(ClassC() < ClassC(), expected_text="bool") +reveal_type(ClassC() <= ClassC(), expected_text="bool") +reveal_type(ClassC() == ClassC(), expected_text="bool") +reveal_type(ClassC() > ClassC(), expected_text="bool") +reveal_type(ClassC() >= ClassC(), expected_text="bool") + +_ = ClassC() == 1 +_ = ClassC() != 1 + +# The following four lines should each produce an error. 
+_ = ClassC() < 1 +_ = ClassC() <= 1 +_ = ClassC() > 1 +_ = ClassC() >= 1 + + +@total_ordering +class ClassD: + def __init__(self) -> None: + self.value: int = 0 + + def __eq__(self, other: object) -> bool: + if not isinstance(other, ClassD): + return NotImplemented + + reveal_type(other, expected_text="ClassD") + + return self.value == other.value + + def __le__(self, other: object) -> bool: + if not isinstance(other, ClassD): + return NotImplemented + + return self.value <= other.value diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tryExcept1.py b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept1.py new file mode 100644 index 00000000..3f6a27fb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept1.py @@ -0,0 +1,79 @@ +# This sample tests the name binder's handling of +# try/except/raise statements + + +from typing import TypeVar + + +def func1(): + try: + pass + except: + raise + + raise + + +def func2(x, y) -> bool: + try: + z = x / y + except (RuntimeError, NameError) as e: + reveal_type(e, expected_text="RuntimeError | NameError") + return False + except Exception as e: + reveal_type(e, expected_text="Exception") + return False + except: + raise Exception() + else: + return True + + # This should not generate an error + # because this code is unreachable. + return "hello" + + +def func3(): + # This should generate an error because there is no + # except or finally clause. + try: + pass + +class Exception1(BaseException): ... + +base_exceptions = (RuntimeError, NameError) + +class Exception2(*base_exceptions): ... + +def func4(): + try: + pass + except Exception1: + pass + except Exception2: + pass + + +def func5(): + try: + return 1 + # This should generate an error. + except int: + pass + # This should generate an error. + except (NotImplementedError, str): + pass + # This should generate an error. 
+ except [Exception, ValueError]: + pass + except BaseException: + pass + + +T = TypeVar("T", bound=BaseException) + +def func6(*errors: type[T]): + try: + return 1 + except errors as e: + return e diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tryExcept10.py b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept10.py new file mode 100644 index 00000000..15b474a4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept10.py @@ -0,0 +1,13 @@ +# This sample tests that type errors within a finally clause are +# property detected. + + +def func1() -> None: + file = None + try: + raise ValueError() + except Exception: + return None + finally: + # This should generate an error. + file.name diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tryExcept11.py b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept11.py new file mode 100644 index 00000000..35f9bdc9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept11.py @@ -0,0 +1,22 @@ +# This sample tests the case where a variable is initialized before +# a try/except and is referenced within the finally clause. This ensures +# that the "finally gate" logic is reentrant. + + +def func1(): + func2() + + +def func2(): + a = A() + + try: + with open("path"): + return + finally: + a.method1() + + +class A: + def method1(self): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tryExcept12.py b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept12.py new file mode 100644 index 00000000..01a8bdc2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept12.py @@ -0,0 +1,16 @@ +# This sample tests that multi-exception lists are parsed correctly +# based on PEP 758 in Python 3.14. + +def func1(): + try: + pass + # This should generate an error for Python 3.13 or earlier. 
+ except ZeroDivisionError, TypeError: + raise + +def func2(): + try: + pass + # This should generate an error because an "as" clause always requires parens. + except ZeroDivisionError, TypeError as e: + raise diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tryExcept2.py b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept2.py new file mode 100644 index 00000000..6beebf81 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept2.py @@ -0,0 +1,16 @@ +# This sample tests basic handling of nested finally clauses. + + +def func1(i: int) -> None: + pass + + +def func2(): + aaa = 3 + try: + try: + return + finally: + pass + finally: + func1(aaa) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tryExcept3.py b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept3.py new file mode 100644 index 00000000..d265387f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept3.py @@ -0,0 +1,10 @@ +# This sample verifies that the exception type validation +# handles the case where the exception type is a Type[X] object. + +exc: type[Exception] = Exception + + +try: + v = 1 / 0 +except exc: + print("exc") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tryExcept4.py b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept4.py new file mode 100644 index 00000000..5e2246d0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept4.py @@ -0,0 +1,49 @@ +# This sample validates that the exception type provided +# within a raise statement is valid. + +from random import random + + +a: bool = True if random() > 0.5 else False + + +class CustomException1(BaseException): + def __init__(self, code: int): + pass + + +# This should generate an error because CustomException1 +# requires an argument to instantiate. 
+if a or 2 > 1: + raise CustomException1 + +if a or 2 > 1: + raise CustomException1(3) + + +class CustomException2: + pass + + +# This should generate an error because +# the exception doesn't derive from BaseException. +if a or 2 > 1: + raise CustomException2 + + +def func1(x1: type[BaseException], x2: type[BaseException]): + if 2 > 1: + raise x1 from None + + if 2 > 1: + raise x1 from x2 + + if 2 > 1: + # This should generate an error because the exception + # type doesn't derive from BaseException. + raise 1 from x2 + + if 2 > 1: + # This should generate an error because the exception + # type doesn't derive from BaseException. + raise ValueError from 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tryExcept5.py b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept5.py new file mode 100644 index 00000000..d8b12fcd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept5.py @@ -0,0 +1,30 @@ +# This sample tests a try statement with no except clauses +# but a finally clause. + +from typing import Any +import asyncio + + +class MyJob: + async def do_stuff(self): + try: + while True: + await asyncio.sleep(1) + my_var = 3 + finally: + # This should generate an error because + # my_var may be unbound at this point. + print(my_var) + self.cleanup() + + def cleanup(self): + pass + + +async def main(): + c = asyncio.create_task(MyJob().do_stuff()) + await asyncio.sleep(5) + c.cancel() + + +asyncio.run(main()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tryExcept6.py b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept6.py new file mode 100644 index 00000000..30b91dcb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept6.py @@ -0,0 +1,27 @@ +# This sample tests for the proper detection of +# an unbound variable within a finally statement +# in cases where a "bare" exception clause is used +# and not used. 
+ + +def func1(): + try: + _ = "text".index("a") + except: + var = 1 + else: + var = 2 + finally: + print(var) + + +def func2(): + try: + _ = "text".index("a") + except NameError: + var = 1 + else: + var = 2 + finally: + # This should generate a "possibly unbound" error. + print(var) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tryExcept7.py b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept7.py new file mode 100644 index 00000000..7cf6b0b1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept7.py @@ -0,0 +1,17 @@ +# This sample tests the syntax handling for Python 3.11 exception groups +# as described in PEP 654. + + +def func1(): + + try: + pass + + # This should generate an error if using Python 3.10 or earlier. + except* ValueError as e: + reveal_type(e, expected_text="BaseExceptionGroup[ValueError]") + pass + + # This should generate an error if using Python 3.10 or earlier. + except*: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tryExcept8.py b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept8.py new file mode 100644 index 00000000..6ad77b0a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept8.py @@ -0,0 +1,71 @@ +# This sample tests the detection of inaccessible exception handlers. + + +from typing import Union + + +def func1() -> None: + pass + + +def func2(): + try: + func1() + except OSError: + pass + except Exception: + pass + except (): + pass + # This should generate an error. + except PermissionError: + pass + + +def func3(): + try: + func1() + except OSError: + pass + # This should generate an error. + except (PermissionError, ProcessLookupError): + pass + # This should generate an error. 
+ except (PermissionError, ConnectionAbortedError): + pass + + +def func4(): + try: + func1() + except OSError: + pass + except (UnboundLocalError, ConnectionAbortedError): + pass + + +def func5(): + try: + func1() + except OSError: + pass + except: + pass + + +def func6(u: Union[type[Exception], tuple[type[Exception], ...]]): + try: + ... + except ValueError as e: + ... + except u as e: + ... + + +def func7(u: type[Exception]): + try: + ... + except ValueError as e: + ... + except u as e: + ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tryExcept9.py b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept9.py new file mode 100644 index 00000000..d93a4406 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tryExcept9.py @@ -0,0 +1,42 @@ +# This sample tests the case where a finally clause contains some conditional +# logic that narrows the type of an expression. This narrowed type should +# persist after the finally clause. + + +def func1(): + file = None + try: + file = open("test.txt") + except Exception: + return None + finally: + if file: + file.close() + + # This should evaluate to "TextIOWrapper", but the current + # logic is not able to evaluate different types for file + # based on whether it's an exception or non-exception case. 
+ reveal_type(file, expected_text="TextIOWrapper[_WrappedBuffer] | None") + + +def func2(): + file = None + try: + file = open("test.txt") + except Exception: + pass + finally: + if file: + file.close() + + reveal_type(file, expected_text="TextIOWrapper[_WrappedBuffer] | None") + + +def func3(): + file = None + try: + file = open("test.txt") + finally: + pass + + reveal_type(file, expected_text="TextIOWrapper[_WrappedBuffer]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tstring1.py b/python-parser/packages/pyright-internal/src/tests/samples/tstring1.py new file mode 100644 index 00000000..b126b840 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tstring1.py @@ -0,0 +1,22 @@ +# This sample tests error handling for template strings. + + +# This should generate an error if using Python 3.13 or earlier. +t1 = t'Hello {"World"}' + +t2 = tr"""Test""" +t3 = rt"Test\n" + + +# This should generate two errors because tf is not a valid string type. +t4 = tf"{1}" + +# This should generate two errors because ft is not a valid string type. +t5 = ft"{1}" + +# This should generate two errors because tu is not a valid string type. +t6 = tu"{1}" + +# This should generate two errors because ut is not a valid string type. +t7 = ut"{1}" + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tstring2.py b/python-parser/packages/pyright-internal/src/tests/samples/tstring2.py new file mode 100644 index 00000000..0cec4f07 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tstring2.py @@ -0,0 +1,20 @@ +# This sample tests basic template string type functionality. + + +# This should result in an error because x is not defined. 
+t1 = t"Hello {x=}" + +age = 30 +t2 = t'''Age = {age}''' +reveal_type(t2, expected_text="Template") + +t3 = Tr"" +reveal_type(t3, expected_text="Template") + +t4 = "" tR"" T"" r"" RT"""{age}""" """x""" +reveal_type(t4, expected_text="Template") + +t4.strings +t4.interpolations +t4.values + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple1.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple1.py new file mode 100644 index 00000000..46b2cc9e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple1.py @@ -0,0 +1,295 @@ +# This sample file tests various aspects of type analysis for tuples. + +import os +from typing import Any, Callable, Never + +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + +Ts = TypeVarTuple("Ts") + + +def func1() -> tuple[int, int, int]: + a = 1, 2, 3 + + # This should generate an error because + # of a tuple size mismatch. + b, c = a + + b, c, d = a + + # This should generate an error because + # of a tuple size mismatch. + ( + b, + c, + d, + e, + ) = a + + return a + + +def func2() -> tuple[int, int, str]: + a = 1, 2, 3 + + # This should generate an error because the + # item types don't match. + return a + + +def func3() -> tuple[str, ...]: + a = "1", 2, 3 + + # This should generate an error because the + # heterogeneous tuple can't be assigned to + # the homogeneous tuple type. + return a + + +def func4() -> tuple[str, ...]: + a = (1,) + + # This should generate an error because the first + # item in the tuple isn't a string. + return a + + +def func6(): + a = 1, 2, "hello" + a.index("1") + + +def func7(a: tuple) -> tuple[()]: + return () + + +def func7_1(a: tuple): + a.index("1") + + +# Test the tuple specialization code. This +# should generate no error because split should +# be specialized to return a tuple of str values. 
+def func8() -> str: + dirname, fname = os.path.split("dir/file") + return dirname + + +def func9(param1: tuple[int, ...]): + pass + + +def func10() -> tuple[int, ...]: + return ( + 3, + 4, + 5, + ) + + +func9(func10()) +func9((2, 3, 4)) +func9((2,)) + + +# Tests for tuple assignments with unpack expressions. +def func10_1() -> int: + a = (3, 4, 5) + + c, *d = a + if c: + # This should generate an error because + # d should be an iterable type, not compatible + # with the declared return type. + return d + + # This should generate an error because + # there are not enough elements to populate + # the variable h. + e, f, g, h, *i = a + + return e + + +# Tests for tuple assignments with unpack expressions. +def func11() -> float: + b = ("hello", 3, 6.7) + + c, *d = b + if c: + # This should generate an error because + # d should be an iterable type, not compatible + # with the declared return type. + return d + + return 3 + + +# Tests for assignment of tuple list that includes star +# operator both with and without type annotations. +def func12(): + data = ["a", "b"] + data1 = (*map(str.split, data),) + data2: tuple[list[str], ...] = (*map(str.split, data),) + data3 = (*map(str.split, data),) + data4: tuple[list[str], ...] = (*map(str.split, data),) + + +# Tests for index-out-of-range error. +def func13( + a: tuple[int, str], + b: tuple[()], + c: tuple[int, ...], + d: tuple[int] | tuple[str, str] | tuple[int, ...], + e: tuple[int, Unpack[tuple[str, ...]], float], + f: tuple[int, Unpack[Ts], float], + g: tuple[Unpack[Ts]], +): + v1 = a[0] + reveal_type(v1, expected_text="int") + + v2 = a[1] + reveal_type(v2, expected_text="str") + + # This should generate an error. + v3 = a[2] + + # This should generate an error. + v4 = b[0] + + v5 = c[100] + reveal_type(v5, expected_text="int") + + v6 = a[-2] + reveal_type(v6, expected_text="int") + + v7 = a[-1] + reveal_type(v7, expected_text="str") + + # This should generate an error. 
+ v8 = a[-3] + reveal_type(v8, expected_text="int | str") + + v9 = c[-100] + reveal_type(v9, expected_text="int") + + v10 = d[0] + + # This should generate an error. + v11 = d[1] + + # This should generate two errors. + v12 = d[2] + + v13: tuple[()] = () + # This should generate an error. + v13[0] + + v14 = e[0] + reveal_type(v14, expected_text="int") + + v15 = e[1] + reveal_type(v15, expected_text="int | str | float") + + v16 = f[0] + reveal_type(v16, expected_text="int") + + v17 = f[1] + reveal_type(v17, expected_text="int | Union[*Ts@func13] | float") + + v18 = f[-1] + reveal_type(v18, expected_text="float") + + +def func14(): + list1 = [1, 2, 3] + v1 = tuple(list1) + reveal_type(v1, expected_text="tuple[int, ...]") + + +def func15(var: tuple[()]) -> str: + raise NotImplementedError + + +def func16(var: tuple[int, int]) -> str: + raise NotImplementedError + + +def func17(var: tuple[int, ...]) -> str: + raise NotImplementedError + + +f1: Callable[[tuple[int, ...]], str] + +# This should generate an error. +f1 = func15 + +# This should generate an error. +f1 = func16 + +f1 = func17 + + +def func18(a: tuple[int, *tuple[Any, ...], str], b: tuple[Any, ...]): + a1: tuple[int, str] = a + a2: tuple[int, int, str] = a + a3: tuple[int, int, str, str] = a + a4: tuple[int, *tuple[int, ...], str] = a + + # This should generate an error. + a5: tuple[str, int, str, str] = a + + # This should generate an error. + a6: tuple[int, int, str, int] = a + + b1: tuple[()] = b + b2: tuple[int, int, str] = b + b3: tuple[int, *tuple[int, ...], str] = b + + +def func19(a: tuple[int, ...], b: tuple[int, *tuple[int, ...]]): + a1: tuple[*tuple[int, ...]] = a + + # This should generate an error. + a2: tuple[int, *tuple[int, ...]] = a + + # This should generate an error. + a3: tuple[int, *tuple[int, ...], int] = a + + # This should generate an error. + a4: tuple[*tuple[int, ...], int] = a + + b1: tuple[int, ...] 
= b + b2: tuple[int, *tuple[int, ...]] = b + b3: tuple[*tuple[int, ...], int] = b + + # This should generate an error. + b4: tuple[str, *tuple[int, ...]] = b + + # This should generate an error. + b5: tuple[int, int, *tuple[int, ...]] = b + + +def func20(v: tuple[Never]): + # This should generate an error. + x1: tuple[Never] = (1,) + + # This should generate an error. + x2: tuple[Never] = () + + x3: tuple[Never] = v + + +def func21(x: tuple[Any, ...], *args: *Ts) -> tuple[*Ts]: + args = x + return args + + +def func22(x: tuple[*tuple[int, ...], float, str]): + reveal_type(x[0], expected_text="int | float | str") + reveal_type(x[-1], expected_text="str") + reveal_type(x[-2], expected_text="float") + reveal_type(x[-3], expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple10.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple10.py new file mode 100644 index 00000000..fe2a4234 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple10.py @@ -0,0 +1,30 @@ +# This sample tests that inferred types for tuples strip +# literals under the appropriate circumstances. 
+ + +from typing import Literal + + +a1 = (1, 2) +reveal_type(a1, expected_text="tuple[Literal[1], Literal[2]]") + +a2 = list((1, 2)) +reveal_type(a2, expected_text="list[Literal[1, 2]]") + +a3: list[Literal[1]] = list((1,)) +reveal_type(a3, expected_text="list[Literal[1]]") + + +def func1(v1: tuple[Literal[1], ...], v2: tuple[Literal[1]]): + a4 = set(v1) + reveal_type(a4, expected_text="set[Literal[1]]") + + a5 = set(v2) + reveal_type(a5, expected_text="set[Literal[1]]") + + +a6 = (1, "hi") +reveal_type(a6, expected_text="tuple[Literal[1], Literal['hi']]") + +v4 = set(a6) +reveal_type(v4, expected_text="set[Literal[1, 'hi']]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple11.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple11.py new file mode 100644 index 00000000..1fb01643 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple11.py @@ -0,0 +1,27 @@ +# This sample tests the handling of magic methods on +# the tuple class. + +# pyright: strict + + +def func1(t1: tuple[int, ...], t2: tuple[int, ...]) -> bool: + return t1 >= t2 + + +def func2(t1: tuple[int, ...], t2: tuple[str, int]) -> bool: + return t1 < t2 + + +def func3(t1: tuple[int, int], t2: tuple[int, ...]) -> bool: + return t1 > t2 + + +def func4(t1: tuple[int, ...], t2: tuple[str, ...]) -> bool: + # This should generate an error + return t1 <= t2 # pyright: ignore[reportUnknownVariableType] + + +def func5(t1: tuple[str | int, ...]) -> tuple[str | int, ...]: + while len(t1) < 4: + t1 = t1 + (0,) + return t1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple12.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple12.py new file mode 100644 index 00000000..e87eacd0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple12.py @@ -0,0 +1,31 @@ +# This sample tests type inference for tuples that contain unpack +# operators. 
+ + +def func1(a: int, *args: int): + v1 = (a, *args) + reveal_type(v1, expected_text="tuple[int, *tuple[int, ...]]") + + +def func2(a: int, *args: str): + v1 = (a, *args) + reveal_type(v1, expected_text="tuple[int, *tuple[str, ...]]") + + +def func3(a: int, b: str, *args: str): + v1 = (a, b, *(a, b, a), *args, a, *args, b, *(a, b, a)) + reveal_type( + v1, expected_text="tuple[int, str, int, str, int, *tuple[str | int, ...]]" + ) + + +def func4(a: int, b: str, *args: str): + v1 = (b, *args, *(b, a)) + reveal_type(v1, expected_text="tuple[str, *tuple[str, ...], str, int]") + + +def func5(): + a = 3.4 + b = [1, 2, 3] + v1 = (a, *b) + reveal_type(v1, expected_text="tuple[float, *tuple[int, ...]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple13.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple13.py new file mode 100644 index 00000000..73adfc0d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple13.py @@ -0,0 +1,99 @@ +# This sample tests indexing of tuples with slice expressions. 
+ + +from typing import TypeVarTuple + + +def func1(val1: tuple[int, str, None], val2: tuple[int, ...]): + x1 = val1[:2] + reveal_type(x1, expected_text="tuple[int, str]") + + x2 = val1[-3:2] + reveal_type(x2, expected_text="tuple[int, str]") + + x3 = val1[1:] + reveal_type(x3, expected_text="tuple[str, None]") + + x4 = val1[1:-1] + reveal_type(x4, expected_text="tuple[str]") + + x5 = val1[:-2] + reveal_type(x5, expected_text="tuple[int]") + + x6 = val1[0:100] + reveal_type(x6, expected_text="tuple[int, str, None]") + + x7 = val2[:2] + reveal_type(x7, expected_text="tuple[int, ...]") + + x8 = val1[1:3] + reveal_type(x8, expected_text="tuple[str, None]") + + +def func2(val1: tuple[str, *tuple[int, ...], None]): + x1 = val1[:2] + reveal_type(x1, expected_text="tuple[str | int | None, ...]") + + x2 = val1[:1] + reveal_type(x2, expected_text="tuple[str]") + + x3 = val1[1:] + reveal_type(x3, expected_text="tuple[*tuple[int, ...], None]") + + x4 = val1[1:2] + reveal_type(x4, expected_text="tuple[str | int | None, ...]") + + x5 = val1[1:-1] + reveal_type(x5, expected_text="tuple[int, ...]") + + x6 = val1[:-1] + reveal_type(x6, expected_text="tuple[str, *tuple[int, ...]]") + + x7 = val1[:] + reveal_type(x7, expected_text="tuple[str, *tuple[int, ...], None]") + + x8 = val1[2:0] + reveal_type(x8, expected_text="tuple[str | int | None, ...]") + + +Ts = TypeVarTuple("Ts") + + +def func3(val1: tuple[str, *Ts, None]): + x1 = val1[:2] + reveal_type(x1, expected_text="tuple[str | Union[*Ts@func3] | None, ...]") + + x2 = val1[:1] + reveal_type(x2, expected_text="tuple[str]") + + x3 = val1[1:] + reveal_type(x3, expected_text="tuple[*Ts@func3, None]") + + x4 = val1[1:2] + reveal_type(x4, expected_text="tuple[str | Union[*Ts@func3] | None, ...]") + + x5 = val1[1:-1] + reveal_type(x5, expected_text="tuple[*Ts@func3]") + + x6 = val1[:-1] + reveal_type(x6, expected_text="tuple[str, *Ts@func3]") + + x7 = val1[:] + reveal_type(x7, expected_text="tuple[str, *Ts@func3, None]") + + x8 = 
val1[2:0] + reveal_type(x8, expected_text="tuple[str | Union[*Ts@func3] | None, ...]") + + +def func4(val1: tuple[str, int]): + x1 = val1[2:] + reveal_type(x1, expected_text="tuple[()]") + + x2 = val1[-4:] + reveal_type(x2, expected_text="tuple[str, int]") + + x3 = val1[-4:-3] + reveal_type(x3, expected_text="tuple[()]") + + x4 = val1[:-3] + reveal_type(x4, expected_text="tuple[()]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple15.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple15.py new file mode 100644 index 00000000..446cf047 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple15.py @@ -0,0 +1,33 @@ +# This sample tests the special-case handling of the __add__ operator +# when two tuples of known types are added together. + +v1 = () + () +reveal_type(v1, expected_text="tuple[()]") + + +def func1(a: tuple[int, int, int], b: tuple[str, str]): + reveal_type(a + b, expected_text="tuple[int, int, int, str, str]") + + +def func2(a: tuple[int, int, int], b: tuple[str, ...]): + reveal_type(a + b, expected_text="tuple[int, int, int, *tuple[str, ...]]") + + +def func3(a: tuple[int, ...], b: tuple[str, ...]): + reveal_type(a + b, expected_text="tuple[int | str, ...]") + + +def func4(a: tuple[str, *tuple[int, ...]], b: tuple[str, int]): + reveal_type(a + b, expected_text="tuple[str, *tuple[int, ...], str, int]") + + +def func5(input_list): + output_tuple = () + + for _, value in enumerate([]): + if value is None: + output_tuple += (None,) + continue + output_tuple += (input_list[value],) + + return output_tuple diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple16.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple16.py new file mode 100644 index 00000000..68928289 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple16.py @@ -0,0 +1,11 @@ +# This sample tests the handling of bidirectional type inference +# for unions of 
tuples. + +# The following two unions are the same but declared in different orders. +TupleUnion1 = tuple[int, str] | tuple[int, str, dict[str, str | int]] +TupleUnion2 = tuple[int, str, dict[str, str | int]] | tuple[int, str] + +v1: TupleUnion1 = 1, "two", {"hey": "three"} +v2: TupleUnion2 = 1, "two", {"hey": "three"} +v3: TupleUnion1 = 1, "two" +v4: TupleUnion2 = 1, "two" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple17.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple17.py new file mode 100644 index 00000000..6aa75932 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple17.py @@ -0,0 +1,12 @@ +# This sample tests a limiter that prevents infinite recursion +# in the tuple inference logic. + + +def func1(val: int): + t = None + while True: + t = (val or t, val) + val += 1 + if val > 1000: + break + return t diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple18.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple18.py new file mode 100644 index 00000000..c92bc475 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple18.py @@ -0,0 +1,17 @@ +# This sample tests the case where the tuple constructor is called +# explicitly with bidirectional type inference. + +from typing import Any, Iterable + +# This should generate an error. +v1: tuple[float] = tuple([1.0, 2.0]) + +# This should generate an error. +v2: tuple[float] | tuple[float, float] = tuple([1.0, 2.0]) + +v3: tuple[float, ...] 
= tuple([1, 2]) + + +def f(x: Iterable[Any], y: Iterable): + a: tuple[int, int] = tuple(x) + b: tuple[int, int] = tuple(y) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple19.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple19.py new file mode 100644 index 00000000..761c487a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple19.py @@ -0,0 +1,11 @@ +# This sample tests a case where the constraint solver generates a very +# "deep" tuple type. Previously, this caused a hang in the evaluator. + +from typing import Callable + + +def func1[T](c: Callable[[T], T]): ... + + +# This should generate an error, not hang. +func1(lambda v: (v, v)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple2.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple2.py new file mode 100644 index 00000000..e5e60ced --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple2.py @@ -0,0 +1,26 @@ +# This sample file tests various aspects of type analysis for tuples. + +no_args: tuple = () +zero_length: tuple[()] = () +all_ints1: tuple[int, ...] = () +all_ints2: tuple[int, ...] = (1,) +all_ints3: tuple[int, ...] = (1, 3, 4) + +all_ints1 = all_ints2 +all_ints2 = all_ints3 +all_ints3 = all_ints2 + +# This should generate an error. +bad_ellipsis1: tuple[...] + +# This should generate an error. +bad_ellipsis2: tuple[int, int, ...] + +# This should generate an error. +bad_ellipsis3: tuple[int, ..., int] + +# This should generate an error. +bad_ellipsis4: tuple[*tuple[int], ...] + +# This should generate an error. +bad_ellipsis5: tuple[*tuple[int, ...], ...] 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple3.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple3.py new file mode 100644 index 00000000..ffaf610c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple3.py @@ -0,0 +1,37 @@ +# This sample tests the assignment of heterogeneous tuples +# to homogeneous tuple types. + +from typing import Callable + + +def func1(values: tuple[str, ...]): ... + + +# This should generate an error. +func1(("", False)) + +# This should generate an error. +func1((False, "")) + + +def func2(x: tuple[int]) -> None: ... + + +def func3(x: tuple[()]) -> None: ... + + +def func4(x: tuple[int, ...]) -> None: ... + + +c1: Callable[[tuple[int]], None] + +c1 = func2 +c1 = func3 # This should generate an error. +c1 = func4 + + +c2: Callable[[tuple[int, ...]], None] + +c2 = func2 # This should generate an error. +c2 = func3 # This should generate an error. +c2 = func4 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple4.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple4.py new file mode 100644 index 00000000..af783ec3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple4.py @@ -0,0 +1,21 @@ +# This sample tests the translation of a heterogeneous tuple +# into an Iterable. + +from typing import Iterable, TypeVar + +_T = TypeVar("_T") + + +def foo(x: Iterable[_T]) -> Iterable[_T]: + return x + + +def bar(x: Iterable[int | str]): + pass + + +my_tuple = (3, "hello") + +# The type of my_iterable should be Iterable[Union[int, str]]. 
+my_iterable = foo(my_tuple) +bar(my_iterable) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple5.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple5.py new file mode 100644 index 00000000..8b2a4936 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple5.py @@ -0,0 +1,27 @@ +# This sample tests the type checker's handling of +# empty tuples and assignment to empty tuples. + +from typing import Sequence, TypeVar + + +T = TypeVar("T") + +a: tuple[()] = () + +# This should generate an error because the assigned +# tuple has one element, but the destination is +# expecting zero. +b: tuple[()] = (1,) + +# This should generate an error because the assigned +# tuple has zero elements, but the destination is +# expecting two. +c: tuple[int, str] = () + + +def test_seq(x: Sequence[T]) -> Sequence[T]: + return x + + +def func1(t1: tuple[()]): + reveal_type(test_seq(t1), expected_text="Sequence[Never]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple6.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple6.py new file mode 100644 index 00000000..361157ed --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple6.py @@ -0,0 +1,66 @@ +# This sample tests tuple parameter matching for +# cases where an unpack operator is used in the target. + +from typing import Iterable + +a: int +b: int +c: str +d: str +e: Iterable[int] +f: Iterable[str | int] + +# This should generate an error because an unpack +# operator must be within a tuple. +*e = 3, 4, 5, 6 + +(*e,) = 3, 4, 5, 6 + +a, b, *e, c, d = 3, 4, "a", "b" + +a, b, *f, c, d = 3, 4, 5, "a", "b", "c" + +*f, a, b, c, d = 3, 4, "a", "b" +a, *f, b, c, d = 3, 4, "a", "b" +a, b, *f, c, d = 3, 4, "a", "b" +a, b, c, *f, d = 3, 4, "a", "b" +a, b, c, d, *f = 3, 4, "a", "b" + +a, b, c, *f = 3, 2, "" + +# This should generate an error because there are +# not enough source values. 
+*f, a, b, c = 3, 2 +a, *f, b, c = 3, 2 +a, b, *f, c = 3, 2 +a, b, c, *f = 3, 2 + +# This should generate an error because there are +# too many source values. +a, b = 3, 2, 3 + +# This should generate an error because e can't +# accommodate both int and str types. +a, b, *e, c, d = 3, 4, 5, "a", "b", "c" + + +def func1(p1: tuple[str, ...]): + global a, b, c, d + + c, d = p1 + + # This should generate an error because + # p1 is an incompatible type. + a, b = p1 + + c, d, *f = p1 + + +def func2(p1: tuple[str, ...], p2: tuple[str, *tuple[str, ...]]): + () = p1 + (_,) = p1 + (_, _) = p1 + + # This should generate an error. + () = p2 + (_,) = p2 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple7.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple7.py new file mode 100644 index 00000000..09197779 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple7.py @@ -0,0 +1,59 @@ +# This sample tests handling of tuples and tracking +# of specific types within a tuple. + +from typing import Generic, TypeVar, Self + +_T = TypeVar("_T") + + +class ClassA(tuple[int, str, int, _T]): + def __new__(cls) -> Self: ... + + +objA = ClassA[complex]() + +(a, b, c, d) = objA + +aa1: int = a +bb1: str = b +cc1: int = c +dd1: complex = d + +reveal_type(objA[0], expected_text="int") +reveal_type(objA[1], expected_text="str") +reveal_type(objA[2], expected_text="int") +reveal_type(objA[3], expected_text="complex") + +# This should generate an error because the trailing +# comma turns the index value into a tuple. +e = objA[0,] + +for aaa in objA: + print(aaa) + + +class ClassB(tuple[_T, ...]): + def __new__(cls) -> Self: ... 
+ + +objB = ClassB[complex]() + +(x, y, z) = objB + +reveal_type(x, expected_text="complex") +reveal_type(y, expected_text="complex") +reveal_type(z, expected_text="complex") + +xx2: complex = objB[0] +yy2: complex = objB[1] +zz2: complex = objB[2] + + +def func1(lst: list[str] | None) -> None: + for item in lst or (): + reveal_type(item, expected_text="str") + + +class X(Generic[_T]): + def __init__(self): + self._x: tuple[_T, ...] = () diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple8.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple8.py new file mode 100644 index 00000000..48acf2d0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple8.py @@ -0,0 +1,97 @@ +# This sample verifies that the "tuple" type is treated +# analogously to "Tuple" type. + +from typing import Iterable, TypeVar, Self + + +_T = TypeVar("_T") + + +class ClassA(tuple[int, str, int, _T]): + def __new__(cls) -> Self: ... + + +objA = ClassA[complex]() + +(a, b, c, d) = objA + +aa1: int = a +bb1: str = b +cc1: int = c +dd1: complex = d + +aa2: int = objA[0] +bb2: str = objA[1] +cc2: int = objA[2] +dd2: complex = objA[3] + +# These should generate errors because +# these are not the correct types. +aa3: str = a +bb3: complex = b +cc3: str = c +dd3: int = d + +for aaa in objA: + print(aaa) + + +class ClassB(tuple[_T, ...]): + def __new__(cls) -> Self: ... + + +objB = ClassB[complex]() + +(x, y, z) = objB + +xx1: complex = x +yy1: complex = y +zz1: complex = z + +xx2: complex = objB[0] +yy2: complex = objB[1] +zz2: complex = objB[2] + +# These should generate errors because +# these are not the correct types. +xx3: int = x +yy3: int = y +zz3: int = z + +TupleTypeAlias1 = tuple[str, int, float] + +t1_1: TupleTypeAlias1 = ("hi", 2, 3.4) + +# This should generate an error. +t1_2: TupleTypeAlias1 = ("hi", 2) + +# This should generate an error. +t1_3: TupleTypeAlias1 = ("hi", 2.3, 4) + +TupleTypeAlias2 = tuple[str, ...] 
+ +t2_1: TupleTypeAlias2 = ("hi", "", "") +t2_2: TupleTypeAlias2 = () + +# This should generate an error. +t2_3: TupleTypeAlias2 = ("hi", 2) + +TupleTypeAlias3 = tuple[()] + +t3_1: TupleTypeAlias2 = () + +# This should generate an error. +t3_2: TupleTypeAlias2 = (3, 4) + + +T = TypeVar("T") + + +def baz(v: Iterable[T]) -> tuple[T]: ... + + +def qux() -> None: + foo = ["foo"] + quux = baz(foo) + for s in quux: + reveal_type(s, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tuple9.py b/python-parser/packages/pyright-internal/src/tests/samples/tuple9.py new file mode 100644 index 00000000..5b8b5a55 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tuple9.py @@ -0,0 +1,14 @@ +# This sample tests the special-case logic for the "tuple" +# constructor. Rather than generating type "tuple[T]" as +# would be expected from the constructor, we actually +# generate "tuple[T, ...]". + +# pyright: strict + +str_list = ["1", "2", "3"] +left, right = tuple(str_list) + +check1: tuple[str, str] = (left, right) + +# This should generate an error +check2: tuple[str, int] = (left, right) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack1.py b/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack1.py new file mode 100644 index 00000000..c3b2727f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack1.py @@ -0,0 +1,75 @@ +# This sample tests the handling of Unpack[Tuple[...]] as described +# in PEP 646. 
+ +from typing import Union +from typing_extensions import Unpack # pyright: ignore[reportMissingModuleSource] + + +def func1(v1: tuple[int, Unpack[tuple[bool, bool]], str]): + reveal_type(v1, expected_text="tuple[int, bool, bool, str]") + + +def func2(v2: tuple[int, Unpack[tuple[bool, bool]], str, Unpack[tuple[bool, bool]]]): + reveal_type(v2, expected_text="tuple[int, bool, bool, str, bool, bool]") + + +def func3(v3: tuple[int, Unpack[tuple[bool, ...]], str]): + reveal_type(v3, expected_text="tuple[int, *tuple[bool, ...], str]") + + +# This should generate an error because there are multiple unbounded tuples. +def func4(v4: tuple[Unpack[tuple[bool, ...]], ...]): + pass + + +# This should generate an error because there are multiple unbounded tuples. +def func5(v5: tuple[Unpack[tuple[Unpack[tuple[bool, ...]]]], ...]): + pass + + +def func6(v6: tuple[Unpack[tuple[bool]]]): + reveal_type(v6, expected_text="tuple[bool]") + + +def func7(v7: tuple[Unpack[tuple[bool, Unpack[tuple[int, float]]]]]): + reveal_type(v7, expected_text="tuple[bool, int, float]") + + +def func8(v8: tuple[Unpack[tuple[bool, Unpack[tuple[int, ...]]]]]): + reveal_type(v8, expected_text="tuple[bool, *tuple[int, ...]]") + + +# This should generate an error because unpack isn't allowed for simple parameters. +def func9(v9: Unpack[tuple[int, int]]): + pass + + +# This should generate an error because unpack isn't allowed for **kwargs parameters. +def func10(**v10: Unpack[tuple[int, int]]): + pass + + +def func11(*v11: Unpack[tuple[int, ...]]): + pass + + +def func12(*v11: Unpack[tuple[int, int]]): + pass + + +def func13(t: type): + if t is Unpack: + ... + + +def func14( + *args: Unpack[tuple[int]], + other: str, +) -> None: ... + + +func14(1, other="hi") + +# This should generate an error because the second argument +# corresponds to a keyword-only parameter. 
+func14(1, "hi") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack2.py b/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack2.py new file mode 100644 index 00000000..69e4e0d6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack2.py @@ -0,0 +1,55 @@ +# This sample tests the handling of *tuple[...] as described +# in PEP 646. This test is the same as tupleUnpack1.py but +# it uses the * syntax instead of the backward compatibility +# "Unpack" form. + +def func1(v1: tuple[int, *tuple[bool, bool], str]): + reveal_type(v1, expected_text="tuple[int, bool, bool, str]") + + +def func2(v2: tuple[int, *tuple[bool, bool], str, *tuple[bool, bool]]): + reveal_type(v2, expected_text="tuple[int, bool, bool, str, bool, bool]") + + +def func3(v3: tuple[int, *tuple[bool, ...], str]): + reveal_type(v3, expected_text="tuple[int, *tuple[bool, ...], str]") + + +# This should generate an error because there are multiple unbounded tuples. +def func4(v4: tuple[*tuple[bool, ...], ...]): + pass + + +# This should generate an error because there are multiple unbounded tuples. +def func5(v5: tuple[*tuple[*tuple[bool, ...]], ...]): + pass + + +def func6(v6: tuple[*tuple[bool]]): + reveal_type(v6, expected_text="tuple[bool]") + + +def func7(v7: tuple[*tuple[bool, *tuple[int, float]]]): + reveal_type(v7, expected_text="tuple[bool, int, float]") + + +def func8(v8: tuple[*tuple[bool, *tuple[int, ...]]]): + reveal_type(v8, expected_text="tuple[bool, *tuple[int, ...]]") + +# This should generate an error because unpack isn't allowed for simple parameters. +def func9(v9: *tuple[int, int]): + pass + +# This should generate an error because unpack isn't allowed for **kwargs parameters. 
+def func10(**v10: *tuple[int, int]): + pass + +def func11(*v11: *tuple[int, ...]): + pass + +def func12(*v11: *tuple[int, int]): + pass + +def func13(v12: "tuple[str, *tuple[int, ...], str]"): + pass + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack3.py b/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack3.py new file mode 100644 index 00000000..41cfcd2e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack3.py @@ -0,0 +1,27 @@ +# This sample verifies that a unpacked tuple that contains TypeVars +# and is later specialized is honored. + +from typing import Protocol, TypeVar, Callable, Protocol, Any +from typing_extensions import Unpack # pyright: ignore[reportMissingModuleSource] + + +class SupportsSum(Protocol): + def __add__(self, __x: Any) -> Any: ... + + +T = TypeVar("T", bound=SupportsSum) + + +def wrapped_summation(start: T) -> Callable[[Unpack[tuple[T, ...]]], T]: + def inner_func(*values: T): + return sum(values, start=start) + + return inner_func + + +int_sum = wrapped_summation(3) + +reveal_type(int_sum, expected_text="(*tuple[int, ...]) -> int") + +# This should generate an error. +int_sum(3.14) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack4.py b/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack4.py new file mode 100644 index 00000000..c0307f77 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack4.py @@ -0,0 +1,30 @@ +# This sample tests the handling of a TypeVar whose upper bound is +# a tuple when used for an *args parameter. + +from typing import TypeVar, Unpack + + +T = TypeVar("T", bound="tuple[int, ...]") + + +def func1(*args: Unpack[T]) -> tuple[int, ...]: + a, *v = args + + reveal_type(a, expected_text="*T@func1") + + b: int = a + + # This should generate an error. 
+ c: str = a + + reveal_type(v, expected_text="list[*T@func1]") + + return args + + +S = TypeVar("S", bound=list[int]) + + +# This should generate an error. +def func2(*args: Unpack[S]) -> int: + return 0 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack5.py b/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack5.py new file mode 100644 index 00000000..84d2a47e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/tupleUnpack5.py @@ -0,0 +1,29 @@ +# This sample tests cases where an unpacked tuple is used in +# an overload. + + +from typing import Callable, Concatenate, overload + + +@overload +def func1[**P, R](func: Callable[P, R], /, *args: *tuple[()]) -> Callable[P, R]: ... +@overload +def func1[**P, R]( + func: Callable[Concatenate[int, P], R], /, *args: *tuple[int] +) -> Callable[P, R]: ... +@overload +def func1[**P, R]( + func: Callable[Concatenate[int, int, P], R], /, *args: *tuple[int, int] +) -> Callable[P, R]: ... + + +def func1[**P, R](func: Callable[..., R], /, *args: object) -> Callable[..., R]: ... + + +@overload +def func2(*args: *tuple[int]) -> int: ... +@overload +def func2(*args: *tuple[int, int, int]) -> int: ... + + +def func2(*args: *tuple[int, *tuple[int, ...]]) -> int: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/type1.py b/python-parser/packages/pyright-internal/src/tests/samples/type1.py new file mode 100644 index 00000000..0afcac63 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/type1.py @@ -0,0 +1,135 @@ +# This sample tests the handling of type[T] and Type[T]. 
+ +from typing import Any, Callable, Generic, Type, TypeVar + + +def func1(t1: Type, t2: Type[Any], t3: type, t4: type[Any]): + reveal_type(t1.x, expected_text="Unknown") + reveal_type(t2.x, expected_text="Any") + reveal_type(t3.x, expected_text="Unknown") + reveal_type(t4.x, expected_text="Any") + + reveal_type(t1.__name__, expected_text="str") + reveal_type(t2.__name__, expected_text="str") + reveal_type(t3.__name__, expected_text="str") + reveal_type(t4.__name__, expected_text="str") + + reveal_type(t1.__sizeof__, expected_text="() -> int") + reveal_type(t2.__sizeof__, expected_text="() -> int") + reveal_type(t3.__sizeof__, expected_text="() -> int") + reveal_type(t4.__sizeof__, expected_text="() -> int") + + +def func2(t1: Type[object], t2: type[object]): + # This should generate an error. + t1.x + + # This should generate an error. + t2.x + + reveal_type(t1.__name__, expected_text="str") + reveal_type(t2.__name__, expected_text="str") + + reveal_type(t1.__sizeof__, expected_text="(self: object) -> int") + reveal_type(t2.__sizeof__, expected_text="(self: object) -> int") + + +TA1 = Type +reveal_type(TA1, expected_text="type[Type[Unknown]]") + +# This should generate an error. +TA1.x + +TA2 = Type[Any] +reveal_type(TA2, expected_text="type[Type[Any]]") + +# This should generate an error. +TA2.x + +TA3 = type +reveal_type(TA3, expected_text="type[type]") + +# This should generate an error. +TA3.x + +TA4 = type[Any] +reveal_type(TA4, expected_text="type[type[Any]]") + +# This should generate an error. 
+TA4.x + + +def func3(t1: TA1, t2: TA2, t3: TA3, t4: TA4): + reveal_type(t1.x, expected_text="Unknown") + reveal_type(t2.x, expected_text="Any") + reveal_type(t3.x, expected_text="Unknown") + reveal_type(t4.x, expected_text="Any") + + reveal_type(t1.__name__, expected_text="str") + reveal_type(t2.__name__, expected_text="str") + reveal_type(t3.__name__, expected_text="str") + reveal_type(t4.__name__, expected_text="str") + + reveal_type(t1.__sizeof__, expected_text="() -> int") + reveal_type(t2.__sizeof__, expected_text="() -> int") + reveal_type(t3.__sizeof__, expected_text="() -> int") + reveal_type(t4.__sizeof__, expected_text="() -> int") + + +TA5 = Type[object] +TA6 = type[object] + + +def func4(t1: TA5, t2: TA6): + # This should generate an error. + t1.x + + # This should generate an error. + t2.x + + reveal_type(t1.__name__, expected_text="str") + reveal_type(t2.__name__, expected_text="str") + + reveal_type(t1.__sizeof__, expected_text="(self: object) -> int") + reveal_type(t2.__sizeof__, expected_text="(self: object) -> int") + + +T = TypeVar("T") + +TA7 = type[T] +TA8 = Type[T] + + +def func5(t1: TA7[T]) -> T: + return t1() + + +def func6(t1: TA8[T]) -> T: + return t1() + + +reveal_type(func5(int), expected_text="int") +reveal_type(func6(int), expected_text="int") + + +def func7(v: type): + x1: Callable[..., Any] = v + x2: Callable[[int, int], int] = v + x3: object = v + x4: type = v + x5: type[int] = v + x6: type[Any] = v + + +class Class1(Generic[T]): + def method1(self, v: type) -> type[T]: + return v + + +class Class2: + x1: type + x2: type[Any] + + +reveal_type(Class2.x1, expected_text="type") +reveal_type(Class2.x2, expected_text="type[Any]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias1.py new file mode 100644 index 00000000..57df5ff5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias1.py @@ -0,0 +1,67 
@@ +# This sample tests that type aliasing works. + +from typing import Any, Literal + +# Make sure it works with and without forward references. +TupleAlias = tuple["int", int] + +v1: tuple[int, int] +v2: TupleAlias + +v1 = (1, 2) +v2 = (1, 2) + + +AnyAlias = Any + +v3: AnyAlias = 3 + + +class A: + Value1 = Literal[1] + + Value2 = 1 + + +reveal_type(A.Value1, expected_text="type[Literal[1]]") +reveal_type(A.Value2, expected_text="int") + + +def func1(x: A.Value1): + reveal_type(x, expected_text="Literal[1]") + + +Alias1 = Literal[0, 1] + +v4: dict[Alias1, Any] = {} + +if v4: + pass + +v5: list[Alias1] = [] + + +Alias2 = int | str +Alias3 = int +Alias4 = type[int] + + +def func2(x: Alias2): + reveal_type(type(x), expected_text="type[int] | type[str]") + + +def func3(v2: type[Alias2], v3: type[Alias3], v4: type[Alias4]): + reveal_type(v2, expected_text="type[int] | type[str]") + reveal_type(v3, expected_text="type[int]") + reveal_type(v4, expected_text="type[type[int]]") + + +class B: + TA1 = list + + def __init__(self) -> None: + self.val = self.TA1 + + +b = B() +reveal_type(b.val, expected_text="type[list[Unknown]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias10.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias10.py new file mode 100644 index 00000000..cd8aced2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias10.py @@ -0,0 +1,50 @@ +# This sample tests the handling of generic type alias where a type +# argument is not provided. + +# pyright: reportMissingTypeArgument=true + +from typing import Any, Generic, TypeAlias, TypeVar + +_T = TypeVar("_T") + + +class A(Generic[_T]): ... + + +# This should generate an error if reportMissingTypeArgument is enabled. +B: TypeAlias = A + + +v1: B = A() + +# This should generate an error because B is already specialized. +v2: B[int] = A() + +# This should generate an error if reportMissingTypeArgument is enabled. 
+v3: A = A() + + +C = A[str] + + +# This should generate an error because C is already specialized. +v4: C[int] + + +class D(Generic[_T]): + def __getitem__(self, key: Any) -> int: ... + + +D_Alias = D[_T] + +d: D_Alias[Any] = D() +item = d[0] + +x: int = D_Alias[Any]()[0] + +E: TypeAlias = _T + +e1: E[int] = 3 + +# This should generate an error if reportMissingTypeArgument is enabled. +e2: E = 3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias11.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias11.py new file mode 100644 index 00000000..b6340081 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias11.py @@ -0,0 +1,38 @@ +# This sample tests the simple aliasing of a generic class with no +# type arguments. + +from typing import Generic, TypeVar +import collections +from collections import OrderedDict + + +_T = TypeVar("_T") + + +class ClassA(Generic[_T]): + def __init__(self, x: _T): + pass + + +A = ClassA +reveal_type(A(3), expected_text="ClassA[int]") + + +TA1 = collections.OrderedDict +TA2 = OrderedDict + + +TA1[int, int] +TA2[int, int] + +TA3 = TA1 + +TA3[int, int] + + +TA4 = dict | OrderedDict + +# This should generate two errors because the two types in TA4 +# are already specialized. + +x: TA4[int, int] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias12.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias12.py new file mode 100644 index 00000000..5c2d9faf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias12.py @@ -0,0 +1,21 @@ +# This sample tests the handling of a generic type alias that uses +# a union that collapses to a single type when specialized. 
+ +from typing import TypeVar + +V = TypeVar("V") +U = TypeVar("U") + +Alias = V | U + + +def func1(x: Alias[V, V]) -> V: + return x + + +def func2(x: list[Alias[V, V]]) -> list[V]: + return x + + +def func3(x: Alias[int, int]): + reveal_type(x, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias13.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias13.py new file mode 100644 index 00000000..4ed27282 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias13.py @@ -0,0 +1,55 @@ +# This sample tests a complex generic type alias that uses ParamSpecs +# and several layers of nested type aliases. + +from typing import Any, Callable, Concatenate, Coroutine, TypeVar, Union +from typing_extensions import ParamSpec # pyright: ignore[reportMissingModuleSource] + +T = TypeVar("T") +U = TypeVar("U") +P = ParamSpec("P") + + +Method = Callable[Concatenate[T, P], U] +MaybeMethod = Union[Method[T, P, U], Callable[P, U]] +Co = Coroutine[Any, Any, T] +MaybeCo = Union[T, Co[T]] +CoFunc = Callable[P, Co[T]] +CoMethod = Method[T, P, Co[U]] +CoMaybeMethod = Union[CoMethod[T, P, U], CoFunc[P, U]] + + +class D: ... + + +class E(Exception): ... + + +class F: ... + + +DT = TypeVar("DT", bound=D) + +Error = CoMaybeMethod[DT, [F, E], Any] +reveal_type( + Error, + expected_text="type[(DT@Error, F, E) -> Coroutine[Any, Any, Any]] | type[(F, E) -> Coroutine[Any, Any, Any]]", +) + + +class A: ... + + +class B: ... + + +class C: ... 
+ + +BT = TypeVar("BT", bound=B) + + +Something = CoMaybeMethod[A, [BT, C], Any] +reveal_type( + Something, + expected_text="type[(A, BT@Something, C) -> Coroutine[Any, Any, Any]] | type[(BT@Something, C) -> Coroutine[Any, Any, Any]]", +) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias14.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias14.py new file mode 100644 index 00000000..f0d96312 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias14.py @@ -0,0 +1,27 @@ +# This sample tests the case where a generic alias refers to a Callable +# type and the alias is used without type arguments. + +from typing import Callable, ParamSpec, TypeVar + +T = TypeVar("T") +P = ParamSpec("P") + +TA1 = Callable[[T], T] +TA2 = Callable[[T], T] | Callable[P, T] + + +def f1() -> TA1: ... + + +reveal_type(f1(), expected_text="(Unknown) -> Unknown") + + +def f2() -> TA2: ... + + +g2 = f2() +reveal_type( + g2, + expected_text="((Unknown) -> Unknown) | ((...) -> Unknown)", +) +reveal_type(g2(42), expected_text="Unknown") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias15.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias15.py new file mode 100644 index 00000000..0be124dd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias15.py @@ -0,0 +1,23 @@ +# This sample tests the case where a generic type alias is specialized +# with an instantiable class rather than a class instance. 
+ +from typing import TypeVar, Sequence + +T = TypeVar("T", bound=type[Exception]) +MaybeSequence = T | Sequence[T] + + +class HttpError(Exception): + pass + + +def func1(errs: MaybeSequence[type[Exception]]): + pass + + +func1(HttpError) +func1(Exception) + + +def func2(x: MaybeSequence[type[HttpError]]): + reveal_type(x, expected_text="type[HttpError] | Sequence[type[HttpError]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias16.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias16.py new file mode 100644 index 00000000..783e918d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias16.py @@ -0,0 +1,24 @@ +# This sample tests that a generic type alias retains a literal type argument +# when it is specialized. + +from typing import Literal, TypeAlias, TypeVar + +Mode = Literal["read", "write"] +T = TypeVar("T") +Entry: "TypeAlias" = dict[T, int] +Entry2: TypeAlias = dict[Mode, int] + + +def f() -> Entry[Mode]: + return {"read": 0} + + +def g() -> Entry2: + return {"read": 0} + + +def main() -> None: + d1 = f() + reveal_type(d1, expected_text="dict[Literal['read', 'write'], int]") + d2 = g() + reveal_type(d2, expected_text="dict[Literal['read', 'write'], int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias17.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias17.py new file mode 100644 index 00000000..428ecdde --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias17.py @@ -0,0 +1,60 @@ +# This sample tests reporting of type argument count mismatch when +# used with generic type aliases. + +from typing import Callable, ParamSpec, TypeVar, TypeVarTuple, Unpack + + +T1 = TypeVar("T1") +T2 = TypeVar("T2") +P = ParamSpec("P") +Tv1 = TypeVarTuple("Tv1") + +TA1 = dict[T1, T2] + +# This should generate an error if reportMissingTypeArgument is enabled. 
+a1: TA1 +# This should generate an error because of too few type arguments. +a2: TA1[str] +a3: TA1[str, str] +# This should generate an error because of too many type arguments. +a4: TA1[str, str, str] + +TA2 = Callable[P, T1] + +# This should generate an error if reportMissingTypeArgument is enabled. +b1: TA2 +# This should generate an error because of too few type arguments. +b2: TA2[...] +b3: TA2[..., int] +# This should generate an error because of too many type arguments. +b4: TA2[..., int, int] + +TA3 = Callable[P, int] + +# This should generate an error if reportMissingTypeArgument is enabled. +c1: TA3 +c2: TA3[int] +c3: TA3[int, int] +c4: TA3[int, int, int] + + +TA4 = list[T1] | tuple[Unpack[Tv1]] + +# This should generate an error if reportMissingTypeArgument is enabled. +d1: TA4 +d2: TA4[int] +d3: TA4[int, int] +d4: TA4[int, int, int] + + +_T = TypeVar("_T") +TA5 = dict[_T, _T] + +# This should generate an error if reportMissingTypeArgument is enabled +TA6 = TA5 | None + +# This should generate an error if reportMissingTypeArgument is enabled +TA7 = list | str + +# This should generate an error if reportMissingTypeArgument is enabled +TA8 = str | dict diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias18.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias18.py new file mode 100644 index 00000000..b44b3549 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias18.py @@ -0,0 +1,41 @@ +# This sample tests the case where a type alias is used in a class +# declaration. We want to ensure that the variance of type variables +# is compatible with the usage within the type alias. 
+ +from typing import Generic, TypeVar, TypeAlias + +T1 = TypeVar("T1") +T2 = TypeVar("T2", covariant=True) +T3 = TypeVar("T3", contravariant=True) + + +class A(Generic[T1]): + pass + + +A_Alias_1: TypeAlias = A[T2] + +A_Alias_2: TypeAlias = A_Alias_1[T2 | int] + + +# This should generate an error because the variance is incompatible. +class A_1(A_Alias_1[T2]): ... + + +# This should generate an error because the variance is incompatible. +class A_2(A_Alias_2[T2]): ... + + +# This should generate an error because the variance is incompatible. +class A_3(A[T2]): ... + + +class B(Generic[T1, T2]): + pass + + +B_Alias_1 = B[T2, T3] + + +# This should generate an error because the variance is incompatible. +class C(B_Alias_1[T3, T2]): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias19.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias19.py new file mode 100644 index 00000000..2547b43f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias19.py @@ -0,0 +1,4 @@ +# This sample is used to test that TypeAlias can be aliased +# and re-exported. It is used with typeAlias20.py. + +from typing import TypeAlias as TA diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias2.py new file mode 100644 index 00000000..23750034 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias2.py @@ -0,0 +1,26 @@ +# This sample tests that forward references to type aliases work. 
+ +from typing import Any, Union + + +class Base: + @staticmethod + def create(data: dict[str, Any]) -> "Mix": + return A() + + +class A(Base): + pass + + +class B(Base): + pass + + +Mix = Union[A, B] + + +class S: + @staticmethod + def create(data: dict[str, Any]) -> "Mix": + return A() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias20.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias20.py new file mode 100644 index 00000000..5b17d778 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias20.py @@ -0,0 +1,10 @@ +# This sample is used to test that TypeAlias can be aliased +# and re-exported. It is used with typeAlias19.py. + +from .typeAlias19 import TA as TA2 + +TA3 = TA2 + +x: TA2 = dict[str, str] + +y: x = {"": ""} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias21.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias21.py new file mode 100644 index 00000000..ce3799ef --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias21.py @@ -0,0 +1,14 @@ +# This sample tests that a generic type alias can use +# a compatible bound TypeVar. + +from typing import Generic, TypeVar + +D = TypeVar("D", bool, int, float, object) +E = TypeVar("E", bool, int, float, object) + + +class Gen(Generic[D]): + pass + + +GenAlias = Gen[E] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias22.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias22.py new file mode 100644 index 00000000..8b203355 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias22.py @@ -0,0 +1,135 @@ +# This sample tests handling of type aliases. 
+ +from datetime import datetime +from typing import Any, Callable, Generic, TypeVar, Union + + +from typing import TypeVar, Union, Optional + +S = TypeVar("S") + +Response1 = Optional[Union[S, int]] + + +def f1_1() -> Response1[str]: + return None + + +def f1_2() -> Response1[str]: + return "s" + + +def f1_3() -> Response1[float]: + # This should generate an error. + return "s" + + +Response2 = Union[S, int] + + +def f2_1() -> Response2[Any]: + return "s" + + +def f2_2() -> Response2[str]: + return "s" + + +def f2_3() -> Response2[float]: + return 3.4 + + +def f2_4() -> Response2[datetime]: + # This should generate an error + return 3.4 + + +Response3 = Callable[[S], S] + + +def response2(query: str) -> Response3[int]: + return lambda x: x + 2 + + +def response3(query: str) -> Response3[datetime]: + # This should generate an error because datetime doesn't support + + return lambda x: x + 2 + + +Response4 = Union[S, int, str] + + +class Foo1: + pass + + +class Foo2: + pass + + +class Foo3: + pass + + +T = TypeVar("T") + +Response5 = Union[T, Foo1, Foo2] + +# Test nested type aliases +Response6 = Response5[Response4[Foo3]] + + +def f6_1() -> Response6: + return Foo1() + + +def f6_2() -> Response6: + return Foo2() + + +def f6_3() -> Response6: + return Foo3() + + +def f6_4() -> Response6: + return 3 + + +def f6_5() -> Response6: + # This should generate an error + return None + + +class InnerA: + pass + + +class InnerB: + pass + + +T = TypeVar("T", bound=InnerA) + + +class A(Generic[T]): + pass + + +class B: + pass + + +U = Union[A[T], B] + +a: U[InnerA] + +# This should generate an error because InnerB is not +# compatible with the type bound to TypeVar T. +b: U[InnerB] + + +V = Union[A[T], T] + +# This should generate an error because too many type +# arguments are provided. 
+c: V[InnerA, int] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias3.py new file mode 100644 index 00000000..7fb031d4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias3.py @@ -0,0 +1,50 @@ +# This sample tests that type aliases can consist of +# partially-specialized classes that can be further +# specialized. + +# pyright: reportMissingModuleSource=false + +from typing import Callable, Generic, Optional +from typing_extensions import ParamSpec, TypeVar + +T = TypeVar("T") +P = ParamSpec("P") +TStr = TypeVar("TStr", default=str) + +ValidationResult = tuple[bool, Optional[T]] + + +def foo() -> ValidationResult[str]: + return False, "valid" + + +class ClassA(Generic[T]): + def __new__(cls, value: T) -> "ClassA[T]": ... + + +TypeAliasA1 = ClassA[T] + +a1 = ClassA(3.0) +reveal_type(a1, expected_text="ClassA[float]") + +a2 = TypeAliasA1(3) +reveal_type(a2, expected_text="ClassA[Unknown]") + +a3 = TypeAliasA1[int](3) +reveal_type(a3, expected_text="ClassA[int]") + + +TypeAliasA2 = ClassA[TStr] + +# This should generate an error. +b1 = TypeAliasA2(1) + +b2 = TypeAliasA2("") +reveal_type(b2, expected_text="ClassA[str]") + +b3 = TypeAliasA2[float](1.0) +reveal_type(b3, expected_text="ClassA[float]") + +Func = Callable[P, T] +AnyFunc = Func[P, int] +x: AnyFunc[...] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias4.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias4.py new file mode 100644 index 00000000..9b201310 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias4.py @@ -0,0 +1,78 @@ +# This sample tests the handling of the Python 3.9 +# TypeAlias feature as documented in PEP 613. 
+ +import sys +from typing import Type, TypeAlias as TA, Union, cast + +type1: TA = Union[int, str] + +type2: TA = "ClassA" + + +class ClassA: + pass + + +not_a_type = "ClassA" + + +def requires_string(a: str): + pass + + +requires_string(not_a_type) + +# This should generate an error because type2 should +# not be interpreted as a string. +requires_string(type2) + +# This should generate an error because the symbol +# is later declared as a TypeAlias. +my_type3 = int + +# This should generate an error because it is obscured +# by another type alias declaration. +my_type3: "TA" = Union[int, str] + +# This should generate an error because the symbol +# was previously declared as a TypeAlias. +my_type3: TA = int + +# This should generate an error because the expression +# on the RHS evaluates to an object, not a class. +my_type4: TA = 3 + +# This should generate an error because the expression +# on the RHS evaluates to an object, not a class. +my_type5: TA = True + +# This should generate an error because the expression +# on the RHS evaluates to an object, not a class. +my_type7: TA = list() + +# Verify that variables with declarations (other than explicit TypeAlias) +# are not treated as a type alias. +SimpleAlias = int +ExplicitAlias: TA = int +SimpleNonAlias: Type[int] = int + +if sys.version_info > (3, 9): + reveal_type(SimpleAlias, expected_text="type[int]") + reveal_type(ExplicitAlias, expected_text="type[int]") + reveal_type(SimpleNonAlias, expected_text="type[int]") + + +class ClassB: + my_type1: TA = int + + +def func1(): + # This should generate an error because type aliases are allowed + # only in classes or modules. + my_type1: TA = int + + +_Obj = cast(type[object], object) +# This should generate an error because _Obj is a variable, +# which isn't allowed in a TypeAlias statement. 
+Obj: TA = _Obj diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias5.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias5.py new file mode 100644 index 00000000..cc6e670f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias5.py @@ -0,0 +1,58 @@ +# This sample tests type aliases that are unions that include +# TypeVars. + +from datetime import datetime +from typing import IO, Generic, Literal, TypeVar, Union + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") + +MyUnion1 = Union[int, _T1, str, _T2, list[_T1]] + +MyUnion2 = Union[float, datetime] + +# This should generate an error because two type arguments are +# expected, but only one was provided. +MyUnion3 = MyUnion1[MyUnion2] + +MyUnion4 = MyUnion1[MyUnion2, IO] + +# This should generate an error because only two type +# arguments are expected. +MyUnion5 = MyUnion1[MyUnion2, IO, str] + +MyUnion6 = MyUnion1[Literal[0], Literal["a"]] +reveal_type( + MyUnion6, + expected_text="type[int] | type[str] | type[list[Literal[0]]] | type[Literal[0, 'a']]", +) + + +class Foo: + def __int__(self) -> int: + return 0 + + +FooT = TypeVar("FooT", bound=Foo) +FooIsh = Union[int, FooT] + + +class Bar(Foo): + def __int__(self) -> int: + return super().__int__() + 1 + + +v1: FooIsh[Bar] = 42 +v2: FooIsh[Bar] = Bar() + +# This should generate an error. +v3: FooIsh[type[Bar]] = 42 + + +MyTypeAlias = dict[_T1, _T2] + + +class MyClass1(Generic[_T1, _T2]): + # This should generate an error because S and T are bound + # type variables. 
+ MyTypeAlias = dict[_T1, _T2] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias6.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias6.py new file mode 100644 index 00000000..a5b6cd8e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias6.py @@ -0,0 +1,45 @@ +# This sample tests that certain type aliases cannot be used within +# call expressions. + +from typing import Callable, TypeVar + + +T_Union = int | float + +# This should generate an error +T_Union(3) + +T_Callable = Callable[[int], None] + +# This should generate an error +T_Callable(1) + + +T_Type1 = type[int] + +# This should generate an error +T_Type1(object) + +T_Type2 = type +T_Type2(object) + +T_Optional = str | None + +# This should generate an error +T_Optional(3) + + +T_TypeVar = TypeVar("T_TypeVar") + +# This should generate an error +T_TypeVar() + + +I = int + +I(3) + + +T_Tuple2 = tuple[int, ...] + +T_Tuple2([3, 4, 5]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias7.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias7.py new file mode 100644 index 00000000..d3441961 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias7.py @@ -0,0 +1,56 @@ +# This sample tests the handling of generic type aliases that are +# defined in terms of other generic type aliases in a nested manner. 
+ +from typing import Awaitable, Callable, Generic, Literal, TypeAlias, TypeVar + +TSource = TypeVar("TSource") +TError = TypeVar("TError") +TResult = TypeVar("TResult") +TNext = TypeVar("TNext") + + +class Context(Generic[TResult]): + Response: TResult + + +class Result(Generic[TResult, TError]): + def map( + self, mapper: Callable[[Context[TResult]], TResult] + ) -> "Result[TResult, TError]": + return Result() + + +HttpFuncResult = Result[Context[TResult], TError] +HttpFuncResultAsync = Awaitable[Result[Context[TResult], TError]] + +HttpFunc = Callable[ + [Context[TNext]], + HttpFuncResultAsync[TResult, TError], +] + +HttpHandler = Callable[ + [ + HttpFunc[TNext, TResult, TError], + Context[TSource], + ], + HttpFuncResultAsync[TResult, TError], +] + + +async def run_async( + ctx: Context[TSource], + handler: HttpHandler[str, TResult, TError, TSource], +) -> Result[TResult, TError]: + result = Result[TResult, TError]() + + def mapper(x: Context[TResult]) -> TResult: + return x.Response + + return result.map(mapper) + + +T1 = TypeVar("T1", bound=Literal["a", "b", "c"]) +T2 = TypeVar("T2", bound=Literal["b", "c"]) + +TA2: TypeAlias = list[T1] +TA3: TypeAlias = TA2[T2] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias8.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias8.py new file mode 100644 index 00000000..697f9342 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias8.py @@ -0,0 +1,23 @@ +# This sample verifies that a generic type alias with a Callable +# works correctly. + +# pyright: reportInvalidTypeVarUse=false + +from typing import Callable, TypeVar + +T = TypeVar("T") +F = Callable[[T], T] + + +def func1() -> F[T]: + def g(x: T) -> T: ... 
+ + return g + + +func2 = func1() +v1 = func2("foo") +reveal_type(v1, expected_text="str") + +v2 = func2(1) +reveal_type(v2, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAlias9.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias9.py new file mode 100644 index 00000000..53698848 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAlias9.py @@ -0,0 +1,35 @@ +# This sample tests that generic type aliases are properly flagged as +# partially-unknown types if their type arguments are omitted. + +# pyright: reportUnknownParameterType=true, reportMissingTypeArgument=false + +from typing import TypeVar + +T = TypeVar("T") +TA1 = list[T] + + +# This should generate an error because Foo is missing a type argument, +# so the type of `f` is partially unknown. +def func1(f: TA1) -> None: + pass + + +TA2 = TA1 + + +# This should generate an error because Bar doesn't specialize +# Foo appropriately. +def func2(f: TA2) -> None: + pass + + +K = TypeVar("K") +V = TypeVar("V") + +TA3 = dict[K, V] + + +# This should generate two errors because Baz is only partially specialized. +def func3(f: TA3[int]) -> None: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement1.py new file mode 100644 index 00000000..f81a22a6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement1.py @@ -0,0 +1,91 @@ +# This sample tests error cases associated with the "type" statement +# introduced in PEP 695. + +from typing import Any, Callable + + +T1 = 0 + +type TA1[T1] = int + +reveal_type(TA1, expected_text="TypeAliasType") + + +class ClassA[T2]: + type TA2 = int + type TA3 = str + + type TA4 = int + + T2 = 4 + + +T2 = 4 + +type TA5[S1, *S2, **S3] = Callable[S3, S1] | tuple[*S2] + +X1 = TA5[int, tuple[int, str], ...] 
+ +type TA6 = TA5[int, tuple[int, str], ...] + +val1: TA5 +val2: TA6 + +if 1 < 2: + # This should generate an error because it is obscured. + type TA7 = int +else: + type TA7 = int + + +def func1() -> type[int]: ... + + +# This should generate an error because a call expression is not +# allowed in a type alias definition. +type TA8 = func1() + +# This should generate two errors because a tuple and index expression is not +# allowed in a type alias definition. +type TA9 = (int, str, str)[0] + + +type TA10 = int + +# This should generate an error. +TA10.bit_count(1) + +# This should generate an error. +TA10(0) + +list[TA10]() + + +# This should generate an error. +class DerivedInt(TA10): + pass + + +def func2(x: object): + # This should generate an error. + if isinstance(x, TA10): + reveal_type(x) + + +type TA11 = Callable[..., Any] + + +def func3(cb: TA11): + cb() + + +def func4(): + # This should generate an error. + type TA12 = int + + +type TA12[T] = "list[T]" +ta12: TA12[int] = [1, 2, 3] + +# This should generate an error. +type TA13[T] = ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement2.py new file mode 100644 index 00000000..eb0e2c01 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement2.py @@ -0,0 +1,5 @@ +# This sample tests that "type" statements (introduced in PEP 695) +# are illegal prior to Python 3.12. + +# This should generate an error if less than Python 3.12. 
+type TA1[T1] = int diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement3.py new file mode 100644 index 00000000..70ba52f3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement3.py @@ -0,0 +1,18 @@ +# This sample tests the error case where traditional type variables +# are used in a new-style type alias statement introduced in PEP 695. + +from typing import TypeVar + + +V = TypeVar("V") + +# This should generate an error because it combines old and +# new type variables. +type TA1[K] = dict[K, V] + + +T1 = TypeVar("T1") + +# This should generate an error because it uses old type +# variables in a type alias statement. +type TA2 = list[T1] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement4.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement4.py new file mode 100644 index 00000000..988970ab --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement4.py @@ -0,0 +1,40 @@ +# This sample tests the use of recursive (self-referencing) type aliases, +# which are allowed in PEP 695. + +from typing import Callable + + +type TA1[T] = T | list[TA1[T]] + +x1: TA1[int] = 1 +x2: TA1[int] = [1] + +type TA2[S: int, T: str, **P] = Callable[P, T] | list[S] | list[TA2[S, T, P]] + + +# This should generate an error because str isn't compatible with S bound. +x3: TA2[str, str, ...] + +x4: TA2[int, str, ...] + +# This should generate an error because int isn't compatible with T bound. +x5: TA2[int, int, ...] + +x6: TA2[int, str, [int, str]] + +# This should generate an error because it is unresolvable. +type TA3 = TA3 + +# This should generate an error because it is unresolvable. +type TA4[T] = T | TA4[str] + +type TA5[T] = T | list[TA5[T]] + +# This should generate an error because it is unresolvable. 
+type TA6 = "TA7" +type TA7 = TA6 + +type JSONNode = list[JSONNode] | dict[str, JSONNode] | str | float + +# This should generate an error because it is unresolvable. +type TA8[**P] = TA8[P, int] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement5.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement5.py new file mode 100644 index 00000000..c4c28d9e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasStatement5.py @@ -0,0 +1,16 @@ +# This sample tests the interaction between a traditional type alias +# and a PEP 695 type alias. + +from typing import Annotated, TypeVar + + +T = TypeVar("T") + +TA1 = Annotated[T, "metadata"] +TA2 = list[T] + +type TA1_1 = TA1[int] +type TA1_2[T] = TA1[T] + +type TA2_1 = TA2[int] +type TA2_2[T] = TA2[T] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAliasType1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasType1.py new file mode 100644 index 00000000..9e0510cd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasType1.py @@ -0,0 +1,63 @@ +# This sample tests error cases for calls to the TypeAliasType constructor. + +from typing import TypeVar +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeAliasType, +) + +# This should generate an error because arguments are missing. +TA1 = TypeAliasType() + +# This should generate two errors because 1 isn't a legal name or str. +TA2 = TypeAliasType(1, int) + +my_str = "" + +# This should generate an error because my_str isn't a string literal. +TA3 = TypeAliasType(my_str, int) + +# This should generate an error because name doesn't match. +TA4 = TypeAliasType("TA3", int) + +# This should generate an error because it's not part of an assignment statement. +TypeAliasType("TA3", int) + +# This should generate an error because it has an extra argument. 
+TA5 = TypeAliasType("TA5", int, x=3) + +# This should generate an error because it has an extra argument. +TA6 = TypeAliasType("TA6", int, 3) + +# This should generate two errors because type_params is not a tuple. +TA7 = TypeAliasType("TA7", int, type_params=[1]) + +# This should generate two errors because type_params is not a tuple of TypeVars. +TA8 = TypeAliasType("TA8", int, type_params=(int,)) + + +S = TypeVar("S") +T = TypeVar("T") + +# This should generate an error because S is not in scope. +TA9 = TypeAliasType("TA9", list[S], type_params=(T,)) + +my_tuple = (S, T) + +# This should generate two errors because type_params is not a tuple expression. +TA10 = TypeAliasType("TA10", int, type_params=my_tuple) +print(TA10.__value__) + + +TA11 = TypeAliasType("TA11", int) +print(TA11.__value__) + +type TA12 = int | str +print(TA12.__value__) + +# This should generate an error. +TA13 = TypeAliasType("TA13", ...) + + +def func1(): + # This should generate an error. + TA14 = TypeAliasType("TA14", int) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeAliasType2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasType2.py new file mode 100644 index 00000000..57a29954 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeAliasType2.py @@ -0,0 +1,81 @@ +# This sample tests the TypeAliasType constructor. 
+ +from typing import Callable, Generic, ParamSpec, TypeVar, TypeVarTuple +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeAliasType, +) + +T1 = TypeVar("T1") + +S = TypeVar("S", bound=int) +T = TypeVar("T", bound=str) +P = ParamSpec("P") +Ts = TypeVarTuple("Ts") + +TA1 = TypeAliasType("TA1", "T1 | list[TA1[T1]]", type_params=(T1,)) + +x1: TA1[int] = 1 +x2: TA1[int] = [1] + +TA2 = TypeAliasType( + "TA2", + "Callable[P, T] | list[S] | list[TA2[S, T, P]] | tuple[*Ts]", + type_params=(S, T, P, Ts), +) + + +# This should generate an error because str isn't compatible with S bound. +x3: TA2[str, str, ..., int, str] + +x4: TA2[int, str, ..., int, str] + +# This should generate an error because int isn't compatible with T bound. +x5: TA2[int, int, ...] + +x6: TA2[int, str, [int, str], *tuple[int, str, int]] + +# This should generate an error because it is unresolvable. +TA3 = TypeAliasType("TA3", TA3) + +# This should generate an error because it is unresolvable. +TA4 = TypeAliasType("TA4", "T | TA4[str]", type_params=(T,)) + +TA5 = TypeAliasType("TA5", "T | list[TA5[T]]", type_params=(T,)) + +# This should generate an error because it is unresolvable. +TA6 = TypeAliasType("TA6", "TA7") +TA7 = TypeAliasType("TA7", "TA6") + +JSONNode = TypeAliasType( + "JSONNode", "list[JSONNode] | dict[str, JSONNode] | str | float" +) + + +class A(Generic[T1]): + L = TypeAliasType("L", list[T1]) + + +a1: A[int].L = [1, 2, 3] +a2: A[str].L = ["1", "2", "3"] + +# This should generate an error because S is not in scope. +TA8 = TypeAliasType("TA8", list[S]) + + +def identity[T](t: T) -> T: + return t + + +reveal_type(identity(TA1), expected_text="TypeAliasType") + + +class B: + TA9 = TypeAliasType("TA9", T1 | list[T1], type_params=(T1,)) + + +b1: B.TA9[int] + + +# This should generate an error because TA9 refers to itself +# and is not quoted. 
+TA9 = TypeAliasType("TA9", list[TA9]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeCheckOnly1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeCheckOnly1.py new file mode 100644 index 00000000..44a4fa45 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeCheckOnly1.py @@ -0,0 +1,40 @@ +# This sample tests the reporting of a function or class decorated with +# @type_check_only when used in a value expression. + +from __future__ import annotations + +from typing import TYPE_CHECKING, type_check_only + +if TYPE_CHECKING: + from typing import _TypedDict + +a1: function +a2: _TypedDict + +# This should generate an error. +v1 = function + +# This should generate an error, but it doesn't because +# of a typeshed issue. +v2 = isinstance(1, ellipsis) + +# This should generate an error. +v3 = _TypedDict + + +if TYPE_CHECKING: + + class ClassA: + @type_check_only + def method1(self): + pass + + @type_check_only + def func1() -> None: ... + + +# This should generate an error. +ClassA().method1() + +# This should generate an error. +func1() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeForm1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeForm1.py new file mode 100644 index 00000000..d4dc78dc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeForm1.py @@ -0,0 +1,32 @@ +# This sample tests basic usage of the TypeForm special form +# when used as a call. + +# pyright: reportMissingModuleSource=false + +from typing import Annotated +from typing_extensions import TypeForm + +# This should generate an error because TypeForm requires one arg. +t1 = TypeForm() + +# This should generate an error because TypeForm uses positional args only. +t2 = TypeForm(x=int) + +# This should generate an error because TypeForm accepts only one arg. +t3 = TypeForm(int, str) + +# This should generate an error because the type expression is invalid. 
+t4 = TypeForm("int[str]") + + +s1 = TypeForm(int) +reveal_type(s1, expected_text="TypeForm[int]") + +s2 = TypeForm("int | str") +reveal_type(s2, expected_text="TypeForm[int | str]") + +s3 = TypeForm(list["str"]) +reveal_type(s3, expected_text="TypeForm[list[str]]") + +s4 = TypeForm(Annotated[int, "meta"]) +reveal_type(s4, expected_text="TypeForm[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeForm2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeForm2.py new file mode 100644 index 00000000..078bc149 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeForm2.py @@ -0,0 +1,231 @@ +# This sample tests type inference rules for TypeForm. + +# pyright: reportMissingModuleSource=false + +from typing import ( + Annotated, + Any, + Callable, + Concatenate, + Final, + Generic, + Literal, + LiteralString, + Never, + NoReturn, + NotRequired, + Optional, + Required, + Type, + TypeAlias, + TypeGuard, + TypeVar, + Union, + Unpack, +) +import typing as tp +from typing_extensions import TypeForm, TypeIs, ReadOnly + +T = TypeVar("T") + +type TA1 = int | str +type TA2[T] = list[T] | T +TA3: TypeAlias = Annotated[int, "meta"] +TA4 = int | str +type TA5[T] = int + + +def tf[T](x: TypeForm[T]) -> TypeForm[T]: ... 
+ + +def func1(): + t1 = tf(int) + reveal_type(t1, expected_text="TypeForm[int]") + + t2 = tf(int | str) + reveal_type(t2, expected_text="TypeForm[int | str]") + + t3 = tf("int | str") + reveal_type(t3, expected_text="TypeForm[int | str]") + + t5 = tf(Annotated[int, "meta"]) + reveal_type(t5, expected_text="TypeForm[int]") + + t5_alt1 = tf(tp.Annotated[int, "meta"]) + reveal_type(t5_alt1, expected_text="TypeForm[int]") + + t6 = tf(Any) + reveal_type(t6, expected_text="TypeForm[Any]") + + t6_alt1 = tf(tp.Any) + reveal_type(t6_alt1, expected_text="TypeForm[Any]") + + t7 = tf(type[int]) + reveal_type(t7, expected_text="TypeForm[type[int]]") + + t7_alt = tf(type) + reveal_type(t7_alt, expected_text="TypeForm[type]") + + t8 = tf(TA1) + reveal_type(t8, expected_text="TypeForm[int | str]") + + t9 = tf(TA2[str]) + reveal_type(t9, expected_text="TypeForm[list[str] | str]") + + t9_alt = tf(TA2) + reveal_type(t9_alt, expected_text="TypeForm[list[T@TA2] | T@TA2]") + + t10 = tf(TA3) + reveal_type(t10, expected_text="TypeForm[int]") + + t11 = tf(TA4) + reveal_type(t11, expected_text="TypeForm[int | str]") + + t12 = tf(Literal[1, 2, 3]) + reveal_type(t12, expected_text="TypeForm[Literal[1, 2, 3]]") + + t12_alt1 = tf(tp.Literal[1, 2, 3]) + reveal_type(t12_alt1, expected_text="TypeForm[Literal[1, 2, 3]]") + + t13 = tf(Optional[str]) + reveal_type(t13, expected_text="TypeForm[str | None]") + + t13_alt1 = tf(tp.Optional[str]) + reveal_type(t13_alt1, expected_text="TypeForm[str | None]") + + t14 = tf(Union[list[int], str]) + reveal_type(t14, expected_text="TypeForm[list[int] | str]") + + t14_alt1 = tf(tp.Union[list[int], str]) + reveal_type(t14_alt1, expected_text="TypeForm[list[int] | str]") + + t15 = tf(TypeGuard[int]) + reveal_type(t15, expected_text="TypeForm[TypeGuard[int]]") + + t15_alt1 = tf(tp.TypeGuard[int]) + reveal_type(t15_alt1, expected_text="TypeForm[TypeGuard[int]]") + + t16 = tf(TypeIs[str]) + reveal_type(t16, expected_text="TypeForm[TypeIs[str]]") + + t17 = 
tf(Callable[[int], None]) + reveal_type(t17, expected_text="TypeForm[(int) -> None]") + + t17_alt1 = tf(tp.Callable[[int], None]) + reveal_type(t17_alt1, expected_text="TypeForm[(int) -> None]") + + t18 = list + reveal_type(tf(t18), expected_text="TypeForm[list[Unknown]]") + reveal_type(tf(t18[int]), expected_text="TypeForm[list[int]]") + + t19 = tf(list | dict) + reveal_type( + t19, + expected_text="TypeForm[list[Unknown] | dict[Unknown, Unknown]]", + ) + + t20 = tuple + reveal_type(tf(t20), expected_text="TypeForm[tuple[Unknown, ...]]") + reveal_type(tf(t20[()]), expected_text="TypeForm[tuple[()]]") + reveal_type(tf(t20[int, ...]), expected_text="TypeForm[tuple[int, ...]]") + + t21 = tf(tuple[()]) + reveal_type(t21, expected_text="TypeForm[tuple[()]]") + + t22 = tf(tuple[int, *tuple[str, ...], int]) + reveal_type(t22, expected_text="TypeForm[tuple[int, *tuple[str, ...], int]]") + + t23 = tf(TA5) + reveal_type(t23, expected_text="TypeForm[int]") + + t24 = tf(str | None) + reveal_type(t24, expected_text="TypeForm[str | None]") + + t25 = tf(None) + reveal_type(t25, expected_text="TypeForm[None]") + + t26 = tf(LiteralString) + reveal_type(t26, expected_text="TypeForm[LiteralString]") + + +def func2[T](x: T) -> T: + t1 = tf(str | T) + reveal_type(t1, expected_text="TypeForm[str | T@func2]") + + t2 = tf(type[T]) + reveal_type(t2, expected_text="TypeForm[type[T@func2]]") + + return x + + +def func3[**P, R](x: Callable[P, R]) -> Callable[P, R]: + t1 = tf(Callable[Concatenate[int, P], R]) + reveal_type(t1, expected_text="TypeForm[(int, **P@func3) -> R@func3]") + + return x + + +def func4(): + t1 = tf(Never) + reveal_type(t1, expected_text="TypeForm[Never]") + + t1_alt1 = tf(tp.Never) + reveal_type(t1_alt1, expected_text="TypeForm[Never]") + + t2 = tf(NoReturn) + reveal_type(t2, expected_text="TypeForm[NoReturn]") + + t3 = tf(Type[int]) + reveal_type(t3, expected_text="TypeForm[type[int]]") + + t3_alt1 = tf(tp.Type[int]) + reveal_type(t3_alt1, 
expected_text="TypeForm[type[int]]") + + +def func5(): + t1 = tf(TypeForm[int | str]) + reveal_type(t1, expected_text="TypeForm[TypeForm[int | str]]") + + t2 = tf(TypeForm[TypeForm[int | str]]) + reveal_type(t2, expected_text="TypeForm[TypeForm[TypeForm[int | str]]]") + + +def func6(x: T) -> T: + v1: TypeForm[T] = T + v2 = tf(T) + reveal_type(v2, expected_text="TypeForm[T@func6]") + + v3: TypeForm[T | int] = T + v3 = T | int + + v4 = tf(T | int) + reveal_type(v4, expected_text="TypeForm[T@func6 | int]") + + v5: TypeForm[list[T]] = list[T] + + v6 = tf(list[T]) + reveal_type(v6, expected_text="TypeForm[list[T@func6]]") + + return x + + +# These should maybe generage errors, but given +# that the typing spec doesn't say anything about how +# to evaluate the type of a special form when it's used +# in a value expression context, it's not clear. +def func7(): + t1 = tf(Generic) + + t2 = tf(Final) + + t3 = tf(Final[int]) + + t4 = tf(Concatenate[int]) + + t5 = tf(Unpack[int]) + + t6 = tf(Required[int]) + + t7 = tf(NotRequired[int]) + + t8 = tf(ReadOnly[int]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeForm3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeForm3.py new file mode 100644 index 00000000..b70befa8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeForm3.py @@ -0,0 +1,27 @@ +# This sample tests inference behaviors related to TypeForm. 
+ +# pyright: strict + + +def func1(): + return "int | str" + + +reveal_type(func1(), expected_text="Literal['int | str']") + + +def func2(): + return int | str + + +reveal_type(func2(), expected_text="UnionType") + + +v1 = [int | str, str | bytes] +reveal_type(v1, expected_text="list[UnionType]") + +v2 = {int | str, str | bytes} +reveal_type(v2, expected_text="set[UnionType]") + +v3 = {int | str: str | bytes} +reveal_type(v3, expected_text="dict[UnionType, UnionType]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeForm4.py b/python-parser/packages/pyright-internal/src/tests/samples/typeForm4.py new file mode 100644 index 00000000..8a141439 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeForm4.py @@ -0,0 +1,241 @@ +# This sample tests assignability rules for TypeForm types. + +# pyright: reportMissingModuleSource=false + +from typing import ( + Annotated, + Any, + Callable, + Concatenate, + Final, + Generic, + Literal, + LiteralString, + Never, + NewType, + NoReturn, + NotRequired, + Optional, + Required, + Type, + TypeAlias, + TypeGuard, + TypeVar, + Union, + Unpack, +) +import typing +from typing_extensions import ReadOnly, TypeForm, TypeIs + +type TA1 = int | str +type TA2[T] = list[T] | T +TA3: TypeAlias = Annotated[int, "meta"] +TA4 = int | str +type TA5[T] = int + + +def func1(): + t1: TypeForm[int | str] = int + t2: TypeForm[int | str] = "int | str" + t3: TypeForm[Any] = int | str + t4: TypeForm = Annotated[int, "meta"] + t5: TypeForm = Any + t6: TypeForm = type[int] + t7: TypeForm[Any] = type + + t8: TypeForm[TA1] = TA1 + t9: TypeForm[int | str] = TA1 + + t10: TypeForm = TA2[str] + t11: TypeForm[TA2[str]] = TA2[str] + t12: TypeForm[list[Any] | str] = TA2[str] + + t13: TypeForm = TA2 + + t14: TypeForm[int] = TA3 + + t15: TypeForm = TA4 + + t16: TypeForm[int] = Literal[1, 2, 3] + + t17: TypeForm[str | None] = Optional[str] + + t18: TypeForm = Union[list[int], str] + + t19: TypeForm[TypeGuard[Any]] 
= TypeGuard[int] + t20: TypeForm = TypeIs[str] + + t21: TypeForm[Callable] = Callable[[int], None] + + t22: TypeForm[list[Any]] = list + + t24: TypeForm = list | dict + + t25: TypeForm = tuple + + t28: TypeForm = tuple[()] + + t29: TypeForm = tuple[int, *tuple[str, ...], int] + t30: TypeForm = TA5 + + t31: TypeForm = None + t32: TypeForm = None | str + + def get_type() -> TypeForm[int]: + return int + + t33: TypeForm = get_type() + + +def func2[T](x: T) -> T: + t1: TypeForm[str | T | None] = str | T + + t2: TypeForm[Any] = type[T] + + return x + + +def func3[**P, R](x: Callable[P, R]) -> Callable[P, R]: + t1: TypeForm = Callable[Concatenate[int, P], R] + + return x + + +def func4(): + t1: TypeForm[Never] = Never + + t2: TypeForm[Never] = NoReturn + + t3: TypeForm[type[int]] = Type[int] + + +NT1 = NewType("NT1", int) + + +def func5[**P, R](): + t1: TypeForm[LiteralString] = typing.LiteralString + t2: TypeForm = TypeForm[int | str] + t3: TypeForm = "P" + t4: TypeForm = "typing.Callable" + t5: TypeForm = "Union[int, str]" + t6: TypeForm = NT1 + + +T = TypeVar("T") + + +def func6[**P, R](): + # This should generate an error. + t1: TypeForm[int] = Generic + + # This should generate an error. + t2: TypeForm[int] = Final + + # This should generate an error. + t3: TypeForm[int] = Final[int] + + # This should generate an error. + t4: TypeForm[int] = Concatenate[int] + + # This should generate an error. + t5: TypeForm[int] = Unpack[int] + + # This should generate an error. + t6: TypeForm[int] = Required[int] + + # This should generate an error. + t7: TypeForm[int] = NotRequired[int] + + # This should generate an error. + t8: TypeForm[int] = ReadOnly[int] + + var1 = 1 + # This should generate an error. + t9: TypeForm = int | var1 + + # This should generate an error. + t10: TypeForm = "int + str" + + # This should generate an error. + t11: TypeForm = "(int, str)" + + # This should generate an error. + t12: TypeForm = "Q" + + # This should generate an error. 
+ t13: TypeForm = "Callable[P]" + + # This should generate an error. + t14: TypeForm = "Callable[]" + + # This should generate an error. + t15: TypeForm = "typing.Optional" + + # This should generate an error. + t16: TypeForm = "Union[]" + + # This should generate an error. + t17: TypeForm = "Union[int]" + + # This should generate an error. + t18: TypeForm = "Annotated" + + # This should generate an error. + t19: TypeForm = "T" + + # This should generate an error. + t20: TypeForm = "int extra" + + +def func7(): + # This should generate an error. + t1: TypeForm[Literal[""]] = typing.LiteralString + + # This should generate an error. + t2: TypeForm[Never] = int + + # This should generate an error. + t3: TypeForm[type[int]] = int + + # This should generate an error. + t4: TypeForm[int] = type[int] + + +def func8[S, T: type](p1: TypeForm[Any], p2: TypeForm[int | str], p3: TypeForm[S]): + t1: TypeForm = p1 + t2: TypeForm = p2 + t3: TypeForm = p3 + + t4: TypeForm[int | str] = p2 + t5: TypeForm[int | str | None] = p2 + + # This should generate an error. + t6: TypeForm[int] = p2 + + t7: TypeForm[S] = p3 + + # This should generate an error. + t8: TypeForm[T] = p3 + + t9: TypeForm = S + t10: TypeForm = T + + t11: TypeForm[S] = S + t12: TypeForm[T] = T + + +def func9(): + t1: list[TypeForm[int | str]] = ["str | int", str] + t1.append("bool") + + # This should generate an error. + t1.append(complex) + + +def func10[T](x: type[T], y: type[int]): + t1: TypeForm = x + t2: TypeForm[T] = x + + t3: TypeForm = y + t4: TypeForm[int] = y + t5: TypeForm[float] = y diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeForm5.py b/python-parser/packages/pyright-internal/src/tests/samples/typeForm5.py new file mode 100644 index 00000000..e4cb9017 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeForm5.py @@ -0,0 +1,49 @@ +# This sample tests the TypeForm special form when used with type variables. 
+ +# pyright: reportMissingModuleSource=false + +from typing import Literal, LiteralString, TypeGuard, Optional +from typing_extensions import TypeForm, TypeIs + + +def func1[T](x: TypeForm[T]) -> T: ... + + +def func2[S](x: TypeForm[S | None]) -> S: ... + + +def func3[S, T](x: TypeForm[S | T]) -> S: ... + + +def func4[T](x: object, t: TypeForm[T]) -> TypeIs[T]: ... + + +def func5[T](x: TypeForm[T]) -> TypeGuard[type[T]]: ... + + +v1 = func1(int | str) +reveal_type(v1, expected_text="int | str") + +v2 = func1("int | str") +reveal_type(v2, expected_text="int | str") + +v3 = func1(LiteralString) +reveal_type(v3, expected_text="LiteralString") + +v4 = func1(Literal[1, 2, 3]) +reveal_type(v4, expected_text="Literal[1, 2, 3]") + +v5 = func2("Optional[str]") +reveal_type(v5, expected_text="str") + +v6 = func2(int | str | None) +reveal_type(v6, expected_text="int | str") + +v7 = func3(int | str | None) +reveal_type(v7, expected_text="int | str | None") + +v8 = func4(1, int | str | None) +reveal_type(v8, expected_text="TypeIs[int | str | None]") + +v9 = func5(int | str | None) +reveal_type(v9, expected_text="TypeGuard[type[int] | type[str] | type[None]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeForm6.py b/python-parser/packages/pyright-internal/src/tests/samples/typeForm6.py new file mode 100644 index 00000000..bf679f0b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeForm6.py @@ -0,0 +1,62 @@ +# This sample tests the handling of assert_type with TypeForm types. + +# pyright: reportMissingModuleSource=false + +from types import UnionType +from typing import assert_type +from typing_extensions import TypeForm + + +def func1[T](x: T) -> T: + v1 = str + assert_type(v1, type[str]) + + # This should generate an error. + assert_type(v1, TypeForm[str]) + + # This should generate an error. + assert_type(v1, type[str] | type[int]) + + # This should generate an error. 
+ assert_type(v1, TypeForm[str | int]) + + v1_tf: TypeForm[str | int] = str + assert_type(v1_tf, TypeForm[str]) + + # This should generate an error. + assert_type(v1_tf, type[str]) + + return x + + +def func2[T](x: T) -> T: + v2 = str | T + assert_type(v2, UnionType) + + # This should generate an error. + assert_type(v2, TypeForm[str | T]) + + v2_tf: TypeForm[object] = str | T + + # This should generate an error. + assert_type(v2_tf, UnionType) + + assert_type(v2_tf, TypeForm[str | T]) + + return x + + +def func3[T](x: T) -> T: + v3 = list["str | T"] | T + assert_type(v3, UnionType) + + # This should generate an error. + assert_type(v3, TypeForm[list[str | T] | T]) + + v3_tf: TypeForm[object] = list["str | T"] | T + # This should generate an error. + assert_type(v3_tf, UnionType) + + assert_type(v3_tf, TypeForm[list[str | T] | T]) + + return x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeForm7.py b/python-parser/packages/pyright-internal/src/tests/samples/typeForm7.py new file mode 100644 index 00000000..e7af7127 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeForm7.py @@ -0,0 +1,22 @@ +# This sample tests the isinstance type narrowing involving TypeForm types. + +# pyright: reportMissingModuleSource=false + +from types import UnionType +from typing import Any +from typing_extensions import TypeForm + + +def func1(tf1: TypeForm[Any]): + if isinstance(tf1, UnionType): + reveal_type(tf1, expected_text="UnionType") + else: + reveal_type(tf1, expected_text="TypeForm[Any]") + + reveal_type(tf1, expected_text="UnionType | TypeForm[Any]") + + +def func2(): + # This should generate an error. 
+ if isinstance(1, TypeForm): + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeGuard1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeGuard1.py new file mode 100644 index 00000000..99619ceb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeGuard1.py @@ -0,0 +1,132 @@ +# This sample tests the TypeGuard functionality +# that allows user-defined functions to perform +# conditional type narrowing. + +# pyright: reportMissingModuleSource=false + +import os +from typing import Any, Callable, TypeVar + +from typing_extensions import TypeGuard # pyright: ignore[reportMissingModuleSource] + +_T = TypeVar("_T") + + +def is_two_element_tuple(a: tuple[_T, ...]) -> TypeGuard[tuple[_T, _T]]: + return True + + +def func1(a: tuple[int, ...]): + if is_two_element_tuple(a): + reveal_type(a, expected_text="tuple[int, int]") + else: + reveal_type(a, expected_text="tuple[int, ...]") + + +def is_string_list(val: list[Any], allow_zero_entries: bool) -> TypeGuard[list[str]]: + if allow_zero_entries and len(val) == 0: + return True + return all(isinstance(x, str) for x in val) + + +def func2(a: list[str | int]): + if is_string_list(a, True): + reveal_type(a, expected_text="list[str]") + else: + reveal_type(a, expected_text="list[str | int]") + + +# This should generate an error because TypeGuard +# has no type argument. +def bad1(a: int, b: object) -> TypeGuard: + # This is a runtime use of TypeGuard and shouldn't generate an error. + if b is TypeGuard: + return True + return True + + +# This should generate an error because TypeGuard +# has too many type arguments. +def bad2(a: int) -> TypeGuard[str, int]: + return True + + +# This should generate an error because TypeGuard +# does not accept an ellipsis. +def bad3(a: int) -> TypeGuard[...]: + return True + + +# This should generate an error because TypeGuard +# has does not accept a module. 
+def bad4(a: int) -> TypeGuard[os]: + return True + + +def bad5(a: int) -> TypeGuard[int]: + # This should generate an error because only + # bool values can be returned. + return 3 + + +# This should generate an error because a type guard function must +# accept at least one parameter. +def bad6() -> TypeGuard[int]: + return True + + +class ClassA: + # This should generate an error because a type guard function must + # accept at least one parameter. + def method1(self) -> TypeGuard[int]: + return True + + +class IsInt: + def __call__(self, value: Any) -> TypeGuard[int]: + return isinstance(value, int) + + +def func3(x: Any): + i = IsInt() + if i(x): + reveal_type(x, expected_text="int") + + +def is_int(obj: type) -> TypeGuard[type[int]]: ... + + +def func4(typ: type[_T]) -> _T: + if not is_int(typ): + raise Exception("Unsupported type") + + return typ() + + +def takes_int_typeguard(f: Callable[[object], TypeGuard[int]]) -> None: + pass + + +def int_typeguard(val: object) -> TypeGuard[int]: + return isinstance(val, int) + + +def bool_typeguard(val: object) -> TypeGuard[bool]: + return isinstance(val, bool) + + +def str_typeguard(val: object) -> TypeGuard[str]: + return isinstance(val, str) + + +takes_int_typeguard(int_typeguard) +takes_int_typeguard(bool_typeguard) + +# This should generate an error because TypeGuard is covariant. +takes_int_typeguard(str_typeguard) + + +v0 = is_int(int) +v1: bool = v0 +v2: int = v0 +v3 = v0 & v0 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeGuard2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeGuard2.py new file mode 100644 index 00000000..4f22dd92 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeGuard2.py @@ -0,0 +1,54 @@ +# This sample tests the handling of user-defined type guards (PEP 647) +# when they are used as callback functions. 
+ +# pyright: strict, reportMissingModuleSource=false + +from typing import Any, Callable, Sequence, TypeVar, overload +from typing_extensions import TypeGuard # pyright: ignore[reportMissingModuleSource] + + +_T = TypeVar("_T") + + +def cb1(obj: object) -> TypeGuard[int]: ... + + +def cb2(obj: object) -> bool: ... + + +def simple_filter( + val: Sequence[object], fn: Callable[[object], bool] +) -> list[object]: ... + + +@overload +def overloaded_filter( + val: Sequence[object], fn: Callable[[object], TypeGuard[_T]] +) -> Sequence[_T]: ... + + +@overload +def overloaded_filter( + val: Sequence[object], fn: Callable[[object], bool] +) -> Sequence[object]: ... + + +def overloaded_filter( + val: Sequence[object], fn: Callable[[object], Any] +) -> Sequence[Any]: ... + + +x1 = cb1(1) +reveal_type(x1, expected_text="TypeGuard[int]") + +sf1 = simple_filter([], cb1) +reveal_type(sf1, expected_text="list[object]") + +sf2 = simple_filter([], cb2) +reveal_type(sf2, expected_text="list[object]") + +of1 = overloaded_filter([], cb1) +reveal_type(of1, expected_text="Sequence[int]") + +of2 = overloaded_filter([], cb2) +reveal_type(of2, expected_text="Sequence[object]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeGuard3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeGuard3.py new file mode 100644 index 00000000..f1d28a7a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeGuard3.py @@ -0,0 +1,51 @@ +# This sample tests that user-defined TypeGuard can +# be used in an overloaded function. + +from enum import Enum +from typing import Literal, overload +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeGuard, + TypeIs, +) + + +class TypeGuardMode(Enum): + NoTypeGuard = 0 + TypeGuard = 1 + TypeIs = 2 + + +@overload +def is_int(obj: object, mode: Literal[TypeGuardMode.NoTypeGuard]) -> bool: ... 
+ + +@overload +def is_int(obj: object, mode: Literal[TypeGuardMode.TypeGuard]) -> TypeGuard[int]: ... + + +@overload +def is_int(obj: object, mode: Literal[TypeGuardMode.TypeIs]) -> TypeIs[int]: ... + + +def is_int(obj: object, mode: TypeGuardMode) -> bool | TypeGuard[int] | TypeIs[int]: ... + + +def func_no_typeguard(val: int | str): + if is_int(val, TypeGuardMode.NoTypeGuard): + reveal_type(val, expected_text="int | str") + else: + reveal_type(val, expected_text="int | str") + + +def func_typeguard(val: int | str): + if is_int(val, TypeGuardMode.TypeGuard): + reveal_type(val, expected_text="int") + else: + reveal_type(val, expected_text="int | str") + + +def func_typeis(val: int | str): + if is_int(val, TypeGuardMode.TypeIs): + reveal_type(val, expected_text="int") + else: + reveal_type(val, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore1.py new file mode 100644 index 00000000..6983a4c5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore1.py @@ -0,0 +1,10 @@ +# This sample tests the type: ignore for the entire file. + +# type: ignore + +# The "type: ignore" should suppress these errors. +a: int = 3 +b = len(a) + +for for for + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore2.py new file mode 100644 index 00000000..71add07f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore2.py @@ -0,0 +1,20 @@ +# This sample tests the type: ignore for individual lines. 
+ +from typing import Dict + + +a: int = 3 +b = len(a) # type: ignore + +for for for # type: ignore + +c: Dict[str, str] = { + 3: 3, + 'hello': 3, + 3.2: 2.4 +} #type:ignore # something + + + + + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore3.py new file mode 100644 index 00000000..ece4541f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore3.py @@ -0,0 +1,22 @@ +# This sample tests the type: ignore for individual lines. +# It uses a form of ignore syntax that is not part of the +# official PEP 484 spec but is a variant supported by mypy. + +from typing import Dict + + +a: int = 3 +b = len(a) # type: ignore[1424] + +for for for # type: ignore[1424, 244] + +c: Dict[str, str] = { + 3: 3, + 'hello': 3, + 3.2: 2.4 +} #type:ignore[999] # something + + + + + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore4.py b/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore4.py new file mode 100644 index 00000000..99bdae16 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore4.py @@ -0,0 +1,10 @@ +# This sample tests the reportUnnecessaryTypeIgnoreComment diagnostic check +# as applied to the entire file. + +a: str = 3 # type: ignore + +# This should emit an error if reportUnnecessaryTypeComment is enabled +b: str = "" # type: ignore + +# This should emit an error if reportUnnecessaryTypeComment is enabled +c: int = 3 # type: ignore diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore5.py b/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore5.py new file mode 100644 index 00000000..2cfea889 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeIgnore5.py @@ -0,0 +1,7 @@ +# This sample tests the reportUnnecessaryTypeIgnoreComment diagnostic check +# as applied to individual lines. 
+ +# This should generate an error if reportUnnecessaryTypeIgnoreComment is enabled. +# type: ignore + +b: str = "" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeIs1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeIs1.py new file mode 100644 index 00000000..d98c5854 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeIs1.py @@ -0,0 +1,190 @@ +# This sample tests the TypeIs form. + +# pyright: reportMissingModuleSource=false + +from typing import ( + Any, + Callable, + Collection, + Literal, + Mapping, + Sequence, + TypeVar, + Union, + overload, +) + +from typing_extensions import TypeIs + + +def is_str1(val: Union[str, int]) -> TypeIs[str]: + return isinstance(val, str) + + +def func1(val: Union[str, int]): + if is_str1(val): + reveal_type(val, expected_text="str") + else: + reveal_type(val, expected_text="int") + + +def is_true(o: object) -> TypeIs[Literal[True]]: ... + + +def func2(val: bool): + if not is_true(val): + reveal_type(val, expected_text="bool") + else: + reveal_type(val, expected_text="Literal[True]") + + reveal_type(val, expected_text="bool") + + +def is_list(val: object) -> TypeIs[list[Any]]: + return isinstance(val, list) + + +def func3(val: dict[str, str] | list[str] | list[int] | Sequence[int]): + if is_list(val): + reveal_type(val, expected_text="list[str] | list[int] | list[Any]") + else: + reveal_type(val, expected_text="dict[str, str] | Sequence[int]") + + +def func4(val: dict[str, str] | list[str] | list[int] | tuple[int]): + if is_list(val): + reveal_type(val, expected_text="list[str] | list[int]") + else: + reveal_type(val, expected_text="dict[str, str] | tuple[int]") + + +_K = TypeVar("_K") +_V = TypeVar("_V") + + +def is_dict(val: Mapping[_K, _V]) -> TypeIs[dict[_K, _V]]: + return isinstance(val, dict) + + +def func5(val: dict[_K, _V] | Mapping[_K, _V]): + if not is_dict(val): + reveal_type(val, expected_text="Mapping[_K@func5, _V@func5]") + else: + 
reveal_type(val, expected_text="dict[_K@func5, _V@func5]") + + +def is_cardinal_direction(val: str) -> TypeIs[Literal["N", "S", "E", "W"]]: + return val in ("N", "S", "E", "W") + + +def func6(direction: Literal["NW", "E"]): + if is_cardinal_direction(direction): + reveal_type(direction, expected_text="Literal['E']") + else: + reveal_type(direction, expected_text="Literal['NW']") + + +class Animal: ... + + +class Kangaroo(Animal): ... + + +class Koala(Animal): ... + + +T = TypeVar("T") + + +def is_marsupial(val: Animal) -> TypeIs[Kangaroo | Koala]: + return isinstance(val, Kangaroo | Koala) + + +# This should generate an error because list[T] isn't consistent with list[T | None]. +def has_no_nones(val: list[T | None]) -> TypeIs[list[T]]: + return None not in val + + +def takes_int_typeis(f: Callable[[object], TypeIs[int]]) -> None: + pass + + +def int_typeis(val: object) -> TypeIs[int]: + return isinstance(val, int) + + +def bool_typeis(val: object) -> TypeIs[bool]: + return isinstance(val, bool) + + +takes_int_typeis(int_typeis) + +# This should generate an error because TypeIs is invariant. +takes_int_typeis(bool_typeis) + + +def is_two_element_tuple(val: tuple[T, ...]) -> TypeIs[tuple[T, T]]: + return len(val) == 2 + + +def func7(names: tuple[str, ...]): + if is_two_element_tuple(names): + reveal_type(names, expected_text="tuple[str, str]") + else: + reveal_type(names, expected_text="tuple[str, ...]") + + +def is_int(obj: type) -> TypeIs[type[int]]: ... + + +def func8(x: type) -> None: + if is_int(x): + reveal_type(x, expected_text="type[int]") + + +def is_int_list(x: Collection[Any]) -> TypeIs[list[int]]: + raise NotImplementedError + + +def func9(val: Collection[object]) -> None: + if is_int_list(val): + reveal_type(val, expected_text="list[int]") + else: + reveal_type(val, expected_text="Collection[object]") + + +@overload +def func10(v: tuple[int | str, ...], b: Literal[False]) -> TypeIs[tuple[str, ...]]: ... 
+ + +@overload +def func10( + v: tuple[int | str, ...], b: Literal[True] = True +) -> TypeIs[tuple[int, ...]]: ... + + +def func10(v: tuple[int | str, ...], b: bool = True) -> bool: ... + + +v0 = is_int(int) +v1: bool = v0 +v2: int = v0 +v3 = v0 & v0 + + +def is_sequence_of_int(sequence: Sequence) -> TypeIs[Sequence[int]]: + return all(isinstance(x, int) for x in sequence) + + +def func11(v: Sequence[int] | Sequence[str]): + if is_sequence_of_int(v): + reveal_type(v, expected_text="Sequence[int]") + else: + reveal_type(v, expected_text="Sequence[str]") + + +def func12(v: Sequence[int | str] | Sequence[list[Any]]): + if is_sequence_of_int(v): + reveal_type(v, expected_text="Sequence[int]") + else: + reveal_type(v, expected_text="Sequence[int | str] | Sequence[list[Any]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeIs2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeIs2.py new file mode 100644 index 00000000..d964891b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeIs2.py @@ -0,0 +1,35 @@ +# This sample tests the subtyping relationships between TypeIs, TypeGuard, +# and bool. 
+ +# pyright: reportMissingModuleSource=false + +from typing import Callable + +from typing_extensions import TypeGuard, TypeIs + +TypeIsInt = Callable[..., TypeIs[int]] +TypeIsFloat = Callable[..., TypeIs[float]] +BoolReturn = Callable[..., bool] +TypeGuardInt = Callable[..., TypeGuard[int]] + + +def func1(v1: TypeIsInt, v2: TypeIsFloat, v3: BoolReturn, v4: TypeGuardInt): + a1: TypeIsInt = v1 + a2: TypeIsInt = v2 # Should generate an error + a3: TypeIsInt = v3 # Should generate an error + a4: TypeIsInt = v4 # Should generate an error + + b1: TypeIsFloat = v1 # Should generate an error + b2: TypeIsFloat = v2 + b3: TypeIsFloat = v3 # Should generate an error + b4: TypeIsFloat = v4 # Should generate an error + + c1: BoolReturn = v1 + c2: BoolReturn = v2 + c3: BoolReturn = v3 + c4: BoolReturn = v4 + + d1: TypeGuardInt = v1 # Should generate an error + d2: TypeGuardInt = v2 # Should generate an error + d3: TypeGuardInt = v3 # Should generate an error + d4: TypeGuardInt = v4 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeIs3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeIs3.py new file mode 100644 index 00000000..a15c61f4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeIs3.py @@ -0,0 +1,51 @@ +# This sample tests the handling of tuple types when used with TypeIs. 
+ +# pyright: reportMissingModuleSource=false + +from typing_extensions import TypeIs + + +def is_tuple_of_strings(v: tuple[int | str, ...]) -> TypeIs[tuple[str, ...]]: + return all(isinstance(x, str) for x in v) + + +def test1(t: tuple[int]) -> None: + if is_tuple_of_strings(t): + reveal_type(t, expected_text="Never") + else: + reveal_type(t, expected_text="tuple[int]") + + +def test2(t: tuple[str, int]) -> None: + if is_tuple_of_strings(t): + reveal_type(t, expected_text="Never") + else: + reveal_type(t, expected_text="tuple[str, int]") + + +def test3(t: tuple[int | str]) -> None: + if is_tuple_of_strings(t): + reveal_type(t, expected_text="tuple[str]") + else: + reveal_type(t, expected_text="tuple[int | str]") + + +def test4(t: tuple[int | str, int | str]) -> None: + if is_tuple_of_strings(t): + reveal_type(t, expected_text="tuple[str, str]") + else: + reveal_type(t, expected_text="tuple[int | str, int | str]") + + +def test5(t: tuple[int | str, ...]) -> None: + if is_tuple_of_strings(t): + reveal_type(t, expected_text="tuple[str, ...]") + else: + reveal_type(t, expected_text="tuple[int | str, ...]") + + +def test6(t: tuple[str, *tuple[int | str, ...], str]) -> None: + if is_tuple_of_strings(t): + reveal_type(t, expected_text="tuple[str, *tuple[str, ...], str]") + else: + reveal_type(t, expected_text="tuple[str, *tuple[int | str, ...], str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeIs4.py b/python-parser/packages/pyright-internal/src/tests/samples/typeIs4.py new file mode 100644 index 00000000..bc170bdf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeIs4.py @@ -0,0 +1,38 @@ +# This sample tests TypeIs when used with a Callable type. + +# pyright: reportMissingModuleSource=false + +from typing import Callable +from typing_extensions import TypeIs + + +def is_callable(obj: object, /) -> TypeIs[Callable[..., object]]: ... 
+ + +def func1(x: type[int]): + if is_callable(x): + reveal_type(x, expected_text="type[int]") + else: + reveal_type(x, expected_text="Never") + + +def func2[T](x: type[T]): + if is_callable(x): + reveal_type(x, expected_text="type[T@func2]") + else: + reveal_type(x, expected_text="Never") + + +def func3[T](x: type[T] | T): + if is_callable(x): + reveal_type(x, expected_text="type[T@func3] | ((...) -> object)") + else: + reveal_type(x, expected_text="object*") + + +def func4[T](x: T) -> T: + if not is_callable(x): + reveal_type(x, expected_text="object*") + raise ValueError() + reveal_type(x, expected_text="(...) -> object") + return x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing1.py new file mode 100644 index 00000000..cbca8d4d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing1.py @@ -0,0 +1,68 @@ +# This file validates type narrowing that involve +# conditional binary expressions. 
+ +# pyright: reportOptionalMemberAccess=false + +from random import random + + +class ClassA: + def x(self): + return + + +maybe = True + +a = None if maybe else ClassA() +b = None if maybe else ClassA() + +if not a or not b: + a.x() + b.x() +else: + a.x() + b.x() + +if not (not a or not b): + a.x() + b.x() +else: + a.x() + b.x() + +if not a and not b: + # This should be flagged as an error + a.x() + # This should be flagged as an error + b.x() +else: + a.x() + b.x() + +if not (not a and not b): + a.x() + b.x() +else: + # This should be flagged as an error + a.x() + # This should be flagged as an error + b.x() + +if a or b: + a.x() + b.x() +else: + # This should be flagged as an error + a.x() + # This should be flagged as an error + b.x() + + +def func1(a: str, b: str | bool) -> bool: + x: str | bool = a and a in [] + reveal_type(x, expected_text="bool | Literal['']") + + if random() > 0.5: + return (a and a in [""]) or True + else: + return x or True diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing2.py new file mode 100644 index 00000000..1697dc29 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing2.py @@ -0,0 +1,10 @@ +# This sample tests the type narrowing logic for "continue" +# statements within a loop. + + +def func1(args: list[int | None]): + for arg in args: + if arg is None: + continue + + reveal_type(arg.bit_length(), expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing3.py new file mode 100644 index 00000000..9a349f36 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing3.py @@ -0,0 +1,41 @@ +# This sample tests the type analyzer's type narrowing logic +# relating to break and continue statements and while test expressions. 
+ + +def only_int(a: int): + return a < 3 + + +def test_break(): + val1 = None + while True: + if val1 is None: + val1 = 5 + break + else: + val1 = "hello" + + reveal_type(val1, expected_text="Literal[5]") + + +def test_continue(): + bar1 = 1 + my_list: list[int | None] = [None, 3, 5] + for n in my_list: + if n is None: + continue + bar1 = n + + only_int(bar1) + + +def test_while_condition(): + param = 3 + + # This should generate an error because param + # can be a str type at this point. + while only_int(param): + if param: + break + else: + param = "hello" diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing4.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing4.py new file mode 100644 index 00000000..15ea2578 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing4.py @@ -0,0 +1,22 @@ +# This sample tests the type narrowing logic for +# conditional expression involving assignment expressions +# (walrus operator). + +# pyright: strict + + +class C: + def method1(self): + pass + + +def good(b: C | None) -> None: + a = b + if a: + a.method1() + + +def bad(b: C | None) -> None: + if c := b: + c.method1() + b.method1() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing5.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing5.py new file mode 100644 index 00000000..d457bd31 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing5.py @@ -0,0 +1,35 @@ +# This sample tests type narrowing for assignments +# where the source contains Unknown or Any type +# arguments. 
+ +from typing import Any, Generic, TypeVar + + +def func1(struct: dict[Any, Any]): + a1: dict[str, Any] = struct + reveal_type(a1, expected_text="dict[str, Any]") + + +def func2(struct: Any): + a1: dict[Any, str] = struct + reveal_type(a1, expected_text="dict[Any, str]") + + if isinstance(struct, dict): + a2: dict[str, Any] = struct + reveal_type(a2, expected_text="dict[str, Any]") + + +T = TypeVar("T") + + +class A(Generic[T]): ... + + +def func3(val: A[Any]): + x: A[int] = val + reveal_type(x, expected_text="A[int]") + + +def func4(val: A[list[Any]]): + x: A[list[int]] = val + reveal_type(x, expected_text="A[list[int]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing6.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing6.py new file mode 100644 index 00000000..ccb6654f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing6.py @@ -0,0 +1,27 @@ +# This sample verifies that a member access expression whose type +# is narrowed is "reset" when part of the member access expression +# is reassigned. + + +class Class1: + val0: int + + +class Class2: + val1: int + val2: Class1 + + +def func1(a: bool): + foo2: Class2 = Class2() + foo2.val1 = 0 + foo2.val2.val0 = 4 + + reveal_type(foo2.val1, expected_text="Literal[0]") + reveal_type(foo2.val2.val0, expected_text="Literal[4]") + + if a: + foo2 = Class2() + + reveal_type(foo2.val1, expected_text="int") + reveal_type(foo2.val2.val0, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing7.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing7.py new file mode 100644 index 00000000..905bd2db --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing7.py @@ -0,0 +1,69 @@ +# This sample tests type narrowing for index operations. 
+ + +class Foo: + val: list[list[str | None]] = [] + + +def func1(v1: list[complex | None]): + if v1[0] and v1[1]: + reveal_type(v1[0], expected_text="complex") + reveal_type(v1[1], expected_text="complex") + reveal_type(v1[2], expected_text="complex | None") + + v1[0], v1[1] = None, None + reveal_type(v1[0], expected_text="None") + reveal_type(v1[1], expected_text="None") + + v1[0], v1[1] = 1, 2 + reveal_type(v1[0], expected_text="Literal[1]") + reveal_type(v1[1], expected_text="Literal[2]") + + v1 = [] + reveal_type(v1[0], expected_text="complex | None") + + i = 1 + if v1[i]: + reveal_type(v1[i], expected_text="complex | None") + + foo = Foo() + if foo.val[0][2]: + reveal_type(foo.val[0][2], expected_text="str") + reveal_type(foo.val[1][2], expected_text="str | None") + + foo.val = [] + reveal_type(foo.val[0][2], expected_text="str | None") + + if v1[-1]: + reveal_type(v1[-1], expected_text="complex") + + +def func2(v1: list[dict[str, str] | list[str]]): + if isinstance(v1[0], dict): + reveal_type(v1[0], expected_text="dict[str, str]") + reveal_type(v1[1], expected_text="dict[str, str] | list[str]") + + if isinstance(v1[-1], list): + reveal_type(v1[-1], expected_text="list[str]") + + +def func3(): + v1: dict[str, int] = {} + + reveal_type(v1["x1"], expected_text="int") + v1["x1"] = 3 + reveal_type(v1["x1"], expected_text="Literal[3]") + + v1[f"x2"] = 5 + reveal_type(v1["x2"], expected_text="int") + + v1 = {} + reveal_type(v1["x1"], expected_text="int") + + v2: dict[str, dict[str, int]] = {} + + reveal_type(v2["y1"]["y2"], expected_text="int") + v2["y1"]["y2"] = 3 + reveal_type(v2["y1"]["y2"], expected_text="Literal[3]") + v2["y1"] = {} + reveal_type(v2["y1"]["y2"], expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing8.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing8.py new file mode 100644 index 00000000..878dee34 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowing8.py @@ -0,0 +1,25 @@ +# This sample tests type narrowing for expressions that include +# assignment expressions (walrus operators). + + +def func1(v1: int | str, v2: str | None) -> None: + if isinstance(x1 := v1, str): + reveal_type(x1, expected_text="str") + + if (x2 := v2) == "hello": + reveal_type(x2, expected_text="Literal['hello']") + + if x2 := v2: + reveal_type(x2, expected_text="str") + + +class A: + val: bool | None + + def __init__(self, val: bool | None) -> None: + self.val = val + + +def func2(): + if (v1 := A(True)).val: + reveal_type(v1.val, expected_text="Literal[True]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingAssert1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingAssert1.py new file mode 100644 index 00000000..791f4a6e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingAssert1.py @@ -0,0 +1,21 @@ +# This sample exercises the type analyzer's type narrowing +# logic for assert statements. + +condition: bool = True + + +def func1(a: str | int) -> int: + if condition: + # This should generate an error because + # a could be a str. + return a + + assert isinstance(a, int) + + return a + + +def func2(a: str | int) -> int: + # Test the form of "assert" that includes a message string. + assert isinstance(a, int), "Message" + return a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingCallable1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingCallable1.py new file mode 100644 index 00000000..9126c825 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingCallable1.py @@ -0,0 +1,85 @@ +# This sample tests the type engine's narrowing logic for +# callable expressions. 
+ +from typing import Callable, Optional, TypeVar, Union + + +class CallableObj: + def __call__(self, val: int): + return 3 + + +def f(a: int) -> Union[Callable[[int], int], type[int], CallableObj, int]: + if a == 0: + + def h(b: int): + return 3 + + return h + elif a < 40: + return 2 + else: + return int + + +q = f(45) +if callable(q): + w = q(3) + +if not callable(q): + a = q + 3 + + +def g(a: Optional[Callable[[int], int]]): + if callable(a): + a(3) + + +_T1 = TypeVar("_T1", bound=int) + + +def test1(arg: Union[_T1, Callable[[], _T1]]) -> _T1: + if callable(arg): + return arg() + return arg + + +class ClassA: + def bar(self) -> None: + pass + + +def test2(o: ClassA) -> None: + if callable(o): + reveal_type(o, expected_text="<callable subtype of ClassA>") + + # This should generate an error + o.foo() + o.bar() + r1 = o(1, 2, 3) + reveal_type(r1, expected_text="Unknown") + else: + o.bar() + + # This should generate an error + o(1, 2, 3) + + +_T2 = TypeVar("_T2", int, str, Callable[[], int], Callable[[], str]) + + +def test3(v: _T2) -> Union[_T2, int, str]: + if callable(v): + reveal_type(v, expected_text="(() -> int) | (() -> str)") + reveal_type(v(), expected_text="int* | str*") + return v() + else: + reveal_type(v, expected_text="int* | str*") + return v + + +def test4(v: type[int] | object): + if callable(v): + reveal_type(v, expected_text="type[int] | ((...) -> object)") + else: + reveal_type(v, expected_text="object") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingEnum1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingEnum1.py new file mode 100644 index 00000000..852c6e86 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingEnum1.py @@ -0,0 +1,70 @@ +# This sample tests exhaustive type narrowing for enums +# and the use of "Never" and "NoReturn". 
+ +from enum import Enum, Flag +from typing import Literal, NoReturn, Union + + +class SomeEnum(Enum): + value1 = 1 + value2 = 2 + value3 = 3 + + +def assert_never(x: NoReturn) -> NoReturn: + """Used to cause Mypy to catch impossible cases.""" + # https://github.com/python/mypy/issues/6366#issuecomment-560369716 + assert False, "Unhandled type: {}".format(type(x).__name__) + + +def func1(a: SomeEnum): + if a == SomeEnum.value1: + pass + elif a == SomeEnum.value2: + pass + elif a == SomeEnum.value3: + pass + else: + assert_never(a) + + +def func2(val: Literal["a", "b"]): + if val == "a": + pass + elif val == "b": + pass + else: + assert_never(val) + + +def func3(val: Union[str, int]): + if isinstance(val, str): + pass + elif isinstance(val, int): + pass + else: + assert_never(val) + + +def func4(val: Union[str, int]) -> Union[str, int]: + if isinstance(val, str): + return val + elif isinstance(val, int): + return val + else: + # Even though "val" is a Never type at this + # point, it should be assignable to Union[str, int] + # because Never is assignable to any type. + return val + + +class MyFlags(Flag): + V1 = 1 + V2 = 2 + + +def func5(val: MyFlags): + if val == MyFlags.V1 or val == MyFlags.V2: + return + + reveal_type(val, expected_text="MyFlags") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingEnum2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingEnum2.py new file mode 100644 index 00000000..597af89c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingEnum2.py @@ -0,0 +1,77 @@ +# This sample tests the type narrowing logic for +# enum values or False/True that are compared using the +# "is" and "is not" operators. + +from enum import Enum +from typing import Literal, NoReturn, Union + + +class SomeEnum(Enum): + VALUE1 = 1 + VALUE2 = 2 + + +def assert_never(val: NoReturn): ... 
+ + +def func1(a: SomeEnum): + if a is SomeEnum.VALUE1: + pass + elif a is SomeEnum.VALUE2: + pass + else: + assert_never(a) + + +def func2(a: SomeEnum): + if a is SomeEnum.VALUE1: + pass + else: + # This should generate an error because + # a hasn't been narrowed to Never. + assert_never(a) + + +def func3(a: SomeEnum): + if not a is not SomeEnum.VALUE1: + pass + elif not a is not SomeEnum.VALUE2: + pass + else: + assert_never(a) + + +def func4(a: SomeEnum): + if not a is not SomeEnum.VALUE1: + pass + else: + # This should generate an error because + # a hasn't been narrowed to Never. + assert_never(a) + + +def func5(a: Union[str, Literal[False]]) -> str: + if a is False: + return "no" + return a + + +def func6(a: Union[str, Literal[False]]) -> str: + if a is not False: + return a + return "no" + + +def func7(a: Union[str, bool]) -> str: + if a is False: + return "False" + elif a is True: + return "True" + return a + + +def func8(a: object): + if a is SomeEnum.VALUE1 or a is SomeEnum.VALUE2: + reveal_type(a, expected_text="Literal[SomeEnum.VALUE1, SomeEnum.VALUE2]") + else: + reveal_type(a, expected_text="object") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingFalsy1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingFalsy1.py new file mode 100644 index 00000000..d6546113 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingFalsy1.py @@ -0,0 +1,244 @@ +# This sample tests type narrowing for falsey and truthy values. + +from typing import ( + AnyStr, + Iterable, + Literal, + NamedTuple, + NotRequired, + TypeVar, + TypedDict, +) +from enum import Enum, IntEnum + + +class A: ... + + +class B: + def __bool__(self) -> bool: ... + + +class C: + def __bool__(self) -> Literal[False]: ... + + +class D: + def __bool__(self) -> Literal[True]: ... 
+ + +def func1(x: int | list[int] | A | B | C | D | None) -> None: + if x: + reveal_type(x, expected_text="int | list[int] | A | B | D") + else: + reveal_type(x, expected_text="list[int] | B | C | Literal[0] | None") + + +def func2(maybe_int: int | None): + if bool(maybe_int): + reveal_type(maybe_int, expected_text="int") + else: + reveal_type(maybe_int, expected_text="Literal[0] | None") + + +def func3_1(maybe_a: A | None): + if bool(maybe_a): + reveal_type(maybe_a, expected_text="A") + else: + reveal_type(maybe_a, expected_text="None") + + +def func3_2(maybe_a: A | None): + if bool(maybe_a): + reveal_type(maybe_a, expected_text="A") + else: + reveal_type(maybe_a, expected_text="None") + + +def func4(val: Iterable[int]) -> None: + if val: + reveal_type(val, expected_text="Iterable[int]") + else: + reveal_type(val, expected_text="Iterable[int]") + + +def func5(val: tuple[int]) -> None: + if val: + reveal_type(val, expected_text="tuple[int]") + else: + reveal_type(val, expected_text="Never") + + +def func6(val: tuple[int, ...]) -> None: + if val: + reveal_type(val, expected_text="tuple[int, ...]") + else: + reveal_type(val, expected_text="tuple[int, ...]") + + +def func7(val: tuple[()]) -> None: + if val: + reveal_type(val, expected_text="Never") + else: + reveal_type(val, expected_text="tuple[()]") + + +class NT1(NamedTuple): + val: int + + +def func8(val: NT1) -> None: + if val: + reveal_type(val, expected_text="NT1") + else: + reveal_type(val, expected_text="Never") + + +class NT2(NT1): + pass + + +def func9(val: NT2) -> None: + if val: + reveal_type(val, expected_text="NT2") + else: + reveal_type(val, expected_text="Never") + + +class E: + def __init__(self, value: int = 0) -> None: + self.value = value + + def __bool__(self) -> bool: + return self.value >= 0 + + def method(self) -> None: + while not self: + reveal_type(self, expected_text="Self@E") + self.value += 1 + + +def func10(val: AnyStr | None): + return 1 + + +def func11(val: AnyStr | None): + assert 
val + reveal_type(val, expected_text="AnyStr@func11") + + +T = TypeVar("T") + + +def func12(val: T) -> T: + if val: + reveal_type(val, expected_text="T@func12") + else: + reveal_type(val, expected_text="T@func12") + + return val + + +class Enum1(Enum): + A = 0 + + +class Enum2(Enum): + A = 0 + + def __bool__(self) -> Literal[False]: + return False + + +class Enum3(IntEnum): + A = 0 + B = 1 + + +def func13(x: Literal[Enum1.A]): + if x: + reveal_type(x, expected_text="Literal[Enum1.A]") + else: + reveal_type(x, expected_text="Never") + + +def func14(x: Enum1): + if x: + reveal_type(x, expected_text="Enum1") + else: + reveal_type(x, expected_text="Never") + + +def func15(x: Literal[Enum2.A]): + if x: + reveal_type(x, expected_text="Never") + else: + reveal_type(x, expected_text="Literal[Enum2.A]") + + +def func16(x: Enum2): + if x: + reveal_type(x, expected_text="Never") + else: + reveal_type(x, expected_text="Enum2") + + +def func17(x: Enum3): + if x: + reveal_type(x, expected_text="Enum3") + else: + reveal_type(x, expected_text="Enum3") + + +def func18(x: Literal[Enum3.A], y: Literal[Enum3.B]): + if x: + reveal_type(x, expected_text="Never") + else: + reveal_type(x, expected_text="Literal[Enum3.A]") + + if y: + reveal_type(y, expected_text="Literal[Enum3.B]") + else: + reveal_type(y, expected_text="Never") + + +class TD1(TypedDict): + d1: int + + +class TD2(TypedDict): + d1: NotRequired[int] + + +class TD3(TypedDict): + pass + + +def func19(v1: TD1 | None, v2: TD2 | None, v3: TD3 | None): + if v1: + reveal_type(v1, expected_text="TD1") + else: + reveal_type(v1, expected_text="None") + + if v2: + reveal_type(v2, expected_text="TD2") + else: + reveal_type(v2, expected_text="TD2 | None") + + if v2 is not None: + if v2: + reveal_type(v2, expected_text="TD2") + else: + reveal_type(v2, expected_text="TD2") + + v2["d1"] = 1 + + if v2: + reveal_type(v2, expected_text="TD2") + else: + reveal_type(v2, expected_text="Never") + + if v3: + reveal_type(v3, expected_text="TD3") + 
else: + reveal_type(v3, expected_text="TD3 | None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIn1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIn1.py new file mode 100644 index 00000000..de42085d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIn1.py @@ -0,0 +1,226 @@ +# This sample tests type narrowing for the "in" operator. + +from typing import ( + Any, + Callable, + Generic, + Literal, + LiteralString, + ParamSpec, + TypeVar, + TypedDict, +) +import random + + +def func0(x: str | None, y: int | str): + if random.random() < 0.5: + x = None + y = 1 + else: + x = "2" + y = "2" + + if x in ["2"]: + reveal_type(x, expected_text="Literal['2']") + + if y in [1]: + reveal_type(y, expected_text="Literal[1]") + + +def func1(x: int | str | None, y: Literal[1, 2, "b"], b: int): + if x in (1, 2, "a"): + reveal_type(x, expected_text="Literal[1, 2, 'a']") + + if x in (1, "2"): + reveal_type(x, expected_text="Literal[1, '2']") + + if x in (1, None): + reveal_type(x, expected_text="Literal[1] | None") + + if x in (1, b, "a"): + reveal_type(x, expected_text="int | Literal['a']") + + if y in (1, b, "a"): + reveal_type(y, expected_text="Literal[1, 2]") + + if y in (1, "a"): + reveal_type(y, expected_text="Literal[1]") + + if y in (1, "b"): + reveal_type(y, expected_text="Literal[1, 'b']") + + +def func2(a: Literal[1, 2, 3]): + x = (1, 2) + if a in x: + reveal_type(a, expected_text="Literal[1, 2]") + else: + reveal_type(a, expected_text="Literal[3]") + + +def func3(val: str | None, container: frozenset[str]): + if val in container: + reveal_type(val, expected_text="str") + else: + reveal_type(val, expected_text="str | None") + + +def func4(val: str | None, container: list[str]): + if val not in container: + reveal_type(val, expected_text="str | None") + else: + reveal_type(val, expected_text="str") + + +def func5(x: str | None, y: int | None, z: dict[str, str]): + if 
x in z: + reveal_type(x, expected_text="str") + else: + reveal_type(x, expected_text="str | None") + + if y not in z: + reveal_type(y, expected_text="int | None") + else: + reveal_type(y, expected_text="Never") + + +def func6(x: type): + if x in (str, int, float, bool): + reveal_type(x, expected_text="type[str] | type[int] | type[float] | type[bool]") + else: + reveal_type(x, expected_text="type") + + +def func7(x: object | bytes, y: str, z: int): + if x in (y, z): + reveal_type(x, expected_text="object") + else: + reveal_type(x, expected_text="object | bytes") + reveal_type(x, expected_text="object | bytes") + + +def func8(x: object): + if x in ("a", "b", 2, None): + reveal_type(x, expected_text="Literal['a', 'b', 2] | None") + + +def func9(x: Literal["A", "B", "C", None, True]): + if x in (None, "B", True): + reveal_type(x, expected_text="Literal['B', True] | None") + else: + reveal_type(x, expected_text="Literal['A', 'C']") + if x not in ("A", "C"): + reveal_type(x, expected_text="Never") + else: + reveal_type(x, expected_text="Literal['A', 'C']") + + if x in ("A", "B"): + reveal_type(x, expected_text="Literal['B', 'A']") + else: + reveal_type(x, expected_text="Literal[True, 'C'] | None") + + +def func10(x: Literal["A", "B"], y: tuple[Literal["A"], ...]): + if x in y: + reveal_type(x, expected_text="Literal['A']") + else: + reveal_type(x, expected_text="Literal['A', 'B']") + + +class TD1(TypedDict): + x: str + + +class TD2(TypedDict): + y: str + + +T1 = TypeVar("T1", TD1, TD2) + + +def func12(v: T1): + if "x" in v: + # This should technically be TD1* | TD2*, but the + # current narrowing logic implements a not-entirely-safe + # narrowing behavior. We can fix this once PEP 728 + # is accepted. 
+ reveal_type(v, expected_text="TD1*") + # reveal_type(v, expected_text="TD1* | TD2*") + else: + reveal_type(v, expected_text="TD2*") + + +P = ParamSpec("P") + + +class Container(Generic[P]): + def __init__(self, func: Callable[P, str]) -> None: + self.func = func + + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> str: + if "data" in kwargs: + raise ValueError("data is not allowed in kwargs") + + return self.func(*args, **kwargs) + + +T13 = TypeVar("T13") + + +def func13(x: type[T13]) -> type[T13]: + if x in (str, int, float): + reveal_type(x, expected_text="type[str]* | type[int]* | type[float]*") + + return x + + +def func14(x: str, y: dict[Any, Any]): + if x in y: + reveal_type(x, expected_text="str") + + +def func15(x: Any, y: dict[str, str]): + if x in y: + reveal_type(x, expected_text="Any") + + +def func16(x: int, y: list[Literal[0, 1]]): + if x in y: + reveal_type(x, expected_text="Literal[0, 1]") + + +def func17(x: Literal[-1, 0], y: list[Literal[0, 1]]): + if x in y: + reveal_type(x, expected_text="Literal[0]") + + +def func18(x: Literal[0, 1, 2], y: list[Literal[0, 1]]): + if x in y: + reveal_type(x, expected_text="Literal[0, 1]") + + +def func19(x: float, y: list[int]): + if x in y: + reveal_type(x, expected_text="float") + + +def func20(x: float, y: list[Literal[0, 1]]): + if x in y: + reveal_type(x, expected_text="Literal[0, 1]") + + +def func21(x: int, y: list[Literal[0, True]]): + if x in y: + reveal_type(x, expected_text="Literal[0, True]") + + +def func22(x: bool, y: list[Literal[0, 1]]): + if x in y: + reveal_type(x, expected_text="bool") + + +def func23[T: LiteralString](x: str, y: tuple[T, ...]) -> T: + if x in y: + return x + raise ValueError(f"Invalid value {x!r}") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIn2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIn2.py new file mode 100644 index 00000000..2810c553 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIn2.py @@ -0,0 +1,25 @@ +# This sample tests type narrowing for Enums using the "in" operator. + +import enum + + +class MyEnum(enum.Enum): + A = enum.auto() + B = enum.auto() + C = enum.auto() + + +def func1(x: MyEnum): + if x is MyEnum.C: + return + elif x in (MyEnum.A, MyEnum.B): + reveal_type(x, expected_text="Literal[MyEnum.A, MyEnum.B]") + else: + reveal_type(x, expected_text="Never") + + +def func2(x: MyEnum): + if x in (MyEnum.A, MyEnum.B): + reveal_type(x, expected_text="Literal[MyEnum.A, MyEnum.B]") + else: + reveal_type(x, expected_text="Literal[MyEnum.C]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsClass1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsClass1.py new file mode 100644 index 00000000..71633b92 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsClass1.py @@ -0,0 +1,69 @@ +# This sample tests type narrowing for conditional +# statements of the form X is <class> or X is not <class>. + +from typing import Any, TypeVar, final + + +@final +class A: ... + + +@final +class B: ... + + +class C: ... 
+ + +def func1(x: type[A] | type[B] | None | int): + if x is A: + reveal_type(x, expected_text="type[A]") + else: + reveal_type(x, expected_text="type[B] | int | None") + + +def func2(x: type[A] | type[B] | None | int, y: type[A]): + if x is not y: + reveal_type(x, expected_text="type[B] | int | None") + else: + reveal_type(x, expected_text="type[A]") + + +def func3(x: type[A] | type[B] | Any): + if x is A: + reveal_type(x, expected_text="type[A]") + else: + reveal_type(x, expected_text="type[B] | Any") + + +def func4(x: type[A] | type[B] | type[C]): + if x is C: + reveal_type(x, expected_text="type[C]") + else: + reveal_type(x, expected_text="type[A] | type[B] | type[C]") + + +T = TypeVar("T") + + +def func5(x: type[A] | type[B] | type[T]) -> type[A] | type[B] | type[T]: + if x is A: + reveal_type(x, expected_text="type[A] | type[A]*") + else: + reveal_type(x, expected_text="type[B] | type[T@func5]") + + return x + + +def func6(x: type): + if x is str: + reveal_type(x, expected_text="type[str]") + else: + reveal_type(x, expected_text="type") + + +def func7(x: type[A | B]): + if x is A: + reveal_type(x, expected_text="type[A]") + else: + reveal_type(x, expected_text="type[B]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsEllipsis1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsEllipsis1.py new file mode 100644 index 00000000..910e72ef --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsEllipsis1.py @@ -0,0 +1,50 @@ +# This sample tests the type analyzer's type narrowing logic for +# conditions of the form "X is ...", "X is not ...", +# "X == .." and "X != ...". 
+ +import types +from typing import Any, TypeVar + +_T = TypeVar("_T", str, types.EllipsisType) + + +def func1(val: int | ellipsis): + if val is not ...: + reveal_type(val, expected_text="int") + else: + reveal_type(val, expected_text="EllipsisType") + + +def func2(val: _T): + if val is ...: + reveal_type(val, expected_text="EllipsisType*") + else: + reveal_type(val, expected_text="str*") + + +def func3(val: int | types.EllipsisType): + if val != ...: + reveal_type(val, expected_text="int") + else: + reveal_type(val, expected_text="EllipsisType") + + +def func4(val: int | ellipsis): + if not val == ...: + reveal_type(val, expected_text="int") + else: + reveal_type(val, expected_text="EllipsisType") + + +def func5(val: object): + if val is ...: + reveal_type(val, expected_text="EllipsisType") + else: + reveal_type(val, expected_text="object") + + +def func6(val: Any | types.EllipsisType): + if val is not ...: + reveal_type(val, expected_text="Any") + else: + reveal_type(val, expected_text="EllipsisType") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsNone1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsNone1.py new file mode 100644 index 00000000..9a5219a8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsNone1.py @@ -0,0 +1,115 @@ +# This sample tests the type analyzer's type narrowing logic for +# conditions of the form "X is None", "X is not None", +# "X == None" and "X != None". 
+ +# pyright: strict, reportUnusedVariable=false + +from typing import Any, Literal, Protocol, Self, TypeVar + + +def func1(x: int | None): + if x is not None: + x.bit_length() + + if x != None: + x.bit_length() + + if x is None: + pass + else: + x.bit_length() + + if x == None: + pass + else: + x.bit_length() + + +_T1 = TypeVar("_T1", None, str) + + +def func2(val: _T1) -> _T1: + if val is not None: + reveal_type(val, expected_text="str*") + return val + else: + reveal_type(val, expected_text="None*") + return val + + +def func3(x: object): + if x is None: + reveal_type(x, expected_text="None") + else: + reveal_type(x, expected_text="object") + + +_T2 = TypeVar("_T2") + + +def func4(x: _T2) -> _T2: + if x is None: + reveal_type(x, expected_text="None*") + raise ValueError() + else: + reveal_type(x, expected_text="_T2@func4") + return x + + +def func5(x: Any | None): + if x is None: + reveal_type(x, expected_text="None") + else: + reveal_type(x, expected_text="Any") + + +def func6(x: Any | object | None): + if x is None: + reveal_type(x, expected_text="None") + else: + reveal_type(x, expected_text="Any | object") + + +class NoneProto(Protocol): + def __bool__(self) -> Literal[False]: ... 
+ + +def func7(x: NoneProto | None): + if x is None: + reveal_type(x, expected_text="None") + else: + reveal_type(x, expected_text="NoneProto") + + +_T3 = TypeVar("_T3", bound=None | int) + + +def func8(x: _T3) -> _T3: + if x is None: + reveal_type(x, expected_text="None*") + else: + reveal_type(x, expected_text="int*") + return x + + +_T4 = TypeVar("_T4") + + +def func9(value: type[_T4] | None): + if value is None: + reveal_type(value, expected_text="None") + else: + reveal_type(value, expected_text="type[_T4@func9]") + + +class A: + def __init__(self, parent: Self | None) -> None: + self.parent = parent + + def get_depth(self) -> int: + current: Self | None = self + count = 0 + while current is not None: + count += 1 + current = current.parent + return count - 1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsNone2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsNone2.py new file mode 100644 index 00000000..82577838 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsNone2.py @@ -0,0 +1,8 @@ +# This sample tests type narrowing for conditional +# statements of the form X is None or X is not None +# where X is an assignment expression. + +# pyright: strict + +i = {"a": "", "b": None} +dict_comp = {key: w.strip() if (w := i[key]) is not None else "" for key in i} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsNoneTuple1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsNoneTuple1.py new file mode 100644 index 00000000..d9dec6cc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsNoneTuple1.py @@ -0,0 +1,42 @@ +# This sample tests the type narrowing case for unions of tuples +# where one or more of the entries is tested against type None. 
+ +from typing import TypeVar, Tuple, Union + +_T1 = TypeVar("_T1") + + +def func1(a: Union[Tuple[_T1, None], Tuple[None, str]]) -> Tuple[_T1, None]: + if a[1] is None: + reveal_type(a, expected_text="Tuple[_T1@func1, None]") + return a + else: + reveal_type(a, expected_text="Tuple[None, str]") + raise ValueError() + + +_T2 = TypeVar("_T2", bound=Union[None, int]) + + +def func2(a: Union[Tuple[_T2, None], Tuple[None, str]]): + if a[0] is None: + reveal_type(a, expected_text="Tuple[_T2@func2, None] | Tuple[None, str]") + else: + reveal_type(a, expected_text="Tuple[_T2@func2, None]") + + +_T3 = TypeVar("_T3", None, int) + + +def func3(a: Union[Tuple[_T3, None], Tuple[None, str]]): + if a[0] is None: + reveal_type(a, expected_text="Tuple[_T3@func3, None] | Tuple[None, str]") + else: + reveal_type(a, expected_text="Tuple[_T3@func3, None]") + + +def func4(a: Union[Tuple[Union[int, None]], Tuple[None, str]]): + if a[0] is None: + reveal_type(a, expected_text="Tuple[int | None] | Tuple[None, str]") + else: + reveal_type(a, expected_text="Tuple[int | None]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsNoneTuple2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsNoneTuple2.py new file mode 100644 index 00000000..5a4c7b4b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsNoneTuple2.py @@ -0,0 +1,45 @@ +# This sample tests the type narrowing case for unions of NamedTuples +# where one or more of the entries is tested against type None by index. 
+ +from typing import NamedTuple + +IntFirst = NamedTuple( + "IntFirst", + [ + ("first", int), + ("second", None), + ], +) + +StrSecond = NamedTuple( + "StrSecond", + [ + ("first", None), + ("second", str), + ], +) + + +def func1(a: IntFirst | StrSecond) -> IntFirst: + if a[1] is None: + reveal_type(a, expected_text="IntFirst") + return a + else: + reveal_type(a, expected_text="StrSecond") + raise ValueError() + + +UnionFirst = NamedTuple( + "UnionFirst", + [ + ("first", None | int), + ("second", None), + ], +) + + +def func2(a: UnionFirst | StrSecond): + if a[0] is None: + reveal_type(a, expected_text="UnionFirst | StrSecond") + else: + reveal_type(a, expected_text="UnionFirst") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance1.py new file mode 100644 index 00000000..150e8ffe --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance1.py @@ -0,0 +1,291 @@ +# This sample exercises the type analyzer's isinstance type narrowing logic. 
+ +from types import NoneType +from typing import ( + Any, + Generic, + Iterable, + Iterator, + Protocol, + Sized, + TypeVar, + Union, + runtime_checkable, +) + +S = TypeVar("S") +T = TypeVar("T") + + +class UnrelatedClass: + class_var1: int + + def __init__(self) -> None: + self.property: None = None + + +class UnrelatedSubclass(UnrelatedClass): + def __init__(self) -> None: + self.property2: None = None + + +class SuperClass: + class_var1: int + + def __init__(self) -> None: + self.property: None = None + + +class MyClass1(SuperClass): + class_var2: int + + def __init__(self) -> None: + self.property2: None = None + + +class MyClass2(SuperClass): + def __init__(self) -> None: + self.property2: None = None + + +def f(instance: Union[SuperClass, UnrelatedClass], a: Any) -> None: + if isinstance(instance, (MyClass1, UnrelatedSubclass, a)): + print(instance.property) + + # This should generate two errors: + # 'property2' is not a known member of 'SuperClass' + # 'property2' is not a known member of 'UnrelatedClass' + print(instance.property2) + else: + print(instance.property) + + # This should generate two errors: + # 'property2' is not a known member of 'SuperClass' + # 'property2' is not a known member of 'UnrelatedClass' + print(instance.property2) + + +def g(cls: Union[type[SuperClass], type[UnrelatedClass]], a: Any) -> None: + if issubclass(cls, (MyClass1, UnrelatedSubclass, a)): + print(cls.class_var1) + + # This should generate two errors: + # 'property2' is not a known member of 'SuperClass' + # 'property2' is not a known member of 'UnrelatedClass' + print(cls.class_var2) + else: + print(cls.class_var1) + + # This should generate two errors: + # 'property2' is not a known member of 'SuperClass' + # 'property2' is not a known member of 'UnrelatedClass' + print(cls.class_var2) + + +# This code should analyze without any errors. 
+class TestClass1: + def __init__(self) -> None: + self.property = True + + +class TestClass2(TestClass1): + pass + + +def func1(instance: TestClass2) -> None: + # Although it's redundant for code to check for either + # TestClass1 or TestClass2, the analyzer should be fine with it. + if isinstance(instance, TestClass2): + print(instance.property) + + if isinstance(instance, TestClass1): + print(instance.property) + + +def func2(val: Union[int, None, str]) -> int | None: + return None if isinstance((z := val), str) else z + + +# Test the special-case handling of isinstance with a +# "type" class. +def func3(ty: type[int]) -> type[int]: + assert isinstance(ty, (type, str)) + return ty + + +def func4(ty: type[int]) -> type[int]: + assert not isinstance(ty, str) + return ty + + +def func5(ty: type[T]) -> type[T]: + assert isinstance(ty, (type, str)) + return ty + + +def func6(ty: type[T]) -> type[T]: + assert not isinstance(ty, str) + return ty + + +def func6_2(ty: type[int] | int): + if isinstance(ty, type): + reveal_type(ty, expected_text="type[int]") + else: + reveal_type(ty, expected_text="int") + + +def func6_3(ty: type): + if issubclass(ty, str): + reveal_type(ty, expected_text="type[str]") + else: + reveal_type(ty, expected_text="type[Unknown]") + + +def func6_4(ty: Any): + if issubclass(ty, str): + reveal_type(ty, expected_text="type[str]") + else: + reveal_type(ty, expected_text="Any") + + +# Test the handling of protocol classes that support runtime checking. +def func7(a: Union[list[int], int]): + if isinstance(a, Sized): + reveal_type(a, expected_text="list[int]") + else: + reveal_type(a, expected_text="int") + + +# Test handling of member access expressions whose types change based +# on isinstance checks. + + +class Base1: ... 
+ + +class Sub1_1(Base1): + value: str + + +class Sub1_2(Base1): + value: Base1 + + +def handler(node: Base1) -> Any: + if isinstance(node, Sub1_1): + reveal_type(node.value, expected_text="str") + elif isinstance(node, Sub1_2): + reveal_type(node.value, expected_text="Base1") + if isinstance(node.value, Sub1_1): + reveal_type(node.value, expected_text="Sub1_1") + + +def func8a(a: int | list[int] | dict[str, int] | None): + if isinstance(a, (str, (int, list, type(None)))): + reveal_type(a, expected_text="int | list[int] | None") + else: + reveal_type(a, expected_text="dict[str, int]") + + +def func8b(a: int | list[int] | dict[str, int] | None): + if isinstance(a, str | int | list | type(None)): + reveal_type(a, expected_text="int | list[int] | None") + else: + reveal_type(a, expected_text="dict[str, int]") + + +TA1 = str | int | list | None + + +def func8c(a: int | list[int] | dict[str, int] | None): + if isinstance(a, TA1): + reveal_type(a, expected_text="int | list[int] | None") + else: + reveal_type(a, expected_text="dict[str, int]") + + +def func9(a: int | None): + if not isinstance(a, NoneType): + reveal_type(a, expected_text="int") + else: + reveal_type(a, expected_text="None") + + +class Base2(Generic[S, T]): + pass + + +class Sub2(Base2[T, T]): + pass + + +def func10(val: Sub2[str] | Base2[str, float]): + if isinstance(val, Sub2): + reveal_type(val, expected_text="Sub2[str] | Sub2[str | float]") + + +@runtime_checkable +class Proto1(Protocol): + def f0(self, /) -> None: ... + + +@runtime_checkable +class Proto2(Proto1, Protocol): + def f1(self, /) -> None: ... 
+ + +def func11(x: Proto1): + if isinstance(x, Proto2): + reveal_type(x, expected_text="Proto2") + else: + reveal_type(x, expected_text="Proto1") + + +TA2 = list["TA3"] | dict[str, "TA3"] +TA3 = str | TA2 + + +def func12(x: TA3) -> None: + if isinstance(x, dict): + reveal_type(x, expected_text="dict[str, str | list[TA3] | dict[str, TA3]]") + else: + reveal_type(x, expected_text="str | list[str | list[TA3] | dict[str, TA3]]") + + +def func13(x: object | type[object]) -> None: + if isinstance(x, object): + reveal_type(x, expected_text="object | type[object]") + + +def func14(x: Iterable[T]): + if isinstance(x, Iterator): + reveal_type(x, expected_text="Iterator[T@func14]") + + +class Base15(Generic[T]): + value: T + + +class Child15(Base15[int]): + value: int + + +def func15(x: Base15[T]): + if isinstance(x, Child15): + # This should generate an error. It's here just to ensure that + # this code branch isn't marked unreachable. + reveal_type(x, expected_text="Never") + + reveal_type(x, expected_text="Child15") + reveal_type(x.value, expected_text="int") + + +def func16(x: Any): + if isinstance(x, (int, int)): + reveal_type(x, expected_text="int") + + +def func17(x: Any): + if isinstance(x, (Union[int, int])): + reveal_type(x, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance10.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance10.py new file mode 100644 index 00000000..adeadf3e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance10.py @@ -0,0 +1,15 @@ +# This sample tests the case where an isinstance type guard is used +# with a dynamic set of types. + +SOME_TYPES_L: list[type[object]] = [int, float] +SOME_TYPES: tuple[type[object], ...] 
= tuple(SOME_TYPES_L) + + +def check_object(obj: object): + if isinstance(obj, SOME_TYPES): + reveal_type(obj, expected_text="object") + return + reveal_type(obj, expected_text="object") + + if isinstance(obj, list): + reveal_type(obj, expected_text="list[Unknown]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance11.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance11.py new file mode 100644 index 00000000..7b065b8f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance11.py @@ -0,0 +1,18 @@ +# This sample tests the case where the class type passed as the second +# argument to isinstance is incomplete the first time the type guard +# is evaluated because it's in an loop. + + +class X: + pass + + +class Y: + p: type + + +def func1(xs: list[X | Y]) -> None: + for x in xs: + if not isinstance(x, X): + if x.p == X: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance12.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance12.py new file mode 100644 index 00000000..80a435a6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance12.py @@ -0,0 +1,46 @@ +# This sample tests the case where a symbol with type `<some type>|Any` +# is narrowed using an `isinstance` type guard. 
+ +from typing import Any + + +def func1(val: Any): + if isinstance(val, str): + reveal_type(val, expected_text="str") + else: + reveal_type(val, expected_text="Any") + + +def func2(val: str): + if isinstance(val, str): + reveal_type(val, expected_text="str") + else: + reveal_type(val, expected_text="Never") + + +def func3(val: str | int): + if isinstance(val, str): + reveal_type(val, expected_text="str") + else: + reveal_type(val, expected_text="int") + + +def func4(val: str | Any): + if isinstance(val, str): + reveal_type(val, expected_text="str") + else: + reveal_type(val, expected_text="Any") + + +def func5(val: str | int | Any): + if isinstance(val, str): + reveal_type(val, expected_text="str") + else: + reveal_type(val, expected_text="int | Any") + + +def func6(val: list[str] | Any): + if isinstance(val, list): + reveal_type(val, expected_text="list[str] | list[Unknown]") + else: + reveal_type(val, expected_text="Any") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance13.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance13.py new file mode 100644 index 00000000..5715c6ca --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance13.py @@ -0,0 +1,15 @@ +# This sample tests the case where isinstance type narrowing is used +# with a protocol class that supports runtime checking. 
+ +# pyright: reportUnnecessaryIsInstance=true + +from typing import Any, Iterable, Sized + + +def func1(v: Any) -> bool: + if isinstance(v, Iterable): + reveal_type(v, expected_text="Iterable[Unknown]") + if isinstance(v, Sized): + reveal_type(v, expected_text="<subclass of Iterable and Sized>") + return True + return False diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance14.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance14.py new file mode 100644 index 00000000..4e8613f0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance14.py @@ -0,0 +1,26 @@ +# This sample tests the isinstance narrowing when the list +# of classes includes a type defined by a type variable. + +from typing import Any, TypeVar + +T = TypeVar("T") + + +def func1(cls: type[T], obj: Any) -> T: + assert isinstance(obj, cls) + reveal_type(obj, expected_text="T@func1") + return obj + + +v1 = func1(int, 3) +reveal_type(v1, expected_text="int") + + +def func2(klass: type[T], obj: T | int) -> T: + assert isinstance(obj, klass) + reveal_type(obj, expected_text="object*") + return obj + + +v2 = func2(str, 3) +reveal_type(v2, expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance15.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance15.py new file mode 100644 index 00000000..1562a3f4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance15.py @@ -0,0 +1,41 @@ +# This sample tests that the negative filtering for the 'isinstance' +# narrowing logic properly preserves a TypeVar. + +from typing import TypeVar, Generic + + +class Operator: ... 
+ + +OpType = TypeVar("OpType", bound=Operator) + + +class BasePipeline(Operator, Generic[OpType]): + def __init__( + self, + step: OpType, + ) -> None: + if isinstance(step, BasePipeline): + reveal_type(step, expected_text="BasePipeline[Unknown]*") + else: + reveal_type(step, expected_text="Operator*") + + +T1 = TypeVar("T1", int, str) + + +def do_nothing1(x: T1) -> T1: + if isinstance(x, int): + return x + return x + + +T2 = TypeVar("T2") + + +def func2(arg: T2) -> T2: + if isinstance(arg, str): + reveal_type(arg, expected_text="str*") + + reveal_type(arg, expected_text="str* | object*") + return arg diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance16.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance16.py new file mode 100644 index 00000000..a7b7281b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance16.py @@ -0,0 +1,19 @@ +# This sample tests isinstance and issubclass type narrowing +# based on cls and self parameters. + + +class ClassA: + @classmethod + def bar(cls, other: type): + if issubclass(other, cls): + reveal_type(other, expected_text="type[Self@ClassA]") + + if issubclass(other, (int, cls)): + reveal_type(other, expected_text="type[Self@ClassA] | type[int]") + + def baz(self, other: object): + if isinstance(other, type(self)): + reveal_type(other, expected_text="Self@ClassA") + + if isinstance(other, (int, type(self))): + reveal_type(other, expected_text="Self@ClassA | int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance17.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance17.py new file mode 100644 index 00000000..0f49cd08 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance17.py @@ -0,0 +1,79 @@ +# This sample tests basic type narrowing behavior for +# the isinstance call. 
+ +from types import NoneType +from typing import Any, TypedDict + + +def func1(x: list[str] | int): + if isinstance(x, list): + reveal_type(x, expected_text="list[str]") + else: + reveal_type(x, expected_text="int") + + +def func2(x: Any): + if isinstance(x, list): + reveal_type(x, expected_text="list[Unknown]") + else: + reveal_type(x, expected_text="Any") + + +def func3(x): + if isinstance(x, list): + reveal_type(x, expected_text="list[Unknown]") + else: + reveal_type(x, expected_text="Unknown") + + +class SomeTypedDict(TypedDict): + name: str + + +def func4(x: int | SomeTypedDict): + if isinstance(x, dict): + reveal_type(x, expected_text="SomeTypedDict") + else: + reveal_type(x, expected_text="int") + + +def func5(x: int | str | complex): + if isinstance(x, (int, str)): + reveal_type(x, expected_text="int | str") + else: + reveal_type(x, expected_text="complex | float") + + +def func6(x: type[int] | type[str] | type[complex]): + if issubclass(x, (int, str)): + reveal_type(x, expected_text="type[int] | type[str]") + else: + reveal_type(x, expected_text="type[complex] | type[float]") + + +def func7(x: int | SomeTypedDict | None): + if isinstance(x, (dict, type(None))): + reveal_type(x, expected_text="SomeTypedDict | None") + else: + reveal_type(x, expected_text="int") + + +def func8(x: type[int] | type[SomeTypedDict] | type[None]): + if issubclass(x, (dict, type(None))): + reveal_type(x, expected_text="type[SomeTypedDict] | type[None]") + else: + reveal_type(x, expected_text="type[int]") + + +def func9(x: int | None): + if isinstance(x, NoneType): + reveal_type(x, expected_text="None") + else: + reveal_type(x, expected_text="int") + + +def func10(x: type[int] | type[None]): + if issubclass(x, NoneType): + reveal_type(x, expected_text="type[None]") + else: + reveal_type(x, expected_text="type[int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance18.py 
b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance18.py new file mode 100644 index 00000000..e4c28874 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance18.py @@ -0,0 +1,63 @@ +# This sample tests the case where a filter (guard) type has a subtype +# relationship to the type of the variable being filtered but the +# type arguments sometimes mean that it cannot be a subtype. + +from typing import Generic, NamedTuple, TypeVar + +T = TypeVar("T") + + +class NT1(NamedTuple, Generic[T]): + pass + + +def func1(val: NT1[str] | tuple[int, int]): + if isinstance(val, NT1): + reveal_type(val, expected_text="NT1[str]") + else: + reveal_type(val, expected_text="tuple[int, int]") + + +class NT2(NamedTuple, Generic[T]): + a: T + b: str + + +def func2(val: NT2[str] | tuple[int, int]): + if isinstance(val, NT2): + reveal_type(val, expected_text="NT2[str]") + else: + reveal_type(val, expected_text="tuple[int, int]") + + +def func3(val: NT2[str] | tuple[int, str]): + if isinstance(val, NT2): + reveal_type(val, expected_text="NT2[str] | NT2[Unknown]") + else: + reveal_type(val, expected_text="tuple[int, str]") + + +class NT3(NamedTuple, Generic[T]): + a: T + b: T + + +def func4(val: NT3[str] | tuple[int, int]): + if isinstance(val, NT3): + reveal_type(val, expected_text="NT3[str] | NT3[Unknown]") + else: + reveal_type(val, expected_text="tuple[int, int]") + + +def func5(val: NT3[str] | tuple[str, str, str]): + if isinstance(val, NT3): + reveal_type(val, expected_text="NT3[str]") + else: + reveal_type(val, expected_text="tuple[str, str, str]") + + +def func6(val: NT3[str] | tuple[str, ...]): + if isinstance(val, NT3): + reveal_type(val, expected_text="NT3[str] | NT3[Unknown]") + else: + reveal_type(val, expected_text="tuple[str, ...]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance19.py 
b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance19.py new file mode 100644 index 00000000..b6dd8749 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance19.py @@ -0,0 +1,89 @@ +# This sample tests the logic for narrowing a metaclass using an +# issubclass call. + +from abc import ABC, ABCMeta +from typing import Any, ClassVar, Iterable +from typing_extensions import reveal_type # pyright: ignore[reportMissingModuleSource] + + +class Meta1(ABCMeta): + pass + + +class Parent1(ABC, metaclass=Meta1): + pass + + +class Child1(Parent1): + x: ClassVar[tuple[int, int]] = (0, 1) + + +def func1(m: Meta1) -> None: + if issubclass(m, Parent1): + reveal_type(m, expected_text="type[Parent1]") + else: + reveal_type(m, expected_text="Meta1") + + +def func2(m: Meta1) -> None: + if issubclass(m, Child1): + reveal_type(m, expected_text="type[Child1]") + else: + reveal_type(m, expected_text="Meta1") + + +def func3(m: ABCMeta) -> None: + if issubclass(m, Child1): + reveal_type(m, expected_text="type[Child1]") + else: + reveal_type(m, expected_text="ABCMeta") + + +def func4(m: ABCMeta) -> None: + if issubclass(m, (Parent1, Child1, int)): + reveal_type(m, expected_text="type[Parent1] | type[Child1]") + else: + reveal_type(m, expected_text="ABCMeta") + + +def func5(m: Meta1) -> None: + if issubclass(m, (Parent1, Child1)): + reveal_type(m, expected_text="type[Parent1] | type[Child1]") + else: + reveal_type(m, expected_text="Meta1") + + +def func6(m: Meta1, x: type[Any]) -> None: + if issubclass(m, x): + reveal_type(m, expected_text="Meta1") + else: + reveal_type(m, expected_text="Meta1") + + +def func7(m: Meta1, x: type[Parent1] | type[Child1]) -> None: + if issubclass(m, x): + reveal_type(m, expected_text="type[Parent1] | type[Child1]") + else: + reveal_type(m, expected_text="Meta1") + + +def func8(cls: type): + if isinstance(cls, Meta1): + reveal_type(cls, expected_text="Meta1") + else: + reveal_type(cls, 
expected_text="type") + + +class Meta2(type): + pass + + +class Class2(metaclass=Meta2): + pass + + +def func9(v: type[Class2] | Iterable[type[Class2]]): + if isinstance(v, Meta2): + reveal_type(v, expected_text="type[Class2]") + else: + reveal_type(v, expected_text="Iterable[type[Class2]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance2.py new file mode 100644 index 00000000..0b6d45e7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance2.py @@ -0,0 +1,50 @@ +# This sample verifies that type narrowing for isinstance works +# on "self" and other bound TypeVars. + + +from typing import Self, TypeVar + + +class ClassA: + def get_value(self) -> int: + if isinstance(self, ChildB): + return self.calculate() + return 7 + + +class ChildB(ClassA): + def calculate(self) -> int: + return 2 * 2 + + +TC = TypeVar("TC") + + +class ClassC: + @classmethod + def test(cls: type[TC], id: int | TC): + if isinstance(id, cls): + reveal_type(id, expected_text="object*") + else: + reveal_type(id, expected_text="int | object*") + + +TD = TypeVar("TD", bound="ClassD") + + +class ClassD: + @classmethod + def test(cls: type[TD], id: int | TD): + if isinstance(id, cls): + reveal_type(id, expected_text="ClassD*") + else: + reveal_type(id, expected_text="int | ClassD*") + + +class ClassE: + @classmethod + def test(cls: type[Self], id: int | Self): + if isinstance(id, cls): + reveal_type(id, expected_text="Self@ClassE") + else: + reveal_type(id, expected_text="int | ClassE*") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance20.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance20.py new file mode 100644 index 00000000..d4a0f957 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance20.py @@ 
-0,0 +1,16 @@ +# This sample tests the case where an isinstance type narrowing is used +# with a generic class with a type parameter that has a default value. + +from typing import Generic +from typing_extensions import TypeVar # pyright: ignore[reportMissingModuleSource] + + +T = TypeVar("T", bound=int, default=int) + + +class ClassA(Generic[T]): ... + + +def func1(obj: object): + if isinstance(obj, ClassA): + reveal_type(obj, expected_text="ClassA[Unknown]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance21.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance21.py new file mode 100644 index 00000000..4857c012 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance21.py @@ -0,0 +1,77 @@ +# This sample tests the case where the filter type is a class object. + +# pyright: reportMissingModuleSource=false + +from typing import Any +from typing_extensions import TypeIs + + +class Sentinel: + pass + + +def is_sentinel(value: object) -> TypeIs[type[Sentinel]]: ... 
+ + +def _(a: dict[str, int] | type[Sentinel]): + if is_sentinel(a): + reveal_type(a, expected_text="type[Sentinel]") + else: + reveal_type(a, expected_text="dict[str, int]") + + +def is_str_type(typ: object) -> TypeIs[type[str]]: + return typ is str + + +def test_typevar[T](typ: type[T], val: T) -> None: + if is_str_type(typ): + reveal_type(typ, expected_text="type[str]*") + + +def func1(v: Sentinel | type[Sentinel]): + if isinstance(v, Sentinel): + reveal_type(v, expected_text="Sentinel") + else: + reveal_type(v, expected_text="type[Sentinel]") + + +class A: + pass + + +class B: + pass + + +def guard3(t: type[Any]) -> TypeIs[type[A]]: + return True + + +def func3(t: type[B]): + if guard3(t): + reveal_type(t, expected_text="type[<subclass of B and A>]") + else: + reveal_type(t, expected_text="type[B]") + + +def guard4(t: Any) -> TypeIs[type[A]]: + return True + + +def func4(t: B): + if guard4(t): + reveal_type(t, expected_text="<subclass of B and type[A]>") + else: + reveal_type(t, expected_text="B") + + +class CParent: ... + + +class CChild(CParent): ... + + +def func5(val: CChild, t: type[CParent]): + if not isinstance(val, t): + reveal_type(val, expected_text="CChild") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance3.py new file mode 100644 index 00000000..75db837b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance3.py @@ -0,0 +1,93 @@ +# This sample tests the handling of isinstance and issubclass type +# narrowing in the case where there is no overlap between the +# value type and the test type. 
+ +from typing import TypeVar, final + + +class A: + a_val: int + + +class B: + b_val: int + + +class C: + c_val: int + + +@final +class D: + d_val: int + + +def func1(val: A): + if isinstance(val, B): + val.a_val + val.b_val + + # This should generate an error + val.c_val + + reveal_type(val, expected_text="<subclass of A and B>") + + if isinstance(val, C): + val.a_val + val.b_val + val.c_val + reveal_type(val, expected_text="<subclass of <subclass of A and B> and C>") + + else: + val.a_val + + # This should generate an error + val.b_val + + reveal_type(val, expected_text="A") + + +def func2(val: type[A]): + if issubclass(val, B): + val.a_val + val.b_val + + # This should generate an error + val.c_val + + reveal_type(val, expected_text="type[<subclass of A and B>]") + + if issubclass(val, C): + val.a_val + val.b_val + val.c_val + reveal_type( + val, expected_text="type[<subclass of <subclass of A and B> and C>]" + ) + + else: + val.a_val + + # This should generate an error + val.b_val + + reveal_type(val, expected_text="type[A]") + + +_T1 = TypeVar("_T1", bound=A) + + +def func3(val: _T1) -> _T1: + if isinstance(val, B): + return val + return val + + +def func4(val: D): + if isinstance(val, A): + reveal_type(val, expected_text="Never") + + +def func5(val: type[int]): + if isinstance(val, str): + x: type = val diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance4.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance4.py new file mode 100644 index 00000000..505b8d9d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance4.py @@ -0,0 +1,66 @@ +# This sample checks the handling of callable types that are narrowed +# to a particular type using an isinstance type narrowing test. 
+ +from typing import Callable, ParamSpec, Protocol, Union, runtime_checkable + +P = ParamSpec("P") + + +class ClassA: + def __call__(self, arg: int, bar: str) -> None: + raise NotImplementedError + + +@runtime_checkable +class ClassB(Protocol): + def __call__(self, arg: int) -> None: + raise NotImplementedError + + +@runtime_checkable +class ClassC(Protocol): + def __call__(self, arg: str) -> None: + raise NotImplementedError + + +def check_callable1(val: Union[Callable[[int, str], None], Callable[[int], None]]): + if isinstance(val, ClassA): + reveal_type(val, expected_text="ClassA") + else: + # This doesn't get narrowed because `ClassA` is not a runtime checkable protocol. + reveal_type(val, expected_text="((int, str) -> None) | ((int) -> None)") + + +def check_callable2(val: Union[Callable[[int, str], None], Callable[[int], None]]): + if isinstance(val, ClassB): + reveal_type(val, expected_text="((int, str) -> None) | ((int) -> None)") + else: + reveal_type(val, expected_text="Never") + + +def check_callable3(val: Union[Callable[[int, str], None], Callable[[int], None]]): + if isinstance(val, ClassC): + reveal_type(val, expected_text="((int, str) -> None) | ((int) -> None)") + else: + reveal_type(val, expected_text="Never") + + +def check_callable4(val: Union[type, Callable[[int], None]]): + if isinstance(val, type): + reveal_type(val, expected_text="type") + else: + reveal_type(val, expected_text="(int) -> None") + + +def check_callable5(fn: Callable[P, None]) -> None: + if isinstance(fn, ClassA): + reveal_type(fn, expected_text="ClassA") + else: + reveal_type(fn, expected_text="(**P@check_callable5) -> None") + + +def check_callable6(o: object | Callable[[int], int]): + if isinstance(o, Callable): + reveal_type(o, expected_text="((...) 
-> Unknown) | ((int) -> int)") + else: + reveal_type(o, expected_text="object") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance5.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance5.py new file mode 100644 index 00000000..4981b135 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance5.py @@ -0,0 +1,66 @@ +# This sample tests isinstance type narrowing when the class list +# includes "Callable". + +from typing import Callable, Sequence, TypeVar, final + + +class A: ... + + +class B: + def __call__(self, x: str) -> int: ... + + +class C: ... + + +class D(C): ... + + +TCall1 = TypeVar("TCall1", bound=Callable[..., int]) + + +def func1( + obj: Callable[[int, str], int] | list[int] | A | B | C | D | TCall1, +) -> TCall1 | None: + if isinstance(obj, (Callable, Sequence, C)): + reveal_type( + obj, + expected_text="((int, str) -> int) | Sequence[Unknown] | C | list[int] | B | D | TCall1@func1", + ) + else: + reveal_type(obj, expected_text="A") + + if isinstance(obj, Callable): + reveal_type(obj, expected_text="((int, str) -> int) | B | TCall1@func1") + else: + reveal_type(obj, expected_text="Sequence[Unknown] | C | list[int] | D | A") + + +class CB1: + def __call__(self, x: str) -> None: ... + + +def func2(c1: Callable[[int], None], c2: Callable[..., None]): + if isinstance(c1, CB1): + reveal_type(c1, expected_text="Never") + + if isinstance(c2, CB1): + reveal_type(c2, expected_text="CB1") + + +class IsNotFinal: ... + + +def func3(c1: Callable[[int], None]): + if isinstance(c1, IsNotFinal): + reveal_type(c1, expected_text="IsNotFinal") + + +@final +class IsFinal: ... 
+ + +def func4(c1: Callable[[int], None]): + if isinstance(c1, IsFinal): + reveal_type(c1, expected_text="Never") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance6.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance6.py new file mode 100644 index 00000000..099f3356 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance6.py @@ -0,0 +1,91 @@ +# This sample tests the case where isinstance or issubclass is used to +# narrow the type of a specialized class to a subclass where the type +# arguments are implied by the type arguments of the wider class. + +from typing import Any, Generic, Iterable, Sequence, TypeVar + +_T1 = TypeVar("_T1") + + +class ParentA(Generic[_T1]): ... + + +class ChildA1(ParentA[_T1]): ... + + +def func1(a: ParentA[int], b: ParentA[str] | ParentA[complex]) -> None: + if isinstance(a, ChildA1): + reveal_type(a, expected_text="ChildA1[int]") + + if isinstance(b, ChildA1): + reveal_type(b, expected_text="ChildA1[str] | ChildA1[complex]") + + +def func2( + a: type[ParentA[int]], b: type[ParentA[str]] | type[ParentA[complex]] +) -> None: + if issubclass(a, ChildA1): + reveal_type(a, expected_text="type[ChildA1[int]]") + + if issubclass(b, ChildA1): + reveal_type(b, expected_text="type[ChildA1[str]] | type[ChildA1[complex]]") + + +def func3(value: Iterable[_T1]) -> Sequence[_T1] | None: + if isinstance(value, Sequence): + return value + + +_T2 = TypeVar("_T2", bound=float, covariant=True) + + +class ParentB(Generic[_T2]): + pass + + +class ChildB1(ParentB[_T2]): + pass + + +def func4(var: ParentB[int]): + if isinstance(var, ChildB1): + reveal_type(var, expected_text="ChildB1[int]") + + +def func5(var: ParentB[Any]): + if isinstance(var, ChildB1): + reveal_type(var, expected_text="ChildB1[Any]") + + +_T3 = TypeVar("_T3", float, str) + + +class ParentC(Generic[_T3]): + pass + + +class ChildC1(ParentC[_T3]): + pass + + +def 
func6(var: ParentC[int]): + if isinstance(var, ChildC1): + reveal_type(var, expected_text="ChildC1[float]") + + +class ParentD(Generic[_T1]): + x: _T1 + + +class ChildD1(ParentD[_T1]): ... + + +class ChildD2(ParentD[int]): ... + + +def func7(a: ParentD[_T1]) -> _T1 | None: + if isinstance(a, ChildD1): + reveal_type(a, expected_text="ChildD1[_T1@func7]") + + elif isinstance(a, ChildD2): + reveal_type(a, expected_text="ChildD2") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance7.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance7.py new file mode 100644 index 00000000..1bcc49c7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance7.py @@ -0,0 +1,19 @@ +# This sample tests the case where an isinstance call uses +# a union of class types, some of which are tuples of other types +# and some of which are not. + +from typing import TypeVar, Iterator + +T1 = TypeVar("T1", bound="X") +T2 = TypeVar("T2", bound="X") + + +class X: + element_list: list["X"] + + def return_iter( + self, cls: type[T1] | tuple[type[T1], type[T2]] + ) -> Iterator[T1 | T2]: + for item in self.element_list: + if isinstance(item, cls): + yield item diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance8.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance8.py new file mode 100644 index 00000000..0649762c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance8.py @@ -0,0 +1,27 @@ +# This sample tests the case where an issubclass type guard narrows +# to an abstract base class. When attempting to instantiate the +# class, there should be no "cannot instantiate ABC" error. + +# pyright: strict + +from abc import ABC, abstractmethod +from typing import Any + + +class Base(ABC): + @abstractmethod + def f(self) -> None: ... 
+ + +def func1(cls: Any): + assert issubclass(cls, Base) + reveal_type(cls, expected_text="type[Base]") + _ = cls() + + +def func2(cls: Any): + assert isinstance(cls, type) + reveal_type(cls, expected_text="type") + assert issubclass(cls, Base) + reveal_type(cls, expected_text="type[Base]") + _ = cls() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral1.py new file mode 100644 index 00000000..a0e90dc2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral1.py @@ -0,0 +1,61 @@ +# This sample tests the type analyzer's type narrowing +# logic for literals. + +from typing import Literal, LiteralString, TypeVar, Union + + +def func1(p1: Literal["a", "b", "c"]): + if p1 != "b": + if p1 == "c": + reveal_type(p1, expected_text="Literal['c']") + pass + else: + reveal_type(p1, expected_text="Literal['a']") + + if p1 != "a": + reveal_type(p1, expected_text="Literal['c', 'b']") + else: + reveal_type(p1, expected_text="Literal['a']") + + +def func2(p1: Literal[1, 4, 7]): + if p1 == 4 or p1 == 1: + reveal_type(p1, expected_text="Literal[4, 1]") + else: + reveal_type(p1, expected_text="Literal[7]") + + +def func3(a: Union[int, None]): + if a == 1 or a == 2: + reveal_type(a, expected_text="Literal[1, 2]") + + +T = TypeVar("T", bound=Literal["a", "b"]) + + +def func4(x: T) -> T: + if x == "a": + reveal_type(x, expected_text="Literal['a']") + return x + else: + reveal_type(x, expected_text="Literal['b']") + return x + + +S = TypeVar("S", Literal["a"], Literal["b"]) + + +def func5(x: S) -> S: + if x == "a": + reveal_type(x, expected_text="Literal['a']") + return x + else: + reveal_type(x, expected_text="Literal['b']") + return x + + +def func6(x: LiteralString): + if x == "a": + reveal_type(x, expected_text="Literal['a']") + else: + reveal_type(x, expected_text="LiteralString") diff --git 
a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral2.py new file mode 100644 index 00000000..a768bd6c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral2.py @@ -0,0 +1,82 @@ +# This sample tests the type narrowing capabilities involving +# types that have enumerated literals (bool and enums). + +from enum import Enum +from typing import Any, Literal, Union, reveal_type + + +class SomeEnum(Enum): + SOME_ENUM_VALUE1 = 1 + SOME_ENUM_VALUE2 = 2 + SOME_ENUM_VALUE3 = 3 + + +def func1(a: SomeEnum) -> Literal[3]: + if a == SomeEnum.SOME_ENUM_VALUE1 or a == SomeEnum.SOME_ENUM_VALUE2: + return 3 + else: + return a.value + + +def func2(a: SomeEnum) -> Literal[3]: + if a == SomeEnum.SOME_ENUM_VALUE1: + return 3 + elif a == SomeEnum.SOME_ENUM_VALUE2: + return 3 + else: + return a.value + + +def must_be_true(a: Literal[True]): ... + + +def must_be_false(a: Literal[False]): ... 
+ + +def func3(a: bool): + if a == True: + must_be_true(a) + else: + must_be_false(a) + + +def func4(a: bool): + if not a: + must_be_false(a) + else: + must_be_true(a) + + +class MyEnum(Enum): + ZERO = 0 + ONE = 1 + + +def func5(x: Union[MyEnum, str]): + if x is MyEnum.ZERO: + reveal_type(x, expected_text="Literal[MyEnum.ZERO]") + elif x is MyEnum.ONE: + reveal_type(x, expected_text="Literal[MyEnum.ONE]") + else: + reveal_type(x, expected_text="str") + + +def func6(x: Any): + if x is MyEnum.ZERO: + reveal_type(x, expected_text="Literal[MyEnum.ZERO]") + else: + reveal_type(x, expected_text="Any") + + +def func7(x: Any): + if x == MyEnum.ZERO: + reveal_type(x, expected_text="Literal[MyEnum.ZERO]") + else: + reveal_type(x, expected_text="Any") + + +def func8(x: Literal[0, 1] | None): + if x is 1: + reveal_type(x, expected_text="Literal[1]") + else: + reveal_type(x, expected_text="Literal[0, 1] | None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingLiteralMember1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingLiteralMember1.py new file mode 100644 index 00000000..359c1d95 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingLiteralMember1.py @@ -0,0 +1,204 @@ +# This sample tests type narrowing based on member accesses +# to members that have literal types. 
+ +from typing import ClassVar, Literal, Union + + +class A: + kind: Literal["A"] + kind_class: ClassVar[Literal["A"]] + d: Literal[1, 2, 3] + is_a: Literal[True] + + +class B: + kind: Literal["B"] + kind_class: ClassVar[Literal["B"]] + d: Literal[3, 4, 5] + is_a: Literal[False] + + +class C: + kind: str + kind_class: str + c: int + is_a: bool + + +class D: + kind: Literal[1, 2, 3] + + +def eq_obj1(c: Union[A, B]): + if c.kind == "A": + reveal_type(c, expected_text="A") + else: + reveal_type(c, expected_text="B") + + +def is_obj1_1(c: Union[A, B]): + if c.kind is "A": + reveal_type(c, expected_text="A | B") + else: + reveal_type(c, expected_text="A | B") + + +def is_obj1_2(c: Union[A, B]): + if c.is_a is False: + reveal_type(c, expected_text="B") + else: + reveal_type(c, expected_text="A") + + +def eq_obj2(c: Union[A, B]): + if c.kind != "A": + reveal_type(c, expected_text="B") + else: + reveal_type(c, expected_text="A") + + +def is_obj2(c: Union[A, B]): + if c.kind is not "A": + reveal_type(c, expected_text="A | B") + else: + reveal_type(c, expected_text="A | B") + + +def eq_obj3(c: Union[A, B, C]): + if c.kind == "A": + reveal_type(c, expected_text="A | C") + else: + reveal_type(c, expected_text="B | C") + + +def is_obj3(c: Union[A, B, C]): + if c.kind is "A": + reveal_type(c, expected_text="A | B | C") + else: + reveal_type(c, expected_text="A | B | C") + + +def eq_obj4(c: Union[A, B]): + if c.d == 1: + reveal_type(c, expected_text="A") + elif c.d == 3: + reveal_type(c, expected_text="A | B") + + +def is_obj4(c: Union[A, B]): + if c.d is 1: + reveal_type(c, expected_text="A | B") + elif c.d is 3: + reveal_type(c, expected_text="A | B") + + +def eq_obj5(d: D): + if d.kind == 1: + reveal_type(d, expected_text="D") + elif d.kind == 2: + reveal_type(d, expected_text="D") + + +def is_obj5(d: D): + if d.kind is 1: + reveal_type(d, expected_text="D") + elif d.kind is 2: + reveal_type(d, expected_text="D") + + +def eq_class2(c: Union[type[A], type[B]]): + if 
c.kind_class == "A": + reveal_type(c, expected_text="type[A]") + else: + reveal_type(c, expected_text="type[B]") + + +def is_class2(c: Union[type[A], type[B]]): + if c.kind_class is "A": + reveal_type(c, expected_text="type[A] | type[B]") + else: + reveal_type(c, expected_text="type[A] | type[B]") + + +class E: + @property + def type(self) -> Literal[0]: + return 0 + + +class F: + @property + def type(self) -> Literal[1]: + return 1 + + +def test(x: E | F) -> None: + if x.type == 1: + reveal_type(x, expected_type="F") + else: + reveal_type(x, expected_type="E") + + +class G: + type: Literal[0] + + +class H: + type: Literal[1] + + +class I: + thing: G | H + + def method1(self) -> None: + if self.thing.type == 1: + reveal_type(self.thing, expected_text="H") + + local = self.thing + if local.type == 1: + reveal_type(local, expected_text="H") + + +class XA: + data: int + event: Literal["a"] + + +class XB: + data: str + event: Literal["b"] + + +class XC: + data: complex + event: Literal["c"] + + +def func1(event: XA | XC | XB) -> None: + if event.event == "a": + reveal_type(event.data, expected_text="int") + + if event.event == "b": + if event.data: + reveal_type(event.data, expected_text="str") + elif event.event == "c": + reveal_type(event.data, expected_text="complex") + + +class XD: + event: Literal["d"] + + +class XE: + event: None | Literal["e"] + + +def func2(e: XD | XE) -> None: + if e.event == None: + reveal_type(e, expected_text="XE") + + if e.event == "e": + reveal_type(e, expected_text="XE") + + if e.event == "d": + reveal_type(e, expected_text="XD") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingLocalConst1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingLocalConst1.py new file mode 100644 index 00000000..be56f6bc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingLocalConst1.py @@ -0,0 +1,133 @@ +# This sample tests the case where a local (constant) 
variable that +# is assigned a narrowing expression can be used in a type guard condition. +# These are sometimes referred to as "aliased conditional expressions". + + +import random + + +class A: + a: int + + +class B: + b: int + + +def func1(x: A | B) -> None: + is_a = not not isinstance(x, A) + + if not is_a: + reveal_type(x, expected_text="B") + else: + reveal_type(x, expected_text="A") + + +def func2(x: A | B) -> None: + is_a = isinstance(x, A) + + if random.random() < 0.5: + x = B() + + if is_a: + reveal_type(x, expected_text="B | A") + else: + reveal_type(x, expected_text="B | A") + + +def func3(x: int | None): + is_number = x != None + + if is_number: + reveal_type(x, expected_text="int") + else: + reveal_type(x, expected_text="None") + + +def func4() -> A | None: + return A() if random.random() < 0.5 else None + + +maybe_a1 = func4() +is_a1 = maybe_a1 + +if is_a1: + reveal_type(maybe_a1, expected_text="A") +else: + reveal_type(maybe_a1, expected_text="None") + +maybe_a2 = func4() + + +def func5(): + global maybe_a2 + maybe_a2 = False + + +is_a2 = maybe_a2 + +if is_a2: + reveal_type(maybe_a2, expected_text="A | None") +else: + reveal_type(maybe_a2, expected_text="A | None") + + +def func6(x: A | B) -> None: + is_a = isinstance(x, A) + + for y in range(1): + if is_a: + reveal_type(x, expected_text="A | B") + else: + reveal_type(x, expected_text="A | B") + + if random.random() < 0.5: + x = B() + + +def get_string() -> str: ... + + +def get_optional_string() -> str | None: ... 
+ + +def func7(val: str | None = None): + val = get_optional_string() + + val_is_none = val is None + + if val_is_none: + val = get_string() + + reveal_type(val, expected_text="str") + + +def func8(val: str | None = None): + val = get_optional_string() + + val_is_none = val is None + + val = get_optional_string() + + if val_is_none: + val = get_string() + + reveal_type(val, expected_text="str | None") + + +def func9(var: str | None = None): + if var_not_None := not (var is None): + reveal_type(var, expected_text="str") + + reveal_type(var, expected_text="str | None") + + if var_not_None: + reveal_type(var, expected_text="str") + + if 1 > 1 + 2: + var = None + else: + var = "a" + "b" + + if var_not_None: + reveal_type(var, expected_text="Literal['ab'] | None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingNoneMember1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingNoneMember1.py new file mode 100644 index 00000000..4ad49365 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingNoneMember1.py @@ -0,0 +1,100 @@ +# This sample tests the type narrowing case for unions of NamedTuples +# where one or more of the entries is tested against type None by attribute. 
+ +from typing import NamedTuple, Union + +IntFirst = NamedTuple( + "IntFirst", + [ + ("first", int), + ("second", None), + ], +) + +StrSecond = NamedTuple( + "StrSecond", + [ + ("first", None), + ("second", str), + ], +) + + +def func1(a: Union[IntFirst, StrSecond]) -> IntFirst: + if a.second is None: + reveal_type(a, expected_text="IntFirst") + return a + else: + reveal_type(a, expected_text="StrSecond") + raise ValueError() + + +UnionFirst = NamedTuple( + "UnionFirst", + [ + ("first", Union[None, int]), + ("second", None), + ], +) + + +def func2(a: Union[UnionFirst, StrSecond]): + if a.first is None: + reveal_type(a, expected_text="UnionFirst | StrSecond") + else: + reveal_type(a, expected_text="UnionFirst") + + +class A: + @property + def prop1(self) -> int | None: ... + + member1: None + member2: int | None + member3: int | None + member4: int | None + + +class B: + @property + def prop1(self) -> int: ... + + member1: int + member2: int | None + member3: None + member4: int + + +def func3(c: Union[A, B]): + if c.prop1 is None: + reveal_type(c, expected_text="A | B") + else: + reveal_type(c, expected_text="A | B") + + +def func4(c: Union[A, B]): + if c.member1 is None: + reveal_type(c, expected_text="A") + else: + reveal_type(c, expected_text="B") + + +def func5(c: Union[A, B]): + if c.member2 is None: + reveal_type(c, expected_text="A | B") + else: + reveal_type(c, expected_text="A | B") + + +def func6(c: Union[A, B]): + if c.member3 is not None: + reveal_type(c, expected_text="A") + else: + reveal_type(c, expected_text="A | B") + + +def func7(c: Union[A, B]): + if c.member4 is not None: + reveal_type(c, expected_text="A | B") + else: + reveal_type(c, expected_text="A") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTuple1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTuple1.py new file mode 100644 index 00000000..243315a5 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTuple1.py @@ -0,0 +1,69 @@ +# This sample tests the type narrowing for known-length tuples +# that have an entry with a declared literal type. + +from enum import Enum +from typing import Literal + +MsgA = tuple[Literal[1], str] +MsgB = tuple[Literal[2], float] + +MsgAOrB = MsgA | MsgB + + +def func1(m: MsgAOrB): + if m[0] == 1: + reveal_type(m, expected_text="tuple[Literal[1], str]") + else: + reveal_type(m, expected_text="tuple[Literal[2], float]") + + +def func2(m: MsgAOrB): + if m[0] != 1: + reveal_type(m, expected_text="tuple[Literal[2], float]") + else: + reveal_type(m, expected_text="tuple[Literal[1], str]") + + +MsgC = tuple[Literal[True], str] +MsgD = tuple[Literal[False], float] + +MsgCOrD = MsgC | MsgD + + +def func3(m: MsgCOrD): + if m[0] is True: + reveal_type(m, expected_text="tuple[Literal[True], str]") + else: + reveal_type(m, expected_text="tuple[Literal[False], float]") + + +def func4(m: MsgCOrD): + if m[0] is not True: + reveal_type(m, expected_text="tuple[Literal[False], float]") + else: + reveal_type(m, expected_text="tuple[Literal[True], str]") + + +class MyEnum(Enum): + A = 0 + B = 1 + + +MsgE = tuple[Literal[MyEnum.A], str] +MsgF = tuple[Literal[MyEnum.B], float] + +MsgEOrF = MsgE | MsgF + + +def func5(m: MsgEOrF): + if m[0] is MyEnum.A: + reveal_type(m, expected_text="tuple[Literal[MyEnum.A], str]") + else: + reveal_type(m, expected_text="tuple[Literal[MyEnum.B], float]") + + +def func6(m: MsgEOrF): + if m[0] is not MyEnum.A: + reveal_type(m, expected_text="tuple[Literal[MyEnum.B], float]") + else: + reveal_type(m, expected_text="tuple[Literal[MyEnum.A], str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTupleLength1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTupleLength1.py new file mode 100644 index 00000000..0301013a --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTupleLength1.py @@ -0,0 +1,161 @@ +# This sample tests type narrowing of tuples based on len(x) test. + +from typing import Callable, Literal, ParamSpec, TypeVar + +P = ParamSpec("P") + + +def func1(val: tuple[int] | tuple[int, int] | tuple[str, str]): + if len(val) == 1: + reveal_type(val, expected_text="tuple[int]") + else: + reveal_type(val, expected_text="tuple[int, int] | tuple[str, str]") + + if len(val) != 2: + reveal_type(val, expected_text="tuple[int]") + else: + reveal_type(val, expected_text="tuple[int, int] | tuple[str, str]") + + +def func2(val: tuple[int] | tuple[int, ...]): + if len(val) == 1: + reveal_type(val, expected_text="tuple[int]") + else: + reveal_type(val, expected_text="tuple[int, ...]") + + if len(val) != 2: + reveal_type(val, expected_text="tuple[int] | tuple[int, ...]") + else: + reveal_type(val, expected_text="tuple[int, int]") + + +def func3(val: tuple[int] | tuple[()]): + N = 0 + if len(val) == N: + reveal_type(val, expected_text="tuple[()]") + else: + reveal_type(val, expected_text="tuple[int]") + + +_T1 = TypeVar("_T1", bound=tuple[int]) +_T2 = TypeVar("_T2", bound=tuple[str, str]) + + +def func4(val: _T1 | _T2) -> _T1 | _T2: + if len(val) == 1: + reveal_type(val, expected_text="_T1@func4") + else: + reveal_type(val, expected_text="_T2@func4") + + return val + + +def func5( + val: ( + tuple[int, ...] + | tuple[str] + | tuple[str, str, str] + | tuple[int, *tuple[str, ...], str] + | tuple[int, *tuple[float, ...]] + ), + length: Literal[2], +): + if len(val) == length: + reveal_type( + val, expected_text="tuple[int, int] | tuple[int, str] | tuple[int, float]" + ) + else: + reveal_type( + val, + expected_text="tuple[int, ...] 
| tuple[str] | tuple[str, str, str] | tuple[int, str, *tuple[str, ...], str] | tuple[int, *tuple[float, ...]]", + ) + + +def func10(t: tuple[()] | tuple[int] | tuple[int, int] | tuple[int, int, int]): + if len(t) >= 2: + reveal_type(t, expected_text="tuple[int, int] | tuple[int, int, int]") + else: + reveal_type(t, expected_text="tuple[()] | tuple[int]") + + +def func11(t: tuple[()] | tuple[int] | tuple[int, int] | tuple[int, int, int]): + if len(t) > 1: + reveal_type(t, expected_text="tuple[int, int] | tuple[int, int, int]") + else: + reveal_type(t, expected_text="tuple[()] | tuple[int]") + + +def func12(t: tuple[()] | tuple[int] | tuple[int, int]): + if len(t) >= 0: + reveal_type(t, expected_text="tuple[()] | tuple[int] | tuple[int, int]") + else: + reveal_type(t, expected_text="Never") + + +def func20(t: tuple[int, ...]): + if len(t) >= 2: + reveal_type(t, expected_text="tuple[int, int, *tuple[int, ...]]") + else: + reveal_type(t, expected_text="tuple[()] | tuple[int]") + + +def func21(t: tuple[int, ...]): + if len(t) > 0: + reveal_type(t, expected_text="tuple[int, *tuple[int, ...]]") + else: + reveal_type(t, expected_text="tuple[()]") + + +def func22(t: tuple[str, *tuple[int, ...], str]): + if len(t) < 3: + reveal_type(t, expected_text="tuple[str, str]") + else: + reveal_type(t, expected_text="tuple[str, int, *tuple[int, ...], str]") + + +def func23(t: tuple[str, *tuple[int, ...], str]): + if len(t) <= 3: + reveal_type(t, expected_text="tuple[str, str] | tuple[str, int, str]") + else: + reveal_type(t, expected_text="tuple[str, int, int, *tuple[int, ...], str]") + + +def func24(t: tuple[str, *tuple[int, ...], str]): + if len(t) <= 34: + reveal_type(t, expected_text="tuple[str, *tuple[int, ...], str]") + else: + reveal_type(t, expected_text="tuple[str, *tuple[int, ...], str]") + + +def func25(t: tuple[str, *tuple[int, ...], str]): + if len(t) < 2: + reveal_type(t, expected_text="Never") + else: + reveal_type(t, expected_text="tuple[str, *tuple[int, ...], str]") + 
+ +def func26(fn: Callable[P, None]): + def inner(*args: P.args, **kwargs: P.kwargs): + if len(args) >= 0: + reveal_type(args, expected_text="P@func26.args") + else: + reveal_type(args, expected_text="P@func26.args") + return fn(*args, **kwargs) + + return inner + + +def func27(t: tuple[int, ...]): + if len(t) == 0 or len(t) >= 2: + reveal_type(t, expected_text="tuple[()] | tuple[int, int, *tuple[int, ...]]") + else: + reveal_type(t, expected_text="tuple[int]") + + +def func28(t: tuple[int, *tuple[int, ...]]): + if len(t) == 1 or len(t) >= 3: + reveal_type( + t, expected_text="tuple[int] | tuple[int, int, int, *tuple[int, ...]]" + ) + else: + reveal_type(t, expected_text="tuple[int, int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypeEquals1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypeEquals1.py new file mode 100644 index 00000000..137618b9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypeEquals1.py @@ -0,0 +1,110 @@ +# This sample exercises the type analyzer's type narrowing +# logic for tests of the form "type(X) == Y" or "type(X) != Y". + +from typing import Any, Generic, TypeVar, final + + +def func1(a: str | int) -> int: + if type(a) != str: + # This should generate an error because + # "a" is potentially a subclass of str. + return a + + # This should generate an error because + # "a" is provably type str at this point. + return a + + +def func2(a: str | None) -> str: + if type(a) == str: + return a + + # This should generate an error because + # "a" is provably type str at this point. 
+ return a + + +def func3(a: dict[str, Any]) -> str: + val = a.get("hello") + if type(val) == str: + return val + + return "none" + + +class A: + pass + + +class B(A): + pass + + +def func4(a: str | A): + if type(a) == B: + reveal_type(a, expected_text="B") + else: + reveal_type(a, expected_text="str | A") + + +T = TypeVar("T") + + +class C(Generic[T]): + def __init__(self, a: T): + self.a = a + + +class D: + pass + + +E = C[T] | D + + +def func5(x: E[T]) -> None: + if type(x) == C: + reveal_type(x, expected_text="C[T@func5]") + + +@final +class AFinal: + pass + + +@final +class BFinal: + pass + + +def func6(val: AFinal | BFinal) -> None: + if type(val) == AFinal: + reveal_type(val, expected_text="AFinal") + else: + reveal_type(val, expected_text="BFinal") + + +def func7(val: Any): + if type(val) == int: + reveal_type(val, expected_text="int") + else: + reveal_type(val, expected_text="Any") + + reveal_type(val, expected_text="int | Any") + + +class CParent: ... + + +class CChild(CParent): ... + + +_TC = TypeVar("_TC", bound=CParent) + + +def func8(a: _TC, b: _TC) -> _TC: + if type(a) == CChild: + reveal_type(a, expected_text="CChild*") + return a + reveal_type(a, expected_text="CParent*") + return a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypeIs1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypeIs1.py new file mode 100644 index 00000000..a103c98f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypeIs1.py @@ -0,0 +1,153 @@ +# This sample exercises the type analyzer's type narrowing +# logic for tests of the form "type(X) is Y" or "type(X) is not Y". + +from typing import Any, Generic, TypeVar, final + + +def func1(a: str | int) -> int: + if type(a) is not str: + # This should generate an error because + # "a" is potentially a subclass of str. + return a + + # This should generate an error because + # "a" is provably type str at this point. 
+ return a + + +def func2(a: str | None) -> str: + if type(a) is str: + return a + + # This should generate an error because + # "a" is provably type str at this point. + return a + + +def func3(a: dict[str, Any]) -> str: + val = a.get("hello") + if type(val) is str: + return val + + return "none" + + +class A: + pass + + +class B(A): + pass + + +def func4(a: str | A): + if type(a) is B: + reveal_type(a, expected_text="B") + else: + reveal_type(a, expected_text="str | A") + + +T = TypeVar("T") + + +class C(Generic[T]): + def __init__(self, a: T): + self.a = a + + +class D: + pass + + +E = C[T] | D + + +def func5(x: E[T]) -> None: + if type(x) is C: + reveal_type(x, expected_text="C[T@func5]") + + +@final +class AFinal: + pass + + +@final +class BFinal: + pass + + +def func6(val: AFinal | BFinal) -> None: + if type(val) is AFinal: + reveal_type(val, expected_text="AFinal") + else: + reveal_type(val, expected_text="BFinal") + + +def func7(val: Any): + if type(val) is int: + reveal_type(val, expected_text="int") + else: + reveal_type(val, expected_text="Any") + + reveal_type(val, expected_text="int | Any") + + +class CParent: ... + + +class CChild(CParent): ... + + +_TC = TypeVar("_TC", bound=CParent) + + +def func8(a: _TC, b: _TC) -> _TC: + if type(a) is CChild: + reveal_type(a, expected_text="CChild*") + return a + reveal_type(a, expected_text="CParent*") + return a + + +class F: + def method1(self, v: object): + if type(self) == type(v): + reveal_type(self, expected_text="Self@F") + else: + reveal_type(self, expected_text="Self@F") + + +class G(str): + @classmethod + def method1(cls, v: str): + if type(v) is cls: + reveal_type(v, expected_text="G*") + else: + reveal_type(v, expected_text="str") + + +class H: + def __init__(self, x): ... 
+ + +def func9[T: H](x: type[T], y: H) -> T: + if type(y) == x: + reveal_type(y, expected_text="H*") + return y + return x(y) + + +class I: + pass + + +class J: + pass + + +def func10[T: I | J](items: list[I | J], kind: type[T]) -> T | None: + for i in items: + if type(i) is kind: + reveal_type(i, expected_text="I* | J*") + return i diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict1.py new file mode 100644 index 00000000..2521fdb3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict1.py @@ -0,0 +1,117 @@ +# This sample tests type narrowing for TypedDict types based +# on whether a key is in or not in the dict. + +from typing import TypedDict + + +class TD1(TypedDict): + a: str + b: int + + +class TD2(TypedDict): + a: int + c: str + + +class TD3(TypedDict, total=False): + a: int + d: str + + +def f1(p: TD1 | TD2): + if "b" in p: + # This should technically be TD1 | TD2, but the + # current narrowing logic implements a not-entirely-safe + # narrowing behavior. We can fix this once PEP 728 + # is accepted. + reveal_type(p, expected_text="TD1") + # reveal_type(p, expected_text="TD1 | TD2") + else: + reveal_type(p, expected_text="TD2") + + +def f2(p: TD1 | TD2): + if "b" not in p: + reveal_type(p, expected_text="TD2") + else: + # This should technically be TD1 | TD2, but the + # current narrowing logic implements a not-entirely-safe + # narrowing behavior. We can fix this once PEP 728 + # is accepted. + reveal_type(p, expected_text="TD1") + # reveal_type(p, expected_text="TD1 | TD2") + + +def f3(p: TD1 | TD3): + if "d" in p: + # This should technically be TD1 | TD3, but the + # current narrowing logic implements a not-entirely-safe + # narrowing behavior. We can fix this once PEP 728 + # is accepted. 
+ reveal_type(p, expected_text="TD3") + # reveal_type(p, expected_text="TD1 | TD3") + else: + reveal_type(p, expected_text="TD1 | TD3") + + +def f4(p: TD1 | TD3): + if "d" not in p: + reveal_type(p, expected_text="TD1 | TD3") + else: + # This should technically be TD1 | TD3, but the + # current narrowing logic implements a not-entirely-safe + # narrowing behavior. We can fix this once PEP 728 + # is accepted. + reveal_type(p, expected_text="TD3") + # reveal_type(p, expected_text="TD1 | TD3") + + +def f5(p: TD1 | TD3): + if "a" in p: + reveal_type(p, expected_text="TD1 | TD3") + else: + reveal_type(p, expected_text="TD3") + + +def f6(p: TD1 | TD2 | TD3): + # This should generate an error for TD3. + v1 = p["a"] + + v2 = p.get("a") + + if "c" in p: + # This should technically generate two errors for TD1 and TD3 + v3 = p["c"] + # This should technically be Unknown | str, but the + # current narrowing logic implements a not-entirely-safe + # narrowing behavior. We can fix this once PEP 728 + # is accepted. + reveal_type(v3, expected_text="str") + # reveal_type(v3, expected_text="Unknown | str") + + if "a" in p and "d" in p: + v4 = p["a"] + # This should technically be str | int, but the + # current narrowing logic implements a not-entirely-safe + # narrowing behavior. We can fix this once PEP 728 + # is accepted. + reveal_type(v4, expected_text="int") + # reveal_type(v4, expected_text="str | int") + + # This should generate an error for TD1 and TD2 + v5 = p["d"] + reveal_type(v5, expected_text="Unknown | str") + + # This should generate three errors, two for TD1 and TD2 (because + # "d" is not a valid key) and one for TD3 (because "d" is not required).
+ v6 = p["d"] + + +def f7(p: TD3): + pass + + +def f8(p: TD3): + if "a" in p: + f7(p) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict2.py new file mode 100644 index 00000000..ca02090a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict2.py @@ -0,0 +1,98 @@ +# This sample tests type narrowing based on key accesses +# to unions of TypedDicts that have fields with literal types. + +from typing import Literal, TypedDict + + +class Event1(TypedDict): + tag: Literal["new-job"] + job_name: str + config_file_path: str + + +class Event2(TypedDict): + tag: Literal[2] + job_id: int + + +class Event3(TypedDict): + tag: Literal["other-job"] + message: str + + +Event = Event1 | Event2 | Event3 + + +def process_event1(event: Event) -> None: + if event["tag"] == "new-job": + reveal_type(event, expected_text="Event1") + event["job_name"] + elif event["tag"] == 2: + reveal_type(event, expected_text="Event2") + event["job_id"] + else: + reveal_type(event, expected_text="Event3") + event["message"] + + +def process_event2(event: Event) -> None: + if event["tag"] is "new-job": + reveal_type(event, expected_text="Event1") + event["job_name"] + elif event["tag"] is 2: + reveal_type(event, expected_text="Event2") + event["job_id"] + else: + reveal_type(event, expected_text="Event3") + event["message"] + + +class ClassA: + job_event: Event1 | Event3 + + def method1(self): + if self.job_event["tag"] == "new-job": + reveal_type(self.job_event, expected_text="Event1") + else: + reveal_type(self.job_event, expected_text="Event3") + + +class A(TypedDict): + name: Literal["A"] + a: str + + +class BC(TypedDict): + name: Literal["B", "C"] + b: str + + +AorBC = A | BC + + +def func1(val: AorBC, key: Literal["C", "D"]): + if val["name"] == key: + reveal_type(val, expected_text="BC") + else: + reveal_type(val, 
expected_text="A | BC") + + +def func2(val: AorBC, key: Literal["A", "D"]): + if val["name"] == key: + reveal_type(val, expected_text="A") + else: + reveal_type(val, expected_text="A | BC") + + +def func3(val: AorBC, key: Literal["A", "C"]): + if val["name"] == key: + reveal_type(val, expected_text="A | BC") + else: + reveal_type(val, expected_text="A | BC") + + +def func4(val: AorBC, key: Literal["B", "C"]): + if val["name"] == key: + reveal_type(val, expected_text="BC") + else: + reveal_type(val, expected_text="A | BC") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict3.py new file mode 100644 index 00000000..25d16939 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict3.py @@ -0,0 +1,45 @@ +# This sample tests assignment-based narrowing for TypedDict values. + +from typing import TypedDict + + +class MyDict1(TypedDict, total=False): + key1: int + key2: str + + +my_dict1: MyDict1 = {"key1": 1} +my_dict1["key1"] + +# This should generate an error because "key2" isn't included in the +# narrowed type. +my_dict1["key2"] + +if "key2" in my_dict1: + my_dict1["key2"] + + +class MyDict2(TypedDict, total=False): + key3: MyDict1 + key4: MyDict1 + key5: MyDict1 + + +my_dict2: MyDict2 = {"key3": {"key1": 3}, "key4": {}} + +my_dict2["key3"] +my_dict2["key4"] + +# This should generate an error because "key5" isn't included in the +# narrowed type. +my_dict2["key5"] + +my_dict2["key3"]["key1"] + +# This should generate an error because "key2" isn't included in the +# narrowed type. +my_dict2["key3"]["key2"] + +# This should generate an error because "key4" isn't included in the +# narrowed type. 
+my_dict2["key4"]["key1"] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeParams1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeParams1.py new file mode 100644 index 00000000..adbff305 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeParams1.py @@ -0,0 +1,115 @@ +# This sample tests the PEP 695 type parameter syntax for generic classes +# and functions. + +T1 = 0 + + +class ClassA[T1]: ... + + +def func1[T1](): ... + + +T2: str + + +class ClassB[T2]: ... + + +def func2[T2](): ... + + +# This should generate an error because T3 is duplicated. +class ClassC[T3, S1, T3]: ... + + +class ClassD: + class ClassE: ... + + class ClassF: + class A[T]: ... + + int_alias = int + + class B(A[int_alias]): + pass + + # This should generate an error because ClassE is out of scope. + class C(A[ClassE]): + pass + + +class ClassG[T](list["T"]): + pass + + +class ClassH: + def object[T](self, target: object, new: T) -> T: ... + + +# This should generate an error because T3 is duplicated. +def func3[T3, S1, T3](): ... + + +def func4[T4](T4: int): ... + + +def func5[T5](a: int): + # This should generate an error because T5 is already in use. + class ClassA[T5]: ... + + # This should generate an error because T5 is already in use. + def inner_func1[T5](): ... + + +def func6[T6](T7: int): + class ClassA[T7]: ... + + def inner_func1[T7](): ... + + global T2 + + class ClassB[T2]: + global T2 + + class ClassC[T3]: + T3 = 4 + + T3 = 4 + + +def func7[T8: ForwardRefClass[str], T9: "ForwardRefClass[int]"](): + pass + + +def func8[T10: (ForwardRefClass[str], "ForwardRefClass[int]")](): + pass + + +class ForwardRefClass[T]: + pass + + +class ClassI1: ... + + +class ClassI2: + def method1[T](self, v: ClassI1) -> None: ... + + # This should generate an error because ClassI3 is not yet declared. + def method2[T](self, v: ClassI3) -> None: ... + + +class ClassI3: ...
+ + +def func9[T, **P, S](x: T) -> T: + S = 1 + + def inner(): + # This should generate two errors. + nonlocal T, P + + nonlocal S + + return x diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeParams2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeParams2.py new file mode 100644 index 00000000..0fa58aa7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeParams2.py @@ -0,0 +1,10 @@ +# This sample tests that the use of PEP 695 type parameter syntax for generic +# classes and functions is flagged as an error if the version of Python +# is < 3.12. + +# This should generate an error if <3.12 +class ClassA[T, S]: ... + + +# This should generate an error if <3.12 +def func1[T, S](): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeParams3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeParams3.py new file mode 100644 index 00000000..7ccad601 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeParams3.py @@ -0,0 +1,109 @@ +# This sample tests error conditions related to the use of PEP 695 +# type parameters outside of their valid scope. + + +class ClassA[S]: + s: S + + class ClassB[T](dict[S, T]): + s: S + t: T + + def method1[U](self): + s: S + t: T + u: U + lambda: (S, T, U) + + # This should generate an error because T is out of scope. + t: T + + +# This should generate an error because S is out of scope. +s: S + +# This should generate an error because T is out of scope. +t: T + + +def func1[A](): + def func2[B](): + a: A + b: B + + class ClassC[C](dict[B, C]): + a: A + b: B + c: C + + def method1[D](self): + a: A + b: B + c: C + d: D + e = lambda: (A, B, C, D) + + a: A + + # This should generate an error because B is out of scope. + b: B + + +# This should generate an error because A is out of scope. +a: A + +# This should generate an error because B is out of scope. 
+b: B + +type TA1[A] = list[A] + +# This should generate an error because B is out of scope. +type TA2[A] = list[B] + + +S = 0 + + +def outer1[S](): + S = "" + T = 1 + + def outer2[T](): + def inner1(): + nonlocal S # OK + reveal_type(S, expected_text="Literal['']") + + def inner2(): + global S # OK + reveal_type(S, expected_text="Literal[0]") + + +T = 0 + + +class Outer2[T]: + T = 1 + + reveal_type(T, expected_text="Literal[1]") + + class Inner1: + T = "" + + reveal_type(T, expected_text="Literal['']") + + def inner_method(self): + reveal_type(T, expected_text="TypeVar") + + def outer_method(self): + T = 3j + + reveal_type(T, expected_text="complex") + + def inner_func(): + reveal_type(T, expected_text="complex") + + +class Outer3[T]: + # This should generate an error because Outer3 is + # not bound at this point. + def inner_func1[S](self: Outer3[S]): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeParams4.py b/python-parser/packages/pyright-internal/src/tests/samples/typeParams4.py new file mode 100644 index 00000000..f495299d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeParams4.py @@ -0,0 +1,17 @@ +# This sample tests errors related to the use of a Generic +# or Protocol base class with PEP 695 type parameter syntax. + +from typing import Generic, Protocol + + +# This should generate an error because Generic should not +# be used with type parameter syntax. +class ClassA[T](Generic[T]): ... + + +class ClassB[T](Protocol): ... + + +# This should generate an error because Protocol should not be used +# with type parameters when used with type parameter syntax. +class ClassC[T](Protocol[T]): ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeParams5.py b/python-parser/packages/pyright-internal/src/tests/samples/typeParams5.py new file mode 100644 index 00000000..75b46c55 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeParams5.py @@ -0,0 +1,58 @@ +# This sample tests the handling of PEP 695 type parameter syntax used for +# bounded and constrained TypeVars, TypeVarTuples, and ParamSpecs. + +from typing import Any + +class ClassA[R, *Ts, **P]: + ... + +class ClassB[R: int | str]: + ... + +# This should generate an error because 'dummy' is not declared. +class ClassC[R: dummy]: + ... + +class ClassD[R: "ClassE[Any]"]: + ... + + +class ClassE[T]: + ... + +# This should generate an error because variadic type params don't +# support bound expressions. +class ClassF[*Ts: int]: ... + +# This should generate an error because ParamSpecs don't +# support bound expressions. +class ClassG[**P: int]: ... + +# This should generate an error because the expression isn't +# a valid type. +class ClassH[R: 1]: ... + +# This should generate an error because a constrained type +# must contain at least two types. +class ClassI[R: ()]: ... + +# This should generate an error because a constrained type +# must contain at least two types. +class ClassJ[R: (int, )]: ... + +class ClassK[R: (bytes, str)]: ... + +t2 = (bytes, str) +# This should generate an error because a literal tuple expression +# must be used for constrained types. +class ClassL[R: t2]: ... + +# This should generate an error because constraints must be legal +# type expressions. +class ClassM[R: (1, str)]: ... + +v: type[int] = int + +# This should generate an error because constraints must be legal +# type expressions. +class ClassN[R: (v, str)]: ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeParams6.py b/python-parser/packages/pyright-internal/src/tests/samples/typeParams6.py new file mode 100644 index 00000000..244afb3e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeParams6.py @@ -0,0 +1,23 @@ +# This sample tests the interactions between traditional TypeVars and +# PEP 695 type parameter syntax. + +from typing import Generic, TypeVar + + +T1 = TypeVar("T1") +T2 = TypeVar("T2") +T4 = TypeVar("T4") + + +# This should generate an error because traditional type variables +# like T1 cannot be combined with new-style type parameters. +class ClassA[T3](dict[T1, T3]): ... + + +class ClassB(Generic[T1]): + class ClassC[T2](dict[T1, T2]): + def method1[T3](self, a: T1, b: T2, c: T3) -> T1 | T2 | T3: ... + + # This should generate an error because traditional type variables + # like T4 cannot be combined with new-style type parameters. + def method2[T3](self, a: T3, b: T4) -> T3 | T4: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeParams7.py b/python-parser/packages/pyright-internal/src/tests/samples/typeParams7.py new file mode 100644 index 00000000..2509b4d9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeParams7.py @@ -0,0 +1,47 @@ +# This sample tests the handling of bound and constrained type parameters +# as specified in PEP 695 type parameter statements. + + +class ClassA[**P, R: str]: ... + + +A1 = ClassA[..., str] + +# This should generate an error because str isn't a valid +# specialization for a ParamSpec. +A2 = ClassA[str, str] + +A3 = ClassA[[str], str] + +# This should generate an error because int doesn't conform +# to the bound. +A4 = ClassA[..., int] + + +class StrSubclass(str): ... + + +A5 = ClassA[..., StrSubclass] + + +class ClassB[X: (int, str), Y](dict[Y, X]): ... 
+ + +B1 = ClassB[int, int] + +# This should generate an error because float doesn't conform +# to the constraint. +B2 = ClassB[float, float] + + +class ClassC[*Ts]: ... + + +C1 = ClassC[str, str] + +C2 = ClassC[*tuple[str, ...]] + +# This should generate an error because ... isn't valid. +C3 = ClassC[...] + +C4 = ClassC[*tuple[()]] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeParams8.py b/python-parser/packages/pyright-internal/src/tests/samples/typeParams8.py new file mode 100644 index 00000000..1e036824 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeParams8.py @@ -0,0 +1,13 @@ +# This sample tests the case where a class defined in an inner scope +# uses type variables from an outer scope. + + +class Parent[S, T]: + def task(self, input: S) -> T: ... + + +def outer_func1[S, T](): + class Child(Parent[S, T]): + def task(self, input: S) -> T: ... + + return Child diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typePrinter1.py b/python-parser/packages/pyright-internal/src/tests/samples/typePrinter1.py new file mode 100644 index 00000000..5f2ecfa6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typePrinter1.py @@ -0,0 +1,27 @@ +# This sample tests that the type printer prints fully-qualified names +# for types that are ambiguous because they have the same local name. + +from . import typePrinter2 + + +class A: + class Inner: ... + + +class B: + class Inner: ... + + +def func1(v: A.Inner | None): + reveal_type(v, expected_text="Inner | None") + + +def func2(v: A.Inner | B.Inner | None): + reveal_type(v, expected_text="typePrinter1.A.Inner | typePrinter1.B.Inner | None") + + +class IntOrStr: ... 
+ + +def func3(v: typePrinter2.IntOrStr | IntOrStr | None): + reveal_type(v, expected_text="int | str | IntOrStr | None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typePrinter2.py b/python-parser/packages/pyright-internal/src/tests/samples/typePrinter2.py new file mode 100644 index 00000000..d6b3d49d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typePrinter2.py @@ -0,0 +1,3 @@ +# This sample is used in conjunction with the typePrinter1.py sample. + +IntOrStr = int | str diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typePrinter3.py b/python-parser/packages/pyright-internal/src/tests/samples/typePrinter3.py new file mode 100644 index 00000000..dd17214b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typePrinter3.py @@ -0,0 +1,18 @@ +class A: + class Child1: + pass + + +class B: + class Child1: + pass + + class Child2: + pass + + +# This should generate an error that uses fully-qualified names. +v1: A.Child1 = B.Child1() + +# This should generate an error that uses simple names. +v2: A.Child1 = B.Child2() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typePromotions1.py b/python-parser/packages/pyright-internal/src/tests/samples/typePromotions1.py new file mode 100644 index 00000000..ef13b568 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typePromotions1.py @@ -0,0 +1,43 @@ +# This sample tests handling of special-cased "type promotions". + + +from typing import NewType + + +def func1(float_val: float, int_val: int): + v1: float = int_val + v2: complex = float_val + v3: complex = int_val + + +def func2(mem_view_val: memoryview, byte_array_val: bytearray): + v1: bytes = mem_view_val + v2: bytes = byte_array_val + + +class IntSubclass(int): ... 
+ + +def func3(x: IntSubclass) -> float: + return x + + +IntNewType = NewType("IntNewType", int) + + +def func4(x: IntNewType) -> float: + return x + + +def func5(f: float): + if isinstance(f, float): + reveal_type(f, expected_text="float") + else: + reveal_type(f, expected_text="int") + + +def func6(f: complex): + if isinstance(f, float): + reveal_type(f, expected_text="float") + else: + reveal_type(f, expected_text="complex | int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVar1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVar1.py new file mode 100644 index 00000000..82bb90c9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVar1.py @@ -0,0 +1,23 @@ +# This sample tests that the type checker enforces that the +# assigned name of a TypeVar matches the name provided in +# the TypeVar itself. + +from typing import Any, TypeVar + +T1 = TypeVar("T1") + +# This should generate an error because the TypeVar name +# does not match the name of the variable it is assigned to. +T2 = TypeVar("T3") + +T4: Any = TypeVar("T4") + +my_dict = {} + +# This should generate an error because TypeVars cannot be +# assigned to an index expression. +my_dict["var"] = TypeVar("T5") + +# This should generate an error because a TypeVar with a single +# constraint is an error. +T5 = TypeVar("T5", str) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVar10.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVar10.py new file mode 100644 index 00000000..e8254673 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVar10.py @@ -0,0 +1,19 @@ +# This sample tests the handling of constrained TypeVars when used +# within call arguments. + +from typing import TypeVar + + +class A: + def method(self, x: "A") -> "A": ... + + +class B: + def method(self, x: "B") -> "B": ... 
+ + +T = TypeVar("T", A, B) + + +def check(x: T, y: T) -> T: + return x.method(y) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVar11.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVar11.py new file mode 100644 index 00000000..bb31833c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVar11.py @@ -0,0 +1,12 @@ +# This sample tests that literal values are retained by the constraint +# solver if they are found as type arguments. + +from typing import Literal + + +_L1 = Literal["foo", "bar"] + + +def combine(set1: set[_L1], set2: set[_L1]) -> None: + x = set1 | set2 + reveal_type(x, expected_text="set[Literal['foo', 'bar']]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVar2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVar2.py new file mode 100644 index 00000000..895eaa83 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVar2.py @@ -0,0 +1,26 @@ +# This sample verifies that the type checker is using +# synthesized type variables for "self" and "cls" variables. + + +class BaseClass: + @classmethod + def c(cls): + return cls + + def f(self): + return self + + +class SubClass(BaseClass): + pass + + +def requires_subclass(p1: SubClass): + pass + + +x = SubClass().f() +requires_subclass(x) + +y = SubClass().c() +requires_subclass(y()) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVar3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVar3.py new file mode 100644 index 00000000..8fb13429 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVar3.py @@ -0,0 +1,73 @@ +# This sample tests various diagnostics related to TypeVar usage. 
+ +from typing import Callable, Generic, TypeVar, AnyStr + +_T = TypeVar("_T") +_S = TypeVar("_S") + + +class OuterClass(Generic[_T, AnyStr]): + # This should generate an error because _S + # isn't defined in this context. + my_var1: _S + + my_var2: AnyStr + + # This should generate an error because _T + # is already in use. + class InnerClass1(Generic[_T]): ... + + # This should generate an error because AnyStr + # is already in use. + class InnerClass2(Generic[_S, AnyStr]): + my_var1: _S + + # This should generate an error because _T + # is already in use in the outer class. + my_var2: _T + + class InnerClass3: + # This should generate an error. + x: list[_T] + + def f(self, x: _T, y: _S, z: _S) -> _T: ... + + def g(self, x: AnyStr) -> None: + # This should generate an error. + y: list[_T] + + +def func1(a: _T) -> _T | None: + my_var1: _T + + # This should generate an error + my_var2: _S + + # This should generate an error because _T + # is already in use. + class InnerClass3(Generic[_T]): ... + + +# This should generate an error. +a: _S = 3 + +# This should generate an error. +b: list[_T] = [] + +# This should generate an error. +c: list[AnyStr] = [] + + +T = TypeVar("T") + + +def foo() -> Callable[[T], T]: + def inner(v: T) -> T: + reveal_type(v, expected_text="T@foo") + return v + + return inner + + +# This should generate an error. +list[T]() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVar4.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVar4.py new file mode 100644 index 00000000..3383358f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVar4.py @@ -0,0 +1,62 @@ +# This sample tests the logic that emits errors when +# covariant and contravariant TypeVars are used incorrectly +# for method parameters and return types. 
+ +from typing import Generic, TypeVar + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) + + +class ClassA(Generic[_T, _T_co, _T_contra]): + def func1(self, a: _T): + pass + + # This should generate an error because covariant + # TypeVars are not allowed for input parameters. + def func2(self, a: _T_co): + def inner(b: _T_co) -> None: + pass + + return inner + + def func3(self, a: int | _T_co): + pass + + def func4(self, a: list[_T_co]): + pass + + def func5(self, a: _T_contra): + pass + + def func6(self) -> _T | None: + pass + + def func7(self) -> _T_co | None: + pass + + # This should generate an error because contravariant + # TypeVars are not allowed for return parameters. + def func8(self) -> _T_contra: ... + + # This should generate an error because contravariant + # TypeVars are not allowed for return parameters. + def func9(self) -> _T_contra | int: + return 3 + + # This should generate an error because contravariant + # TypeVars are not allowed for return parameters. + def func10(self, x: _T_contra): + return x + + def func11(self) -> list[_T_contra]: + return [] + + +class ClassB: + def func1(self, a: _T_co) -> _T_co: + return a + + def func2(self, a: _T_contra) -> _T_contra: + return a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVar5.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVar5.py new file mode 100644 index 00000000..a84330ff --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVar5.py @@ -0,0 +1,75 @@ +# This sample tests that generic type variables +# with no bound type properly generate errors. It tests +# both class-defined and function-defined type variables. + +from typing import Generic, TypeVar + + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") + + +class ClassA(Generic[_T1, _T2]): + async def func1(self, a: _T1): + # This should generate an error. + _ = a.temp + + # This should generate an error. 
+ _ = a(3) + + # This should generate an error. + _ = a[0] + + # This should generate an error. + _ = a.temp + + # This should generate an error. + _ = a + 1 + + # This should generate an error. + _ = -a + + # This should generate an error. + a += 3 + + # This should generate an error. + _ = await a + + # This should generate an error. + for _ in a: + pass + + _ = a.__class__ + _ = a.__doc__ + + async def func2(self, a: _T2): + # This should generate an error. + _ = a.temp + + # This should generate an error. + _ = a(3) + + # This should generate an error. + _ = a[0] + + # This should generate an error. + _ = a.temp + + # This should generate an error. + _ = a + 1 + + # This should generate an error. + _ = -a + + # This should generate an error. + a += 3 + + # This should generate an error. + _ = await a + + # This should generate an error. + for _ in a: + pass + + _ = a.__class__ + _ = a.__doc__ diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVar6.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVar6.py new file mode 100644 index 00000000..fca8fba3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVar6.py @@ -0,0 +1,115 @@ +# This sample tests that generic type variables +# with a bound type properly generate errors. It tests +# both class-defined and function-defined type variables. + +from typing import Generic, TypeVar, Union + + +class Foo: + var1: int + + def __call__(self, val: int): + pass + + def do_stuff(self) -> int: + return 0 + + +class Bar: + var1: int + var2: int + + def __call__(self, val: int): + pass + + def do_stuff(self) -> float: + return 0 + + def do_other_stuff(self) -> float: + return 0 + + +_T1 = TypeVar("_T1", bound=Foo) +_T2 = TypeVar("_T2", bound=Union[Foo, Bar]) + + +class ClassA(Generic[_T1]): + async def func1(self, a: _T1) -> _T1: + _ = a.var1 + + # This should generate an error. + _ = a.var2 + + _ = a(3) + + # This should generate an error. 
+ _ = a(3.3) + + # This should generate an error. + _ = a[0] + + # This should generate an error. + _ = a + 1 + + # This should generate an error. + _ = -a + + # This should generate an error. + a += 3 + + # This should generate an error. + _ = await a + + # This should generate an error. + for _ in a: + pass + + a.do_stuff() + + # This should generate an error. + a.do_other_stuff() + + _ = a.__class__ + _ = a.__doc__ + + return a + + async def func2(self, a: _T2) -> _T2: + _ = a.var1 + + # This should generate an error. + _ = a.var2 + + _ = a(3) + + # This should generate an error. + _ = a(3.3) + + # This should generate two errors. + _ = a[0] + + # This should generate an error. + _ = a + 1 + + # This should generate an error. + _ = -a + + # This should generate an error. + a += 3 + + # This should generate an error. + _ = await a + + # This should generate an error. + for _ in a: + pass + + a.do_stuff() + + # This should generate an error. + a.do_other_stuff() + + _ = a.__class__ + _ = a.__doc__ + + return a diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVar7.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVar7.py new file mode 100644 index 00000000..3ab94d8f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVar7.py @@ -0,0 +1,167 @@ +# This sample tests that generic type variables +# with constrained types properly generate errors. It tests +# both class-defined and function-defined type variables. 
+ +from typing import Generic, TypeVar + + +class Foo: + var1: int + + def __call__(self, val: int): + pass + + def do_stuff(self) -> int: + return 0 + + def __add__(self, val: "Foo") -> "Foo": + return val + + +class Bar: + var1: int + var2: int + + def __call__(self, val: int): + pass + + def do_stuff(self) -> float: + return 0 + + def do_other_stuff(self) -> float: + return 0 + + def __add__(self, val: "Bar") -> "Bar": + return val + + +_T1 = TypeVar("_T1", Foo, Bar) +_T2 = TypeVar("_T2", Foo, Bar, str) + + +class ClassA(Generic[_T1, _T2]): + async def func1(self, a: _T1) -> _T1: + _ = a.var1 + + # This should generate an error. + _ = a.var2 + + # This should generate an error. + _ = a(3.3) + + # This should generate two errors. + _ = a[0] + + # This should generate an error. + _ = a + 1 + + _ = a + a + + a += a + + # This should generate an error. + _ = -a + + # This should generate an error. + a += 3 + + # This should generate an error. + _ = await a + + # This should generate two errors. + for _ in a: + pass + + a.do_stuff() + + # This should generate an error. + a.do_other_stuff() + + _ = a.__class__ + _ = a.__doc__ + + return a + + async def func2(self, a: _T2, b: _T1) -> _T1: + # This should generate two errors. + _ = a.var2 + + # This should generate an error. + _ = a(3.3) + + # This should generate two errors. + _ = a[0] + + # This should generate an error. + _ = a + 1 + + _ = a + a + + a += a + + # This should generate an error. + _ = a + b + + # This should generate an error. + _ = -a + + # This should generate an error. + a += 3 + + # This should generate an error. + _ = await a + + # This should generate an error. + for _ in a: + pass + + # This should generate an error. 
+ a.do_other_stuff() + + _ = a.__class__ + _ = a.__doc__ + + return b + + +_T3 = TypeVar("_T3", float, int, str) +_T4 = TypeVar("_T4", float, int) + + +def custom_add(a: _T3, b: _T4) -> float: + if isinstance(a, str): + return 0 + c = a + b + reveal_type(c, expected_text="float* | int*") + return c + + +class Thing1: + def __add__(self, value: float) -> "Thing1": ... + + def __radd__(self, value: float) -> "Thing1": ... + + +class Thing2: + def __add__(self, value: float) -> "Thing2": ... + + def __radd__(self, value: float) -> "Thing2": ... + + +TThing = TypeVar("TThing", Thing1, Thing2) + + +def func1(x: TThing) -> TThing: + if isinstance(x, Thing1): + return 2 + x + else: + assert isinstance(x, Thing2) + return 3 + x + + +def func2(x: TThing) -> TThing: + if isinstance(x, Thing1): + return x + 2 + else: + assert isinstance(x, Thing2) + return x + 3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVar8.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVar8.py new file mode 100644 index 00000000..a8da16f8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVar8.py @@ -0,0 +1,82 @@ +# This sample tests the handling of a TypeVar symbol when it is +# used as a runtime object rather than a special form. + +import typing as t +import typing_extensions as te # pyright: ignore[reportMissingModuleSource] + + +T1 = t.TypeVar("T1") +S1 = t.TypeVar("S1", bound=str) +Ts1 = t.TypeVarTuple("Ts1") +P1 = t.ParamSpec("P1") + +# In these cases, the TypeVar symbol simply represents the TypeVar +# object itself, rather than representing a type variable. +T1.__name__ +S1.__name__ +S1.__bound__ +Ts1.__name__ +P1.__name__ + + +def func1(x: bool, a: T1, b: S1) -> T1 | S1: + reveal_type(T1.__name__, expected_text="str") + reveal_type(S1.__name__, expected_text="str") + reveal_type(Ts1.__name__, expected_text="str") + reveal_type(P1.__name__, expected_text="str") + + # This should generate an error. 
+ a.__name__ + + # This should generate an error. + b.__name__ + + if x: + return a + else: + return b + + +T2 = te.TypeVar("T2") +S2 = te.TypeVar("S2", bound=str) +Ts2 = te.TypeVarTuple("Ts2") +P2 = te.ParamSpec("P2") + +T2.__name__ +S2.__name__ +S2.__bound__ +Ts2.__name__ +P2.__name__ + + +def func2(x: bool, a: T2, b: S2) -> T2 | S2: + reveal_type(T2.__name__, expected_text="str") + reveal_type(S2.__name__, expected_text="str") + reveal_type(Ts2.__name__, expected_text="str") + reveal_type(P2.__name__, expected_text="str") + + if x: + return a + else: + return b + + +def func3(t: t.TypeVar, ts: t.TypeVarTuple = ..., p: t.ParamSpec = ...) -> None: ... + + +func3(T1, Ts1, P1) + +# This should generate an error for Python 3.12 and older because the runtime +# object typing.TypeVar is not the same as typing_extensions.TypeVar. +func3(T2) + + +def func4(t: te.TypeVar, ts: te.TypeVarTuple = ..., p: te.ParamSpec = ...) -> None: ... + + +func4(T2, Ts2, P2) + + +# This should generate an error for Python 3.12 and older because the runtime +# object typing.TypeVar is not the same as typing_extensions.TypeVar. +func4(T1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVar9.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVar9.py new file mode 100644 index 00000000..2510020c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVar9.py @@ -0,0 +1,148 @@ +# This sample tests the reporting of incorrect TypeVar usage within +# a generic function. A TypeVar must appear at least twice to be +# considered legitimate. + +# pyright: reportInvalidTypeVarUse=true + +from typing import AnyStr, Callable, Generic, overload +from typing_extensions import TypeVar # pyright: ignore[reportMissingModuleSource] + + +_T = TypeVar("_T") +_S = TypeVar("_S") + + +class A(Generic[_T]): + def m1(self, v1: _T) -> None: ... + + # This should generate an error because _S + # is a local typeVar and appears only once. 
+ def m2(self, v1: _S) -> None: ... + + # This should generate an error because _S + # is a local typeVar and appears only once. + def m3(self, v1: _T) -> _S: ... + + +# This should generate an error because _T +# is a local typeVar and appears only once. +def f1(v1: _T) -> None: ... + + +def f2(v1: _T, v2: list[_T]) -> None: ... + + +def f3(v1: _T) -> _T: ... + + +def f4() -> dict[_T, _T]: ... + + +# This should generate an error because _T +# is a local typeVar and appears only once. +def f5() -> list[_T]: ... + + +_T_Bound = TypeVar("_T_Bound", bound=int) +_T_Constrained = TypeVar("_T_Constrained", int, str) + + +# Constrained TypeVars are exempt. +def f6(v1: _T_Constrained): ... + + +# Bound TypeVars are not exempt. +def f7(v1: _T_Bound): ... + + +# Bound TypeVars as type arguments are exempt when used in an +# input parameter annotation. +def f8(v1: list[_T_Bound]): ... + + +# Bound TypeVars as type arguments are not exempt when used in a +# return annotation. +def f9() -> list[_T_Bound]: ... + + +# TypeVars used as type args to a generic type alias are exempt. +MyCallable = Callable[[_T], _T] + + +def f10() -> MyCallable[_T]: ... + + +# This should generate an error because AnyStr can go unsolved. +def f11(x: AnyStr = ...) -> AnyStr: ... + + +# This should generate an error because AnyStr can go unsolved. +def f12(x: AnyStr = ...) -> list[AnyStr]: ... + + +def f13(x: AnyStr = ...) -> AnyStr | None: ... + + +def f14(x: AnyStr = "") -> AnyStr: ... + + +# This should generate an error because AnyStr can go unsolved. +def f15(x: AnyStr = ...) -> list[AnyStr] | None: ... + + +class B(Generic[AnyStr]): + # This should generate an error because AnyStr can go unsolved. + def __init__(self, *, mode: AnyStr = ...) -> None: ... + + +class C(Generic[AnyStr]): + def __init__(self, *, mode: AnyStr = "") -> None: ... + + +@overload +def f16(default: int = ...) -> list[int]: ... + + +@overload +def f16(default: _T) -> list[_T]: ... + + +def f16(default: _T = ...) 
-> list[int] | list[_T]: ... + + +class ClassA(Generic[_T]): + # This should generate an error because _T can go unsolved. + def __init__(self, x: _T = ...) -> None: ... + + +_T2 = TypeVar("_T2", default=int) + + +class ClassB(Generic[_T2]): + def __init__(self, x: _T2 = ...) -> None: ... + + +# This should generate an error because _T appears only once. +def f17( + arg, # type: _T +): # type: (...) -> int + return 1 + + +def f18( + arg, # type: _T +): # type: (...) -> _T + return arg + + +# This should generate an error because _T appears only once. +def f19( + arg, +): # type: (_T) -> int + return 1 + + +def f20( + arg, # type: _T +): # type: (...) -> _T + return arg diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault1.py new file mode 100644 index 00000000..0cc410f0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault1.py @@ -0,0 +1,99 @@ +# This sample tests basic support for PEP 696 -- default types for TypeVars. + +from typing import Any +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVar, + TypeVarTuple, + ParamSpec, + Unpack, +) + +S1 = TypeVar("S1") +S2 = TypeVar("S2", bound=int) +S3 = TypeVar("S3", bytes, str) + +Ts0 = TypeVarTuple("Ts0") + +P0 = ParamSpec("P0") + + +T1 = TypeVar("T1", default=int) + +# This should generate an error because default must be a type expression. +T2 = TypeVar("T2", default=3) + +TInt = TypeVar("TInt", bound=int) +T3 = TypeVar("T3", bound=float, default=TInt) + +# This should generate an error because default must be a subtype of bound. +T4 = TypeVar("T4", bound=int, default=float) + +# This should generate an error because S1 is not a subtype of int. +T6 = TypeVar("T6", bound=int, default=S1) + +T7 = TypeVar("T7", bound=float, default=S2) + +# This should generate an error because S3 is not a subtype of int. 
+T8 = TypeVar("T8", bound=float, default=S3) + +T9 = TypeVar("T9", bound=list[Any], default=list[S1]) + +T10 = TypeVar("T10", bytes, str, default=str) + +# This should generate an error because str | bytes isn't one of the constrained types. +T11 = TypeVar("T11", bytes, str, default=str | bytes) + +# This should generate an error because S1 isn't one of the constrained types. +T12 = TypeVar("T12", bytes, str, default=S1) + +T13 = TypeVar("T13", int, str) +T14 = TypeVar("T14", int, str, bool, default=T13) + +# This should generate an error because the constraints for T13 are not compatible. +T15 = TypeVar("T15", int, complex, bool, default=T13) + +T16 = TypeVar("T16", bound=int) +T17 = TypeVar("T17", int, complex, bool, default=T16) + +# This should generate an error because the type of T16 is not compatible. +T18 = TypeVar("T18", str, list, default=T16) + + +Ts1 = TypeVarTuple("Ts1", default=Unpack[tuple[int]]) + +# This should generate an error because default must be unpacked tuple. +Ts2 = TypeVarTuple("Ts2", default=tuple[int]) + +# This should generate an error because default must be unpacked tuple. +Ts3 = TypeVarTuple("Ts3", default=int) + +Ts4 = TypeVarTuple("Ts4", default=Unpack[Ts0]) + +# This should generate an error because default must be unpacked. +Ts5 = TypeVarTuple("Ts5", default=Ts0) + +Ts6 = TypeVarTuple("Ts6", default=Unpack[tuple[int, ...]]) + +Ts7 = TypeVarTuple("Ts7", default=Unpack[tuple[S1, S2]]) + + +P1 = ParamSpec("P1", default=[]) + +P2 = ParamSpec("P2", default=[int, str, None, int | None]) + +P3 = ParamSpec("P3", default=[int, S1]) + +P4 = ParamSpec("P4", default=[int]) + +P5 = ParamSpec("P5", default=...) + +# This should generate an error because ParamSpec must be a list of types. +P6 = ParamSpec("P6", default=int) + +# This should generate an error because ParamSpec must be a list of types. +P7 = ParamSpec("P7", default=3) + +# This should generate an error because ParamSpec must be a list of types. 
+P8 = ParamSpec("P8", default=(1, int)) + +P9 = ParamSpec("P9", default=P0) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault2.py new file mode 100644 index 00000000..ccb64c2f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault2.py @@ -0,0 +1,116 @@ +# This sample tests the PEP 695 type parameter syntax extensions introduced +# in PEP 696 (default types for TypeVarLike). + +from typing import Any, ParamSpec, TypeVar, Unpack +from typing_extensions import TypeVarTuple # pyright: ignore[reportMissingModuleSource] + +T1 = TypeVar("T1") +Ts1 = TypeVarTuple("Ts1") +P1 = ParamSpec("P1") + + +# This should generate an error because default must be a type expression. +class ClassT1[T = 3]: ... + + +class ClassT2[T: float = int]: ... + + +# This should generate an error because default must be a subtype of bound. +class ClassT3[T: int = float]: ... + + +class ClassT4[T: list[Any] = list[int]]: ... + + +class ClassT5[T: (bytes, str) = str]: ... + + +# This should generate an error because str | bytes isn't one of the constrained types. +class ClassT6[T: (bytes, str) = str | bytes]: ... + + +# This should generate an error because T1 is not a valid default. +class ClassT7[T = T1]: ... + + +# This should generate an error because Ts1 is not a valid default. +class ClassT8[T = Ts1]: ... + + +# This should generate an error because P1 is not a valid default. +class ClassT9[T = P1]: ... + + +class ClassTs1[*Ts = *tuple[int]]: ... + + +class ClassTs2[*Ts = Unpack[tuple[int]]]: ... + + +# This should generate an error because default must be unpacked tuple. +class ClassTs3[*Ts = tuple[int]]: ... + + +# This should generate an error because default must be unpacked tuple. +class ClassTs4[*Ts = int]: ... + + +# This should generate an error because default must be unpacked tuple. +class ClassTs5[*Ts = T1]: ... 
+ + +# This should generate an error because default must be unpacked tuple. +class ClassTs6[*Ts = Ts1]: ... + + +# This should generate an error because default must be unpacked tuple. +class ClassTs7[*Ts = P1]: ... + + +class ClassTs8[*Ts = Unpack[tuple[int, ...]]]: ... + + +# This should generate an error because T1 isn't legal here. +class ClassTs9[*Ts = Unpack[tuple[T1, T1]]]: ... + + +# This should generate an error because ... isn't legal here. +class ClassTs10[*Ts = ...]: ... + + +class ClassP1[**P = [int]]: ... + + +class ClassP2[**P = ...]: ... + + +class ClassP3[**P = []]: ... + + +class ClassP4[**P = [int, str, None, int | None]]: ... + + +# This should generate an error because T1 isn't legal here. +class ClassP5[**P = [T1]]: ... + + +# This should generate an error because ParamSpec must be a list of types. +class ClassP6[**P = int]: ... + + +# This should generate an error because ParamSpec must be a list of types. +class ClassP7[**P = 3]: ... + + +# This should generate an error because ParamSpec must be a list of types. +class ClassP8[**P = [1, int]]: ... + + +# This should generate an error because it combines a traditional ParamSpec +# with a new-style (PEP 695) ParamSpec. +class ClassP9[**P = P1]: ... + + +# This should generate an error because ParamSpec must be a list of types. +class ClassP10[**P = Ts1]: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault3.py new file mode 100644 index 00000000..c6dcf8f2 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault3.py @@ -0,0 +1,53 @@ +# This sample tests error handling for PEP 696. TypeVars without default +# types cannot be after TypeVars with default types. 
+ +from typing import Generic +from typing_extensions import TypeVar, TypeVarTuple # pyright: ignore[reportMissingModuleSource] + + +T0 = TypeVar("T0", default=object) +T1 = TypeVar("T1") +T2 = TypeVar("T2", default=str) + + +# This should generate an error because T1 is after T2. +class ClassA(Generic[T2, T1]): ... + + +# This should generate an error because T1 is after T2. +class ClassB(dict[T2, T1]): ... + + +class ClassC(dict[T2, T1], Generic[T1, T2]): ... + + +# This should generate an error because T1 is after T2. +def funcA(a: T2, b: T1) -> T1 | T2: ... + + +# This should generate an error because T1 is after T2. +TA_A = dict[T2, T1] + + +class ClassD(Generic[T0]): + def method1(self, a: T0, b: T1, /) -> T0 | T1: ... + + +Ts0 = TypeVarTuple("Ts0") +T3 = TypeVar("T3", default=int) + + +# This should generate an error. +class ClassE(Generic[*Ts0, T3]): ... + + +# This should generate an error. +class ClassF[*Ts0, T1 = bool]: + pass + + +# This should generate an error. +type TA1[*Ts0, T1 = bool] = tuple[*Ts0] | T1 + +# This should generate an error. +TA2 = tuple[*Ts0] | T3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault4.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault4.py new file mode 100644 index 00000000..5da123d5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault4.py @@ -0,0 +1,17 @@ +# This sample tests error handling for PEP 696. TypeVars without default +# types cannot be after TypeVars with default types. This is the same as +# typeVarDefault3 except that it uses PEP 695 syntax. + +from typing import TypeVar + + +# This should generate an error because T1 is after T2. +class ClassA[T2 = str, T1]: ... + + +# This should generate an error because T1 is after T2. +def funcA[T2 = str, T1](a: T2, b: T1) -> T1 | T2: ... + + +# This should generate an error because T1 is after T2. 
+type TA_A[T2 = str, T1] = dict[T2, T1] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault5.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault5.py new file mode 100644 index 00000000..67ee439d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefault5.py @@ -0,0 +1,30 @@ +# This sample tests the handling of TypeVar defaults in classes +# with a constructor that defines an __init__ but no __new__. + +from dataclasses import dataclass +from typing import Any, overload + + +class ClassA: ... + + +@dataclass +class ClassB[T: ClassA = ClassA]: + owner: T + + +def post_comment[T: ClassA](owner: T) -> ClassB[T]: + return ClassB(owner) + + +class ClassC: ... + + +@overload +def func1(x: ClassA) -> ClassA: ... +@overload +def func1[T1 = str](x: ClassC | T1) -> T1: ... +def func1(x: Any) -> Any: ... + + +reveal_type(func1(ClassC()), expected_text="str") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultClass1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultClass1.py new file mode 100644 index 00000000..6e121d93 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultClass1.py @@ -0,0 +1,130 @@ +# This sample tests support for PEP 696 -- default types for TypeVars. +# In particular, it tests the handling of default TypeVar types for +# generic classes. 
+ +from typing import Generic, Self, assert_type + +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + ParamSpec, + TypeVar, + TypeVarTuple, + Unpack, +) + +T1 = TypeVar("T1") +T2 = TypeVar("T2", default=int) +T3 = TypeVar("T3", default=str) + + +class ClassA1(Generic[T2, T3]): + def method1(self) -> Self: + return self + + +reveal_type( + ClassA1.method1, expected_text="(self: ClassA1[int, str]) -> ClassA1[int, str]" +) + + +def func_a1(a: ClassA1, b: ClassA1[float], c: ClassA1[float, float], d: ClassA1[()]): + reveal_type(a, expected_text="ClassA1[int, str]") + reveal_type(b, expected_text="ClassA1[float, str]") + reveal_type(c, expected_text="ClassA1[float, float]") + reveal_type(d, expected_text="ClassA1[int, str]") + + +class ClassA2(Generic[T1, T2, T3]): + def method1(self) -> Self: + return self + + +reveal_type( + ClassA2[int].method1, + expected_text="(self: ClassA2[int, int, str]) -> ClassA2[int, int, str]", +) + + +def func_a2( + a: ClassA2, + b: ClassA2[float], + c: ClassA2[float, float], + d: ClassA2[float, float, float], +): + reveal_type(a, expected_text="ClassA2[Unknown, int, str]") + reveal_type(b, expected_text="ClassA2[float, int, str]") + reveal_type(c, expected_text="ClassA2[float, float, str]") + reveal_type(d, expected_text="ClassA2[float, float, float]") + + +P1 = ParamSpec("P1") +P2 = ParamSpec("P2", default=[int, str]) +P3 = ParamSpec("P3", default=...) + + +class ClassB1(Generic[P2, P3]): ... + + +def func_b1(a: ClassB1, b: ClassB1[[float]], c: ClassB1[[float], [float]]): + reveal_type(a, expected_text="ClassB1[(int, str), ...]") + reveal_type(b, expected_text="ClassB1[(float), ...]") + reveal_type(c, expected_text="ClassB1[(float), (float)]") + + +Ts1 = TypeVarTuple("Ts1") +Ts2 = TypeVarTuple("Ts2", default=Unpack[tuple[int, str]]) +Ts3 = TypeVarTuple("Ts3", default=Unpack[tuple[float, ...]]) +Ts4 = TypeVarTuple("Ts4", default=Unpack[tuple[()]]) + + +class ClassC1(Generic[*Ts2]): ... 
+ + +class ClassC2(Generic[T3, *Ts3]): ... + + +class ClassC3(Generic[T3, *Ts4]): ... + + +def func_c1(a: ClassC1, b: ClassC1[*tuple[float]]): + reveal_type(a, expected_text="ClassC1[int, str]") + reveal_type(b, expected_text="ClassC1[float]") + + +def func_c2(a: ClassC2, b: ClassC2[int], c: ClassC2[int, *tuple[()]]): + reveal_type(a, expected_text="ClassC2[str, *tuple[float, ...]]") + reveal_type(b, expected_text="ClassC2[int, *tuple[float, ...]]") + reveal_type(c, expected_text="ClassC2[int]") + + +def func_c3(a: ClassC3, b: ClassC3[int], c: ClassC3[int, *tuple[float]]): + reveal_type(a, expected_text="ClassC3[str]") + reveal_type(b, expected_text="ClassC3[int]") + reveal_type(c, expected_text="ClassC3[int, float]") + + +P4 = ParamSpec("P4", default=[float, bool]) +P5 = ParamSpec("P5", default=[bool]) +Ts5 = TypeVarTuple("Ts5") + + +class ClassD(Generic[*Ts5, P4, P5]): ... # OK + + +reveal_type( + ClassD[int, str, complex], + expected_text="type[ClassD[int, str, complex, (float, bool), (bool)]]", +) +reveal_type( + ClassD[int, str, [str, complex]], + expected_text="type[ClassD[int, str, (str, complex), (bool)]]", +) + +P6 = ParamSpec("P6", default=[str, int]) + + +class ClassE(Generic[P6]): ... + + +assert_type(ClassE, type[ClassE[str, int]]) +assert_type(ClassE(), ClassE[str, int]) +assert_type(ClassE[[bool, bool]](), ClassE[bool, bool]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultClass2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultClass2.py new file mode 100644 index 00000000..9216717b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultClass2.py @@ -0,0 +1,185 @@ +# This sample tests the case where a TypeVar default refers to another +# TypeVar in a class declaration. This sample uses classic TypeVar syntax. +# If you make a change to this file, reflect the change in +# typeVarDefaultClass3.py, which uses PEP 695 syntax. 
+ +from typing import Generic, ParamSpec, TypeVar, TypeVarTuple, Unpack + + +T1 = TypeVar("T1", default=str) +T2 = TypeVar("T2", default=T1) +T3 = TypeVar("T3", default=list[T2]) +T4 = TypeVar("T4", default=dict[T1, T2]) + +# This should generate an error because of the recursive definition. +T5 = TypeVar("T5", default="T5") + + +class ClassA(dict[T1, T2]): ... + + +a1 = ClassA[int]() +reveal_type(a1, expected_text="ClassA[int, int]") + +a2 = ClassA() +reveal_type(a2, expected_text="ClassA[str, str]") + + +# This should generate an error because T2 depends on T1. +class ClassC(Generic[T2, T1]): ... + + +class ClassD(dict[T2, T1], Generic[T1, T2]): ... + + +d1 = ClassD[int]() +reveal_type(d1, expected_text="ClassD[int, int]") + +d2 = ClassD() +reveal_type(d2, expected_text="ClassD[str, str]") + + +# This should generate an error because T5 refers to itself. +class ClassE(Generic[T5]): ... + + +class ClassH(Generic[T1, T2, T3]): ... + + +h1 = ClassH() +reveal_type(h1, expected_text="ClassH[str, str, list[str]]") + +h2 = ClassH[int]() +reveal_type(h2, expected_text="ClassH[int, int, list[int]]") + +h3 = ClassH[int, float]() +reveal_type(h3, expected_text="ClassH[int, float, list[float]]") + + +# This should generate an error because T2 depends on T1. +class ClassI(Generic[T2]): ... + + +# This should generate an error because T4 depends on T2. +class ClassJ(Generic[T1, T4]): ... + + +class ClassK(Generic[T1]): + # This should generate an error because T2 depends on T1, which + # is defined in an outer scope. + class ClassL(Generic[T2]): ... + + +class ClassMChild1(Generic[T1]): + a: T1 + + +class ClassMChild2(Generic[T1]): + b: T1 + + +class ClassM(ClassMChild1[T1], ClassMChild2[T2]): ... + + +m1 = ClassM[int]() +reveal_type(m1.a, expected_text="int") +reveal_type(m1.b, expected_text="int") + +m2 = ClassM() +reveal_type(m2.a, expected_text="str") +reveal_type(m2.b, expected_text="str") + + +class ClassNChild(Generic[T1]): + a: T1 + + +class ClassN(ClassNChild): ... 
+ + +n1 = ClassN() +reveal_type(n1.a, expected_text="str") + + +P1 = ParamSpec("P1", default=...) +P2 = ParamSpec("P2", default=P1) +P3 = ParamSpec("P3", default=P2) +P4 = ParamSpec("P4", default=[int, T1]) + + +class ClassPA(Generic[P1, P2, P3]): ... + + +pa1 = ClassPA() +reveal_type(pa1, expected_text="ClassPA[..., ..., ...]") + +pa2 = ClassPA[[str]]() +reveal_type(pa2, expected_text="ClassPA[(str), (str), (str)]") + +pa3 = ClassPA[..., [float]]() +reveal_type(pa3, expected_text="ClassPA[..., (float), (float)]") + +pa4 = ClassPA[..., [int, int], [float]]() +reveal_type(pa4, expected_text="ClassPA[..., (int, int), (float)]") + + +# This should generate an error because P1 depends on P2. +class ClassPB(Generic[P2, P1]): ... + + +class ClassPC(Generic[T1, P4]): ... + + +pc1 = ClassPC() +reveal_type(pc1, expected_text="ClassPC[str, (int, str)]") + +pc2 = ClassPC[float]() +reveal_type(pc2, expected_text="ClassPC[float, (int, float)]") + +pc3 = ClassPC[float, ...]() +reveal_type(pc3, expected_text="ClassPC[float, ...]") + + +# This should generate an error because P4 depends on T1. +class ClassPD(Generic[P4, T1]): ... + + +Ts1 = TypeVarTuple("Ts1", default=Unpack[tuple[T1, T2]]) +Ts2 = TypeVarTuple("Ts2", default=Unpack[tuple[T1, ...]]) + + +class ClassTA(Generic[T1, T2, *Ts1]): ... + + +ta1 = ClassTA() +reveal_type(ta1, expected_text="ClassTA[str, str, str, str]") + +ta2 = ClassTA[int]() +reveal_type(ta2, expected_text="ClassTA[int, int, int, int]") + +ta3 = ClassTA[int, float]() +reveal_type(ta3, expected_text="ClassTA[int, float, int, float]") + +ta4 = ClassTA[int, float, *tuple[None, ...]]() +reveal_type(ta4, expected_text="ClassTA[int, float, *tuple[None, ...]]") + + +# This should generate an error because Ts1 depends on T2. +# It should also produce an error because T2 comes after a TypeVarTuple. +class ClassTB(Generic[T1, *Ts1, T2]): ... + + +class ClassTC(Generic[T1, *Ts2]): ... 
+ + +tc1 = ClassTC() +reveal_type(tc1, expected_text="ClassTC[str, *tuple[str, ...]]") + +tc2 = ClassTC[int]() +reveal_type(tc2, expected_text="ClassTC[int, *tuple[int, ...]]") + +tc3 = ClassTC[int, *tuple[()]]() +reveal_type(tc3, expected_text="ClassTC[int]") + +tc4 = ClassTC[int, *tuple[None]]() +reveal_type(tc4, expected_text="ClassTC[int, None]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultClass3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultClass3.py new file mode 100644 index 00000000..f276e6bc --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultClass3.py @@ -0,0 +1,142 @@ +# This sample tests the case where a TypeVar default refers to another +# TypeVar in a class declaration. This sample uses PEP 695 syntax. + +from typing import Self, Unpack + + +class ClassA[T1 = str, T2 = T1](dict[T1, T2]): + def method1(self) -> Self: + return self + + +reveal_type( + ClassA[int].method1, expected_text="(self: ClassA[int, int]) -> ClassA[int, int]" +) +reveal_type( + ClassA.method1, expected_text="(self: ClassA[str, str]) -> ClassA[str, str]" +) + +a1 = ClassA[int]() +reveal_type(a1, expected_text="ClassA[int, int]") + +a2 = ClassA() +reveal_type(a2, expected_text="ClassA[str, str]") + + +# This should generate an error because T2 depends on T1. +class ClassC[T2 = T1, T1 = str]: ... + + +class ClassD[T1 = str, T2 = T1](dict[T2, T1]): ... + + +d1 = ClassD[int]() +reveal_type(d1, expected_text="ClassD[int, int]") + +d2 = ClassD() +reveal_type(d2, expected_text="ClassD[str, str]") + + +# This should generate an error because T5 refers to itself. +class ClassE[T5 = T5]: ... + + +class ClassH[T1 = str, T2 = T1, T3 = list[T2]]: ... 
+ + +h1 = ClassH() +reveal_type(h1, expected_text="ClassH[str, str, list[str]]") + +h2 = ClassH[int]() +reveal_type(h2, expected_text="ClassH[int, int, list[int]]") + +h3 = ClassH[int, float]() +reveal_type(h3, expected_text="ClassH[int, float, list[float]]") + + +# This should generate an error because T2 depends on T1. +class ClassI[T2 = T1]: ... + + +# This should generate an error because T4 depends on T2. +class ClassJ[T1 = str, T4 = dict[T1, T2]]: ... + + +class ClassK[T1 = str]: + # This should generate an error because T2 depends on T1, which + # is defined in an outer scope. + class ClassL[T2 = T1]: ... + + +class ClassPA[**P1, **P2 = P1, **P3 = P2]: ... + + +pa1 = ClassPA() +reveal_type(pa1, expected_text="ClassPA[..., ..., ...]") + +pa2 = ClassPA[[str]]() +reveal_type(pa2, expected_text="ClassPA[(str), (str), (str)]") + +pa3 = ClassPA[..., [float]]() +reveal_type(pa3, expected_text="ClassPA[..., (float), (float)]") + +pa4 = ClassPA[..., [int, int], [float]]() +reveal_type(pa4, expected_text="ClassPA[..., (int, int), (float)]") + + +# This should generate an error because P1 depends on P2. +class ClassPB[**P2 = P1, **P1 = ...]: ... + + +class ClassPC[T1 = str, **P4 = [int, T1]]: ... + + +pc1 = ClassPC() +reveal_type(pc1, expected_text="ClassPC[str, (int, str)]") + +pc2 = ClassPC[float]() +reveal_type(pc2, expected_text="ClassPC[float, (int, float)]") + +pc3 = ClassPC[float, ...]() +reveal_type(pc3, expected_text="ClassPC[float, ...]") + + +# This should generate an error because P4 depends on T1. +class ClassPD[**P4 = [int, T1], T1 = str]: ... + + +class ClassTA[T1 = str, T2 = T1, *Ts1 = Unpack[tuple[T1, T2]]]: ... 
+ + +ta1 = ClassTA() +reveal_type(ta1, expected_text="ClassTA[str, str, str, str]") + +ta2 = ClassTA[int]() +reveal_type(ta2, expected_text="ClassTA[int, int, int, int]") + +ta3 = ClassTA[int, float]() +reveal_type(ta3, expected_text="ClassTA[int, float, int, float]") + +ta4 = ClassTA[int, float, *tuple[None, ...]]() +reveal_type(ta4, expected_text="ClassTA[int, float, *tuple[None, ...]]") + + +# This should generate an error because Ts1 depends on T2. +# It will generate a second error because T2 follows a TypeVarTuple. +class ClassTB[T1 = str, *Ts1 = Unpack[tuple[T1, T2]], T2 = T1]: ... + + +class ClassTC[T1 = str, *Ts2 = Unpack[tuple[T1, ...]]]: ... + + +tc1 = ClassTC() +reveal_type(tc1, expected_text="ClassTC[str, *tuple[str, ...]]") + +tc2 = ClassTC[int]() +reveal_type(tc2, expected_text="ClassTC[int, *tuple[int, ...]]") + +tc3 = ClassTC[int, *tuple[()]]() +reveal_type(tc3, expected_text="ClassTC[int]") + +tc4 = ClassTC[int, *tuple[None]]() +reveal_type(tc4, expected_text="ClassTC[int, None]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultClass4.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultClass4.py new file mode 100644 index 00000000..d49a061c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultClass4.py @@ -0,0 +1,22 @@ +# This sample tests the handling of TypeVar defaults with isinstance type +# narrowing. + +from typing import Any, Generic, ParamSpec, TypeVar + + +P = ParamSpec("P", default=...) +R = TypeVar("R", default=Any) + + +class ParentA(Generic[P, R]): ... 
+ + +class ChildA(ParentA[P, R]): + pass + + +def func(x: ParentA[[int], int]): + if isinstance(x, ChildA): + reveal_type(x, expected_text="ChildA[(int), int]") + else: + reveal_type(x, expected_text="ParentA[(int), int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultFunction1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultFunction1.py new file mode 100644 index 00000000..ce176cb5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultFunction1.py @@ -0,0 +1,60 @@ +# This sample tests support for PEP 696 -- default types for TypeVars. +# In particular, it tests the case where a TypeVarLike goes unsolved +# in a call, and a default value is used rather than Unknown. + +from typing import Callable, Generic, Unpack +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + ParamSpec, + TypeVar, + TypeVarTuple, +) + +T = TypeVar("T", default=str) + + +def func1(x: int | T) -> list[T]: ... + + +v1_1 = func1(3.4) +reveal_type(v1_1, expected_text="list[float]") + +v1_2 = func1(3) +reveal_type(v1_2, expected_text="list[str]") + + +P = ParamSpec("P", default=[int, str, str]) + + +class ClassA(Generic[P]): + def __init__(self, x: Callable[P, None]) -> None: ... + + +def func2(x: int | ClassA[P]) -> ClassA[P]: ... + + +def callback1(x: str) -> None: ... + + +v2_1 = func2(ClassA(callback1)) +reveal_type(v2_1, expected_text="ClassA[(x: str)]") + + +v2_2 = func2(3) +reveal_type(v2_2, expected_text="ClassA[(int, str, str)]") + + +Ts = TypeVarTuple("Ts", default=Unpack[tuple[int, str, float]]) + + +def func3(x: int | Callable[[*Ts], None]) -> tuple[*Ts]: ... + + +v3_1 = func3(callback1) +reveal_type(v3_1, expected_text="tuple[str]") + +v3_2 = func3(3) +reveal_type(v3_2, expected_text="tuple[int, str, float]") + + +P2 = ParamSpec("P2", default=...) 
+P3 = ParamSpec("P3", default="...") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultFunction2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultFunction2.py new file mode 100644 index 00000000..f995399c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultFunction2.py @@ -0,0 +1,63 @@ +# This sample tests support for PEP 696 (default types for TypeVars) +# when used to define generic functions and with defaults type +# expressions that refer to other type variables. + +from typing import Generic, Self +from typing_extensions import TypeVar # pyright: ignore[reportMissingModuleSource] + +T1 = TypeVar("T1", default=str) +T2 = TypeVar("T2", default=list[T1]) + + +def func1(x: T1, y: int | T2 = 0) -> T2 | list[T1]: ... + + +v1_1 = func1("hi", 3.4) +reveal_type(v1_1, expected_text="float | list[str]") + +v1_2 = func1("") +reveal_type(v1_2, expected_text="list[str]") + + +# This should generate an error because T1 depends on T2. +def func2(x: T2, y: T1) -> list[T1 | T2]: ... + + +T3 = TypeVar("T3", default=int) + + +class ClassA(Generic[T3]): + def __init__(self, value: T3): + self.value = value + + def func1(self, value: T3) -> Self: + self.value = value + return self + + +T4 = TypeVar("T4", default=int) +T5 = TypeVar("T5", default=T4) + + +class ClassB(Generic[T4, T5]): + @property + def x(self) -> T4: ... + + @property + def y(self) -> T5: ... + + +b1 = ClassB() +reveal_type(b1.x, expected_text="int") +reveal_type(b1.y, expected_text="int") + + +T6 = TypeVar("T6", default=int) +T7 = TypeVar("T7", default=T6) +T8 = TypeVar("T8", default=int | None) + + +class ClassC(Generic[T6, T7, T8]): + def __new__(cls, x: T7, /) -> Self: ... + + def method1(self) -> T7: ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultFunction3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultFunction3.py new file mode 100644 index 00000000..d3f3876a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultFunction3.py @@ -0,0 +1,20 @@ +# This sample tests support for PEP 696 (default types for TypeVars) +# when used to define generic functions and with defaults type +# expressions that refer to other type variables. This is the same +# as typeVarDefaultFunction2 except that it uses the PEP 695 syntax. + +from typing import TypeVar + + +def func1[T1, T2 = list[T1]](x: T1, y: int | T2 = 0) -> T2 | list[T1]: ... + + +v1_1 = func1("hi", 3.4) +reveal_type(v1_1, expected_text="float | list[str]") + +v1_2 = func1("") +reveal_type(v1_2, expected_text="list[str]") + + +# This should generate an error because T1 depends on T2. +def func2[T2 = list[T1], T1 = str]() -> None: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultTypeAlias1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultTypeAlias1.py new file mode 100644 index 00000000..fc82f407 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultTypeAlias1.py @@ -0,0 +1,98 @@ +# This sample tests support for PEP 696 -- default types for TypeVars. +# In particular, it tests the handling of default TypeVar types for +# generic type aliases. 
+ +from collections.abc import Callable +from typing import Any, TypeAlias +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVar, + ParamSpec, + TypeVarTuple, + Unpack, +) + + +T1 = TypeVar("T1") +T2 = TypeVar("T2", default=int) +T3 = TypeVar("T3", default=str) + +TA1: TypeAlias = dict[T2, T3] + + +def func_a1(a: TA1, b: TA1[float], c: TA1[float, float]): + reveal_type(a, expected_text="dict[int, str]") + reveal_type(b, expected_text="dict[float, str]") + reveal_type(c, expected_text="dict[float, float]") + + +TA2: TypeAlias = dict[T1, T2] | list[T3] + + +def func_a2(a: TA2, b: TA2[float], c: TA2[float, float], d: TA2[float, float, float]): + reveal_type(a, expected_text="dict[Unknown, int] | list[str]") + reveal_type(b, expected_text="dict[float, int] | list[str]") + reveal_type(c, expected_text="dict[float, float] | list[str]") + reveal_type(d, expected_text="dict[float, float] | list[float]") + + +P1 = ParamSpec("P1") +P2 = ParamSpec("P2", default=[int, str]) +P3 = ParamSpec("P3", default=...) + +TA3: TypeAlias = Callable[P2, Any] | Callable[P3, Any] + + +def func_b1(a: TA3, b: TA3[[float]], c: TA3[[float], [list[float]]]): + reveal_type(a, expected_text="((int, str) -> Any) | ((...) -> Any)") + reveal_type(b, expected_text="((float) -> Any) | ((...) 
-> Any)") + reveal_type(c, expected_text="((float) -> Any) | ((list[float]) -> Any)") + + +Ts1 = TypeVarTuple("Ts1") +Ts2 = TypeVarTuple("Ts2", default=Unpack[tuple[int, str]]) +Ts3 = TypeVarTuple("Ts3", default=Unpack[tuple[float, ...]]) +Ts4 = TypeVarTuple("Ts4", default=Unpack[tuple[()]]) + +TA4: TypeAlias = tuple[*Ts2] + +TA5: TypeAlias = tuple[T3, *Ts3] + +TA6: TypeAlias = tuple[T3, *Ts4] + + +def func_c1(a: TA4, b: TA4[*tuple[float]]): + reveal_type(a, expected_text="tuple[int, str]") + reveal_type(b, expected_text="tuple[float]") + + +def func_c2(a: TA5, b: TA5[int], c: TA5[int, *tuple[()]]): + reveal_type(a, expected_text="tuple[str, *tuple[float, ...]]") + reveal_type(b, expected_text="tuple[int, *tuple[float, ...]]") + reveal_type(c, expected_text="tuple[int]") + + +def func_c3(a: TA6, b: TA6[int], c: TA6[int, *tuple[float]]): + reveal_type(a, expected_text="tuple[str]") + reveal_type(b, expected_text="tuple[int]") + reveal_type(c, expected_text="tuple[int, float]") + + +P4 = ParamSpec("P4", default=[float, bool]) +P5 = ParamSpec("P5", default=[bool]) +Ts5 = TypeVarTuple("Ts5") + +TA7 = tuple[*Ts5] | Callable[P4, Any] | Callable[P5, Any] + + +def func_d1(x: TA7[int, str, complex]): + reveal_type( + x, + expected_text="tuple[int, str, complex] | ((float, bool) -> Any) | ((bool) -> Any)", + ) + + +def func_d2(x: TA7[int, str, [str, complex]]): + reveal_type( + x, + expected_text="tuple[int, str] | ((str, complex) -> Any) | ((bool) -> Any)", + ) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultTypeAlias2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultTypeAlias2.py new file mode 100644 index 00000000..36d19482 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultTypeAlias2.py @@ -0,0 +1,139 @@ +# This sample tests support for PEP 696 (default types for TypeVars). 
+# In particular, it tests the handling of default TypeVar types for +# generic type aliases when one TypeVar default expression refers +# to another. + +from typing import Callable, Generic, Unpack +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + ParamSpec, + TypeVar, + TypeVarTuple, +) + +T1 = TypeVar("T1", default=str) +T2 = TypeVar("T2", default=T1) +T3 = TypeVar("T3", default=list[T2]) +T4 = TypeVar("T4", default=dict[T1, T2]) + +# This should generate an error because of the recursive definition. +T5 = TypeVar("T5", default="T5") + +TA_A = dict[T1, T2] + + +def func1(a1: TA_A[int], a2: TA_A): + reveal_type(a1, expected_text="dict[int, int]") + reveal_type(a2, expected_text="dict[str, str]") + + +# This should generate an error because T2 depends on T1. +TA_B = dict[T2, T1] + +# This should generate an error because T5 refers to itself. +TA_C = list[T5] + +TA_D = tuple[T1, T2, T3] + + +def func2(d1: TA_D, d2: TA_D[int], d3: TA_D[int, float]): + reveal_type(d1, expected_text="tuple[str, str, list[str]]") + reveal_type(d2, expected_text="tuple[int, int, list[int]]") + reveal_type(d3, expected_text="tuple[int, float, list[float]]") + + +# This should generate an error because T2 depends on T1. +TA_E = list[T2] + +# This should generate an error because T4 depends on T2. +TA_F = dict[T2, T4] + + +class ClassK(Generic[T1]): + # This should generate an error because T2 depends on T1, which + # is defined in an outer scope. + TA_G = list[T2] + + +P1 = ParamSpec("P1", default=...) +P2 = ParamSpec("P2", default=P1) +P3 = ParamSpec("P3", default=P2) +P4 = ParamSpec("P4", default=[int, T1]) + +TA_PA = tuple[Callable[P1, None], Callable[P2, None], Callable[P3, None]] + + +def func3( + pa1: TA_PA, + pa2: TA_PA[[str]], + pa3: TA_PA[..., [float]], + pa4: TA_PA[..., [int, int], [float]], +): + reveal_type(pa1, expected_text="tuple[(...) -> None, (...) -> None, (...) 
-> None]") + reveal_type(pa2, expected_text="tuple[(str) -> None, (str) -> None, (str) -> None]") + reveal_type( + pa3, expected_text="tuple[(...) -> None, (float) -> None, (float) -> None]" + ) + reveal_type( + pa4, expected_text="tuple[(...) -> None, (int, int) -> None, (float) -> None]" + ) + + +# This should generate an error because P1 depends on P2. +TA_PB = tuple[Callable[P2, None], Callable[P1, None]] + +TA_PC = T1 | Callable[P4, T1] + + +def func4(pc1: TA_PC, pc2: TA_PC[float], pc3: TA_PC[float, ...]): + reveal_type(pc1, expected_text="str | ((int, str) -> str)") + reveal_type(pc2, expected_text="float | ((int, float) -> float)") + reveal_type(pc3, expected_text="float | ((...) -> float)") + + +# This should generate an error because P4 depends on T1. +TA_PD = Callable[P4, T1] + + +Ts1 = TypeVarTuple("Ts1", default=Unpack[tuple[T1, T2]]) +Ts2 = TypeVarTuple("Ts2", default=Unpack[tuple[T1, ...]]) + + +class ClassTA(Generic[T1, T2, *Ts1]): ... + + +TA_TA = ClassTA[T1, T2, *Ts1] + + +def func5( + ta1: TA_TA, + ta2: TA_TA[int], + ta3: TA_TA[int, float], + ta4: TA_TA[int, float, *tuple[None, ...]], +): + reveal_type(ta1, expected_text="ClassTA[str, str, str, str]") + reveal_type(ta2, expected_text="ClassTA[int, int, int, int]") + reveal_type(ta3, expected_text="ClassTA[int, float, int, float]") + reveal_type(ta4, expected_text="ClassTA[int, float, *tuple[None, ...]]") + + +# This should generate an error because Ts1 depends on T2. +# It should also generate a second error because T2 follows a TypeVarTuple. +TA_TB = tuple[T1, *Ts1, T2] + + +class ClassTC(Generic[T1, *Ts2]): ... 
+ + +TA_TC = ClassTC[T1, *Ts2] + + +def func6( + tc1: TA_TC, + tc2: TA_TC[int], + tc3: TA_TC[int, *tuple[()]], + tc4: TA_TC[int, *tuple[None]], +): + reveal_type(tc1, expected_text="ClassTC[str, *tuple[str, ...]]") + reveal_type(tc2, expected_text="ClassTC[int, *tuple[int, ...]]") + reveal_type(tc3, expected_text="ClassTC[int]") + reveal_type(tc4, expected_text="ClassTC[int, None]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultTypeAlias3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultTypeAlias3.py new file mode 100644 index 00000000..bbc23c04 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarDefaultTypeAlias3.py @@ -0,0 +1,125 @@ +# This sample tests support for PEP 696 (default types for TypeVars). +# In particular, it tests the handling of default TypeVar types for +# generic type aliases when one TypeVar default expression refers +# to another. This is the same as typeVarDefaultTypeAlias2 except +# that it uses PEP 695 syntax. + +from typing import Callable, Unpack + +type TA_A[T1 = str, T2 = T1] = dict[T1, T2] + + +def func1(a1: TA_A[int], a2: TA_A): + reveal_type(a1, expected_text="dict[int, int]") + reveal_type(a2, expected_text="dict[str, str]") + + +# This should generate an error because T2 depends on T1. +type TA_B[T2 = T1, T1 = str] = None + +# This should generate an error because T5 refers to itself. +type TA_C[T5 = T5] = None + +type TA_D[T1 = str, T2 = T1, T3 = list[T2]] = tuple[T1, T2, T3] + + +def func2(d1: TA_D, d2: TA_D[int], d3: TA_D[int, float]): + reveal_type(d1, expected_text="tuple[str, str, list[str]]") + reveal_type(d2, expected_text="tuple[int, int, list[int]]") + reveal_type(d3, expected_text="tuple[int, float, list[float]]") + + +# This should generate an error because T2 depends on T1. +type TA_E[T2 = T1] = list[T2] + +# This should generate two errors because T4 depends on T2 and T1. 
+type TA_F[T2 = T1, T4 = dict[T1, T2]] = dict[T2, T4] + + +class ClassK[T1]: + # This should generate an error because T2 depends on T1, which + # is defined in an outer scope. + type TA_G[T2 = T1] = list[T2] + + +type TA_PA[**P1, **P2 = P1, **P3 = P2] = tuple[ + Callable[P1, None], Callable[P2, None], Callable[P3, None] +] + + +def func3( + pa1: TA_PA, + pa2: TA_PA[[str]], + pa3: TA_PA[..., [float]], + pa4: TA_PA[..., [int, int], [float]], +): + reveal_type(pa1, expected_text="tuple[(...) -> None, (...) -> None, (...) -> None]") + reveal_type(pa2, expected_text="tuple[(str) -> None, (str) -> None, (str) -> None]") + reveal_type( + pa3, expected_text="tuple[(...) -> None, (float) -> None, (float) -> None]" + ) + reveal_type( + pa4, expected_text="tuple[(...) -> None, (int, int) -> None, (float) -> None]" + ) + + +# This should generate an error because P1 depends on P2. +type TA_PB[**P2 = P1, **P1 = ...] = tuple[Callable[P2, None], Callable[P1, None]] + +type TA_PC[ + T1 = str, + **P4 = [ + int, + T1, + ], +] = T1 | Callable[P4, T1] + + +def func4(pc1: TA_PC, pc2: TA_PC[float], pc3: TA_PC[float, ...]): + reveal_type(pc1, expected_text="str | ((int, str) -> str)") + reveal_type(pc2, expected_text="float | ((int, float) -> float)") + reveal_type(pc3, expected_text="float | ((...) -> float)") + + +# This should generate an error because P4 depends on T1. +type TA_PD[**P4 = [int, T1], T1 = str] = Callable[P4, T1] + + +class ClassTA[T1, T2, *Ts1]: ... 
+ + +type TA_TA[T1 = str, T2 = T1, *Ts1 = Unpack[tuple[T1, T2]]] = ClassTA[T1, T2, *Ts1] + + +def func5( + ta1: TA_TA, + ta2: TA_TA[int], + ta3: TA_TA[int, float], + ta4: TA_TA[int, float, *tuple[None, ...]], +): + reveal_type(ta1, expected_text="ClassTA[str, str, str, str]") + reveal_type(ta2, expected_text="ClassTA[int, int, int, int]") + reveal_type(ta3, expected_text="ClassTA[int, float, int, float]") + reveal_type(ta4, expected_text="ClassTA[int, float, *tuple[None, ...]]") + + +# This should generate an error because Ts1 depends on T2. +type TA_TB[T1 = str, *Ts1 = Unpack[tuple[T1, T2]], T2 = T1] = tuple[T1, *Ts1, T2] + + +class ClassTC[T1, *Ts2]: ... + + +type TA_TC[T1 = str, *Ts2 = Unpack[tuple[T1, ...]]] = ClassTC[T1, *Ts2] + + +def func6( + tc1: TA_TC, + tc2: TA_TC[int], + tc3: TA_TC[int, *tuple[()]], + tc4: TA_TC[int, *tuple[None]], +): + reveal_type(tc1, expected_text="ClassTC[str, *tuple[str, ...]]") + reveal_type(tc2, expected_text="ClassTC[int, *tuple[int, ...]]") + reveal_type(tc3, expected_text="ClassTC[int]") + reveal_type(tc4, expected_text="ClassTC[int, None]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple1.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple1.py new file mode 100644 index 00000000..4881afed --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple1.py @@ -0,0 +1,62 @@ +# This sample tests various conditions under which variadic +# type variables can and cannot be used. + +# pyright: reportMissingModuleSource=false + +from typing import Generic, TypeVar, Union +from typing_extensions import TypeVarTuple, Unpack + + +_T = TypeVar("_T") +_Xs = TypeVarTuple("_Xs") + + +class ClassA(Generic[_T, Unpack[_Xs]]): + def __init__(self, *args: Unpack[_Xs]) -> None: + reveal_type(args, expected_text="tuple[*_Xs@ClassA]") + + # This should generate two errors. + def func2(self) -> Union[_Xs]: ... + + def func3(self) -> tuple[Unpack[_Xs]]: ... 
+ + # This should generate an error. + def func4(self) -> tuple[_Xs]: ... + + def func5(self) -> "ClassA[int, str, Unpack[_Xs]]": ... + + # This should be an error because list doesn't accept a variadic TypeVar. + x: list[_Xs] = [] + + # This should generate an error. + y: _Xs = () + + # This should generate an error. + z: tuple[_Xs, ...] + + +# This should generate an error. +class ClassB(Generic[_Xs]): ... + + +# This should generate an error. +x: list[_Xs] = [] + +# This should generate an error. +y: _Xs = () + + +# This should generate an error because of the name mismatch. +BadName = TypeVarTuple("Ts1") + +# This should generate TypeVarTuple cannot have constraints. +Ts2 = TypeVarTuple("Ts2", int, str) + +# This should generate TypeVarTuple cannot be covariant. +Ts3 = TypeVarTuple("Ts3", covariant=True) + +# This should generate TypeVarTuple cannot be contravariant. +Ts4 = TypeVarTuple("Ts4", contravariant=True) + +# This should generate TypeVarTuple does not accept other keyword arguments. +Ts5 = TypeVarTuple("Ts5", other=True) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple10.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple10.py new file mode 100644 index 00000000..88b70c4b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple10.py @@ -0,0 +1,64 @@ +# This sample tests the handling of variadic type variables when used +# in conjunction with unpacked tuples. + +from __future__ import annotations +from typing import Any, Generic, NewType, TypeVar +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + +DType = TypeVar("DType") +Shape = TypeVarTuple("Shape") + +Batch = NewType("Batch", int) +Height = NewType("Height", int) +Width = NewType("Width", int) +Channels = NewType("Channels", int) + + +class Array(Generic[DType, Unpack[Shape]]): + def __abs__(self) -> Array[DType, Unpack[Shape]]: ... 
+ + def __add__( + self, other: Array[DType, Unpack[Shape]] + ) -> Array[DType, Unpack[Shape]]: ... + + +def process_batch_channels( + x: Array[Batch, Unpack[tuple[Any, ...]], Channels], +) -> None: ... + + +def expect_variadic_array1(x: Array[Batch, Unpack[Shape]]) -> tuple[Unpack[Shape]]: ... + + +def expect_variadic_array2(x: Array[Batch, Unpack[tuple[Any, ...]]]) -> None: ... + + +def expect_precise_array(x: Array[Batch, Height, Width, Channels]) -> None: ... + + +def func1(x: Array[Batch, Height, Width, Channels]): + process_batch_channels(x) + + expect_precise_array(x) + + +def func2(y: Array[Batch, Channels]): + process_batch_channels(y) + + # This should generate an error because the type args don't match. + expect_precise_array(y) + + +def func3(z: Array[Batch]): + # This should generate an error because Channels is missing + process_batch_channels(z) + + +def func4(y: Array[Any, Unpack[tuple[Any, ...]]]): + reveal_type(y, expected_text="Array[Any, *tuple[Any, ...]]") + expect_variadic_array1(y) + expect_variadic_array2(y) + expect_precise_array(y) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple11.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple11.py new file mode 100644 index 00000000..955889ed --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple11.py @@ -0,0 +1,71 @@ +# This sample tests packing and unpacking operations with +# variadic type variables. It is the same as variadicTypeVar4.py +# except that it uses the * operator rather than Unpack. + +# Enable experimental features to support Union[*Ts]. +# pyright: enableExperimentalFeatures=true + +from typing import Generic, NewType, Union +from typing_extensions import TypeVarTuple # pyright: ignore[reportMissingModuleSource] + + +Shape = TypeVarTuple("Shape") + + +class Array(Generic[*Shape]): + def __init__(self, *shape: *Shape): + self.shape = shape + + def __abs__(self) -> "Array[*Shape]": ... 
+ + def __add__(self, other: "Array[*Shape]") -> "Array[*Shape]": ... + + +Height = NewType("Height", int) +Width = NewType("Width", int) +x: Array[Height, Width] = Array(Height(480), Width(640)) +reveal_type(x.shape, expected_text="tuple[Height, Width]") +reveal_type(abs(x), expected_text="Array[Height, Width]") +reveal_type(x + abs(x), expected_text="Array[Height, Width]") + + +_Xs = TypeVarTuple("_Xs") + + +def func1(a: tuple[*_Xs], b: tuple[*_Xs]) -> Union[*_Xs]: ... + + +def func2(a: tuple[int, *_Xs], b: tuple[int, *_Xs]) -> Union[*_Xs]: ... + + +def func3(p1: tuple[int], p2: tuple[int, str], p3: tuple[int, int]): + # This should generate an error. + v1 = func1(p1, p2) + + # This should generate an error. + v2 = func2(p1, p2) + + v3 = func2(p2, p2) + reveal_type(v3, expected_text="str") + + v4 = func2((3, "hi"), p2) + reveal_type(v4, expected_text="str") + + # This should generate an error. + func2((3, 3), p2) + + v5 = func2((3, 3), p3) + reveal_type(v5, expected_text="int") + + +def func4(a: int, *args: *_Xs, **kwargs: str) -> tuple[int, *_Xs]: ... + + +c1 = func4(4, 5.4, 6j, b="3", c="5") +reveal_type(c1, expected_text="tuple[int, float, complex]") + +c2 = func4(4, b="3", c="5") +reveal_type(c2, expected_text="tuple[int]") + +# This should generate an error. +c3 = func4(b="3", c="5") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple12.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple12.py new file mode 100644 index 00000000..d860d723 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple12.py @@ -0,0 +1,30 @@ +# This sample tests the case where a variadic TypeVar is used in +# conjunction with a keyword-only parameter. It also tests protocol +# invariance validation when a TypeVarTuple is used in the protocol +# along with a non-variadic TypeVar. 
+ +# pyright: strict + +from typing import Protocol, TypeVar +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") + + +class CallbackA(Protocol[*Ts, T]): + def __call__(self, *args: *Ts, keyed: T) -> tuple[Unpack[Ts], T]: ... + + +def example(a: int, b: str, *, keyed: bool) -> tuple[int, str, bool]: + return (a, b, keyed) + + +a: CallbackA[int, str, bool] = example + +reveal_type( + a, expected_text="(a: int, b: str, *, keyed: bool) -> tuple[int, str, bool]" +) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple13.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple13.py new file mode 100644 index 00000000..2591a0ae --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple13.py @@ -0,0 +1,37 @@ +# This sample tests the case where a variadic TypeVar is unpacked +# in a call expression that invokes a call that accepts an unpacked +# TypeVarTuple. + +from typing import Protocol, TypeVar +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") + + +class CallbackPosOnly(Protocol[Unpack[Ts]]): + def __call__(self, *args: *Ts) -> tuple[Unpack[Ts]]: ... + + +def invoke_posonly(fn: CallbackPosOnly[Unpack[Ts]], *args: *Ts) -> tuple[Unpack[Ts]]: + return fn(*args) + + +class CallbackKeyed(Protocol[Unpack[Ts]]): + def __call__(self, *args: *Ts, keyed: bool) -> tuple[Unpack[Ts]]: ... + + +def invoke_keyed(fn: CallbackKeyed[Unpack[Ts]], *args: *Ts) -> tuple[Unpack[Ts]]: + return fn(*args, keyed=True) + + +def invoke_keyed_should_fail( + fn: CallbackKeyed[Unpack[Ts]], *args: *Ts +) -> tuple[Unpack[Ts]]: + # This should generate an error because "keyed" should + # be interpreted as a keyword-only parameter. 
+ return fn(*args, True) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple14.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple14.py new file mode 100644 index 00000000..5e26314b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple14.py @@ -0,0 +1,109 @@ +# This sample tests the matching of a traditional *args parameter +# and a *args unpacked Tuple to a *args TypeVarTuple. + +from typing import Callable, TypeVar +from typing_extensions import TypeVarTuple # pyright: ignore[reportMissingModuleSource] + +Ts = TypeVarTuple("Ts") +R = TypeVar("R") + + +def call_with_params(func: Callable[[*Ts], R], *params: *Ts) -> R: + # This should generate an error because it's missing a *. + func(params) + + return func(*params) + + +def callback1(*args: int) -> int: ... + + +def callback2(*args: *tuple[int, int]) -> int: ... + + +call_with_params(callback1) +call_with_params(callback1, 1, 2, 3) + +# This should generate an error. +call_with_params(callback1, "1") + +# This should generate an error. +call_with_params(callback2) + +call_with_params(callback2, 1, 1) + +# This should generate an error. +call_with_params(callback2, 1, "") + + +def callback3(*args: *tuple[int, *tuple[str, ...], int]) -> int: ... + + +# This should generate an error. +call_with_params(callback3) + +call_with_params(callback3, 1, 2) + +call_with_params(callback3, 1, "hi", 2) + +call_with_params(callback3, 1, "hi", "hi", 2) + +# This should generate an error. +call_with_params(callback3, 1, 1, 2) + + +class ClassA: + @classmethod + def method1(cls, *shape: *Ts) -> tuple[*Ts]: ... + + +def func1(target: Callable[[*Ts], int]) -> tuple[*Ts]: ... + + +def func2(a: int, b: str, /) -> int: ... 
+ + +def func3(action: Callable[[int, str], int]): + v1 = func1(func2) + reveal_type(v1, expected_text="tuple[int, str]") + + v2 = func1(action) + reveal_type(v2, expected_text="tuple[int, str]") + + +def func4(*args: *tuple[int, str]): ... + + +func4(1, "") + +# This should generate an error. +func4() + +# This should generate an error. +func4(1) + +# This should generate an error. +func4(1, "", "") + + +def func5(*args: *tuple[int, *tuple[str, ...], int]): ... + + +func5(1, 1) +func5(1, "", 1) +func5(1, "", "", 1) + +# This should generate an error. +func5() + +# This should generate an error. +func5(1) + +# This should generate an error. +func5("") + +# This should generate an error. +func5(1, "") + +# This should generate an error. +func5(1, "", "") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple15.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple15.py new file mode 100644 index 00000000..cdd79519 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple15.py @@ -0,0 +1,24 @@ +# This sample tests the capture of an unbounded (unknown-length) tuple +# by a TypeVarTuple. + +from typing import Any, Generic +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + +Shape = TypeVarTuple("Shape") + + +class Array(Generic[Unpack[Shape]]): ... + + +def func0(x: Array[Unpack[Shape]]) -> Array[Unpack[Shape]]: ... 
+ + +def func1(y: Array[int, Unpack[tuple[Any, ...]]]): + reveal_type(func0(y), expected_text="Array[int, *tuple[Any, ...]]") + + +def func2(y: Array[Unpack[tuple[int, ...]], int]): + reveal_type(func0(y), expected_text="Array[*tuple[int, ...], int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple16.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple16.py new file mode 100644 index 00000000..3d10931b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple16.py @@ -0,0 +1,26 @@ +# This sample tests the case where a classmethod or staticmethod are +# used with a TypeVarTuple that requires specialization. + +from typing import Generic + +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + +T2 = TypeVarTuple("T2") + + +class Base(Generic[Unpack[T2]]): + @classmethod + def method1(cls, *args: Unpack[T2]) -> int: ... + + @staticmethod + def method2(*args: Unpack[T2]) -> int: ... + + +class Child(Base[int, str]): ... + + +Child.method1(1, "") +Child.method2(1, "") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple17.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple17.py new file mode 100644 index 00000000..16ec9536 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple17.py @@ -0,0 +1,57 @@ +# This sample tests the case where an unpacked tuple argument in a call +# expression is matched to an `*args` parameter that has a declared type +# that includes an unpacked TypeVarTuple. + +from typing_extensions import TypeVarTuple # pyright: ignore[reportMissingModuleSource] + +Ts = TypeVarTuple("Ts") + + +def call0(*args: *Ts) -> tuple[*Ts]: ... + + +def call1(*args: *tuple[int, *Ts]) -> tuple[*Ts]: ... + + +def call2(*args: *tuple[*Ts, float]) -> tuple[*Ts]: ... + + +def call3(*args: *tuple[int, *Ts, float]) -> tuple[*Ts]: ... 
+ + +def call4(*args: *tuple[*tuple[int, *tuple[*Ts], float]]) -> tuple[*Ts]: ... + + +def func1(*args: *tuple[int, str]): + reveal_type(call0(*args), expected_text="tuple[int, str]") + + +def func2(*args: *tuple[int, ...]): + reveal_type(call0(*args), expected_text="tuple[int, ...]") + + +def func3(*args: *tuple[int, *tuple[str, ...], float]): + reveal_type(call0(*args), expected_text="tuple[int, *tuple[str, ...], float]") + + +def func4(*args: *Ts) -> tuple[*Ts]: + call0(*args) + return args + + +def func5(x: int, y: str, z: float): + v1 = call1(*(x, y, z)) + reveal_type(v1, expected_text="tuple[str, float]") + + v2 = call2(*(x, y, z)) + reveal_type(v2, expected_text="tuple[int, str]") + + v3 = call3(*(x, y, z)) + reveal_type(v3, expected_text="tuple[str]") + + v4 = call4(*(x, *(y, z))) + reveal_type(v4, expected_text="tuple[str]") + + +def func6(*args: *tuple[int, *tuple[None, ...], float]): + reveal_type(call2(*args), expected_text="tuple[int, *tuple[None, ...]]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple18.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple18.py new file mode 100644 index 00000000..cbe5778c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple18.py @@ -0,0 +1,18 @@ +# This sample tests the case where an unpacked TypeVar is used in +# an iterator. + +from typing import Any, Callable, TypeVarTuple + + +Ts = TypeVarTuple("Ts") + + +def func1(f: Callable[[*Ts], Any], p: tuple[*Ts]): + f(*p) + + # This should generate an error because p is not unpacked. + f(p) + + for i in p: + # This should generate an error. 
+ f(i) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple19.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple19.py new file mode 100644 index 00000000..9a1ee163 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple19.py @@ -0,0 +1,76 @@ +# This sample tests the case where an unpacked TypeVarTuple is used +# as one or more type arguments for a tuple. + +# Enable experimental features to support Union[*Ts]. +# pyright: enableExperimentalFeatures=true + +from typing import Generator, Iterable, TypeVar, TypeVarTuple, Union + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") + + +def func1(a: Iterable[T], b: Iterable[T]): + i = iter(a) + j = iter(b) + while True: + try: + yield (next(i), next(j)) + except StopIteration: + break + + +reveal_type( + func1, + expected_text="(a: Iterable[T@func1], b: Iterable[T@func1]) -> Generator[tuple[T@func1, T@func1], Any, None]", +) + + +def func2(a: tuple[*Ts], b: tuple[*Ts]): + for i in func1(a, b): + yield i + + +reveal_type( + func2, + expected_text="(a: tuple[*Ts@func2], b: tuple[*Ts@func2]) -> Generator[tuple[Union[*Ts@func2], Union[*Ts@func2]], Any, None]", +) + + +def func3(): + v1 = func2((1, "foo"), (2, "bar")) + reveal_type(v1, expected_text="Generator[tuple[int | str, int | str], Any, None]") + + for i in v1: + reveal_type(i, expected_text="tuple[int | str, int | str]") + + +def func5(x: "Iterable[Union[*Ts]]") -> Iterable[Union[*Ts]]: ... + + +def func6(): + v1: list[int] = [i for i in func5([1, 2, 3])] + v2: list[int | str] = [i for i in func5([1, "foo"])] + + +def func7(t: "tuple[*Ts]") -> "tuple[Union[*Ts], ...]": ... 
+ + +def func8(a: int, b: str): + v1 = func7(((a, b),)) + reveal_type(v1, expected_text="tuple[tuple[int, str], ...]") + + +def func9(x: "tuple[T, ...]", y: "tuple[*Ts]") -> Generator[T | Union[*Ts], None, None]: + z = x + y + reveal_type(z, expected_text="tuple[T@func9 | Union[*Ts@func9], ...]") + for e in z: + reveal_type(e, expected_text="T@func9 | Union[*Ts@func9]") + yield e + + +def func10(x: tuple[*Ts]): ... + + +def func11(x: tuple[*Ts, int, int]): + func10(x) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple2.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple2.py new file mode 100644 index 00000000..0223ec14 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple2.py @@ -0,0 +1,57 @@ +# This sample tests various conditions under which Unpack +# can and cannot be used. + +# pyright: reportMissingModuleSource=false + +from typing import Generic, TypeVar, Union +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + + +_T = TypeVar("_T") +_Xs = TypeVarTuple("_Xs") + + +class ClassA(Generic[_T, Unpack[_Xs]]): + def __init__(self, *shape: Unpack[_Xs]): + self.x: tuple[Unpack[_Xs]] = shape + + # This should generate an error + self.y: _Xs = shape + + # This should generate two errors + def func1(self) -> Union[Unpack[_Xs]]: ... + + # This should generate an error + def func2(self) -> tuple[Unpack[_T]]: ... + + # This should generate an error + def func3(self) -> tuple[Unpack[int]]: ... + + # This should generate an error + def func4(self) -> tuple[Unpack[_Xs, _Xs]]: ... + + # This should generate an error. + a: list[Unpack[_Xs]] = [] + + # This should generate an error. + b: Unpack[_Xs] = () + + +# This should generate an error. +x: list[Unpack[_Xs]] = [] + +# This should generate an error. +y: Unpack[_Xs] = () + +# This should generate an error. +z: Unpack = () + + +class Array(Generic[Unpack[_Xs]]): ... 
+ + +# This should generate two errors because _Xs must be unpacked. +def func0(value: Array[_Xs]) -> tuple[complex, _Xs, str]: ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple20.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple20.py new file mode 100644 index 00000000..bb626284 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple20.py @@ -0,0 +1,26 @@ +# This sample tests the case where an unpacked TypeVarTuple is assigned +# to a non-variadic TypeVar during constraint solving. + +# Enable experimental features to support Union[*Ts]. +# pyright: enableExperimentalFeatures=true + +from typing import TypeVar, Tuple, Union +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + reveal_type, + TypeVarTuple, +) + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") + + +def func1(*args: T) -> Tuple[T, ...]: + return args + + +def func2(x: "Tuple[*Ts]") -> list[Union[*Ts]]: + r = func1(*x) + reveal_type(r, expected_text="Tuple[Union[*Ts@func2], ...]") + v = [i for i in r] + reveal_type(v, expected_text="list[Union[*Ts@func2]]") + return v diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple21.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple21.py new file mode 100644 index 00000000..abd01769 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple21.py @@ -0,0 +1,19 @@ +# This sample tests the case where a tuple including an unpacked +# TypeVarTuple is used in an unpacked argument and assigned to another +# TypeVarTuple parameter. + +# Enable experimental features to support Union[*Ts]. +# pyright: enableExperimentalFeatures=true + +from typing import TypeVar, TypeVarTuple, Union, Unpack + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") + + +def f(*args: Unpack[Ts]) -> Union[Unpack[Ts]]: ... 
+ + +def g(x: tuple[T, Unpack[Ts]]) -> Union[T, Unpack[Ts]]: + f(*x) + return x[0] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple22.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple22.py new file mode 100644 index 00000000..03ebc5bd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple22.py @@ -0,0 +1,49 @@ +# This sample tests the case where a TypeVarTuple is solved using +# a tuple with literal values. + +# Enable experimental features to support Union[*Ts]. +# pyright: enableExperimentalFeatures=true + +from typing import Callable, Literal, TypeVarTuple, Union, Unpack + +Ts = TypeVarTuple("Ts") + + +def func1( + f: Callable[[Unpack[Ts]], None], vs: tuple[Unpack[Ts]] +) -> Union[Unpack[Ts]]: ... + + +def func2(f: Callable[[Literal[1, 2]], None], vs: tuple[Literal[1, 2]]): + v1 = func1(f, vs) + reveal_type(v1, expected_text="Literal[1, 2]") + + +def func3(f: Callable[[Literal[1, 2, 3]], None], vs: tuple[Literal[1, 2]]): + v1 = func1(f, vs) + reveal_type(v1, expected_text="Literal[1, 2]") + + +def func4(f: Callable[[int], None], vs: tuple[Literal[1, 2]]): + v1 = func1(f, vs) + reveal_type(v1, expected_text="int") + + +def func5(f: Callable[[Literal[1, 2]], None], vs: tuple[Literal[1, 2, 3]]): + # This should result in an error. + func1(f, vs) + + +def func6(f: Callable[[Literal[1, 2]], None], vs: tuple[int]): + # This should result in an error. + func1(f, vs) + + +def func7(f: Callable[[int, int, int], None], vs: tuple[int, ...]): + # This should result in an error because of a size mismatch. 
+ func1(f, vs) + + +def func8(f: Callable[[Unpack[tuple[int, ...]]], None], vs: tuple[int]): + v1 = func1(f, vs) + reveal_type(v1, expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple23.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple23.py new file mode 100644 index 00000000..a2a11790 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple23.py @@ -0,0 +1,84 @@ +# This sample tests a complex combination of TypeVarTuple, +# unpacking, bidirectional type inference, and recursive calls. + +from dataclasses import dataclass +from typing import Generic, TypeVar, TypeVarTuple, Callable + +X = TypeVar("X") +Y = TypeVar("Y") +Z = TypeVar("Z") +Xs = TypeVarTuple("Xs") +Ys = TypeVarTuple("Ys") + + +def nil() -> tuple[()]: + return () + + +def cons( + f: Callable[[X], Y], + g: Callable[[*Xs], tuple[*Ys]], +) -> Callable[[X, *Xs], tuple[Y, *Ys]]: + def wrapped(x: X, *xs: *Xs) -> tuple[Y, *Ys]: + y, ys = f(x), g(*xs) + return y, *ys + + return wrapped + + +def star(f: Callable[[X], Y]) -> Callable[[*tuple[X, ...]], tuple[Y, ...]]: + def wrapped(*xs: X): + if not xs: + return nil() + return cons(f, star(f))(*xs) + + return wrapped + + +@dataclass(frozen=True) +class Tree(Generic[X, Y]): + left: X + right: Y + + +def lift( + f: Callable[[*Xs], tuple[*Ys]], +) -> Callable[[Tree[Z, tuple[*Xs]]], Tree[Z, tuple[*Ys]]]: ... + + +def test( + f: Callable[[X], Y], +) -> Callable[[Tree[Z, tuple[X, ...]]], Tree[Z, tuple[Y, ...]]]: + return lift(star(f)) + + +def parallel( + f: Callable[[X], Y], + g: Callable[[*Xs], tuple[*Ys]], +) -> Callable[[X, *Xs], tuple[Y, *Ys]]: + def wrapped(a: X, *bs: *Xs): + return f(a), *g(*bs) + + return wrapped + + +def identity(x: X) -> X: + return x + + +def parallel_identity(*xs: *Xs) -> tuple[*Xs]: + return xs + + +Shape = TypeVarTuple("Shape") +DType = TypeVar("DType") + + +class NDArray(Generic[*Shape, DType]): ... 
+ + +def insert(values: NDArray[*Shape, DType]) -> NDArray[int, *Shape, DType]: ... + + +def prepend(values: NDArray[*Shape, DType]) -> NDArray[int, *Shape, DType]: + return insert(values) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple24.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple24.py new file mode 100644 index 00000000..95cbaf31 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple24.py @@ -0,0 +1,27 @@ +# This sample tests the handling of `Union[*Ts]` in certain cases. + +# Enable experimental features to support Union[*Ts]. +# pyright: enableExperimentalFeatures=true + +from typing import Generic, TypeVarTuple, Union + +Ts = TypeVarTuple("Ts") + + +class ClassA(Generic[*Ts]): + def __init__(self) -> None: + self.x: list[Union[*Ts]] = [] + + reveal_type(self.x, expected_text="list[Union[*Ts@ClassA]]") + + def method(self) -> Union[*Ts]: ... + + +a1 = ClassA[int, bool, str]() + +reveal_type(a1.method(), expected_text="int | bool | str") +reveal_type(a1.x, expected_text="list[int | bool | str]") + + +def func1(t0: tuple[*Ts], t1: tuple[*Ts]): + return all(v0 == v1 for v0, v1 in zip(t0, t1)) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple25.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple25.py new file mode 100644 index 00000000..e5a75c37 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple25.py @@ -0,0 +1,15 @@ +# This sample tests the case where a TypeVarTuple is used in a +# nested callable type. + +from typing import Callable, TypeVarTuple + +Ts = TypeVarTuple("Ts") + + +def func1(g: Callable[[Callable[[*Ts], None]], None]) -> tuple[*Ts]: ... + + +def func2(cb: Callable[[bytes, int], None]) -> None: ... 
+ + +reveal_type(func1(func2), expected_text="tuple[bytes, int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple26.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple26.py new file mode 100644 index 00000000..18bb712d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple26.py @@ -0,0 +1,57 @@ +# This sample tests the case where a `*args: *Ts` parameter captures +# a callable with an indeterminate number of parameters because +# some of them have default arguments. + +from typing import Callable, Literal, TypeVar, TypeVarTuple + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") + + +def func1(x: int, y: str = "", z: int | None = None) -> None: ... + + +def func2(callback: Callable[[*Ts], None], *args: *Ts) -> tuple[*Ts]: ... + + +v1 = func2(func1, 1) +reveal_type(v1, expected_text="tuple[int]") + +v2 = func2(func1, 1, "") +reveal_type(v2, expected_text="tuple[int, str]") + +v3 = func2(func1, 1, "", 3) +reveal_type(v3, expected_text="tuple[int, str, int]") + +v4 = func2(func1, 1, "", None) +reveal_type(v4, expected_text="tuple[int, str, None]") + +# This should generate an error. +func2(func1) + +# This should generate an error. +func2(func1, "") + +# This should generate an error. +func2(func1, 3, "", None, None) + + +def func3(callback: Callable[[*Ts], None]) -> tuple[*Ts]: ... + + +v5 = func3(func1) +reveal_type(v5, expected_text="tuple[int, str, int | None]") + + +def func4(a: Literal["day", "hour"]) -> None: ... + + +def func5(x: bool): + func2(func4, "day" if x else "hour") + + +def func6(x: T, y: T, z: int | None = None) -> None: ... 
+ + +def func7(x: T, y: T) -> None: + func2(func6, x, y) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple27.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple27.py new file mode 100644 index 00000000..ef20fcf1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple27.py @@ -0,0 +1,47 @@ +# This sample tests the case where a Callable uses an unpacked TypeVarTuple +# followed by another positional parameter. + +from typing import TypeVarTuple, TypeVar, Generic, Callable + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") + + +class A(Generic[*Ts, T]): ... + + +def deco1(x: Callable[[*tuple[*Ts, int]], None]) -> tuple[*Ts]: ... + + +def deco2(x: Callable[[*tuple[*Ts, str]], None]) -> tuple[*Ts]: ... + + +def deco3(x: Callable[[*tuple[str, int]], None]) -> None: ... + + +def deco4(x: Callable[[*Ts, T], None]) -> A[*Ts, T]: + return A() + + +def func1(a: str, b: int) -> None: ... + + +def func2(a: str, b: str, c: int) -> None: ... + + +v1 = deco1(func1) +reveal_type(v1, expected_text="tuple[str]") + +v2 = deco1(func2) +reveal_type(v2, expected_text="tuple[str, str]") + +# This should generate an error. +deco2(func1) + +deco3(func1) + +v3 = deco4(func1) +reveal_type(v3, expected_text="A[str, int]") + +v4 = deco4(func2) +reveal_type(v4, expected_text="A[str, str, int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple28.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple28.py new file mode 100644 index 00000000..195b9f7d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple28.py @@ -0,0 +1,41 @@ +# This sample tests the case where a generic type alias with a TypeVarTuple +# also contains other TypeVars, and it is specialized with an unpacked tuple. 
+ +from typing import TypeVar, TypeVarTuple + +Ts = TypeVarTuple("Ts") +T1 = TypeVar("T1") +T2 = TypeVar("T2") + +TA1 = tuple[T1, *Ts, T2] +TA1_Spec1 = TA1[*tuple[int, ...]] +TA1_Spec2 = TA1[float, *tuple[int, ...]] +TA1_Spec3 = TA1[*tuple[int, ...], str] +TA1_Spec4 = TA1[float, *tuple[int, ...], str] + +TA2 = tuple[*Ts, T1, T2] +TA2_Spec1 = TA2[*tuple[int, ...]] + +TA3 = tuple[T1, T2, *Ts] +TA3_Spec1 = TA3[*tuple[int, ...]] + +TA4 = tuple[T1, T1, *Ts, T2, T2] +TA4_Spec1 = TA4[*tuple[int, ...]] + + +def func1( + ta1_1: TA1_Spec1, + ta1_2: TA1_Spec2, + ta1_3: TA1_Spec3, + ta1_4: TA1_Spec4, + ta2: TA2_Spec1, + ta3: TA3_Spec1, + ta4: TA4_Spec1, +): + reveal_type(ta1_1, expected_type="tuple[int, *tuple[int, ...], int]") + reveal_type(ta1_2, expected_type="tuple[float, *tuple[int, ...], int]") + reveal_type(ta1_3, expected_type="tuple[int, *tuple[int, ...], str]") + reveal_type(ta1_4, expected_type="tuple[float, *tuple[int, ...], str]") + reveal_type(ta2, expected_type="tuple[*tuple[int, ...], int, int]") + reveal_type(ta3, expected_type="tuple[int, int, *tuple[int, ...]]") + reveal_type(ta4, expected_type="tuple[int, int, *tuple[int, ...], int, int]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple29.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple29.py new file mode 100644 index 00000000..d551f768 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple29.py @@ -0,0 +1,30 @@ +# This sample tests the case where a function parameterized with +# a TypeVarTuple is called in a nested manner. + +from typing import Callable, TypeVar, TypeVarTuple + + +def f(a: str, b: int, c: bool) -> None: ... + + +def curry1[First, *Rest, Result]( + function: Callable[[First, *Rest], Result], +) -> Callable[[*Rest], Callable[[First], Result]]: ... 
+ + +applied_twice1 = curry1(curry1(f)) +reveal_type(applied_twice1, expected_text="(bool) -> ((int) -> ((str) -> None))") + + +First = TypeVar("First") +Rest = TypeVarTuple("Rest") +Result = TypeVar("Result") + + +def curry2( + function: Callable[[First, *Rest], Result], +) -> Callable[[*Rest], Callable[[First], Result]]: ... + + +applied_twice2 = curry2(curry2(f)) +reveal_type(applied_twice2, expected_text="(bool) -> ((int) -> ((str) -> None))") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple3.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple3.py new file mode 100644 index 00000000..862f273f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple3.py @@ -0,0 +1,129 @@ +# This sample tests the TypeVar matching logic related to +# variadic type variables. + +from typing import Any, Generic, Literal, TypeAlias, TypeVar, overload +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + + +_T = TypeVar("_T") +_Xs = TypeVarTuple("_Xs") + + +class Array(Generic[Unpack[_Xs]]): + def __init__(self, *args: Unpack[_Xs]) -> None: + self.x: tuple[Unpack[_Xs]] = args + reveal_type(args, expected_text="tuple[*_Xs@Array]") + + # This should generate an error because _Xs is not unpacked. + def foo(self, *args: _Xs) -> None: ... + + +def linearize(value: Array[Unpack[_Xs]]) -> tuple[Unpack[_Xs]]: ... + + +def array_to_tuple(value: Array[Unpack[_Xs]]) -> tuple[complex, Unpack[_Xs]]: ... 
+ + +def func1(x: Array[int, str, str, float], y: Array[()]): + reveal_type(x, expected_text="Array[int, str, str, float]") + + reveal_type(y, expected_text="Array[*tuple[()]]") + + a1 = Array(3, 3.5, "b") + reveal_type(a1, expected_text="Array[int, float, str]") + + a2 = linearize(a1) + reveal_type(a2, expected_text="tuple[int, float, str]") + + b1 = Array() + reveal_type(b1, expected_text="Array[*tuple[()]]") + + b2 = linearize(b1) + reveal_type(b2, expected_text="tuple[()]") + + e = array_to_tuple(x) + reveal_type(e, expected_text="tuple[complex, int, str, str, float]") + + f = array_to_tuple(y) + reveal_type(f, expected_text="tuple[complex]") + + +class ArrayIntStr(Array[int, str, _T]): + def __init__(self, val: _T) -> None: + pass + + +v1 = ArrayIntStr(3) + +v2: Array[int, str, int] = v1 + +# This should generate an error. +v3: Array[int, str, str] = v1 + +# This should generate an error. +v4: Array[int, str, int, int] = v1 + +# This should generate an error. +v5: Array[int, str] = v1 + + +def func2(p1: tuple[str, int], p2: list[str]): + v6 = Array(*p1) + reveal_type(v6, expected_text="Array[str, int]") + + v7 = Array(1, *p1, "") + reveal_type(v7, expected_text="Array[int, str, int, str]") + + v8 = Array(*p2) + reveal_type(v8, expected_text="Array[*tuple[str, ...]]") + + +def func3(x: Array[Unpack[_Xs]]) -> Array[Unpack[_Xs]]: + y: Array[Unpack[tuple[Any, ...]]] = x + return x + + +@overload +def func4(signal: Array[*_Xs], *args: *_Xs) -> None: ... + + +@overload +def func4(signal: str, *args: Any) -> None: ... + + +def func4(signal: Array[*_Xs] | str, *args: *_Xs) -> None: ... 
+ + +def func5(a1: Array[Literal["a", "b"]], a2: Array[Literal["a"], Literal["b"]]): + func4(a1, "a") + func4(a2, "a", "b") + + +def func6(a: Array): + reveal_type(a, expected_text="Array[*tuple[Unknown, ...]]") + + +def func7(): + x1: Array[*tuple[int, str], *tuple[str]] + x2: Array[*tuple[int, ...], *tuple[str]] + x3: Array[*tuple[str], *tuple[int, ...], *tuple[str]] + + # This should generate an error because only one unpacked unbounded + # tuple can be used. + x4: Array[*tuple[str, ...], *tuple[int, ...], *tuple[str]] + + +ArrayAlias: TypeAlias = Array[Unpack[_Xs]] + + +def func8(): + x1: ArrayAlias[*tuple[int, str], *tuple[str]] + x2: ArrayAlias[*tuple[int, ...], *tuple[str]] + x3: ArrayAlias[*tuple[str], *tuple[int, ...], *tuple[str]] + + # This should generate an error because only one unpacked unbounded + # tuple can be used. + x4: ArrayAlias[*tuple[str, ...], *tuple[int, ...], *tuple[str]] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple30.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple30.py new file mode 100644 index 00000000..f1924035 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple30.py @@ -0,0 +1,12 @@ +# This sample tests the case where a TypeVarTuple is used in a class +# and a `Self` type is involved. + + +class Parent[*Ts]: + def __init__(self, *args: *Ts): ... + + def method(self): + Child(self) + + +class Child(Parent[*tuple[Parent, ...]]): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple4.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple4.py new file mode 100644 index 00000000..46473450 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple4.py @@ -0,0 +1,75 @@ +# This sample tests packing and unpacking operations with +# variadic type variables. + +# Enable experimental features to support Union[*Ts]. 
+# pyright: enableExperimentalFeatures=true + +from typing import Generic, NewType, Union +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + + +Shape = TypeVarTuple("Shape") + + +class Array(Generic[Unpack[Shape]]): + def __init__(self, *shape: Unpack[Shape]): + self.shape = shape + + def __abs__(self) -> "Array[Unpack[Shape]]": ... + + def __add__(self, other: "Array[Unpack[Shape]]") -> "Array[Unpack[Shape]]": ... + + +Height = NewType("Height", int) +Width = NewType("Width", int) +x: Array[Height, Width] = Array(Height(480), Width(640)) +reveal_type(x.shape, expected_text="tuple[Height, Width]") +reveal_type(abs(x), expected_text="Array[Height, Width]") +reveal_type(x + abs(x), expected_text="Array[Height, Width]") + + +_Xs = TypeVarTuple("_Xs") + + +def func1(a: tuple[Unpack[_Xs]], b: tuple[Unpack[_Xs]]) -> Union[Unpack[_Xs]]: ... + + +def func2( + a: tuple[int, Unpack[_Xs]], b: tuple[int, Unpack[_Xs]] +) -> Union[Unpack[_Xs]]: ... + + +def func3(p1: tuple[int], p2: tuple[int, str], p3: tuple[int, int]): + # This should generate an error. + v1 = func1(p1, p2) + + # This should generate an error. + v2 = func2(p1, p2) + + v3 = func2(p2, p2) + reveal_type(v3, expected_text="str") + + v4 = func2((3, "hi"), p2) + reveal_type(v4, expected_text="str") + + # This should generate an error. + func2((3, 3), p2) + + v5 = func2((3, 3), p3) + reveal_type(v5, expected_text="int") + + +def func4(a: int, *args: Unpack[_Xs], **kwargs: str) -> tuple[int, Unpack[_Xs]]: ... + + +c1 = func4(4, 5.4, 6j, b="3", c="5") +reveal_type(c1, expected_text="tuple[int, float, complex]") + +c2 = func4(4, b="3", c="5") +reveal_type(c2, expected_text="tuple[int]") + +# This should generate an error. 
+c3 = func4(b="3", c="5") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple5.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple5.py new file mode 100644 index 00000000..5a831fad --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple5.py @@ -0,0 +1,127 @@ +# This sample tests the handling of variadic type variables used +# within Callable types. + +from typing import Any, Callable, Protocol +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + +_Xs = TypeVarTuple("_Xs") + + +def func1(func: Callable[[int, Unpack[_Xs]], Any]) -> Callable[[Unpack[_Xs]], int]: ... + + +def func2(func: Callable[[Unpack[_Xs]], int]) -> Callable[[Unpack[_Xs]], int]: ... + + +def callback1(a: int) -> int: ... + + +def callback2(a: str) -> int: ... + + +def callback3(a: str) -> None: ... + + +def callback4(a: int, b: complex, c: str) -> int: ... + + +def callback5(a: int, *args: Unpack[_Xs]) -> tuple[Unpack[_Xs]]: ... + + +def callback6(a: int, *args: Any) -> int: ... + + +def callback7(a: int, b: str, c: str, d: str, *args: Any) -> int: ... + + +c1 = func1(callback1) +reveal_type(c1, expected_text="() -> int") +c1_1 = c1() +reveal_type(c1_1, expected_text="int") + +# This should generate an error. +c2 = func1(callback2) + +# This should generate an error. +c3 = func2(callback3) + +c4 = func1(callback4) +reveal_type(c4, expected_text="(complex, str) -> int") +c4_1 = c4(3j, "hi") +reveal_type(c4_1, expected_text="int") + +# This should generate an error. +c4_2 = c4(3j) + +# This should generate an error. 
+c4_3 = c4(3j, "hi", 4) + +c5 = func1(callback5) +reveal_type(c5, expected_text="(*_Xs@callback5) -> int") + +c6_1 = func1(callback6) +reveal_type(c6_1, expected_text="(*Any) -> int") + +c6_2 = func2(callback6) +reveal_type(c6_2, expected_text="(int, *Any) -> int") + +c7_1 = func1(callback7) +reveal_type(c7_1, expected_text="(str, str, str, *Any) -> int") + +c7_2 = func2(callback7) +reveal_type(c7_2, expected_text="(int, str, str, str, *Any) -> int") + + +class CallbackA(Protocol[Unpack[_Xs]]): + def __call__(self, a: int, *args: Unpack[_Xs]) -> Any: ... + + +def func3(func: CallbackA[Unpack[_Xs]]) -> Callable[[Unpack[_Xs]], int]: ... + + +d1 = func3(callback1) +reveal_type(d1, expected_text="() -> int") + +# This should generate an error. +d2 = func3(callback2) + +# This should generate an error. +d3 = func3(callback3) + +d4 = func3(callback4) +reveal_type(d4, expected_text="(complex, str) -> int") +d4_1 = d4(3j, "hi") +reveal_type(d4_1, expected_text="int") + +# This should generate an error. +d4_2 = d4(3j) + +# This should generate an error. +d4_3 = d4(3j, "hi", 4) + + +def func4(func: Callable[[Unpack[_Xs], int], int]) -> Callable[[Unpack[_Xs]], int]: ... + + +def callback8(a: int, b: str, c: complex, d: int) -> int: ... + + +d5_1 = func4(callback1) +reveal_type(d5_1, expected_text="() -> int") + +# This should generate an error. 
+d5_2 = func4(callback4) + +d5_3 = func4(callback8) +reveal_type(d5_3, expected_text="(int, str, complex) -> int") + + +def func5(x: Callable[[Unpack[_Xs]], None], y: tuple[Unpack[_Xs]]): + pass + + +def func6(x: Callable[[Unpack[_Xs]], None], y: tuple[Unpack[_Xs]]): + func5(x, y) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple6.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple6.py new file mode 100644 index 00000000..3473e490 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple6.py @@ -0,0 +1,109 @@ +# This sample tests the handling of generic type aliases with +# variadic type variables. + +# pyright: reportMissingTypeArgument=true, reportMissingModuleSource=false + +from typing import Callable, Generic, TypeVar + +from typing_extensions import TypeVarTuple, Unpack + +_Xs = TypeVarTuple("_Xs") +_T = TypeVar("_T") + + +class Array(Generic[Unpack[_Xs]]): + def __init__(self, *args: Unpack[_Xs]): ... + + +Alias1 = Array[Unpack[_Xs]] + +# This should generate an error. +Alias2 = Array[_Xs] + +# This should generate an error. +Alias3 = Array[_T, int, _Xs] + +# This should generate an error if reportMissingTypeArgument is enabled. +x1: Alias1 | None = None + +x2: Alias1[int] = Array(3) + +# This should generate an error. +x3: Alias1[int, str] = Array(3) + +x4: Alias1[int, dict[str, str]] = Array(3, {}) + +# This should generate an error. +x5: Alias1[()] = Array(3) + +x6 = Alias1[int, int, str](3, 4, "") + +x7: Alias1[int, float, str] = Array(3, 4, "") + +Alias4 = Array[_T, int, Unpack[_Xs]] + +Alias5 = Array[Unpack[_Xs]] + +y1: Alias4[float, str, str] = Array(3.4, 2, "hi", "hi") + +# This should generate an error. +y2: Alias4[float, str, str] = Array("3.4", 2, "hi", "hi") + +y3 = Alias4[float, str, str](3, 2, "hi", "hi") + + +def func1(a: Alias4[_T, Unpack[_Xs]]) -> tuple[_T, Unpack[_Xs]]: ... 
+ + +z1 = func1(Array(3, 4, "hi", 3j)) +reveal_type(z1, expected_text="tuple[int, str, complex]") + +# This should generate an error. +z2 = func1(Array(3, 4.3, "hi", 3j)) + +z3 = func1(Array(3.5, 4)) +reveal_type(z3, expected_text="tuple[float]") + +Alias6 = tuple[int, Unpack[_Xs]] + + +# The type annotation for y will generate an error if +# reportMissingTypeArgument is enabled. +def func2(x: Alias6[float, bool], y: Alias6, z: Alias6[()]): + reveal_type(x, expected_text="tuple[int, float, bool]") + + reveal_type(y, expected_text="tuple[int, *tuple[Unknown, ...]]") + + reveal_type(z, expected_text="tuple[int]") + + +Alias7 = Callable[[Unpack[_Xs]], None] + + +def func3(cb: Alias7[int, Unpack[_Xs]]) -> tuple[Unpack[_Xs]]: ... + + +def func4(a: int, b: str) -> None: ... + + +reveal_type(func3(func4), expected_text="tuple[str]") + + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") + +Alias8 = tuple[*_Xs, _T1, _T2] + +# This should generate an error because there are +# enough type arguments. +a8_1: Alias8[int] + +a8_2: Alias8[int, int] + + +class ClassA9(Generic[_T1]): + pass + + +# This should generate an error. +a9_1: ClassA9[*tuple[int]] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple7.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple7.py new file mode 100644 index 00000000..c4e4d82c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple7.py @@ -0,0 +1,57 @@ +# This sample tests error handling for variadic type var usage. + +# Enable experimental features to support Union[*Ts]. +# pyright: enableExperimentalFeatures=true + +from typing import Any, Callable, Generic, TypeVar, Union +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + +_Xs = TypeVarTuple("_Xs") +_Ys = TypeVarTuple("_Ys") +_T1 = TypeVar("_T1") + + +# This should generate an error because only one TypeVarTuple is allowed. 
+class Class1(Generic[Unpack[_Ys], Unpack[_Xs]]): ... + + +# This should generate an error because only one TypeVarTuple is allowed. +class Class2(dict[tuple[Unpack[_Ys]], tuple[Unpack[_Xs]]]): ... + + +class Class3(dict[tuple[Unpack[_Ys]], _T1]): ... + + +class Class4(dict[_T1, tuple[Unpack[_Ys]]], Generic[Unpack[_Ys], _T1]): ... + + +class Class5(dict[tuple[Unpack[_Ys]], _T1], Generic[_T1, Unpack[_Ys]]): + def func1(self, a: tuple[Unpack[_Ys], int]): + pass + + # This should generate an error because tuple cannot contain multiple + # TypeVarTuples. + def func2(self, *args: Unpack[_Xs]) -> tuple[Unpack[_Ys], Unpack[_Xs]]: ... + + def func3(self) -> Union[Unpack[_Ys], int]: + return 3 + + def func4(self, *args: Unpack[_Xs]) -> Union[int, Unpack[_Ys], Unpack[_Xs]]: + return 3 + + def func5(self, a: Callable[[Unpack[_Ys], int], Any]): + pass + + # This should generate an error because *_Ys cannot appear + # by itself in a return type for a Callable. + def func6(self, a: Callable[[int], Unpack[_Ys]]): + pass + + +Alias1 = Union[tuple[int, Unpack[_Xs]], _T1] + +# This should generate an error because at most one TypeVarTuple is allowed. +Alias2 = Union[tuple[int, Unpack[_Xs]], tuple[Unpack[_Ys]]] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple8.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple8.py new file mode 100644 index 00000000..3c8e136b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple8.py @@ -0,0 +1,116 @@ +# This sample tests variadic TypeVar matching for unions. + +# Enable experimental features to support Union[*Ts]. +# pyright: enableExperimentalFeatures=true + +from typing import TypeVar, Union +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + + +_T = TypeVar("_T") +_Xs = TypeVarTuple("_Xs") +_Ys = TypeVarTuple("_Ys") + + +def func1(x: Union[Unpack[_Xs]]) -> Union[Unpack[_Xs]]: ... 
+ + +def func2(x: Union[Unpack[_Xs], Unpack[_Ys]]) -> Union[Unpack[_Xs], Unpack[_Ys]]: ... + + +def func3(x: Union[int, Unpack[_Xs]]) -> Union[Unpack[_Xs]]: ... + + +def func4(x: Union[_T, Unpack[_Xs]]) -> Union[_T, Unpack[_Xs]]: ... + + +def func5(x: Union[Unpack[_Xs]], *args: Unpack[_Xs]) -> Union[Unpack[_Xs]]: ... + + +def func6(*args: Unpack[_Xs]) -> Union[Unpack[_Xs]]: ... + + +def func7(a: list[Union[Unpack[_Xs]]]) -> Union[Unpack[_Xs]]: ... + + +def test1(a: int, b: str, c: list[int], d: Union[complex, str]): + v1_1 = func1(a) + reveal_type(v1_1, expected_text="int") + + v1_2 = func1(d) + reveal_type(v1_2, expected_text="complex | str") + + # --------- + + # This behavior isn't defined by PEP 646, but neither + # did PEP 484 define the behavior for multiple (non- + # variadic) TypeVar matching within a Union. So behavior + # is likely to vary between type checkers here. + v2_1 = func2(a) + reveal_type(v2_1, expected_text="int") + + v2_2 = func2(d) + reveal_type(v2_2, expected_text="complex | str") + + # --------- + + v3_1 = func3(a) + reveal_type(v3_1, expected_text="Unknown") + + # This should generate an error + v3_2 = func3(d) + + v3_3 = func3(b) + reveal_type(v3_3, expected_text="str") + + # --------- + + # This behavior isn't defined by PEP 646 or PEP 484. 
+ v4_1 = func4(a) + reveal_type(v4_1, expected_text="int") + + v4_2 = func4(d) + reveal_type(v4_2, expected_text="complex | str") + + # --------- + + v5_1 = func5(a) + reveal_type(v5_1, expected_text="int") + + v5_2 = func5(a, a) + reveal_type(v5_2, expected_text="int") + + # This should generate an error + v5_3 = func5(a, b) + + # This should generate an error + v5_4 = func5(a, b, c) + + # --------- + + v6_1 = func6(a) + reveal_type(v6_1, expected_text="int") + + v6_2 = func6(a, b) + reveal_type(v6_2, expected_text="int | str") + + v6_3 = func6(a, b, d) + reveal_type(v6_3, expected_text="int | str | complex") + + v6_4 = func6() + reveal_type(v6_4, expected_text="Never") + + # --------- + + v7_1 = func7([a]) + reveal_type(v7_1, expected_text="int") + + x: list[Union[int, str]] = [a, b] + v7_2 = func7(x) + reveal_type(v7_2, expected_text="int | str") + + v7_3 = func7([a, b, d]) + reveal_type(v7_3, expected_text="int | str | complex") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple9.py b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple9.py new file mode 100644 index 00000000..6d7d9c05 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typeVarTuple9.py @@ -0,0 +1,51 @@ +# This sample tests the handling of variadic type variables used +# in generic type aliases and with suffixes. + +from typing import Callable, Generic, TypeVar +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + TypeVarTuple, + Unpack, +) + + +Ts = TypeVarTuple("Ts") +T = TypeVar("T", covariant=True) + + +class Call(Generic[Unpack[Ts]]): + def __init__(self, *args: Unpack[Ts]) -> None: + self.args = args + + +class Return(Generic[T]): + def __init__(self, /, result: T) -> None: + self.result = result + + +TailRec = Call[Unpack[Ts]] | Return[T] + + +def tail_rec( + fn: Callable[[Unpack[Ts]], TailRec[Unpack[Ts], T]], +) -> Callable[[Unpack[Ts]], T]: ... 
+ + +@tail_rec +def factorial(n: int, acc: int) -> TailRec[int, int, int]: + if n <= 0: + return Return(acc) + return Call(n - 1, acc * n) + + +reveal_type(factorial, expected_text="(int, int) -> int") + + +Alias10 = tuple[T, *Ts] +Alias11 = tuple[*Ts] +Alias12 = tuple[T, *Ts, T] + + +def func5(a10: Alias10, a11: Alias11, a12: Alias12): + reveal_type(a10, expected_text="tuple[Unknown, *tuple[Unknown, ...]]") + reveal_type(a11, expected_text="tuple[Unknown, ...]") + reveal_type(a12, expected_text="tuple[Unknown, *tuple[Unknown, ...], Unknown]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict1.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict1.py new file mode 100644 index 00000000..92240124 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict1.py @@ -0,0 +1,91 @@ +# This sample tests the type analyzer's handling of TypedDict classes. + +from typing import Any, TypeVar, TypedDict + +not_total = False + + +# This should generate an error because +# the value of the total argument must +# be a literal "True" or "False". +class TD1(TypedDict, total=not_total): + pass + + +class TD2(TypedDict, total=False): + """This is a test""" + + a: int + + # This should generate an error because "b" + # is redeclared below with a different type. + b: str + + b: float + + c: "dict[Any, Any]" + + # This should generate an error because + # assignments are not allowed. + d: float = 3.0 + + # This should generate an error because + # methods are not allowed. + def foo(self): + pass + + +class TD3(TypedDict, total=True): + a: int + b: float + c: str + + +class TD4(TypedDict): + d: str + + +class TD5(TD3, total=False): + e: str + + # This should generate an error because + # methods are not allowed. + def foo(self): + pass + + +class NotATD: + pass + + +# This should generate an error because non-TypeDict +# base classes shouldn't be allowed for TD classes. 
+class TD6(TD3, NotATD): + pass + + +# This should generate an error because non-TypeDict +# base classes shouldn't be allowed for TD classes. +class TD7(NotATD, TypedDict): + pass + + +# This should generate an error because TypedDict can't +# be used in a type annotation. +v1: TypedDict | int + +# This should generate an error because TypedDict can't +# be used in a TypeVar bound. +T = TypeVar("T", bound=TypedDict | int) + + +# This should generate an error because TypedDict doesn't support +# a metaclass parameter. +class TD8(TypedDict, metaclass=type): + name: str + + +# This should generate an error because TypedDict doesn't support +# other __init_subclass__ parameters. +class TD9(TypedDict, other=True): + name: str diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict10.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict10.py new file mode 100644 index 00000000..9501513a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict10.py @@ -0,0 +1,34 @@ +# This sample tests the type analyzer's handling of a variant +# of the TypedDict "alternate syntax" defined in the Python docs. + +from typing import TypedDict + +Movie = TypedDict("Movie", name=str, year=int) + + +def get_movie_name(movie: Movie): + return movie["name"] + + +name2 = get_movie_name({"name": "ET", "year": 1982}) + +movie1: Movie = {"name": "Blade Runner", "year": 1982} + +movie2: Movie = { + "name": "Blade Runner", + # This should generate an error because + # the type is incorrect. + "year": "1982", +} + +movie3: Movie = { + # This should generate an error because + # all keys are required. + "name": "Blade Runner" +} + +movie4: Movie = { + # This should generate an error because + # the key name is not supported. 
+ "name2": "Blade Runner" +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict11.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict11.py new file mode 100644 index 00000000..c339bf38 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict11.py @@ -0,0 +1,20 @@ +# This sample tests bidirectional type inference (expected type) for +# lists that include TypedDicts. + +from typing import TypeVar, TypedDict + + +MessageTypeDef = TypedDict("MessageTypeDef", {"Id": str, "Handle": str}) + +msgs = [{"Id": "1", "Handle": "2"}] +list2: list[MessageTypeDef] = [ + {"Id": msg["Id"], "Handle": msg["Handle"]} for msg in msgs +] + +TMessage = TypeVar("TMessage", bound=MessageTypeDef) + + +def func1(x: list[TMessage]) -> TMessage: ... + + +func1([{"Id": "", "Handle": ""}]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict12.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict12.py new file mode 100644 index 00000000..f0518dd6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict12.py @@ -0,0 +1,123 @@ +# This sample tests the synthesized methods get, setdefault +# pop, __delitem__, clear, and popitem for a TypedDict. + +from typing import TypedDict, final +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + NotRequired, + Required, +) + + +class TD1(TypedDict): + bar: NotRequired[str] + + +class TD2(TD1): + foo: Required[str] + + +td1: TD1 = {} +td2: TD2 = {"foo": "hi"} + +v1: str | None = td1.get("bar") + +v2: str = td1.get("bar", "") + +v3: str | int = td1.get("bar", 3) + +v4: str = td1.setdefault("bar", "1") + +# This should generate an error. +td1.setdefault("bar", 3) + +# This should generate an error. +td1.setdefault("bar") + +# This should generate an error. 
+td1.setdefault("baz", "") + +v6: str = td1.pop("bar") +v7: str | int = td1.pop("bar", 1) +v8: str | int = td1.pop("bar", 3) + +v9 = td2.pop("foo") +reveal_type(v9, expected_text="object") + +v10 = td2.pop("foo", None) +reveal_type(v10, expected_text="object | None") + +td1.__delitem__("bar") + + +@final +class TD3(TypedDict): + foo: int + baz: NotRequired[int] + + +class TD4(TypedDict): + bar: str + + +C = TD3 | TD4 + + +def func1(a: TD3, b: TD4, c: C, s: str) -> int | None: + a1 = a.get("foo") + reveal_type(a1, expected_text="int") + a2 = a.get("foo", 1.0) + reveal_type(a2, expected_text="int") + a3 = a.get("bar") + reveal_type(a3, expected_text="Any | None") + a4 = a.get("bar", 1.0) + reveal_type(a4, expected_text="Any | float") + a5 = a.get("baz") + reveal_type(a5, expected_text="int | None") + a6 = a.get("baz", 1.0) + reveal_type(a6, expected_text="int | float") + a7 = a.get(s) + reveal_type(a7, expected_text="Any | None") + a8 = a.get(s, 1.0) + reveal_type(a8, expected_text="Any | float") + + b1 = b.get("bar") + reveal_type(b1, expected_text="str") + b2 = b.get("bar", 1.0) + reveal_type(b2, expected_text="str") + b3 = b.get("foo") + reveal_type(b3, expected_text="Any | None") + b4 = b.get("foo", 1.0) + reveal_type(b4, expected_text="Any | float") + b5 = b.get(s) + reveal_type(b5, expected_text="Any | None") + b6 = b.get(s, 1.0) + reveal_type(b6, expected_text="Any | float") + + c1 = c.get("foo") + reveal_type(c1, expected_text="int | Any | None") + c2 = c.get("foo", 1.0) + reveal_type(c2, expected_text="int | Any | float") + c3 = c.get("bar") + reveal_type(c3, expected_text="Any | str | None") + c4 = c.get("bar", 1.0) + reveal_type(c4, expected_text="Any | float | str") + c5 = c.get("baz") + reveal_type(c5, expected_text="int | Any | None") + c6 = c.get("baz", 1.0) + reveal_type(c6, expected_text="int | float | Any") + + +class TD7(TypedDict, total=False): + a: dict[str, str] + b: list[str] + + +def func2(td7: TD7): + v1 = td7.get("a", []) + reveal_type(v1, 
expected_text="dict[str, str] | list[Any]") + + v2 = td7.get("a", {}) + reveal_type(v2, expected_text="dict[str, str]") + + v3 = td7.get("b", []) + reveal_type(v3, expected_text="list[str]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict13.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict13.py new file mode 100644 index 00000000..1bbf24f4 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict13.py @@ -0,0 +1,51 @@ +# This sample tests that TypedDicts that inherit from other +# TypedDicts do not override field names with incompatible types. + +# pyright: reportIncompatibleVariableOverride=true + +from typing import Any, NotRequired, Required, TypedDict + + +ParentA = TypedDict("ParentA", {"name": str, "age": int}) + + +class ChildA(ParentA): + # This should generate an error because the type of "age" is redefined. + age: float + + name: str + + +class ParentB(TypedDict): + x: Any + + +class ChildB(ParentB): + x: int + + +class ParentC(TypedDict): + x: Required[int] + + +class ChildC(ParentC): + # This should generate an error because "x" is Required in the parent. + x: NotRequired[int] + + +class ParentD(TypedDict): + x: Required[int] + + +class ChildD(ParentD): + # This should generate an error because "x" is NotRequired in the parent. + x: NotRequired[int] + + +class ParentE(TypedDict, total=True): + x: int + + +class ChildE(ParentE, total=False): + # This should generate an error because "x" is Required in the parent. + x: int diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict14.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict14.py new file mode 100644 index 00000000..24854484 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict14.py @@ -0,0 +1,25 @@ +# This sample tests bidirectional type inference when assigning +# a value to a TypedDict element. 
+ +from typing import TypedDict + + +class Thing(TypedDict): + v1: bool + v2: str + + +class Thing2(TypedDict): + v3: Thing | None + v4: list[str | int] | None + + +thing2: Thing2 = {"v3": None, "v4": None} +thing2["v3"] = {"v1": False, "v2": "a"} +thing2["v4"] = [] +thing2["v4"] = [3] +thing2["v4"] = ["hi"] +thing2["v4"] = ["hi", 4] + +# This should generate an error +thing2["v4"] = ["hi", 4.0] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict15.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict15.py new file mode 100644 index 00000000..86f5cf4e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict15.py @@ -0,0 +1,51 @@ +# This sample tests the type compatibility checks when the source +# is a TypedDict and the dest is a protocol. + +from typing import Protocol, TypeVar, TypedDict + + +class HasName(Protocol): + name: str + + +class SupportsClear(Protocol): + def clear(self) -> None: ... + + +_T = TypeVar("_T") + + +class SupportsUpdate(Protocol): + def update(self: _T, __m: _T) -> None: ... + + +class B(TypedDict): + name: str + + +def print_name(x: HasName): + print(x.name) + + +my_typed_dict: B = {"name": "my name"} + +# This should generate an error. The "name" +# attribute of a TypedDict can't be accessed +# through a member access expression. +print_name(my_typed_dict) + + +def do_clear(x: SupportsClear): + x.clear() + + +# This should generate an error. Although a "dict" +# class supports clear, a TypedDict does not. 
+do_clear(my_typed_dict) + + +def do_update(x: SupportsUpdate): + x.update(x) + + +do_update(my_typed_dict) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict16.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict16.py new file mode 100644 index 00000000..4de25b79 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict16.py @@ -0,0 +1,80 @@ +# This sample tests that type compatibility between TypedDicts. + +from typing import TypedDict, final + + +class TD0(TypedDict): + key: str + + +class TD1(TD0): + value: str + + +class TD2(TypedDict): + key: str + value: str + + +v1: TD2 = TD1(key="", value="") +v2: TD1 = TD2(key="", value="") + +v3 = [v2] +v4: list[TD2] = v3 +v5 = [v1] +v6: list[TD1] = v5 + + +class TD10(TypedDict, total=False): + key: str + + +class TD11(TD10): + value: str + + +class TD12(TypedDict): + key: str + value: str + + +# This should generate an error. +v10: TD12 = TD11(key="", value="") + +# This should generate an error. +v11: TD11 = TD12(key="", value="") + + +v12 = [v10] +# This should generate an error. +v13: list[TD10] = v12 + +v14 = [v11] +# This should generate an error. +v15: list[TD12] = v14 + + +class TD20(TypedDict): + key: str + value: str + + +class TD21(TypedDict): + key: str + value: str + extra: str + + +# This should generate an error. +v20: TD21 = TD20(key="", value="") + +v21: TD20 = TD21(key="", value="", extra="") + + +v22 = [v20] +# This should generate an error. +v23: list[TD20] = v22 + +v24: list[TD20] = [v21] +# This should generate an error. 
+v25: list[TD21] = v24 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict17.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict17.py new file mode 100644 index 00000000..202cbd2d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict17.py @@ -0,0 +1,24 @@ +# This sample tests the handling of the "|" and "|=" operators +# for TypedDicts. + +from typing import TypedDict + + +class Person(TypedDict, total=False): + name: str + age: int + + +person: Person = {} + +person.update({"name": "Michael"}) + +person |= {"name": "Michael"} +person = person | {"name": "Michael"} + + +# This should generate an error. +person |= {"name": "Michael", "other": 1} + +# This should generate an error. +person = person | {"name": 1} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict18.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict18.py new file mode 100644 index 00000000..38025d06 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict18.py @@ -0,0 +1,134 @@ +# This sample tests the handling of generic TypedDicts which are +# supported in Python 3.11 and newer. + +from typing import Generic, Literal, TypeVar, TypedDict, Unpack + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") + + +class TD1(TypedDict, Generic[_T1, _T2]): + a: dict[_T1, _T2] + b: _T1 + + +v1_1: TD1[str, int] = {"a": {"x": 3}, "b": "y"} + +# This should generate an error. 
+v1_2: TD1[str, str] = {"a": {"x": 3}, "b": "y"} + + +class TD2(TD1[_T1, int]): + c: _T1 + + +v2_1: TD2[int] = {"a": {3: 3}, "b": 1, "c": 5} + + +class TD3(TypedDict): + a: int + + +class TD4(TD3, Generic[_T1]): + b: _T1 + + +v4: TD4[str] = {"a": 3, "b": ""} +v5: TD4[tuple[str]] = {"a": 3, "b": ("",)} + + +def func1(x: TD1[_T1, _T2]) -> dict[_T1, _T2]: + return x["a"] + + +v1_3 = func1({"a": {"x": 3}, "b": "y"}) +reveal_type(v1_3, expected_text="dict[str, int]") + + +class TD5(TypedDict, Generic[_T1]): + x: _T1 + y: _T1 + + +def func2(a: TD5[Literal[1]]): ... + + +func2({"x": 1, "y": 1}) + +# This should generate an error because 2 doesn't match Literal[1]. +func2({"x": 2, "y": 1}) + + +def func3(a: TD5[_T1]) -> _T1: ... + + +reveal_type(func3({"x": 1, "y": 1}), expected_text="int") +reveal_type(func3({"x": "1", "y": 1}), expected_text="str | int") + + +class TD6(TD5[Literal[1]]): + z: str + + +def func4(a: TD6) -> Literal[1]: ... + + +func4({"x": 1, "y": 1, "z": "a"}) +f2: TD6 = {"x": 1, "y": 1, "z": "a"} + +reveal_type(func4({"x": 1, "y": 1, "z": "a"})) + + +class TD7(TD5[_T1], Generic[_T1]): + z: str + + +def func5(a: TD7[Literal[1]]) -> Literal[1]: + return a["x"] + + +func5({"x": 1, "y": 1, "z": "a"}) +f3: TD7[Literal[1]] = {"x": 1, "y": 1, "z": "a"} + +reveal_type(func5({"x": 1, "y": 1, "z": "a"})) + + +class TD8(TD7[Literal[1]]): ... + + +def func6(a: TD8) -> Literal[1]: + return a["x"] + + +func6({"x": 1, "y": 1, "z": "a"}) +f4: TD8 = {"x": 1, "y": 1, "z": "a"} + +reveal_type(func6({"x": 1, "y": 1, "z": "a"})) + + +class TD9(TypedDict, Generic[_T1]): + x: _T1 + + +class ClassA(Generic[_T1]): + def __init__(self, **attrs: Unpack[TD9[_T1]]) -> None: ... + + +f5 = ClassA[int](x=1) + +# This should generate an error because 1 isn't a valid type. +f6 = ClassA[str](x=1) + +f7 = ClassA(x=1) +reveal_type(f7, expected_text="ClassA[int]") + + +class TD10(TypedDict, Generic[_T1]): + x: _T1 + + +class TD11(TypedDict): + y: int + + +class TD12(TD10[str], TD11): ... 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict19.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict19.py new file mode 100644 index 00000000..e978be48 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict19.py @@ -0,0 +1,59 @@ +# This sample tests the handling of type narrowing of a TypedDict based +# on an assignment to a not-required key. + +from typing import TypedDict +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + NotRequired, + Required, + Unpack, +) + + +class TD1(TypedDict): + x: NotRequired[str] + + +class TD2(TypedDict): + x: Required[str] + + +def func1(td: TD1 | TD2): + # This should generate an error because "x" is not required in TD1. + v1 = td["x"] + + +def func2(td: TD1 | TD2): + td["x"] = "hi" + v1 = td["x"] + + +def func3(td: TD1 | TD2, opt: bool): + if opt: + td["x"] = "hi" + + # This should generate an error because "x" is not required in TD1. + v1 = td["x"] + + +def func4(td: TD1 | TD2, opt: bool): + if opt: + td["x"] = "hi" + else: + td["x"] = "hi" + + v1 = td["x"] + + +class TD3(TypedDict): + x: str + y: NotRequired[int] + + +def accepts_td3(**args: Unpack[TD3]): + pass + + +def func5(td: TD3, cond: bool): + if cond: + td["y"] = 5 + accepts_td3(**td) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict2.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict2.py new file mode 100644 index 00000000..1a150438 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict2.py @@ -0,0 +1,59 @@ +# This sample tests the type analyzer's handling of TypedDict classes. 
+ +from typing import TypedDict + + +class Movie(TypedDict, total=False): + name: str + year: int + + +class BookBasedMovie(Movie, total=True): + based_on: str + + +def get_movie_name(movie: Movie): + return movie.get("name") + + +name2 = get_movie_name({"name": "ET", "year": 1982}) + +movie1: Movie = {"name": "Blade Runner", "year": 1982} + +movie2: Movie = { + "name": "Blade Runner", + # This should generate an error because + # the type is incorrect. + "year": "1982", +} + +movie3: Movie = {"name": "Blade Runner"} + +movie4: Movie = { + # This should generate an error because + # the key name is not supported. + "name2": "Blade Runner" +} + +movie5: Movie = Movie(movie3) +movie6: Movie = Movie(movie3, year=2030, name="New movie") + +book1: BookBasedMovie = {"name": "Moonraker", "year": 1979, "based_on": "Moonraker"} + +book2: BookBasedMovie = {"year": 1979, "based_on": "Moonraker"} + +book3: BookBasedMovie = {"based_on": "Moonraker"} + +book4: BookBasedMovie = { + # This should generate an error because 'author' isn't + # a defined field. + "author": "Ian Fleming", + "based_on": "Moonraker", +} + +book5: BookBasedMovie = { + "name": "Moonraker", + "year": 1979, + # This should generate an error because 'based_on' is + # a required field, and it's not provided. +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict20.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict20.py new file mode 100644 index 00000000..a7679e3a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict20.py @@ -0,0 +1,62 @@ +# This sample tests the case where a TypedDict is narrowed based +# on an "in" type guard, and the narrowed type is later combined +# with the original wider type. We want to verify that this +# doesn't result in a combinatorial explosion. 
+ +from typing import TypedDict + + +class GroupsSettingsDict(TypedDict, total=False): + a: bool | None + b: bool | None + c: bool | None + d: bool | None + e: bool | None + f: bool | None + g: bool | None + h: bool | None + i: bool | None + j: bool | None + k: bool | None + l: bool | None + m: bool | None + n: bool | None + o: bool | None + p: bool | None + + +def foo() -> None: + settings: GroupsSettingsDict = {} + + if "a" in settings: + settings["a"] + if "b" in settings: + settings["b"] + if "c" in settings: + settings["c"] + if "d" in settings: + settings["d"] + if "e" in settings: + settings["e"] + if "f" in settings: + settings["f"] + if "g" in settings: + settings["g"] + if "h" in settings: + settings["h"] + if "i" in settings: + settings["i"] + if "j" in settings: + settings["j"] + if "k" in settings: + settings["k"] + if "l" in settings: + settings["l"] + if "m" in settings: + settings["m"] + if "n" in settings: + settings["n"] + if "o" in settings: + settings["o"] + if "p" in settings: + settings["p"] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict21.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict21.py new file mode 100644 index 00000000..90aa526b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict21.py @@ -0,0 +1,53 @@ +# This sample tests the handling of dictionary expansion for TypedDicts. + +from typing_extensions import ( # pyright: ignore[reportMissingModuleSource] + NotRequired, + Required, + TypedDict, +) + + +class TD1(TypedDict): + v1: Required[int] + + +class TD2(TypedDict): + v2: Required[str] + + +class TD3(TypedDict): + v1: NotRequired[int] + + +class TD4(TD1, TD2): ... + + +td1: TD1 = {"v1": 0} +td2: TD2 = {"v2": ""} +td3_1: TD3 = {} +td3_2: TD3 = {"v1": 0} + +td4_1: TD4 = {**td1, **td2} + +# This should generate an error because td3_1 +# does not include the required "v1" entry. 
+td4_2: TD4 = {**td3_1, **td2} + +td4_3: TD4 = {**td3_2, **td2} + + +class TD5(TypedDict): + f1: str + f2: str + + +class TD6(TypedDict): + f1: str + f2: int + + +def func1(t1: TD5) -> TD6: + return {**t1, "f2": 0} + + +td6: TD6 = {"f1": 1, "f2": "", "f1": "", "f2": 1} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict22.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict22.py new file mode 100644 index 00000000..5e4ac2d6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict22.py @@ -0,0 +1,11 @@ +# This sample tests that class variables for TypedDict are accessible. + +from typing import TypedDict + + +class TD1(TypedDict): ... + + +reveal_type(TD1.__required_keys__, expected_text="frozenset[str]") +reveal_type(TD1.__optional_keys__, expected_text="frozenset[str]") +reveal_type(TD1.__total__, expected_text="bool") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict23.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict23.py new file mode 100644 index 00000000..4458b9ef --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict23.py @@ -0,0 +1,45 @@ +# This sample tests the synthesized update method for TypedDict classes. + +from typing import NotRequired, Required, TypedDict + + +class TD1(TypedDict): + a: Required[int] + b: NotRequired[str] + + +class TD2(TD1): + c: Required[int] + + +td1: TD1 = {"a": 3} + +reveal_type( + td1.update, + expected_text="Overload[(__m: Iterable[tuple[Literal['a'], int] | tuple[Literal['b'], str]], /) -> None, (__m: Partial[TD1], /) -> None, (*, a: int = ..., b: str = ...) 
-> None]", +) + +td1.update({}) +td1.update({"b": ""}) + +td2: TD2 = {"a": 0, "c": 3} + +reveal_type( + td2.update, + expected_text="Overload[(__m: Iterable[tuple[Literal['a'], int] | tuple[Literal['b'], str] | tuple[Literal['c'], int]], /) -> None, (__m: Partial[TD2], /) -> None, (*, a: int = ..., b: str = ..., c: int = ...) -> None]", +) + +# This should generate an error because "c" within TD1 may be incompatible with "int". +# A second error is generated to indicate that no overloads are compatible. +td2.update(td1) + + +class TD3(TypedDict): + a: NotRequired[str] + + +td3: TD3 = {} +reveal_type( + td3.update, + expected_text="Overload[(__m: Iterable[tuple[Literal['a'], str]], /) -> None, (__m: Partial[TD3], /) -> None, (*, a: str = ...) -> None]", +) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict24.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict24.py new file mode 100644 index 00000000..1f8243b6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict24.py @@ -0,0 +1,18 @@ +# This sample tests the handling of a TypedDict used as a bound +# for a TypeVar. + + +from typing import Generic, TypeVar, TypedDict + + +class TD1(TypedDict): + a: str + + +T1 = TypeVar("T1", bound=TD1) + + +class A(Generic[T1]): + def method1(self) -> T1: + # This should generate an error. + return {"a": ""} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict25.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict25.py new file mode 100644 index 00000000..5ccb854e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict25.py @@ -0,0 +1,13 @@ +# This sample tests that member accesses to a TypedDict are properly +# handled even if one of the items in the TypedDict shadows the name +# of a TypedDict attribute. 
+ +from typing import TypedDict + + +class TD1(TypedDict): + items: int + + +td1 = TD1(items=0) +td1.items() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict3.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict3.py new file mode 100644 index 00000000..68d79dc7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict3.py @@ -0,0 +1,40 @@ +# This sample tests the type analyzer's handling of TypedDict classes. + +from typing import TypeVar, TypedDict + + +class Movie(TypedDict, total=False): + name: str + year: int + + +class BookBasedMovie(Movie, total=True): + based_on: str + + +movie1 = Movie(year=1982, name="Blade Runner") + +# This should generate an error because +# the type is incorrect. +movie2 = Movie(name="Blade Runner", year="1982") + +movie3 = Movie(name="Blade Runner") + +# This should generate an error because +# the key name is not supported. +movie4 = Movie(name2="Blade Runner") + + +book1 = BookBasedMovie(year=1979, name="Moonraker", based_on="Moonraker") + +book2 = BookBasedMovie(based_on="Moonraker", year=1979) + +book3 = BookBasedMovie(based_on="Moonraker") + +# This should generate an error because 'author' isn't +# a defined field. +book4 = BookBasedMovie(based_on="Moonraker", author="Ian Fleming") + +# This should generate an error because 'based_on' is +# a required field, and it's not provided. +book5 = BookBasedMovie(year=1982, name="Blade Runner") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict4.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict4.py new file mode 100644 index 00000000..8b7373fe --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict4.py @@ -0,0 +1,68 @@ +# This sample tests the type analyzer's handling of TypedDict classes. 
+ +from typing import Literal, TypedDict + + +class Movie(TypedDict, total=False): + name: str + year: int + + +class BookBasedMovie(Movie, total=True): + based_on: str + + +movie1 = Movie(name="Blade Runner", year=1982) + + +def get_value(movie: Movie, key: Literal["year", "name"]) -> int | str | None: + if "year" in movie and "name" in movie: + return movie[key] + + +def make_movie(name: str, year: int) -> Movie: + return {"name": name, "year": year} + + +name1 = movie1.get("name", "Blue Nile") +year1 = movie1.get("year", 1921) +movie2 = make_movie(name1, year1) + +# This should generate an error because all indices need +# to be string literals. +year2 = movie1[3] + +# This should generate an error because only one index +# is allowed. +year3 = movie1[3, 3] + +movie1["name"] = "Transformers" +movie1["year"] = 2007 + +# This should generate an error because the RHS is the wrong type. +movie1["name"] = [3] + +# This should generate an error because the RHS is the wrong type. +movie1["year"] = {} + +del movie1["year"] + +# This should generate an error because the key is not in the dictionary. +del movie1["year2"] + +# This should generate an error because entries in a TypedDict +# are not accessible through member access. +name2 = movie1.name + +book1 = BookBasedMovie(based_on="E.T.") +make_movie(name=book1["based_on"], year=1923) + +del book1["name"] + +# This should generate an error because you can't delete a required key. +del book1["based_on"] + +# Make sure "in" operator works with TypedDict. +movie3 = Movie() +if "d" in movie3: + pass diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict5.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict5.py new file mode 100644 index 00000000..5b9abe14 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict5.py @@ -0,0 +1,61 @@ +# This sample tests the type analyzer's handling of TypedDict classes. 
+ +from typing import TypedDict + + +class Movie1(TypedDict, total=False): + name: str + year: int + + +class Movie2(TypedDict, total=False): + name: str + year: int + + +class Movie3(TypedDict, total=True): + name: str + year: int + + +class Movie4(TypedDict, total=True): + name: str + year: int + earnings: float + + +class Movie5(TypedDict, total=True): + name: str + year: float + + +movie1: Movie1 = Movie2(name="hello", year=1971) + +# This should generate an error because +# items are required in Movie3 but not Movie2. +movie2: Movie2 = Movie3(name="hello", year=1971) + +# This should generate an error because +# items are required in Movie3 but not Movie2. +movie3: Movie3 = Movie2(name="hello", year=1971) + +# This should generate an error. +movie4: Movie4 = Movie3(name="hello", year=1971) + +movie5: Movie3 = Movie4(name="hello", year=1971, earnings=23) + + +movie6 = Movie2(name="hello", year=1971) +movie6["name"] = "goodbye" + +movie7 = {"name": "hello", "year": 1971} +movie7["name"] = "goodbye" + +movie8: Movie2 = {"year": 1981, "name": "test"} +movie8["year"] = 1982 + +movie9 = Movie3(name="", year=1971) + +# This should generate an error because "year" is mutable, +# so its type must match exactly. +movie10: Movie5 = movie9 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict6.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict6.py new file mode 100644 index 00000000..8ab7f622 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict6.py @@ -0,0 +1,97 @@ +# This sample tests the type analyzer's handling of TypedDict +# "alternate syntax" defined in PEP 589. + +from typing import NotRequired, Required, TypedDict + +Movie = TypedDict("Movie", {"name": str, "year": int}) + +# This should generate an error because the arguments are missing. +Movie2 = TypedDict() + +# This should generate an error because the arguments are missing. 
+Movie3 = TypedDict("Movie3") + +# This should generate an error because the argument type is wrong. +Movie4 = TypedDict("Movie4", 3) + +# This should generate an error because the argument type is wrong. +Movie5 = TypedDict(3, {}) + +Movie6 = TypedDict("Movie6", {}, total=False) +Movie7 = TypedDict("Movie7", {}, total=True) + +# This should generate an error because the total param +# accepts only True or False. +Movie8 = TypedDict("Movie8", {}, total=3) + +# This should generate an error because the third arg is unknown. +Movie9 = TypedDict("Movie9", {}, random=3) + +# This should generate an error because the third arg is unknown. +Movie10 = TypedDict("Movie10", {}, 3) + +# This should generate an error because a fourth arg +# is not supported. +Movie11 = TypedDict("Movie11", {}, total=True, foo=3) + + +def get_movie_name(movie: Movie): + return movie["name"] + + +name2 = get_movie_name({"name": "ET", "year": 1982}) + +movie1: Movie = {"name": "Blade Runner", "year": 1982} + +movie2: Movie = { + "name": "Blade Runner", + # This should generate an error because + # the type is incorrect. + "year": "1982", +} + +movie3: Movie = { + # This should generate an error because + # all keys are required. + "name": "Blade Runner" +} + +movie4: Movie = { + # This should generate an error because + # the key name is not supported. + "name2": "Blade Runner" +} + +MovieNotTotal = TypedDict("MovieNotTotal", {"name": str, "year": int}, total=False) + +movie5: MovieNotTotal = {"name": "Blade Runner"} + + +def foo(unknown_str_value: str): + a = movie5[unknown_str_value] + + +Movie12 = TypedDict( + "Movie12", {"title": Required[str], "predecessor": NotRequired["Movie12"]} +) + +movie12: Movie12 = {"title": "Two Towers", "predecessor": {"title": "Fellowship"}} + + +# This should generate an error because the name doesn't match. +# the arguments are missing. 
+Movie13 = TypedDict("NotMovie13", {"name": str, "year": int}) + + +# This should generate an error because CustomType1 is a forward reference +# and is not quoted. +Movie14 = TypedDict("Movie14", {"title": CustomType1, "year": "CustomType2"}) + +# This should generate an error because CustomType2 is a forward reference +# and is not quoted. +Movie15 = TypedDict("Movie15", title="CustomType1", year=CustomType2) + + +class CustomType1: ... +class CustomType2: ... + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict7.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict7.py new file mode 100644 index 00000000..00910fc6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict7.py @@ -0,0 +1,22 @@ +# This sample tests synthesized get methods in TypedDict classes. + +from typing import TypedDict + +UserType1 = TypedDict("UserType1", {"name": str, "age": int}, total=False) +user1: UserType1 = {"name": "Bob", "age": 40} + +name1: str = user1.get("name", "n/a") +age1: int = user1.get("age", 42) + +UserType2 = TypedDict("UserType2", name=str, age=int) +user2: UserType2 = {"name": "Bob", "age": 40} + +name2: str | None = user2.get("name") + +name3: str = user2.get("name") + +age2: int = user2.get("age", 42) + +age3: int | str = user2.get("age", "42") + +age4: int = user2.get("age", "42") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict8.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict8.py new file mode 100644 index 00000000..abb76fce --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict8.py @@ -0,0 +1,19 @@ +# This sample tests the ability of the type checker to +# perform bidirectional type inference involving TypedDict +# classes and dict literal expressions. 
+ +from typing import TypedDict + + +class Entry(TypedDict): + index: int + value: str + + +entries1: list[Entry] = [{"index": 2, "value": "a"}, {"index": 5, "value": "b"}] + +# This should generate an error +entries2: list[Entry] = [{"index": 2, "value": "a"}, {"index": "2", "value": "b"}] + +# This should generate an error +entries3: list[Entry] = [{"index": 2, "value": "a"}, 3] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDict9.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDict9.py new file mode 100644 index 00000000..0176f469 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDict9.py @@ -0,0 +1,47 @@ +# This sample tests the handling of nested TypedDict fields. + +from typing import Literal, TypedDict + + +class Inner1(TypedDict): + inner_key: str + + +class Inner2(TypedDict): + inner_key: Inner1 + + +class Outer1(TypedDict): + outer_key: Inner2 + + +o1: Outer1 = {"outer_key": {"inner_key": {"inner_key": "hi"}}} + +# This should generate an error because the inner-most value +# should be a string. +o2: Outer1 = {"outer_key": {"inner_key": {"inner_key": 1}}} + + +class Inner3(TypedDict): + x: int + + +class Inner4(TypedDict): + x: int + + +class Outer2(TypedDict): + y: str + z: Literal[""] | Inner3 + + +class Outer3(TypedDict): + y: str + z: Literal[""] | Inner4 + + +def func1(td: Outer3): ... + + +o3: Outer2 = {"y": "", "z": {"x": 0}} +o4: Outer3 = o3 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed1.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed1.py new file mode 100644 index 00000000..81ac9f2b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed1.py @@ -0,0 +1,60 @@ +# This sample tests basic usage of "closed" TypedDict classes as +# introduced in PEP 728. 
+ +from typing import NotRequired, Required, TypedDict +from typing_extensions import ReadOnly # pyright: ignore[reportMissingModuleSource] + + +class Movie(TypedDict, extra_items=bool): + name: str + + +m1: Movie = {"name": "Blade Runner", "novel_adaptation": True} + +# This should generate an error because int is not compatible with bool. +m2: Movie = {"name": "Blade Runner", "year": 1982} + + +MovieAlt = TypedDict("MovieAlt", {"name": str}, extra_items=bool) + +m_alt1: MovieAlt = {"name": "Blade Runner", "novel_adaptation": True} + +# This should generate an error because int is not compatible with bool. +m_alt2: MovieAlt = {"name": "Blade Runner", "year": 1982} + + +def func1(movie: Movie) -> None: + reveal_type(movie["name"], expected_text="str") + + if "novel_adaptation" in movie: + reveal_type(movie["novel_adaptation"], expected_text="bool") + + movie["other1"] = True + + # This should generate a type incompatibility error. + movie["other2"] = 1 + + +class MovieBase(TypedDict, extra_items=ReadOnly[str | None]): + name: str + + +# This should generate an error. +class BadTD1(TypedDict, extra_items=Required[str]): + pass + + +# This should generate an error. +class BadTD2(TypedDict, extra_items=NotRequired[str]): + pass + + +# This should generate an error. +class BadTD3(TypedDict, closed=True, extra_items=str): + pass + + +# This should generate an error because "closed" and +# "extra_items" cannot both be specified. +class BadTD4(TypedDict, closed=False, extra_items=bool): + name: str diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed10.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed10.py new file mode 100644 index 00000000..c72b7130 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed10.py @@ -0,0 +1,15 @@ +# This sample tests the case where a type variable is used to define +# the extra_items in a TypedDict. 
+ +from typing_extensions import TypedDict # pyright: ignore[reportMissingModuleSource] + + +class TD1[T](TypedDict, extra_items=T): + a: T + + +d1: TD1 = {"a": 1} +d2: TD1[int] = {"a": 1} + +reveal_type(d1["other"], expected_text="Unknown") +reveal_type(d2["other"], expected_text="int") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed2.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed2.py new file mode 100644 index 00000000..3e13fc83 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed2.py @@ -0,0 +1,39 @@ +# This sample tests basic usage of "closed" TypedDict classes as +# introduced in PEP 728. + +from typing import TypedDict, Unpack + + +class Movie1(TypedDict, extra_items=int): + name: str + + +def func1(movie: Movie1) -> None: + del movie["year"] + + # This should generate an error. + del movie["name"] + + +class Movie2(TypedDict, extra_items=int): + name: str + + +def func2(**kwargs: Unpack[Movie2]) -> None: ... + + +func2(name="") + +func2(name="", foo=1) + +# This should generate an error. +func2(name=1) + +# This should generate an error. +func2(name="", foo="") + + +m1 = Movie1(name="ET", year=1984) + +# This should generate an error. +m2 = Movie1(name="ET", year="1984") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed3.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed3.py new file mode 100644 index 00000000..235eec71 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed3.py @@ -0,0 +1,152 @@ +# This sample tests inheritance rules for closed TypedDicts. 
+ +from typing import Any, Never, NotRequired, Required, TypedDict +from typing_extensions import ReadOnly # pyright: ignore[reportMissingModuleSource] + + +class Parent1(TypedDict, extra_items=int | None): + pass + + +class Child1_1(Parent1, extra_items=int | None): + pass + + +# This should generate an error because of a type mismatch. +class Child1_2(Parent1, extra_items=int): + pass + + +class ParentClosed1(TypedDict, closed=True): + a: int + + +class ChildClosed1_1(ParentClosed1, extra_items=Never): + pass + + +# This should generate an error. +class ChildClosed1_2(ParentClosed1): + b: str + + +# This should generate an error because extra_items is incompatible type. +class ChildClosed1_3(ParentClosed1, extra_items=int): + pass + + +class ParentClosed2(TypedDict, extra_items=Never): + a: int + + +# This should generate an error. +class ChildClosed2(ParentClosed2): + b: str + + +class ParentClosed3(TypedDict, extra_items=int | str): + a: int + + +class ChildClosed3_1(ParentClosed3): + b: NotRequired[int | str] + + +class ChildClosed3_2(ParentClosed3): + b: NotRequired[Any] + + +# This should generate an error. +class ChildClosed3_3(ParentClosed3): + b: NotRequired[int] + + +# This should generate an error. +class ChildClosed3_4(ParentClosed3): + b: int | str + + +class ParentClosed4(TypedDict, extra_items=ReadOnly[int | str]): + a: int + + +class ChildClosed4_1(ParentClosed4): + b: int + + +class ChildClosed4_2(ParentClosed4): + b: Any + + +class ChildClosed4_3(ParentClosed4): + b: Required[int] + + +class ChildClosed4_4(ParentClosed4): + b: NotRequired[int] + + +class ChildClosed4_5(ParentClosed4): + b: ReadOnly[int | str] + + +class ChildClosed4_6(ParentClosed4, extra_items=int | str): + pass + + +# This should generate an error. +class ChildClosed4_7(ParentClosed4): + b: list[str] + + +class MovieBase(TypedDict, extra_items=int | None): + name: str + + +# This should generate an error. 
+class AdaptedMovie(MovieBase): + adapted_from_novel: bool + + +# This should generate an error. +class MovieRequiredYear(MovieBase): + year: int | None + + +# This should generate an error. +class MovieNotRequiredYear(MovieBase): + year: NotRequired[int] + + +class MovieWithYear(MovieBase): + year: NotRequired[int | None] + + +class ParentNonOpen5(TypedDict, closed=True): + pass + + +# This should generate an error because a subclass of +# a closed TypedDict cannot be open. +class ChildNotClosed5(ParentNonOpen5, closed=False): + pass + + +class ParentNonOpen6(TypedDict, extra_items=str): + pass + + +# This should generate an error because a subclass of +# a closed TypedDict cannot be open. +class ChildNotClosed6(ParentNonOpen6, closed=False): + pass + + +class ParentNonOpen7(TypedDict, extra_items=str): + pass + + +# This should generate an error because added fields +# cannot be ReadOnly. +class ChildNotClosed7(ParentNonOpen7): + a: NotRequired[ReadOnly[str]] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed4.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed4.py new file mode 100644 index 00000000..cb426e02 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed4.py @@ -0,0 +1,70 @@ +# This sample tests type consistency rules for closed TypedDicts. + +from typing import NotRequired, TypedDict +from typing_extensions import ReadOnly # pyright: ignore[reportMissingModuleSource] + + +class Movie1(TypedDict, extra_items=int | None): + name: str + + +class MovieDetails1(TypedDict, extra_items=int | None): + name: str + year: NotRequired[int] + + +details1: MovieDetails1 = {"name": "Kill Bill Vol. 1", "year": 2003} + +# This should generate an error because of a type incompatibility. +movie1: Movie1 = details1 + + +class MovieDetails2(TypedDict, extra_items=int | None): + name: str + year: int | None + + +details2: MovieDetails2 = {"name": "Kill Bill Vol. 
1", "year": 2003} + +# This should generate an error because "year" is not required. +movie2: Movie1 = details2 + + +class Movie3(TypedDict, extra_items=ReadOnly[str | int]): + name: str + + +class MovieDetails3(TypedDict, extra_items=int): + name: str + year: NotRequired[int] + + +details3: MovieDetails3 = {"name": "Kill Bill Vol. 2", "year": 2004} +movie3: Movie3 = details3 + + +class MovieExtraInt(TypedDict, extra_items=int): + name: str + + +class MovieExtraStr(TypedDict, extra_items=str): + name: str + + +def func1(p1: MovieExtraInt, p2: MovieExtraStr): + # This should generate an error because of a type inconsistency. + extra_int: MovieExtraInt = p2 + + # This should generate an error because of a type inconsistency. + extra_str: MovieExtraStr = p1 + + +class MovieNotClosed(TypedDict): + name: str + + +def func2(p1: MovieExtraInt, p2: MovieNotClosed): + # This should generate an error because of a type inconsistency. + extra_int: MovieExtraInt = p2 + + not_closed: MovieNotClosed = p1 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed5.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed5.py new file mode 100644 index 00000000..8b2c9ed5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed5.py @@ -0,0 +1,56 @@ +# This sample tests type compatibility between closed TypedDicts and +# Mapping types. + +from typing import Mapping, TypedDict + + +class MovieExtraStr(TypedDict, extra_items=str): + name: str + + +class MovieExtraInt(TypedDict, extra_items=int): + name: str + + +extra_str: MovieExtraStr = {"name": "Blade Runner", "summary": ""} +extra_int: MovieExtraInt = {"name": "No Country for Old Men", "year": 2007} + +str_mapping: Mapping[str, str] = extra_str + +# This should generate an error. 
+int_mapping: Mapping[str, int] = extra_int + +int_str_mapping: Mapping[str, int | str] = extra_int + + +def func1(movie: MovieExtraStr) -> None: + reveal_type(movie.items(), expected_text="dict_items[str, str]") + reveal_type(movie.keys(), expected_text="dict_keys[str, str]") + reveal_type(movie.values(), expected_text="dict_values[str, str]") + + +class MovieNotClosed(TypedDict): + name: str + + +def func2(movie: MovieNotClosed) -> None: + reveal_type(movie.items(), expected_text="dict_items[str, object]") + reveal_type(movie.keys(), expected_text="dict_keys[str, object]") + reveal_type(movie.values(), expected_text="dict_values[str, object]") + + +class MovieClosed(TypedDict, closed=True): + name: str + year: int + + +def func3(movie: MovieClosed) -> None: + reveal_type( + movie.items(), expected_text="dict_items[Literal['name', 'year'], str | int]" + ) + reveal_type( + movie.keys(), expected_text="dict_keys[Literal['name', 'year'], str | int]" + ) + reveal_type( + movie.values(), expected_text="dict_values[Literal['name', 'year'], str | int]" + ) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed6.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed6.py new file mode 100644 index 00000000..d6572243 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed6.py @@ -0,0 +1,62 @@ +# This sample tests type compatibility between closed TypedDicts and +# dict and MutableMapping types. + +from typing import MutableMapping, NotRequired, TypedDict +from typing_extensions import ReadOnly # pyright: ignore[reportMissingModuleSource] + + +class IntDict1(TypedDict, extra_items=int): + pass + + +class IntDictWithNum(IntDict1): + num: NotRequired[int] + + +def func1(x: IntDict1) -> None: + v: dict[str, int] = x + v.clear() + + +def func2(x: dict[str, int]): + # This should generate an error. 
+ not_required_num: IntDictWithNum = x + + +def func3(p1: IntDictWithNum, p2: dict[str, int]): + d1: dict[str, int] = p1 + m1: MutableMapping[str, int] = p1 + func1(p1) + + # This should generate an error. + d2: IntDictWithNum = p2 + + +class IntDict2(TypedDict, extra_items=int): + num: int + + +def func4(p1: IntDict2): + # This should generate an error. + d1: dict[str, int] = p1 + + # This should generate an error. + m1: MutableMapping[str, int] = p1 + + # This should generate an error. + func1(p1) + + +class IntDict3(TypedDict, extra_items=int): + num: NotRequired[ReadOnly[int]] + + +def func5(p1: IntDict3): + # This should generate an error. + d1: dict[str, int] = p1 + + # This should generate an error. + m1: MutableMapping[str, int] = p1 + + # This should generate an error. + func1(p1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed7.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed7.py new file mode 100644 index 00000000..939ad477 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed7.py @@ -0,0 +1,69 @@ +# This sample tests the synthesis of "clear" and "popitem" within a closed +# TypedDict under certain circumstances. 
+ +from typing import NotRequired, Required, TypedDict +from typing_extensions import ReadOnly # pyright: ignore[reportMissingModuleSource] + + +class TD1(TypedDict, total=False, extra_items=int): + a: int + + +td1: TD1 = {"a": 1} + +reveal_type(td1.clear, expected_text="() -> None") +reveal_type(td1.popitem, expected_text="() -> tuple[str, int]") +td1.clear() +td1.popitem() + + +class TD2(TypedDict, total=False, closed=True): + a: str + + +td2: TD2 = {"a": "1"} + +reveal_type(td2.clear, expected_text="() -> None") +reveal_type(td2.popitem, expected_text="() -> tuple[str, str]") +td2.clear() +td2.popitem() + + +class TD3(TypedDict, total=False, extra_items=ReadOnly[int]): + a: int + + +td3: TD3 = {"a": 1} + +# This should generate an error because extra_items is ReadOnly. +td3.clear() + +# This should generate an error because extra_items is ReadOnly. +td3.popitem() + + +class TD4(TypedDict, closed=True): + a: NotRequired[int] + b: Required[int] + + +td4: TD4 = {"b": 1} + +# This should generate an error because not all elements are NotRequired. +td4.clear() + +# This should generate an error because not all elements are NotRequired. +td4.popitem() + + +class TD5(TypedDict, closed=True): + a: NotRequired[ReadOnly[int]] + + +td5: TD5 = {"a": 1} + +# This should generate an error because some elements are ReadOnly. +td5.clear() + +# This should generate an error because some elements are ReadOnly. +td5.popitem() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed8.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed8.py new file mode 100644 index 00000000..829d27b8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed8.py @@ -0,0 +1,41 @@ +# This sample tests the case where bidirectional type inference is required +# for the extra_items in a closed TypedDict. 
+ +from typing_extensions import TypedDict # pyright: ignore[reportMissingModuleSource] + + +class Typed(TypedDict, extra_items=str | int): + type: str + + +class Named(TypedDict, extra_items="str | int | Typed | Named"): + name: str + + +td2_1: Named = { + "name": "Fred", + "birth": { + "type": "date", + "year": 2000, + "month": 12, + "day": 31, + }, +} + +td2_2: Named = { + "name": "Fred", + "extra": { + "name": "test", + "value": "", + }, +} + +td2_3: Named = { + "name": "Fred", +} + +td2_4: Named = { + "name": "Fred", + "test1": 1, + "test2": {"name": "Barb", "value": {"type": "date", "day": 31}}, +} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed9.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed9.py new file mode 100644 index 00000000..798ee46a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDictClosed9.py @@ -0,0 +1,37 @@ +# This sample tests the handling of calls with unpacked TypedDicts. + + +from typing_extensions import TypedDict # pyright: ignore[reportMissingModuleSource] + + +class ClosedTD1(TypedDict, closed=True): + arg1: str + + +class IntDict1(TypedDict, extra_items=int): + arg1: str + + +td1 = ClosedTD1(arg1="hello") +td2 = IntDict1(arg1="hello", arg2=3) + + +def func1(arg1: str): + pass + + +func1(**td1) + +# This should arguably generate an error because there +# could be extra items, but we'll match mypy's behavior here. +func1(**td2) + + +def func2(arg1: str, **kwargs: str): + pass + + +func2(**td1) + +# This should result in an error. +func2(**td2) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDictInline1.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDictInline1.py new file mode 100644 index 00000000..77524544 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDictInline1.py @@ -0,0 +1,50 @@ +# This sample tests support for inlined TypedDict definitions. 
+ +from typing import NotRequired, ReadOnly, Required, TypedDict + + +td1: TypedDict[{"a": int, "b": str}] = {"a": 0, "b": ""} + +td2: TypedDict[{"a": TypedDict[{"b": int}]}] = {"a": {"b": 0}} + +td3: TypedDict[{"a": "list[float]"}] = {"a": [3]} + +td4: TypedDict[ + {"a": NotRequired[int], "b": Required[int], "c": NotRequired[ReadOnly[int]]} +] = {"b": 3} + +# This should generate an error because dictionary comprehensions +# are not allowed. +err1: TypedDict[{"a": int for _ in range(1)}] + +# This should generate an error because unpacked dictionary +# entries are not allowed. +err2: TypedDict[{**{"a": int}}] + +# This should generate an error because an extra type argument is provided. +err3: TypedDict[{"a": int}, str] + +# This should generate an error because TypedDict cannot be used without +# a subscript in this context. +err4: TypedDict + +# This should generate an error because a dict expression is not a +# valid type expression by itself. +err5: TypedDict[{"a": {"b": int}}] = {"a": {"b": 0}} + + +def func1(val: TypedDict[{"a": int}]) -> TypedDict[{"a": int}]: + return {"a": val["a"] + 1} + + +func1({"a": 3}) + + +type TA1[T] = TypedDict[{"a": int, "b": T, "c": NotRequired[int]}] + + +class Outer1[T]: + attr1: TypedDict[{"a": list[T]}] + + def __init__(self, v: T) -> None: + self.attr1 = {"a": [v]} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDictReadOnly1.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDictReadOnly1.py new file mode 100644 index 00000000..75a49150 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDictReadOnly1.py @@ -0,0 +1,46 @@ +# This sample tests error conditions for TypedDict classes with +# read-only entries as introduced in PEP 705. 
+ +# pyright: reportIncompatibleVariableOverride=true + +from typing import NotRequired, Required, TypedDict +from typing_extensions import ReadOnly # pyright: ignore[reportMissingModuleSource] + + +class TD1(TypedDict): + a: ReadOnly[int] + b: Required[ReadOnly[str]] + c: ReadOnly[NotRequired[str]] + + # This should generate an error because nested ReadOnly are not allowed. + d: ReadOnly[ReadOnly[str]] + + +TD2 = TypedDict("TD2", {"a": ReadOnly[str]}, total=True) +TD3 = TypedDict("TD3", {"a": ReadOnly[str]}, total=True) + + +class F1(TypedDict): + a: Required[int] + b: ReadOnly[NotRequired[int]] + c: ReadOnly[Required[int]] + + +class F3(F1): + # This should generate an error because it is redefined as read-only. + a: ReadOnly[int] + + +class F4(F1): + # This should generate an error because it is redefined as not required. + a: NotRequired[int] + + +class F5(F1): + b: ReadOnly[Required[int]] + + +class F6(F1): + # This should generate an error because a "not required" field can't + # override a "required" field. + c: ReadOnly[NotRequired[int]] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/typedDictReadOnly2.py b/python-parser/packages/pyright-internal/src/tests/samples/typedDictReadOnly2.py new file mode 100644 index 00000000..12e446b9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/typedDictReadOnly2.py @@ -0,0 +1,248 @@ +# This sample tests various uses of ReadOnly fields in TypedDict classes +# as specified in PEP 705. 
+ +# pyright: reportIncompatibleVariableOverride=true + +from typing import ( + Generic, + Literal, + Mapping, + Never, + NotRequired, + Required, + TypeVar, + TypedDict, + Unpack, +) +from typing_extensions import ReadOnly # pyright: ignore[reportMissingModuleSource] + +_T = TypeVar("_T") + + +class TD1(TypedDict, Generic[_T]): + a: ReadOnly[int] + b: Required[str] + c: Required[ReadOnly[list[str]]] + d: ReadOnly[Required[dict[str, str]]] + e: ReadOnly[_T] + + +class TD2(TD1[_T]): + e: _T + f: ReadOnly[str] + + +td1: TD1[float] = {"a": 3, "b": "", "c": [], "d": {}, "e": 0.0} + +reveal_type(td1.get("a"), expected_text="int") +reveal_type(td1.get("b"), expected_text="str") +reveal_type(td1.get("c"), expected_text="list[str]") +reveal_type(td1.get("d"), expected_text="dict[str, str]") +reveal_type(td1.get("e"), expected_text="float") + +td2: TD2[float] = {"a": 3, "b": "", "c": [], "d": {}, "e": 0.0, "f": ""} + +x1: TD1[float] = td2 + + +class TD3(TypedDict, total=True): + a: str + b: NotRequired[str] + c: NotRequired[str] + + +class TD4(TypedDict, total=True): + a: ReadOnly[str] + b: NotRequired[str] + c: NotRequired[str] + + +td3: TD3 = {"a": ""} +td4: TD4 = {"a": ""} + +# This should generate an error because "a" is ReadOnly. +# It generates a second error because no overloads are found. +td4.update({"a", ""}) + +# This should generate an error because "a" is ReadOnly. +td4.update(a="") + +# This should generate an error because "a" is ReadOnly. +# It generates a second error because no overloads are found. +td4.update([("a", "")]) + +td4.update({"b": ""}) +td4.update({"b": "", "c": ""}) +td4.update(b="") +td4.update(c="") +td4.update(c="", b="") +td4.update([("b", "")]) +td4.update([("c", "")]) +td4.update([("b", ""), ("c", "")]) + +td5 = td3 | td4 + +# This should generate an error. +td4["a"] = "" + +# This should generate an error. 
+x3_0: TD3 = td4 +x3_1: TD3 = td3 +x4_0: TD4 = td3 +x4_1: TD4 = td4 + + +def func1(**kwargs: Unpack[TD4]): + # This should generate an error. + kwargs["a"] = "" + + +m1: Mapping[str, object] = td3 +m2: Mapping[str, object] = td4 + + +class TD5(TypedDict): + a: ReadOnly[float | str] + b: ReadOnly[int] + + +class TD6(TD5): + a: int + + # This should generate an error because str is not + # a subtype of int. + b: ReadOnly[str] + + +class TD7(TD6): + # This should generate an error because Literal[3] is + # not the same type as int. + a: Literal[3] + + +class TD8(TypedDict): + a: ReadOnly[NotRequired[int]] + + +class TD9(TypedDict): + a: NotRequired[int] + + +class TD10(TypedDict): + a: int + + +td10: TD10 = {"a": 0} +n1: TD8 = td10 + +# This should generate an error because "a" is writable +# and required in TD10 but writable and not required in +# TD9, which means it can be deleted. +n2: TD9 = td10 + + +class TD11(TypedDict): + a: int + + +class TD12(TypedDict): + a: ReadOnly[float] + + +class TD13(TypedDict): + a: float + + +v1 = TD11(a=2) +v2: TD12 = v1 + +# This should generate an error because "a" is writable +# and is therefore invariant. +v3: TD13 = v1 + + +class TD14(TypedDict): + x: int + + +class TD15(TypedDict): + x: int + y: ReadOnly[NotRequired[str]] + + +td14: TD14 = {"x": 1} + +# This should generate an error because 'str' is not +# compatible with 'object'. +td15: TD15 = td14 + + +class TD16(TypedDict): + x: int + + +class TD17(TypedDict): + x: int + y: ReadOnly[NotRequired[object]] + + +td16: TD16 = {"x": 1} +ted17: TD17 = td16 + + +class TD18(TypedDict): + x: NotRequired[ReadOnly[int]] + y: int + + +td18_1: TD18 = {"x": 1, "y": 2} +td18_2: TD18 = {"x": 2, "y": 4} + +# This should generate an error because "x" is read-only. +# It generates a second error because no overloads are found. 
+td18_1.update(td18_2) + + +class TD19(TypedDict): + x: NotRequired[Never] + y: ReadOnly[int] + + +def update_a(a: TD18, b: TD19) -> None: + a.update(b) + + +class TD20(TypedDict): + pass + + +td20 = TD20() +td20.update(TD20()) + + +class TD_A1(TypedDict): + x: int + y: ReadOnly[int] + + +class TD_A2(TypedDict): + x: float + y: ReadOnly[float] + + +# This should generate an error for x but not y. +class TD_A(TD_A1, TD_A2): ... + + +class TD_B1(TypedDict): + x: ReadOnly[NotRequired[int]] + y: ReadOnly[Required[int]] + + +class TD_B2(TypedDict): + x: ReadOnly[Required[int]] + y: ReadOnly[NotRequired[int]] + + +# This should generate an error for x but not y. +class TD_B(TD_B1, TD_B2): ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unbound1.py b/python-parser/packages/pyright-internal/src/tests/samples/unbound1.py new file mode 100644 index 00000000..24163422 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unbound1.py @@ -0,0 +1,18 @@ +# This sample tests the type checker's ability to determine which +# symbols are potentially unbound. + +if True: + + class X: + # This should generate an error because 'X' is not yet declared. + def func1(self) -> X: + return X() + + a: X + + class A: + a: X + b = X + + def fn(self) -> X: + return X() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unbound2.py b/python-parser/packages/pyright-internal/src/tests/samples/unbound2.py new file mode 100644 index 00000000..fb4a4d48 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unbound2.py @@ -0,0 +1,12 @@ +# This sample validates that an unbound variable error is reported +# even if that variable has a type declaration. + + +def func1(): + aaa: int + + # This should generate an error because aaa is unbound. 
+ return aaa + + +func1() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unbound3.py b/python-parser/packages/pyright-internal/src/tests/samples/unbound3.py new file mode 100644 index 00000000..3294d5c9 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unbound3.py @@ -0,0 +1,13 @@ +# This sample tests that "possibly unbound" error messages don't propagate. + + +def func1(a: bool): + if a: + b = 3 + + # This should generate an error. + c = b + + # These should not. + d = c + e = d diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unbound4.py b/python-parser/packages/pyright-internal/src/tests/samples/unbound4.py new file mode 100644 index 00000000..bdd175f7 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unbound4.py @@ -0,0 +1,25 @@ +# This sample tests that an unbound variable that is generated in +# a function does not propagate beyond that function to callers. + + +def func1(): + # This should generate an error + return a + + +# This should not. +b = func1() +reveal_type(b, expected_text="Unknown") + + +def func2(val: int): + if val < 3: + return val + + # This should generate an error + return a + + +# This should not. +c = func2(36) +reveal_type(c, expected_text="int | Unknown") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unbound5.py b/python-parser/packages/pyright-internal/src/tests/samples/unbound5.py new file mode 100644 index 00000000..0a9c245a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unbound5.py @@ -0,0 +1,31 @@ +# This sample tests the interplay between unbound symbol detection and +# the code that handles conditional narrowing of captured variables. + +from random import random + + +if random() > 0.5: + from datetime import datetime + from math import cos + +# The following should generate an error because datetime +# is "narrowed" across execution scopes. 
+test0 = lambda: datetime + + +def test1(): + # The following should generate an error because datetime + # is "narrowed" across execution scopes. + return datetime + + +test2 = lambda: cos + + +def test2(): + return cos + + +# This modification means that cos will not be narrowed +# across execution scopes. +cos = None diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unbound6.py b/python-parser/packages/pyright-internal/src/tests/samples/unbound6.py new file mode 100644 index 00000000..119d9927 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unbound6.py @@ -0,0 +1,64 @@ +# This sample tests the case where a variable in an outer scope is captured +# by inner scopes and is potentially unbound. + + +def func1(): + if 1 + 1 > 3: + y = 0 + + class A: + def method1(self): + # This should generate a "possibly unbound" error. + print(y) + + def inner(): + # This should generate a "possibly unbound" error. + print(y) + + # This should generate a "possibly unbound" error. + v = lambda: y + + # This should generate a "possibly unbound" error. + x = [m + y for m in range(3)] + + def func1(self): + # This should generate a "possibly unbound" error. + print(y) + + def inner(): + # This should generate a "possibly unbound" error. + print(y) + + # This should generate a "possibly unbound" error. + v = lambda: y + + # This should generate a "possibly unbound" error. + x = [m + y for m in range(3)] + + # The code below should not generate any errors because + # z is assigned a value later. 
+ if 1 + 1 > 3: + z = 0 + + class B: + def method1(self): + print(z) + + def inner(): + print(z) + + v = lambda: z + + x = [m + z for m in range(3)] + + def func2(self): + print(z) + + def inner(): + print(z) + + v = lambda: z + + x = [m + z for m in range(3)] + + z = 0 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unicode1.py b/python-parser/packages/pyright-internal/src/tests/samples/unicode1.py new file mode 100644 index 00000000..3d261c44 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unicode1.py @@ -0,0 +1,42 @@ +# This sample tests a variety of unicode characters including those that +# require two-code (surrogate) forms. + +# Old Italic +𐌎𐌘𐌟𐌁 = 42 + +# Egyptian hieroglyphs +𓃘𓐭𓇀𓅨𓆙 = 2 + +# Linear B Ideograms +𐂂𐃪𐃯 = "" + +# Cuneiform +𒀟𒀕𒀰𒁜𒂐𒄊 = "" + +# Old Persian +𐎠𐏊𐏏 = 3 + +# Lydian +𐤢𐤷𐤬𐤮 = 4 + +# Phoenician +𐤔𐤑𐤇 = 4 + +# Nabataean +𐢖𐢊ﬗ = 0 + +# CJK ideographs +㐀䶿一鿿𠀀𪛖𪜀𫜴𫝀𫠝𫠠𬺡𬺰𮯠𣎴 = 1 + +# This should generate an error because "𐢭" is outside the range of +# characters supported by the Python standard. +𐢭 = 0 + +# Other surrogate characters +𝓐 = 3 +𝙰 = 4 +𝚫 = 3 +𞡏 = 4 +𞥁 = 0 + + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/uninitializedVariable1.py b/python-parser/packages/pyright-internal/src/tests/samples/uninitializedVariable1.py new file mode 100644 index 00000000..c17db89b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/uninitializedVariable1.py @@ -0,0 +1,46 @@ +# This sample tests the reportUninitializedInstanceVariable functionality. + +from abc import ABC +from dataclasses import dataclass +from typing import Protocol, TypedDict, final + + +class A: + # This should generate an error if reportUninitializedInstanceVariable + # is enabled. 
+ v1: int + v2: int + v3 = 2 + v4: int = 3 + + def __init__(self) -> None: + self.v2 = 3 + super().__init__() + + +@dataclass +class B: + x: int + + +class C(TypedDict): + member1: str + member2: str + + +# Protocol classes are exempt. +class D(Protocol): + x: str + y: str + + +# ABCs are exempt. +class E(ABC): + x: str + y: str + + +# Unless they are final. +@final +class ESub(E): + z: str diff --git a/python-parser/packages/pyright-internal/src/tests/samples/uninitializedVariable2.py b/python-parser/packages/pyright-internal/src/tests/samples/uninitializedVariable2.py new file mode 100644 index 00000000..a2214e95 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/uninitializedVariable2.py @@ -0,0 +1,68 @@ +# This sample tests the reportUninitializedInstanceVariable when applied +# to a concrete implementation of an abstract base class that defines +# (but does not assign) variables. + +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from typing import NamedTuple, final + + +class Abstract1(ABC): + x: str + + +@final +# This should generate an error because x is unimplemented. +class A(Abstract1): + pass + + +class B(Abstract1): + pass + + +@final +class C(Abstract1): + x = "" + + +@final +class D(Abstract1): + def __init__(self): + self.x = "" + + +class Abstract2(Abstract1): + y: str + + +@final +# This should generate an error because x and y are unimplemented. +class E(Abstract2): + pass + + +class Abstract3(Abstract1): + x = "" + + +@final +class G(Abstract3): + pass + + +class H(NamedTuple): + x: int + + +@dataclass +class IAbstract(ABC): + p1: str + p2: int = field(init=False) + + +@final +@dataclass +# This should generate an error because p2 is uninitialized. 
+class I(IAbstract): + p3: int diff --git a/python-parser/packages/pyright-internal/src/tests/samples/uninitializedVariable3.py b/python-parser/packages/pyright-internal/src/tests/samples/uninitializedVariable3.py new file mode 100644 index 00000000..844d9328 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/uninitializedVariable3.py @@ -0,0 +1,13 @@ +# This sample tests a special case for the reportUninitializedInstanceVariable +# test involving NamedTuple classes. + +# pyright: reportUninitializedInstanceVariable=true + +from typing import final, NamedTuple + + +@final +class A(NamedTuple): + x: int + y: float + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unions1.py b/python-parser/packages/pyright-internal/src/tests/samples/unions1.py new file mode 100644 index 00000000..20951088 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unions1.py @@ -0,0 +1,76 @@ +# This sample tests the alternative syntax for unions as +# documented in PEP 604. + +from typing import Callable, Generic, TypeVar, Union + + +def func1(a: int | str): + if isinstance(a, int): + return 1 + else: + return 2 + + +B = bytes | None | Callable[[], None] +A = int | str | B + + +def func2(a: A) -> B: + if a == 3 or a is None: + return b"" + elif not isinstance(a, (int, str, bytes)): + a() + + +def func3(A: "int | str"): + return 1 + + +T = TypeVar("T") + + +def func4(a: str): + def helper(value: T) -> T | None: ... + + class Baz(Generic[T]): + qux: T | None + + reveal_type(helper(a), expected_text="str | None") + reveal_type(Baz[str]().qux, expected_text="str | None") + + +T = TypeVar("T") +TT = TypeVar("TT", bound=type) + + +def decorator1(value: type[T]) -> type[T]: ... + + +def decorator2(value: TT) -> TT: ... 
+ + +class ClassA: + class ClassA_A: + pass + + @decorator1 + class ClassA_B: + pass + + @decorator2 + class ClassA_C: + pass + + +a_or_str: "ClassA.ClassA_A | str" +b_or_str: "ClassA.ClassA_B | str" +b_or_str_Union: Union[ClassA.ClassA_B, str] +c_or_str: "ClassA.ClassA_C | str" + +Alias1 = None | str +Alias2 = str | None + +_T = TypeVar("_T") + +Alias3 = _T | str +Alias4 = str | _T diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unions2.py b/python-parser/packages/pyright-internal/src/tests/samples/unions2.py new file mode 100644 index 00000000..a73b7893 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unions2.py @@ -0,0 +1,15 @@ +# This sample verifies that bitwise or operator is not +# interpreted as a Union operator in cases where it +# shouldn't be. + +from typing import Any + + +class Class1: + def __init__(self, v): + self._v = v + + +def test_bad_syntax(a: Any, b: Any): + r = Class1(a | b) + print(r) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unions3.py b/python-parser/packages/pyright-internal/src/tests/samples/unions3.py new file mode 100644 index 00000000..2da0130f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unions3.py @@ -0,0 +1,49 @@ +# This sample verifies that the "logical or" operator +# is supported for classes that have a custom metaclass +# with a __or__ or __ror__ method defined. + +# pyright: reportIncompatibleMethodOverride=false + +from typing import Type, TypeVar + + +class ClassWithNoMeta1: + pass + + +class ClassWithNoMeta2: + pass + + +NoMetaUnion = ClassWithNoMeta1 | ClassWithNoMeta2 + + +def func1(x: NoMetaUnion): + reveal_type(x, expected_text="ClassWithNoMeta1 | ClassWithNoMeta2") + + +_T = TypeVar("_T") + + +class Metaclass1(type): + def __or__(cls: _T, other: type) -> _T: ... + + +class Metaclass2(type): + def __ror__(cls: _T, other: type) -> _T: ... 
+ + +class ClassWithMeta1(metaclass=Metaclass1): + pass + + +class ClassWithMeta2(metaclass=Metaclass2): + pass + + +def requires_class_with_meta1(val: Type[ClassWithMeta1]): + pass + + +MetaOr1 = ClassWithMeta1 | ClassWithNoMeta1 +requires_class_with_meta1(MetaOr1) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unions4.py b/python-parser/packages/pyright-internal/src/tests/samples/unions4.py new file mode 100644 index 00000000..3ecc2e39 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unions4.py @@ -0,0 +1,30 @@ +# This sample tests the incorrect usage of Union types. + +from typing import Union + +x = Union[int, str] + +y = Union[int] + +z = Union + +# This should generate an error. +v1: Union[int] + + +# This should generate an error. +def func1() -> Union: ... + + +# This should generate an error. +var1: Union + + +# This should generate two errors. +def func2(x: (list | set)[int]): + reveal_type(x) + + +# This should generate two errors. +def func3(x: Union[list, set][int]): + reveal_type(x) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unions5.py b/python-parser/packages/pyright-internal/src/tests/samples/unions5.py new file mode 100644 index 00000000..a6b361bf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unions5.py @@ -0,0 +1,44 @@ +# This sample tests the handling of runtime union expressions that +# are used in contexts other than a type annotation. 
+ +from types import UnionType +from typing import Optional, Union + + +class Class1: + a: int + + +class Class2: + a: int + + +# This should generate an error +a1: type[Class1] | type[Class2] = Class1 | Class2 + +# This should generate an error +a2: type[Class1] | type[Class2] = Union[Class1, Class2] + + +b1 = Class1 | Class2 + +# This should generate an error +print(b1.a) + +# This should generate an error +b1() + + +b2 = Union[Class1, Class2] + +# This should generate an error +print(b2.a) + +# This should generate an error +b2() + + +c1: UnionType +c1 = int | str +c1 = Union[int, str] +c1 = Optional[int] diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unions6.py b/python-parser/packages/pyright-internal/src/tests/samples/unions6.py new file mode 100644 index 00000000..35e2a3d1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unions6.py @@ -0,0 +1,58 @@ +# This sample tests that union type compatibility does not depend on +# the order of the elements in the union. + +from __future__ import annotations + +from typing import ( + Awaitable, + Callable, + MutableSequence, + Protocol, + SupportsIndex, + TypeGuard, + TypeVar, + overload, +) + +T_co = TypeVar("T_co", covariant=True) +_T = TypeVar("_T") + + +class MyList(MutableSequence[_T]): + @overload + def __getitem__(self, index: SupportsIndex) -> _T: # type: ignore + ... + + @overload + def __getitem__(self, index: slice) -> MyList[_T]: ... + + +class NestedSequence(Protocol[T_co]): + @overload + def __getitem__(self, index: int, /) -> T_co | NestedSequence[T_co]: ... + + @overload + def __getitem__(self, index: slice, /) -> NestedSequence[T_co]: ... + + +def func1(b: MyList[int | MyList[int]]): + _: NestedSequence[int] = b + + +def func2(c: MyList[MyList[int] | int]): + _: NestedSequence[int] = c + + +def is_async_callable( + obj: Callable[..., _T] | Callable[..., Awaitable[_T]], +) -> TypeGuard[Callable[..., Awaitable[_T]]]: ... 
+ + +async def func3(fn: Callable[[], _T] | Callable[[], Awaitable[_T]]): + if is_async_callable(fn): + return await fn() + + +async def func4(fn: Callable[[], Awaitable[_T]] | Callable[[], _T]): + if is_async_callable(fn): + return await fn() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryCast1.py b/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryCast1.py new file mode 100644 index 00000000..897cf0df --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryCast1.py @@ -0,0 +1,57 @@ +# This sample tests the type checker's reportUnnecessaryCast feature. + +from typing import Annotated, Never, NoReturn, TypeVar, cast + + +def func1(a: int): + # This should generate an error if + # reportUnnecessaryCast is enabled. + v1 = cast(int, a) + + +def func2(a: int | str): + v1 = cast(int, a) + + b: str = "hello" + v2 = cast(int, b) + + +def func3(a: int | None): + v1 = cast(int, a) + + # This should generate an error if + # reportUnnecessaryCast is enabled. + v2 = cast(int | None, a) + + +T = TypeVar("T") + + +def func4(a: list[T]) -> list[T]: + # This should generate an error if + # reportUnnecessaryCast is enabled. + v1 = cast(list[T], a) + + return a + + +def func5(a: Never): + # This should generate an error if + # reportUnnecessaryCast is enabled. + v1 = cast(NoReturn, a) + + +def func6(a: type[int], b: int): + v1 = cast(int, a) + v2 = cast(type[int], b) + + # This should generate an error if + # reportUnnecessaryCast is enabled. + v3 = cast(type[int], a) + + # This should generate an error if + # reportUnnecessaryCast is enabled. 
+ v4 = cast(int, b) + + +AnnotatedInt = cast(type[int], Annotated[int, ...]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryContains1.py b/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryContains1.py new file mode 100644 index 00000000..940ba37f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryContains1.py @@ -0,0 +1,65 @@ +# This sample tests the "reportUnnecessaryContains" diagnostic rule. + +from enum import Enum +from typing import Literal, TypeVar + +T1 = TypeVar("T1") +T2 = TypeVar("T2", bound=str) + + +def func1(x: str | int): + if x in ("a",): + return + + # This should generate an error if "reportUnnecessaryContains" is enabled. + if x in (b"a",): + return + + +def func2(x: Literal[1, 2, 3]): + if x in ("4", 1): + return + + # This should generate an error if "reportUnnecessaryContains" is enabled. + if x not in ("4", "1"): + pass + + # This should generate an error if "reportUnnecessaryContains" is enabled. + if x in (4, 5): + return + + +def func3(x: list[str]): + if x in (["hi"], [2, 3]): + return + + # This should generate an error if "reportUnnecessaryContains" is enabled. + if x not in ((1, 2), (3,)): + pass + + +def func4(x: list[T1]) -> T1: + if 0 not in x: + pass + return x[0] + + +def func5(x: list[T2]) -> T2: + # This should generate an error if "reportUnnecessaryContains" is enabled. 
+ if 0 not in x: + pass + return x[0] + + +class Enum1(Enum): + a = "a" + b = "b" + c = "c" + + @property + def is_ab(self): + return self in (Enum1.a, Enum1.b) + + @property + def is_c(self): + return self not in (Enum1.a, Enum1.b) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryIsInstance1.py b/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryIsInstance1.py new file mode 100644 index 00000000..5b72dcfb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryIsInstance1.py @@ -0,0 +1,83 @@ +# This sample tests for isinstance calls that always evaluate to true. + +from typing import ClassVar, Protocol, TypedDict, runtime_checkable +from unknown_import import CustomClass1 + + +class CustomClass2(CustomClass1): + pass + + +def func1(p1: int, p2: int | str): + a = isinstance(p2, str) + + b = isinstance(p2, (int, float)) + + # This should generate an error because this is always true. + c = isinstance(p2, (float, dict, int, str)) + + d = isinstance(p1, float) + + e = isinstance(p2, (float, dict, int)) + + # This should generate an error because this is always true. + f = isinstance(p1, int) + + # This should not generate an error because it's within an assert. + assert isinstance(p1, int) + + g = CustomClass2() + # This should not generate an error because CustomClass2 + # derives from an unknown type. + g = isinstance(g, CustomClass1) + + +class SomeTypedDict(TypedDict): + name: str + + +def func2(p1: SomeTypedDict, p2: int | SomeTypedDict): + a = isinstance(p2, dict) + + # This should generate an error because it's always true. 
+ b = isinstance(p1, dict) + + +@runtime_checkable +class BaseClass(Protocol): + text: ClassVar[str] = "FOO" + + +class ClassA: + text: ClassVar[str] = "BAR" + + +class ClassB: + text: ClassVar[str] = "BAZ" + + +class ClassC: + pass + + +def func3(obj: BaseClass): + if isinstance(obj, (ClassA, ClassB)): + reveal_type(obj, expected_text="ClassA | ClassB") + + if isinstance(obj, (ClassA, ClassB, ClassC)): + reveal_type(obj, expected_text="ClassA | ClassB") + + +class A: + pass + + +class B(A): + pass + + +def func4(a: A, cls: type[A]) -> None: + isinstance(a, cls) + + # This should generate an error because it's always true. + isinstance(a, A) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryIsInstance2.py b/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryIsInstance2.py new file mode 100644 index 00000000..d1c938fb --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryIsInstance2.py @@ -0,0 +1,35 @@ +# This sample tests for isinstance calls that never evaluate to true. + +from typing import final + + +class ABase: ... + + +@final +class AFinal(ABase): ... + + +class BBase: ... + + +@final +class BFinal(BBase): ... + + +def func1(a: AFinal, b: BFinal): + # This should generate an error if reportUnnecessaryIsinstance is true. + if isinstance(a, BBase): + reveal_type(a) + + # This should generate an error if reportUnnecessaryIsinstance is true. 
+ if isinstance(a, BBase): + reveal_type(a) + + +def func2(a: ABase, b: BBase): + if isinstance(a, BBase): + reveal_type(a) + + if isinstance(b, ABase): + reveal_type(b) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryIsSubclass1.py b/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryIsSubclass1.py new file mode 100644 index 00000000..2bb54367 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unnecessaryIsSubclass1.py @@ -0,0 +1,20 @@ +# This sample tests issubclass calls that always evaluate to true. + + +def func1(p1: type[int], p2: type[int] | type[str]): + a = issubclass(p2, str) + + b = issubclass(p2, (int, float)) + + # This should generate an error because this is always true. + c = issubclass(p2, (float, dict, int, str)) + + d = issubclass(p1, float) + + e = issubclass(p2, (float, dict, int)) + + # This should generate an error because this is always true. + f = issubclass(p1, int) + + # This should not generate an error because it's within an assert. + assert issubclass(p1, int) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unpack1.py b/python-parser/packages/pyright-internal/src/tests/samples/unpack1.py new file mode 100644 index 00000000..0f3f2b1c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unpack1.py @@ -0,0 +1,53 @@ +# This sample tests the type checker's handling of the unpack operator. + +# pyright: strictListInference=true + + +class Class1: ... + + +class Class2: ... + + +a = [1, "hello", 3.4, Class1()] + +b = [*a] + + +def int_only(a: int): ... + + +for c in b: + if not isinstance(c, (float, str)): + # This should generate an error because c can + # be an int or foo. + int_only(c) + + if not isinstance(c, Class1): + # This should not generate an error. 
+ int_only(c) + +# This should generate an error +x1 = *(1, 2, 3) + +x2 = 2, *(1, 2, 3) + +x3 = *(1, 2, 3), 2 + + +[d1, *e1, f1] = [1, 2, 3, 4] +reveal_type(e1, expected_text="list[int]") + +[*d2, e2, f2] = [1, 2, 3, 4] +reveal_type(d2, expected_text="list[int]") + +[d3, e3, *f3] = (1, 2, 3, 4) +reveal_type(f3, expected_text="list[int]") + +[g1, g2, g3] = (1, 2, 3) + +# This should generate an error. +[g1, g2, g3, g4] = (1, 2, 3) + +# This should generate an error. +[g1, g2] = (1, 2, 3) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unpack2.py b/python-parser/packages/pyright-internal/src/tests/samples/unpack2.py new file mode 100644 index 00000000..f2ecf9c5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unpack2.py @@ -0,0 +1,27 @@ +# This sample tests the creation of tuples from unpacked values. + +from typing import Any + + +def foo() -> tuple[int, int, int]: + rest = (2, 3) + t = 1, *rest + return t + + +def foo2() -> tuple[int, int, int]: + rest = (3, 4) + t = 1, 2, *rest + # This should generate an error + return t + + +def foo3() -> tuple[Any, ...]: + rest = [1, 2, 3] + t = 1, 2, 3, *rest + requires_list(rest) + return t + + +def requires_list(a: list[int]): + return None diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unpack3.py b/python-parser/packages/pyright-internal/src/tests/samples/unpack3.py new file mode 100644 index 00000000..39abdf98 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unpack3.py @@ -0,0 +1,16 @@ +# This sample tests the reporting of errors related to +# unpack operators within tuple expressions not enclosed +# in parentheses when used with return statements. Support +# for this was added in Python 3.8. 
+ + +def test1(): + a = [1, 2, 3] + b = (4, *a, 5) + return (4, *b, 5) + + +def test2(): + a = [1, 2, 3] + # This should generate an error for versions of Python <3.8 + return 4, *a, 5 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unpack4.py b/python-parser/packages/pyright-internal/src/tests/samples/unpack4.py new file mode 100644 index 00000000..aa008647 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unpack4.py @@ -0,0 +1,19 @@ +# This sample tests the handling of multiple unpack operators in a +# star expression. + +a = [1, 2] +b = ["3", "4"] + +# This should generate an error for versions of Python <3.9 +for x in *a, *b: + print(x) + +c = *a, *b +print(c) + +# This should always generate an error. +*a, *b = (1, 2) + + +def func1(x: str): + "".join([*sorted([x])]) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unpack5.py b/python-parser/packages/pyright-internal/src/tests/samples/unpack5.py new file mode 100644 index 00000000..a6038138 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unpack5.py @@ -0,0 +1,86 @@ +# This sample tests unpacking of tuples that contain PEP 646-style +# tuples with unknown length within them. + + +def suffix() -> tuple[int, str, *tuple[bool, ...]]: + return 1, "a", True + + +def test_suffix(): + a1, a2, a3 = suffix() + reveal_type(a1, expected_text="int") + reveal_type(a2, expected_text="str") + reveal_type(a3, expected_text="bool") + + *b1, b2, b3 = suffix() + # This case is ambiguous. + reveal_type(b1, expected_text="list[int]") + reveal_type(b2, expected_text="str") + reveal_type(b3, expected_text="bool") + + c1, *c2, c3 = suffix() + # This case is ambiguous. 
+ reveal_type(c1, expected_text="int") + reveal_type(c2, expected_text="list[str]") + reveal_type(c3, expected_text="bool") + + d1, d2, *d3 = suffix() + reveal_type(d1, expected_text="int") + reveal_type(d2, expected_text="str") + reveal_type(d3, expected_text="list[bool]") + + +def prefix() -> tuple[*tuple[int, ...], str, bool]: + return 1, "a", True + + +def test_prefix(): + a1, a2, a3 = prefix() + reveal_type(a1, expected_text="int") + reveal_type(a2, expected_text="str") + reveal_type(a3, expected_text="bool") + + *b1, b2, b3 = prefix() + reveal_type(b1, expected_text="list[int]") + reveal_type(b2, expected_text="str") + reveal_type(b3, expected_text="bool") + + c1, *c2, c3 = prefix() + # This case is ambiguous. + reveal_type(c1, expected_text="int") + reveal_type(c2, expected_text="list[str]") + reveal_type(c3, expected_text="bool") + + d1, d2, *d3 = prefix() + # This case is ambiguous. + reveal_type(d1, expected_text="int") + reveal_type(d2, expected_text="str") + reveal_type(d3, expected_text="list[bool]") + + +def middle() -> tuple[int, *tuple[str, ...], bool]: + return 1, "a", True + + +def test_middle(): + a1, a2, a3 = middle() + reveal_type(a1, expected_text="int") + reveal_type(a2, expected_text="str") + reveal_type(a3, expected_text="bool") + + *b1, b2, b3 = middle() + # This case is ambiguous. + reveal_type(b1, expected_text="list[int]") + reveal_type(b2, expected_text="str") + reveal_type(b3, expected_text="bool") + + c1, *c2, c3 = middle() + reveal_type(c1, expected_text="int") + reveal_type(c2, expected_text="list[str]") + reveal_type(c3, expected_text="bool") + + d1, d2, *d3 = middle() + # This case is ambiguous. 
+ reveal_type(d1, expected_text="int") + reveal_type(d2, expected_text="str") + reveal_type(d3, expected_text="list[bool]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unreachable1.py b/python-parser/packages/pyright-internal/src/tests/samples/unreachable1.py new file mode 100644 index 00000000..43d94896 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unreachable1.py @@ -0,0 +1,132 @@ +# This sample tests the detection and reporting of unreachable code. + +import os +import sys +from abc import abstractmethod +from typing import NoReturn + + +def func1(): + """ + Docstring + """ + raise NotImplementedError() + + +class Foo: + b: bool + + @staticmethod + def method1(): + """ + Docstring + """ + raise NotImplementedError("Not Implemented") + + def method2(self, a: int): + """ + Docstring + """ + if a < 10 or self.b: + raise NotImplementedError() + + @abstractmethod + def method3(self): + print(self.b) + raise RuntimeError() + + def method4(self) -> None: + print(self.b) + raise RuntimeError() + + def method5(self) -> NoReturn: + print(self.b) + raise RuntimeError() + + +def func2(): + func1() + + # This should not be marked unreachable because NotImplementedError + # is special-cased. + return 3 + + +def func3(foo: Foo): + foo.method1() + return 3 + + +def func4(foo: Foo): + foo.method2(2) + return 3 + + +def func5(foo: Foo): + foo.method3() + return 3 + + +def func6(foo: Foo): + foo.method4() + return 3 + + +def func7(foo: Foo): + foo.method5() + + # This should be marked as unreachable. + # If reportUnreachable is enabled, it should generate a diagnostic. + return 3 + + +def func8() -> NoReturn: + raise NameError() + + +def func9(): + func8() + + # This should be marked unreachable. + # If reportUnreachable is enabled, it should generate a diagnostic. 
+ return 3 + + +def func10(): + e = OSError() + a1 = os.name == "nt" and None == e.errno + reveal_type(a1, expected_text="bool") + + a2 = True and os.name == "nt" + reveal_type(a2, expected_text="bool") + + if os.name == "nt": + # This should be marked unreachable. + b = e.errno + + if sys.version_info >= (4, 0): + # This should be marked unreachable. + b = e.errno + + return + # This should be marked unreachable. + # If reportUnreachable is enabled, it should generate a diagnostic. + b = e.errno + + +def func11(obj: str) -> list: + if isinstance(obj, str): + return [] + else: + # This should be marked as unreachable. + # If reportUnreachable is enabled, it should generate a diagnostic. + return obj + + +def func12(obj: str) -> list: + if isinstance(obj, str): + return [] + + # This should be marked as unreachable. + # If reportUnreachable is enabled, it should generate a diagnostic. + return obj diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unusedCallResult1.py b/python-parser/packages/pyright-internal/src/tests/samples/unusedCallResult1.py new file mode 100644 index 00000000..4c4bbdfd --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unusedCallResult1.py @@ -0,0 +1,67 @@ +# This sample tests the reportUnusedCallResult diagnostic rule. + +from typing import Any, Iterable + + +def func1(): + pass + + +def func2(): + raise RuntimeError() + + +def func3() -> Any: + pass + + +def func4(): + return 3 + + +def func5(a: int) -> int | list[int]: + if a < 0: + return 5 + return [3] + + +def func6() -> Iterable[int]: + return [] + + +func1() + + +def aaa(): + func2() + + +func3() + + +# This should generate a diagnostic if reportUnusedCallResult is enabled. +func4() + +# This should generate a diagnostic if reportUnusedCallResult is enabled. +func5(3) + +# This should generate a diagnostic if reportUnusedCallResult is enabled. 
+func6() + +_, _ = func5(3), func6() + +_ = func5(3) + +_ = func5(func4()) + +for _ in func6(): + pass + + +async def get_string_async() -> str: + return "A string" + + +async def await_string() -> None: + # This should generate a diagnostic if reportUnusedCallResult is enabled. + await get_string_async() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unusedCoroutine1.py b/python-parser/packages/pyright-internal/src/tests/samples/unusedCoroutine1.py new file mode 100644 index 00000000..b6d2c98a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unusedCoroutine1.py @@ -0,0 +1,23 @@ +# This sample tests the reportUnusedCoroutine diagnostic rule. + + +async def func1(): + return 3 + + +async def func2() -> str: + return "5" + + +async def func3(): + await func1() + await func2() + + # This should generate an error + func1() + + # This should generate an error + func2() + + _ = func1() + _ = func2() diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unusedExpression1.py b/python-parser/packages/pyright-internal/src/tests/samples/unusedExpression1.py new file mode 100644 index 00000000..8631c591 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unusedExpression1.py @@ -0,0 +1,50 @@ +# This sample tests the reportUnusedExpression diagnostic rule. + +t = 1 + + +# This should generate a diagnostic. +-4 + +# This should generate a diagnostic. +4j + +# This should generate a diagnostic. +4j + 4 + +# This should generate a diagnostic. +False + +# This should generate a diagnostic. +t == 1 + +# This should generate a diagnostic. +t != 2 + +# This should generate a diagnostic. +t <= t + +# This should generate a diagnostic. +not t + +# This should generate a diagnostic. +None + +# This should generate a diagnostic. +t + +# This should generate a diagnostic. +(1, 2, 3) + +# This should generate a diagnostic. +{1: 2} + +# This should generate a diagnostic. 
+{1, 2, 3} + +# This should generate a diagnostic. +[1, 2, 3] + +[x for x in range(3)] +{x: x for x in range(3)} +{x for x in range(3)} diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unusedImport1.py b/python-parser/packages/pyright-internal/src/tests/samples/unusedImport1.py new file mode 100644 index 00000000..15d35337 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unusedImport1.py @@ -0,0 +1,5 @@ +# This sample tests the reportUnusedImport diagnostic rule. +import sys as sys # Assumes export +import os as os2 # Should error +from sys import path as p # Should error +from os import environ as environ # Assumes export diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unusedImport2.py b/python-parser/packages/pyright-internal/src/tests/samples/unusedImport2.py new file mode 100644 index 00000000..89b19406 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unusedImport2.py @@ -0,0 +1,17 @@ +# This sample tests the reportUnusedImport diagnostic rule +# for multipart imports. + +# This should result in an error if reportUnusedImport is enabled. +import package2.module1 + +# This should result in an error if reportUnusedImport is enabled. +import package2.module2 + +import package2.module3 + +import package2.module2 as dummy + + +def func1(): + print(package2.module3.a3) + print(dummy) diff --git a/python-parser/packages/pyright-internal/src/tests/samples/unusedVariable1.py b/python-parser/packages/pyright-internal/src/tests/samples/unusedVariable1.py new file mode 100644 index 00000000..df6b1c0f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/unusedVariable1.py @@ -0,0 +1,21 @@ +# This sample tests the reportUnusedVariable diagnostic check. + + +def func1(a: int): + x = 4 + + # This should generate an error if reportUnusedVariable is enabled. 
+ y = x + + _z = 4 + + _ = 2 + + __z__ = 5 + + if x + 1: + # This should generate an error if reportUnusedVariable is enabled. + z = 3 + else: + # This should generate an error if reportUnusedVariable is enabled. + z = 5 diff --git a/python-parser/packages/pyright-internal/src/tests/samples/with1.py b/python-parser/packages/pyright-internal/src/tests/samples/with1.py new file mode 100644 index 00000000..06db6ae6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/with1.py @@ -0,0 +1,115 @@ +# This sample tests various forms of the 'with' statement. + +from typing import Any, Generic, Optional, TypeVar, Self + +_T1 = TypeVar("_T1") + + +class Class1(object): + def __exit__( + self, + t: Optional[type] = None, + exc: Optional[BaseException] = None, + tb: Optional[Any] = None, + ) -> bool: + return True + + +class Class2(object): + def __enter__(self): + return 1 + + def __exit__( + self, + t: Optional[type] = None, + exc: Optional[BaseException] = None, + tb: Optional[Any] = None, + ) -> bool: + return True + + +class Class3(object): + def __enter__(self: _T1) -> _T1: + return self + + def __exit__( + self, + t: Optional[type] = None, + exc: Optional[BaseException] = None, + tb: Optional[Any] = None, + ) -> bool: + return True + + +def requires_int(val: int): + pass + + +def requires_class3(val: Class3): + pass + + +def test1(): + a1 = Class1() + + # This should generate an error because Class1 + # does not implement an __enter__ + with a1 as foo: + pass + + a2 = Class2() + with a2 as foo: + requires_int(foo) + + a3 = Class3() + with a3 as foo: + # This should generate an error because foo + # should be of type Class3. 
+ requires_int(foo) + + requires_class3(foo) + + with a2 as foo2, a3 as foo3: + requires_int(foo2) + requires_class3(foo3) + + +class Class4: + async def __aenter__(self: _T1) -> _T1: + return self + + async def __aexit__( + self, + t: Optional[type] = None, + exc: Optional[BaseException] = None, + tb: Optional[Any] = None, + ) -> bool: + return True + + +async def test2(): + a1 = Class4() + + # This should generate two errors because Class4 does not + # implement __enter__ or __exit__. + with a1 as foo: + pass + + async with a1 as foo: + pass + + +class Class5(Generic[_T1]): + async def __aenter__(self) -> Self: + return self + + async def __aexit__(self, *args: Any) -> None: + return None + + +class Class6(Class5[int]): ... + + +async def do(): + async with Class6() as f: + reveal_type(f, expected_text="Class6") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/with2.py b/python-parser/packages/pyright-internal/src/tests/samples/with2.py new file mode 100644 index 00000000..fe50054b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/with2.py @@ -0,0 +1,78 @@ +# This sample tests for the presence of an __exit__ +# or __aexit__ method. + + +from contextlib import AbstractContextManager +from typing import Any, Literal, TypeVar + +_T1 = TypeVar("_T1") + + +class Class2(object): + def __enter__(self): + return 1 + + +class Class3(object): + def __enter__(self: _T1) -> _T1: + return self + + def __exit__( + self, + t: type | None = None, + exc: BaseException | None = None, + tb: Any | None = None, + ) -> bool: + return True + + +def requires_int(val: int): + pass + + +def requires_class3(val: Class3): + pass + + +def test1(): + a2 = Class2() + a3 = Class3() + + # This should generate an error because + # the __exit__ method is missing. + with a2 as foo: + requires_int(foo) + + # This should generate an error because + # the __exit__ method is missing. 
+ with a2 as foo2, a3 as foo3: + requires_int(foo2) + requires_class3(foo3) + + +class Class4: + async def __aenter__(self: _T1) -> _T1: + return self + + +async def test2(): + a1 = Class4() + + # This should generate an error because __aexit__ + # needs to be used with async with. + async with a1 as foo: + pass + + +class Class5(AbstractContextManager[Any]): + def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> Literal[True]: + return True + + +def test3(val: str | None): + val = None + with Class5(): + val = "" + raise Exception + + reveal_type(val, expected_text="Literal[''] | None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/with3.py b/python-parser/packages/pyright-internal/src/tests/samples/with3.py new file mode 100644 index 00000000..8034b1f6 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/with3.py @@ -0,0 +1,81 @@ +# This sample verifies the proper type analysis of context managers +# that suppress exceptions, as indicated by a return type of "bool" +# for the __exit__ or __aexit__ method. + +from contextlib import suppress, AsyncExitStack +from typing import Never + + +def test1() -> None: + class A: + b: str + + x = b"" + a = A() + with memoryview(x), suppress(AttributeError): + if a.b: + raise RuntimeError() + return + + # This should generate an error. + c = "hi" + 3 + + with memoryview(x): + raise RuntimeError() + + # This should not generate an error because + # the code is unreachable. + return 3 + + +def test2() -> None: + some_dict = dict() + + with suppress(KeyError): + print(some_dict["missing_key"]) + + # This should generate an error because the + # code is reachable. + return 1 + + +def test3(cm: suppress) -> None: + some_dict = dict() + + with cm: + print(some_dict["missing_key"]) + + # This should generate an error because the + # code is reachable. 
+ return 1 + + +class CMFactory: + def get_cm(self) -> suppress: + return suppress() + + +def test4() -> None: + some_dict = dict() + + with CMFactory().get_cm(): + print(some_dict["missing_key"]) + + # This should generate an error because the + # code is reachable. + return 1 + + +def no_return() -> Never: + raise Exception() + + +def test6(): + val = None + with suppress(): + val = 1 + no_return() + val = 2 + + assert val is not None + reveal_type(val, expected_text="Literal[1, 2]") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/with4.py b/python-parser/packages/pyright-internal/src/tests/samples/with4.py new file mode 100644 index 00000000..4eaece7f --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/with4.py @@ -0,0 +1,24 @@ +# This sample tests that parentheses are allowed in with statements +# if using Python 3.9 and later. + +from tempfile import TemporaryFile + +# This should generate an error +with (TemporaryFile() as a, TemporaryFile() as b): + pass + +# This should generate an error +with (TemporaryFile() as c, ): + pass + +# This should generate an error +with (TemporaryFile() as d): + pass + +with (TemporaryFile()): + pass + +# This should generate an error +with (TemporaryFile(), TemporaryFile()): + pass + diff --git a/python-parser/packages/pyright-internal/src/tests/samples/with5.py b/python-parser/packages/pyright-internal/src/tests/samples/with5.py new file mode 100644 index 00000000..b1a18826 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/with5.py @@ -0,0 +1,31 @@ +# This sample tests the case of a context manager within a try/except block. + +from typing import ContextManager + + +def create_context() -> ContextManager[str]: ... + + +def possible_exception() -> None: ... 
+ + +def func1(): + x: str | None = None + ctx: str | None = None + try: + with create_context() as ctx: + x = "0" + possible_exception() + except Exception: + reveal_type(x, expected_text="Literal['0'] | None") + reveal_type(ctx, expected_text="str | None") + + +def func2(): + ctx: str | None = None + try: + with create_context() as ctx: + possible_exception() + return + except Exception: + reveal_type(ctx, expected_text="str | None") diff --git a/python-parser/packages/pyright-internal/src/tests/samples/with6.py b/python-parser/packages/pyright-internal/src/tests/samples/with6.py new file mode 100644 index 00000000..e7bc5c96 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/with6.py @@ -0,0 +1,22 @@ +# This sample tests that classes whose metaclass implements a context +# manager work with the "with" statement. + +from types import TracebackType + + +class ClassA(type): + def __enter__(cls) -> "ClassA": + print("Enter A") + return cls + + def __exit__( + cls, exc_typ: type[Exception], exc_val: Exception, exc_tbc: TracebackType + ) -> None: + print("Exit A") + + +class ClassB(metaclass=ClassA): ... + + +with ClassB as b: + ... diff --git a/python-parser/packages/pyright-internal/src/tests/samples/zipfs/bad.egg b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/bad.egg new file mode 100644 index 00000000..da55f375 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/bad.egg @@ -0,0 +1 @@ +This file isn't a zip. diff --git a/python-parser/packages/pyright-internal/src/tests/samples/zipfs/bad.jar b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/bad.jar new file mode 100644 index 00000000..da55f375 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/bad.jar @@ -0,0 +1 @@ +This file isn't a zip. 
diff --git a/python-parser/packages/pyright-internal/src/tests/samples/zipfs/bad.zip b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/bad.zip new file mode 100644 index 00000000..07f41f7d --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/bad.zip @@ -0,0 +1 @@ +This file isn't a zip. diff --git a/python-parser/packages/pyright-internal/src/tests/samples/zipfs/basic.egg b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/basic.egg new file mode 100644 index 00000000..2ae9babc Binary files /dev/null and b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/basic.egg differ diff --git a/python-parser/packages/pyright-internal/src/tests/samples/zipfs/basic.jar b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/basic.jar new file mode 100644 index 00000000..2ae9babc Binary files /dev/null and b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/basic.jar differ diff --git a/python-parser/packages/pyright-internal/src/tests/samples/zipfs/basic.zip b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/basic.zip new file mode 100644 index 00000000..2ae9babc Binary files /dev/null and b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/basic.zip differ diff --git a/python-parser/packages/pyright-internal/src/tests/samples/zipfs/corrupt.egg b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/corrupt.egg new file mode 100644 index 00000000..bbb00f43 Binary files /dev/null and b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/corrupt.egg differ diff --git a/python-parser/packages/pyright-internal/src/tests/samples/zipfs/corrupt.jar b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/corrupt.jar new file mode 100644 index 00000000..bbb00f43 Binary files /dev/null and b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/corrupt.jar differ diff --git 
a/python-parser/packages/pyright-internal/src/tests/samples/zipfs/corrupt.zip b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/corrupt.zip new file mode 100644 index 00000000..bbb00f43 Binary files /dev/null and b/python-parser/packages/pyright-internal/src/tests/samples/zipfs/corrupt.zip differ diff --git a/python-parser/packages/pyright-internal/src/tests/serialization.test.ts b/python-parser/packages/pyright-internal/src/tests/serialization.test.ts new file mode 100644 index 00000000..206bca31 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/serialization.test.ts @@ -0,0 +1,103 @@ +/* + * serialization.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for serializing/deserializing data for background threads. + */ + +import assert from 'assert'; + +import { deserialize, serialize } from '../backgroundThreadBase'; +import { UriEx } from '../common/uri/uriUtils'; +import { CancellationToken } from 'vscode-languageserver'; + +export function serializationTests(serializer = serialize, deserializer = deserialize) { + test('Simple string', () => { + const serialized = serializer('hello'); + const deserialized = deserializer(serialized); + assert.strictEqual(deserialized, 'hello'); + }); + + test('Simple number', () => { + const serialized = serializer(123); + const deserialized = deserializer(serialized); + assert.strictEqual(deserialized, 123); + }); + + test('Simple boolean', () => { + const serialized = serializer(true); + const deserialized = deserializer(serialized); + assert.strictEqual(deserialized, true); + }); + + test('Simple object', () => { + const serialized = serializer({ a: 1, b: 'hello' }); + const deserialized = deserializer(serialized); + assert.deepStrictEqual(deserialized, { a: 1, b: 'hello' }); + }); + + test('Simple array', () => { + const serialized = serializer([1, 'hello']); + const deserialized = deserializer(serialized); + 
assert.deepStrictEqual(deserialized, [1, 'hello']); + }); + + test('Object with maps', () => { + const serialized = serializer({ + a: new Map<string, number>([ + ['hello', 1], + ['world', 2], + ]), + }); + const deserialized = deserializer(serialized); + assert.deepStrictEqual(deserialized, { + a: new Map<string, number>([ + ['hello', 1], + ['world', 2], + ]), + }); + }); + + test('Object with sets', () => { + const serialized = serializer({ a: new Set<string>(['hello', 'world']) }); + const deserialized = deserializer(serialized); + assert.deepStrictEqual(deserialized, { a: new Set<string>(['hello', 'world']) }); + }); + + test('Object with undefined', () => { + const serialized = serializer({ a: undefined }); + const deserialized = deserializer(serialized); + assert.deepStrictEqual(deserialized, {}); + }); + + test('Object with null', () => { + const serialized = serializer({ a: null }); + const deserialized = deserializer(serialized); + assert.deepStrictEqual(deserialized, { a: null }); + }); + + test('Object with URI', () => { + const serialized = serializer({ a: UriEx.file('hello') }); + const deserialized = deserializer(serialized); + assert.deepStrictEqual(deserialized, { a: UriEx.file('hello') }); + }); + + test('Object with URI array', () => { + const serialized = serializer({ a: [UriEx.file('hello'), UriEx.file('world')] }); + const deserialized = deserializer(serialized); + assert.deepStrictEqual(deserialized, { a: [UriEx.file('hello'), UriEx.file('world')] }); + }); + + test('cancellatoin', () => { + const cancelled = serializer(CancellationToken.Cancelled); + const none = serializer(CancellationToken.None); + + assert(CancellationToken.Cancelled === deserializer(cancelled)); + assert(CancellationToken.None === deserializer(none)); + }); +} + +describe('Serialization', () => { + serializationTests(); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/service.test.ts b/python-parser/packages/pyright-internal/src/tests/service.test.ts new 
file mode 100644 index 00000000..08b63602 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/service.test.ts @@ -0,0 +1,475 @@ +/* + * service.test.ts + * + * service tests. + */ + +import assert from 'assert'; + +import { CancellationToken } from 'vscode-jsonrpc'; +import { IPythonMode } from '../analyzer/sourceFile'; +import { CommandLineOptions } from '../common/commandLineOptions'; +import { combinePaths, getDirectoryPath, normalizeSlashes } from '../common/pathUtils'; +import { Uri } from '../common/uri/uri'; +import { UriEx } from '../common/uri/uriUtils'; +import { parseTestData } from './harness/fourslash/fourSlashParser'; +import { parseAndGetTestState, TestState } from './harness/fourslash/testState'; + +test('random library file changed', () => { + const state = parseAndGetTestState('', '/projectRoot').state; + + assert.strictEqual( + state.workspace.service.test_shouldHandleLibraryFileWatchChanges( + Uri.file('/site-packages/test.py', state.serviceProvider), + [Uri.file('/site-packages', state.serviceProvider)] + ), + true + ); +}); + +test('random library file starting with . 
changed', () => { + const state = parseAndGetTestState('', '/projectRoot').state; + + assert.strictEqual( + state.workspace.service.test_shouldHandleLibraryFileWatchChanges( + Uri.file('/site-packages/.test.py', state.serviceProvider), + [Uri.file('/site-packages', state.serviceProvider)] + ), + false + ); +}); + +test('random library file changed, nested search paths', () => { + const state = parseAndGetTestState('', '/projectRoot').state; + + assert.strictEqual( + state.workspace.service.test_shouldHandleLibraryFileWatchChanges( + Uri.file('/lib/.venv/site-packages/myFile.py', state.serviceProvider), + [Uri.file('/lib', state.serviceProvider), Uri.file('/lib/.venv/site-packages', state.serviceProvider)] + ), + true + ); +}); + +test('random library file changed, nested search paths, fs is not case sensitive', () => { + const code = ` +// global options +// @ignoreCase: true + `; + const state = parseAndGetTestState(code, '/projectRoot').state; + + assert.strictEqual( + state.workspace.service.test_shouldHandleLibraryFileWatchChanges( + Uri.file('/lib/.venv/site-packages/myFile.py', state.serviceProvider), + [Uri.file('/lib', state.serviceProvider), Uri.file('/LIB/.venv/site-packages', state.serviceProvider)] + ), + true + ); +}); + +test('random library file changed, nested search paths, fs is case sensitive', () => { + const code = ` +// global options +// @ignoreCase: false + `; + const state = parseAndGetTestState(code, '/projectRoot').state; + + assert.strictEqual( + state.workspace.service.test_shouldHandleLibraryFileWatchChanges( + Uri.file('/lib/.venv/site-packages/myFile.py', state.serviceProvider), + [Uri.file('/lib', state.serviceProvider), Uri.file('/LIB/.venv/site-packages', state.serviceProvider)] + ), + false + ); +}); + +test('random library file starting with . 
changed, fs is not case sensitive', () => { + const code = ` +// global options +// @ignoreCase: true + `; + const state = parseAndGetTestState(code, '/projectRoot').state; + + assert.strictEqual( + state.workspace.service.test_shouldHandleLibraryFileWatchChanges( + Uri.file('/lib/.test.py', state.serviceProvider), + [Uri.file('/LIB', state.serviceProvider), Uri.file('/lib/site-packages', state.serviceProvider)] + ), + false + ); +}); + +test('random library file starting with . changed, fs is case sensitive', () => { + const code = ` +// global options +// @ignoreCase: false + `; + const state = parseAndGetTestState(code, '/projectRoot').state; + + assert.strictEqual( + state.workspace.service.test_shouldHandleLibraryFileWatchChanges( + Uri.file('/lib/.test.py', state.serviceProvider), + [Uri.file('/LIB', state.serviceProvider), Uri.file('/lib/site-packages', state.serviceProvider)] + ), + true + ); +}); + +test('random library file under a folder starting with . changed', () => { + const state = parseAndGetTestState('', '/projectRoot').state; + + assert.strictEqual( + state.workspace.service.test_shouldHandleLibraryFileWatchChanges( + Uri.file('/site-packages/.testFolder/test.py', state.serviceProvider), + [Uri.file('/site-packages', state.serviceProvider)] + ), + false + ); +}); + +test('basic file change', () => { + const code = ` +// @filename: test.py +//// [|/*marker*/|] + `; + + testSourceFileWatchChange(code); +}); + +test('non python file', () => { + const code = ` +// @filename: test.pyc +//// [|/*marker*/|] + `; + + testSourceFileWatchChange(code, /* expected */ false); +}); + +test('temp file', () => { + const code = ` +// @filename: test.py.12345678901234567890123456789012.py +//// [|/*marker*/|] + `; + + testSourceFileWatchChange(code, /* expected */ false); +}); + +test('excluded file', () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "exclude": ["**/excluded.py"] +//// } + +// @filename: included.py +//// # empty + +// 
@filename: excluded.py +//// [|/*marker*/|] + `; + + testSourceFileWatchChange(code, /* expected */ false); +}); + +test('excluded but still part of program', () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "exclude": ["**/excluded.py"] +//// } + +// @filename: included.py +//// from . import excluded + +// @filename: excluded.py +//// [|/*marker*/|] + `; + + const state = parseAndGetTestState(code, '/projectRoot').state; + const marker = state.getMarkerByName('marker'); + + while (state.workspace.service.test_program.analyze()); + + assert.strictEqual( + state.workspace.service.test_shouldHandleSourceFileWatchChanges(marker.fileUri, /* isFile */ true), + true + ); +}); + +test('random folder changed', () => { + const code = ` +// @filename: notUsed.py +//// # empty + `; + + const state = parseAndGetTestState(code, '/projectRoot').state; + + assert.strictEqual( + state.workspace.service.test_shouldHandleSourceFileWatchChanges( + Uri.file('/randomFolder', state.serviceProvider), + /* isFile */ false + ), + false + ); +}); + +test('excluded folder changed', () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "exclude": ["**/.*"] +//// } + +// @filename: .excluded/notUsed.py +//// [|/*marker*/|] + `; + + testSourceFileWatchChange(code, /* expected */ false, /* isFile */ false); +}); + +test('file under excluded folder changed', () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "exclude": ["**/.*"] +//// } + +// @filename: included.py +//// # empty + +// @filename: .excluded/notUsed.py +//// [|/*marker*/|] + `; + + testSourceFileWatchChange(code, /* expected */ false); +}); + +test('folder under excluded folder changed', () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "exclude": ["**/.*"] +//// } + +// @filename: .excluded/nested/notUsed.py +//// [|/*marker*/|] + `; + + testSourceFileWatchChange(code, /* expected */ false, /* isFile */ false); +}); + +test('folder that 
contains no file has changed', () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "exclude": ["**/excluded.py"] +//// } + +// @filename: included.py +//// # empty + +// @filename: lib/excluded.py +//// [|/*marker*/|] + `; + + testSourceFileWatchChange(code, /* expected */ false, /* isFile */ false); +}); + +test('folder that contains a file has changed', () => { + const code = ` +// @filename: lib/included.py +//// [|/*marker*/|] + `; + + testSourceFileWatchChange(code, /* expected */ true, /* isFile */ false); +}); + +test('folder that contains no file but whose parent has __init__ has changed', () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "exclude": ["**/excluded.py"] +//// } + +// @filename: lib/__init__.py +//// # empty + +// @filename: lib/nested/excluded.py +//// [|/*marker*/|] + `; + + testSourceFileWatchChange(code, /* expected */ true, /* isFile */ false); +}); + +test('library file watching for extra path under workspace', () => { + const watchers = getRegisteredLibraryFileWatchers('/src', ['extraPath'], ['extraPath/**']); + assert(watchers.some((w) => w.paths.some((p) => p.equals(UriEx.file('/src/extraPath'))))); +}); + +test('user file watching as extra path under workspace', () => { + // Sometimes, this trick is used to make sub-modules to top-level modules. + const watchers = getRegisteredLibraryFileWatchers('/src', ['extraPath']); + + // This shouldn't be recognized as library file. + assert(!watchers.some((w) => w.paths.some((p) => p.equals(UriEx.file('/src/extraPath'))))); +}); + +test('library file watching another workspace root using extra path', () => { + // The extra path for a different workspace root will be initially added as a relative path, + // but when it reaches the service layer, it will be normalized to an absolute path. + // That's why it is used as an absolute path here. 
+ const watchers = getRegisteredLibraryFileWatchers('/root1', ['/root2']); + assert(watchers.some((w) => w.paths.some((p) => p.equals(UriEx.file('/root2'))))); +}); + +test('program containsSourceFileIn', () => { + const code = ` +// @ignoreCase: true + +// @filename: myLib/__init__.py +//// # empty + `; + + const state = parseAndGetTestState(code, '/projectRoot').state; + assert(state.workspace.service.test_program.containsSourceFileIn(state.activeFile.fileUri)); +}); + +test('service runEditMode', () => { + const code = ` +// @filename: open.py +//// /*open*/ + +// @filename: closed.py +//// /*closed*/ + `; + + const state = parseAndGetTestState(code, '/projectRoot').state; + const open = state.getMarkerByName('open'); + const closed = state.getMarkerByName('closed'); + const openUri = open.fileUri; + const closedUri = closed.fileUri; + + const newFileUri = Uri.file(combinePaths(getDirectoryPath(open.fileName), 'interimFile.py'), state.serviceProvider); + state.testFS.writeFileSync(newFileUri, '# empty', 'utf8'); + + const options = { + isTracked: true, + ipythonMode: IPythonMode.None, + chainedFileUri: newFileUri, + }; + + // try run edit mode + verifyRunEditMode('# first'); + + // try run again to make sure things are cleared up correctly + verifyRunEditMode('# second'); + + function verifyRunEditMode(value: string) { + state.workspace.service.runEditMode((p) => { + p.addInterimFile(newFileUri); + p.setFileOpened(openUri, 0, value, options); + p.setFileOpened(closedUri, 0, value, options); + + const interim = p.getSourceFileInfo(newFileUri); + assert(interim); + + const openFile = p.getSourceFileInfo(openUri); + assert(openFile); + assert(openFile.isOpenByClient); + assert.strictEqual(value, openFile.contents); + + const closedFile = p.getSourceFileInfo(closedUri); + assert(closedFile); + assert(closedFile.isOpenByClient); + assert.strictEqual(value, closedFile.contents); + }, CancellationToken.None); + + const interim = 
state.workspace.service.test_program.getSourceFileInfo(newFileUri); + assert(!interim); + + const openFile = state.workspace.service.test_program.getSourceFileInfo(openUri); + assert(openFile); + assert(openFile.isOpenByClient); + + assert.strictEqual('', openFile.contents?.trim()); + + const closedFile = state.workspace.service.test_program.getSourceFileInfo(closedUri); + assert(closedFile); + assert(!closedFile.isOpenByClient); + + const content = closedFile.contents ?? ''; + assert.strictEqual('', content.trim()); + } +}); + +test('file changes cause semantic update', () => { + const code = ` +// @filename: open.py +//// import closed +//// /*open*/ + +// @filename: closed.py +//// /*closed*/ + `; + + const state = parseAndGetTestState(code, '/projectRoot').state; + const open = state.getMarkerByName('open'); + const closed = state.getMarkerByName('closed'); + const openUri = open.fileUri; + const closedUri = closed.fileUri; + const openContents = state.testFS.readFileSync(openUri, 'utf-8'); + const options = { + isTracked: true, + ipythonMode: IPythonMode.None, + chainedFileUri: undefined, + }; + + // Setup the file watcher for the project + const cmdOptions = new CommandLineOptions(state.workspace.rootUri, false); + cmdOptions.languageServerSettings.watchForSourceChanges = true; + state.workspace.service.setOptions(cmdOptions); + + // Changing the closed file should update the semantic version of the open file as it is + // imported by it. + const p = state.workspace.service.test_program; + p.setFileOpened(openUri, 0, openContents, options); + // Do a parse so that imports are processed but not a full analysis as that would load + // the closed file into memory. 
+ p.getParseResults(openUri); + const openFile = p.getSourceFileInfo(openUri); + assert(openFile); + assert(openFile.isOpenByClient); + assert.strictEqual(openContents, openFile.contents); + assert.strictEqual(openFile.imports.length, 3); + const oldSemanticVersion = openFile.semanticVersion; + state.testFS.writeFileSync(closedUri, 'print("changed")'); + state.testFS.fireFileWatcherEvent(closedUri.toString(), 'change'); + assert.strictEqual(openFile.semanticVersion, oldSemanticVersion + 1); +}); + +function testSourceFileWatchChange(code: string, expected = true, isFile = true) { + const state = parseAndGetTestState(code, '/projectRoot').state; + const marker = state.getMarkerByName('marker'); + const path = isFile ? marker.fileName : getDirectoryPath(marker.fileName); + + assert.strictEqual( + state.workspace.service.test_shouldHandleSourceFileWatchChanges(Uri.file(path, state.serviceProvider), isFile), + expected + ); +} + +function getRegisteredLibraryFileWatchers(root: string, extraPaths: string[], excludes: string[] = []) { + root = normalizeSlashes(root); + + const data = parseTestData(root, '', ''); + const state = new TestState(root, data); + + const options = new CommandLineOptions(state.workspace.rootUri, false); + options.languageServerSettings.watchForLibraryChanges = true; + options.configSettings.extraPaths = extraPaths; + options.configSettings.excludeFileSpecs = excludes; + + state.workspace.service.setOptions(options); + + return state.testFS.fileWatchers; +} diff --git a/python-parser/packages/pyright-internal/src/tests/signatureHelp.test.ts b/python-parser/packages/pyright-internal/src/tests/signatureHelp.test.ts new file mode 100644 index 00000000..1d784321 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/signatureHelp.test.ts @@ -0,0 +1,97 @@ +/* + * signatureHelp.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for signature help. 
+ */ + +import assert from 'assert'; +import { CancellationToken, MarkupKind } from 'vscode-languageserver'; + +import { convertOffsetToPosition } from '../common/positionUtils'; +import { SignatureHelpProvider } from '../languageService/signatureHelpProvider'; +import { parseAndGetTestState } from './harness/fourslash/testState'; +import { PyrightDocStringService } from '../common/docStringService'; + +test('invalid position in format string segment', () => { + const code = ` +// @filename: test.py +//// f'{"(".capit[|/*marker*/|]alize()}' + `; + + checkSignatureHelp(code, false); +}); + +test('valid position in format string segment', () => { + const code = ` +// @filename: test.py +//// f'{"(".capitalize([|/*marker*/|])}' + `; + + checkSignatureHelp(code, true); +}); + +test('valid position in the second format string segment', () => { + const code = ` +// @filename: test.py +//// f'{print("hello")} {"(".capitalize([|/*marker*/|])}' + `; + + checkSignatureHelp(code, true); +}); + +test('invalid position in the second format string segment', () => { + const code = ` +// @filename: test.py +//// f'{print("hello")} {"(".capitalize [|/*marker*/|] ()}' + `; + + checkSignatureHelp(code, false); +}); + +test('nested call in format string segment', () => { + const code = ` +// @filename: test.py +//// def foo(): +//// pass +//// +//// f'{"(".capitalize(foo([|/*marker*/|]))}' + `; + + checkSignatureHelp(code, true); +}); + +test('within arguments in format string segment', () => { + const code = ` +// @filename: test.py +//// def foo(): +//// pass +//// +//// f'{"(".capitalize(fo[|/*marker*/|]o())}' + `; + + checkSignatureHelp(code, true); +}); + +function checkSignatureHelp(code: string, expects: boolean) { + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + + const parseResults = state.workspace.service.getParseResults(marker.fileUri)!; + const position = convertOffsetToPosition(marker.position, 
parseResults.tokenizerOutput.lines); + + const actual = new SignatureHelpProvider( + state.workspace.service.test_program, + marker.fileUri, + position, + MarkupKind.Markdown, + /*hasSignatureLabelOffsetCapability*/ true, + /*hasActiveParameterCapability*/ true, + /*context*/ undefined, + new PyrightDocStringService(), + CancellationToken.None + ).getSignatureHelp(); + + assert.strictEqual(!!actual, expects); +} diff --git a/python-parser/packages/pyright-internal/src/tests/sourceFile.test.ts b/python-parser/packages/pyright-internal/src/tests/sourceFile.test.ts new file mode 100644 index 00000000..44483928 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/sourceFile.test.ts @@ -0,0 +1,98 @@ +/* + * sourceFile.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for pyright sourceFile module. + */ +import * as assert from 'assert'; + +import { ImportResolver } from '../analyzer/importResolver'; +import { SourceFile } from '../analyzer/sourceFile'; +import { ConfigOptions, getBasicDiagnosticRuleSet, getOffDiagnosticRuleSet } from '../common/configOptions'; +import { FullAccessHost } from '../common/fullAccessHost'; +import { combinePaths } from '../common/pathUtils'; +import { RealTempFile, createFromRealFileSystem } from '../common/realFileSystem'; +import { createServiceProvider } from '../common/serviceProviderExtensions'; +import { Uri } from '../common/uri/uri'; +import { parseAndGetTestState } from './harness/fourslash/testState'; + +test('Empty', () => { + const filePath = combinePaths(process.cwd(), 'tests/samples/test_file1.py'); + const tempFile = new RealTempFile(); + const fs = createFromRealFileSystem(tempFile); + const serviceProvider = createServiceProvider(tempFile, fs); + const sourceFile = new SourceFile(serviceProvider, Uri.file(filePath, serviceProvider), () => '', false, false, { + isEditMode: false, + }); + const configOptions = new 
ConfigOptions(Uri.file(process.cwd(), serviceProvider)); + const sp = createServiceProvider(fs); + const importResolver = new ImportResolver(sp, configOptions, new FullAccessHost(sp)); + + sourceFile.parse(configOptions, importResolver); + serviceProvider.dispose(); +}); + +test('SourceFile setInitialDiagnosticRuleSet overrides default', () => { + const filePath = combinePaths(process.cwd(), 'tests/samples/test_file1.py'); + const tempFile = new RealTempFile(); + const fs = createFromRealFileSystem(tempFile); + const serviceProvider = createServiceProvider(tempFile, fs); + + // Verify basic defaults have reportPrivateImportUsage as 'error'. + const basicRuleSet = getBasicDiagnosticRuleSet(); + assert.strictEqual(basicRuleSet.reportPrivateImportUsage, 'error'); + + // Create a rule set with reportPrivateImportUsage set to 'none'. + const offRuleSet = getOffDiagnosticRuleSet(); + assert.strictEqual(offRuleSet.reportPrivateImportUsage, 'none'); + + const sourceFile = new SourceFile(serviceProvider, Uri.file(filePath, serviceProvider), () => '', false, false, { + isEditMode: false, + }); + + // Call setInitialDiagnosticRuleSet to apply config-level overrides. 
+ sourceFile.setInitialDiagnosticRuleSet(offRuleSet); + + assert.ok(sourceFile); + serviceProvider.dispose(); +}); + +test('Empty Open file', () => { + const code = ` +// @filename: test.py +//// [|/*marker*/# Content|] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + + assert.strictEqual( + state.workspace.service.test_program.getSourceFile(marker.fileUri)?.getFileContent(), + '# Content' + ); + + state.workspace.service.updateOpenFileContents(marker.fileUri, 1, ''); + assert.strictEqual(state.workspace.service.test_program.getSourceFile(marker.fileUri)?.getFileContent(), ''); +}); + +test('No unexpected user files', () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "pythonVersion": "3.14" +//// } + +// @filename: test.py +//// [|/*marker*/x: int = 1|] + `; + + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + while (state.workspace.service.test_program.analyze()); + + const userFiles = state.workspace.service.test_program.getUserFiles(); + assert.strictEqual(userFiles.length, 1); + assert.strictEqual(userFiles[0].uri.toString(), marker.fileUri.toString()); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/sourceMapperUtils.test.ts b/python-parser/packages/pyright-internal/src/tests/sourceMapperUtils.test.ts new file mode 100644 index 00000000..d90b9bae --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/sourceMapperUtils.test.ts @@ -0,0 +1,241 @@ +/* + * sourceFile.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for pyright sourceMapperUtils module. 
+ */ +import assert from 'assert'; +import { CancellationToken, CancellationTokenSource } from 'vscode-jsonrpc'; + +import { VariableDeclaration, isVariableDeclaration } from '../analyzer/declaration'; +import { buildImportTree as buildImportTreeImpl } from '../analyzer/sourceMapperUtils'; +import { TypeCategory } from '../analyzer/types'; +import { TextRange } from '../common/textRange'; +import { UriEx } from '../common/uri/uriUtils'; +import { ParseNodeType } from '../parser/parseNodes'; +import { getNodeAtMarker, parseAndGetTestState } from './harness/fourslash/testState'; + +function buildImportTree( + sourceFile: string, + targetFile: string, + importResolver: (f: string) => string[], + token: CancellationToken +): string[] { + return buildImportTreeImpl( + UriEx.file(sourceFile), + UriEx.file(targetFile), + (from) => { + const resolved = importResolver(from.getFilePath().slice(1)); + return resolved.map((f) => UriEx.file(f)); + }, + token + ).map((u) => u.getFilePath().slice(1)); +} + +describe('BuildImportTree', () => { + const tokenSource = new CancellationTokenSource(); + test('Simple', () => { + const results = buildImportTree( + 'A', + 'C', + (f) => { + switch (f) { + case 'C': + return ['B']; + case 'B': + return ['A']; + default: + break; + } + return []; + }, + tokenSource.token + ); + assert.deepEqual(results, ['C', 'B']); + }); + + test('Recursion', () => { + const results = buildImportTree( + 'A', + 'E', + (f) => { + switch (f) { + case 'E': + return ['D']; + case 'D': + return ['C', 'B']; + case 'C': + return ['D']; + case 'B': + return ['A']; + default: + break; + } + return []; + }, + tokenSource.token + ); + assert.deepEqual(results, ['E', 'D', 'B']); + }); + + test('Multiple Paths', () => { + const results = buildImportTree( + 'A', + 'G', + (f) => { + switch (f) { + case 'G': + return ['F', 'H', 'I']; + case 'F': + return ['D', 'E']; + case 'D': + return ['C', 'B']; + case 'C': + return ['E']; + case 'B': + return ['A']; + default: + break; + 
} + return []; + }, + tokenSource.token + ); + assert.deepEqual(results, ['G', 'F', 'D', 'B']); + }); + + test('No paths', () => { + const results = buildImportTree( + 'A', + 'G', + (f) => { + switch (f) { + case 'G': + return ['F', 'H', 'I']; + case 'F': + return ['D', 'E']; + case 'D': + return ['C', 'B']; + case 'C': + return ['E']; + default: + break; + } + return []; + }, + tokenSource.token + ); + assert.deepEqual(results, ['G']); + }); + + function genArray(start: number, end: number): string[] { + return Array(end - start) + .fill(0) + .map(() => String.fromCharCode(start++)); + } + + test('Too deep', () => { + const results = buildImportTree( + 'Z', + 'A', + (f) => { + const start = f.charCodeAt(0); + const end = 'Y'.charCodeAt(0); + return genArray(start, end); + }, + tokenSource.token + ); + assert.deepEqual(results, ['A']); + }); + + test('Canceled', () => { + const canceled = new CancellationTokenSource(); + canceled.cancel(); + const results = buildImportTree( + 'A', + 'E', + (f) => { + switch (f) { + case 'E': + return ['D']; + case 'D': + return ['C', 'B']; + case 'C': + return ['D']; + case 'B': + return ['A']; + default: + break; + } + return []; + }, + canceled.token + ); + assert.deepEqual(results, ['E']); + }); +}); + +test('find type alias decl', () => { + const code = ` +// @filename: test.py +//// from typing import Mapping +//// [|/*decl*/M|] = Mapping +//// +//// def foo(/*marker*/m: M): pass + `; + + assertTypeAlias(code); +}); + +test('find type alias decl from inferred type', () => { + const code = ` +// @filename: test.py +//// from typing import Mapping +//// [|/*decl*/M|] = Mapping +//// +//// def foo(m: M): +//// return m + +// @filename: test1.py +//// from test import foo +//// a = { "hello": 10 } +//// +//// /*marker*/b = foo(a) + `; + + assertTypeAlias(code); +}); + +function assertTypeAlias(code: string) { + const state = parseAndGetTestState(code).state; + + const node = getNodeAtMarker(state, 'marker'); + assert(node.nodeType 
=== ParseNodeType.Name); + + const type = state.program.evaluator!.getType(node); + assert(type?.category === TypeCategory.Class); + + assert.strictEqual(type.shared.name, 'Mapping'); + assert.strictEqual(type.props?.typeAliasInfo?.shared.name, 'M'); + assert.strictEqual(type.props?.typeAliasInfo.shared.moduleName, 'test'); + + const marker = state.getMarkerByName('marker'); + const markerUri = marker.fileUri; + const mapper = state.program.getSourceMapper( + markerUri, + CancellationToken.None, + /* mapCompiled */ false, + /* preferStubs */ true + ); + + const range = state.getRangeByMarkerName('decl')!; + const decls = mapper.findDeclarationsByType(markerUri, type, /* userTypeAlias */ true); + + const decl = decls.find((d) => isVariableDeclaration(d) && d.typeAliasName && d.typeAliasName.d.value === 'M') as + | VariableDeclaration + | undefined; + assert(decl); + + assert.deepEqual(TextRange.create(decl.node.start, decl.node.length), TextRange.fromBounds(range.pos, range.end)); +} diff --git a/python-parser/packages/pyright-internal/src/tests/stringUtils.test.ts b/python-parser/packages/pyright-internal/src/tests/stringUtils.test.ts new file mode 100644 index 00000000..0d2604e5 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/stringUtils.test.ts @@ -0,0 +1,47 @@ +/* + * stringUtils.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ */ + +import * as assert from 'assert'; + +import * as core from '../common/core'; +import * as utils from '../common/stringUtils'; + +test('stringUtils isPatternInSymbol', () => { + assert.equal(utils.isPatternInSymbol('', 'abcd'), true); + + assert.equal(utils.isPatternInSymbol('abcd', 'abcd'), true); + assert.equal(utils.isPatternInSymbol('abc', 'abcd'), true); + + assert.equal(utils.isPatternInSymbol('ABCD', 'abcd'), true); + assert.equal(utils.isPatternInSymbol('ABC', 'abcd'), true); + + assert.equal(utils.isPatternInSymbol('acbd', 'abcd'), false); + assert.equal(utils.isPatternInSymbol('abce', 'abcd'), false); + assert.equal(utils.isPatternInSymbol('abcde', 'abcd'), false); + assert.equal(utils.isPatternInSymbol('azcde', 'abcd'), false); + assert.equal(utils.isPatternInSymbol('acde', 'abcd'), false); + assert.equal(utils.isPatternInSymbol('zbcd', 'abcd'), false); +}); + +test('CoreCompareStringsCaseInsensitive1', () => { + assert.equal(utils.compareStringsCaseInsensitive('Hello', 'hello'), core.Comparison.EqualTo); +}); + +test('CoreCompareStringsCaseInsensitive2', () => { + assert.equal(utils.compareStringsCaseInsensitive('Hello', undefined), core.Comparison.GreaterThan); +}); + +test('CoreCompareStringsCaseInsensitive3', () => { + assert.equal(utils.compareStringsCaseInsensitive(undefined, 'hello'), core.Comparison.LessThan); +}); + +test('CoreCompareStringsCaseInsensitive4', () => { + assert.equal(utils.compareStringsCaseInsensitive(undefined, undefined), core.Comparison.EqualTo); +}); + +test('CoreCompareStringsCaseSensitive', () => { + assert.equal(utils.compareStringsCaseSensitive('Hello', 'hello'), core.Comparison.LessThan); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/symbolNameUtils.test.ts b/python-parser/packages/pyright-internal/src/tests/symbolNameUtils.test.ts new file mode 100644 index 00000000..adf47a41 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/symbolNameUtils.test.ts @@ -0,0 +1,77 @@ +/* + 
* symbolNameUtils.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + */ + +import * as assert from 'assert'; + +import * as snu from '../analyzer/symbolNameUtils'; + +test('symbolNameUtils isPrivateName', () => { + assert.strictEqual(snu.isPrivateName('__var'), true); + assert.strictEqual(snu.isPrivateName('__Var_1-2'), true); + + assert.strictEqual(snu.isPrivateName('var'), false); + assert.strictEqual(snu.isPrivateName('_var'), false); + assert.strictEqual(snu.isPrivateName('__var__'), false); +}); + +test('symbolNameUtils isProtectedName', () => { + assert.strictEqual(snu.isProtectedName('_var'), true); + assert.strictEqual(snu.isProtectedName('_Var_1-2'), true); + + assert.strictEqual(snu.isProtectedName('__var'), false); + assert.strictEqual(snu.isProtectedName('var'), false); +}); + +test('symbolNameUtils isPrivateOrProtectedName', () => { + assert.strictEqual(snu.isPrivateOrProtectedName('_var'), true); + assert.strictEqual(snu.isPrivateOrProtectedName('__VAR_1-2'), true); + + assert.strictEqual(snu.isPrivateOrProtectedName('var'), false); + assert.strictEqual(snu.isPrivateOrProtectedName('__init__'), false); +}); + +test('symbolNameUtils isDunderName', () => { + assert.strictEqual(snu.isDunderName('__init__'), true); + assert.strictEqual(snu.isDunderName('__CONSTANT__'), true); + + assert.strictEqual(snu.isDunderName('____'), false); + assert.strictEqual(snu.isDunderName('_init_'), false); + assert.strictEqual(snu.isDunderName('init'), false); +}); + +test('symbolNameUtils isConstantName', () => { + assert.strictEqual(snu.isConstantName('CONSTANT'), true); + assert.strictEqual(snu.isConstantName('CONSTANT_NAME'), true); + assert.strictEqual(snu.isConstantName('CONSTANT_42'), true); + assert.strictEqual(snu.isConstantName('_CONSTANT_42'), true); + assert.strictEqual(snu.isConstantName('__CONSTANT_42'), true); + + assert.strictEqual(snu.isConstantName('Constant'), false); + assert.strictEqual(snu.isConstantName('constant'), 
false); + assert.strictEqual(snu.isConstantName('____'), false); +}); + +test('symbolNameUtils isTypeAliasName', () => { + assert.strictEqual(snu.isTypeAliasName('TypeAlias'), true); + assert.strictEqual(snu.isTypeAliasName('Type_alias'), true); + assert.strictEqual(snu.isTypeAliasName('TypeAlias1'), true); + assert.strictEqual(snu.isTypeAliasName('_TypeAlias'), true); + assert.strictEqual(snu.isTypeAliasName('__TypeAlias'), true); + + assert.strictEqual(snu.isTypeAliasName('invalidTypeAlias'), false); + assert.strictEqual(snu.isTypeAliasName('1TypeAlias'), false); + assert.strictEqual(snu.isTypeAliasName('___TypeAlias'), false); +}); + +test('symbolNameUtils isPublicConstantOrTypeAliasName', () => { + assert.strictEqual(snu.isPublicConstantOrTypeAlias('CONSTANT'), true); + assert.strictEqual(snu.isPublicConstantOrTypeAlias('TypeAlias'), true); + + assert.strictEqual(snu.isPublicConstantOrTypeAlias('var'), false); + assert.strictEqual(snu.isPublicConstantOrTypeAlias('_CONSTANT'), false); + assert.strictEqual(snu.isPublicConstantOrTypeAlias('_TypeAlias'), false); + assert.strictEqual(snu.isPublicConstantOrTypeAlias('__TypeAlias'), false); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/testState.test.ts b/python-parser/packages/pyright-internal/src/tests/testState.test.ts new file mode 100644 index 00000000..9fc5d185 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/testState.test.ts @@ -0,0 +1,592 @@ +/* + * testState.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Tests and show how to use TestState in unit test + */ + +import assert from 'assert'; + +import { combinePaths, getFileName, normalizeSlashes } from '../common/pathUtils'; +import { compareStringsCaseSensitive } from '../common/stringUtils'; +import { Uri } from '../common/uri/uri'; +import { Range } from './harness/fourslash/fourSlashTypes'; +import { runFourSlashTestContent } from './harness/fourslash/runner'; +import { parseAndGetTestState } from './harness/fourslash/testState'; +import * as factory from './harness/vfs/factory'; + +test('Create', () => { + const code = ` +// @filename: file1.py +////class A: +//// pass + `; + + const { data, state } = parseAndGetTestState(code); + assert(state.activeFile === data.files[0]); +}); + +test('Multiple files', () => { + const code = ` +// @filename: file1.py +////class A: +//// pass + +// @filename: file2.py +////class B: +//// pass + +// @filename: file3.py +////class C: +//// pass + `; + + const state = parseAndGetTestState(code, factory.srcFolder).state; + + assert.equal(state.cwd(), normalizeSlashes('/')); + assert( + state.fs.existsSync( + Uri.file(normalizeSlashes(combinePaths(factory.srcFolder, 'file1.py')), state.serviceProvider) + ) + ); +}); + +test('Configuration', () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "include": [ +//// "src" +//// ], +//// +//// "exclude": [ +//// "**/node_modules", +//// "**/__pycache__", +//// "src/experimental", +//// "src/web/node_modules", +//// "src/typestubs" +//// ], +//// +//// "ignore": [ +//// "src/oldstuff" +//// ], +//// +//// "typingsPath": "src/typestubs", +//// "venvPath": "/home/foo/.venvs", +//// +//// "reportMissingImports": true, +//// "reportMissingTypeStubs": false, +//// +//// "pythonVersion": "3.6", +//// "pythonPlatform": "Linux", +//// +//// "executionEnvironments": [ +//// { +//// "root": "src/web", +//// "pythonVersion": "3.5", +//// "pythonPlatform": "Windows", +//// "extraPaths": [ +//// "src/service_libs" +//// ] 
+//// }, +//// { +//// "root": "src/sdk", +//// "pythonVersion": "3.0", +//// "extraPaths": [ +//// "src/backend" +//// ], +//// "venv": "venv_bar" +//// }, +//// { +//// "root": "src/tests", +//// "extraPaths": [ +//// "src/tests/e2e", +//// "src/sdk" +//// ] +//// }, +//// { +//// "root": "src" +//// } +//// ] +//// } + +// @filename: file1.py +////class A: +//// pass + `; + + const state = parseAndGetTestState(code, factory.srcFolder).state; + + assert.equal(state.cwd(), normalizeSlashes('/')); + assert( + state.fs.existsSync( + Uri.file(normalizeSlashes(combinePaths(factory.srcFolder, 'file1.py')), state.serviceProvider) + ) + ); + + assert.equal(state.configOptions.diagnosticRuleSet.reportMissingImports, 'error'); + assert.equal(state.configOptions.diagnosticRuleSet.reportMissingModuleSource, 'warning'); + assert.equal(state.configOptions.stubPath?.getFilePath(), normalizeSlashes('/src/typestubs')); +}); + +test('stubPath configuration', () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "stubPath": "src/typestubs" +//// } + `; + + const state = parseAndGetTestState(code).state; + assert.equal(state.configOptions.stubPath?.getFilePath(), normalizeSlashes('/src/typestubs')); +}); + +test('Duplicated stubPath configuration', () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "typingsPath": "src/typestubs1", +//// "stubPath": "src/typestubs2" +//// } + `; + + const state = parseAndGetTestState(code).state; + assert.equal(state.configOptions.stubPath?.getFilePath(), normalizeSlashes('/src/typestubs2')); +}); + +test('ProjectRoot', () => { + const code = ` +// global options +// @projectRoot: /root + +// @filename: /root/file1.py +////class A: +//// pass + `; + + const state = parseAndGetTestState(code).state; + + assert.equal(state.cwd(), normalizeSlashes('/root')); + assert(state.fs.existsSync(Uri.file(normalizeSlashes('/root/file1.py'), state.serviceProvider))); + + 
assert.equal(state.configOptions.projectRoot.getFilePath(), normalizeSlashes('/root')); +}); + +test('CustomTypeshedFolder', () => { + // use differnt physical folder as typeshed folder. this is different than + // typeshed folder settings in config json file since that points to a path + // in virtual file system. not physical one. this decides which physical folder + // those virtual folder will mount to. + const code = ` +// global options +// @typeshed: ${__dirname} + `; + + // mount the folder this file is in as typeshed folder and check whether + // in typeshed folder in virtual file system, this file exists. + const state = parseAndGetTestState(code).state; + assert(state.fs.existsSync(factory.typeshedFolder.combinePaths(getFileName(__filename)))); +}); + +test('IgnoreCase', () => { + const code = ` +// global options +// @ignoreCase: true + +// @filename: file1.py +////class A: +//// pass + `; + + const state = parseAndGetTestState(code, factory.srcFolder).state; + + assert( + state.fs.existsSync( + Uri.file(normalizeSlashes(combinePaths(factory.srcFolder, 'FILE1.py')), state.serviceProvider) + ) + ); +}); + +test('GoToMarker', () => { + const code = ` +////class A: +//// /*marker1*/pass + `; + + const { data, state } = parseAndGetTestState(code); + const marker = data.markerPositions.get('marker1'); + + state.goToMarker('marker1'); + assert.equal(state.lastKnownMarker, 'marker1'); + assert.equal(state.currentCaretPosition, marker!.position); + + state.goToMarker(marker); + assert.equal(state.lastKnownMarker, 'marker1'); + assert.equal(state.currentCaretPosition, marker!.position); + assert.equal(state.selectionEnd, -1); +}); + +test('GoToEachMarker', () => { + const code = ` +// @filename: file1.py +////class A: +//// /*marker1*/pass + +// @filename: file2.py +////class B: +//// /*marker2*/pass + `; + + const { data, state } = parseAndGetTestState(code); + const marker1 = data.markerPositions.get('marker1'); + const marker2 = 
data.markerPositions.get('marker2'); + + const results: number[] = []; + state.goToEachMarker([marker1!, marker2!], (m) => { + results.push(m.position); + }); + + assert.deepEqual(results, [marker1!.position, marker2!.position]); + + assert.equal(state.activeFile.fileName, marker2!.fileName); + assert.equal(state.currentCaretPosition, marker2!.position); + assert.equal(state.selectionEnd, -1); +}); + +test('Markers', () => { + const code = ` +// @filename: file1.py +////class A: +//// /*marker1*/pass + +// @filename: file2.py +////class B: +//// /*marker2*/pass + `; + + const { data, state } = parseAndGetTestState(code); + const marker1 = data.markerPositions.get('marker1'); + + assert.deepEqual(state.getMarkerName(marker1!), 'marker1'); + assert.deepEqual( + state + .getMarkers() + .map((m) => state.getMarkerName(m)) + .sort(compareStringsCaseSensitive), + state.getMarkerNames().sort(compareStringsCaseSensitive) + ); +}); + +test('GoToPosition', () => { + const code = ` +// @filename: file1.py +////class A: +//// /*marker1*/pass + `; + + const { data, state } = parseAndGetTestState(code); + const marker1 = data.markerPositions.get('marker1'); + state.goToPosition(marker1!.position); + + assert.equal(state.currentCaretPosition, marker1!.position); + assert.equal(state.selectionEnd, -1); +}); + +test('select', () => { + const code = ` +// @filename: file1.py +/////*start*/class A: +//// class B: +//// def Test(self): +//// pass +//// +//// def Test2(self): +//// pass/*end*/ + `; + + const { data, state } = parseAndGetTestState(code); + + state.select('start', 'end'); + + assert.equal(state.currentCaretPosition, data.markerPositions.get('start')!.position); + assert.equal(state.selectionEnd, data.markerPositions.get('end')!.position); +}); + +test('selectAllInFile', () => { + const code = ` +// @filename: file1.py +/////*start*/class A: +//// class B: +//// def Test(self): +//// pass +//// +//// def Test2(self): +//// pass/*end*/ + `; + + const { data, state } = 
parseAndGetTestState(code); + state.selectAllInFile(data.files[0].fileName); + + assert.equal(state.currentCaretPosition, data.markerPositions.get('start')!.position); + assert.equal(state.selectionEnd, data.markerPositions.get('end')!.position); +}); + +test('selectRange', () => { + const code = ` +// @filename: file1.py +/////class A: +//// class B: +//// [|def Test(self): +//// pass|] +//// +//// def Test2(self): +//// pass + `; + + const { data, state } = parseAndGetTestState(code); + const range = data.ranges[0]; + + state.selectRange(range); + + assert.equal(state.activeFile.fileName, range.fileName); + assert.equal(state.currentCaretPosition, range.pos); + assert.equal(state.selectionEnd, range.end); +}); + +test('selectLine', () => { + const code = ` +// @filename: file1.py +/////class A: +//// class B: +////[| def Test(self):|] +//// pass +//// +//// def Test2(self): +//// pass + `; + + const { data, state } = parseAndGetTestState(code); + const range = data.ranges[0]; + + state.selectLine(2); + + assert.equal(state.currentCaretPosition, range.pos); + assert.equal(state.selectionEnd, range.end); +}); + +test('goToEachRange', () => { + const code = ` +// @filename: file1.py +/////class A: +//// class B: +//// [|def Test(self):|] +//// pass +//// +//// def Test2(self): +//// [|pass|] + `; + + const { state } = parseAndGetTestState(code); + + const results: Range[] = []; + state.goToEachRange((r) => { + assert.equal(state.activeFile.fileName, r.fileName); + results.push(r); + }); + + assert.deepEqual(results, [state.getRanges()[0], state.getRanges()[1]]); +}); + +test('getRangesInFile', () => { + const code = ` +// @filename: file1.py +/////class A: +//// class B: +//// [|def Test(self):|] +//// pass + +// @filename: file2.py +//// def Test2(self): +//// [|pass|] + `; + + const { data, state } = parseAndGetTestState(code); + + assert.deepEqual( + state.getRangesInFile(data.files[0].fileName), + data.ranges.filter((r) => r.fileName === data.files[0].fileName) 
+ ); +}); + +test('rangesByText', () => { + const code = ` +// @filename: file1.py +/////class A: +//// class B: +//// [|def Test(self):|] +//// pass + +// @filename: file2.py +//// def Test2(self): +//// [|pass|] + `; + + const { data, state } = parseAndGetTestState(code); + const map = state.getRangesByText(); + + assert.deepEqual(map.get('def Test(self):'), [data.ranges[0]]); + assert.deepEqual(map.get('pass'), [data.ranges[1]]); +}); + +test('moveCaretRight', () => { + const code = ` +// @filename: file1.py +/////class A: +//// class B: +//// /*position*/def Test(self): +//// pass +//// +//// def Test2(self): +//// pass + `; + + const { data, state } = parseAndGetTestState(code); + const marker = data.markerPositions.get('position')!; + + state.goToBOF(); + assert.equal(state.currentCaretPosition, 0); + + state.goToEOF(); + assert.equal(state.currentCaretPosition, data.files[0].content.length); + + state.goToPosition(marker.position); + state.moveCaretRight('def'.length); + + assert.equal(state.currentCaretPosition, marker.position + 'def'.length); + assert.equal(state.selectionEnd, -1); +}); + +test('runFourSlashTestContent', () => { + const code = ` +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: file1.py +//// class A: +//// class B: +//// /*position*/def Test(self): +//// pass +//// +//// def Test2(self): +//// pass + +helper.getMarkerByName("position"); + `; + + runFourSlashTestContent(normalizeSlashes('/'), 'unused.py', code); +}); + +test('VerifyDiagnosticsTest1', () => { + const code = ` +/// <reference path="typings/fourslash.d.ts" /> + +// @filename: dataclass1.py +//// # This sample validates the Python 3.7 data class feature. 
+//// +//// from typing import NamedTuple, Optional +//// +//// class Other: +//// pass +//// +//// class DataTuple(NamedTuple): +//// def _m(self): +//// pass +//// id: int +//// aid: Other +//// valll: str = '' +//// name: Optional[str] = None +//// +//// d1 = DataTuple(id=1, aid=Other()) +//// d2 = DataTuple(id=1, aid=Other(), valll='v') +//// d3 = DataTuple(id=1, aid=Other(), name='hello') +//// d4 = DataTuple(id=1, aid=Other(), name=None) +//// id = d1.id +//// +//// # This should generate an error because the name argument +//// # is the incorrect type. +//// d5 = DataTuple(id=1, aid=Other(), name=[|{|"category": "error"|}3|]) +//// +//// # This should generate an error because aid is a required +//// # parameter and is missing an argument here. +//// d6 = [|{|"category": "error"|}DataTuple(id=1, name=None|]) + +helper.verifyDiagnostics(); + `; + + runFourSlashTestContent(factory.srcFolder, 'unused.py', code); +}); + +test('VerifyDiagnosticsTest2', () => { + const code = ` + + +//// # This sample tests the handling of the @dataclass decorator. +//// +//// from dataclasses import dataclass, InitVar +//// +//// @dataclass +//// class Bar(): +//// bbb: int +//// ccc: str +//// aaa = 'string' +//// +//// bar1 = Bar(bbb=5, ccc='hello') +//// bar2 = Bar(5, 'hello') +//// bar3 = Bar(5, 'hello', 'hello2') +//// print(bar3.bbb) +//// print(bar3.ccc) +//// print(bar3.aaa) +//// +//// # This should generate an error because ddd +//// # isn't a declared value. +//// bar = Bar(bbb=5, [|/*marker1*/ddd|]=5, ccc='hello') +//// +//// # This should generate an error because the +//// # parameter types don't match. +//// bar = Bar([|/*marker2*/'hello'|], 'goodbye') +//// +//// # This should generate an error because a parameter +//// # is missing. +//// bar = [|/*marker3*/Bar(2)|] +//// +//// # This should generate an error because there are +//// # too many parameters. 
+//// bar = Bar(2, 'hello', 'hello', [|/*marker4*/4|]) +//// +//// +//// @dataclass +//// class Baz1(): +//// bbb: int +//// aaa = 'string' +//// +//// # This should generate an error because variables +//// # with no default cannot come after those with +//// # defaults. +//// [|/*marker5*/ccc|]: str +//// +//// @dataclass +//// class Baz2(): +//// aaa: str +//// ddd: InitVar[int] = 3 + +helper.verifyDiagnostics({ + "marker1": { category: "error", message: "No parameter named 'ddd'" }, + "marker2": { category: "error", message: "Argument of type 'Literal['hello']' cannot be assigned to parameter 'bbb' of type 'int'\\n 'str' is incompatible with 'int'" }, + "marker3": { category: "error", message: "Argument missing for parameter 'ccc'" }, + "marker4": { category: "error", message: "Expected 3 positional arguments" }, + "marker5": { category: "error", message: "Data fields without default value cannot appear after data fields with default values" }, +}); + `; + + runFourSlashTestContent(factory.srcFolder, 'unused.py', code); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/testStateUtils.ts b/python-parser/packages/pyright-internal/src/tests/testStateUtils.ts new file mode 100644 index 00000000..037ad309 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/testStateUtils.ts @@ -0,0 +1,193 @@ +/* + * testStateUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Test helpers for TestState + */ + +import assert from 'assert'; + +import { CancellationToken } from 'vscode-languageserver'; +import { findNodeByOffset } from '../analyzer/parseTreeUtils'; +import { Program } from '../analyzer/program'; +import { createMapFromItems } from '../common/collectionUtils'; +import { ConfigOptions } from '../common/configOptions'; +import { isArray } from '../common/core'; +import { assertNever } from '../common/debug'; +import { FileEditAction, FileEditActions } from '../common/editAction'; +import { TextRange, rangesAreEqual } from '../common/textRange'; +import { Uri } from '../common/uri/uri'; +import { isFile } from '../common/uri/uriUtils'; +import { applyTextEditsToString } from '../common/workspaceEditUtils'; +import { DocumentSymbolCollector } from '../languageService/documentSymbolCollector'; +import { NameNode } from '../parser/parseNodes'; +import { Range } from './harness/fourslash/fourSlashTypes'; +import { TestState } from './harness/fourslash/testState'; + +export function convertFileEditActionToString(edit: FileEditAction): string { + return `'${edit.replacementText.replace(/\n/g, '!n!')}'@'${edit.fileUri}:(${edit.range.start.line},${ + edit.range.start.character + })-(${edit.range.end.line},${edit.range.end.character})'`; +} + +export function convertRangeToFileEditAction(state: TestState, range: Range, replacementText?: string): FileEditAction { + const data = range.marker?.data as { r: string } | undefined; + return { + fileUri: range.fileUri, + replacementText: (replacementText ?? data?.r ?? 
'N/A').replace(/!n!/g, '\n'), + range: state.convertPositionRange(range), + }; +} + +export function verifyEdits( + state: TestState, + fileEditActions: FileEditActions, + ranges: Range[], + replacementText: string | undefined +) { + for (const edit of fileEditActions.edits) { + const expected: FileEditAction[] = ranges.map((r) => convertRangeToFileEditAction(state, r, replacementText)); + assert( + expected.some((a) => { + return ( + a.fileUri.equals(edit.fileUri) && + rangesAreEqual(a.range, edit.range) && + a.replacementText === edit.replacementText + ); + }), + `can't find ${convertFileEditActionToString(edit)} in ${expected + .map((a) => convertFileEditActionToString(a)) + .join('|')}` + ); + } +} + +export function applyFileEditActions(state: TestState, fileEditActions: FileEditActions) { + // Apply changes + // First, apply text changes + const editsPerFileMap = createMapFromItems(fileEditActions.edits, (e) => e.fileUri.key); + + for (const [editFileName, editsPerFile] of editsPerFileMap) { + const result = _applyEdits(state, editFileName, editsPerFile); + + const uri = Uri.file(editFileName, state.serviceProvider); + state.testFS.writeFileSync(uri, result.text, 'utf8'); + + // Mimic file change notification. in future, we should properly set up file change notification on test FS. + state.program.getSourceFileInfo(uri)?.sourceFile.markDirty(); + + // Update open file content if the file is in opened state. + if (result.version) { + let openedFilePath = editFileName; + const renamed = fileEditActions.fileOperations.find( + (o) => o.kind === 'rename' && o.oldFileUri.getFilePath() === editFileName + ); + if (renamed?.kind === 'rename') { + openedFilePath = renamed.newFileUri.getFilePath(); + state.program.setFileClosed(renamed.oldFileUri); + } + + state.program.setFileOpened( + Uri.file(openedFilePath, state.serviceProvider), + result.version + 1, + result.text + ); + } + } + + // Second, apply filename change to disk or rename directory. 
+ for (const fileOperation of fileEditActions.fileOperations) { + switch (fileOperation.kind) { + case 'create': { + state.testFS.mkdirpSync(fileOperation.fileUri.getDirectory().getFilePath()); + state.testFS.writeFileSync(fileOperation.fileUri, ''); + state.program.getSourceFileInfo(fileOperation.fileUri)?.sourceFile.markDirty(); + break; + } + case 'rename': { + if (isFile(state.testFS, fileOperation.oldFileUri)) { + state.testFS.mkdirpSync(fileOperation.newFileUri.getDirectory().getFilePath()); + state.testFS.renameSync( + fileOperation.oldFileUri.getFilePath(), + fileOperation.newFileUri.getFilePath() + ); + + // Add new file as tracked file + state.program.addTrackedFile(fileOperation.newFileUri); + } else { + state.testFS.renameSync( + fileOperation.oldFileUri.getFilePath(), + fileOperation.newFileUri.getFilePath() + ); + } + break; + } + case 'delete': { + state.testFS.rimrafSync(fileOperation.fileUri.getFilePath()); + break; + } + default: + assertNever(fileOperation); + } + } + + // And refresh program. 
+ state.importResolver.invalidateCache(); + state.program.markAllFilesDirty(true); +} + +function _applyEdits(state: TestState, filePath: string, edits: FileEditAction[]) { + const sourceFile = state.program.getBoundSourceFile(Uri.file(filePath, state.serviceProvider))!; + const parseResults = sourceFile.getParseResults()!; + + const current = applyTextEditsToString( + edits.filter((e) => e.fileUri.getFilePath() === filePath), + parseResults.tokenizerOutput.lines, + parseResults.text + ); + + return { version: sourceFile.getClientVersion(), text: current }; +} + +export function verifyReferencesAtPosition( + program: Program, + configOption: ConfigOptions, + symbolNames: string | string[], + fileName: string, + position: number, + ranges: Range[] +) { + const sourceFile = program.getBoundSourceFile(Uri.file(fileName, program.serviceProvider)); + assert(sourceFile); + + const node = findNodeByOffset(sourceFile.getParseResults()!.parserOutput.parseTree, position); + const decls = DocumentSymbolCollector.getDeclarationsForNode(program, node as NameNode, CancellationToken.None, { + resolveLocalNames: true, + }); + + const rangesByFile = createMapFromItems(ranges, (r) => r.fileName); + for (const rangeFileName of rangesByFile.keys()) { + const collector = new DocumentSymbolCollector( + program, + isArray(symbolNames) ? symbolNames : [symbolNames], + decls, + program + .getBoundSourceFile(Uri.file(rangeFileName, program.serviceProvider))! 
+ .getParseResults()!.parserOutput.parseTree, + CancellationToken.None, + { + treatModuleInImportAndFromImportSame: true, + skipUnreachableCode: false, + } + ); + + const results = collector.collect(); + const rangesOnFile = rangesByFile.get(rangeFileName)!; + assert.strictEqual(results.length, rangesOnFile.length, `${rangeFileName}@${symbolNames}`); + + for (const result of results) { + assert(rangesOnFile.some((r) => r.pos === result.range.start && r.end === TextRange.getEnd(result.range))); + } + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/testUtils.ts b/python-parser/packages/pyright-internal/src/tests/testUtils.ts new file mode 100644 index 00000000..56389016 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/testUtils.ts @@ -0,0 +1,225 @@ +/* + * testUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Utility functions that are common to a bunch of the tests. + */ + +import * as assert from 'assert'; +import * as fs from 'fs'; +import * as path from 'path'; + +import { ImportResolver } from '../analyzer/importResolver'; +import { Program } from '../analyzer/program'; +import { NameTypeWalker } from '../analyzer/testWalker'; +import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { ConfigOptions, ExecutionEnvironment, getStandardDiagnosticRuleSet } from '../common/configOptions'; +import { ConsoleWithLogLevel, NullConsole } from '../common/console'; +import { fail } from '../common/debug'; +import { Diagnostic, DiagnosticCategory } from '../common/diagnostic'; +import { DiagnosticSink } from '../common/diagnosticSink'; +import { FullAccessHost } from '../common/fullAccessHost'; +import { RealTempFile, createFromRealFileSystem } from '../common/realFileSystem'; +import { createServiceProvider } from '../common/serviceProviderExtensions'; +import { Uri } from '../common/uri/uri'; +import { UriEx } from '../common/uri/uriUtils'; +import { 
ParseFileResults, ParseOptions, Parser, ParserOutput } from '../parser/parser'; + +// This is a bit gross, but it's necessary to allow the fallback typeshed +// directory to be located when running within the jest environment. This +// assumes that the working directory has been set appropriately before +// running the tests. +(global as any).__rootDirectory = path.resolve(); + +export interface FileAnalysisResult { + fileUri: Uri; + parseResults?: ParseFileResults | undefined; + errors: Diagnostic[]; + warnings: Diagnostic[]; + infos: Diagnostic[]; + unusedCodes: Diagnostic[]; + unreachableCodes: Diagnostic[]; + deprecateds: Diagnostic[]; +} + +export function resolveSampleFilePath(fileName: string): string { + return path.resolve(path.dirname(module.filename), `./samples/${fileName}`); +} + +export function readSampleFile(fileName: string): string { + const filePath = resolveSampleFilePath(fileName); + + try { + return fs.readFileSync(filePath, { encoding: 'utf8' }); + } catch { + console.error(`Could not read file "${fileName}"`); + return ''; + } +} + +export function parseText( + textToParse: string, + diagSink: DiagnosticSink, + parseOptions: ParseOptions = new ParseOptions() +): ParseFileResults { + const parser = new Parser(); + return parser.parseSourceFile(textToParse, parseOptions, diagSink); +} + +export function parseSampleFile( + fileName: string, + diagSink: DiagnosticSink, + execEnvironment = new ExecutionEnvironment( + 'python', + UriEx.file('.'), + getStandardDiagnosticRuleSet(), + /* defaultPythonVersion */ undefined, + /* defaultPythonPlatform */ undefined, + /* defaultExtraPaths */ undefined + ) +): ParseFileResults { + const text = readSampleFile(fileName); + const parseOptions = new ParseOptions(); + if (fileName.endsWith('pyi')) { + parseOptions.isStubFile = true; + } + parseOptions.pythonVersion = execEnvironment.pythonVersion; + return parseText(text, diagSink, parseOptions); +} + +export function typeAnalyzeSampleFiles( + fileNames: 
string[], + configOptions = new ConfigOptions(Uri.empty()), + console?: ConsoleWithLogLevel +): FileAnalysisResult[] { + // Always enable "test mode". + configOptions.internalTestMode = true; + + const tempFile = new RealTempFile(); + const fs = createFromRealFileSystem(tempFile); + const serviceProvider = createServiceProvider(fs, console || new NullConsole(), tempFile); + const importResolver = new ImportResolver(serviceProvider, configOptions, new FullAccessHost(serviceProvider)); + + const program = new Program(importResolver, configOptions, serviceProvider); + const fileUris = fileNames.map((name) => UriEx.file(resolveSampleFilePath(name))); + program.setTrackedFiles(fileUris); + + // Set a "pre-check callback" so we can evaluate the types of each NameNode + // prior to checking the full document. This will exercise the contextual + // evaluation logic. + program.setPreCheckCallback((parserOutput: ParserOutput, evaluator: TypeEvaluator) => { + const nameTypeWalker = new NameTypeWalker(evaluator); + nameTypeWalker.walk(parserOutput.parseTree); + }); + + const results = getAnalysisResults(program, fileUris, configOptions); + + program.dispose(); + serviceProvider.dispose(); + + return results; +} + +export function getAnalysisResults( + program: Program, + fileUris: Uri[], + configOptions = new ConfigOptions(Uri.empty()) +): FileAnalysisResult[] { + // Always enable "test mode". + configOptions.internalTestMode = true; + + while (program.analyze()) { + // Continue to call analyze until it completes. Since we're not + // specifying a timeout, it should complete the first time. 
+ } + + const sourceFiles = fileUris.map((filePath) => program.getSourceFile(filePath)); + return sourceFiles.map((sourceFile, index) => { + if (sourceFile) { + const diagnostics = sourceFile.getDiagnostics(configOptions) || []; + const analysisResult: FileAnalysisResult = { + fileUri: sourceFile.getUri(), + parseResults: sourceFile.getParseResults(), + errors: diagnostics.filter((diag) => diag.category === DiagnosticCategory.Error), + warnings: diagnostics.filter((diag) => diag.category === DiagnosticCategory.Warning), + infos: diagnostics.filter((diag) => diag.category === DiagnosticCategory.Information), + unusedCodes: diagnostics.filter((diag) => diag.category === DiagnosticCategory.UnusedCode), + unreachableCodes: diagnostics.filter((diag) => diag.category === DiagnosticCategory.UnreachableCode), + deprecateds: diagnostics.filter((diag) => diag.category === DiagnosticCategory.Deprecated), + }; + return analysisResult; + } else { + fail(`Source file not found for ${fileUris[index]}`); + + const analysisResult: FileAnalysisResult = { + fileUri: Uri.empty(), + parseResults: undefined, + errors: [], + warnings: [], + infos: [], + unusedCodes: [], + unreachableCodes: [], + deprecateds: [], + }; + return analysisResult; + } + }); +} + +export function validateResults( + results: FileAnalysisResult[], + errorCount: number, + warningCount = 0, + infoCount?: number, + unusedCode?: number, + unreachableCode?: number, + deprecated?: number +) { + assert.strictEqual(results.length, 1); + + if (results[0].errors.length !== errorCount) { + logDiagnostics(results[0].errors); + assert.fail(`Expected ${errorCount} errors, got ${results[0].errors.length}`); + } + + if (results[0].warnings.length !== warningCount) { + logDiagnostics(results[0].warnings); + assert.fail(`Expected ${warningCount} warnings, got ${results[0].warnings.length}`); + } + + if (infoCount !== undefined) { + if (results[0].infos.length !== infoCount) { + logDiagnostics(results[0].infos); + 
assert.fail(`Expected ${infoCount} infos, got ${results[0].infos.length}`); + } + } + + if (unusedCode !== undefined) { + if (results[0].unusedCodes.length !== unusedCode) { + logDiagnostics(results[0].unusedCodes); + assert.fail(`Expected ${unusedCode} unused, got ${results[0].unusedCodes.length}`); + } + } + + if (unreachableCode !== undefined) { + if (results[0].unreachableCodes.length !== unreachableCode) { + logDiagnostics(results[0].unreachableCodes); + assert.fail(`Expected ${unreachableCode} unreachable, got ${results[0].unreachableCodes.length}`); + } + } + + if (deprecated !== undefined) { + if (results[0].deprecateds.length !== deprecated) { + logDiagnostics(results[0].deprecateds); + assert.fail(`Expected ${deprecated} deprecated, got ${results[0].deprecateds.length}`); + } + } +} + +function logDiagnostics(diags: Diagnostic[]) { + for (const diag of diags) { + console.error(` [${diag.range.start.line + 1}:${diag.range.start.character + 1}] ${diag.message}`); + } +} diff --git a/python-parser/packages/pyright-internal/src/tests/textEditUtil.test.ts b/python-parser/packages/pyright-internal/src/tests/textEditUtil.test.ts new file mode 100644 index 00000000..9c5bdbf8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/textEditUtil.test.ts @@ -0,0 +1,173 @@ +/* + * textEditUtil.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ */ + +import assert from 'assert'; +import { CancellationToken } from 'vscode-jsonrpc'; + +import { findNodeByOffset } from '../analyzer/parseTreeUtils'; +import { FileEditAction } from '../common/editAction'; +import { TextEditTracker } from '../common/textEditTracker'; +import { Range } from './harness/fourslash/fourSlashTypes'; +import { parseAndGetTestState, TestState } from './harness/fourslash/testState'; +import { convertRangeToFileEditAction } from './testStateUtils'; + +test('simple add', () => { + const code = ` +//// import [|{|"r":"bar"|}foo|] + `; + + verifyEdits(code); +}); + +test('multiple edits', () => { + const code = ` +//// import [|{|"r":"bar"|}foo|][|{|"r":"!n!import os"|}|] + `; + + verifyEdits(code); +}); + +test('delete and add', () => { + const code = ` +//// [|{|"r":""|}import foo|][|{|"r":"import os"|}|] + `; + + verifyEdits(code); +}); + +test('overlapped delete', () => { + const code = ` +//// [|{|"e":""|}[|{|"r":""|}import [|{|"r":""|}foo|]|]|] + `; + + verifyEdits(code); +}); + +test('overlapped delete and add', () => { + const code = ` +//// [|{|"r":""|}import foo[|{|"r":"!n!import os"|}|] +//// |] + `; + + verifyEdits(code); +}); + +test('dup with same range', () => { + const code = ` +//// [|{|"e":"import os"|}[|{|"r":"import os"|}[|{|"r":"import os"|}import foo|]|]|] + `; + + verifyEdits(code); +}); + +test('delete and add with merge', () => { + const code = ` +//// [|{|"e":"import os"|}[|{|"r":""|}import foo|][|{|"r":"import os"|}|]|] + `; + + verifyEdits(code, false); +}); + +test('overlapped delete with merge', () => { + const code = ` +//// [|{|"e":""|}[|{|"r":""|}import [|{|"r":""|}foo|]|]|] + `; + + verifyEdits(code, false); +}); + +test('overlapped delete and add with merge', () => { + const code = ` +//// [|{|"e":"!n!import os"|}[|{|"r":""|}import foo[|{|"r":"!n!import os"|}|] +//// |]|] + `; + + verifyEdits(code, false); +}); + +test('dup with overlapped range', () => { + const code = ` +//// [|{|"e":"import 
os"|}[|{|"r":""|}import sys!n!|][|{|"r":"import os"|}[|{|"r":"import os"|}import foo|]|]|] + `; + + verifyEdits(code, false); +}); + +test('handle comments', () => { + const code = ` +//// from os import ( +//// abort[|{|"e":""|},|] # comment[|{|"e":""|} +//// [|{|"r":""|}access|]|] +//// ) + `; + + verifyRemoveNodes(code); +}); + +function verifyRemoveNodes(code: string) { + const state = parseAndGetTestState(code).state; + const tracker = new TextEditTracker(); + + const ranges = state.getRanges(); + const changeRanges = _getChangeRanges(ranges); + for (const range of changeRanges) { + const parseFileResults = state.program.getParseResults(range.fileUri)!; + const node = findNodeByOffset(parseFileResults.parserOutput.parseTree, range.pos)!; + tracker.removeNodes({ node, parseFileResults }); + } + + const edits = tracker.getEdits(CancellationToken.None); + + const editRanges = _getEditRanges(ranges); + assert.strictEqual(edits.length, editRanges.length); + assert( + _areEqual( + edits, + editRanges.map((r) => _createFileActionEdit(state, r)) + ) + ); +} + +function verifyEdits(code: string, mergeOnlyDuplications = true) { + const state = parseAndGetTestState(code).state; + const tracker = new TextEditTracker(mergeOnlyDuplications); + + const ranges = state.getRanges(); + const changeRanges = _getChangeRanges(ranges); + for (const range of changeRanges) { + const edit = convertRangeToFileEditAction(state, range); + tracker.addEdit(edit.fileUri, edit.range, edit.replacementText); + } + + const edits = tracker.getEdits(CancellationToken.None); + + const editRanges = _getEditRanges(ranges); + assert.strictEqual(edits.length, editRanges.length); + assert( + _areEqual( + edits, + editRanges.map((r) => _createFileActionEdit(state, r)) + ) + ); +} + +function _getChangeRanges(ranges: Range[]) { + return ranges.filter((r) => r.marker?.data && (r.marker.data as { r: string }).r !== undefined); +} + +function _getEditRanges(ranges: Range[]) { + const editRanges = 
ranges.filter((r) => r.marker?.data && (r.marker.data as { e: string }).e !== undefined); + return editRanges.length > 0 ? editRanges : _getChangeRanges(ranges); +} + +function _areEqual(a1: FileEditAction[], a2: FileEditAction[]) { + return a1.some((e1) => a2.some((e2) => FileEditAction.areEqual(e1, e2))); +} + +function _createFileActionEdit(state: TestState, range: Range): FileEditAction { + const replacementText = (range.marker!.data as { e: string }).e; + return convertRangeToFileEditAction(state, range, replacementText); +} diff --git a/python-parser/packages/pyright-internal/src/tests/textRange.test.ts b/python-parser/packages/pyright-internal/src/tests/textRange.test.ts new file mode 100644 index 00000000..97d31f52 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/textRange.test.ts @@ -0,0 +1,29 @@ +/* + * textRange.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + */ + +import * as assert from 'assert'; + +import { TextRange } from '../common/textRange'; + +test('textRange combine', () => { + const range1 = TextRange.create(10, 2); + const range2 = TextRange.create(12, 2); + const range3 = TextRange.create(8, 2); + + const combined = TextRange.combine([range1, range2, range3]); + + assert.ok(combined); + assert.equal(combined.start, 8); + assert.equal(combined.length, 6); + + // Ensure input ranges are unchanged + assert.equal(range1.start, 10); + assert.equal(range1.length, 2); + assert.equal(range2.start, 12); + assert.equal(range2.length, 2); + assert.equal(range3.start, 8); + assert.equal(range3.length, 2); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/tokenizer.test.ts b/python-parser/packages/pyright-internal/src/tests/tokenizer.test.ts new file mode 100644 index 00000000..d755a972 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/tokenizer.test.ts @@ -0,0 +1,1859 @@ +/* + * tokenizer.test.ts + * Copyright (c) Microsoft Corporation. 
+ * Licensed under the MIT license. + * Author: Eric Traut + * + * Based on code from vscode-python repository: + * https://github.com/Microsoft/vscode-python + * + * Unit tests for Python tokenizer. + */ + +import assert from 'assert'; + +import * as StringTokenUtils from '../parser/stringTokenUtils'; +import { Tokenizer } from '../parser/tokenizer'; +import { + CommentType, + DedentToken, + FStringEndToken, + FStringMiddleToken, + FStringStartToken, + IdentifierToken, + IndentToken, + NewLineToken, + NewLineType, + NumberToken, + OperatorToken, + OperatorType, + StringToken, + StringTokenFlags, + TokenType, +} from '../parser/tokenizerTypes'; +import * as TestUtils from './testUtils'; + +const _implicitTokenCount = 2; +const _implicitTokenCountNoImplicitNewLine = 1; + +test('Empty', () => { + const t = new Tokenizer(); + const results = t.tokenize(''); + assert.equal(results.tokens.count, 0 + _implicitTokenCount); + assert.equal(results.tokens.length, 0); + assert.equal(results.tokens.getItemAt(0).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(1).type, TokenType.EndOfStream); + + assert.equal(results.tokens.getItemAtPosition(-1), -1); + assert.equal(results.tokens.getItemAtPosition(2), -1); + + assert.throws(() => results.tokens.getItemAt(-1), Error); + assert.throws(() => results.tokens.getItemAt(10), Error); + + assert.equal(results.tokens.contains(-1), false); + assert.equal(results.tokens.contains(2), false); +}); + +test('NewLines', () => { + const t = new Tokenizer(); + const results = t.tokenize('\na\r\nb\r'); + assert.equal(results.tokens.count, 5 + _implicitTokenCountNoImplicitNewLine); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.NewLine); + assert.equal((results.tokens.getItemAt(0) as NewLineToken).newLineType, NewLineType.LineFeed); + assert.equal(results.tokens.getItemAt(2).type, TokenType.NewLine); + assert.equal((results.tokens.getItemAt(2) as NewLineToken).newLineType, NewLineType.CarriageReturnLineFeed); + 
assert.equal(results.tokens.getItemAt(4).type, TokenType.NewLine); + assert.equal((results.tokens.getItemAt(4) as NewLineToken).newLineType, NewLineType.CarriageReturn); + assert.equal(results.tokens.getItemAt(5).type, TokenType.EndOfStream); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(1), 1); + assert.equal(results.tokens.getItemAtPosition(2), 2); + assert.equal(results.tokens.getItemAtPosition(3), 2); + assert.equal(results.tokens.getItemAtPosition(4), 3); + assert.equal(results.tokens.getItemAtPosition(5), 4); + assert.equal(results.tokens.getItemAtPosition(6), 5); + + assert.equal(results.tokens.contains(5), true); + assert.equal(results.tokens.contains(6), false); +}); + +test('InvalidWithNewLine', () => { + const t = new Tokenizer(); + const results = t.tokenize('\\\\\r\n\\aaa \t\f\n'); + assert.equal(results.tokens.count, 4 + _implicitTokenCountNoImplicitNewLine); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Invalid); + assert.equal(results.tokens.getItemAt(0).length, 2); + assert.equal(results.tokens.getItemAt(1).type, TokenType.NewLine); + assert.equal((results.tokens.getItemAt(1) as NewLineToken).newLineType, NewLineType.CarriageReturnLineFeed); + assert.equal(results.tokens.getItemAt(2).type, TokenType.Invalid); + assert.equal(results.tokens.getItemAt(2).length, 4); + assert.equal(results.tokens.getItemAt(3).type, TokenType.NewLine); + assert.equal((results.tokens.getItemAt(3) as NewLineToken).newLineType, NewLineType.LineFeed); +}); + +test('InvalidIndent', () => { + const t = new Tokenizer(); + const results = t.tokenize('\tpass\n'); + assert.equal(results.tokens.count, 4 + _implicitTokenCountNoImplicitNewLine); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Indent); + assert.equal(results.tokens.getItemAt(1).type, TokenType.Keyword); +}); + +test('ParenNewLines', () => { + const t = new Tokenizer(); + const results = t.tokenize('\n(\n(\n)\n)\n)\n'); + 
assert.equal(results.tokens.count, 8 + _implicitTokenCountNoImplicitNewLine); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(1).type, TokenType.OpenParenthesis); + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenParenthesis); + assert.equal(results.tokens.getItemAt(3).type, TokenType.CloseParenthesis); + assert.equal(results.tokens.getItemAt(4).type, TokenType.CloseParenthesis); + assert.equal(results.tokens.getItemAt(5).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(6).type, TokenType.CloseParenthesis); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(1), 1); + assert.equal(results.tokens.getItemAtPosition(2), 1); + assert.equal(results.tokens.getItemAtPosition(3), 2); + assert.equal(results.tokens.getItemAtPosition(4), 2); + assert.equal(results.tokens.getItemAtPosition(5), 3); + assert.equal(results.tokens.getItemAtPosition(6), 3); + assert.equal(results.tokens.getItemAtPosition(7), 4); + assert.equal(results.tokens.getItemAtPosition(8), 5); + assert.equal(results.tokens.getItemAtPosition(9), 6); + assert.equal(results.tokens.getItemAtPosition(10), 7); + assert.equal(results.tokens.getItemAtPosition(11), 8); + + assert.equal(results.tokens.contains(10), true); + assert.equal(results.tokens.contains(11), false); +}); + +test('BraceNewLines', () => { + const t = new Tokenizer(); + const results = t.tokenize('\n{\n{\n}\n}\n}\n'); + assert.equal(results.tokens.count, 8 + _implicitTokenCountNoImplicitNewLine); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(1).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(3).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(4).type, TokenType.CloseCurlyBrace); + 
assert.equal(results.tokens.getItemAt(5).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(6).type, TokenType.CloseCurlyBrace); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(1), 1); + assert.equal(results.tokens.getItemAtPosition(2), 1); + assert.equal(results.tokens.getItemAtPosition(3), 2); + assert.equal(results.tokens.getItemAtPosition(4), 2); + assert.equal(results.tokens.getItemAtPosition(5), 3); + assert.equal(results.tokens.getItemAtPosition(6), 3); + assert.equal(results.tokens.getItemAtPosition(7), 4); + assert.equal(results.tokens.getItemAtPosition(8), 5); + assert.equal(results.tokens.getItemAtPosition(9), 6); + assert.equal(results.tokens.getItemAtPosition(10), 7); + assert.equal(results.tokens.getItemAtPosition(11), 8); + + assert.equal(results.tokens.contains(10), true); + assert.equal(results.tokens.contains(11), false); +}); + +test('BracketNewLines', () => { + const t = new Tokenizer(); + const results = t.tokenize('\n[\n[\n]\n]\n]\n'); + assert.equal(results.tokens.count, 8 + _implicitTokenCountNoImplicitNewLine); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(1).type, TokenType.OpenBracket); + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenBracket); + assert.equal(results.tokens.getItemAt(3).type, TokenType.CloseBracket); + assert.equal(results.tokens.getItemAt(4).type, TokenType.CloseBracket); + assert.equal(results.tokens.getItemAt(5).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(6).type, TokenType.CloseBracket); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(1), 1); + assert.equal(results.tokens.getItemAtPosition(2), 1); + assert.equal(results.tokens.getItemAtPosition(3), 2); + assert.equal(results.tokens.getItemAtPosition(4), 2); + assert.equal(results.tokens.getItemAtPosition(5), 3); + 
assert.equal(results.tokens.getItemAtPosition(6), 3); + assert.equal(results.tokens.getItemAtPosition(7), 4); + assert.equal(results.tokens.getItemAtPosition(8), 5); + assert.equal(results.tokens.getItemAtPosition(9), 6); + assert.equal(results.tokens.getItemAtPosition(10), 7); + assert.equal(results.tokens.getItemAtPosition(11), 8); + + assert.equal(results.tokens.contains(10), true); + assert.equal(results.tokens.contains(11), false); +}); + +test('NewLinesWithWhiteSpace', () => { + const t = new Tokenizer(); + const results = t.tokenize(' \na \r\nb \rc'); + assert.equal(results.tokens.count, 6 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(0).length, 1); + assert.equal((results.tokens.getItemAt(0) as NewLineToken).newLineType, NewLineType.LineFeed); + + assert.equal(results.tokens.getItemAt(2).type, TokenType.NewLine); + assert.equal((results.tokens.getItemAt(2) as NewLineToken).newLineType, NewLineType.CarriageReturnLineFeed); + assert.equal(results.tokens.getItemAt(2).length, 2); + + assert.equal(results.tokens.getItemAt(4).type, TokenType.NewLine); + assert.equal((results.tokens.getItemAt(4) as NewLineToken).newLineType, NewLineType.CarriageReturn); + assert.equal(results.tokens.getItemAt(4).length, 1); + + assert.equal(results.tokens.getItemAt(6).type, TokenType.NewLine); + assert.equal((results.tokens.getItemAt(6) as NewLineToken).newLineType, NewLineType.Implied); + assert.equal(results.tokens.getItemAt(6).length, 0); + + assert.equal(results.tokens.getItemAtPosition(0), -1); + assert.equal(results.tokens.getItemAtPosition(1), -1); + assert.equal(results.tokens.getItemAtPosition(2), 0); + assert.equal(results.tokens.getItemAtPosition(3), 1); + assert.equal(results.tokens.getItemAtPosition(6), 1); + assert.equal(results.tokens.getItemAtPosition(7), 2); + assert.equal(results.tokens.getItemAtPosition(8), 2); + assert.equal(results.tokens.getItemAtPosition(9), 3); + 
assert.equal(results.tokens.getItemAtPosition(11), 3); + assert.equal(results.tokens.getItemAtPosition(12), 4); + assert.equal(results.tokens.getItemAtPosition(13), 5); + assert.equal(results.tokens.getItemAtPosition(14), 7); + + assert.equal(results.tokens.contains(13), true); + assert.equal(results.tokens.contains(14), false); +}); + +test('NewLineEliding', () => { + const t = new Tokenizer(); + const results = t.tokenize('\n\r\n\r'); + assert.equal(results.tokens.count, 1 + _implicitTokenCountNoImplicitNewLine); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(0).length, 1); + assert.equal((results.tokens.getItemAt(0) as NewLineToken).newLineType, NewLineType.LineFeed); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(3), 0); + assert.equal(results.tokens.getItemAtPosition(4), 1); + + assert.equal(results.tokens.contains(3), true); + assert.equal(results.tokens.contains(4), false); +}); + +test('LineContinuation', () => { + const t = new Tokenizer(); + const results = t.tokenize('foo \\\na \\\r\nb \\\rc \\ \n # Comment \\\n'); + assert.equal(results.tokens.count, 6 + _implicitTokenCountNoImplicitNewLine); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(1).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(2).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(4).type, TokenType.Invalid); + assert.equal(results.tokens.getItemAt(5).type, TokenType.NewLine); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(6), 0); + assert.equal(results.tokens.getItemAtPosition(7), 1); + assert.equal(results.tokens.getItemAtPosition(13), 1); + assert.equal(results.tokens.getItemAtPosition(14), 2); + 
assert.equal(results.tokens.getItemAtPosition(18), 2); + assert.equal(results.tokens.getItemAtPosition(19), 3); + assert.equal(results.tokens.getItemAtPosition(21), 3); + assert.equal(results.tokens.getItemAtPosition(22), 4); + assert.equal(results.tokens.getItemAtPosition(23), 4); + assert.equal(results.tokens.getItemAtPosition(24), 5); + assert.equal(results.tokens.getItemAtPosition(37), 5); + assert.equal(results.tokens.getItemAtPosition(38), 6); + + assert.equal(results.tokens.contains(37), true); + assert.equal(results.tokens.contains(38), false); +}); + +test('Dots', () => { + const t = new Tokenizer(); + const results = t.tokenize('. .. ... ....'); + assert.equal(results.tokens.count, 6 + _implicitTokenCount); + assert.equal(results.tokens.getItemAt(0).type, TokenType.Dot); + assert.equal(results.tokens.getItemAt(1).type, TokenType.Dot); + assert.equal(results.tokens.getItemAt(2).type, TokenType.Dot); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Ellipsis); + assert.equal(results.tokens.getItemAt(4).type, TokenType.Ellipsis); + assert.equal(results.tokens.getItemAt(5).type, TokenType.Dot); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(1), 0); + + assert.equal(results.tokens.getItemAtPosition(2), 1); + assert.equal(results.tokens.getItemAtPosition(3), 2); + assert.equal(results.tokens.getItemAtPosition(4), 2); + + assert.equal(results.tokens.getItemAtPosition(5), 3); + assert.equal(results.tokens.getItemAtPosition(8), 3); + + assert.equal(results.tokens.getItemAtPosition(9), 4); + assert.equal(results.tokens.getItemAtPosition(11), 4); + + assert.equal(results.tokens.getItemAtPosition(12), 5); + assert.equal(results.tokens.getItemAtPosition(13), 7); + + assert.equal(results.tokens.contains(12), true); + assert.equal(results.tokens.contains(13), false); +}); + +test('PunctuationTokens', () => { + const t = new Tokenizer(); + const results = t.tokenize(':;,()[]{}->'); + 
assert.equal(results.tokens.count, 10 + _implicitTokenCount); + assert.equal(results.tokens.getItemAt(0).type, TokenType.Colon); + assert.equal(results.tokens.getItemAt(1).type, TokenType.Semicolon); + assert.equal(results.tokens.getItemAt(2).type, TokenType.Comma); + assert.equal(results.tokens.getItemAt(3).type, TokenType.OpenParenthesis); + assert.equal(results.tokens.getItemAt(4).type, TokenType.CloseParenthesis); + assert.equal(results.tokens.getItemAt(5).type, TokenType.OpenBracket); + assert.equal(results.tokens.getItemAt(6).type, TokenType.CloseBracket); + assert.equal(results.tokens.getItemAt(7).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(8).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(9).type, TokenType.Arrow); +}); + +test('IndentDedent', () => { + const t = new Tokenizer(); + const results = t.tokenize('test\n' + ' i1\n' + ' i2 # \n' + ' # \n' + ' \ti3\n' + '\ti4\n' + ' i1'); + assert.equal(results.tokens.count, 16 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(1).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(2).type, TokenType.Indent); + assert.equal((results.tokens.getItemAt(2) as IndentToken).indentAmount, 2); + assert.equal((results.tokens.getItemAt(2) as IndentToken).length, 2); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(4).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(5).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(6).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(7).type, TokenType.Indent); + assert.equal((results.tokens.getItemAt(7) as IndentToken).indentAmount, 8); + assert.equal((results.tokens.getItemAt(7) as IndentToken).length, 3); + assert.equal(results.tokens.getItemAt(8).type, TokenType.Identifier); + 
assert.equal(results.tokens.getItemAt(9).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(10).type, TokenType.Indent); + assert.equal((results.tokens.getItemAt(10) as IndentToken).isIndentAmbiguous, true); + assert.equal((results.tokens.getItemAt(10) as IndentToken).length, 1); + assert.equal(results.tokens.getItemAt(11).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(12).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(13).type, TokenType.Dedent); + assert.equal((results.tokens.getItemAt(13) as DedentToken).indentAmount, 2); + assert.equal((results.tokens.getItemAt(13) as DedentToken).matchesIndent, true); + assert.equal(results.tokens.getItemAt(14).type, TokenType.Dedent); + assert.equal((results.tokens.getItemAt(14) as DedentToken).indentAmount, 1); + assert.equal((results.tokens.getItemAt(14) as DedentToken).matchesIndent, false); + assert.equal(results.tokens.getItemAt(15).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(16).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(17).type, TokenType.EndOfStream); +}); + +test('IndentDedentParen', () => { + const t = new Tokenizer(); + const results = t.tokenize('test (\n i1\n )\n foo'); + assert.equal(results.tokens.count, 8 + _implicitTokenCount); + + // Test that indent and dedent tokens are suppressed within + // a parenthetical clause. 
+ assert.equal(results.tokens.getItemAt(0).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(1).type, TokenType.OpenParenthesis); + assert.equal(results.tokens.getItemAt(2).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(3).type, TokenType.CloseParenthesis); + assert.equal(results.tokens.getItemAt(4).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(5).type, TokenType.Indent); + assert.equal(results.tokens.getItemAt(6).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(7).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(8).type, TokenType.Dedent); + assert.equal(results.tokens.getItemAt(9).type, TokenType.EndOfStream); +}); + +test('Strings: simple', () => { + const t = new Tokenizer(); + const results = t.tokenize(' "a"'); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Indent); + const stringToken = results.tokens.getItemAt(1) as StringToken; + assert.equal(stringToken.type, TokenType.String); + assert.equal(stringToken.length, 3); + assert.equal(stringToken.escapedValue, 'a'); + assert.equal(stringToken.flags, StringTokenFlags.DoubleQuote); + assert.equal(results.tokens.getItemAt(2).type, TokenType.NewLine); +}); + +test('Strings: unclosed', () => { + const t = new Tokenizer(); + const results = t.tokenize(' "string" """line1\n#line2"""\t\'un#closed'); + assert.equal(results.tokens.count, 5 + _implicitTokenCount); + + const ranges = [ + [1, 8], + [10, 18], + [29, 10], + ]; + assert.equal(results.tokens.getItemAt(0).type, TokenType.Indent); + for (let i = 0; i < ranges.length; i++) { + assert.equal(results.tokens.getItemAt(i + 1).start, ranges[i][0]); + assert.equal(results.tokens.getItemAt(i + 1).length, ranges[i][1]); + assert.equal(results.tokens.getItemAt(i + 1).type, TokenType.String); + } + assert.equal(results.tokens.getItemAt(5).type, TokenType.Dedent); +}); + +test('Strings: escaped across 
multiple lines', () => { + const t = new Tokenizer(); + const results = t.tokenize(' "a\\\nb" \'c\\\r\nb\''); + assert.equal(results.tokens.count, 4 + _implicitTokenCount); + + const ranges = [ + [1, 6], + [8, 7], + ]; + assert.equal(results.tokens.getItemAt(0).type, TokenType.Indent); + for (let i = 0; i < ranges.length; i++) { + assert.equal(results.tokens.getItemAt(i + 1).start, ranges[i][0]); + assert.equal(results.tokens.getItemAt(i + 1).length, ranges[i][1]); + assert.equal(results.tokens.getItemAt(i + 1).type, TokenType.String); + } + assert.equal(results.tokens.getItemAt(5).type, TokenType.EndOfStream); +}); + +test('Strings: block next to regular, double-quoted', () => { + const t = new Tokenizer(); + const results = t.tokenize('"string""""s2"""'); + assert.equal(results.tokens.count, 2 + _implicitTokenCount); + + const ranges = [ + [0, 8], + [8, 8], + ]; + for (let i = 0; i < ranges.length; i++) { + assert.equal(results.tokens.getItemAt(i).start, ranges[i][0]); + assert.equal(results.tokens.getItemAt(i).length, ranges[i][1]); + assert.equal(results.tokens.getItemAt(i).type, TokenType.String); + } +}); + +test('Strings: block next to block, double-quoted', () => { + const t = new Tokenizer(); + const results = t.tokenize('""""""""'); + assert.equal(results.tokens.count, 2 + _implicitTokenCount); + + const ranges = [ + [0, 6], + [6, 2], + ]; + for (let i = 0; i < ranges.length; i++) { + assert.equal(results.tokens.getItemAt(i).start, ranges[i][0]); + assert.equal(results.tokens.getItemAt(i).length, ranges[i][1]); + assert.equal(results.tokens.getItemAt(i).type, TokenType.String); + } +}); + +test('Strings: unclosed sequence of quotes', () => { + const t = new Tokenizer(); + const results = t.tokenize('"""""'); + assert.equal(results.tokens.count, 1 + _implicitTokenCount); + + const ranges = [[0, 5]]; + for (let i = 0; i < ranges.length; i++) { + assert.equal(results.tokens.getItemAt(i).start, ranges[i][0]); + assert.equal(results.tokens.getItemAt(i).length, 
ranges[i][1]); + assert.equal(results.tokens.getItemAt(i).type, TokenType.String); + } +}); + +test('Strings: single quote escape', () => { + const t = new Tokenizer(); + const results = t.tokenize("'\\'quoted\\''"); + assert.equal(results.tokens.count, 1 + _implicitTokenCount); + + const stringToken = results.tokens.getItemAt(0) as StringToken; + assert.equal(stringToken.type, TokenType.String); + assert.equal(stringToken.flags, StringTokenFlags.SingleQuote); + assert.equal(stringToken.length, 12); + assert.equal(stringToken.prefixLength, 0); + assert.equal(stringToken.escapedValue, "\\'quoted\\'"); +}); + +test('Strings: double quote escape', () => { + const t = new Tokenizer(); + const results = t.tokenize('"\\"quoted\\""'); + assert.equal(results.tokens.count, 1 + _implicitTokenCount); + + const stringToken = results.tokens.getItemAt(0) as StringToken; + assert.equal(stringToken.type, TokenType.String); + assert.equal(stringToken.flags, StringTokenFlags.DoubleQuote); + assert.equal(stringToken.length, 12); + assert.equal(stringToken.escapedValue, '\\"quoted\\"'); +}); + +test('Strings: triplicate double quote escape', () => { + const t = new Tokenizer(); + const results = t.tokenize('"""\\"quoted\\""""'); + assert.equal(results.tokens.count, 1 + _implicitTokenCount); + + const stringToken = results.tokens.getItemAt(0) as StringToken; + assert.equal(stringToken.type, TokenType.String); + assert.equal(stringToken.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.Triplicate); + assert.equal(stringToken.length, 16); + assert.equal(stringToken.escapedValue, '\\"quoted\\"'); +}); + +test('Strings: single quoted f-string', () => { + const t = new Tokenizer(); + const results = t.tokenize("a+f'quoted'"); + assert.equal(results.tokens.count, 5 + _implicitTokenCount); + assert.equal(results.tokens.getItemAt(0).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(1).type, TokenType.Operator); + + const fStringStartToken = results.tokens.getItemAt(2) 
as FStringStartToken; + assert.equal(fStringStartToken.type, TokenType.FStringStart); + assert.equal(fStringStartToken.flags, StringTokenFlags.SingleQuote | StringTokenFlags.Format); + assert.equal(fStringStartToken.length, 2); + + const fStringMiddleToken = results.tokens.getItemAt(3) as FStringMiddleToken; + assert.equal(fStringMiddleToken.type, TokenType.FStringMiddle); + assert.equal(fStringMiddleToken.flags, StringTokenFlags.SingleQuote | StringTokenFlags.Format); + assert.equal(fStringMiddleToken.length, 6); + assert.equal(fStringMiddleToken.escapedValue, 'quoted'); + + const fStringEndToken = results.tokens.getItemAt(4) as FStringEndToken; + assert.equal(fStringEndToken.type, TokenType.FStringEnd); + assert.equal(fStringEndToken.flags, StringTokenFlags.SingleQuote | StringTokenFlags.Format); + assert.equal(fStringEndToken.length, 1); +}); + +test('Strings: double quoted f-string', () => { + const t = new Tokenizer(); + const results = t.tokenize('x(1,f"quoted")'); + assert.equal(results.tokens.count, 8 + _implicitTokenCount); + assert.equal(results.tokens.getItemAt(0).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(1).type, TokenType.OpenParenthesis); + assert.equal(results.tokens.getItemAt(2).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Comma); + assert.equal(results.tokens.getItemAt(7).type, TokenType.CloseParenthesis); + + const fStringStartToken = results.tokens.getItemAt(4) as FStringStartToken; + assert.equal(fStringStartToken.type, TokenType.FStringStart); + assert.equal(fStringStartToken.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.Format); + assert.equal(fStringStartToken.length, 2); + + const fStringMiddleToken = results.tokens.getItemAt(5) as FStringMiddleToken; + assert.equal(fStringMiddleToken.type, TokenType.FStringMiddle); + assert.equal(fStringMiddleToken.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.Format); + assert.equal(fStringMiddleToken.length, 6); + 
assert.equal(fStringMiddleToken.escapedValue, 'quoted'); + + const fStringEndToken = results.tokens.getItemAt(6) as FStringEndToken; + assert.equal(fStringEndToken.type, TokenType.FStringEnd); + assert.equal(fStringEndToken.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.Format); + assert.equal(fStringEndToken.length, 1); +}); + +test('Strings: single quoted multiline f-string', () => { + const t = new Tokenizer(); + const results = t.tokenize("f'''quoted'''"); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); + + const fStringStartToken = results.tokens.getItemAt(0) as FStringStartToken; + assert.equal(fStringStartToken.type, TokenType.FStringStart); + assert.equal( + fStringStartToken.flags, + StringTokenFlags.SingleQuote | StringTokenFlags.Triplicate | StringTokenFlags.Format + ); + assert.equal(fStringStartToken.length, 4); + + const fStringMiddleToken = results.tokens.getItemAt(1) as FStringMiddleToken; + assert.equal(fStringMiddleToken.type, TokenType.FStringMiddle); + assert.equal( + fStringMiddleToken.flags, + StringTokenFlags.SingleQuote | StringTokenFlags.Triplicate | StringTokenFlags.Format + ); + assert.equal(fStringMiddleToken.length, 6); + assert.equal(fStringMiddleToken.escapedValue, 'quoted'); + + const fStringEndToken = results.tokens.getItemAt(2) as FStringEndToken; + assert.equal(fStringEndToken.type, TokenType.FStringEnd); + assert.equal( + fStringEndToken.flags, + StringTokenFlags.SingleQuote | StringTokenFlags.Triplicate | StringTokenFlags.Format + ); + assert.equal(fStringEndToken.length, 3); +}); + +test('Strings: double quoted multiline f-string', () => { + const t = new Tokenizer(); + const results = t.tokenize('f"""quoted """'); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); + + const fStringStartToken = results.tokens.getItemAt(0) as FStringStartToken; + assert.equal(fStringStartToken.type, TokenType.FStringStart); + assert.equal( + fStringStartToken.flags, + StringTokenFlags.DoubleQuote | 
StringTokenFlags.Triplicate | StringTokenFlags.Format + ); + assert.equal(fStringStartToken.length, 4); + + const fStringMiddleToken = results.tokens.getItemAt(1) as FStringMiddleToken; + assert.equal(fStringMiddleToken.type, TokenType.FStringMiddle); + assert.equal( + fStringMiddleToken.flags, + StringTokenFlags.DoubleQuote | StringTokenFlags.Triplicate | StringTokenFlags.Format + ); + assert.equal(fStringMiddleToken.length, 7); + assert.equal(fStringMiddleToken.escapedValue, 'quoted '); + + const fStringEndToken = results.tokens.getItemAt(2) as FStringEndToken; + assert.equal(fStringEndToken.type, TokenType.FStringEnd); + assert.equal( + fStringEndToken.flags, + StringTokenFlags.DoubleQuote | StringTokenFlags.Triplicate | StringTokenFlags.Format + ); + assert.equal(fStringEndToken.length, 3); +}); + +test('Strings: f-string with single right brace', () => { + const t = new Tokenizer(); + const results = t.tokenize("f'hello}'"); + assert.equal(results.tokens.count, 4 + _implicitTokenCount); + + const fStringStartToken = results.tokens.getItemAt(0) as FStringStartToken; + assert.equal(fStringStartToken.type, TokenType.FStringStart); + assert.equal(fStringStartToken.length, 2); + assert.equal(fStringStartToken.flags, StringTokenFlags.SingleQuote | StringTokenFlags.Format); + + const fStringMiddleToken = results.tokens.getItemAt(1) as FStringMiddleToken; + assert.equal(fStringMiddleToken.type, TokenType.FStringMiddle); + assert.equal(fStringMiddleToken.length, 5); + assert.equal( + fStringMiddleToken.flags, + StringTokenFlags.SingleQuote | StringTokenFlags.Format | StringTokenFlags.ReplacementFieldEnd + ); + + const braceToken = results.tokens.getItemAt(2).type; + assert.equal(braceToken, TokenType.CloseCurlyBrace); + + const fStringEndToken = results.tokens.getItemAt(3) as FStringEndToken; + assert.equal(fStringEndToken.type, TokenType.FStringEnd); + assert.equal(fStringEndToken.flags, StringTokenFlags.SingleQuote | StringTokenFlags.Format); + 
assert.equal(fStringEndToken.length, 1); +}); + +test('Strings: f-string with backslash escape', () => { + const t = new Tokenizer(); + const results = t.tokenize(`f'\\\\'`); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + const fStringMiddleToken = results.tokens.getItemAt(1) as FStringMiddleToken; + assert.equal(fStringMiddleToken.type, TokenType.FStringMiddle); + assert.equal(fStringMiddleToken.length, 2); + assert.equal(results.tokens.getItemAt(2).type, TokenType.FStringEnd); +}); + +test('Strings: f-string with new line escape', () => { + const t = new Tokenizer(); + const results = t.tokenize(`f'x \\\ny'`); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(1).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(2).type, TokenType.FStringEnd); +}); + +test('Strings: f-string with escape in expression', () => { + const t = new Tokenizer(); + const results = t.tokenize(`f'hello { "\\t" }'`); + assert.equal(results.tokens.count, 6 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(1).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(3).type, TokenType.String); + assert.equal(results.tokens.getItemAt(4).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(5).type, TokenType.FStringEnd); +}); + +test('Strings: f-string with escape in format string 1', () => { + const t = new Tokenizer(); + const results = t.tokenize("f'he\\{ 1 }lo'"); + assert.equal(results.tokens.count, 7 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + + const middleFString = results.tokens.getItemAt(1) 
as FStringMiddleToken; + assert.equal(middleFString.type, TokenType.FStringMiddle); + assert.equal(middleFString.escapedValue.length, 3); + + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(4).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(5).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(6).type, TokenType.FStringEnd); +}); + +test('Strings: f-string with escape in format string 2', () => { + const t = new Tokenizer(); + const results = t.tokenize(`f"'{{\\"{0}\\": {0}}}'"`); + assert.equal(results.tokens.count, 11 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + + const middleFString = results.tokens.getItemAt(1) as FStringMiddleToken; + assert.equal(middleFString.type, TokenType.FStringMiddle); + assert.equal(middleFString.escapedValue.length, 5); + + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(4).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(5).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(6).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(7).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(8).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(9).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(10).type, TokenType.FStringEnd); +}); + +test('Strings: f-string with double brace', () => { + const t = new Tokenizer(); + const results = t.tokenize(`f"hello {{{0==0}}}"`); + assert.equal(results.tokens.count, 9 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(1).type, 
TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(4).type, TokenType.Operator); + assert.equal(results.tokens.getItemAt(5).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(6).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(7).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(8).type, TokenType.FStringEnd); +}); + +test('Strings: f-string with walrus operator', () => { + const t = new Tokenizer(); + const results = t.tokenize(`f"{(x:=0)}"`); + assert.equal(results.tokens.count, 9 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(1).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenParenthesis); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(4).type, TokenType.Operator); + assert.equal(results.tokens.getItemAt(5).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(6).type, TokenType.CloseParenthesis); + assert.equal(results.tokens.getItemAt(7).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(8).type, TokenType.FStringEnd); +}); + +test('Strings: f-string with single right brace', () => { + const t = new Tokenizer(); + const results = t.tokenize(`f"}"`); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(1).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(2).type, TokenType.FStringEnd); +}); + +test('Strings: f-string with comment', () => { + const t = new Tokenizer(); + const results = t.tokenize(`f'''hello{\nx # comment\n}'''`); + 
assert.equal(results.tokens.count, 6 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(1).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Identifier); + const closeBraceToken = results.tokens.getItemAt(4); + assert.equal(closeBraceToken.type, TokenType.CloseCurlyBrace); + assert.deepEqual(closeBraceToken.comments, [ + { type: CommentType.Regular, value: ' comment', start: 14, length: 8 }, + ]); + assert.equal(results.tokens.getItemAt(5).type, TokenType.FStringEnd); +}); + +test('Strings: f-string with unterminated expression', () => { + const t = new Tokenizer(); + const results = t.tokenize("f'hello { a'"); + assert.equal(results.tokens.count, 5 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(1).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Identifier); + + const fStringEnd = results.tokens.getItemAt(4) as FStringEndToken; + assert.equal(fStringEnd.type, TokenType.FStringEnd); + assert.equal(fStringEnd.flags, StringTokenFlags.Format | StringTokenFlags.SingleQuote); +}); + +test('Strings: f-string with replacement field', () => { + const t = new Tokenizer(); + const results = t.tokenize("f'hello { a + b}'"); + assert.equal(results.tokens.count, 8 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(1).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(4).type, TokenType.Operator); + 
assert.equal(results.tokens.getItemAt(5).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(6).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(7).type, TokenType.FStringEnd); +}); + +test('Strings: f-string with format specifier', () => { + const t = new Tokenizer(); + const results = t.tokenize("f'hello { a ! b}'"); + assert.equal(results.tokens.count, 8 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(1).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(4).type, TokenType.ExclamationMark); + assert.equal(results.tokens.getItemAt(5).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(6).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(7).type, TokenType.FStringEnd); +}); + +test('Strings: f-string with debug format specifier', () => { + const t = new Tokenizer(); + const results = t.tokenize("f'hello { a =}'"); + assert.equal(results.tokens.count, 7 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(1).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(4).type, TokenType.Operator); + assert.equal(results.tokens.getItemAt(5).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(6).type, TokenType.FStringEnd); +}); + +test('Strings: nested f-string', () => { + const t = new Tokenizer(); + const results = t.tokenize("f'{f'{a}'}'"); + assert.equal(results.tokens.count, 9 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, 
TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(1).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(2).type, TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(3).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(4).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(5).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(6).type, TokenType.FStringEnd); + assert.equal(results.tokens.getItemAt(7).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(8).type, TokenType.FStringEnd); +}); + +test('Strings: nested f-string formats 1', () => { + const t = new Tokenizer(); + const results = t.tokenize("f'{a:x{{b}+:x{c}+}}'"); + assert.equal(results.tokens.count, 19 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(1).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(2).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(3).type, TokenType.Colon); + assert.equal(results.tokens.getItemAt(4).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(5).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(6).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(7).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(8).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(9).type, TokenType.Operator); + assert.equal(results.tokens.getItemAt(10).type, TokenType.Colon); + assert.equal(results.tokens.getItemAt(11).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(12).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(13).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(14).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(15).type, 
TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(16).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(17).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(18).type, TokenType.FStringEnd); +}); + +test('Strings: nested f-string formats 2', () => { + const t = new Tokenizer(); + const results = t.tokenize("f'hi{'x':*^{8:{'':}}0}'"); + assert.equal(results.tokens.count, 17 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.FStringStart); + assert.equal(results.tokens.getItemAt(1).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(2).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(3).type, TokenType.String); + assert.equal(results.tokens.getItemAt(4).type, TokenType.Colon); + assert.equal(results.tokens.getItemAt(5).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(6).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(7).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(8).type, TokenType.Colon); + assert.equal(results.tokens.getItemAt(9).type, TokenType.OpenCurlyBrace); + assert.equal(results.tokens.getItemAt(10).type, TokenType.String); + assert.equal(results.tokens.getItemAt(11).type, TokenType.Colon); + assert.equal(results.tokens.getItemAt(12).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(13).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(14).type, TokenType.FStringMiddle); + assert.equal(results.tokens.getItemAt(15).type, TokenType.CloseCurlyBrace); + assert.equal(results.tokens.getItemAt(16).type, TokenType.FStringEnd); +}); + +test('Strings: escape at the end of single quoted string', () => { + const t = new Tokenizer(); + const results = t.tokenize("'quoted\\'\nx"); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); + + const stringToken = results.tokens.getItemAt(0) as StringToken; + 
assert.equal(stringToken.type, TokenType.String); + assert.equal(stringToken.flags, StringTokenFlags.SingleQuote | StringTokenFlags.Unterminated); + assert.equal(stringToken.length, 9); + assert.equal(stringToken.escapedValue, "quoted\\'"); + + assert.equal(results.tokens.getItemAt(1).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(2).type, TokenType.Identifier); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(8), 0); + assert.equal(results.tokens.getItemAtPosition(9), 1); + assert.equal(results.tokens.getItemAtPosition(10), 2); + assert.equal(results.tokens.getItemAtPosition(11), 4); + + assert.equal(results.tokens.contains(10), true); + assert.equal(results.tokens.contains(11), false); +}); + +test('Strings: escape at the end of double quoted string', () => { + const t = new Tokenizer(); + const results = t.tokenize('"quoted\\"\nx'); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); + + const stringToken = results.tokens.getItemAt(0) as StringToken; + assert.equal(stringToken.type, TokenType.String); + assert.equal(stringToken.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.Unterminated); + assert.equal(stringToken.length, 9); + assert.equal(stringToken.escapedValue, 'quoted\\"'); + + assert.equal(results.tokens.getItemAt(1).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(2).type, TokenType.Identifier); +}); + +test('Strings: b/u/r-string', () => { + const t = new Tokenizer(); + const results = t.tokenize('b"b" U\'u\' bR"br"'); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); + + const stringToken0 = results.tokens.getItemAt(0) as StringToken; + assert.equal(stringToken0.type, TokenType.String); + assert.equal(stringToken0.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.Bytes); + assert.equal(stringToken0.length, 4); + assert.equal(stringToken0.escapedValue, 'b'); + assert.equal(stringToken0.prefixLength, 1); + + const 
stringToken1 = results.tokens.getItemAt(1) as StringToken; + assert.equal(stringToken1.type, TokenType.String); + assert.equal(stringToken1.flags, StringTokenFlags.SingleQuote | StringTokenFlags.Unicode); + assert.equal(stringToken1.length, 4); + assert.equal(stringToken1.escapedValue, 'u'); + assert.equal(stringToken1.prefixLength, 1); + + const stringToken2 = results.tokens.getItemAt(2) as StringToken; + assert.equal(stringToken2.type, TokenType.String); + assert.equal(stringToken2.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.Bytes | StringTokenFlags.Raw); + assert.equal(stringToken2.length, 6); + assert.equal(stringToken2.escapedValue, 'br'); + assert.equal(stringToken2.prefixLength, 2); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(4), 0); + assert.equal(results.tokens.getItemAtPosition(5), 1); + assert.equal(results.tokens.getItemAtPosition(9), 1); + assert.equal(results.tokens.getItemAtPosition(10), 2); + assert.equal(results.tokens.getItemAtPosition(15), 2); + + assert.equal(results.tokens.contains(15), true); + assert.equal(results.tokens.contains(16), false); +}); + +test('Strings: bytes string with non-ASCII', () => { + const t = new Tokenizer(); + const results = t.tokenize("B\"Teßt\" b'''Teñt'''"); + assert.equal(results.tokens.count, 2 + _implicitTokenCount); + + const stringToken0 = results.tokens.getItemAt(0) as StringToken; + const unescapedValue0 = StringTokenUtils.getUnescapedString(stringToken0); + assert.equal(stringToken0.type, TokenType.String); + assert.equal(stringToken0.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.Bytes); + assert.equal(unescapedValue0.nonAsciiInBytes, true); + assert.equal(stringToken0.length, 7); + + const stringToken1 = results.tokens.getItemAt(1) as StringToken; + const unescapedValue1 = StringTokenUtils.getUnescapedString(stringToken1); + assert.equal(stringToken1.type, TokenType.String); + assert.equal( + stringToken1.flags, + 
StringTokenFlags.SingleQuote | StringTokenFlags.Bytes | StringTokenFlags.Triplicate + ); + assert.equal(unescapedValue1.nonAsciiInBytes, true); + assert.equal(stringToken1.length, 11); +}); + +test('Strings: raw strings with escapes', () => { + const t = new Tokenizer(); + const results = t.tokenize('R"\\"" r"\\\r\n\\\n\\a"'); + assert.equal(results.tokens.count, 2 + _implicitTokenCount); + + const stringToken0 = results.tokens.getItemAt(0) as StringToken; + const unescapedValue0 = StringTokenUtils.getUnescapedString(stringToken0); + assert.equal(stringToken0.type, TokenType.String); + assert.equal(stringToken0.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.Raw); + assert.equal(stringToken0.length, 5); + assert.equal(stringToken0.escapedValue, '\\"'); + assert.equal(unescapedValue0.value, '\\"'); + + const stringToken1 = results.tokens.getItemAt(1) as StringToken; + const unescapedValue1 = StringTokenUtils.getUnescapedString(stringToken1); + assert.equal(stringToken1.type, TokenType.String); + assert.equal(stringToken1.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.Raw); + assert.equal(stringToken1.length, 10); + assert.equal(stringToken1.escapedValue, '\\\r\n\\\n\\a'); + assert.equal(unescapedValue1.value, '\\\r\n\\\n\\a'); +}); + +test('Strings: escape at the end of double quoted string', () => { + const t = new Tokenizer(); + const results = t.tokenize('"quoted\\"\nx'); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); + + const stringToken = results.tokens.getItemAt(0) as StringToken; + assert.equal(stringToken.type, TokenType.String); + assert.equal(stringToken.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.Unterminated); + assert.equal(stringToken.length, 9); + assert.equal(stringToken.escapedValue, 'quoted\\"'); + + assert.equal(results.tokens.getItemAt(1).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(2).type, TokenType.Identifier); +}); + +test('Strings: special escape characters', () => { + const t = 
new Tokenizer(); + const results = t.tokenize('"\\r\\n\\a\\v\\t\\b\\f\\\\"'); + assert.equal(results.tokens.count, 1 + _implicitTokenCount); + + const stringToken = results.tokens.getItemAt(0) as StringToken; + const unescapedValue = StringTokenUtils.getUnescapedString(stringToken); + assert.equal(stringToken.type, TokenType.String); + assert.equal(stringToken.flags, StringTokenFlags.DoubleQuote); + assert.equal(stringToken.length, 18); + assert.equal(unescapedValue.value, '\r\n\u0007\v\t\b\f\\'); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(17), 0); + assert.equal(results.tokens.getItemAtPosition(18), 2); + + assert.equal(results.tokens.contains(17), true); + assert.equal(results.tokens.contains(18), false); +}); + +test('Strings: invalid escape characters', () => { + const t = new Tokenizer(); + const results = t.tokenize('"\\d \\ "'); + assert.equal(results.tokens.count, 1 + _implicitTokenCount); + + const stringToken = results.tokens.getItemAt(0) as StringToken; + const unescapedValue = StringTokenUtils.getUnescapedString(stringToken); + assert.equal(stringToken.type, TokenType.String); + assert.equal(stringToken.flags, StringTokenFlags.DoubleQuote); + assert.equal(stringToken.length, 8); + assert.equal(stringToken.escapedValue, '\\d \\ '); + assert.equal(unescapedValue.unescapeErrors.length, 2); + assert.equal(unescapedValue.unescapeErrors[0].offset, 0); + assert.equal(unescapedValue.unescapeErrors[0].length, 2); + assert.equal(unescapedValue.unescapeErrors[0].errorType, StringTokenUtils.UnescapeErrorType.InvalidEscapeSequence); + assert.equal(unescapedValue.unescapeErrors[1].offset, 4); + assert.equal(unescapedValue.unescapeErrors[1].length, 2); + assert.equal(unescapedValue.unescapeErrors[1].errorType, StringTokenUtils.UnescapeErrorType.InvalidEscapeSequence); +}); + +test('Strings: good hex escapes', () => { + const t = new Tokenizer(); + const results = t.tokenize('"\\x4d" "\\u006b" 
"\\U0000006F"'); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); + + const stringToken0 = results.tokens.getItemAt(0) as StringToken; + const unescapedValue0 = StringTokenUtils.getUnescapedString(stringToken0); + assert.equal(stringToken0.type, TokenType.String); + assert.equal(stringToken0.flags, StringTokenFlags.DoubleQuote); + assert.equal(stringToken0.length, 6); + assert.equal(stringToken0.escapedValue, '\\x4d'); + assert.equal(unescapedValue0.value, 'M'); + + const stringToken1 = results.tokens.getItemAt(1) as StringToken; + const unescapedValue1 = StringTokenUtils.getUnescapedString(stringToken1); + assert.equal(stringToken1.type, TokenType.String); + assert.equal(stringToken1.flags, StringTokenFlags.DoubleQuote); + assert.equal(stringToken1.length, 8); + assert.equal(stringToken1.escapedValue, '\\u006b'); + assert.equal(unescapedValue1.value, 'k'); + + const stringToken2 = results.tokens.getItemAt(2) as StringToken; + const unescapedValue2 = StringTokenUtils.getUnescapedString(stringToken2); + assert.equal(stringToken2.type, TokenType.String); + assert.equal(stringToken2.flags, StringTokenFlags.DoubleQuote); + assert.equal(stringToken2.length, 12); + assert.equal(stringToken2.escapedValue, '\\U0000006F'); + assert.equal(unescapedValue2.value, 'o'); +}); + +test('Strings: bad hex escapes', () => { + const t = new Tokenizer(); + const results = t.tokenize('"\\x4g" "\\u006" "\\U0000006m"'); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); + + const stringToken0 = results.tokens.getItemAt(0) as StringToken; + const unescapedValue0 = StringTokenUtils.getUnescapedString(stringToken0); + assert.equal(stringToken0.type, TokenType.String); + assert.equal(stringToken0.flags, StringTokenFlags.DoubleQuote); + assert.equal(unescapedValue0.unescapeErrors.length, 1); + assert.equal(stringToken0.length, 6); + assert.equal(unescapedValue0.value, '\\x4g'); + + const stringToken1 = results.tokens.getItemAt(1) as StringToken; + const 
unescapedValue1 = StringTokenUtils.getUnescapedString(stringToken1); + assert.equal(stringToken1.type, TokenType.String); + assert.equal(stringToken1.flags, StringTokenFlags.DoubleQuote); + assert.equal(unescapedValue1.unescapeErrors.length, 1); + assert.equal(stringToken1.length, 7); + assert.equal(unescapedValue1.value, '\\u006'); + + const stringToken2 = results.tokens.getItemAt(2) as StringToken; + const unescapedValue2 = StringTokenUtils.getUnescapedString(stringToken2); + assert.equal(stringToken2.type, TokenType.String); + assert.equal(stringToken2.flags, StringTokenFlags.DoubleQuote); + assert.equal(unescapedValue2.unescapeErrors.length, 1); + assert.equal(stringToken2.length, 12); + assert.equal(unescapedValue2.value, '\\U0000006m'); +}); + +test('Strings: good name escapes', () => { + const t = new Tokenizer(); + const results = t.tokenize('"\\N{caret escape blah}" "a\\N{A9}a"'); + assert.equal(results.tokens.count, 2 + _implicitTokenCount); + + const stringToken0 = results.tokens.getItemAt(0) as StringToken; + const unescapedValue0 = StringTokenUtils.getUnescapedString(stringToken0); + assert.equal(stringToken0.type, TokenType.String); + assert.equal(stringToken0.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.NamedUnicodeEscape); + assert.equal(stringToken0.length, 23); + assert.equal(stringToken0.escapedValue, '\\N{caret escape blah}'); + assert.equal(unescapedValue0.value, '-'); + + const stringToken1 = results.tokens.getItemAt(1) as StringToken; + const unescapedValue1 = StringTokenUtils.getUnescapedString(stringToken1); + assert.equal(stringToken1.type, TokenType.String); + assert.equal(stringToken1.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.NamedUnicodeEscape); + assert.equal(stringToken1.length, 10); + assert.equal(stringToken1.escapedValue, 'a\\N{A9}a'); + assert.equal(unescapedValue1.value, 'a-a'); +}); + +test('Strings: bad name escapes', () => { + const t = new Tokenizer(); + const results = t.tokenize('"\\N{caret" 
"\\N{.A9}"'); + assert.equal(results.tokens.count, 2 + _implicitTokenCount); + + const stringToken0 = results.tokens.getItemAt(0) as StringToken; + const unescapedValue0 = StringTokenUtils.getUnescapedString(stringToken0); + assert.equal(stringToken0.type, TokenType.String); + assert.equal(stringToken0.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.NamedUnicodeEscape); + assert.equal(unescapedValue0.unescapeErrors.length, 1); + assert.equal(stringToken0.length, 10); + assert.equal(stringToken0.escapedValue, '\\N{caret'); + assert.equal(unescapedValue0.value, '\\N{caret'); + + const stringToken1 = results.tokens.getItemAt(1) as StringToken; + const unescapedValue1 = StringTokenUtils.getUnescapedString(stringToken1); + assert.equal(stringToken1.type, TokenType.String); + assert.equal(stringToken1.flags, StringTokenFlags.DoubleQuote | StringTokenFlags.NamedUnicodeEscape); + assert.equal(unescapedValue1.unescapeErrors.length, 1); + assert.equal(stringToken1.length, 9); + assert.equal(stringToken1.escapedValue, '\\N{.A9}'); + assert.equal(unescapedValue1.value, '\\N{.A9}'); +}); + +test('Comments', () => { + const t = new Tokenizer(); + const results = t.tokenize(' #co"""mment1\n\t\n#x\'y2 '); + assert.equal(results.tokens.count, 1 + _implicitTokenCountNoImplicitNewLine); + assert.equal(results.tokens.getItemAt(0).type, TokenType.NewLine); +}); + +test('Period to operator token', () => { + const t = new Tokenizer(); + const results = t.tokenize('x.y'); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(1).type, TokenType.Dot); + assert.equal(results.tokens.getItemAt(2).type, TokenType.Identifier); +}); + +test('@ to operator token', () => { + const t = new Tokenizer(); + const results = t.tokenize('@x'); + assert.equal(results.tokens.count, 2 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Operator); + 
assert.equal(results.tokens.getItemAt(1).type, TokenType.Identifier); +}); + +test('Unknown token', () => { + const t = new Tokenizer(); + const results = t.tokenize('`$'); + assert.equal(results.tokens.count, 2 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Backtick); + assert.equal(results.tokens.getItemAt(1).type, TokenType.Invalid); +}); + +test('Hex number', () => { + const t = new Tokenizer(); + const results = t.tokenize('1 0X2 0xFe_Ab 0x'); + assert.equal(results.tokens.count, 5 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(0).length, 1); + assert.equal((results.tokens.getItemAt(0) as NumberToken).value, 1); + assert.equal((results.tokens.getItemAt(0) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(1).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(1).length, 3); + assert.equal((results.tokens.getItemAt(1) as NumberToken).value, 2); + assert.equal((results.tokens.getItemAt(1) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(2).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(2).length, 7); + assert.equal((results.tokens.getItemAt(2) as NumberToken).value, 0xfeab); + assert.equal((results.tokens.getItemAt(2) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(3).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(3).length, 1); + + assert.equal(results.tokens.getItemAt(4).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(4).length, 1); +}); + +test('Binary number', () => { + const t = new Tokenizer(); + const results = t.tokenize('1 0B1 0b010 0b3 0b'); + assert.equal(results.tokens.count, 7 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(0).length, 1); + assert.equal((results.tokens.getItemAt(0) as 
NumberToken).value, 1); + assert.equal((results.tokens.getItemAt(0) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(1).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(1).length, 3); + assert.equal((results.tokens.getItemAt(1) as NumberToken).value, 1); + assert.equal((results.tokens.getItemAt(1) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(2).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(2).length, 5); + assert.equal((results.tokens.getItemAt(2) as NumberToken).value, 2); + assert.equal((results.tokens.getItemAt(2) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(3).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(3).length, 1); + assert.equal((results.tokens.getItemAt(3) as NumberToken).value, 0); + assert.equal((results.tokens.getItemAt(3) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(4).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(4).length, 2); + + assert.equal(results.tokens.getItemAt(5).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(5).length, 1); + + assert.equal(results.tokens.getItemAt(6).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(6).length, 1); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(1), 0); + assert.equal(results.tokens.getItemAtPosition(2), 1); + assert.equal(results.tokens.getItemAtPosition(5), 1); + assert.equal(results.tokens.getItemAtPosition(6), 2); + assert.equal(results.tokens.getItemAtPosition(11), 2); + assert.equal(results.tokens.getItemAtPosition(12), 3); + assert.equal(results.tokens.getItemAtPosition(13), 4); + assert.equal(results.tokens.getItemAtPosition(15), 4); + assert.equal(results.tokens.getItemAtPosition(16), 5); + assert.equal(results.tokens.getItemAtPosition(17), 6); + assert.equal(results.tokens.getItemAtPosition(18), 8); + + 
assert.equal(results.tokens.contains(17), true); + assert.equal(results.tokens.contains(18), false); +}); + +test('Octal number', () => { + const t = new Tokenizer(); + const results = t.tokenize('1 0o4 0O0_7_7 -0o200 0o9 0oO'); + assert.equal(results.tokens.count, 9 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(0).length, 1); + assert.equal((results.tokens.getItemAt(0) as NumberToken).value, 1); + assert.equal((results.tokens.getItemAt(0) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(1).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(1).length, 3); + assert.equal((results.tokens.getItemAt(1) as NumberToken).value, 4); + assert.equal((results.tokens.getItemAt(1) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(2).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(2).length, 7); + assert.equal((results.tokens.getItemAt(2) as NumberToken).value, 0o77); + assert.equal((results.tokens.getItemAt(2) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(3).type, TokenType.Operator); + assert.equal(results.tokens.getItemAt(3).length, 1); + + assert.equal(results.tokens.getItemAt(4).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(4).length, 5); + assert.equal((results.tokens.getItemAt(4) as NumberToken).value, 0o200); + assert.equal((results.tokens.getItemAt(4) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(5).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(5).length, 1); + assert.equal((results.tokens.getItemAt(5) as NumberToken).value, 0); + assert.equal((results.tokens.getItemAt(5) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(6).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(6).length, 2); + assert.equal((results.tokens.getItemAt(6) as IdentifierToken).value, 
'o9'); + + assert.equal(results.tokens.getItemAt(7).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(7).length, 1); + assert.equal((results.tokens.getItemAt(7) as NumberToken).value, 0); + assert.equal((results.tokens.getItemAt(7) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(8).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(8).length, 2); + assert.equal((results.tokens.getItemAt(8) as IdentifierToken).value, 'oO'); +}); + +test('Decimal number', () => { + const t = new Tokenizer(); + const results = t.tokenize('-2147483647 ++2147483647'); + assert.equal(results.tokens.count, 5 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Operator); + assert.equal(results.tokens.getItemAt(0).length, 1); + + assert.equal(results.tokens.getItemAt(1).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(1).length, 10); + assert.equal((results.tokens.getItemAt(1) as NumberToken).value, 2147483647); + assert.equal((results.tokens.getItemAt(1) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAt(2).type, TokenType.Operator); + assert.equal(results.tokens.getItemAt(2).length, 1); + + assert.equal(results.tokens.getItemAt(3).type, TokenType.Operator); + assert.equal(results.tokens.getItemAt(3).length, 1); + + assert.equal(results.tokens.getItemAt(4).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(4).length, 10); + assert.equal((results.tokens.getItemAt(4) as NumberToken).value, 2147483647); + assert.equal((results.tokens.getItemAt(4) as NumberToken).isInteger, true); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(1), 1); + assert.equal(results.tokens.getItemAtPosition(11), 1); + assert.equal(results.tokens.getItemAtPosition(12), 2); + assert.equal(results.tokens.getItemAtPosition(13), 3); + assert.equal(results.tokens.getItemAtPosition(14), 4); + 
assert.equal(results.tokens.getItemAtPosition(23), 4); + assert.equal(results.tokens.getItemAtPosition(24), 6); + + assert.equal(results.tokens.contains(23), true); + assert.equal(results.tokens.contains(24), false); +}); + +test('Decimal number operator', () => { + const t = new Tokenizer(); + const results = t.tokenize('a[: -1]'); + assert.equal(results.tokens.count, 6 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(4).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(4).length, 1); +}); + +test('Floating point number', () => { + const t = new Tokenizer(); + const results = t.tokenize('3.0 .2 ++.3e+12 --.4e1 1e-4 0.01 01.0'); + assert.equal(results.tokens.count, 11 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Number); + assert.equal((results.tokens.getItemAt(0) as NumberToken).value, 3); + assert.equal((results.tokens.getItemAt(0) as NumberToken).isInteger, false); + assert.equal(results.tokens.getItemAt(0).length, 3); + + assert.equal(results.tokens.getItemAt(1).type, TokenType.Number); + assert.equal((results.tokens.getItemAt(1) as NumberToken).value, 0.2); + assert.equal((results.tokens.getItemAt(1) as NumberToken).isInteger, false); + assert.equal(results.tokens.getItemAt(1).length, 2); + + assert.equal(results.tokens.getItemAt(2).type, TokenType.Operator); + assert.equal(results.tokens.getItemAt(2).length, 1); + + assert.equal(results.tokens.getItemAt(3).type, TokenType.Operator); + assert.equal(results.tokens.getItemAt(3).length, 1); + + assert.equal(results.tokens.getItemAt(4).type, TokenType.Number); + assert.equal((results.tokens.getItemAt(4) as NumberToken).value, 0.3e12); + assert.equal((results.tokens.getItemAt(4) as NumberToken).isInteger, false); + assert.equal(results.tokens.getItemAt(4).length, 6); + + assert.equal(results.tokens.getItemAt(5).type, TokenType.Operator); + assert.equal(results.tokens.getItemAt(5).length, 1); + + assert.equal(results.tokens.getItemAt(6).type, 
TokenType.Operator); + assert.equal(results.tokens.getItemAt(6).length, 1); + + assert.equal(results.tokens.getItemAt(7).type, TokenType.Number); + assert.equal((results.tokens.getItemAt(7) as NumberToken).value, 0.4e1); + assert.equal((results.tokens.getItemAt(7) as NumberToken).isInteger, false); + assert.equal(results.tokens.getItemAt(7).length, 4); + + assert.equal(results.tokens.getItemAt(8).type, TokenType.Number); + assert.equal((results.tokens.getItemAt(8) as NumberToken).value, 1e-4); + assert.equal((results.tokens.getItemAt(8) as NumberToken).isInteger, false); + assert.equal(results.tokens.getItemAt(8).length, 4); + + assert.equal(results.tokens.getItemAt(9).type, TokenType.Number); + assert.equal((results.tokens.getItemAt(9) as NumberToken).value, 0.01); + assert.equal((results.tokens.getItemAt(9) as NumberToken).isInteger, false); + assert.equal(results.tokens.getItemAt(9).length, 4); + + assert.equal(results.tokens.getItemAt(10).type, TokenType.Number); + assert.equal((results.tokens.getItemAt(10) as NumberToken).value, 1.0); + assert.equal((results.tokens.getItemAt(10) as NumberToken).isInteger, false); + assert.equal(results.tokens.getItemAt(10).length, 4); +}); + +test('Floating point numbers with parens', () => { + const t = new Tokenizer(); + const results = t.tokenize('(3.0) (.2) (+.3e+12, .4e1; 0)'); + assert.equal(results.tokens.count, 14 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(1).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(1).length, 3); + + assert.equal(results.tokens.getItemAt(4).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(4).length, 2); + + assert.equal(results.tokens.getItemAt(8).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(8).length, 6); + + assert.equal(results.tokens.getItemAt(10).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(10).length, 4); + + assert.equal(results.tokens.getItemAt(12).type, TokenType.Number); + 
assert.equal(results.tokens.getItemAt(12).length, 1); +}); + +test('Floating point numbers with operators', () => { + const t = new Tokenizer(); + const results = t.tokenize('88.9/100.0*4.0-2.0,'); + assert.equal(results.tokens.count, 8 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(0).length, 4); + + assert.equal(results.tokens.getItemAt(2).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(2).length, 5); + + assert.equal(results.tokens.getItemAt(4).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(4).length, 3); + + assert.equal(results.tokens.getItemAt(6).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(6).length, 3); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(3), 0); + assert.equal(results.tokens.getItemAtPosition(4), 1); + assert.equal(results.tokens.getItemAtPosition(5), 2); + assert.equal(results.tokens.getItemAtPosition(9), 2); + assert.equal(results.tokens.getItemAtPosition(10), 3); + assert.equal(results.tokens.getItemAtPosition(11), 4); + assert.equal(results.tokens.getItemAtPosition(13), 4); + assert.equal(results.tokens.getItemAtPosition(14), 5); + assert.equal(results.tokens.getItemAtPosition(15), 6); + assert.equal(results.tokens.getItemAtPosition(17), 6); + assert.equal(results.tokens.getItemAtPosition(18), 7); + assert.equal(results.tokens.getItemAtPosition(19), 9); + + assert.equal(results.tokens.contains(18), true); + assert.equal(results.tokens.contains(19), false); +}); + +test('Imaginary numbers', () => { + const t = new Tokenizer(); + const results = t.tokenize('88.9j/100.0J*4.0e-5j-2.0j,'); + assert.equal(results.tokens.count, 8 + _implicitTokenCount); + + const token0 = results.tokens.getItemAt(0); + assert.equal(token0.type, TokenType.Number); + assert.equal(token0.length, 5); + assert((token0 as NumberToken).isImaginary); + + const token2 = 
results.tokens.getItemAt(2); + assert.equal(token2.type, TokenType.Number); + assert.equal(token2.length, 6); + assert((token2 as NumberToken).isImaginary); + + const token4 = results.tokens.getItemAt(4); + assert.equal(token4.type, TokenType.Number); + assert.equal(token4.length, 7); + assert((token4 as NumberToken).isImaginary); + + const token6 = results.tokens.getItemAt(6); + assert.equal(token6.type, TokenType.Number); + assert.equal(token6.length, 4); + assert((token6 as NumberToken).isImaginary); +}); + +test('Underscore numbers', () => { + const t = new Tokenizer(); + const results = t.tokenize('1_0_0_0 0_0 .5_00_3e-4 0xC__A_FE_F00D 10_000_000.0 0b_0011_1111_0100_1110'); + const lengths = [7, 3, 10, 14, 12, 22]; + const isIntegers = [true, true, false, true, false, true]; + assert.equal(results.tokens.count, 6 + _implicitTokenCount); + + for (let i = 0; i < lengths.length; i++) { + assert.equal(results.tokens.getItemAt(i).type, TokenType.Number); + assert.equal(results.tokens.getItemAt(i).length, lengths[i]); + assert.equal((results.tokens.getItemAt(i) as NumberToken).isInteger, isIntegers[i]); + } +}); + +test('Simple expression, leading minus', () => { + const t = new Tokenizer(); + const results = t.tokenize('x == -y'); + assert.equal(results.tokens.count, 4 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(0).length, 1); + + assert.equal(results.tokens.getItemAt(1).type, TokenType.Operator); + assert.equal(results.tokens.getItemAt(1).length, 2); + + assert.equal(results.tokens.getItemAt(2).type, TokenType.Operator); + assert.equal(results.tokens.getItemAt(2).length, 1); + + assert.equal(results.tokens.getItemAt(3).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(3).length, 1); +}); + +test('Operators', () => { + const text = + '< << <<= ' + + '== != > >> >>= >= <=' + + '+ - ~ %' + + '* ** / // /= //=' + + '*= += -= %= **= ' + + '& &= | |= ^ ^= ' + 
+ ':= <>'; + const results = new Tokenizer().tokenize(text); + const lengths = [1, 2, 3, 2, 2, 1, 2, 3, 2, 2, 1, 1, 1, 1, 1, 2, 1, 2, 2, 3, 2, 2, 2, 2, 3, 1, 2, 1, 2, 1, 2, 2, 2]; + const operatorTypes = [ + OperatorType.LessThan, + OperatorType.LeftShift, + OperatorType.LeftShiftEqual, + OperatorType.Equals, + OperatorType.NotEquals, + OperatorType.GreaterThan, + OperatorType.RightShift, + OperatorType.RightShiftEqual, + OperatorType.GreaterThanOrEqual, + OperatorType.LessThanOrEqual, + OperatorType.Add, + OperatorType.Subtract, + OperatorType.BitwiseInvert, + OperatorType.Mod, + OperatorType.Multiply, + OperatorType.Power, + OperatorType.Divide, + OperatorType.FloorDivide, + OperatorType.DivideEqual, + OperatorType.FloorDivideEqual, + OperatorType.MultiplyEqual, + OperatorType.AddEqual, + OperatorType.SubtractEqual, + OperatorType.ModEqual, + OperatorType.PowerEqual, + OperatorType.BitwiseAnd, + OperatorType.BitwiseAndEqual, + OperatorType.BitwiseOr, + OperatorType.BitwiseOrEqual, + OperatorType.BitwiseXor, + OperatorType.BitwiseXorEqual, + OperatorType.Walrus, + OperatorType.LessOrGreaterThan, + ]; + assert.equal(results.tokens.count - _implicitTokenCount, lengths.length); + assert.equal(results.tokens.count - _implicitTokenCount, operatorTypes.length); + for (let i = 0; i < lengths.length; i++) { + const t = results.tokens.getItemAt(i); + assert.equal(t.type, TokenType.Operator, `${t.type} at ${i} is not an operator`); + assert.equal((t as OperatorToken).operatorType, operatorTypes[i]); + assert.equal( + t.length, + lengths[i], + `Length ${t.length} at ${i} (text ${text.substr(t.start, t.length)}), expected ${lengths[i]}` + ); + } +}); + +test('Identifiers', () => { + const t = new Tokenizer(); + const results = t.tokenize('and __and __and__ and__'); + assert.equal(results.tokens.count, 4 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Keyword); + assert.equal(results.tokens.getItemAt(0).length, 3); + + 
assert.equal(results.tokens.getItemAt(1).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(1).length, 5); + + assert.equal(results.tokens.getItemAt(2).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(2).length, 7); + + assert.equal(results.tokens.getItemAt(3).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(3).length, 5); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(3), 0); + assert.equal(results.tokens.getItemAtPosition(4), 1); + assert.equal(results.tokens.getItemAtPosition(9), 1); + assert.equal(results.tokens.getItemAtPosition(10), 2); + assert.equal(results.tokens.getItemAtPosition(17), 2); + assert.equal(results.tokens.getItemAtPosition(18), 3); + assert.equal(results.tokens.getItemAtPosition(22), 3); + assert.equal(results.tokens.getItemAtPosition(23), 5); + + assert.equal(results.tokens.contains(22), true); + assert.equal(results.tokens.contains(23), false); +}); + +test('Lines1', () => { + const sampleText = TestUtils.readSampleFile('lines1.py'); + const t = new Tokenizer(); + + // Start with the line feed only. We don't know whether the + // sample file was stored with CR/LF or just LF, so do + // the replacement here. + const sampleTextLfOnly = sampleText.replace(/\r\n/g, '\n'); + const resultsLf = t.tokenize(sampleTextLfOnly); + assert.equal(resultsLf.lines.count, 15); + + // Now replace the LF with CR/LF sequences. 
+ const sampleTextCrLf = sampleTextLfOnly.replace(/\n/g, '\r\n'); + const resultsCrLf = t.tokenize(sampleTextCrLf); + assert.equal(resultsCrLf.lines.count, 15); +}); + +test('Comments1', () => { + const t = new Tokenizer(); + const results = t.tokenize('# hello\n# good bye\n\n\n""" test """ # another\n\n\npass'); + assert.equal(results.tokens.count, 4 + _implicitTokenCount); + + const token0 = results.tokens.getItemAt(0); + assert.equal(token0.type, TokenType.NewLine); + assert.equal(token0.comments!.length, 1); + assert.equal(token0.comments![0].value, ' hello'); + + const token1 = results.tokens.getItemAt(1); + assert.equal(token1.type, TokenType.String); + assert.equal(token1.comments!.length, 1); + assert.equal(token1.comments![0].value, ' good bye'); + + const token2 = results.tokens.getItemAt(2); + assert.equal(token2.type, TokenType.NewLine); + assert.equal(token2.comments!.length, 1); + assert.equal(token2.comments![0].value, ' another'); + + assert.equal(results.tokens.getItemAtPosition(0), -1); + assert.equal(results.tokens.getItemAtPosition(7), 0); + assert.equal(results.tokens.getItemAtPosition(20), 0); + assert.equal(results.tokens.getItemAtPosition(21), 1); + assert.equal(results.tokens.getItemAtPosition(42), 1); + assert.equal(results.tokens.getItemAtPosition(43), 2); + assert.equal(results.tokens.getItemAtPosition(45), 2); + assert.equal(results.tokens.getItemAtPosition(46), 3); + assert.equal(results.tokens.getItemAtPosition(49), 3); + assert.equal(results.tokens.getItemAtPosition(50), 5); + + assert.equal(results.tokens.contains(49), true); + assert.equal(results.tokens.contains(50), false); +}); + +test('Comments2', () => { + const t = new Tokenizer(); + const results = t.tokenize('class A:\n def func(self):\n pass\n # comment\n '); + assert.equal(results.tokens.count, 16 + _implicitTokenCount); + + const token17 = results.tokens.getItemAt(17); + assert.equal(token17.type, TokenType.EndOfStream); + assert.equal(token17.comments, undefined); + 
const start = token17.start; + + const token16 = results.tokens.getItemAt(16); + assert.equal(token16.type, TokenType.Dedent); + assert.equal(token16.start, start); + assert.equal(token16.comments, undefined); + + // When multiple tokens have the same start position (and 0-length) + // comments, if any, are stored on the first such token. + const token15 = results.tokens.getItemAt(15); + assert.equal(token15.type, TokenType.Dedent); + assert.equal(token15.start, start); + assert.equal(token15.comments!.length, 1); + assert.equal(token15.comments![0].value, ' comment'); + + const token14 = results.tokens.getItemAt(14); + assert.notEqual(token14.start, start); +}); + +test('Identifiers1', () => { + const t = new Tokenizer(); + const results = t.tokenize('배열 数値 лік Opciók 可選值'); + assert.equal(results.tokens.count, 5 + _implicitTokenCount); + + // Korean (Hangul) + const token0 = results.tokens.getItemAt(0); + assert.equal(token0.type, TokenType.Identifier); + + // Japanese + const token1 = results.tokens.getItemAt(1); + assert.equal(token1.type, TokenType.Identifier); + + // Russian (Cyrillic) + const token2 = results.tokens.getItemAt(2); + assert.equal(token2.type, TokenType.Identifier); + + // Hungarian + const token3 = results.tokens.getItemAt(3); + assert.equal(token3.type, TokenType.Identifier); + + // Chinese + const token4 = results.tokens.getItemAt(4); + assert.equal(token4.type, TokenType.Identifier); +}); + +test('TypeIgnoreAll1', () => { + const t = new Tokenizer(); + const results = t.tokenize('\n#type:ignore\n"test"'); + assert(results.typeIgnoreAll); +}); + +test('TypeIgnoreAll2', () => { + const t = new Tokenizer(); + const results = t.tokenize('\n# type: ignore ssss\n'); + assert(results.typeIgnoreAll); +}); + +test('TypeIgnoreAll3', () => { + const t = new Tokenizer(); + const results = t.tokenize('\n# type: ignoreSsss\n'); + assert(!results.typeIgnoreAll); +}); + +test('TypeIgnoreAll3', () => { + const t = new Tokenizer(); + const results = 
t.tokenize('\n"hello"\n# type: ignore\n'); + assert(!results.typeIgnoreAll); +}); + +test('TypeIgnoreLine1', () => { + const t = new Tokenizer(); + const results = t.tokenize('\na = 3 # type: ignore\n"test" # type:ignore'); + assert.equal(results.typeIgnoreLines.size, 2); + assert(results.typeIgnoreLines.has(1)); + assert(results.typeIgnoreLines.has(2)); +}); + +test('TypeIgnoreLine2', () => { + const t = new Tokenizer(); + const results = t.tokenize('a = 3 # type: ignores\n"test" # type:ignore'); + assert.equal(results.typeIgnoreLines.size, 1); + assert(results.typeIgnoreLines.has(1)); + + assert.equal(results.tokens.getItemAtPosition(0), 0); + assert.equal(results.tokens.getItemAtPosition(1), 0); + assert.equal(results.tokens.getItemAtPosition(2), 1); + assert.equal(results.tokens.getItemAtPosition(3), 1); + assert.equal(results.tokens.getItemAtPosition(4), 2); + assert.equal(results.tokens.getItemAtPosition(20), 2); + assert.equal(results.tokens.getItemAtPosition(21), 3); + assert.equal(results.tokens.getItemAtPosition(22), 4); + assert.equal(results.tokens.getItemAtPosition(41), 4); + assert.equal(results.tokens.getItemAtPosition(42), 6); + + assert.equal(results.tokens.contains(41), true); + assert.equal(results.tokens.contains(42), false); +}); + +test('Constructor', () => { + const t = new Tokenizer(); + const results = t.tokenize('def constructor'); + assert.equal(results.tokens.count, 2 + _implicitTokenCount); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Keyword); + assert.equal(results.tokens.getItemAt(0).length, 3); + + assert.equal(results.tokens.getItemAt(1).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(1).length, 11); +}); + +test('Normalization', () => { + const t = new Tokenizer(); + const results = t.tokenize('ℝ 𝕽'); + assert.equal(results.tokens.count, 2 + _implicitTokenCount); + + let idToken = results.tokens.getItemAt(0) as IdentifierToken; + assert.equal(idToken.type, TokenType.Identifier); + 
assert.equal(idToken.length, 1); + assert.equal(idToken.value, 'R'); + + idToken = results.tokens.getItemAt(1) as IdentifierToken; + assert.equal(idToken.type, TokenType.Identifier); + assert.equal(idToken.length, 2); + assert.equal(idToken.value, 'R'); +}); + +test('Last empty line', () => { + const t = new Tokenizer(); + const results = t.tokenize('\r\n'); + assert.equal(results.tokens.count, _implicitTokenCount); + + const newLineToken = results.tokens.getItemAt(0) as NewLineToken; + assert.equal(newLineToken.type, TokenType.NewLine); + assert.equal(newLineToken.length, 2); + assert.equal(newLineToken.newLineType, NewLineType.CarriageReturnLineFeed); + + const eofToken = results.tokens.getItemAt(1); + assert.equal(eofToken.type, TokenType.EndOfStream); + assert.equal(eofToken.length, 0); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/typeEvaluator1.test.ts b/python-parser/packages/pyright-internal/src/tests/typeEvaluator1.test.ts new file mode 100644 index 00000000..6b2aa94b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/typeEvaluator1.test.ts @@ -0,0 +1,1199 @@ +/* + * typeEvaluator1.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for pyright type evaluator. Tests are split + * arbitrarily among multiple files so they can run in parallel. 
+ */ + +import * as assert from 'assert'; + +import * as AnalyzerNodeInfo from '../analyzer/analyzerNodeInfo'; +import { ScopeType } from '../analyzer/scope'; +import { ConfigOptions } from '../common/configOptions'; +import { + pythonVersion3_10, + pythonVersion3_11, + pythonVersion3_13, + pythonVersion3_7, + pythonVersion3_8, + pythonVersion3_9, +} from '../common/pythonVersion'; +import { Uri } from '../common/uri/uri'; +import * as TestUtils from './testUtils'; + +test('Unreachable1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['unreachable1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0, 0, 2, 1, 6); + + configOptions.diagnosticRuleSet.reportUnreachable = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['unreachable1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 5, 0, 2, 1, 6); + + configOptions.diagnosticRuleSet.reportUnreachable = 'warning'; + const analysisResults3 = TestUtils.typeAnalyzeSampleFiles(['unreachable1.py'], configOptions); + TestUtils.validateResults(analysisResults3, 0, 5, 2, 1, 6); +}); + +test('Builtins1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['builtins1.py']); + + assert.strictEqual(analysisResults.length, 1); + assert.notStrictEqual(analysisResults[0].parseResults, undefined); + assert.strictEqual(analysisResults[0].errors.length, 0); + assert.strictEqual(analysisResults[0].warnings.length, 0); + + // This list comes from python directly. 
+ // `python` + // `import builtins + // `dir(builtins)` + // Remove True, False, None, _, __build_class__, __debug__, __doc__ + const expectedBuiltinsSymbols = [ + 'ArithmeticError', + 'AssertionError', + 'AttributeError', + 'BaseException', + 'BaseExceptionGroup', + 'BlockingIOError', + 'BrokenPipeError', + 'BufferError', + 'BytesWarning', + 'ChildProcessError', + 'ConnectionAbortedError', + 'ConnectionError', + 'ConnectionRefusedError', + 'ConnectionResetError', + 'DeprecationWarning', + 'EOFError', + 'Ellipsis', + 'EncodingWarning', + 'EnvironmentError', + 'Exception', + 'ExceptionGroup', + 'FileExistsError', + 'FileNotFoundError', + 'FloatingPointError', + 'FutureWarning', + 'GeneratorExit', + 'IOError', + 'ImportError', + 'ImportWarning', + 'IndentationError', + 'IndexError', + 'InterruptedError', + 'IsADirectoryError', + 'KeyError', + 'KeyboardInterrupt', + 'LookupError', + 'ModuleNotFoundError', + 'MemoryError', + 'NameError', + 'NotADirectoryError', + 'NotImplemented', + 'NotImplementedError', + 'OSError', + 'OverflowError', + 'PendingDeprecationWarning', + 'PermissionError', + 'ProcessLookupError', + 'PythonFinalizationError', + 'RecursionError', + 'ReferenceError', + 'ResourceWarning', + 'RuntimeError', + 'RuntimeWarning', + 'StopAsyncIteration', + 'StopIteration', + 'SyntaxError', + 'SyntaxWarning', + 'SystemError', + 'SystemExit', + 'TabError', + 'TimeoutError', + 'TypeError', + 'UnboundLocalError', + 'UnicodeDecodeError', + 'UnicodeEncodeError', + 'UnicodeError', + 'UnicodeTranslateError', + 'UnicodeWarning', + 'UserWarning', + 'ValueError', + 'Warning', + 'WindowsError', + 'ZeroDivisionError', + '__build_class__', + '__import__', + '__loader__', + '__name__', + '__package__', + '__spec__', + 'abs', + 'aiter', + 'all', + 'anext', + 'any', + 'ascii', + 'bin', + 'bool', + 'breakpoint', + 'bytearray', + 'bytes', + 'callable', + 'chr', + 'classmethod', + 'compile', + 'complex', + 'copyright', + 'credits', + 'delattr', + 'dict', + 'dir', + 'divmod', + 
'enumerate', + 'eval', + 'exec', + 'exit', + 'filter', + 'float', + 'format', + 'frozenset', + 'getattr', + 'globals', + 'hasattr', + 'hash', + 'help', + 'hex', + 'id', + 'input', + 'int', + 'isinstance', + 'issubclass', + 'iter', + 'len', + 'license', + 'list', + 'locals', + 'map', + 'max', + 'memoryview', + 'min', + 'next', + 'object', + 'oct', + 'open', + 'ord', + 'pow', + 'print', + 'property', + 'quit', + 'range', + 'repr', + 'reversed', + 'round', + 'set', + 'setattr', + 'slice', + 'sorted', + 'staticmethod', + 'str', + 'sum', + 'super', + 'tuple', + 'type', + 'vars', + 'zip', + // These really shouldn't be exposed but are defined by builtins.pyi currently. + 'function', + 'ellipsis', + ]; + + const moduleScope = AnalyzerNodeInfo.getScope(analysisResults[0].parseResults!.parserOutput.parseTree)!; + assert.notStrictEqual(moduleScope, undefined); + + const builtinsScope = moduleScope.parent!; + assert.notStrictEqual(builtinsScope, undefined); + assert.strictEqual(builtinsScope.type, ScopeType.Builtin); + + // Make sure all the expected symbols are present. + const builtinsSymbolTable = builtinsScope.symbolTable; + for (const symbolName of expectedBuiltinsSymbols) { + const symbol = moduleScope.lookUpSymbolRecursive(symbolName); + if (symbol === undefined) { + assert.fail(`${symbolName} is missing from builtins scope`); + } + } + + // Make sure the builtins scope doesn't contain symbols that + // shouldn't be present. 
+ const symbolMap = new Map<string, string>(); + for (const symbolName of expectedBuiltinsSymbols) { + symbolMap.set(symbolName, symbolName); + } + + for (const builtinName of builtinsSymbolTable.keys()) { + const symbolInfo = moduleScope.lookUpSymbolRecursive(builtinName); + if (symbolInfo && symbolInfo.isBeyondExecutionScope) { + if (symbolMap.get(builtinName) === undefined) { + assert.fail(`${builtinName} should not be in builtins scope`); + } + } + } +}); + +test('Builtins2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['builtins2.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Complex1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['complex1.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowing1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowing1.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('TypeNarrowing2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowing2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowing3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowing3.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypeNarrowing4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowing4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowing5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowing5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowing6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowing6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowing7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowing7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + 
+test('TypeNarrowing8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowing8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingAssert1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingAssert1.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypeNarrowingTypeIs1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingTypeIs1.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('TypeNarrowingTypeEquals1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingTypeEquals1.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('TypeNarrowingIsNone1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsNone1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsNone2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsNone2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsClass1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsClass1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsNoneTuple1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsNoneTuple1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsNoneTuple2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsNoneTuple2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsEllipsis1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsEllipsis1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingLiteral1', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['typeNarrowingLiteral1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingLiteral2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingLiteral2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingEnum1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingEnum1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingEnum2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingEnum2.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypeNarrowingIsinstance1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance1.py']); + + TestUtils.validateResults(analysisResults, 9); +}); + +test('TypeNarrowingIsinstance2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance3.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypeNarrowingIsinstance4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance7.py']); + + 
TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance12.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance13.py', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance15.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance16.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance17.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance18.py']); + + TestUtils.validateResults(analysisResults, 0); 
+}); + +test('TypeNarrowingIsinstance19', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance19.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance20', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance20.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIsinstance21', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance21.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingTupleLength1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingTupleLength1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIn1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIn1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingIn2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIn2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingLiteralMember1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingLiteralMember1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingNoneMember1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingNoneMember1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingTuple1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingTuple1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingTypedDict1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingTypedDict1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('TypeNarrowingTypedDict2', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['typeNarrowingTypedDict2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingTypedDict3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingTypedDict3.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('typeNarrowingCallable1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingCallable1.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypeNarrowingFalsy1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingFalsy1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingLocalConst1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingLocalConst1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ReturnTypes1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['returnTypes1.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('ReturnTypes2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['returnTypes2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Specialization1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['specialization1.py']); + + TestUtils.validateResults(analysisResults, 7); +}); + +test('Specialization2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['specialization2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Expression1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['expression1.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Expression2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['expression2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Expression3', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['expression3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Expression4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['expression4.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Expression5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['expression5.py']); + + TestUtils.validateResults(analysisResults, 12); +}); + +test('Expression6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['expression6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Expression7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['expression7.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Expression8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['expression8.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Expression9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['expression9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Unpack1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unpack1.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Unpack2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unpack2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Unpack3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Analyze with Python 3.7 settings. + configOptions.defaultPythonVersion = pythonVersion3_7; + const analysisResults37 = TestUtils.typeAnalyzeSampleFiles(['unpack3.py'], configOptions); + TestUtils.validateResults(analysisResults37, 1); + + // Analyze with Python 3.8 settings. 
+ configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults38 = TestUtils.typeAnalyzeSampleFiles(['unpack3.py'], configOptions); + TestUtils.validateResults(analysisResults38, 0); +}); + +test('Unpack4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Analyze with Python 3.8 settings. + configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults38 = TestUtils.typeAnalyzeSampleFiles(['unpack4.py'], configOptions); + TestUtils.validateResults(analysisResults38, 2); + + // Analyze with Python 3.9 settings. + configOptions.defaultPythonVersion = pythonVersion3_9; + const analysisResults39 = TestUtils.typeAnalyzeSampleFiles(['unpack4.py'], configOptions); + TestUtils.validateResults(analysisResults39, 1); +}); + +test('Unpack4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unpack5.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Lambda1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Lambda2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda2.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('Lambda3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda3.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Lambda4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda4.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Lambda5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Lambda6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda6.py']); + + 
TestUtils.validateResults(analysisResults, 1); +}); + +test('Lambda7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Lambda8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Lambda9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Lambda10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Lambda11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Lambda12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda12.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Lambda13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Lambda14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Lambda15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda15.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Call1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call1.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('Call2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call2.py']); + + TestUtils.validateResults(analysisResults, 24); +}); + +test('Call3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Analyze with Python 3.7 settings. This will generate more errors. 
+ configOptions.defaultPythonVersion = pythonVersion3_7; + const analysisResults37 = TestUtils.typeAnalyzeSampleFiles(['call3.py'], configOptions); + TestUtils.validateResults(analysisResults37, 36); + + // Analyze with Python 3.8 settings. + configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults38 = TestUtils.typeAnalyzeSampleFiles(['call3.py'], configOptions); + TestUtils.validateResults(analysisResults38, 20); +}); + +test('Call4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Call5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call5.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Call6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call6.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Call7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call7.py']); + + TestUtils.validateResults(analysisResults, 8); +}); + +test('Call8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Call9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call9.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Call10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call10.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Call11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Call12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call12.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Call13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call13.py']); + + TestUtils.validateResults(analysisResults, 0); 
+}); + +test('Call14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call14.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Call15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call15.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Call16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call16.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Call17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call17.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Function1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Function2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Function3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function3.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Function5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Function6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Function7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function7.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Function8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Function9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Function10', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['function10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('KwargsUnpack1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['kwargsUnpack1.py']); + + TestUtils.validateResults(analysisResults, 13); +}); + +test('FunctionMember1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.diagnosticRuleSet.reportFunctionMemberAccess = 'none'; + const analysisResult1 = TestUtils.typeAnalyzeSampleFiles(['functionMember1.py'], configOptions); + TestUtils.validateResults(analysisResult1, 0); + + configOptions.diagnosticRuleSet.reportFunctionMemberAccess = 'error'; + const analysisResult2 = TestUtils.typeAnalyzeSampleFiles(['functionMember1.py'], configOptions); + TestUtils.validateResults(analysisResult2, 3); +}); + +test('FunctionMember2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['functionMember2.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('Annotations1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // This test requires 3.13 or older because 3.14 uses deferred + // type annotation evaluation. 
+ configOptions.defaultPythonVersion = pythonVersion3_13; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotations1.py'], configOptions); + + TestUtils.validateResults(analysisResults, 21); +}); + +test('Annotations2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotations2.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Annotations3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotations3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Annotations4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotations4.py']); + + TestUtils.validateResults(analysisResults, 8); +}); + +test('Annotations5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotations5.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Annotations6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotations6.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('AnnotatedVar1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotatedVar1.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('AnnotatedVar2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotatedVar2.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('AnnotatedVar3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotatedVar3.py']); + + TestUtils.validateResults(analysisResults, 7); +}); + +test('AnnotatedVar4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotatedVar4.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('AnnotatedVar5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotatedVar5.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('AnnotatedVar6', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['annotatedVar6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('AnnotatedVar7', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['annotatedVar7.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportTypeCommentUsage = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['annotatedVar7.py'], configOptions); + TestUtils.validateResults(analysisResults2, 3); +}); + +test('AnnotatedVar8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotatedVar8.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Required1', () => { + // Analyze with Python 3.10 settings. + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['required1.py'], configOptions); + + TestUtils.validateResults(analysisResults, 8); +}); + +test('Required2', () => { + // Analyze with Python 3.10 settings. + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['required2.py'], configOptions); + + TestUtils.validateResults(analysisResults, 7); +}); + +test('Required3', () => { + // Analyze with Python 3.10 settings. 
+ const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['required3.py'], configOptions); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Metaclass1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['metaclass1.py']); + TestUtils.validateResults(analysisResults, 4); +}); + +test('Metaclass2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['metaclass2.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('Metaclass3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['metaclass3.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('Metaclass4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['metaclass4.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('Metaclass5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['metaclass5.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('Metaclass6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['metaclass6.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Metaclass7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['metaclass7.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Metaclass8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['metaclass8.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('Metaclass9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['metaclass9.py']); + TestUtils.validateResults(analysisResults, 6); +}); + +test('Metaclass10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['metaclass10.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Metaclass11', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['metaclass11.py']); + TestUtils.validateResults(analysisResults, 4); +}); + +test('AssignmentExpr1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignmentExpr1.py']); + TestUtils.validateResults(analysisResults, 7); +}); + +test('AssignmentExpr2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignmentExpr2.py']); + TestUtils.validateResults(analysisResults, 8); +}); + +test('AssignmentExpr3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignmentExpr3.py']); + TestUtils.validateResults(analysisResults, 5); +}); + +test('AssignmentExpr4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignmentExpr4.py']); + TestUtils.validateResults(analysisResults, 16); +}); + +test('AssignmentExpr5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignmentExpr5.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('AssignmentExpr6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignmentExpr6.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('AssignmentExpr7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignmentExpr7.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('AssignmentExpr8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignmentExpr8.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('AssignmentExpr9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignmentExpr9.py']); + TestUtils.validateResults(analysisResults, 0); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/typeEvaluator2.test.ts b/python-parser/packages/pyright-internal/src/tests/typeEvaluator2.test.ts new file mode 100644 index 00000000..16645bf1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/typeEvaluator2.test.ts @@ -0,0 +1,989 @@ +/* + * 
typeEvaluator2.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for pyright type evaluator. Tests are split + * arbitrarily among multiple files so they can run in parallel. + */ + +import { ConfigOptions } from '../common/configOptions'; +import { pythonVersion3_10, pythonVersion3_13, pythonVersion3_9 } from '../common/pythonVersion'; +import { Uri } from '../common/uri/uri'; +import * as TestUtils from './testUtils'; + +test('CallbackProtocol1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol1.py']); + + TestUtils.validateResults(analysisResults, 10); +}); + +test('CallbackProtocol2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('CallbackProtocol3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('CallbackProtocol4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol4.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('CallbackProtocol5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol5.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('CallbackProtocol6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol6.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('CallbackProtocol7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('CallbackProtocol8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('CallbackProtocol9', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['callbackProtocol9.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('CallbackProtocol10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('CallbackProtocol11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Assignment1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment1.py']); + + TestUtils.validateResults(analysisResults, 8); +}); + +test('Assignment2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment2.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Assignment3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment3.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Assignment4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Assignment5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Assignment6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment6.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Assignment7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Assignment8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment8.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Assignment9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment9.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + 
+test('Assignment10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Assignment11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment11.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Assignment12', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['assignment12.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportUnknownVariableType = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['assignment12.py'], configOptions); + TestUtils.validateResults(analysisResults2, 3); +}); + +test('AugmentedAssignment1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['augmentedAssignment1.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('AugmentedAssignment2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['augmentedAssignment2.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('AugmentedAssignment3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['augmentedAssignment3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Super1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super1.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('Super2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Super3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Super4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Super5', 
() => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Super6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Super7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super7.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Super8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Super9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Super10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Super11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Super12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super12.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Super13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('MissingSuper1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['missingSuper1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportMissingSuperCall = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['missingSuper1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 4); +}); + +test('NewType1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['newType1.py']); + + 
TestUtils.validateResults(analysisResults, 13); +}); + +test('NewType2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['newType2.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('NewType3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['newType3.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('NewType4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['newType4.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('NewType5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['newType5.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('NewType6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['newType6.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('NewType7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['newType7.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('isInstance1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['isinstance1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('isInstance2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['isinstance2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('isInstance3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_9; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['isinstance3.py'], configOptions); + TestUtils.validateResults(analysisResults1, 7); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['isinstance3.py'], configOptions); + TestUtils.validateResults(analysisResults2, 7); +}); + +test('isInstance4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['isinstance4.py']); + + 
TestUtils.validateResults(analysisResults, 2); +}); + +test('isInstance5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['isinstance5.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('isInstance6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['isinstance6.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Unbound1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // This test requires 3.13 or older because 3.14 uses deferred + // type annotation evaluation. + configOptions.defaultPythonVersion = pythonVersion3_13; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unbound1.py'], configOptions); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Unbound2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unbound2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Unbound3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unbound3.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Unbound4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unbound4.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Unbound5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unbound5.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Unbound6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unbound6.py']); + + TestUtils.validateResults(analysisResults, 8); +}); + +test('Assert1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // By default, this is reported as a warning. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['assert1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 2); + + // Enable it as an error. 
+ configOptions.diagnosticRuleSet.reportAssertAlwaysTrue = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['assert1.py'], configOptions); + TestUtils.validateResults(analysisResults, 2, 0); + + // Turn off the diagnostic. + configOptions.diagnosticRuleSet.reportAssertAlwaysTrue = 'none'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['assert1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 0); +}); + +test('RevealedType1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['revealedType1.py']); + + TestUtils.validateResults(analysisResults, 2, 0, 7); +}); + +test('AssertType1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assertType1.py']); + + TestUtils.validateResults(analysisResults, 11); +}); + +test('NameBinding1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['nameBinding1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('NameBinding2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['nameBinding2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('NameBinding3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['nameBinding3.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('NameBinding4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['nameBinding4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('NameBinding5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['nameBinding5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ConstrainedTypeVar1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('ConstrainedTypeVar2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar2.py']); + + TestUtils.validateResults(analysisResults, 5); 
+}); + +test('ConstrainedTypeVar3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ConstrainedTypeVar4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ConstrainedTypeVar5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar5.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('ConstrainedTypeVar6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ConstrainedTypeVar7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar7.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('ConstrainedTypeVar8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar8.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('ConstrainedTypeVar9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ConstrainedTypeVar10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar10.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('ConstrainedTypeVar11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar11.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('ConstrainedTypeVar12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar12.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ConstrainedTypeVar13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar13.py']); + + 
TestUtils.validateResults(analysisResults, 5); +}); + +test('ConstrainedTypeVar14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ConstrainedTypeVar15', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.diagnosticRuleSet.disableBytesTypePromotions = true; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar15.py'], configOptions); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ConstrainedTypeVar16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar16.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ConstrainedTypeVar17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar17.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ConstrainedTypeVar18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar18.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ConstrainedTypeVar19', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar19.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('ConstrainedTypeVar20', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constrainedTypeVar20.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('MissingTypeArg1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // By default, reportMissingTypeArgument is disabled. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['missingTypeArg1.py']); + TestUtils.validateResults(analysisResults, 1); + + // Turn on errors. 
+ configOptions.diagnosticRuleSet.reportMissingTypeArgument = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['missingTypeArg1.py'], configOptions); + TestUtils.validateResults(analysisResults, 6); +}); + +test('Solver1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver3.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Solver4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver6.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Solver7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver8.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Solver9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver9.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Solver10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver12', () => { 
+ const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver12.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver15.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver16.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Solver17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver17.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver18.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver19', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver19.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver20', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver20.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver21', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver21.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver22', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver22.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver23', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver23.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Solver24', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['solver24.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver25', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver25.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver26', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver26.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver27', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver27.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver28', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver28.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver29', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver29.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver30', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver30.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver31', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver31.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver32', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver32.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver33', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver33.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver34', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver34.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Solver35', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver35.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Solver36', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver36.py']); + + 
TestUtils.validateResults(analysisResults, 1); +}); + +test('Solver37', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver37.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver38', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver38.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver39', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver39.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver40', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver40.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver41', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver41.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver42', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver42.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Solver43', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver43.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver44', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver44.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Solver45', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solver45.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverScoring1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverScoring1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverScoring2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverScoring2.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('SolverScoring3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverScoring3.py']); + + 
TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverScoring4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverScoring4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverHigherOrder1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder1.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('SolverHigherOrder2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('SolverHigherOrder3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverHigherOrder4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverHigherOrder5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder5.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('SolverHigherOrder6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverHigherOrder7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverHigherOrder8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverHigherOrder9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverHigherOrder10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder10.py']); + + 
TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverHigherOrder11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverHigherOrder12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder12.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverHigherOrder13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverHigherOrder14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverHigherOrder14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverLiteral1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverLiteral1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SolverLiteral2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverLiteral2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('SolverUnknown1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['solverUnknown1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Sentinel1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.diagnosticRuleSet.enableExperimentalFeatures = true; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['sentinel1.py'], configOptions); + TestUtils.validateResults(analysisResults, 5); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/typeEvaluator3.test.ts b/python-parser/packages/pyright-internal/src/tests/typeEvaluator3.test.ts new file mode 100644 index 00000000..91b7709e --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/typeEvaluator3.test.ts @@ -0,0 +1,1101 @@ +/* + * typeEvaluator3.test.ts + * Copyright (c) Microsoft Corporation. 
+ * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for pyright type evaluator. Tests are split + * arbitrarily among multiple files so they can run in parallel. + */ + +import { ConfigOptions } from '../common/configOptions'; +import { + pythonVersion3_10, + pythonVersion3_11, + pythonVersion3_12, + pythonVersion3_13, + pythonVersion3_9, +} from '../common/pythonVersion'; +import { Uri } from '../common/uri/uri'; +import * as TestUtils from './testUtils'; + +test('Module1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['module1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Module2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['module2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Module3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['module3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Ellipsis1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['ellipsis1.pyi']); + + TestUtils.validateResults(analysisResults, 10); +}); + +test('Generator1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator1.py']); + + TestUtils.validateResults(analysisResults, 12); +}); + +test('Generator2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator2.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Generator3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator3.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Generator4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Generator5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Generator6', () => { + const 
analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Generator7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Generator8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Generator9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator9.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Generator10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Generator11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator11.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Generator12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator12.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Generator13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Generator14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Generator15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator15.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Generator16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generator16.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Await1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['await1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Await2', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['await2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Await3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['await3.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('Coroutines1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // This functionality is deprecated in Python 3.11, so the type no longer + // exists in typing.pyi after that point. + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['coroutines1.py'], configOptions); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Coroutines2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['coroutines2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Coroutines3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // This functionality is deprecated in Python 3.11, so the type no longer + // exists in typing.pyi after that point. 
+ configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['coroutines3.py'], configOptions); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Coroutines4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['coroutines4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop1.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Loop2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop11', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['loop11.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Loop12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop12.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Loop13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop15.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop16.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop17.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop18.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop19', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop19.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop20', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop20.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop21', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop21.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop22', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop22.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop23', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop23.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + 
+test('Loop24', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop24.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop25', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop25.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop26', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop26.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop27', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop27.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop28', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop28.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop29', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop29.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop30', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop30.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop31', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop31.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Loop32', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop32.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop33', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop33.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop34', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop34.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop35', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop35.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop36', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop36.py']); + + 
TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop37', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop37.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop38', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop38.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop39', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop39.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop40', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop40.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop41', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop41.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Loop42', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop42.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop43', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop43.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop44', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop44.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop45', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop45.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop46', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop46.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop47', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop47.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop48', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop48.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop49', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['loop49.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop50', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop50.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop51', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop51.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loop52', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loop52.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ForLoop1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['forLoop1.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('ForLoop2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['forLoop2.py']); + + TestUtils.validateResults(analysisResults, 7); +}); + +test('Comprehension1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['comprehension1.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Comprehension2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['comprehension2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Comprehension3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['comprehension3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Comprehension4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['comprehension4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Comprehension5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['comprehension5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Comprehension6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['comprehension6.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Comprehension7', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['comprehension7.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Comprehension8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['comprehension8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Comprehension9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['comprehension9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Comprehension10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['comprehension10.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Comprehension11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['comprehension11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Literals1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['literals1.py']); + + TestUtils.validateResults(analysisResults, 7); +}); + +test('Literals2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['literals2.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Literals3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['literals3.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Literals4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['literals4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Literals5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['literals5.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Literals6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['literals6.py']); + + TestUtils.validateResults(analysisResults, 25); +}); + +test('Literals7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['literals7.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypeAlias1', () => { + const 
analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeAlias2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeAlias3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias3.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypeAlias4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_9; + const analysisResults3_9 = TestUtils.typeAnalyzeSampleFiles(['typeAlias4.py'], configOptions); + TestUtils.validateResults(analysisResults3_9, 1); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults3_10 = TestUtils.typeAnalyzeSampleFiles(['typeAlias4.py'], configOptions); + TestUtils.validateResults(analysisResults3_10, 12); +}); + +test('TypeAlias5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias5.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypeAlias6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias6.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('TypeAlias7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeAlias8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeAlias9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias9.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypeAlias10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias10.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('TypeAlias11', () => { + const 
analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias11.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypeAlias12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias12.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeAlias13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeAlias14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeAlias15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias15.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeAlias16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias16.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeAlias17', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['typeAlias17.py'], configOptions); + TestUtils.validateResults(analysisResults1, 4); + + configOptions.diagnosticRuleSet.reportMissingTypeArgument = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['typeAlias17.py'], configOptions); + TestUtils.validateResults(analysisResults2, 11); +}); + +test('TypeAlias18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias18.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypeAlias20', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias20.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeAlias21', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias21.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeAlias22', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['typeAlias22.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('RecursiveTypeAlias1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias1.py']); + + TestUtils.validateResults(analysisResults, 13); +}); + +test('RecursiveTypeAlias2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias2.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('RecursiveTypeAlias3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias3.py'], configOptions); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('RecursiveTypeAlias4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('RecursiveTypeAlias5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias5.pyi']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('RecursiveTypeAlias6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('RecursiveTypeAlias7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('RecursiveTypeAlias8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('RecursiveTypeAlias9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('RecursiveTypeAlias10', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias10.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('RecursiveTypeAlias11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('RecursiveTypeAlias12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias12.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('RecursiveTypeAlias13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('RecursiveTypeAlias14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias14.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('RecursiveTypeAlias15', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_12; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias15.py'], configOptions); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('RecursiveTypeAlias16', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_12; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias16.py'], configOptions); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Classes1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes1.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Classes3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes3.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Classes4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Classes5', () => 
{ + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.diagnosticRuleSet.reportIncompatibleVariableOverride = 'none'; + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes5.py'], configOptions); + TestUtils.validateResults(analysisResults, 11); + + configOptions.diagnosticRuleSet.reportIncompatibleVariableOverride = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes5.py'], configOptions); + TestUtils.validateResults(analysisResults, 35); +}); + +test('Classes6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes6.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Classes7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes7.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Classes8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Classes9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes9.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Classes10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Classes11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes11.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Methods1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['methods1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('MethodOverride1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.diagnosticRuleSet.reportIncompatibleMethodOverride = 'none'; + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['methodOverride1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + 
configOptions.diagnosticRuleSet.reportIncompatibleMethodOverride = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['methodOverride1.py'], configOptions); + TestUtils.validateResults(analysisResults, 43); +}); + +test('MethodOverride2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.diagnosticRuleSet.reportIncompatibleMethodOverride = 'none'; + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['methodOverride2.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + configOptions.diagnosticRuleSet.reportIncompatibleMethodOverride = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['methodOverride2.py'], configOptions); + TestUtils.validateResults(analysisResults, 8); +}); + +test('MethodOverride3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.diagnosticRuleSet.reportIncompatibleMethodOverride = 'none'; + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['methodOverride3.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + configOptions.diagnosticRuleSet.reportIncompatibleMethodOverride = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['methodOverride3.py'], configOptions); + TestUtils.validateResults(analysisResults, 8); +}); + +test('MethodOverride4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['methodOverride4.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('MethodOverride5', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['methodOverride5.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MethodOverride6', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.diagnosticRuleSet.reportIncompatibleMethodOverride = 'none'; + const analysisResults1 = 
TestUtils.typeAnalyzeSampleFiles(['methodOverride6.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportIncompatibleMethodOverride = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['methodOverride6.py'], configOptions); + TestUtils.validateResults(analysisResults2, 3); +}); + +test('Enum1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enum1.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Enum2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enum2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Enum3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enum3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Enum4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enum4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Enum5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enum5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Enum6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enum6.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Enum7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enum7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Enum8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enum8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Enum9', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['enum9.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.defaultPythonVersion = pythonVersion3_13; + const analysisResults2 = 
TestUtils.typeAnalyzeSampleFiles(['enum9.py'], configOptions); + TestUtils.validateResults(analysisResults2, 0); +}); + +test('Enum10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enum10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Enum11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enum11.py']); + + TestUtils.validateResults(analysisResults, 8); +}); + +test('Enum12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enum12.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Enum13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enum13.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Enum14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enum14.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('EnumAuto1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enumAuto1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('EnumGenNextValue1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enumGenNextValue1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/typeEvaluator4.test.ts b/python-parser/packages/pyright-internal/src/tests/typeEvaluator4.test.ts new file mode 100644 index 00000000..06004b0c --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/typeEvaluator4.test.ts @@ -0,0 +1,889 @@ +/* + * typeEvaluator4.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for pyright type evaluator. Tests are split + * arbitrarily among multiple files so they can run in parallel. 
+ */ + +import { ConfigOptions } from '../common/configOptions'; +import { + pythonVersion3_10, + pythonVersion3_11, + pythonVersion3_12, + pythonVersion3_13, + pythonVersion3_14, + pythonVersion3_7, + pythonVersion3_8, + pythonVersion3_9, +} from '../common/pythonVersion'; +import { Uri } from '../common/uri/uri'; +import * as TestUtils from './testUtils'; + +test('Final1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['final1.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('Final2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['final2.py']); + TestUtils.validateResults(analysisResults, 15); +}); + +test('Final3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['final3.py']); + TestUtils.validateResults(analysisResults, 41); +}); + +test('Final4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['final4.pyi']); + TestUtils.validateResults(analysisResults, 3); +}); + +test('Final5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['final5.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Final6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['final6.pyi']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('Final8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['final8.py']); + TestUtils.validateResults(analysisResults, 4); +}); + +test('InferredTypes1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['inferredTypes1.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('InferredTypes2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['inferredTypes2.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('InferredTypes3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['inferredTypes3.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('CallSite2', () => { + const 
analysisResults = TestUtils.typeAnalyzeSampleFiles(['callSite2.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('CallSite3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callSite3.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('FString1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['fstring1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 15, 1); + + configOptions.defaultPythonVersion = pythonVersion3_12; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['fstring1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 11, 1); +}); + +test('FString2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['fstring2.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('FString3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['fstring3.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('FString4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['fstring4.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('FString5', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Analyze with Python 3.7 settings. This will generate errors. + configOptions.defaultPythonVersion = pythonVersion3_7; + const analysisResults37 = TestUtils.typeAnalyzeSampleFiles(['fstring5.py'], configOptions); + TestUtils.validateResults(analysisResults37, 6); + + // Analyze with Python 3.8 settings. 
+ configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults38 = TestUtils.typeAnalyzeSampleFiles(['fstring5.py'], configOptions); + TestUtils.validateResults(analysisResults38, 0); +}); + +test('TString1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_13; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['tstring1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 11); + + configOptions.defaultPythonVersion = pythonVersion3_14; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['tstring1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 8); +}); + +test('TString2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_14; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['tstring2.py'], configOptions); + TestUtils.validateResults(analysisResults1, 1); +}); + +test('MemberAccess1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess1.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess2.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess3.py']); + TestUtils.validateResults(analysisResults, 3); +}); + +test('MemberAccess4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess4.py']); + TestUtils.validateResults(analysisResults, 5); +}); + +test('MemberAccess5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess5.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess6.py']); + TestUtils.validateResults(analysisResults, 
2); +}); + +test('MemberAccess7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess7.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess8.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess9.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess10.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('MemberAccess11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess11.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess12.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess13.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess14.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess15.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess16.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess17.py']); + TestUtils.validateResults(analysisResults, 5); +}); + +test('MemberAccess18', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['memberAccess18.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess19', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess19.py']); + TestUtils.validateResults(analysisResults, 10); +}); + +test('MemberAccess20', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess20.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('MemberAccess21', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess21.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('MemberAccess22', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess22.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess23', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess23.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess24', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess24.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess25', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess25.py']); + TestUtils.validateResults(analysisResults, 12); +}); + +test('MemberAccess26', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess26.py']); + TestUtils.validateResults(analysisResults, 3); +}); + +test('MemberAccess27', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess27.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess28', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess28.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('DataClassNamedTuple1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassNamedTuple1.py']); + + TestUtils.validateResults(analysisResults, 
6); +}); + +test('DataClassNamedTuple2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassNamedTuple2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('DataClass1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass1.py']); + + TestUtils.validateResults(analysisResults, 11); +}); + +test('DataClass2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('DataClass3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass3.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('DataClass4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass4.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('DataClass5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass5.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('DataClass6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass6.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('DataClass7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass7.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('DataClass8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('DataClass9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('DataClass10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('DataClass11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass11.py']); + + 
TestUtils.validateResults(analysisResults, 0); +}); + +test('DataClass12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass12.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('DataClass13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass13.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('DataClass14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('DataClass15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass15.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('DataClass16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass16.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('DataClass17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass17.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('DataClass18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass18.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('DataClassReplace1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_12; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['dataclassReplace1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 10); + + configOptions.defaultPythonVersion = pythonVersion3_13; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['dataclassReplace1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 4); +}); + +test('DataClassFrozen1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassFrozen1.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('DataClassKwOnly1', () => { + const configOptions = new 
ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassKwOnly1.py'], configOptions); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('DataClassSlots1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassSlots1.py'], configOptions); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('DataClassHash1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassHash1.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('DataClassDescriptors1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassDescriptors1.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('DataClassDescriptors2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassDescriptors2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('DataClassConverter1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassConverter1.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('DataClassConverter2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassConverter2.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('DataClassConverter3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassConverter3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('DataClassPostInit1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassPostInit1.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('InitVar1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['initVar1.py']); + + TestUtils.validateResults(analysisResults, 2, 1); +}); + 
+test('Callable1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callable1.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Callable2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callable2.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Callable3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callable3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Callable4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callable4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Callable5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callable5.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Callable6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callable6.py']); + + TestUtils.validateResults(analysisResults, 9); +}); + +test('Callable7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callable7.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Generic1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generic1.py']); + + TestUtils.validateResults(analysisResults, 9); +}); + +test('Generic2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generic2.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Generic3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generic3.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Unions1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.diagnosticRuleSet.disableBytesTypePromotions = true; + + // Analyze with Python 3.9 settings. This will generate errors. 
+ configOptions.defaultPythonVersion = pythonVersion3_9; + const analysisResults3_9 = TestUtils.typeAnalyzeSampleFiles(['unions1.py'], configOptions); + TestUtils.validateResults(analysisResults3_9, 11); + + // Analyze with Python 3.10 settings. + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults3_10 = TestUtils.typeAnalyzeSampleFiles(['unions1.py'], configOptions); + TestUtils.validateResults(analysisResults3_10, 0); +}); + +test('Unions2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Analyze with Python 3.8 settings. + configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults38 = TestUtils.typeAnalyzeSampleFiles(['unions2.py'], configOptions); + TestUtils.validateResults(analysisResults38, 0); +}); + +test('Unions3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Analyze with Python 3.9 settings. This will generate errors. + configOptions.defaultPythonVersion = pythonVersion3_9; + const analysisResults3_9 = TestUtils.typeAnalyzeSampleFiles(['unions3.py'], configOptions); + TestUtils.validateResults(analysisResults3_9, 1); + + // Analyze with Python 3.10 settings. 
+ configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults3_10 = TestUtils.typeAnalyzeSampleFiles(['unions3.py'], configOptions); + TestUtils.validateResults(analysisResults3_10, 0); +}); + +test('Unions4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unions4.py']); + + TestUtils.validateResults(analysisResults, 7); +}); + +test('Unions5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unions5.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('Unions6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unions6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ParamSpec1', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec1.py']); + TestUtils.validateResults(results, 9); +}); + +test('ParamSpec2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_9; + const analysisResults39 = TestUtils.typeAnalyzeSampleFiles(['paramSpec2.py'], configOptions); + TestUtils.validateResults(analysisResults39, 9); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults310 = TestUtils.typeAnalyzeSampleFiles(['paramSpec2.py'], configOptions); + TestUtils.validateResults(analysisResults310, 0); +}); + +test('ParamSpec3', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec3.py']); + TestUtils.validateResults(results, 3); +}); + +test('ParamSpec4', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec4.py']); + TestUtils.validateResults(results, 10); +}); + +test('ParamSpec5', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec5.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec6', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec6.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec7', () => { + const results = 
TestUtils.typeAnalyzeSampleFiles(['paramSpec7.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec8', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec8.py']); + TestUtils.validateResults(results, 7); +}); + +test('ParamSpec9', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec9.py']); + TestUtils.validateResults(results, 14); +}); + +test('ParamSpec10', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec10.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec11', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec11.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec12', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec12.py']); + TestUtils.validateResults(results, 14); +}); + +test('ParamSpec13', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec13.py']); + TestUtils.validateResults(results, 11); +}); + +test('ParamSpec14', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec14.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec15', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec15.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec16', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec16.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec17', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec17.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec18', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec18.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec19', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec19.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec20', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec20.py']); + 
TestUtils.validateResults(results, 8); +}); + +test('ParamSpec21', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec21.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec22', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec22.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec23', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec23.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec24', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec24.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec25', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec25.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec26', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec26.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec27', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec27.py']); + TestUtils.validateResults(results, 2); +}); + +test('ParamSpec28', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec28.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec29', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec29.py']); + TestUtils.validateResults(results, 3); +}); + +test('ParamSpec30', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec30.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec31', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec31.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec32', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec32.py']); + TestUtils.validateResults(results, 4); +}); + +test('ParamSpec33', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec33.py']); + TestUtils.validateResults(results, 4); +}); + 
+test('ParamSpec34', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec34.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec35', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec35.py']); + TestUtils.validateResults(results, 1); +}); + +test('ParamSpec36', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec36.py']); + TestUtils.validateResults(results, 3); +}); + +test('ParamSpec37', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec37.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec38', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec38.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec39', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec39.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec40', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec40.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec41', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec41.py']); + TestUtils.validateResults(results, 1); +}); + +test('ParamSpec42', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec42.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec43', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec43.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec44', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec44.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec45', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec45.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec46', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec46.py']); + TestUtils.validateResults(results, 2); +}); + +test('ParamSpec47', () => { + const results = 
TestUtils.typeAnalyzeSampleFiles(['paramSpec47.py']); + TestUtils.validateResults(results, 3); +}); + +test('ParamSpec48', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec48.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec49', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec49.py']); + TestUtils.validateResults(results, 8); +}); + +test('ParamSpec50', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec50.py']); + TestUtils.validateResults(results, 2); +}); + +test('ParamSpec51', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec51.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec52', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec52.py']); + TestUtils.validateResults(results, 2); +}); + +test('ParamSpec53', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec53.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec54', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec54.py']); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec55', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec55.py']); + TestUtils.validateResults(results, 1); +}); + +test('Slice1', () => { + const results = TestUtils.typeAnalyzeSampleFiles(['slice1.py']); + TestUtils.validateResults(results, 0); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/typeEvaluator5.test.ts b/python-parser/packages/pyright-internal/src/tests/typeEvaluator5.test.ts new file mode 100644 index 00000000..bf35f9c0 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/typeEvaluator5.test.ts @@ -0,0 +1,439 @@ +/* + * typeEvaluator5.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for pyright type evaluator. 
Tests are split + * arbitrarily among multiple files so they can run in parallel. + */ + +import { ConfigOptions } from '../common/configOptions'; +import { pythonVersion3_11, pythonVersion3_12, pythonVersion3_13 } from '../common/pythonVersion'; +import { Uri } from '../common/uri/uri'; +import * as TestUtils from './testUtils'; + +test('TypeParams1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeParams1.py'], configOptions); + TestUtils.validateResults(analysisResults, 8); +}); + +test('TypeParams2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['typeParams2.py'], configOptions); + TestUtils.validateResults(analysisResults1, 2); + + configOptions.defaultPythonVersion = pythonVersion3_12; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['typeParams2.py'], configOptions); + TestUtils.validateResults(analysisResults2, 0); +}); + +test('TypeParams3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeParams3.py'], configOptions); + TestUtils.validateResults(analysisResults, 8); +}); + +test('TypeParams4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeParams4.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypeParams5', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeParams5.py'], configOptions); + 
TestUtils.validateResults(analysisResults, 9); +}); + +test('TypeParams6', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeParams6.py'], configOptions); + TestUtils.validateResults(analysisResults, 3); +}); + +test('TypeParams7', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeParams7.py'], configOptions); + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypeParams8', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeParams8.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('AutoVariance1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['autoVariance1.py'], configOptions); + TestUtils.validateResults(analysisResults, 17); +}); + +test('AutoVariance2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['autoVariance2.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('AutoVariance3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['autoVariance3.py'], configOptions); + TestUtils.validateResults(analysisResults, 18); +}); + +test('AutoVariance4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const 
analysisResults = TestUtils.typeAnalyzeSampleFiles(['autoVariance4.py'], configOptions); + TestUtils.validateResults(analysisResults, 4); +}); + +test('AutoVariance5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['autoVariance5.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeAliasStatement1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAliasStatement1.py'], configOptions); + TestUtils.validateResults(analysisResults, 10); +}); + +test('TypeAliasStatement2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['typeAliasStatement2.py'], configOptions); + TestUtils.validateResults(analysisResults1, 1); + + configOptions.defaultPythonVersion = pythonVersion3_12; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['typeAliasStatement2.py'], configOptions); + TestUtils.validateResults(analysisResults2, 0); +}); + +test('TypeAliasStatement3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAliasStatement3.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypeAliasStatement4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAliasStatement4.py'], configOptions); + TestUtils.validateResults(analysisResults, 6); +}); + +test('TypeAliasStatement5', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['typeAliasStatement5.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Hashability1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['hashability1.py']); + TestUtils.validateResults(analysisResults, 10); +}); + +test('Hashability2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['hashability2.py']); + TestUtils.validateResults(analysisResults, 6); +}); + +test('Hashability3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['hashability3.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('Override1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['override1.py']); + TestUtils.validateResults(analysisResults, 5); +}); + +test('Override2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['override2.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportImplicitOverride = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['override2.py'], configOptions); + TestUtils.validateResults(analysisResults2, 2); +}); + +test('TypeVarDefault1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefault1.py']); + TestUtils.validateResults(analysisResults, 14); +}); + +test('TypeVarDefault2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_13; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefault2.py'], configOptions); + TestUtils.validateResults(analysisResults, 24); +}); + +test('TypeVarDefault3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_13; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefault3.py'], configOptions); + 
TestUtils.validateResults(analysisResults, 8); +}); + +test('TypeVarDefault4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_13; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefault4.py'], configOptions); + TestUtils.validateResults(analysisResults, 3); +}); + +test('TypeVarDefault5', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_13; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefault5.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarDefaultClass1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefaultClass1.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarDefaultClass2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_13; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefaultClass2.py'], configOptions); + TestUtils.validateResults(analysisResults, 10); +}); + +test('TypeVarDefaultClass3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_13; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefaultClass3.py'], configOptions); + TestUtils.validateResults(analysisResults, 9); +}); + +test('TypeVarDefaultClass4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_13; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefaultClass4.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarDefaultTypeAlias1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefaultTypeAlias1.py']); + TestUtils.validateResults(analysisResults, 0); +}); + 
+test('TypeVarDefaultTypeAlias2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefaultTypeAlias2.py']); + TestUtils.validateResults(analysisResults, 11); +}); + +test('TypeVarDefaultTypeAlias3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_13; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefaultTypeAlias3.py'], configOptions); + TestUtils.validateResults(analysisResults, 10); +}); + +test('TypeVarDefaultFunction1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefaultFunction1.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarDefaultFunction2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefaultFunction2.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypeVarDefaultFunction3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_13; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarDefaultFunction3.py'], configOptions); + TestUtils.validateResults(analysisResults, 1); +}); + +test('FutureImport1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['futureImport1.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('FutureImport2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['futureImport2.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('FutureImport3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['futureImport3.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('Conditional1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['conditional1.py']); + TestUtils.validateResults(analysisResults, 15); +}); + +test('TypePrinter1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typePrinter1.py']); 
+ TestUtils.validateResults(analysisResults, 0); +}); + +test('TypePrinter3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typePrinter3.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypeAliasType1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.defaultPythonVersion = pythonVersion3_12; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAliasType1.py'], configOptions); + TestUtils.validateResults(analysisResults, 17); +}); + +test('TypeAliasType2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAliasType2.py']); + TestUtils.validateResults(analysisResults, 7); +}); + +test('TypedDictReadOnly1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDictReadOnly1.py']); + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypedDictReadOnly2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDictReadOnly2.py']); + TestUtils.validateResults(analysisResults, 17); +}); + +test('TypedDictClosed1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDictClosed1.py']); + TestUtils.validateResults(analysisResults, 7); +}); + +test('TypedDictClosed2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDictClosed2.py']); + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypedDictClosed3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDictClosed3.py']); + TestUtils.validateResults(analysisResults, 13); +}); + +test('TypedDictClosed4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDictClosed4.py']); + TestUtils.validateResults(analysisResults, 5); +}); + +test('TypedDictClosed5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDictClosed5.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypedDictClosed6', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['typedDictClosed6.py']); + TestUtils.validateResults(analysisResults, 8); +}); + +test('TypedDictClosed7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDictClosed7.py']); + TestUtils.validateResults(analysisResults, 6); +}); + +test('TypedDictClosed8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDictClosed8.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypedDictClosed9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDictClosed9.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypedDictClosed10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDictClosed10.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('DataclassTransform1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassTransform1.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('DataclassTransform2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassTransform2.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('DataclassTransform3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassTransform3.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('DataclassTransform4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassTransform4.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Async1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['async1.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('TypeCheckOnly1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeCheckOnly1.py']); + TestUtils.validateResults(analysisResults, 4); +}); + +test('NoTypeCheck1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['noTypeCheck1.py']); + 
TestUtils.validateResults(analysisResults, 2); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/typeEvaluator6.test.ts b/python-parser/packages/pyright-internal/src/tests/typeEvaluator6.test.ts new file mode 100644 index 00000000..577550e3 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/typeEvaluator6.test.ts @@ -0,0 +1,1020 @@ +/* + * typeEvaluator6.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for pyright type evaluator. Tests are split + * arbitrarily among multiple files so they can run in parallel. + */ + +import { ConfigOptions } from '../common/configOptions'; +import { pythonVersion3_10, pythonVersion3_11, pythonVersion3_12, pythonVersion3_8 } from '../common/pythonVersion'; +import { Uri } from '../common/uri/uri'; +import * as TestUtils from './testUtils'; + +test('Overload1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overload1.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('Overload2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overload2.py']); + TestUtils.validateResults(analysisResults, 3); +}); + +test('Overload3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overload3.py']); + TestUtils.validateResults(analysisResults, 3); +}); + +test('Overload4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overload4.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Overload5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overload5.py']); + TestUtils.validateResults(analysisResults, 6); +}); + +test('OverloadCall1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadCall1.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('OverloadCall2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadCall2.py']); + 
TestUtils.validateResults(analysisResults, 0); +}); + +test('OverloadCall3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadCall3.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('OverloadCall4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadCall4.py']); + TestUtils.validateResults(analysisResults, 4); +}); + +test('OverloadCall5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadCall5.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('OverloadCall6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadCall6.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('OverloadCall7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadCall7.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('OverloadCall8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadCall8.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('OverloadCall9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadCall9.py']); + TestUtils.validateResults(analysisResults, 8); +}); + +test('OverloadCall10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadCall10.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('OverloadOverride1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadOverride1.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('OverloadImpl1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadImpl1.py']); + TestUtils.validateResults(analysisResults, 6); +}); + +test('OverloadImpl2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadImpl2.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('OverloadOverlap1', () => { + const configOptions = new 
ConfigOptions(Uri.empty()); + + configOptions.diagnosticRuleSet.reportOverlappingOverload = 'none'; + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadOverlap1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + configOptions.diagnosticRuleSet.reportOverlappingOverload = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['overloadOverlap1.py'], configOptions); + TestUtils.validateResults(analysisResults, 16); +}); + +test('TypeGuard1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeGuard1.py']); + + TestUtils.validateResults(analysisResults, 8); +}); + +test('TypeGuard2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeGuard2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeGuard3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeGuard3.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeIs1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIs1.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypeIs2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIs2.py']); + TestUtils.validateResults(analysisResults, 9); +}); + +test('TypeIs3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIs3.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeIs4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIs4.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Never1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['never1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Never2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['never2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypePromotions1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + 
configOptions.diagnosticRuleSet.disableBytesTypePromotions = false; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typePromotions1.py'], configOptions); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Index1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['index1.py']); + + TestUtils.validateResults(analysisResults, 10); +}); + +test('ProtocolModule2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocolModule2.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('ProtocolModule4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocolModule4.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypeVarTuple1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple1.py'], configOptions); + TestUtils.validateResults(analysisResults, 18); +}); + +test('TypeVarTuple2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple2.py'], configOptions); + TestUtils.validateResults(analysisResults, 16); +}); + +test('TypeVarTuple3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple3.py'], configOptions); + TestUtils.validateResults(analysisResults, 6); +}); + +test('TypeVarTuple4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple4.py'], configOptions); + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypeVarTuple5', () => { + const configOptions = new 
ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple5.py'], configOptions); + TestUtils.validateResults(analysisResults, 9); +}); + +test('TypeVarTuple6', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple6.py'], configOptions); + TestUtils.validateResults(analysisResults, 10); +}); + +test('TypeVarTuple7', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple7.py'], configOptions); + TestUtils.validateResults(analysisResults, 6); +}); + +test('TypeVarTuple8', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple8.py'], configOptions); + TestUtils.validateResults(analysisResults, 3); +}); + +test('TypeVarTuple9', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple9.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarTuple10', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple10.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypeVarTuple11', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple11.py'], configOptions); + 
TestUtils.validateResults(analysisResults, 4); +}); + +test('TypeVarTuple12', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple12.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarTuple13', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple13.py'], configOptions); + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypeVarTuple14', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple14.py'], configOptions); + TestUtils.validateResults(analysisResults, 14); +}); + +test('TypeVarTuple15', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple15.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarTuple16', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple16.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarTuple17', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple17.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarTuple18', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = 
pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple18.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypeVarTuple19', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple19.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarTuple20', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple20.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarTuple21', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple21.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarTuple22', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple22.py'], configOptions); + TestUtils.validateResults(analysisResults, 3); +}); + +test('TypeVarTuple23', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple23.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarTuple24', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple24.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + 
+test('TypeVarTuple25', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple25.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarTuple26', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple26.py'], configOptions); + TestUtils.validateResults(analysisResults, 3); +}); + +test('TypeVarTuple27', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple27.py'], configOptions); + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypeVarTuple28', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple28.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarTuple29', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_12; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple29.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVarTuple30', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_12; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVarTuple30.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Match1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['match1.py'], configOptions); + TestUtils.validateResults(analysisResults, 21); +}); + +test('Match2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['match2.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('Match3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['match3.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MatchSequence1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchSequence1.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('MatchSequence2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_12; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchSequence2.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MatchClass1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchClass1.py'], configOptions); + TestUtils.validateResults(analysisResults, 7); +}); + +test('MatchClass2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchClass2.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MatchClass3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + 
configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchClass3.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MatchClass4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchClass4.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MatchClass5', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchClass5.py'], configOptions); + TestUtils.validateResults(analysisResults, 5); +}); + +test('MatchClass6', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchClass6.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MatchClass7', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchClass7.py'], configOptions); + TestUtils.validateResults(analysisResults, 1); +}); + +test('MatchClass8', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchClass8.py'], configOptions); + TestUtils.validateResults(analysisResults, 3); +}); + +test('MatchValue1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchValue1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + 
+test('MatchMapping1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchMapping1.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('MatchLiteral1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchLiteral1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MatchLiteral2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['matchLiteral2.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MatchExhaustion1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + configOptions.diagnosticRuleSet.reportMatchNotExhaustive = 'none'; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['matchExhaustion1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportMatchNotExhaustive = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['matchExhaustion1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 4); +}); + +test('MatchUnnecessary1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['matchUnnecessary1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportUnnecessaryComparison = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['matchUnnecessary1.py'], configOptions); + 
TestUtils.validateResults(analysisResults2, 7); +}); + +test('List1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['list1.py']); + TestUtils.validateResults(analysisResults, 3); +}); + +test('List2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['list2.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('List3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['list3.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Comparison1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['comparison1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportUnnecessaryComparison = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['comparison1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 7); +}); + +test('Comparison2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['comparison2.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportUnnecessaryComparison = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['comparison2.py'], configOptions); + TestUtils.validateResults(analysisResults2, 18); +}); + +test('EmptyContainers1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['emptyContainers1.py']); + TestUtils.validateResults(analysisResults, 5); +}); + +test('InitSubclass1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['initsubclass1.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('InitSubclass2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['initsubclass2.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('InitSubclass3', () => { + const 
analysisResults = TestUtils.typeAnalyzeSampleFiles(['initsubclass3.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('None1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['none1.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('None2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['none2.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Constructor1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor4.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Constructor5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor6.py']); + + TestUtils.validateResults(analysisResults, 0, 1); +}); + +test('Constructor7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor11', () => { 
+ const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor12.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor13.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Constructor14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor15.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor16.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Constructor17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor17.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor18.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor19', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor19.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Constructor20', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor20.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Constructor21', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor21.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Constructor22', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor22.py']); + + 
TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor23', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor23.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor24', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.diagnosticRuleSet.strictParameterNoneValue = false; + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor24.py'], configOptions); + TestUtils.validateResults(analysisResults, 4); + + configOptions.diagnosticRuleSet.strictParameterNoneValue = true; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor24.py'], configOptions); + TestUtils.validateResults(analysisResults, 5); +}); + +test('Constructor25', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor25.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Constructor26', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor26.py']); + + TestUtils.validateResults(analysisResults, 8); +}); + +test('Constructor27', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor27.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor28', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor28.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Constructor29', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor29.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor30', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor30.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor31', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor31.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor32', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['constructor32.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Constructor33', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor33.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ConstructorCallable1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructorCallable1.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('ConstructorCallable2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructorCallable2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('InconsistentConstructor1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.diagnosticRuleSet.reportInconsistentConstructor = 'none'; + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['inconsistentConstructor1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Enable it as an error. + configOptions.diagnosticRuleSet.reportInconsistentConstructor = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['inconsistentConstructor1.py'], configOptions); + TestUtils.validateResults(analysisResults, 3); +}); + +test('ClassGetItem1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classGetItem1.py']); + + TestUtils.validateResults(analysisResults, 0, 1); +}); + +test('UnusedCallResult1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // By default, this is disabled. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['unusedCallResult1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Enable it as an error. 
+ configOptions.diagnosticRuleSet.reportUnusedCallResult = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['unusedCallResult1.py'], configOptions); + TestUtils.validateResults(analysisResults, 4); +}); + +test('UnusedCoroutine1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unusedCoroutine1.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('FunctionAnnotation1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['functionAnnotation1.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('FunctionAnnotation2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['functionAnnotation2.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('FunctionAnnotation3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['functionAnnotation3.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('FunctionAnnotation4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['functionAnnotation4.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportTypeCommentUsage = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['functionAnnotation4.py'], configOptions); + TestUtils.validateResults(analysisResults2, 3); +}); + +test('Subscript1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Analyze with Python 3.8 settings. + configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults38 = TestUtils.typeAnalyzeSampleFiles(['subscript1.py'], configOptions); + TestUtils.validateResults(analysisResults38, 14); + + // Analyze with Python 3.8 settings. 
+ configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults39 = TestUtils.typeAnalyzeSampleFiles(['subscript1.py'], configOptions); + TestUtils.validateResults(analysisResults39, 0); +}); + +test('Subscript2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['subscript2.py']); + TestUtils.validateResults(analysisResults, 8); +}); + +test('Subscript3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['subscript3.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Subscript4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['subscript4.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Decorator1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['decorator1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Decorator2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['decorator2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Decorator3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Analyze with Python 3.8 settings. + configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults38 = TestUtils.typeAnalyzeSampleFiles(['decorator3.py'], configOptions); + TestUtils.validateResults(analysisResults38, 3); + + // Analyze with Python 3.8 settings. 
+ configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults39 = TestUtils.typeAnalyzeSampleFiles(['decorator3.py'], configOptions); + TestUtils.validateResults(analysisResults39, 0); +}); + +test('Decorator4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['decorator4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Decorator5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['decorator5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Decorator6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['decorator6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Decorator7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['decorator7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/typeEvaluator7.test.ts b/python-parser/packages/pyright-internal/src/tests/typeEvaluator7.test.ts new file mode 100644 index 00000000..22315e91 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/typeEvaluator7.test.ts @@ -0,0 +1,1054 @@ +/* + * typeEvaluator7.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for pyright type evaluator. Tests are split + * arbitrarily among multiple files so they can run in parallel. 
+ */ + +import { ConfigOptions } from '../common/configOptions'; +import { + pythonVersion3_10, + pythonVersion3_11, + pythonVersion3_12, + pythonVersion3_13, + pythonVersion3_14, + pythonVersion3_8, +} from '../common/pythonVersion'; +import { Uri } from '../common/uri/uri'; +import * as TestUtils from './testUtils'; + +test('GenericType1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('GenericType2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType3.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('GenericType4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType4.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('GenericType5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType5.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('GenericType6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType6.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('GenericType7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType7.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('GenericType8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType8.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('GenericType9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType9.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('GenericType10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType10.py']); + + 
TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType12.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType15.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType16.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType17.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType18.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('GenericType19', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType19.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType20', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType20.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('GenericType21', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType21.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType22', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['genericType22.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('GenericType23', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType23.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType24', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType24.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType25', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType25.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('GenericType26', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType26.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('GenericType27', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType27.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType28', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType28.py']); + + TestUtils.validateResults(analysisResults, 18); +}); + +test('GenericType29', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType29.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType30', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType30.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType31', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType31.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('GenericType32', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType32.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType33', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType33.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + 
+test('GenericType34', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType34.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType35', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType35.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('GenericType36', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType36.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType37', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType37.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType38', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType38.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType39', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType39.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType40', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType40.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType41', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType41.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType42', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType42.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType43', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType43.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType44', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType44.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType45', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType45.py']); + + 
TestUtils.validateResults(analysisResults, 6); +}); + +test('GenericType46', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType46.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericType47', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericType47.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol1.py']); + + TestUtils.validateResults(analysisResults, 9); +}); + +test('Protocol2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol3.py']); + + TestUtils.validateResults(analysisResults, 13); +}); + +test('Protocol4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol4.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Protocol5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol6.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Protocol7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol7.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Protocol8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol8.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Protocol9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol10.py']); + + 
TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol12.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Protocol13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol15.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol16.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Protocol17', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.diagnosticRuleSet.reportInvalidTypeVarUse = 'error'; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol17.py']); + + TestUtils.validateResults(analysisResults, 7); +}); + +test('Protocol18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol18.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Protocol19', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol19.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Protocol20', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol20.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol21', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol21.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + 
+test('Protocol22', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.diagnosticRuleSet.reportInvalidTypeVarUse = 'error'; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol22.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol23', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol23.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Protocol24', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol24.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('Protocol25', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol25.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Protocol26', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol26.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol28', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol28.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol29', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol29.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol30', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol30.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Protocol31', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol31.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol32', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol32.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Protocol33', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol33.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol34', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['protocol34.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Protocol35', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol35.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Protocol36', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol36.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol37', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol37.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol38', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol38.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol39', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol39.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol40', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol40.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol41', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol41.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol42', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol42.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol43', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol43.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol44', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol44.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol45', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol45.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol46', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['protocol46.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol47', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol47.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Protocol48', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol48.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol49', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol49.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol50', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol50.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol51', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol51.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol52', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol52.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol53', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Note: This test exposes some inconsistencies between override checks + // and protocol matching. Both of these should generate 8 errors. 
+ configOptions.diagnosticRuleSet.reportAssignmentType = 'none'; + configOptions.diagnosticRuleSet.reportIncompatibleMethodOverride = 'error'; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['protocol53.py'], configOptions); + TestUtils.validateResults(analysisResults1, 10); + + configOptions.diagnosticRuleSet.reportAssignmentType = 'error'; + configOptions.diagnosticRuleSet.reportIncompatibleMethodOverride = 'none'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['protocol53.py'], configOptions); + TestUtils.validateResults(analysisResults2, 8); +}); + +test('ProtocolExplicit1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocolExplicit1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('ProtocolExplicit3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocolExplicit3.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('TypedDict1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict1.py']); + + TestUtils.validateResults(analysisResults, 11); +}); + +test('TypedDict2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict2.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypedDict3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict3.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypedDict4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict4.py']); + + TestUtils.validateResults(analysisResults, 7); +}); + +test('TypedDict5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict5.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypedDict6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict6.py']); + + TestUtils.validateResults(analysisResults, 15); +}); + +test('TypedDict7', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['typedDict7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypedDict8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict8.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypedDict9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict9.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypedDict10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict10.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('TypedDict11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypedDict12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict12.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('TypedDict13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict13.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypedDict14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict14.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypedDict15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict15.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypedDict16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict16.py']); + + TestUtils.validateResults(analysisResults, 7); +}); + +test('TypedDict17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict17.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypedDict18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict18.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('TypedDict19', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['typedDict19.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypedDict20', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict20.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypedDict21', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict21.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypedDict22', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict22.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypedDict23', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict23.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypedDict24', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict24.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('TypedDict25', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict25.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypedDictInline1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.diagnosticRuleSet.enableExperimentalFeatures = true; + + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDictInline1.py'], configOptions); + TestUtils.validateResults(analysisResults, 6); +}); + +test('ClassVar1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classVar1.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('ClassVar2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classVar2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('ClassVar3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classVar3.py']); + + TestUtils.validateResults(analysisResults, 13); +}); + +test('ClassVar4', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['classVar4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ClassVar5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classVar5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ClassVar6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classVar6.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('ClassVar7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classVar7.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypeVar1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVar1.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('TypeVar2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVar2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVar3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVar3.py']); + + TestUtils.validateResults(analysisResults, 12); +}); + +test('TypeVar4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVar4.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypeVar5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVar5.py']); + + TestUtils.validateResults(analysisResults, 18); +}); + +test('TypeVar6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVar6.py']); + + TestUtils.validateResults(analysisResults, 20); +}); + +test('TypeVar7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVar7.py']); + + TestUtils.validateResults(analysisResults, 26); +}); + +test('TypeVar8', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_12; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['typeVar8.py'], configOptions); + 
TestUtils.validateResults(analysisResults1, 4); + + configOptions.defaultPythonVersion = pythonVersion3_13; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['typeVar8.py'], configOptions); + TestUtils.validateResults(analysisResults2, 2); +}); + +test('TypeVar9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVar9.py']); + + TestUtils.validateResults(analysisResults, 13); +}); + +test('TypeVar10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVar10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeVar11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVar11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Annotated1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_8; + const analysisResults38 = TestUtils.typeAnalyzeSampleFiles(['annotated1.py'], configOptions); + TestUtils.validateResults(analysisResults38, 5); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults39 = TestUtils.typeAnalyzeSampleFiles(['annotated1.py'], configOptions); + TestUtils.validateResults(analysisResults39, 3); +}); + +test('Annotated2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotated2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Circular1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // This test requires 3.13 or older because 3.14 uses deferred + // type annotation evaluation. 
+ configOptions.defaultPythonVersion = pythonVersion3_13; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['circular1.py'], configOptions); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Circular2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['circular2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TryExcept1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tryExcept1.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('TryExcept2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tryExcept2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TryExcept3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tryExcept3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TryExcept4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tryExcept4.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('TryExcept5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tryExcept5.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('TryExcept6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tryExcept6.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('TryExcept8', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tryExcept8.py'], configOptions); + TestUtils.validateResults(analysisResults, 3); +}); + +test('TryExcept9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tryExcept9.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TryExcept10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tryExcept10.py']); + TestUtils.validateResults(analysisResults, 1); +}); + 
+test('TryExcept11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tryExcept11.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TryExcept12', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_13; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['tryExcept12.py'], configOptions); + TestUtils.validateResults(analysisResults1, 3); + + configOptions.defaultPythonVersion = pythonVersion3_14; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['tryExcept12.py'], configOptions); + TestUtils.validateResults(analysisResults2, 1); +}); + +test('exceptionGroup1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['exceptionGroup1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 34); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['exceptionGroup1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 10); +}); + +test('Del1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['del1.py']); + TestUtils.validateResults(analysisResults, 6); +}); + +test('Del2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['del2.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Any1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['any1.py']); + + TestUtils.validateResults(analysisResults, 8); +}); + +test('Type1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['type1.py']); + + TestUtils.validateResults(analysisResults, 8); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/typeEvaluator8.test.ts b/python-parser/packages/pyright-internal/src/tests/typeEvaluator8.test.ts new file mode 100644 index 
00000000..09e59fd1 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/typeEvaluator8.test.ts @@ -0,0 +1,1026 @@ +/* + * typeEvaluator8.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for pyright type evaluator. Tests are split + * arbitrarily among multiple files so they can run in parallel. + */ + +import * as assert from 'assert'; + +import { ConfigOptions } from '../common/configOptions'; +import { pythonVersion3_10, pythonVersion3_11, pythonVersion3_8 } from '../common/pythonVersion'; +import { Uri } from '../common/uri/uri'; +import * as TestUtils from './testUtils'; + +test('Import1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['import1.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Import2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['import2.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('Import4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['import4.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('Import6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['import6.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('Import7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['import7.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('Import9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['import9.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Import10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['import10.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('Import11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['import11.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Import12', () => { + const configOptions = new 
ConfigOptions(Uri.empty()); + + // By default, optional diagnostics are ignored. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['import12.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 2); + + // Turn on error. + configOptions.diagnosticRuleSet.reportWildcardImportFromLibrary = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['import12.py'], configOptions); + TestUtils.validateResults(analysisResults, 2, 0); + + // Turn off diagnostic. + configOptions.diagnosticRuleSet.reportWildcardImportFromLibrary = 'none'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['import12.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 0); +}); + +test('Import14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['import14.py', 'import13.py']); + + assert.strictEqual(analysisResults.length, 2); + assert.strictEqual(analysisResults[0].errors.length, 0); + assert.strictEqual(analysisResults[1].errors.length, 0); +}); + +test('Import15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['import15.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Import16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['import16.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Import18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['import18.py']); + TestUtils.validateResults(analysisResults, 2); +}); + +test('DunderAll1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // By default, reportUnsupportedDunderAll is a warning. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['dunderAll1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 7); + + // Turn on error. 
+ configOptions.diagnosticRuleSet.reportUnsupportedDunderAll = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['dunderAll1.py'], configOptions); + TestUtils.validateResults(analysisResults, 7, 0); + + // Turn off diagnostic. + configOptions.diagnosticRuleSet.reportUnsupportedDunderAll = 'none'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['dunderAll1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 0); +}); + +test('DunderAll2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // By default, reportUnsupportedDunderAll is a warning. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['dunderAll2.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 3); + + // Turn on error. + configOptions.diagnosticRuleSet.reportUnsupportedDunderAll = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['dunderAll2.py'], configOptions); + TestUtils.validateResults(analysisResults, 3, 0); + + // Turn off diagnostic. + configOptions.diagnosticRuleSet.reportUnsupportedDunderAll = 'none'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['dunderAll2.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 0); +}); + +test('DunderAll3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Turn on error. 
+ configOptions.diagnosticRuleSet.reportUnsupportedDunderAll = 'error'; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dunderAll3.pyi'], configOptions); + TestUtils.validateResults(analysisResults, 0, 0); +}); + +test('CodeFlow1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['codeFlow1.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('CodeFlow2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['codeFlow2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('CodeFlow3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['codeFlow3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('CodeFlow4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['codeFlow4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('CodeFlow5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['codeFlow5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('CodeFlow6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['codeFlow6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('CodeFlow7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['codeFlow7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('CodeFlow8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['codeFlow8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('CodeFlow9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['codeFlow9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('CapturedVariable1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['capturedVariable1.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('CapturedVariable2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['capturedVariable2.py']); + + 
TestUtils.validateResults(analysisResults, 2); +}); + +test('Property1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Property2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property2.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Property3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property3.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Property4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Property5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Property6', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Analyze with reportPropertyTypeMismatch enabled. + configOptions.diagnosticRuleSet.reportPropertyTypeMismatch = 'error'; + const analysisResult1 = TestUtils.typeAnalyzeSampleFiles(['property6.py'], configOptions); + TestUtils.validateResults(analysisResult1, 2); + + // Analyze with reportPropertyTypeMismatch disabled. 
+ configOptions.diagnosticRuleSet.reportPropertyTypeMismatch = 'none'; + const analysisResult2 = TestUtils.typeAnalyzeSampleFiles(['property6.py'], configOptions); + TestUtils.validateResults(analysisResult2, 0); +}); + +test('Property7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property7.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Property8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property8.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Property9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Property10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Property11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property11.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Property12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property12.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Property13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Property14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Property15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property15.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Property16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property16.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Property17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property17.py']); + + 
TestUtils.validateResults(analysisResults, 0); +}); + +test('Property18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['property18.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Operator1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operator1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Operator2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operator2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Operator3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operator3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Operator4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operator4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Operator5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operator5.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Operator6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operator6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Operator7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operator7.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Operator8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operator8.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Operator9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operator9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Operator10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operator10.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Operator11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operator11.py']); + + 
TestUtils.validateResults(analysisResults, 0); +}); + +test('Operator12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operator12.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Optional1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Disable diagnostics. + configOptions.diagnosticRuleSet.reportOptionalSubscript = 'none'; + configOptions.diagnosticRuleSet.reportOptionalMemberAccess = 'none'; + configOptions.diagnosticRuleSet.reportOptionalCall = 'none'; + configOptions.diagnosticRuleSet.reportOptionalIterable = 'none'; + configOptions.diagnosticRuleSet.reportOptionalContextManager = 'none'; + configOptions.diagnosticRuleSet.reportOptionalOperand = 'none'; + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['optional1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Turn on warnings. + configOptions.diagnosticRuleSet.reportOptionalSubscript = 'warning'; + configOptions.diagnosticRuleSet.reportOptionalMemberAccess = 'warning'; + configOptions.diagnosticRuleSet.reportOptionalCall = 'warning'; + configOptions.diagnosticRuleSet.reportOptionalIterable = 'warning'; + configOptions.diagnosticRuleSet.reportOptionalContextManager = 'warning'; + configOptions.diagnosticRuleSet.reportOptionalOperand = 'warning'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['optional1.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 8); + + // Turn on errors. 
+ configOptions.diagnosticRuleSet.reportOptionalSubscript = 'error'; + configOptions.diagnosticRuleSet.reportOptionalMemberAccess = 'error'; + configOptions.diagnosticRuleSet.reportOptionalCall = 'error'; + configOptions.diagnosticRuleSet.reportOptionalIterable = 'error'; + configOptions.diagnosticRuleSet.reportOptionalContextManager = 'error'; + configOptions.diagnosticRuleSet.reportOptionalOperand = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['optional1.py'], configOptions); + TestUtils.validateResults(analysisResults, 8); +}); + +test('Optional2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + // Disable diagnostics. + configOptions.diagnosticRuleSet.reportOptionalOperand = 'none'; + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['optional2.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Turn on errors. + configOptions.diagnosticRuleSet.reportOptionalOperand = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['optional2.py'], configOptions); + TestUtils.validateResults(analysisResults, 1); +}); + +test('Tuple1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple1.py']); + + TestUtils.validateResults(analysisResults, 26); +}); + +test('Tuple2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple2.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Tuple3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple3.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Tuple4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Tuple5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple5.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Tuple6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple6.py']); + + 
TestUtils.validateResults(analysisResults, 10); +}); + +test('Tuple7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple7.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Tuple8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple8.py']); + + TestUtils.validateResults(analysisResults, 11); +}); + +test('Tuple9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple9.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Tuple10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Tuple11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple11.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Tuple12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple12.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Tuple13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Tuple15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple15.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Tuple16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple16.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Tuple17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple17.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Tuple18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple18.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Tuple19', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuple19.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('NamedTuple1', () => { + const 
analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuple1.py']); + + TestUtils.validateResults(analysisResults, 13); +}); + +test('NamedTuple2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuple2.py']); + + TestUtils.validateResults(analysisResults, 8); +}); + +test('NamedTuple3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuple3.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('NamedTuple4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuple4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('NamedTuple5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuple5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('NamedTuple6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuple6.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('NamedTuple7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuple7.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('NamedTuple8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuple8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('NamedTuple9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuple9.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('NamedTuple10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuple10.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('NamedTuple11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuple11.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Slots1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['slots1.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Slots2', () => { + const 
analysisResults = TestUtils.typeAnalyzeSampleFiles(['slots2.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Slots3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['slots3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Slots4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['slots4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Parameters1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.diagnosticRuleSet.reportMissingParameterType = 'none'; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['parameters1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportMissingParameterType = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['parameters1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 1); +}); + +test('Self1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self1.py']); + + TestUtils.validateResults(analysisResults, 15); +}); + +test('Self2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self2.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Self3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Self4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Self5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Self6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Self7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self7.py']); + 
+ TestUtils.validateResults(analysisResults, 1); +}); + +test('Self8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Self9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self9.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Self10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self10.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Self11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('UnusedVariable1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.diagnosticRuleSet.reportUnusedVariable = 'none'; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['unusedVariable1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportUnusedVariable = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['unusedVariable1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 3); +}); + +test('Descriptor1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['descriptor1.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('Descriptor2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['descriptor2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Descriptor3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['descriptor3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Partial1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['partial1.py']); + + TestUtils.validateResults(analysisResults, 18); +}); + +test('Partial2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['partial2.py']); + + 
TestUtils.validateResults(analysisResults, 0); +}); + +test('Partial3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['partial3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Partial4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['partial4.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Partial5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['partial5.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Partial6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['partial6.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Partial7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['partial7.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Partial8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['partial8.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('TotalOrdering1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['totalOrdering1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('TupleUnpack1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tupleUnpack1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('TupleUnpack2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_10; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['tupleUnpack2.py'], configOptions); + TestUtils.validateResults(analysisResults1, 18); + + configOptions.defaultPythonVersion = pythonVersion3_11; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['tupleUnpack2.py'], configOptions); + TestUtils.validateResults(analysisResults2, 4); +}); + +test('TupleUnpack3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = 
pythonVersion3_11; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['tupleUnpack3.py'], configOptions); + TestUtils.validateResults(analysisResults1, 1); +}); + +test('TupleUnpack4', () => { + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['tupleUnpack4.py']); + TestUtils.validateResults(analysisResults1, 2); +}); + +test('TupleUnpack5', () => { + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['tupleUnpack5.py']); + TestUtils.validateResults(analysisResults1, 0); +}); + +test('PseudoGeneric1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['pseudoGeneric1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('PseudoGeneric2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['pseudoGeneric2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('PseudoGeneric3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['pseudoGeneric3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Strings2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['strings2.py']); + + TestUtils.validateResults(analysisResults, 2, 1); +}); + +test('LiteralString1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['literalString1.py']); + + TestUtils.validateResults(analysisResults, 10); +}); + +test('LiteralString2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['literalString2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('LiteralString3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['literalString3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ParamInference1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['paramInference1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('ParamInference2', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['paramInference2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Dictionary1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dictionary1.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Dictionary2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dictionary2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Dictionary3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dictionary3.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Dictionary4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dictionary4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('StaticExpression1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + + configOptions.defaultPythonVersion = pythonVersion3_8; + configOptions.defaultPythonPlatform = 'windows'; + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['staticExpression1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 9); + + configOptions.defaultPythonVersion = pythonVersion3_11; + configOptions.defaultPythonPlatform = 'Linux'; + + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['staticExpression1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 6); + + configOptions.defineConstant.set('DEFINED_TRUE', true); + configOptions.defineConstant.set('DEFINED_FALSE', false); + configOptions.defineConstant.set('DEFINED_STR', 'hi!'); + const analysisResults3 = TestUtils.typeAnalyzeSampleFiles(['staticExpression1.py'], configOptions); + TestUtils.validateResults(analysisResults3, 0); +}); + +test('StaticExpression2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['staticExpression2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SpecialForm1', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['specialForm1.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('SpecialForm2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['specialForm2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('SpecialForm3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['specialForm3.py']); + + TestUtils.validateResults(analysisResults, 22); +}); + +test('SpecialForm4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['specialForm4.py']); + + TestUtils.validateResults(analysisResults, 72); +}); + +test('TypeForm1', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.diagnosticRuleSet.enableExperimentalFeatures = true; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeForm1.py'], configOptions); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('TypeForm2', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.diagnosticRuleSet.enableExperimentalFeatures = true; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeForm2.py'], configOptions); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeForm3', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.diagnosticRuleSet.enableExperimentalFeatures = true; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeForm3.py'], configOptions); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeForm4', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.diagnosticRuleSet.enableExperimentalFeatures = true; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeForm4.py'], configOptions); + + TestUtils.validateResults(analysisResults, 27); +}); + +test('TypeForm5', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.diagnosticRuleSet.enableExperimentalFeatures = true; + const 
analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeForm5.py'], configOptions); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeForm6', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.diagnosticRuleSet.enableExperimentalFeatures = true; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeForm6.py'], configOptions); + + TestUtils.validateResults(analysisResults, 8); +}); + +test('TypeForm7', () => { + const configOptions = new ConfigOptions(Uri.empty()); + configOptions.diagnosticRuleSet.enableExperimentalFeatures = true; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeForm7.py'], configOptions); + + TestUtils.validateResults(analysisResults, 1); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/typePrinter.test.ts b/python-parser/packages/pyright-internal/src/tests/typePrinter.test.ts new file mode 100644 index 00000000..00e235b8 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/typePrinter.test.ts @@ -0,0 +1,188 @@ +/* + * typePrinter.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Unit tests for typePrinter module. + */ + +import * as assert from 'assert'; + +import { printType, PrintTypeFlags } from '../analyzer/typePrinter'; +import { + AnyType, + ClassType, + ClassTypeFlags, + combineTypes, + FunctionParam, + FunctionParamFlags, + FunctionType, + FunctionTypeFlags, + ModuleType, + NeverType, + ParamSpecType, + TypeVarKind, + TypeVarTupleType, + TypeVarType, + UnboundType, + UnknownType, +} from '../analyzer/types'; +import { Uri } from '../common/uri/uri'; +import { ParamCategory } from '../parser/parseNodes'; + +function returnTypeCallback(type: FunctionType) { + return type.shared.declaredReturnType ?? 
UnknownType.create(/* isEllipsis */ true); +} + +test('SimpleTypes', () => { + const anyType = AnyType.create(/* isEllipsis */ false); + assert.strictEqual(printType(anyType, PrintTypeFlags.None, returnTypeCallback), 'Any'); + + const ellipsisType = AnyType.create(/* isEllipsis */ true); + assert.strictEqual(printType(ellipsisType, PrintTypeFlags.None, returnTypeCallback), '...'); + + const unknownType = UnknownType.create(); + assert.strictEqual(printType(unknownType, PrintTypeFlags.None, returnTypeCallback), 'Unknown'); + assert.strictEqual(printType(unknownType, PrintTypeFlags.PrintUnknownWithAny, returnTypeCallback), 'Any'); + assert.strictEqual(printType(unknownType, PrintTypeFlags.PythonSyntax, returnTypeCallback), 'Any'); + + const unboundType = UnboundType.create(); + assert.strictEqual(printType(unboundType, PrintTypeFlags.None, returnTypeCallback), 'Unbound'); + assert.strictEqual(printType(unboundType, PrintTypeFlags.PythonSyntax, returnTypeCallback), 'Any'); + + const moduleType = ModuleType.create('Test', Uri.empty()); + assert.strictEqual(printType(moduleType, PrintTypeFlags.None, returnTypeCallback), 'Module("Test")'); + assert.strictEqual(printType(moduleType, PrintTypeFlags.PythonSyntax, returnTypeCallback), 'Any'); +}); + +test('TypeVarTypes', () => { + const typeVarType = TypeVarType.createInstance('T'); + assert.strictEqual(printType(typeVarType, PrintTypeFlags.None, returnTypeCallback), 'T'); + + const paramSpecType = TypeVarType.createInstance('P', TypeVarKind.ParamSpec); + assert.strictEqual(printType(paramSpecType, PrintTypeFlags.None, returnTypeCallback), 'P'); + + const typeVarTupleType = TypeVarType.createInstance('Ts', TypeVarKind.TypeVarTuple); + assert.strictEqual(printType(typeVarTupleType, PrintTypeFlags.None, returnTypeCallback), 'Ts'); +}); + +test('ClassTypes', () => { + const classTypeA = ClassType.createInstantiable( + 'A', + '', + '', + Uri.empty(), + ClassTypeFlags.None, + 0, + /* declaredMetaclass*/ undefined, + /* 
effectiveMetaclass */ undefined + ); + + const typeVarS = TypeVarType.createInstance('S'); + const typeVarT = TypeVarType.createInstance('T'); + + classTypeA.shared.typeParams.push(typeVarS, typeVarT); + + assert.strictEqual(printType(classTypeA, PrintTypeFlags.None, returnTypeCallback), 'type[A[S, T]]'); + + const instanceA = ClassType.cloneAsInstance(classTypeA); + assert.strictEqual(printType(instanceA, PrintTypeFlags.None, returnTypeCallback), 'A[S, T]'); + + const classTypeInt = ClassType.createInstantiable( + 'int', + '', + '', + Uri.empty(), + ClassTypeFlags.None, + 0, + /* declaredMetaclass*/ undefined, + /* effectiveMetaclass */ undefined + ); + const instanceInt = ClassType.cloneAsInstance(classTypeInt); + + const specializedA = ClassType.specialize(instanceA, [instanceInt, instanceInt]); + + assert.strictEqual(printType(specializedA, PrintTypeFlags.None, returnTypeCallback), 'A[int, int]'); + + const unionType = combineTypes([instanceInt, specializedA, typeVarS]); + assert.strictEqual(printType(unionType, PrintTypeFlags.None, returnTypeCallback), 'Union[int, A[int, int], S]'); + assert.strictEqual(printType(unionType, PrintTypeFlags.PEP604, returnTypeCallback), 'int | A[int, int] | S'); +}); + +test('FunctionTypes', () => { + const funcTypeA = FunctionType.createInstance('A', '', '', FunctionTypeFlags.None); + + FunctionType.addParam( + funcTypeA, + FunctionParam.create(ParamCategory.Simple, AnyType.create(), FunctionParamFlags.TypeDeclared, 'a') + ); + + FunctionType.addPositionOnlyParamSeparator(funcTypeA); + + FunctionType.addParam( + funcTypeA, + FunctionParam.create(ParamCategory.ArgsList, AnyType.create(), FunctionParamFlags.TypeDeclared, 'args') + ); + + FunctionType.addParam( + funcTypeA, + FunctionParam.create(ParamCategory.KwargsDict, AnyType.create(), FunctionParamFlags.TypeDeclared, 'kwargs') + ); + + funcTypeA.shared.declaredReturnType = NeverType.createNoReturn(); + + assert.strictEqual( + printType(funcTypeA, PrintTypeFlags.None, 
returnTypeCallback), + '(a: Any, /, *args: Any, **kwargs: Any) -> NoReturn' + ); + assert.strictEqual( + printType(funcTypeA, PrintTypeFlags.PythonSyntax, returnTypeCallback), + 'Callable[..., NoReturn]' + ); + + const funcTypeB = FunctionType.createInstance('B', '', '', FunctionTypeFlags.None); + + FunctionType.addParam( + funcTypeB, + FunctionParam.create(ParamCategory.Simple, AnyType.create(), FunctionParamFlags.TypeDeclared, 'a') + ); + + FunctionType.addPositionOnlyParamSeparator(funcTypeB); + + const paramSpecP = TypeVarType.createInstance('P', TypeVarKind.ParamSpec); + FunctionType.addParamSpecVariadics(funcTypeB, paramSpecP as ParamSpecType); + + funcTypeB.shared.declaredReturnType = NeverType.createNever(); + + assert.strictEqual(printType(funcTypeB, PrintTypeFlags.None, returnTypeCallback), '(a: Any, /, **P) -> Never'); + assert.strictEqual( + printType(funcTypeB, PrintTypeFlags.PythonSyntax, returnTypeCallback), + 'Callable[Concatenate[Any, P], Never]' + ); + + const funcTypeC = FunctionType.createInstance('C', '', '', FunctionTypeFlags.None); + + const typeVarTupleTs = TypeVarType.createInstance('Ts', TypeVarKind.TypeVarTuple); + const unpackedTs = TypeVarType.cloneForUnpacked(typeVarTupleTs as TypeVarTupleType); + + FunctionType.addParam( + funcTypeC, + FunctionParam.create(ParamCategory.ArgsList, unpackedTs, FunctionParamFlags.TypeDeclared, 'args') + ); + + assert.strictEqual(printType(funcTypeC, PrintTypeFlags.None, returnTypeCallback), '(*args: *Ts) -> Unknown'); + assert.strictEqual( + printType(funcTypeC, PrintTypeFlags.UseTypingUnpack, returnTypeCallback), + '(*args: Unpack[Ts]) -> Unknown' + ); + assert.strictEqual(printType(funcTypeC, PrintTypeFlags.PythonSyntax, returnTypeCallback), 'Callable[..., Any]'); + + const funcTypeD = FunctionType.createInstance('D', '', '', FunctionTypeFlags.None); + + funcTypeD.shared.declaredReturnType = AnyType.create(); + FunctionType.addParamSpecVariadics(funcTypeD, paramSpecP as ParamSpecType); + + 
assert.strictEqual(printType(funcTypeD, PrintTypeFlags.None, returnTypeCallback), '(**P) -> Any'); + assert.strictEqual(printType(funcTypeD, PrintTypeFlags.PythonSyntax, returnTypeCallback), 'Callable[P, Any]'); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/uri.test.ts b/python-parser/packages/pyright-internal/src/tests/uri.test.ts new file mode 100644 index 00000000..6fc10616 --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/uri.test.ts @@ -0,0 +1,991 @@ +/* + * uri.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for Uris. + */ + +import assert from 'assert'; +import * as nodefs from 'fs-extra'; +import * as os from 'os'; +import * as path from 'path'; + +import { expandPathVariables } from '../common/envVarUtils'; +import { isRootedDiskPath, normalizeSlashes } from '../common/pathUtils'; +import { RealTempFile, createFromRealFileSystem } from '../common/realFileSystem'; +import { Uri } from '../common/uri/uri'; +import { UriEx, deduplicateFolders, getWildcardRegexPattern, getWildcardRoot } from '../common/uri/uriUtils'; +import * as vfs from './harness/vfs/filesystem'; +import { TestCaseSensitivityDetector } from './harness/testHost'; + +const caseDetector = new TestCaseSensitivityDetector(true); + +test('parse', () => { + assert.throws(() => Uri.parse('\\c:\\foo : bar', caseDetector)); + assert.throws(() => Uri.parse('foo:////server/b/c', caseDetector)); // No authority component + assert.ok(Uri.parse('foo:///a/b/c', caseDetector)); + assert.ok(Uri.parse('foo:a/b/c', caseDetector)); + assert.ok(Uri.parse('foo:/a/b/c', caseDetector)); + assert.ok(Uri.parse('foo://server/share/dir/file.py', caseDetector)); + assert.ok(Uri.parse('foo://server/share/dir/file.py?query#fragment', caseDetector)); + assert.ok(Uri.parse('foo:///c:/users/me', caseDetector)); + assert.ok(Uri.parse('foo:///c%3A%52users%52me', caseDetector)); + assert.ok(Uri.parse('', caseDetector)); + 
assert.ok(Uri.parse(undefined, caseDetector)); +}); + +test('file', () => { + const cwd = process.cwd(); + const uri1 = Uri.file('a/b/c', caseDetector, true); + assert.ok(uri1.getFilePath().length > 6); + assert.ok( + uri1.getFilePath().toLowerCase().startsWith(cwd.toLowerCase()), + `${uri1.getFilePath()} does not start with ${cwd}` + ); + const uri2 = Uri.file('a/b/c', caseDetector, false); + assert.equal(uri2.getFilePath().length, 6); +}); + +test('file path', () => { + // parse works with unix style file format + assert.equal(Uri.parse('/folder1/folder2', caseDetector).scheme, 'file'); + + // parse doesn't work with window style file format + assert(Uri.parse('c:\\folder1\\folder2', caseDetector).scheme !== `file`); + + // file works with both styles + assert.equal(Uri.file('/folder1/folder2', caseDetector).scheme, 'file'); + assert.equal(Uri.file('c:\\folder1\\folder2', caseDetector).scheme, 'file'); +}); + +test('key', () => { + const key = Uri.parse('foo:///a/b/c', caseDetector).key; + const key2 = Uri.parse('foo:///a/b/c', caseDetector).key; + assert.equal(key, key2); + const key3 = Uri.parse('foo:///a/b/d', caseDetector).key; + assert.notEqual(key, key3); + const key4 = UriEx.file('/a/b/c').key; + assert.notEqual(key, key4); + const key5 = Uri.parse('file:///a/b/c', caseDetector).key; + assert.equal(key4, key5); + const key6 = UriEx.file(normalizeSlashes('c:\\foo\\bar\\d.txt')).key; + const key7 = Uri.parse('file:///c%3A/foo/bar/d.txt', caseDetector).key; + const key8 = Uri.parse('file:///c:/foo/bar/d.txt', caseDetector).key; + assert.equal(key6, key7); + assert.equal(key6, key8); + const key9 = UriEx.parse('file:///c%3A/foo/bar/D.txt', true).key; + const key10 = UriEx.parse('file:///c:/foo/bar/d.txt', true).key; + assert.notEqual(key9, key10); + const key11 = UriEx.parse('file:///c%3A/foo/bar/D.txt', false).key; + const key12 = UriEx.parse('file:///c%3A/foo/bar/d.txt', false).key; + assert.equal(key11, key12); +}); + +test('filename', () => { + const 
filename = Uri.parse('foo:///a/b/c', caseDetector).fileName; + assert.equal(filename, 'c'); + const filename2 = Uri.parse('foo:///a/b/c/', caseDetector).fileName; + assert.equal(filename2, 'c'); + const filename3 = Uri.parse('foo:///a/b/c.py', caseDetector).fileName; + assert.equal(filename3, 'c.py'); + const filename4 = Uri.parse('foo:///a/b/c.py?query#fragment', caseDetector).fileName; + assert.equal(filename4, 'c.py'); + const filename5 = UriEx.file('/a/b/c').fileName; + assert.equal(filename5, 'c'); + const filename6 = Uri.parse('file:///a/b/c', caseDetector).fileName; + assert.equal(filename6, 'c'); +}); + +test('extname', () => { + const extname = Uri.parse('foo:///a/b/c', caseDetector).lastExtension; + assert.equal(extname, ''); + const extname2 = Uri.parse('foo:///a/b/c/', caseDetector).lastExtension; + assert.equal(extname2, ''); + const extname3 = Uri.parse('foo:///a/b/c.py', caseDetector).lastExtension; + assert.equal(extname3, '.py'); + const extname4 = Uri.parse('foo:///a/b/c.py?query#fragment', caseDetector).lastExtension; + assert.equal(extname4, '.py'); + const extname5 = UriEx.file('/a/b/c.py.foo').lastExtension; + assert.equal(extname5, '.foo'); + const extname6 = Uri.parse('file:///a/b/c.py.foo', caseDetector).lastExtension; + assert.equal(extname6, '.foo'); +}); + +test('fragment', () => { + const fragment = Uri.parse('foo:///a/b/c#bar', caseDetector).fragment; + assert.equal(fragment, 'bar'); + const fragment2 = Uri.parse('foo:///a/b/c#bar#baz', caseDetector).fragment; + assert.equal(fragment2, 'bar#baz'); + const fragment3 = Uri.parse('foo:///a/b/c?query#bar#baz', caseDetector).fragment; + assert.equal(fragment3, 'bar#baz'); + const fragment4 = Uri.parse('foo:///a/b/c?query', caseDetector).fragment; + assert.equal(fragment4, ''); + const fragment5 = Uri.parse('foo:///a/b/c', caseDetector).withFragment('bar').fragment; + assert.equal(fragment5, 'bar'); + const fragment6 = Uri.parse('foo:///a/b/c#bar', caseDetector).withFragment('').fragment; + 
assert.equal(fragment6, ''); +}); + +test('query', () => { + const query = Uri.parse('foo:///a/b/c?bar', caseDetector).query; + assert.equal(query, 'bar'); + const query2 = Uri.parse('foo:///a/b/c?bar?baz', caseDetector).query; + assert.equal(query2, 'bar?baz'); + const query3 = Uri.parse('foo:///a/b/c?bar?baz#fragment', caseDetector).query; + assert.equal(query3, 'bar?baz'); + const query4 = Uri.parse('foo:///a/b/c#fragment', caseDetector).query; + assert.equal(query4, ''); + const query5 = Uri.parse('foo:///a/b/c', caseDetector).withQuery('bar').query; + assert.equal(query5, 'bar'); + const query6 = Uri.parse('foo:///a/b/c?bar', caseDetector).withQuery('').query; + assert.equal(query6, ''); +}); + +test('containsExtension', () => { + const uri1 = UriEx.parse('foo:///a/b/c.py', true); + assert.ok(uri1.containsExtension('.py')); + assert.ok(!uri1.containsExtension('.PY')); + assert.ok(!uri1.containsExtension('.pyi')); + const uri2 = UriEx.parse('foo:///a/b/c.pyi', true); + assert.ok(uri2.containsExtension('.pyi')); + assert.ok(!uri2.containsExtension('.PYI')); + assert.ok(!uri2.containsExtension('.py')); + const uri3 = UriEx.parse('foo:///a/b/c.pyi.ipynb', false); + assert.ok(uri3.containsExtension('.pyi')); + assert.ok(uri3.containsExtension('.ipynb')); + assert.ok(!uri3.containsExtension('.PYI')); +}); + +test('root', () => { + const root1 = UriEx.parse('foo://authority/a/b/c').root; + assert.equal(root1.toString(), 'foo://authority/'); + const root = UriEx.parse('file://server/b/c').root; + assert.equal(root.toString(), 'file://server/'); + assert.equal(root.getRootPathLength(), 9); + const root2 = UriEx.parse('foo:/').root; + assert.equal(root2.toString(), 'foo:/'); + const root3 = UriEx.parse('foo://a/b/c/').root; + assert.equal(root3.toString(), 'foo://a/'); + assert.ok(root3.isRoot()); + const root4 = UriEx.parse('foo://a/b/c.py').root; + assert.equal(root4.toString(), 'foo://a/'); + const root5 = UriEx.parse('foo://a/b/c.py?query#fragment').root; + 
assert.equal(root5.toString(), 'foo://a/'); + const root6 = UriEx.file('/a/b/c.py.foo').root; + assert.equal(root6.toString(), 'file:///'); + const root7 = UriEx.parse('file:///a/b/c.py.foo').root; + assert.equal(root7.toString(), 'file:///'); + assert.equal(root7.getRootPathLength(), 1); + const root8 = UriEx.parse('untitled:Untitled-1').root; + assert.equal(root8.toString(), 'untitled:'); + assert.equal(root8.getRootPathLength(), 0); + assert.equal(root8.isRoot(), false); + const root9 = UriEx.parse('file://a/b/c/d.py').root; + assert.equal(root9.toString(), 'file://a/'); + assert.equal(root9.getRootPathLength(), 4); + assert.ok(root9.isRoot()); + const root10 = UriEx.parse('file://c%3A/b/c/d.py').root; + assert.equal(root10.toString(), 'file://c:/'); + assert.equal(root10.getRootPathLength(), 5); + assert.ok(root10.isRoot()); +}); + +test('untitled', () => { + const untitled = UriEx.parse('untitled:Untitled-1', true); + assert.equal(untitled.scheme, 'untitled'); + assert.equal(untitled.fileName, 'Untitled-1'); + assert.equal(untitled.toString(), 'untitled:Untitled-1'); + const untitled2 = UriEx.parse('untitled:Untitled-1', true); + assert.ok(untitled.equals(untitled2)); + const untitled3 = UriEx.parse('untitled:Untitled-2', true); + assert.ok(!untitled.equals(untitled3)); + const untitled4 = UriEx.parse('untitled:Untitled-1.foo.bar', false); + assert.equal(untitled4.scheme, 'untitled'); + assert.equal(untitled4.fileName, 'Untitled-1.foo.bar'); + assert(untitled4.containsExtension('.foo')); + assert(untitled4.containsExtension('.bar')); +}); + +test('empty', () => { + const empty = Uri.parse('', caseDetector); + assert.equal(empty.isEmpty(), true); + const empty2 = Uri.parse('foo:///', caseDetector).isEmpty(); + assert.equal(empty2, false); + const empty3 = Uri.empty(); + assert.equal(empty3.isEmpty(), true); + const empty4 = Uri.parse(undefined, caseDetector); + assert.equal(empty4.isEmpty(), true); + assert.ok(empty4.equals(empty3)); + 
assert.ok(empty3.equals(empty)); + const combined = empty.combinePaths(normalizeSlashes('/d/e/f')); + assert.equal(combined.getFilePath(), ''); +}); + +test('file', () => { + const file1 = UriEx.file(normalizeSlashes('/a/b/c')).getFilePath(); + assert.equal(file1, normalizeSlashes('/a/b/c')); + const file2 = UriEx.file('file:///a/b/c').getFilePath(); + assert.equal(file2, normalizeSlashes('/a/b/c')); + const resolved = UriEx.file(normalizeSlashes('/a/b/c')).combinePaths(normalizeSlashes('/d/e/f')); + assert.equal(resolved.getFilePath(), normalizeSlashes('/d/e/f')); +}); + +test('isUri', () => { + const isUri = Uri.is('foo:///a/b/c'); + assert.equal(isUri, false); + const isUri2 = Uri.is('/a/b/c'); + assert.equal(isUri2, false); + const isUri3 = Uri.is(undefined); + assert.equal(isUri3, false); + const isUri4 = Uri.is(Uri.parse('foo:///a/b/c', caseDetector)); + assert.equal(isUri4, true); + const isUri5 = Uri.is(Uri.empty()); + assert.equal(isUri5, true); +}); + +test('matchesRegex', () => { + const includeFiles = /\.pyi?$/; + const uri = Uri.parse('file:///a/b/c.pyi', caseDetector); + assert.ok(uri.matchesRegex(includeFiles)); + const uri2 = Uri.parse('file:///a/b/c.px', caseDetector); + assert.equal(uri2.matchesRegex(includeFiles), false); + const uri3 = Uri.parse('vscode-vfs:///a/b/c.pyi', caseDetector); + assert.ok(uri3.matchesRegex(includeFiles)); + const fileRegex = /^(c:\/foo\/bar)($|\/)/i; + const uri4 = Uri.parse('file:///C%3A/foo/bar', caseDetector); + assert.ok(uri4.matchesRegex(fileRegex)); + const uri5 = Uri.parse('file:///c%3A/foo/bar', caseDetector); + assert.ok(uri5.matchesRegex(fileRegex)); + const uri6 = Uri.parse('file:///c:/foo/bar', caseDetector); + assert.ok(uri6.matchesRegex(fileRegex)); + const uri7 = Uri.parse('file:///c:/foo/bar/', caseDetector); + assert.ok(uri7.matchesRegex(fileRegex)); + const uri8 = Uri.parse('file:///c:/foo/baz/', caseDetector); + assert.equal(uri8.matchesRegex(fileRegex), false); +}); + +test('replaceExtension', () => 
{ + const uri = Uri.parse('file:///a/b/c.pyi', caseDetector); + const uri2 = uri.replaceExtension('.py'); + assert.equal(uri2.toString(), 'file:///a/b/c.py'); + const uri3 = Uri.parse('file:///a/b/c', caseDetector); + const uri4 = uri3.replaceExtension('.py'); + assert.equal(uri4.toString(), 'file:///a/b/c.py'); + const uri5 = Uri.parse('file:///a/b/c.foo.py', caseDetector); + const uri6 = uri5.replaceExtension('.pyi'); + assert.equal(uri6.toString(), 'file:///a/b/c.foo.pyi'); + const uri7 = Uri.parse('memfs:/notebook.ipynb.py?query#fragment', caseDetector); + const uri8 = uri7.replaceExtension(''); + assert.equal(uri8.toString(), 'memfs:/notebook.ipynb'); + const uri9 = Uri.parse('untitled:Untitled-1.ipynb.py?query#fragment', caseDetector); + const uri10 = uri9.replaceExtension(''); + assert.equal(uri10.toString(), 'untitled:Untitled-1.ipynb'); +}); + +test('addExtension', () => { + const uri = Uri.parse('file:///a/b/c.pyi?query#fragment', caseDetector); + const uri2 = uri.addExtension('.py'); + assert.equal(uri2.toString(), 'file:///a/b/c.pyi.py'); + const uri3 = Uri.parse('file:///a/b/c', caseDetector); + const uri4 = uri3.addExtension('.py'); + assert.equal(uri4.toString(), 'file:///a/b/c.py'); +}); + +test('addPath', () => { + const uri = Uri.parse('file:///a/b/c.pyi?query#fragment', caseDetector); + const uri2 = uri.addPath('d'); + assert.equal(uri2.toString(), 'file:///a/b/c.pyid'); +}); + +test('getDirectory', () => { + const uri = Uri.parse('file:///a/b/c.pyi?query#fragment', caseDetector); + const uri2 = uri.getDirectory(); + assert.equal(uri2.toString(), 'file:///a/b'); + const uri3 = uri2.getDirectory(); + assert.equal(uri3.toString(), 'file:///a'); + const uri4 = Uri.parse('file:///a/b/', caseDetector); + const uri5 = uri4.getDirectory(); + assert.equal(uri5.toString(), 'file:///a'); + const uri6 = uri4.getDirectory(); + assert.ok(uri6.equals(uri5)); + const uri7 = uri5.getDirectory(); + assert.equal(uri7.toString(), 'file:///'); + const uri8 = 
Uri.parse('memfs:/a', caseDetector); + const uri9 = uri8.getDirectory(); + assert.equal(uri9.toString(), 'memfs:/'); + const uri10 = Uri.parse('untitled:a', caseDetector); + const uri11 = uri10.getDirectory(); + assert.equal(uri11.toString(), 'untitled:'); +}); + +test('init and pytyped', () => { + const uri = Uri.parse('file:///a/b/c?query#fragment', caseDetector); + const uri2 = uri.pytypedUri; + assert.equal(uri2.toString(), 'file:///a/b/c/py.typed'); + const uri3 = uri.initPyUri; + assert.equal(uri3.toString(), 'file:///a/b/c/__init__.py'); + const uri4 = uri.initPyiUri; + assert.equal(uri4.toString(), 'file:///a/b/c/__init__.pyi'); + const uri5 = uri.packageUri; + assert.equal(uri5.toString(), 'file:///a/b/c.py'); + const uri6 = uri.packageStubUri; + assert.equal(uri6.toString(), 'file:///a/b/c.pyi'); + const uri7 = Uri.parse('foo://microsoft.com/a/b/c.py', caseDetector); + const uri8 = uri7.pytypedUri; + assert.equal(uri8.toString(), 'foo://microsoft.com/a/b/c.py/py.typed'); + const uri9 = uri7.initPyUri; + assert.equal(uri9.toString(), 'foo://microsoft.com/a/b/c.py/__init__.py'); + const uri10 = uri7.initPyiUri; + assert.equal(uri10.toString(), 'foo://microsoft.com/a/b/c.py/__init__.pyi'); + const uri11 = uri7.packageUri; + assert.equal(uri11.toString(), 'foo://microsoft.com/a/b/c.py.py'); + const uri12 = uri7.packageStubUri; + assert.equal(uri12.toString(), 'foo://microsoft.com/a/b/c.py.pyi'); +}); + +test('isChild', () => { + const parent = UriEx.parse('file:///a/b/?query#fragment', true); + const child = UriEx.parse('file:///a/b/c.pyi?query#fragment', true); + assert.ok(child.isChild(parent)); + const parent2 = UriEx.parse('file:///a/b', true); + const child2 = UriEx.parse('file:///a/b/c.pyi', true); + const child2DifferentCase = UriEx.parse('file:///a/B/C.pyi', false); + assert.ok(child2.isChild(parent2)); + assert.ok(child2DifferentCase.isChild(parent2)); + const parent3 = UriEx.parse('file:///a/b/', true); + const child3 = 
UriEx.parse('file:///a/b/c.pyi', true); + assert.ok(child3.isChild(parent3)); + const parent4 = UriEx.parse('file:///a/b/', true); + const notChild4 = UriEx.parse('file:///a/bb/c.pyi', true); + assert.ok(!notChild4.isChild(parent4)); + assert.ok(!notChild4.isChild(parent2)); + const notChild5 = UriEx.parse('file:///a/b/', true); + assert.ok(!notChild5.isChild(parent4)); +}); + +test('equals', () => { + const uri1 = UriEx.parse('file:///a/b/c.pyi?query#fragment', true); + const uri2 = UriEx.file('/a/b/c.pyi'); + assert.ok(!uri1.equals(uri2)); + const uri3 = uri1.stripExtension().addExtension('.pyi'); + assert.ok(uri2.equals(uri3)); + const uri4 = UriEx.parse('foo:///a/b/c', true); + const uri5 = UriEx.parse('foo:///a/b/c', true); + const uri6 = UriEx.parse('foo:///a/b/c/', true); + assert.ok(uri4.equals(uri5)); + assert.ok(uri4.equals(uri6)); + const uri7 = UriEx.parse('file://c%3A/b/c/d.py', true).root; + const uri8 = UriEx.parse('file://c:/', true); + assert.ok(uri7.equals(uri8)); + const uri9 = UriEx.parse('foo:///a/b/c?query', true); + assert.ok(!uri9.equals(uri4)); + // Web uris are always case sensitive + const uri10 = UriEx.parse('foo:///a/b/c', false); + const uri11 = UriEx.parse('foo:///a/B/c', false); + assert.ok(!uri10.equals(uri11)); + // Filre uris pay attention to the parameter. 
+ const uri12 = UriEx.parse('file:///a/b/c', false); + const uri13 = UriEx.parse('file:///a/B/c', false); + assert.ok(uri12.equals(uri13)); + const uri14 = UriEx.parse('file:///a/b/c', true); + const uri15 = UriEx.parse('file:///a/B/c', true); + assert.ok(!uri14.equals(uri15)); +}); + +test('startsWith', () => { + const parent = Uri.parse('file:///a/b/?query#fragment', caseDetector); + const child = Uri.parse('file:///a/b/c.pyi?query#fragment', caseDetector); + assert.ok(child.startsWith(parent)); + const parent2 = Uri.parse('file:///a/b', caseDetector); + const child2 = Uri.parse('file:///a/b/c.pyi', caseDetector); + assert.ok(child2.startsWith(parent2)); + const parent3 = Uri.parse('file:///a/b/', caseDetector); + const child3 = Uri.parse('file:///a/b/c.pyi', caseDetector); + assert.ok(child3.startsWith(parent3)); + const parent4 = Uri.parse('file:///a/b/', caseDetector); + const notChild4 = Uri.parse('file:///a/bb/c.pyi', caseDetector); + assert.ok(!notChild4.startsWith(parent4)); + assert.ok(!notChild4.startsWith(parent2)); +}); + +test('path comparisons', () => { + const uri = Uri.parse('foo:///a/b/c.pyi?query#fragment', caseDetector); + assert.ok(uri.pathEndsWith('c.pyi')); + assert.ok(uri.pathEndsWith('b/c.pyi')); + assert.ok(uri.pathEndsWith('a/b/c.pyi')); + assert.ok(!uri.pathEndsWith('a/b/c.py')); + assert.ok(!uri.pathEndsWith('b/c.py')); + assert.ok(uri.pathIncludes('c.pyi')); + assert.ok(uri.pathIncludes('b/c')); + assert.ok(uri.pathIncludes('a/b/c')); + const uri2 = Uri.parse('file:///C%3A/a/b/c.pyi?query#fragment', caseDetector); + assert.ok(uri2.pathEndsWith('c.pyi')); + assert.ok(uri2.pathEndsWith('b/c.pyi')); + assert.ok(!uri2.pathStartsWith('C:/a')); + assert.ok(!uri2.pathStartsWith('C:/a/b')); + assert.ok(uri2.pathStartsWith('c:/a')); + assert.ok(uri2.pathStartsWith('c:/a/b')); +}); + +test('combinePaths', () => { + const uri1 = Uri.parse('file:///a/b/c.pyi?query#fragment', caseDetector); + const uri2 = uri1.combinePaths('d', 'e'); + 
assert.equal(uri2.toString(), 'file:///a/b/c.pyi/d/e'); + const uri4 = uri1.combinePaths('d', 'e', 'f'); + assert.equal(uri4.toString(), 'file:///a/b/c.pyi/d/e/f'); + const uri5 = uri1.combinePaths('d', '..', 'e'); + assert.equal(uri5.toString(), 'file:///a/b/c.pyi/e'); + const rootedPath = process.platform === 'win32' ? 'D:' : '/D'; + const rootedResult = process.platform === 'win32' ? 'file:///d%3A/e/f' : 'file:///D/e/f'; + const uri6 = uri1.combinePaths(rootedPath, 'e', 'f'); + assert.equal(uri6.toString(), rootedResult); + const uri7 = Uri.parse('foo:', caseDetector); + const uri8 = uri7.combinePaths('d', 'e'); + assert.equal(uri8.toString(), 'foo:d/e'); + const uri9 = Uri.parse('foo:/', caseDetector); + const uri10 = uri9.combinePaths('d', 'e'); + assert.equal(uri10.toString(), 'foo:/d/e'); + const uri11 = Uri.empty().combinePaths('d', 'e'); + assert.equal(uri11.toString(), ''); + const uri12 = uri1.combinePaths('d', 'e', 'f/'); + assert.equal(uri12.toString(), 'file:///a/b/c.pyi/d/e/f'); +}); + +test('combinePathsUnsafe', () => { + const uri1 = Uri.parse('file:///a/b/c.pyi?query#fragment', caseDetector); + const uri2 = uri1.combinePathsUnsafe('d', 'e'); + assert.equal(uri2.toString(), 'file:///a/b/c.pyi/d/e'); + const uri4 = uri1.combinePathsUnsafe('d', 'e', 'f'); + assert.equal(uri4.toString(), 'file:///a/b/c.pyi/d/e/f'); + const uri5 = uri1.combinePathsUnsafe('d', '..', 'e'); + assert.equal(uri5.toString(), 'file:///a/b/c.pyi/d/../e'); + const rootedPath = process.platform === 'win32' ? 'D:' : '/D'; + const rootedResult = process.platform === 'win32' ? 
'file:///d%3A/e/f' : 'file:///D/e/f'; + const uri6 = uri1.combinePathsUnsafe(rootedPath, 'e', 'f'); + assert.equal(uri6.toString(), rootedResult); + const uri7 = Uri.parse('foo:', caseDetector); + const uri8 = uri7.combinePathsUnsafe('d', 'e'); + assert.equal(uri8.toString(), 'foo:d/e'); + const uri9 = Uri.parse('foo:/', caseDetector); + const uri10 = uri9.combinePathsUnsafe('d', 'e'); + assert.equal(uri10.toString(), 'foo:/d/e'); + const uri11 = Uri.empty().combinePathsUnsafe('d', 'e'); + assert.equal(uri11.toString(), ''); + const uri12 = uri1.combinePathsUnsafe('d', 'e', 'f/'); + assert.equal(uri12.toString(), 'file:///a/b/c.pyi/d/e/f/'); +}); + +test('resolvePaths', () => { + const uri1 = Uri.parse('file:///a/b/c.pyi?query#fragment', caseDetector); + const uri2 = uri1.resolvePaths('d', 'e'); + assert.equal(uri2.toString(), 'file:///a/b/c.pyi/d/e'); + const uri3 = uri1.resolvePaths('d', 'e/'); + assert.equal(uri3.toString(), 'file:///a/b/c.pyi/d/e'); + const uri4 = uri1.resolvePaths('d', 'e', 'f/'); + assert.equal(uri4.toString(), 'file:///a/b/c.pyi/d/e/f'); + const uri5 = uri1.resolvePaths('d', '..', 'e'); + assert.equal(uri5.toString(), 'file:///a/b/c.pyi/e'); + const rootedPath = process.platform === 'win32' ? 'D:' : '/D'; + const rootedResult = process.platform === 'win32' ? 
'file:///d%3A/e/f' : 'file:///D/e/f'; + const uri6 = uri1.resolvePaths(rootedPath, 'e', 'f'); + assert.equal(uri6.toString(), rootedResult); + const uri7 = Uri.parse('foo:', caseDetector); + const uri8 = uri7.resolvePaths('d', 'e'); + assert.equal(uri8.toString(), 'foo:d/e'); + const uri9 = Uri.parse('foo:/', caseDetector); + const uri10 = uri9.resolvePaths('d', 'e'); + assert.equal(uri10.toString(), 'foo:/d/e'); + const uri11 = Uri.empty().resolvePaths('d', 'e'); + assert.equal(uri11.toString(), ''); +}); + +test('combinePaths non file', () => { + const uri1 = Uri.parse('baz://authority/a/b/c.pyi?query#fragment', caseDetector); + const uri2 = uri1.combinePaths('d', 'e'); + assert.equal(uri2.toString(), 'baz://authority/a/b/c.pyi/d/e'); + const uri4 = uri1.combinePaths('d', 'e', 'f'); + assert.equal(uri4.toString(), 'baz://authority/a/b/c.pyi/d/e/f'); +}); + +test('resolvePaths non file', () => { + const uri1 = Uri.parse('baz://authority/a/b/c.pyi?query#fragment', caseDetector); + const uri2 = uri1.resolvePaths('d', 'e'); + assert.equal(uri2.toString(), 'baz://authority/a/b/c.pyi/d/e'); + const uri3 = uri1.resolvePaths('d', 'e/'); + assert.equal(uri3.toString(), 'baz://authority/a/b/c.pyi/d/e'); + const uri4 = uri1.resolvePaths('d', 'e', 'f'); + assert.equal(uri4.toString(), 'baz://authority/a/b/c.pyi/d/e/f'); + const uri5 = uri1.resolvePaths('d', '..', 'e'); + assert.equal(uri5.toString(), 'baz://authority/a/b/c.pyi/e'); +}); + +test('getPathComponents1', () => { + const components = Uri.parse('', caseDetector).getPathComponents(); + assert.equal(components.length, 0); +}); + +test('getPathComponents2', () => { + const components = Uri.parse('/users/', caseDetector).getPathComponents(); + assert.equal(components.length, 2); + assert.equal(components[0], '/'); + assert.equal(components[1], 'users'); +}); + +test('getPathComponents3', () => { + const components = Uri.parse('/users/hello.py', caseDetector).getPathComponents(); + assert.equal(components.length, 3); + 
assert.equal(components[0], '/'); + assert.equal(components[1], 'users'); + assert.equal(components[2], 'hello.py'); +}); + +test('getPathComponents4', () => { + const components = Uri.parse('/users/hello/../', caseDetector).getPathComponents(); + assert.equal(components.length, 2); + assert.equal(components[0], '/'); + assert.equal(components[1], 'users'); +}); + +test('getPathComponents5', () => { + const components = Uri.parse('./hello.py', caseDetector).getPathComponents(); + assert.equal(components.length, 2); + assert.equal(components[0], '/'); + assert.equal(components[1], 'hello.py'); +}); + +test('getPathComponents6', () => { + const components = Uri.parse('file://server/share/dir/file.py', caseDetector).getPathComponents(); + assert.equal(components.length, 4); + assert.ok(components[0].slice(2).includes('server')); + assert.equal(components[1], 'share'); + assert.equal(components[2], 'dir'); + assert.equal(components[3], 'file.py'); +}); + +test('getRelativePathComponents1', () => { + const components = Uri.parse('foo:///users/', caseDetector).getRelativePathComponents( + Uri.parse('foo:///users/', caseDetector) + ); + assert.equal(components.length, 0); +}); + +test('getRelativePathComponents2', () => { + const components = Uri.parse('foo:///users/', caseDetector).getRelativePathComponents( + Uri.parse('foo:///users/bar', caseDetector) + ); + assert.equal(components.length, 1); + assert.equal(components[0], 'bar'); +}); + +test('getRelativePathComponents3', () => { + const components = Uri.parse('bar:///users/', caseDetector).getRelativePathComponents( + Uri.parse('foo:///users/bar', caseDetector) + ); + assert.equal(components.length, 1); + assert.equal(components[0], 'bar'); +}); + +test('getRelativePathComponents4', () => { + const components = Uri.parse('foo:///users', caseDetector).getRelativePathComponents( + Uri.parse('foo:///users/', caseDetector) + ); + assert.equal(components.length, 0); +}); + +test('getRelativePathComponents5', () => { + 
const components = Uri.parse('foo:///users/', caseDetector).getRelativePathComponents( + Uri.parse('foo:///users/bar/baz/../foo', caseDetector) + ); + assert.equal(components.length, 2); + assert.equal(components[0], 'bar'); + assert.equal(components[1], 'foo'); +}); + +test('getRelativePathComponents6', () => { + const components = Uri.parse('foo:///users/bar', caseDetector).getRelativePathComponents( + Uri.parse('foo:///users/foo', caseDetector) + ); + assert.equal(components.length, 2); + assert.equal(components[0], '..'); + assert.equal(components[1], 'foo'); +}); + +test('getRelativePathComponents7', () => { + const components = UriEx.file('\\\\SERVER\\share\\users', false).getRelativePathComponents( + UriEx.file('\\\\server\\ShArE\\users\\bar', false) + ); + assert.equal(components.length, 1); + assert.equal(components[0], 'bar'); +}); + +test('getFileExtension1', () => { + const ext = Uri.parse('foo:///blah.blah/hello.JsOn', caseDetector).lastExtension; + assert.equal(ext, '.JsOn'); +}); + +test('getFileName1', () => { + const fileName = Uri.parse('foo:///blah.blah/HeLLo.JsOn', caseDetector).fileName; + assert.equal(fileName, 'HeLLo.JsOn'); +}); + +test('getFileName2', () => { + const fileName1 = Uri.parse('foo:///blah.blah/hello.cpython-32m.so', caseDetector).fileName; + assert.equal(fileName1, 'hello.cpython-32m.so'); +}); + +test('stripFileExtension1', () => { + const path = Uri.parse('foo:///blah.blah/HeLLo.JsOn', caseDetector).stripExtension().getPath(); + assert.equal(path, '/blah.blah/HeLLo'); +}); + +test('stripFileExtension2', () => { + const path1 = Uri.parse('foo:/blah.blah/hello.cpython-32m.so', caseDetector).stripAllExtensions().getPath(); + assert.equal(path1, '/blah.blah/hello'); + const path2 = Uri.parse('foo:/blah.blah/hello.cpython-32m.so', caseDetector).stripExtension().getPath(); + assert.equal(path2, '/blah.blah/hello.cpython-32m'); +}); + +test('getWildcardRegexPattern1', () => { + const pattern = 
getWildcardRegexPattern(Uri.parse('foo:///users/me', caseDetector), './blah/'); + const regex = new RegExp(pattern); + assert.ok(regex.test('/users/me/blah/d')); + assert.ok(!regex.test('/users/me/blad/d')); +}); + +test('getWildcardRegexPattern2', () => { + const pattern = getWildcardRegexPattern(Uri.parse('foo:///users/me', caseDetector), './**/*.py?'); + const regex = new RegExp(pattern); + assert.ok(regex.test('/users/me/.blah/foo.pyd')); + assert.ok(!regex.test('/users/me/.blah/foo.py')); // No char after +}); + +test('getWildcardRegexPattern3', () => { + const pattern = getWildcardRegexPattern(Uri.parse('foo:///users/me', caseDetector), './**/.*.py'); + const regex = new RegExp(pattern); + assert.ok(regex.test('/users/me/.blah/.foo.py')); + assert.ok(!regex.test('/users/me/.blah/foo.py')); +}); + +test('getWildcardRegexPattern4', () => { + const pattern = getWildcardRegexPattern(Uri.parse('//server/share/dir', caseDetector), '.'); + const regex = new RegExp(pattern); + assert.ok(regex.test('//server/share/dir/foo.py')); + assert.ok(!regex.test('//server/share/dix/foo.py')); +}); + +test('getWildcardRegexPattern5', () => { + const pattern = getWildcardRegexPattern(Uri.parse('//server/share/dir++/.bar*/bid', caseDetector), '.'); + const regex = new RegExp(pattern); + assert.ok(regex.test('//server/share/dir++/.bar*/bidfoo.py')); + assert.ok(!regex.test('//server/share/dix++/.bar*/bidfoo.py')); +}); + +test('getWildcardRoot1', () => { + const p = getWildcardRoot(Uri.parse('foo:/users/me', caseDetector), './blah/'); + assert.equal(p.toString(), 'foo:/users/me/blah'); +}); + +test('getWildcardRoot2', () => { + const p = getWildcardRoot(Uri.parse('foo:/users/me', caseDetector), './**/*.py?/'); + assert.equal(p.toString(), 'foo:/users/me'); +}); + +test('getWildcardRoot with root', () => { + const p = getWildcardRoot(Uri.parse('foo:/', caseDetector), '.'); + assert.equal(p.toString(), 'foo:/'); +}); + +test('getWildcardRoot with drive letter', () => { + const p = 
getWildcardRoot(Uri.parse('file:///c:/', caseDetector), '.'); + assert.equal(p.toString(), 'file:///c%3A/'); +}); + +function resolvePaths(uri: string, ...paths: string[]) { + return UriEx.file(uri) + .resolvePaths(...paths) + .toString(); +} + +test('resolvePath1', () => { + assert.equal(resolvePaths('/path', 'to', 'file.ext'), 'file:///path/to/file.ext'); +}); + +test('resolvePath2', () => { + assert.equal(resolvePaths('/path', 'to', '..', 'from', 'file.ext/'), 'file:///path/from/file.ext'); +}); + +function getHomeDirUri() { + return UriEx.file(os.homedir()); +} + +test('resolvePath3 ~ escape', () => { + assert.equal( + resolvePaths(expandPathVariables('~/path', Uri.empty(), []), 'to', '..', 'from', 'file.ext/'), + `${getHomeDirUri().toString()}/path/from/file.ext` + ); +}); + +test('resolvePath4 ~ escape in middle', () => { + assert.equal( + resolvePaths('/path', expandPathVariables('~/file.ext/', Uri.empty(), [])), + `${getHomeDirUri().toString()}/file.ext` + ); +}); + +function combinePaths(uri: string, ...paths: string[]) { + return resolvePaths(uri, ...paths); +} + +test('invalid ~ without root', () => { + const path = combinePaths('Library', 'Mobile Documents', 'com~apple~CloudDocs', 'Development', 'mysuperproject'); + assert.equal(resolvePaths(expandPathVariables(path, Uri.parse('foo:///src', caseDetector), [])), path); +}); + +test('invalid ~ with root', () => { + const path = combinePaths('/', 'Library', 'com~apple~CloudDocs', 'Development', 'mysuperproject'); + assert.equal(resolvePaths(expandPathVariables(path, Uri.parse('foo:///src', caseDetector), [])), path); +}); + +function containsPath(uri: string, child: string) { + return Uri.parse(child, caseDetector).isChild(Uri.parse(uri, caseDetector)); +} + +test('containsPath1', () => { + assert.equal(containsPath('/a/b/c/', '/a/d/../b/c/./d'), true); +}); + +test('containsPath2', () => { + assert.equal(containsPath('/', '\\a'), true); +}); + +test('containsPath3', () => { + 
assert.equal(containsPath('/a', '/a/B'), true); +}); + +function getAnyExtensionFromPath(uri: string): string { + return Uri.parse(uri, caseDetector).lastExtension; +} +test('getAnyExtension1', () => { + assert.equal(getAnyExtensionFromPath('/path/to/file.ext'), '.ext'); +}); + +function getBaseFileName(uri: string): string { + return Uri.parse(uri, caseDetector).fileName; +} + +test('getBaseFileName1', () => { + assert.equal(getBaseFileName('/path/to/file.ext'), 'file.ext'); +}); + +test('getBaseFileName2', () => { + assert.equal(getBaseFileName('/path/to/'), 'to'); +}); + +test('getBaseFileName3', () => { + assert.equal(getBaseFileName('c:/'), ''); +}); + +function getUriRootLength(uri: string): number { + return UriEx.file(uri).getRootPathLength(); +} + +test('getRootLength1', () => { + assert.equal(getUriRootLength('a'), 1); +}); + +test('getRootLength2', () => { + assert.equal(getUriRootLength('/'), 1); +}); + +test('getRootLength3', () => { + assert.equal(getUriRootLength('c:'), 3); +}); + +test('getRootLength4', () => { + assert.equal(getUriRootLength('c:d'), 0); +}); + +test('getRootLength5', () => { + assert.equal(getUriRootLength('c:/'), 3); +}); + +test('getRootLength6', () => { + assert.equal(getUriRootLength('//server'), 9); +}); + +test('getRootLength7', () => { + assert.equal(getUriRootLength('//server/share'), 9); +}); + +test('getRootLength8', () => { + assert.equal(getUriRootLength('scheme:/no/authority'), 1); +}); + +test('getRootLength9', () => { + assert.equal(getUriRootLength('scheme://with/authority'), 1); +}); + +function isRootedDiskUri(uri: string) { + return isRootedDiskPath(UriEx.file(uri).getFilePath()); +} + +test('isRootedDiskPath1', () => { + assert(isRootedDiskUri('C:/a/b')); +}); + +test('isRootedDiskPath2', () => { + assert(isRootedDiskUri('/')); +}); + +test('isRootedDiskPath3', () => { + assert(isRootedDiskUri('a/b')); +}); + +test('isDiskPathRoot1', () => { + assert(isRootedDiskUri('/')); +}); + +test('isDiskPathRoot2', () => { 
+ assert(isRootedDiskUri('c:/')); +}); + +test('isDiskPathRoot3', () => { + assert(isRootedDiskUri('c:')); +}); + +test('isDiskPathRoot4', () => { + assert(!isRootedDiskUri('c:d')); +}); + +function getRelativePath(parent: string, child: string) { + return Uri.parse(parent, caseDetector).getRelativePath(Uri.parse(child, caseDetector)); +} + +test('getRelativePath', () => { + assert.equal(getRelativePath('/a/b/c', '/a/b/c/d/e/f'), './d/e/f'); + assert.equal(getRelativePath('/a/b/c/d/e/f', '/a/b/c/'), undefined); + assert.equal(getRelativePath('/a/b/c', '/d/e/f'), undefined); +}); + +test('CaseSensitivity', () => { + const cwd = '/'; + + const fsCaseInsensitive = new vfs.TestFileSystem(/*ignoreCase*/ true, { cwd }); + assert.equal(fsCaseInsensitive.isLocalFileSystemCaseSensitive(), false); + + const fsCaseSensitive = new vfs.TestFileSystem(/*ignoreCase*/ false, { cwd }); + assert.equal(fsCaseSensitive.isLocalFileSystemCaseSensitive(), true); +}); + +test('deduplicateFolders', () => { + const listOfFolders = [ + ['/user', '/user/temp', '/xuser/app', '/lib/python', '/home/p/.venv/lib/site-packages'].map((p) => + UriEx.file(p) + ), + ['/user', '/user/temp', '/xuser/app', '/lib/python/Python310.zip', '/home/z/.venv/lib/site-packages'].map((p) => + UriEx.file(p) + ), + ['/main/python/lib/site-packages', '/home/p'].map((p) => UriEx.file(p)), + ]; + + const folders = deduplicateFolders(listOfFolders).map((f) => f.getPath()); + + const expected = [ + '/user', + '/xuser/app', + '/lib/python', + '/home/z/.venv/lib/site-packages', + '/main/python/lib/site-packages', + '/home/p', + ]; + + assert.deepStrictEqual(folders.sort(), expected.sort()); +}); + +test('convert UNC path', () => { + const path = UriEx.file('file:///server/c$/folder/file.py'); + + // When converting UNC path, server part shouldn't be removed. + assert(path.getPath().indexOf('server') > 0); +}); + +function lowerCaseDrive(entries: string[]) { + return entries.map((p) => (process.platform === 'win32' ? 
p[0].toLowerCase() + p.slice(1) : p)); +} + +test('Realcase', () => { + const tempFile = new RealTempFile(); + const fs = createFromRealFileSystem(tempFile); + const cwd = process.cwd(); + const dir = Uri.file(path.join(cwd, 'src', 'tests', '..', 'tests'), tempFile); + const dirFilePath = dir.getFilePath()!; + const entries = nodefs + .readdirSync(dirFilePath) + .map((entry) => path.basename(nodefs.realpathSync(path.join(dirFilePath, entry)))); + const normalizedEntries = lowerCaseDrive(entries); + const fsentries = fs.readdirSync(dir); + assert.deepStrictEqual(normalizedEntries, fsentries); + + const paths = entries.map((entry) => nodefs.realpathSync(path.join(dirFilePath, entry))); + const fspaths = fsentries.map((entry) => fs.realCasePath(dir.combinePaths(entry)).getFilePath()!); + assert.deepStrictEqual(lowerCaseDrive(paths), fspaths); + + // Check that the '..' has been removed. + assert.ok(!fspaths.some((p) => p.toString().indexOf('..') >= 0)); + + // If windows, check that the case is correct. 
+ if (process.platform === 'win32') { + for (const p of fspaths) { + const upper = UriEx.file(p.toString().toUpperCase()); + const real = fs.realCasePath(upper); + assert.strictEqual(p, real.getFilePath()); + } + } + tempFile.dispose(); +}); + +test('Realcase use cwd implicitly', () => { + const tempFile = new RealTempFile(); + const fs = createFromRealFileSystem(tempFile); + const cwd = process.cwd(); + const dir = path.join(cwd, 'src', 'tests'); + const uri = Uri.file(dir, tempFile); + + const entries = nodefs.readdirSync(dir).map((entry) => path.basename(nodefs.realpathSync(path.join(dir, entry)))); + const fsentries = fs.readdirSync(uri); + const paths = entries.map((entry) => nodefs.realpathSync(path.join(dir, entry))); + + const fspaths = fsentries.map((entry) => fs.realCasePath(uri.combinePaths(entry)).getFilePath()); + assert.deepStrictEqual(lowerCaseDrive(paths), fspaths); + tempFile.dispose(); +}); + +test('Web URIs dont exist', () => { + const tempFile = new RealTempFile(); + const fs = createFromRealFileSystem(tempFile); + const uri = UriEx.parse('http://www.bing.com'); + assert(!fs.existsSync(uri)); + const stat = fs.statSync(uri); + assert(!stat.isFile()); + tempFile.dispose(); +}); + +test('constant uri test', () => { + const name = 'constant uri'; + const uri1 = Uri.constant(name); + const uri2 = Uri.constant(name); + + assert(!uri1.equals(uri2)); + assert(uri1.equals(uri1)); +}); + +test('root test', () => { + const uri1 = UriEx.file('C:\\'); + const uri2 = UriEx.file('C:'); + const uri3 = UriEx.file('/'); + + assert.strictEqual(uri1.getFilePath(), normalizeSlashes('c:/')); + assert.strictEqual(uri2.getFilePath(), normalizeSlashes('c:/')); + assert.strictEqual(uri3.getFilePath(), normalizeSlashes('/')); +}); diff --git a/python-parser/packages/pyright-internal/src/tests/workspaceEditUtils.test.ts b/python-parser/packages/pyright-internal/src/tests/workspaceEditUtils.test.ts new file mode 100644 index 00000000..4bbcd992 --- /dev/null +++ 
b/python-parser/packages/pyright-internal/src/tests/workspaceEditUtils.test.ts @@ -0,0 +1,440 @@ +/* + * workspaceEditUtils.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * test workspaceEditUtils + */ + +import * as assert from 'assert'; +import { CreateFile, DeleteFile, RenameFile, TextDocumentEdit, WorkspaceEdit } from 'vscode-languageserver-types'; + +import { CancellationToken } from 'vscode-languageserver'; +import { AnalyzerService } from '../analyzer/service'; +import { IPythonMode } from '../analyzer/sourceFile'; +import { combinePaths, getDirectoryPath } from '../common/pathUtils'; +import { Uri } from '../common/uri/uri'; +import { applyWorkspaceEdit, convertToWorkspaceEdit, generateWorkspaceEdit } from '../common/workspaceEditUtils'; +import { AnalyzerServiceExecutor } from '../languageService/analyzerServiceExecutor'; +import { TestLanguageService } from './harness/fourslash/testLanguageService'; +import { TestState, parseAndGetTestState } from './harness/fourslash/testState'; +import { verifyWorkspaceEdit } from './harness/fourslash/workspaceEditTestUtils'; + +test('test applyWorkspaceEdits changes', async () => { + const code = ` +// @filename: test.py +//// [|/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + const cloned = await getClonedService(state); + const range = state.getRangeByMarkerName('marker')!; + + const fileChanged = new Map<string, Uri>(); + applyWorkspaceEditToService( + cloned, + { + changes: { + [range.fileUri.toString()]: [ + { + range: state.convertPositionRange(range), + newText: 'Text Changed', + }, + ], + }, + }, + fileChanged + ); + + assert.strictEqual(fileChanged.size, 1); + assert.strictEqual(cloned.test_program.getSourceFile(range.fileUri)?.getFileContent(), 'Text Changed'); +}); + +test('test edit mode for workspace', async () => { + const code = ` +// @filename: test.py +//// [|/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + const 
range = state.getRangeByMarkerName('marker')!; + const addedFileUri = Uri.file(combinePaths(getDirectoryPath(range.fileName), 'test2.py'), state.serviceProvider); + const edits = state.workspace.service.runEditMode((program) => { + const fileChanged = new Map<string, Uri>(); + applyWorkspaceEdit( + program, + { + documentChanges: [ + TextDocumentEdit.create( + { + uri: range.fileUri.toString(), + version: null, + }, + [ + { + range: state.convertPositionRange(range), + newText: 'import sys', + }, + ] + ), + ], + }, + fileChanged + ); + + assert.strictEqual(fileChanged.size, 1); + const info = program.getSourceFileInfo(range.fileUri)!; + + program.analyzeFile(info.uri, CancellationToken.None); + assert.strictEqual(info.contents, 'import sys'); + assert.strictEqual(info.imports.length, 3); + + // Add a new file. + program.setFileOpened(addedFileUri, 0, '', { + isTracked: true, + ipythonMode: IPythonMode.None, + chainedFileUri: undefined, + }); + + applyWorkspaceEdit( + program, + { + documentChanges: [ + TextDocumentEdit.create( + { + uri: addedFileUri.toString(), + version: null, + }, + [ + { + range: { + start: { line: 0, character: 0 }, + end: { line: 0, character: 0 }, + }, + newText: 'import sys', + }, + ] + ), + ], + }, + fileChanged + ); + + applyWorkspaceEdit( + program, + { + documentChanges: [ + TextDocumentEdit.create( + { + uri: addedFileUri.toString(), + version: null, + }, + [ + { + range: { + start: { line: 0, character: 7 }, + end: { line: 0, character: 10 }, + }, + newText: 'os', + }, + ] + ), + ], + }, + fileChanged + ); + + const addedInfo = program.getSourceFileInfo(addedFileUri)!; + program.analyzeFile(addedInfo.uri, CancellationToken.None); + + assert.strictEqual(addedInfo.contents, 'import os'); + assert.strictEqual(addedInfo.imports.length, 3); + }, CancellationToken.None); + + // After leaving edit mode, we should be back to where we were. 
+ const oldSourceFile = state.workspace.service.test_program.getSourceFile(range.fileUri); + state.workspace.service.backgroundAnalysisProgram.analyzeFile(oldSourceFile!.getUri(), CancellationToken.None); + + assert.strictEqual(oldSourceFile?.getFileContent(), ''); + assert.strictEqual(oldSourceFile.getImports().length, 2); + assert.strictEqual(edits.length, 2); + + assert.deepStrictEqual(edits[0].replacementText, 'import sys'); + assert.deepStrictEqual(edits[1].replacementText, 'import os'); + + const addedSourceFile = state.workspace.service.test_program.getSourceFile(addedFileUri); + + // The added file should not be there. + assert.ok(!addedSourceFile); +}); + +test('test applyWorkspaceEdits documentChanges', async () => { + const code = ` +// @filename: test.py +//// [|/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + const cloned = await getClonedService(state); + const range = state.getRangeByMarkerName('marker')!; + + const fileChanged = new Map<string, Uri>(); + applyWorkspaceEditToService( + cloned, + { + documentChanges: [ + TextDocumentEdit.create( + { + uri: range.fileUri.toString(), + version: null, + }, + [ + { + range: state.convertPositionRange(range), + newText: 'Text Changed', + }, + ] + ), + ], + }, + fileChanged + ); + + assert.strictEqual(fileChanged.size, 1); + assert.strictEqual(cloned.test_program.getSourceFile(range.fileUri)?.getFileContent(), 'Text Changed'); +}); + +test('test generateWorkspaceEdits', async () => { + const code = ` +// @filename: test1.py +//// [|/*marker1*/|] + +// @filename: test2.py +//// [|/*marker2*/|] + `; + + const state = parseAndGetTestState(code).state; + const cloned = await getClonedService(state); + const range1 = state.getRangeByMarkerName('marker1')!; + + const fileChanged = new Map<string, Uri>(); + applyWorkspaceEditToService( + cloned, + { + changes: { + [range1.fileUri.toString()]: [ + { + range: state.convertPositionRange(range1), + newText: 'Test1 Changed', + }, + ], + }, + }, + 
fileChanged + ); + + applyWorkspaceEditToService( + cloned, + { + documentChanges: [ + TextDocumentEdit.create( + { + uri: range1.fileUri.toString(), + version: null, + }, + [ + { + range: { start: { line: 0, character: 0 }, end: { line: 0, character: 5 } }, + newText: 'NewTest1', + }, + ] + ), + ], + }, + fileChanged + ); + + const range2 = state.getRangeByMarkerName('marker2')!; + applyWorkspaceEditToService( + cloned, + { + documentChanges: [ + TextDocumentEdit.create( + { + uri: range2.fileUri.toString(), + version: null, + }, + [ + { + range: state.convertPositionRange(range2), + newText: 'Test2 Changed', + }, + ] + ), + ], + }, + fileChanged + ); + + applyWorkspaceEditToService( + cloned, + { + changes: { + [range2.fileUri.toString()]: [ + { + range: { start: { line: 0, character: 0 }, end: { line: 0, character: 5 } }, + newText: 'NewTest2', + }, + ], + }, + }, + fileChanged + ); + + assert.strictEqual(fileChanged.size, 2); + + const actualEdits = generateWorkspaceEdit(state.workspace.service.fs, state.workspace.service, cloned, fileChanged); + verifyWorkspaceEdit( + { + changes: { + [range1.fileUri.toString()]: [ + { + range: state.convertPositionRange(range1), + newText: 'NewTest1 Changed', + }, + ], + [range2.fileUri.toString()]: [ + { + range: state.convertPositionRange(range1), + newText: 'NewTest2 Changed', + }, + ], + }, + }, + actualEdits + ); +}); + +test('test convertToWorkspaceEdit omits annotationId without changeAnnotations', () => { + const code = ` +// @filename: a.py +//// [|/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const renameTarget = Uri.file( + combinePaths(getDirectoryPath(range.fileName), 'a_renamed.py'), + state.serviceProvider + ); + const deleteTarget = Uri.file(combinePaths(getDirectoryPath(range.fileName), 'c.py'), state.serviceProvider); + + const editActions = { + edits: [ + { + fileUri: range.fileUri, + range: state.convertPositionRange(range), 
+ replacementText: 'x', + }, + ], + fileOperations: [ + { + kind: 'create' as const, + fileUri: Uri.file(combinePaths(getDirectoryPath(range.fileName), 'b.py'), state.serviceProvider), + }, + { + kind: 'rename' as const, + oldFileUri: range.fileUri, + newFileUri: renameTarget, + }, + { + kind: 'delete' as const, + fileUri: deleteTarget, + }, + ], + }; + + const ws = convertToWorkspaceEdit(state.workspace.service.fs, editActions); + assert.strictEqual(ws.changeAnnotations, undefined); + const tde = ws.documentChanges!.find((d) => TextDocumentEdit.is(d)) as TextDocumentEdit; + const anyEdit = tde.edits[0] as any; + assert.strictEqual(anyEdit.annotationId, undefined); + const createOp = ws.documentChanges!.find((d) => (d as any).kind === 'create') as CreateFile; + assert.strictEqual(createOp.annotationId, undefined); + const renameOp = ws.documentChanges!.find((d) => (d as any).kind === 'rename') as RenameFile; + assert.strictEqual(renameOp.annotationId, undefined); + const deleteOp = ws.documentChanges!.find((d) => (d as any).kind === 'delete') as DeleteFile; + assert.strictEqual(deleteOp.annotationId, undefined); +}); + +test('test convertToWorkspaceEdit includes annotationId with changeAnnotations', () => { + const code = ` +// @filename: a.py +//// [|/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const renameTarget = Uri.file( + combinePaths(getDirectoryPath(range.fileName), 'a_renamed.py'), + state.serviceProvider + ); + const deleteTarget = Uri.file(combinePaths(getDirectoryPath(range.fileName), 'c.py'), state.serviceProvider); + + const editActions = { + edits: [ + { + fileUri: range.fileUri, + range: state.convertPositionRange(range), + replacementText: 'x', + }, + ], + fileOperations: [ + { + kind: 'create' as const, + fileUri: Uri.file(combinePaths(getDirectoryPath(range.fileName), 'b.py'), state.serviceProvider), + }, + { + kind: 'rename' as const, + oldFileUri: range.fileUri, 
+ newFileUri: renameTarget, + }, + { + kind: 'delete' as const, + fileUri: deleteTarget, + }, + ], + }; + + const changeAnnotations = { + default: { label: 'label', description: 'desc', needsConfirmation: false }, + }; + + const ws = convertToWorkspaceEdit(state.workspace.service.fs, editActions, changeAnnotations, 'default'); + assert.ok(ws.changeAnnotations); + assert.ok(ws.changeAnnotations!['default']); + const tde = ws.documentChanges!.find((d) => TextDocumentEdit.is(d)) as TextDocumentEdit; + const anyEdit = tde.edits[0] as any; + assert.strictEqual(anyEdit.annotationId, 'default'); + const createOp = ws.documentChanges!.find((d) => (d as any).kind === 'create') as CreateFile; + assert.strictEqual(createOp.annotationId, 'default'); + const renameOp = ws.documentChanges!.find((d) => (d as any).kind === 'rename') as RenameFile; + assert.strictEqual(renameOp.annotationId, 'default'); + const deleteOp = ws.documentChanges!.find((d) => (d as any).kind === 'delete') as DeleteFile; + assert.strictEqual(deleteOp.annotationId, 'default'); +}); + +function applyWorkspaceEditToService(service: AnalyzerService, edits: WorkspaceEdit, filesChanged: Map<string, Uri>) { + const program = service.backgroundAnalysisProgram.program; + applyWorkspaceEdit(program, edits, filesChanged); +} + +async function getClonedService(state: TestState) { + return await AnalyzerServiceExecutor.cloneService( + new TestLanguageService(state.workspace, state.console, state.workspace.service.fs), + state.workspace, + { useBackgroundAnalysis: false } + ); +} diff --git a/python-parser/packages/pyright-internal/src/tests/zipfs.test.ts b/python-parser/packages/pyright-internal/src/tests/zipfs.test.ts new file mode 100644 index 00000000..758f1b8a --- /dev/null +++ b/python-parser/packages/pyright-internal/src/tests/zipfs.test.ts @@ -0,0 +1,126 @@ +/* + * zipfs.test.ts + * + * zip/egg file related FS tests. 
+ */ + +import * as assert from 'assert'; +import * as path from 'path'; +import { RealTempFile, createFromRealFileSystem } from '../common/realFileSystem'; +import { compareStringsCaseSensitive } from '../common/stringUtils'; +import { Uri } from '../common/uri/uri'; + +function runTests(p: string): void { + const tempFile = new RealTempFile(); + const zipRoot = Uri.file(path.resolve(path.dirname(module.filename), p), tempFile); + const fs = createFromRealFileSystem(tempFile); + + test('stat root', () => { + const stats = fs.statSync(zipRoot); + assert.strictEqual(stats.isDirectory(), true); + assert.strictEqual(stats.isFile(), false); + assert.strictEqual((stats as any).isZipDirectory(), true); + assert.strictEqual(stats.isSymbolicLink(), false); + }); + + test('readdirEntriesSync root', () => { + const entries = fs.readdirEntriesSync(zipRoot); + assert.strictEqual(entries.length, 2); + + entries.sort((a, b) => compareStringsCaseSensitive(a.name, b.name)); + + assert.strictEqual(entries[0].name, 'EGG-INFO'); + assert.strictEqual(entries[0].isDirectory(), true); + assert.strictEqual(entries[0].isFile(), false); + + assert.strictEqual(entries[1].name, 'test'); + assert.strictEqual(entries[1].isDirectory(), true); + assert.strictEqual(entries[1].isFile(), false); + }); + + test('stat EGG-INFO', () => { + const stats = fs.statSync(zipRoot.combinePaths('EGG-INFO')); + assert.strictEqual(stats.isDirectory(), true); + assert.strictEqual(stats.isFile(), false); + }); + + test('readdirEntriesSync EGG-INFO', () => { + const entries = fs.readdirEntriesSync(zipRoot.combinePaths('EGG-INFO')); + assert.strictEqual(entries.length, 5); + + entries.sort((a, b) => compareStringsCaseSensitive(a.name, b.name)); + + assert.strictEqual(entries[0].name, 'PKG-INFO'); + assert.strictEqual(entries[0].isDirectory(), false); + assert.strictEqual(entries[0].isFile(), true); + + assert.strictEqual(entries[1].name, 'SOURCES.txt'); + assert.strictEqual(entries[1].isDirectory(), false); + 
assert.strictEqual(entries[1].isFile(), true); + + assert.strictEqual(entries[2].name, 'dependency_links.txt'); + assert.strictEqual(entries[2].isDirectory(), false); + assert.strictEqual(entries[2].isFile(), true); + + assert.strictEqual(entries[3].name, 'top_level.txt'); + assert.strictEqual(entries[3].isDirectory(), false); + assert.strictEqual(entries[3].isFile(), true); + + assert.strictEqual(entries[4].name, 'zip-safe'); + assert.strictEqual(entries[4].isDirectory(), false); + assert.strictEqual(entries[4].isFile(), true); + }); + + test('read file', () => { + const contents = fs.readFileSync(zipRoot.combinePaths('EGG-INFO', 'top_level.txt'), 'utf-8'); + assert.strictEqual(contents.trim(), 'test'); + }); + + test('read file async', async () => { + const contents = await fs.readFileText(zipRoot.combinePaths('EGG-INFO', 'top_level.txt'), 'utf-8'); + assert.strictEqual(contents.trim(), 'test'); + }); + + test('unlink fails', async () => { + expect(() => { + fs.unlinkSync(zipRoot.combinePaths('EGG-INFO', 'top_level.txt')); + }).toThrow(/read-only filesystem/); + }); + + test('isInZip', () => { + assert.strictEqual(fs.isInZip(zipRoot.combinePaths('EGG-INFO', 'top_level.txt')), true); + assert.strictEqual(fs.isInZip(Uri.file(module.filename, tempFile)), false); + }); + + tempFile.dispose(); +} + +describe('zip', () => runTests('./samples/zipfs/basic.zip')); +describe('egg', () => runTests('./samples/zipfs/basic.egg')); +describe('jar', () => runTests('./samples/zipfs/basic.jar')); + +function runBadTests(p: string): void { + const tempFile = new RealTempFile(); + const zipRoot = Uri.file(path.resolve(path.dirname(module.filename), p), tempFile); + const fs = createFromRealFileSystem(tempFile); + + test('stat root', () => { + const stats = fs.statSync(zipRoot); + assert.strictEqual(stats.isDirectory(), false); + assert.strictEqual(stats.isFile(), true); + }); + + test('isInZip', () => { + assert.strictEqual(fs.isInZip(zipRoot.combinePaths('EGG-INFO', 
'top_level.txt')), false); + }); + + tempFile.dispose(); +} + +describe('corrupt zip', () => runBadTests('./samples/zipfs/bad.zip')); +describe('corrupt egg', () => runBadTests('./samples/zipfs/bad.egg')); +describe('corrupt jar', () => runBadTests('./samples/zipfs/bad.jar')); + +describe('corrupt zip with magic', () => runBadTests('./samples/zipfs/corrupt.zip')); +describe('corrupt egg with magic', () => runBadTests('./samples/zipfs/corrupt.egg')); +describe('corrupt jar with magic', () => runBadTests('./samples/zipfs/corrupt.jar')); diff --git a/python-parser/packages/pyright-internal/src/types.ts b/python-parser/packages/pyright-internal/src/types.ts new file mode 100644 index 00000000..1e23d3bf --- /dev/null +++ b/python-parser/packages/pyright-internal/src/types.ts @@ -0,0 +1,39 @@ +/* + * types.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Types used for language server capabilities. + */ +import { MarkupKind } from 'vscode-languageserver-types'; + +export interface ClientCapabilities { + hasConfigurationCapability: boolean; + hasVisualStudioExtensionsCapability: boolean; + hasWorkspaceFoldersCapability: boolean; + hasWatchFileCapability: boolean; + hasWatchFileRelativePathCapability: boolean; + hasActiveParameterCapability: boolean; + hasSignatureLabelOffsetCapability: boolean; + hasHierarchicalDocumentSymbolCapability: boolean; + hasWindowProgressCapability: boolean; + hasGoToDeclarationCapability: boolean; + hasDocumentChangeCapability: boolean; + hasDocumentAnnotationCapability: boolean; + hasCompletionCommitCharCapability: boolean; + hoverContentFormat: MarkupKind; + completionDocFormat: MarkupKind; + completionSupportsSnippet: boolean; + signatureDocFormat: MarkupKind; + supportsDeprecatedDiagnosticTag: boolean; + supportsUnnecessaryDiagnosticTag: boolean; + supportsTaskItemDiagnosticTag: boolean; + completionItemResolveSupportsAdditionalTextEdits: boolean; + supportsPullDiagnostics: boolean; + 
requiresPullRelatedInformationCapability: boolean; +} + +export type InitializationOptions = { + diagnosticMode?: string; + disablePullDiagnostics?: boolean; +}; diff --git a/python-parser/packages/pyright-internal/src/workspaceFactory.ts b/python-parser/packages/pyright-internal/src/workspaceFactory.ts new file mode 100644 index 00000000..446b664b --- /dev/null +++ b/python-parser/packages/pyright-internal/src/workspaceFactory.ts @@ -0,0 +1,456 @@ +/* + * workspaceFactory.ts + * + * Workspace management related functionality. + */ + +import { + InitializeParams, + WorkspaceFoldersChangeEvent, + WorkspaceFolder as lspWorkspaceFolder, +} from 'vscode-languageserver'; + +import { AnalyzerService } from './analyzer/service'; +import { ConsoleInterface } from './common/console'; +import { createDeferred } from './common/deferred'; +import { ServiceProvider } from './common/serviceProvider'; +import { Uri } from './common/uri/uri'; + +let WorkspaceFactoryIdCounter = 0; + +export enum WellKnownWorkspaceKinds { + Default = 'default', + Regular = 'regular', + Limited = 'limited', + Cloned = 'cloned', + Test = 'test', +} + +export interface InitStatus { + resolve(): void; + reset(): InitStatus; + markCalled(): void; + promise: Promise<void>; + resolved(): boolean; +} + +export function createInitStatus(): InitStatus { + // Due to the way we get `python path`, `include/exclude` from settings to initialize workspace, + // we need to wait for getSettings to finish before letting IDE features to use workspace (`isInitialized` field). + // So most of cases, whenever we create new workspace, we send request to workspace/configuration right way + // except one place which is `initialize` LSP call. + // In `initialize` method where we create `initial workspace`, we can't do that since LSP spec doesn't allow + // LSP server from sending any request to client until `initialized` method is called. 
+ // This flag indicates whether we had our initial updateSetting call or not after `initialized` call. + let called = false; + + const deferred = createDeferred<void>(); + const self = { + promise: deferred.promise, + resolve: () => { + called = true; + deferred.resolve(); + }, + markCalled: () => { + called = true; + }, + reset: () => { + if (!called) { + return self; + } + + return createInitStatus(); + }, + resolved: () => { + return deferred.resolved; + }, + }; + + return self; +} + +export interface WorkspaceFolder { + workspaceName: string; + rootUri: Uri | undefined; +} + +// path and uri will point to a workspace itself. It could be a folder +// if the workspace represents a folder. it could be '' if it is the default workspace. +// But it also could be a file if it is a virtual workspace. +// rootPath will always point to the folder that contains the workspace. +export interface Workspace extends WorkspaceFolder { + kinds: string[]; + service: AnalyzerService; + disableLanguageServices: boolean; + disableTaggedHints: boolean; + disableOrganizeImports: boolean; + disableWorkspaceSymbol: boolean; + isInitialized: InitStatus; + searchPathsToWatch: Uri[]; +} + +export interface NormalWorkspace extends Workspace { + rootUri: Uri; +} + +export function renameWorkspace(workspace: Workspace, name: string) { + workspace.workspaceName = name; + workspace.service.setServiceName(name); +} + +export type CreateServiceFunction = (name: string, workspaceRoot: Uri | undefined, kinds: string[]) => AnalyzerService; + +export interface IWorkspaceFactory { + handleInitialize(params: InitializeParams): void; + handleWorkspaceFoldersChanged(params: WorkspaceFoldersChangeEvent, workspaces: lspWorkspaceFolder[] | null): void; + items(): AllWorkspace[]; + clear(): void; + hasMultipleWorkspaces(kind?: string): boolean; + getContainingWorkspace(filePath: Uri, pythonPath?: Uri): NormalWorkspace | undefined; + getNonDefaultWorkspaces(kind?: string): NormalWorkspace[]; + 
getWorkspaceForFile(uri: Uri, pythonPath: Uri | undefined): Promise<Workspace>; + getContainingWorkspacesForFile(filePath: Uri): Promise<Workspace[]>; +} + +export class WorkspaceFactory implements IWorkspaceFactory { + private _defaultWorkspacePath = '<default>'; + private _map = new Map<string, AllWorkspace>(); + private _id = WorkspaceFactoryIdCounter++; + + constructor( + private readonly _console: ConsoleInterface, + private readonly _createService: CreateServiceFunction, + private readonly _onWorkspaceCreated: (workspace: AllWorkspace) => void, + private readonly _onWorkspaceRemoved: (workspace: AllWorkspace) => void, + private readonly _serviceProvider: ServiceProvider + ) { + this._console.log(`WorkspaceFactory ${this._id} created`); + } + + handleInitialize(params: InitializeParams) { + // Create a service instance for each of the workspace folders. + if (params.workspaceFolders) { + params.workspaceFolders.forEach((folder) => { + this._add(Uri.parse(folder.uri, this._serviceProvider), folder.name, [WellKnownWorkspaceKinds.Regular]); + }); + } else if (params.rootPath) { + this._add(Uri.file(params.rootPath, this._serviceProvider), '', [WellKnownWorkspaceKinds.Regular]); + } + } + + handleWorkspaceFoldersChanged(params: WorkspaceFoldersChangeEvent, workspaces: lspWorkspaceFolder[] | null) { + params.removed.forEach((workspaceInfo) => { + const uri = Uri.parse(workspaceInfo.uri, this._serviceProvider); + // Delete all workspaces for this folder. Even the ones generated for notebook kernels. 
+ const workspaces = this.getNonDefaultWorkspaces().filter((w) => w.rootUri.equals(uri)); + workspaces.forEach((w) => { + this._remove(w); + }); + }); + + params.added.forEach((workspaceInfo) => { + const uri = Uri.parse(workspaceInfo.uri, this._serviceProvider); + + // Skip if workspace already exists (e.g., created during initialize) + if (this.getNonDefaultWorkspaces().some((w) => w.rootUri.equals(uri))) { + return; + } + + this._add(uri, workspaceInfo.name, [WellKnownWorkspaceKinds.Regular]); + }); + + // Ensure name changes are also reflected. + const foldersToCheck = + workspaces?.filter( + (w) => !params.added.some((a) => a.uri === w.uri) && !params.removed.some((a) => a.uri === w.uri) + ) ?? []; + foldersToCheck.forEach((workspaceInfo) => { + const uri = Uri.parse(workspaceInfo.uri, this._serviceProvider); + + const workspaces = this.getNonDefaultWorkspaces().filter( + (w) => w.rootUri.equals(uri) && w.workspaceName !== workspaceInfo.name + ); + + workspaces.forEach((w) => renameWorkspace(w, workspaceInfo.name)); + }); + } + + items() { + return Array.from(this._map.values()); + } + + clear() { + this._map.forEach((workspace) => { + workspace.isInitialized.resolve(); + workspace.service.dispose(); + }); + this._map.clear(); + this._console.log(`WorkspaceFactory ${this._id} clear`); + } + + hasMultipleWorkspaces(kind?: string) { + if (this._map.size === 0 || this._map.size === 1) { + return false; + } + + let count = 0; + for (const kv of this._map) { + if (!kind || kv[1].kinds.some((k) => k === kind)) { + count++; + } + + if (count > 1) { + return true; + } + } + + return false; + } + + getContainingWorkspace(filePath: Uri, pythonPath?: Uri): NormalWorkspace | undefined { + return this._getBestRegularWorkspace( + this.getNonDefaultWorkspaces(WellKnownWorkspaceKinds.Regular).filter((w) => filePath.startsWith(w.rootUri)) + ); + } + + getNonDefaultWorkspaces(kind?: string): NormalWorkspace[] { + const workspaces: NormalWorkspace[] = []; + 
this._map.forEach((workspace) => { + if (!workspace.rootUri) { + return; + } + + if (kind && !workspace.kinds.some((k) => k === kind)) { + return; + } + + workspaces.push(workspace); + }); + + return workspaces; + } + + // Returns the best workspace for a file. Waits for the workspace to be finished handling other events before + // returning the appropriate workspace. + async getWorkspaceForFile(uri: Uri, pythonPath: Uri | undefined): Promise<Workspace> { + // Wait for all workspaces to be initialized before attempting to find the best workspace. Otherwise + // the list of files won't be complete and the `contains` check might fail. + await Promise.all(this.items().map((w) => w.isInitialized.promise)); + + // Find or create best match. + const workspace = await this._getOrCreateBestWorkspaceForFile(uri); + + // The workspace may have just been created. Wait for it to be initialized before returning it. + await workspace.isInitialized.promise; + + return workspace; + } + + async getContainingWorkspacesForFile(filePath: Uri): Promise<Workspace[]> { + // Wait for all workspaces to be initialized before attempting to find the best workspace. Otherwise + // the list of files won't be complete and the `contains` check might fail. + await Promise.all(this.items().map((w) => w.isInitialized.promise)); + + // Find or create best match. + // All workspaces that track the file should be considered. + const workspaces = this.items().filter((w) => w.service.isTracked(filePath)); + + // If that list is empty, get the best workspace + if (workspaces.length === 0) { + workspaces.push(this._getBestWorkspaceForFile(filePath)); + } + + // The workspaces may have just been created, wait for them all to be initialized + await Promise.all(workspaces.map((w) => w.isInitialized.promise)); + + return workspaces; + } + + private _add<T extends Uri | undefined>( + rootUri: T, + name: string, + kinds: string[] + ): ConditionalWorkspaceReturnType<T> { + const uri = rootUri ?? 
Uri.empty(); + + // Update the kind based if the uri is local or not + if (!kinds.includes(WellKnownWorkspaceKinds.Default) && !uri.isLocal()) { + // Web based workspace should be limited. + kinds = [...kinds, WellKnownWorkspaceKinds.Limited]; + } + + const result: Workspace = { + workspaceName: name, + rootUri, + kinds, + service: this._createService(name, uri, kinds), + disableLanguageServices: false, + disableTaggedHints: false, + disableOrganizeImports: false, + disableWorkspaceSymbol: false, + isInitialized: createInitStatus(), + searchPathsToWatch: [], + }; + + // Stick in our map + const key = this._getWorkspaceKey(result); + + // Make sure to delete existing workspaces if there are any. + this._remove(result); + this._console.log(`WorkspaceFactory ${this._id} add ${key}`); + this._map.set(key, result); + + // Tell our owner we added something. Order matters here as we + // don't want to fire the workspace created while the old copy of this + // workspace is still in the map. + this._onWorkspaceCreated(result); + + return result as ConditionalWorkspaceReturnType<T>; + } + + private _remove(value: Workspace) { + const key = this._getWorkspaceKey(value); + const workspace = this._map.get(key); + if (workspace) { + workspace.isInitialized.resolve(); + + this._onWorkspaceRemoved(workspace); + + workspace.service.dispose(); + this._console.log(`WorkspaceFactory ${this._id} remove ${key}`); + this._map.delete(key); + } + } + + private _getDefaultWorkspaceKey() { + return this._defaultWorkspacePath; + } + + private _getWorkspaceKey(value: Workspace) { + // Special the root path for the default workspace. It will be created + // without a root path + if (value.kinds.includes(WellKnownWorkspaceKinds.Default)) { + return this._getDefaultWorkspaceKey(); + } + + // Key is defined by the rootPath and the pythonPath. We might include platform in this, but for now + // platform is only used by the import resolver. 
+ return `${value.rootUri}`; + } + + private async _getOrCreateBestWorkspaceForFile(uri: Uri): Promise<Workspace> { + // Find the current best workspace (without creating a new one) + const bestInstance = this._getBestWorkspaceForFile(uri); + + // Make sure the best instance is initialized so that it has its pythonPath. + await bestInstance.isInitialized.promise; + + return bestInstance; + } + + private _getBestWorkspaceForFile(uri: Uri): Workspace { + let bestInstance: Workspace | undefined; + + // The order of how we find the best matching workspace for the given file is + // 1. The given file is the workspace itself (ex, a file being a virtual workspace itself). + // 2. The given file matches the fileSpec of the service under the workspace + // (the file is a user file the workspace provides LSP service for). + // 3. The given file doesn't match anything but we have only 1 regular workspace + // (ex, open a library file from the workspace). + // 4. The given file doesn't match anything and there are multiple workspaces but one of workspaces + // contains the file (ex, open a library file already imported by a workspace). + // 5. If none of the above works, then it matches the default workspace. + + // First find the workspaces that are tracking the file + const trackingWorkspaces = this.items() + .filter((w) => w.service.isTracked(uri)) + .filter(isNormalWorkspace); + + // Then find the best in all of those that actually matches the pythonPath. 
+ bestInstance = this._getBestRegularWorkspace(trackingWorkspaces); + + const regularWorkspaces = this.getNonDefaultWorkspaces(WellKnownWorkspaceKinds.Regular); + + // If it's not in a tracked workspace, see if we only have regular workspaces with the same + // length root path (basically, the same workspace with just different python paths) + if ( + bestInstance === undefined && + regularWorkspaces.every( + (w) => + w.rootUri.scheme === regularWorkspaces[0].rootUri.scheme && + (w.rootUri.scheme === uri.scheme || uri.isUntitled()) && + w.rootUri.equals(regularWorkspaces[0].rootUri) + ) + ) { + bestInstance = this._getBestRegularWorkspace(regularWorkspaces); + } + + // If the regular workspaces don't all have the same length or they don't + // actually match on the python path, then try the workspaces that already have the file open or scanned. + if (bestInstance === undefined) { + bestInstance = + this._getBestRegularWorkspace( + regularWorkspaces.filter((w) => w.service.hasSourceFile(uri) && w.rootUri.scheme === uri.scheme) + ) || bestInstance; + } + + // If that still didn't work, that must mean we don't have a workspace. Create a default one. + if (bestInstance === undefined) { + bestInstance = this._getOrCreateDefaultWorkspace(); + } + + return bestInstance; + } + + private _getOrCreateDefaultWorkspace(): DefaultWorkspace { + // Default key depends upon the pythonPath + let defaultWorkspace = this._map.get(this._getDefaultWorkspaceKey()) as DefaultWorkspace; + if (!defaultWorkspace) { + // Create a default workspace for files that are outside + // of all workspaces. 
+ defaultWorkspace = this._add(undefined, this._defaultWorkspacePath, [WellKnownWorkspaceKinds.Default]); + } + + return defaultWorkspace; + } + + private _getLongestPathWorkspace(workspaces: NormalWorkspace[]): NormalWorkspace { + const longestPath = workspaces.reduce((previousPath, currentWorkspace) => { + if (!previousPath) { + return currentWorkspace.rootUri; + } + if (currentWorkspace.rootUri.getPathLength() > previousPath.getPathLength()) { + return currentWorkspace.rootUri; + } + + return previousPath; + }, Uri.empty()); + return workspaces.find((w) => w.rootUri.equals(longestPath))!; + } + + private _getBestRegularWorkspace(workspaces: NormalWorkspace[]): NormalWorkspace | undefined { + if (workspaces.length === 0) { + return undefined; + } + + // If there's only one, then it's the best. + if (workspaces.length === 1) { + return workspaces[0]; + } + + // Otherwise, just take the longest path. + return this._getLongestPathWorkspace(workspaces); + } +} + +interface DefaultWorkspace extends Workspace { + rootUri: undefined; +} + +type AllWorkspace = DefaultWorkspace | NormalWorkspace; + +type ConditionalWorkspaceReturnType<T> = T extends undefined ? 
DefaultWorkspace : NormalWorkspace; + +function isNormalWorkspace(workspace: AllWorkspace): workspace is NormalWorkspace { + return !!workspace.rootUri; +} diff --git a/python-parser/packages/pyright-internal/test_demo/a.py b/python-parser/packages/pyright-internal/test_demo/a.py new file mode 100644 index 00000000..6f96f2ff --- /dev/null +++ b/python-parser/packages/pyright-internal/test_demo/a.py @@ -0,0 +1,5 @@ +def func_a(): + pass + +class ClassA: + pass diff --git a/python-parser/packages/pyright-internal/test_demo/b.py b/python-parser/packages/pyright-internal/test_demo/b.py new file mode 100644 index 00000000..0e2c17cb --- /dev/null +++ b/python-parser/packages/pyright-internal/test_demo/b.py @@ -0,0 +1,6 @@ +from a import func_a, ClassA + +def func_b(): + result = func_a() + obj = ClassA() + return result diff --git a/python-parser/packages/pyright-internal/test_demo/test_real.py b/python-parser/packages/pyright-internal/test_demo/test_real.py new file mode 100644 index 00000000..eb5a9866 --- /dev/null +++ b/python-parser/packages/pyright-internal/test_demo/test_real.py @@ -0,0 +1,175 @@ +""" +真实的 Python 模块测试文件 +用于验证 file_structure 只提取顶层符号 +""" + +from typing import TypeAlias, List, Dict + +# ============================================ +# 顶层变量 (Top-level VAR) +# ============================================ + +TOP_STRING = "hello" +TOP_NUMBER = 42 +TOP_LIST = [1, 2, 3] +TOP_DICT = {"key": "value"} + +# ============================================ +# 顶层类型别名 (Top-level TYPE) +# ============================================ + +TopType1: TypeAlias = str +TopType2: TypeAlias = int +TopGenericType: TypeAlias = List[int] +TopDictType: TypeAlias = Dict[str, int] + +# ============================================ +# 顶层函数 (Top-level FUNC) +# ============================================ + +def top_func_no_params(): + """顶层函数:无参数""" + pass + +def top_func_with_params(a: int, b: str) -> bool: + """顶层函数:有参数和返回值""" + return True + +def top_func_calling_others(): + 
"""顶层函数:调用其他函数和类""" + result = helper_func() + obj = TopClass() + return result + +# 辅助函数 +def helper_func(): + return 42 + +# ============================================ +# 顶层类 (Top-level CLASS) +# ============================================ + +class TopClass: + """顶层类""" + + class_var = 10 + + def method(self): + pass + +# ============================================ +# 局部符号(在函数内) +# ============================================ + +def func_with_locals(): + """ + 包含局部符号的函数 + 这些不应该出现在 file_structure 中 + """ + + # 局部变量 + local_var_1 = 1 + local_var_2: str = "local" + + # 局部类型别名 + LocalType: TypeAlias = int + LocalGenericType: TypeAlias = List[str] + + # 局部函数 + def local_func(): + pass + + def local_func_with_params(x: int) -> int: + return x * 2 + + # 局部类 + class LocalClass: + pass + + return local_var_1 + +# ============================================ +# 嵌套局部符号 +# ============================================ + +def func_with_nested_locals(): + """包含嵌套局部符号的函数""" + + def nested_func(): + # 嵌套局部变量 + nested_var = 100 + + def deep_nested(): + deep_nested_var = 200 + return deep_nested_var + + return nested_var + + return nested_func() + +# ============================================ +# 类中的局部符号(方法内) +# ============================================ + +class ClassWithMethods: + """包含方法的类""" + + def method_with_locals(self): + """方法中的局部符号""" + + method_local_var = 1 + + def method_local_func(): + return method_local_var + + class MethodLocalClass: + pass + + return method_local_func() + +# ============================================ +# 复杂的顶层符号 +# ============================================ + +class ComplexClass: + """复杂类""" + + # 类变量 + attr1 = "class attr" + attr2: int = 20 + + # 方法 + def method1(self, x: int) -> int: + return x + 1 + + @staticmethod + def static_method(): + return "static" + + @classmethod + def class_method(cls): + return "class" + +# 继承 +class ChildClass(ComplexClass): + """子类""" + + def method2(self): + return "child" + +# 
============================================ +# 导出检查 +# ============================================ + +__all__ = [ + 'TOP_STRING', + 'TOP_NUMBER', + 'TopType1', + 'TopType2', + 'top_func_no_params', + 'top_func_with_params', + 'top_func_calling_others', + 'TopClass', + 'ComplexClass', + 'ChildClass', +] diff --git a/python-parser/packages/pyright-internal/test_demo/test_structure.py b/python-parser/packages/pyright-internal/test_demo/test_structure.py new file mode 100644 index 00000000..9026de7a --- /dev/null +++ b/python-parser/packages/pyright-internal/test_demo/test_structure.py @@ -0,0 +1,113 @@ +# ============================================ +# 顶层符号测试(应该出现在 symbolTable 中) +# ============================================ + +# 顶层变量(VAR) +TOP_VAR_STRING = "hello" +TOP_VAR_NUMBER = 42 +TOP_VAR_LIST = [1, 2, 3] + +# 顶层类型别名(TYPE) +from typing import TypeAlias, List, Dict +TopType: TypeAlias = str +TopGenericType: TypeAlias = List[int] +TopDictType: TypeAlias = Dict[str, int] + +# 顶层函数(FUNC) +def top_func_no_params(): + """顶层函数:无参数""" + pass + +def top_func_with_params(a: int, b: str) -> bool: + """顶层函数:有参数和返回值""" + return True + +# 顶层类(CLASS) +class TopClassSimple: + """顶层类:简单类""" + pass + +class TopClassWithMembers: + """顶层类:包含成员""" + # 类变量 + class_var = 10 + class_var_typed: int = 20 + + # 方法 + def method_simple(self): + pass + + def method_with_return(self) -> int: + return 42 + + @staticmethod + def static_method(): + pass + + @classmethod + def class_method(cls): + pass + +# ============================================ +# 局部符号测试(不应该出现在 symbolTable 中) +# ============================================ + +def func_with_local_symbols(): + """包含局部符号的函数""" + + # 局部变量 + local_var_1 = 1 + local_var_2: str = "local" + + # 局部类型别名 + LocalType: TypeAlias = int + LocalGenericType: TypeAlias = List[str] + + # 局部函数 + def local_func(): + pass + + def local_func_nested(): + """嵌套的局部函数""" + # 嵌套局部变量 + nested_var = 3 + return nested_var + + # 局部类 + class LocalClass: + 
local_class_var = 5 + + def local_method(self): + pass + + return local_var_1 + +# ============================================ +# 复杂场景测试 +# ============================================ + +class ClassWithNestedDefinitions: + """包含嵌套定义的类""" + + def method_with_local_defs(self): + """方法中的局部符号""" + + # 方法局部变量 + method_var = 1 + + # 方法局部函数 + def method_local_func(): + return method_var + + # 方法局部类 + class MethodLocalClass: + pass + + return method_local_func() + +# ============================================ +# 导入符号测试(应该出现在 import 中,但不是定义) +# ============================================ + +from typing import Optional, Union +import os diff --git a/python-parser/packages/pyright-internal/tsconfig.json b/python-parser/packages/pyright-internal/tsconfig.json new file mode 100644 index 00000000..a6585a1b --- /dev/null +++ b/python-parser/packages/pyright-internal/tsconfig.json @@ -0,0 +1,23 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./out", + "paths": { + }, + "typeRoots": ["./node_modules/@types", "./typings"] + }, + "include": [ + "src/**/*", + "src/localization/*.json", + "package.json", "typings/promise.d.ts", + ], + "exclude": [ + "node_modules", + "dist", + "out", + "debug.js", + "src/tests", + "test_demo", + "test-eval.js" + ] +} diff --git a/python-parser/pnpm-lock.yaml b/python-parser/pnpm-lock.yaml new file mode 100644 index 00000000..05e15945 --- /dev/null +++ b/python-parser/pnpm-lock.yaml @@ -0,0 +1,8214 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + devDependencies: + lerna: + specifier: ^8.2.4 + version: 8.2.4(@types/node@22.19.15)(encoding@0.1.13) + + packages/pyright-internal: + dependencies: + '@yarnpkg/fslib': + specifier: 2.10.4 + version: 2.10.4 + '@yarnpkg/libzip': + specifier: 2.3.0 + version: 2.3.0 + chalk: + specifier: ^4.1.2 + version: 4.1.2 + chokidar: + specifier: ^3.6.0 + version: 3.6.0 + command-line-args: + specifier: ^5.2.1 + 
version: 5.2.1 + jsonc-parser: + specifier: ^3.2.0 + version: 3.3.1 + smol-toml: + specifier: ^1.4.2 + version: 1.6.0 + source-map-support: + specifier: ^0.5.21 + version: 0.5.21 + tmp: + specifier: ^0.2.5 + version: 0.2.5 + vscode-jsonrpc: + specifier: ^9.0.0-next.8 + version: 9.0.0-next.11 + vscode-languageserver: + specifier: ^10.0.0-next.13 + version: 10.0.0-next.17 + vscode-languageserver-protocol: + specifier: ^3.17.6-next.13 + version: 3.17.6-next.17 + vscode-languageserver-textdocument: + specifier: ^1.0.11 + version: 1.0.12 + vscode-languageserver-types: + specifier: ^3.17.6-next.6 + version: 3.17.6-next.6 + vscode-uri: + specifier: ^3.1.0 + version: 3.1.0 + devDependencies: + '@types/command-line-args': + specifier: ^5.2.3 + version: 5.2.3 + '@types/fs-extra': + specifier: ^11.0.4 + version: 11.0.4 + '@types/jest': + specifier: ^30.0.0 + version: 30.0.0 + '@types/lodash': + specifier: ^4.17.20 + version: 4.17.24 + '@types/node': + specifier: ^22.18.12 + version: 22.19.15 + '@types/tmp': + specifier: ^0.2.6 + version: 0.2.6 + copy-webpack-plugin: + specifier: ^12.0.2 + version: 12.0.2(webpack@5.105.4) + esbuild: + specifier: ^0.20.0 + version: 0.20.2 + esbuild-loader: + specifier: ^4.4.0 + version: 4.4.2(webpack@5.105.4) + jest: + specifier: ^30.2.0 + version: 30.2.0(@types/node@22.19.15) + jest-junit: + specifier: ^16.0.0 + version: 16.0.0 + shx: + specifier: ^0.4.0 + version: 0.4.0 + ts-jest: + specifier: ^29.4.5 + version: 29.4.6(@babel/core@7.29.0)(@jest/transform@30.2.0)(@jest/types@30.2.0)(babel-jest@30.2.0(@babel/core@7.29.0))(esbuild@0.20.2)(jest-util@30.2.0)(jest@30.2.0(@types/node@22.19.15))(typescript@5.5.4) + ts-loader: + specifier: ^9.5.4 + version: 9.5.4(typescript@5.5.4)(webpack@5.105.4) + typescript: + specifier: ~5.5.4 + version: 5.5.4 + webpack: + specifier: ^5.102.1 + version: 5.105.4(esbuild@0.20.2)(webpack-cli@5.1.4) + webpack-cli: + specifier: ^5.1.4 + version: 5.1.4(webpack@5.105.4) + word-wrap: + specifier: 1.2.5 + version: 1.2.5 + 
+packages: + + '@babel/code-frame@7.29.0': + resolution: {integrity: sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==} + engines: {node: '>=6.9.0'} + + '@babel/compat-data@7.29.0': + resolution: {integrity: sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==} + engines: {node: '>=6.9.0'} + + '@babel/core@7.29.0': + resolution: {integrity: sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==} + engines: {node: '>=6.9.0'} + + '@babel/generator@7.29.1': + resolution: {integrity: sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-compilation-targets@7.28.6': + resolution: {integrity: sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==} + engines: {node: '>=6.9.0'} + + '@babel/helper-globals@7.28.0': + resolution: {integrity: sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-module-imports@7.28.6': + resolution: {integrity: sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-module-transforms@7.28.6': + resolution: {integrity: sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/helper-plugin-utils@7.28.6': + resolution: {integrity: sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==} + engines: {node: '>=6.9.0'} + + '@babel/helper-string-parser@7.27.1': + resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==} + engines: {node: '>=6.9.0'} + + 
'@babel/helper-validator-identifier@7.28.5': + resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-option@7.27.1': + resolution: {integrity: sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==} + engines: {node: '>=6.9.0'} + + '@babel/helpers@7.28.6': + resolution: {integrity: sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==} + engines: {node: '>=6.9.0'} + + '@babel/parser@7.29.0': + resolution: {integrity: sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==} + engines: {node: '>=6.0.0'} + hasBin: true + + '@babel/plugin-syntax-async-generators@7.8.4': + resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-bigint@7.8.3': + resolution: {integrity: sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-class-properties@7.12.13': + resolution: {integrity: sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-class-static-block@7.14.5': + resolution: {integrity: sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-import-attributes@7.28.6': + resolution: {integrity: sha512-jiLC0ma9XkQT3TKJ9uYvlakm66Pamywo+qwL+oL8HJOvc6TWdZXVfhqJr8CCzbSGUAbDOzlGHJC1U+vRfLQDvw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-import-meta@7.10.4': + resolution: {integrity: 
sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-json-strings@7.8.3': + resolution: {integrity: sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-jsx@7.28.6': + resolution: {integrity: sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-logical-assignment-operators@7.10.4': + resolution: {integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-nullish-coalescing-operator@7.8.3': + resolution: {integrity: sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-numeric-separator@7.10.4': + resolution: {integrity: sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-object-rest-spread@7.8.3': + resolution: {integrity: sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-optional-catch-binding@7.8.3': + resolution: {integrity: sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-optional-chaining@7.8.3': + resolution: {integrity: sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + 
'@babel/plugin-syntax-private-property-in-object@7.14.5': + resolution: {integrity: sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-top-level-await@7.14.5': + resolution: {integrity: sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-typescript@7.28.6': + resolution: {integrity: sha512-+nDNmQye7nlnuuHDboPbGm00Vqg3oO8niRRL27/4LYHUsHYh0zJ1xWOz0uRwNFmM1Avzk8wZbc6rdiYhomzv/A==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/template@7.28.6': + resolution: {integrity: sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==} + engines: {node: '>=6.9.0'} + + '@babel/traverse@7.29.0': + resolution: {integrity: sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==} + engines: {node: '>=6.9.0'} + + '@babel/types@7.29.0': + resolution: {integrity: sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==} + engines: {node: '>=6.9.0'} + + '@bcoe/v8-coverage@0.2.3': + resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} + + '@discoveryjs/json-ext@0.5.7': + resolution: {integrity: sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==} + engines: {node: '>=10.0.0'} + + '@emnapi/core@1.8.1': + resolution: {integrity: sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==} + + '@emnapi/runtime@1.8.1': + resolution: {integrity: sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==} + + '@emnapi/wasi-threads@1.1.0': + resolution: {integrity: 
sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==} + + '@esbuild/aix-ppc64@0.20.2': + resolution: {integrity: sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [aix] + + '@esbuild/aix-ppc64@0.27.3': + resolution: {integrity: sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.20.2': + resolution: {integrity: sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm64@0.27.3': + resolution: {integrity: sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm@0.20.2': + resolution: {integrity: sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + + '@esbuild/android-arm@0.27.3': + resolution: {integrity: sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + + '@esbuild/android-x64@0.20.2': + resolution: {integrity: sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + + '@esbuild/android-x64@0.27.3': + resolution: {integrity: sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + + '@esbuild/darwin-arm64@0.20.2': + resolution: {integrity: sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==} + engines: {node: '>=12'} + cpu: 
[arm64] + os: [darwin] + + '@esbuild/darwin-arm64@0.27.3': + resolution: {integrity: sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-x64@0.20.2': + resolution: {integrity: sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + + '@esbuild/darwin-x64@0.27.3': + resolution: {integrity: sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + + '@esbuild/freebsd-arm64@0.20.2': + resolution: {integrity: sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-arm64@0.27.3': + resolution: {integrity: sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.20.2': + resolution: {integrity: sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.27.3': + resolution: {integrity: sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + + '@esbuild/linux-arm64@0.20.2': + resolution: {integrity: sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm64@0.27.3': + resolution: {integrity: sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm@0.20.2': + resolution: 
{integrity: sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-arm@0.27.3': + resolution: {integrity: sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.20.2': + resolution: {integrity: sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-ia32@0.27.3': + resolution: {integrity: sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-loong64@0.20.2': + resolution: {integrity: sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-loong64@0.27.3': + resolution: {integrity: sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-mips64el@0.20.2': + resolution: {integrity: sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-mips64el@0.27.3': + resolution: {integrity: sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.20.2': + resolution: {integrity: sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-ppc64@0.27.3': + resolution: {integrity: 
sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-riscv64@0.20.2': + resolution: {integrity: sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-riscv64@0.27.3': + resolution: {integrity: sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-s390x@0.20.2': + resolution: {integrity: sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-s390x@0.27.3': + resolution: {integrity: sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-x64@0.20.2': + resolution: {integrity: sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + + '@esbuild/linux-x64@0.27.3': + resolution: {integrity: sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-arm64@0.27.3': + resolution: {integrity: sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.20.2': + resolution: {integrity: sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.27.3': + resolution: {integrity: 
sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.27.3': + resolution: {integrity: sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.20.2': + resolution: {integrity: sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.27.3': + resolution: {integrity: sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openharmony-arm64@0.27.3': + resolution: {integrity: sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + + '@esbuild/sunos-x64@0.20.2': + resolution: {integrity: sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + + '@esbuild/sunos-x64@0.27.3': + resolution: {integrity: sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + + '@esbuild/win32-arm64@0.20.2': + resolution: {integrity: sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-arm64@0.27.3': + resolution: {integrity: sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-ia32@0.20.2': + resolution: {integrity: 
sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-ia32@0.27.3': + resolution: {integrity: sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-x64@0.20.2': + resolution: {integrity: sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + + '@esbuild/win32-x64@0.27.3': + resolution: {integrity: sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + + '@hutson/parse-repository-url@3.0.2': + resolution: {integrity: sha512-H9XAx3hc0BQHY6l+IFSWHDySypcXsvsuLhgYLUGywmJ5pswRVQJUHpOsobnLYp2ZUaUlKiKDrgWWhosOwAEM8Q==} + engines: {node: '>=6.9.0'} + + '@inquirer/external-editor@1.0.3': + resolution: {integrity: sha512-RWbSrDiYmO4LbejWY7ttpxczuwQyZLBUyygsA9Nsv95hpzUWwnNTVQmAq3xuh7vNwCp07UTmE5i11XAEExx4RA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@isaacs/cliui@8.0.2': + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} + + '@isaacs/string-locale-compare@1.1.0': + resolution: {integrity: sha512-SQ7Kzhh9+D+ZW9MA0zkYv3VXhIDNx+LzM6EJ+/65I3QY+enU6Itte7E5XX7EWrqLW2FN4n06GWzBnPoC3th2aQ==} + + '@istanbuljs/load-nyc-config@1.1.0': + resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} + engines: {node: '>=8'} + + '@istanbuljs/schema@0.1.3': + resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} + engines: {node: '>=8'} + + 
'@jest/console@30.2.0': + resolution: {integrity: sha512-+O1ifRjkvYIkBqASKWgLxrpEhQAAE7hY77ALLUufSk5717KfOShg6IbqLmdsLMPdUiFvA2kTs0R7YZy+l0IzZQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/core@30.2.0': + resolution: {integrity: sha512-03W6IhuhjqTlpzh/ojut/pDB2LPRygyWX8ExpgHtQA8H/3K7+1vKmcINx5UzeOX1se6YEsBsOHQ1CRzf3fOwTQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + + '@jest/diff-sequences@30.0.1': + resolution: {integrity: sha512-n5H8QLDJ47QqbCNn5SuFjCRDrOLEZ0h8vAHCK5RL9Ls7Xa8AQLa/YxAc9UjFqoEDM48muwtBGjtMY5cr0PLDCw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/environment@30.2.0': + resolution: {integrity: sha512-/QPTL7OBJQ5ac09UDRa3EQes4gt1FTEG/8jZ/4v5IVzx+Cv7dLxlVIvfvSVRiiX2drWyXeBjkMSR8hvOWSog5g==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/expect-utils@30.2.0': + resolution: {integrity: sha512-1JnRfhqpD8HGpOmQp180Fo9Zt69zNtC+9lR+kT7NVL05tNXIi+QC8Csz7lfidMoVLPD3FnOtcmp0CEFnxExGEA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/expect@30.2.0': + resolution: {integrity: sha512-V9yxQK5erfzx99Sf+7LbhBwNWEZ9eZay8qQ9+JSC0TrMR1pMDHLMY+BnVPacWU6Jamrh252/IKo4F1Xn/zfiqA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/fake-timers@30.2.0': + resolution: {integrity: sha512-HI3tRLjRxAbBy0VO8dqqm7Hb2mIa8d5bg/NJkyQcOk7V118ObQML8RC5luTF/Zsg4474a+gDvhce7eTnP4GhYw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/get-type@30.1.0': + resolution: {integrity: sha512-eMbZE2hUnx1WV0pmURZY9XoXPkUYjpc55mb0CrhtdWLtzMQPFvu/rZkTLZFTsdaVQa+Tr4eWAteqcUzoawq/uA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/globals@30.2.0': + resolution: {integrity: sha512-b63wmnKPaK+6ZZfpYhz9K61oybvbI1aMcIs80++JI1O1rR1vaxHUCNqo3ITu6NU0d4V34yZFoHMn/uoKr/Rwfw==} + 
engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/pattern@30.0.1': + resolution: {integrity: sha512-gWp7NfQW27LaBQz3TITS8L7ZCQ0TLvtmI//4OwlQRx4rnWxcPNIYjxZpDcN4+UlGxgm3jS5QPz8IPTCkb59wZA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/reporters@30.2.0': + resolution: {integrity: sha512-DRyW6baWPqKMa9CzeiBjHwjd8XeAyco2Vt8XbcLFjiwCOEKOvy82GJ8QQnJE9ofsxCMPjH4MfH8fCWIHHDKpAQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + + '@jest/schemas@29.6.3': + resolution: {integrity: sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jest/schemas@30.0.5': + resolution: {integrity: sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/snapshot-utils@30.2.0': + resolution: {integrity: sha512-0aVxM3RH6DaiLcjj/b0KrIBZhSX1373Xci4l3cW5xiUWPctZ59zQ7jj4rqcJQ/Z8JuN/4wX3FpJSa3RssVvCug==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/source-map@30.0.1': + resolution: {integrity: sha512-MIRWMUUR3sdbP36oyNyhbThLHyJ2eEDClPCiHVbrYAe5g3CHRArIVpBw7cdSB5fr+ofSfIb2Tnsw8iEHL0PYQg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/test-result@30.2.0': + resolution: {integrity: sha512-RF+Z+0CCHkARz5HT9mcQCBulb1wgCP3FBvl9VFokMX27acKphwyQsNuWH3c+ojd1LeWBLoTYoxF0zm6S/66mjg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/test-sequencer@30.2.0': + resolution: {integrity: sha512-wXKgU/lk8fKXMu/l5Hog1R61bL4q5GCdT6OJvdAFz1P+QrpoFuLU68eoKuVc4RbrTtNnTL5FByhWdLgOPSph+Q==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/transform@30.2.0': + resolution: {integrity: 
sha512-XsauDV82o5qXbhalKxD7p4TZYYdwcaEXC77PPD2HixEFF+6YGppjrAAQurTl2ECWcEomHBMMNS9AH3kcCFx8jA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/types@30.2.0': + resolution: {integrity: sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jridgewell/gen-mapping@0.3.13': + resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} + + '@jridgewell/remapping@2.3.5': + resolution: {integrity: sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==} + + '@jridgewell/resolve-uri@3.1.2': + resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} + engines: {node: '>=6.0.0'} + + '@jridgewell/source-map@0.3.11': + resolution: {integrity: sha512-ZMp1V8ZFcPG5dIWnQLr3NSI1MiCU7UETdS/A0G8V/XWHvJv3ZsFqutJn1Y5RPmAPX6F3BiE397OqveU/9NCuIA==} + + '@jridgewell/sourcemap-codec@1.5.5': + resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} + + '@jridgewell/trace-mapping@0.3.31': + resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + + '@lerna/create@8.2.4': + resolution: {integrity: sha512-A8AlzetnS2WIuhijdAzKUyFpR5YbLLfV3luQ4lzBgIBgRfuoBDZeF+RSZPhra+7A6/zTUlrbhKZIOi/MNhqgvQ==} + engines: {node: '>=18.0.0'} + + '@napi-rs/wasm-runtime@0.2.12': + resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} + + '@napi-rs/wasm-runtime@0.2.4': + resolution: {integrity: sha512-9zESzOO5aDByvhIAsOy9TbpZ0Ur2AJbUI7UT73kcUTS2mxAMHOBaa1st/jAymNoCtvrit99kkzT1FZuXVcgfIQ==} + + '@nodelib/fs.scandir@2.1.5': + resolution: {integrity: 
sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + + '@nodelib/fs.stat@2.0.5': + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + + '@nodelib/fs.walk@1.2.8': + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + + '@npmcli/agent@2.2.2': + resolution: {integrity: sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/arborist@7.5.4': + resolution: {integrity: sha512-nWtIc6QwwoUORCRNzKx4ypHqCk3drI+5aeYdMTQQiRCcn4lOOgfQh7WyZobGYTxXPSq1VwV53lkpN/BRlRk08g==} + engines: {node: ^16.14.0 || >=18.0.0} + hasBin: true + + '@npmcli/fs@3.1.1': + resolution: {integrity: sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + '@npmcli/git@5.0.8': + resolution: {integrity: sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/installed-package-contents@2.1.0': + resolution: {integrity: sha512-c8UuGLeZpm69BryRykLuKRyKFZYJsZSCT4aVY5ds4omyZqJ172ApzgfKJ5eV/r3HgLdUYgFVe54KSFVjKoe27w==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + hasBin: true + + '@npmcli/map-workspaces@3.0.6': + resolution: {integrity: sha512-tkYs0OYnzQm6iIRdfy+LcLBjcKuQCeE5YLb8KnrIlutJfheNaPvPpgoFEyEFgbjzl5PLZ3IA/BWAwRU0eHuQDA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + '@npmcli/metavuln-calculator@7.1.1': + resolution: {integrity: sha512-Nkxf96V0lAx3HCpVda7Vw4P23RILgdi/5K1fmj2tZkWIYLpXAN8k2UVVOsW16TsS5F8Ws2I7Cm+PU1/rsVF47g==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/name-from-folder@2.0.0': + resolution: {integrity: 
sha512-pwK+BfEBZJbKdNYpHHRTNBwBoqrN/iIMO0AiGvYsp3Hoaq0WbgGSWQR6SCldZovoDpY3yje5lkFUe6gsDgJ2vg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + '@npmcli/node-gyp@3.0.0': + resolution: {integrity: sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + '@npmcli/package-json@5.2.0': + resolution: {integrity: sha512-qe/kiqqkW0AGtvBjL8TJKZk/eBBSpnJkUWvHdQ9jM2lKHXRYYJuyNpJPlJw3c8QjC2ow6NZYiLExhUaeJelbxQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/promise-spawn@7.0.2': + resolution: {integrity: sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/query@3.1.0': + resolution: {integrity: sha512-C/iR0tk7KSKGldibYIB9x8GtO/0Bd0I2mhOaDb8ucQL/bQVTmGoeREaFj64Z5+iCBRf3dQfed0CjJL7I8iTkiQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + '@npmcli/redact@2.0.1': + resolution: {integrity: sha512-YgsR5jCQZhVmTJvjduTOIHph0L73pK8xwMVaDY0PatySqVM9AZj93jpoXYSJqfHFxFkN9dmqTw6OiqExsS3LPw==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@npmcli/run-script@8.1.0': + resolution: {integrity: sha512-y7efHHwghQfk28G2z3tlZ67pLG0XdfYbcVG26r7YIXALRsrVQcTq4/tdenSmdOrEsNahIYA/eh8aEVROWGFUDg==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@nx/devkit@20.8.4': + resolution: {integrity: sha512-3r+6QmIXXAWL6K7m8vAbW31aniAZmZAZXeMhOhWcJoOAU7ggpCQaM8JP8/kO5ov/Bmhyf0i/SSVXI6kwiR5WNQ==} + peerDependencies: + nx: '>= 19 <= 21' + + '@nx/nx-darwin-arm64@20.8.4': + resolution: {integrity: sha512-8Y7+4wj1qoZsuDRpnuiHzSIsMt3VqtJ0su8dgd/MyGccvvi4pndan2R5yTiVw/wmbMxtBmZ6PO6Z8dgSIrMVog==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [darwin] + + '@nx/nx-darwin-x64@20.8.4': + resolution: {integrity: sha512-2lfuxRc56QWnAysMhcD03tpCPiRzV1+foUq0MhV2sSBIybXmgV4wHLkPZNhlBCl4FNXrWiZiN1OJ2X9AGiOdug==} + engines: {node: '>= 10'} + cpu: [x64] + os: [darwin] + + '@nx/nx-freebsd-x64@20.8.4': + resolution: 
{integrity: sha512-99vnUXZy+OUBHU+8Yhabre2qafepKg9GKkQkhmXvJGqOmuIsepK7wirUFo2PiVM8YhS6UV2rv6hKAZcQ7skYyg==} + engines: {node: '>= 10'} + cpu: [x64] + os: [freebsd] + + '@nx/nx-linux-arm-gnueabihf@20.8.4': + resolution: {integrity: sha512-dht73zpnpzEUEzMHFQs4mfiwZH3WcJgQNWkD5p7WkeJewHq2Yyd0eG5Jg3kB7wnFtwPUV1eNJRM5rephgylkLA==} + engines: {node: '>= 10'} + cpu: [arm] + os: [linux] + + '@nx/nx-linux-arm64-gnu@20.8.4': + resolution: {integrity: sha512-syXxbJZ0yPaqzVmB28QJgUtaarSiW/PQmv/5Z2Ps8rCi7kYylISPVNjP1NNiIOcGDRWbHqoBfM0bEGPfSp0rBQ==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@nx/nx-linux-arm64-musl@20.8.4': + resolution: {integrity: sha512-AlZZFolS/S0FahRKG7rJ0Z9CgmIkyzHgGaoy3qNEMDEjFhR3jt2ZZSLp90W7zjgrxojOo90ajNMrg2UmtcQRDA==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@nx/nx-linux-x64-gnu@20.8.4': + resolution: {integrity: sha512-MSu+xVNdR95tuuO+eL/a/ZeMlhfrZ627On5xaCZXnJ+lFxNg/S4nlKZQk0Eq5hYALCd/GKgFGasRdlRdOtvGPg==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@nx/nx-linux-x64-musl@20.8.4': + resolution: {integrity: sha512-KxpQpyLCgIIHWZ4iRSUN9ohCwn1ZSDASbuFCdG3mohryzCy8WrPkuPcb+68J3wuQhmA5w//Xpp/dL0hHoit9zQ==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + libc: [musl] + + '@nx/nx-win32-arm64-msvc@20.8.4': + resolution: {integrity: sha512-ffLBrxM9ibk+eWSY995kiFFRTSRb9HkD5T1s/uZyxV6jfxYPaZDBAWAETDneyBXps7WtaOMu+kVZlXQ3X+TfIA==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [win32] + + '@nx/nx-win32-x64-msvc@20.8.4': + resolution: {integrity: sha512-JxuuZc4h8EBqoYAiRHwskimpTJx70yn4lhIRFBoW5ICkxXW1Rw0yip/1UVsWRHXg/x9BxmH7VVazdfaQWmGu6A==} + engines: {node: '>= 10'} + cpu: [x64] + os: [win32] + + '@octokit/auth-token@4.0.0': + resolution: {integrity: sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==} + engines: {node: '>= 18'} + + '@octokit/core@5.2.2': + resolution: {integrity: 
sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==} + engines: {node: '>= 18'} + + '@octokit/endpoint@9.0.6': + resolution: {integrity: sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==} + engines: {node: '>= 18'} + + '@octokit/graphql@7.1.1': + resolution: {integrity: sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==} + engines: {node: '>= 18'} + + '@octokit/openapi-types@24.2.0': + resolution: {integrity: sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==} + + '@octokit/plugin-enterprise-rest@6.0.1': + resolution: {integrity: sha512-93uGjlhUD+iNg1iWhUENAtJata6w5nE+V4urXOAlIXdco6xNZtUSfYY8dzp3Udy74aqO/B5UZL80x/YMa5PKRw==} + + '@octokit/plugin-paginate-rest@11.4.4-cjs.2': + resolution: {integrity: sha512-2dK6z8fhs8lla5PaOTgqfCGBxgAv/le+EhPs27KklPhm1bKObpu6lXzwfUEQ16ajXzqNrKMujsFyo9K2eaoISw==} + engines: {node: '>= 18'} + peerDependencies: + '@octokit/core': '5' + + '@octokit/plugin-request-log@4.0.1': + resolution: {integrity: sha512-GihNqNpGHorUrO7Qa9JbAl0dbLnqJVrV8OXe2Zm5/Y4wFkZQDfTreBzVmiRfJVfE4mClXdihHnbpyyO9FSX4HA==} + engines: {node: '>= 18'} + peerDependencies: + '@octokit/core': '5' + + '@octokit/plugin-rest-endpoint-methods@13.3.2-cjs.1': + resolution: {integrity: sha512-VUjIjOOvF2oELQmiFpWA1aOPdawpyaCUqcEBc/UOUnj3Xp6DJGrJ1+bjUIIDzdHjnFNO6q57ODMfdEZnoBkCwQ==} + engines: {node: '>= 18'} + peerDependencies: + '@octokit/core': ^5 + + '@octokit/request-error@5.1.1': + resolution: {integrity: sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==} + engines: {node: '>= 18'} + + '@octokit/request@8.4.1': + resolution: {integrity: sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==} + engines: {node: '>= 18'} + + '@octokit/rest@20.1.2': + resolution: {integrity: 
sha512-GmYiltypkHHtihFwPRxlaorG5R9VAHuk/vbszVoRTGXnAsY60wYLkh/E2XiFmdZmqrisw+9FaazS1i5SbdWYgA==} + engines: {node: '>= 18'} + + '@octokit/types@13.10.0': + resolution: {integrity: sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==} + + '@pkgjs/parseargs@0.11.0': + resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} + engines: {node: '>=14'} + + '@pkgr/core@0.2.9': + resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} + engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + + '@sigstore/bundle@2.3.2': + resolution: {integrity: sha512-wueKWDk70QixNLB363yHc2D2ItTgYiMTdPwK8D9dKQMR3ZQ0c35IxP5xnwQ8cNLoCgCRcHf14kE+CLIvNX1zmA==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/core@1.1.0': + resolution: {integrity: sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/protobuf-specs@0.3.3': + resolution: {integrity: sha512-RpacQhBlwpBWd7KEJsRKcBQalbV28fvkxwTOJIqhIuDysMMaJW47V4OqW30iJB9uRpqOSxxEAQFdr8tTattReQ==} + engines: {node: ^18.17.0 || >=20.5.0} + + '@sigstore/sign@2.3.2': + resolution: {integrity: sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/tuf@2.3.4': + resolution: {integrity: sha512-44vtsveTPUpqhm9NCrbU8CWLe3Vck2HO1PNLw7RIajbB7xhtn5RBPm1VNSCMwqGYHhDsBJG8gDF0q4lgydsJvw==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sigstore/verify@1.2.1': + resolution: {integrity: sha512-8iKx79/F73DKbGfRf7+t4dqrc0bRr0thdPrxAtCKWRm/F0tG71i6O1rvlnScncJLLBZHn3h8M3c1BSUAb9yu8g==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@sinclair/typebox@0.27.10': + resolution: {integrity: sha512-MTBk/3jGLNB2tVxv6uLlFh1iu64iYOQ2PbdOSK3NW8JZsmlaOh2q6sdtKowBhfw8QFLmYNzTW4/oK4uATIi6ZA==} + + 
'@sinclair/typebox@0.34.48': + resolution: {integrity: sha512-kKJTNuK3AQOrgjjotVxMrCn1sUJwM76wMszfq1kdU4uYVJjvEWuFQ6HgvLt4Xz3fSmZlTOxJ/Ie13KnIcWQXFA==} + + '@sindresorhus/merge-streams@2.3.0': + resolution: {integrity: sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==} + engines: {node: '>=18'} + + '@sinonjs/commons@3.0.1': + resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==} + + '@sinonjs/fake-timers@13.0.5': + resolution: {integrity: sha512-36/hTbH2uaWuGVERyC6da9YwGWnzUZXuPro/F2LfsdOsLnCojz/iSH8MxUt/FD2S5XBSVPhmArFUXcpCQ2Hkiw==} + + '@tufjs/canonical-json@2.0.0': + resolution: {integrity: sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@tufjs/models@2.0.1': + resolution: {integrity: sha512-92F7/SFyufn4DXsha9+QfKnN03JGqtMFMXgSHbZOo8JG59WkTni7UzAouNQDf7AuP9OAMxVOPQcqG3sB7w+kkg==} + engines: {node: ^16.14.0 || >=18.0.0} + + '@tybys/wasm-util@0.10.1': + resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==} + + '@tybys/wasm-util@0.9.0': + resolution: {integrity: sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw==} + + '@types/babel__core@7.20.5': + resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} + + '@types/babel__generator@7.27.0': + resolution: {integrity: sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==} + + '@types/babel__template@7.4.4': + resolution: {integrity: sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==} + + '@types/babel__traverse@7.28.0': + resolution: {integrity: sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==} + + 
'@types/command-line-args@5.2.3': + resolution: {integrity: sha512-uv0aG6R0Y8WHZLTamZwtfsDLVRnOa+n+n5rEvFWL5Na5gZ8V2Teab/duDPFzIIIhs9qizDpcavCusCLJZu62Kw==} + + '@types/emscripten@1.41.5': + resolution: {integrity: sha512-cMQm7pxu6BxtHyqJ7mQZ2kXWV5SLmugybFdHCBbJ5eHzOo6VhBckEgAT3//rP5FwPHNPeEiq4SmQ5ucBwsOo4Q==} + + '@types/eslint-scope@3.7.7': + resolution: {integrity: sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==} + + '@types/eslint@9.6.1': + resolution: {integrity: sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==} + + '@types/estree@1.0.8': + resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + + '@types/fs-extra@11.0.4': + resolution: {integrity: sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==} + + '@types/istanbul-lib-coverage@2.0.6': + resolution: {integrity: sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==} + + '@types/istanbul-lib-report@3.0.3': + resolution: {integrity: sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==} + + '@types/istanbul-reports@3.0.4': + resolution: {integrity: sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==} + + '@types/jest@30.0.0': + resolution: {integrity: sha512-XTYugzhuwqWjws0CVz8QpM36+T+Dz5mTEBKhNs/esGLnCIlGdRy+Dq78NRjd7ls7r8BC8ZRMOrKlkO1hU0JOwA==} + + '@types/json-schema@7.0.15': + resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + + '@types/jsonfile@6.1.4': + resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==} + + '@types/lodash@4.17.24': + resolution: {integrity: 
sha512-gIW7lQLZbue7lRSWEFql49QJJWThrTFFeIMJdp3eH4tKoxm1OvEPg02rm4wCCSHS0cL3/Fizimb35b7k8atwsQ==} + + '@types/minimatch@3.0.5': + resolution: {integrity: sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ==} + + '@types/minimist@1.2.5': + resolution: {integrity: sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==} + + '@types/node@22.19.15': + resolution: {integrity: sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg==} + + '@types/normalize-package-data@2.4.4': + resolution: {integrity: sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==} + + '@types/stack-utils@2.0.3': + resolution: {integrity: sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==} + + '@types/tmp@0.2.6': + resolution: {integrity: sha512-chhaNf2oKHlRkDGt+tiKE2Z5aJ6qalm7Z9rlLdBwmOiAAf09YQvvoLXjWK4HWPF1xU/fqvMgfNfpVoBscA/tKA==} + + '@types/yargs-parser@21.0.3': + resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==} + + '@types/yargs@17.0.35': + resolution: {integrity: sha512-qUHkeCyQFxMXg79wQfTtfndEC+N9ZZg76HJftDJp+qH2tV7Gj4OJi7l+PiWwJ+pWtW8GwSmqsDj/oymhrTWXjg==} + + '@ungap/structured-clone@1.3.0': + resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} + + '@unrs/resolver-binding-android-arm-eabi@1.11.1': + resolution: {integrity: sha512-ppLRUgHVaGRWUx0R0Ut06Mjo9gBaBkg3v/8AxusGLhsIotbBLuRk51rAzqLC8gq6NyyAojEXglNjzf6R948DNw==} + cpu: [arm] + os: [android] + + '@unrs/resolver-binding-android-arm64@1.11.1': + resolution: {integrity: sha512-lCxkVtb4wp1v+EoN+HjIG9cIIzPkX5OtM03pQYkG+U5O/wL53LC4QbIeazgiKqluGeVEeBlZahHalCaBvU1a2g==} + cpu: [arm64] + os: [android] + + '@unrs/resolver-binding-darwin-arm64@1.11.1': + resolution: {integrity: 
sha512-gPVA1UjRu1Y/IsB/dQEsp2V1pm44Of6+LWvbLc9SDk1c2KhhDRDBUkQCYVWe6f26uJb3fOK8saWMgtX8IrMk3g==} + cpu: [arm64] + os: [darwin] + + '@unrs/resolver-binding-darwin-x64@1.11.1': + resolution: {integrity: sha512-cFzP7rWKd3lZaCsDze07QX1SC24lO8mPty9vdP+YVa3MGdVgPmFc59317b2ioXtgCMKGiCLxJ4HQs62oz6GfRQ==} + cpu: [x64] + os: [darwin] + + '@unrs/resolver-binding-freebsd-x64@1.11.1': + resolution: {integrity: sha512-fqtGgak3zX4DCB6PFpsH5+Kmt/8CIi4Bry4rb1ho6Av2QHTREM+47y282Uqiu3ZRF5IQioJQ5qWRV6jduA+iGw==} + cpu: [x64] + os: [freebsd] + + '@unrs/resolver-binding-linux-arm-gnueabihf@1.11.1': + resolution: {integrity: sha512-u92mvlcYtp9MRKmP+ZvMmtPN34+/3lMHlyMj7wXJDeXxuM0Vgzz0+PPJNsro1m3IZPYChIkn944wW8TYgGKFHw==} + cpu: [arm] + os: [linux] + + '@unrs/resolver-binding-linux-arm-musleabihf@1.11.1': + resolution: {integrity: sha512-cINaoY2z7LVCrfHkIcmvj7osTOtm6VVT16b5oQdS4beibX2SYBwgYLmqhBjA1t51CarSaBuX5YNsWLjsqfW5Cw==} + cpu: [arm] + os: [linux] + + '@unrs/resolver-binding-linux-arm64-gnu@1.11.1': + resolution: {integrity: sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@unrs/resolver-binding-linux-arm64-musl@1.11.1': + resolution: {integrity: sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@unrs/resolver-binding-linux-ppc64-gnu@1.11.1': + resolution: {integrity: sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==} + cpu: [ppc64] + os: [linux] + libc: [glibc] + + '@unrs/resolver-binding-linux-riscv64-gnu@1.11.1': + resolution: {integrity: sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==} + cpu: [riscv64] + os: [linux] + libc: [glibc] + + '@unrs/resolver-binding-linux-riscv64-musl@1.11.1': + resolution: {integrity: 
sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==} + cpu: [riscv64] + os: [linux] + libc: [musl] + + '@unrs/resolver-binding-linux-s390x-gnu@1.11.1': + resolution: {integrity: sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==} + cpu: [s390x] + os: [linux] + libc: [glibc] + + '@unrs/resolver-binding-linux-x64-gnu@1.11.1': + resolution: {integrity: sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@unrs/resolver-binding-linux-x64-musl@1.11.1': + resolution: {integrity: sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==} + cpu: [x64] + os: [linux] + libc: [musl] + + '@unrs/resolver-binding-wasm32-wasi@1.11.1': + resolution: {integrity: sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + + '@unrs/resolver-binding-win32-arm64-msvc@1.11.1': + resolution: {integrity: sha512-nRcz5Il4ln0kMhfL8S3hLkxI85BXs3o8EYoattsJNdsX4YUU89iOkVn7g0VHSRxFuVMdM4Q1jEpIId1Ihim/Uw==} + cpu: [arm64] + os: [win32] + + '@unrs/resolver-binding-win32-ia32-msvc@1.11.1': + resolution: {integrity: sha512-DCEI6t5i1NmAZp6pFonpD5m7i6aFrpofcp4LA2i8IIq60Jyo28hamKBxNrZcyOwVOZkgsRp9O2sXWBWP8MnvIQ==} + cpu: [ia32] + os: [win32] + + '@unrs/resolver-binding-win32-x64-msvc@1.11.1': + resolution: {integrity: sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g==} + cpu: [x64] + os: [win32] + + '@webassemblyjs/ast@1.14.1': + resolution: {integrity: sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==} + + '@webassemblyjs/floating-point-hex-parser@1.13.2': + resolution: {integrity: sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA==} + + 
'@webassemblyjs/helper-api-error@1.13.2': + resolution: {integrity: sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ==} + + '@webassemblyjs/helper-buffer@1.14.1': + resolution: {integrity: sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA==} + + '@webassemblyjs/helper-numbers@1.13.2': + resolution: {integrity: sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA==} + + '@webassemblyjs/helper-wasm-bytecode@1.13.2': + resolution: {integrity: sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA==} + + '@webassemblyjs/helper-wasm-section@1.14.1': + resolution: {integrity: sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw==} + + '@webassemblyjs/ieee754@1.13.2': + resolution: {integrity: sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw==} + + '@webassemblyjs/leb128@1.13.2': + resolution: {integrity: sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw==} + + '@webassemblyjs/utf8@1.13.2': + resolution: {integrity: sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ==} + + '@webassemblyjs/wasm-edit@1.14.1': + resolution: {integrity: sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ==} + + '@webassemblyjs/wasm-gen@1.14.1': + resolution: {integrity: sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg==} + + '@webassemblyjs/wasm-opt@1.14.1': + resolution: {integrity: sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw==} + + '@webassemblyjs/wasm-parser@1.14.1': + resolution: {integrity: sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ==} + + 
'@webassemblyjs/wast-printer@1.14.1': + resolution: {integrity: sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw==} + + '@webpack-cli/configtest@2.1.1': + resolution: {integrity: sha512-wy0mglZpDSiSS0XHrVR+BAdId2+yxPSoJW8fsna3ZpYSlufjvxnP4YbKTCBZnNIcGN4r6ZPXV55X4mYExOfLmw==} + engines: {node: '>=14.15.0'} + peerDependencies: + webpack: 5.x.x + webpack-cli: 5.x.x + + '@webpack-cli/info@2.0.2': + resolution: {integrity: sha512-zLHQdI/Qs1UyT5UBdWNqsARasIA+AaF8t+4u2aS2nEpBQh2mWIVb8qAklq0eUENnC5mOItrIB4LiS9xMtph18A==} + engines: {node: '>=14.15.0'} + peerDependencies: + webpack: 5.x.x + webpack-cli: 5.x.x + + '@webpack-cli/serve@2.0.5': + resolution: {integrity: sha512-lqaoKnRYBdo1UgDX8uF24AfGMifWK19TxPmM5FHc2vAGxrJ/qtyUyFBWoY1tISZdelsQ5fBcOusifo5o5wSJxQ==} + engines: {node: '>=14.15.0'} + peerDependencies: + webpack: 5.x.x + webpack-cli: 5.x.x + webpack-dev-server: '*' + peerDependenciesMeta: + webpack-dev-server: + optional: true + + '@xtuc/ieee754@1.2.0': + resolution: {integrity: sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==} + + '@xtuc/long@4.2.2': + resolution: {integrity: sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==} + + '@yarnpkg/fslib@2.10.4': + resolution: {integrity: sha512-WhaLwvXEMjCjGxOraQx+Qtmst13iAPOlSElSZfQFdLohva5owlqACRapJ78zZFEW6M9ArqdQlZaHKVN5/mM+SA==} + engines: {node: '>=12 <14 || 14.2 - 14.9 || >14.10.0'} + + '@yarnpkg/libzip@2.3.0': + resolution: {integrity: sha512-6xm38yGVIa6mKm/DUCF2zFFJhERh/QWp1ufm4cNUvxsONBmfPg8uZ9pZBdOmF6qFGr/HlT6ABBkCSx/dlEtvWg==} + engines: {node: '>=12 <14 || 14.2 - 14.9 || >14.10.0'} + + '@yarnpkg/lockfile@1.1.0': + resolution: {integrity: sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==} + + '@yarnpkg/parsers@3.0.2': + resolution: {integrity: 
sha512-/HcYgtUSiJiot/XWGLOlGxPYUG65+/31V8oqk17vZLW1xlCoR4PampyePljOxY2n8/3jz9+tIFzICsyGujJZoA==} + engines: {node: '>=18.12.0'} + + '@zkochan/js-yaml@0.0.7': + resolution: {integrity: sha512-nrUSn7hzt7J6JWgWGz78ZYI8wj+gdIJdk0Ynjpp8l+trkn58Uqsf6RYrYkEK+3X18EX+TNdtJI0WxAtc+L84SQ==} + hasBin: true + + JSONStream@1.3.5: + resolution: {integrity: sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==} + hasBin: true + + abbrev@2.0.0: + resolution: {integrity: sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + acorn-import-phases@1.0.4: + resolution: {integrity: sha512-wKmbr/DDiIXzEOiWrTTUcDm24kQ2vGfZQvM2fwg2vXqR5uW6aapr7ObPtj1th32b9u90/Pf4AItvdTh42fBmVQ==} + engines: {node: '>=10.13.0'} + peerDependencies: + acorn: ^8.14.0 + + acorn@8.16.0: + resolution: {integrity: sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==} + engines: {node: '>=0.4.0'} + hasBin: true + + add-stream@1.0.0: + resolution: {integrity: sha512-qQLMr+8o0WC4FZGQTcJiKBVC59JylcPSrTtk6usvmIDFUOCKegapy1VHQwRbFMOFyb/inzUVqHs+eMYKDM1YeQ==} + + agent-base@7.1.4: + resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==} + engines: {node: '>= 14'} + + aggregate-error@3.1.0: + resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} + engines: {node: '>=8'} + + ajv-formats@2.1.1: + resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==} + peerDependencies: + ajv: ^8.0.0 + peerDependenciesMeta: + ajv: + optional: true + + ajv-keywords@5.1.0: + resolution: {integrity: sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==} + peerDependencies: + ajv: ^8.8.2 + + ajv@8.18.0: + resolution: {integrity: 
sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==} + + ansi-colors@4.1.3: + resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==} + engines: {node: '>=6'} + + ansi-escapes@4.3.2: + resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} + engines: {node: '>=8'} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-regex@6.2.2: + resolution: {integrity: sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==} + engines: {node: '>=12'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + ansi-styles@5.2.0: + resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} + engines: {node: '>=10'} + + ansi-styles@6.2.3: + resolution: {integrity: sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==} + engines: {node: '>=12'} + + anymatch@3.1.3: + resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} + engines: {node: '>= 8'} + + aproba@2.0.0: + resolution: {integrity: sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==} + + argparse@1.0.10: + resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + array-back@3.1.0: + resolution: {integrity: 
sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==} + engines: {node: '>=6'} + + array-differ@3.0.0: + resolution: {integrity: sha512-THtfYS6KtME/yIAhKjZ2ul7XI96lQGHRputJQHO80LAWQnuGP4iCIN8vdMRboGbIEYBwU33q8Tch1os2+X0kMg==} + engines: {node: '>=8'} + + array-ify@1.0.0: + resolution: {integrity: sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng==} + + array-union@2.1.0: + resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} + engines: {node: '>=8'} + + arrify@1.0.1: + resolution: {integrity: sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==} + engines: {node: '>=0.10.0'} + + arrify@2.0.1: + resolution: {integrity: sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==} + engines: {node: '>=8'} + + async@3.2.6: + resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==} + + asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + + axios@1.13.6: + resolution: {integrity: sha512-ChTCHMouEe2kn713WHbQGcuYrr6fXTBiu460OTwWrWob16g1bXn4vtz07Ope7ewMozJAnEquLk5lWQWtBig9DQ==} + + babel-jest@30.2.0: + resolution: {integrity: sha512-0YiBEOxWqKkSQWL9nNGGEgndoeL0ZpWrbLMNL5u/Kaxrli3Eaxlt3ZtIDktEvXt4L/R9r3ODr2zKwGM/2BjxVw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + '@babel/core': ^7.11.0 || ^8.0.0-0 + + babel-plugin-istanbul@7.0.1: + resolution: {integrity: sha512-D8Z6Qm8jCvVXtIRkBnqNHX0zJ37rQcFJ9u8WOS6tkYOsRdHBzypCstaxWiu5ZIlqQtviRYbgnRLSoCEvjqcqbA==} + engines: {node: '>=12'} + + babel-plugin-jest-hoist@30.2.0: + resolution: {integrity: sha512-ftzhzSGMUnOzcCXd6WHdBGMyuwy15Wnn0iyyWGKgBDLxf9/s5ABuraCSpBX2uG0jUg4rqJnxsLc5+oYBqoxVaA==} + engines: {node: 
^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + babel-preset-current-node-syntax@1.2.0: + resolution: {integrity: sha512-E/VlAEzRrsLEb2+dv8yp3bo4scof3l9nR4lrld+Iy5NyVqgVYUJnDAmunkhPMisRI32Qc4iRiz425d8vM++2fg==} + peerDependencies: + '@babel/core': ^7.0.0 || ^8.0.0-0 + + babel-preset-jest@30.2.0: + resolution: {integrity: sha512-US4Z3NOieAQumwFnYdUWKvUKh8+YSnS/gB3t6YBiz0bskpu7Pine8pPCheNxlPEW4wnUkma2a94YuW2q3guvCQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + '@babel/core': ^7.11.0 || ^8.0.0-beta.1 + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + + baseline-browser-mapping@2.10.0: + resolution: {integrity: sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==} + engines: {node: '>=6.0.0'} + hasBin: true + + before-after-hook@2.2.3: + resolution: {integrity: sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==} + + big.js@5.2.2: + resolution: {integrity: sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==} + + bin-links@4.0.4: + resolution: {integrity: sha512-cMtq4W5ZsEwcutJrVId+a/tjt8GSbS+h0oNkdl6+6rBuEv8Ot33Bevj5KPm40t309zuhVic8NjpuL42QCiJWWA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + binary-extensions@2.3.0: + resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} + engines: {node: '>=8'} + + bl@4.1.0: + resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} + + brace-expansion@1.1.12: + resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} + + 
brace-expansion@2.0.2: + resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + browserslist@4.28.1: + resolution: {integrity: sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + + bs-logger@0.2.6: + resolution: {integrity: sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==} + engines: {node: '>= 6'} + + bser@2.1.1: + resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} + + buffer-from@1.1.2: + resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + + buffer@5.7.1: + resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} + + byte-size@8.1.1: + resolution: {integrity: sha512-tUkzZWK0M/qdoLEqikxBWe4kumyuwjl3HO6zHTr4yEI23EojPtLYXdG1+AQY7MN0cGyNDvEaJ8wiYQm6P2bPxg==} + engines: {node: '>=12.17'} + + cacache@18.0.4: + resolution: {integrity: sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + call-bind-apply-helpers@1.0.2: + resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} + engines: {node: '>= 0.4'} + + callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + + camelcase-keys@6.2.2: + resolution: {integrity: 
sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==} + engines: {node: '>=8'} + + camelcase@5.3.1: + resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==} + engines: {node: '>=6'} + + camelcase@6.3.0: + resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} + engines: {node: '>=10'} + + caniuse-lite@1.0.30001777: + resolution: {integrity: sha512-tmN+fJxroPndC74efCdp12j+0rk0RHwV5Jwa1zWaFVyw2ZxAuPeG8ZgWC3Wz7uSjT3qMRQ5XHZ4COgQmsCMJAQ==} + + chalk@4.1.0: + resolution: {integrity: sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==} + engines: {node: '>=10'} + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + char-regex@1.0.2: + resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} + engines: {node: '>=10'} + + chardet@2.1.1: + resolution: {integrity: sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==} + + chokidar@3.6.0: + resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} + engines: {node: '>= 8.10.0'} + + chownr@2.0.0: + resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==} + engines: {node: '>=10'} + + chrome-trace-event@1.0.4: + resolution: {integrity: sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ==} + engines: {node: '>=6.0'} + + ci-info@3.9.0: + resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} + engines: {node: '>=8'} + + ci-info@4.4.0: + resolution: {integrity: 
sha512-77PSwercCZU2Fc4sX94eF8k8Pxte6JAwL4/ICZLFjJLqegs7kCuAsqqj/70NQF6TvDpgFjkubQB2FW2ZZddvQg==} + engines: {node: '>=8'} + + cjs-module-lexer@2.2.0: + resolution: {integrity: sha512-4bHTS2YuzUvtoLjdy+98ykbNB5jS0+07EvFNXerqZQJ89F7DI6ET7OQo/HJuW6K0aVsKA9hj9/RVb2kQVOrPDQ==} + + clean-stack@2.2.0: + resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} + engines: {node: '>=6'} + + cli-cursor@3.1.0: + resolution: {integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==} + engines: {node: '>=8'} + + cli-spinners@2.6.1: + resolution: {integrity: sha512-x/5fWmGMnbKQAaNwN+UZlV79qBLM9JFnJuJ03gIi5whrob0xV0ofNVHy9DhwGdsMJQc2OKv0oGmLzvaqvAVv+g==} + engines: {node: '>=6'} + + cli-spinners@2.9.2: + resolution: {integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==} + engines: {node: '>=6'} + + cli-width@3.0.0: + resolution: {integrity: sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==} + engines: {node: '>= 10'} + + cliui@7.0.4: + resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==} + + cliui@8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + + clone-deep@4.0.1: + resolution: {integrity: sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==} + engines: {node: '>=6'} + + clone@1.0.4: + resolution: {integrity: sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==} + engines: {node: '>=0.8'} + + cmd-shim@6.0.3: + resolution: {integrity: sha512-FMabTRlc5t5zjdenF6mS0MBeFZm0XqHqeOkcskKFb/LYCcRQ5fVgLOHVc4Lq9CqABd9zhjwPjMBCJvMCziSVtA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + co@4.6.0: + resolution: 
{integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} + engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} + + collect-v8-coverage@1.0.3: + resolution: {integrity: sha512-1L5aqIkwPfiodaMgQunkF1zRhNqifHBmtbbbxcr6yVxxBnliw4TDOW6NxpO8DJLgJ16OT+Y4ztZqP6p/FtXnAw==} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + color-support@1.1.3: + resolution: {integrity: sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==} + hasBin: true + + colorette@2.0.20: + resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==} + + columnify@1.6.0: + resolution: {integrity: sha512-lomjuFZKfM6MSAnV9aCZC9sc0qGbmZdfygNv+nCpqVkSKdCxCklLtd16O0EILGkImHw9ZpHkAnHaB+8Zxq5W6Q==} + engines: {node: '>=8.0.0'} + + combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + + command-line-args@5.2.1: + resolution: {integrity: sha512-H4UfQhZyakIjC74I9d34fGYDwk3XpSr17QhEd0Q3I9Xq1CETHo4Hcuo87WyWHpAF1aSLjLRf5lD9ZGX2qStUvg==} + engines: {node: '>=4.0.0'} + + commander@10.0.1: + resolution: {integrity: sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==} + engines: {node: '>=14'} + + commander@2.20.3: + resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} + + common-ancestor-path@1.0.1: + resolution: {integrity: sha512-L3sHRo1pXXEqX8VU28kfgUY+YGsk09hPqZiZmLacNib6XNTCM8ubYeT7ryXQw8asB1sKgcU5lkB7ONug08aB8w==} + + compare-func@2.0.0: + resolution: {integrity: 
sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA==} + + concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + + concat-stream@2.0.0: + resolution: {integrity: sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==} + engines: {'0': node >= 6.0} + + console-control-strings@1.1.0: + resolution: {integrity: sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==} + + conventional-changelog-angular@7.0.0: + resolution: {integrity: sha512-ROjNchA9LgfNMTTFSIWPzebCwOGFdgkEq45EnvvrmSLvCtAw0HSmrCs7/ty+wAeYUZyNay0YMUNYFTRL72PkBQ==} + engines: {node: '>=16'} + + conventional-changelog-core@5.0.1: + resolution: {integrity: sha512-Rvi5pH+LvgsqGwZPZ3Cq/tz4ty7mjijhr3qR4m9IBXNbxGGYgTVVO+duXzz9aArmHxFtwZ+LRkrNIMDQzgoY4A==} + engines: {node: '>=14'} + + conventional-changelog-preset-loader@3.0.0: + resolution: {integrity: sha512-qy9XbdSLmVnwnvzEisjxdDiLA4OmV3o8db+Zdg4WiFw14fP3B6XNz98X0swPPpkTd/pc1K7+adKgEDM1JCUMiA==} + engines: {node: '>=14'} + + conventional-changelog-writer@6.0.1: + resolution: {integrity: sha512-359t9aHorPw+U+nHzUXHS5ZnPBOizRxfQsWT5ZDHBfvfxQOAik+yfuhKXG66CN5LEWPpMNnIMHUTCKeYNprvHQ==} + engines: {node: '>=14'} + hasBin: true + + conventional-commits-filter@3.0.0: + resolution: {integrity: sha512-1ymej8b5LouPx9Ox0Dw/qAO2dVdfpRFq28e5Y0jJEU8ZrLdy0vOSkkIInwmxErFGhg6SALro60ZrwYFVTUDo4Q==} + engines: {node: '>=14'} + + conventional-commits-parser@4.0.0: + resolution: {integrity: sha512-WRv5j1FsVM5FISJkoYMR6tPk07fkKT0UodruX4je86V4owk451yjXAKzKAPOs9l7y59E2viHUS9eQ+dfUA9NSg==} + engines: {node: '>=14'} + hasBin: true + + conventional-recommended-bump@7.0.1: + resolution: {integrity: sha512-Ft79FF4SlOFvX4PkwFDRnaNiIVX7YbmqGU0RwccUaiGvgp3S0a8ipR2/Qxk31vclDNM+GSdJOVs2KrsUCjblVA==} + engines: {node: '>=14'} + hasBin: true + + convert-source-map@2.0.0: + resolution: 
{integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} + + copy-webpack-plugin@12.0.2: + resolution: {integrity: sha512-SNwdBeHyII+rWvee/bTnAYyO8vfVdcSTud4EIb6jcZ8inLeWucJE0DnxXQBjlQ5zlteuuvooGQy3LIyGxhvlOA==} + engines: {node: '>= 18.12.0'} + peerDependencies: + webpack: ^5.1.0 + + core-util-is@1.0.3: + resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} + + cosmiconfig@9.0.0: + resolution: {integrity: sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==} + engines: {node: '>=14'} + peerDependencies: + typescript: '>=4.9.5' + peerDependenciesMeta: + typescript: + optional: true + + cross-spawn@6.0.6: + resolution: {integrity: sha512-VqCUuhcd1iB+dsv8gxPttb5iZh/D0iubSP21g36KXdEuf6I5JiioesUVjpCdHV9MZRUfVFlvwtIUyPfxo5trtw==} + engines: {node: '>=4.8'} + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + + cssesc@3.0.0: + resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==} + engines: {node: '>=4'} + hasBin: true + + dargs@7.0.0: + resolution: {integrity: sha512-2iy1EkLdlBzQGvbweYRFxmFath8+K7+AKB0TlhHWkNuH+TmovaMH/Wp7V7R4u7f4SnX3OgLsU9t1NI9ioDnUpg==} + engines: {node: '>=8'} + + dateformat@3.0.3: + resolution: {integrity: sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==} + + debug@4.4.3: + resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + decamelize-keys@1.1.1: + resolution: {integrity: 
sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==} + engines: {node: '>=0.10.0'} + + decamelize@1.2.0: + resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==} + engines: {node: '>=0.10.0'} + + dedent@1.5.3: + resolution: {integrity: sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==} + peerDependencies: + babel-plugin-macros: ^3.1.0 + peerDependenciesMeta: + babel-plugin-macros: + optional: true + + dedent@1.7.2: + resolution: {integrity: sha512-WzMx3mW98SN+zn3hgemf4OzdmyNhhhKz5Ay0pUfQiMQ3e1g+xmTJWp/pKdwKVXhdSkAEGIIzqeuWrL3mV/AXbA==} + peerDependencies: + babel-plugin-macros: ^3.1.0 + peerDependenciesMeta: + babel-plugin-macros: + optional: true + + deepmerge@4.3.1: + resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} + engines: {node: '>=0.10.0'} + + defaults@1.0.4: + resolution: {integrity: sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==} + + define-lazy-prop@2.0.0: + resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} + engines: {node: '>=8'} + + delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + + deprecation@2.3.1: + resolution: {integrity: sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==} + + detect-indent@5.0.0: + resolution: {integrity: sha512-rlpvsxUtM0PQvy9iZe640/IWwWYyBsTApREbA1pHOpmOUIl9MkP/U4z7vTtg4Oaojvqhxt7sdufnT0EzGaR31g==} + engines: {node: '>=4'} + + detect-newline@3.1.0: + resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==} + engines: {node: '>=8'} + + 
diff-sequences@29.6.3: + resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + dot-prop@5.3.0: + resolution: {integrity: sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==} + engines: {node: '>=8'} + + dotenv-expand@11.0.7: + resolution: {integrity: sha512-zIHwmZPRshsCdpMDyVsqGmgyP0yT8GAgXUnkdAoJisxvf33k7yO6OuoKmcTGuXPWSsm8Oh88nZicRLA9Y0rUeA==} + engines: {node: '>=12'} + + dotenv@16.4.7: + resolution: {integrity: sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==} + engines: {node: '>=12'} + + dunder-proto@1.0.1: + resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} + engines: {node: '>= 0.4'} + + eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + + ejs@3.1.10: + resolution: {integrity: sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==} + engines: {node: '>=0.10.0'} + hasBin: true + + electron-to-chromium@1.5.307: + resolution: {integrity: sha512-5z3uFKBWjiNR44nFcYdkcXjKMbg5KXNdciu7mhTPo9tB7NbqSNP2sSnGR+fqknZSCwKkBN+oxiiajWs4dT6ORg==} + + emittery@0.13.1: + resolution: {integrity: sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==} + engines: {node: '>=12'} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + + emojis-list@3.0.0: + resolution: {integrity: sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==} + engines: {node: 
'>= 4'} + + encoding@0.1.13: + resolution: {integrity: sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==} + + end-of-stream@1.4.5: + resolution: {integrity: sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==} + + enhanced-resolve@5.20.0: + resolution: {integrity: sha512-/ce7+jQ1PQ6rVXwe+jKEg5hW5ciicHwIQUagZkp6IufBoY3YDgdTTY1azVs0qoRgVmvsNB+rbjLJxDAeHHtwsQ==} + engines: {node: '>=10.13.0'} + + enquirer@2.3.6: + resolution: {integrity: sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==} + engines: {node: '>=8.6'} + + env-paths@2.2.1: + resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==} + engines: {node: '>=6'} + + envinfo@7.13.0: + resolution: {integrity: sha512-cvcaMr7KqXVh4nyzGTVqTum+gAiL265x5jUWQIDLq//zOGbW+gSW/C+OWLleY/rs9Qole6AZLMXPbtIFQbqu+Q==} + engines: {node: '>=4'} + hasBin: true + + envinfo@7.21.0: + resolution: {integrity: sha512-Lw7I8Zp5YKHFCXL7+Dz95g4CcbMEpgvqZNNq3AmlT5XAV6CgAAk6gyAMqn2zjw08K9BHfcNuKrMiCPLByGafow==} + engines: {node: '>=4'} + hasBin: true + + err-code@2.0.3: + resolution: {integrity: sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==} + + error-ex@1.3.4: + resolution: {integrity: sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==} + + es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + + es-module-lexer@2.0.0: + resolution: {integrity: sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==} + + 
es-object-atoms@1.1.1: + resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} + engines: {node: '>= 0.4'} + + es-set-tostringtag@2.1.0: + resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==} + engines: {node: '>= 0.4'} + + esbuild-loader@4.4.2: + resolution: {integrity: sha512-8LdoT9sC7fzfvhxhsIAiWhzLJr9yT3ggmckXxsgvM07wgrRxhuT98XhLn3E7VczU5W5AFsPKv9DdWcZIubbWkQ==} + peerDependencies: + webpack: ^4.40.0 || ^5.0.0 + + esbuild@0.20.2: + resolution: {integrity: sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==} + engines: {node: '>=12'} + hasBin: true + + esbuild@0.27.3: + resolution: {integrity: sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==} + engines: {node: '>=18'} + hasBin: true + + escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + escape-string-regexp@1.0.5: + resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} + engines: {node: '>=0.8.0'} + + escape-string-regexp@2.0.0: + resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} + engines: {node: '>=8'} + + eslint-scope@5.1.1: + resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==} + engines: {node: '>=8.0.0'} + + esprima@4.0.1: + resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} + engines: {node: '>=4'} + hasBin: true + + esrecurse@4.3.0: + resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} + engines: {node: '>=4.0'} + + 
estraverse@4.3.0: + resolution: {integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==} + engines: {node: '>=4.0'} + + estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + + eventemitter3@4.0.7: + resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} + + events@3.3.0: + resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} + engines: {node: '>=0.8.x'} + + execa@1.0.0: + resolution: {integrity: sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==} + engines: {node: '>=6'} + + execa@5.0.0: + resolution: {integrity: sha512-ov6w/2LCiuyO4RLYGdpFGjkcs0wMTgGE8PrkTHikeUy5iJekXyPIKUjifk5CsE0pt7sMCrMZ3YNqoCj6idQOnQ==} + engines: {node: '>=10'} + + execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + + exit-x@0.2.2: + resolution: {integrity: sha512-+I6B/IkJc1o/2tiURyz/ivu/O0nKNEArIUB5O7zBrlDVJr22SCLH3xTeEry428LvFhRzIA1g8izguxJ/gbNcVQ==} + engines: {node: '>= 0.8.0'} + + expect@30.2.0: + resolution: {integrity: sha512-u/feCi0GPsI+988gU2FLcsHyAHTU0MX1Wg68NhAnN7z/+C5wqG+CY8J53N9ioe8RXgaoz0nBR/TYMf3AycUuPw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + exponential-backoff@3.1.3: + resolution: {integrity: sha512-ZgEeZXj30q+I0EN+CbSSpIyPaJ5HVQD18Z1m+u1FXbAeT94mr1zw50q4q6jiiC447Nl/YTcIYSAftiGqetwXCA==} + + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-glob@3.3.3: + resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} + engines: {node: 
'>=8.6.0'} + + fast-json-stable-stringify@2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + + fast-uri@3.1.0: + resolution: {integrity: sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==} + + fastest-levenshtein@1.0.16: + resolution: {integrity: sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==} + engines: {node: '>= 4.9.1'} + + fastq@1.20.1: + resolution: {integrity: sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==} + + fb-watchman@2.0.2: + resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} + + fdir@6.5.0: + resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} + engines: {node: '>=12.0.0'} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + + figures@3.2.0: + resolution: {integrity: sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==} + engines: {node: '>=8'} + + filelist@1.0.6: + resolution: {integrity: sha512-5giy2PkLYY1cP39p17Ech+2xlpTRL9HLspOfEgm0L6CwBXBTgsK5ou0JtzYuepxkaQ/tvhCFIJ5uXo0OrM2DxA==} + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + find-replace@3.0.0: + resolution: {integrity: sha512-6Tb2myMioCAgv5kfvP5/PkZZ/ntTpVK39fHY7WkWBgvbeE+VHd/tZuZ4mrC+bxh4cfOZeYKVPaJIZtZXV7GNCQ==} + engines: {node: '>=4.0.0'} + + find-up@2.1.0: + resolution: {integrity: sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==} + engines: {node: '>=4'} + + find-up@4.1.0: + resolution: {integrity: 
sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} + engines: {node: '>=8'} + + flat@5.0.2: + resolution: {integrity: sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==} + hasBin: true + + follow-redirects@1.15.11: + resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + + foreground-child@3.3.1: + resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==} + engines: {node: '>=14'} + + form-data@4.0.5: + resolution: {integrity: sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==} + engines: {node: '>= 6'} + + front-matter@4.0.2: + resolution: {integrity: sha512-I8ZuJ/qG92NWX8i5x1Y8qyj3vizhXS31OxjKDu3LKP+7/qBgfIKValiZIEwoVoJKUHlhWtYrktkxV1XsX+pPlg==} + + fs-constants@1.0.0: + resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} + + fs-extra@11.3.4: + resolution: {integrity: sha512-CTXd6rk/M3/ULNQj8FBqBWHYBVYybQ3VPBw0xGKFe3tuH7ytT6ACnvzpIQ3UZtB8yvUKC2cXn1a+x+5EVQLovA==} + engines: {node: '>=14.14'} + + fs-minipass@2.1.0: + resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==} + engines: {node: '>= 8'} + + fs-minipass@3.0.3: + resolution: {integrity: sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + fs.realpath@1.0.0: + resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + + fsevents@2.3.3: + resolution: {integrity: 
sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + gensync@1.0.0-beta.2: + resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} + engines: {node: '>=6.9.0'} + + get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + + get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} + engines: {node: '>= 0.4'} + + get-package-type@0.1.0: + resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} + engines: {node: '>=8.0.0'} + + get-pkg-repo@4.2.1: + resolution: {integrity: sha512-2+QbHjFRfGB74v/pYWjd5OhU3TDIC2Gv/YKUTk/tCvAz0pkn/Mz6P3uByuBimLOcPvN2jYdScl3xGFSrx0jEcA==} + engines: {node: '>=6.9.0'} + hasBin: true + + get-port@5.1.1: + resolution: {integrity: sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ==} + engines: {node: '>=8'} + + get-proto@1.0.1: + resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} + engines: {node: '>= 0.4'} + + get-stream@4.1.0: + resolution: {integrity: sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==} + engines: {node: '>=6'} + + get-stream@6.0.0: + resolution: {integrity: sha512-A1B3Bh1UmL0bidM/YX2NsCOTnGJePL9rO/M+Mw3m9f2gUpfokS0hi5Eah0WSUEWZdZhIZtMjkIYS7mDfOqNHbg==} + engines: {node: '>=10'} + + get-stream@6.0.1: + resolution: {integrity: 
sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + + get-tsconfig@4.13.6: + resolution: {integrity: sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==} + + git-raw-commits@3.0.0: + resolution: {integrity: sha512-b5OHmZ3vAgGrDn/X0kS+9qCfNKWe4K/jFnhwzVWWg0/k5eLa3060tZShrRg8Dja5kPc+YjS0Gc6y7cRr44Lpjw==} + engines: {node: '>=14'} + hasBin: true + + git-remote-origin-url@2.0.0: + resolution: {integrity: sha512-eU+GGrZgccNJcsDH5LkXR3PB9M958hxc7sbA8DFJjrv9j4L2P/eZfKhM+QD6wyzpiv+b1BpK0XrYCxkovtjSLw==} + engines: {node: '>=4'} + + git-semver-tags@5.0.1: + resolution: {integrity: sha512-hIvOeZwRbQ+7YEUmCkHqo8FOLQZCEn18yevLHADlFPZY02KJGsu5FZt9YW/lybfK2uhWFI7Qg/07LekJiTv7iA==} + engines: {node: '>=14'} + hasBin: true + + git-up@7.0.0: + resolution: {integrity: sha512-ONdIrbBCFusq1Oy0sC71F5azx8bVkvtZtMJAsv+a6lz5YAmbNnLD6HAB4gptHZVLPR8S2/kVN6Gab7lryq5+lQ==} + + git-url-parse@14.0.0: + resolution: {integrity: sha512-NnLweV+2A4nCvn4U/m2AoYu0pPKlsmhK9cknG7IMwsjFY1S2jxM+mAhsDxyxfCIGfGaD+dozsyX4b6vkYc83yQ==} + + gitconfiglocal@1.0.0: + resolution: {integrity: sha512-spLUXeTAVHxDtKsJc8FkFVgFtMdEN9qPGpL23VfSHx4fP4+Ds097IXLvymbnDH8FnmxX5Nr9bPw3A+AQ6mWEaQ==} + + glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + glob-parent@6.0.2: + resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} + engines: {node: '>=10.13.0'} + + glob-to-regexp@0.4.1: + resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==} + + glob@10.4.5: + resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==} + hasBin: true + + glob@7.2.3: + resolution: {integrity: 
sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + deprecated: Glob versions prior to v9 are no longer supported + + glob@9.3.4: + resolution: {integrity: sha512-qaSc49hojMOv1EPM4EuyITjDSgSKI0rthoHnvE81tcOi1SCVndHko7auqxdQ14eiQG2NDBJBE86+2xIrbIvrbA==} + engines: {node: '>=16 || 14 >=14.17'} + + globby@14.1.0: + resolution: {integrity: sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==} + engines: {node: '>=18'} + + gopd@1.2.0: + resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + + graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + + handlebars@4.7.8: + resolution: {integrity: sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==} + engines: {node: '>=0.4.7'} + hasBin: true + + hard-rejection@2.1.0: + resolution: {integrity: sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==} + engines: {node: '>=6'} + + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + has-symbols@1.1.0: + resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + + has-tostringtag@1.0.2: + resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} + engines: {node: '>= 0.4'} + + has-unicode@2.0.1: + resolution: {integrity: sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + 
engines: {node: '>= 0.4'} + + hosted-git-info@2.8.9: + resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} + + hosted-git-info@4.1.0: + resolution: {integrity: sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==} + engines: {node: '>=10'} + + hosted-git-info@7.0.2: + resolution: {integrity: sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==} + engines: {node: ^16.14.0 || >=18.0.0} + + html-escaper@2.0.2: + resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + + http-cache-semantics@4.2.0: + resolution: {integrity: sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==} + + http-proxy-agent@7.0.2: + resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==} + engines: {node: '>= 14'} + + https-proxy-agent@7.0.6: + resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==} + engines: {node: '>= 14'} + + human-signals@2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + + iconv-lite@0.6.3: + resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + + iconv-lite@0.7.2: + resolution: {integrity: sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==} + engines: {node: '>=0.10.0'} + + ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + + ignore-walk@6.0.5: + resolution: {integrity: 
sha512-VuuG0wCnjhnylG1ABXT3dAuIpTNDs/G8jlpmwXY03fXoXy/8ZK8/T+hMzt8L4WnrLCJgdybqgPagnF/f97cg3A==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + ignore@5.3.2: + resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} + engines: {node: '>= 4'} + + ignore@7.0.5: + resolution: {integrity: sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==} + engines: {node: '>= 4'} + + import-fresh@3.3.1: + resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==} + engines: {node: '>=6'} + + import-local@3.1.0: + resolution: {integrity: sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==} + engines: {node: '>=8'} + hasBin: true + + import-local@3.2.0: + resolution: {integrity: sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==} + engines: {node: '>=8'} + hasBin: true + + imurmurhash@0.1.4: + resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + + indent-string@4.0.0: + resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} + engines: {node: '>=8'} + + inflight@1.0.6: + resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. 
+ + inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + + ini@1.3.8: + resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} + + ini@4.1.3: + resolution: {integrity: sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + init-package-json@6.0.3: + resolution: {integrity: sha512-Zfeb5ol+H+eqJWHTaGca9BovufyGeIfr4zaaBorPmJBMrJ+KBnN+kQx2ZtXdsotUTgldHmHQV44xvUWOUA7E2w==} + engines: {node: ^16.14.0 || >=18.0.0} + + inquirer@8.2.7: + resolution: {integrity: sha512-UjOaSel/iddGZJ5xP/Eixh6dY1XghiBw4XK13rCCIJcJfyhhoul/7KhLLUGtebEj6GDYM6Vnx/mVsjx2L/mFIA==} + engines: {node: '>=12.0.0'} + + interpret@1.4.0: + resolution: {integrity: sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==} + engines: {node: '>= 0.10'} + + interpret@3.1.1: + resolution: {integrity: sha512-6xwYfHbajpoF0xLW+iwLkhwgvLoZDfjYfoFNu8ftMoXINzwuymNLd9u/KmwtdT2GbR+/Cz66otEGEVVUHX9QLQ==} + engines: {node: '>=10.13.0'} + + ip-address@10.1.0: + resolution: {integrity: sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==} + engines: {node: '>= 12'} + + is-arrayish@0.2.1: + resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + + is-binary-path@2.1.0: + resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} + engines: {node: '>=8'} + + is-ci@3.0.1: + resolution: {integrity: sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==} + hasBin: true + + is-core-module@2.16.1: + resolution: {integrity: sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==} + engines: {node: 
'>= 0.4'} + + is-docker@2.2.1: + resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==} + engines: {node: '>=8'} + hasBin: true + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + is-generator-fn@2.1.0: + resolution: {integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==} + engines: {node: '>=6'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-interactive@1.0.0: + resolution: {integrity: sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==} + engines: {node: '>=8'} + + is-lambda@1.0.1: + resolution: {integrity: sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==} + + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + is-obj@2.0.0: + resolution: {integrity: sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==} + engines: {node: '>=8'} + + is-plain-obj@1.1.0: + resolution: {integrity: sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==} + engines: {node: '>=0.10.0'} + + is-plain-object@2.0.4: + resolution: {integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==} + engines: {node: '>=0.10.0'} + + is-ssh@1.4.1: + resolution: {integrity: 
sha512-JNeu1wQsHjyHgn9NcWTaXq6zWSR6hqE0++zhfZlkFBbScNkyvxCdeV8sRkSBaeLKxmbpR21brail63ACNxJ0Tg==} + + is-stream@1.1.0: + resolution: {integrity: sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==} + engines: {node: '>=0.10.0'} + + is-stream@2.0.0: + resolution: {integrity: sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==} + engines: {node: '>=8'} + + is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + + is-text-path@1.0.1: + resolution: {integrity: sha512-xFuJpne9oFz5qDaodwmmG08e3CawH/2ZV8Qqza1Ko7Sk8POWbkRdwIoAWVhqvq0XeUzANEhKo2n0IXUGBm7A/w==} + engines: {node: '>=0.10.0'} + + is-unicode-supported@0.1.0: + resolution: {integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==} + engines: {node: '>=10'} + + is-wsl@2.2.0: + resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==} + engines: {node: '>=8'} + + isarray@1.0.0: + resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + isexe@3.1.5: + resolution: {integrity: sha512-6B3tLtFqtQS4ekarvLVMZ+X+VlvQekbe4taUkf/rhVO3d/h0M2rfARm/pXLcPEsjjMsFgrFgSrhQIxcSVrBz8w==} + engines: {node: '>=18'} + + isobject@3.0.1: + resolution: {integrity: sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==} + engines: {node: '>=0.10.0'} + + istanbul-lib-coverage@3.2.2: + resolution: {integrity: sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==} + engines: {node: '>=8'} + + istanbul-lib-instrument@6.0.3: + resolution: {integrity: 
sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==} + engines: {node: '>=10'} + + istanbul-lib-report@3.0.1: + resolution: {integrity: sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==} + engines: {node: '>=10'} + + istanbul-lib-source-maps@5.0.6: + resolution: {integrity: sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==} + engines: {node: '>=10'} + + istanbul-reports@3.2.0: + resolution: {integrity: sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==} + engines: {node: '>=8'} + + jackspeak@3.4.3: + resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==} + + jake@10.9.4: + resolution: {integrity: sha512-wpHYzhxiVQL+IV05BLE2Xn34zW1S223hvjtqk0+gsPrwd/8JNLXJgZZM/iPFsYc1xyphF+6M6EvdE5E9MBGkDA==} + engines: {node: '>=10'} + hasBin: true + + jest-changed-files@30.2.0: + resolution: {integrity: sha512-L8lR1ChrRnSdfeOvTrwZMlnWV8G/LLjQ0nG9MBclwWZidA2N5FviRki0Bvh20WRMOX31/JYvzdqTJrk5oBdydQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-circus@30.2.0: + resolution: {integrity: sha512-Fh0096NC3ZkFx05EP2OXCxJAREVxj1BcW/i6EWqqymcgYKWjyyDpral3fMxVcHXg6oZM7iULer9wGRFvfpl+Tg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-cli@30.2.0: + resolution: {integrity: sha512-Os9ukIvADX/A9sLt6Zse3+nmHtHaE6hqOsjQtNiugFTbKRHYIYtZXNGNK9NChseXy7djFPjndX1tL0sCTlfpAA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + + jest-config@30.2.0: + resolution: {integrity: sha512-g4WkyzFQVWHtu6uqGmQR4CQxz/CH3yDSlhzXMWzNjDx843gYjReZnMRanjRCq5XZFuQrGDxgUaiYWE8BRfVckA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + 
'@types/node': '*' + esbuild-register: '>=3.4.0' + ts-node: '>=9.0.0' + peerDependenciesMeta: + '@types/node': + optional: true + esbuild-register: + optional: true + ts-node: + optional: true + + jest-diff@29.7.0: + resolution: {integrity: sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-diff@30.2.0: + resolution: {integrity: sha512-dQHFo3Pt4/NLlG5z4PxZ/3yZTZ1C7s9hveiOj+GCN+uT109NC2QgsoVZsVOAvbJ3RgKkvyLGXZV9+piDpWbm6A==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-docblock@30.2.0: + resolution: {integrity: sha512-tR/FFgZKS1CXluOQzZvNH3+0z9jXr3ldGSD8bhyuxvlVUwbeLOGynkunvlTMxchC5urrKndYiwCFC0DLVjpOCA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-each@30.2.0: + resolution: {integrity: sha512-lpWlJlM7bCUf1mfmuqTA8+j2lNURW9eNafOy99knBM01i5CQeY5UH1vZjgT9071nDJac1M4XsbyI44oNOdhlDQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-environment-node@30.2.0: + resolution: {integrity: sha512-ElU8v92QJ9UrYsKrxDIKCxu6PfNj4Hdcktcn0JX12zqNdqWHB0N+hwOnnBBXvjLd2vApZtuLUGs1QSY+MsXoNA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-get-type@29.6.3: + resolution: {integrity: sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-haste-map@30.2.0: + resolution: {integrity: sha512-sQA/jCb9kNt+neM0anSj6eZhLZUIhQgwDt7cPGjumgLM4rXsfb9kpnlacmvZz3Q5tb80nS+oG/if+NBKrHC+Xw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-junit@16.0.0: + resolution: {integrity: sha512-A94mmw6NfJab4Fg/BlvVOUXzXgF0XIH6EmTgJ5NDPp4xoKq0Kr7sErb+4Xs9nZvu58pJojz5RFGpqnZYJTrRfQ==} + engines: {node: '>=10.12.0'} + + jest-leak-detector@30.2.0: + resolution: {integrity: sha512-M6jKAjyzjHG0SrQgwhgZGy9hFazcudwCNovY/9HPIicmNSBuockPSedAP9vlPK6ONFJ1zfyH/M2/YYJxOz5cdQ==} + engines: {node: 
^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-matcher-utils@30.2.0: + resolution: {integrity: sha512-dQ94Nq4dbzmUWkQ0ANAWS9tBRfqCrn0bV9AMYdOi/MHW726xn7eQmMeRTpX2ViC00bpNaWXq+7o4lIQ3AX13Hg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-message-util@30.2.0: + resolution: {integrity: sha512-y4DKFLZ2y6DxTWD4cDe07RglV88ZiNEdlRfGtqahfbIjfsw1nMCPx49Uev4IA/hWn3sDKyAnSPwoYSsAEdcimw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-mock@30.2.0: + resolution: {integrity: sha512-JNNNl2rj4b5ICpmAcq+WbLH83XswjPbjH4T7yvGzfAGCPh1rw+xVNbtk+FnRslvt9lkCcdn9i1oAoKUuFsOxRw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-pnp-resolver@1.2.3: + resolution: {integrity: sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==} + engines: {node: '>=6'} + peerDependencies: + jest-resolve: '*' + peerDependenciesMeta: + jest-resolve: + optional: true + + jest-regex-util@30.0.1: + resolution: {integrity: sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-resolve-dependencies@30.2.0: + resolution: {integrity: sha512-xTOIGug/0RmIe3mmCqCT95yO0vj6JURrn1TKWlNbhiAefJRWINNPgwVkrVgt/YaerPzY3iItufd80v3lOrFJ2w==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-resolve@30.2.0: + resolution: {integrity: sha512-TCrHSxPlx3tBY3hWNtRQKbtgLhsXa1WmbJEqBlTBrGafd5fiQFByy2GNCEoGR+Tns8d15GaL9cxEzKOO3GEb2A==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-runner@30.2.0: + resolution: {integrity: sha512-PqvZ2B2XEyPEbclp+gV6KO/F1FIFSbIwewRgmROCMBo/aZ6J1w8Qypoj2pEOcg3G2HzLlaP6VUtvwCI8dM3oqQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-runtime@30.2.0: + resolution: {integrity: sha512-p1+GVX/PJqTucvsmERPMgCPvQJpFt4hFbM+VN3n8TMo47decMUcJbt+rgzwrEme0MQUA/R+1de2axftTHkKckg==} + engines: {node: ^18.14.0 || ^20.0.0 || 
^22.0.0 || >=24.0.0} + + jest-snapshot@30.2.0: + resolution: {integrity: sha512-5WEtTy2jXPFypadKNpbNkZ72puZCa6UjSr/7djeecHWOu7iYhSXSnHScT8wBz3Rn8Ena5d5RYRcsyKIeqG1IyA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-util@30.2.0: + resolution: {integrity: sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-validate@30.2.0: + resolution: {integrity: sha512-FBGWi7dP2hpdi8nBoWxSsLvBFewKAg0+uSQwBaof4Y4DPgBabXgpSYC5/lR7VmnIlSpASmCi/ntRWPbv7089Pw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-watcher@30.2.0: + resolution: {integrity: sha512-PYxa28dxJ9g777pGm/7PrbnMeA0Jr7osHP9bS7eJy9DuAjMgdGtxgf0uKMyoIsTWAkIbUW5hSDdJ3urmgXBqxg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-worker@27.5.1: + resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==} + engines: {node: '>= 10.13.0'} + + jest-worker@30.2.0: + resolution: {integrity: sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest@30.2.0: + resolution: {integrity: sha512-F26gjC0yWN8uAA5m5Ss8ZQf5nDHWGlN/xWZIh8S5SRbsEKBovwZhxGd6LJlbZYxBgCYOtreSUyb8hpXyGC5O4A==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + + js-tokens@4.0.0: + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + + js-yaml@3.14.2: + resolution: {integrity: sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==} + hasBin: true + + js-yaml@4.1.0: + resolution: {integrity: 
sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true + + jsesc@3.1.0: + resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==} + engines: {node: '>=6'} + hasBin: true + + json-parse-better-errors@1.0.2: + resolution: {integrity: sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==} + + json-parse-even-better-errors@2.3.1: + resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} + + json-parse-even-better-errors@3.0.2: + resolution: {integrity: sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + json-schema-traverse@1.0.0: + resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} + + json-stringify-nice@1.1.4: + resolution: {integrity: sha512-5Z5RFW63yxReJ7vANgW6eZFGWaQvnPE3WNmZoOJrSkGju2etKA2L5rrOa1sm877TVTFt57A80BH1bArcmlLfPw==} + + json-stringify-safe@5.0.1: + resolution: {integrity: sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==} + + json5@2.2.3: + resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} + engines: {node: '>=6'} + hasBin: true + + jsonc-parser@3.2.0: + resolution: {integrity: sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==} + + jsonc-parser@3.3.1: + resolution: {integrity: sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ==} + + jsonfile@6.2.0: + resolution: {integrity: sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==} + + jsonparse@1.3.1: + resolution: {integrity: 
sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==} + engines: {'0': node >= 0.2.0} + + just-diff-apply@5.5.0: + resolution: {integrity: sha512-OYTthRfSh55WOItVqwpefPtNt2VdKsq5AnAK6apdtR6yCH8pr0CmSr710J0Mf+WdQy7K/OzMy7K2MgAfdQURDw==} + + just-diff@6.0.2: + resolution: {integrity: sha512-S59eriX5u3/QhMNq3v/gm8Kd0w8OS6Tz2FS1NG4blv+z0MuQcBRJyFWjdovM0Rad4/P4aUPFtnkNjMjyMlMSYA==} + + kind-of@6.0.3: + resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==} + engines: {node: '>=0.10.0'} + + lerna@8.2.4: + resolution: {integrity: sha512-0gaVWDIVT7fLfprfwpYcQajb7dBJv3EGavjG7zvJ+TmGx3/wovl5GklnSwM2/WeE0Z2wrIz7ndWhBcDUHVjOcQ==} + engines: {node: '>=18.0.0'} + hasBin: true + + leven@3.1.0: + resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} + engines: {node: '>=6'} + + libnpmaccess@8.0.6: + resolution: {integrity: sha512-uM8DHDEfYG6G5gVivVl+yQd4pH3uRclHC59lzIbSvy7b5FEwR+mU49Zq1jEyRtRFv7+M99mUW9S0wL/4laT4lw==} + engines: {node: ^16.14.0 || >=18.0.0} + + libnpmpublish@9.0.9: + resolution: {integrity: sha512-26zzwoBNAvX9AWOPiqqF6FG4HrSCPsHFkQm7nT+xU1ggAujL/eae81RnCv4CJ2In9q9fh10B88sYSzKCUh/Ghg==} + engines: {node: ^16.14.0 || >=18.0.0} + + lines-and-columns@1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + + lines-and-columns@2.0.3: + resolution: {integrity: sha512-cNOjgCnLB+FnvWWtyRTzmB3POJ+cXxTA81LoW7u8JdmhfXzriropYwpjShnz1QLLWsQwY7nIxoDmcPTwphDK9w==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + load-json-file@4.0.0: + resolution: {integrity: sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==} + engines: {node: '>=4'} + + load-json-file@6.2.0: + resolution: {integrity: sha512-gUD/epcRms75Cw8RT1pUdHugZYM5ce64ucs2GEISABwkRsOQr0q2wm/MV2TKThycIe5e0ytRweW2RZxclogCdQ==} 
+ engines: {node: '>=8'} + + loader-runner@4.3.1: + resolution: {integrity: sha512-IWqP2SCPhyVFTBtRcgMHdzlf9ul25NwaFx4wCEH/KjAXuuHY4yNjvPXsBokp8jCB936PyWRaPKUNh8NvylLp2Q==} + engines: {node: '>=6.11.5'} + + loader-utils@2.0.4: + resolution: {integrity: sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==} + engines: {node: '>=8.9.0'} + + locate-path@2.0.0: + resolution: {integrity: sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==} + engines: {node: '>=4'} + + locate-path@5.0.0: + resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} + engines: {node: '>=8'} + + lodash.camelcase@4.3.0: + resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} + + lodash.ismatch@4.4.0: + resolution: {integrity: sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==} + + lodash.memoize@4.1.2: + resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==} + + lodash@4.17.23: + resolution: {integrity: sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==} + + log-symbols@4.1.0: + resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==} + engines: {node: '>=10'} + + lru-cache@10.4.3: + resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} + + lru-cache@5.1.1: + resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} + + lru-cache@6.0.0: + resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} + engines: {node: '>=10'} + + make-dir@2.1.0: + resolution: {integrity: 
sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==} + engines: {node: '>=6'} + + make-dir@4.0.0: + resolution: {integrity: sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==} + engines: {node: '>=10'} + + make-error@1.3.6: + resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} + + make-fetch-happen@13.0.1: + resolution: {integrity: sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==} + engines: {node: ^16.14.0 || >=18.0.0} + + makeerror@1.0.12: + resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==} + + map-obj@1.0.1: + resolution: {integrity: sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==} + engines: {node: '>=0.10.0'} + + map-obj@4.3.0: + resolution: {integrity: sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==} + engines: {node: '>=8'} + + math-intrinsics@1.1.0: + resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} + engines: {node: '>= 0.4'} + + meow@8.1.2: + resolution: {integrity: sha512-r85E3NdZ+mpYk1C6RjPFEMSE+s1iZMuHtsHAqY0DT3jZczl0diWUZ8g6oU7h0M9cD2EL+PzaYghhCLzR0ZNn5Q==} + engines: {node: '>=10'} + + merge-stream@2.0.0: + resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} + + merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} + + mime-db@1.52.0: + resolution: {integrity: 
sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + + mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + + mimic-fn@2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} + + min-indent@1.0.1: + resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} + engines: {node: '>=4'} + + minimatch@3.0.5: + resolution: {integrity: sha512-tUpxzX0VAzJHjLu0xUfFv1gwVp9ba3IOuRAVH2EGuRW8a5emA2FlACLqiT/lDVtS1W+TGNwqz3sWaNyLgDJWuw==} + + minimatch@3.1.5: + resolution: {integrity: sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==} + + minimatch@5.1.9: + resolution: {integrity: sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==} + engines: {node: '>=10'} + + minimatch@8.0.7: + resolution: {integrity: sha512-V+1uQNdzybxa14e/p00HZnQNNcTjnRJjDxg2V8wtkjFctq4M7hXFws4oekyTP0Jebeq7QYtpFyOeBAjc88zvYg==} + engines: {node: '>=16 || 14 >=14.17'} + + minimatch@9.0.3: + resolution: {integrity: sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==} + engines: {node: '>=16 || 14 >=14.17'} + + minimatch@9.0.9: + resolution: {integrity: sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==} + engines: {node: '>=16 || 14 >=14.17'} + + minimist-options@4.1.0: + resolution: {integrity: sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==} + engines: {node: '>= 6'} + + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + + minipass-collect@2.0.1: + 
resolution: {integrity: sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==} + engines: {node: '>=16 || 14 >=14.17'} + + minipass-fetch@3.0.5: + resolution: {integrity: sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + minipass-flush@1.0.5: + resolution: {integrity: sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==} + engines: {node: '>= 8'} + + minipass-pipeline@1.2.4: + resolution: {integrity: sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==} + engines: {node: '>=8'} + + minipass-sized@1.0.3: + resolution: {integrity: sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==} + engines: {node: '>=8'} + + minipass@3.3.6: + resolution: {integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==} + engines: {node: '>=8'} + + minipass@4.2.8: + resolution: {integrity: sha512-fNzuVyifolSLFL4NzpF+wEF4qrgqaaKX0haXPQEdQ7NKAN+WecoKMHV09YcuL/DHxrUsYQOK3MiuDf7Ip2OXfQ==} + engines: {node: '>=8'} + + minipass@5.0.0: + resolution: {integrity: sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==} + engines: {node: '>=8'} + + minipass@7.1.3: + resolution: {integrity: sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==} + engines: {node: '>=16 || 14 >=14.17'} + + minizlib@2.1.2: + resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==} + engines: {node: '>= 8'} + + mkdirp@1.0.4: + resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==} + engines: {node: '>=10'} + hasBin: true + + modify-values@1.0.1: + resolution: {integrity: 
sha512-xV2bxeN6F7oYjZWTe/YPAy6MN2M+sL4u/Rlm2AHCIVGfo2p1yGmBHQ6vHehl4bRTZBdHu3TSkWdYgkwpYzAGSw==} + engines: {node: '>=0.10.0'} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + multimatch@5.0.0: + resolution: {integrity: sha512-ypMKuglUrZUD99Tk2bUQ+xNQj43lPEfAeX2o9cTteAmShXy2VHDJpuwu1o0xqoKCt9jLVAvwyFKdLTPXKAfJyA==} + engines: {node: '>=10'} + + mute-stream@0.0.8: + resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} + + mute-stream@1.0.0: + resolution: {integrity: sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + napi-postinstall@0.3.4: + resolution: {integrity: sha512-PHI5f1O0EP5xJ9gQmFGMS6IZcrVvTjpXjz7Na41gTE7eE2hK11lg04CECCYEEjdc17EV4DO+fkGEtt7TpTaTiQ==} + engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + hasBin: true + + natural-compare@1.4.0: + resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + + negotiator@0.6.4: + resolution: {integrity: sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==} + engines: {node: '>= 0.6'} + + neo-async@2.6.2: + resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} + + nice-try@1.0.5: + resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==} + + node-fetch@2.6.7: + resolution: {integrity: sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + + node-gyp@10.3.1: + resolution: {integrity: 
sha512-Pp3nFHBThHzVtNY7U6JfPjvT/DTE8+o/4xKsLQtBoU+j2HLsGlhcfzflAoUreaJbNmYnX+LlLi0qjV8kpyO6xQ==} + engines: {node: ^16.14.0 || >=18.0.0} + hasBin: true + + node-int64@0.4.0: + resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} + + node-machine-id@1.1.12: + resolution: {integrity: sha512-QNABxbrPa3qEIfrE6GOJ7BYIuignnJw7iQ2YPbc3Nla1HzRJjXzZOiikfF8m7eAMfichLt3M4VgLOetqgDmgGQ==} + + node-releases@2.0.36: + resolution: {integrity: sha512-TdC8FSgHz8Mwtw9g5L4gR/Sh9XhSP/0DEkQxfEFXOpiul5IiHgHan2VhYYb6agDSfp4KuvltmGApc8HMgUrIkA==} + + nopt@7.2.1: + resolution: {integrity: sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + hasBin: true + + normalize-package-data@2.5.0: + resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==} + + normalize-package-data@3.0.3: + resolution: {integrity: sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==} + engines: {node: '>=10'} + + normalize-package-data@6.0.2: + resolution: {integrity: sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==} + engines: {node: ^16.14.0 || >=18.0.0} + + normalize-path@3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} + + npm-bundled@3.0.1: + resolution: {integrity: sha512-+AvaheE/ww1JEwRHOrn4WHNzOxGtVp+adrg2AeZS/7KuxGUYFuBta98wYpfHBbJp6Tg6j1NKSEVHNcfZzJHQwQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + npm-install-checks@6.3.0: + resolution: {integrity: sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + npm-normalize-package-bin@3.0.1: + resolution: {integrity: 
sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + npm-package-arg@11.0.2: + resolution: {integrity: sha512-IGN0IAwmhDJwy13Wc8k+4PEbTPhpJnMtfR53ZbOyjkvmEcLS4nCwp6mvMWjS5sUjeiW3mpx6cHmuhKEu9XmcQw==} + engines: {node: ^16.14.0 || >=18.0.0} + + npm-packlist@8.0.2: + resolution: {integrity: sha512-shYrPFIS/JLP4oQmAwDyk5HcyysKW8/JLTEA32S0Z5TzvpaeeX2yMFfoK1fjEBnCBvVyIB/Jj/GBFdm0wsgzbA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + npm-pick-manifest@9.1.0: + resolution: {integrity: sha512-nkc+3pIIhqHVQr085X9d2JzPzLyjzQS96zbruppqC9aZRm/x8xx6xhI98gHtsfELP2bE+loHq8ZaHFHhe+NauA==} + engines: {node: ^16.14.0 || >=18.0.0} + + npm-registry-fetch@17.1.0: + resolution: {integrity: sha512-5+bKQRH0J1xG1uZ1zMNvxW0VEyoNWgJpY9UDuluPFLKDfJ9u2JmmjmTJV1srBGQOROfdBMiVvnH2Zvpbm+xkVA==} + engines: {node: ^16.14.0 || >=18.0.0} + + npm-run-path@2.0.2: + resolution: {integrity: sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==} + engines: {node: '>=4'} + + npm-run-path@4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} + + nx@20.8.4: + resolution: {integrity: sha512-/++x0OM3/UTmDR+wmPeV13tSxeTr+QGzj3flgtH9DiOPmQnn2CjHWAMZiOhcSh/hHoE/V3ySL4757InQUsVtjQ==} + hasBin: true + peerDependencies: + '@swc-node/register': ^1.8.0 + '@swc/core': ^1.3.85 + peerDependenciesMeta: + '@swc-node/register': + optional: true + '@swc/core': + optional: true + + once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + + onetime@5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} + + open@8.4.2: + resolution: {integrity: 
sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==} + engines: {node: '>=12'} + + ora@5.3.0: + resolution: {integrity: sha512-zAKMgGXUim0Jyd6CXK9lraBnD3H5yPGBPPOkC23a2BG6hsm4Zu6OQSjQuEtV0BHDf4aKHcUFvJiGRrFuW3MG8g==} + engines: {node: '>=10'} + + ora@5.4.1: + resolution: {integrity: sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==} + engines: {node: '>=10'} + + p-finally@1.0.0: + resolution: {integrity: sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==} + engines: {node: '>=4'} + + p-limit@1.3.0: + resolution: {integrity: sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==} + engines: {node: '>=4'} + + p-limit@2.3.0: + resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} + engines: {node: '>=6'} + + p-limit@3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + + p-locate@2.0.0: + resolution: {integrity: sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==} + engines: {node: '>=4'} + + p-locate@4.1.0: + resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} + engines: {node: '>=8'} + + p-map-series@2.1.0: + resolution: {integrity: sha512-RpYIIK1zXSNEOdwxcfe7FdvGcs7+y5n8rifMhMNWvaxRNMPINJHF5GDeuVxWqnfrcHPSCnp7Oo5yNXHId9Av2Q==} + engines: {node: '>=8'} + + p-map@4.0.0: + resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} + engines: {node: '>=10'} + + p-pipe@3.1.0: + resolution: {integrity: sha512-08pj8ATpzMR0Y80x50yJHn37NF6vjrqHutASaX5LiH5npS9XPvrUmscd9MF5R4fuYRHOxQR1FfMIlF7AzwoPqw==} + engines: {node: '>=8'} + + p-queue@6.6.2: + resolution: 
{integrity: sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==} + engines: {node: '>=8'} + + p-reduce@2.1.0: + resolution: {integrity: sha512-2USApvnsutq8uoxZBGbbWM0JIYLiEMJ9RlaN7fAzVNb9OZN0SHjjTTfIcb667XynS5Y1VhwDJVDa72TnPzAYWw==} + engines: {node: '>=8'} + + p-timeout@3.2.0: + resolution: {integrity: sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==} + engines: {node: '>=8'} + + p-try@1.0.0: + resolution: {integrity: sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==} + engines: {node: '>=4'} + + p-try@2.2.0: + resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} + engines: {node: '>=6'} + + p-waterfall@2.1.1: + resolution: {integrity: sha512-RRTnDb2TBG/epPRI2yYXsimO0v3BXC8Yd3ogr1545IaqKK17VGhbWVeGGN+XfCm/08OK8635nH31c8bATkHuSw==} + engines: {node: '>=8'} + + package-json-from-dist@1.0.1: + resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} + + pacote@18.0.6: + resolution: {integrity: sha512-+eK3G27SMwsB8kLIuj4h1FUhHtwiEUo21Tw8wNjmvdlpOEr613edv+8FUsTj/4F/VN5ywGE19X18N7CC2EJk6A==} + engines: {node: ^16.14.0 || >=18.0.0} + hasBin: true + + parent-module@1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} + + parse-conflict-json@3.0.1: + resolution: {integrity: sha512-01TvEktc68vwbJOtWZluyWeVGWjP+bZwXtPDMQVbBKzbJ/vZBif0L69KH1+cHv1SZ6e0FKLvjyHe8mqsIqYOmw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + parse-json@4.0.0: + resolution: {integrity: sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==} + engines: {node: '>=4'} + + parse-json@5.2.0: + resolution: {integrity: 
sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} + engines: {node: '>=8'} + + parse-path@7.1.0: + resolution: {integrity: sha512-EuCycjZtfPcjWk7KTksnJ5xPMvWGA/6i4zrLYhRG0hGvC3GPU/jGUj3Cy+ZR0v30duV3e23R95T1lE2+lsndSw==} + + parse-url@8.1.0: + resolution: {integrity: sha512-xDvOoLU5XRrcOZvnI6b8zA6n9O9ejNk/GExuz1yBuWUGn9KA97GI6HTs6u02wKara1CeVmZhH+0TZFdWScR89w==} + + path-exists@3.0.0: + resolution: {integrity: sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==} + engines: {node: '>=4'} + + path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + + path-is-absolute@1.0.1: + resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} + engines: {node: '>=0.10.0'} + + path-key@2.0.1: + resolution: {integrity: sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==} + engines: {node: '>=4'} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-parse@1.0.7: + resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + + path-scurry@1.11.1: + resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} + + path-type@3.0.0: + resolution: {integrity: sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==} + engines: {node: '>=4'} + + path-type@6.0.0: + resolution: {integrity: sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==} + engines: {node: '>=18'} + + picocolors@1.1.1: + resolution: {integrity: 
sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + picomatch@4.0.3: + resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} + engines: {node: '>=12'} + + pify@2.3.0: + resolution: {integrity: sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==} + engines: {node: '>=0.10.0'} + + pify@3.0.0: + resolution: {integrity: sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==} + engines: {node: '>=4'} + + pify@4.0.1: + resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==} + engines: {node: '>=6'} + + pify@5.0.0: + resolution: {integrity: sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==} + engines: {node: '>=10'} + + pirates@4.0.7: + resolution: {integrity: sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==} + engines: {node: '>= 6'} + + pkg-dir@4.2.0: + resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==} + engines: {node: '>=8'} + + postcss-selector-parser@6.1.2: + resolution: {integrity: sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==} + engines: {node: '>=4'} + + pretty-format@29.7.0: + resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + pretty-format@30.2.0: + resolution: {integrity: sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==} + engines: {node: ^18.14.0 || 
^20.0.0 || ^22.0.0 || >=24.0.0} + + proc-log@4.2.0: + resolution: {integrity: sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + process-nextick-args@2.0.1: + resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} + + proggy@2.0.0: + resolution: {integrity: sha512-69agxLtnI8xBs9gUGqEnK26UfiexpHy+KUpBQWabiytQjnn5wFY8rklAi7GRfABIuPNnQ/ik48+LGLkYYJcy4A==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + promise-all-reject-late@1.0.1: + resolution: {integrity: sha512-vuf0Lf0lOxyQREH7GDIOUMLS7kz+gs8i6B+Yi8dC68a2sychGrHTJYghMBD6k7eUcH0H5P73EckCA48xijWqXw==} + + promise-call-limit@3.0.2: + resolution: {integrity: sha512-mRPQO2T1QQVw11E7+UdCJu7S61eJVWknzml9sC1heAdj1jxl0fWMBypIt9ZOcLFf8FkG995ZD7RnVk7HH72fZw==} + + promise-inflight@1.0.1: + resolution: {integrity: sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==} + peerDependencies: + bluebird: '*' + peerDependenciesMeta: + bluebird: + optional: true + + promise-retry@2.0.1: + resolution: {integrity: sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==} + engines: {node: '>=10'} + + promzard@1.0.2: + resolution: {integrity: sha512-2FPputGL+mP3jJ3UZg/Dl9YOkovB7DX0oOr+ck5QbZ5MtORtds8k/BZdn+02peDLI8/YWbmzx34k5fA+fHvCVQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + protocols@2.0.2: + resolution: {integrity: sha512-hHVTzba3wboROl0/aWRRG9dMytgH6ow//STBZh43l/wQgmMhYhOFi0EHWAPtoCz9IAUymsyP0TSBHkhgMEGNnQ==} + + proxy-from-env@1.1.0: + resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} + + pump@3.0.4: + resolution: {integrity: sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==} + + pure-rand@7.0.1: + resolution: {integrity: 
sha512-oTUZM/NAZS8p7ANR3SHh30kXB+zK2r2BPcEn/awJIbOvq82WoMN4p62AWWp3Hhw50G0xMsw1mhIBLqHw64EcNQ==} + + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + quick-lru@4.0.1: + resolution: {integrity: sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==} + engines: {node: '>=8'} + + randombytes@2.1.0: + resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==} + + react-is@18.3.1: + resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==} + + read-cmd-shim@4.0.0: + resolution: {integrity: sha512-yILWifhaSEEytfXI76kB9xEEiG1AiozaCJZ83A87ytjRiN+jVibXjedjCRNjoZviinhG+4UkalO3mWTd8u5O0Q==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + read-package-json-fast@3.0.2: + resolution: {integrity: sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + read-pkg-up@3.0.0: + resolution: {integrity: sha512-YFzFrVvpC6frF1sz8psoHDBGF7fLPc+llq/8NB43oagqWkx8ar5zYtsTORtOjw9W2RHLpWP+zTWwBvf1bCmcSw==} + engines: {node: '>=4'} + + read-pkg-up@7.0.1: + resolution: {integrity: sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==} + engines: {node: '>=8'} + + read-pkg@3.0.0: + resolution: {integrity: sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA==} + engines: {node: '>=4'} + + read-pkg@5.2.0: + resolution: {integrity: sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==} + engines: {node: '>=8'} + + read@3.0.1: + resolution: {integrity: sha512-SLBrDU/Srs/9EoWhU5GdbAoxG1GzpQHo/6qiGItaoLJ1thmYpcNIM1qISEUvyHBzfGlWIyd6p2DNi1oV1VmAuw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + 
readable-stream@2.3.8: + resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==} + + readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + + readdirp@3.6.0: + resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + + rechoir@0.6.2: + resolution: {integrity: sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==} + engines: {node: '>= 0.10'} + + rechoir@0.8.0: + resolution: {integrity: sha512-/vxpCXddiX8NGfGO/mTafwjq4aFa/71pvamip0++IQk3zG8cbCj0fifNPrjjF1XMXUne91jL9OoxmdykoEtifQ==} + engines: {node: '>= 10.13.0'} + + redent@3.0.0: + resolution: {integrity: sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==} + engines: {node: '>=8'} + + require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + + require-from-string@2.0.2: + resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} + engines: {node: '>=0.10.0'} + + resolve-cwd@3.0.0: + resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} + engines: {node: '>=8'} + + resolve-from@4.0.0: + resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: {node: '>=4'} + + resolve-from@5.0.0: + resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} + + resolve-pkg-maps@1.0.0: + resolution: {integrity: 
sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} + + resolve.exports@2.0.3: + resolution: {integrity: sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==} + engines: {node: '>=10'} + + resolve@1.22.11: + resolution: {integrity: sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==} + engines: {node: '>= 0.4'} + hasBin: true + + restore-cursor@3.1.0: + resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==} + engines: {node: '>=8'} + + retry@0.12.0: + resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==} + engines: {node: '>= 4'} + + reusify@1.1.0: + resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + rimraf@4.4.1: + resolution: {integrity: sha512-Gk8NlF062+T9CqNGn6h4tls3k6T1+/nXdOcSZVikNVtlRdYpA7wRJJMoXmuvOnLW844rPjdQ7JgXCYM6PPC/og==} + engines: {node: '>=14'} + hasBin: true + + run-async@2.4.1: + resolution: {integrity: sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==} + engines: {node: '>=0.12.0'} + + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + rxjs@7.8.2: + resolution: {integrity: sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==} + + safe-buffer@5.1.2: + resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} + + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + safer-buffer@2.1.2: + resolution: {integrity: 
sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + + schema-utils@4.3.3: + resolution: {integrity: sha512-eflK8wEtyOE6+hsaRVPxvUKYCpRgzLqDTb8krvAsRIwOGlHoSgYLgBXoubGgLd2fT41/OUYdb48v4k4WWHQurA==} + engines: {node: '>= 10.13.0'} + + semver@5.7.2: + resolution: {integrity: sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==} + hasBin: true + + semver@6.3.1: + resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} + hasBin: true + + semver@7.7.4: + resolution: {integrity: sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==} + engines: {node: '>=10'} + hasBin: true + + serialize-javascript@6.0.2: + resolution: {integrity: sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==} + + set-blocking@2.0.0: + resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==} + + shallow-clone@3.0.1: + resolution: {integrity: sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==} + engines: {node: '>=8'} + + shebang-command@1.2.0: + resolution: {integrity: sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==} + engines: {node: '>=0.10.0'} + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@1.0.0: + resolution: {integrity: sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==} + engines: {node: '>=0.10.0'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + shelljs@0.9.2: + resolution: {integrity: 
sha512-S3I64fEiKgTZzKCC46zT/Ib9meqofLrQVbpSswtjFfAVDW+AZ54WTnAM/3/yENoxz/V1Cy6u3kiiEbQ4DNphvw==} + engines: {node: '>=18'} + hasBin: true + + shx@0.4.0: + resolution: {integrity: sha512-Z0KixSIlGPpijKgcH6oCMCbltPImvaKy0sGH8AkLRXw1KyzpKtaCTizP2xen+hNDqVF4xxgvA0KXSb9o4Q6hnA==} + engines: {node: '>=18'} + hasBin: true + + signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + + sigstore@2.3.1: + resolution: {integrity: sha512-8G+/XDU8wNsJOQS5ysDVO0Etg9/2uA5gR9l4ZwijjlwxBcrU6RPfwi2+jJmbP+Ap1Hlp/nVAaEO4Fj22/SL2gQ==} + engines: {node: ^16.14.0 || >=18.0.0} + + slash@3.0.0: + resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} + engines: {node: '>=8'} + + slash@5.1.0: + resolution: {integrity: sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==} + engines: {node: '>=14.16'} + + smart-buffer@4.2.0: + resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==} + engines: {node: '>= 6.0.0', npm: '>= 3.0.0'} + + smol-toml@1.6.0: + resolution: {integrity: sha512-4zemZi0HvTnYwLfrpk/CF9LOd9Lt87kAt50GnqhMpyF9U3poDAP2+iukq2bZsO/ufegbYehBkqINbsWxj4l4cw==} + engines: {node: '>= 18'} + + socks-proxy-agent@8.0.5: + resolution: {integrity: sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==} + engines: {node: '>= 14'} + + socks@2.8.7: + resolution: {integrity: sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==} + engines: {node: '>= 10.0.0', npm: '>= 3.0.0'} + + sort-keys@2.0.0: + resolution: {integrity: 
sha512-/dPCrG1s3ePpWm6yBbxZq5Be1dXGLyLn9Z791chDC3NFrpkVbWGzkBwPN1knaciexFXgRJ7hzdnwZ4stHSDmjg==} + engines: {node: '>=4'} + + source-list-map@2.0.1: + resolution: {integrity: sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==} + + source-map-support@0.5.13: + resolution: {integrity: sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==} + + source-map-support@0.5.21: + resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==} + + source-map@0.6.1: + resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + + source-map@0.7.6: + resolution: {integrity: sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==} + engines: {node: '>= 12'} + + spdx-correct@3.2.0: + resolution: {integrity: sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==} + + spdx-exceptions@2.5.0: + resolution: {integrity: sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==} + + spdx-expression-parse@3.0.1: + resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==} + + spdx-license-ids@3.0.23: + resolution: {integrity: sha512-CWLcCCH7VLu13TgOH+r8p1O/Znwhqv/dbb6lqWy67G+pT1kHmeD/+V36AVb/vq8QMIQwVShJ6Ssl5FPh0fuSdw==} + + split2@3.2.2: + resolution: {integrity: sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==} + + split@1.0.1: + resolution: {integrity: sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==} + + sprintf-js@1.0.3: + resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + + ssri@10.0.6: + resolution: 
{integrity: sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + stack-utils@2.0.6: + resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} + engines: {node: '>=10'} + + string-length@4.0.2: + resolution: {integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==} + engines: {node: '>=10'} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + string_decoder@1.1.1: + resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} + + string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-ansi@7.2.0: + resolution: {integrity: sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w==} + engines: {node: '>=12'} + + strip-bom@3.0.0: + resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} + engines: {node: '>=4'} + + strip-bom@4.0.0: + resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==} + engines: {node: '>=8'} + + strip-eof@1.0.0: + resolution: {integrity: sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==} + engines: {node: 
'>=0.10.0'} + + strip-final-newline@2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + + strip-indent@3.0.0: + resolution: {integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==} + engines: {node: '>=8'} + + strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + supports-color@8.1.1: + resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} + engines: {node: '>=10'} + + supports-preserve-symlinks-flag@1.0.0: + resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} + engines: {node: '>= 0.4'} + + synckit@0.11.12: + resolution: {integrity: sha512-Bh7QjT8/SuKUIfObSXNHNSK6WHo6J1tHCqJsuaFDP7gP0fkzSfTxI8y85JrppZ0h8l0maIgc2tfuZQ6/t3GtnQ==} + engines: {node: ^14.18.0 || >=16.0.0} + + tapable@2.3.0: + resolution: {integrity: sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==} + engines: {node: '>=6'} + + tar-stream@2.2.0: + resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==} + engines: {node: '>=6'} + + tar@6.2.1: + resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} + engines: {node: '>=10'} + + temp-dir@1.0.0: + resolution: {integrity: sha512-xZFXEGbG7SNC3itwBzI3RYjq/cEhBkx2hJuKGIUOcEULmkQExXiHat2z/qkISYsuR+IKumhEfKKbV5qXmhICFQ==} + engines: {node: '>=4'} + + terser-webpack-plugin@5.3.17: + resolution: {integrity: 
sha512-YR7PtUp6GMU91BgSJmlaX/rS2lGDbAF7D+Wtq7hRO+MiljNmodYvqslzCFiYVAgW+Qoaaia/QUIP4lGXufjdZw==} + engines: {node: '>= 10.13.0'} + peerDependencies: + '@swc/core': '*' + esbuild: '*' + uglify-js: '*' + webpack: ^5.1.0 + peerDependenciesMeta: + '@swc/core': + optional: true + esbuild: + optional: true + uglify-js: + optional: true + + terser@5.46.0: + resolution: {integrity: sha512-jTwoImyr/QbOWFFso3YoU3ik0jBBDJ6JTOQiy/J2YxVJdZCc+5u7skhNwiOR3FQIygFqVUPHl7qbbxtjW2K3Qg==} + engines: {node: '>=10'} + hasBin: true + + test-exclude@6.0.0: + resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} + engines: {node: '>=8'} + + text-extensions@1.9.0: + resolution: {integrity: sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ==} + engines: {node: '>=0.10'} + + through2@2.0.5: + resolution: {integrity: sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==} + + through@2.3.8: + resolution: {integrity: sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==} + + tinyglobby@0.2.12: + resolution: {integrity: sha512-qkf4trmKSIiMTs/E63cxH+ojC2unam7rJ0WrauAzpT3ECNTxGRMlaXxVbfxMUC/w0LaYk6jQ4y/nGR9uBO3tww==} + engines: {node: '>=12.0.0'} + + tmp@0.2.5: + resolution: {integrity: sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==} + engines: {node: '>=14.14'} + + tmpl@1.0.5: + resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + tr46@0.0.3: + resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + + treeverse@3.0.0: + resolution: {integrity: 
sha512-gcANaAnd2QDZFmHFEOF4k7uc1J/6a6z3DJMd/QwEyxLoKGiptJRwid582r7QIsFlFMIZ3SnxfS52S4hm2DHkuQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + trim-newlines@3.0.1: + resolution: {integrity: sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==} + engines: {node: '>=8'} + + ts-jest@29.4.6: + resolution: {integrity: sha512-fSpWtOO/1AjSNQguk43hb/JCo16oJDnMJf3CdEGNkqsEX3t0KX96xvyX1D7PfLCpVoKu4MfVrqUkFyblYoY4lA==} + engines: {node: ^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@babel/core': '>=7.0.0-beta.0 <8' + '@jest/transform': ^29.0.0 || ^30.0.0 + '@jest/types': ^29.0.0 || ^30.0.0 + babel-jest: ^29.0.0 || ^30.0.0 + esbuild: '*' + jest: ^29.0.0 || ^30.0.0 + jest-util: ^29.0.0 || ^30.0.0 + typescript: '>=4.3 <6' + peerDependenciesMeta: + '@babel/core': + optional: true + '@jest/transform': + optional: true + '@jest/types': + optional: true + babel-jest: + optional: true + esbuild: + optional: true + jest-util: + optional: true + + ts-loader@9.5.4: + resolution: {integrity: sha512-nCz0rEwunlTZiy6rXFByQU1kVVpCIgUpc/psFiKVrUwrizdnIbRFu8w7bxhUF0X613DYwT4XzrZHpVyMe758hQ==} + engines: {node: '>=12.0.0'} + peerDependencies: + typescript: '*' + webpack: ^5.0.0 + + tsconfig-paths@4.2.0: + resolution: {integrity: sha512-NoZ4roiN7LnbKn9QqE1amc9DJfzvZXxF4xDavcOWt1BPkdx+m+0gJuPM+S0vCe7zTJMYUP0R8pO2XMr+Y8oLIg==} + engines: {node: '>=6'} + + tslib@1.14.1: + resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} + + tslib@2.8.1: + resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + + tuf-js@2.2.1: + resolution: {integrity: sha512-GwIJau9XaA8nLVbUXsN3IlFi7WmQ48gBUrl3FTkkL/XLu/POhBzfmX9hd33FNMX1qAsfl6ozO1iMmW9NC8YniA==} + engines: {node: ^16.14.0 || >=18.0.0} + + type-detect@4.0.8: + resolution: {integrity: 
sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} + engines: {node: '>=4'} + + type-fest@0.18.1: + resolution: {integrity: sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==} + engines: {node: '>=10'} + + type-fest@0.21.3: + resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} + engines: {node: '>=10'} + + type-fest@0.4.1: + resolution: {integrity: sha512-IwzA/LSfD2vC1/YDYMv/zHP4rDF1usCwllsDpbolT3D4fUepIO7f9K70jjmUewU/LmGUKJcwcVtDCpnKk4BPMw==} + engines: {node: '>=6'} + + type-fest@0.6.0: + resolution: {integrity: sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==} + engines: {node: '>=8'} + + type-fest@0.8.1: + resolution: {integrity: sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==} + engines: {node: '>=8'} + + type-fest@4.41.0: + resolution: {integrity: sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==} + engines: {node: '>=16'} + + typedarray@0.0.6: + resolution: {integrity: sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==} + + typescript@5.5.4: + resolution: {integrity: sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==} + engines: {node: '>=14.17'} + hasBin: true + + typical@4.0.0: + resolution: {integrity: sha512-VAH4IvQ7BDFYglMd7BPRDfLgxZZX4O4TFcRDA6EN5X7erNJJq+McIEp8np9aVtxrCJ6qx4GTYVfOWNjcqwZgRw==} + engines: {node: '>=8'} + + uglify-js@3.19.3: + resolution: {integrity: sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==} + engines: {node: '>=0.8.0'} + hasBin: true + + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + + 
unicorn-magic@0.3.0: + resolution: {integrity: sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==} + engines: {node: '>=18'} + + unique-filename@3.0.0: + resolution: {integrity: sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + unique-slug@4.0.0: + resolution: {integrity: sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + universal-user-agent@6.0.1: + resolution: {integrity: sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==} + + universalify@2.0.1: + resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} + engines: {node: '>= 10.0.0'} + + unrs-resolver@1.11.1: + resolution: {integrity: sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==} + + upath@2.0.1: + resolution: {integrity: sha512-1uEe95xksV1O0CYKXo8vQvN1JEbtJp7lb7C5U9HMsIp6IVwntkH/oNUzyVNQSd4S1sYk2FpSSW44FqMc8qee5w==} + engines: {node: '>=4'} + + update-browserslist-db@1.2.3: + resolution: {integrity: sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + + util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + + uuid@10.0.0: + resolution: {integrity: sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==} + hasBin: true + + uuid@8.3.2: + resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} + hasBin: true + + v8-to-istanbul@9.3.0: + resolution: {integrity: 
sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==} + engines: {node: '>=10.12.0'} + + validate-npm-package-license@3.0.4: + resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} + + validate-npm-package-name@5.0.1: + resolution: {integrity: sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + vscode-jsonrpc@9.0.0-next.11: + resolution: {integrity: sha512-u6LElQNbSiE9OugEEmrUKwH6+8BpPz2S5MDHvQUqHL//I4Q8GPikKLOUf856UnbLkZdhxaPrExac1lA3XwpIPA==} + engines: {node: '>=14.0.0'} + + vscode-languageserver-protocol@3.17.6-next.17: + resolution: {integrity: sha512-HW72YcFsuckfK6oPVuysRXhKiIFJoUvXgspPHvCMWpwe2x9aq2oGZDUSvKx4m/qUGB27+iu8ijAxsFlljYl2IQ==} + + vscode-languageserver-textdocument@1.0.12: + resolution: {integrity: sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==} + + vscode-languageserver-types@3.17.6-next.6: + resolution: {integrity: sha512-aiJY5/yW+xzw7KPNlwi3gQtddq/3EIn5z8X8nCgJfaiAij2R1APKePngv+MUdLdYJBVTLu+Qa0ODsT+pHgYguQ==} + + vscode-languageserver@10.0.0-next.17: + resolution: {integrity: sha512-/bwO/E3RUzIkQ1BQ70gcLdZeM8xvK0JS7gMvtug7yiH0dzTjciqqQTUh3H9NEXsqYEjLzGwiXgRUkt6Z8fQV0Q==} + hasBin: true + + vscode-uri@3.1.0: + resolution: {integrity: sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==} + + walk-up-path@3.0.1: + resolution: {integrity: sha512-9YlCL/ynK3CTlrSRrDxZvUauLzAswPCrsaCgilqFevUYpeEW0/3ScEjaa3kbW/T0ghhkEr7mv+fpjqn1Y1YuTA==} + + walker@1.0.8: + resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} + + watchpack@2.5.1: + resolution: {integrity: sha512-Zn5uXdcFNIA1+1Ei5McRd+iRzfhENPCe7LeABkJtNulSxjma+l7ltNx55BWZkRlwRnpOgHqxnjyaDgJnNXnqzg==} + engines: {node: '>=10.13.0'} + + 
wcwidth@1.0.1: + resolution: {integrity: sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==} + + webidl-conversions@3.0.1: + resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + + webpack-cli@5.1.4: + resolution: {integrity: sha512-pIDJHIEI9LR0yxHXQ+Qh95k2EvXpWzZ5l+d+jIo+RdSm9MiHfzazIxwwni/p7+x4eJZuvG1AJwgC4TNQ7NRgsg==} + engines: {node: '>=14.15.0'} + hasBin: true + peerDependencies: + '@webpack-cli/generators': '*' + webpack: 5.x.x + webpack-bundle-analyzer: '*' + webpack-dev-server: '*' + peerDependenciesMeta: + '@webpack-cli/generators': + optional: true + webpack-bundle-analyzer: + optional: true + webpack-dev-server: + optional: true + + webpack-merge@5.10.0: + resolution: {integrity: sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA==} + engines: {node: '>=10.0.0'} + + webpack-sources@1.4.3: + resolution: {integrity: sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==} + + webpack-sources@3.3.4: + resolution: {integrity: sha512-7tP1PdV4vF+lYPnkMR0jMY5/la2ub5Fc/8VQrrU+lXkiM6C4TjVfGw7iKfyhnTQOsD+6Q/iKw0eFciziRgD58Q==} + engines: {node: '>=10.13.0'} + + webpack@5.105.4: + resolution: {integrity: sha512-jTywjboN9aHxFlToqb0K0Zs9SbBoW4zRUlGzI2tYNxVYcEi/IPpn+Xi4ye5jTLvX2YeLuic/IvxNot+Q1jMoOw==} + engines: {node: '>=10.13.0'} + hasBin: true + peerDependencies: + webpack-cli: '*' + peerDependenciesMeta: + webpack-cli: + optional: true + + whatwg-url@5.0.0: + resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + + which@1.3.1: + resolution: {integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==} + hasBin: true + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + 
engines: {node: '>= 8'} + hasBin: true + + which@4.0.0: + resolution: {integrity: sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==} + engines: {node: ^16.13.0 || >=18.0.0} + hasBin: true + + wide-align@1.1.5: + resolution: {integrity: sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==} + + wildcard@2.0.1: + resolution: {integrity: sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==} + + word-wrap@1.2.5: + resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} + engines: {node: '>=0.10.0'} + + wordwrap@1.0.0: + resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} + + wrap-ansi@6.2.0: + resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} + engines: {node: '>=8'} + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + + wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + + write-file-atomic@2.4.3: + resolution: {integrity: sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==} + + write-file-atomic@5.0.1: + resolution: {integrity: sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + write-json-file@3.2.0: + resolution: {integrity: sha512-3xZqT7Byc2uORAatYiP3DHUUAVEkNOswEWNs9H5KXiicRTvzYzYqKjYc4G7p+8pltvAw641lVByKVtMpf+4sYQ==} + 
engines: {node: '>=6'} + + write-pkg@4.0.0: + resolution: {integrity: sha512-v2UQ+50TNf2rNHJ8NyWttfm/EJUBWMJcx6ZTYZr6Qp52uuegWw/lBkCtCbnYZEmPRNL61m+u67dAmGxo+HTULA==} + engines: {node: '>=8'} + + xml@1.0.1: + resolution: {integrity: sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw==} + + xtend@4.0.2: + resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} + engines: {node: '>=0.4'} + + y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + + yallist@3.1.1: + resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} + + yallist@4.0.0: + resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} + + yaml@2.8.2: + resolution: {integrity: sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==} + engines: {node: '>= 14.6'} + hasBin: true + + yargs-parser@20.2.9: + resolution: {integrity: sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==} + engines: {node: '>=10'} + + yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + + yargs@16.2.0: + resolution: {integrity: sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==} + engines: {node: '>=10'} + + yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + + yocto-queue@0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + +snapshots: + 
+ '@babel/code-frame@7.29.0': + dependencies: + '@babel/helper-validator-identifier': 7.28.5 + js-tokens: 4.0.0 + picocolors: 1.1.1 + + '@babel/compat-data@7.29.0': {} + + '@babel/core@7.29.0': + dependencies: + '@babel/code-frame': 7.29.0 + '@babel/generator': 7.29.1 + '@babel/helper-compilation-targets': 7.28.6 + '@babel/helper-module-transforms': 7.28.6(@babel/core@7.29.0) + '@babel/helpers': 7.28.6 + '@babel/parser': 7.29.0 + '@babel/template': 7.28.6 + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 + '@jridgewell/remapping': 2.3.5 + convert-source-map: 2.0.0 + debug: 4.4.3 + gensync: 1.0.0-beta.2 + json5: 2.2.3 + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + + '@babel/generator@7.29.1': + dependencies: + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 + jsesc: 3.1.0 + + '@babel/helper-compilation-targets@7.28.6': + dependencies: + '@babel/compat-data': 7.29.0 + '@babel/helper-validator-option': 7.27.1 + browserslist: 4.28.1 + lru-cache: 5.1.1 + semver: 6.3.1 + + '@babel/helper-globals@7.28.0': {} + + '@babel/helper-module-imports@7.28.6': + dependencies: + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-module-transforms@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-module-imports': 7.28.6 + '@babel/helper-validator-identifier': 7.28.5 + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-plugin-utils@7.28.6': {} + + '@babel/helper-string-parser@7.27.1': {} + + '@babel/helper-validator-identifier@7.28.5': {} + + '@babel/helper-validator-option@7.27.1': {} + + '@babel/helpers@7.28.6': + dependencies: + '@babel/template': 7.28.6 + '@babel/types': 7.29.0 + + '@babel/parser@7.29.0': + dependencies: + '@babel/types': 7.29.0 + + '@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.29.0)': + dependencies: + 
'@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-bigint@7.8.3(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-class-static-block@7.14.5(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-import-attributes@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-import-meta@7.10.4(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-jsx@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + 
'@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-private-property-in-object@7.14.5(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-typescript@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/template@7.28.6': + dependencies: + '@babel/code-frame': 7.29.0 + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 + + '@babel/traverse@7.29.0': + dependencies: + '@babel/code-frame': 7.29.0 + '@babel/generator': 7.29.1 + '@babel/helper-globals': 7.28.0 + '@babel/parser': 7.29.0 + '@babel/template': 7.28.6 + '@babel/types': 7.29.0 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + '@babel/types@7.29.0': + dependencies: + '@babel/helper-string-parser': 7.27.1 + '@babel/helper-validator-identifier': 7.28.5 + + '@bcoe/v8-coverage@0.2.3': {} + + '@discoveryjs/json-ext@0.5.7': {} + + '@emnapi/core@1.8.1': + dependencies: + '@emnapi/wasi-threads': 1.1.0 + tslib: 2.8.1 + + '@emnapi/runtime@1.8.1': + dependencies: + tslib: 2.8.1 + + '@emnapi/wasi-threads@1.1.0': + dependencies: + tslib: 2.8.1 + + '@esbuild/aix-ppc64@0.20.2': + optional: true + + '@esbuild/aix-ppc64@0.27.3': + optional: true + + '@esbuild/android-arm64@0.20.2': + optional: true + + '@esbuild/android-arm64@0.27.3': + optional: true + + '@esbuild/android-arm@0.20.2': + optional: true + + '@esbuild/android-arm@0.27.3': + optional: true + + '@esbuild/android-x64@0.20.2': + optional: true + + '@esbuild/android-x64@0.27.3': + optional: true + + '@esbuild/darwin-arm64@0.20.2': + optional: true + + '@esbuild/darwin-arm64@0.27.3': + optional: true + + '@esbuild/darwin-x64@0.20.2': + optional: true + + '@esbuild/darwin-x64@0.27.3': + optional: true + + '@esbuild/freebsd-arm64@0.20.2': + optional: true + + 
'@esbuild/freebsd-arm64@0.27.3': + optional: true + + '@esbuild/freebsd-x64@0.20.2': + optional: true + + '@esbuild/freebsd-x64@0.27.3': + optional: true + + '@esbuild/linux-arm64@0.20.2': + optional: true + + '@esbuild/linux-arm64@0.27.3': + optional: true + + '@esbuild/linux-arm@0.20.2': + optional: true + + '@esbuild/linux-arm@0.27.3': + optional: true + + '@esbuild/linux-ia32@0.20.2': + optional: true + + '@esbuild/linux-ia32@0.27.3': + optional: true + + '@esbuild/linux-loong64@0.20.2': + optional: true + + '@esbuild/linux-loong64@0.27.3': + optional: true + + '@esbuild/linux-mips64el@0.20.2': + optional: true + + '@esbuild/linux-mips64el@0.27.3': + optional: true + + '@esbuild/linux-ppc64@0.20.2': + optional: true + + '@esbuild/linux-ppc64@0.27.3': + optional: true + + '@esbuild/linux-riscv64@0.20.2': + optional: true + + '@esbuild/linux-riscv64@0.27.3': + optional: true + + '@esbuild/linux-s390x@0.20.2': + optional: true + + '@esbuild/linux-s390x@0.27.3': + optional: true + + '@esbuild/linux-x64@0.20.2': + optional: true + + '@esbuild/linux-x64@0.27.3': + optional: true + + '@esbuild/netbsd-arm64@0.27.3': + optional: true + + '@esbuild/netbsd-x64@0.20.2': + optional: true + + '@esbuild/netbsd-x64@0.27.3': + optional: true + + '@esbuild/openbsd-arm64@0.27.3': + optional: true + + '@esbuild/openbsd-x64@0.20.2': + optional: true + + '@esbuild/openbsd-x64@0.27.3': + optional: true + + '@esbuild/openharmony-arm64@0.27.3': + optional: true + + '@esbuild/sunos-x64@0.20.2': + optional: true + + '@esbuild/sunos-x64@0.27.3': + optional: true + + '@esbuild/win32-arm64@0.20.2': + optional: true + + '@esbuild/win32-arm64@0.27.3': + optional: true + + '@esbuild/win32-ia32@0.20.2': + optional: true + + '@esbuild/win32-ia32@0.27.3': + optional: true + + '@esbuild/win32-x64@0.20.2': + optional: true + + '@esbuild/win32-x64@0.27.3': + optional: true + + '@hutson/parse-repository-url@3.0.2': {} + + '@inquirer/external-editor@1.0.3(@types/node@22.19.15)': + dependencies: + 
chardet: 2.1.1 + iconv-lite: 0.7.2 + optionalDependencies: + '@types/node': 22.19.15 + + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.2.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + + '@isaacs/string-locale-compare@1.1.0': {} + + '@istanbuljs/load-nyc-config@1.1.0': + dependencies: + camelcase: 5.3.1 + find-up: 4.1.0 + get-package-type: 0.1.0 + js-yaml: 3.14.2 + resolve-from: 5.0.0 + + '@istanbuljs/schema@0.1.3': {} + + '@jest/console@30.2.0': + dependencies: + '@jest/types': 30.2.0 + '@types/node': 22.19.15 + chalk: 4.1.2 + jest-message-util: 30.2.0 + jest-util: 30.2.0 + slash: 3.0.0 + + '@jest/core@30.2.0': + dependencies: + '@jest/console': 30.2.0 + '@jest/pattern': 30.0.1 + '@jest/reporters': 30.2.0 + '@jest/test-result': 30.2.0 + '@jest/transform': 30.2.0 + '@jest/types': 30.2.0 + '@types/node': 22.19.15 + ansi-escapes: 4.3.2 + chalk: 4.1.2 + ci-info: 4.4.0 + exit-x: 0.2.2 + graceful-fs: 4.2.11 + jest-changed-files: 30.2.0 + jest-config: 30.2.0(@types/node@22.19.15) + jest-haste-map: 30.2.0 + jest-message-util: 30.2.0 + jest-regex-util: 30.0.1 + jest-resolve: 30.2.0 + jest-resolve-dependencies: 30.2.0 + jest-runner: 30.2.0 + jest-runtime: 30.2.0 + jest-snapshot: 30.2.0 + jest-util: 30.2.0 + jest-validate: 30.2.0 + jest-watcher: 30.2.0 + micromatch: 4.0.8 + pretty-format: 30.2.0 + slash: 3.0.0 + transitivePeerDependencies: + - babel-plugin-macros + - esbuild-register + - supports-color + - ts-node + + '@jest/diff-sequences@30.0.1': {} + + '@jest/environment@30.2.0': + dependencies: + '@jest/fake-timers': 30.2.0 + '@jest/types': 30.2.0 + '@types/node': 22.19.15 + jest-mock: 30.2.0 + + '@jest/expect-utils@30.2.0': + dependencies: + '@jest/get-type': 30.1.0 + + '@jest/expect@30.2.0': + dependencies: + expect: 30.2.0 + jest-snapshot: 30.2.0 + transitivePeerDependencies: + - supports-color + + '@jest/fake-timers@30.2.0': + dependencies: + 
'@jest/types': 30.2.0 + '@sinonjs/fake-timers': 13.0.5 + '@types/node': 22.19.15 + jest-message-util: 30.2.0 + jest-mock: 30.2.0 + jest-util: 30.2.0 + + '@jest/get-type@30.1.0': {} + + '@jest/globals@30.2.0': + dependencies: + '@jest/environment': 30.2.0 + '@jest/expect': 30.2.0 + '@jest/types': 30.2.0 + jest-mock: 30.2.0 + transitivePeerDependencies: + - supports-color + + '@jest/pattern@30.0.1': + dependencies: + '@types/node': 22.19.15 + jest-regex-util: 30.0.1 + + '@jest/reporters@30.2.0': + dependencies: + '@bcoe/v8-coverage': 0.2.3 + '@jest/console': 30.2.0 + '@jest/test-result': 30.2.0 + '@jest/transform': 30.2.0 + '@jest/types': 30.2.0 + '@jridgewell/trace-mapping': 0.3.31 + '@types/node': 22.19.15 + chalk: 4.1.2 + collect-v8-coverage: 1.0.3 + exit-x: 0.2.2 + glob: 10.4.5 + graceful-fs: 4.2.11 + istanbul-lib-coverage: 3.2.2 + istanbul-lib-instrument: 6.0.3 + istanbul-lib-report: 3.0.1 + istanbul-lib-source-maps: 5.0.6 + istanbul-reports: 3.2.0 + jest-message-util: 30.2.0 + jest-util: 30.2.0 + jest-worker: 30.2.0 + slash: 3.0.0 + string-length: 4.0.2 + v8-to-istanbul: 9.3.0 + transitivePeerDependencies: + - supports-color + + '@jest/schemas@29.6.3': + dependencies: + '@sinclair/typebox': 0.27.10 + + '@jest/schemas@30.0.5': + dependencies: + '@sinclair/typebox': 0.34.48 + + '@jest/snapshot-utils@30.2.0': + dependencies: + '@jest/types': 30.2.0 + chalk: 4.1.2 + graceful-fs: 4.2.11 + natural-compare: 1.4.0 + + '@jest/source-map@30.0.1': + dependencies: + '@jridgewell/trace-mapping': 0.3.31 + callsites: 3.1.0 + graceful-fs: 4.2.11 + + '@jest/test-result@30.2.0': + dependencies: + '@jest/console': 30.2.0 + '@jest/types': 30.2.0 + '@types/istanbul-lib-coverage': 2.0.6 + collect-v8-coverage: 1.0.3 + + '@jest/test-sequencer@30.2.0': + dependencies: + '@jest/test-result': 30.2.0 + graceful-fs: 4.2.11 + jest-haste-map: 30.2.0 + slash: 3.0.0 + + '@jest/transform@30.2.0': + dependencies: + '@babel/core': 7.29.0 + '@jest/types': 30.2.0 + '@jridgewell/trace-mapping': 
0.3.31 + babel-plugin-istanbul: 7.0.1 + chalk: 4.1.2 + convert-source-map: 2.0.0 + fast-json-stable-stringify: 2.1.0 + graceful-fs: 4.2.11 + jest-haste-map: 30.2.0 + jest-regex-util: 30.0.1 + jest-util: 30.2.0 + micromatch: 4.0.8 + pirates: 4.0.7 + slash: 3.0.0 + write-file-atomic: 5.0.1 + transitivePeerDependencies: + - supports-color + + '@jest/types@30.2.0': + dependencies: + '@jest/pattern': 30.0.1 + '@jest/schemas': 30.0.5 + '@types/istanbul-lib-coverage': 2.0.6 + '@types/istanbul-reports': 3.0.4 + '@types/node': 22.19.15 + '@types/yargs': 17.0.35 + chalk: 4.1.2 + + '@jridgewell/gen-mapping@0.3.13': + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/remapping@2.3.5': + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/resolve-uri@3.1.2': {} + + '@jridgewell/source-map@0.3.11': + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/sourcemap-codec@1.5.5': {} + + '@jridgewell/trace-mapping@0.3.31': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.5 + + '@lerna/create@8.2.4(@types/node@22.19.15)(encoding@0.1.13)(typescript@5.5.4)': + dependencies: + '@npmcli/arborist': 7.5.4 + '@npmcli/package-json': 5.2.0 + '@npmcli/run-script': 8.1.0 + '@nx/devkit': 20.8.4(nx@20.8.4) + '@octokit/plugin-enterprise-rest': 6.0.1 + '@octokit/rest': 20.1.2 + aproba: 2.0.0 + byte-size: 8.1.1 + chalk: 4.1.0 + clone-deep: 4.0.1 + cmd-shim: 6.0.3 + color-support: 1.1.3 + columnify: 1.6.0 + console-control-strings: 1.1.0 + conventional-changelog-core: 5.0.1 + conventional-recommended-bump: 7.0.1 + cosmiconfig: 9.0.0(typescript@5.5.4) + dedent: 1.5.3 + execa: 5.0.0 + fs-extra: 11.3.4 + get-stream: 6.0.0 + git-url-parse: 14.0.0 + glob-parent: 6.0.2 + graceful-fs: 4.2.11 + has-unicode: 2.0.1 + ini: 1.3.8 + init-package-json: 6.0.3 + inquirer: 8.2.7(@types/node@22.19.15) + is-ci: 3.0.1 + 
is-stream: 2.0.0 + js-yaml: 4.1.0 + libnpmpublish: 9.0.9 + load-json-file: 6.2.0 + make-dir: 4.0.0 + minimatch: 3.0.5 + multimatch: 5.0.0 + node-fetch: 2.6.7(encoding@0.1.13) + npm-package-arg: 11.0.2 + npm-packlist: 8.0.2 + npm-registry-fetch: 17.1.0 + nx: 20.8.4 + p-map: 4.0.0 + p-map-series: 2.1.0 + p-queue: 6.6.2 + p-reduce: 2.1.0 + pacote: 18.0.6 + pify: 5.0.0 + read-cmd-shim: 4.0.0 + resolve-from: 5.0.0 + rimraf: 4.4.1 + semver: 7.7.4 + set-blocking: 2.0.0 + signal-exit: 3.0.7 + slash: 3.0.0 + ssri: 10.0.6 + string-width: 4.2.3 + tar: 6.2.1 + temp-dir: 1.0.0 + through: 2.3.8 + tinyglobby: 0.2.12 + upath: 2.0.1 + uuid: 10.0.0 + validate-npm-package-license: 3.0.4 + validate-npm-package-name: 5.0.1 + wide-align: 1.1.5 + write-file-atomic: 5.0.1 + write-pkg: 4.0.0 + yargs: 17.7.2 + yargs-parser: 21.1.1 + transitivePeerDependencies: + - '@swc-node/register' + - '@swc/core' + - '@types/node' + - babel-plugin-macros + - bluebird + - debug + - encoding + - supports-color + - typescript + + '@napi-rs/wasm-runtime@0.2.12': + dependencies: + '@emnapi/core': 1.8.1 + '@emnapi/runtime': 1.8.1 + '@tybys/wasm-util': 0.10.1 + optional: true + + '@napi-rs/wasm-runtime@0.2.4': + dependencies: + '@emnapi/core': 1.8.1 + '@emnapi/runtime': 1.8.1 + '@tybys/wasm-util': 0.9.0 + + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.20.1 + + '@npmcli/agent@2.2.2': + dependencies: + agent-base: 7.1.4 + http-proxy-agent: 7.0.2 + https-proxy-agent: 7.0.6 + lru-cache: 10.4.3 + socks-proxy-agent: 8.0.5 + transitivePeerDependencies: + - supports-color + + '@npmcli/arborist@7.5.4': + dependencies: + '@isaacs/string-locale-compare': 1.1.0 + '@npmcli/fs': 3.1.1 + '@npmcli/installed-package-contents': 2.1.0 + '@npmcli/map-workspaces': 3.0.6 + '@npmcli/metavuln-calculator': 7.1.1 + '@npmcli/name-from-folder': 2.0.0 + 
'@npmcli/node-gyp': 3.0.0 + '@npmcli/package-json': 5.2.0 + '@npmcli/query': 3.1.0 + '@npmcli/redact': 2.0.1 + '@npmcli/run-script': 8.1.0 + bin-links: 4.0.4 + cacache: 18.0.4 + common-ancestor-path: 1.0.1 + hosted-git-info: 7.0.2 + json-parse-even-better-errors: 3.0.2 + json-stringify-nice: 1.1.4 + lru-cache: 10.4.3 + minimatch: 9.0.9 + nopt: 7.2.1 + npm-install-checks: 6.3.0 + npm-package-arg: 11.0.2 + npm-pick-manifest: 9.1.0 + npm-registry-fetch: 17.1.0 + pacote: 18.0.6 + parse-conflict-json: 3.0.1 + proc-log: 4.2.0 + proggy: 2.0.0 + promise-all-reject-late: 1.0.1 + promise-call-limit: 3.0.2 + read-package-json-fast: 3.0.2 + semver: 7.7.4 + ssri: 10.0.6 + treeverse: 3.0.0 + walk-up-path: 3.0.1 + transitivePeerDependencies: + - bluebird + - supports-color + + '@npmcli/fs@3.1.1': + dependencies: + semver: 7.7.4 + + '@npmcli/git@5.0.8': + dependencies: + '@npmcli/promise-spawn': 7.0.2 + ini: 4.1.3 + lru-cache: 10.4.3 + npm-pick-manifest: 9.1.0 + proc-log: 4.2.0 + promise-inflight: 1.0.1 + promise-retry: 2.0.1 + semver: 7.7.4 + which: 4.0.0 + transitivePeerDependencies: + - bluebird + + '@npmcli/installed-package-contents@2.1.0': + dependencies: + npm-bundled: 3.0.1 + npm-normalize-package-bin: 3.0.1 + + '@npmcli/map-workspaces@3.0.6': + dependencies: + '@npmcli/name-from-folder': 2.0.0 + glob: 10.4.5 + minimatch: 9.0.9 + read-package-json-fast: 3.0.2 + + '@npmcli/metavuln-calculator@7.1.1': + dependencies: + cacache: 18.0.4 + json-parse-even-better-errors: 3.0.2 + pacote: 18.0.6 + proc-log: 4.2.0 + semver: 7.7.4 + transitivePeerDependencies: + - bluebird + - supports-color + + '@npmcli/name-from-folder@2.0.0': {} + + '@npmcli/node-gyp@3.0.0': {} + + '@npmcli/package-json@5.2.0': + dependencies: + '@npmcli/git': 5.0.8 + glob: 10.4.5 + hosted-git-info: 7.0.2 + json-parse-even-better-errors: 3.0.2 + normalize-package-data: 6.0.2 + proc-log: 4.2.0 + semver: 7.7.4 + transitivePeerDependencies: + - bluebird + + '@npmcli/promise-spawn@7.0.2': + dependencies: + which: 
4.0.0 + + '@npmcli/query@3.1.0': + dependencies: + postcss-selector-parser: 6.1.2 + + '@npmcli/redact@2.0.1': {} + + '@npmcli/run-script@8.1.0': + dependencies: + '@npmcli/node-gyp': 3.0.0 + '@npmcli/package-json': 5.2.0 + '@npmcli/promise-spawn': 7.0.2 + node-gyp: 10.3.1 + proc-log: 4.2.0 + which: 4.0.0 + transitivePeerDependencies: + - bluebird + - supports-color + + '@nx/devkit@20.8.4(nx@20.8.4)': + dependencies: + ejs: 3.1.10 + enquirer: 2.3.6 + ignore: 5.3.2 + minimatch: 9.0.3 + nx: 20.8.4 + semver: 7.7.4 + tmp: 0.2.5 + tslib: 2.8.1 + yargs-parser: 21.1.1 + + '@nx/nx-darwin-arm64@20.8.4': + optional: true + + '@nx/nx-darwin-x64@20.8.4': + optional: true + + '@nx/nx-freebsd-x64@20.8.4': + optional: true + + '@nx/nx-linux-arm-gnueabihf@20.8.4': + optional: true + + '@nx/nx-linux-arm64-gnu@20.8.4': + optional: true + + '@nx/nx-linux-arm64-musl@20.8.4': + optional: true + + '@nx/nx-linux-x64-gnu@20.8.4': + optional: true + + '@nx/nx-linux-x64-musl@20.8.4': + optional: true + + '@nx/nx-win32-arm64-msvc@20.8.4': + optional: true + + '@nx/nx-win32-x64-msvc@20.8.4': + optional: true + + '@octokit/auth-token@4.0.0': {} + + '@octokit/core@5.2.2': + dependencies: + '@octokit/auth-token': 4.0.0 + '@octokit/graphql': 7.1.1 + '@octokit/request': 8.4.1 + '@octokit/request-error': 5.1.1 + '@octokit/types': 13.10.0 + before-after-hook: 2.2.3 + universal-user-agent: 6.0.1 + + '@octokit/endpoint@9.0.6': + dependencies: + '@octokit/types': 13.10.0 + universal-user-agent: 6.0.1 + + '@octokit/graphql@7.1.1': + dependencies: + '@octokit/request': 8.4.1 + '@octokit/types': 13.10.0 + universal-user-agent: 6.0.1 + + '@octokit/openapi-types@24.2.0': {} + + '@octokit/plugin-enterprise-rest@6.0.1': {} + + '@octokit/plugin-paginate-rest@11.4.4-cjs.2(@octokit/core@5.2.2)': + dependencies: + '@octokit/core': 5.2.2 + '@octokit/types': 13.10.0 + + '@octokit/plugin-request-log@4.0.1(@octokit/core@5.2.2)': + dependencies: + '@octokit/core': 5.2.2 + + 
'@octokit/plugin-rest-endpoint-methods@13.3.2-cjs.1(@octokit/core@5.2.2)': + dependencies: + '@octokit/core': 5.2.2 + '@octokit/types': 13.10.0 + + '@octokit/request-error@5.1.1': + dependencies: + '@octokit/types': 13.10.0 + deprecation: 2.3.1 + once: 1.4.0 + + '@octokit/request@8.4.1': + dependencies: + '@octokit/endpoint': 9.0.6 + '@octokit/request-error': 5.1.1 + '@octokit/types': 13.10.0 + universal-user-agent: 6.0.1 + + '@octokit/rest@20.1.2': + dependencies: + '@octokit/core': 5.2.2 + '@octokit/plugin-paginate-rest': 11.4.4-cjs.2(@octokit/core@5.2.2) + '@octokit/plugin-request-log': 4.0.1(@octokit/core@5.2.2) + '@octokit/plugin-rest-endpoint-methods': 13.3.2-cjs.1(@octokit/core@5.2.2) + + '@octokit/types@13.10.0': + dependencies: + '@octokit/openapi-types': 24.2.0 + + '@pkgjs/parseargs@0.11.0': + optional: true + + '@pkgr/core@0.2.9': {} + + '@sigstore/bundle@2.3.2': + dependencies: + '@sigstore/protobuf-specs': 0.3.3 + + '@sigstore/core@1.1.0': {} + + '@sigstore/protobuf-specs@0.3.3': {} + + '@sigstore/sign@2.3.2': + dependencies: + '@sigstore/bundle': 2.3.2 + '@sigstore/core': 1.1.0 + '@sigstore/protobuf-specs': 0.3.3 + make-fetch-happen: 13.0.1 + proc-log: 4.2.0 + promise-retry: 2.0.1 + transitivePeerDependencies: + - supports-color + + '@sigstore/tuf@2.3.4': + dependencies: + '@sigstore/protobuf-specs': 0.3.3 + tuf-js: 2.2.1 + transitivePeerDependencies: + - supports-color + + '@sigstore/verify@1.2.1': + dependencies: + '@sigstore/bundle': 2.3.2 + '@sigstore/core': 1.1.0 + '@sigstore/protobuf-specs': 0.3.3 + + '@sinclair/typebox@0.27.10': {} + + '@sinclair/typebox@0.34.48': {} + + '@sindresorhus/merge-streams@2.3.0': {} + + '@sinonjs/commons@3.0.1': + dependencies: + type-detect: 4.0.8 + + '@sinonjs/fake-timers@13.0.5': + dependencies: + '@sinonjs/commons': 3.0.1 + + '@tufjs/canonical-json@2.0.0': {} + + '@tufjs/models@2.0.1': + dependencies: + '@tufjs/canonical-json': 2.0.0 + minimatch: 9.0.9 + + '@tybys/wasm-util@0.10.1': + dependencies: + tslib: 2.8.1 
+ optional: true + + '@tybys/wasm-util@0.9.0': + dependencies: + tslib: 2.8.1 + + '@types/babel__core@7.20.5': + dependencies: + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 + '@types/babel__generator': 7.27.0 + '@types/babel__template': 7.4.4 + '@types/babel__traverse': 7.28.0 + + '@types/babel__generator@7.27.0': + dependencies: + '@babel/types': 7.29.0 + + '@types/babel__template@7.4.4': + dependencies: + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 + + '@types/babel__traverse@7.28.0': + dependencies: + '@babel/types': 7.29.0 + + '@types/command-line-args@5.2.3': {} + + '@types/emscripten@1.41.5': {} + + '@types/eslint-scope@3.7.7': + dependencies: + '@types/eslint': 9.6.1 + '@types/estree': 1.0.8 + + '@types/eslint@9.6.1': + dependencies: + '@types/estree': 1.0.8 + '@types/json-schema': 7.0.15 + + '@types/estree@1.0.8': {} + + '@types/fs-extra@11.0.4': + dependencies: + '@types/jsonfile': 6.1.4 + '@types/node': 22.19.15 + + '@types/istanbul-lib-coverage@2.0.6': {} + + '@types/istanbul-lib-report@3.0.3': + dependencies: + '@types/istanbul-lib-coverage': 2.0.6 + + '@types/istanbul-reports@3.0.4': + dependencies: + '@types/istanbul-lib-report': 3.0.3 + + '@types/jest@30.0.0': + dependencies: + expect: 30.2.0 + pretty-format: 30.2.0 + + '@types/json-schema@7.0.15': {} + + '@types/jsonfile@6.1.4': + dependencies: + '@types/node': 22.19.15 + + '@types/lodash@4.17.24': {} + + '@types/minimatch@3.0.5': {} + + '@types/minimist@1.2.5': {} + + '@types/node@22.19.15': + dependencies: + undici-types: 6.21.0 + + '@types/normalize-package-data@2.4.4': {} + + '@types/stack-utils@2.0.3': {} + + '@types/tmp@0.2.6': {} + + '@types/yargs-parser@21.0.3': {} + + '@types/yargs@17.0.35': + dependencies: + '@types/yargs-parser': 21.0.3 + + '@ungap/structured-clone@1.3.0': {} + + '@unrs/resolver-binding-android-arm-eabi@1.11.1': + optional: true + + '@unrs/resolver-binding-android-arm64@1.11.1': + optional: true + + '@unrs/resolver-binding-darwin-arm64@1.11.1': + optional: true 
+ + '@unrs/resolver-binding-darwin-x64@1.11.1': + optional: true + + '@unrs/resolver-binding-freebsd-x64@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-arm-gnueabihf@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-arm-musleabihf@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-arm64-gnu@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-arm64-musl@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-ppc64-gnu@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-riscv64-gnu@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-riscv64-musl@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-s390x-gnu@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-x64-gnu@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-x64-musl@1.11.1': + optional: true + + '@unrs/resolver-binding-wasm32-wasi@1.11.1': + dependencies: + '@napi-rs/wasm-runtime': 0.2.12 + optional: true + + '@unrs/resolver-binding-win32-arm64-msvc@1.11.1': + optional: true + + '@unrs/resolver-binding-win32-ia32-msvc@1.11.1': + optional: true + + '@unrs/resolver-binding-win32-x64-msvc@1.11.1': + optional: true + + '@webassemblyjs/ast@1.14.1': + dependencies: + '@webassemblyjs/helper-numbers': 1.13.2 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + + '@webassemblyjs/floating-point-hex-parser@1.13.2': {} + + '@webassemblyjs/helper-api-error@1.13.2': {} + + '@webassemblyjs/helper-buffer@1.14.1': {} + + '@webassemblyjs/helper-numbers@1.13.2': + dependencies: + '@webassemblyjs/floating-point-hex-parser': 1.13.2 + '@webassemblyjs/helper-api-error': 1.13.2 + '@xtuc/long': 4.2.2 + + '@webassemblyjs/helper-wasm-bytecode@1.13.2': {} + + '@webassemblyjs/helper-wasm-section@1.14.1': + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-buffer': 1.14.1 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + '@webassemblyjs/wasm-gen': 1.14.1 + + '@webassemblyjs/ieee754@1.13.2': + dependencies: + '@xtuc/ieee754': 1.2.0 + + 
'@webassemblyjs/leb128@1.13.2': + dependencies: + '@xtuc/long': 4.2.2 + + '@webassemblyjs/utf8@1.13.2': {} + + '@webassemblyjs/wasm-edit@1.14.1': + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-buffer': 1.14.1 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + '@webassemblyjs/helper-wasm-section': 1.14.1 + '@webassemblyjs/wasm-gen': 1.14.1 + '@webassemblyjs/wasm-opt': 1.14.1 + '@webassemblyjs/wasm-parser': 1.14.1 + '@webassemblyjs/wast-printer': 1.14.1 + + '@webassemblyjs/wasm-gen@1.14.1': + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + '@webassemblyjs/ieee754': 1.13.2 + '@webassemblyjs/leb128': 1.13.2 + '@webassemblyjs/utf8': 1.13.2 + + '@webassemblyjs/wasm-opt@1.14.1': + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-buffer': 1.14.1 + '@webassemblyjs/wasm-gen': 1.14.1 + '@webassemblyjs/wasm-parser': 1.14.1 + + '@webassemblyjs/wasm-parser@1.14.1': + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-api-error': 1.13.2 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + '@webassemblyjs/ieee754': 1.13.2 + '@webassemblyjs/leb128': 1.13.2 + '@webassemblyjs/utf8': 1.13.2 + + '@webassemblyjs/wast-printer@1.14.1': + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@xtuc/long': 4.2.2 + + '@webpack-cli/configtest@2.1.1(webpack-cli@5.1.4)(webpack@5.105.4)': + dependencies: + webpack: 5.105.4(esbuild@0.20.2)(webpack-cli@5.1.4) + webpack-cli: 5.1.4(webpack@5.105.4) + + '@webpack-cli/info@2.0.2(webpack-cli@5.1.4)(webpack@5.105.4)': + dependencies: + webpack: 5.105.4(esbuild@0.20.2)(webpack-cli@5.1.4) + webpack-cli: 5.1.4(webpack@5.105.4) + + '@webpack-cli/serve@2.0.5(webpack-cli@5.1.4)(webpack@5.105.4)': + dependencies: + webpack: 5.105.4(esbuild@0.20.2)(webpack-cli@5.1.4) + webpack-cli: 5.1.4(webpack@5.105.4) + + '@xtuc/ieee754@1.2.0': {} + + '@xtuc/long@4.2.2': {} + + '@yarnpkg/fslib@2.10.4': + dependencies: + '@yarnpkg/libzip': 2.3.0 + tslib: 1.14.1 + + 
'@yarnpkg/libzip@2.3.0': + dependencies: + '@types/emscripten': 1.41.5 + tslib: 1.14.1 + + '@yarnpkg/lockfile@1.1.0': {} + + '@yarnpkg/parsers@3.0.2': + dependencies: + js-yaml: 3.14.2 + tslib: 2.8.1 + + '@zkochan/js-yaml@0.0.7': + dependencies: + argparse: 2.0.1 + + JSONStream@1.3.5: + dependencies: + jsonparse: 1.3.1 + through: 2.3.8 + + abbrev@2.0.0: {} + + acorn-import-phases@1.0.4(acorn@8.16.0): + dependencies: + acorn: 8.16.0 + + acorn@8.16.0: {} + + add-stream@1.0.0: {} + + agent-base@7.1.4: {} + + aggregate-error@3.1.0: + dependencies: + clean-stack: 2.2.0 + indent-string: 4.0.0 + + ajv-formats@2.1.1(ajv@8.18.0): + optionalDependencies: + ajv: 8.18.0 + + ajv-keywords@5.1.0(ajv@8.18.0): + dependencies: + ajv: 8.18.0 + fast-deep-equal: 3.1.3 + + ajv@8.18.0: + dependencies: + fast-deep-equal: 3.1.3 + fast-uri: 3.1.0 + json-schema-traverse: 1.0.0 + require-from-string: 2.0.2 + + ansi-colors@4.1.3: {} + + ansi-escapes@4.3.2: + dependencies: + type-fest: 0.21.3 + + ansi-regex@5.0.1: {} + + ansi-regex@6.2.2: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + ansi-styles@5.2.0: {} + + ansi-styles@6.2.3: {} + + anymatch@3.1.3: + dependencies: + normalize-path: 3.0.0 + picomatch: 2.3.1 + + aproba@2.0.0: {} + + argparse@1.0.10: + dependencies: + sprintf-js: 1.0.3 + + argparse@2.0.1: {} + + array-back@3.1.0: {} + + array-differ@3.0.0: {} + + array-ify@1.0.0: {} + + array-union@2.1.0: {} + + arrify@1.0.1: {} + + arrify@2.0.1: {} + + async@3.2.6: {} + + asynckit@0.4.0: {} + + axios@1.13.6: + dependencies: + follow-redirects: 1.15.11 + form-data: 4.0.5 + proxy-from-env: 1.1.0 + transitivePeerDependencies: + - debug + + babel-jest@30.2.0(@babel/core@7.29.0): + dependencies: + '@babel/core': 7.29.0 + '@jest/transform': 30.2.0 + '@types/babel__core': 7.20.5 + babel-plugin-istanbul: 7.0.1 + babel-preset-jest: 30.2.0(@babel/core@7.29.0) + chalk: 4.1.2 + graceful-fs: 4.2.11 + slash: 3.0.0 + transitivePeerDependencies: + - supports-color + + 
babel-plugin-istanbul@7.0.1: + dependencies: + '@babel/helper-plugin-utils': 7.28.6 + '@istanbuljs/load-nyc-config': 1.1.0 + '@istanbuljs/schema': 0.1.3 + istanbul-lib-instrument: 6.0.3 + test-exclude: 6.0.0 + transitivePeerDependencies: + - supports-color + + babel-plugin-jest-hoist@30.2.0: + dependencies: + '@types/babel__core': 7.20.5 + + babel-preset-current-node-syntax@1.2.0(@babel/core@7.29.0): + dependencies: + '@babel/core': 7.29.0 + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.29.0) + '@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.29.0) + '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.29.0) + '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/core@7.29.0) + '@babel/plugin-syntax-import-attributes': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-syntax-import-meta': 7.10.4(@babel/core@7.29.0) + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.29.0) + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.29.0) + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.29.0) + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.29.0) + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.29.0) + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.29.0) + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.29.0) + '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/core@7.29.0) + '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.29.0) + + babel-preset-jest@30.2.0(@babel/core@7.29.0): + dependencies: + '@babel/core': 7.29.0 + babel-plugin-jest-hoist: 30.2.0 + babel-preset-current-node-syntax: 1.2.0(@babel/core@7.29.0) + + balanced-match@1.0.2: {} + + base64-js@1.5.1: {} + + baseline-browser-mapping@2.10.0: {} + + before-after-hook@2.2.3: {} + + big.js@5.2.2: {} + + bin-links@4.0.4: + dependencies: + cmd-shim: 6.0.3 + npm-normalize-package-bin: 3.0.1 + read-cmd-shim: 4.0.0 + write-file-atomic: 5.0.1 + + 
binary-extensions@2.3.0: {} + + bl@4.1.0: + dependencies: + buffer: 5.7.1 + inherits: 2.0.4 + readable-stream: 3.6.2 + + brace-expansion@1.1.12: + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + brace-expansion@2.0.2: + dependencies: + balanced-match: 1.0.2 + + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + + browserslist@4.28.1: + dependencies: + baseline-browser-mapping: 2.10.0 + caniuse-lite: 1.0.30001777 + electron-to-chromium: 1.5.307 + node-releases: 2.0.36 + update-browserslist-db: 1.2.3(browserslist@4.28.1) + + bs-logger@0.2.6: + dependencies: + fast-json-stable-stringify: 2.1.0 + + bser@2.1.1: + dependencies: + node-int64: 0.4.0 + + buffer-from@1.1.2: {} + + buffer@5.7.1: + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + + byte-size@8.1.1: {} + + cacache@18.0.4: + dependencies: + '@npmcli/fs': 3.1.1 + fs-minipass: 3.0.3 + glob: 10.4.5 + lru-cache: 10.4.3 + minipass: 7.1.3 + minipass-collect: 2.0.1 + minipass-flush: 1.0.5 + minipass-pipeline: 1.2.4 + p-map: 4.0.0 + ssri: 10.0.6 + tar: 6.2.1 + unique-filename: 3.0.0 + + call-bind-apply-helpers@1.0.2: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + + callsites@3.1.0: {} + + camelcase-keys@6.2.2: + dependencies: + camelcase: 5.3.1 + map-obj: 4.3.0 + quick-lru: 4.0.1 + + camelcase@5.3.1: {} + + camelcase@6.3.0: {} + + caniuse-lite@1.0.30001777: {} + + chalk@4.1.0: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + char-regex@1.0.2: {} + + chardet@2.1.1: {} + + chokidar@3.6.0: + dependencies: + anymatch: 3.1.3 + braces: 3.0.3 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.3 + + chownr@2.0.0: {} + + chrome-trace-event@1.0.4: {} + + ci-info@3.9.0: {} + + ci-info@4.4.0: {} + + cjs-module-lexer@2.2.0: {} + + clean-stack@2.2.0: {} + + cli-cursor@3.1.0: + dependencies: + restore-cursor: 3.1.0 + + 
cli-spinners@2.6.1: {} + + cli-spinners@2.9.2: {} + + cli-width@3.0.0: {} + + cliui@7.0.4: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + + clone-deep@4.0.1: + dependencies: + is-plain-object: 2.0.4 + kind-of: 6.0.3 + shallow-clone: 3.0.1 + + clone@1.0.4: {} + + cmd-shim@6.0.3: {} + + co@4.6.0: {} + + collect-v8-coverage@1.0.3: {} + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + color-support@1.1.3: {} + + colorette@2.0.20: {} + + columnify@1.6.0: + dependencies: + strip-ansi: 6.0.1 + wcwidth: 1.0.1 + + combined-stream@1.0.8: + dependencies: + delayed-stream: 1.0.0 + + command-line-args@5.2.1: + dependencies: + array-back: 3.1.0 + find-replace: 3.0.0 + lodash.camelcase: 4.3.0 + typical: 4.0.0 + + commander@10.0.1: {} + + commander@2.20.3: {} + + common-ancestor-path@1.0.1: {} + + compare-func@2.0.0: + dependencies: + array-ify: 1.0.0 + dot-prop: 5.3.0 + + concat-map@0.0.1: {} + + concat-stream@2.0.0: + dependencies: + buffer-from: 1.1.2 + inherits: 2.0.4 + readable-stream: 3.6.2 + typedarray: 0.0.6 + + console-control-strings@1.1.0: {} + + conventional-changelog-angular@7.0.0: + dependencies: + compare-func: 2.0.0 + + conventional-changelog-core@5.0.1: + dependencies: + add-stream: 1.0.0 + conventional-changelog-writer: 6.0.1 + conventional-commits-parser: 4.0.0 + dateformat: 3.0.3 + get-pkg-repo: 4.2.1 + git-raw-commits: 3.0.0 + git-remote-origin-url: 2.0.0 + git-semver-tags: 5.0.1 + normalize-package-data: 3.0.3 + read-pkg: 3.0.0 + read-pkg-up: 3.0.0 + + conventional-changelog-preset-loader@3.0.0: {} + + conventional-changelog-writer@6.0.1: + dependencies: + conventional-commits-filter: 3.0.0 + dateformat: 3.0.3 + handlebars: 4.7.8 + json-stringify-safe: 5.0.1 + meow: 8.1.2 + semver: 7.7.4 + split: 1.0.1 + + conventional-commits-filter@3.0.0: + dependencies: + lodash.ismatch: 4.4.0 + modify-values: 
1.0.1 + + conventional-commits-parser@4.0.0: + dependencies: + JSONStream: 1.3.5 + is-text-path: 1.0.1 + meow: 8.1.2 + split2: 3.2.2 + + conventional-recommended-bump@7.0.1: + dependencies: + concat-stream: 2.0.0 + conventional-changelog-preset-loader: 3.0.0 + conventional-commits-filter: 3.0.0 + conventional-commits-parser: 4.0.0 + git-raw-commits: 3.0.0 + git-semver-tags: 5.0.1 + meow: 8.1.2 + + convert-source-map@2.0.0: {} + + copy-webpack-plugin@12.0.2(webpack@5.105.4): + dependencies: + fast-glob: 3.3.3 + glob-parent: 6.0.2 + globby: 14.1.0 + normalize-path: 3.0.0 + schema-utils: 4.3.3 + serialize-javascript: 6.0.2 + webpack: 5.105.4(esbuild@0.20.2)(webpack-cli@5.1.4) + + core-util-is@1.0.3: {} + + cosmiconfig@9.0.0(typescript@5.5.4): + dependencies: + env-paths: 2.2.1 + import-fresh: 3.3.1 + js-yaml: 4.1.0 + parse-json: 5.2.0 + optionalDependencies: + typescript: 5.5.4 + + cross-spawn@6.0.6: + dependencies: + nice-try: 1.0.5 + path-key: 2.0.1 + semver: 5.7.2 + shebang-command: 1.2.0 + which: 1.3.1 + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + cssesc@3.0.0: {} + + dargs@7.0.0: {} + + dateformat@3.0.3: {} + + debug@4.4.3: + dependencies: + ms: 2.1.3 + + decamelize-keys@1.1.1: + dependencies: + decamelize: 1.2.0 + map-obj: 1.0.1 + + decamelize@1.2.0: {} + + dedent@1.5.3: {} + + dedent@1.7.2: {} + + deepmerge@4.3.1: {} + + defaults@1.0.4: + dependencies: + clone: 1.0.4 + + define-lazy-prop@2.0.0: {} + + delayed-stream@1.0.0: {} + + deprecation@2.3.1: {} + + detect-indent@5.0.0: {} + + detect-newline@3.1.0: {} + + diff-sequences@29.6.3: {} + + dot-prop@5.3.0: + dependencies: + is-obj: 2.0.0 + + dotenv-expand@11.0.7: + dependencies: + dotenv: 16.4.7 + + dotenv@16.4.7: {} + + dunder-proto@1.0.1: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + + eastasianwidth@0.2.0: {} + + ejs@3.1.10: + dependencies: + jake: 10.9.4 + + electron-to-chromium@1.5.307: {} + + emittery@0.13.1: {} + 
+ emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + + emojis-list@3.0.0: {} + + encoding@0.1.13: + dependencies: + iconv-lite: 0.6.3 + optional: true + + end-of-stream@1.4.5: + dependencies: + once: 1.4.0 + + enhanced-resolve@5.20.0: + dependencies: + graceful-fs: 4.2.11 + tapable: 2.3.0 + + enquirer@2.3.6: + dependencies: + ansi-colors: 4.1.3 + + env-paths@2.2.1: {} + + envinfo@7.13.0: {} + + envinfo@7.21.0: {} + + err-code@2.0.3: {} + + error-ex@1.3.4: + dependencies: + is-arrayish: 0.2.1 + + es-define-property@1.0.1: {} + + es-errors@1.3.0: {} + + es-module-lexer@2.0.0: {} + + es-object-atoms@1.1.1: + dependencies: + es-errors: 1.3.0 + + es-set-tostringtag@2.1.0: + dependencies: + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + has-tostringtag: 1.0.2 + hasown: 2.0.2 + + esbuild-loader@4.4.2(webpack@5.105.4): + dependencies: + esbuild: 0.27.3 + get-tsconfig: 4.13.6 + loader-utils: 2.0.4 + webpack: 5.105.4(esbuild@0.20.2)(webpack-cli@5.1.4) + webpack-sources: 1.4.3 + + esbuild@0.20.2: + optionalDependencies: + '@esbuild/aix-ppc64': 0.20.2 + '@esbuild/android-arm': 0.20.2 + '@esbuild/android-arm64': 0.20.2 + '@esbuild/android-x64': 0.20.2 + '@esbuild/darwin-arm64': 0.20.2 + '@esbuild/darwin-x64': 0.20.2 + '@esbuild/freebsd-arm64': 0.20.2 + '@esbuild/freebsd-x64': 0.20.2 + '@esbuild/linux-arm': 0.20.2 + '@esbuild/linux-arm64': 0.20.2 + '@esbuild/linux-ia32': 0.20.2 + '@esbuild/linux-loong64': 0.20.2 + '@esbuild/linux-mips64el': 0.20.2 + '@esbuild/linux-ppc64': 0.20.2 + '@esbuild/linux-riscv64': 0.20.2 + '@esbuild/linux-s390x': 0.20.2 + '@esbuild/linux-x64': 0.20.2 + '@esbuild/netbsd-x64': 0.20.2 + '@esbuild/openbsd-x64': 0.20.2 + '@esbuild/sunos-x64': 0.20.2 + '@esbuild/win32-arm64': 0.20.2 + '@esbuild/win32-ia32': 0.20.2 + '@esbuild/win32-x64': 0.20.2 + + esbuild@0.27.3: + optionalDependencies: + '@esbuild/aix-ppc64': 0.27.3 + '@esbuild/android-arm': 0.27.3 + '@esbuild/android-arm64': 0.27.3 + '@esbuild/android-x64': 0.27.3 + '@esbuild/darwin-arm64': 0.27.3 + 
'@esbuild/darwin-x64': 0.27.3 + '@esbuild/freebsd-arm64': 0.27.3 + '@esbuild/freebsd-x64': 0.27.3 + '@esbuild/linux-arm': 0.27.3 + '@esbuild/linux-arm64': 0.27.3 + '@esbuild/linux-ia32': 0.27.3 + '@esbuild/linux-loong64': 0.27.3 + '@esbuild/linux-mips64el': 0.27.3 + '@esbuild/linux-ppc64': 0.27.3 + '@esbuild/linux-riscv64': 0.27.3 + '@esbuild/linux-s390x': 0.27.3 + '@esbuild/linux-x64': 0.27.3 + '@esbuild/netbsd-arm64': 0.27.3 + '@esbuild/netbsd-x64': 0.27.3 + '@esbuild/openbsd-arm64': 0.27.3 + '@esbuild/openbsd-x64': 0.27.3 + '@esbuild/openharmony-arm64': 0.27.3 + '@esbuild/sunos-x64': 0.27.3 + '@esbuild/win32-arm64': 0.27.3 + '@esbuild/win32-ia32': 0.27.3 + '@esbuild/win32-x64': 0.27.3 + + escalade@3.2.0: {} + + escape-string-regexp@1.0.5: {} + + escape-string-regexp@2.0.0: {} + + eslint-scope@5.1.1: + dependencies: + esrecurse: 4.3.0 + estraverse: 4.3.0 + + esprima@4.0.1: {} + + esrecurse@4.3.0: + dependencies: + estraverse: 5.3.0 + + estraverse@4.3.0: {} + + estraverse@5.3.0: {} + + eventemitter3@4.0.7: {} + + events@3.3.0: {} + + execa@1.0.0: + dependencies: + cross-spawn: 6.0.6 + get-stream: 4.1.0 + is-stream: 1.1.0 + npm-run-path: 2.0.2 + p-finally: 1.0.0 + signal-exit: 3.0.7 + strip-eof: 1.0.0 + + execa@5.0.0: + dependencies: + cross-spawn: 7.0.6 + get-stream: 6.0.0 + human-signals: 2.1.0 + is-stream: 2.0.0 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + + execa@5.1.1: + dependencies: + cross-spawn: 7.0.6 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + + exit-x@0.2.2: {} + + expect@30.2.0: + dependencies: + '@jest/expect-utils': 30.2.0 + '@jest/get-type': 30.1.0 + jest-matcher-utils: 30.2.0 + jest-message-util: 30.2.0 + jest-mock: 30.2.0 + jest-util: 30.2.0 + + exponential-backoff@3.1.3: {} + + fast-deep-equal@3.1.3: {} + + fast-glob@3.3.3: + dependencies: + 
'@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.8 + + fast-json-stable-stringify@2.1.0: {} + + fast-uri@3.1.0: {} + + fastest-levenshtein@1.0.16: {} + + fastq@1.20.1: + dependencies: + reusify: 1.1.0 + + fb-watchman@2.0.2: + dependencies: + bser: 2.1.1 + + fdir@6.5.0(picomatch@4.0.3): + optionalDependencies: + picomatch: 4.0.3 + + figures@3.2.0: + dependencies: + escape-string-regexp: 1.0.5 + + filelist@1.0.6: + dependencies: + minimatch: 5.1.9 + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + + find-replace@3.0.0: + dependencies: + array-back: 3.1.0 + + find-up@2.1.0: + dependencies: + locate-path: 2.0.0 + + find-up@4.1.0: + dependencies: + locate-path: 5.0.0 + path-exists: 4.0.0 + + flat@5.0.2: {} + + follow-redirects@1.15.11: {} + + foreground-child@3.3.1: + dependencies: + cross-spawn: 7.0.6 + signal-exit: 4.1.0 + + form-data@4.0.5: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + es-set-tostringtag: 2.1.0 + hasown: 2.0.2 + mime-types: 2.1.35 + + front-matter@4.0.2: + dependencies: + js-yaml: 3.14.2 + + fs-constants@1.0.0: {} + + fs-extra@11.3.4: + dependencies: + graceful-fs: 4.2.11 + jsonfile: 6.2.0 + universalify: 2.0.1 + + fs-minipass@2.1.0: + dependencies: + minipass: 3.3.6 + + fs-minipass@3.0.3: + dependencies: + minipass: 7.1.3 + + fs.realpath@1.0.0: {} + + fsevents@2.3.3: + optional: true + + function-bind@1.1.2: {} + + gensync@1.0.0-beta.2: {} + + get-caller-file@2.0.5: {} + + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + + get-package-type@0.1.0: {} + + get-pkg-repo@4.2.1: + dependencies: + '@hutson/parse-repository-url': 3.0.2 + hosted-git-info: 4.1.0 + through2: 2.0.5 + yargs: 16.2.0 + + get-port@5.1.1: {} + + get-proto@1.0.1: + dependencies: + 
dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + + get-stream@4.1.0: + dependencies: + pump: 3.0.4 + + get-stream@6.0.0: {} + + get-stream@6.0.1: {} + + get-tsconfig@4.13.6: + dependencies: + resolve-pkg-maps: 1.0.0 + + git-raw-commits@3.0.0: + dependencies: + dargs: 7.0.0 + meow: 8.1.2 + split2: 3.2.2 + + git-remote-origin-url@2.0.0: + dependencies: + gitconfiglocal: 1.0.0 + pify: 2.3.0 + + git-semver-tags@5.0.1: + dependencies: + meow: 8.1.2 + semver: 7.7.4 + + git-up@7.0.0: + dependencies: + is-ssh: 1.4.1 + parse-url: 8.1.0 + + git-url-parse@14.0.0: + dependencies: + git-up: 7.0.0 + + gitconfiglocal@1.0.0: + dependencies: + ini: 1.3.8 + + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + glob-parent@6.0.2: + dependencies: + is-glob: 4.0.3 + + glob-to-regexp@0.4.1: {} + + glob@10.4.5: + dependencies: + foreground-child: 3.3.1 + jackspeak: 3.4.3 + minimatch: 9.0.9 + minipass: 7.1.3 + package-json-from-dist: 1.0.1 + path-scurry: 1.11.1 + + glob@7.2.3: + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.5 + once: 1.4.0 + path-is-absolute: 1.0.1 + + glob@9.3.4: + dependencies: + fs.realpath: 1.0.0 + minimatch: 8.0.7 + minipass: 4.2.8 + path-scurry: 1.11.1 + + globby@14.1.0: + dependencies: + '@sindresorhus/merge-streams': 2.3.0 + fast-glob: 3.3.3 + ignore: 7.0.5 + path-type: 6.0.0 + slash: 5.1.0 + unicorn-magic: 0.3.0 + + gopd@1.2.0: {} + + graceful-fs@4.2.11: {} + + handlebars@4.7.8: + dependencies: + minimist: 1.2.8 + neo-async: 2.6.2 + source-map: 0.6.1 + wordwrap: 1.0.0 + optionalDependencies: + uglify-js: 3.19.3 + + hard-rejection@2.1.0: {} + + has-flag@4.0.0: {} + + has-symbols@1.1.0: {} + + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.1.0 + + has-unicode@2.0.1: {} + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + hosted-git-info@2.8.9: {} + + hosted-git-info@4.1.0: + dependencies: + lru-cache: 6.0.0 + + hosted-git-info@7.0.2: + dependencies: + lru-cache: 10.4.3 + + html-escaper@2.0.2: {} + + 
http-cache-semantics@4.2.0: {} + + http-proxy-agent@7.0.2: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + https-proxy-agent@7.0.6: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + human-signals@2.1.0: {} + + iconv-lite@0.6.3: + dependencies: + safer-buffer: 2.1.2 + optional: true + + iconv-lite@0.7.2: + dependencies: + safer-buffer: 2.1.2 + + ieee754@1.2.1: {} + + ignore-walk@6.0.5: + dependencies: + minimatch: 9.0.9 + + ignore@5.3.2: {} + + ignore@7.0.5: {} + + import-fresh@3.3.1: + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + + import-local@3.1.0: + dependencies: + pkg-dir: 4.2.0 + resolve-cwd: 3.0.0 + + import-local@3.2.0: + dependencies: + pkg-dir: 4.2.0 + resolve-cwd: 3.0.0 + + imurmurhash@0.1.4: {} + + indent-string@4.0.0: {} + + inflight@1.0.6: + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + + inherits@2.0.4: {} + + ini@1.3.8: {} + + ini@4.1.3: {} + + init-package-json@6.0.3: + dependencies: + '@npmcli/package-json': 5.2.0 + npm-package-arg: 11.0.2 + promzard: 1.0.2 + read: 3.0.1 + semver: 7.7.4 + validate-npm-package-license: 3.0.4 + validate-npm-package-name: 5.0.1 + transitivePeerDependencies: + - bluebird + + inquirer@8.2.7(@types/node@22.19.15): + dependencies: + '@inquirer/external-editor': 1.0.3(@types/node@22.19.15) + ansi-escapes: 4.3.2 + chalk: 4.1.2 + cli-cursor: 3.1.0 + cli-width: 3.0.0 + figures: 3.2.0 + lodash: 4.17.23 + mute-stream: 0.0.8 + ora: 5.4.1 + run-async: 2.4.1 + rxjs: 7.8.2 + string-width: 4.2.3 + strip-ansi: 6.0.1 + through: 2.3.8 + wrap-ansi: 6.2.0 + transitivePeerDependencies: + - '@types/node' + + interpret@1.4.0: {} + + interpret@3.1.1: {} + + ip-address@10.1.0: {} + + is-arrayish@0.2.1: {} + + is-binary-path@2.1.0: + dependencies: + binary-extensions: 2.3.0 + + is-ci@3.0.1: + dependencies: + ci-info: 3.9.0 + + is-core-module@2.16.1: + dependencies: + hasown: 2.0.2 + + is-docker@2.2.1: {} + + 
is-extglob@2.1.1: {} + + is-fullwidth-code-point@3.0.0: {} + + is-generator-fn@2.1.0: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-interactive@1.0.0: {} + + is-lambda@1.0.1: {} + + is-number@7.0.0: {} + + is-obj@2.0.0: {} + + is-plain-obj@1.1.0: {} + + is-plain-object@2.0.4: + dependencies: + isobject: 3.0.1 + + is-ssh@1.4.1: + dependencies: + protocols: 2.0.2 + + is-stream@1.1.0: {} + + is-stream@2.0.0: {} + + is-stream@2.0.1: {} + + is-text-path@1.0.1: + dependencies: + text-extensions: 1.9.0 + + is-unicode-supported@0.1.0: {} + + is-wsl@2.2.0: + dependencies: + is-docker: 2.2.1 + + isarray@1.0.0: {} + + isexe@2.0.0: {} + + isexe@3.1.5: {} + + isobject@3.0.1: {} + + istanbul-lib-coverage@3.2.2: {} + + istanbul-lib-instrument@6.0.3: + dependencies: + '@babel/core': 7.29.0 + '@babel/parser': 7.29.0 + '@istanbuljs/schema': 0.1.3 + istanbul-lib-coverage: 3.2.2 + semver: 7.7.4 + transitivePeerDependencies: + - supports-color + + istanbul-lib-report@3.0.1: + dependencies: + istanbul-lib-coverage: 3.2.2 + make-dir: 4.0.0 + supports-color: 7.2.0 + + istanbul-lib-source-maps@5.0.6: + dependencies: + '@jridgewell/trace-mapping': 0.3.31 + debug: 4.4.3 + istanbul-lib-coverage: 3.2.2 + transitivePeerDependencies: + - supports-color + + istanbul-reports@3.2.0: + dependencies: + html-escaper: 2.0.2 + istanbul-lib-report: 3.0.1 + + jackspeak@3.4.3: + dependencies: + '@isaacs/cliui': 8.0.2 + optionalDependencies: + '@pkgjs/parseargs': 0.11.0 + + jake@10.9.4: + dependencies: + async: 3.2.6 + filelist: 1.0.6 + picocolors: 1.1.1 + + jest-changed-files@30.2.0: + dependencies: + execa: 5.1.1 + jest-util: 30.2.0 + p-limit: 3.1.0 + + jest-circus@30.2.0: + dependencies: + '@jest/environment': 30.2.0 + '@jest/expect': 30.2.0 + '@jest/test-result': 30.2.0 + '@jest/types': 30.2.0 + '@types/node': 22.19.15 + chalk: 4.1.2 + co: 4.6.0 + dedent: 1.7.2 + is-generator-fn: 2.1.0 + jest-each: 30.2.0 + jest-matcher-utils: 30.2.0 + jest-message-util: 30.2.0 + jest-runtime: 30.2.0 
+ jest-snapshot: 30.2.0 + jest-util: 30.2.0 + p-limit: 3.1.0 + pretty-format: 30.2.0 + pure-rand: 7.0.1 + slash: 3.0.0 + stack-utils: 2.0.6 + transitivePeerDependencies: + - babel-plugin-macros + - supports-color + + jest-cli@30.2.0(@types/node@22.19.15): + dependencies: + '@jest/core': 30.2.0 + '@jest/test-result': 30.2.0 + '@jest/types': 30.2.0 + chalk: 4.1.2 + exit-x: 0.2.2 + import-local: 3.2.0 + jest-config: 30.2.0(@types/node@22.19.15) + jest-util: 30.2.0 + jest-validate: 30.2.0 + yargs: 17.7.2 + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - esbuild-register + - supports-color + - ts-node + + jest-config@30.2.0(@types/node@22.19.15): + dependencies: + '@babel/core': 7.29.0 + '@jest/get-type': 30.1.0 + '@jest/pattern': 30.0.1 + '@jest/test-sequencer': 30.2.0 + '@jest/types': 30.2.0 + babel-jest: 30.2.0(@babel/core@7.29.0) + chalk: 4.1.2 + ci-info: 4.4.0 + deepmerge: 4.3.1 + glob: 10.4.5 + graceful-fs: 4.2.11 + jest-circus: 30.2.0 + jest-docblock: 30.2.0 + jest-environment-node: 30.2.0 + jest-regex-util: 30.0.1 + jest-resolve: 30.2.0 + jest-runner: 30.2.0 + jest-util: 30.2.0 + jest-validate: 30.2.0 + micromatch: 4.0.8 + parse-json: 5.2.0 + pretty-format: 30.2.0 + slash: 3.0.0 + strip-json-comments: 3.1.1 + optionalDependencies: + '@types/node': 22.19.15 + transitivePeerDependencies: + - babel-plugin-macros + - supports-color + + jest-diff@29.7.0: + dependencies: + chalk: 4.1.2 + diff-sequences: 29.6.3 + jest-get-type: 29.6.3 + pretty-format: 29.7.0 + + jest-diff@30.2.0: + dependencies: + '@jest/diff-sequences': 30.0.1 + '@jest/get-type': 30.1.0 + chalk: 4.1.2 + pretty-format: 30.2.0 + + jest-docblock@30.2.0: + dependencies: + detect-newline: 3.1.0 + + jest-each@30.2.0: + dependencies: + '@jest/get-type': 30.1.0 + '@jest/types': 30.2.0 + chalk: 4.1.2 + jest-util: 30.2.0 + pretty-format: 30.2.0 + + jest-environment-node@30.2.0: + dependencies: + '@jest/environment': 30.2.0 + '@jest/fake-timers': 30.2.0 + '@jest/types': 30.2.0 + 
'@types/node': 22.19.15 + jest-mock: 30.2.0 + jest-util: 30.2.0 + jest-validate: 30.2.0 + + jest-get-type@29.6.3: {} + + jest-haste-map@30.2.0: + dependencies: + '@jest/types': 30.2.0 + '@types/node': 22.19.15 + anymatch: 3.1.3 + fb-watchman: 2.0.2 + graceful-fs: 4.2.11 + jest-regex-util: 30.0.1 + jest-util: 30.2.0 + jest-worker: 30.2.0 + micromatch: 4.0.8 + walker: 1.0.8 + optionalDependencies: + fsevents: 2.3.3 + + jest-junit@16.0.0: + dependencies: + mkdirp: 1.0.4 + strip-ansi: 6.0.1 + uuid: 8.3.2 + xml: 1.0.1 + + jest-leak-detector@30.2.0: + dependencies: + '@jest/get-type': 30.1.0 + pretty-format: 30.2.0 + + jest-matcher-utils@30.2.0: + dependencies: + '@jest/get-type': 30.1.0 + chalk: 4.1.2 + jest-diff: 30.2.0 + pretty-format: 30.2.0 + + jest-message-util@30.2.0: + dependencies: + '@babel/code-frame': 7.29.0 + '@jest/types': 30.2.0 + '@types/stack-utils': 2.0.3 + chalk: 4.1.2 + graceful-fs: 4.2.11 + micromatch: 4.0.8 + pretty-format: 30.2.0 + slash: 3.0.0 + stack-utils: 2.0.6 + + jest-mock@30.2.0: + dependencies: + '@jest/types': 30.2.0 + '@types/node': 22.19.15 + jest-util: 30.2.0 + + jest-pnp-resolver@1.2.3(jest-resolve@30.2.0): + optionalDependencies: + jest-resolve: 30.2.0 + + jest-regex-util@30.0.1: {} + + jest-resolve-dependencies@30.2.0: + dependencies: + jest-regex-util: 30.0.1 + jest-snapshot: 30.2.0 + transitivePeerDependencies: + - supports-color + + jest-resolve@30.2.0: + dependencies: + chalk: 4.1.2 + graceful-fs: 4.2.11 + jest-haste-map: 30.2.0 + jest-pnp-resolver: 1.2.3(jest-resolve@30.2.0) + jest-util: 30.2.0 + jest-validate: 30.2.0 + slash: 3.0.0 + unrs-resolver: 1.11.1 + + jest-runner@30.2.0: + dependencies: + '@jest/console': 30.2.0 + '@jest/environment': 30.2.0 + '@jest/test-result': 30.2.0 + '@jest/transform': 30.2.0 + '@jest/types': 30.2.0 + '@types/node': 22.19.15 + chalk: 4.1.2 + emittery: 0.13.1 + exit-x: 0.2.2 + graceful-fs: 4.2.11 + jest-docblock: 30.2.0 + jest-environment-node: 30.2.0 + jest-haste-map: 30.2.0 + jest-leak-detector: 
30.2.0 + jest-message-util: 30.2.0 + jest-resolve: 30.2.0 + jest-runtime: 30.2.0 + jest-util: 30.2.0 + jest-watcher: 30.2.0 + jest-worker: 30.2.0 + p-limit: 3.1.0 + source-map-support: 0.5.13 + transitivePeerDependencies: + - supports-color + + jest-runtime@30.2.0: + dependencies: + '@jest/environment': 30.2.0 + '@jest/fake-timers': 30.2.0 + '@jest/globals': 30.2.0 + '@jest/source-map': 30.0.1 + '@jest/test-result': 30.2.0 + '@jest/transform': 30.2.0 + '@jest/types': 30.2.0 + '@types/node': 22.19.15 + chalk: 4.1.2 + cjs-module-lexer: 2.2.0 + collect-v8-coverage: 1.0.3 + glob: 10.4.5 + graceful-fs: 4.2.11 + jest-haste-map: 30.2.0 + jest-message-util: 30.2.0 + jest-mock: 30.2.0 + jest-regex-util: 30.0.1 + jest-resolve: 30.2.0 + jest-snapshot: 30.2.0 + jest-util: 30.2.0 + slash: 3.0.0 + strip-bom: 4.0.0 + transitivePeerDependencies: + - supports-color + + jest-snapshot@30.2.0: + dependencies: + '@babel/core': 7.29.0 + '@babel/generator': 7.29.1 + '@babel/plugin-syntax-jsx': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-syntax-typescript': 7.28.6(@babel/core@7.29.0) + '@babel/types': 7.29.0 + '@jest/expect-utils': 30.2.0 + '@jest/get-type': 30.1.0 + '@jest/snapshot-utils': 30.2.0 + '@jest/transform': 30.2.0 + '@jest/types': 30.2.0 + babel-preset-current-node-syntax: 1.2.0(@babel/core@7.29.0) + chalk: 4.1.2 + expect: 30.2.0 + graceful-fs: 4.2.11 + jest-diff: 30.2.0 + jest-matcher-utils: 30.2.0 + jest-message-util: 30.2.0 + jest-util: 30.2.0 + pretty-format: 30.2.0 + semver: 7.7.4 + synckit: 0.11.12 + transitivePeerDependencies: + - supports-color + + jest-util@30.2.0: + dependencies: + '@jest/types': 30.2.0 + '@types/node': 22.19.15 + chalk: 4.1.2 + ci-info: 4.4.0 + graceful-fs: 4.2.11 + picomatch: 4.0.3 + + jest-validate@30.2.0: + dependencies: + '@jest/get-type': 30.1.0 + '@jest/types': 30.2.0 + camelcase: 6.3.0 + chalk: 4.1.2 + leven: 3.1.0 + pretty-format: 30.2.0 + + jest-watcher@30.2.0: + dependencies: + '@jest/test-result': 30.2.0 + '@jest/types': 30.2.0 + 
'@types/node': 22.19.15 + ansi-escapes: 4.3.2 + chalk: 4.1.2 + emittery: 0.13.1 + jest-util: 30.2.0 + string-length: 4.0.2 + + jest-worker@27.5.1: + dependencies: + '@types/node': 22.19.15 + merge-stream: 2.0.0 + supports-color: 8.1.1 + + jest-worker@30.2.0: + dependencies: + '@types/node': 22.19.15 + '@ungap/structured-clone': 1.3.0 + jest-util: 30.2.0 + merge-stream: 2.0.0 + supports-color: 8.1.1 + + jest@30.2.0(@types/node@22.19.15): + dependencies: + '@jest/core': 30.2.0 + '@jest/types': 30.2.0 + import-local: 3.2.0 + jest-cli: 30.2.0(@types/node@22.19.15) + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - esbuild-register + - supports-color + - ts-node + + js-tokens@4.0.0: {} + + js-yaml@3.14.2: + dependencies: + argparse: 1.0.10 + esprima: 4.0.1 + + js-yaml@4.1.0: + dependencies: + argparse: 2.0.1 + + jsesc@3.1.0: {} + + json-parse-better-errors@1.0.2: {} + + json-parse-even-better-errors@2.3.1: {} + + json-parse-even-better-errors@3.0.2: {} + + json-schema-traverse@1.0.0: {} + + json-stringify-nice@1.1.4: {} + + json-stringify-safe@5.0.1: {} + + json5@2.2.3: {} + + jsonc-parser@3.2.0: {} + + jsonc-parser@3.3.1: {} + + jsonfile@6.2.0: + dependencies: + universalify: 2.0.1 + optionalDependencies: + graceful-fs: 4.2.11 + + jsonparse@1.3.1: {} + + just-diff-apply@5.5.0: {} + + just-diff@6.0.2: {} + + kind-of@6.0.3: {} + + lerna@8.2.4(@types/node@22.19.15)(encoding@0.1.13): + dependencies: + '@lerna/create': 8.2.4(@types/node@22.19.15)(encoding@0.1.13)(typescript@5.5.4) + '@npmcli/arborist': 7.5.4 + '@npmcli/package-json': 5.2.0 + '@npmcli/run-script': 8.1.0 + '@nx/devkit': 20.8.4(nx@20.8.4) + '@octokit/plugin-enterprise-rest': 6.0.1 + '@octokit/rest': 20.1.2 + aproba: 2.0.0 + byte-size: 8.1.1 + chalk: 4.1.0 + clone-deep: 4.0.1 + cmd-shim: 6.0.3 + color-support: 1.1.3 + columnify: 1.6.0 + console-control-strings: 1.1.0 + conventional-changelog-angular: 7.0.0 + conventional-changelog-core: 5.0.1 + conventional-recommended-bump: 7.0.1 + 
cosmiconfig: 9.0.0(typescript@5.5.4) + dedent: 1.5.3 + envinfo: 7.13.0 + execa: 5.0.0 + fs-extra: 11.3.4 + get-port: 5.1.1 + get-stream: 6.0.0 + git-url-parse: 14.0.0 + glob-parent: 6.0.2 + graceful-fs: 4.2.11 + has-unicode: 2.0.1 + import-local: 3.1.0 + ini: 1.3.8 + init-package-json: 6.0.3 + inquirer: 8.2.7(@types/node@22.19.15) + is-ci: 3.0.1 + is-stream: 2.0.0 + jest-diff: 29.7.0 + js-yaml: 4.1.0 + libnpmaccess: 8.0.6 + libnpmpublish: 9.0.9 + load-json-file: 6.2.0 + make-dir: 4.0.0 + minimatch: 3.0.5 + multimatch: 5.0.0 + node-fetch: 2.6.7(encoding@0.1.13) + npm-package-arg: 11.0.2 + npm-packlist: 8.0.2 + npm-registry-fetch: 17.1.0 + nx: 20.8.4 + p-map: 4.0.0 + p-map-series: 2.1.0 + p-pipe: 3.1.0 + p-queue: 6.6.2 + p-reduce: 2.1.0 + p-waterfall: 2.1.1 + pacote: 18.0.6 + pify: 5.0.0 + read-cmd-shim: 4.0.0 + resolve-from: 5.0.0 + rimraf: 4.4.1 + semver: 7.7.4 + set-blocking: 2.0.0 + signal-exit: 3.0.7 + slash: 3.0.0 + ssri: 10.0.6 + string-width: 4.2.3 + tar: 6.2.1 + temp-dir: 1.0.0 + through: 2.3.8 + tinyglobby: 0.2.12 + typescript: 5.5.4 + upath: 2.0.1 + uuid: 10.0.0 + validate-npm-package-license: 3.0.4 + validate-npm-package-name: 5.0.1 + wide-align: 1.1.5 + write-file-atomic: 5.0.1 + write-pkg: 4.0.0 + yargs: 17.7.2 + yargs-parser: 21.1.1 + transitivePeerDependencies: + - '@swc-node/register' + - '@swc/core' + - '@types/node' + - babel-plugin-macros + - bluebird + - debug + - encoding + - supports-color + + leven@3.1.0: {} + + libnpmaccess@8.0.6: + dependencies: + npm-package-arg: 11.0.2 + npm-registry-fetch: 17.1.0 + transitivePeerDependencies: + - supports-color + + libnpmpublish@9.0.9: + dependencies: + ci-info: 4.4.0 + normalize-package-data: 6.0.2 + npm-package-arg: 11.0.2 + npm-registry-fetch: 17.1.0 + proc-log: 4.2.0 + semver: 7.7.4 + sigstore: 2.3.1 + ssri: 10.0.6 + transitivePeerDependencies: + - supports-color + + lines-and-columns@1.2.4: {} + + lines-and-columns@2.0.3: {} + + load-json-file@4.0.0: + dependencies: + graceful-fs: 4.2.11 + 
parse-json: 4.0.0 + pify: 3.0.0 + strip-bom: 3.0.0 + + load-json-file@6.2.0: + dependencies: + graceful-fs: 4.2.11 + parse-json: 5.2.0 + strip-bom: 4.0.0 + type-fest: 0.6.0 + + loader-runner@4.3.1: {} + + loader-utils@2.0.4: + dependencies: + big.js: 5.2.2 + emojis-list: 3.0.0 + json5: 2.2.3 + + locate-path@2.0.0: + dependencies: + p-locate: 2.0.0 + path-exists: 3.0.0 + + locate-path@5.0.0: + dependencies: + p-locate: 4.1.0 + + lodash.camelcase@4.3.0: {} + + lodash.ismatch@4.4.0: {} + + lodash.memoize@4.1.2: {} + + lodash@4.17.23: {} + + log-symbols@4.1.0: + dependencies: + chalk: 4.1.2 + is-unicode-supported: 0.1.0 + + lru-cache@10.4.3: {} + + lru-cache@5.1.1: + dependencies: + yallist: 3.1.1 + + lru-cache@6.0.0: + dependencies: + yallist: 4.0.0 + + make-dir@2.1.0: + dependencies: + pify: 4.0.1 + semver: 5.7.2 + + make-dir@4.0.0: + dependencies: + semver: 7.7.4 + + make-error@1.3.6: {} + + make-fetch-happen@13.0.1: + dependencies: + '@npmcli/agent': 2.2.2 + cacache: 18.0.4 + http-cache-semantics: 4.2.0 + is-lambda: 1.0.1 + minipass: 7.1.3 + minipass-fetch: 3.0.5 + minipass-flush: 1.0.5 + minipass-pipeline: 1.2.4 + negotiator: 0.6.4 + proc-log: 4.2.0 + promise-retry: 2.0.1 + ssri: 10.0.6 + transitivePeerDependencies: + - supports-color + + makeerror@1.0.12: + dependencies: + tmpl: 1.0.5 + + map-obj@1.0.1: {} + + map-obj@4.3.0: {} + + math-intrinsics@1.1.0: {} + + meow@8.1.2: + dependencies: + '@types/minimist': 1.2.5 + camelcase-keys: 6.2.2 + decamelize-keys: 1.1.1 + hard-rejection: 2.1.0 + minimist-options: 4.1.0 + normalize-package-data: 3.0.3 + read-pkg-up: 7.0.1 + redent: 3.0.0 + trim-newlines: 3.0.1 + type-fest: 0.18.1 + yargs-parser: 20.2.9 + + merge-stream@2.0.0: {} + + merge2@1.4.1: {} + + micromatch@4.0.8: + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 + + mime-db@1.52.0: {} + + mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + + mimic-fn@2.1.0: {} + + min-indent@1.0.1: {} + + minimatch@3.0.5: + dependencies: + brace-expansion: 1.1.12 + + 
minimatch@3.1.5: + dependencies: + brace-expansion: 1.1.12 + + minimatch@5.1.9: + dependencies: + brace-expansion: 2.0.2 + + minimatch@8.0.7: + dependencies: + brace-expansion: 2.0.2 + + minimatch@9.0.3: + dependencies: + brace-expansion: 2.0.2 + + minimatch@9.0.9: + dependencies: + brace-expansion: 2.0.2 + + minimist-options@4.1.0: + dependencies: + arrify: 1.0.1 + is-plain-obj: 1.1.0 + kind-of: 6.0.3 + + minimist@1.2.8: {} + + minipass-collect@2.0.1: + dependencies: + minipass: 7.1.3 + + minipass-fetch@3.0.5: + dependencies: + minipass: 7.1.3 + minipass-sized: 1.0.3 + minizlib: 2.1.2 + optionalDependencies: + encoding: 0.1.13 + + minipass-flush@1.0.5: + dependencies: + minipass: 3.3.6 + + minipass-pipeline@1.2.4: + dependencies: + minipass: 3.3.6 + + minipass-sized@1.0.3: + dependencies: + minipass: 3.3.6 + + minipass@3.3.6: + dependencies: + yallist: 4.0.0 + + minipass@4.2.8: {} + + minipass@5.0.0: {} + + minipass@7.1.3: {} + + minizlib@2.1.2: + dependencies: + minipass: 3.3.6 + yallist: 4.0.0 + + mkdirp@1.0.4: {} + + modify-values@1.0.1: {} + + ms@2.1.3: {} + + multimatch@5.0.0: + dependencies: + '@types/minimatch': 3.0.5 + array-differ: 3.0.0 + array-union: 2.1.0 + arrify: 2.0.1 + minimatch: 3.0.5 + + mute-stream@0.0.8: {} + + mute-stream@1.0.0: {} + + napi-postinstall@0.3.4: {} + + natural-compare@1.4.0: {} + + negotiator@0.6.4: {} + + neo-async@2.6.2: {} + + nice-try@1.0.5: {} + + node-fetch@2.6.7(encoding@0.1.13): + dependencies: + whatwg-url: 5.0.0 + optionalDependencies: + encoding: 0.1.13 + + node-gyp@10.3.1: + dependencies: + env-paths: 2.2.1 + exponential-backoff: 3.1.3 + glob: 10.4.5 + graceful-fs: 4.2.11 + make-fetch-happen: 13.0.1 + nopt: 7.2.1 + proc-log: 4.2.0 + semver: 7.7.4 + tar: 6.2.1 + which: 4.0.0 + transitivePeerDependencies: + - supports-color + + node-int64@0.4.0: {} + + node-machine-id@1.1.12: {} + + node-releases@2.0.36: {} + + nopt@7.2.1: + dependencies: + abbrev: 2.0.0 + + normalize-package-data@2.5.0: + dependencies: + 
hosted-git-info: 2.8.9 + resolve: 1.22.11 + semver: 5.7.2 + validate-npm-package-license: 3.0.4 + + normalize-package-data@3.0.3: + dependencies: + hosted-git-info: 4.1.0 + is-core-module: 2.16.1 + semver: 7.7.4 + validate-npm-package-license: 3.0.4 + + normalize-package-data@6.0.2: + dependencies: + hosted-git-info: 7.0.2 + semver: 7.7.4 + validate-npm-package-license: 3.0.4 + + normalize-path@3.0.0: {} + + npm-bundled@3.0.1: + dependencies: + npm-normalize-package-bin: 3.0.1 + + npm-install-checks@6.3.0: + dependencies: + semver: 7.7.4 + + npm-normalize-package-bin@3.0.1: {} + + npm-package-arg@11.0.2: + dependencies: + hosted-git-info: 7.0.2 + proc-log: 4.2.0 + semver: 7.7.4 + validate-npm-package-name: 5.0.1 + + npm-packlist@8.0.2: + dependencies: + ignore-walk: 6.0.5 + + npm-pick-manifest@9.1.0: + dependencies: + npm-install-checks: 6.3.0 + npm-normalize-package-bin: 3.0.1 + npm-package-arg: 11.0.2 + semver: 7.7.4 + + npm-registry-fetch@17.1.0: + dependencies: + '@npmcli/redact': 2.0.1 + jsonparse: 1.3.1 + make-fetch-happen: 13.0.1 + minipass: 7.1.3 + minipass-fetch: 3.0.5 + minizlib: 2.1.2 + npm-package-arg: 11.0.2 + proc-log: 4.2.0 + transitivePeerDependencies: + - supports-color + + npm-run-path@2.0.2: + dependencies: + path-key: 2.0.1 + + npm-run-path@4.0.1: + dependencies: + path-key: 3.1.1 + + nx@20.8.4: + dependencies: + '@napi-rs/wasm-runtime': 0.2.4 + '@yarnpkg/lockfile': 1.1.0 + '@yarnpkg/parsers': 3.0.2 + '@zkochan/js-yaml': 0.0.7 + axios: 1.13.6 + chalk: 4.1.2 + cli-cursor: 3.1.0 + cli-spinners: 2.6.1 + cliui: 8.0.1 + dotenv: 16.4.7 + dotenv-expand: 11.0.7 + enquirer: 2.3.6 + figures: 3.2.0 + flat: 5.0.2 + front-matter: 4.0.2 + ignore: 5.3.2 + jest-diff: 29.7.0 + jsonc-parser: 3.2.0 + lines-and-columns: 2.0.3 + minimatch: 9.0.3 + node-machine-id: 1.1.12 + npm-run-path: 4.0.1 + open: 8.4.2 + ora: 5.3.0 + resolve.exports: 2.0.3 + semver: 7.7.4 + string-width: 4.2.3 + tar-stream: 2.2.0 + tmp: 0.2.5 + tsconfig-paths: 4.2.0 + tslib: 2.8.1 + yaml: 2.8.2 
+ yargs: 17.7.2 + yargs-parser: 21.1.1 + optionalDependencies: + '@nx/nx-darwin-arm64': 20.8.4 + '@nx/nx-darwin-x64': 20.8.4 + '@nx/nx-freebsd-x64': 20.8.4 + '@nx/nx-linux-arm-gnueabihf': 20.8.4 + '@nx/nx-linux-arm64-gnu': 20.8.4 + '@nx/nx-linux-arm64-musl': 20.8.4 + '@nx/nx-linux-x64-gnu': 20.8.4 + '@nx/nx-linux-x64-musl': 20.8.4 + '@nx/nx-win32-arm64-msvc': 20.8.4 + '@nx/nx-win32-x64-msvc': 20.8.4 + transitivePeerDependencies: + - debug + + once@1.4.0: + dependencies: + wrappy: 1.0.2 + + onetime@5.1.2: + dependencies: + mimic-fn: 2.1.0 + + open@8.4.2: + dependencies: + define-lazy-prop: 2.0.0 + is-docker: 2.2.1 + is-wsl: 2.2.0 + + ora@5.3.0: + dependencies: + bl: 4.1.0 + chalk: 4.1.2 + cli-cursor: 3.1.0 + cli-spinners: 2.6.1 + is-interactive: 1.0.0 + log-symbols: 4.1.0 + strip-ansi: 6.0.1 + wcwidth: 1.0.1 + + ora@5.4.1: + dependencies: + bl: 4.1.0 + chalk: 4.1.2 + cli-cursor: 3.1.0 + cli-spinners: 2.9.2 + is-interactive: 1.0.0 + is-unicode-supported: 0.1.0 + log-symbols: 4.1.0 + strip-ansi: 6.0.1 + wcwidth: 1.0.1 + + p-finally@1.0.0: {} + + p-limit@1.3.0: + dependencies: + p-try: 1.0.0 + + p-limit@2.3.0: + dependencies: + p-try: 2.2.0 + + p-limit@3.1.0: + dependencies: + yocto-queue: 0.1.0 + + p-locate@2.0.0: + dependencies: + p-limit: 1.3.0 + + p-locate@4.1.0: + dependencies: + p-limit: 2.3.0 + + p-map-series@2.1.0: {} + + p-map@4.0.0: + dependencies: + aggregate-error: 3.1.0 + + p-pipe@3.1.0: {} + + p-queue@6.6.2: + dependencies: + eventemitter3: 4.0.7 + p-timeout: 3.2.0 + + p-reduce@2.1.0: {} + + p-timeout@3.2.0: + dependencies: + p-finally: 1.0.0 + + p-try@1.0.0: {} + + p-try@2.2.0: {} + + p-waterfall@2.1.1: + dependencies: + p-reduce: 2.1.0 + + package-json-from-dist@1.0.1: {} + + pacote@18.0.6: + dependencies: + '@npmcli/git': 5.0.8 + '@npmcli/installed-package-contents': 2.1.0 + '@npmcli/package-json': 5.2.0 + '@npmcli/promise-spawn': 7.0.2 + '@npmcli/run-script': 8.1.0 + cacache: 18.0.4 + fs-minipass: 3.0.3 + minipass: 7.1.3 + npm-package-arg: 11.0.2 + 
npm-packlist: 8.0.2 + npm-pick-manifest: 9.1.0 + npm-registry-fetch: 17.1.0 + proc-log: 4.2.0 + promise-retry: 2.0.1 + sigstore: 2.3.1 + ssri: 10.0.6 + tar: 6.2.1 + transitivePeerDependencies: + - bluebird + - supports-color + + parent-module@1.0.1: + dependencies: + callsites: 3.1.0 + + parse-conflict-json@3.0.1: + dependencies: + json-parse-even-better-errors: 3.0.2 + just-diff: 6.0.2 + just-diff-apply: 5.5.0 + + parse-json@4.0.0: + dependencies: + error-ex: 1.3.4 + json-parse-better-errors: 1.0.2 + + parse-json@5.2.0: + dependencies: + '@babel/code-frame': 7.29.0 + error-ex: 1.3.4 + json-parse-even-better-errors: 2.3.1 + lines-and-columns: 1.2.4 + + parse-path@7.1.0: + dependencies: + protocols: 2.0.2 + + parse-url@8.1.0: + dependencies: + parse-path: 7.1.0 + + path-exists@3.0.0: {} + + path-exists@4.0.0: {} + + path-is-absolute@1.0.1: {} + + path-key@2.0.1: {} + + path-key@3.1.1: {} + + path-parse@1.0.7: {} + + path-scurry@1.11.1: + dependencies: + lru-cache: 10.4.3 + minipass: 7.1.3 + + path-type@3.0.0: + dependencies: + pify: 3.0.0 + + path-type@6.0.0: {} + + picocolors@1.1.1: {} + + picomatch@2.3.1: {} + + picomatch@4.0.3: {} + + pify@2.3.0: {} + + pify@3.0.0: {} + + pify@4.0.1: {} + + pify@5.0.0: {} + + pirates@4.0.7: {} + + pkg-dir@4.2.0: + dependencies: + find-up: 4.1.0 + + postcss-selector-parser@6.1.2: + dependencies: + cssesc: 3.0.0 + util-deprecate: 1.0.2 + + pretty-format@29.7.0: + dependencies: + '@jest/schemas': 29.6.3 + ansi-styles: 5.2.0 + react-is: 18.3.1 + + pretty-format@30.2.0: + dependencies: + '@jest/schemas': 30.0.5 + ansi-styles: 5.2.0 + react-is: 18.3.1 + + proc-log@4.2.0: {} + + process-nextick-args@2.0.1: {} + + proggy@2.0.0: {} + + promise-all-reject-late@1.0.1: {} + + promise-call-limit@3.0.2: {} + + promise-inflight@1.0.1: {} + + promise-retry@2.0.1: + dependencies: + err-code: 2.0.3 + retry: 0.12.0 + + promzard@1.0.2: + dependencies: + read: 3.0.1 + + protocols@2.0.2: {} + + proxy-from-env@1.1.0: {} + + pump@3.0.4: + dependencies: 
+ end-of-stream: 1.4.5 + once: 1.4.0 + + pure-rand@7.0.1: {} + + queue-microtask@1.2.3: {} + + quick-lru@4.0.1: {} + + randombytes@2.1.0: + dependencies: + safe-buffer: 5.2.1 + + react-is@18.3.1: {} + + read-cmd-shim@4.0.0: {} + + read-package-json-fast@3.0.2: + dependencies: + json-parse-even-better-errors: 3.0.2 + npm-normalize-package-bin: 3.0.1 + + read-pkg-up@3.0.0: + dependencies: + find-up: 2.1.0 + read-pkg: 3.0.0 + + read-pkg-up@7.0.1: + dependencies: + find-up: 4.1.0 + read-pkg: 5.2.0 + type-fest: 0.8.1 + + read-pkg@3.0.0: + dependencies: + load-json-file: 4.0.0 + normalize-package-data: 2.5.0 + path-type: 3.0.0 + + read-pkg@5.2.0: + dependencies: + '@types/normalize-package-data': 2.4.4 + normalize-package-data: 2.5.0 + parse-json: 5.2.0 + type-fest: 0.6.0 + + read@3.0.1: + dependencies: + mute-stream: 1.0.0 + + readable-stream@2.3.8: + dependencies: + core-util-is: 1.0.3 + inherits: 2.0.4 + isarray: 1.0.0 + process-nextick-args: 2.0.1 + safe-buffer: 5.1.2 + string_decoder: 1.1.1 + util-deprecate: 1.0.2 + + readable-stream@3.6.2: + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + + readdirp@3.6.0: + dependencies: + picomatch: 2.3.1 + + rechoir@0.6.2: + dependencies: + resolve: 1.22.11 + + rechoir@0.8.0: + dependencies: + resolve: 1.22.11 + + redent@3.0.0: + dependencies: + indent-string: 4.0.0 + strip-indent: 3.0.0 + + require-directory@2.1.1: {} + + require-from-string@2.0.2: {} + + resolve-cwd@3.0.0: + dependencies: + resolve-from: 5.0.0 + + resolve-from@4.0.0: {} + + resolve-from@5.0.0: {} + + resolve-pkg-maps@1.0.0: {} + + resolve.exports@2.0.3: {} + + resolve@1.22.11: + dependencies: + is-core-module: 2.16.1 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + + restore-cursor@3.1.0: + dependencies: + onetime: 5.1.2 + signal-exit: 3.0.7 + + retry@0.12.0: {} + + reusify@1.1.0: {} + + rimraf@4.4.1: + dependencies: + glob: 9.3.4 + + run-async@2.4.1: {} + + run-parallel@1.2.0: + dependencies: + queue-microtask: 
1.2.3 + + rxjs@7.8.2: + dependencies: + tslib: 2.8.1 + + safe-buffer@5.1.2: {} + + safe-buffer@5.2.1: {} + + safer-buffer@2.1.2: {} + + schema-utils@4.3.3: + dependencies: + '@types/json-schema': 7.0.15 + ajv: 8.18.0 + ajv-formats: 2.1.1(ajv@8.18.0) + ajv-keywords: 5.1.0(ajv@8.18.0) + + semver@5.7.2: {} + + semver@6.3.1: {} + + semver@7.7.4: {} + + serialize-javascript@6.0.2: + dependencies: + randombytes: 2.1.0 + + set-blocking@2.0.0: {} + + shallow-clone@3.0.1: + dependencies: + kind-of: 6.0.3 + + shebang-command@1.2.0: + dependencies: + shebang-regex: 1.0.0 + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@1.0.0: {} + + shebang-regex@3.0.0: {} + + shelljs@0.9.2: + dependencies: + execa: 1.0.0 + fast-glob: 3.3.3 + interpret: 1.4.0 + rechoir: 0.6.2 + + shx@0.4.0: + dependencies: + minimist: 1.2.8 + shelljs: 0.9.2 + + signal-exit@3.0.7: {} + + signal-exit@4.1.0: {} + + sigstore@2.3.1: + dependencies: + '@sigstore/bundle': 2.3.2 + '@sigstore/core': 1.1.0 + '@sigstore/protobuf-specs': 0.3.3 + '@sigstore/sign': 2.3.2 + '@sigstore/tuf': 2.3.4 + '@sigstore/verify': 1.2.1 + transitivePeerDependencies: + - supports-color + + slash@3.0.0: {} + + slash@5.1.0: {} + + smart-buffer@4.2.0: {} + + smol-toml@1.6.0: {} + + socks-proxy-agent@8.0.5: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3 + socks: 2.8.7 + transitivePeerDependencies: + - supports-color + + socks@2.8.7: + dependencies: + ip-address: 10.1.0 + smart-buffer: 4.2.0 + + sort-keys@2.0.0: + dependencies: + is-plain-obj: 1.1.0 + + source-list-map@2.0.1: {} + + source-map-support@0.5.13: + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + + source-map-support@0.5.21: + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + + source-map@0.6.1: {} + + source-map@0.7.6: {} + + spdx-correct@3.2.0: + dependencies: + spdx-expression-parse: 3.0.1 + spdx-license-ids: 3.0.23 + + spdx-exceptions@2.5.0: {} + + spdx-expression-parse@3.0.1: + dependencies: + spdx-exceptions: 2.5.0 + 
spdx-license-ids: 3.0.23 + + spdx-license-ids@3.0.23: {} + + split2@3.2.2: + dependencies: + readable-stream: 3.6.2 + + split@1.0.1: + dependencies: + through: 2.3.8 + + sprintf-js@1.0.3: {} + + ssri@10.0.6: + dependencies: + minipass: 7.1.3 + + stack-utils@2.0.6: + dependencies: + escape-string-regexp: 2.0.0 + + string-length@4.0.2: + dependencies: + char-regex: 1.0.2 + strip-ansi: 6.0.1 + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + emoji-regex: 9.2.2 + strip-ansi: 7.2.0 + + string_decoder@1.1.1: + dependencies: + safe-buffer: 5.1.2 + + string_decoder@1.3.0: + dependencies: + safe-buffer: 5.2.1 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-ansi@7.2.0: + dependencies: + ansi-regex: 6.2.2 + + strip-bom@3.0.0: {} + + strip-bom@4.0.0: {} + + strip-eof@1.0.0: {} + + strip-final-newline@2.0.0: {} + + strip-indent@3.0.0: + dependencies: + min-indent: 1.0.1 + + strip-json-comments@3.1.1: {} + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + supports-color@8.1.1: + dependencies: + has-flag: 4.0.0 + + supports-preserve-symlinks-flag@1.0.0: {} + + synckit@0.11.12: + dependencies: + '@pkgr/core': 0.2.9 + + tapable@2.3.0: {} + + tar-stream@2.2.0: + dependencies: + bl: 4.1.0 + end-of-stream: 1.4.5 + fs-constants: 1.0.0 + inherits: 2.0.4 + readable-stream: 3.6.2 + + tar@6.2.1: + dependencies: + chownr: 2.0.0 + fs-minipass: 2.1.0 + minipass: 5.0.0 + minizlib: 2.1.2 + mkdirp: 1.0.4 + yallist: 4.0.0 + + temp-dir@1.0.0: {} + + terser-webpack-plugin@5.3.17(esbuild@0.20.2)(webpack@5.105.4): + dependencies: + '@jridgewell/trace-mapping': 0.3.31 + jest-worker: 27.5.1 + schema-utils: 4.3.3 + terser: 5.46.0 + webpack: 5.105.4(esbuild@0.20.2)(webpack-cli@5.1.4) + optionalDependencies: + esbuild: 0.20.2 + + terser@5.46.0: + dependencies: + '@jridgewell/source-map': 0.3.11 + acorn: 8.16.0 + commander: 2.20.3 + 
source-map-support: 0.5.21 + + test-exclude@6.0.0: + dependencies: + '@istanbuljs/schema': 0.1.3 + glob: 7.2.3 + minimatch: 3.1.5 + + text-extensions@1.9.0: {} + + through2@2.0.5: + dependencies: + readable-stream: 2.3.8 + xtend: 4.0.2 + + through@2.3.8: {} + + tinyglobby@0.2.12: + dependencies: + fdir: 6.5.0(picomatch@4.0.3) + picomatch: 4.0.3 + + tmp@0.2.5: {} + + tmpl@1.0.5: {} + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + tr46@0.0.3: {} + + treeverse@3.0.0: {} + + trim-newlines@3.0.1: {} + + ts-jest@29.4.6(@babel/core@7.29.0)(@jest/transform@30.2.0)(@jest/types@30.2.0)(babel-jest@30.2.0(@babel/core@7.29.0))(esbuild@0.20.2)(jest-util@30.2.0)(jest@30.2.0(@types/node@22.19.15))(typescript@5.5.4): + dependencies: + bs-logger: 0.2.6 + fast-json-stable-stringify: 2.1.0 + handlebars: 4.7.8 + jest: 30.2.0(@types/node@22.19.15) + json5: 2.2.3 + lodash.memoize: 4.1.2 + make-error: 1.3.6 + semver: 7.7.4 + type-fest: 4.41.0 + typescript: 5.5.4 + yargs-parser: 21.1.1 + optionalDependencies: + '@babel/core': 7.29.0 + '@jest/transform': 30.2.0 + '@jest/types': 30.2.0 + babel-jest: 30.2.0(@babel/core@7.29.0) + esbuild: 0.20.2 + jest-util: 30.2.0 + + ts-loader@9.5.4(typescript@5.5.4)(webpack@5.105.4): + dependencies: + chalk: 4.1.2 + enhanced-resolve: 5.20.0 + micromatch: 4.0.8 + semver: 7.7.4 + source-map: 0.7.6 + typescript: 5.5.4 + webpack: 5.105.4(esbuild@0.20.2)(webpack-cli@5.1.4) + + tsconfig-paths@4.2.0: + dependencies: + json5: 2.2.3 + minimist: 1.2.8 + strip-bom: 3.0.0 + + tslib@1.14.1: {} + + tslib@2.8.1: {} + + tuf-js@2.2.1: + dependencies: + '@tufjs/models': 2.0.1 + debug: 4.4.3 + make-fetch-happen: 13.0.1 + transitivePeerDependencies: + - supports-color + + type-detect@4.0.8: {} + + type-fest@0.18.1: {} + + type-fest@0.21.3: {} + + type-fest@0.4.1: {} + + type-fest@0.6.0: {} + + type-fest@0.8.1: {} + + type-fest@4.41.0: {} + + typedarray@0.0.6: {} + + typescript@5.5.4: {} + + typical@4.0.0: {} + + uglify-js@3.19.3: + optional: true + + 
undici-types@6.21.0: {} + + unicorn-magic@0.3.0: {} + + unique-filename@3.0.0: + dependencies: + unique-slug: 4.0.0 + + unique-slug@4.0.0: + dependencies: + imurmurhash: 0.1.4 + + universal-user-agent@6.0.1: {} + + universalify@2.0.1: {} + + unrs-resolver@1.11.1: + dependencies: + napi-postinstall: 0.3.4 + optionalDependencies: + '@unrs/resolver-binding-android-arm-eabi': 1.11.1 + '@unrs/resolver-binding-android-arm64': 1.11.1 + '@unrs/resolver-binding-darwin-arm64': 1.11.1 + '@unrs/resolver-binding-darwin-x64': 1.11.1 + '@unrs/resolver-binding-freebsd-x64': 1.11.1 + '@unrs/resolver-binding-linux-arm-gnueabihf': 1.11.1 + '@unrs/resolver-binding-linux-arm-musleabihf': 1.11.1 + '@unrs/resolver-binding-linux-arm64-gnu': 1.11.1 + '@unrs/resolver-binding-linux-arm64-musl': 1.11.1 + '@unrs/resolver-binding-linux-ppc64-gnu': 1.11.1 + '@unrs/resolver-binding-linux-riscv64-gnu': 1.11.1 + '@unrs/resolver-binding-linux-riscv64-musl': 1.11.1 + '@unrs/resolver-binding-linux-s390x-gnu': 1.11.1 + '@unrs/resolver-binding-linux-x64-gnu': 1.11.1 + '@unrs/resolver-binding-linux-x64-musl': 1.11.1 + '@unrs/resolver-binding-wasm32-wasi': 1.11.1 + '@unrs/resolver-binding-win32-arm64-msvc': 1.11.1 + '@unrs/resolver-binding-win32-ia32-msvc': 1.11.1 + '@unrs/resolver-binding-win32-x64-msvc': 1.11.1 + + upath@2.0.1: {} + + update-browserslist-db@1.2.3(browserslist@4.28.1): + dependencies: + browserslist: 4.28.1 + escalade: 3.2.0 + picocolors: 1.1.1 + + util-deprecate@1.0.2: {} + + uuid@10.0.0: {} + + uuid@8.3.2: {} + + v8-to-istanbul@9.3.0: + dependencies: + '@jridgewell/trace-mapping': 0.3.31 + '@types/istanbul-lib-coverage': 2.0.6 + convert-source-map: 2.0.0 + + validate-npm-package-license@3.0.4: + dependencies: + spdx-correct: 3.2.0 + spdx-expression-parse: 3.0.1 + + validate-npm-package-name@5.0.1: {} + + vscode-jsonrpc@9.0.0-next.11: {} + + vscode-languageserver-protocol@3.17.6-next.17: + dependencies: + vscode-jsonrpc: 9.0.0-next.11 + vscode-languageserver-types: 3.17.6-next.6 + + 
vscode-languageserver-textdocument@1.0.12: {} + + vscode-languageserver-types@3.17.6-next.6: {} + + vscode-languageserver@10.0.0-next.17: + dependencies: + vscode-languageserver-protocol: 3.17.6-next.17 + + vscode-uri@3.1.0: {} + + walk-up-path@3.0.1: {} + + walker@1.0.8: + dependencies: + makeerror: 1.0.12 + + watchpack@2.5.1: + dependencies: + glob-to-regexp: 0.4.1 + graceful-fs: 4.2.11 + + wcwidth@1.0.1: + dependencies: + defaults: 1.0.4 + + webidl-conversions@3.0.1: {} + + webpack-cli@5.1.4(webpack@5.105.4): + dependencies: + '@discoveryjs/json-ext': 0.5.7 + '@webpack-cli/configtest': 2.1.1(webpack-cli@5.1.4)(webpack@5.105.4) + '@webpack-cli/info': 2.0.2(webpack-cli@5.1.4)(webpack@5.105.4) + '@webpack-cli/serve': 2.0.5(webpack-cli@5.1.4)(webpack@5.105.4) + colorette: 2.0.20 + commander: 10.0.1 + cross-spawn: 7.0.6 + envinfo: 7.21.0 + fastest-levenshtein: 1.0.16 + import-local: 3.2.0 + interpret: 3.1.1 + rechoir: 0.8.0 + webpack: 5.105.4(esbuild@0.20.2)(webpack-cli@5.1.4) + webpack-merge: 5.10.0 + + webpack-merge@5.10.0: + dependencies: + clone-deep: 4.0.1 + flat: 5.0.2 + wildcard: 2.0.1 + + webpack-sources@1.4.3: + dependencies: + source-list-map: 2.0.1 + source-map: 0.6.1 + + webpack-sources@3.3.4: {} + + webpack@5.105.4(esbuild@0.20.2)(webpack-cli@5.1.4): + dependencies: + '@types/eslint-scope': 3.7.7 + '@types/estree': 1.0.8 + '@types/json-schema': 7.0.15 + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/wasm-edit': 1.14.1 + '@webassemblyjs/wasm-parser': 1.14.1 + acorn: 8.16.0 + acorn-import-phases: 1.0.4(acorn@8.16.0) + browserslist: 4.28.1 + chrome-trace-event: 1.0.4 + enhanced-resolve: 5.20.0 + es-module-lexer: 2.0.0 + eslint-scope: 5.1.1 + events: 3.3.0 + glob-to-regexp: 0.4.1 + graceful-fs: 4.2.11 + json-parse-even-better-errors: 2.3.1 + loader-runner: 4.3.1 + mime-types: 2.1.35 + neo-async: 2.6.2 + schema-utils: 4.3.3 + tapable: 2.3.0 + terser-webpack-plugin: 5.3.17(esbuild@0.20.2)(webpack@5.105.4) + watchpack: 2.5.1 + webpack-sources: 3.3.4 + 
optionalDependencies: + webpack-cli: 5.1.4(webpack@5.105.4) + transitivePeerDependencies: + - '@swc/core' + - esbuild + - uglify-js + + whatwg-url@5.0.0: + dependencies: + tr46: 0.0.3 + webidl-conversions: 3.0.1 + + which@1.3.1: + dependencies: + isexe: 2.0.0 + + which@2.0.2: + dependencies: + isexe: 2.0.0 + + which@4.0.0: + dependencies: + isexe: 3.1.5 + + wide-align@1.1.5: + dependencies: + string-width: 4.2.3 + + wildcard@2.0.1: {} + + word-wrap@1.2.5: {} + + wordwrap@1.0.0: {} + + wrap-ansi@6.2.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.3 + string-width: 5.1.2 + strip-ansi: 7.2.0 + + wrappy@1.0.2: {} + + write-file-atomic@2.4.3: + dependencies: + graceful-fs: 4.2.11 + imurmurhash: 0.1.4 + signal-exit: 3.0.7 + + write-file-atomic@5.0.1: + dependencies: + imurmurhash: 0.1.4 + signal-exit: 4.1.0 + + write-json-file@3.2.0: + dependencies: + detect-indent: 5.0.0 + graceful-fs: 4.2.11 + make-dir: 2.1.0 + pify: 4.0.1 + sort-keys: 2.0.0 + write-file-atomic: 2.4.3 + + write-pkg@4.0.0: + dependencies: + sort-keys: 2.0.0 + type-fest: 0.4.1 + write-json-file: 3.2.0 + + xml@1.0.1: {} + + xtend@4.0.2: {} + + y18n@5.0.8: {} + + yallist@3.1.1: {} + + yallist@4.0.0: {} + + yaml@2.8.2: {} + + yargs-parser@20.2.9: {} + + yargs-parser@21.1.1: {} + + yargs@16.2.0: + dependencies: + cliui: 7.0.4 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 20.2.9 + + yargs@17.7.2: + dependencies: + cliui: 8.0.1 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + + yocto-queue@0.1.0: {} diff --git a/python-parser/pnpm-workspace.yaml b/python-parser/pnpm-workspace.yaml new file mode 100644 index 00000000..26146707 --- /dev/null +++ 
b/python-parser/pnpm-workspace.yaml @@ -0,0 +1,4 @@ +packages: + - 'packages/*' +onlyBuiltDependencies: + - esbuild diff --git a/python-parser/tsconfig.json b/python-parser/tsconfig.json new file mode 100644 index 00000000..101ac454 --- /dev/null +++ b/python-parser/tsconfig.json @@ -0,0 +1,27 @@ +{ + "compilerOptions": { + "target": "es2020", + "module": "node16", + "lib": [ + "es2020" + ], + "skipLibCheck": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node16", + "resolveJsonModule": true, + "sourceMap": true, + "outDir": "./out", + "noImplicitReturns": true, + "noImplicitOverride": true, + "checkJs": true, + "experimentalDecorators": true + }, + "exclude": [ + "node_modules", + "dist", + "out" + ] +}